Update SetVector to rely on the underlying set's insert to return a pair<iterator, bool>

This is to be consistent with StringSet and ultimately with the standard library's associative container insert function. This led to updating SmallSet::insert to return pair<iterator, bool>, and then to updating SmallPtrSet::insert to return pair<iterator, bool>, and then to updating all the existing users of those functions...

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@222334 91177308-0d34-0410-b5e6-96231b3b80d8

commit 5401ba7099
parent 0e8675a621
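
The effect on callers is mechanical: every call site that used the old boolean return of insert now reads the .second member of the returned pair. A minimal sketch of the migration, using a hypothetical helper (markVisited is illustrative, not part of the patch):

#include "llvm/ADT/SmallPtrSet.h"

// Before this commit, SmallPtrSet::insert returned bool:
//   if (Visited.insert(P)) { /* P was newly inserted */ }
// After it, insert returns std::pair<iterator, bool>, matching
// std::set::insert, so callers test .second instead:
static bool markVisited(llvm::SmallPtrSet<const int *, 8> &Visited,
                        const int *P) {
  return Visited.insert(P).second; // true iff P was not already in the set
}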
@@ -57,7 +57,7 @@ public:
   // Return true if edge destination should be visited.
   template<typename NodeType>
   bool insertEdge(NodeType *From, NodeType *To) {
-    return Visited.insert(To);
+    return Visited.insert(To).second;
   }

   // Called after all children of BB have been visited.

@@ -100,7 +100,7 @@ public:
   /// \brief Insert a new element into the SetVector.
   /// \returns true iff the element was inserted into the SetVector.
   bool insert(const value_type &X) {
-    bool result = set_.insert(X);
+    bool result = set_.insert(X).second;
     if (result)
       vector_.push_back(X);
     return result;

@@ -110,7 +110,7 @@ public:
   template<typename It>
   void insert(It Start, It End) {
     for (; Start != End; ++Start)
-      if (set_.insert(*Start))
+      if (set_.insert(*Start).second)
         vector_.push_back(*Start);
   }

@@ -100,7 +100,7 @@ protected:
  /// insert_imp - This returns true if the pointer was new to the set, false if
  /// it was already in the set. This is hidden from the client so that the
  /// derived class can check that the right type of pointer is passed in.
-  bool insert_imp(const void * Ptr);
+  std::pair<const void *const *, bool> insert_imp(const void *Ptr);

  /// erase_imp - If the set contains the specified pointer, remove it and
  /// return true, otherwise return false. This is hidden from the client so

@@ -253,10 +253,14 @@ protected:
     : SmallPtrSetImplBase(SmallStorage, SmallSize) {}

 public:
+  typedef SmallPtrSetIterator<PtrType> iterator;
+  typedef SmallPtrSetIterator<PtrType> const_iterator;
+
   /// insert - This returns true if the pointer was new to the set, false if it
   /// was already in the set.
-  bool insert(PtrType Ptr) {
-    return insert_imp(PtrTraits::getAsVoidPointer(Ptr));
+  std::pair<iterator, bool> insert(PtrType Ptr) {
+    auto p = insert_imp(PtrTraits::getAsVoidPointer(Ptr));
+    return std::make_pair(iterator(p.first, CurArray + CurArraySize), p.second);
   }

   /// erase - If the set contains the specified pointer, remove it and return

@@ -276,8 +280,6 @@ public:
       insert(*I);
   }

-  typedef SmallPtrSetIterator<PtrType> iterator;
-  typedef SmallPtrSetIterator<PtrType> const_iterator;
   inline iterator begin() const {
     return iterator(CurArray, CurArray+CurArraySize);
   }
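
The iterator typedefs move above insert because the new signature names iterator in its return type. Both halves of the returned pair are now usable by callers; a small sketch under the new API (names are illustrative, not from the patch):

#include "llvm/ADT/SmallPtrSet.h"

// The returned pair carries an iterator to the element (first) and a flag
// saying whether the element was newly inserted (second).
static int *insertAndGet(llvm::SmallPtrSet<int *, 4> &S, int *P) {
  auto Res = S.insert(P);                   // std::pair<iterator, bool>
  return Res.second ? *Res.first : nullptr; // non-null only if P was new
}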
@@ -14,6 +14,7 @@
 #ifndef LLVM_ADT_SMALLSET_H
 #define LLVM_ADT_SMALLSET_H

+#include "llvm/ADT/None.h"
 #include "llvm/ADT/SmallPtrSet.h"
 #include "llvm/ADT/SmallVector.h"
 #include <set>

@@ -60,16 +61,21 @@ public:

   /// insert - Insert an element into the set if it isn't already there.
   /// Returns true if the element is inserted (it was not in the set before).
-  bool insert(const T &V) {
+  /// The first value of the returned pair is unused and provided for
+  /// partial compatibility with the standard library self-associative container
+  /// concept.
+  // FIXME: Add iterators that abstract over the small and large form, and then
+  // return those here.
+  std::pair<NoneType, bool> insert(const T &V) {
     if (!isSmall())
-      return Set.insert(V).second;
+      return std::make_pair(None, Set.insert(V).second);

     VIterator I = vfind(V);
     if (I != Vector.end()) // Don't reinsert if it already exists.
-      return false;
+      return std::make_pair(None, false);
     if (Vector.size() < N) {
       Vector.push_back(V);
-      return true;
+      return std::make_pair(None, true);
     }

     // Otherwise, grow from vector to set.

@@ -78,7 +84,7 @@ public:
       Vector.pop_back();
     }
     Set.insert(V);
-    return true;
+    return std::make_pair(None, true);
   }

   template <typename IterT>
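
SmallSet cannot hand back a real iterator yet because it has no iterator type that abstracts over its small (SmallVector) and large (std::set) representations, hence the FIXME above; the pair's first member is a NoneType placeholder and only .second carries information. A hedged sketch of a caller (function name illustrative):

#include "llvm/ADT/SmallSet.h"

// Only .second of SmallSet::insert's result is meaningful; .first is the
// None placeholder until SmallSet grows real iterators (see FIXME above).
static bool rememberId(llvm::SmallSet<unsigned, 8> &Seen, unsigned Id) {
  return Seen.insert(Id).second; // true iff Id had not been seen before
}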
@@ -172,7 +172,7 @@ ForwardDominanceFrontierBase<BlockT>::calculate(const DomTreeT &DT,
     DomSetType &S = this->Frontiers[currentBB];

     // Visit each block only once.
-    if (visited.insert(currentBB)) {
+    if (visited.insert(currentBB).second) {
       // Loop over CFG successors to calculate DFlocal[currentNode]
       for (auto SI = BlockTraits::child_begin(currentBB),
                 SE = BlockTraits::child_end(currentBB);

@@ -577,7 +577,7 @@ namespace llvm {
     SmallPtrSet<const SCEV *, 8> Visited;

     void push(const SCEV *S) {
-      if (Visited.insert(S) && Visitor.follow(S))
+      if (Visited.insert(S).second && Visitor.follow(S))
         Worklist.push_back(S);
     }
   public:

@@ -303,7 +303,7 @@ public:
   void recordSplitCriticalEdge(MachineBasicBlock *FromBB,
                                MachineBasicBlock *ToBB,
                                MachineBasicBlock *NewBB) {
-    bool Inserted = NewBBs.insert(NewBB);
+    bool Inserted = NewBBs.insert(NewBB).second;
     (void)Inserted;
     assert(Inserted &&
            "A basic block inserted via edge splitting cannot appear twice");

@@ -125,7 +125,7 @@ Eval(DominatorTreeBase<typename GraphT::NodeType>& DT,
     typename GraphT::NodeType* VAncestor = DT.Vertex[VInfo.Parent];

     // Process Ancestor first
-    if (Visited.insert(VAncestor) && VInfo.Parent >= LastLinked) {
+    if (Visited.insert(VAncestor).second && VInfo.Parent >= LastLinked) {
       Work.push_back(VAncestor);
       continue;
     }

@@ -614,7 +614,7 @@ BasicAliasAnalysis::pointsToConstantMemory(const Location &Loc, bool OrLocal) {
   Worklist.push_back(Loc.Ptr);
   do {
     const Value *V = GetUnderlyingObject(Worklist.pop_back_val(), DL);
-    if (!Visited.insert(V)) {
+    if (!Visited.insert(V).second) {
       Visited.clear();
       return AliasAnalysis::pointsToConstantMemory(Loc, OrLocal);
     }

@@ -1235,7 +1235,7 @@ BasicAliasAnalysis::aliasPHI(const PHINode *PN, uint64_t PNSize,
       // sides are PHI nodes. In which case, this is O(m x n) time where 'm'
       // and 'n' are the number of PHI sources.
       return MayAlias;
-    if (UniqueSrc.insert(PV1))
+    if (UniqueSrc.insert(PV1).second)
       V1Srcs.push_back(PV1);
   }

@@ -45,7 +45,7 @@ void llvm::FindFunctionBackedges(const Function &F,
     bool FoundNew = false;
     while (I != succ_end(ParentBB)) {
       BB = *I++;
-      if (Visited.insert(BB)) {
+      if (Visited.insert(BB).second) {
         FoundNew = true;
         break;
       }

@@ -141,7 +141,7 @@ static bool isPotentiallyReachableInner(SmallVectorImpl<BasicBlock *> &Worklist,
   SmallSet<const BasicBlock*, 64> Visited;
   do {
     BasicBlock *BB = Worklist.pop_back_val();
-    if (!Visited.insert(BB))
+    if (!Visited.insert(BB).second)
       continue;
     if (BB == StopBB)
       return true;

@@ -239,7 +239,7 @@ void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker) {
     if (Count++ >= Threshold)
       return Tracker->tooManyUses();

-    if (Visited.insert(&UU))
+    if (Visited.insert(&UU).second)
       if (Tracker->shouldExplore(&UU))
         Worklist.push_back(&UU);
   }

@@ -40,7 +40,7 @@ static void completeEphemeralValues(SmallVector<const Value *, 16> &WorkSet,
     const Value *V = WorkSet.front();
     WorkSet.erase(WorkSet.begin());

-    if (!Visited.insert(V))
+    if (!Visited.insert(V).second)
       continue;

     // If all uses of this value are ephemeral, then so is this value.

@@ -981,7 +981,7 @@ ConstantFoldConstantExpressionImpl(const ConstantExpr *CE, const DataLayout *TD,
     // Recursively fold the ConstantExpr's operands. If we have already folded
     // a ConstantExpr, we don't have to process it again.
     if (ConstantExpr *NewCE = dyn_cast<ConstantExpr>(NewC)) {
-      if (FoldedOps.insert(NewCE))
+      if (FoldedOps.insert(NewCE).second)
        NewC = ConstantFoldConstantExpressionImpl(NewCE, TD, TLI, FoldedOps);
     }
     Ops.push_back(NewC);

@@ -977,7 +977,7 @@ ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
       break;
     }
     assert(V->getType()->isPointerTy() && "Unexpected operand type!");
-  } while (Visited.insert(V));
+  } while (Visited.insert(V).second);

   Type *IntPtrTy = DL->getIntPtrType(V->getContext());
   return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));

@@ -115,7 +115,7 @@ bool IVUsers::AddUsersImpl(Instruction *I,
                            SmallPtrSetImpl<Loop*> &SimpleLoopNests) {
   // Add this IV user to the Processed set before returning false to ensure that
   // all IV users are members of the set. See IVUsers::isIVUserOrOperand.
-  if (!Processed.insert(I))
+  if (!Processed.insert(I).second)
     return true; // Instruction already handled.

   if (!SE->isSCEVable(I->getType()))

@@ -145,7 +145,7 @@ bool IVUsers::AddUsersImpl(Instruction *I,
   SmallPtrSet<Instruction *, 4> UniqueUsers;
   for (Use &U : I->uses()) {
     Instruction *User = cast<Instruction>(U.getUser());
-    if (!UniqueUsers.insert(User))
+    if (!UniqueUsers.insert(User).second)
       continue;

     // Do not infinitely recurse on PHI nodes.

@@ -631,7 +631,7 @@ static Constant *stripAndComputeConstantOffsets(const DataLayout *DL,
     }
     assert(V->getType()->getScalarType()->isPointerTy() &&
            "Unexpected operand type!");
-  } while (Visited.insert(V));
+  } while (Visited.insert(V).second);

   Constant *OffsetIntPtr = ConstantInt::get(IntPtrTy, Offset);
   if (V->getType()->isVectorTy())

@@ -48,7 +48,7 @@ static void findCallees(
     }

     for (Value *Op : C->operand_values())
-      if (Visited.insert(cast<Constant>(Op)))
+      if (Visited.insert(cast<Constant>(Op)).second)
        Worklist.push_back(cast<Constant>(Op));
   }
 }

@@ -66,7 +66,7 @@ LazyCallGraph::Node::Node(LazyCallGraph &G, Function &F)
     for (Instruction &I : BB)
       for (Value *Op : I.operand_values())
         if (Constant *C = dyn_cast<Constant>(Op))
-          if (Visited.insert(C))
+          if (Visited.insert(C).second)
            Worklist.push_back(C);

   // We've collected all the constant (and thus potentially function or

@@ -113,7 +113,7 @@ LazyCallGraph::LazyCallGraph(Module &M) : NextDFSNumber(0) {
   SmallPtrSet<Constant *, 16> Visited;
   for (GlobalVariable &GV : M.globals())
     if (GV.hasInitializer())
-      if (Visited.insert(GV.getInitializer()))
+      if (Visited.insert(GV.getInitializer()).second)
        Worklist.push_back(GV.getInitializer());

   DEBUG(dbgs() << " Adding functions referenced by global initializers to the "

@@ -688,7 +688,7 @@ static void printNodes(raw_ostream &OS, LazyCallGraph::Node &N,
                        SmallPtrSetImpl<LazyCallGraph::Node *> &Printed) {
   // Recurse depth first through the nodes.
   for (LazyCallGraph::Node &ChildN : N)
-    if (Printed.insert(&ChildN))
+    if (Printed.insert(&ChildN).second)
       printNodes(OS, ChildN, Printed);

   OS << " Call edges in function: " << N.getFunction().getName() << "\n";

@@ -717,7 +717,7 @@ PreservedAnalyses LazyCallGraphPrinterPass::run(Module *M,

   SmallPtrSet<LazyCallGraph::Node *, 16> Printed;
   for (LazyCallGraph::Node &N : G)
-    if (Printed.insert(&N))
+    if (Printed.insert(&N).second)
       printNodes(OS, N, Printed);

   for (LazyCallGraph::SCC &SCC : G.postorder_sccs())

@@ -631,7 +631,7 @@ Value *Lint::findValue(Value *V, bool OffsetOk) const {
 Value *Lint::findValueImpl(Value *V, bool OffsetOk,
                            SmallPtrSetImpl<Value *> &Visited) const {
   // Detect self-referential values.
-  if (!Visited.insert(V))
+  if (!Visited.insert(V).second)
     return UndefValue::get(V->getType());

   // TODO: Look through sext or zext cast, when the result is known to

@@ -645,7 +645,8 @@ Value *Lint::findValueImpl(Value *V, bool OffsetOk,
     BasicBlock *BB = L->getParent();
     SmallPtrSet<BasicBlock *, 4> VisitedBlocks;
     for (;;) {
-      if (!VisitedBlocks.insert(BB)) break;
+      if (!VisitedBlocks.insert(BB).second)
+        break;
       if (Value *U = FindAvailableLoadedValue(L->getPointerOperand(),
                                               BB, BBI, 6, AA))
         return findValueImpl(U, OffsetOk, Visited);

@@ -416,7 +416,7 @@ SizeOffsetType ObjectSizeOffsetVisitor::compute(Value *V) {
   if (Instruction *I = dyn_cast<Instruction>(V)) {
     // If we have already seen this instruction, bail out. Cycles can happen in
     // unreachable code after constant propagation.
-    if (!SeenInsts.insert(I))
+    if (!SeenInsts.insert(I).second)
       return unknown();

     if (GEPOperator *GEP = dyn_cast<GEPOperator>(V))

@@ -652,7 +652,7 @@ SizeOffsetEvalType ObjectSizeOffsetEvaluator::compute_(Value *V) {
   // Record the pointers that were handled in this run, so that they can be
   // cleaned later if something fails. We also use this set to break cycles that
   // can occur in dead code.
-  if (!SeenVals.insert(V)) {
+  if (!SeenVals.insert(V).second) {
     Result = unknown();
   } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
     Result = visitGEPOperator(*GEP);

@@ -776,7 +776,7 @@ MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
     DirtyBlocks.pop_back();

     // Already processed this block?
-    if (!Visited.insert(DirtyBB))
+    if (!Visited.insert(DirtyBB).second)
       continue;

     // Do a binary search to see if we already have an entry for this block in

@@ -17,7 +17,7 @@ using namespace llvm;

 void detail::PtrUseVisitorBase::enqueueUsers(Instruction &I) {
   for (Use &U : I.uses()) {
-    if (VisitedUses.insert(&U)) {
+    if (VisitedUses.insert(&U).second) {
       UseToVisit NewU = {
         UseToVisit::UseAndIsOffsetKnownPair(&U, IsOffsetKnown),
         Offset
@@ -3395,7 +3395,8 @@ ScalarEvolution::ForgetSymbolicName(Instruction *PN, const SCEV *SymName) {
   Visited.insert(PN);
   while (!Worklist.empty()) {
     Instruction *I = Worklist.pop_back_val();
-    if (!Visited.insert(I)) continue;
+    if (!Visited.insert(I).second)
+      continue;

     ValueExprMapType::iterator It =
       ValueExprMap.find_as(static_cast<Value *>(I));

@@ -4593,7 +4594,8 @@ ScalarEvolution::getBackedgeTakenInfo(const Loop *L) {
     SmallPtrSet<Instruction *, 8> Visited;
     while (!Worklist.empty()) {
       Instruction *I = Worklist.pop_back_val();
-      if (!Visited.insert(I)) continue;
+      if (!Visited.insert(I).second)
+        continue;

       ValueExprMapType::iterator It =
         ValueExprMap.find_as(static_cast<Value *>(I));

@@ -4645,7 +4647,8 @@ void ScalarEvolution::forgetLoop(const Loop *L) {
   SmallPtrSet<Instruction *, 8> Visited;
   while (!Worklist.empty()) {
     Instruction *I = Worklist.pop_back_val();
-    if (!Visited.insert(I)) continue;
+    if (!Visited.insert(I).second)
+      continue;

     ValueExprMapType::iterator It =
       ValueExprMap.find_as(static_cast<Value *>(I));

@@ -4679,7 +4682,8 @@ void ScalarEvolution::forgetValue(Value *V) {
   SmallPtrSet<Instruction *, 8> Visited;
   while (!Worklist.empty()) {
     I = Worklist.pop_back_val();
-    if (!Visited.insert(I)) continue;
+    if (!Visited.insert(I).second)
+      continue;

     ValueExprMapType::iterator It =
       ValueExprMap.find_as(static_cast<Value *>(I));

@@ -7792,7 +7796,7 @@ void ScalarEvolution::SCEVCallbackVH::allUsesReplacedWith(Value *V) {
     // that until everything else is done.
     if (U == Old)
       continue;
-    if (!Visited.insert(U))
+    if (!Visited.insert(U).second)
       continue;
     if (PHINode *PN = dyn_cast<PHINode>(U))
       SE->ConstantEvolutionLoopExitValue.erase(PN);

@@ -1443,7 +1443,7 @@ Value *SCEVExpander::visitAddRecExpr(const SCEVAddRecExpr *S) {
     Constant *One = ConstantInt::get(Ty, 1);
     for (pred_iterator HPI = HPB; HPI != HPE; ++HPI) {
       BasicBlock *HP = *HPI;
-      if (!PredSeen.insert(HP)) {
+      if (!PredSeen.insert(HP).second) {
         // There must be an incoming value for each predecessor, even the
         // duplicates!
         CanonicalIV->addIncoming(CanonicalIV->getIncomingValueForBlock(HP), HP);

@@ -363,7 +363,7 @@ template <typename T> class StratifiedSetsBuilder {
     SmallSet<StratifiedIndex, 16> Visited;
     for (unsigned I = 0, E = Links.size(); I < E; ++I) {
       auto CurrentIndex = getHighestParentAbove(I);
-      if (!Visited.insert(CurrentIndex)) {
+      if (!Visited.insert(CurrentIndex).second) {
         continue;
       }

@@ -331,7 +331,7 @@ static bool isEphemeralValueOf(Instruction *I, const Value *E) {

   while (!WorkSet.empty()) {
     const Value *V = WorkSet.pop_back_val();
-    if (!Visited.insert(V))
+    if (!Visited.insert(V).second)
       continue;

     // If all uses of this value are ephemeral, then so is this value.

@@ -2405,7 +2405,7 @@ static uint64_t GetStringLengthH(Value *V, SmallPtrSetImpl<PHINode*> &PHIs) {
   // If this is a PHI node, there are two cases: either we have already seen it
   // or we haven't.
   if (PHINode *PN = dyn_cast<PHINode>(V)) {
-    if (!PHIs.insert(PN))
+    if (!PHIs.insert(PN).second)
      return ~0ULL; // already in the set.

     // If it was new, see if all the input strings are the same length.

@@ -2499,7 +2499,7 @@ llvm::GetUnderlyingObjects(Value *V,
     Value *P = Worklist.pop_back_val();
     P = GetUnderlyingObject(P, TD, MaxLookup);

-    if (!Visited.insert(P))
+    if (!Visited.insert(P).second)
       continue;

     if (SelectInst *SI = dyn_cast<SelectInst>(P)) {

@@ -3735,7 +3735,7 @@ bool LLParser::ParseSwitch(Instruction *&Inst, PerFunctionState &PFS) {
         ParseTypeAndBasicBlock(DestBB, PFS))
       return true;

-    if (!SeenCases.insert(Constant))
+    if (!SeenCases.insert(Constant).second)
       return Error(CondLoc, "duplicate case value in switch");
     if (!isa<ConstantInt>(Constant))
       return Error(CondLoc, "case value is not a constant integer");
@@ -256,7 +256,7 @@ static void AntiDepEdges(const SUnit *SU, std::vector<const SDep*>& Edges) {
   for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
        P != PE; ++P) {
     if ((P->getKind() == SDep::Anti) || (P->getKind() == SDep::Output)) {
-      if (RegSet.insert(P->getReg()))
+      if (RegSet.insert(P->getReg()).second)
        Edges.push_back(&*P);
     }
   }

@@ -1167,7 +1167,8 @@ void AsmPrinter::EmitJumpTableInfo() {
       const MCExpr *Base = TLI->getPICJumpTableRelocBaseExpr(MF,JTI,OutContext);
       for (unsigned ii = 0, ee = JTBBs.size(); ii != ee; ++ii) {
         const MachineBasicBlock *MBB = JTBBs[ii];
-        if (!EmittedSets.insert(MBB)) continue;
+        if (!EmittedSets.insert(MBB).second)
+          continue;

         // .set LJTSet, LBB32-base
         const MCExpr *LHS =

@@ -990,7 +990,7 @@ DwarfDebug::collectVariableInfo(DwarfCompileUnit &TheCU, DISubprogram SP,
   for (unsigned i = 0, e = Variables.getNumElements(); i != e; ++i) {
     DIVariable DV(Variables.getElement(i));
     assert(DV.isVariable());
-    if (!Processed.insert(DV))
+    if (!Processed.insert(DV).second)
       continue;
     if (LexicalScope *Scope = LScopes.findLexicalScope(DV.getContext())) {
       ensureAbstractVariableIsCreatedIfScoped(DV, Scope->getScopeNode());

@@ -1287,7 +1287,7 @@ void DwarfDebug::endFunction(const MachineFunction *MF) {
   for (unsigned i = 0, e = Variables.getNumElements(); i != e; ++i) {
     DIVariable DV(Variables.getElement(i));
     assert(DV && DV.isVariable());
-    if (!ProcessedVars.insert(DV))
+    if (!ProcessedVars.insert(DV).second)
       continue;
     ensureAbstractVariableIsCreated(DV, DV.getContext());
     assert(LScopes.getAbstractScopesList().size() == NumAbstractScopes

@@ -916,7 +916,7 @@ bool BranchFolder::TailMergeBlocks(MachineFunction &MF) {
         continue;

       // Visit each predecessor only once.
-      if (!UniquePreds.insert(PBB))
+      if (!UniquePreds.insert(PBB).second)
         continue;

       // Skip blocks which may jump to a landing pad. Can't tail merge these.

@@ -120,7 +120,7 @@ VirtRegAuxInfo::calculateSpillWeightAndHint(LiveInterval &li) {
     numInstr++;
     if (mi->isIdentityCopy() || mi->isImplicitDef() || mi->isDebugValue())
       continue;
-    if (!visited.insert(mi))
+    if (!visited.insert(mi).second)
      continue;

     float weight = 1.0f;

@@ -1001,7 +1001,7 @@ bool CodeGenPrepare::DupRetToEnableTailCallOpts(BasicBlock *BB) {
   } else {
     SmallPtrSet<BasicBlock*, 4> VisitedBBs;
     for (pred_iterator PI = pred_begin(BB), PE = pred_end(BB); PI != PE; ++PI) {
-      if (!VisitedBBs.insert(*PI))
+      if (!VisitedBBs.insert(*PI).second)
         continue;

       BasicBlock::InstListType &InstList = (*PI)->getInstList();

@@ -2401,7 +2401,7 @@ static bool FindAllMemoryUses(Instruction *I,
                               SmallPtrSetImpl<Instruction*> &ConsideredInsts,
                               const TargetLowering &TLI) {
   // If we already considered this instruction, we're done.
-  if (!ConsideredInsts.insert(I))
+  if (!ConsideredInsts.insert(I).second)
     return false;

   // If this is an obviously unfoldable instruction, bail out.

@@ -2615,7 +2615,7 @@ bool CodeGenPrepare::OptimizeMemoryInst(Instruction *MemoryInst, Value *Addr,
     worklist.pop_back();

     // Break use-def graph loops.
-    if (!Visited.insert(V)) {
+    if (!Visited.insert(V).second) {
       Consensus = nullptr;
       break;
     }

@@ -245,7 +245,7 @@ bool SSAIfConv::canSpeculateInstrs(MachineBasicBlock *MBB) {
       MachineInstr *DefMI = MRI->getVRegDef(Reg);
       if (!DefMI || DefMI->getParent() != Head)
         continue;
-      if (InsertAfter.insert(DefMI))
+      if (InsertAfter.insert(DefMI).second)
         DEBUG(dbgs() << "BB#" << MBB->getNumber() << " depends on " << *DefMI);
       if (DefMI->isTerminator()) {
         DEBUG(dbgs() << "Can't insert instructions below terminator.\n");

@@ -823,7 +823,7 @@ void InlineSpiller::markValueUsed(LiveInterval *LI, VNInfo *VNI) {
   WorkList.push_back(std::make_pair(LI, VNI));
   do {
     std::tie(LI, VNI) = WorkList.pop_back_val();
-    if (!UsedValues.insert(VNI))
+    if (!UsedValues.insert(VNI).second)
       continue;

     if (VNI->isPHIDef()) {

@@ -206,7 +206,7 @@ void LiveRange::RenumberValues() {
   valnos.clear();
   for (const_iterator I = begin(), E = end(); I != E; ++I) {
     VNInfo *VNI = I->valno;
-    if (!Seen.insert(VNI))
+    if (!Seen.insert(VNI).second)
       continue;
     assert(!VNI->isUnused() && "Unused valno used by live segment");
     VNI->id = (unsigned)valnos.size();

@@ -379,12 +379,13 @@ bool LiveIntervals::shrinkToUses(LiveInterval *li,
     (void)ExtVNI;
     assert(ExtVNI == VNI && "Unexpected existing value number");
     // Is this a PHIDef we haven't seen before?
-    if (!VNI->isPHIDef() || VNI->def != BlockStart || !UsedPHIs.insert(VNI))
+    if (!VNI->isPHIDef() || VNI->def != BlockStart ||
+        !UsedPHIs.insert(VNI).second)
       continue;
     // The PHI is live, make sure the predecessors are live-out.
     for (MachineBasicBlock::const_pred_iterator PI = MBB->pred_begin(),
          PE = MBB->pred_end(); PI != PE; ++PI) {
-      if (!LiveOut.insert(*PI))
+      if (!LiveOut.insert(*PI).second)
         continue;
       SlotIndex Stop = getMBBEndIdx(*PI);
       // A predecessor is not required to have a live-out value for a PHI.

@@ -401,7 +402,7 @@ bool LiveIntervals::shrinkToUses(LiveInterval *li,
     // Make sure VNI is live-out from the predecessors.
     for (MachineBasicBlock::const_pred_iterator PI = MBB->pred_begin(),
          PE = MBB->pred_end(); PI != PE; ++PI) {
-      if (!LiveOut.insert(*PI))
+      if (!LiveOut.insert(*PI).second)
         continue;
       SlotIndex Stop = getMBBEndIdx(*PI);
       assert(li->getVNInfoBefore(Stop) == VNI &&

@@ -784,7 +785,7 @@ private:
   /// Update a single live range, assuming an instruction has been moved from
   /// OldIdx to NewIdx.
   void updateRange(LiveRange &LR, unsigned Reg) {
-    if (!Updated.insert(&LR))
+    if (!Updated.insert(&LR).second)
       return;
     DEBUG({
       dbgs() << " ";

@@ -1066,7 +1066,7 @@ bool MachineBasicBlock::CorrectExtraCFGEdges(MachineBasicBlock *DestA,
   MachineBasicBlock::succ_iterator SI = succ_begin();
   while (SI != succ_end()) {
     const MachineBasicBlock *MBB = *SI;
-    if (!SeenMBBs.insert(MBB) ||
+    if (!SeenMBBs.insert(MBB).second ||
         (MBB != DestA && MBB != DestB && !MBB->isLandingPad())) {
       // This is a superfluous edge, remove it.
       SI = removeSuccessor(SI);
@@ -813,7 +813,7 @@ void MachineBlockPlacement::buildLoopChains(MachineFunction &F,
            BE = L.block_end();
        BI != BE; ++BI) {
     BlockChain &Chain = *BlockToChain[*BI];
-    if (!UpdatedPreds.insert(&Chain))
+    if (!UpdatedPreds.insert(&Chain).second)
       continue;

     assert(Chain.LoopPredecessors == 0);

@@ -914,7 +914,7 @@ void MachineBlockPlacement::buildCFGChains(MachineFunction &F) {
   for (MachineFunction::iterator FI = F.begin(), FE = F.end(); FI != FE; ++FI) {
     MachineBasicBlock *BB = &*FI;
     BlockChain &Chain = *BlockToChain[BB];
-    if (!UpdatedPreds.insert(&Chain))
+    if (!UpdatedPreds.insert(&Chain).second)
       continue;

     assert(Chain.LoopPredecessors == 0);

@@ -141,7 +141,7 @@ void llvm::finalizeBundle(MachineBasicBlock &MBB,
         // Internal def is now killed.
         KilledDefSet.insert(Reg);
       } else {
-        if (ExternUseSet.insert(Reg)) {
+        if (ExternUseSet.insert(Reg).second) {
           ExternUses.push_back(Reg);
           if (MO.isUndef())
             UndefUseSet.insert(Reg);

@@ -158,7 +158,7 @@ void llvm::finalizeBundle(MachineBasicBlock &MBB,
     if (!Reg)
       continue;

-    if (LocalDefSet.insert(Reg)) {
+    if (LocalDefSet.insert(Reg).second) {
       LocalDefs.push_back(Reg);
       if (MO.isDead()) {
         DeadDefSet.insert(Reg);

@@ -174,7 +174,7 @@ void llvm::finalizeBundle(MachineBasicBlock &MBB,
     if (!MO.isDead()) {
       for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) {
         unsigned SubReg = *SubRegs;
-        if (LocalDefSet.insert(SubReg))
+        if (LocalDefSet.insert(SubReg).second)
           LocalDefs.push_back(SubReg);
       }
     }

@@ -186,7 +186,7 @@ void llvm::finalizeBundle(MachineBasicBlock &MBB,
   SmallSet<unsigned, 32> Added;
   for (unsigned i = 0, e = LocalDefs.size(); i != e; ++i) {
     unsigned Reg = LocalDefs[i];
-    if (Added.insert(Reg)) {
+    if (Added.insert(Reg).second) {
       // If it's not live beyond end of the bundle, mark it dead.
       bool isDead = DeadDefSet.count(Reg) || KilledDefSet.count(Reg);
      MIB.addReg(Reg, getDefRegState(true) | getDeadRegState(isDead) |

@@ -818,7 +818,7 @@ void MachineLICM::InitRegPressure(MachineBasicBlock *BB) {
       if (!TargetRegisterInfo::isVirtualRegister(Reg))
         continue;

-      bool isNew = RegSeen.insert(Reg);
+      bool isNew = RegSeen.insert(Reg).second;
       unsigned RCId, RCCost;
       getRegisterClassIDAndCost(MI, Reg, i, RCId, RCCost);
       if (MO.isDef())

@@ -850,7 +850,7 @@ void MachineLICM::UpdateRegPressure(const MachineInstr *MI) {
     if (!TargetRegisterInfo::isVirtualRegister(Reg))
       continue;

-    bool isNew = RegSeen.insert(Reg);
+    bool isNew = RegSeen.insert(Reg).second;
     if (MO.isDef())
       Defs.push_back(Reg);
     else if (!isNew && isOperandKill(MO, MRI)) {

@@ -340,7 +340,7 @@ bool MachineSinking::isWorthBreakingCriticalEdge(MachineInstr *MI,
   // If the pass has already considered breaking this edge (during this pass
   // through the function), then let's go ahead and break it. This means
   // sinking multiple "cheap" instructions into the same block.
-  if (!CEBCandidates.insert(std::make_pair(From, To)))
+  if (!CEBCandidates.insert(std::make_pair(From, To)).second)
    return true;

   if (!MI->isCopy() && !TII->isAsCheapAsAMove(MI))

@@ -449,7 +449,7 @@ public:
    }
    // To is a new block. Mark the block as visited in case the CFG has cycles
    // that MachineLoopInfo didn't recognize as a natural loop.
-    return LB.Visited.insert(To);
+    return LB.Visited.insert(To).second;
  }
 };
 }

@@ -92,7 +92,7 @@ bool OptimizePHIs::IsSingleValuePHICycle(MachineInstr *MI,
   unsigned DstReg = MI->getOperand(0).getReg();

   // See if we already saw this register.
-  if (!PHIsInCycle.insert(MI))
+  if (!PHIsInCycle.insert(MI).second)
     return true;

   // Don't scan crazily complex things.

@@ -137,7 +137,7 @@ bool OptimizePHIs::IsDeadPHICycle(MachineInstr *MI, InstrSet &PHIsInCycle) {
          "PHI destination is not a virtual register");

   // See if we already saw this register.
-  if (!PHIsInCycle.insert(MI))
+  if (!PHIsInCycle.insert(MI).second)
     return true;

   // Don't scan crazily complex things.

@@ -367,7 +367,7 @@ void PHIElimination::LowerPHINode(MachineBasicBlock &MBB,
     // Check to make sure we haven't already emitted the copy for this block.
     // This can happen because PHI nodes may have multiple entries for the same
     // basic block.
-    if (!MBBsInsertedInto.insert(&opBlock))
+    if (!MBBsInsertedInto.insert(&opBlock).second)
       continue; // If the copy has already been emitted, we're done.

     // Find a safe location to insert the copy, this may be the first terminator

@@ -708,7 +708,7 @@ void RAFast::handleThroughOperands(MachineInstr *MI,
      continue;
    if (MO.isEarlyClobber() || MI->isRegTiedToDefOperand(i) ||
        (MO.getSubReg() && MI->readsVirtualRegister(Reg))) {
-      if (ThroughRegs.insert(Reg))
+      if (ThroughRegs.insert(Reg).second)
        DEBUG(dbgs() << ' ' << PrintReg(Reg));
    }
  }

@@ -965,7 +965,7 @@ void RegisterCoalescer::updateRegDefsUses(unsigned SrcReg,
    // the UseMI operands removes them from the SrcReg use-def chain, but when
    // SrcReg is DstReg we could encounter UseMI twice if it has multiple
    // operands mentioning the virtual register.
-    if (SrcReg == DstReg && !Visited.insert(UseMI))
+    if (SrcReg == DstReg && !Visited.insert(UseMI).second)
      continue;

    SmallVector<unsigned,8> Ops;
@ -109,7 +109,7 @@ static void getUnderlyingObjects(const Value *V,
|
|||||||
for (SmallVectorImpl<Value *>::iterator I = Objs.begin(), IE = Objs.end();
|
for (SmallVectorImpl<Value *>::iterator I = Objs.begin(), IE = Objs.end();
|
||||||
I != IE; ++I) {
|
I != IE; ++I) {
|
||||||
V = *I;
|
V = *I;
|
||||||
if (!Visited.insert(V))
|
if (!Visited.insert(V).second)
|
||||||
continue;
|
continue;
|
||||||
if (Operator::getOpcode(V) == Instruction::IntToPtr) {
|
if (Operator::getOpcode(V) == Instruction::IntToPtr) {
|
||||||
const Value *O =
|
const Value *O =
|
||||||
@ -588,7 +588,7 @@ iterateChainSucc(AliasAnalysis *AA, const MachineFrameInfo *MFI,
|
|||||||
return *Depth;
|
return *Depth;
|
||||||
|
|
||||||
// Remember visited nodes.
|
// Remember visited nodes.
|
||||||
if (!Visited.insert(SUb))
|
if (!Visited.insert(SUb).second)
|
||||||
return *Depth;
|
return *Depth;
|
||||||
// If there is _some_ dependency already in place, do not
|
// If there is _some_ dependency already in place, do not
|
||||||
// descend any further.
|
// descend any further.
|
||||||
|
@ -1493,7 +1493,7 @@ SDValue DAGCombiner::visitTokenFactor(SDNode *N) {
|
|||||||
|
|
||||||
default:
|
default:
|
||||||
// Only add if it isn't already in the list.
|
// Only add if it isn't already in the list.
|
||||||
if (SeenOps.insert(Op.getNode()))
|
if (SeenOps.insert(Op.getNode()).second)
|
||||||
Ops.push_back(Op);
|
Ops.push_back(Op);
|
||||||
else
|
else
|
||||||
Changed = true;
|
Changed = true;
|
||||||
@ -12267,7 +12267,7 @@ void DAGCombiner::GatherAllAliases(SDNode *N, SDValue OriginalChain,
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Don't bother if we've been before.
|
// Don't bother if we've been before.
|
||||||
if (!Visited.insert(Chain.getNode()))
|
if (!Visited.insert(Chain.getNode()).second)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
switch (Chain.getOpcode()) {
|
switch (Chain.getOpcode()) {
|
||||||
@ -12355,7 +12355,8 @@ void DAGCombiner::GatherAllAliases(SDNode *N, SDValue OriginalChain,
|
|||||||
|
|
||||||
for (SDNode::use_iterator UI = M->use_begin(),
|
for (SDNode::use_iterator UI = M->use_begin(),
|
||||||
UIE = M->use_end(); UI != UIE; ++UI)
|
UIE = M->use_end(); UI != UIE; ++UI)
|
||||||
if (UI.getUse().getValueType() == MVT::Other && Visited.insert(*UI)) {
|
if (UI.getUse().getValueType() == MVT::Other &&
|
||||||
|
Visited.insert(*UI).second) {
|
||||||
if (isa<MemIntrinsicSDNode>(*UI) || isa<MemSDNode>(*UI)) {
|
if (isa<MemIntrinsicSDNode>(*UI) || isa<MemSDNode>(*UI)) {
|
||||||
// We've not visited this use, and we care about it (it could have an
|
// We've not visited this use, and we care about it (it could have an
|
||||||
// ordering dependency with the original node).
|
// ordering dependency with the original node).
|
||||||
|
@ -1976,7 +1976,7 @@ bool FastISel::handlePHINodesInSuccessorBlocks(const BasicBlock *LLVMBB) {
|
|||||||
|
|
||||||
// If this terminator has multiple identical successors (common for
|
// If this terminator has multiple identical successors (common for
|
||||||
// switches), only handle each succ once.
|
// switches), only handle each succ once.
|
||||||
if (!SuccsHandled.insert(SuccMBB))
|
if (!SuccsHandled.insert(SuccMBB).second)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
MachineBasicBlock::iterator MBBI = SuccMBB->begin();
|
MachineBasicBlock::iterator MBBI = SuccMBB->begin();
|
||||||
|
@ -4343,7 +4343,7 @@ void SelectionDAG::Legalize() {
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (LegalizedNodes.insert(N)) {
|
if (LegalizedNodes.insert(N).second) {
|
||||||
AnyLegalized = true;
|
AnyLegalized = true;
|
||||||
Legalizer.LegalizeOp(N);
|
Legalizer.LegalizeOp(N);
|
||||||
|
|
||||||
|
@ -460,7 +460,7 @@ static bool CheckForLiveRegDef(SUnit *SU, unsigned Reg,
|
|||||||
bool Added = false;
|
bool Added = false;
|
||||||
for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
|
for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
|
||||||
if (LiveRegDefs[*AI] && LiveRegDefs[*AI] != SU) {
|
if (LiveRegDefs[*AI] && LiveRegDefs[*AI] != SU) {
|
||||||
if (RegAdded.insert(*AI)) {
|
if (RegAdded.insert(*AI).second) {
|
||||||
LRegs.push_back(*AI);
|
LRegs.push_back(*AI);
|
||||||
Added = true;
|
Added = true;
|
||||||
}
|
}
|
||||||
|
@ -1223,7 +1223,7 @@ static void CheckForLiveRegDef(SUnit *SU, unsigned Reg,
|
|||||||
if (LiveRegDefs[*AliasI] == SU) continue;
|
if (LiveRegDefs[*AliasI] == SU) continue;
|
||||||
|
|
||||||
// Add Reg to the set of interfering live regs.
|
// Add Reg to the set of interfering live regs.
|
||||||
if (RegAdded.insert(*AliasI)) {
|
if (RegAdded.insert(*AliasI).second) {
|
||||||
LRegs.push_back(*AliasI);
|
LRegs.push_back(*AliasI);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1240,7 +1240,7 @@ static void CheckForLiveRegDefMasked(SUnit *SU, const uint32_t *RegMask,
if (!LiveRegDefs[i]) continue;
if (LiveRegDefs[i] == SU) continue;
if (!MachineOperand::clobbersPhysReg(RegMask, i)) continue;
if (RegAdded.insert(i))
if (RegAdded.insert(i).second)
LRegs.push_back(i);
}
}

@ -1315,7 +1315,8 @@ DelayForLiveRegsBottomUp(SUnit *SU, SmallVectorImpl<unsigned> &LRegs) {
SDNode *Gen = LiveRegGens[CallResource]->getNode();
while (SDNode *Glued = Gen->getGluedNode())
Gen = Glued;
if (!IsChainDependent(Gen, Node, 0, TII) && RegAdded.insert(CallResource))
if (!IsChainDependent(Gen, Node, 0, TII) &&
RegAdded.insert(CallResource).second)
LRegs.push_back(CallResource);
}
}

@ -230,7 +230,7 @@ void ScheduleDAGSDNodes::ClusterNeighboringLoads(SDNode *Node) {
for (SDNode::use_iterator I = Chain->use_begin(), E = Chain->use_end();
I != E && UseCount < 100; ++I, ++UseCount) {
SDNode *User = *I;
if (User == Node || !Visited.insert(User))
if (User == Node || !Visited.insert(User).second)
continue;
int64_t Offset1, Offset2;
if (!TII->areLoadsFromSameBasePtr(Base, User, Offset1, Offset2) ||

@ -343,7 +343,7 @@ void ScheduleDAGSDNodes::BuildSchedUnits() {

// Add all operands to the worklist unless they've already been added.
for (unsigned i = 0, e = NI->getNumOperands(); i != e; ++i)
if (Visited.insert(NI->getOperand(i).getNode()))
if (Visited.insert(NI->getOperand(i).getNode()).second)
Worklist.push_back(NI->getOperand(i).getNode());

if (isPassiveNode(NI)) // Leaf node, e.g. a TargetImmediate.

@ -737,7 +737,7 @@ ProcessSourceNode(SDNode *N, SelectionDAG *DAG, InstrEmitter &Emitter,
SmallVectorImpl<std::pair<unsigned, MachineInstr*> > &Orders,
SmallSet<unsigned, 8> &Seen) {
unsigned Order = N->getIROrder();
if (!Order || !Seen.insert(Order)) {
if (!Order || !Seen.insert(Order).second) {
// Process any valid SDDbgValues even if node does not have any order
// assigned.
ProcessSDDbgValues(N, DAG, Emitter, Orders, VRBaseMap, 0);

@ -6385,7 +6385,7 @@ SDNode::hasPredecessorHelper(const SDNode *N,
const SDNode *M = Worklist.pop_back_val();
for (unsigned i = 0, e = M->getNumOperands(); i != e; ++i) {
SDNode *Op = M->getOperand(i).getNode();
if (Visited.insert(Op))
if (Visited.insert(Op).second)
Worklist.push_back(Op);
if (Op == N)
return true;

@ -6753,7 +6753,7 @@ static void checkForCyclesHelper(const SDNode *N,

// If a node has already been visited on this depth-first walk, reject it as
// a cycle.
if (!Visited.insert(N)) {
if (!Visited.insert(N).second) {
errs() << "Detected cycle in SelectionDAG\n";
dbgs() << "Offending node:\n";
N->dumprFull(DAG); dbgs() << "\n";

@ -2760,7 +2760,7 @@ void SelectionDAGBuilder::visitIndirectBr(const IndirectBrInst &I) {
SmallSet<BasicBlock*, 32> Done;
for (unsigned i = 0, e = I.getNumSuccessors(); i != e; ++i) {
BasicBlock *BB = I.getSuccessor(i);
bool Inserted = Done.insert(BB);
bool Inserted = Done.insert(BB).second;
if (!Inserted)
continue;

@ -7697,7 +7697,8 @@ SelectionDAGBuilder::HandlePHINodesInSuccessorBlocks(const BasicBlock *LLVMBB) {

// If this terminator has multiple identical successors (common for
// switches), only handle each succ once.
if (!SuccsHandled.insert(SuccMBB)) continue;
if (!SuccsHandled.insert(SuccMBB).second)
continue;

MachineBasicBlock::iterator MBBI = SuccMBB->begin();

@ -569,7 +569,7 @@ void SDNode::printr(raw_ostream &OS, const SelectionDAG *G) const {
typedef SmallPtrSet<const SDNode *, 128> VisitedSDNodeSet;
static void DumpNodesr(raw_ostream &OS, const SDNode *N, unsigned indent,
const SelectionDAG *G, VisitedSDNodeSet &once) {
if (!once.insert(N)) // If we've been here before, return now.
if (!once.insert(N).second) // If we've been here before, return now.
return;

// Dump the current SDNode, but don't end the line yet.

@ -615,7 +615,7 @@ void SelectionDAGISel::ComputeLiveOutVRegInfo() {
SDNode *N = Worklist.pop_back_val();

// If we've already seen this node, ignore it.
if (!VisitedNodes.insert(N))
if (!VisitedNodes.insert(N).second)
continue;

// Otherwise, add all chain operands to the worklist.

@ -1741,7 +1741,7 @@ static bool findNonImmUse(SDNode *Use, SDNode* Def, SDNode *ImmedUse,

// Don't revisit nodes if we already scanned it and didn't fail, we know we
// won't fail if we scan it again.
if (!Visited.insert(Use))
if (!Visited.insert(Use).second)
return false;

for (unsigned i = 0, e = Use->getNumOperands(); i != e; ++i) {

@ -140,7 +140,7 @@ void SjLjEHPrepare::insertCallSiteStore(Instruction *I, int Number) {
/// we reach blocks we've already seen.
static void MarkBlocksLiveIn(BasicBlock *BB,
SmallPtrSetImpl<BasicBlock *> &LiveBBs) {
if (!LiveBBs.insert(BB))
if (!LiveBBs.insert(BB).second)
return; // already been here.

for (pred_iterator PI = pred_begin(BB), E = pred_end(BB); PI != E; ++PI)

@ -169,7 +169,7 @@ bool StackProtector::HasAddressTaken(const Instruction *AI) {
} else if (const PHINode *PN = dyn_cast<PHINode>(U)) {
// Keep track of what PHI nodes we have already visited to ensure
// they are only visited once.
if (VisitedPHIs.insert(PN))
if (VisitedPHIs.insert(PN).second)
if (HasAddressTaken(PN))
return true;
} else if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(U)) {

@ -678,7 +678,7 @@ TwoAddressInstructionPass::scanUses(unsigned DstReg) {
unsigned Reg = DstReg;
while (MachineInstr *UseMI = findOnlyInterestingUse(Reg, MBB, MRI, TII,IsCopy,
NewReg, IsDstPhys)) {
if (IsCopy && !Processed.insert(UseMI))
if (IsCopy && !Processed.insert(UseMI).second)
break;

DenseMap<MachineInstr*, unsigned>::iterator DI = DistanceMap.find(UseMI);

@ -316,7 +316,7 @@ static bool canTrapImpl(const Constant *C,
// ConstantExpr traps if any operands can trap.
for (unsigned i = 0, e = C->getNumOperands(); i != e; ++i) {
if (ConstantExpr *Op = dyn_cast<ConstantExpr>(CE->getOperand(i))) {
if (NonTrappingOps.insert(Op) && canTrapImpl(Op, NonTrappingOps))
if (NonTrappingOps.insert(Op).second && canTrapImpl(Op, NonTrappingOps))
return true;
}
}

@ -363,7 +363,7 @@ ConstHasGlobalValuePredicate(const Constant *C,
const Constant *ConstOp = dyn_cast<Constant>(Op);
if (!ConstOp)
continue;
if (Visited.insert(ConstOp))
if (Visited.insert(ConstOp).second)
WorkList.push_back(ConstOp);
}
}

@ -64,7 +64,7 @@ void DIBuilder::finalize() {
// TrackingVHs back into Values.
SmallPtrSet<Value *, 16> RetainSet;
for (unsigned I = 0, E = AllRetainTypes.size(); I < E; I++)
if (RetainSet.insert(AllRetainTypes[I]))
if (RetainSet.insert(AllRetainTypes[I]).second)
RetainValues.push_back(AllRetainTypes[I]);
DIArray RetainTypes = getOrCreateArray(RetainValues);
DIType(TempRetainTypes).replaceAllUsesWith(RetainTypes);

@ -1127,7 +1127,7 @@ void DebugInfoFinder::processDeclare(const Module &M,
if (!DV.isVariable())
return;

if (!NodesSeen.insert(DV))
if (!NodesSeen.insert(DV).second)
return;
processScope(DIVariable(N).getContext());
processType(DIVariable(N).getType().resolve(TypeIdentifierMap));

@ -1143,7 +1143,7 @@ void DebugInfoFinder::processValue(const Module &M, const DbgValueInst *DVI) {
if (!DV.isVariable())
return;

if (!NodesSeen.insert(DV))
if (!NodesSeen.insert(DV).second)
return;
processScope(DIVariable(N).getContext());
processType(DIVariable(N).getType().resolve(TypeIdentifierMap));

@ -1153,7 +1153,7 @@ bool DebugInfoFinder::addType(DIType DT) {
if (!DT)
return false;

if (!NodesSeen.insert(DT))
if (!NodesSeen.insert(DT).second)
return false;

TYs.push_back(DT);

@ -1163,7 +1163,7 @@ bool DebugInfoFinder::addType(DIType DT) {
bool DebugInfoFinder::addCompileUnit(DICompileUnit CU) {
if (!CU)
return false;
if (!NodesSeen.insert(CU))
if (!NodesSeen.insert(CU).second)
return false;

CUs.push_back(CU);

@ -1174,7 +1174,7 @@ bool DebugInfoFinder::addGlobalVariable(DIGlobalVariable DIG) {
if (!DIG)
return false;

if (!NodesSeen.insert(DIG))
if (!NodesSeen.insert(DIG).second)
return false;

GVs.push_back(DIG);

@ -1185,7 +1185,7 @@ bool DebugInfoFinder::addSubprogram(DISubprogram SP) {
if (!SP)
return false;

if (!NodesSeen.insert(SP))
if (!NodesSeen.insert(SP).second)
return false;

SPs.push_back(SP);

@ -1199,7 +1199,7 @@ bool DebugInfoFinder::addScope(DIScope Scope) {
// as null for now.
if (Scope->getNumOperands() == 0)
return false;
if (!NodesSeen.insert(Scope))
if (!NodesSeen.insert(Scope).second)
return false;
Scopes.push_back(Scope);
return true;

@ -562,7 +562,7 @@ bool StructType::isSized(SmallPtrSetImpl<const Type*> *Visited) const {
if (isOpaque())
return false;

if (Visited && !Visited->insert(this))
if (Visited && !Visited->insert(this).second)
return false;

// Okay, our struct is sized if all of the elements are, but if one of the

@ -292,7 +292,7 @@ void Value::takeName(Value *V) {
#ifndef NDEBUG
static bool contains(SmallPtrSetImpl<ConstantExpr *> &Cache, ConstantExpr *Expr,
Constant *C) {
if (!Cache.insert(Expr))
if (!Cache.insert(Expr).second)
return false;

for (auto &O : Expr->operands()) {

@ -401,7 +401,7 @@ static Value *stripPointerCastsAndOffsets(Value *V) {
return V;
}
assert(V->getType()->isPointerTy() && "Unexpected operand type!");
} while (Visited.insert(V));
} while (Visited.insert(V).second);

return V;
}

@ -451,7 +451,7 @@ Value *Value::stripAndAccumulateInBoundsConstantOffsets(const DataLayout &DL,
return V;
}
assert(V->getType()->isPointerTy() && "Unexpected operand type!");
} while (Visited.insert(V));
} while (Visited.insert(V).second);

return V;
}

@ -522,7 +522,7 @@ static bool isDereferenceablePointer(const Value *V, const DataLayout *DL,
// For GEPs, determine if the indexing lands within the allocated object.
if (const GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
// Conservatively require that the base pointer be fully dereferenceable.
if (!Visited.insert(GEP->getOperand(0)))
if (!Visited.insert(GEP->getOperand(0)).second)
return false;
if (!isDereferenceablePointer(GEP->getOperand(0), DL, Visited))
return false;

@ -480,7 +480,7 @@ void Verifier::visitGlobalVariable(const GlobalVariable &GV) {

while (!WorkStack.empty()) {
const Value *V = WorkStack.pop_back_val();
if (!Visited.insert(V))
if (!Visited.insert(V).second)
continue;

if (const User *U = dyn_cast<User>(V)) {

@ -510,7 +510,7 @@ void Verifier::visitAliaseeSubExpr(SmallPtrSetImpl<const GlobalAlias*> &Visited,
Assert1(!GV->isDeclaration(), "Alias must point to a definition", &GA);

if (const auto *GA2 = dyn_cast<GlobalAlias>(GV)) {
Assert1(Visited.insert(GA2), "Aliases cannot form a cycle", &GA);
Assert1(Visited.insert(GA2).second, "Aliases cannot form a cycle", &GA);

Assert1(!GA2->mayBeOverridden(), "Alias cannot point to a weak alias",
&GA);

@ -568,7 +568,7 @@ void Verifier::visitNamedMDNode(const NamedMDNode &NMD) {
void Verifier::visitMDNode(MDNode &MD, Function *F) {
// Only visit each node once. Metadata can be mutually recursive, so this
// avoids infinite recursion here, as well as being an optimization.
if (!MDNodes.insert(&MD))
if (!MDNodes.insert(&MD).second)
return;

for (unsigned i = 0, e = MD.getNumOperands(); i != e; ++i) {

@ -1218,7 +1218,7 @@ void Verifier::visitSwitchInst(SwitchInst &SI) {
for (SwitchInst::CaseIt i = SI.case_begin(), e = SI.case_end(); i != e; ++i) {
Assert1(i.getCaseValue()->getType() == SwitchTy,
"Switch constants must all be same type as switch value!", &SI);
Assert2(Constants.insert(i.getCaseValue()),
Assert2(Constants.insert(i.getCaseValue()).second,
"Duplicate integer as switch case", &SI, i.getCaseValue());
}

@ -2253,7 +2253,7 @@ void Verifier::visitInstruction(Instruction &I) {

while (!Stack.empty()) {
const ConstantExpr *V = Stack.pop_back_val();
if (!Visited.insert(V))
if (!Visited.insert(V).second)
continue;

VerifyConstantExprBitcastType(V);

@ -152,7 +152,7 @@ bool TypeMapTy::areTypesIsomorphic(Type *DstTy, Type *SrcTy) {
// same opaque type then we fail.
if (cast<StructType>(DstTy)->isOpaque()) {
// We can only map one source type onto the opaque destination type.
if (!DstResolvedOpaqueTypes.insert(cast<StructType>(DstTy)))
if (!DstResolvedOpaqueTypes.insert(cast<StructType>(DstTy)).second)
return false;
SrcDefinitionsToResolve.push_back(SSTy);
Entry = DstTy;

@ -1455,7 +1455,7 @@ sortOpts(StringMap<Option*> &OptMap,
continue;

// If we've already seen this option, don't add it to the list again.
if (!OptionSet.insert(I->second))
if (!OptionSet.insert(I->second).second)
continue;

Opts.push_back(std::pair<const char *, Option*>(I->getKey().data(),
@ -34,18 +34,19 @@ void SmallPtrSetImplBase::shrink_and_clear() {
memset(CurArray, -1, CurArraySize*sizeof(void*));
}

bool SmallPtrSetImplBase::insert_imp(const void * Ptr) {
std::pair<const void *const *, bool>
SmallPtrSetImplBase::insert_imp(const void *Ptr) {
if (isSmall()) {
// Check to see if it is already in the set.
for (const void **APtr = SmallArray, **E = SmallArray+NumElements;
APtr != E; ++APtr)
if (*APtr == Ptr)
return false;
return std::make_pair(APtr, false);

// Nope, there isn't. If we stay small, just 'pushback' now.
if (NumElements < CurArraySize) {
SmallArray[NumElements++] = Ptr;
return true;
return std::make_pair(SmallArray + (NumElements - 1), true);
}
// Otherwise, hit the big set case, which will call grow.
}

@ -61,14 +62,15 @@ bool SmallPtrSetImplBase::insert_imp(const void * Ptr) {

// Okay, we know we have space. Find a hash bucket.
const void **Bucket = const_cast<const void**>(FindBucketFor(Ptr));
if (*Bucket == Ptr) return false; // Already inserted, good.
if (*Bucket == Ptr)
return std::make_pair(Bucket, false); // Already inserted, good.

// Otherwise, insert it!
if (*Bucket == getTombstoneMarker())
--NumTombstones;
*Bucket = Ptr;
++NumElements; // Track density.
return true;
return std::make_pair(Bucket, true);
}

bool SmallPtrSetImplBase::erase_imp(const void * Ptr) {
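The two hunks above are the core of the change: insert_imp now reports which slot holds the pointer along with the was-it-new flag, which is what lets the public SmallPtrSet::insert expose pair<iterator, bool>. A minimal self-contained sketch of that return shape (a fixed-size stand-in written only for illustration; it omits the real set's growth, tombstone, and hashing logic):

#include <cstddef>
#include <iostream>
#include <utility>

static const void *Storage[8]; // sketch assumes fewer than 8 distinct pointers
static std::size_t NumElements = 0;

// Mirrors the new contract: the slot of the (possibly pre-existing)
// element, plus true iff the pointer was not already present.
std::pair<const void *const *, bool> insertImpSketch(const void *Ptr) {
  for (std::size_t i = 0; i != NumElements; ++i)
    if (Storage[i] == Ptr)
      return std::make_pair(&Storage[i], false); // already in the set
  Storage[NumElements] = Ptr;
  return std::make_pair(&Storage[NumElements++], true); // newly inserted
}

int main() {
  int X = 0;
  std::cout << insertImpSketch(&X).second << '\n'; // 1: first insertion
  std::cout << insertImpSketch(&X).second << '\n'; // 0: duplicate
}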
@ -569,7 +569,7 @@ bool AArch64PromoteConstant::runOnFunction(Function &F) {
|
|||||||
// global. Do not promote constant expressions either, as they may
|
// global. Do not promote constant expressions either, as they may
|
||||||
// require some code expansion.
|
// require some code expansion.
|
||||||
if (Cst && !isa<GlobalValue>(Cst) && !isa<ConstantExpr>(Cst) &&
|
if (Cst && !isa<GlobalValue>(Cst) && !isa<ConstantExpr>(Cst) &&
|
||||||
AlreadyChecked.insert(Cst))
|
AlreadyChecked.insert(Cst).second)
|
||||||
LocalChange |= promoteConstant(Cst);
|
LocalChange |= promoteConstant(Cst);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -6943,7 +6943,7 @@ EmitSjLjDispatchBlock(MachineInstr *MI, MachineBasicBlock *MBB) const {
|
|||||||
for (std::vector<MachineBasicBlock*>::iterator
|
for (std::vector<MachineBasicBlock*>::iterator
|
||||||
I = LPadList.begin(), E = LPadList.end(); I != E; ++I) {
|
I = LPadList.begin(), E = LPadList.end(); I != E; ++I) {
|
||||||
MachineBasicBlock *CurMBB = *I;
|
MachineBasicBlock *CurMBB = *I;
|
||||||
if (SeenMBBs.insert(CurMBB))
|
if (SeenMBBs.insert(CurMBB).second)
|
||||||
DispContBB->addSuccessor(CurMBB);
|
DispContBB->addSuccessor(CurMBB);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -455,7 +455,8 @@ bool MemDefsUses::hasHazard_(const MachineInstr &MI) {
|
|||||||
|
|
||||||
bool MemDefsUses::updateDefsUses(ValueType V, bool MayStore) {
|
bool MemDefsUses::updateDefsUses(ValueType V, bool MayStore) {
|
||||||
if (MayStore)
|
if (MayStore)
|
||||||
return !Defs.insert(V) || Uses.count(V) || SeenNoObjStore || SeenNoObjLoad;
|
return !Defs.insert(V).second || Uses.count(V) || SeenNoObjStore ||
|
||||||
|
SeenNoObjLoad;
|
||||||
|
|
||||||
Uses.insert(V);
|
Uses.insert(V);
|
||||||
return Defs.count(V) || SeenNoObjStore;
|
return Defs.count(V) || SeenNoObjStore;
|
||||||
|
@ -7649,7 +7649,7 @@ static bool findConsecutiveLoad(LoadSDNode *LD, SelectionDAG &DAG) {
|
|||||||
// nodes just above the top-level loads and token factors.
|
// nodes just above the top-level loads and token factors.
|
||||||
while (!Queue.empty()) {
|
while (!Queue.empty()) {
|
||||||
SDNode *ChainNext = Queue.pop_back_val();
|
SDNode *ChainNext = Queue.pop_back_val();
|
||||||
if (!Visited.insert(ChainNext))
|
if (!Visited.insert(ChainNext).second)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
if (MemSDNode *ChainLD = dyn_cast<MemSDNode>(ChainNext)) {
|
if (MemSDNode *ChainLD = dyn_cast<MemSDNode>(ChainNext)) {
|
||||||
@ -7680,7 +7680,7 @@ static bool findConsecutiveLoad(LoadSDNode *LD, SelectionDAG &DAG) {
|
|||||||
|
|
||||||
while (!Queue.empty()) {
|
while (!Queue.empty()) {
|
||||||
SDNode *LoadRoot = Queue.pop_back_val();
|
SDNode *LoadRoot = Queue.pop_back_val();
|
||||||
if (!Visited.insert(LoadRoot))
|
if (!Visited.insert(LoadRoot).second)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
if (MemSDNode *ChainLD = dyn_cast<MemSDNode>(LoadRoot))
|
if (MemSDNode *ChainLD = dyn_cast<MemSDNode>(LoadRoot))
|
||||||
@ -7810,7 +7810,7 @@ SDValue PPCTargetLowering::DAGCombineTruncBoolExt(SDNode *N,
|
|||||||
SDValue BinOp = BinOps.back();
|
SDValue BinOp = BinOps.back();
|
||||||
BinOps.pop_back();
|
BinOps.pop_back();
|
||||||
|
|
||||||
if (!Visited.insert(BinOp.getNode()))
|
if (!Visited.insert(BinOp.getNode()).second)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
PromOps.push_back(BinOp);
|
PromOps.push_back(BinOp);
|
||||||
@ -8024,7 +8024,7 @@ SDValue PPCTargetLowering::DAGCombineExtBoolTrunc(SDNode *N,
|
|||||||
SDValue BinOp = BinOps.back();
|
SDValue BinOp = BinOps.back();
|
||||||
BinOps.pop_back();
|
BinOps.pop_back();
|
||||||
|
|
||||||
if (!Visited.insert(BinOp.getNode()))
|
if (!Visited.insert(BinOp.getNode()).second)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
PromOps.push_back(BinOp);
|
PromOps.push_back(BinOp);
|
||||||
|
@ -330,7 +330,7 @@ bool FPS::runOnMachineFunction(MachineFunction &MF) {
|
|||||||
// Process any unreachable blocks in arbitrary order now.
|
// Process any unreachable blocks in arbitrary order now.
|
||||||
if (MF.size() != Processed.size())
|
if (MF.size() != Processed.size())
|
||||||
for (MachineFunction::iterator BB = MF.begin(), E = MF.end(); BB != E; ++BB)
|
for (MachineFunction::iterator BB = MF.begin(), E = MF.end(); BB != E; ++BB)
|
||||||
if (Processed.insert(BB))
|
if (Processed.insert(BB).second)
|
||||||
Changed |= processBasicBlock(MF, *BB);
|
Changed |= processBasicBlock(MF, *BB);
|
||||||
|
|
||||||
LiveBundles.clear();
|
LiveBundles.clear();
|
||||||
|
@ -182,7 +182,7 @@ bool ArgPromotion::canPaddingBeAccessed(Argument *arg) {
|
|||||||
Value *V = WorkList.back();
|
Value *V = WorkList.back();
|
||||||
WorkList.pop_back();
|
WorkList.pop_back();
|
||||||
if (isa<GetElementPtrInst>(V) || isa<PHINode>(V)) {
|
if (isa<GetElementPtrInst>(V) || isa<PHINode>(V)) {
|
||||||
if (PtrValues.insert(V))
|
if (PtrValues.insert(V).second)
|
||||||
WorkList.insert(WorkList.end(), V->user_begin(), V->user_end());
|
WorkList.insert(WorkList.end(), V->user_begin(), V->user_end());
|
||||||
} else if (StoreInst *Store = dyn_cast<StoreInst>(V)) {
|
} else if (StoreInst *Store = dyn_cast<StoreInst>(V)) {
|
||||||
Stores.push_back(Store);
|
Stores.push_back(Store);
|
||||||
|
@ -446,7 +446,7 @@ determinePointerReadAttrs(Argument *A,
|
|||||||
case Instruction::AddrSpaceCast:
|
case Instruction::AddrSpaceCast:
|
||||||
// The original value is not read/written via this if the new value isn't.
|
// The original value is not read/written via this if the new value isn't.
|
||||||
for (Use &UU : I->uses())
|
for (Use &UU : I->uses())
|
||||||
if (Visited.insert(&UU))
|
if (Visited.insert(&UU).second)
|
||||||
Worklist.push_back(&UU);
|
Worklist.push_back(&UU);
|
||||||
break;
|
break;
|
||||||
|
|
||||||
@ -460,7 +460,7 @@ determinePointerReadAttrs(Argument *A,
|
|||||||
auto AddUsersToWorklistIfCapturing = [&] {
|
auto AddUsersToWorklistIfCapturing = [&] {
|
||||||
if (Captures)
|
if (Captures)
|
||||||
for (Use &UU : I->uses())
|
for (Use &UU : I->uses())
|
||||||
if (Visited.insert(&UU))
|
if (Visited.insert(&UU).second)
|
||||||
Worklist.push_back(&UU);
|
Worklist.push_back(&UU);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -185,7 +185,7 @@ bool GlobalDCE::runOnModule(Module &M) {
|
|||||||
/// recursively mark anything that it uses as also needed.
|
/// recursively mark anything that it uses as also needed.
|
||||||
void GlobalDCE::GlobalIsNeeded(GlobalValue *G) {
|
void GlobalDCE::GlobalIsNeeded(GlobalValue *G) {
|
||||||
// If the global is already in the set, no need to reprocess it.
|
// If the global is already in the set, no need to reprocess it.
|
||||||
if (!AliveGlobals.insert(G))
|
if (!AliveGlobals.insert(G).second)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
Module *M = G->getParent();
|
Module *M = G->getParent();
|
||||||
@ -238,7 +238,7 @@ void GlobalDCE::MarkUsedGlobalsAsNeeded(Constant *C) {
|
|||||||
for (User::op_iterator I = C->op_begin(), E = C->op_end(); I != E; ++I) {
|
for (User::op_iterator I = C->op_begin(), E = C->op_end(); I != E; ++I) {
|
||||||
// If we've already processed this constant there's no need to do it again.
|
// If we've already processed this constant there's no need to do it again.
|
||||||
Constant *Op = dyn_cast<Constant>(*I);
|
Constant *Op = dyn_cast<Constant>(*I);
|
||||||
if (Op && SeenConstants.insert(Op))
|
if (Op && SeenConstants.insert(Op).second)
|
||||||
MarkUsedGlobalsAsNeeded(Op);
|
MarkUsedGlobalsAsNeeded(Op);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -639,7 +639,7 @@ static bool AllUsesOfValueWillTrapIfNull(const Value *V,
|
|||||||
} else if (const PHINode *PN = dyn_cast<PHINode>(U)) {
|
} else if (const PHINode *PN = dyn_cast<PHINode>(U)) {
|
||||||
// If we've already seen this phi node, ignore it, it has already been
|
// If we've already seen this phi node, ignore it, it has already been
|
||||||
// checked.
|
// checked.
|
||||||
if (PHIs.insert(PN) && !AllUsesOfValueWillTrapIfNull(PN, PHIs))
|
if (PHIs.insert(PN).second && !AllUsesOfValueWillTrapIfNull(PN, PHIs))
|
||||||
return false;
|
return false;
|
||||||
} else if (isa<ICmpInst>(U) &&
|
} else if (isa<ICmpInst>(U) &&
|
||||||
isa<ConstantPointerNull>(U->getOperand(1))) {
|
isa<ConstantPointerNull>(U->getOperand(1))) {
|
||||||
@ -982,7 +982,7 @@ static bool ValueIsOnlyUsedLocallyOrStoredToOneGlobal(const Instruction *V,
|
|||||||
if (const PHINode *PN = dyn_cast<PHINode>(Inst)) {
|
if (const PHINode *PN = dyn_cast<PHINode>(Inst)) {
|
||||||
// PHIs are ok if all uses are ok. Don't infinitely recurse through PHI
|
// PHIs are ok if all uses are ok. Don't infinitely recurse through PHI
|
||||||
// cycles.
|
// cycles.
|
||||||
if (PHIs.insert(PN))
|
if (PHIs.insert(PN).second)
|
||||||
if (!ValueIsOnlyUsedLocallyOrStoredToOneGlobal(PN, GV, PHIs))
|
if (!ValueIsOnlyUsedLocallyOrStoredToOneGlobal(PN, GV, PHIs))
|
||||||
return false;
|
return false;
|
||||||
continue;
|
continue;
|
||||||
@ -1073,11 +1073,11 @@ static bool LoadUsesSimpleEnoughForHeapSRA(const Value *V,
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (const PHINode *PN = dyn_cast<PHINode>(UI)) {
|
if (const PHINode *PN = dyn_cast<PHINode>(UI)) {
|
||||||
if (!LoadUsingPHIsPerLoad.insert(PN))
|
if (!LoadUsingPHIsPerLoad.insert(PN).second)
|
||||||
// This means some phi nodes are dependent on each other.
|
// This means some phi nodes are dependent on each other.
|
||||||
// Avoid infinite looping!
|
// Avoid infinite looping!
|
||||||
return false;
|
return false;
|
||||||
if (!LoadUsingPHIs.insert(PN))
|
if (!LoadUsingPHIs.insert(PN).second)
|
||||||
// If we have already analyzed this PHI, then it is safe.
|
// If we have already analyzed this PHI, then it is safe.
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
@ -2045,7 +2045,8 @@ isSimpleEnoughValueToCommit(Constant *C,
|
|||||||
SmallPtrSetImpl<Constant*> &SimpleConstants,
|
SmallPtrSetImpl<Constant*> &SimpleConstants,
|
||||||
const DataLayout *DL) {
|
const DataLayout *DL) {
|
||||||
// If we already checked this constant, we win.
|
// If we already checked this constant, we win.
|
||||||
if (!SimpleConstants.insert(C)) return true;
|
if (!SimpleConstants.insert(C).second)
|
||||||
|
return true;
|
||||||
// Check the constant.
|
// Check the constant.
|
||||||
return isSimpleEnoughValueToCommitHelper(C, SimpleConstants, DL);
|
return isSimpleEnoughValueToCommitHelper(C, SimpleConstants, DL);
|
||||||
}
|
}
|
||||||
@ -2670,7 +2671,7 @@ bool Evaluator::EvaluateFunction(Function *F, Constant *&RetVal,
|
|||||||
// Okay, we succeeded in evaluating this control flow. See if we have
|
// Okay, we succeeded in evaluating this control flow. See if we have
|
||||||
// executed the new block before. If so, we have a looping function,
|
// executed the new block before. If so, we have a looping function,
|
||||||
// which we cannot evaluate in reasonable time.
|
// which we cannot evaluate in reasonable time.
|
||||||
if (!ExecutedBlocks.insert(NextBB))
|
if (!ExecutedBlocks.insert(NextBB).second)
|
||||||
return false; // looped!
|
return false; // looped!
|
||||||
|
|
||||||
// Okay, we have never been in this block before. Check to see if there
|
// Okay, we have never been in this block before. Check to see if there
|
||||||
@ -2779,8 +2780,10 @@ public:
|
|||||||
}
|
}
|
||||||
bool usedErase(GlobalValue *GV) { return Used.erase(GV); }
|
bool usedErase(GlobalValue *GV) { return Used.erase(GV); }
|
||||||
bool compilerUsedErase(GlobalValue *GV) { return CompilerUsed.erase(GV); }
|
bool compilerUsedErase(GlobalValue *GV) { return CompilerUsed.erase(GV); }
|
||||||
bool usedInsert(GlobalValue *GV) { return Used.insert(GV); }
|
bool usedInsert(GlobalValue *GV) { return Used.insert(GV).second; }
|
||||||
bool compilerUsedInsert(GlobalValue *GV) { return CompilerUsed.insert(GV); }
|
bool compilerUsedInsert(GlobalValue *GV) {
|
||||||
|
return CompilerUsed.insert(GV).second;
|
||||||
|
}
|
||||||
|
|
||||||
void syncVariablesAndSets() {
|
void syncVariablesAndSets() {
|
||||||
if (UsedV)
|
if (UsedV)
|
||||||
@ -2973,7 +2976,7 @@ static bool cxxDtorIsEmpty(const Function &Fn,
|
|||||||
SmallPtrSet<const Function *, 8> NewCalledFunctions(CalledFunctions);
|
SmallPtrSet<const Function *, 8> NewCalledFunctions(CalledFunctions);
|
||||||
|
|
||||||
// Don't treat recursive functions as empty.
|
// Don't treat recursive functions as empty.
|
||||||
if (!NewCalledFunctions.insert(CalledFn))
|
if (!NewCalledFunctions.insert(CalledFn).second)
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
if (!cxxDtorIsEmpty(*CalledFn, NewCalledFunctions))
|
if (!cxxDtorIsEmpty(*CalledFn, NewCalledFunctions))
|
||||||
|
@ -219,7 +219,7 @@ static bool InlineCallIfPossible(CallSite CS, InlineFunctionInfo &IFI,
|
|||||||
|
|
||||||
// If the inlined function already uses this alloca then we can't reuse
|
// If the inlined function already uses this alloca then we can't reuse
|
||||||
// it.
|
// it.
|
||||||
if (!UsedAllocas.insert(AvailableAlloca))
|
if (!UsedAllocas.insert(AvailableAlloca).second)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
// Otherwise, we *can* reuse it, RAUW AI into AvailableAlloca and declare
|
// Otherwise, we *can* reuse it, RAUW AI into AvailableAlloca and declare
|
||||||
|
@ -1049,7 +1049,7 @@ int FunctionComparator::compare() {
|
|||||||
|
|
||||||
assert(TermL->getNumSuccessors() == TermR->getNumSuccessors());
|
assert(TermL->getNumSuccessors() == TermR->getNumSuccessors());
|
||||||
for (unsigned i = 0, e = TermL->getNumSuccessors(); i != e; ++i) {
|
for (unsigned i = 0, e = TermL->getNumSuccessors(); i != e; ++i) {
|
||||||
if (!VisitedBBs.insert(TermL->getSuccessor(i)))
|
if (!VisitedBBs.insert(TermL->getSuccessor(i)).second)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
FnLBBs.push_back(TermL->getSuccessor(i));
|
FnLBBs.push_back(TermL->getSuccessor(i));
|
||||||
|
@ -511,7 +511,7 @@ static bool DeadPHICycle(PHINode *PN,
|
|||||||
if (!PN->hasOneUse()) return false;
|
if (!PN->hasOneUse()) return false;
|
||||||
|
|
||||||
// Remember this node, and if we find the cycle, return.
|
// Remember this node, and if we find the cycle, return.
|
||||||
if (!PotentiallyDeadPHIs.insert(PN))
|
if (!PotentiallyDeadPHIs.insert(PN).second)
|
||||||
return true;
|
return true;
|
||||||
|
|
||||||
// Don't scan crazily complex things.
|
// Don't scan crazily complex things.
|
||||||
@ -530,7 +530,7 @@ static bool DeadPHICycle(PHINode *PN,
|
|||||||
static bool PHIsEqualValue(PHINode *PN, Value *NonPhiInVal,
|
static bool PHIsEqualValue(PHINode *PN, Value *NonPhiInVal,
|
||||||
SmallPtrSetImpl<PHINode*> &ValueEqualPHIs) {
|
SmallPtrSetImpl<PHINode*> &ValueEqualPHIs) {
|
||||||
// See if we already saw this PHI node.
|
// See if we already saw this PHI node.
|
||||||
if (!ValueEqualPHIs.insert(PN))
|
if (!ValueEqualPHIs.insert(PN).second)
|
||||||
return true;
|
return true;
|
||||||
|
|
||||||
// Don't scan crazily complex things.
|
// Don't scan crazily complex things.
|
||||||
@ -654,7 +654,7 @@ Instruction *InstCombiner::SliceUpIllegalIntegerPHI(PHINode &FirstPhi) {
|
|||||||
|
|
||||||
// If the user is a PHI, inspect its uses recursively.
|
// If the user is a PHI, inspect its uses recursively.
|
||||||
if (PHINode *UserPN = dyn_cast<PHINode>(UserI)) {
|
if (PHINode *UserPN = dyn_cast<PHINode>(UserI)) {
|
||||||
if (PHIsInspected.insert(UserPN))
|
if (PHIsInspected.insert(UserPN).second)
|
||||||
PHIsToSlice.push_back(UserPN);
|
PHIsToSlice.push_back(UserPN);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
@ -2341,7 +2341,7 @@ Instruction *InstCombiner::visitLandingPadInst(LandingPadInst &LI) {
|
|||||||
|
|
||||||
// If we already saw this clause, there is no point in having a second
|
// If we already saw this clause, there is no point in having a second
|
||||||
// copy of it.
|
// copy of it.
|
||||||
if (AlreadyCaught.insert(TypeInfo)) {
|
if (AlreadyCaught.insert(TypeInfo).second) {
|
||||||
// This catch clause was not already seen.
|
// This catch clause was not already seen.
|
||||||
NewClauses.push_back(CatchClause);
|
NewClauses.push_back(CatchClause);
|
||||||
} else {
|
} else {
|
||||||
@ -2423,7 +2423,7 @@ Instruction *InstCombiner::visitLandingPadInst(LandingPadInst &LI) {
|
|||||||
continue;
|
continue;
|
||||||
// There is no point in having multiple copies of the same typeinfo in
|
// There is no point in having multiple copies of the same typeinfo in
|
||||||
// a filter, so only add it if we didn't already.
|
// a filter, so only add it if we didn't already.
|
||||||
if (SeenInFilter.insert(TypeInfo))
|
if (SeenInFilter.insert(TypeInfo).second)
|
||||||
NewFilterElts.push_back(cast<Constant>(Elt));
|
NewFilterElts.push_back(cast<Constant>(Elt));
|
||||||
}
|
}
|
||||||
// A filter containing a catch-all cannot match anything by definition.
|
// A filter containing a catch-all cannot match anything by definition.
|
||||||
@ -2675,7 +2675,8 @@ static bool AddReachableCodeToWorklist(BasicBlock *BB,
|
|||||||
BB = Worklist.pop_back_val();
|
BB = Worklist.pop_back_val();
|
||||||
|
|
||||||
// We have now visited this block! If we've already been here, ignore it.
|
// We have now visited this block! If we've already been here, ignore it.
|
||||||
if (!Visited.insert(BB)) continue;
|
if (!Visited.insert(BB).second)
|
||||||
|
continue;
|
||||||
|
|
||||||
for (BasicBlock::iterator BBI = BB->begin(), E = BB->end(); BBI != E; ) {
|
for (BasicBlock::iterator BBI = BB->begin(), E = BB->end(); BBI != E; ) {
|
||||||
Instruction *Inst = BBI++;
|
Instruction *Inst = BBI++;
|
||||||
|
@ -1317,7 +1317,7 @@ bool AddressSanitizer::runOnFunction(Function &F) {
|
|||||||
if (Value *Addr =
|
if (Value *Addr =
|
||||||
isInterestingMemoryAccess(&Inst, &IsWrite, &Alignment)) {
|
isInterestingMemoryAccess(&Inst, &IsWrite, &Alignment)) {
|
||||||
if (ClOpt && ClOptSameTemp) {
|
if (ClOpt && ClOptSameTemp) {
|
||||||
if (!TempsToInstrument.insert(Addr))
|
if (!TempsToInstrument.insert(Addr).second)
|
||||||
continue; // We've seen this temp in the current BB.
|
continue; // We've seen this temp in the current BB.
|
||||||
}
|
}
|
||||||
} else if (ClInvalidPointerPairs &&
|
} else if (ClInvalidPointerPairs &&
|
||||||
|
@ -229,7 +229,7 @@ llvm::objcarc::FindDependencies(DependenceKind Flavor,
|
|||||||
// Add the predecessors to the worklist.
|
// Add the predecessors to the worklist.
|
||||||
do {
|
do {
|
||||||
BasicBlock *PredBB = *PI;
|
BasicBlock *PredBB = *PI;
|
||||||
if (Visited.insert(PredBB))
|
if (Visited.insert(PredBB).second)
|
||||||
Worklist.push_back(std::make_pair(PredBB, PredBB->end()));
|
Worklist.push_back(std::make_pair(PredBB, PredBB->end()));
|
||||||
} while (++PI != PE);
|
} while (++PI != PE);
|
||||||
break;
|
break;
|
||||||
|
@ -188,7 +188,7 @@ static inline bool AreAnyUnderlyingObjectsAnAlloca(const Value *V) {
|
|||||||
if (isa<AllocaInst>(P))
|
if (isa<AllocaInst>(P))
|
||||||
return true;
|
return true;
|
||||||
|
|
||||||
if (!Visited.insert(P))
|
if (!Visited.insert(P).second)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
if (const SelectInst *SI = dyn_cast<const SelectInst>(P)) {
|
if (const SelectInst *SI = dyn_cast<const SelectInst>(P)) {
|
||||||
@ -412,7 +412,7 @@ bool RRInfo::Merge(const RRInfo &Other) {
|
|||||||
// that makes this a partial merge.
|
// that makes this a partial merge.
|
||||||
bool Partial = ReverseInsertPts.size() != Other.ReverseInsertPts.size();
|
bool Partial = ReverseInsertPts.size() != Other.ReverseInsertPts.size();
|
||||||
for (Instruction *Inst : Other.ReverseInsertPts)
|
for (Instruction *Inst : Other.ReverseInsertPts)
|
||||||
Partial |= ReverseInsertPts.insert(Inst);
|
Partial |= ReverseInsertPts.insert(Inst).second;
|
||||||
return Partial;
|
return Partial;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -2194,7 +2194,7 @@ ComputePostOrders(Function &F,
|
|||||||
|
|
||||||
while (SuccStack.back().second != SE) {
|
while (SuccStack.back().second != SE) {
|
||||||
BasicBlock *SuccBB = *SuccStack.back().second++;
|
BasicBlock *SuccBB = *SuccStack.back().second++;
|
||||||
if (Visited.insert(SuccBB)) {
|
if (Visited.insert(SuccBB).second) {
|
||||||
TerminatorInst *TI = cast<TerminatorInst>(&SuccBB->back());
|
TerminatorInst *TI = cast<TerminatorInst>(&SuccBB->back());
|
||||||
SuccStack.push_back(std::make_pair(SuccBB, succ_iterator(TI)));
|
SuccStack.push_back(std::make_pair(SuccBB, succ_iterator(TI)));
|
||||||
BBStates[CurrBB].addSucc(SuccBB);
|
BBStates[CurrBB].addSucc(SuccBB);
|
||||||
@ -2235,7 +2235,7 @@ ComputePostOrders(Function &F,
|
|||||||
BBState::edge_iterator PE = BBStates[PredStack.back().first].pred_end();
|
BBState::edge_iterator PE = BBStates[PredStack.back().first].pred_end();
|
||||||
while (PredStack.back().second != PE) {
|
while (PredStack.back().second != PE) {
|
||||||
BasicBlock *BB = *PredStack.back().second++;
|
BasicBlock *BB = *PredStack.back().second++;
|
||||||
if (Visited.insert(BB)) {
|
if (Visited.insert(BB).second) {
|
||||||
PredStack.push_back(std::make_pair(BB, BBStates[BB].pred_begin()));
|
PredStack.push_back(std::make_pair(BB, BBStates[BB].pred_begin()));
|
||||||
goto reverse_dfs_next_succ;
|
goto reverse_dfs_next_succ;
|
||||||
}
|
}
|
||||||
@ -2390,7 +2390,7 @@ ObjCARCOpt::ConnectTDBUTraversals(DenseMap<const BasicBlock *, BBState>
|
|||||||
if (!NewRetainReleaseRRI.Calls.count(NewRetain))
|
if (!NewRetainReleaseRRI.Calls.count(NewRetain))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
if (ReleasesToMove.Calls.insert(NewRetainRelease)) {
|
if (ReleasesToMove.Calls.insert(NewRetainRelease).second) {
|
||||||
|
|
||||||
// If we overflow when we compute the path count, don't remove/move
|
// If we overflow when we compute the path count, don't remove/move
|
||||||
// anything.
|
// anything.
|
||||||
@ -2422,7 +2422,7 @@ ObjCARCOpt::ConnectTDBUTraversals(DenseMap<const BasicBlock *, BBState>
|
|||||||
// Collect the optimal insertion points.
|
// Collect the optimal insertion points.
|
||||||
if (!KnownSafe)
|
if (!KnownSafe)
|
||||||
for (Instruction *RIP : NewRetainReleaseRRI.ReverseInsertPts) {
|
for (Instruction *RIP : NewRetainReleaseRRI.ReverseInsertPts) {
|
||||||
if (ReleasesToMove.ReverseInsertPts.insert(RIP)) {
|
if (ReleasesToMove.ReverseInsertPts.insert(RIP).second) {
|
||||||
// If we overflow when we compute the path count, don't
|
// If we overflow when we compute the path count, don't
|
||||||
// remove/move anything.
|
// remove/move anything.
|
||||||
const BBState &RIPBBState = BBStates[RIP->getParent()];
|
const BBState &RIPBBState = BBStates[RIP->getParent()];
|
||||||
@ -2467,7 +2467,7 @@ ObjCARCOpt::ConnectTDBUTraversals(DenseMap<const BasicBlock *, BBState>
|
|||||||
if (!NewReleaseRetainRRI.Calls.count(NewRelease))
|
if (!NewReleaseRetainRRI.Calls.count(NewRelease))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
if (RetainsToMove.Calls.insert(NewReleaseRetain)) {
|
if (RetainsToMove.Calls.insert(NewReleaseRetain).second) {
|
||||||
// If we overflow when we compute the path count, don't remove/move
|
// If we overflow when we compute the path count, don't remove/move
|
||||||
// anything.
|
// anything.
|
||||||
const BBState &NRRBBState = BBStates[NewReleaseRetain->getParent()];
|
const BBState &NRRBBState = BBStates[NewReleaseRetain->getParent()];
|
||||||
@ -2483,7 +2483,7 @@ ObjCARCOpt::ConnectTDBUTraversals(DenseMap<const BasicBlock *, BBState>
|
|||||||
// Collect the optimal insertion points.
|
// Collect the optimal insertion points.
|
||||||
if (!KnownSafe)
|
if (!KnownSafe)
|
||||||
for (Instruction *RIP : NewReleaseRetainRRI.ReverseInsertPts) {
|
for (Instruction *RIP : NewReleaseRetainRRI.ReverseInsertPts) {
|
||||||
if (RetainsToMove.ReverseInsertPts.insert(RIP)) {
|
if (RetainsToMove.ReverseInsertPts.insert(RIP).second) {
|
||||||
// If we overflow when we compute the path count, don't
|
// If we overflow when we compute the path count, don't
|
||||||
// remove/move anything.
|
// remove/move anything.
|
||||||
const BBState &RIPBBState = BBStates[RIP->getParent()];
|
const BBState &RIPBBState = BBStates[RIP->getParent()];
|
||||||
|
@ -62,7 +62,7 @@ bool ProvenanceAnalysis::relatedPHI(const PHINode *A,
|
|||||||
SmallPtrSet<const Value *, 4> UniqueSrc;
|
SmallPtrSet<const Value *, 4> UniqueSrc;
|
||||||
for (unsigned i = 0, e = A->getNumIncomingValues(); i != e; ++i) {
|
for (unsigned i = 0, e = A->getNumIncomingValues(); i != e; ++i) {
|
||||||
const Value *PV1 = A->getIncomingValue(i);
|
const Value *PV1 = A->getIncomingValue(i);
|
||||||
if (UniqueSrc.insert(PV1) && related(PV1, B))
|
if (UniqueSrc.insert(PV1).second && related(PV1, B))
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -94,7 +94,7 @@ static bool IsStoredObjCPointer(const Value *P) {
|
|||||||
if (isa<PtrToIntInst>(P))
|
if (isa<PtrToIntInst>(P))
|
||||||
// Assume the worst.
|
// Assume the worst.
|
||||||
return true;
|
return true;
|
||||||
if (Visited.insert(Ur))
|
if (Visited.insert(Ur).second)
|
||||||
Worklist.push_back(Ur);
|
Worklist.push_back(Ur);
|
||||||
}
|
}
|
||||||
} while (!Worklist.empty());
|
} while (!Worklist.empty());
|
||||||
|
@ -73,7 +73,7 @@ bool ADCE::runOnFunction(Function& F) {
|
|||||||
for (Instruction::op_iterator OI = curr->op_begin(), OE = curr->op_end();
|
for (Instruction::op_iterator OI = curr->op_begin(), OE = curr->op_end();
|
||||||
OI != OE; ++OI)
|
OI != OE; ++OI)
|
||||||
if (Instruction* Inst = dyn_cast<Instruction>(OI))
|
if (Instruction* Inst = dyn_cast<Instruction>(OI))
|
||||||
if (alive.insert(Inst))
|
if (alive.insert(Inst).second)
|
||||||
worklist.push_back(Inst);
|
worklist.push_back(Inst);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1102,7 +1102,7 @@ void WidenIV::pushNarrowIVUsers(Instruction *NarrowDef, Instruction *WideDef) {
|
|||||||
Instruction *NarrowUser = cast<Instruction>(U);
|
Instruction *NarrowUser = cast<Instruction>(U);
|
||||||
|
|
||||||
// Handle data flow merges and bizarre phi cycles.
|
// Handle data flow merges and bizarre phi cycles.
|
||||||
if (!Widened.insert(NarrowUser))
|
if (!Widened.insert(NarrowUser).second)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
NarrowIVUsers.push_back(NarrowIVDefUse(NarrowDef, NarrowUser, WideDef));
|
NarrowIVUsers.push_back(NarrowIVDefUse(NarrowDef, NarrowUser, WideDef));
|
||||||
@ -1284,7 +1284,7 @@ void IndVarSimplify::SimplifyAndExtend(Loop *L,
|
|||||||
static bool isHighCostExpansion(const SCEV *S, BranchInst *BI,
|
static bool isHighCostExpansion(const SCEV *S, BranchInst *BI,
|
||||||
SmallPtrSetImpl<const SCEV*> &Processed,
|
SmallPtrSetImpl<const SCEV*> &Processed,
|
||||||
ScalarEvolution *SE) {
|
ScalarEvolution *SE) {
|
||||||
if (!Processed.insert(S))
|
if (!Processed.insert(S).second)
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
// If the backedge-taken count is a UDiv, it's very likely a UDiv that
|
// If the backedge-taken count is a UDiv, it's very likely a UDiv that
|
||||||
@ -1475,7 +1475,7 @@ static bool hasConcreteDefImpl(Value *V, SmallPtrSetImpl<Value*> &Visited,
|
|||||||
|
|
||||||
// Optimistically handle other instructions.
|
// Optimistically handle other instructions.
|
||||||
for (User::op_iterator OI = I->op_begin(), E = I->op_end(); OI != E; ++OI) {
|
for (User::op_iterator OI = I->op_begin(), E = I->op_end(); OI != E; ++OI) {
|
||||||
if (!Visited.insert(*OI))
|
if (!Visited.insert(*OI).second)
|
||||||
continue;
|
continue;
|
||||||
if (!hasConcreteDefImpl(*OI, Visited, Depth+1))
|
if (!hasConcreteDefImpl(*OI, Visited, Depth+1))
|
||||||
return false;
|
return false;
|
||||||
|
@ -932,7 +932,7 @@ bool JumpThreading::SimplifyPartiallyRedundantLoad(LoadInst *LI) {
|
|||||||
BasicBlock *PredBB = *PI;
|
BasicBlock *PredBB = *PI;
|
||||||
|
|
||||||
// If we already scanned this predecessor, skip it.
|
// If we already scanned this predecessor, skip it.
|
||||||
if (!PredsScanned.insert(PredBB))
|
if (!PredsScanned.insert(PredBB).second)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
// Scan the predecessor to see if the value is available in the pred.
|
// Scan the predecessor to see if the value is available in the pred.
|
||||||
@ -1151,7 +1151,7 @@ bool JumpThreading::ProcessThreadableEdges(Value *Cond, BasicBlock *BB,
|
|||||||
|
|
||||||
for (unsigned i = 0, e = PredValues.size(); i != e; ++i) {
|
for (unsigned i = 0, e = PredValues.size(); i != e; ++i) {
|
||||||
BasicBlock *Pred = PredValues[i].second;
|
BasicBlock *Pred = PredValues[i].second;
|
||||||
if (!SeenPreds.insert(Pred))
|
if (!SeenPreds.insert(Pred).second)
|
||||||
continue; // Duplicate predecessor entry.
|
continue; // Duplicate predecessor entry.
|
||||||
|
|
||||||
// If the predecessor ends with an indirect goto, we can't change its
|
// If the predecessor ends with an indirect goto, we can't change its
|
||||||
|
@ -152,7 +152,7 @@ bool LoopInstSimplify::runOnLoop(Loop *L, LPPassManager &LPM) {
|
|||||||
for (succ_iterator SI = succ_begin(BB), SE = succ_end(BB); SI != SE;
|
for (succ_iterator SI = succ_begin(BB), SE = succ_end(BB); SI != SE;
|
||||||
++SI) {
|
++SI) {
|
||||||
BasicBlock *SuccBB = *SI;
|
BasicBlock *SuccBB = *SI;
|
||||||
if (!Visited.insert(SuccBB))
|
if (!Visited.insert(SuccBB).second)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
const Loop *SuccLoop = LI->getLoopFor(SuccBB);
|
const Loop *SuccLoop = LI->getLoopFor(SuccBB);
|
||||||
@ -165,7 +165,7 @@ bool LoopInstSimplify::runOnLoop(Loop *L, LPPassManager &LPM) {
|
|||||||
|
|
||||||
for (unsigned i = 0; i < SubLoopExitBlocks.size(); ++i) {
|
for (unsigned i = 0; i < SubLoopExitBlocks.size(); ++i) {
|
||||||
BasicBlock *ExitBB = SubLoopExitBlocks[i];
|
BasicBlock *ExitBB = SubLoopExitBlocks[i];
|
||||||
if (LI->getLoopFor(ExitBB) == L && Visited.insert(ExitBB))
|
if (LI->getLoopFor(ExitBB) == L && Visited.insert(ExitBB).second)
|
||||||
VisitStack.push_back(WorklistItem(ExitBB, false));
|
VisitStack.push_back(WorklistItem(ExitBB, false));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -762,7 +762,7 @@ static bool isHighCostExpansion(const SCEV *S,
|
|||||||
Processed, SE);
|
Processed, SE);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!Processed.insert(S))
|
if (!Processed.insert(S).second)
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(S)) {
|
if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(S)) {
|
||||||
@ -975,7 +975,7 @@ void Cost::RatePrimaryRegister(const SCEV *Reg,
|
|||||||
Lose();
|
Lose();
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (Regs.insert(Reg)) {
|
if (Regs.insert(Reg).second) {
|
||||||
RateRegister(Reg, Regs, L, SE, DT);
|
RateRegister(Reg, Regs, L, SE, DT);
|
||||||
if (LoserRegs && isLoser())
|
if (LoserRegs && isLoser())
|
||||||
LoserRegs->insert(Reg);
|
LoserRegs->insert(Reg);
|
||||||
@ -2802,7 +2802,7 @@ void LSRInstance::CollectChains() {
|
|||||||
User::op_iterator IVOpIter = findIVOperand(I->op_begin(), IVOpEnd, L, SE);
|
User::op_iterator IVOpIter = findIVOperand(I->op_begin(), IVOpEnd, L, SE);
|
||||||
while (IVOpIter != IVOpEnd) {
|
while (IVOpIter != IVOpEnd) {
|
||||||
Instruction *IVOpInst = cast<Instruction>(*IVOpIter);
|
Instruction *IVOpInst = cast<Instruction>(*IVOpIter);
|
||||||
if (UniqueOperands.insert(IVOpInst))
|
if (UniqueOperands.insert(IVOpInst).second)
|
||||||
ChainInstruction(I, IVOpInst, ChainUsersVec);
|
ChainInstruction(I, IVOpInst, ChainUsersVec);
|
||||||
IVOpIter = findIVOperand(std::next(IVOpIter), IVOpEnd, L, SE);
|
IVOpIter = findIVOperand(std::next(IVOpIter), IVOpEnd, L, SE);
|
||||||
}
|
}
|
||||||
@ -3122,7 +3122,7 @@ LSRInstance::CollectLoopInvariantFixupsAndFormulae() {
|
|||||||
const SCEV *S = Worklist.pop_back_val();
|
const SCEV *S = Worklist.pop_back_val();
|
||||||
|
|
||||||
// Don't process the same SCEV twice
|
// Don't process the same SCEV twice
|
||||||
if (!Visited.insert(S))
|
if (!Visited.insert(S).second)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
if (const SCEVNAryExpr *N = dyn_cast<SCEVNAryExpr>(S))
|
if (const SCEVNAryExpr *N = dyn_cast<SCEVNAryExpr>(S))
|
||||||
@ -3774,7 +3774,7 @@ void LSRInstance::GenerateCrossUseConstantOffsets() {
|
|||||||
for (int LUIdx = UsedByIndices.find_first(); LUIdx != -1;
|
for (int LUIdx = UsedByIndices.find_first(); LUIdx != -1;
|
||||||
LUIdx = UsedByIndices.find_next(LUIdx))
|
LUIdx = UsedByIndices.find_next(LUIdx))
|
||||||
// Make a memo of this use, offset, and register tuple.
|
// Make a memo of this use, offset, and register tuple.
|
||||||
if (UniqueItems.insert(std::make_pair(LUIdx, Imm)))
|
if (UniqueItems.insert(std::make_pair(LUIdx, Imm)).second)
|
||||||
WorkItems.push_back(WorkItem(LUIdx, Imm, OrigReg));
|
WorkItems.push_back(WorkItem(LUIdx, Imm, OrigReg));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -623,7 +623,7 @@ static bool LinearizeExprTree(BinaryOperator *I,
|
|||||||
// If this is a binary operation of the right kind with only one use then
|
// If this is a binary operation of the right kind with only one use then
|
||||||
// add its operands to the expression.
|
// add its operands to the expression.
|
||||||
if (BinaryOperator *BO = isReassociableOp(Op, Opcode)) {
|
if (BinaryOperator *BO = isReassociableOp(Op, Opcode)) {
|
||||||
assert(Visited.insert(Op) && "Not first visit!");
|
assert(Visited.insert(Op).second && "Not first visit!");
|
||||||
DEBUG(dbgs() << "DIRECT ADD: " << *Op << " (" << Weight << ")\n");
|
DEBUG(dbgs() << "DIRECT ADD: " << *Op << " (" << Weight << ")\n");
|
||||||
Worklist.push_back(std::make_pair(BO, Weight));
|
Worklist.push_back(std::make_pair(BO, Weight));
|
||||||
continue;
|
continue;
|
||||||
@ -633,7 +633,7 @@ static bool LinearizeExprTree(BinaryOperator *I,
|
|||||||
LeafMap::iterator It = Leaves.find(Op);
|
LeafMap::iterator It = Leaves.find(Op);
|
||||||
if (It == Leaves.end()) {
|
if (It == Leaves.end()) {
|
||||||
// Not in the leaf map. Must be the first time we saw this operand.
|
// Not in the leaf map. Must be the first time we saw this operand.
|
||||||
assert(Visited.insert(Op) && "Not first visit!");
|
assert(Visited.insert(Op).second && "Not first visit!");
|
||||||
if (!Op->hasOneUse()) {
|
if (!Op->hasOneUse()) {
|
||||||
// This value has uses not accounted for by the expression, so it is
|
// This value has uses not accounted for by the expression, so it is
|
||||||
// not safe to modify. Mark it as being a leaf.
|
// not safe to modify. Mark it as being a leaf.
|
||||||
@ -1609,7 +1609,7 @@ Value *Reassociate::OptimizeAdd(Instruction *I,
|
|||||||
SmallPtrSet<Value*, 8> Duplicates;
|
SmallPtrSet<Value*, 8> Duplicates;
|
||||||
for (unsigned i = 0, e = Factors.size(); i != e; ++i) {
|
for (unsigned i = 0, e = Factors.size(); i != e; ++i) {
|
||||||
Value *Factor = Factors[i];
|
Value *Factor = Factors[i];
|
||||||
if (!Duplicates.insert(Factor))
|
if (!Duplicates.insert(Factor).second)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
unsigned Occ = ++FactorOccurrences[Factor];
|
unsigned Occ = ++FactorOccurrences[Factor];
|
||||||
@ -1960,7 +1960,7 @@ void Reassociate::EraseInst(Instruction *I) {
|
|||||||
// and add that since that's where optimization actually happens.
|
// and add that since that's where optimization actually happens.
|
||||||
unsigned Opcode = Op->getOpcode();
|
unsigned Opcode = Op->getOpcode();
|
||||||
while (Op->hasOneUse() && Op->user_back()->getOpcode() == Opcode &&
|
while (Op->hasOneUse() && Op->user_back()->getOpcode() == Opcode &&
|
||||||
Visited.insert(Op))
|
Visited.insert(Op).second)
|
||||||
Op = Op->user_back();
|
Op = Op->user_back();
|
||||||
RedoInsts.insert(Op);
|
RedoInsts.insert(Op);
|
||||||
}
|
}
|
||||||
@@ -214,7 +214,8 @@ public:
   ///
   /// This returns true if the block was not considered live before.
   bool MarkBlockExecutable(BasicBlock *BB) {
-    if (!BBExecutable.insert(BB)) return false;
+    if (!BBExecutable.insert(BB).second)
+      return false;
     DEBUG(dbgs() << "Marking Block Executable: " << BB->getName() << '\n');
     BBWorkList.push_back(BB);  // Add the block to the work list!
     return true;
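This is the one hunk that grows by a line (-214,7 +214,8): the old single-line guard is split in two so the longer .second call stays readable. The shape of the resulting mark-and-enqueue helper, as a sketch with assumed container types:

    #include "llvm/ADT/SmallPtrSet.h"
    #include "llvm/ADT/SmallVector.h"
    #include "llvm/IR/BasicBlock.h"
    using namespace llvm;

    // Returns true only on the first call for a given block; later calls
    // find BB already in the set and bail out early.
    static bool markBlockExecutable(SmallPtrSet<BasicBlock *, 8> &BBExecutable,
                                    SmallVectorImpl<BasicBlock *> &BBWorkList,
                                    BasicBlock *BB) {
      if (!BBExecutable.insert(BB).second)
        return false;
      BBWorkList.push_back(BB); // Newly live: queue it for processing.
      return true;
    }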
@@ -349,7 +349,7 @@ public:

 private:
   void markAsDead(Instruction &I) {
-    if (VisitedDeadInsts.insert(&I))
+    if (VisitedDeadInsts.insert(&I).second)
       AS.DeadUsers.push_back(&I);
   }

@@ -639,7 +639,7 @@ private:
       }

       for (User *U : I->users())
-        if (Visited.insert(cast<Instruction>(U)))
+        if (Visited.insert(cast<Instruction>(U)).second)
          Uses.push_back(std::make_pair(I, cast<Instruction>(U)));
     } while (!Uses.empty());

@@ -848,7 +848,7 @@ public:
       else
         return false;

-    } while (Visited.insert(Ptr));
+    } while (Visited.insert(Ptr).second);

     return false;
   }
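The do/while form above is a cycle guard: the loop keeps walking as long as every pointer it reaches is new, and stops the moment insert reports a revisit. A sketch of the pattern (walkUntilRevisit is hypothetical; Step stands in for whatever one-hop traversal the caller supplies):

    #include "llvm/ADT/SmallPtrSet.h"
    #include "llvm/IR/Value.h"
    using namespace llvm;

    static Value *walkUntilRevisit(Value *Ptr, Value *(*Step)(Value *)) {
      SmallPtrSet<Value *, 4> Visited;
      Visited.insert(Ptr); // Seed with the start so a self-loop terminates.
      do {
        Ptr = Step(Ptr);
      } while (Visited.insert(Ptr).second); // false => seen before: stop
      return Ptr;
    }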
@@ -1461,7 +1461,7 @@ static Value *getAdjustedPtr(IRBuilderTy &IRB, const DataLayout &DL, Value *Ptr,
         break;
       Offset += GEPOffset;
       Ptr = GEP->getPointerOperand();
-      if (!Visited.insert(Ptr))
+      if (!Visited.insert(Ptr).second)
         break;
     }

@@ -1498,7 +1498,7 @@ static Value *getAdjustedPtr(IRBuilderTy &IRB, const DataLayout &DL, Value *Ptr,
       break;
     }
     assert(Ptr->getType()->isPointerTy() && "Unexpected operand type!");
-  } while (Visited.insert(Ptr));
+  } while (Visited.insert(Ptr).second);

   if (!OffsetPtr) {
     if (!Int8Ptr) {
@@ -2861,7 +2861,7 @@ private:
   /// This uses a set to de-duplicate users.
   void enqueueUsers(Instruction &I) {
     for (Use &U : I.uses())
-      if (Visited.insert(U.getUser()))
+      if (Visited.insert(U.getUser()).second)
        Queue.push_back(&U);
   }

@@ -3588,7 +3588,7 @@ static void enqueueUsersInWorklist(Instruction &I,
                                    SmallVectorImpl<Instruction *> &Worklist,
                                    SmallPtrSetImpl<Instruction *> &Visited) {
   for (User *U : I.users())
-    if (Visited.insert(cast<Instruction>(U)))
+    if (Visited.insert(cast<Instruction>(U)).second)
       Worklist.push_back(cast<Instruction>(U));
 }

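enqueueUsersInWorklist above is the canonical visited/worklist pairing, now phrased through .second: an element is pushed exactly once, the first time insert reports it as new. Reduced to its core (a sketch, not the patched function itself):

    #include "llvm/ADT/SmallPtrSet.h"
    #include "llvm/ADT/SmallVector.h"
    #include "llvm/IR/Instruction.h"
    using namespace llvm;

    static void enqueueOnce(Instruction *I,
                            SmallVectorImpl<Instruction *> &Worklist,
                            SmallPtrSetImpl<Instruction *> &Visited) {
      if (Visited.insert(I).second) // true only on first sighting
        Worklist.push_back(I);      // so the worklist never holds duplicates
    }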
@@ -305,7 +305,7 @@ void SampleProfileLoader::findEquivalencesFor(
   for (auto *BB2 : Descendants) {
     bool IsDomParent = DomTree->dominates(BB2, BB1);
     bool IsInSameLoop = LI->getLoopFor(BB1) == LI->getLoopFor(BB2);
-    if (BB1 != BB2 && VisitedBlocks.insert(BB2) && IsDomParent &&
+    if (BB1 != BB2 && VisitedBlocks.insert(BB2).second && IsDomParent &&
         IsInSameLoop) {
       EquivalenceClass[BB2] = BB1;

@@ -494,7 +494,7 @@ bool SampleProfileLoader::propagateThroughEdges(Function &F) {
                      << " known. Set weight for block: ";
                printBlockWeight(dbgs(), BB););
         }
-        if (VisitedBlocks.insert(BB))
+        if (VisitedBlocks.insert(BB).second)
           Changed = true;
       } else if (NumUnknownEdges == 1 && VisitedBlocks.count(BB)) {
         // If there is a single unknown edge and the block has been
@@ -540,7 +540,7 @@ void SampleProfileLoader::buildEdges(Function &F) {
       llvm_unreachable("Found a stale predecessors list in a basic block.");
     for (pred_iterator PI = pred_begin(B1), PE = pred_end(B1); PI != PE; ++PI) {
       BasicBlock *B2 = *PI;
-      if (Visited.insert(B2))
+      if (Visited.insert(B2).second)
         Predecessors[B1].push_back(B2);
     }

@@ -550,7 +550,7 @@ void SampleProfileLoader::buildEdges(Function &F) {
       llvm_unreachable("Found a stale successors list in a basic block.");
     for (succ_iterator SI = succ_begin(B1), SE = succ_end(B1); SI != SE; ++SI) {
       BasicBlock *B2 = *SI;
-      if (Visited.insert(B2))
+      if (Visited.insert(B2).second)
         Successors[B1].push_back(B2);
     }
   }
@@ -1669,7 +1669,7 @@ void SROA::isSafePHISelectUseForScalarRepl(Instruction *I, uint64_t Offset,
                                            AllocaInfo &Info) {
   // If we've already checked this PHI, don't do it again.
   if (PHINode *PN = dyn_cast<PHINode>(I))
-    if (!Info.CheckedPHIs.insert(PN))
+    if (!Info.CheckedPHIs.insert(PN).second)
       return;

   for (User *U : I->users()) {
@@ -179,7 +179,7 @@ struct AllocaDerivedValueTracker {

   auto AddUsesToWorklist = [&](Value *V) {
     for (auto &U : V->uses()) {
-      if (!Visited.insert(&U))
+      if (!Visited.insert(&U).second)
         continue;
       Worklist.push_back(&U);
     }
@@ -133,7 +133,7 @@ static bool analyzeGlobalAux(const Value *V, GlobalStatus &GS,
     } else if (const PHINode *PN = dyn_cast<PHINode>(I)) {
       // PHI nodes we can check just like select or GEP instructions, but we
       // have to be careful about infinite recursion.
-      if (PhiUsers.insert(PN)) // Not already visited.
+      if (PhiUsers.insert(PN).second) // Not already visited.
         if (analyzeGlobalAux(I, GS, PhiUsers))
           return true;
     } else if (isa<CmpInst>(I)) {
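Here .second doubles as a recursion guard: analyzeGlobalAux only recurses into a PHI the first time it is seen, so mutually referencing PHI nodes cannot send the analysis into an infinite loop. A sketch of the guard in isolation (visitPhi and its body are hypothetical):

    #include "llvm/ADT/SmallPtrSet.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    static bool visitPhi(const PHINode *PN,
                         SmallPtrSetImpl<const PHINode *> &PhiUsers) {
      if (!PhiUsers.insert(PN).second)
        return false; // Already visited: cut the recursion here.
      // ... examine PN's incoming values, possibly re-entering visitPhi ...
      return true;
    }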
@@ -392,7 +392,7 @@ bool llvm::RecursivelyDeleteDeadPHINode(PHINode *PN,

     // If we find an instruction more than once, we're on a cycle that
     // won't prove fruitful.
-    if (!Visited.insert(I)) {
+    if (!Visited.insert(I).second) {
       // Break the cycle and delete the instruction and its operands.
       I->replaceAllUsesWith(UndefValue::get(I->getType()));
       (void)RecursivelyDeleteTriviallyDeadInstructions(I, TLI);
@@ -1266,7 +1266,7 @@ static bool markAliveBlocks(BasicBlock *BB,

     Changed |= ConstantFoldTerminator(BB, true);
     for (succ_iterator SI = succ_begin(BB), SE = succ_end(BB); SI != SE; ++SI)
-      if (Reachable.insert(*SI))
+      if (Reachable.insert(*SI).second)
         Worklist.push_back(*SI);
   } while (!Worklist.empty());
   return Changed;
@@ -112,7 +112,7 @@ FoldBlockIntoPredecessor(BasicBlock *BB, LoopInfo* LI, LPPassManager *LPM,
   if (LPM) {
     if (ScalarEvolution *SE = LPM->getAnalysisIfAvailable<ScalarEvolution>()) {
       if (Loop *L = LI->getLoopFor(BB)) {
-        if (ForgottenLoops.insert(L))
+        if (ForgottenLoops.insert(L).second)
           SE->forgetLoop(L);
       }
     }
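Taken together, these call-site updates leave SmallPtrSet, SmallSet, and SetVector insertions reading the same way as the standard library's associative containers. A plain-C++ comparison with no LLVM dependencies:

    #include <cassert>
    #include <set>

    int main() {
      std::set<int> S;
      auto First = S.insert(42);  // std::pair<iterator, bool>
      assert(First.second);       // first insertion: newly added
      auto Again = S.insert(42);
      assert(!Again.second);      // duplicate: already present
      return 0;
    }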
Some files were not shown because too many files have changed in this diff.