Replace std::copy with a back inserter with vector append where feasible

All of these cases were just appending from random-access iterators to a
vector. Using insert/append grows the vector to the exact final size in
one step and moves the growing out of the loop. No intended
functionality change.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@230845 91177308-0d34-0410-b5e6-96231b3b80d8
Benjamin Kramer 2015-02-28 10:11:12 +00:00
parent 12d1e53db8
commit 31fbd9f7b0
8 changed files with 15 additions and 21 deletions
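
For illustration only (not part of the original commit), here is a minimal
standalone sketch of the transformation. Both snippets append a whole range,
but std::copy into a back_inserter grows the destination one push_back at a
time, while the ranged insert sees both random-access iterators, so it can
reallocate to the final size at most once. LLVM's SmallVector::append plays
the same role as vector::insert in the hunks below.

#include <algorithm>
#include <iterator>
#include <vector>

int main() {
  std::vector<int> Src = {1, 2, 3, 4};
  std::vector<int> Dst;

  // Before: element-by-element push_back through the insert iterator;
  // the vector may reallocate several times as it grows.
  std::copy(Src.begin(), Src.end(), std::back_inserter(Dst));

  // After: the range overload can compute std::distance(first, last)
  // up front, grow the vector once, and then copy the elements.
  Dst.insert(Dst.end(), Src.begin(), Src.end());
  return 0;
}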


@@ -78,7 +78,7 @@ inline void addNodeToInterval(Interval *Int, BasicBlock *BB) {
 //
 inline void addNodeToInterval(Interval *Int, Interval *I) {
   // Add all of the nodes in I as new nodes in Int.
-  copy(I->Nodes.begin(), I->Nodes.end(), back_inserter(Int->Nodes));
+  Int->Nodes.insert(Int->Nodes.end(), I->Nodes.begin(), I->Nodes.end());
 }


@@ -312,8 +312,7 @@ static const Value *getNoopInput(const Value *V,
       // previous aggregate. Combine the two paths to obtain the true address of
       // our element.
       ArrayRef<unsigned> ExtractLoc = EVI->getIndices();
-      std::copy(ExtractLoc.rbegin(), ExtractLoc.rend(),
-                std::back_inserter(ValLoc));
+      ValLoc.append(ExtractLoc.rbegin(), ExtractLoc.rend());
       NoopInput = Op;
     }
     // Terminate if we couldn't find anything to look through.
@@ -601,10 +600,8 @@ bool llvm::returnTypeIsEligibleForTailCall(const Function *F,
     // The manipulations performed when we're looking through an insertvalue or
     // an extractvalue would happen at the front of the RetPath list, so since
     // we have to copy it anyway it's more efficient to create a reversed copy.
-    using std::copy;
-    SmallVector<unsigned, 4> TmpRetPath, TmpCallPath;
-    copy(RetPath.rbegin(), RetPath.rend(), std::back_inserter(TmpRetPath));
-    copy(CallPath.rbegin(), CallPath.rend(), std::back_inserter(TmpCallPath));
+    SmallVector<unsigned, 4> TmpRetPath(RetPath.rbegin(), RetPath.rend());
+    SmallVector<unsigned, 4> TmpCallPath(CallPath.rbegin(), CallPath.rend());

     // Finally, we can check whether the value produced by the tail call at this
     // index is compatible with the value we return.


@@ -1555,7 +1555,7 @@ void IfConverter::PredicateBlock(BBInfo &BBI,
     UpdatePredRedefs(I, Redefs);
   }

-  std::copy(Cond.begin(), Cond.end(), std::back_inserter(BBI.Predicate));
+  BBI.Predicate.append(Cond.begin(), Cond.end());

   BBI.IsAnalyzed = false;
   BBI.NonPredSize = 0;
@@ -1620,9 +1620,8 @@ void IfConverter::CopyAndPredicateBlock(BBInfo &ToBBI, BBInfo &FromBBI,
     }
   }

-  std::copy(FromBBI.Predicate.begin(), FromBBI.Predicate.end(),
-            std::back_inserter(ToBBI.Predicate));
-  std::copy(Cond.begin(), Cond.end(), std::back_inserter(ToBBI.Predicate));
+  ToBBI.Predicate.append(FromBBI.Predicate.begin(), FromBBI.Predicate.end());
+  ToBBI.Predicate.append(Cond.begin(), Cond.end());

   ToBBI.ClobbersPred |= FromBBI.ClobbersPred;
   ToBBI.IsAnalyzed = false;
@@ -1661,8 +1660,7 @@ void IfConverter::MergeBlocks(BBInfo &ToBBI, BBInfo &FromBBI, bool AddEdges) {
   if (NBB && !FromBBI.BB->isSuccessor(NBB))
     FromBBI.BB->addSuccessor(NBB);

-  std::copy(FromBBI.Predicate.begin(), FromBBI.Predicate.end(),
-            std::back_inserter(ToBBI.Predicate));
+  ToBBI.Predicate.append(FromBBI.Predicate.begin(), FromBBI.Predicate.end());
   FromBBI.Predicate.clear();

   ToBBI.NonPredSize += FromBBI.NonPredSize;


@@ -411,8 +411,7 @@ optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
   if (ExtendLife && !ExtendedUses.empty())
     // Extend the liveness of the extension result.
-    std::copy(ExtendedUses.begin(), ExtendedUses.end(),
-              std::back_inserter(Uses));
+    Uses.append(ExtendedUses.begin(), ExtendedUses.end());

   // Now replace all uses.
   bool Changed = false;


@@ -67,8 +67,7 @@ void DWARFDebugLoc::parse(DataExtractor data, unsigned AddressSize) {
     // A single location description describing the location of the object...
     StringRef str = data.getData().substr(Offset, Bytes);
     Offset += Bytes;
-    E.Loc.reserve(str.size());
-    std::copy(str.begin(), str.end(), std::back_inserter(E.Loc));
+    E.Loc.append(str.begin(), str.end());
     Loc.Entries.push_back(std::move(E));
   }
 }


@@ -5559,7 +5559,7 @@ X86InstrInfo::unfoldMemoryOperand(SelectionDAG &DAG, SDNode *N,
   }
   if (Load)
     BeforeOps.push_back(SDValue(Load, 0));
-  std::copy(AfterOps.begin(), AfterOps.end(), std::back_inserter(BeforeOps));
+  BeforeOps.insert(BeforeOps.end(), AfterOps.begin(), AfterOps.end());
   SDNode *NewNode = DAG.getMachineNode(Opc, dl, VTs, BeforeOps);
   NewNodes.push_back(NewNode);


@@ -247,7 +247,7 @@ public:
   /// hold.
   void insert(ArrayRef<Slice> NewSlices) {
     int OldSize = Slices.size();
-    std::move(NewSlices.begin(), NewSlices.end(), std::back_inserter(Slices));
+    Slices.append(NewSlices.begin(), NewSlices.end());
     auto SliceI = Slices.begin() + OldSize;
     std::sort(SliceI, Slices.end());
     std::inplace_merge(Slices.begin(), SliceI, Slices.end());


@@ -1447,8 +1447,9 @@ void AsmMatcherInfo::buildInfo() {
       II->buildAliasResultOperands();
   }
   if (!NewMatchables.empty())
-    std::move(NewMatchables.begin(), NewMatchables.end(),
-              std::back_inserter(Matchables));
+    Matchables.insert(Matchables.end(),
+                      std::make_move_iterator(NewMatchables.begin()),
+                      std::make_move_iterator(NewMatchables.end()));

   // Process token alias definitions and set up the associated superclass
   // information.
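
As a closing note on the last hunk, an illustrative sketch (again not part of
the commit): when the source elements should be moved rather than copied, the
same single-growth ranged insert works with std::make_move_iterator. The
element values here are hypothetical.

#include <iterator>
#include <string>
#include <vector>

int main() {
  std::vector<std::string> NewItems = {"alias", "matchable"};
  std::vector<std::string> Items;

  // Each string is moved out of NewItems rather than copied, while the
  // destination still sizes itself for the whole range in one step.
  Items.insert(Items.end(),
               std::make_move_iterator(NewItems.begin()),
               std::make_move_iterator(NewItems.end()));
  return 0;
}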