Always skip ptr-to-ptr bitcasts when counting,
per Chris' suggestion.  Slightly faster.



git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@65999 91177308-0d34-0410-b5e6-96231b3b80d8
Dale Johannesen  2009-03-04 01:53:05 +00:00
commit cdb16aa5ab, parent 599a6a88ce
2 changed files with 10 additions and 7 deletions


@@ -11484,12 +11484,12 @@ Instruction *InstCombiner::visitStoreInst(StoreInst &SI) {
   for (unsigned ScanInsts = 6; BBI != SI.getParent()->begin() && ScanInsts;
        --ScanInsts) {
     --BBI;
-    // Don't count debug info directives, lest they affect codegen.
-    // Likewise, we skip bitcasts that feed into a llvm.dbg.declare; these are
-    // not present when debugging is off.
+    // Don't count debug info directives, lest they affect codegen,
+    // and we skip pointer-to-pointer bitcasts, which are NOPs.
+    // It is necessary for correctness to skip those that feed into a
+    // llvm.dbg.declare, as these are not present when debugging is off.
     if (isa<DbgInfoIntrinsic>(BBI) ||
-        (isa<BitCastInst>(BBI) && BBI->hasOneUse() &&
-         isa<DbgDeclareInst>(BBI->use_begin()))) {
+        (isa<BitCastInst>(BBI) && isa<PointerType>(BBI->getType()))) {
       ScanInsts++;
       continue;
     }
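A minimal, self-contained sketch of the change above, using mock types (Inst, Kind, and skippable() are invented for illustration and are not the real LLVM API). The point is that the new test only inspects the bitcast itself, checking whether its result is a pointer, whereas the old test also had to walk the bitcast's use list looking for a llvm.dbg.declare.

// Simplified model of the backwards scan in visitStoreInst: walk up to 6
// instructions before the store, not counting anything that is "free".
#include <cstdio>
#include <vector>

enum class Kind { Store, Load, DbgIntrinsic, PtrBitCast, IntBitCast, Other };

struct Inst { Kind K; };

// Returns true if I should not consume the scan budget.  The new, cheaper
// test looks only at the instruction itself (mock of the ptr-to-ptr bitcast
// check); it no longer needs to inspect the bitcast's users.
static bool skippable(const Inst &I) {
  return I.K == Kind::DbgIntrinsic || I.K == Kind::PtrBitCast;
}

int main() {
  // A block ending in a store, scanned backwards with a budget of 6,
  // mirroring ScanInsts in visitStoreInst.
  std::vector<Inst> Block = {
      {Kind::Other}, {Kind::PtrBitCast}, {Kind::DbgIntrinsic},
      {Kind::Load},  {Kind::PtrBitCast}, {Kind::Store}};

  unsigned Budget = 6, Counted = 0;
  for (auto It = Block.rbegin() + 1; It != Block.rend() && Budget; ++It) {
    if (skippable(*It))
      continue;            // does not consume the budget
    --Budget;
    ++Counted;
  }
  std::printf("counted %u real instructions\n", Counted);
}

Run standalone, this counts only the load and the unrelated instruction; the debug intrinsic and both pointer bitcasts are skipped without touching their use lists.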


@@ -235,7 +235,7 @@ bool llvm::InlineFunction(CallSite CS, CallGraph *CG, const TargetData *TD) {
   // function.
   std::vector<ReturnInst*> Returns;
   ClonedCodeInfo InlinedFunctionInfo;
-  Function::iterator FirstNewBlock;
+  Function::iterator FirstNewBlock, LastNewBlock;
 
   { // Scope to destroy ValueMap after cloning.
     DenseMap<const Value*, Value*> ValueMap;
@@ -312,6 +312,7 @@ bool llvm::InlineFunction(CallSite CS, CallGraph *CG, const TargetData *TD) {
 
     // Remember the first block that is newly cloned over.
     FirstNewBlock = LastBlock; ++FirstNewBlock;
+    LastNewBlock = &Caller->back();
 
     // Update the callgraph if requested.
     if (CG)
@@ -537,7 +538,9 @@ bool llvm::InlineFunction(CallSite CS, CallGraph *CG, const TargetData *TD) {
     // Add a branch to the merge points and remove return instructions.
     for (unsigned i = 0, e = Returns.size(); i != e; ++i) {
       ReturnInst *RI = Returns[i];
-      BranchInst::Create(AfterCallBB, RI);
+      // A return in the last block in the function falls through.
+//      if (isa<InvokeInst>(TheCall) || RI->getParent() != LastNewBlock)
+        BranchInst::Create(AfterCallBB, RI);
       RI->eraseFromParent();
     }
   } else if (!Returns.empty()) {
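The LastNewBlock bookkeeping above only pays off in the still commented-out condition on the branch creation: a return sitting in the last cloned block could fall through into AfterCallBB instead of branching to it, while an invoke always needs the explicit branch. The sketch below models that intent only; Return and needsBranch are invented names, and of the identifiers used only AfterCallBB, LastNewBlock, and TheCall come from the diff, whose committed code still creates the branch unconditionally.

// Simplified model of rewriting inlined returns into branches to the merge
// block, with the proposed fall-through special case for the last block.
#include <cstdio>
#include <string>
#include <vector>

struct Return {
  std::string Block;   // name of the block containing the return
};

// Decide whether an explicit branch to AfterCallBB is required.  For an
// invoke the fall-through is not available, so a branch is always needed.
static bool needsBranch(const Return &RI, const std::string &LastNewBlock,
                        bool CallIsInvoke) {
  return CallIsInvoke || RI.Block != LastNewBlock;
}

int main() {
  std::vector<Return> Returns = {{"if.then"}, {"if.else"}, {"exit"}};
  std::string LastNewBlock = "exit";

  for (const Return &RI : Returns) {
    if (needsBranch(RI, LastNewBlock, /*CallIsInvoke=*/false))
      std::printf("%s: insert 'br label %%AfterCallBB'\n", RI.Block.c_str());
    else
      std::printf("%s: fall through into AfterCallBB\n", RI.Block.c_str());
  }
}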