diff --git a/lib/CodeGen/InlineSpiller.cpp b/lib/CodeGen/InlineSpiller.cpp
index 462ca6dbc94..443b2d077ab 100644
--- a/lib/CodeGen/InlineSpiller.cpp
+++ b/lib/CodeGen/InlineSpiller.cpp
@@ -85,7 +85,8 @@ private:
 
   bool coalesceStackAccess(MachineInstr *MI);
   bool foldMemoryOperand(MachineBasicBlock::iterator MI,
-                         const SmallVectorImpl<unsigned> &Ops);
+                         const SmallVectorImpl<unsigned> &Ops,
+                         MachineInstr *LoadMI = 0);
   void insertReload(LiveInterval &NewLI, MachineBasicBlock::iterator MI);
   void insertSpill(LiveInterval &NewLI, MachineBasicBlock::iterator MI);
 };
@@ -141,6 +142,14 @@ bool InlineSpiller::reMaterializeFor(MachineBasicBlock::iterator MI) {
     }
   }
 
+  // Before rematerializing into a register for a single instruction, try to
+  // fold a load into the instruction. That avoids allocating a new register.
+  if (RM.OrigMI->getDesc().canFoldAsLoad() &&
+      foldMemoryOperand(MI, Ops, RM.OrigMI)) {
+    edit_->markRematerialized(RM.ParentVNI);
+    return true;
+  }
+
   // Allocate a new register for the remat.
   LiveInterval &NewLI = edit_->create(mri_, lis_, vrm_);
   NewLI.markNotSpillable();
@@ -243,9 +252,13 @@ bool InlineSpiller::coalesceStackAccess(MachineInstr *MI) {
 }
 
 /// foldMemoryOperand - Try folding stack slot references in Ops into MI.
-/// Return true on success, and MI will be erased.
+/// @param MI     Instruction using or defining the current register.
+/// @param Ops    Operand indices from readsWritesVirtualRegister().
+/// @param LoadMI Load instruction to use instead of stack slot when non-null.
+/// @return       True on success, and MI will be erased.
 bool InlineSpiller::foldMemoryOperand(MachineBasicBlock::iterator MI,
-                                      const SmallVectorImpl<unsigned> &Ops) {
+                                      const SmallVectorImpl<unsigned> &Ops,
+                                      MachineInstr *LoadMI) {
   // TargetInstrInfo::foldMemoryOperand only expects explicit, non-tied
   // operands.
   SmallVector<unsigned, 8> FoldOps;
@@ -262,11 +275,14 @@ bool InlineSpiller::foldMemoryOperand(MachineBasicBlock::iterator MI,
     FoldOps.push_back(Idx);
   }
 
-  MachineInstr *FoldMI = tii_.foldMemoryOperand(MI, FoldOps, stackSlot_);
+  MachineInstr *FoldMI =
+    LoadMI ? tii_.foldMemoryOperand(MI, FoldOps, LoadMI)
+           : tii_.foldMemoryOperand(MI, FoldOps, stackSlot_);
   if (!FoldMI)
     return false;
   lis_.ReplaceMachineInstrInMaps(MI, FoldMI);
-  vrm_.addSpillSlotUse(stackSlot_, FoldMI);
+  if (!LoadMI)
+    vrm_.addSpillSlotUse(stackSlot_, FoldMI);
   MI->eraseFromParent();
   DEBUG(dbgs() << "\tfolded: " << *FoldMI);
   return true;
diff --git a/lib/CodeGen/LiveRangeEdit.h b/lib/CodeGen/LiveRangeEdit.h
index ad248bf4002..37b58279b1b 100644
--- a/lib/CodeGen/LiveRangeEdit.h
+++ b/lib/CodeGen/LiveRangeEdit.h
@@ -117,6 +117,12 @@ public:
                             const TargetInstrInfo&,
                             const TargetRegisterInfo&);
 
+  /// markRematerialized - Explicitly mark a value as rematerialized after
+  /// doing it manually.
+  void markRematerialized(VNInfo *ParentVNI) {
+    rematted_.insert(ParentVNI);
+  }
+
   /// didRematerialize - Return true if ParentVNI was rematerialized anywhere.
   bool didRematerialize(VNInfo *ParentVNI) const {
     return rematted_.count(ParentVNI);