diff --git a/include/llvm/CodeGen/MachineBasicBlock.h b/include/llvm/CodeGen/MachineBasicBlock.h
index 09cca93e2ff..46a82884efa 100644
--- a/include/llvm/CodeGen/MachineBasicBlock.h
+++ b/include/llvm/CodeGen/MachineBasicBlock.h
@@ -102,6 +102,8 @@ public:
 
   MachineInstr& front() { return Insts.front(); }
   MachineInstr& back()  { return Insts.back(); }
+  const MachineInstr& front() const { return Insts.front(); }
+  const MachineInstr& back()  const { return Insts.back(); }
 
   iterator                begin()       { return Insts.begin();  }
   const_iterator          begin() const { return Insts.begin();  }
diff --git a/include/llvm/Target/TargetInstrInfo.h b/include/llvm/Target/TargetInstrInfo.h
index 510da1adfaf..a357574112d 100644
--- a/include/llvm/Target/TargetInstrInfo.h
+++ b/include/llvm/Target/TargetInstrInfo.h
@@ -288,7 +288,7 @@ public:
   /// stream.
   virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                           MachineInstr* MI,
-                                          SmallVectorImpl<unsigned> &Ops,
+                                          const SmallVectorImpl<unsigned> &Ops,
                                           int FrameIndex) const {
     return 0;
   }
@@ -298,7 +298,7 @@ public:
   /// stack slot.
   virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                           MachineInstr* MI,
-                                          SmallVectorImpl<unsigned> &Ops,
+                                          const SmallVectorImpl<unsigned> &Ops,
                                           MachineInstr* LoadMI) const {
     return 0;
   }
@@ -306,8 +306,8 @@ public:
   /// canFoldMemoryOperand - Returns true if the specified load / store is
   /// folding is possible.
   virtual
-  bool canFoldMemoryOperand(MachineInstr *MI,
-                            SmallVectorImpl<unsigned> &Ops) const{
+  bool canFoldMemoryOperand(const MachineInstr *MI,
+                            const SmallVectorImpl<unsigned> &Ops) const {
     return false;
   }
@@ -338,7 +338,7 @@ public:
   /// fall-through into its successor block. This is primarily used when a
   /// branch is unanalyzable. It is useful for things like unconditional
   /// indirect branches (jump tables).
-  virtual bool BlockHasNoFallThrough(MachineBasicBlock &MBB) const {
+  virtual bool BlockHasNoFallThrough(const MachineBasicBlock &MBB) const {
     return false;
   }
diff --git a/lib/Target/ARM/ARMInstrInfo.cpp b/lib/Target/ARM/ARMInstrInfo.cpp
index e1f44bd7d0f..e03977b12b2 100644
--- a/lib/Target/ARM/ARMInstrInfo.cpp
+++ b/lib/Target/ARM/ARMInstrInfo.cpp
@@ -663,7 +663,7 @@ bool ARMInstrInfo::restoreCalleeSavedRegisters(MachineBasicBlock &MBB,
 
 MachineInstr *ARMInstrInfo::foldMemoryOperand(MachineFunction &MF,
                                               MachineInstr *MI,
-                                              SmallVectorImpl<unsigned> &Ops,
+                                              const SmallVectorImpl<unsigned> &Ops,
                                               int FI) const {
   if (Ops.size() != 1) return NULL;
 
@@ -747,8 +747,8 @@ MachineInstr *ARMInstrInfo::foldMemoryOperand(MachineFunction &MF,
   return NewMI;
 }
 
-bool ARMInstrInfo::canFoldMemoryOperand(MachineInstr *MI,
-                                        SmallVectorImpl<unsigned> &Ops) const {
+bool ARMInstrInfo::canFoldMemoryOperand(const MachineInstr *MI,
+                                        const SmallVectorImpl<unsigned> &Ops) const {
   if (Ops.size() != 1) return false;
 
   unsigned OpNum = Ops[0];
@@ -780,7 +780,7 @@ bool ARMInstrInfo::canFoldMemoryOperand(MachineInstr *MI,
   return false;
 }
 
-bool ARMInstrInfo::BlockHasNoFallThrough(MachineBasicBlock &MBB) const {
+bool ARMInstrInfo::BlockHasNoFallThrough(const MachineBasicBlock &MBB) const {
   if (MBB.empty()) return false;
 
   switch (MBB.back().getOpcode()) {
diff --git a/lib/Target/ARM/ARMInstrInfo.h b/lib/Target/ARM/ARMInstrInfo.h
index cd19ae15bf6..c59b03c0d17 100644
--- a/lib/Target/ARM/ARMInstrInfo.h
+++ b/lib/Target/ARM/ARMInstrInfo.h
@@ -198,20 +198,20 @@ public:
 
   virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                           MachineInstr* MI,
-                                          SmallVectorImpl<unsigned> &Ops,
+                                          const SmallVectorImpl<unsigned> &Ops,
                                           int FrameIndex) const;
 
   virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                           MachineInstr* MI,
-                                          SmallVectorImpl<unsigned> &Ops,
+                                          const SmallVectorImpl<unsigned> &Ops,
                                           MachineInstr* LoadMI) const {
     return 0;
   }
 
-  virtual bool canFoldMemoryOperand(MachineInstr *MI,
-                                    SmallVectorImpl<unsigned> &Ops) const;
+  virtual bool canFoldMemoryOperand(const MachineInstr *MI,
+                                    const SmallVectorImpl<unsigned> &Ops) const;
 
-  virtual bool BlockHasNoFallThrough(MachineBasicBlock &MBB) const;
+  virtual bool BlockHasNoFallThrough(const MachineBasicBlock &MBB) const;
 
   virtual bool ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const;
diff --git a/lib/Target/Alpha/AlphaInstrInfo.cpp b/lib/Target/Alpha/AlphaInstrInfo.cpp
index 7f3b32f13b1..b365aba7413 100644
--- a/lib/Target/Alpha/AlphaInstrInfo.cpp
+++ b/lib/Target/Alpha/AlphaInstrInfo.cpp
@@ -255,7 +255,7 @@ void AlphaInstrInfo::loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
 
 MachineInstr *AlphaInstrInfo::foldMemoryOperand(MachineFunction &MF,
                                                 MachineInstr *MI,
-                                                SmallVectorImpl<unsigned> &Ops,
+                                                const SmallVectorImpl<unsigned> &Ops,
                                                 int FrameIndex) const {
   if (Ops.size() != 1) return NULL;
 
@@ -408,7 +408,7 @@ void AlphaInstrInfo::insertNoop(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI) const {
     .addReg(Alpha::R31);
 }
 
-bool AlphaInstrInfo::BlockHasNoFallThrough(MachineBasicBlock &MBB) const {
+bool AlphaInstrInfo::BlockHasNoFallThrough(const MachineBasicBlock &MBB) const {
   if (MBB.empty()) return false;
 
   switch (MBB.back().getOpcode()) {
diff --git a/lib/Target/Alpha/AlphaInstrInfo.h b/lib/Target/Alpha/AlphaInstrInfo.h
index 9aa5ecd74c7..abee7222b97 100644
--- a/lib/Target/Alpha/AlphaInstrInfo.h
+++ b/lib/Target/Alpha/AlphaInstrInfo.h
@@ -69,12 +69,12 @@ public:
 
   virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                           MachineInstr* MI,
-                                          SmallVectorImpl<unsigned> &Ops,
+                                          const SmallVectorImpl<unsigned> &Ops,
                                           int FrameIndex) const;
 
   virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                           MachineInstr* MI,
-                                          SmallVectorImpl<unsigned> &Ops,
+                                          const SmallVectorImpl<unsigned> &Ops,
                                           MachineInstr* LoadMI) const {
     return 0;
   }
@@ -85,7 +85,7 @@ public:
   unsigned RemoveBranch(MachineBasicBlock &MBB) const;
   void insertNoop(MachineBasicBlock &MBB,
                   MachineBasicBlock::iterator MI) const;
-  bool BlockHasNoFallThrough(MachineBasicBlock &MBB) const;
+  bool BlockHasNoFallThrough(const MachineBasicBlock &MBB) const;
   bool ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const;
 };
diff --git a/lib/Target/CellSPU/SPUInstrInfo.cpp b/lib/Target/CellSPU/SPUInstrInfo.cpp
index cc562eba76c..29facb95cf0 100644
--- a/lib/Target/CellSPU/SPUInstrInfo.cpp
+++ b/lib/Target/CellSPU/SPUInstrInfo.cpp
@@ -399,7 +399,7 @@ void SPUInstrInfo::loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
 MachineInstr *
 SPUInstrInfo::foldMemoryOperand(MachineFunction &MF,
                                 MachineInstr *MI,
-                                SmallVectorImpl<unsigned> &Ops,
+                                const SmallVectorImpl<unsigned> &Ops,
                                 int FrameIndex) const {
 #if SOMEDAY_SCOTT_LOOKS_AT_ME_AGAIN
diff --git a/lib/Target/CellSPU/SPUInstrInfo.h b/lib/Target/CellSPU/SPUInstrInfo.h
index 5f3aaaadf5d..9c8bcaf7f7b 100644
--- a/lib/Target/CellSPU/SPUInstrInfo.h
+++ b/lib/Target/CellSPU/SPUInstrInfo.h
@@ -79,13 +79,13 @@ namespace llvm {
     //! Fold spills into load/store instructions
     virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                             MachineInstr* MI,
-                                            SmallVectorImpl<unsigned> &Ops,
+                                            const SmallVectorImpl<unsigned> &Ops,
                                             int FrameIndex) const;
 
     //! Fold any load/store to an operand
     virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                             MachineInstr* MI,
-                                            SmallVectorImpl<unsigned> &Ops,
+                                            const SmallVectorImpl<unsigned> &Ops,
                                             MachineInstr* LoadMI) const {
       return 0;
     }
diff --git a/lib/Target/Mips/MipsInstrInfo.cpp b/lib/Target/Mips/MipsInstrInfo.cpp
index 3e6ce535eea..146c5ca5f3f 100644
--- a/lib/Target/Mips/MipsInstrInfo.cpp
+++ b/lib/Target/Mips/MipsInstrInfo.cpp
@@ -281,7 +281,7 @@ void MipsInstrInfo::loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
 
 MachineInstr *MipsInstrInfo::
 foldMemoryOperand(MachineFunction &MF,
                   MachineInstr* MI,
-                  SmallVectorImpl<unsigned> &Ops, int FI) const
+                  const SmallVectorImpl<unsigned> &Ops, int FI) const
 {
   if (Ops.size() != 1) return NULL;
 
@@ -602,7 +602,7 @@ RemoveBranch(MachineBasicBlock &MBB) const
 /// BlockHasNoFallThrough - Analyze if MachineBasicBlock does not
 /// fall-through into its successor block.
 bool MipsInstrInfo::
-BlockHasNoFallThrough(MachineBasicBlock &MBB) const
+BlockHasNoFallThrough(const MachineBasicBlock &MBB) const
 {
   if (MBB.empty()) return false;
 
diff --git a/lib/Target/Mips/MipsInstrInfo.h b/lib/Target/Mips/MipsInstrInfo.h
index 7615c715557..31e9cfaf7cb 100644
--- a/lib/Target/Mips/MipsInstrInfo.h
+++ b/lib/Target/Mips/MipsInstrInfo.h
@@ -196,17 +196,17 @@ public:
 
   virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                           MachineInstr* MI,
-                                          SmallVectorImpl<unsigned> &Ops,
+                                          const SmallVectorImpl<unsigned> &Ops,
                                           int FrameIndex) const;
 
   virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                           MachineInstr* MI,
-                                          SmallVectorImpl<unsigned> &Ops,
+                                          const SmallVectorImpl<unsigned> &Ops,
                                           MachineInstr* LoadMI) const {
     return 0;
   }
 
-  virtual bool BlockHasNoFallThrough(MachineBasicBlock &MBB) const;
+  virtual bool BlockHasNoFallThrough(const MachineBasicBlock &MBB) const;
 
   virtual bool ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const;
diff --git a/lib/Target/PowerPC/PPCInstrInfo.cpp b/lib/Target/PowerPC/PPCInstrInfo.cpp
index ca65febebaa..c2756685a40 100644
--- a/lib/Target/PowerPC/PPCInstrInfo.cpp
+++ b/lib/Target/PowerPC/PPCInstrInfo.cpp
@@ -657,7 +657,7 @@ void PPCInstrInfo::loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
 /// copy instructions, turning them into load/store instructions.
 MachineInstr *PPCInstrInfo::foldMemoryOperand(MachineFunction &MF,
                                               MachineInstr *MI,
-                                              SmallVectorImpl<unsigned> &Ops,
+                                              const SmallVectorImpl<unsigned> &Ops,
                                               int FrameIndex) const {
   if (Ops.size() != 1) return NULL;
 
@@ -730,8 +730,8 @@ MachineInstr *PPCInstrInfo::foldMemoryOperand(MachineFunction &MF,
   return NewMI;
 }
 
-bool PPCInstrInfo::canFoldMemoryOperand(MachineInstr *MI,
-                                        SmallVectorImpl<unsigned> &Ops) const {
+bool PPCInstrInfo::canFoldMemoryOperand(const MachineInstr *MI,
+                                        const SmallVectorImpl<unsigned> &Ops) const {
   if (Ops.size() != 1) return false;
 
   // Make sure this is a reg-reg copy. Note that we can't handle MCRF, because
@@ -751,7 +751,7 @@ bool PPCInstrInfo::canFoldMemoryOperand(MachineInstr *MI,
 }
 
 
-bool PPCInstrInfo::BlockHasNoFallThrough(MachineBasicBlock &MBB) const {
+bool PPCInstrInfo::BlockHasNoFallThrough(const MachineBasicBlock &MBB) const {
   if (MBB.empty()) return false;
 
   switch (MBB.back().getOpcode()) {
diff --git a/lib/Target/PowerPC/PPCInstrInfo.h b/lib/Target/PowerPC/PPCInstrInfo.h
index cb5a0e6c1f6..2e950a397cf 100644
--- a/lib/Target/PowerPC/PPCInstrInfo.h
+++ b/lib/Target/PowerPC/PPCInstrInfo.h
@@ -142,20 +142,20 @@ public:
   /// copy instructions, turning them into load/store instructions.
   virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                           MachineInstr* MI,
-                                          SmallVectorImpl<unsigned> &Ops,
+                                          const SmallVectorImpl<unsigned> &Ops,
                                           int FrameIndex) const;
 
   virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                           MachineInstr* MI,
-                                          SmallVectorImpl<unsigned> &Ops,
+                                          const SmallVectorImpl<unsigned> &Ops,
                                           MachineInstr* LoadMI) const {
     return 0;
   }
 
-  virtual bool canFoldMemoryOperand(MachineInstr *MI,
-                                    SmallVectorImpl<unsigned> &Ops) const;
+  virtual bool canFoldMemoryOperand(const MachineInstr *MI,
+                                    const SmallVectorImpl<unsigned> &Ops) const;
 
-  virtual bool BlockHasNoFallThrough(MachineBasicBlock &MBB) const;
+  virtual bool BlockHasNoFallThrough(const MachineBasicBlock &MBB) const;
 
   virtual bool ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const;
diff --git a/lib/Target/Sparc/SparcInstrInfo.cpp b/lib/Target/Sparc/SparcInstrInfo.cpp
index 0d7370b9bb5..47c0672cedb 100644
--- a/lib/Target/Sparc/SparcInstrInfo.cpp
+++ b/lib/Target/Sparc/SparcInstrInfo.cpp
@@ -227,7 +227,7 @@ void SparcInstrInfo::loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
 
 MachineInstr *SparcInstrInfo::foldMemoryOperand(MachineFunction &MF,
                                                 MachineInstr* MI,
-                                                SmallVectorImpl<unsigned> &Ops,
+                                                const SmallVectorImpl<unsigned> &Ops,
                                                 int FI) const {
   if (Ops.size() != 1) return NULL;
 
diff --git a/lib/Target/Sparc/SparcInstrInfo.h b/lib/Target/Sparc/SparcInstrInfo.h
index aadbefd9db0..7c633169adb 100644
--- a/lib/Target/Sparc/SparcInstrInfo.h
+++ b/lib/Target/Sparc/SparcInstrInfo.h
@@ -96,12 +96,12 @@ public:
 
   virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                           MachineInstr* MI,
-                                          SmallVectorImpl<unsigned> &Ops,
+                                          const SmallVectorImpl<unsigned> &Ops,
                                           int FrameIndex) const;
 
   virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                           MachineInstr* MI,
-                                          SmallVectorImpl<unsigned> &Ops,
+                                          const SmallVectorImpl<unsigned> &Ops,
                                           MachineInstr* LoadMI) const {
     return 0;
   }
diff --git a/lib/Target/X86/X86InstrInfo.cpp b/lib/Target/X86/X86InstrInfo.cpp
index d2e2cf1efc0..e105b0f3cd8 100644
--- a/lib/Target/X86/X86InstrInfo.cpp
+++ b/lib/Target/X86/X86InstrInfo.cpp
@@ -1543,7 +1543,7 @@ unsigned X86InstrInfo::RemoveBranch(MachineBasicBlock &MBB) const {
 }
 
 static const MachineInstrBuilder &X86InstrAddOperand(MachineInstrBuilder &MIB,
-                                                     MachineOperand &MO) {
+                                                     const MachineOperand &MO) {
   if (MO.isReg())
     MIB = MIB.addReg(MO.getReg(), MO.isDef(), MO.isImplicit(),
                      MO.isKill(), MO.isDead(), MO.getSubReg());
@@ -1872,7 +1872,7 @@ bool X86InstrInfo::restoreCalleeSavedRegisters(MachineBasicBlock &MBB,
 }
 
 static MachineInstr *FuseTwoAddrInst(MachineFunction &MF, unsigned Opcode,
-                                     SmallVector<MachineOperand,4> &MOs,
+                                     const SmallVector<MachineOperand,4> &MOs,
                                      MachineInstr *MI, const TargetInstrInfo &TII) {
   // Create the base instruction with the memory operand as the first part.
   MachineInstr *NewMI = MF.CreateMachineInstr(TII.get(Opcode), true);
@@ -1898,7 +1898,7 @@ static MachineInstr *FuseTwoAddrInst(MachineFunction &MF, unsigned Opcode,
 
 static MachineInstr *FuseInst(MachineFunction &MF,
                               unsigned Opcode, unsigned OpNo,
-                              SmallVector<MachineOperand,4> &MOs,
+                              const SmallVector<MachineOperand,4> &MOs,
                               MachineInstr *MI, const TargetInstrInfo &TII) {
   MachineInstr *NewMI = MF.CreateMachineInstr(TII.get(Opcode), true);
   MachineInstrBuilder MIB(NewMI);
@@ -1920,7 +1920,7 @@ static MachineInstr *FuseInst(MachineFunction &MF,
 }
 
 static MachineInstr *MakeM0Inst(const TargetInstrInfo &TII, unsigned Opcode,
-                                SmallVector<MachineOperand,4> &MOs,
+                                const SmallVector<MachineOperand,4> &MOs,
                                 MachineInstr *MI) {
   MachineFunction &MF = *MI->getParent()->getParent();
   MachineInstrBuilder MIB = BuildMI(MF, TII.get(Opcode));
@@ -1936,7 +1936,7 @@ static MachineInstr *MakeM0Inst(const TargetInstrInfo &TII, unsigned Opcode,
 MachineInstr*
 X86InstrInfo::foldMemoryOperand(MachineFunction &MF,
                                 MachineInstr *MI, unsigned i,
-                                SmallVector<MachineOperand,4> &MOs) const {
+                                const SmallVector<MachineOperand,4> &MOs) const{
   const DenseMap<unsigned*, unsigned> *OpcodeTablePtr = NULL;
   bool isTwoAddrFold = false;
   unsigned NumOps = MI->getDesc().getNumOperands();
@@ -1995,7 +1995,7 @@ X86InstrInfo::foldMemoryOperand(MachineFunction &MF,
 
 MachineInstr* X86InstrInfo::foldMemoryOperand(MachineFunction &MF,
                                               MachineInstr *MI,
-                                              SmallVectorImpl<unsigned> &Ops,
+                                              const SmallVectorImpl<unsigned> &Ops,
                                               int FrameIndex) const {
   // Check switch flag
   if (NoFusing) return NULL;
@@ -2042,7 +2042,7 @@ MachineInstr* X86InstrInfo::foldMemoryOperand(MachineFunction &MF,
 
 MachineInstr* X86InstrInfo::foldMemoryOperand(MachineFunction &MF,
                                               MachineInstr *MI,
-                                              SmallVectorImpl<unsigned> &Ops,
+                                              const SmallVectorImpl<unsigned> &Ops,
                                               MachineInstr *LoadMI) const {
   // Check switch flag
   if (NoFusing) return NULL;
@@ -2093,8 +2093,8 @@ MachineInstr* X86InstrInfo::foldMemoryOperand(MachineFunction &MF,
 }
 
-bool X86InstrInfo::canFoldMemoryOperand(MachineInstr *MI,
-                                        SmallVectorImpl<unsigned> &Ops) const {
+bool X86InstrInfo::canFoldMemoryOperand(const MachineInstr *MI,
+                                        const SmallVectorImpl<unsigned> &Ops) const {
   // Check switch flag
   if (NoFusing) return 0;
 
@@ -2350,7 +2350,7 @@ unsigned X86InstrInfo::getOpcodeAfterMemoryUnfold(unsigned Opc,
   return I->second.first;
 }
 
-bool X86InstrInfo::BlockHasNoFallThrough(MachineBasicBlock &MBB) const {
+bool X86InstrInfo::BlockHasNoFallThrough(const MachineBasicBlock &MBB) const {
   if (MBB.empty()) return false;
 
   switch (MBB.back().getOpcode()) {
diff --git a/lib/Target/X86/X86InstrInfo.h b/lib/Target/X86/X86InstrInfo.h
index 1413310b3c0..5edc19d0ef8 100644
--- a/lib/Target/X86/X86InstrInfo.h
+++ b/lib/Target/X86/X86InstrInfo.h
@@ -359,7 +359,7 @@ public:
   /// references has been changed.
   virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                           MachineInstr* MI,
-                                          SmallVectorImpl<unsigned> &Ops,
+                                          const SmallVectorImpl<unsigned> &Ops,
                                           int FrameIndex) const;
 
   /// foldMemoryOperand - Same as the previous version except it allows folding
@@ -367,12 +367,13 @@ public:
   /// of any load and store from / to any address, not just from a specific
   /// stack slot.
   virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                           MachineInstr* MI,
-                                          SmallVectorImpl<unsigned> &Ops,
+                                          const SmallVectorImpl<unsigned> &Ops,
                                           MachineInstr* LoadMI) const;
 
   /// canFoldMemoryOperand - Returns true if the specified load / store is
   /// folding is possible.
-  virtual bool canFoldMemoryOperand(MachineInstr*, SmallVectorImpl<unsigned> &) const;
+  virtual bool canFoldMemoryOperand(const MachineInstr*,
+                                    const SmallVectorImpl<unsigned> &) const;
 
   /// unfoldMemoryOperand - Separate a single instruction which folded a load or
   /// a store or a load and a store into two or more instruction. If this is
@@ -391,7 +392,7 @@ public:
   virtual unsigned getOpcodeAfterMemoryUnfold(unsigned Opc,
                                       bool UnfoldLoad, bool UnfoldStore) const;
 
-  virtual bool BlockHasNoFallThrough(MachineBasicBlock &MBB) const;
+  virtual bool BlockHasNoFallThrough(const MachineBasicBlock &MBB) const;
 
   virtual bool ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const;
 
@@ -431,7 +432,7 @@ private:
   MachineInstr* foldMemoryOperand(MachineFunction &MF,
                                   MachineInstr* MI,
                                   unsigned OpNum,
-                                  SmallVector<MachineOperand,4> &MOs) const;
+                                  const SmallVector<MachineOperand,4> &MOs) const;
 };
 
 } // End llvm namespace
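
For reference, the pattern every target repeats above can be reduced to the minimal sketch below. It is not part of the patch: the "Foo" target, its FooInstrInfo struct, and the Foo::RET / Foo::BR opcodes are hypothetical, but the const-qualified hook signature and the const MachineBasicBlock::back() accessor are exactly the ones introduced by this change.

// Minimal sketch, assuming the 2008-era headers patched above.
// "Foo", FooInstrInfo, and the Foo::RET / Foo::BR opcodes are hypothetical.
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineInstr.h"
using namespace llvm;

namespace Foo { enum { RET = 1, BR = 2 }; }  // hypothetical target opcodes

struct FooInstrInfo {
  // Mirrors TargetInstrInfo::BlockHasNoFallThrough after this patch: the
  // block is only inspected, so it can be taken by const reference.
  bool BlockHasNoFallThrough(const MachineBasicBlock &MBB) const {
    if (MBB.empty()) return false;        // no terminator: assume fall-through
    switch (MBB.back().getOpcode()) {     // const back() added by this patch
    case Foo::RET:                        // return never falls through
    case Foo::BR:                         // unconditional branch never falls through
      return true;
    default:
      return false;                       // be conservative for anything else
    }
  }
};

Taking Ops, MOs, and MBB by const reference documents that the folding and fall-through queries are read-only, which is what lets callers such as the register allocator pass their operand lists without granting mutation rights.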