Const-ify several TargetInstrInfo methods.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@57622 91177308-0d34-0410-b5e6-96231b3b80d8
Dan Gohman 2008-10-16 01:49:15 +00:00
parent fcab2bd2f3
commit 8e8b8a223c
16 changed files with 57 additions and 54 deletions
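The pattern is the same in every file below: hook parameters that are only read gain a const qualifier (const SmallVectorImpl<unsigned> &Ops, const MachineInstr *MI, const MachineBasicBlock &MBB), and the static helpers in X86InstrInfo.cpp take their MachineOperand lists by const reference. A minimal, self-contained C++ sketch of the before/after shape, using stand-in types rather than the real LLVM classes:

#include <vector>

// Illustrative stand-ins, not the LLVM types.
using OperandList = std::vector<unsigned>;   // stands in for SmallVectorImpl<unsigned>
struct MachineInstrStub {};                  // stands in for MachineInstr

class TargetHooks {
public:
  virtual ~TargetHooks() = default;

  // Before this commit the query took mutable references even though it
  // never modifies its inputs:
  //   virtual bool canFoldMemoryOperand(MachineInstrStub *MI, OperandList &Ops) const;

  // After: both inputs are const, so const callers can use the hook directly.
  virtual bool canFoldMemoryOperand(const MachineInstrStub *MI,
                                    const OperandList &Ops) const {
    (void)MI;                  // the default implementation ignores the instruction
    return Ops.size() == 1;    // and only accepts single-operand folds
  }
};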

View File

@@ -102,6 +102,8 @@ public:
MachineInstr& front() { return Insts.front(); }
MachineInstr& back() { return Insts.back(); }
const MachineInstr& front() const { return Insts.front(); }
const MachineInstr& back() const { return Insts.back(); }
iterator begin() { return Insts.begin(); }
const_iterator begin() const { return Insts.begin(); }
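The const overloads of front() and back() added just above are what let the const-qualified hooks later in this commit, such as BlockHasNoFallThrough(const MachineBasicBlock &MBB), call MBB.back().getOpcode() on a block they receive by const reference. A small sketch of that idiom, assuming a stand-in container rather than the real ilist-based MachineBasicBlock:

#include <list>

struct MachineInstrStub {
  unsigned Opcode = 0;
  unsigned getOpcode() const { return Opcode; }
};

// Stand-in for the relevant slice of MachineBasicBlock; illustrative only.
class BasicBlockStub {
  std::list<MachineInstrStub> Insts;
public:
  bool empty() const { return Insts.empty(); }
  MachineInstrStub &back() { return Insts.back(); }
  // Without this const overload, a function taking `const BasicBlockStub &`
  // could not reach the terminator instruction at all.
  const MachineInstrStub &back() const { return Insts.back(); }
};

// Mirrors the shape of the const-qualified BlockHasNoFallThrough overrides.
bool blockHasNoFallThrough(const BasicBlockStub &MBB) {
  if (MBB.empty()) return false;
  return MBB.back().getOpcode() != 0;   // placeholder for the per-target opcode switch
}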

View File

@@ -288,7 +288,7 @@ public:
/// stream.
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
int FrameIndex) const {
return 0;
}
@@ -298,7 +298,7 @@ public:
/// stack slot.
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
MachineInstr* LoadMI) const {
return 0;
}
@@ -306,8 +306,8 @@ public:
/// canFoldMemoryOperand - Returns true if folding of the specified
/// load / store is possible.
virtual
bool canFoldMemoryOperand(MachineInstr *MI,
SmallVectorImpl<unsigned> &Ops) const{
bool canFoldMemoryOperand(const MachineInstr *MI,
const SmallVectorImpl<unsigned> &Ops) const {
return false;
}
@@ -338,7 +338,7 @@ public:
/// fall-through into its successor block. This is primarily used when a
/// branch is unanalyzable. It is useful for things like unconditional
/// indirect branches (jump tables).
virtual bool BlockHasNoFallThrough(MachineBasicBlock &MBB) const {
virtual bool BlockHasNoFallThrough(const MachineBasicBlock &MBB) const {
return false;
}

View File

@@ -663,7 +663,7 @@ bool ARMInstrInfo::restoreCalleeSavedRegisters(MachineBasicBlock &MBB,
MachineInstr *ARMInstrInfo::foldMemoryOperand(MachineFunction &MF,
MachineInstr *MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
int FI) const {
if (Ops.size() != 1) return NULL;
@@ -747,8 +747,8 @@ MachineInstr *ARMInstrInfo::foldMemoryOperand(MachineFunction &MF,
return NewMI;
}
bool ARMInstrInfo::canFoldMemoryOperand(MachineInstr *MI,
SmallVectorImpl<unsigned> &Ops) const {
bool ARMInstrInfo::canFoldMemoryOperand(const MachineInstr *MI,
const SmallVectorImpl<unsigned> &Ops) const {
if (Ops.size() != 1) return false;
unsigned OpNum = Ops[0];
@@ -780,7 +780,7 @@ bool ARMInstrInfo::canFoldMemoryOperand(MachineInstr *MI,
return false;
}
bool ARMInstrInfo::BlockHasNoFallThrough(MachineBasicBlock &MBB) const {
bool ARMInstrInfo::BlockHasNoFallThrough(const MachineBasicBlock &MBB) const {
if (MBB.empty()) return false;
switch (MBB.back().getOpcode()) {

View File

@@ -198,20 +198,20 @@ public:
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
int FrameIndex) const;
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
MachineInstr* LoadMI) const {
return 0;
}
virtual bool canFoldMemoryOperand(MachineInstr *MI,
SmallVectorImpl<unsigned> &Ops) const;
virtual bool canFoldMemoryOperand(const MachineInstr *MI,
const SmallVectorImpl<unsigned> &Ops) const;
virtual bool BlockHasNoFallThrough(MachineBasicBlock &MBB) const;
virtual bool BlockHasNoFallThrough(const MachineBasicBlock &MBB) const;
virtual
bool ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const;

View File

@@ -255,7 +255,7 @@ void AlphaInstrInfo::loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
MachineInstr *AlphaInstrInfo::foldMemoryOperand(MachineFunction &MF,
MachineInstr *MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
int FrameIndex) const {
if (Ops.size() != 1) return NULL;
@@ -408,7 +408,7 @@ void AlphaInstrInfo::insertNoop(MachineBasicBlock &MBB,
.addReg(Alpha::R31);
}
bool AlphaInstrInfo::BlockHasNoFallThrough(MachineBasicBlock &MBB) const {
bool AlphaInstrInfo::BlockHasNoFallThrough(const MachineBasicBlock &MBB) const {
if (MBB.empty()) return false;
switch (MBB.back().getOpcode()) {

View File

@@ -69,12 +69,12 @@ public:
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
int FrameIndex) const;
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
MachineInstr* LoadMI) const {
return 0;
}
@@ -85,7 +85,7 @@ public:
unsigned RemoveBranch(MachineBasicBlock &MBB) const;
void insertNoop(MachineBasicBlock &MBB,
MachineBasicBlock::iterator MI) const;
bool BlockHasNoFallThrough(MachineBasicBlock &MBB) const;
bool BlockHasNoFallThrough(const MachineBasicBlock &MBB) const;
bool ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const;
};

View File

@@ -399,7 +399,7 @@ void SPUInstrInfo::loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
MachineInstr *
SPUInstrInfo::foldMemoryOperand(MachineFunction &MF,
MachineInstr *MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
int FrameIndex) const
{
#if SOMEDAY_SCOTT_LOOKS_AT_ME_AGAIN

View File

@@ -79,13 +79,13 @@ namespace llvm {
//! Fold spills into load/store instructions
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
int FrameIndex) const;
//! Fold any load/store to an operand
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
MachineInstr* LoadMI) const {
return 0;
}

View File

@@ -281,7 +281,7 @@ void MipsInstrInfo::loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
MachineInstr *MipsInstrInfo::
foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops, int FI) const
const SmallVectorImpl<unsigned> &Ops, int FI) const
{
if (Ops.size() != 1) return NULL;
@@ -602,7 +602,7 @@ RemoveBranch(MachineBasicBlock &MBB) const
/// BlockHasNoFallThrough - Analyze if MachineBasicBlock does not
/// fall-through into its successor block.
bool MipsInstrInfo::
BlockHasNoFallThrough(MachineBasicBlock &MBB) const
BlockHasNoFallThrough(const MachineBasicBlock &MBB) const
{
if (MBB.empty()) return false;

View File

@@ -196,17 +196,17 @@ public:
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
int FrameIndex) const;
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
MachineInstr* LoadMI) const {
return 0;
}
virtual bool BlockHasNoFallThrough(MachineBasicBlock &MBB) const;
virtual bool BlockHasNoFallThrough(const MachineBasicBlock &MBB) const;
virtual
bool ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const;

View File

@@ -657,7 +657,7 @@ void PPCInstrInfo::loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
/// copy instructions, turning them into load/store instructions.
MachineInstr *PPCInstrInfo::foldMemoryOperand(MachineFunction &MF,
MachineInstr *MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
int FrameIndex) const {
if (Ops.size() != 1) return NULL;
@@ -730,8 +730,8 @@ MachineInstr *PPCInstrInfo::foldMemoryOperand(MachineFunction &MF,
return NewMI;
}
bool PPCInstrInfo::canFoldMemoryOperand(MachineInstr *MI,
SmallVectorImpl<unsigned> &Ops) const {
bool PPCInstrInfo::canFoldMemoryOperand(const MachineInstr *MI,
const SmallVectorImpl<unsigned> &Ops) const {
if (Ops.size() != 1) return false;
// Make sure this is a reg-reg copy. Note that we can't handle MCRF, because
@@ -751,7 +751,7 @@ bool PPCInstrInfo::canFoldMemoryOperand(MachineInstr *MI,
}
bool PPCInstrInfo::BlockHasNoFallThrough(MachineBasicBlock &MBB) const {
bool PPCInstrInfo::BlockHasNoFallThrough(const MachineBasicBlock &MBB) const {
if (MBB.empty()) return false;
switch (MBB.back().getOpcode()) {

View File

@@ -142,20 +142,20 @@ public:
/// copy instructions, turning them into load/store instructions.
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
int FrameIndex) const;
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
MachineInstr* LoadMI) const {
return 0;
}
virtual bool canFoldMemoryOperand(MachineInstr *MI,
SmallVectorImpl<unsigned> &Ops) const;
virtual bool canFoldMemoryOperand(const MachineInstr *MI,
const SmallVectorImpl<unsigned> &Ops) const;
virtual bool BlockHasNoFallThrough(MachineBasicBlock &MBB) const;
virtual bool BlockHasNoFallThrough(const MachineBasicBlock &MBB) const;
virtual
bool ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const;

View File

@@ -227,7 +227,7 @@ void SparcInstrInfo::loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
MachineInstr *SparcInstrInfo::foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
int FI) const {
if (Ops.size() != 1) return NULL;

View File

@@ -96,12 +96,12 @@ public:
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
int FrameIndex) const;
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
MachineInstr* LoadMI) const {
return 0;
}

View File

@@ -1543,7 +1543,7 @@ unsigned X86InstrInfo::RemoveBranch(MachineBasicBlock &MBB) const {
}
static const MachineInstrBuilder &X86InstrAddOperand(MachineInstrBuilder &MIB,
MachineOperand &MO) {
const MachineOperand &MO) {
if (MO.isReg())
MIB = MIB.addReg(MO.getReg(), MO.isDef(), MO.isImplicit(),
MO.isKill(), MO.isDead(), MO.getSubReg());
@@ -1872,7 +1872,7 @@ bool X86InstrInfo::restoreCalleeSavedRegisters(MachineBasicBlock &MBB,
}
static MachineInstr *FuseTwoAddrInst(MachineFunction &MF, unsigned Opcode,
SmallVector<MachineOperand,4> &MOs,
const SmallVector<MachineOperand,4> &MOs,
MachineInstr *MI, const TargetInstrInfo &TII) {
// Create the base instruction with the memory operand as the first part.
MachineInstr *NewMI = MF.CreateMachineInstr(TII.get(Opcode), true);
@@ -1898,7 +1898,7 @@ static MachineInstr *FuseTwoAddrInst(MachineFunction &MF, unsigned Opcode,
static MachineInstr *FuseInst(MachineFunction &MF,
unsigned Opcode, unsigned OpNo,
SmallVector<MachineOperand,4> &MOs,
const SmallVector<MachineOperand,4> &MOs,
MachineInstr *MI, const TargetInstrInfo &TII) {
MachineInstr *NewMI = MF.CreateMachineInstr(TII.get(Opcode), true);
MachineInstrBuilder MIB(NewMI);
@@ -1920,7 +1920,7 @@ static MachineInstr *FuseInst(MachineFunction &MF,
}
static MachineInstr *MakeM0Inst(const TargetInstrInfo &TII, unsigned Opcode,
SmallVector<MachineOperand,4> &MOs,
const SmallVector<MachineOperand,4> &MOs,
MachineInstr *MI) {
MachineFunction &MF = *MI->getParent()->getParent();
MachineInstrBuilder MIB = BuildMI(MF, TII.get(Opcode));
@@ -1936,7 +1936,7 @@ static MachineInstr *MakeM0Inst(const TargetInstrInfo &TII, unsigned Opcode,
MachineInstr*
X86InstrInfo::foldMemoryOperand(MachineFunction &MF,
MachineInstr *MI, unsigned i,
SmallVector<MachineOperand,4> &MOs) const {
const SmallVector<MachineOperand,4> &MOs) const {
const DenseMap<unsigned*, unsigned> *OpcodeTablePtr = NULL;
bool isTwoAddrFold = false;
unsigned NumOps = MI->getDesc().getNumOperands();
@@ -1995,7 +1995,7 @@ X86InstrInfo::foldMemoryOperand(MachineFunction &MF,
MachineInstr* X86InstrInfo::foldMemoryOperand(MachineFunction &MF,
MachineInstr *MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
int FrameIndex) const {
// Check switch flag
if (NoFusing) return NULL;
@@ -2042,7 +2042,7 @@ MachineInstr* X86InstrInfo::foldMemoryOperand(MachineFunction &MF,
MachineInstr* X86InstrInfo::foldMemoryOperand(MachineFunction &MF,
MachineInstr *MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
MachineInstr *LoadMI) const {
// Check switch flag
if (NoFusing) return NULL;
@@ -2093,8 +2093,8 @@ MachineInstr* X86InstrInfo::foldMemoryOperand(MachineFunction &MF,
}
bool X86InstrInfo::canFoldMemoryOperand(MachineInstr *MI,
SmallVectorImpl<unsigned> &Ops) const {
bool X86InstrInfo::canFoldMemoryOperand(const MachineInstr *MI,
const SmallVectorImpl<unsigned> &Ops) const {
// Check switch flag
if (NoFusing) return 0;
@@ -2350,7 +2350,7 @@ unsigned X86InstrInfo::getOpcodeAfterMemoryUnfold(unsigned Opc,
return I->second.first;
}
bool X86InstrInfo::BlockHasNoFallThrough(MachineBasicBlock &MBB) const {
bool X86InstrInfo::BlockHasNoFallThrough(const MachineBasicBlock &MBB) const {
if (MBB.empty()) return false;
switch (MBB.back().getOpcode()) {

View File

@@ -359,7 +359,7 @@ public:
/// references has been changed.
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
int FrameIndex) const;
/// foldMemoryOperand - Same as the previous version except it allows folding
@@ -367,12 +367,13 @@ public:
/// stack slot.
virtual MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
SmallVectorImpl<unsigned> &Ops,
const SmallVectorImpl<unsigned> &Ops,
MachineInstr* LoadMI) const;
/// canFoldMemoryOperand - Returns true if folding of the specified
/// load / store is possible.
virtual bool canFoldMemoryOperand(MachineInstr*, SmallVectorImpl<unsigned> &) const;
virtual bool canFoldMemoryOperand(const MachineInstr*,
const SmallVectorImpl<unsigned> &) const;
/// unfoldMemoryOperand - Separate a single instruction which folded a load or
/// a store or a load and a store into two or more instructions. If this is
@@ -391,7 +392,7 @@ public:
virtual unsigned getOpcodeAfterMemoryUnfold(unsigned Opc,
bool UnfoldLoad, bool UnfoldStore) const;
virtual bool BlockHasNoFallThrough(MachineBasicBlock &MBB) const;
virtual bool BlockHasNoFallThrough(const MachineBasicBlock &MBB) const;
virtual
bool ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const;
@@ -431,7 +432,7 @@ private:
MachineInstr* foldMemoryOperand(MachineFunction &MF,
MachineInstr* MI,
unsigned OpNum,
SmallVector<MachineOperand,4> &MOs) const;
const SmallVector<MachineOperand,4> &MOs) const;
};
} // End llvm namespace
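As a closing illustration of what the const-ification buys callers (a hypothetical caller, not code from this commit): a pass that only holds const references to its operand indices can now query the hook without first copying them into a mutable vector.

#include <vector>

using OperandList = std::vector<unsigned>;
struct MachineInstrStub {};

// Same illustrative stand-in interface as in the sketch near the top of this commit.
class TargetHooks {
public:
  virtual ~TargetHooks() = default;
  virtual bool canFoldMemoryOperand(const MachineInstrStub *MI,
                                    const OperandList &Ops) const {
    (void)MI;
    return Ops.size() == 1;
  }
};

// With the old non-const signature this would not compile, because Ops is
// only available here as a const reference.
bool tryFold(const TargetHooks &TII, const MachineInstrStub &MI,
             const OperandList &Ops) {
  return TII.canFoldMemoryOperand(&MI, Ops);
}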