mirror of https://github.com/c64scene-ar/llvm-6502.git (synced 2024-12-27 13:30:05 +00:00)
Update a few calls to getSubtarget<> to use either getSubtargetImpl(), where we didn't need the cast to the base class, or the subtarget cached on the MachineFunction.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@227176 91177308-0d34-0410-b5e6-96231b3b80d8
parent f57b52c8ba
commit fd0f7927e0
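For context, the migration this commit performs looks roughly like the following. This is a minimal sketch against the LLVM API as it stood at this revision; the helper function wantsAA() and its parameters are illustrative only and do not appear in the commit.

// Sketch of the before/after idioms (assumed LLVM headers circa r227176;
// wantsAA() itself is hypothetical and not part of this commit).
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/IR/Function.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetSubtargetInfo.h"
using namespace llvm;

static bool wantsAA(const TargetMachine *TM, const MachineFunction &MF) {
  // Before: a templated lookup through the TargetMachine, even though the
  // result is only ever used as the base TargetSubtargetInfo:
  //   const TargetSubtargetInfo &Old =
  //       TM->getSubtarget<TargetSubtargetInfo>(MF.getFunction());

  // After (a): when no cast to a derived subtarget is needed, ask the
  // TargetMachine for the base class directly.
  const TargetSubtargetInfo *ST = TM->getSubtargetImpl(*MF.getFunction());

  // After (b): inside machine-level code, reuse the subtarget already
  // cached on the MachineFunction.
  const TargetSubtargetInfo &STI = MF.getSubtarget();

  return ST->useAA() && STI.useMachineScheduler();
}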
@@ -225,7 +225,7 @@ void BasicTTI::getUnrollingPreferences(const Function *F, Loop *L,
   // until someone finds a case where it matters in practice.

   unsigned MaxOps;
-  const TargetSubtargetInfo *ST = &TM->getSubtarget<TargetSubtargetInfo>(F);
+  const TargetSubtargetInfo *ST = TM->getSubtargetImpl(F);
   if (PartialUnrollingThreshold.getNumOccurrences() > 0)
     MaxOps = PartialUnrollingThreshold;
   else if (ST->getSchedModel().LoopMicroOpBufferSize > 0)
@@ -3145,8 +3145,8 @@ bool CodeGenPrepare::OptimizeMemoryInst(Instruction *MemoryInst, Value *Addr,
       SunkAddr = Builder.CreateBitCast(SunkAddr, Addr->getType());
   } else if (AddrSinkUsingGEPs ||
              (!AddrSinkUsingGEPs.getNumOccurrences() && TM &&
-              TM->getSubtarget<TargetSubtargetInfo>(
-                  MemoryInst->getParent()->getParent()).useAA())) {
+              TM->getSubtargetImpl(*MemoryInst->getParent()->getParent())
+                  ->useAA())) {
     // By default, we use the GEP-based method when AA is used later. This
     // prevents new inttoptr/ptrtoint pairs from degrading AA capabilities.
     DEBUG(dbgs() << "CGP: SINKING nonlocal addrmode: " << AddrMode << " for "
@@ -249,7 +249,7 @@ TargetPassConfig::TargetPassConfig(TargetMachine *tm, PassManagerBase &pm)
     substitutePass(&PostRAMachineLICMID, &MachineLICMID);

   // Temporarily disable experimental passes.
-  const TargetSubtargetInfo &ST = TM->getSubtarget<TargetSubtargetInfo>();
+  const TargetSubtargetInfo &ST = *TM->getSubtargetImpl();
   if (!ST.useMachineScheduler())
     disablePass(&MachineSchedulerID);
 }
@@ -2716,15 +2716,14 @@ bool RegisterCoalescer::runOnMachineFunction(MachineFunction &fn) {
   MF = &fn;
   MRI = &fn.getRegInfo();
   TM = &fn.getTarget();
-  TRI = TM->getSubtargetImpl()->getRegisterInfo();
-  TII = TM->getSubtargetImpl()->getInstrInfo();
+  const TargetSubtargetInfo &STI = fn.getSubtarget();
+  TRI = STI.getRegisterInfo();
+  TII = STI.getInstrInfo();
   LIS = &getAnalysis<LiveIntervals>();
   AA = &getAnalysis<AliasAnalysis>();
   Loops = &getAnalysis<MachineLoopInfo>();
-
-  const TargetSubtargetInfo &ST = TM->getSubtarget<TargetSubtargetInfo>();
   if (EnableGlobalCopies == cl::BOU_UNSET)
-    JoinGlobalCopies = ST.useMachineScheduler();
+    JoinGlobalCopies = STI.useMachineScheduler();
   else
     JoinGlobalCopies = (EnableGlobalCopies == cl::BOU_TRUE);

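The RegisterCoalescer hunk above (and the ScheduleDAGInstrs hunks that follow) reduce to the same shape: fetch the MachineFunction's cached subtarget once and read the register and instruction info off it. A stripped-down sketch of that pattern, assuming the same-era headers; SubtargetUserPass is a made-up pass, not part of LLVM or of this commit.

#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"
using namespace llvm;

namespace {
// Hypothetical pass skeleton; only the subtarget-caching pattern mirrors
// the change made in RegisterCoalescer::runOnMachineFunction above.
class SubtargetUserPass : public MachineFunctionPass {
  const TargetRegisterInfo *TRI = nullptr;
  const TargetInstrInfo *TII = nullptr;

public:
  static char ID;
  SubtargetUserPass() : MachineFunctionPass(ID) {}

  bool runOnMachineFunction(MachineFunction &MF) override {
    // Query the subtarget once via the MachineFunction and cache what the
    // pass needs, instead of repeated TM->getSubtarget<> lookups.
    const TargetSubtargetInfo &STI = MF.getSubtarget();
    TRI = STI.getRegisterInfo();
    TII = STI.getInstrInfo();
    return false; // nothing is modified in this sketch
  }
};
char SubtargetUserPass::ID = 0;
} // end anonymous namespace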
@@ -51,18 +51,17 @@ static cl::opt<bool> UseTBAA("use-tbaa-in-sched-mi", cl::Hidden,

 ScheduleDAGInstrs::ScheduleDAGInstrs(MachineFunction &mf,
                                      const MachineLoopInfo *mli,
-                                     bool IsPostRAFlag,
-                                     bool RemoveKillFlags,
+                                     bool IsPostRAFlag, bool RemoveKillFlags,
                                      LiveIntervals *lis)
-    : ScheduleDAG(mf), MLI(mli), MFI(mf.getFrameInfo()), LIS(lis),
-      IsPostRA(IsPostRAFlag), RemoveKillFlags(RemoveKillFlags),
-      CanHandleTerminators(false), FirstDbgValue(nullptr) {
+    : ScheduleDAG(mf), MLI(mli), MFI(mf.getFrameInfo()), LIS(lis),
+      IsPostRA(IsPostRAFlag), RemoveKillFlags(RemoveKillFlags),
+      CanHandleTerminators(false), FirstDbgValue(nullptr) {
   assert((IsPostRA || LIS) && "PreRA scheduling requires LiveIntervals");
   DbgValues.clear();
   assert(!(IsPostRA && MRI.getNumVirtRegs()) &&
          "Virtual registers must be removed prior to PostRA scheduling");

-  const TargetSubtargetInfo &ST = TM.getSubtarget<TargetSubtargetInfo>();
+  const TargetSubtargetInfo &ST = mf.getSubtarget();
   SchedModel.init(ST.getSchedModel(), &ST, TII);
 }
@@ -253,7 +252,7 @@ void ScheduleDAGInstrs::addPhysRegDataDeps(SUnit *SU, unsigned OperIdx) {
   assert(MO.isDef() && "expect physreg def");

   // Ask the target if address-backscheduling is desirable, and if so how much.
-  const TargetSubtargetInfo &ST = TM.getSubtarget<TargetSubtargetInfo>();
+  const TargetSubtargetInfo &ST = MF.getSubtarget();

   for (MCRegAliasIterator Alias(MO.getReg(), TRI, true);
        Alias.isValid(); ++Alias) {
@@ -444,7 +443,7 @@ void ScheduleDAGInstrs::addVRegUseDeps(SUnit *SU, unsigned OperIdx) {
     int DefOp = Def->findRegisterDefOperandIdx(Reg);
     dep.setLatency(SchedModel.computeOperandLatency(Def, DefOp, MI, OperIdx));

-    const TargetSubtargetInfo &ST = TM.getSubtarget<TargetSubtargetInfo>();
+    const TargetSubtargetInfo &ST = MF.getSubtarget();
     ST.adjustSchedDependency(DefSU, SU, const_cast<SDep &>(dep));
     SU->addPred(dep);
   }
@@ -743,7 +742,7 @@ void ScheduleDAGInstrs::initSUnits() {
 void ScheduleDAGInstrs::buildSchedGraph(AliasAnalysis *AA,
                                         RegPressureTracker *RPTracker,
                                         PressureDiffs *PDiffs) {
-  const TargetSubtargetInfo &ST = TM.getSubtarget<TargetSubtargetInfo>();
+  const TargetSubtargetInfo &ST = MF.getSubtarget();
   bool UseAA = EnableAASchedMI.getNumOccurrences() > 0 ? EnableAASchedMI
                                                        : ST.useAA();
   AliasAnalysis *AAForDep = UseAA ? AA : nullptr;