[AArch64][LoadStoreOptimizer] Form LDPSW when possible.
This patch adds the missing LD[U]RSW variants to the load store optimizer, so that we generate LDPSW when possible.

<rdar://problem/19583480>

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@226978 91177308-0d34-0410-b5e6-96231b3b80d8
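For illustration only (the function below is a made-up example, not part of the patch or its tests): two adjacent 32-bit loads that are each sign-extended to 64 bits are exactly the LDRSW pattern this patch teaches the optimizer to pair.

// Hypothetical example, not from the commit. Each p[i] is a 32-bit load that
// is sign-extended to 64 bits, which AArch64 lowers to LDRSW. With the
// LD[U]RSW cases added below, the load/store optimizer can be expected to
// merge the two adjacent loads into a single LDPSW.
long long sum_adjacent(const int *p) {
  return (long long)p[0] + (long long)p[1];
}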
@@ -135,6 +135,8 @@ static bool isUnscaledLdst(unsigned Opc) {
     return true;
   case AArch64::LDURXi:
     return true;
+  case AArch64::LDURSWi:
+    return true;
   }
 }

@@ -173,6 +175,9 @@ int AArch64LoadStoreOpt::getMemSize(MachineInstr *MemMI) {
   case AArch64::LDRXui:
   case AArch64::LDURXi:
     return 8;
+  case AArch64::LDRSWui:
+  case AArch64::LDURSWi:
+    return 4;
   }
 }

@@ -210,6 +215,9 @@ static unsigned getMatchingPairOpcode(unsigned Opc) {
   case AArch64::LDRXui:
   case AArch64::LDURXi:
     return AArch64::LDPXi;
+  case AArch64::LDRSWui:
+  case AArch64::LDURSWi:
+    return AArch64::LDPSWi;
   }
 }

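For context, a simplified sketch of how the access size from getMemSize and the pair opcode from getMatchingPairOpcode feed the pairing decision; the struct and helper names below are invented for illustration and are not part of AArch64LoadStoreOpt:

#include <cstdlib>

// Invented illustration, not the pass's real code: two candidate loads can be
// rewritten as one paired load when they map to the same pair opcode (LDPSWi
// for the sign-extending word loads added here) and their offsets differ by
// exactly one access size.
struct CandidateLoad {
  unsigned PairOpc; // result of getMatchingPairOpcode for this load
  long Offset;      // byte offset of the memory access
};

static bool mayFormPair(const CandidateLoad &A, const CandidateLoad &B,
                        int MemSizeBytes) {
  return A.PairOpc == B.PairOpc &&
         std::labs(A.Offset - B.Offset) == MemSizeBytes;
}
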
@@ -237,6 +245,8 @@ static unsigned getPreIndexedOpcode(unsigned Opc) {
     return AArch64::LDRWpre;
   case AArch64::LDRXui:
     return AArch64::LDRXpre;
+  case AArch64::LDRSWui:
+    return AArch64::LDRSWpre;
   }
 }

@@ -264,6 +274,8 @@ static unsigned getPostIndexedOpcode(unsigned Opc) {
     return AArch64::LDRWpost;
   case AArch64::LDRXui:
     return AArch64::LDRXpost;
+  case AArch64::LDRSWui:
+    return AArch64::LDRSWpost;
   }
 }

@@ -780,6 +792,7 @@ bool AArch64LoadStoreOpt::optimizeBlock(MachineBasicBlock &MBB) {
     case AArch64::LDRQui:
     case AArch64::LDRXui:
     case AArch64::LDRWui:
+    case AArch64::LDRSWui:
     // do the unscaled versions as well
     case AArch64::STURSi:
     case AArch64::STURDi:

@@ -790,7 +803,8 @@ bool AArch64LoadStoreOpt::optimizeBlock(MachineBasicBlock &MBB) {
     case AArch64::LDURDi:
     case AArch64::LDURQi:
     case AArch64::LDURWi:
-    case AArch64::LDURXi: {
+    case AArch64::LDURXi:
+    case AArch64::LDURSWi: {
       // If this is a volatile load/store, don't mess with it.
       if (MI->hasOrderedMemoryRef()) {
         ++MBBI;