Move the entries for 64-bit CMP, IMUL, and a few others into the correct
tables so that they are eligible for reload/remat folding. And add
entries for JMP and CALL.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@42094 91177308-0d34-0410-b5e6-96231b3b80d8
Author: Dan Gohman
Date:   2007-09-18 14:59:14 +00:00
Parent: 8ef426baa3
Commit: 869b2b2c23
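
For orientation: each OpcodeTable below pairs a register-form opcode with
its memory-form equivalent, and foldMemoryOperand consults the table that
corresponds to the operand index i being folded. A minimal sketch of how
such a lookup can work, assuming the tables are kept sorted by source
opcode (LookupOpcode and its exact shape here are illustrative, not the
verbatim LLVM code):

    // Illustrative sketch only: LookupOpcode is an assumed helper name,
    // not the exact function in X86RegisterInfo.cpp.
    #include <algorithm>
    #include <cstddef>

    struct TableEntry {
      unsigned from;   // register-form opcode, e.g. X86::CMP64rr
      unsigned to;     // memory-form opcode,   e.g. X86::CMP64rm
    };

    // Lets std::lower_bound compare a table entry against an opcode.
    inline bool operator<(const TableEntry &E, unsigned Opcode) {
      return E.from < Opcode;
    }

    // Assumes Table is sorted by 'from'. Returns the memory-form opcode,
    // or -1 if the instruction has no entry (the fold is then rejected).
    static int LookupOpcode(const TableEntry *Table, std::size_t N,
                            unsigned Opcode) {
      const TableEntry *End = Table + N;
      const TableEntry *I = std::lower_bound(Table, End, Opcode);
      if (I != End && I->from == Opcode)
        return static_cast<int>(I->to);
      return -1;
    }

Keeping each table sorted is what makes a binary search valid, which is
presumably why the entries in the diff below stay in alphabetical order.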

@@ -629,10 +629,14 @@ X86RegisterInfo::foldMemoryOperand(MachineInstr *MI, unsigned i,
     }
 
     static const TableEntry OpcodeTable[] = {
+      { X86::CALL32r, X86::CALL32m },
+      { X86::CALL64r, X86::CALL64m },
       { X86::CMP16ri, X86::CMP16mi },
       { X86::CMP16ri8, X86::CMP16mi8 },
       { X86::CMP32ri, X86::CMP32mi },
       { X86::CMP32ri8, X86::CMP32mi8 },
+      { X86::CMP64ri32, X86::CMP64mi32 },
+      { X86::CMP64ri8, X86::CMP64mi8 },
       { X86::CMP8ri, X86::CMP8mi },
       { X86::DIV16r, X86::DIV16m },
       { X86::DIV32r, X86::DIV32m },
@@ -648,6 +652,8 @@ X86RegisterInfo::foldMemoryOperand(MachineInstr *MI, unsigned i,
       { X86::IMUL32r, X86::IMUL32m },
       { X86::IMUL64r, X86::IMUL64m },
       { X86::IMUL8r, X86::IMUL8m },
+      { X86::JMP32r, X86::JMP32m },
+      { X86::JMP64r, X86::JMP64m },
       { X86::MOV16ri, X86::MOV16mi },
       { X86::MOV16rr, X86::MOV16mr },
       { X86::MOV32ri, X86::MOV32mi },
@@ -671,6 +677,9 @@ X86RegisterInfo::foldMemoryOperand(MachineInstr *MI, unsigned i,
       { X86::MUL32r, X86::MUL32m },
       { X86::MUL64r, X86::MUL64m },
       { X86::MUL8r, X86::MUL8m },
+      { X86::PSHUFDri, X86::PSHUFDmi },
+      { X86::PSHUFHWri, X86::PSHUFHWmi },
+      { X86::PSHUFLWri, X86::PSHUFLWmi },
       { X86::SETAEr, X86::SETAEm },
       { X86::SETAr, X86::SETAm },
       { X86::SETBEr, X86::SETBEm },
@@ -685,6 +694,7 @@ X86RegisterInfo::foldMemoryOperand(MachineInstr *MI, unsigned i,
       { X86::SETNSr, X86::SETNSm },
       { X86::SETPr, X86::SETPm },
       { X86::SETSr, X86::SETSm },
+      { X86::TAILJMPr, X86::TAILJMPm },
       { X86::TEST16ri, X86::TEST16mi },
       { X86::TEST32ri, X86::TEST32mi },
       { X86::TEST64ri32, X86::TEST64mi32 },
@@ -702,8 +712,6 @@ X86RegisterInfo::foldMemoryOperand(MachineInstr *MI, unsigned i,
     static const TableEntry OpcodeTable[] = {
       { X86::CMP16rr, X86::CMP16rm },
       { X86::CMP32rr, X86::CMP32rm },
-      { X86::CMP64ri32, X86::CMP64mi32 },
-      { X86::CMP64ri8, X86::CMP64mi8 },
       { X86::CMP64rr, X86::CMP64rm },
       { X86::CMP8rr, X86::CMP8rm },
       { X86::CMPPDrri, X86::CMPPDrmi },
@@ -726,7 +734,6 @@ X86RegisterInfo::foldMemoryOperand(MachineInstr *MI, unsigned i,
       { X86::IMUL16rri8, X86::IMUL16rmi8 },
       { X86::IMUL32rri, X86::IMUL32rmi },
       { X86::IMUL32rri8, X86::IMUL32rmi8 },
-      { X86::IMUL64rr, X86::IMUL64rm },
       { X86::IMUL64rri32, X86::IMUL64rmi32 },
       { X86::IMUL64rri8, X86::IMUL64rmi8 },
       { X86::Int_CMPSDrr, X86::Int_CMPSDrm },
@@ -787,9 +794,6 @@ X86RegisterInfo::foldMemoryOperand(MachineInstr *MI, unsigned i,
       { X86::MOVZX32rr8, X86::MOVZX32rm8 },
       { X86::MOVZX64rr16, X86::MOVZX64rm16 },
       { X86::MOVZX64rr8, X86::MOVZX64rm8 },
-      { X86::PSHUFDri, X86::PSHUFDmi },
-      { X86::PSHUFHWri, X86::PSHUFHWmi },
-      { X86::PSHUFLWri, X86::PSHUFLWmi },
       { X86::PsMOVZX64rr32, X86::PsMOVZX64rm32 },
       { X86::TEST16rr, X86::TEST16rm },
       { X86::TEST32rr, X86::TEST32rm },
@@ -881,6 +885,7 @@ X86RegisterInfo::foldMemoryOperand(MachineInstr *MI, unsigned i,
       { X86::HSUBPSrr, X86::HSUBPSrm },
       { X86::IMUL16rr, X86::IMUL16rm },
       { X86::IMUL32rr, X86::IMUL32rm },
+      { X86::IMUL64rr, X86::IMUL64rm },
       { X86::MAXPDrr, X86::MAXPDrm },
       { X86::MAXPDrr_Int, X86::MAXPDrm_Int },
       { X86::MAXPSrr, X86::MAXPSrm },