Add a bunch of AVX instructions to the folding tables. Also fix the required memory-operand alignment on 256-bit AVX2 instructions (TB_ALIGN_16 -> TB_ALIGN_32).

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@148194 91177308-0d34-0410-b5e6-96231b3b80d8
Author: Craig Topper
Date:   2012-01-14 18:14:53 +00:00
Parent: c464e998e6
Commit: 446626d236

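For context on the diff below: each table row pairs a register-register opcode with its register-memory equivalent, plus TB_* flags describing the fold (the minimum alignment required of the folded memory operand, whether the fold is a store, whether it may be reversed). The following is a minimal sketch of that layout, assuming simplified names, field widths, and flag values; it is illustrative only, not the actual LLVM declarations.

    #include <cstdint>

    // Simplified stand-in for one memory-folding table entry; the real LLVM
    // struct and flag encodings differ. This only mirrors the shape of the
    // rows in the diff below.
    struct MemoryFoldTableEntry {
      uint16_t RegOp;   // register form, e.g. X86::VPANDYrr
      uint16_t MemOp;   // memory form,   e.g. X86::VPANDYrm
      uint16_t Flags;   // TB_* bits describing how the fold may be done
    };

    // Assumed flag values, named after the flags used in the table below:
    enum : uint16_t {
      TB_FOLDED_STORE = 1 << 0,  // the fold produces a store, not a load
      TB_NO_REVERSE   = 1 << 1,  // the memory form cannot be unfolded again
      TB_ALIGN_16     = 1 << 2,  // memory operand must be 16-byte aligned
      TB_ALIGN_32     = 1 << 3,  // memory operand must be 32-byte aligned
    };

    // Hypothetical opcode numbers standing in for X86::VPANDYrr / X86::VPANDYrm.
    constexpr uint16_t VPANDYrr = 0x1234, VPANDYrm = 0x1235;

    // A 256-bit (ymm) entry: its folded memory operand is 32 bytes wide, so
    // after this commit such entries carry TB_ALIGN_32 instead of TB_ALIGN_16.
    static const MemoryFoldTableEntry ExampleTable[] = {
      { VPANDYrr, VPANDYrm, TB_ALIGN_32 },
    };

The same TB_ALIGN_16 -> TB_ALIGN_32 change is what the diff applies to the 256-bit AVX2 rows.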

@@ -351,6 +351,7 @@ X86InstrInfo::X86InstrInfo(X86TargetMachine &tm)
{ X86::VEXTRACTPSrr,X86::VEXTRACTPSmr, TB_FOLDED_STORE | TB_ALIGN_16 },
{ X86::FsVMOVAPDrr, X86::VMOVSDmr, TB_FOLDED_STORE | TB_NO_REVERSE },
{ X86::FsVMOVAPSrr, X86::VMOVSSmr, TB_FOLDED_STORE | TB_NO_REVERSE },
{ X86::VEXTRACTF128rr, X86::VEXTRACTF128mr, TB_FOLDED_STORE | TB_ALIGN_16 },
{ X86::VMOVAPDrr, X86::VMOVAPDmr, TB_FOLDED_STORE | TB_ALIGN_16 },
{ X86::VMOVAPSrr, X86::VMOVAPSmr, TB_FOLDED_STORE | TB_ALIGN_16 },
{ X86::VMOVDQArr, X86::VMOVDQAmr, TB_FOLDED_STORE | TB_ALIGN_16 },
@@ -361,6 +362,7 @@ X86InstrInfo::X86InstrInfo(X86TargetMachine &tm)
{ X86::VMOVUPDrr, X86::VMOVUPDmr, TB_FOLDED_STORE },
{ X86::VMOVUPSrr, X86::VMOVUPSmr, TB_FOLDED_STORE },
// AVX 256-bit foldable instructions
{ X86::VEXTRACTI128rr, X86::VEXTRACTI128mr, TB_FOLDED_STORE | TB_ALIGN_16 },
{ X86::VMOVAPDYrr, X86::VMOVAPDYmr, TB_FOLDED_STORE | TB_ALIGN_32 },
{ X86::VMOVAPSYrr, X86::VMOVAPSYmr, TB_FOLDED_STORE | TB_ALIGN_32 },
{ X86::VMOVDQAYrr, X86::VMOVDQAYmr, TB_FOLDED_STORE | TB_ALIGN_32 },
@@ -513,6 +515,10 @@ X86InstrInfo::X86InstrInfo(X86TargetMachine &tm)
{ X86::VPABSBrr128, X86::VPABSBrm128, TB_ALIGN_16 },
{ X86::VPABSDrr128, X86::VPABSDrm128, TB_ALIGN_16 },
{ X86::VPABSWrr128, X86::VPABSWrm128, TB_ALIGN_16 },
{ X86::VPERMILPDri, X86::VPERMILPDmi, TB_ALIGN_16 },
{ X86::VPERMILPDYri, X86::VPERMILPDYmi, TB_ALIGN_16 },
{ X86::VPERMILPSri, X86::VPERMILPSmi, TB_ALIGN_16 },
{ X86::VPERMILPSYri, X86::VPERMILPSYmi, TB_ALIGN_16 },
{ X86::VPSHUFDri, X86::VPSHUFDmi, TB_ALIGN_16 },
{ X86::VPSHUFHWri, X86::VPSHUFHWmi, TB_ALIGN_16 },
{ X86::VPSHUFLWri, X86::VPSHUFLWmi, TB_ALIGN_16 },
@@ -575,6 +581,10 @@ X86InstrInfo::X86InstrInfo(X86TargetMachine &tm)
{ X86::ANDNPSrr, X86::ANDNPSrm, TB_ALIGN_16 },
{ X86::ANDPDrr, X86::ANDPDrm, TB_ALIGN_16 },
{ X86::ANDPSrr, X86::ANDPSrm, TB_ALIGN_16 },
{ X86::BLENDPDrri, X86::BLENDPDrmi, TB_ALIGN_16 },
{ X86::BLENDPSrri, X86::BLENDPSrmi, TB_ALIGN_16 },
{ X86::BLENDVPDrr0, X86::BLENDVPDrm0, TB_ALIGN_16 },
{ X86::BLENDVPSrr0, X86::BLENDVPSrm0, TB_ALIGN_16 },
{ X86::CMOVA16rr, X86::CMOVA16rm, 0 },
{ X86::CMOVA32rr, X86::CMOVA32rm, 0 },
{ X86::CMOVA64rr, X86::CMOVA64rm, 0 },
@@ -692,6 +702,7 @@ X86InstrInfo::X86InstrInfo(X86TargetMachine &tm)
{ X86::PANDrr, X86::PANDrm, TB_ALIGN_16 },
{ X86::PAVGBrr, X86::PAVGBrm, TB_ALIGN_16 },
{ X86::PAVGWrr, X86::PAVGWrm, TB_ALIGN_16 },
{ X86::PBLENDWrri, X86::PBLENDWrmi, TB_ALIGN_16 },
{ X86::PCMPEQBrr, X86::PCMPEQBrm, TB_ALIGN_16 },
{ X86::PCMPEQDrr, X86::PCMPEQDrm, TB_ALIGN_16 },
{ X86::PCMPEQQrr, X86::PCMPEQQrm, TB_ALIGN_16 },
@@ -809,6 +820,10 @@ X86InstrInfo::X86InstrInfo(X86TargetMachine &tm)
{ X86::VANDNPSrr, X86::VANDNPSrm, TB_ALIGN_16 },
{ X86::VANDPDrr, X86::VANDPDrm, TB_ALIGN_16 },
{ X86::VANDPSrr, X86::VANDPSrm, TB_ALIGN_16 },
{ X86::VBLENDPDrri, X86::VBLENDPDrmi, TB_ALIGN_16 },
{ X86::VBLENDPSrri, X86::VBLENDPSrmi, TB_ALIGN_16 },
{ X86::VBLENDVPDrr, X86::VBLENDVPDrm, TB_ALIGN_16 },
{ X86::VBLENDVPSrr, X86::VBLENDVPSrm, TB_ALIGN_16 },
{ X86::VCMPPDrri, X86::VCMPPDrmi, TB_ALIGN_16 },
{ X86::VCMPPSrri, X86::VCMPPSrmi, TB_ALIGN_16 },
{ X86::VCMPSDrr, X86::VCMPSDrm, 0 },
@@ -871,6 +886,7 @@ X86InstrInfo::X86InstrInfo(X86TargetMachine &tm)
{ X86::VPANDrr, X86::VPANDrm, TB_ALIGN_16 },
{ X86::VPAVGBrr, X86::VPAVGBrm, TB_ALIGN_16 },
{ X86::VPAVGWrr, X86::VPAVGWrm, TB_ALIGN_16 },
{ X86::VPBLENDWrri, X86::VPBLENDWrmi, TB_ALIGN_16 },
{ X86::VPCMPEQBrr, X86::VPCMPEQBrm, TB_ALIGN_16 },
{ X86::VPCMPEQDrr, X86::VPCMPEQDrm, TB_ALIGN_16 },
{ X86::VPCMPEQQrr, X86::VPCMPEQQrm, TB_ALIGN_16 },
@@ -885,6 +901,8 @@ X86InstrInfo::X86InstrInfo(X86TargetMachine &tm)
{ X86::VPHSUBDrr128, X86::VPHSUBDrm128, TB_ALIGN_16 },
{ X86::VPHSUBSWrr128, X86::VPHSUBSWrm128, TB_ALIGN_16 },
{ X86::VPHSUBWrr128, X86::VPHSUBWrm128, TB_ALIGN_16 },
{ X86::VPERMILPDrr, X86::VPERMILPDrm, TB_ALIGN_16 },
{ X86::VPERMILPSrr, X86::VPERMILPSrm, TB_ALIGN_16 },
{ X86::VPINSRWrri, X86::VPINSRWrmi, TB_ALIGN_16 },
{ X86::VPMADDUBSWrr128, X86::VPMADDUBSWrm128, TB_ALIGN_16 },
{ X86::VPMADDWDrr, X86::VPMADDWDrm, TB_ALIGN_16 },
@@ -939,90 +957,142 @@ X86InstrInfo::X86InstrInfo(X86TargetMachine &tm)
{ X86::VUNPCKLPSrr, X86::VUNPCKLPSrm, TB_ALIGN_16 },
{ X86::VXORPDrr, X86::VXORPDrm, TB_ALIGN_16 },
{ X86::VXORPSrr, X86::VXORPSrm, TB_ALIGN_16 },
// AVX 256-bit foldable instructions
{ X86::VADDPDYrr, X86::VADDPDYrm, TB_ALIGN_32 },
{ X86::VADDPSYrr, X86::VADDPSYrm, TB_ALIGN_32 },
{ X86::VADDSUBPDYrr, X86::VADDSUBPDYrm, TB_ALIGN_32 },
{ X86::VADDSUBPSYrr, X86::VADDSUBPSYrm, TB_ALIGN_32 },
{ X86::VANDNPDYrr, X86::VANDNPDYrm, TB_ALIGN_32 },
{ X86::VANDNPSYrr, X86::VANDNPSYrm, TB_ALIGN_32 },
{ X86::VANDPDYrr, X86::VANDPDYrm, TB_ALIGN_32 },
{ X86::VANDPSYrr, X86::VANDPSYrm, TB_ALIGN_32 },
{ X86::VBLENDPDYrri, X86::VBLENDPDYrmi, TB_ALIGN_32 },
{ X86::VBLENDPSYrri, X86::VBLENDPSYrmi, TB_ALIGN_32 },
{ X86::VBLENDVPDYrr, X86::VBLENDVPDYrm, TB_ALIGN_32 },
{ X86::VBLENDVPSYrr, X86::VBLENDVPSYrm, TB_ALIGN_32 },
{ X86::VCMPPDYrri, X86::VCMPPDYrmi, TB_ALIGN_32 },
{ X86::VCMPPSYrri, X86::VCMPPSYrmi, TB_ALIGN_32 },
{ X86::VDIVPDYrr, X86::VDIVPDYrm, TB_ALIGN_32 },
{ X86::VDIVPSYrr, X86::VDIVPSYrm, TB_ALIGN_32 },
{ X86::VHADDPDYrr, X86::VHADDPDYrm, TB_ALIGN_32 },
{ X86::VHADDPSYrr, X86::VHADDPSYrm, TB_ALIGN_32 },
{ X86::VHSUBPDYrr, X86::VHSUBPDYrm, TB_ALIGN_32 },
{ X86::VHSUBPSYrr, X86::VHSUBPSYrm, TB_ALIGN_32 },
{ X86::VINSERTF128rr, X86::VINSERTF128rm, TB_ALIGN_32 },
{ X86::VMAXPDYrr, X86::VMAXPDYrm, TB_ALIGN_32 },
{ X86::VMAXPDYrr_Int, X86::VMAXPDYrm_Int, TB_ALIGN_32 },
{ X86::VMAXPSYrr, X86::VMAXPSYrm, TB_ALIGN_32 },
{ X86::VMAXPSYrr_Int, X86::VMAXPSYrm_Int, TB_ALIGN_32 },
{ X86::VMINPDYrr, X86::VMINPDYrm, TB_ALIGN_32 },
{ X86::VMINPDYrr_Int, X86::VMINPDYrm_Int, TB_ALIGN_32 },
{ X86::VMINPSYrr, X86::VMINPSYrm, TB_ALIGN_32 },
{ X86::VMINPSYrr_Int, X86::VMINPSYrm_Int, TB_ALIGN_32 },
{ X86::VMULPDYrr, X86::VMULPDYrm, TB_ALIGN_32 },
{ X86::VMULPSYrr, X86::VMULPSYrm, TB_ALIGN_32 },
{ X86::VORPDYrr, X86::VORPDYrm, TB_ALIGN_32 },
{ X86::VORPSYrr, X86::VORPSYrm, TB_ALIGN_32 },
{ X86::VPERM2F128rr, X86::VPERM2F128rm, TB_ALIGN_32 },
{ X86::VPERMILPDYrr, X86::VPERMILPDYrm, TB_ALIGN_32 },
{ X86::VPERMILPSYrr, X86::VPERMILPSYrm, TB_ALIGN_32 },
{ X86::VSHUFPDYrri, X86::VSHUFPDYrmi, TB_ALIGN_32 },
{ X86::VSHUFPSYrri, X86::VSHUFPSYrmi, TB_ALIGN_32 },
{ X86::VSUBPDYrr, X86::VSUBPDYrm, TB_ALIGN_32 },
{ X86::VSUBPSYrr, X86::VSUBPSYrm, TB_ALIGN_32 },
{ X86::VUNPCKHPDYrr, X86::VUNPCKHPDYrm, TB_ALIGN_32 },
{ X86::VUNPCKHPSYrr, X86::VUNPCKHPSYrm, TB_ALIGN_32 },
{ X86::VUNPCKLPDYrr, X86::VUNPCKLPDYrm, TB_ALIGN_32 },
{ X86::VUNPCKLPSYrr, X86::VUNPCKLPSYrm, TB_ALIGN_32 },
{ X86::VXORPDYrr, X86::VXORPDYrm, TB_ALIGN_32 },
{ X86::VXORPSYrr, X86::VXORPSYrm, TB_ALIGN_32 },
// AVX2 foldable instructions
{ X86::VPACKSSDWYrr, X86::VPACKSSDWYrm, TB_ALIGN_16 },
{ X86::VPACKSSWBYrr, X86::VPACKSSWBYrm, TB_ALIGN_16 },
{ X86::VPACKUSDWYrr, X86::VPACKUSDWYrm, TB_ALIGN_16 },
{ X86::VPACKUSWBYrr, X86::VPACKUSWBYrm, TB_ALIGN_16 },
{ X86::VPADDBYrr, X86::VPADDBYrm, TB_ALIGN_16 },
{ X86::VPADDDYrr, X86::VPADDDYrm, TB_ALIGN_16 },
{ X86::VPADDQYrr, X86::VPADDQYrm, TB_ALIGN_16 },
{ X86::VPADDSBYrr, X86::VPADDSBYrm, TB_ALIGN_16 },
{ X86::VPADDSWYrr, X86::VPADDSWYrm, TB_ALIGN_16 },
{ X86::VPADDUSBYrr, X86::VPADDUSBYrm, TB_ALIGN_16 },
{ X86::VPADDUSWYrr, X86::VPADDUSWYrm, TB_ALIGN_16 },
{ X86::VPADDWYrr, X86::VPADDWYrm, TB_ALIGN_16 },
{ X86::VPALIGNR256rr, X86::VPALIGNR256rm, TB_ALIGN_16 },
{ X86::VPANDNYrr, X86::VPANDNYrm, TB_ALIGN_16 },
{ X86::VPANDYrr, X86::VPANDYrm, TB_ALIGN_16 },
{ X86::VPAVGBYrr, X86::VPAVGBYrm, TB_ALIGN_16 },
{ X86::VPAVGWYrr, X86::VPAVGWYrm, TB_ALIGN_16 },
{ X86::VPCMPEQBYrr, X86::VPCMPEQBYrm, TB_ALIGN_16 },
{ X86::VPCMPEQDYrr, X86::VPCMPEQDYrm, TB_ALIGN_16 },
{ X86::VPCMPEQQYrr, X86::VPCMPEQQYrm, TB_ALIGN_16 },
{ X86::VPCMPEQWYrr, X86::VPCMPEQWYrm, TB_ALIGN_16 },
{ X86::VPCMPGTBYrr, X86::VPCMPGTBYrm, TB_ALIGN_16 },
{ X86::VPCMPGTDYrr, X86::VPCMPGTDYrm, TB_ALIGN_16 },
{ X86::VPCMPGTQYrr, X86::VPCMPGTQYrm, TB_ALIGN_16 },
{ X86::VPCMPGTWYrr, X86::VPCMPGTWYrm, TB_ALIGN_16 },
{ X86::VPHADDDrr256, X86::VPHADDDrm256, TB_ALIGN_16 },
{ X86::VPHADDSWrr256, X86::VPHADDSWrm256, TB_ALIGN_16 },
{ X86::VPHADDWrr256, X86::VPHADDWrm256, TB_ALIGN_16 },
{ X86::VPHSUBDrr256, X86::VPHSUBDrm256, TB_ALIGN_16 },
{ X86::VPHSUBSWrr256, X86::VPHSUBSWrm256, TB_ALIGN_16 },
{ X86::VPHSUBWrr256, X86::VPHSUBWrm256, TB_ALIGN_16 },
{ X86::VPMADDUBSWrr256, X86::VPMADDUBSWrm256, TB_ALIGN_16 },
{ X86::VPMADDWDYrr, X86::VPMADDWDYrm, TB_ALIGN_16 },
{ X86::VPMAXSWYrr, X86::VPMAXSWYrm, TB_ALIGN_16 },
{ X86::VPMAXUBYrr, X86::VPMAXUBYrm, TB_ALIGN_16 },
{ X86::VPMINSWYrr, X86::VPMINSWYrm, TB_ALIGN_16 },
{ X86::VPMINUBYrr, X86::VPMINUBYrm, TB_ALIGN_16 },
{ X86::VMPSADBWYrri, X86::VMPSADBWYrmi, TB_ALIGN_16 },
{ X86::VPMULDQYrr, X86::VPMULDQYrm, TB_ALIGN_16 },
{ X86::VPMULHRSWrr256, X86::VPMULHRSWrm256, TB_ALIGN_16 },
{ X86::VPMULHUWYrr, X86::VPMULHUWYrm, TB_ALIGN_16 },
{ X86::VPMULHWYrr, X86::VPMULHWYrm, TB_ALIGN_16 },
{ X86::VPMULLDYrr, X86::VPMULLDYrm, TB_ALIGN_16 },
{ X86::VPMULLWYrr, X86::VPMULLWYrm, TB_ALIGN_16 },
{ X86::VPMULUDQYrr, X86::VPMULUDQYrm, TB_ALIGN_16 },
{ X86::VPORYrr, X86::VPORYrm, TB_ALIGN_16 },
{ X86::VPSADBWYrr, X86::VPSADBWYrm, TB_ALIGN_16 },
{ X86::VPSHUFBrr256, X86::VPSHUFBrm256, TB_ALIGN_16 },
{ X86::VPSIGNBrr256, X86::VPSIGNBrm256, TB_ALIGN_16 },
{ X86::VPSIGNWrr256, X86::VPSIGNWrm256, TB_ALIGN_16 },
{ X86::VPSIGNDrr256, X86::VPSIGNDrm256, TB_ALIGN_16 },
{ X86::VINSERTI128rr, X86::VINSERTI128rm, TB_ALIGN_16 },
{ X86::VPACKSSDWYrr, X86::VPACKSSDWYrm, TB_ALIGN_32 },
{ X86::VPACKSSWBYrr, X86::VPACKSSWBYrm, TB_ALIGN_32 },
{ X86::VPACKUSDWYrr, X86::VPACKUSDWYrm, TB_ALIGN_32 },
{ X86::VPACKUSWBYrr, X86::VPACKUSWBYrm, TB_ALIGN_32 },
{ X86::VPADDBYrr, X86::VPADDBYrm, TB_ALIGN_32 },
{ X86::VPADDDYrr, X86::VPADDDYrm, TB_ALIGN_32 },
{ X86::VPADDQYrr, X86::VPADDQYrm, TB_ALIGN_32 },
{ X86::VPADDSBYrr, X86::VPADDSBYrm, TB_ALIGN_32 },
{ X86::VPADDSWYrr, X86::VPADDSWYrm, TB_ALIGN_32 },
{ X86::VPADDUSBYrr, X86::VPADDUSBYrm, TB_ALIGN_32 },
{ X86::VPADDUSWYrr, X86::VPADDUSWYrm, TB_ALIGN_32 },
{ X86::VPADDWYrr, X86::VPADDWYrm, TB_ALIGN_32 },
{ X86::VPALIGNR256rr, X86::VPALIGNR256rm, TB_ALIGN_32 },
{ X86::VPANDNYrr, X86::VPANDNYrm, TB_ALIGN_32 },
{ X86::VPANDYrr, X86::VPANDYrm, TB_ALIGN_32 },
{ X86::VPAVGBYrr, X86::VPAVGBYrm, TB_ALIGN_32 },
{ X86::VPAVGWYrr, X86::VPAVGWYrm, TB_ALIGN_32 },
{ X86::VPBLENDDrri, X86::VPBLENDDrmi, TB_ALIGN_32 },
{ X86::VPBLENDDYrri, X86::VPBLENDDYrmi, TB_ALIGN_32 },
{ X86::VPBLENDWYrri, X86::VPBLENDWYrmi, TB_ALIGN_32 },
{ X86::VPCMPEQBYrr, X86::VPCMPEQBYrm, TB_ALIGN_32 },
{ X86::VPCMPEQDYrr, X86::VPCMPEQDYrm, TB_ALIGN_32 },
{ X86::VPCMPEQQYrr, X86::VPCMPEQQYrm, TB_ALIGN_32 },
{ X86::VPCMPEQWYrr, X86::VPCMPEQWYrm, TB_ALIGN_32 },
{ X86::VPCMPGTBYrr, X86::VPCMPGTBYrm, TB_ALIGN_32 },
{ X86::VPCMPGTDYrr, X86::VPCMPGTDYrm, TB_ALIGN_32 },
{ X86::VPCMPGTQYrr, X86::VPCMPGTQYrm, TB_ALIGN_32 },
{ X86::VPCMPGTWYrr, X86::VPCMPGTWYrm, TB_ALIGN_32 },
{ X86::VPERM2I128rr, X86::VPERM2I128rm, TB_ALIGN_32 },
{ X86::VPHADDDrr256, X86::VPHADDDrm256, TB_ALIGN_32 },
{ X86::VPHADDSWrr256, X86::VPHADDSWrm256, TB_ALIGN_32 },
{ X86::VPHADDWrr256, X86::VPHADDWrm256, TB_ALIGN_32 },
{ X86::VPHSUBDrr256, X86::VPHSUBDrm256, TB_ALIGN_32 },
{ X86::VPHSUBSWrr256, X86::VPHSUBSWrm256, TB_ALIGN_32 },
{ X86::VPHSUBWrr256, X86::VPHSUBWrm256, TB_ALIGN_32 },
{ X86::VPMADDUBSWrr256, X86::VPMADDUBSWrm256, TB_ALIGN_32 },
{ X86::VPMADDWDYrr, X86::VPMADDWDYrm, TB_ALIGN_32 },
{ X86::VPMAXSWYrr, X86::VPMAXSWYrm, TB_ALIGN_32 },
{ X86::VPMAXUBYrr, X86::VPMAXUBYrm, TB_ALIGN_32 },
{ X86::VPMINSWYrr, X86::VPMINSWYrm, TB_ALIGN_32 },
{ X86::VPMINUBYrr, X86::VPMINUBYrm, TB_ALIGN_32 },
{ X86::VMPSADBWYrri, X86::VMPSADBWYrmi, TB_ALIGN_32 },
{ X86::VPMULDQYrr, X86::VPMULDQYrm, TB_ALIGN_32 },
{ X86::VPMULHRSWrr256, X86::VPMULHRSWrm256, TB_ALIGN_32 },
{ X86::VPMULHUWYrr, X86::VPMULHUWYrm, TB_ALIGN_32 },
{ X86::VPMULHWYrr, X86::VPMULHWYrm, TB_ALIGN_32 },
{ X86::VPMULLDYrr, X86::VPMULLDYrm, TB_ALIGN_32 },
{ X86::VPMULLWYrr, X86::VPMULLWYrm, TB_ALIGN_32 },
{ X86::VPMULUDQYrr, X86::VPMULUDQYrm, TB_ALIGN_32 },
{ X86::VPORYrr, X86::VPORYrm, TB_ALIGN_32 },
{ X86::VPSADBWYrr, X86::VPSADBWYrm, TB_ALIGN_32 },
{ X86::VPSHUFBrr256, X86::VPSHUFBrm256, TB_ALIGN_32 },
{ X86::VPSIGNBrr256, X86::VPSIGNBrm256, TB_ALIGN_32 },
{ X86::VPSIGNWrr256, X86::VPSIGNWrm256, TB_ALIGN_32 },
{ X86::VPSIGNDrr256, X86::VPSIGNDrm256, TB_ALIGN_32 },
{ X86::VPSLLDYrr, X86::VPSLLDYrm, TB_ALIGN_16 },
{ X86::VPSLLQYrr, X86::VPSLLQYrm, TB_ALIGN_16 },
{ X86::VPSLLWYrr, X86::VPSLLWYrm, TB_ALIGN_16 },
{ X86::VPSLLVDrr, X86::VPSLLVDrm, TB_ALIGN_16 },
{ X86::VPSLLVDYrr, X86::VPSLLVDYrm, TB_ALIGN_16 },
{ X86::VPSLLVDYrr, X86::VPSLLVDYrm, TB_ALIGN_32 },
{ X86::VPSLLVQrr, X86::VPSLLVQrm, TB_ALIGN_16 },
{ X86::VPSLLVQYrr, X86::VPSLLVQYrm, TB_ALIGN_16 },
{ X86::VPSLLVQYrr, X86::VPSLLVQYrm, TB_ALIGN_32 },
{ X86::VPSRADYrr, X86::VPSRADYrm, TB_ALIGN_16 },
{ X86::VPSRAWYrr, X86::VPSRAWYrm, TB_ALIGN_16 },
{ X86::VPSRAVDrr, X86::VPSRAVDrm, TB_ALIGN_16 },
{ X86::VPSRAVDYrr, X86::VPSRAVDYrm, TB_ALIGN_16 },
{ X86::VPSRAVDYrr, X86::VPSRAVDYrm, TB_ALIGN_32 },
{ X86::VPSRLDYrr, X86::VPSRLDYrm, TB_ALIGN_16 },
{ X86::VPSRLQYrr, X86::VPSRLQYrm, TB_ALIGN_16 },
{ X86::VPSRLWYrr, X86::VPSRLWYrm, TB_ALIGN_16 },
{ X86::VPSRLVDrr, X86::VPSRLVDrm, TB_ALIGN_16 },
{ X86::VPSRLVDYrr, X86::VPSRLVDYrm, TB_ALIGN_16 },
{ X86::VPSRLVDYrr, X86::VPSRLVDYrm, TB_ALIGN_32 },
{ X86::VPSRLVQrr, X86::VPSRLVQrm, TB_ALIGN_16 },
{ X86::VPSRLVQYrr, X86::VPSRLVQYrm, TB_ALIGN_16 },
{ X86::VPSUBBYrr, X86::VPSUBBYrm, TB_ALIGN_16 },
{ X86::VPSUBDYrr, X86::VPSUBDYrm, TB_ALIGN_16 },
{ X86::VPSUBSBYrr, X86::VPSUBSBYrm, TB_ALIGN_16 },
{ X86::VPSUBSWYrr, X86::VPSUBSWYrm, TB_ALIGN_16 },
{ X86::VPSUBWYrr, X86::VPSUBWYrm, TB_ALIGN_16 },
{ X86::VPUNPCKHBWYrr, X86::VPUNPCKHBWYrm, TB_ALIGN_16 },
{ X86::VPUNPCKHDQYrr, X86::VPUNPCKHDQYrm, TB_ALIGN_16 },
{ X86::VPSRLVQYrr, X86::VPSRLVQYrm, TB_ALIGN_32 },
{ X86::VPSUBBYrr, X86::VPSUBBYrm, TB_ALIGN_32 },
{ X86::VPSUBDYrr, X86::VPSUBDYrm, TB_ALIGN_32 },
{ X86::VPSUBSBYrr, X86::VPSUBSBYrm, TB_ALIGN_32 },
{ X86::VPSUBSWYrr, X86::VPSUBSWYrm, TB_ALIGN_32 },
{ X86::VPSUBWYrr, X86::VPSUBWYrm, TB_ALIGN_32 },
{ X86::VPUNPCKHBWYrr, X86::VPUNPCKHBWYrm, TB_ALIGN_32 },
{ X86::VPUNPCKHDQYrr, X86::VPUNPCKHDQYrm, TB_ALIGN_32 },
{ X86::VPUNPCKHQDQYrr, X86::VPUNPCKHQDQYrm, TB_ALIGN_16 },
{ X86::VPUNPCKHWDYrr, X86::VPUNPCKHWDYrm, TB_ALIGN_16 },
{ X86::VPUNPCKLBWYrr, X86::VPUNPCKLBWYrm, TB_ALIGN_16 },
{ X86::VPUNPCKLDQYrr, X86::VPUNPCKLDQYrm, TB_ALIGN_16 },
{ X86::VPUNPCKLQDQYrr, X86::VPUNPCKLQDQYrm, TB_ALIGN_16 },
{ X86::VPUNPCKLWDYrr, X86::VPUNPCKLWDYrm, TB_ALIGN_16 },
{ X86::VPXORYrr, X86::VPXORYrm, TB_ALIGN_16 },
{ X86::VPUNPCKHWDYrr, X86::VPUNPCKHWDYrm, TB_ALIGN_32 },
{ X86::VPUNPCKLBWYrr, X86::VPUNPCKLBWYrm, TB_ALIGN_32 },
{ X86::VPUNPCKLDQYrr, X86::VPUNPCKLDQYrm, TB_ALIGN_32 },
{ X86::VPUNPCKLQDQYrr, X86::VPUNPCKLQDQYrm, TB_ALIGN_32 },
{ X86::VPUNPCKLWDYrr, X86::VPUNPCKLWDYrm, TB_ALIGN_32 },
{ X86::VPXORYrr, X86::VPXORYrm, TB_ALIGN_32 },
// FIXME: add AVX 256-bit foldable instructions
};