Encode the 32-bit wide Thumb (and Thumb2) instructions with the high-order halfword emitted to the stream first.

rdar://8728174

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@120848 91177308-0d34-0410-b5e6-96231b3b80d8
This commit is contained in:
parent 5812b10adb
commit d91f4e40e6
@@ -995,6 +995,7 @@ getAddrMode6OffsetOpValue(const MCInst &MI, unsigned Op,
 void ARMMCCodeEmitter::
 EncodeInstruction(const MCInst &MI, raw_ostream &OS,
                   SmallVectorImpl<MCFixup> &Fixups) const {
+  const ARMSubtarget &Subtarget = TM.getSubtarget<ARMSubtarget>();
   // Pseudo instructions don't get encoded.
   const TargetInstrDesc &Desc = TII.get(MI.getOpcode());
   uint64_t TSFlags = Desc.TSFlags;
@@ -1007,7 +1008,14 @@ EncodeInstruction(const MCInst &MI, raw_ostream &OS,
   case ARMII::Size2Bytes: Size = 2; break;
   case ARMII::Size4Bytes: Size = 4; break;
   }
-  EmitConstant(getBinaryCodeForInstr(MI, Fixups), Size, OS);
+  uint32_t Binary = getBinaryCodeForInstr(MI, Fixups);
+  // Thumb 32-bit wide instructions need to have the high order halfword
+  // emitted first.
+  if (Subtarget.isThumb() && Size == 4) {
+    EmitConstant(Binary >> 16, 2, OS);
+    EmitConstant(Binary & 0xffff, 2, OS);
+  } else
+    EmitConstant(Binary, Size, OS);
   ++MCNumEmitted; // Keep track of the # of mi's emitted.
 }

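In plain terms: the encoder still builds the full 32-bit encoding as one uint32_t, but for Thumb it now writes it as two 16-bit units, high-order halfword first, while each halfword itself is still emitted little-endian. A minimal standalone sketch of that rule (emitLE16 and emitThumb32 are illustrative helpers, not LLVM API):

    #include <cstdint>
    #include <ostream>

    // Write one 16-bit value in little-endian byte order.
    static void emitLE16(uint16_t V, std::ostream &OS) {
      OS.put(char(V & 0xff));        // low byte first
      OS.put(char((V >> 8) & 0xff)); // then high byte
    }

    // Emit a 32-bit Thumb/Thumb2 encoding the way the if-branch above does:
    // high-order halfword first, each halfword little-endian.
    static void emitThumb32(uint32_t Binary, std::ostream &OS) {
      emitLE16(uint16_t(Binary >> 16), OS);    // high halfword
      emitLE16(uint16_t(Binary & 0xffff), OS); // low halfword
    }

For 0xfff10320 (the vabs.s8 case in the first test below) this produces the byte stream f1 ff 20 03, which is exactly what the updated CHECK lines expect.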
@@ -2,32 +2,32 @@

 .code 16

-@ CHECK: vabs.s8 d16, d16 @ encoding: [0x20,0x03,0xf1,0xff]
+@ CHECK: vabs.s8 d16, d16 @ encoding: [0xf1,0xff,0x20,0x03]
 vabs.s8 d16, d16
-@ CHECK: vabs.s16 d16, d16 @ encoding: [0x20,0x03,0xf5,0xff]
+@ CHECK: vabs.s16 d16, d16 @ encoding: [0xf5,0xff,0x20,0x03]
 vabs.s16 d16, d16
-@ CHECK: vabs.s32 d16, d16 @ encoding: [0x20,0x03,0xf9,0xff]
+@ CHECK: vabs.s32 d16, d16 @ encoding: [0xf9,0xff,0x20,0x03]
 vabs.s32 d16, d16
-@ CHECK: vabs.f32 d16, d16 @ encoding: [0x20,0x07,0xf9,0xff]
+@ CHECK: vabs.f32 d16, d16 @ encoding: [0xf9,0xff,0x20,0x07]
 vabs.f32 d16, d16
-@ CHECK: vabs.s8 q8, q8 @ encoding: [0x60,0x03,0xf1,0xff]
+@ CHECK: vabs.s8 q8, q8 @ encoding: [0xf1,0xff,0x60,0x03]
 vabs.s8 q8, q8
-@ CHECK: vabs.s16 q8, q8 @ encoding: [0x60,0x03,0xf5,0xff]
+@ CHECK: vabs.s16 q8, q8 @ encoding: [0xf5,0xff,0x60,0x03]
 vabs.s16 q8, q8
-@ CHECK: vabs.s32 q8, q8 @ encoding: [0x60,0x03,0xf9,0xff]
+@ CHECK: vabs.s32 q8, q8 @ encoding: [0xf9,0xff,0x60,0x03]
 vabs.s32 q8, q8
-@ CHECK: vabs.f32 q8, q8 @ encoding: [0x60,0x07,0xf9,0xff]
+@ CHECK: vabs.f32 q8, q8 @ encoding: [0xf9,0xff,0x60,0x07]
 vabs.f32 q8, q8

-@ CHECK: vqabs.s8 d16, d16 @ encoding: [0x20,0x07,0xf0,0xff]
+@ CHECK: vqabs.s8 d16, d16 @ encoding: [0xf0,0xff,0x20,0x07]
 vqabs.s8 d16, d16
-@ CHECK: vqabs.s16 d16, d16 @ encoding: [0x20,0x07,0xf4,0xff]
+@ CHECK: vqabs.s16 d16, d16 @ encoding: [0xf4,0xff,0x20,0x07]
 vqabs.s16 d16, d16
-@ CHECK: vqabs.s32 d16, d16 @ encoding: [0x20,0x07,0xf8,0xff]
+@ CHECK: vqabs.s32 d16, d16 @ encoding: [0xf8,0xff,0x20,0x07]
 vqabs.s32 d16, d16
-@ CHECK: vqabs.s8 q8, q8 @ encoding: [0x60,0x07,0xf0,0xff]
+@ CHECK: vqabs.s8 q8, q8 @ encoding: [0xf0,0xff,0x60,0x07]
 vqabs.s8 q8, q8
-@ CHECK: vqabs.s16 q8, q8 @ encoding: [0x60,0x07,0xf4,0xff]
+@ CHECK: vqabs.s16 q8, q8 @ encoding: [0xf4,0xff,0x60,0x07]
 vqabs.s16 q8, q8
-@ CHECK: vqabs.s32 q8, q8 @ encoding: [0x60,0x07,0xf8,0xff]
+@ CHECK: vqabs.s32 q8, q8 @ encoding: [0xf8,0xff,0x60,0x07]
 vqabs.s32 q8, q8
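Concretely, for the first pair above: vabs.s8 d16, d16 assembles to the 32-bit Thumb2 encoding 0xfff10320, i.e. high halfword 0xfff1 and low halfword 0x0320. Emitting the low halfword first (each halfword little-endian) gave the old byte stream [0x20,0x03,0xf1,0xff]; emitting the high halfword first gives [0xf1,0xff,0x20,0x03], which is what every updated CHECK line in these tests now expects.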
@@ -2,137 +2,137 @@

 .code 16

-@ CHECK: vadd.i8 d16, d17, d16 @ encoding: [0xa0,0x08,0x41,0xef]
+@ CHECK: vadd.i8 d16, d17, d16 @ encoding: [0x41,0xef,0xa0,0x08]
 vadd.i8 d16, d17, d16
-@ CHECK: vadd.i16 d16, d17, d16 @ encoding: [0xa0,0x08,0x51,0xef]
+@ CHECK: vadd.i16 d16, d17, d16 @ encoding: [0x51,0xef,0xa0,0x08]
 vadd.i16 d16, d17, d16
-@ CHECK: vadd.i64 d16, d17, d16 @ encoding: [0xa0,0x08,0x71,0xef]
+@ CHECK: vadd.i64 d16, d17, d16 @ encoding: [0x71,0xef,0xa0,0x08]
 vadd.i64 d16, d17, d16
-@ CHECK: vadd.i32 d16, d17, d16 @ encoding: [0xa0,0x08,0x61,0xef]
+@ CHECK: vadd.i32 d16, d17, d16 @ encoding: [0x61,0xef,0xa0,0x08]
 vadd.i32 d16, d17, d16
-@ CHECK: vadd.f32 d16, d16, d17 @ encoding: [0xa1,0x0d,0x40,0xef]
+@ CHECK: vadd.f32 d16, d16, d17 @ encoding: [0x40,0xef,0xa1,0x0d]
 vadd.f32 d16, d16, d17
-@ CHECK: vadd.f32 q8, q8, q9 @ encoding: [0xe2,0x0d,0x40,0xef]
+@ CHECK: vadd.f32 q8, q8, q9 @ encoding: [0x40,0xef,0xe2,0x0d]
 vadd.f32 q8, q8, q9

-@ CHECK: vaddl.s8 q8, d17, d16 @ encoding: [0xa0,0x00,0xc1,0xef]
+@ CHECK: vaddl.s8 q8, d17, d16 @ encoding: [0xc1,0xef,0xa0,0x00]
 vaddl.s8 q8, d17, d16
-@ CHECK: vaddl.s16 q8, d17, d16 @ encoding: [0xa0,0x00,0xd1,0xef]
+@ CHECK: vaddl.s16 q8, d17, d16 @ encoding: [0xd1,0xef,0xa0,0x00]
 vaddl.s16 q8, d17, d16
-@ CHECK: vaddl.s32 q8, d17, d16 @ encoding: [0xa0,0x00,0xe1,0xef]
+@ CHECK: vaddl.s32 q8, d17, d16 @ encoding: [0xe1,0xef,0xa0,0x00]
 vaddl.s32 q8, d17, d16
-@ CHECK: vaddl.u8 q8, d17, d16 @ encoding: [0xa0,0x00,0xc1,0xff]
+@ CHECK: vaddl.u8 q8, d17, d16 @ encoding: [0xc1,0xff,0xa0,0x00]
 vaddl.u8 q8, d17, d16
-@ CHECK: vaddl.u16 q8, d17, d16 @ encoding: [0xa0,0x00,0xd1,0xff]
+@ CHECK: vaddl.u16 q8, d17, d16 @ encoding: [0xd1,0xff,0xa0,0x00]
 vaddl.u16 q8, d17, d16
-@ CHECK: vaddl.u32 q8, d17, d16 @ encoding: [0xa0,0x00,0xe1,0xff]
+@ CHECK: vaddl.u32 q8, d17, d16 @ encoding: [0xe1,0xff,0xa0,0x00]
 vaddl.u32 q8, d17, d16

-@ CHECK: vaddw.s8 q8, q8, d18 @ encoding: [0xa2,0x01,0xc0,0xef]
+@ CHECK: vaddw.s8 q8, q8, d18 @ encoding: [0xc0,0xef,0xa2,0x01]
 vaddw.s8 q8, q8, d18
-@ CHECK: vaddw.s16 q8, q8, d18 @ encoding: [0xa2,0x01,0xd0,0xef]
+@ CHECK: vaddw.s16 q8, q8, d18 @ encoding: [0xd0,0xef,0xa2,0x01]
 vaddw.s16 q8, q8, d18
-@ CHECK: vaddw.s32 q8, q8, d18 @ encoding: [0xa2,0x01,0xe0,0xef]
+@ CHECK: vaddw.s32 q8, q8, d18 @ encoding: [0xe0,0xef,0xa2,0x01]
 vaddw.s32 q8, q8, d18
-@ CHECK: vaddw.u8 q8, q8, d18 @ encoding: [0xa2,0x01,0xc0,0xff]
+@ CHECK: vaddw.u8 q8, q8, d18 @ encoding: [0xc0,0xff,0xa2,0x01]
 vaddw.u8 q8, q8, d18
-@ CHECK: vaddw.u16 q8, q8, d18 @ encoding: [0xa2,0x01,0xd0,0xff]
+@ CHECK: vaddw.u16 q8, q8, d18 @ encoding: [0xd0,0xff,0xa2,0x01]
 vaddw.u16 q8, q8, d18
-@ CHECK: vaddw.u32 q8, q8, d18 @ encoding: [0xa2,0x01,0xe0,0xff]
+@ CHECK: vaddw.u32 q8, q8, d18 @ encoding: [0xe0,0xff,0xa2,0x01]
 vaddw.u32 q8, q8, d18

-@ CHECK: vhadd.s8 d16, d16, d17 @ encoding: [0xa1,0x00,0x40,0xef]
+@ CHECK: vhadd.s8 d16, d16, d17 @ encoding: [0x40,0xef,0xa1,0x00]
 vhadd.s8 d16, d16, d17
-@ CHECK: vhadd.s16 d16, d16, d17 @ encoding: [0xa1,0x00,0x50,0xef]
+@ CHECK: vhadd.s16 d16, d16, d17 @ encoding: [0x50,0xef,0xa1,0x00]
 vhadd.s16 d16, d16, d17
-@ CHECK: vhadd.s32 d16, d16, d17 @ encoding: [0xa1,0x00,0x60,0xef]
+@ CHECK: vhadd.s32 d16, d16, d17 @ encoding: [0x60,0xef,0xa1,0x00]
 vhadd.s32 d16, d16, d17
-@ CHECK: vhadd.u8 d16, d16, d17 @ encoding: [0xa1,0x00,0x40,0xff]
+@ CHECK: vhadd.u8 d16, d16, d17 @ encoding: [0x40,0xff,0xa1,0x00]
 vhadd.u8 d16, d16, d17
-@ CHECK: vhadd.u16 d16, d16, d17 @ encoding: [0xa1,0x00,0x50,0xff]
+@ CHECK: vhadd.u16 d16, d16, d17 @ encoding: [0x50,0xff,0xa1,0x00]
 vhadd.u16 d16, d16, d17
-@ CHECK: vhadd.u32 d16, d16, d17 @ encoding: [0xa1,0x00,0x60,0xff]
+@ CHECK: vhadd.u32 d16, d16, d17 @ encoding: [0x60,0xff,0xa1,0x00]
 vhadd.u32 d16, d16, d17
-@ CHECK: vhadd.s8 q8, q8, q9 @ encoding: [0xe2,0x00,0x40,0xef]
+@ CHECK: vhadd.s8 q8, q8, q9 @ encoding: [0x40,0xef,0xe2,0x00]
 vhadd.s8 q8, q8, q9
-@ CHECK: vhadd.s16 q8, q8, q9 @ encoding: [0xe2,0x00,0x50,0xef]
+@ CHECK: vhadd.s16 q8, q8, q9 @ encoding: [0x50,0xef,0xe2,0x00]
 vhadd.s16 q8, q8, q9
-@ CHECK: vhadd.s32 q8, q8, q9 @ encoding: [0xe2,0x00,0x60,0xef]
+@ CHECK: vhadd.s32 q8, q8, q9 @ encoding: [0x60,0xef,0xe2,0x00]
 vhadd.s32 q8, q8, q9
-@ CHECK: vhadd.u8 q8, q8, q9 @ encoding: [0xe2,0x00,0x40,0xff]
+@ CHECK: vhadd.u8 q8, q8, q9 @ encoding: [0x40,0xff,0xe2,0x00]
 vhadd.u8 q8, q8, q9
-@ CHECK: vhadd.u16 q8, q8, q9 @ encoding: [0xe2,0x00,0x50,0xff]
+@ CHECK: vhadd.u16 q8, q8, q9 @ encoding: [0x50,0xff,0xe2,0x00]
 vhadd.u16 q8, q8, q9
-@ CHECK: vhadd.u32 q8, q8, q9 @ encoding: [0xe2,0x00,0x60,0xff]
+@ CHECK: vhadd.u32 q8, q8, q9 @ encoding: [0x60,0xff,0xe2,0x00]
 vhadd.u32 q8, q8, q9

-@ CHECK: vrhadd.s8 d16, d16, d17 @ encoding: [0xa1,0x01,0x40,0xef]
+@ CHECK: vrhadd.s8 d16, d16, d17 @ encoding: [0x40,0xef,0xa1,0x01]
 vrhadd.s8 d16, d16, d17
-@ CHECK: vrhadd.s16 d16, d16, d17 @ encoding: [0xa1,0x01,0x50,0xef]
+@ CHECK: vrhadd.s16 d16, d16, d17 @ encoding: [0x50,0xef,0xa1,0x01]
 vrhadd.s16 d16, d16, d17
-@ CHECK: vrhadd.s32 d16, d16, d17 @ encoding: [0xa1,0x01,0x60,0xef]
+@ CHECK: vrhadd.s32 d16, d16, d17 @ encoding: [0x60,0xef,0xa1,0x01]
 vrhadd.s32 d16, d16, d17
-@ CHECK: vrhadd.u8 d16, d16, d17 @ encoding: [0xa1,0x01,0x40,0xff]
+@ CHECK: vrhadd.u8 d16, d16, d17 @ encoding: [0x40,0xff,0xa1,0x01]
 vrhadd.u8 d16, d16, d17
-@ CHECK: vrhadd.u16 d16, d16, d17 @ encoding: [0xa1,0x01,0x50,0xff]
+@ CHECK: vrhadd.u16 d16, d16, d17 @ encoding: [0x50,0xff,0xa1,0x01]
 vrhadd.u16 d16, d16, d17
-@ CHECK: vrhadd.u32 d16, d16, d17 @ encoding: [0xa1,0x01,0x60,0xff]
+@ CHECK: vrhadd.u32 d16, d16, d17 @ encoding: [0x60,0xff,0xa1,0x01]
 vrhadd.u32 d16, d16, d17
-@ CHECK: vrhadd.s8 q8, q8, q9 @ encoding: [0xe2,0x01,0x40,0xef]
+@ CHECK: vrhadd.s8 q8, q8, q9 @ encoding: [0x40,0xef,0xe2,0x01]
 vrhadd.s8 q8, q8, q9
-@ CHECK: vrhadd.s16 q8, q8, q9 @ encoding: [0xe2,0x01,0x50,0xef]
+@ CHECK: vrhadd.s16 q8, q8, q9 @ encoding: [0x50,0xef,0xe2,0x01]
 vrhadd.s16 q8, q8, q9
-@ CHECK: vrhadd.s32 q8, q8, q9 @ encoding: [0xe2,0x01,0x60,0xef]
+@ CHECK: vrhadd.s32 q8, q8, q9 @ encoding: [0x60,0xef,0xe2,0x01]
 vrhadd.s32 q8, q8, q9
-@ CHECK: vrhadd.u8 q8, q8, q9 @ encoding: [0xe2,0x01,0x40,0xff]
+@ CHECK: vrhadd.u8 q8, q8, q9 @ encoding: [0x40,0xff,0xe2,0x01]
 vrhadd.u8 q8, q8, q9
-@ CHECK: vrhadd.u16 q8, q8, q9 @ encoding: [0xe2,0x01,0x50,0xff]
+@ CHECK: vrhadd.u16 q8, q8, q9 @ encoding: [0x50,0xff,0xe2,0x01]
 vrhadd.u16 q8, q8, q9
-@ CHECK: vrhadd.u32 q8, q8, q9 @ encoding: [0xe2,0x01,0x60,0xff]
+@ CHECK: vrhadd.u32 q8, q8, q9 @ encoding: [0x60,0xff,0xe2,0x01]
 vrhadd.u32 q8, q8, q9

-@ CHECK: vqadd.s8 d16, d16, d17 @ encoding: [0xb1,0x00,0x40,0xef]
+@ CHECK: vqadd.s8 d16, d16, d17 @ encoding: [0x40,0xef,0xb1,0x00]
 vqadd.s8 d16, d16, d17
-@ CHECK: vqadd.s16 d16, d16, d17 @ encoding: [0xb1,0x00,0x50,0xef]
+@ CHECK: vqadd.s16 d16, d16, d17 @ encoding: [0x50,0xef,0xb1,0x00]
 vqadd.s16 d16, d16, d17
-@ CHECK: vqadd.s32 d16, d16, d17 @ encoding: [0xb1,0x00,0x60,0xef]
+@ CHECK: vqadd.s32 d16, d16, d17 @ encoding: [0x60,0xef,0xb1,0x00]
 vqadd.s32 d16, d16, d17
-@ CHECK: vqadd.s64 d16, d16, d17 @ encoding: [0xb1,0x00,0x70,0xef]
+@ CHECK: vqadd.s64 d16, d16, d17 @ encoding: [0x70,0xef,0xb1,0x00]
 vqadd.s64 d16, d16, d17
-@ CHECK: vqadd.u8 d16, d16, d17 @ encoding: [0xb1,0x00,0x40,0xff]
+@ CHECK: vqadd.u8 d16, d16, d17 @ encoding: [0x40,0xff,0xb1,0x00]
 vqadd.u8 d16, d16, d17
-@ CHECK: vqadd.u16 d16, d16, d17 @ encoding: [0xb1,0x00,0x50,0xff]
+@ CHECK: vqadd.u16 d16, d16, d17 @ encoding: [0x50,0xff,0xb1,0x00]
 vqadd.u16 d16, d16, d17
-@ CHECK: vqadd.u32 d16, d16, d17 @ encoding: [0xb1,0x00,0x60,0xff]
+@ CHECK: vqadd.u32 d16, d16, d17 @ encoding: [0x60,0xff,0xb1,0x00]
 vqadd.u32 d16, d16, d17
-@ CHECK: vqadd.u64 d16, d16, d17 @ encoding: [0xb1,0x00,0x70,0xff]
+@ CHECK: vqadd.u64 d16, d16, d17 @ encoding: [0x70,0xff,0xb1,0x00]
 vqadd.u64 d16, d16, d17
-@ CHECK: vqadd.s8 q8, q8, q9 @ encoding: [0xf2,0x00,0x40,0xef]
+@ CHECK: vqadd.s8 q8, q8, q9 @ encoding: [0x40,0xef,0xf2,0x00]
 vqadd.s8 q8, q8, q9
-@ CHECK: vqadd.s16 q8, q8, q9 @ encoding: [0xf2,0x00,0x50,0xef]
+@ CHECK: vqadd.s16 q8, q8, q9 @ encoding: [0x50,0xef,0xf2,0x00]
 vqadd.s16 q8, q8, q9
-@ CHECK: vqadd.s32 q8, q8, q9 @ encoding: [0xf2,0x00,0x60,0xef]
+@ CHECK: vqadd.s32 q8, q8, q9 @ encoding: [0x60,0xef,0xf2,0x00]
 vqadd.s32 q8, q8, q9
-@ CHECK: vqadd.s64 q8, q8, q9 @ encoding: [0xf2,0x00,0x70,0xef]
+@ CHECK: vqadd.s64 q8, q8, q9 @ encoding: [0x70,0xef,0xf2,0x00]
 vqadd.s64 q8, q8, q9
-@ CHECK: vqadd.u8 q8, q8, q9 @ encoding: [0xf2,0x00,0x40,0xff]
+@ CHECK: vqadd.u8 q8, q8, q9 @ encoding: [0x40,0xff,0xf2,0x00]
 vqadd.u8 q8, q8, q9
-@ CHECK: vqadd.u16 q8, q8, q9 @ encoding: [0xf2,0x00,0x50,0xff]
+@ CHECK: vqadd.u16 q8, q8, q9 @ encoding: [0x50,0xff,0xf2,0x00]
 vqadd.u16 q8, q8, q9
-@ CHECK: vqadd.u32 q8, q8, q9 @ encoding: [0xf2,0x00,0x60,0xff]
+@ CHECK: vqadd.u32 q8, q8, q9 @ encoding: [0x60,0xff,0xf2,0x00]
 vqadd.u32 q8, q8, q9
-@ CHECK: vqadd.u64 q8, q8, q9 @ encoding: [0xf2,0x00,0x70,0xff]
+@ CHECK: vqadd.u64 q8, q8, q9 @ encoding: [0x70,0xff,0xf2,0x00]
 vqadd.u64 q8, q8, q9

-@ CHECK: vaddhn.i16 d16, q8, q9 @ encoding: [0xa2,0x04,0xc0,0xef]
+@ CHECK: vaddhn.i16 d16, q8, q9 @ encoding: [0xc0,0xef,0xa2,0x04]
 vaddhn.i16 d16, q8, q9
-@ CHECK: vaddhn.i32 d16, q8, q9 @ encoding: [0xa2,0x04,0xd0,0xef]
+@ CHECK: vaddhn.i32 d16, q8, q9 @ encoding: [0xd0,0xef,0xa2,0x04]
 vaddhn.i32 d16, q8, q9
-@ CHECK: vaddhn.i64 d16, q8, q9 @ encoding: [0xa2,0x04,0xe0,0xef]
+@ CHECK: vaddhn.i64 d16, q8, q9 @ encoding: [0xe0,0xef,0xa2,0x04]
 vaddhn.i64 d16, q8, q9
-@ CHECK: vraddhn.i16 d16, q8, q9 @ encoding: [0xa2,0x04,0xc0,0xff]
+@ CHECK: vraddhn.i16 d16, q8, q9 @ encoding: [0xc0,0xff,0xa2,0x04]
 vraddhn.i16 d16, q8, q9
-@ CHECK: vraddhn.i32 d16, q8, q9 @ encoding: [0xa2,0x04,0xd0,0xff]
+@ CHECK: vraddhn.i32 d16, q8, q9 @ encoding: [0xd0,0xff,0xa2,0x04]
 vraddhn.i32 d16, q8, q9
-@ CHECK: vraddhn.i64 d16, q8, q9 @ encoding: [0xa2,0x04,0xe0,0xff]
+@ CHECK: vraddhn.i64 d16, q8, q9 @ encoding: [0xe0,0xff,0xa2,0x04]
 vraddhn.i64 d16, q8, q9
@@ -2,35 +2,35 @@

 .code 16

-@ CHECK: vcvt.s32.f32 d16, d16 @ encoding: [0x20,0x07,0xfb,0xff]
+@ CHECK: vcvt.s32.f32 d16, d16 @ encoding: [0xfb,0xff,0x20,0x07]
 vcvt.s32.f32 d16, d16
-@ CHECK: vcvt.u32.f32 d16, d16 @ encoding: [0xa0,0x07,0xfb,0xff]
+@ CHECK: vcvt.u32.f32 d16, d16 @ encoding: [0xfb,0xff,0xa0,0x07]
 vcvt.u32.f32 d16, d16
-@ CHECK: vcvt.f32.s32 d16, d16 @ encoding: [0x20,0x06,0xfb,0xff]
+@ CHECK: vcvt.f32.s32 d16, d16 @ encoding: [0xfb,0xff,0x20,0x06]
 vcvt.f32.s32 d16, d16
-@ CHECK: vcvt.f32.u32 d16, d16 @ encoding: [0xa0,0x06,0xfb,0xff]
+@ CHECK: vcvt.f32.u32 d16, d16 @ encoding: [0xfb,0xff,0xa0,0x06]
 vcvt.f32.u32 d16, d16
-@ CHECK: vcvt.s32.f32 q8, q8 @ encoding: [0x60,0x07,0xfb,0xff]
+@ CHECK: vcvt.s32.f32 q8, q8 @ encoding: [0xfb,0xff,0x60,0x07]
 vcvt.s32.f32 q8, q8
-@ CHECK: vcvt.u32.f32 q8, q8 @ encoding: [0xe0,0x07,0xfb,0xff]
+@ CHECK: vcvt.u32.f32 q8, q8 @ encoding: [0xfb,0xff,0xe0,0x07]
 vcvt.u32.f32 q8, q8
-@ CHECK: vcvt.f32.s32 q8, q8 @ encoding: [0x60,0x06,0xfb,0xff]
+@ CHECK: vcvt.f32.s32 q8, q8 @ encoding: [0xfb,0xff,0x60,0x06]
 vcvt.f32.s32 q8, q8
-@ CHECK: vcvt.f32.u32 q8, q8 @ encoding: [0xe0,0x06,0xfb,0xff]
+@ CHECK: vcvt.f32.u32 q8, q8 @ encoding: [0xfb,0xff,0xe0,0x06]
 vcvt.f32.u32 q8, q8
-@ CHECK: vcvt.s32.f32 d16, d16, #1 @ encoding: [0x30,0x0f,0xff,0xef]
+@ CHECK: vcvt.s32.f32 d16, d16, #1 @ encoding: [0xff,0xef,0x30,0x0f]
 vcvt.s32.f32 d16, d16, #1
-@ CHECK: vcvt.u32.f32 d16, d16, #1 @ encoding: [0x30,0x0f,0xff,0xff]
+@ CHECK: vcvt.u32.f32 d16, d16, #1 @ encoding: [0xff,0xff,0x30,0x0f]
 vcvt.u32.f32 d16, d16, #1
-@ CHECK: vcvt.f32.s32 d16, d16, #1 @ encoding: [0x30,0x0e,0xff,0xef]
+@ CHECK: vcvt.f32.s32 d16, d16, #1 @ encoding: [0xff,0xef,0x30,0x0e]
 vcvt.f32.s32 d16, d16, #1
-@ CHECK: vcvt.f32.u32 d16, d16, #1 @ encoding: [0x30,0x0e,0xff,0xff]
+@ CHECK: vcvt.f32.u32 d16, d16, #1 @ encoding: [0xff,0xff,0x30,0x0e]
 vcvt.f32.u32 d16, d16, #1
-@ CHECK: vcvt.s32.f32 q8, q8, #1 @ encoding: [0x70,0x0f,0xff,0xef]
+@ CHECK: vcvt.s32.f32 q8, q8, #1 @ encoding: [0xff,0xef,0x70,0x0f]
 vcvt.s32.f32 q8, q8, #1
-@ CHECK: vcvt.u32.f32 q8, q8, #1 @ encoding: [0x70,0x0f,0xff,0xff]
+@ CHECK: vcvt.u32.f32 q8, q8, #1 @ encoding: [0xff,0xff,0x70,0x0f]
 vcvt.u32.f32 q8, q8, #1
-@ CHECK: vcvt.f32.s32 q8, q8, #1 @ encoding: [0x70,0x0e,0xff,0xef]
+@ CHECK: vcvt.f32.s32 q8, q8, #1 @ encoding: [0xff,0xef,0x70,0x0e]
 vcvt.f32.s32 q8, q8, #1
-@ CHECK: vcvt.f32.u32 q8, q8, #1 @ encoding: [0x70,0x0e,0xff,0xff]
+@ CHECK: vcvt.f32.u32 q8, q8, #1 @ encoding: [0xff,0xff,0x70,0x0e]
 vcvt.f32.u32 q8, q8, #1
@@ -2,59 +2,59 @@

 .code 16

-@ CHECK: vmin.s8 d16, d16, d17 @ encoding: [0xb1,0x06,0x40,0xef]
+@ CHECK: vmin.s8 d16, d16, d17 @ encoding: [0x40,0xef,0xb1,0x06]
 vmin.s8 d16, d16, d17
-@ CHECK: vmin.s16 d16, d16, d17 @ encoding: [0xb1,0x06,0x50,0xef]
+@ CHECK: vmin.s16 d16, d16, d17 @ encoding: [0x50,0xef,0xb1,0x06]
 vmin.s16 d16, d16, d17
-@ CHECK: vmin.s32 d16, d16, d17 @ encoding: [0xb1,0x06,0x60,0xef]
+@ CHECK: vmin.s32 d16, d16, d17 @ encoding: [0x60,0xef,0xb1,0x06]
 vmin.s32 d16, d16, d17
-@ CHECK: vmin.u8 d16, d16, d17 @ encoding: [0xb1,0x06,0x40,0xff]
+@ CHECK: vmin.u8 d16, d16, d17 @ encoding: [0x40,0xff,0xb1,0x06]
 vmin.u8 d16, d16, d17
-@ CHECK: vmin.u16 d16, d16, d17 @ encoding: [0xb1,0x06,0x50,0xff]
+@ CHECK: vmin.u16 d16, d16, d17 @ encoding: [0x50,0xff,0xb1,0x06]
 vmin.u16 d16, d16, d17
-@ CHECK: vmin.u32 d16, d16, d17 @ encoding: [0xb1,0x06,0x60,0xff]
+@ CHECK: vmin.u32 d16, d16, d17 @ encoding: [0x60,0xff,0xb1,0x06]
 vmin.u32 d16, d16, d17
-@ CHECK: vmin.f32 d16, d16, d17 @ encoding: [0xa1,0x0f,0x60,0xef]
+@ CHECK: vmin.f32 d16, d16, d17 @ encoding: [0x60,0xef,0xa1,0x0f]
 vmin.f32 d16, d16, d17
-@ CHECK: vmin.s8 q8, q8, q9 @ encoding: [0xf2,0x06,0x40,0xef]
+@ CHECK: vmin.s8 q8, q8, q9 @ encoding: [0x40,0xef,0xf2,0x06]
 vmin.s8 q8, q8, q9
-@ CHECK: vmin.s16 q8, q8, q9 @ encoding: [0xf2,0x06,0x50,0xef]
+@ CHECK: vmin.s16 q8, q8, q9 @ encoding: [0x50,0xef,0xf2,0x06]
 vmin.s16 q8, q8, q9
-@ CHECK: vmin.s32 q8, q8, q9 @ encoding: [0xf2,0x06,0x60,0xef]
+@ CHECK: vmin.s32 q8, q8, q9 @ encoding: [0x60,0xef,0xf2,0x06]
 vmin.s32 q8, q8, q9
-@ CHECK: vmin.u8 q8, q8, q9 @ encoding: [0xf2,0x06,0x40,0xff]
+@ CHECK: vmin.u8 q8, q8, q9 @ encoding: [0x40,0xff,0xf2,0x06]
 vmin.u8 q8, q8, q9
-@ CHECK: vmin.u16 q8, q8, q9 @ encoding: [0xf2,0x06,0x50,0xff]
+@ CHECK: vmin.u16 q8, q8, q9 @ encoding: [0x50,0xff,0xf2,0x06]
 vmin.u16 q8, q8, q9
-@ CHECK: vmin.u32 q8, q8, q9 @ encoding: [0xf2,0x06,0x60,0xff]
+@ CHECK: vmin.u32 q8, q8, q9 @ encoding: [0x60,0xff,0xf2,0x06]
 vmin.u32 q8, q8, q9
-@ CHECK: vmin.f32 q8, q8, q9 @ encoding: [0xe2,0x0f,0x60,0xef]
+@ CHECK: vmin.f32 q8, q8, q9 @ encoding: [0x60,0xef,0xe2,0x0f]
 vmin.f32 q8, q8, q9
-@ CHECK: vmax.s8 d16, d16, d17 @ encoding: [0xa1,0x06,0x40,0xef]
+@ CHECK: vmax.s8 d16, d16, d17 @ encoding: [0x40,0xef,0xa1,0x06]
 vmax.s8 d16, d16, d17
-@ CHECK: vmax.s16 d16, d16, d17 @ encoding: [0xa1,0x06,0x50,0xef]
+@ CHECK: vmax.s16 d16, d16, d17 @ encoding: [0x50,0xef,0xa1,0x06]
 vmax.s16 d16, d16, d17
-@ CHECK: vmax.s32 d16, d16, d17 @ encoding: [0xa1,0x06,0x60,0xef]
+@ CHECK: vmax.s32 d16, d16, d17 @ encoding: [0x60,0xef,0xa1,0x06]
 vmax.s32 d16, d16, d17
-@ CHECK: vmax.u8 d16, d16, d17 @ encoding: [0xa1,0x06,0x40,0xff]
+@ CHECK: vmax.u8 d16, d16, d17 @ encoding: [0x40,0xff,0xa1,0x06]
 vmax.u8 d16, d16, d17
-@ CHECK: vmax.u16 d16, d16, d17 @ encoding: [0xa1,0x06,0x50,0xff]
+@ CHECK: vmax.u16 d16, d16, d17 @ encoding: [0x50,0xff,0xa1,0x06]
 vmax.u16 d16, d16, d17
-@ CHECK: vmax.u32 d16, d16, d17 @ encoding: [0xa1,0x06,0x60,0xff]
+@ CHECK: vmax.u32 d16, d16, d17 @ encoding: [0x60,0xff,0xa1,0x06]
 vmax.u32 d16, d16, d17
-@ CHECK: vmax.f32 d16, d16, d17 @ encoding: [0xa1,0x0f,0x40,0xef]
+@ CHECK: vmax.f32 d16, d16, d17 @ encoding: [0x40,0xef,0xa1,0x0f]
 vmax.f32 d16, d16, d17
-@ CHECK: vmax.s8 q8, q8, q9 @ encoding: [0xe2,0x06,0x40,0xef]
+@ CHECK: vmax.s8 q8, q8, q9 @ encoding: [0x40,0xef,0xe2,0x06]
 vmax.s8 q8, q8, q9
-@ CHECK: vmax.s16 q8, q8, q9 @ encoding: [0xe2,0x06,0x50,0xef]
+@ CHECK: vmax.s16 q8, q8, q9 @ encoding: [0x50,0xef,0xe2,0x06]
 vmax.s16 q8, q8, q9
-@ CHECK: vmax.s32 q8, q8, q9 @ encoding: [0xe2,0x06,0x60,0xef]
+@ CHECK: vmax.s32 q8, q8, q9 @ encoding: [0x60,0xef,0xe2,0x06]
 vmax.s32 q8, q8, q9
-@ CHECK: vmax.u8 q8, q8, q9 @ encoding: [0xe2,0x06,0x40,0xff]
+@ CHECK: vmax.u8 q8, q8, q9 @ encoding: [0x40,0xff,0xe2,0x06]
 vmax.u8 q8, q8, q9
-@ CHECK: vmax.u16 q8, q8, q9 @ encoding: [0xe2,0x06,0x50,0xff]
+@ CHECK: vmax.u16 q8, q8, q9 @ encoding: [0x50,0xff,0xe2,0x06]
 vmax.u16 q8, q8, q9
-@ CHECK: vmax.u32 q8, q8, q9 @ encoding: [0xe2,0x06,0x60,0xff]
+@ CHECK: vmax.u32 q8, q8, q9 @ encoding: [0x60,0xff,0xe2,0x06]
 vmax.u32 q8, q8, q9
-@ CHECK: vmax.f32 q8, q8, q9 @ encoding: [0xe2,0x0f,0x40,0xef]
+@ CHECK: vmax.f32 q8, q8, q9 @ encoding: [0x40,0xef,0xe2,0x0f]
 vmax.f32 q8, q8, q9
@@ -2,57 +2,57 @@

 .code 16

-@ CHECK: vmul.i8 d16, d16, d17 @ encoding: [0xb1,0x09,0x40,0xef]
+@ CHECK: vmul.i8 d16, d16, d17 @ encoding: [0x40,0xef,0xb1,0x09]
 vmul.i8 d16, d16, d17
-@ CHECK: vmul.i16 d16, d16, d17 @ encoding: [0xb1,0x09,0x50,0xef]
+@ CHECK: vmul.i16 d16, d16, d17 @ encoding: [0x50,0xef,0xb1,0x09]
 vmul.i16 d16, d16, d17
-@ CHECK: vmul.i32 d16, d16, d17 @ encoding: [0xb1,0x09,0x60,0xef]
+@ CHECK: vmul.i32 d16, d16, d17 @ encoding: [0x60,0xef,0xb1,0x09]
 vmul.i32 d16, d16, d17
-@ CHECK: vmul.f32 d16, d16, d17 @ encoding: [0xb1,0x0d,0x40,0xff]
+@ CHECK: vmul.f32 d16, d16, d17 @ encoding: [0x40,0xff,0xb1,0x0d]
 vmul.f32 d16, d16, d17
-@ CHECK: vmul.i8 q8, q8, q9 @ encoding: [0xf2,0x09,0x40,0xef]
+@ CHECK: vmul.i8 q8, q8, q9 @ encoding: [0x40,0xef,0xf2,0x09]
 vmul.i8 q8, q8, q9
-@ CHECK: vmul.i16 q8, q8, q9 @ encoding: [0xf2,0x09,0x50,0xef]
+@ CHECK: vmul.i16 q8, q8, q9 @ encoding: [0x50,0xef,0xf2,0x09]
 vmul.i16 q8, q8, q9
-@ CHECK: vmul.i32 q8, q8, q9 @ encoding: [0xf2,0x09,0x60,0xef]
+@ CHECK: vmul.i32 q8, q8, q9 @ encoding: [0x60,0xef,0xf2,0x09]
 vmul.i32 q8, q8, q9
-@ CHECK: vmul.f32 q8, q8, q9 @ encoding: [0xf2,0x0d,0x40,0xff]
+@ CHECK: vmul.f32 q8, q8, q9 @ encoding: [0x40,0xff,0xf2,0x0d]
 vmul.f32 q8, q8, q9
-@ CHECK: vmul.p8 d16, d16, d17 @ encoding: [0xb1,0x09,0x40,0xff]
+@ CHECK: vmul.p8 d16, d16, d17 @ encoding: [0x40,0xff,0xb1,0x09]
 vmul.p8 d16, d16, d17
-@ CHECK: vmul.p8 q8, q8, q9 @ encoding: [0xf2,0x09,0x40,0xff]
+@ CHECK: vmul.p8 q8, q8, q9 @ encoding: [0x40,0xff,0xf2,0x09]
 vmul.p8 q8, q8, q9
-@ CHECK: vqdmulh.s16 d16, d16, d17 @ encoding: [0xa1,0x0b,0x50,0xef]
+@ CHECK: vqdmulh.s16 d16, d16, d17 @ encoding: [0x50,0xef,0xa1,0x0b]
 vqdmulh.s16 d16, d16, d17
-@ CHECK: vqdmulh.s32 d16, d16, d17 @ encoding: [0xa1,0x0b,0x60,0xef]
+@ CHECK: vqdmulh.s32 d16, d16, d17 @ encoding: [0x60,0xef,0xa1,0x0b]
 vqdmulh.s32 d16, d16, d17
-@ CHECK: vqdmulh.s16 q8, q8, q9 @ encoding: [0xe2,0x0b,0x50,0xef]
+@ CHECK: vqdmulh.s16 q8, q8, q9 @ encoding: [0x50,0xef,0xe2,0x0b]
 vqdmulh.s16 q8, q8, q9
-@ CHECK: vqdmulh.s32 q8, q8, q9 @ encoding: [0xe2,0x0b,0x60,0xef]
+@ CHECK: vqdmulh.s32 q8, q8, q9 @ encoding: [0x60,0xef,0xe2,0x0b]
 vqdmulh.s32 q8, q8, q9
-@ CHECK: vqrdmulh.s16 d16, d16, d17 @ encoding: [0xa1,0x0b,0x50,0xff]
+@ CHECK: vqrdmulh.s16 d16, d16, d17 @ encoding: [0x50,0xff,0xa1,0x0b]
 vqrdmulh.s16 d16, d16, d17
-@ CHECK: vqrdmulh.s32 d16, d16, d17 @ encoding: [0xa1,0x0b,0x60,0xff]
+@ CHECK: vqrdmulh.s32 d16, d16, d17 @ encoding: [0x60,0xff,0xa1,0x0b]
 vqrdmulh.s32 d16, d16, d17
-@ CHECK: vqrdmulh.s16 q8, q8, q9 @ encoding: [0xe2,0x0b,0x50,0xff]
+@ CHECK: vqrdmulh.s16 q8, q8, q9 @ encoding: [0x50,0xff,0xe2,0x0b]
 vqrdmulh.s16 q8, q8, q9
-@ CHECK: vqrdmulh.s32 q8, q8, q9 @ encoding: [0xe2,0x0b,0x60,0xff]
+@ CHECK: vqrdmulh.s32 q8, q8, q9 @ encoding: [0x60,0xff,0xe2,0x0b]
 vqrdmulh.s32 q8, q8, q9
-@ CHECK: vmull.s8 q8, d16, d17 @ encoding: [0xa1,0x0c,0xc0,0xef]
+@ CHECK: vmull.s8 q8, d16, d17 @ encoding: [0xc0,0xef,0xa1,0x0c]
 vmull.s8 q8, d16, d17
-@ CHECK: vmull.s16 q8, d16, d17 @ encoding: [0xa1,0x0c,0xd0,0xef]
+@ CHECK: vmull.s16 q8, d16, d17 @ encoding: [0xd0,0xef,0xa1,0x0c]
 vmull.s16 q8, d16, d17
-@ CHECK: vmull.s32 q8, d16, d17 @ encoding: [0xa1,0x0c,0xe0,0xef]
+@ CHECK: vmull.s32 q8, d16, d17 @ encoding: [0xe0,0xef,0xa1,0x0c]
 vmull.s32 q8, d16, d17
-@ CHECK: vmull.u8 q8, d16, d17 @ encoding: [0xa1,0x0c,0xc0,0xff]
+@ CHECK: vmull.u8 q8, d16, d17 @ encoding: [0xc0,0xff,0xa1,0x0c]
 vmull.u8 q8, d16, d17
-@ CHECK: vmull.u16 q8, d16, d17 @ encoding: [0xa1,0x0c,0xd0,0xff]
+@ CHECK: vmull.u16 q8, d16, d17 @ encoding: [0xd0,0xff,0xa1,0x0c]
 vmull.u16 q8, d16, d17
-@ CHECK: vmull.u32 q8, d16, d17 @ encoding: [0xa1,0x0c,0xe0,0xff]
+@ CHECK: vmull.u32 q8, d16, d17 @ encoding: [0xe0,0xff,0xa1,0x0c]
 vmull.u32 q8, d16, d17
-@ CHECK: vmull.p8 q8, d16, d17 @ encoding: [0xa1,0x0e,0xc0,0xef]
+@ CHECK: vmull.p8 q8, d16, d17 @ encoding: [0xc0,0xef,0xa1,0x0e]
 vmull.p8 q8, d16, d17
-@ CHECK: vqdmull.s16 q8, d16, d17 @ encoding: [0xa1,0x0d,0xd0,0xef]
+@ CHECK: vqdmull.s16 q8, d16, d17 @ encoding: [0xd0,0xef,0xa1,0x0d]
 vqdmull.s16 q8, d16, d17
-@ CHECK: vqdmull.s32 q8, d16, d17 @ encoding: [0xa1,0x0d,0xe0,0xef]
+@ CHECK: vqdmull.s32 q8, d16, d17 @ encoding: [0xe0,0xef,0xa1,0x0d]
 vqdmull.s32 q8, d16, d17
@@ -2,31 +2,31 @@

 .code 16

-@ CHECK: vneg.s8 d16, d16 @ encoding: [0xa0,0x03,0xf1,0xff]
+@ CHECK: vneg.s8 d16, d16 @ encoding: [0xf1,0xff,0xa0,0x03]
 vneg.s8 d16, d16
-@ CHECK: vneg.s16 d16, d16 @ encoding: [0xa0,0x03,0xf5,0xff]
+@ CHECK: vneg.s16 d16, d16 @ encoding: [0xf5,0xff,0xa0,0x03]
 vneg.s16 d16, d16
-@ CHECK: vneg.s32 d16, d16 @ encoding: [0xa0,0x03,0xf9,0xff]
+@ CHECK: vneg.s32 d16, d16 @ encoding: [0xf9,0xff,0xa0,0x03]
 vneg.s32 d16, d16
-@ CHECK: vneg.f32 d16, d16 @ encoding: [0xa0,0x07,0xf9,0xff]
+@ CHECK: vneg.f32 d16, d16 @ encoding: [0xf9,0xff,0xa0,0x07]
 vneg.f32 d16, d16
-@ CHECK: vneg.s8 q8, q8 @ encoding: [0xe0,0x03,0xf1,0xff]
+@ CHECK: vneg.s8 q8, q8 @ encoding: [0xf1,0xff,0xe0,0x03]
 vneg.s8 q8, q8
-@ CHECK: vneg.s16 q8, q8 @ encoding: [0xe0,0x03,0xf5,0xff]
+@ CHECK: vneg.s16 q8, q8 @ encoding: [0xf5,0xff,0xe0,0x03]
 vneg.s16 q8, q8
-@ CHECK: vneg.s32 q8, q8 @ encoding: [0xe0,0x03,0xf9,0xff]
+@ CHECK: vneg.s32 q8, q8 @ encoding: [0xf9,0xff,0xe0,0x03]
 vneg.s32 q8, q8
-@ CHECK: vneg.f32 q8, q8 @ encoding: [0xe0,0x07,0xf9,0xff]
+@ CHECK: vneg.f32 q8, q8 @ encoding: [0xf9,0xff,0xe0,0x07]
 vneg.f32 q8, q8
-@ CHECK: vqneg.s8 d16, d16 @ encoding: [0xa0,0x07,0xf0,0xff]
+@ CHECK: vqneg.s8 d16, d16 @ encoding: [0xf0,0xff,0xa0,0x07]
 vqneg.s8 d16, d16
-@ CHECK: vqneg.s16 d16, d16 @ encoding: [0xa0,0x07,0xf4,0xff]
+@ CHECK: vqneg.s16 d16, d16 @ encoding: [0xf4,0xff,0xa0,0x07]
 vqneg.s16 d16, d16
-@ CHECK: vqneg.s32 d16, d16 @ encoding: [0xa0,0x07,0xf8,0xff]
+@ CHECK: vqneg.s32 d16, d16 @ encoding: [0xf8,0xff,0xa0,0x07]
 vqneg.s32 d16, d16
-@ CHECK: vqneg.s8 q8, q8 @ encoding: [0xe0,0x07,0xf0,0xff]
+@ CHECK: vqneg.s8 q8, q8 @ encoding: [0xf0,0xff,0xe0,0x07]
 vqneg.s8 q8, q8
-@ CHECK: vqneg.s16 q8, q8 @ encoding: [0xe0,0x07,0xf4,0xff]
+@ CHECK: vqneg.s16 q8, q8 @ encoding: [0xf4,0xff,0xe0,0x07]
 vqneg.s16 q8, q8
-@ CHECK: vqneg.s32 q8, q8 @ encoding: [0xe0,0x07,0xf8,0xff]
+@ CHECK: vqneg.s32 q8, q8 @ encoding: [0xf8,0xff,0xe0,0x07]
 vqneg.s32 q8, q8
@@ -2,27 +2,27 @@

 .code 16

-@ CHECK: vrecpe.u32 d16, d16 @ encoding: [0x20,0x04,0xfb,0xff]
+@ CHECK: vrecpe.u32 d16, d16 @ encoding: [0xfb,0xff,0x20,0x04]
 vrecpe.u32 d16, d16
-@ CHECK: vrecpe.u32 q8, q8 @ encoding: [0x60,0x04,0xfb,0xff]
+@ CHECK: vrecpe.u32 q8, q8 @ encoding: [0xfb,0xff,0x60,0x04]
 vrecpe.u32 q8, q8
-@ CHECK: vrecpe.f32 d16, d16 @ encoding: [0x20,0x05,0xfb,0xff]
+@ CHECK: vrecpe.f32 d16, d16 @ encoding: [0xfb,0xff,0x20,0x05]
 vrecpe.f32 d16, d16
-@ CHECK: vrecpe.f32 q8, q8 @ encoding: [0x60,0x05,0xfb,0xff]
+@ CHECK: vrecpe.f32 q8, q8 @ encoding: [0xfb,0xff,0x60,0x05]
 vrecpe.f32 q8, q8
-@ CHECK: vrecps.f32 d16, d16, d17 @ encoding: [0xb1,0x0f,0x40,0xef]
+@ CHECK: vrecps.f32 d16, d16, d17 @ encoding: [0x40,0xef,0xb1,0x0f]
 vrecps.f32 d16, d16, d17
-@ CHECK: vrecps.f32 q8, q8, q9 @ encoding: [0xf2,0x0f,0x40,0xef]
+@ CHECK: vrecps.f32 q8, q8, q9 @ encoding: [0x40,0xef,0xf2,0x0f]
 vrecps.f32 q8, q8, q9
-@ CHECK: vrsqrte.u32 d16, d16 @ encoding: [0xa0,0x04,0xfb,0xff]
+@ CHECK: vrsqrte.u32 d16, d16 @ encoding: [0xfb,0xff,0xa0,0x04]
 vrsqrte.u32 d16, d16
-@ CHECK: vrsqrte.u32 q8, q8 @ encoding: [0xe0,0x04,0xfb,0xff]
+@ CHECK: vrsqrte.u32 q8, q8 @ encoding: [0xfb,0xff,0xe0,0x04]
 vrsqrte.u32 q8, q8
-@ CHECK: vrsqrte.f32 d16, d16 @ encoding: [0xa0,0x05,0xfb,0xff]
+@ CHECK: vrsqrte.f32 d16, d16 @ encoding: [0xfb,0xff,0xa0,0x05]
 vrsqrte.f32 d16, d16
-@ CHECK: vrsqrte.f32 q8, q8 @ encoding: [0xe0,0x05,0xfb,0xff]
+@ CHECK: vrsqrte.f32 q8, q8 @ encoding: [0xfb,0xff,0xe0,0x05]
 vrsqrte.f32 q8, q8
-@ CHECK: vrsqrts.f32 d16, d16, d17 @ encoding: [0xb1,0x0f,0x60,0xef]
+@ CHECK: vrsqrts.f32 d16, d16, d17 @ encoding: [0x60,0xef,0xb1,0x0f]
 vrsqrts.f32 d16, d16, d17
-@ CHECK: vrsqrts.f32 q8, q8, q9 @ encoding: [0xf2,0x0f,0x60,0xef]
+@ CHECK: vrsqrts.f32 q8, q8, q9 @ encoding: [0x60,0xef,0xf2,0x0f]
 vrsqrts.f32 q8, q8, q9
@@ -1,26 +1,26 @@
 @ RUN: llvm-mc -mcpu=cortex-a8 -triple thumb-unknown-unknown -show-encoding < %s | FileCheck %s

-@ CHECK: vrev64.8 d16, d16 @ encoding: [0x20,0x00,0xf0,0xff]
+@ CHECK: vrev64.8 d16, d16 @ encoding: [0xf0,0xff,0x20,0x00]
 vrev64.8 d16, d16
-@ CHECK: vrev64.16 d16, d16 @ encoding: [0x20,0x00,0xf4,0xff]
+@ CHECK: vrev64.16 d16, d16 @ encoding: [0xf4,0xff,0x20,0x00]
 vrev64.16 d16, d16
-@ CHECK: vrev64.32 d16, d16 @ encoding: [0x20,0x00,0xf8,0xff]
+@ CHECK: vrev64.32 d16, d16 @ encoding: [0xf8,0xff,0x20,0x00]
 vrev64.32 d16, d16
-@ CHECK: vrev64.8 q8, q8 @ encoding: [0x60,0x00,0xf0,0xff]
+@ CHECK: vrev64.8 q8, q8 @ encoding: [0xf0,0xff,0x60,0x00]
 vrev64.8 q8, q8
-@ CHECK: vrev64.16 q8, q8 @ encoding: [0x60,0x00,0xf4,0xff]
+@ CHECK: vrev64.16 q8, q8 @ encoding: [0xf4,0xff,0x60,0x00]
 vrev64.16 q8, q8
-@ CHECK: vrev64.32 q8, q8 @ encoding: [0x60,0x00,0xf8,0xff]
+@ CHECK: vrev64.32 q8, q8 @ encoding: [0xf8,0xff,0x60,0x00]
 vrev64.32 q8, q8
-@ CHECK: vrev32.8 d16, d16 @ encoding: [0xa0,0x00,0xf0,0xff]
+@ CHECK: vrev32.8 d16, d16 @ encoding: [0xf0,0xff,0xa0,0x00]
 vrev32.8 d16, d16
-@ CHECK: vrev32.16 d16, d16 @ encoding: [0xa0,0x00,0xf4,0xff]
+@ CHECK: vrev32.16 d16, d16 @ encoding: [0xf4,0xff,0xa0,0x00]
 vrev32.16 d16, d16
-@ CHECK: vrev32.8 q8, q8 @ encoding: [0xe0,0x00,0xf0,0xff]
+@ CHECK: vrev32.8 q8, q8 @ encoding: [0xf0,0xff,0xe0,0x00]
 vrev32.8 q8, q8
-@ CHECK: vrev32.16 q8, q8 @ encoding: [0xe0,0x00,0xf4,0xff]
+@ CHECK: vrev32.16 q8, q8 @ encoding: [0xf4,0xff,0xe0,0x00]
 vrev32.16 q8, q8
-@ CHECK: vrev16.8 d16, d16 @ encoding: [0x20,0x01,0xf0,0xff]
+@ CHECK: vrev16.8 d16, d16 @ encoding: [0xf0,0xff,0x20,0x01]
 vrev16.8 d16, d16
-@ CHECK: vrev16.8 q8, q8 @ encoding: [0x60,0x01,0xf0,0xff]
+@ CHECK: vrev16.8 q8, q8 @ encoding: [0xf0,0xff,0x60,0x01]
 vrev16.8 q8, q8
@@ -2,151 +2,151 @@

 .code 16

-@ CHECK: vqshl.s8 d16, d16, d17 @ encoding: [0xb0,0x04,0x41,0xef]
+@ CHECK: vqshl.s8 d16, d16, d17 @ encoding: [0x41,0xef,0xb0,0x04]
 vqshl.s8 d16, d16, d17
-@ CHECK: vqshl.s16 d16, d16, d17 @ encoding: [0xb0,0x04,0x51,0xef]
+@ CHECK: vqshl.s16 d16, d16, d17 @ encoding: [0x51,0xef,0xb0,0x04]
 vqshl.s16 d16, d16, d17
-@ CHECK: vqshl.s32 d16, d16, d17 @ encoding: [0xb0,0x04,0x61,0xef]
+@ CHECK: vqshl.s32 d16, d16, d17 @ encoding: [0x61,0xef,0xb0,0x04]
 vqshl.s32 d16, d16, d17
-@ CHECK: vqshl.s64 d16, d16, d17 @ encoding: [0xb0,0x04,0x71,0xef]
+@ CHECK: vqshl.s64 d16, d16, d17 @ encoding: [0x71,0xef,0xb0,0x04]
 vqshl.s64 d16, d16, d17
-@ CHECK: vqshl.u8 d16, d16, d17 @ encoding: [0xb0,0x04,0x41,0xff]
+@ CHECK: vqshl.u8 d16, d16, d17 @ encoding: [0x41,0xff,0xb0,0x04]
 vqshl.u8 d16, d16, d17
-@ CHECK: vqshl.u16 d16, d16, d17 @ encoding: [0xb0,0x04,0x51,0xff]
+@ CHECK: vqshl.u16 d16, d16, d17 @ encoding: [0x51,0xff,0xb0,0x04]
 vqshl.u16 d16, d16, d17
-@ CHECK: vqshl.u32 d16, d16, d17 @ encoding: [0xb0,0x04,0x61,0xff]
+@ CHECK: vqshl.u32 d16, d16, d17 @ encoding: [0x61,0xff,0xb0,0x04]
 vqshl.u32 d16, d16, d17
-@ CHECK: vqshl.u64 d16, d16, d17 @ encoding: [0xb0,0x04,0x71,0xff]
+@ CHECK: vqshl.u64 d16, d16, d17 @ encoding: [0x71,0xff,0xb0,0x04]
 vqshl.u64 d16, d16, d17
-@ CHECK: vqshl.s8 q8, q8, q9 @ encoding: [0xf0,0x04,0x42,0xef]
+@ CHECK: vqshl.s8 q8, q8, q9 @ encoding: [0x42,0xef,0xf0,0x04]
 vqshl.s8 q8, q8, q9
-@ CHECK: vqshl.s16 q8, q8, q9 @ encoding: [0xf0,0x04,0x52,0xef]
+@ CHECK: vqshl.s16 q8, q8, q9 @ encoding: [0x52,0xef,0xf0,0x04]
 vqshl.s16 q8, q8, q9
-@ CHECK: vqshl.s32 q8, q8, q9 @ encoding: [0xf0,0x04,0x62,0xef]
+@ CHECK: vqshl.s32 q8, q8, q9 @ encoding: [0x62,0xef,0xf0,0x04]
 vqshl.s32 q8, q8, q9
-@ CHECK: vqshl.s64 q8, q8, q9 @ encoding: [0xf0,0x04,0x72,0xef]
+@ CHECK: vqshl.s64 q8, q8, q9 @ encoding: [0x72,0xef,0xf0,0x04]
 vqshl.s64 q8, q8, q9
-@ CHECK: vqshl.u8 q8, q8, q9 @ encoding: [0xf0,0x04,0x42,0xff]
+@ CHECK: vqshl.u8 q8, q8, q9 @ encoding: [0x42,0xff,0xf0,0x04]
 vqshl.u8 q8, q8, q9
-@ CHECK: vqshl.u16 q8, q8, q9 @ encoding: [0xf0,0x04,0x52,0xff]
+@ CHECK: vqshl.u16 q8, q8, q9 @ encoding: [0x52,0xff,0xf0,0x04]
 vqshl.u16 q8, q8, q9
-@ CHECK: vqshl.u32 q8, q8, q9 @ encoding: [0xf0,0x04,0x62,0xff]
+@ CHECK: vqshl.u32 q8, q8, q9 @ encoding: [0x62,0xff,0xf0,0x04]
 vqshl.u32 q8, q8, q9
-@ CHECK: vqshl.u64 q8, q8, q9 @ encoding: [0xf0,0x04,0x72,0xff]
+@ CHECK: vqshl.u64 q8, q8, q9 @ encoding: [0x72,0xff,0xf0,0x04]
 vqshl.u64 q8, q8, q9
-@ CHECK: vqshl.s8 d16, d16, #7 @ encoding: [0x30,0x07,0xcf,0xef]
+@ CHECK: vqshl.s8 d16, d16, #7 @ encoding: [0xcf,0xef,0x30,0x07]
 vqshl.s8 d16, d16, #7
-@ CHECK: vqshl.s16 d16, d16, #15 @ encoding: [0x30,0x07,0xdf,0xef]
+@ CHECK: vqshl.s16 d16, d16, #15 @ encoding: [0xdf,0xef,0x30,0x07]
 vqshl.s16 d16, d16, #15
-@ CHECK: vqshl.s32 d16, d16, #31 @ encoding: [0x30,0x07,0xff,0xef]
+@ CHECK: vqshl.s32 d16, d16, #31 @ encoding: [0xff,0xef,0x30,0x07]
 vqshl.s32 d16, d16, #31
-@ CHECK: vqshl.s64 d16, d16, #63 @ encoding: [0xb0,0x07,0xff,0xef]
+@ CHECK: vqshl.s64 d16, d16, #63 @ encoding: [0xff,0xef,0xb0,0x07]
 vqshl.s64 d16, d16, #63
-@ CHECK: vqshl.u8 d16, d16, #7 @ encoding: [0x30,0x07,0xcf,0xff]
+@ CHECK: vqshl.u8 d16, d16, #7 @ encoding: [0xcf,0xff,0x30,0x07]
 vqshl.u8 d16, d16, #7
-@ CHECK: vqshl.u16 d16, d16, #15 @ encoding: [0x30,0x07,0xdf,0xff]
+@ CHECK: vqshl.u16 d16, d16, #15 @ encoding: [0xdf,0xff,0x30,0x07]
 vqshl.u16 d16, d16, #15
-@ CHECK: vqshl.u32 d16, d16, #31 @ encoding: [0x30,0x07,0xff,0xff]
+@ CHECK: vqshl.u32 d16, d16, #31 @ encoding: [0xff,0xff,0x30,0x07]
 vqshl.u32 d16, d16, #31
-@ CHECK: vqshl.u64 d16, d16, #63 @ encoding: [0xb0,0x07,0xff,0xff]
+@ CHECK: vqshl.u64 d16, d16, #63 @ encoding: [0xff,0xff,0xb0,0x07]
 vqshl.u64 d16, d16, #63
-@ CHECK: vqshlu.s8 d16, d16, #7 @ encoding: [0x30,0x06,0xcf,0xff]
+@ CHECK: vqshlu.s8 d16, d16, #7 @ encoding: [0xcf,0xff,0x30,0x06]
 vqshlu.s8 d16, d16, #7
-@ CHECK: vqshlu.s16 d16, d16, #15 @ encoding: [0x30,0x06,0xdf,0xff]
+@ CHECK: vqshlu.s16 d16, d16, #15 @ encoding: [0xdf,0xff,0x30,0x06]
 vqshlu.s16 d16, d16, #15
-@ CHECK: vqshlu.s32 d16, d16, #31 @ encoding: [0x30,0x06,0xff,0xff]
+@ CHECK: vqshlu.s32 d16, d16, #31 @ encoding: [0xff,0xff,0x30,0x06]
 vqshlu.s32 d16, d16, #31
-@ CHECK: vqshlu.s64 d16, d16, #63 @ encoding: [0xb0,0x06,0xff,0xff]
+@ CHECK: vqshlu.s64 d16, d16, #63 @ encoding: [0xff,0xff,0xb0,0x06]
 vqshlu.s64 d16, d16, #63
-@ CHECK: vqshl.s8 q8, q8, #7 @ encoding: [0x70,0x07,0xcf,0xef]
+@ CHECK: vqshl.s8 q8, q8, #7 @ encoding: [0xcf,0xef,0x70,0x07]
 vqshl.s8 q8, q8, #7
-@ CHECK: vqshl.s16 q8, q8, #15 @ encoding: [0x70,0x07,0xdf,0xef]
+@ CHECK: vqshl.s16 q8, q8, #15 @ encoding: [0xdf,0xef,0x70,0x07]
 vqshl.s16 q8, q8, #15
-@ CHECK: vqshl.s32 q8, q8, #31 @ encoding: [0x70,0x07,0xff,0xef]
+@ CHECK: vqshl.s32 q8, q8, #31 @ encoding: [0xff,0xef,0x70,0x07]
 vqshl.s32 q8, q8, #31
-@ CHECK: vqshl.s64 q8, q8, #63 @ encoding: [0xf0,0x07,0xff,0xef]
+@ CHECK: vqshl.s64 q8, q8, #63 @ encoding: [0xff,0xef,0xf0,0x07]
 vqshl.s64 q8, q8, #63
-@ CHECK: vqshl.u8 q8, q8, #7 @ encoding: [0x70,0x07,0xcf,0xff]
+@ CHECK: vqshl.u8 q8, q8, #7 @ encoding: [0xcf,0xff,0x70,0x07]
 vqshl.u8 q8, q8, #7
-@ CHECK: vqshl.u16 q8, q8, #15 @ encoding: [0x70,0x07,0xdf,0xff]
+@ CHECK: vqshl.u16 q8, q8, #15 @ encoding: [0xdf,0xff,0x70,0x07]
 vqshl.u16 q8, q8, #15
-@ CHECK: vqshl.u32 q8, q8, #31 @ encoding: [0x70,0x07,0xff,0xff]
+@ CHECK: vqshl.u32 q8, q8, #31 @ encoding: [0xff,0xff,0x70,0x07]
 vqshl.u32 q8, q8, #31
-@ CHECK: vqshl.u64 q8, q8, #63 @ encoding: [0xf0,0x07,0xff,0xff]
+@ CHECK: vqshl.u64 q8, q8, #63 @ encoding: [0xff,0xff,0xf0,0x07]
 vqshl.u64 q8, q8, #63
-@ CHECK: vqshlu.s8 q8, q8, #7 @ encoding: [0x70,0x06,0xcf,0xff]
+@ CHECK: vqshlu.s8 q8, q8, #7 @ encoding: [0xcf,0xff,0x70,0x06]
 vqshlu.s8 q8, q8, #7
-@ CHECK: vqshlu.s16 q8, q8, #15 @ encoding: [0x70,0x06,0xdf,0xff]
+@ CHECK: vqshlu.s16 q8, q8, #15 @ encoding: [0xdf,0xff,0x70,0x06]
 vqshlu.s16 q8, q8, #15
-@ CHECK: vqshlu.s32 q8, q8, #31 @ encoding: [0x70,0x06,0xff,0xff]
+@ CHECK: vqshlu.s32 q8, q8, #31 @ encoding: [0xff,0xff,0x70,0x06]
 vqshlu.s32 q8, q8, #31
-@ CHECK: vqshlu.s64 q8, q8, #63 @ encoding: [0xf0,0x06,0xff,0xff]
+@ CHECK: vqshlu.s64 q8, q8, #63 @ encoding: [0xff,0xff,0xf0,0x06]
 vqshlu.s64 q8, q8, #63
-@ CHECK: vqrshl.s8 d16, d16, d17 @ encoding: [0xb0,0x05,0x41,0xef]
+@ CHECK: vqrshl.s8 d16, d16, d17 @ encoding: [0x41,0xef,0xb0,0x05]
 vqrshl.s8 d16, d16, d17
-@ CHECK: vqrshl.s16 d16, d16, d17 @ encoding: [0xb0,0x05,0x51,0xef]
+@ CHECK: vqrshl.s16 d16, d16, d17 @ encoding: [0x51,0xef,0xb0,0x05]
 vqrshl.s16 d16, d16, d17
-@ CHECK: vqrshl.s32 d16, d16, d17 @ encoding: [0xb0,0x05,0x61,0xef]
+@ CHECK: vqrshl.s32 d16, d16, d17 @ encoding: [0x61,0xef,0xb0,0x05]
 vqrshl.s32 d16, d16, d17
-@ CHECK: vqrshl.s64 d16, d16, d17 @ encoding: [0xb0,0x05,0x71,0xef]
+@ CHECK: vqrshl.s64 d16, d16, d17 @ encoding: [0x71,0xef,0xb0,0x05]
 vqrshl.s64 d16, d16, d17
-@ CHECK: vqrshl.u8 d16, d16, d17 @ encoding: [0xb0,0x05,0x41,0xff]
+@ CHECK: vqrshl.u8 d16, d16, d17 @ encoding: [0x41,0xff,0xb0,0x05]
 vqrshl.u8 d16, d16, d17
-@ CHECK: vqrshl.u16 d16, d16, d17 @ encoding: [0xb0,0x05,0x51,0xff]
+@ CHECK: vqrshl.u16 d16, d16, d17 @ encoding: [0x51,0xff,0xb0,0x05]
 vqrshl.u16 d16, d16, d17
-@ CHECK: vqrshl.u32 d16, d16, d17 @ encoding: [0xb0,0x05,0x61,0xff]
+@ CHECK: vqrshl.u32 d16, d16, d17 @ encoding: [0x61,0xff,0xb0,0x05]
 vqrshl.u32 d16, d16, d17
-@ CHECK: vqrshl.u64 d16, d16, d17 @ encoding: [0xb0,0x05,0x71,0xff]
+@ CHECK: vqrshl.u64 d16, d16, d17 @ encoding: [0x71,0xff,0xb0,0x05]
 vqrshl.u64 d16, d16, d17
-@ CHECK: vqrshl.s8 q8, q8, q9 @ encoding: [0xf0,0x05,0x42,0xef]
+@ CHECK: vqrshl.s8 q8, q8, q9 @ encoding: [0x42,0xef,0xf0,0x05]
 vqrshl.s8 q8, q8, q9
-@ CHECK: vqrshl.s16 q8, q8, q9 @ encoding: [0xf0,0x05,0x52,0xef]
+@ CHECK: vqrshl.s16 q8, q8, q9 @ encoding: [0x52,0xef,0xf0,0x05]
 vqrshl.s16 q8, q8, q9
-@ CHECK: vqrshl.s32 q8, q8, q9 @ encoding: [0xf0,0x05,0x62,0xef]
+@ CHECK: vqrshl.s32 q8, q8, q9 @ encoding: [0x62,0xef,0xf0,0x05]
 vqrshl.s32 q8, q8, q9
-@ CHECK: vqrshl.s64 q8, q8, q9 @ encoding: [0xf0,0x05,0x72,0xef]
+@ CHECK: vqrshl.s64 q8, q8, q9 @ encoding: [0x72,0xef,0xf0,0x05]
 vqrshl.s64 q8, q8, q9
-@ CHECK: vqrshl.u8 q8, q8, q9 @ encoding: [0xf0,0x05,0x42,0xff]
+@ CHECK: vqrshl.u8 q8, q8, q9 @ encoding: [0x42,0xff,0xf0,0x05]
 vqrshl.u8 q8, q8, q9
-@ CHECK: vqrshl.u16 q8, q8, q9 @ encoding: [0xf0,0x05,0x52,0xff]
+@ CHECK: vqrshl.u16 q8, q8, q9 @ encoding: [0x52,0xff,0xf0,0x05]
 vqrshl.u16 q8, q8, q9
-@ CHECK: vqrshl.u32 q8, q8, q9 @ encoding: [0xf0,0x05,0x62,0xff]
+@ CHECK: vqrshl.u32 q8, q8, q9 @ encoding: [0x62,0xff,0xf0,0x05]
 vqrshl.u32 q8, q8, q9
-@ CHECK: vqrshl.u64 q8, q8, q9 @ encoding: [0xf0,0x05,0x72,0xff]
+@ CHECK: vqrshl.u64 q8, q8, q9 @ encoding: [0x72,0xff,0xf0,0x05]
 vqrshl.u64 q8, q8, q9
-@ CHECK: vqshrn.s16 d16, q8, #8 @ encoding: [0x30,0x09,0xc8,0xef]
+@ CHECK: vqshrn.s16 d16, q8, #8 @ encoding: [0xc8,0xef,0x30,0x09]
 vqshrn.s16 d16, q8, #8
-@ CHECK: vqshrn.s32 d16, q8, #16 @ encoding: [0x30,0x09,0xd0,0xef]
+@ CHECK: vqshrn.s32 d16, q8, #16 @ encoding: [0xd0,0xef,0x30,0x09]
 vqshrn.s32 d16, q8, #16
-@ CHECK: vqshrn.s64 d16, q8, #32 @ encoding: [0x30,0x09,0xe0,0xef]
+@ CHECK: vqshrn.s64 d16, q8, #32 @ encoding: [0xe0,0xef,0x30,0x09]
 vqshrn.s64 d16, q8, #32
-@ CHECK: vqshrn.u16 d16, q8, #8 @ encoding: [0x30,0x09,0xc8,0xff]
+@ CHECK: vqshrn.u16 d16, q8, #8 @ encoding: [0xc8,0xff,0x30,0x09]
 vqshrn.u16 d16, q8, #8
-@ CHECK: vqshrn.u32 d16, q8, #16 @ encoding: [0x30,0x09,0xd0,0xff]
+@ CHECK: vqshrn.u32 d16, q8, #16 @ encoding: [0xd0,0xff,0x30,0x09]
 vqshrn.u32 d16, q8, #16
-@ CHECK: vqshrn.u64 d16, q8, #32 @ encoding: [0x30,0x09,0xe0,0xff]
+@ CHECK: vqshrn.u64 d16, q8, #32 @ encoding: [0xe0,0xff,0x30,0x09]
 vqshrn.u64 d16, q8, #32
-@ CHECK: vqshrun.s16 d16, q8, #8 @ encoding: [0x30,0x08,0xc8,0xff]
+@ CHECK: vqshrun.s16 d16, q8, #8 @ encoding: [0xc8,0xff,0x30,0x08]
 vqshrun.s16 d16, q8, #8
-@ CHECK: vqshrun.s32 d16, q8, #16 @ encoding: [0x30,0x08,0xd0,0xff]
+@ CHECK: vqshrun.s32 d16, q8, #16 @ encoding: [0xd0,0xff,0x30,0x08]
 vqshrun.s32 d16, q8, #16
-@ CHECK: vqshrun.s64 d16, q8, #32 @ encoding: [0x30,0x08,0xe0,0xff]
+@ CHECK: vqshrun.s64 d16, q8, #32 @ encoding: [0xe0,0xff,0x30,0x08]
 vqshrun.s64 d16, q8, #32
-@ CHECK: vqrshrn.s16 d16, q8, #8 @ encoding: [0x70,0x09,0xc8,0xef]
+@ CHECK: vqrshrn.s16 d16, q8, #8 @ encoding: [0xc8,0xef,0x70,0x09]
 vqrshrn.s16 d16, q8, #8
-@ CHECK: vqrshrn.s32 d16, q8, #16 @ encoding: [0x70,0x09,0xd0,0xef]
+@ CHECK: vqrshrn.s32 d16, q8, #16 @ encoding: [0xd0,0xef,0x70,0x09]
 vqrshrn.s32 d16, q8, #16
-@ CHECK: vqrshrn.s64 d16, q8, #32 @ encoding: [0x70,0x09,0xe0,0xef]
+@ CHECK: vqrshrn.s64 d16, q8, #32 @ encoding: [0xe0,0xef,0x70,0x09]
 vqrshrn.s64 d16, q8, #32
-@ CHECK: vqrshrn.u16 d16, q8, #8 @ encoding: [0x70,0x09,0xc8,0xff]
+@ CHECK: vqrshrn.u16 d16, q8, #8 @ encoding: [0xc8,0xff,0x70,0x09]
 vqrshrn.u16 d16, q8, #8
-@ CHECK: vqrshrn.u32 d16, q8, #16 @ encoding: [0x70,0x09,0xd0,0xff]
+@ CHECK: vqrshrn.u32 d16, q8, #16 @ encoding: [0xd0,0xff,0x70,0x09]
 vqrshrn.u32 d16, q8, #16
-@ CHECK: vqrshrn.u64 d16, q8, #32 @ encoding: [0x70,0x09,0xe0,0xff]
+@ CHECK: vqrshrn.u64 d16, q8, #32 @ encoding: [0xe0,0xff,0x70,0x09]
 vqrshrn.u64 d16, q8, #32
-@ CHECK: vqrshrun.s16 d16, q8, #8 @ encoding: [0x70,0x08,0xc8,0xff]
+@ CHECK: vqrshrun.s16 d16, q8, #8 @ encoding: [0xc8,0xff,0x70,0x08]
 vqrshrun.s16 d16, q8, #8
-@ CHECK: vqrshrun.s32 d16, q8, #16 @ encoding: [0x70,0x08,0xd0,0xff]
+@ CHECK: vqrshrun.s32 d16, q8, #16 @ encoding: [0xd0,0xff,0x70,0x08]
 vqrshrun.s32 d16, q8, #16
-@ CHECK: vqrshrun.s64 d16, q8, #32 @ encoding: [0x70,0x08,0xe0,0xff]
+@ CHECK: vqrshrun.s64 d16, q8, #32 @ encoding: [0xe0,0xff,0x70,0x08]
 vqrshrun.s64 d16, q8, #32
@ -2,161 +2,161 @@
|
||||
|
||||
.code 16
|
||||
|
||||
@ CHECK: vshl.u8 d16, d17, d16 @ encoding: [0xa1,0x04,0x40,0xff]
|
||||
@ CHECK: vshl.u8 d16, d17, d16 @ encoding: [0x40,0xff,0xa1,0x04]
|
||||
vshl.u8 d16, d17, d16
|
||||
@ CHECK: vshl.u16 d16, d17, d16 @ encoding: [0xa1,0x04,0x50,0xff]
|
||||
@ CHECK: vshl.u16 d16, d17, d16 @ encoding: [0x50,0xff,0xa1,0x04]
|
||||
vshl.u16 d16, d17, d16
|
||||
@ CHECK: vshl.u32 d16, d17, d16 @ encoding: [0xa1,0x04,0x60,0xff]
|
||||
@ CHECK: vshl.u32 d16, d17, d16 @ encoding: [0x60,0xff,0xa1,0x04]
|
||||
vshl.u32 d16, d17, d16
|
||||
@ CHECK: vshl.u64 d16, d17, d16 @ encoding: [0xa1,0x04,0x70,0xff]
|
||||
@ CHECK: vshl.u64 d16, d17, d16 @ encoding: [0x70,0xff,0xa1,0x04]
|
||||
vshl.u64 d16, d17, d16
|
||||
@ CHECK: vshl.i8 d16, d16, #7 @ encoding: [0x30,0x05,0xcf,0xef]
|
||||
@ CHECK: vshl.i8 d16, d16, #7 @ encoding: [0xcf,0xef,0x30,0x05]
|
||||
vshl.i8 d16, d16, #7
|
||||
@ CHECK: vshl.i16 d16, d16, #15 @ encoding: [0x30,0x05,0xdf,0xef]
|
||||
@ CHECK: vshl.i16 d16, d16, #15 @ encoding: [0xdf,0xef,0x30,0x05]
|
||||
vshl.i16 d16, d16, #15
|
||||
@ CHECK: vshl.i32 d16, d16, #31 @ encoding: [0x30,0x05,0xff,0xef]
|
||||
@ CHECK: vshl.i32 d16, d16, #31 @ encoding: [0xff,0xef,0x30,0x05]
|
||||
vshl.i32 d16, d16, #31
|
||||
@ CHECK: vshl.i64 d16, d16, #63 @ encoding: [0xb0,0x05,0xff,0xef]
|
||||
@ CHECK: vshl.i64 d16, d16, #63 @ encoding: [0xff,0xef,0xb0,0x05]
|
||||
vshl.i64 d16, d16, #63
|
||||
@ CHECK: vshl.u8 q8, q9, q8 @ encoding: [0xe2,0x04,0x40,0xff]
|
||||
@ CHECK: vshl.u8 q8, q9, q8 @ encoding: [0x40,0xff,0xe2,0x04]
|
||||
vshl.u8 q8, q9, q8
|
||||
@ CHECK: vshl.u16 q8, q9, q8 @ encoding: [0xe2,0x04,0x50,0xff]
|
||||
@ CHECK: vshl.u16 q8, q9, q8 @ encoding: [0x50,0xff,0xe2,0x04]
|
||||
vshl.u16 q8, q9, q8
|
||||
@ CHECK: vshl.u32 q8, q9, q8 @ encoding: [0xe2,0x04,0x60,0xff]
|
||||
@ CHECK: vshl.u32 q8, q9, q8 @ encoding: [0x60,0xff,0xe2,0x04]
|
||||
vshl.u32 q8, q9, q8
|
||||
@ CHECK: vshl.u64 q8, q9, q8 @ encoding: [0xe2,0x04,0x70,0xff]
|
||||
@ CHECK: vshl.u64 q8, q9, q8 @ encoding: [0x70,0xff,0xe2,0x04]
|
||||
vshl.u64 q8, q9, q8
|
||||
@ CHECK: vshl.i8 q8, q8, #7 @ encoding: [0x70,0x05,0xcf,0xef]
|
||||
@ CHECK: vshl.i8 q8, q8, #7 @ encoding: [0xcf,0xef,0x70,0x05]
|
||||
vshl.i8 q8, q8, #7
|
||||
@ CHECK: vshl.i16 q8, q8, #15 @ encoding: [0x70,0x05,0xdf,0xef]
|
||||
@ CHECK: vshl.i16 q8, q8, #15 @ encoding: [0xdf,0xef,0x70,0x05]
|
||||
vshl.i16 q8, q8, #15
|
||||
@ CHECK: vshl.i32 q8, q8, #31 @ encoding: [0x70,0x05,0xff,0xef]
|
||||
@ CHECK: vshl.i32 q8, q8, #31 @ encoding: [0xff,0xef,0x70,0x05]
|
||||
vshl.i32 q8, q8, #31
|
||||
@ CHECK: vshl.i64 q8, q8, #63 @ encoding: [0xf0,0x05,0xff,0xef]
|
||||
@ CHECK: vshl.i64 q8, q8, #63 @ encoding: [0xff,0xef,0xf0,0x05]
|
||||
vshl.i64 q8, q8, #63
|
||||
@ CHECK: vshr.u8 d16, d16, #8 @ encoding: [0x30,0x00,0xc8,0xff]
|
||||
@ CHECK: vshr.u8 d16, d16, #8 @ encoding: [0xc8,0xff,0x30,0x00]
|
||||
vshr.u8 d16, d16, #8
|
||||
@ CHECK: vshr.u16 d16, d16, #16 @ encoding: [0x30,0x00,0xd0,0xff]
|
||||
@ CHECK: vshr.u16 d16, d16, #16 @ encoding: [0xd0,0xff,0x30,0x00]
|
||||
vshr.u16 d16, d16, #16
|
||||
@ CHECK: vshr.u32 d16, d16, #32 @ encoding: [0x30,0x00,0xe0,0xff]
|
||||
@ CHECK: vshr.u32 d16, d16, #32 @ encoding: [0xe0,0xff,0x30,0x00]
|
||||
vshr.u32 d16, d16, #32
|
||||
@ CHECK: vshr.u64 d16, d16, #64 @ encoding: [0xb0,0x00,0xc0,0xff]
@ CHECK: vshr.u64 d16, d16, #64 @ encoding: [0xc0,0xff,0xb0,0x00]
	vshr.u64	d16, d16, #64
@ CHECK: vshr.u8 q8, q8, #8 @ encoding: [0x70,0x00,0xc8,0xff]
@ CHECK: vshr.u8 q8, q8, #8 @ encoding: [0xc8,0xff,0x70,0x00]
	vshr.u8	q8, q8, #8
@ CHECK: vshr.u16 q8, q8, #16 @ encoding: [0x70,0x00,0xd0,0xff]
@ CHECK: vshr.u16 q8, q8, #16 @ encoding: [0xd0,0xff,0x70,0x00]
	vshr.u16	q8, q8, #16
@ CHECK: vshr.u32 q8, q8, #32 @ encoding: [0x70,0x00,0xe0,0xff]
@ CHECK: vshr.u32 q8, q8, #32 @ encoding: [0xe0,0xff,0x70,0x00]
	vshr.u32	q8, q8, #32
@ CHECK: vshr.u64 q8, q8, #64 @ encoding: [0xf0,0x00,0xc0,0xff]
@ CHECK: vshr.u64 q8, q8, #64 @ encoding: [0xc0,0xff,0xf0,0x00]
	vshr.u64	q8, q8, #64
@ CHECK: vshr.s8 d16, d16, #8 @ encoding: [0x30,0x00,0xc8,0xef]
@ CHECK: vshr.s8 d16, d16, #8 @ encoding: [0xc8,0xef,0x30,0x00]
	vshr.s8	d16, d16, #8
@ CHECK: vshr.s16 d16, d16, #16 @ encoding: [0x30,0x00,0xd0,0xef]
@ CHECK: vshr.s16 d16, d16, #16 @ encoding: [0xd0,0xef,0x30,0x00]
	vshr.s16	d16, d16, #16
@ CHECK: vshr.s32 d16, d16, #32 @ encoding: [0x30,0x00,0xe0,0xef]
@ CHECK: vshr.s32 d16, d16, #32 @ encoding: [0xe0,0xef,0x30,0x00]
	vshr.s32	d16, d16, #32
@ CHECK: vshr.s64 d16, d16, #64 @ encoding: [0xb0,0x00,0xc0,0xef]
@ CHECK: vshr.s64 d16, d16, #64 @ encoding: [0xc0,0xef,0xb0,0x00]
	vshr.s64	d16, d16, #64
@ CHECK: vshr.s8 q8, q8, #8 @ encoding: [0x70,0x00,0xc8,0xef]
@ CHECK: vshr.s8 q8, q8, #8 @ encoding: [0xc8,0xef,0x70,0x00]
	vshr.s8	q8, q8, #8
@ CHECK: vshr.s16 q8, q8, #16 @ encoding: [0x70,0x00,0xd0,0xef]
@ CHECK: vshr.s16 q8, q8, #16 @ encoding: [0xd0,0xef,0x70,0x00]
	vshr.s16	q8, q8, #16
@ CHECK: vshr.s32 q8, q8, #32 @ encoding: [0x70,0x00,0xe0,0xef]
@ CHECK: vshr.s32 q8, q8, #32 @ encoding: [0xe0,0xef,0x70,0x00]
	vshr.s32	q8, q8, #32
@ CHECK: vshr.s64 q8, q8, #64 @ encoding: [0xf0,0x00,0xc0,0xef]
@ CHECK: vshr.s64 q8, q8, #64 @ encoding: [0xc0,0xef,0xf0,0x00]
	vshr.s64	q8, q8, #64
@ CHECK: vshll.s8 q8, d16, #7 @ encoding: [0x30,0x0a,0xcf,0xef]
@ CHECK: vshll.s8 q8, d16, #7 @ encoding: [0xcf,0xef,0x30,0x0a]
	vshll.s8	q8, d16, #7
@ CHECK: vshll.s16 q8, d16, #15 @ encoding: [0x30,0x0a,0xdf,0xef]
@ CHECK: vshll.s16 q8, d16, #15 @ encoding: [0xdf,0xef,0x30,0x0a]
	vshll.s16	q8, d16, #15
@ CHECK: vshll.s32 q8, d16, #31 @ encoding: [0x30,0x0a,0xff,0xef]
@ CHECK: vshll.s32 q8, d16, #31 @ encoding: [0xff,0xef,0x30,0x0a]
	vshll.s32	q8, d16, #31
@ CHECK: vshll.u8 q8, d16, #7 @ encoding: [0x30,0x0a,0xcf,0xff]
@ CHECK: vshll.u8 q8, d16, #7 @ encoding: [0xcf,0xff,0x30,0x0a]
	vshll.u8	q8, d16, #7
@ CHECK: vshll.u16 q8, d16, #15 @ encoding: [0x30,0x0a,0xdf,0xff]
@ CHECK: vshll.u16 q8, d16, #15 @ encoding: [0xdf,0xff,0x30,0x0a]
	vshll.u16	q8, d16, #15
@ CHECK: vshll.u32 q8, d16, #31 @ encoding: [0x30,0x0a,0xff,0xff]
@ CHECK: vshll.u32 q8, d16, #31 @ encoding: [0xff,0xff,0x30,0x0a]
	vshll.u32	q8, d16, #31
@ CHECK: vshll.i8 q8, d16, #8 @ encoding: [0x20,0x03,0xf2,0xff]
@ CHECK: vshll.i8 q8, d16, #8 @ encoding: [0xf2,0xff,0x20,0x03]
	vshll.i8	q8, d16, #8
@ CHECK: vshll.i16 q8, d16, #16 @ encoding: [0x20,0x03,0xf6,0xff]
@ CHECK: vshll.i16 q8, d16, #16 @ encoding: [0xf6,0xff,0x20,0x03]
	vshll.i16	q8, d16, #16
@ CHECK: vshll.i32 q8, d16, #32 @ encoding: [0x20,0x03,0xfa,0xff]
@ CHECK: vshll.i32 q8, d16, #32 @ encoding: [0xfa,0xff,0x20,0x03]
	vshll.i32	q8, d16, #32
@ CHECK: vshrn.i16 d16, q8, #8 @ encoding: [0x30,0x08,0xc8,0xef]
@ CHECK: vshrn.i16 d16, q8, #8 @ encoding: [0xc8,0xef,0x30,0x08]
	vshrn.i16	d16, q8, #8
@ CHECK: vshrn.i32 d16, q8, #16 @ encoding: [0x30,0x08,0xd0,0xef]
@ CHECK: vshrn.i32 d16, q8, #16 @ encoding: [0xd0,0xef,0x30,0x08]
	vshrn.i32	d16, q8, #16
@ CHECK: vshrn.i64 d16, q8, #32 @ encoding: [0x30,0x08,0xe0,0xef]
@ CHECK: vshrn.i64 d16, q8, #32 @ encoding: [0xe0,0xef,0x30,0x08]
	vshrn.i64	d16, q8, #32
@ CHECK: vrshl.s8 d16, d17, d16 @ encoding: [0xa1,0x05,0x40,0xef]
@ CHECK: vrshl.s8 d16, d17, d16 @ encoding: [0x40,0xef,0xa1,0x05]
	vrshl.s8	d16, d17, d16
@ CHECK: vrshl.s16 d16, d17, d16 @ encoding: [0xa1,0x05,0x50,0xef]
@ CHECK: vrshl.s16 d16, d17, d16 @ encoding: [0x50,0xef,0xa1,0x05]
	vrshl.s16	d16, d17, d16
@ CHECK: vrshl.s32 d16, d17, d16 @ encoding: [0xa1,0x05,0x60,0xef]
@ CHECK: vrshl.s32 d16, d17, d16 @ encoding: [0x60,0xef,0xa1,0x05]
	vrshl.s32	d16, d17, d16
@ CHECK: vrshl.s64 d16, d17, d16 @ encoding: [0xa1,0x05,0x70,0xef]
@ CHECK: vrshl.s64 d16, d17, d16 @ encoding: [0x70,0xef,0xa1,0x05]
vrshl.s64 d16, d17, d16
@ CHECK: vrshl.u8 d16, d17, d16 @ encoding: [0xa1,0x05,0x40,0xff]
@ CHECK: vrshl.u8 d16, d17, d16 @ encoding: [0x40,0xff,0xa1,0x05]
	vrshl.u8	d16, d17, d16
@ CHECK: vrshl.u16 d16, d17, d16 @ encoding: [0xa1,0x05,0x50,0xff]
@ CHECK: vrshl.u16 d16, d17, d16 @ encoding: [0x50,0xff,0xa1,0x05]
	vrshl.u16	d16, d17, d16
@ CHECK: vrshl.u32 d16, d17, d16 @ encoding: [0xa1,0x05,0x60,0xff]
@ CHECK: vrshl.u32 d16, d17, d16 @ encoding: [0x60,0xff,0xa1,0x05]
	vrshl.u32	d16, d17, d16
@ CHECK: vrshl.u64 d16, d17, d16 @ encoding: [0xa1,0x05,0x70,0xff]
@ CHECK: vrshl.u64 d16, d17, d16 @ encoding: [0x70,0xff,0xa1,0x05]
	vrshl.u64	d16, d17, d16
@ CHECK: vrshl.s8 q8, q9, q8 @ encoding: [0xe2,0x05,0x40,0xef]
@ CHECK: vrshl.s8 q8, q9, q8 @ encoding: [0x40,0xef,0xe2,0x05]
	vrshl.s8	q8, q9, q8
@ CHECK: vrshl.s16 q8, q9, q8 @ encoding: [0xe2,0x05,0x50,0xef]
@ CHECK: vrshl.s16 q8, q9, q8 @ encoding: [0x50,0xef,0xe2,0x05]
	vrshl.s16	q8, q9, q8
@ CHECK: vrshl.s32 q8, q9, q8 @ encoding: [0xe2,0x05,0x60,0xef]
@ CHECK: vrshl.s32 q8, q9, q8 @ encoding: [0x60,0xef,0xe2,0x05]
	vrshl.s32	q8, q9, q8
@ CHECK: vrshl.s64 q8, q9, q8 @ encoding: [0xe2,0x05,0x70,0xef]
@ CHECK: vrshl.s64 q8, q9, q8 @ encoding: [0x70,0xef,0xe2,0x05]
	vrshl.s64	q8, q9, q8
@ CHECK: vrshl.u8 q8, q9, q8 @ encoding: [0xe2,0x05,0x40,0xff]
@ CHECK: vrshl.u8 q8, q9, q8 @ encoding: [0x40,0xff,0xe2,0x05]
	vrshl.u8	q8, q9, q8
@ CHECK: vrshl.u16 q8, q9, q8 @ encoding: [0xe2,0x05,0x50,0xff]
@ CHECK: vrshl.u16 q8, q9, q8 @ encoding: [0x50,0xff,0xe2,0x05]
	vrshl.u16	q8, q9, q8
@ CHECK: vrshl.u32 q8, q9, q8 @ encoding: [0xe2,0x05,0x60,0xff]
@ CHECK: vrshl.u32 q8, q9, q8 @ encoding: [0x60,0xff,0xe2,0x05]
	vrshl.u32	q8, q9, q8
@ CHECK: vrshl.u64 q8, q9, q8 @ encoding: [0xe2,0x05,0x70,0xff]
@ CHECK: vrshl.u64 q8, q9, q8 @ encoding: [0x70,0xff,0xe2,0x05]
	vrshl.u64	q8, q9, q8
@ CHECK: vrshr.s8 d16, d16, #8 @ encoding: [0x30,0x02,0xc8,0xef]
@ CHECK: vrshr.s8 d16, d16, #8 @ encoding: [0xc8,0xef,0x30,0x02]
	vrshr.s8	d16, d16, #8
@ CHECK: vrshr.s16 d16, d16, #16 @ encoding: [0x30,0x02,0xd0,0xef]
@ CHECK: vrshr.s16 d16, d16, #16 @ encoding: [0xd0,0xef,0x30,0x02]
	vrshr.s16	d16, d16, #16
@ CHECK: vrshr.s32 d16, d16, #32 @ encoding: [0x30,0x02,0xe0,0xef]
@ CHECK: vrshr.s32 d16, d16, #32 @ encoding: [0xe0,0xef,0x30,0x02]
	vrshr.s32	d16, d16, #32
@ CHECK: vrshr.s64 d16, d16, #64 @ encoding: [0xb0,0x02,0xc0,0xef]
@ CHECK: vrshr.s64 d16, d16, #64 @ encoding: [0xc0,0xef,0xb0,0x02]
	vrshr.s64	d16, d16, #64
@ CHECK: vrshr.u8 d16, d16, #8 @ encoding: [0x30,0x02,0xc8,0xff]
@ CHECK: vrshr.u8 d16, d16, #8 @ encoding: [0xc8,0xff,0x30,0x02]
	vrshr.u8	d16, d16, #8
@ CHECK: vrshr.u16 d16, d16, #16 @ encoding: [0x30,0x02,0xd0,0xff]
@ CHECK: vrshr.u16 d16, d16, #16 @ encoding: [0xd0,0xff,0x30,0x02]
	vrshr.u16	d16, d16, #16
@ CHECK: vrshr.u32 d16, d16, #32 @ encoding: [0x30,0x02,0xe0,0xff]
@ CHECK: vrshr.u32 d16, d16, #32 @ encoding: [0xe0,0xff,0x30,0x02]
	vrshr.u32	d16, d16, #32
@ CHECK: vrshr.u64 d16, d16, #64 @ encoding: [0xb0,0x02,0xc0,0xff]
@ CHECK: vrshr.u64 d16, d16, #64 @ encoding: [0xc0,0xff,0xb0,0x02]
	vrshr.u64	d16, d16, #64
@ CHECK: vrshr.s8 q8, q8, #8 @ encoding: [0x70,0x02,0xc8,0xef]
@ CHECK: vrshr.s8 q8, q8, #8 @ encoding: [0xc8,0xef,0x70,0x02]
	vrshr.s8	q8, q8, #8
@ CHECK: vrshr.s16 q8, q8, #16 @ encoding: [0x70,0x02,0xd0,0xef]
@ CHECK: vrshr.s16 q8, q8, #16 @ encoding: [0xd0,0xef,0x70,0x02]
	vrshr.s16	q8, q8, #16
@ CHECK: vrshr.s32 q8, q8, #32 @ encoding: [0x70,0x02,0xe0,0xef]
@ CHECK: vrshr.s32 q8, q8, #32 @ encoding: [0xe0,0xef,0x70,0x02]
	vrshr.s32	q8, q8, #32
@ CHECK: vrshr.s64 q8, q8, #64 @ encoding: [0xf0,0x02,0xc0,0xef]
@ CHECK: vrshr.s64 q8, q8, #64 @ encoding: [0xc0,0xef,0xf0,0x02]
	vrshr.s64	q8, q8, #64
@ CHECK: vrshr.u8 q8, q8, #8 @ encoding: [0x70,0x02,0xc8,0xff]
@ CHECK: vrshr.u8 q8, q8, #8 @ encoding: [0xc8,0xff,0x70,0x02]
	vrshr.u8	q8, q8, #8
@ CHECK: vrshr.u16 q8, q8, #16 @ encoding: [0x70,0x02,0xd0,0xff]
@ CHECK: vrshr.u16 q8, q8, #16 @ encoding: [0xd0,0xff,0x70,0x02]
	vrshr.u16	q8, q8, #16
@ CHECK: vrshr.u32 q8, q8, #32 @ encoding: [0x70,0x02,0xe0,0xff]
@ CHECK: vrshr.u32 q8, q8, #32 @ encoding: [0xe0,0xff,0x70,0x02]
	vrshr.u32	q8, q8, #32
@ CHECK: vrshr.u64 q8, q8, #64 @ encoding: [0xf0,0x02,0xc0,0xff]
@ CHECK: vrshr.u64 q8, q8, #64 @ encoding: [0xc0,0xff,0xf0,0x02]
	vrshr.u64	q8, q8, #64
@ CHECK: vrshrn.i16 d16, q8, #8 @ encoding: [0x70,0x08,0xc8,0xef]
@ CHECK: vrshrn.i16 d16, q8, #8 @ encoding: [0xc8,0xef,0x70,0x08]
	vrshrn.i16	d16, q8, #8
@ CHECK: vrshrn.i32 d16, q8, #16 @ encoding: [0x70,0x08,0xd0,0xef]
@ CHECK: vrshrn.i32 d16, q8, #16 @ encoding: [0xd0,0xef,0x70,0x08]
	vrshrn.i32	d16, q8, #16
@ CHECK: vrshrn.i64 d16, q8, #32 @ encoding: [0x70,0x08,0xe0,0xef]
@ CHECK: vrshrn.i64 d16, q8, #32 @ encoding: [0xe0,0xef,0x70,0x08]
	vrshrn.i64	d16, q8, #32

@ -2,99 +2,99 @@

.code 16

@ CHECK: vsra.s8 d17, d16, #8 @ encoding: [0x30,0x11,0xc8,0xef]
@ CHECK: vsra.s8 d17, d16, #8 @ encoding: [0xc8,0xef,0x30,0x11]
	vsra.s8	d17, d16, #8
@ CHECK: vsra.s16 d17, d16, #16 @ encoding: [0x30,0x11,0xd0,0xef]
@ CHECK: vsra.s16 d17, d16, #16 @ encoding: [0xd0,0xef,0x30,0x11]
	vsra.s16	d17, d16, #16
@ CHECK: vsra.s32 d17, d16, #32 @ encoding: [0x30,0x11,0xe0,0xef]
@ CHECK: vsra.s32 d17, d16, #32 @ encoding: [0xe0,0xef,0x30,0x11]
	vsra.s32	d17, d16, #32
@ CHECK: vsra.s64 d17, d16, #64 @ encoding: [0xb0,0x11,0xc0,0xef]
@ CHECK: vsra.s64 d17, d16, #64 @ encoding: [0xc0,0xef,0xb0,0x11]
	vsra.s64	d17, d16, #64
@ CHECK: vsra.s8 q8, q9, #8 @ encoding: [0x72,0x01,0xc8,0xef]
@ CHECK: vsra.s8 q8, q9, #8 @ encoding: [0xc8,0xef,0x72,0x01]
	vsra.s8	q8, q9, #8
@ CHECK: vsra.s16 q8, q9, #16 @ encoding: [0x72,0x01,0xd0,0xef]
@ CHECK: vsra.s16 q8, q9, #16 @ encoding: [0xd0,0xef,0x72,0x01]
	vsra.s16	q8, q9, #16
@ CHECK: vsra.s32 q8, q9, #32 @ encoding: [0x72,0x01,0xe0,0xef]
@ CHECK: vsra.s32 q8, q9, #32 @ encoding: [0xe0,0xef,0x72,0x01]
	vsra.s32	q8, q9, #32
@ CHECK: vsra.s64 q8, q9, #64 @ encoding: [0xf2,0x01,0xc0,0xef]
@ CHECK: vsra.s64 q8, q9, #64 @ encoding: [0xc0,0xef,0xf2,0x01]
	vsra.s64	q8, q9, #64
@ CHECK: vsra.u8 d17, d16, #8 @ encoding: [0x30,0x11,0xc8,0xff]
@ CHECK: vsra.u8 d17, d16, #8 @ encoding: [0xc8,0xff,0x30,0x11]
	vsra.u8	d17, d16, #8
@ CHECK: vsra.u16 d17, d16, #16 @ encoding: [0x30,0x11,0xd0,0xff]
@ CHECK: vsra.u16 d17, d16, #16 @ encoding: [0xd0,0xff,0x30,0x11]
	vsra.u16	d17, d16, #16
@ CHECK: vsra.u32 d17, d16, #32 @ encoding: [0x30,0x11,0xe0,0xff]
@ CHECK: vsra.u32 d17, d16, #32 @ encoding: [0xe0,0xff,0x30,0x11]
	vsra.u32	d17, d16, #32
@ CHECK: vsra.u64 d17, d16, #64 @ encoding: [0xb0,0x11,0xc0,0xff]
@ CHECK: vsra.u64 d17, d16, #64 @ encoding: [0xc0,0xff,0xb0,0x11]
	vsra.u64	d17, d16, #64
@ CHECK: vsra.u8 q8, q9, #8 @ encoding: [0x72,0x01,0xc8,0xff]
@ CHECK: vsra.u8 q8, q9, #8 @ encoding: [0xc8,0xff,0x72,0x01]
	vsra.u8	q8, q9, #8
@ CHECK: vsra.u16 q8, q9, #16 @ encoding: [0x72,0x01,0xd0,0xff]
@ CHECK: vsra.u16 q8, q9, #16 @ encoding: [0xd0,0xff,0x72,0x01]
	vsra.u16	q8, q9, #16
@ CHECK: vsra.u32 q8, q9, #32 @ encoding: [0x72,0x01,0xe0,0xff]
@ CHECK: vsra.u32 q8, q9, #32 @ encoding: [0xe0,0xff,0x72,0x01]
	vsra.u32	q8, q9, #32
@ CHECK: vsra.u64 q8, q9, #64 @ encoding: [0xf2,0x01,0xc0,0xff]
@ CHECK: vsra.u64 q8, q9, #64 @ encoding: [0xc0,0xff,0xf2,0x01]
	vsra.u64	q8, q9, #64
@ CHECK: vrsra.s8 d17, d16, #8 @ encoding: [0x30,0x13,0xc8,0xef]
@ CHECK: vrsra.s8 d17, d16, #8 @ encoding: [0xc8,0xef,0x30,0x13]
	vrsra.s8	d17, d16, #8
@ CHECK: vrsra.s16 d17, d16, #16 @ encoding: [0x30,0x13,0xd0,0xef]
@ CHECK: vrsra.s16 d17, d16, #16 @ encoding: [0xd0,0xef,0x30,0x13]
	vrsra.s16	d17, d16, #16
@ CHECK: vrsra.s32 d17, d16, #32 @ encoding: [0x30,0x13,0xe0,0xef]
@ CHECK: vrsra.s32 d17, d16, #32 @ encoding: [0xe0,0xef,0x30,0x13]
	vrsra.s32	d17, d16, #32
@ CHECK: vrsra.s64 d17, d16, #64 @ encoding: [0xb0,0x13,0xc0,0xef]
@ CHECK: vrsra.s64 d17, d16, #64 @ encoding: [0xc0,0xef,0xb0,0x13]
	vrsra.s64	d17, d16, #64
@ CHECK: vrsra.u8 d17, d16, #8 @ encoding: [0x30,0x13,0xc8,0xff]
@ CHECK: vrsra.u8 d17, d16, #8 @ encoding: [0xc8,0xff,0x30,0x13]
	vrsra.u8	d17, d16, #8
@ CHECK: vrsra.u16 d17, d16, #16 @ encoding: [0x30,0x13,0xd0,0xff]
@ CHECK: vrsra.u16 d17, d16, #16 @ encoding: [0xd0,0xff,0x30,0x13]
	vrsra.u16	d17, d16, #16
@ CHECK: vrsra.u32 d17, d16, #32 @ encoding: [0x30,0x13,0xe0,0xff]
@ CHECK: vrsra.u32 d17, d16, #32 @ encoding: [0xe0,0xff,0x30,0x13]
	vrsra.u32	d17, d16, #32
@ CHECK: vrsra.u64 d17, d16, #64 @ encoding: [0xb0,0x13,0xc0,0xff]
@ CHECK: vrsra.u64 d17, d16, #64 @ encoding: [0xc0,0xff,0xb0,0x13]
	vrsra.u64	d17, d16, #64
@ CHECK: vrsra.s8 q8, q9, #8 @ encoding: [0x72,0x03,0xc8,0xef]
@ CHECK: vrsra.s8 q8, q9, #8 @ encoding: [0xc8,0xef,0x72,0x03]
	vrsra.s8	q8, q9, #8
@ CHECK: vrsra.s16 q8, q9, #16 @ encoding: [0x72,0x03,0xd0,0xef]
@ CHECK: vrsra.s16 q8, q9, #16 @ encoding: [0xd0,0xef,0x72,0x03]
	vrsra.s16	q8, q9, #16
@ CHECK: vrsra.s32 q8, q9, #32 @ encoding: [0x72,0x03,0xe0,0xef]
@ CHECK: vrsra.s32 q8, q9, #32 @ encoding: [0xe0,0xef,0x72,0x03]
	vrsra.s32	q8, q9, #32
@ CHECK: vrsra.s64 q8, q9, #64 @ encoding: [0xf2,0x03,0xc0,0xef]
@ CHECK: vrsra.s64 q8, q9, #64 @ encoding: [0xc0,0xef,0xf2,0x03]
	vrsra.s64	q8, q9, #64
@ CHECK: vrsra.u8 q8, q9, #8 @ encoding: [0x72,0x03,0xc8,0xff]
@ CHECK: vrsra.u8 q8, q9, #8 @ encoding: [0xc8,0xff,0x72,0x03]
	vrsra.u8	q8, q9, #8
@ CHECK: vrsra.u16 q8, q9, #16 @ encoding: [0x72,0x03,0xd0,0xff]
@ CHECK: vrsra.u16 q8, q9, #16 @ encoding: [0xd0,0xff,0x72,0x03]
	vrsra.u16	q8, q9, #16
@ CHECK: vrsra.u32 q8, q9, #32 @ encoding: [0x72,0x03,0xe0,0xff]
@ CHECK: vrsra.u32 q8, q9, #32 @ encoding: [0xe0,0xff,0x72,0x03]
	vrsra.u32	q8, q9, #32
@ CHECK: vrsra.u64 q8, q9, #64 @ encoding: [0xf2,0x03,0xc0,0xff]
@ CHECK: vrsra.u64 q8, q9, #64 @ encoding: [0xc0,0xff,0xf2,0x03]
	vrsra.u64	q8, q9, #64
@ CHECK: vsli.8 d17, d16, #7 @ encoding: [0x30,0x15,0xcf,0xff]
@ CHECK: vsli.8 d17, d16, #7 @ encoding: [0xcf,0xff,0x30,0x15]
	vsli.8	d17, d16, #7
@ CHECK: vsli.16 d17, d16, #15 @ encoding: [0x30,0x15,0xdf,0xff]
@ CHECK: vsli.16 d17, d16, #15 @ encoding: [0xdf,0xff,0x30,0x15]
	vsli.16	d17, d16, #15
@ CHECK: vsli.32 d17, d16, #31 @ encoding: [0x30,0x15,0xff,0xff]
@ CHECK: vsli.32 d17, d16, #31 @ encoding: [0xff,0xff,0x30,0x15]
	vsli.32	d17, d16, #31
@ CHECK: vsli.64 d17, d16, #63 @ encoding: [0xb0,0x15,0xff,0xff]
@ CHECK: vsli.64 d17, d16, #63 @ encoding: [0xff,0xff,0xb0,0x15]
	vsli.64	d17, d16, #63
@ CHECK: vsli.8 q9, q8, #7 @ encoding: [0x70,0x25,0xcf,0xff]
@ CHECK: vsli.8 q9, q8, #7 @ encoding: [0xcf,0xff,0x70,0x25]
	vsli.8	q9, q8, #7
@ CHECK: vsli.16 q9, q8, #15 @ encoding: [0x70,0x25,0xdf,0xff]
@ CHECK: vsli.16 q9, q8, #15 @ encoding: [0xdf,0xff,0x70,0x25]
	vsli.16	q9, q8, #15
@ CHECK: vsli.32 q9, q8, #31 @ encoding: [0x70,0x25,0xff,0xff]
@ CHECK: vsli.32 q9, q8, #31 @ encoding: [0xff,0xff,0x70,0x25]
	vsli.32	q9, q8, #31
@ CHECK: vsli.64 q9, q8, #63 @ encoding: [0xf0,0x25,0xff,0xff]
@ CHECK: vsli.64 q9, q8, #63 @ encoding: [0xff,0xff,0xf0,0x25]
	vsli.64	q9, q8, #63
@ CHECK: vsri.8 d17, d16, #8 @ encoding: [0x30,0x14,0xc8,0xff]
@ CHECK: vsri.8 d17, d16, #8 @ encoding: [0xc8,0xff,0x30,0x14]
	vsri.8	d17, d16, #8
@ CHECK: vsri.16 d17, d16, #16 @ encoding: [0x30,0x14,0xd0,0xff]
@ CHECK: vsri.16 d17, d16, #16 @ encoding: [0xd0,0xff,0x30,0x14]
	vsri.16	d17, d16, #16
@ CHECK: vsri.32 d17, d16, #32 @ encoding: [0x30,0x14,0xe0,0xff]
@ CHECK: vsri.32 d17, d16, #32 @ encoding: [0xe0,0xff,0x30,0x14]
	vsri.32	d17, d16, #32
@ CHECK: vsri.64 d17, d16, #64 @ encoding: [0xb0,0x14,0xc0,0xff]
@ CHECK: vsri.64 d17, d16, #64 @ encoding: [0xc0,0xff,0xb0,0x14]
	vsri.64	d17, d16, #64
@ CHECK: vsri.8 q9, q8, #8 @ encoding: [0x70,0x24,0xc8,0xff]
@ CHECK: vsri.8 q9, q8, #8 @ encoding: [0xc8,0xff,0x70,0x24]
	vsri.8	q9, q8, #8
@ CHECK: vsri.16 q9, q8, #16 @ encoding: [0x70,0x24,0xd0,0xff]
@ CHECK: vsri.16 q9, q8, #16 @ encoding: [0xd0,0xff,0x70,0x24]
	vsri.16	q9, q8, #16
@ CHECK: vsri.32 q9, q8, #32 @ encoding: [0x70,0x24,0xe0,0xff]
@ CHECK: vsri.32 q9, q8, #32 @ encoding: [0xe0,0xff,0x70,0x24]
	vsri.32	q9, q8, #32
@ CHECK: vsri.64 q9, q8, #64 @ encoding: [0xf0,0x24,0xc0,0xff]
@ CHECK: vsri.64 q9, q8, #64 @ encoding: [0xc0,0xff,0xf0,0x24]
	vsri.64	q9, q8, #64

@ -2,47 +2,47 @@

.code 16

@ CHECK: vext.8 d16, d17, d16, #3 @ encoding: [0xa0,0x03,0xf1,0xef]
@ CHECK: vext.8 d16, d17, d16, #3 @ encoding: [0xf1,0xef,0xa0,0x03]
	vext.8	d16, d17, d16, #3
@ CHECK: vext.8 d16, d17, d16, #5 @ encoding: [0xa0,0x05,0xf1,0xef]
@ CHECK: vext.8 d16, d17, d16, #5 @ encoding: [0xf1,0xef,0xa0,0x05]
	vext.8	d16, d17, d16, #5
@ CHECK: vext.8 q8, q9, q8, #3 @ encoding: [0xe0,0x03,0xf2,0xef]
@ CHECK: vext.8 q8, q9, q8, #3 @ encoding: [0xf2,0xef,0xe0,0x03]
	vext.8	q8, q9, q8, #3
@ CHECK: vext.8 q8, q9, q8, #7 @ encoding: [0xe0,0x07,0xf2,0xef]
@ CHECK: vext.8 q8, q9, q8, #7 @ encoding: [0xf2,0xef,0xe0,0x07]
	vext.8	q8, q9, q8, #7
@ CHECK: vext.16 d16, d17, d16, #3 @ encoding: [0xa0,0x06,0xf1,0xef]
@ CHECK: vext.16 d16, d17, d16, #3 @ encoding: [0xf1,0xef,0xa0,0x06]
	vext.16	d16, d17, d16, #3
@ CHECK: vext.32 q8, q9, q8, #3 @ encoding: [0xe0,0x0c,0xf2,0xef]
@ CHECK: vext.32 q8, q9, q8, #3 @ encoding: [0xf2,0xef,0xe0,0x0c]
	vext.32	q8, q9, q8, #3
@ CHECK: vtrn.8 d17, d16 @ encoding: [0xa0,0x10,0xf2,0xff]
@ CHECK: vtrn.8 d17, d16 @ encoding: [0xf2,0xff,0xa0,0x10]
	vtrn.8	d17, d16
@ CHECK: vtrn.16 d17, d16 @ encoding: [0xa0,0x10,0xf6,0xff]
@ CHECK: vtrn.16 d17, d16 @ encoding: [0xf6,0xff,0xa0,0x10]
	vtrn.16	d17, d16
@ CHECK: vtrn.32 d17, d16 @ encoding: [0xa0,0x10,0xfa,0xff]
@ CHECK: vtrn.32 d17, d16 @ encoding: [0xfa,0xff,0xa0,0x10]
	vtrn.32	d17, d16
@ CHECK: vtrn.8 q9, q8 @ encoding: [0xe0,0x20,0xf2,0xff]
@ CHECK: vtrn.8 q9, q8 @ encoding: [0xf2,0xff,0xe0,0x20]
	vtrn.8	q9, q8
@ CHECK: vtrn.16 q9, q8 @ encoding: [0xe0,0x20,0xf6,0xff]
@ CHECK: vtrn.16 q9, q8 @ encoding: [0xf6,0xff,0xe0,0x20]
	vtrn.16	q9, q8
@ CHECK: vtrn.32 q9, q8 @ encoding: [0xe0,0x20,0xfa,0xff]
@ CHECK: vtrn.32 q9, q8 @ encoding: [0xfa,0xff,0xe0,0x20]
	vtrn.32	q9, q8
@ CHECK: vuzp.8 d17, d16 @ encoding: [0x20,0x11,0xf2,0xff]
@ CHECK: vuzp.8 d17, d16 @ encoding: [0xf2,0xff,0x20,0x11]
	vuzp.8	d17, d16
@ CHECK: vuzp.16 d17, d16 @ encoding: [0x20,0x11,0xf6,0xff]
@ CHECK: vuzp.16 d17, d16 @ encoding: [0xf6,0xff,0x20,0x11]
	vuzp.16	d17, d16
@ CHECK: vuzp.8 q9, q8 @ encoding: [0x60,0x21,0xf2,0xff]
@ CHECK: vuzp.8 q9, q8 @ encoding: [0xf2,0xff,0x60,0x21]
	vuzp.8	q9, q8
@ CHECK: vuzp.16 q9, q8 @ encoding: [0x60,0x21,0xf6,0xff]
@ CHECK: vuzp.16 q9, q8 @ encoding: [0xf6,0xff,0x60,0x21]
	vuzp.16	q9, q8
@ CHECK: vuzp.32 q9, q8 @ encoding: [0x60,0x21,0xfa,0xff]
@ CHECK: vuzp.32 q9, q8 @ encoding: [0xfa,0xff,0x60,0x21]
	vuzp.32	q9, q8
@ CHECK: vzip.8 d17, d16 @ encoding: [0xa0,0x11,0xf2,0xff]
@ CHECK: vzip.8 d17, d16 @ encoding: [0xf2,0xff,0xa0,0x11]
	vzip.8	d17, d16
@ CHECK: vzip.16 d17, d16 @ encoding: [0xa0,0x11,0xf6,0xff]
@ CHECK: vzip.16 d17, d16 @ encoding: [0xf6,0xff,0xa0,0x11]
	vzip.16	d17, d16
@ CHECK: vzip.8 q9, q8 @ encoding: [0xe0,0x21,0xf2,0xff]
@ CHECK: vzip.8 q9, q8 @ encoding: [0xf2,0xff,0xe0,0x21]
	vzip.8	q9, q8
@ CHECK: vzip.16 q9, q8 @ encoding: [0xe0,0x21,0xf6,0xff]
@ CHECK: vzip.16 q9, q8 @ encoding: [0xf6,0xff,0xe0,0x21]
	vzip.16	q9, q8
@ CHECK: vzip.32 q9, q8 @ encoding: [0xe0,0x21,0xfa,0xff]
@ CHECK: vzip.32 q9, q8 @ encoding: [0xfa,0xff,0xe0,0x21]
	vzip.32	q9, q8

@ -1,46 +1,46 @@
@ RUN: llvm-mc -mcpu=cortex-a8 -triple thumb-unknown-unknown -show-encoding < %s | FileCheck %s

@ CHECK: vext.8 d16, d17, d16, #3 @ encoding: [0xa0,0x03,0xf1,0xef]
@ CHECK: vext.8 d16, d17, d16, #3 @ encoding: [0xf1,0xef,0xa0,0x03]
	vext.8	d16, d17, d16, #3
@ CHECK: vext.8 d16, d17, d16, #5 @ encoding: [0xa0,0x05,0xf1,0xef]
@ CHECK: vext.8 d16, d17, d16, #5 @ encoding: [0xf1,0xef,0xa0,0x05]
	vext.8	d16, d17, d16, #5
@ CHECK: vext.8 q8, q9, q8, #3 @ encoding: [0xe0,0x03,0xf2,0xef]
@ CHECK: vext.8 q8, q9, q8, #3 @ encoding: [0xf2,0xef,0xe0,0x03]
	vext.8	q8, q9, q8, #3
@ CHECK: vext.8 q8, q9, q8, #7 @ encoding: [0xe0,0x07,0xf2,0xef]
@ CHECK: vext.8 q8, q9, q8, #7 @ encoding: [0xf2,0xef,0xe0,0x07]
	vext.8	q8, q9, q8, #7
@ CHECK: vext.16 d16, d17, d16, #3 @ encoding: [0xa0,0x06,0xf1,0xef]
@ CHECK: vext.16 d16, d17, d16, #3 @ encoding: [0xf1,0xef,0xa0,0x06]
	vext.16	d16, d17, d16, #3
@ CHECK: vext.32 q8, q9, q8, #3 @ encoding: [0xe0,0x0c,0xf2,0xef]
@ CHECK: vext.32 q8, q9, q8, #3 @ encoding: [0xf2,0xef,0xe0,0x0c]
	vext.32	q8, q9, q8, #3
@ CHECK: vtrn.8 d17, d16 @ encoding: [0xa0,0x10,0xf2,0xff]
@ CHECK: vtrn.8 d17, d16 @ encoding: [0xf2,0xff,0xa0,0x10]
	vtrn.8	d17, d16
@ CHECK: vtrn.16 d17, d16 @ encoding: [0xa0,0x10,0xf6,0xff]
@ CHECK: vtrn.16 d17, d16 @ encoding: [0xf6,0xff,0xa0,0x10]
	vtrn.16	d17, d16
@ CHECK: vtrn.32 d17, d16 @ encoding: [0xa0,0x10,0xfa,0xff]
@ CHECK: vtrn.32 d17, d16 @ encoding: [0xfa,0xff,0xa0,0x10]
	vtrn.32	d17, d16
@ CHECK: vtrn.8 q9, q8 @ encoding: [0xe0,0x20,0xf2,0xff]
@ CHECK: vtrn.8 q9, q8 @ encoding: [0xf2,0xff,0xe0,0x20]
	vtrn.8	q9, q8
@ CHECK: vtrn.16 q9, q8 @ encoding: [0xe0,0x20,0xf6,0xff]
@ CHECK: vtrn.16 q9, q8 @ encoding: [0xf6,0xff,0xe0,0x20]
	vtrn.16	q9, q8
@ CHECK: vtrn.32 q9, q8 @ encoding: [0xe0,0x20,0xfa,0xff]
@ CHECK: vtrn.32 q9, q8 @ encoding: [0xfa,0xff,0xe0,0x20]
	vtrn.32	q9, q8
@ CHECK: vuzp.8 d17, d16 @ encoding: [0x20,0x11,0xf2,0xff]
@ CHECK: vuzp.8 d17, d16 @ encoding: [0xf2,0xff,0x20,0x11]
	vuzp.8	d17, d16
@ CHECK: vuzp.16 d17, d16 @ encoding: [0x20,0x11,0xf6,0xff]
@ CHECK: vuzp.16 d17, d16 @ encoding: [0xf6,0xff,0x20,0x11]
	vuzp.16	d17, d16
@ CHECK: vuzp.8 q9, q8 @ encoding: [0x60,0x21,0xf2,0xff]
@ CHECK: vuzp.8 q9, q8 @ encoding: [0xf2,0xff,0x60,0x21]
	vuzp.8	q9, q8
@ CHECK: vuzp.16 q9, q8 @ encoding: [0x60,0x21,0xf6,0xff]
@ CHECK: vuzp.16 q9, q8 @ encoding: [0xf6,0xff,0x60,0x21]
	vuzp.16	q9, q8
@ CHECK: vuzp.32 q9, q8 @ encoding: [0x60,0x21,0xfa,0xff]
@ CHECK: vuzp.32 q9, q8 @ encoding: [0xfa,0xff,0x60,0x21]
	vuzp.32	q9, q8
@ CHECK: vzip.8 d17, d16 @ encoding: [0xa0,0x11,0xf2,0xff]
@ CHECK: vzip.8 d17, d16 @ encoding: [0xf2,0xff,0xa0,0x11]
	vzip.8	d17, d16
@ CHECK: vzip.16 d17, d16 @ encoding: [0xa0,0x11,0xf6,0xff]
@ CHECK: vzip.16 d17, d16 @ encoding: [0xf6,0xff,0xa0,0x11]
	vzip.16	d17, d16
@ CHECK: vzip.8 q9, q8 @ encoding: [0xe0,0x21,0xf2,0xff]
@ CHECK: vzip.8 q9, q8 @ encoding: [0xf2,0xff,0xe0,0x21]
	vzip.8	q9, q8
@ CHECK: vzip.16 q9, q8 @ encoding: [0xe0,0x21,0xf6,0xff]
@ CHECK: vzip.16 q9, q8 @ encoding: [0xf6,0xff,0xe0,0x21]
	vzip.16	q9, q8
@ CHECK: vzip.32 q9, q8 @ encoding: [0xe0,0x21,0xfa,0xff]
@ CHECK: vzip.32 q9, q8 @ encoding: [0xfa,0xff,0xe0,0x21]
	vzip.32	q9, q8
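
Every CHECK pair in this change shows the same mechanical rewrite: the four encoding bytes are unchanged, but the two halfwords trade places so that the high-order halfword of the 32-bit Thumb2 encoding is listed first, with each halfword still in little-endian byte order. The following is a minimal standalone C++ sketch of that byte ordering, not the in-tree emitter; the function name emitThumb2Word and the worked example value are illustrative only, with the 32-bit value reconstructed from the old little-endian byte list.

#include <cstdint>
#include <cstdio>

// Emit a 32-bit Thumb2 encoding as bytes: high-order halfword first,
// each halfword in little-endian byte order.
static void emitThumb2Word(uint32_t Binary, uint8_t Out[4]) {
  uint16_t Hi = static_cast<uint16_t>(Binary >> 16); // high halfword first
  uint16_t Lo = static_cast<uint16_t>(Binary & 0xffff);
  Out[0] = static_cast<uint8_t>(Hi & 0xff);
  Out[1] = static_cast<uint8_t>(Hi >> 8);
  Out[2] = static_cast<uint8_t>(Lo & 0xff);
  Out[3] = static_cast<uint8_t>(Lo >> 8);
}

int main() {
  // vshr.u64 d16, d16, #64: the old CHECK order [0xb0,0x00,0xc0,0xff] is the
  // plain little-endian image of 0xffc000b0; halfword-first emission yields
  // the new order [0xc0,0xff,0xb0,0x00].
  uint8_t Bytes[4];
  emitThumb2Word(0xffc000b0u, Bytes);
  std::printf("[0x%02x,0x%02x,0x%02x,0x%02x]\n",
              Bytes[0], Bytes[1], Bytes[2], Bytes[3]);
  return 0;
}

Assembling any of the source lines above with the llvm-mc command from the RUN line and comparing the reported @ encoding bytes should reproduce the new ordering.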