@ RUN: llvm-mc -mcpu=cortex-a8 -triple arm-unknown-unknown -show-encoding < %s | FileCheck %s

@ Vector compare equal (VCEQ), register form: integer (i8/i16/i32) and
@ float (f32) element types, both D (64-bit) and Q (128-bit) registers.
vceq.i8 d16, d16, d17
vceq.i16 d16, d16, d17
vceq.i32 d16, d16, d17
vceq.f32 d16, d16, d17
vceq.i8 q8, q8, q9
vceq.i16 q8, q8, q9
vceq.i32 q8, q8, q9
vceq.f32 q8, q8, q9

@ CHECK: vceq.i8 d16, d16, d17 @ encoding: [0xb1,0x08,0x40,0xf3]
@ CHECK: vceq.i16 d16, d16, d17 @ encoding: [0xb1,0x08,0x50,0xf3]
@ CHECK: vceq.i32 d16, d16, d17 @ encoding: [0xb1,0x08,0x60,0xf3]
@ CHECK: vceq.f32 d16, d16, d17 @ encoding: [0xa1,0x0e,0x40,0xf2]
@ CHECK: vceq.i8 q8, q8, q9 @ encoding: [0xf2,0x08,0x40,0xf3]
@ CHECK: vceq.i16 q8, q8, q9 @ encoding: [0xf2,0x08,0x50,0xf3]
@ CHECK: vceq.i32 q8, q8, q9 @ encoding: [0xf2,0x08,0x60,0xf3]
@ CHECK: vceq.f32 q8, q8, q9 @ encoding: [0xe2,0x0e,0x40,0xf2]
@ Vector compare greater-than-or-equal (VCGE): signed, unsigned, and float
@ variants, D and Q registers, plus the absolute-compare VACGE (f32 only).
vcge.s8 d16, d16, d17
vcge.s16 d16, d16, d17
vcge.s32 d16, d16, d17
vcge.u8 d16, d16, d17
vcge.u16 d16, d16, d17
vcge.u32 d16, d16, d17
vcge.f32 d16, d16, d17
vcge.s8 q8, q8, q9
vcge.s16 q8, q8, q9
vcge.s32 q8, q8, q9
vcge.u8 q8, q8, q9
vcge.u16 q8, q8, q9
vcge.u32 q8, q8, q9
vcge.f32 q8, q8, q9
vacge.f32 d16, d16, d17
vacge.f32 q8, q8, q9

@ CHECK: vcge.s8 d16, d16, d17 @ encoding: [0xb1,0x03,0x40,0xf2]
@ CHECK: vcge.s16 d16, d16, d17 @ encoding: [0xb1,0x03,0x50,0xf2]
@ CHECK: vcge.s32 d16, d16, d17 @ encoding: [0xb1,0x03,0x60,0xf2]
@ CHECK: vcge.u8 d16, d16, d17 @ encoding: [0xb1,0x03,0x40,0xf3]
@ CHECK: vcge.u16 d16, d16, d17 @ encoding: [0xb1,0x03,0x50,0xf3]
@ CHECK: vcge.u32 d16, d16, d17 @ encoding: [0xb1,0x03,0x60,0xf3]
@ CHECK: vcge.f32 d16, d16, d17 @ encoding: [0xa1,0x0e,0x40,0xf3]
@ CHECK: vcge.s8 q8, q8, q9 @ encoding: [0xf2,0x03,0x40,0xf2]
@ CHECK: vcge.s16 q8, q8, q9 @ encoding: [0xf2,0x03,0x50,0xf2]
@ CHECK: vcge.s32 q8, q8, q9 @ encoding: [0xf2,0x03,0x60,0xf2]
@ CHECK: vcge.u8 q8, q8, q9 @ encoding: [0xf2,0x03,0x40,0xf3]
@ CHECK: vcge.u16 q8, q8, q9 @ encoding: [0xf2,0x03,0x50,0xf3]
@ CHECK: vcge.u32 q8, q8, q9 @ encoding: [0xf2,0x03,0x60,0xf3]
@ CHECK: vcge.f32 q8, q8, q9 @ encoding: [0xe2,0x0e,0x40,0xf3]
@ CHECK: vacge.f32 d16, d16, d17 @ encoding: [0xb1,0x0e,0x40,0xf3]
@ CHECK: vacge.f32 q8, q8, q9 @ encoding: [0xf2,0x0e,0x40,0xf3]
@ Vector compare greater-than (VCGT): signed, unsigned, and float variants,
@ D and Q registers, plus the absolute-compare VACGT (f32 only).
vcgt.s8 d16, d16, d17
vcgt.s16 d16, d16, d17
vcgt.s32 d16, d16, d17
vcgt.u8 d16, d16, d17
vcgt.u16 d16, d16, d17
vcgt.u32 d16, d16, d17
vcgt.f32 d16, d16, d17
vcgt.s8 q8, q8, q9
vcgt.s16 q8, q8, q9
vcgt.s32 q8, q8, q9
vcgt.u8 q8, q8, q9
vcgt.u16 q8, q8, q9
vcgt.u32 q8, q8, q9
vcgt.f32 q8, q8, q9
vacgt.f32 d16, d16, d17
vacgt.f32 q8, q8, q9

@ CHECK: vcgt.s8 d16, d16, d17 @ encoding: [0xa1,0x03,0x40,0xf2]
@ CHECK: vcgt.s16 d16, d16, d17 @ encoding: [0xa1,0x03,0x50,0xf2]
@ CHECK: vcgt.s32 d16, d16, d17 @ encoding: [0xa1,0x03,0x60,0xf2]
@ CHECK: vcgt.u8 d16, d16, d17 @ encoding: [0xa1,0x03,0x40,0xf3]
@ CHECK: vcgt.u16 d16, d16, d17 @ encoding: [0xa1,0x03,0x50,0xf3]
@ CHECK: vcgt.u32 d16, d16, d17 @ encoding: [0xa1,0x03,0x60,0xf3]
@ CHECK: vcgt.f32 d16, d16, d17 @ encoding: [0xa1,0x0e,0x60,0xf3]
@ CHECK: vcgt.s8 q8, q8, q9 @ encoding: [0xe2,0x03,0x40,0xf2]
@ CHECK: vcgt.s16 q8, q8, q9 @ encoding: [0xe2,0x03,0x50,0xf2]
@ CHECK: vcgt.s32 q8, q8, q9 @ encoding: [0xe2,0x03,0x60,0xf2]
@ CHECK: vcgt.u8 q8, q8, q9 @ encoding: [0xe2,0x03,0x40,0xf3]
@ CHECK: vcgt.u16 q8, q8, q9 @ encoding: [0xe2,0x03,0x50,0xf3]
@ CHECK: vcgt.u32 q8, q8, q9 @ encoding: [0xe2,0x03,0x60,0xf3]
@ CHECK: vcgt.f32 q8, q8, q9 @ encoding: [0xe2,0x0e,0x60,0xf3]
@ CHECK: vacgt.f32 d16, d16, d17 @ encoding: [0xb1,0x0e,0x60,0xf3]
@ CHECK: vacgt.f32 q8, q8, q9 @ encoding: [0xf2,0x0e,0x60,0xf3]
@ Vector test bits (VTST): 8/16/32-bit element sizes, D and Q registers.
vtst.8 d16, d16, d17
vtst.16 d16, d16, d17
vtst.32 d16, d16, d17
vtst.8 q8, q8, q9
vtst.16 q8, q8, q9
vtst.32 q8, q8, q9

@ CHECK: vtst.8 d16, d16, d17 @ encoding: [0xb1,0x08,0x40,0xf2]
@ CHECK: vtst.16 d16, d16, d17 @ encoding: [0xb1,0x08,0x50,0xf2]
@ CHECK: vtst.32 d16, d16, d17 @ encoding: [0xb1,0x08,0x60,0xf2]
@ CHECK: vtst.8 q8, q8, q9 @ encoding: [0xf2,0x08,0x40,0xf2]
@ CHECK: vtst.16 q8, q8, q9 @ encoding: [0xf2,0x08,0x50,0xf2]
@ CHECK: vtst.32 q8, q8, q9 @ encoding: [0xf2,0x08,0x60,0xf2]
@ Vector compares against the #0 immediate (one-register forms).
vceq.i8 d16, d16, #0
vcge.s8 d16, d16, #0
vcle.s8 d16, d16, #0
vcgt.s8 d16, d16, #0
vclt.s8 d16, d16, #0

@ CHECK: vceq.i8 d16, d16, #0 @ encoding: [0x20,0x01,0xf1,0xf3]
@ CHECK: vcge.s8 d16, d16, #0 @ encoding: [0xa0,0x00,0xf1,0xf3]
@ CHECK: vcle.s8 d16, d16, #0 @ encoding: [0xa0,0x01,0xf1,0xf3]
@ CHECK: vcgt.s8 d16, d16, #0 @ encoding: [0x20,0x00,0xf1,0xf3]
@ CHECK: vclt.s8 d16, d16, #0 @ encoding: [0x20,0x02,0xf1,0xf3]
@ Register-form VCLT is a pseudo-instruction: it assembles to VCGT with the
@ two source operands swapped, as the CHECK lines below show.
vclt.s8 q12, q13, q3
vclt.s16 q12, q13, q3
vclt.s32 q12, q13, q3
vclt.u8 q12, q13, q3
vclt.u16 q12, q13, q3
vclt.u32 q12, q13, q3
vclt.f32 q12, q13, q3

vclt.s8 d12, d13, d3
vclt.s16 d12, d13, d3
vclt.s32 d12, d13, d3
vclt.u8 d12, d13, d3
vclt.u16 d12, d13, d3
vclt.u32 d12, d13, d3
vclt.f32 d12, d13, d3

@ CHECK: vcgt.s8 q12, q3, q13 @ encoding: [0x6a,0x83,0x46,0xf2]
@ CHECK: vcgt.s16 q12, q3, q13 @ encoding: [0x6a,0x83,0x56,0xf2]
@ CHECK: vcgt.s32 q12, q3, q13 @ encoding: [0x6a,0x83,0x66,0xf2]
@ CHECK: vcgt.u8 q12, q3, q13 @ encoding: [0x6a,0x83,0x46,0xf3]
@ CHECK: vcgt.u16 q12, q3, q13 @ encoding: [0x6a,0x83,0x56,0xf3]
@ CHECK: vcgt.u32 q12, q3, q13 @ encoding: [0x6a,0x83,0x66,0xf3]
@ CHECK: vcgt.f32 q12, q3, q13 @ encoding: [0x6a,0x8e,0x66,0xf3]

@ CHECK: vcgt.s8 d12, d3, d13 @ encoding: [0x0d,0xc3,0x03,0xf2]
@ CHECK: vcgt.s16 d12, d3, d13 @ encoding: [0x0d,0xc3,0x13,0xf2]
@ CHECK: vcgt.s32 d12, d3, d13 @ encoding: [0x0d,0xc3,0x23,0xf2]
@ CHECK: vcgt.u8 d12, d3, d13 @ encoding: [0x0d,0xc3,0x03,0xf3]
@ CHECK: vcgt.u16 d12, d3, d13 @ encoding: [0x0d,0xc3,0x13,0xf3]
@ CHECK: vcgt.u32 d12, d3, d13 @ encoding: [0x0d,0xc3,0x23,0xf3]
@ CHECK: vcgt.f32 d12, d3, d13 @ encoding: [0x0d,0xce,0x23,0xf3]
@ Register-form VCLE is a pseudo-instruction: it assembles to VCGE with the
@ two source operands swapped, as the CHECK lines below show.
vcle.s8 d16, d16, d17
vcle.s16 d16, d16, d17
vcle.s32 d16, d16, d17
vcle.u8 d16, d16, d17
vcle.u16 d16, d16, d17
vcle.u32 d16, d16, d17
vcle.f32 d16, d16, d17
vcle.s8 q8, q8, q9
vcle.s16 q8, q8, q9
vcle.s32 q8, q8, q9
vcle.u8 q8, q8, q9
vcle.u16 q8, q8, q9
vcle.u32 q8, q8, q9
vcle.f32 q8, q8, q9

@ CHECK: vcge.s8 d16, d17, d16 @ encoding: [0xb0,0x03,0x41,0xf2]
@ CHECK: vcge.s16 d16, d17, d16 @ encoding: [0xb0,0x03,0x51,0xf2]
@ CHECK: vcge.s32 d16, d17, d16 @ encoding: [0xb0,0x03,0x61,0xf2]
@ CHECK: vcge.u8 d16, d17, d16 @ encoding: [0xb0,0x03,0x41,0xf3]
@ CHECK: vcge.u16 d16, d17, d16 @ encoding: [0xb0,0x03,0x51,0xf3]
@ CHECK: vcge.u32 d16, d17, d16 @ encoding: [0xb0,0x03,0x61,0xf3]
@ CHECK: vcge.f32 d16, d17, d16 @ encoding: [0xa0,0x0e,0x41,0xf3]
@ CHECK: vcge.s8 q8, q9, q8 @ encoding: [0xf0,0x03,0x42,0xf2]
@ CHECK: vcge.s16 q8, q9, q8 @ encoding: [0xf0,0x03,0x52,0xf2]
@ CHECK: vcge.s32 q8, q9, q8 @ encoding: [0xf0,0x03,0x62,0xf2]
@ CHECK: vcge.u8 q8, q9, q8 @ encoding: [0xf0,0x03,0x42,0xf3]
@ CHECK: vcge.u16 q8, q9, q8 @ encoding: [0xf0,0x03,0x52,0xf3]
@ CHECK: vcge.u32 q8, q9, q8 @ encoding: [0xf0,0x03,0x62,0xf3]
@ CHECK: vcge.f32 q8, q9, q8 @ encoding: [0xe0,0x0e,0x42,0xf3]
@ VACLT is an alias for VACGT w/ the source operands reversed.
@ VACLE is an alias for VACGE w/ the source operands reversed.
@ Both three-operand and two-operand (destructive) spellings are accepted.
vaclt.f32 q9, q11, q12
vaclt.f32 d9, d11, d12
vaclt.f32 q11, q12
vaclt.f32 d11, d12
vacle.f32 q9, q11, q12
vacle.f32 d9, d11, d12
vacle.f32 q11, q12
vacle.f32 d11, d12

@ CHECK: vacgt.f32 q9, q12, q11 @ encoding: [0xf6,0x2e,0x68,0xf3]
@ CHECK: vacgt.f32 d9, d12, d11 @ encoding: [0x1b,0x9e,0x2c,0xf3]
@ CHECK: vacgt.f32 q11, q12, q11 @ encoding: [0xf6,0x6e,0x68,0xf3]
@ CHECK: vacgt.f32 d11, d12, d11 @ encoding: [0x1b,0xbe,0x2c,0xf3]
@ CHECK: vacge.f32 q9, q12, q11 @ encoding: [0xf6,0x2e,0x48,0xf3]
@ CHECK: vacge.f32 d9, d12, d11 @ encoding: [0x1b,0x9e,0x0c,0xf3]
@ CHECK: vacge.f32 q11, q12, q11 @ encoding: [0xf6,0x6e,0x48,0xf3]
@ CHECK: vacge.f32 d11, d12, d11 @ encoding: [0x1b,0xbe,0x0c,0xf3]