Add some missing pattern matches for AArch64 Neon intrinsics like vuqadd_s64 and friends.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@196192 91177308-0d34-0410-b5e6-96231b3b80d8
This commit is contained in:
Jiangning Liu 2013-12-03 01:33:52 +00:00
parent 7f1f8d4146
commit bbc450c5cf
2 changed files with 81 additions and 0 deletions

View File

@ -5122,6 +5122,27 @@ defm : Neon_Scalar2SameMisc_accum_BHSD_size_patterns<int_aarch64_neon_vsqadd,
USQADDbb, USQADDhh,
USQADDss, USQADDdd>;
// Map the v1i64 form of the aarch64 suqadd intrinsic onto the scalar
// SUQADDdd instruction (d-register variant).
def : Pat<(v1i64 (int_aarch64_neon_suqadd (v1i64 FPR64:$Src),
(v1i64 FPR64:$Rn))),
(SUQADDdd FPR64:$Src, FPR64:$Rn)>;
// Map the v1i64 form of the aarch64 usqadd intrinsic onto USQADDdd.
def : Pat<(v1i64 (int_aarch64_neon_usqadd (v1i64 FPR64:$Src),
(v1i64 FPR64:$Rn))),
(USQADDdd FPR64:$Src, FPR64:$Rn)>;
// Reuse the ARM vabs intrinsic for the scalar v1i64 case: select ABSdd.
def : Pat<(v1i64 (int_arm_neon_vabs (v1i64 FPR64:$Rn))),
(ABSdd FPR64:$Rn)>;
// v1i64 vqabs (saturating absolute value) selects SQABSdd.
def : Pat<(v1i64 (int_arm_neon_vqabs (v1i64 FPR64:$Rn))),
(SQABSdd FPR64:$Rn)>;
// v1i64 vqneg (saturating negate) selects SQNEGdd.
def : Pat<(v1i64 (int_arm_neon_vqneg (v1i64 FPR64:$Rn))),
(SQNEGdd FPR64:$Rn)>;
// Scalar negate: match (0 - x) on v1i64, where the zero is expressed as a
// bitconvert of the all-zero v8i8 vector, and select NEGdd.
def : Pat<(v1i64 (sub (v1i64 (bitconvert (v8i8 Neon_AllZero))),
(v1i64 FPR64:$Rn))),
(NEGdd FPR64:$Rn)>;
// Scalar Signed Saturating Extract Unsigned Narrow
defm SQXTUN : NeonI_Scalar2SameMisc_narrow_HSD_size<0b1, 0b10010, "sqxtun">;
defm : Neon_Scalar2SameMisc_narrow_HSD_size_patterns<int_arm_neon_vqmovnsu,

View File

@ -0,0 +1,60 @@
;RUN: llc < %s -verify-machineinstrs -mtriple=aarch64-none-linux-gnu -mattr=+neon | FileCheck %s
declare <1 x i64> @llvm.arm.neon.vqneg.v1i64(<1 x i64>)
declare <1 x i64> @llvm.arm.neon.vqabs.v1i64(<1 x i64>)
declare <1 x i64> @llvm.arm.neon.vabs.v1i64(<1 x i64>)
declare <1 x i64> @llvm.aarch64.neon.usqadd.v1i64(<1 x i64>, <1 x i64>)
declare <1 x i64> @llvm.aarch64.neon.suqadd.v1i64(<1 x i64>, <1 x i64>)
; Verify that the v1i64 suqadd intrinsic is selected to the scalar
; suqadd (d-register) instruction.
define <1 x i64> @test_vuqadd_s64(<1 x i64> %a, <1 x i64> %b) {
entry:
; CHECK: test_vuqadd_s64
%vuqadd2.i = tail call <1 x i64> @llvm.aarch64.neon.suqadd.v1i64(<1 x i64> %a, <1 x i64> %b)
; CHECK: suqadd d{{[0-9]+}}, d{{[0-9]+}}
ret <1 x i64> %vuqadd2.i
}
; Verify that the v1i64 usqadd intrinsic is selected to the scalar
; usqadd (d-register) instruction.
define <1 x i64> @test_vsqadd_u64(<1 x i64> %a, <1 x i64> %b) {
entry:
; CHECK: test_vsqadd_u64
%vsqadd2.i = tail call <1 x i64> @llvm.aarch64.neon.usqadd.v1i64(<1 x i64> %a, <1 x i64> %b)
; CHECK: usqadd d{{[0-9]+}}, d{{[0-9]+}}
ret <1 x i64> %vsqadd2.i
}
; Verify that the v1i64 vabs intrinsic is selected to the scalar abs
; (d-register) instruction.
define <1 x i64> @test_vabs_s64(<1 x i64> %a) {
; CHECK: test_vabs_s64
entry:
%vabs1.i = tail call <1 x i64> @llvm.arm.neon.vabs.v1i64(<1 x i64> %a)
; CHECK: abs d{{[0-9]+}}, d{{[0-9]+}}
ret <1 x i64> %vabs1.i
}
; Verify that the v1i64 vqabs intrinsic is selected to the scalar sqabs
; (d-register) instruction.
define <1 x i64> @test_vqabs_s64(<1 x i64> %a) {
; CHECK: test_vqabs_s64
entry:
%vqabs1.i = tail call <1 x i64> @llvm.arm.neon.vqabs.v1i64(<1 x i64> %a)
; CHECK: sqabs d{{[0-9]+}}, d{{[0-9]+}}
ret <1 x i64> %vqabs1.i
}
; Verify that the v1i64 vqneg intrinsic is selected to the scalar sqneg
; (d-register) instruction.
define <1 x i64> @test_vqneg_s64(<1 x i64> %a) {
; CHECK: test_vqneg_s64
entry:
%vqneg1.i = tail call <1 x i64> @llvm.arm.neon.vqneg.v1i64(<1 x i64> %a)
; CHECK: sqneg d{{[0-9]+}}, d{{[0-9]+}}
ret <1 x i64> %vqneg1.i
}
; Verify that a v1i64 (0 - x) subtraction is selected to the scalar neg
; (d-register) instruction. The IR shape (sub zeroinitializer, %a) is
; exactly the form the new NEG pattern matches — do not canonicalize it.
define <1 x i64> @test_vneg_s64(<1 x i64> %a) {
; CHECK: test_vneg_s64
entry:
%sub.i = sub <1 x i64> zeroinitializer, %a
; CHECK: neg d{{[0-9]+}}, d{{[0-9]+}}
ret <1 x i64> %sub.i
}