Update rlwimi tests to catch all the cases we care about

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@22623 91177308-0d34-0410-b5e6-96231b3b80d8
Nate Begeman 2005-08-03 18:11:23 +00:00
parent 82e6ef3ec3
commit 2bbcca30b7
2 changed files with 42 additions and 2 deletions

rlwimi.ll

@@ -1,5 +1,6 @@
 ; All of these ands and shifts should be folded into rlwimi's
-; RUN: llvm-as < rlwimi.ll | llc -march=ppc32 | not grep and
+; RUN: llvm-as < rlwimi.ll | llc -march=ppc32 | not grep and &&
+; RUN: llvm-as < rlwimi.ll | llc -march=ppc32 | grep rlwimi | wc -l | grep 8
 implementation ; Functions:
@@ -53,10 +54,19 @@ entry:
 ret int %tmp.9
 }
-int %test9(int %x, int %y) {
+int %test7(int %x, int %y) {
 entry:
 %tmp.2 = and int %x, -65536 ; <int> [#uses=1]
 %tmp.5 = and int %y, 65535 ; <int> [#uses=1]
 %tmp.7 = or int %tmp.5, %tmp.2 ; <int> [#uses=1]
 ret int %tmp.7
 }
+uint %test8(uint %bar) {
+entry:
+%tmp.3 = shl uint %bar, ubyte 1 ; <uint> [#uses=1]
+%tmp.4 = and uint %tmp.3, 2 ; <uint> [#uses=1]
+%tmp.6 = and uint %bar, 4294967293 ; <uint> [#uses=1]
+%tmp.7 = or uint %tmp.4, %tmp.6 ; <uint> [#uses=1]
+ret uint %tmp.7
+}
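For orientation, here is a rough C equivalent of the two functions added above (the helper names are mine, not part of the test). Both are insert-under-mask patterns, which is exactly the shape the new RUN lines expect to collapse into single rlwimi instructions instead of separate and/shift/or sequences.

```c
#include <stdint.h>

/* Sketch of what test7 computes: keep the high halfword of x and
 * insert the low halfword of y.  On PPC32 this is one rlwimi
 * (rotate by 0, insert under the low-16-bit mask). */
uint32_t merge_halves(uint32_t x, uint32_t y) {
    return (x & 0xFFFF0000u) | (y & 0x0000FFFFu);
}

/* Sketch of what test8 computes: copy bit 0 of bar into bit 1 while
 * preserving every other bit (4294967293 == ~2).  A rotate-left-by-1
 * insert under a one-bit mask covers the shift and both masks at once. */
uint32_t insert_bit(uint32_t bar) {
    return ((bar << 1) & 2u) | (bar & ~2u);
}
```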

rlwimi2.ll

@@ -0,0 +1,30 @@
+; All of these ands and shifts should be folded into rlwimi's
+; RUN: llvm-as < rlwimi2.ll | llc -march=ppc32 | grep rlwimi | wc -l | grep 3 &&
+; RUN: llvm-as < rlwimi2.ll | llc -march=ppc32 | grep srwi | wc -l | grep 1 &&
+; RUN: llvm-as < rlwimi2.ll | llc -march=ppc32 | not grep slwi
+implementation ; Functions:
+ushort %test1(uint %srcA, uint %srcB, uint %alpha) {
+entry:
+%tmp.1 = shl uint %srcA, ubyte 15 ; <uint> [#uses=1]
+%tmp.4 = and uint %tmp.1, 32505856 ; <uint> [#uses=1]
+%tmp.6 = and uint %srcA, 31775 ; <uint> [#uses=1]
+%tmp.7 = or uint %tmp.4, %tmp.6 ; <uint> [#uses=1]
+%tmp.9 = shl uint %srcB, ubyte 15 ; <uint> [#uses=1]
+%tmp.12 = and uint %tmp.9, 32505856 ; <uint> [#uses=1]
+%tmp.14 = and uint %srcB, 31775 ; <uint> [#uses=1]
+%tmp.15 = or uint %tmp.12, %tmp.14 ; <uint> [#uses=1]
+%tmp.18 = mul uint %tmp.7, %alpha ; <uint> [#uses=1]
+%tmp.20 = sub uint 32, %alpha ; <uint> [#uses=1]
+%tmp.22 = mul uint %tmp.15, %tmp.20 ; <uint> [#uses=1]
+%tmp.23 = add uint %tmp.22, %tmp.18 ; <uint> [#uses=2]
+%tmp.27 = shr uint %tmp.23, ubyte 5 ; <uint> [#uses=1]
+%tmp.28 = cast uint %tmp.27 to ushort ; <ushort> [#uses=1]
+%tmp.29 = and ushort %tmp.28, 31775 ; <ushort> [#uses=1]
+%tmp.33 = shr uint %tmp.23, ubyte 20 ; <uint> [#uses=1]
+%tmp.34 = cast uint %tmp.33 to ushort ; <ushort> [#uses=1]
+%tmp.35 = and ushort %tmp.34, 992 ; <ushort> [#uses=1]
+%tmp.36 = or ushort %tmp.29, %tmp.35 ; <ushort> [#uses=1]
+ret ushort %tmp.36
+}
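The new test reads like a fixed-point alpha blend of two packed 5-5-5 pixels; that interpretation, and the names in the sketch below, are my own, but the arithmetic is a direct C transcription of test1. The RUN checks expect each shift-and-mask pair to become a rotate-and-insert (rlwimi), leaving only one plain right shift (srwi) and no left shifts (slwi).

```c
#include <stdint.h>

/* Hypothetical C source for test1 above, assuming srcA/srcB hold
 * RGB555 pixels and alpha is in [0, 32].  Green (bits 5-9) is moved
 * up by 15 bits (mask 0x01F00000) so all three 5-bit channels can be
 * scaled in a single 32-bit multiply without the products colliding;
 * red and blue stay in place under mask 0x7C1F. */
uint16_t blend555(uint32_t srcA, uint32_t srcB, uint32_t alpha) {
    uint32_t a = ((srcA << 15) & 0x01F00000u) | (srcA & 0x7C1Fu);
    uint32_t b = ((srcB << 15) & 0x01F00000u) | (srcB & 0x7C1Fu);
    uint32_t mix = a * alpha + b * (32u - alpha);
    /* Undo the scaling: red/blue come back down by 5, green by 20. */
    return (uint16_t)(((mix >> 5) & 0x7C1Fu) | ((mix >> 20) & 0x3E0u));
}
```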