Mirror of https://github.com/c64scene-ar/llvm-6502.git, synced 2025-04-11 00:39:36 +00:00.
Turn x86 unaligned load/store intrinsics into aligned load/store instructions if the pointer is known to be aligned.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@27781 91177308-0d34-0410-b5e6-96231b3b80d8
This commit (fd6bdf0b0f) is contained in this repository; its parent commit is 80edfb3af5.
@ -5471,7 +5471,11 @@ Instruction *InstCombiner::visitCallInst(CallInst &CI) {
|
||||
default: break;
|
||||
case Intrinsic::ppc_altivec_lvx:
|
||||
case Intrinsic::ppc_altivec_lvxl:
|
||||
// Turn lvx -> load if the pointer is known aligned.
|
||||
case Intrinsic::x86_sse_loadu_ps:
|
||||
case Intrinsic::x86_sse2_loadu_pd:
|
||||
case Intrinsic::x86_sse2_loadu_dq:
|
||||
// Turn PPC lvx -> load if the pointer is known aligned.
|
||||
// Turn X86 loadups -> load if the pointer is known aligned.
|
||||
if (GetKnownAlignment(II->getOperand(1), TD) >= 16) {
|
||||
Value *Ptr = InsertCastBefore(II->getOperand(1),
|
||||
PointerType::get(II->getType()), CI);
|
||||
@ -5487,6 +5491,17 @@ Instruction *InstCombiner::visitCallInst(CallInst &CI) {
|
||||
return new StoreInst(II->getOperand(1), Ptr);
|
||||
}
|
||||
break;
|
||||
case Intrinsic::x86_sse_storeu_ps:
|
||||
case Intrinsic::x86_sse2_storeu_pd:
|
||||
case Intrinsic::x86_sse2_storeu_dq:
|
||||
case Intrinsic::x86_sse2_storel_dq:
|
||||
// Turn X86 storeu -> store if the pointer is known aligned.
|
||||
if (GetKnownAlignment(II->getOperand(1), TD) >= 16) {
|
||||
const Type *OpPtrTy = PointerType::get(II->getOperand(2)->getType());
|
||||
Value *Ptr = InsertCastBefore(II->getOperand(1), OpPtrTy, CI);
|
||||
return new StoreInst(II->getOperand(2), Ptr);
|
||||
}
|
||||
break;
|
||||
case Intrinsic::ppc_altivec_vperm:
|
||||
// Turn vperm(V1,V2,mask) -> shuffle(V1,V2,mask) if mask is a constant.
|
||||
if (ConstantPacked *Mask = dyn_cast<ConstantPacked>(II->getOperand(3))) {
|
||||
|
Loading…
x
Reference in New Issue
Block a user