From 3914f721cc95bb1e8534101a6bb62dc484ef7520 Mon Sep 17 00:00:00 2001
From: Chris Lattner
Date: Sat, 24 Jan 2009 01:00:13 +0000
Subject: [PATCH] Make InstCombineStoreToCast handle aggregates more
 aggressively, handling the case in Transforms/InstCombine/cast-store-gep.ll,
 which is a heavily reduced testcase from Clang on x86-64.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@62904 91177308-0d34-0410-b5e6-96231b3b80d8
---
 .../Scalar/InstructionCombining.cpp           | 66 ++++++++++++++-----
 test/Transforms/InstCombine/cast-store-gep.ll | 17 +++++
 2 files changed, 65 insertions(+), 18 deletions(-)
 create mode 100644 test/Transforms/InstCombine/cast-store-gep.ll

diff --git a/lib/Transforms/Scalar/InstructionCombining.cpp b/lib/Transforms/Scalar/InstructionCombining.cpp
index 8b20453a0e4..dd55d65e163 100644
--- a/lib/Transforms/Scalar/InstructionCombining.cpp
+++ b/lib/Transforms/Scalar/InstructionCombining.cpp
@@ -7700,12 +7700,12 @@ Instruction *InstCombiner::commonCastTransforms(CastInst &CI) {
 
 /// FindElementAtOffset - Given a type and a constant offset, determine whether
 /// or not there is a sequence of GEP indices into the type that will land us at
-/// the specified offset. If so, fill them into NewIndices and return true,
-/// otherwise return false.
-static bool FindElementAtOffset(const Type *Ty, int64_t Offset,
-                                SmallVectorImpl<Value*> &NewIndices,
-                                const TargetData *TD) {
-  if (!Ty->isSized()) return false;
+/// the specified offset. If so, fill them into NewIndices and return the
+/// resultant element type, otherwise return null.
+static const Type *FindElementAtOffset(const Type *Ty, int64_t Offset,
+                                       SmallVectorImpl<Value*> &NewIndices,
+                                       const TargetData *TD) {
+  if (!Ty->isSized()) return 0;
 
   // Start with the index over the outer type. Note that the type size
   // might be zero (even if the offset isn't zero) if the indexed type
@@ -7731,7 +7731,7 @@ static bool FindElementAtOffset(const Type *Ty, int64_t Offset,
   while (Offset) {
     // Indexing into tail padding between struct/array elements.
     if (uint64_t(Offset*8) >= TD->getTypeSizeInBits(Ty))
-      return false;
+      return 0;
 
     if (const StructType *STy = dyn_cast<StructType>(Ty)) {
       const StructLayout *SL = TD->getStructLayout(STy);
@@ -7751,11 +7751,11 @@ static bool FindElementAtOffset(const Type *Ty, int64_t Offset,
       Ty = AT->getElementType();
     } else {
       // Otherwise, we can't index into the middle of this atomic type, bail.
-      return false;
+      return 0;
     }
   }
 
-  return true;
+  return Ty;
 }
 
 /// @brief Implement the transforms for cast of pointer (bitcast/ptrtoint)
@@ -11139,7 +11139,8 @@ Instruction *InstCombiner::visitLoadInst(LoadInst &LI) {
 }
 
 /// InstCombineStoreToCast - Fold store V, (cast P) -> store (cast V), P
-/// when possible.
+/// when possible. This makes it generally easy to do alias analysis and/or
+/// SROA/mem2reg of the memory object.
 static Instruction *InstCombineStoreToCast(InstCombiner &IC, StoreInst &SI) {
   User *CI = cast<User>(SI.getOperand(1));
   Value *CastOp = CI->getOperand(0);
@@ -11153,18 +11154,34 @@ static Instruction *InstCombineStoreToCast(InstCombiner &IC, StoreInst &SI) {
   if (!DestPTy->isInteger() && !isa<PointerType>(DestPTy))
     return 0;
 
+  /// NewGEPIndices - If SrcPTy is an aggregate type, we can emit a "noop gep"
+  /// to its first element. This allows us to handle things like:
+  ///   store i32 xxx, (bitcast {foo*, float}* %P to i32*)
+  /// on 32-bit hosts.
+  SmallVector<Value*, 4> NewGEPIndices;
+
   // If the source is an array, the code below will not succeed. Check to
   // see if a trivial 'gep P, 0, 0' will help matters. Only do this for
   // constants.
-  if (const ArrayType *ASrcTy = dyn_cast<ArrayType>(SrcPTy))
-    if (Constant *CSrc = dyn_cast<Constant>(CastOp))
-      if (ASrcTy->getNumElements() != 0) {
-        Value* Idxs[2];
-        Idxs[0] = Idxs[1] = Constant::getNullValue(Type::Int32Ty);
-        CastOp = ConstantExpr::getGetElementPtr(CSrc, Idxs, 2);
-        SrcTy = cast<PointerType>(CastOp->getType());
-        SrcPTy = SrcTy->getElementType();
+  if (isa<StructType>(SrcPTy) || isa<ArrayType>(SrcPTy)) {
+    // Index through pointer.
+    Constant *Zero = Constant::getNullValue(Type::Int32Ty);
+    NewGEPIndices.push_back(Zero);
+
+    while (1) {
+      if (const StructType *STy = dyn_cast<StructType>(SrcPTy)) {
+        NewGEPIndices.push_back(Zero);
+        SrcPTy = STy->getElementType(0);
+      } else if (const ArrayType *ATy = dyn_cast<ArrayType>(SrcPTy)) {
+        NewGEPIndices.push_back(Zero);
+        SrcPTy = ATy->getElementType();
+      } else {
+        break;
       }
+    }
+
+    SrcTy = PointerType::get(SrcPTy, SrcTy->getAddressSpace());
+  }
 
   if (!SrcPTy->isInteger() && !isa<PointerType>(SrcPTy))
     return 0;
@@ -11192,6 +11209,19 @@ static Instruction *InstCombineStoreToCast(InstCombiner &IC, StoreInst &SI) {
     if (isa<PointerType>(SIOp0->getType()))
       opcode = Instruction::PtrToInt;
   }
+
+  // SIOp0 is a pointer to aggregate and this is a store to the first field,
+  // emit a GEP to index into its first field.
+  if (!NewGEPIndices.empty()) {
+    if (Constant *C = dyn_cast<Constant>(CastOp))
+      CastOp = ConstantExpr::getGetElementPtr(C, &NewGEPIndices[0],
+                                              NewGEPIndices.size());
+    else
+      CastOp = IC.InsertNewInstBefore(
+              GetElementPtrInst::Create(CastOp, NewGEPIndices.begin(),
+                                        NewGEPIndices.end()), SI);
+  }
+
   if (Constant *C = dyn_cast<Constant>(SIOp0))
     NewCast = ConstantExpr::getCast(opcode, C, CastDstTy);
   else
diff --git a/test/Transforms/InstCombine/cast-store-gep.ll b/test/Transforms/InstCombine/cast-store-gep.ll
new file mode 100644
index 00000000000..95a069d60b7
--- /dev/null
+++ b/test/Transforms/InstCombine/cast-store-gep.ll
@@ -0,0 +1,17 @@
+; RUN: llvm-as < %s | opt -instcombine | llvm-dis | grep inttoptr
+; RUN: llvm-as < %s | opt -instcombine | llvm-dis | not grep alloca
+
+target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128"
+target triple = "x86_64-apple-darwin10.0"
+  %T = type { i8*, i8 }
+
+define i8* @test(i8* %Val, i64 %V) nounwind {
+entry:
+  %A = alloca %T, align 8
+  %mrv_gep = bitcast %T* %A to i64*               ; <i64*> [#uses=1]
+  %B = getelementptr %T* %A, i64 0, i32 0         ; <i8**> [#uses=1]
+
+  store i64 %V, i64* %mrv_gep
+  %C = load i8** %B, align 8                      ; <i8*> [#uses=1]
+  ret i8* %C
+}
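
A rough sketch of the expected output for the @test function above (not captured
from an actual run; value names are illustrative): the i64 store through the
bitcast becomes a GEP to the first field of %T plus an inttoptr of %V,
store-to-load forwarding then makes the alloca dead, and the RUN lines check
exactly that (an inttoptr appears, no alloca remains):

define i8* @test(i8* %Val, i64 %V) nounwind {
entry:
  %V.ptr = inttoptr i64 %V to i8*
  ret i8* %V.ptr
}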