References (uses of the DataLayout member DL across lib/Transforms/InstCombine)
lib/Transforms/InstCombine/InstCombineAddSub.cpp 1316 if (haveNoCommonBitsSet(LHS, RHS, DL, &AC, &I, &DT))
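
The single InstCombineAddSub.cpp reference above is ValueTracking's haveNoCommonBitsSet guarding the add-to-or rewrite: when the two operands provably share no set bits, their sum equals their bitwise or. A minimal sketch of that idiom; the helper name foldAddToOr and the IRBuilder plumbing are illustrative, not InstCombine's actual code path.

    #include "llvm/Analysis/AssumptionCache.h"
    #include "llvm/Analysis/ValueTracking.h"
    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/Dominators.h"
    #include "llvm/IR/IRBuilder.h"
    using namespace llvm;

    // If LHS and RHS have no common set bits, LHS + RHS == LHS | RHS; the
    // 'or' form is easier for later bitwise folds to see through.
    static Value *foldAddToOr(Value *LHS, Value *RHS, const DataLayout &DL,
                              AssumptionCache &AC, Instruction &I,
                              DominatorTree &DT, IRBuilder<> &Builder) {
      if (haveNoCommonBitsSet(LHS, RHS, DL, &AC, &I, &DT))
        return Builder.CreateOr(LHS, RHS);
      return nullptr; // No proof; leave the add alone.
    }
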
lib/Transforms/InstCombine/InstCombineAtomicRMW.cpp 127 SI->setAlignment(MaybeAlign(DL.getABITypeAlignment(RMWI.getType())));
157 Load->setAlignment(MaybeAlign(DL.getABITypeAlignment(RMWI.getType())));
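
Both InstCombineAtomicRMW.cpp references show the same step: when an atomicrmw is rewritten as a plain load or store, the new instruction gets the DataLayout's ABI alignment for the accessed type. A sketch of just the alignment step, assuming the LLVM 10-era MaybeAlign API visible in the listing; the surrounding lowering is elided.

    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Give the replacement store a conservative, ABI-derived alignment for
    // the stored type rather than leaving it unspecified.
    static void setABIAlignment(StoreInst *SI, Type *AccessTy,
                                const DataLayout &DL) {
      SI->setAlignment(MaybeAlign(DL.getABITypeAlignment(AccessTy)));
    }
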
lib/Transforms/InstCombine/InstCombineCalls.cpp 111 unsigned DstAlign = getKnownAlignment(MI->getRawDest(), DL, MI, &AC, &DT);
118 unsigned SrcAlign = getKnownAlignment(MI->getRawSource(), DL, MI, &AC, &DT);
229 getKnownAlignment(MI->getDest(), DL, MI, &AC, &DT);
1886 if (Value *V = lowerObjectSizeCall(II, DL, &TLI, /*MustSucceed=*/false))
2334 if (getOrEnforceKnownAlignment(II->getArgOperand(0), 16, DL, II, &AC,
2351 if (getOrEnforceKnownAlignment(II->getArgOperand(1), 16, DL, II, &AC,
2368 if (getOrEnforceKnownAlignment(II->getArgOperand(0), 16, DL, II, &AC,
2380 if (getOrEnforceKnownAlignment(II->getArgOperand(0), 32, DL, II, &AC,
2389 if (getOrEnforceKnownAlignment(II->getArgOperand(1), 16, DL, II, &AC,
2401 if (getOrEnforceKnownAlignment(II->getArgOperand(1), 32, DL, II, &AC,
3196 if (DL.isLittleEndian())
3200 Value *Op0ToUse = (DL.isLittleEndian()) ? Op1 : Op0;
3201 Value *Op1ToUse = (DL.isLittleEndian()) ? Op0 : Op1;
3218 DL, II, &AC, &DT);
3238 getKnownAlignment(II->getArgOperand(0), DL, II, &AC, &DT);
4018 isKnownNonZero(DerivedPtr, DL, 0, &AC, II, &DT)) {
4131 LibCallSimplifier Simplifier(DL, &TLI, ORE, BFI, PSI, InstCombineRAUW,
4281 isKnownNonZero(V, DL, 0, &AC, &Call, &DT))
4368 if (CI && isSafeToEliminateVarargsCast(Call, DL, CI, ix)) {
4442 if (!CastInst::isBitOrNoopPointerCastable(NewRetTy, OldRetTy, DL)) {
4495 if (!CastInst::isBitOrNoopPointerCastable(ActTy, ParamTy, DL))
4513 if (DL.getTypeAllocSize(CurElTy) !=
4514 DL.getTypeAllocSize(ParamPTy->getElementType()))
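
The InstCombineCalls.cpp block above is dominated by alignment inference on intrinsics: getKnownAlignment / getOrEnforceKnownAlignment (declared in llvm/Transforms/Utils/Local.h) recompute the best provable alignment for mem* and target-intrinsic operands, and the pass writes it back when it beats the declared value. A sketch of that idiom for memcpy/memmove, using the LLVM 10-era unsigned-returning API; the helper name is illustrative.

    #include "llvm/Analysis/AssumptionCache.h"
    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/Dominators.h"
    #include "llvm/IR/IntrinsicInst.h"
    #include "llvm/Transforms/Utils/Local.h"
    using namespace llvm;

    // Raise a mem-transfer intrinsic's declared alignments to whatever the
    // analyses can prove about its pointer operands.
    static void upgradeMemTransferAlignment(MemTransferInst *MI,
                                            const DataLayout &DL,
                                            AssumptionCache &AC,
                                            DominatorTree &DT) {
      unsigned DstAlign = getKnownAlignment(MI->getRawDest(), DL, MI, &AC, &DT);
      unsigned SrcAlign = getKnownAlignment(MI->getRawSource(), DL, MI, &AC, &DT);
      if (DstAlign > MI->getDestAlignment())
        MI->setDestAlignment(DstAlign);
      if (SrcAlign > MI->getSourceAlignment())
        MI->setSourceAlignment(SrcAlign);
    }
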
lib/Transforms/InstCombine/InstCombineCasts.cpp 95 unsigned AllocElTyAlign = DL.getABITypeAlignment(AllocElTy);
96 unsigned CastElTyAlign = DL.getABITypeAlignment(CastElTy);
104 uint64_t AllocElTySize = DL.getTypeAllocSize(AllocElTy);
105 uint64_t CastElTySize = DL.getTypeAllocSize(CastElTy);
110 uint64_t AllocElTyStoreSize = DL.getTypeStoreSize(AllocElTy);
111 uint64_t CastElTyStoreSize = DL.getTypeStoreSize(CastElTy);
166 if (Constant *FoldedC = ConstantFoldConstant(C, DL, &TLI))
241 SrcTy->isPtrOrPtrVectorTy() ? DL.getIntPtrType(SrcTy) : nullptr;
243 MidTy->isPtrOrPtrVectorTy() ? DL.getIntPtrType(MidTy) : nullptr;
245 DstTy->isPtrOrPtrVectorTy() ? DL.getIntPtrType(DstTy) : nullptr;
1763 DL.getPointerSizeInBits(AS)) {
1764 Type *Ty = DL.getIntPtrType(CI.getContext(), AS);
1811 if (Ty->getScalarSizeInBits() == DL.getIndexSizeInBits(AS))
1814 Type *PtrTy = DL.getIntPtrType(CI.getContext(), AS);
2347 if (Src->getPointerDereferenceableBytes(DL, CanBeNull)) {
2440 DL.isLegalInteger(DestTy->getScalarSizeInBits()) &&
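
In InstCombineCasts.cpp the recurring question is width preservation: lines 1763-1814 only drop or rewrite a ptrtoint/inttoptr pair when the intermediate integer is exactly pointer-sized (or index-sized) for the address space, since otherwise the cast hides a trunc or zext. A minimal sketch of the pointer-width check; foldIntPtrRoundTrip is an illustrative name, and the real fold handles more shapes.

    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // inttoptr (ptrtoint P) -> P is only sound when no bits were truncated
    // or extended in between, i.e. the integer is exactly pointer-width.
    static Value *foldIntPtrRoundTrip(IntToPtrInst &ITP, const DataLayout &DL) {
      auto *PTI = dyn_cast<PtrToIntInst>(ITP.getOperand(0));
      if (!PTI)
        return nullptr;
      unsigned AS = ITP.getAddressSpace();
      if (PTI->getType()->getScalarSizeInBits() != DL.getPointerSizeInBits(AS))
        return nullptr;
      Value *P = PTI->getPointerOperand();
      // The pointer types must also agree for a direct replacement.
      return P->getType() == ITP.getType() ? P : nullptr;
    }
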
lib/Transforms/InstCombine/InstCombineCompares.cpp 236 CompareRHS, DL, &TLI);
314 Type *IntPtrTy = DL.getIntPtrType(GEP->getType());
402 Ty = DL.getSmallestLegalIntType(Init->getContext(), ArrayElementCount);
868 Value *Offset = evaluateGEPOffsetExpression(GEPLHS, *this, DL);
959 return transformToIndexedCompare(GEPLHS, RHS, Cond, DL);
1021 return transformToIndexedCompare(GEPLHS, RHS, Cond, DL);
1402 if (isKnownPositive(A, DL, 0, &AC, &Cmp, &DT))
1404 if (isKnownPositive(B, DL, 0, &AC, &Cmp, &DT))
1479 Optional<bool> Imp = isImpliedCondition(DomCond, &Cmp, DL, TrueBB == CmpBB);
1864 if (ExactLogBase2 != -1 && DL.isLegalInteger(ExactLogBase2 + 1)) {
2150 DL.isLegalInteger(TypeBits - Amt)) {
3243 DL.getIntPtrType(RHSC->getType()) == LHSI->getOperand(0)->getType())
4409 return DL.getPointerTypeSizeInBits(SrcTy) == DestTy->getIntegerBitWidth();
4933 : DL.getIndexTypeSizeInBits(Ty->getScalarType());
5498 if (auto *Alloca = dyn_cast<AllocaInst>(GetUnderlyingObject(Op0, DL)))
5501 if (auto *Alloca = dyn_cast<AllocaInst>(GetUnderlyingObject(Op1, DL)))
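
In InstCombineCompares.cpp, line 1479 is worth calling out: isImpliedCondition asks whether a dominating branch condition already decides the icmp, letting the pass fold it to a constant. A condensed sketch under that reading; the helper name and the way the dominating edge is passed in are illustrative.

    #include "llvm/ADT/Optional.h"
    #include "llvm/Analysis/ValueTracking.h"
    #include "llvm/IR/Constants.h"
    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // If DomCond (known true or false on the edge dominating Cmp) implies
    // Cmp's result, replace Cmp with that constant.
    static Value *foldViaDominatingCondition(ICmpInst &Cmp, Value *DomCond,
                                             bool DomCondIsTrue,
                                             const DataLayout &DL) {
      Optional<bool> Imp = isImpliedCondition(DomCond, &Cmp, DL, DomCondIsTrue);
      if (!Imp)
        return nullptr;
      return ConstantInt::get(Cmp.getType(), *Imp);
    }
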
lib/Transforms/InstCombine/InstCombineInternal.h 354 const DataLayout &getDataLayout() const { return DL; }
725 llvm::computeKnownBits(V, Known, DL, Depth, &AC, CxtI, &DT);
730 return llvm::computeKnownBits(V, DL, Depth, &AC, CxtI, &DT);
736 return llvm::isKnownToBeAPowerOfTwo(V, DL, OrZero, Depth, &AC, CxtI, &DT);
741 return llvm::MaskedValueIsZero(V, Mask, DL, Depth, &AC, CxtI, &DT);
746 return llvm::ComputeNumSignBits(Op, DL, Depth, &AC, CxtI, &DT);
752 return llvm::computeOverflowForUnsignedMul(LHS, RHS, DL, &AC, CxtI, &DT);
758 return llvm::computeOverflowForSignedMul(LHS, RHS, DL, &AC, CxtI, &DT);
764 return llvm::computeOverflowForUnsignedAdd(LHS, RHS, DL, &AC, CxtI, &DT);
770 return llvm::computeOverflowForSignedAdd(LHS, RHS, DL, &AC, CxtI, &DT);
776 return llvm::computeOverflowForUnsignedSub(LHS, RHS, DL, &AC, CxtI, &DT);
781 return llvm::computeOverflowForSignedSub(LHS, RHS, DL, &AC, CxtI, &DT);
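
The InstCombineInternal.h block shows the pass's wrapper layer: InstCombiner caches DL, AC, and DT as members and forwards to the free functions in llvm/Analysis/ValueTracking.h, so individual folds never thread the analyses by hand. A stripped illustration of the pattern; the struct below is not the real InstCombiner, but the forwarded signatures match the listing.

    #include "llvm/ADT/APInt.h"
    #include "llvm/Analysis/AssumptionCache.h"
    #include "llvm/Analysis/ValueTracking.h"
    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/Dominators.h"
    #include "llvm/Support/KnownBits.h"
    using namespace llvm;

    struct AnalysisForwarder {
      const DataLayout &DL;
      AssumptionCache &AC;
      DominatorTree &DT;

      // Callers supply only the value, depth, and context instruction; the
      // wrapper fills in the cached analyses.
      KnownBits computeKnownBits(const Value *V, unsigned Depth,
                                 const Instruction *CxtI) const {
        return llvm::computeKnownBits(V, DL, Depth, &AC, CxtI, &DT);
      }
      bool MaskedValueIsZero(const Value *V, const APInt &Mask, unsigned Depth,
                             const Instruction *CxtI) const {
        return llvm::MaskedValueIsZero(V, Mask, DL, Depth, &AC, CxtI, &DT);
      }
      unsigned ComputeNumSignBits(const Value *Op, unsigned Depth,
                                  const Instruction *CxtI) const {
        return llvm::ComputeNumSignBits(Op, DL, Depth, &AC, CxtI, &DT);
      }
    };
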
lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp 349 MaybeAlign(DL.getPrefTypeAlignment(AI.getAllocatedType())));
354 if (DL.getTypeAllocSize(AI.getAllocatedType()) == 0) {
372 DL.getTypeAllocSize(EntryAI->getAllocatedType()) != 0) {
381 MaybeAlign(DL.getPrefTypeAlignment(EntryAI->getAllocatedType())));
405 Copy->getSource(), AI.getAlignment(), DL, &AI, &AC, &DT);
407 isDereferenceableForAllocaSize(Copy->getSource(), &AI, DL)) {
958 Op, DL.getPrefTypeAlignment(LI.getType()), DL, &LI, &AC, &DT);
961 LoadAlign != 0 ? LoadAlign : DL.getABITypeAlignment(LI.getType());
1025 Alignment, DL, SI) &&
1027 Alignment, DL, SI)) {
1360 Ptr, DL.getPrefTypeAlignment(Val->getType()), DL, &SI, &AC, &DT));
1363 StoreAlign ? *StoreAlign : Align(DL.getABITypeAlignment(Val->getType()));
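
Lines 958-961 and 1360-1363 in InstCombineLoadStoreAlloca.cpp are the load/store alignment upgrade: ask getOrEnforceKnownAlignment for the best alignment provable at the access, preferring the type's preferred alignment and falling back to its ABI alignment when nothing is known. A sketch of the load side under the same LLVM 10-era unsigned API.

    #include "llvm/Analysis/AssumptionCache.h"
    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/Dominators.h"
    #include "llvm/IR/Instructions.h"
    #include "llvm/Transforms/Utils/Local.h"
    using namespace llvm;

    // Best alignment for LI: what the analyses can prove (up to the
    // preferred alignment), else the ABI minimum for the loaded type.
    static unsigned computeLoadAlignment(LoadInst &LI, const DataLayout &DL,
                                         AssumptionCache &AC,
                                         DominatorTree &DT) {
      Value *Op = LI.getPointerOperand();
      unsigned KnownAlign = getOrEnforceKnownAlignment(
          Op, DL.getPrefTypeAlignment(LI.getType()), DL, &LI, &AC, &DT);
      return KnownAlign != 0 ? KnownAlign : DL.getABITypeAlignment(LI.getType());
    }
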
lib/Transforms/InstCombine/InstCombinePHI.cpp 126 if (DL.getPointerSizeInBits(IntToPtr->getAddressSpace()) !=
127 DL.getTypeSizeInBits(IntToPtr->getOperand(0)->getType()))
1188 if (isKnownNonZero(VA, DL, 0, &AC, CtxI, &DT)) {
1261 !DL.isLegalInteger(PN.getType()->getPrimitiveSizeInBits()))
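
The first InstCombinePHI.cpp pair (lines 126-127) guards a PHI-of-inttoptr fold: it only fires when the integer feeding the inttoptr is exactly pointer-sized for the address space, so the cast cannot be hiding a trunc or zext. Sketch, with an illustrative helper name.

    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // An inttoptr loses or invents no bits only when its source integer has
    // exactly the pointer width of the target address space.
    static bool isLosslessIntToPtr(const IntToPtrInst *ITP,
                                   const DataLayout &DL) {
      return DL.getPointerSizeInBits(ITP->getAddressSpace()) ==
             DL.getTypeSizeInBits(ITP->getOperand(0)->getType());
    }
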
lib/Transforms/InstCombine/InstCombineSelect.cpp 1428 DL.getTypeSizeInBits(TrueVal->getType()->getScalarType());
lib/Transforms/InstCombine/InstCombineShifts.cpp 320 if (isKnownNonNegative(A, DL, 0, &AC, &I, &DT) &&
321 isKnownNonNegative(C, DL, 0, &AC, &I, &DT))
620 I, getShiftedValue(Op0, Op1C->getZExtValue(), isLeftShift, *this, DL));
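
Lines 320-321 in InstCombineShifts.cpp gate a rewrite on both values being provably non-negative, the usual way sign-sensitivity is discharged before reassociating. A sketch of the guard alone.

    #include "llvm/Analysis/AssumptionCache.h"
    #include "llvm/Analysis/ValueTracking.h"
    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/Dominators.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // The transform at the call site is only valid if neither value can be
    // negative at I.
    static bool bothNonNegative(Value *A, Value *C, const DataLayout &DL,
                                AssumptionCache &AC, Instruction &I,
                                DominatorTree &DT) {
      return isKnownNonNegative(A, DL, 0, &AC, &I, &DT) &&
             isKnownNonNegative(C, DL, 0, &AC, &I, &DT);
    }
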
lib/Transforms/InstCombine/InstCombineVectorOps.cpp 366 if (Instruction *I = foldBitcastExtElt(EI, Builder, DL.isBigEndian()))
1927 if (Instruction *I = foldSelectShuffle(SVI, Builder, DL))
2001 unsigned SrcElemBitWidth = DL.getTypeSizeInBits(SrcTy->getElementType());
2014 unsigned TgtElemBitWidth = DL.getTypeSizeInBits(TgtTy);
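
The two width queries at lines 2001 and 2014 of InstCombineVectorOps.cpp decide whether a shuffle can be treated as a bitcast: whole source lanes must regroup exactly into whole target lanes. A sketch of the divisibility check; the helper name is illustrative.

    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/DerivedTypes.h"
    using namespace llvm;

    // Lanes tile each other exactly only when one element width divides the
    // other.
    static bool elementsTile(VectorType *SrcTy, Type *TgtElemTy,
                             const DataLayout &DL) {
      unsigned SrcElemBitWidth = DL.getTypeSizeInBits(SrcTy->getElementType());
      unsigned TgtElemBitWidth = DL.getTypeSizeInBits(TgtElemTy);
      if (SrcElemBitWidth == 0 || TgtElemBitWidth == 0)
        return false;
      return SrcElemBitWidth % TgtElemBitWidth == 0 ||
             TgtElemBitWidth % SrcElemBitWidth == 0;
    }
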
lib/Transforms/InstCombine/InstructionCombining.cpp 147 return llvm::EmitGEPOffset(&Builder, DL, GEP);
161 bool FromLegal = FromWidth == 1 || DL.isLegalInteger(FromWidth);
162 bool ToLegal = ToWidth == 1 || DL.isLegalInteger(ToWidth);
1101 Type *IndexTy = DL.getIndexType(PtrTy);
1103 if (int64_t TySize = DL.getTypeAllocSize(Ty)) {
1121 if (uint64_t(Offset * 8) >= DL.getTypeSizeInBits(Ty))
1125 const StructLayout *SL = DL.getStructLayout(STy);
1136 uint64_t EltSize = DL.getTypeAllocSize(AT->getElementType());
1651 DL.getIndexType(GEP.getPointerOperandType()->getScalarType());
1669 if (EltTy->isSized() && DL.getTypeAllocSize(EltTy) == 0)
1916 DL.getIndexSizeInBits(AS)) {
1917 uint64_t TyAllocSize = DL.getTypeAllocSize(GEPEltType);
2036 DL.getTypeAllocSize(StrippedPtrEltTy->getArrayElementType()) ==
2037 DL.getTypeAllocSize(GEPEltType)) {
2038 Type *IdxType = DL.getIndexType(GEPType);
2058 uint64_t ResSize = DL.getTypeAllocSize(GEPEltType);
2059 uint64_t SrcSize = DL.getTypeAllocSize(StrippedPtrEltTy);
2068 assert(Idx->getType() == DL.getIndexType(GEPType) &&
2098 uint64_t ResSize = DL.getTypeAllocSize(GEPEltType);
2100 DL.getTypeAllocSize(StrippedPtrEltTy->getArrayElementType());
2109 assert(Idx->getType() == DL.getIndexType(GEPType) &&
2117 Type *IndTy = DL.getIndexType(GEPType);
2189 unsigned OffsetBits = DL.getIndexTypeSizeInBits(GEPType);
2191 if (!isa<BitCastInst>(SrcOp) && GEP.accumulateConstantOffset(DL, Offset)) {
2237 DL.getIndexSizeInBits(PtrOp->getType()->getPointerAddressSpace());
2240 PtrOp->stripAndAccumulateInBoundsConstantOffsets(DL,
2243 if (GEP.accumulateConstantOffset(DL, BasePtrOffset) &&
2245 APInt AllocSize(IdxWidth, DL.getTypeAllocSize(AI->getAllocatedType()));
2387 lowerObjectSizeCall(II, DL, &TLI, /*MustSucceed=*/true);
2533 if (Instruction *I = tryToMoveFreeBeforeNullTest(FI, DL))
3226 if (Constant *C = ConstantFoldInstruction(I, DL, &TLI)) {
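
Finally, the cluster around lines 2237-2245 of InstructionCombining.cpp is a DataLayout-driven bounds check: accumulate the GEP's constant offset in index-width APInt arithmetic and compare it with the alloca's allocation size to decide whether an inbounds GEP can still point into the object. A condensed sketch; the real code also strips intermediate casts and handles the non-constant case.

    #include "llvm/ADT/APInt.h"
    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // True if GEP is a constant offset from AI that stays inside the
    // allocation: 0 <= offset <= alloc size.
    static bool constantOffsetInBounds(GetElementPtrInst &GEP, AllocaInst *AI,
                                       const DataLayout &DL) {
      unsigned IdxWidth = DL.getIndexSizeInBits(GEP.getPointerAddressSpace());
      APInt Offset(IdxWidth, 0);
      if (!GEP.accumulateConstantOffset(DL, Offset))
        return false; // Offset is not a compile-time constant.
      APInt AllocSize(IdxWidth, DL.getTypeAllocSize(AI->getAllocatedType()));
      return Offset.isNonNegative() && Offset.ule(AllocSize);
    }
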