31#include "llvm/IR/InlineAsm.h"
32#include "llvm/IR/Instruction.h"
33#include "llvm/IR/Intrinsics.h"
34#include "llvm/IR/IntrinsicsX86.h"
35#include "llvm/IR/MatrixBuilder.h"
36#include "llvm/Support/ConvertUTF.h"
37#include "llvm/Support/ScopedPrinter.h"
42using namespace CodeGen;
53 LangOptions::FPExceptionModeKind::FPE_Ignore &&
58 case Builtin::BIlogbf:
59 case Builtin::BI__builtin_logbf:
61 case Builtin::BI__builtin_logb:
62 case Builtin::BIscalbnf:
63 case Builtin::BI__builtin_scalbnf:
64 case Builtin::BIscalbn:
65 case Builtin::BI__builtin_scalbn:
75 llvm::Triple::ArchType
Arch) {
87 case llvm::Triple::arm:
88 case llvm::Triple::armeb:
89 case llvm::Triple::thumb:
90 case llvm::Triple::thumbeb:
92 case llvm::Triple::aarch64:
93 case llvm::Triple::aarch64_32:
94 case llvm::Triple::aarch64_be:
96 case llvm::Triple::bpfeb:
97 case llvm::Triple::bpfel:
99 case llvm::Triple::dxil:
101 case llvm::Triple::x86:
102 case llvm::Triple::x86_64:
104 case llvm::Triple::ppc:
105 case llvm::Triple::ppcle:
106 case llvm::Triple::ppc64:
107 case llvm::Triple::ppc64le:
109 case llvm::Triple::r600:
110 case llvm::Triple::amdgcn:
112 case llvm::Triple::systemz:
114 case llvm::Triple::nvptx:
115 case llvm::Triple::nvptx64:
117 case llvm::Triple::wasm32:
118 case llvm::Triple::wasm64:
120 case llvm::Triple::hexagon:
122 case llvm::Triple::riscv32:
123 case llvm::Triple::riscv64:
125 case llvm::Triple::spirv32:
126 case llvm::Triple::spirv64:
130 case llvm::Triple::spirv:
140 if (
getContext().BuiltinInfo.isAuxBuiltinID(BuiltinID)) {
141 assert(
getContext().getAuxTargetInfo() &&
"Missing aux target info");
143 this,
getContext().BuiltinInfo.getAuxBuiltinID(BuiltinID),
E,
152 Align AlignmentInBytes) {
154 switch (CGF.
getLangOpts().getTrivialAutoVarInit()) {
159 Byte = CGF.
Builder.getInt8(0x00);
163 Byte = llvm::dyn_cast<llvm::ConstantInt>(
171 I->addAnnotationMetadata(
"auto-init");
177 unsigned BuiltinID) {
188 static SmallDenseMap<unsigned, StringRef, 64> F128Builtins{
189 {Builtin::BI__builtin___fprintf_chk,
"__fprintf_chkieee128"},
190 {Builtin::BI__builtin___printf_chk,
"__printf_chkieee128"},
191 {Builtin::BI__builtin___snprintf_chk,
"__snprintf_chkieee128"},
192 {Builtin::BI__builtin___sprintf_chk,
"__sprintf_chkieee128"},
193 {Builtin::BI__builtin___vfprintf_chk,
"__vfprintf_chkieee128"},
194 {Builtin::BI__builtin___vprintf_chk,
"__vprintf_chkieee128"},
195 {Builtin::BI__builtin___vsnprintf_chk,
"__vsnprintf_chkieee128"},
196 {Builtin::BI__builtin___vsprintf_chk,
"__vsprintf_chkieee128"},
197 {Builtin::BI__builtin_fprintf,
"__fprintfieee128"},
198 {Builtin::BI__builtin_printf,
"__printfieee128"},
199 {Builtin::BI__builtin_snprintf,
"__snprintfieee128"},
200 {Builtin::BI__builtin_sprintf,
"__sprintfieee128"},
201 {Builtin::BI__builtin_vfprintf,
"__vfprintfieee128"},
202 {Builtin::BI__builtin_vprintf,
"__vprintfieee128"},
203 {Builtin::BI__builtin_vsnprintf,
"__vsnprintfieee128"},
204 {Builtin::BI__builtin_vsprintf,
"__vsprintfieee128"},
205 {Builtin::BI__builtin_fscanf,
"__fscanfieee128"},
206 {Builtin::BI__builtin_scanf,
"__scanfieee128"},
207 {Builtin::BI__builtin_sscanf,
"__sscanfieee128"},
208 {Builtin::BI__builtin_vfscanf,
"__vfscanfieee128"},
209 {Builtin::BI__builtin_vscanf,
"__vscanfieee128"},
210 {Builtin::BI__builtin_vsscanf,
"__vsscanfieee128"},
211 {Builtin::BI__builtin_nexttowardf128,
"__nexttowardieee128"},
217 static SmallDenseMap<unsigned, StringRef, 4> AIXLongDouble64Builtins{
218 {Builtin::BI__builtin_frexpl,
"frexp"},
219 {Builtin::BI__builtin_ldexpl,
"ldexp"},
220 {Builtin::BI__builtin_modfl,
"modf"},
226 if (FD->
hasAttr<AsmLabelAttr>())
232 &
getTarget().getLongDoubleFormat() == &llvm::APFloat::IEEEquad() &&
233 F128Builtins.contains(BuiltinID))
234 Name = F128Builtins[BuiltinID];
237 &llvm::APFloat::IEEEdouble() &&
238 AIXLongDouble64Builtins.contains(BuiltinID))
239 Name = AIXLongDouble64Builtins[BuiltinID];
244 llvm::FunctionType *Ty =
247 return GetOrCreateLLVMFunction(Name, Ty,
D,
false);
253 QualType T, llvm::IntegerType *IntType) {
256 if (
V->getType()->isPointerTy())
257 return CGF.
Builder.CreatePtrToInt(
V, IntType);
259 assert(
V->getType() == IntType);
267 if (ResultType->isPointerTy())
268 return CGF.
Builder.CreateIntToPtr(
V, ResultType);
270 assert(
V->getType() == ResultType);
282 if (Align % Bytes != 0) {
295 AtomicOrdering Ordering) {
305 llvm::IntegerType *IntType = llvm::IntegerType::get(
309 llvm::Type *ValueType = Val->getType();
337 llvm::AtomicRMWInst::BinOp Kind,
346 llvm::AtomicRMWInst::BinOp Kind,
348 Instruction::BinaryOps Op,
349 bool Invert =
false) {
358 llvm::IntegerType *IntType = llvm::IntegerType::get(
362 llvm::Type *ValueType = Val->getType();
366 Kind, DestAddr, Val, llvm::AtomicOrdering::SequentiallyConsistent);
371 llvm::ConstantInt::getAllOnesValue(IntType));
395 llvm::IntegerType *IntType = llvm::IntegerType::get(
399 llvm::Type *ValueType = Cmp->getType();
404 DestAddr, Cmp,
New, llvm::AtomicOrdering::SequentiallyConsistent,
405 llvm::AtomicOrdering::SequentiallyConsistent);
408 return CGF.
Builder.CreateZExt(CGF.
Builder.CreateExtractValue(Pair, 1),
431 AtomicOrdering SuccessOrdering = AtomicOrdering::SequentiallyConsistent) {
443 auto *RTy = Exchange->getType();
447 if (RTy->isPointerTy()) {
453 auto FailureOrdering = SuccessOrdering == AtomicOrdering::Release ?
454 AtomicOrdering::Monotonic :
462 DestAddr, Comparand, Exchange, SuccessOrdering, FailureOrdering);
463 CmpXchg->setVolatile(
true);
466 if (RTy->isPointerTy()) {
487 AtomicOrdering SuccessOrdering) {
488 assert(
E->getNumArgs() == 4);
494 assert(DestPtr->getType()->isPointerTy());
495 assert(!ExchangeHigh->getType()->isPointerTy());
496 assert(!ExchangeLow->getType()->isPointerTy());
499 auto FailureOrdering = SuccessOrdering == AtomicOrdering::Release
500 ? AtomicOrdering::Monotonic
505 llvm::Type *Int128Ty = llvm::IntegerType::get(CGF.
getLLVMContext(), 128);
506 Address DestAddr(DestPtr, Int128Ty,
511 ExchangeHigh = CGF.
Builder.CreateZExt(ExchangeHigh, Int128Ty);
512 ExchangeLow = CGF.
Builder.CreateZExt(ExchangeLow, Int128Ty);
514 CGF.
Builder.CreateShl(ExchangeHigh, llvm::ConstantInt::get(Int128Ty, 64));
515 llvm::Value *Exchange = CGF.
Builder.CreateOr(ExchangeHigh, ExchangeLow);
521 SuccessOrdering, FailureOrdering);
527 CXI->setVolatile(
true);
539 AtomicOrdering Ordering = AtomicOrdering::SequentiallyConsistent) {
545 AtomicRMWInst::Add, DestAddr, ConstantInt::get(IntTy, 1), Ordering);
546 return CGF.
Builder.CreateAdd(
Result, ConstantInt::get(IntTy, 1));
551 AtomicOrdering Ordering = AtomicOrdering::SequentiallyConsistent) {
557 AtomicRMWInst::Sub, DestAddr, ConstantInt::get(IntTy, 1), Ordering);
558 return CGF.
Builder.CreateSub(
Result, ConstantInt::get(IntTy, 1));
569 Load->setVolatile(
true);
579 llvm::StoreInst *Store =
581 Store->setVolatile(
true);
590 unsigned ConstrainedIntrinsicID) {
594 if (CGF.
Builder.getIsFPConstrained()) {
596 return CGF.
Builder.CreateConstrainedFPCall(F, { Src0 });
599 return CGF.
Builder.CreateCall(F, Src0);
607 unsigned ConstrainedIntrinsicID) {
612 if (CGF.
Builder.getIsFPConstrained()) {
614 return CGF.
Builder.CreateConstrainedFPCall(F, { Src0, Src1 });
617 return CGF.
Builder.CreateCall(F, { Src0, Src1 });
624 Intrinsic::ID IntrinsicID,
625 Intrinsic::ID ConstrainedIntrinsicID) {
630 if (CGF.
Builder.getIsFPConstrained()) {
632 {Src0->getType(), Src1->getType()});
633 return CGF.
Builder.CreateConstrainedFPCall(F, {Src0, Src1});
638 return CGF.
Builder.CreateCall(F, {Src0, Src1});
645 unsigned ConstrainedIntrinsicID) {
651 if (CGF.
Builder.getIsFPConstrained()) {
653 return CGF.
Builder.CreateConstrainedFPCall(F, { Src0, Src1, Src2 });
656 return CGF.
Builder.CreateCall(F, { Src0, Src1, Src2 });
663 unsigned IntrinsicID,
664 unsigned ConstrainedIntrinsicID) {
668 if (CGF.
Builder.getIsFPConstrained()) {
671 {ResultType, Src0->getType()});
672 return CGF.
Builder.CreateConstrainedFPCall(F, {Src0});
676 return CGF.
Builder.CreateCall(F, Src0);
681 Intrinsic::ID IntrinsicID) {
689 llvm::Value *
Call = CGF.
Builder.CreateCall(F, Src0);
691 llvm::Value *Exp = CGF.
Builder.CreateExtractValue(
Call, 1);
699 Intrinsic::ID IntrinsicID) {
704 llvm::Function *F = CGF.
CGM.
getIntrinsic(IntrinsicID, {Val->getType()});
705 llvm::Value *
Call = CGF.
Builder.CreateCall(F, Val);
707 llvm::Value *SinResult = CGF.
Builder.CreateExtractValue(
Call, 0);
708 llvm::Value *CosResult = CGF.
Builder.CreateExtractValue(
Call, 1);
714 llvm::StoreInst *StoreSin =
716 llvm::StoreInst *StoreCos =
723 MDNode *
Domain = MDHelper.createAnonymousAliasScopeDomain();
724 MDNode *AliasScope = MDHelper.createAnonymousAliasScope(
Domain);
725 MDNode *AliasScopeList = MDNode::get(
Call->getContext(), AliasScope);
726 StoreSin->setMetadata(LLVMContext::MD_alias_scope, AliasScopeList);
727 StoreCos->setMetadata(LLVMContext::MD_noalias, AliasScopeList);
731 Intrinsic::ID IntrinsicID) {
736 CGF.
Builder.CreateIntrinsic(IntrinsicID, {Val->getType()}, Val);
738 llvm::Value *FractionalResult = CGF.
Builder.CreateExtractValue(
Call, 0);
739 llvm::Value *IntegralResult = CGF.
Builder.CreateExtractValue(
Call, 1);
745 return FractionalResult;
752 Call->setDoesNotAccessMemory();
761 llvm::Type *Ty =
V->getType();
762 int Width = Ty->getPrimitiveSizeInBits();
763 llvm::Type *IntTy = llvm::IntegerType::get(
C, Width);
765 if (Ty->isPPC_FP128Ty()) {
775 Value *ShiftCst = llvm::ConstantInt::get(IntTy, Width);
780 IntTy = llvm::IntegerType::get(
C, Width);
783 Value *
Zero = llvm::Constant::getNullValue(IntTy);
793 auto IsIndirect = [&](
ABIArgInfo const &info) {
794 return info.isIndirect() || info.isIndirectAliased() || info.isInAlloca();
799 return IsIndirect(ArgInfo.info);
804 const CallExpr *
E, llvm::Constant *calleeValue) {
807 llvm::CallBase *callOrInvoke =
nullptr;
811 nullptr, &callOrInvoke, &FnInfo);
816 bool ConstWithoutErrnoAndExceptions =
820 if (ConstWithoutErrnoAndExceptions && CGF.
CGM.
getLangOpts().MathErrno &&
821 !CGF.
Builder.getIsFPConstrained() &&
Call.isScalar() &&
842 const Intrinsic::ID IntrinsicID,
843 llvm::Value *
X, llvm::Value *Y,
844 llvm::Value *&Carry) {
846 assert(
X->getType() == Y->getType() &&
847 "Arguments must be the same type. (Did you forget to make sure both "
848 "arguments have the same integer width?)");
851 llvm::Value *Tmp = CGF.
Builder.CreateCall(Callee, {
X, Y});
852 Carry = CGF.
Builder.CreateExtractValue(Tmp, 1);
853 return CGF.
Builder.CreateExtractValue(Tmp, 0);
857 struct WidthAndSignedness {
863static WidthAndSignedness
875static struct WidthAndSignedness
877 assert(Types.size() > 0 &&
"Empty list of types.");
881 for (
const auto &
Type : Types) {
890 for (
const auto &
Type : Types) {
892 if (Width < MinWidth) {
901 Intrinsic::ID inst = IsStart ? Intrinsic::vastart : Intrinsic::vaend;
912 return From == To || (From == 0 && To == 1) || (From == 3 && To == 2);
917 return ConstantInt::get(ResType, (
Type & 2) ? 0 : -1,
true);
921CodeGenFunction::evaluateOrEmitBuiltinObjectSize(
const Expr *
E,
unsigned Type,
922 llvm::IntegerType *ResType,
923 llvm::Value *EmittedE,
927 return emitBuiltinObjectSize(
E,
Type, ResType, EmittedE, IsDynamic);
928 return ConstantInt::get(ResType, ObjectSize,
true);
935class StructFieldAccess
937 bool AddrOfSeen =
false;
940 const Expr *ArrayIndex =
nullptr;
956 ArrayIndex =
E->getIdx();
957 ArrayElementTy =
E->getBase()->
getType();
958 return Visit(
E->getBase());
961 if (
E->getCastKind() == CK_LValueToRValue)
963 return Visit(
E->getSubExpr());
966 return Visit(
E->getSubExpr());
970 return Visit(
E->getSubExpr());
974 return Visit(
E->getSubExpr());
996 Ctx, FD, FD->getType(), StrictFlexArraysLevel,
1000 if (
const auto *RD = FD->getType()->getAsRecordDecl())
1018 uint32_t FieldNo = 0;
1027 if (
const auto *RD = Field->getType()->getAsRecordDecl()) {
1041static std::optional<int64_t>
1046 return std::optional<int64_t>(Offset);
1048 return std::nullopt;
1051llvm::Value *CodeGenFunction::emitCountedBySize(
const Expr *
E,
1052 llvm::Value *EmittedE,
1054 llvm::IntegerType *ResType) {
1072 StructFieldAccess Visitor;
1073 E = Visitor.Visit(
E);
1077 const Expr *Idx = Visitor.ArrayIndex;
1083 if (
const auto *IL = dyn_cast<IntegerLiteral>(Idx)) {
1084 int64_t Val = IL->getValue().getSExtValue();
1096 if (
const auto *ME = dyn_cast<MemberExpr>(
E))
1097 return emitCountedByMemberSize(ME, Idx, EmittedE, Visitor.ArrayElementTy,
1101 if (
const auto *ICE = dyn_cast<ImplicitCastExpr>(
E);
1102 ICE && ICE->getCastKind() == CK_LValueToRValue)
1103 return emitCountedByPointerSize(ICE, Idx, EmittedE, Visitor.ArrayElementTy,
1112 llvm::IntegerType *ResType,
1118 Cmp = CGF.
Builder.CreateAnd(CGF.
Builder.CreateIsNotNeg(Index), Cmp);
1121 return CGF.
Builder.CreateSelect(Cmp, Res,
1122 ConstantInt::get(ResType, 0, IsSigned));
1125static std::pair<llvm::Value *, llvm::Value *>
1128 const Expr *Idx, llvm::IntegerType *ResType,
1133 return std::make_pair<Value *>(
nullptr,
nullptr);
1134 Count = CGF.
Builder.CreateIntCast(Count, ResType, IsSigned,
"count");
1137 Value *Index =
nullptr;
1141 Index = CGF.
Builder.CreateIntCast(Index, ResType, IdxSigned,
"index");
1144 return std::make_pair(Count, Index);
1147llvm::Value *CodeGenFunction::emitCountedByPointerSize(
1149 QualType CastedArrayElementTy,
unsigned Type, llvm::IntegerType *ResType) {
1150 assert(
E->getCastKind() == CK_LValueToRValue &&
1151 "must be an LValue to RValue cast");
1158 const auto *ArrayBaseFD = dyn_cast<FieldDecl>(ME->
getMemberDecl());
1159 if (!ArrayBaseFD || !ArrayBaseFD->getType()->isPointerType() ||
1160 !ArrayBaseFD->getType()->isCountAttributedType())
1209 auto GetElementBaseSize = [&](
QualType ElementTy) {
1213 if (ElementSize.
isZero()) {
1221 return std::optional<CharUnits>();
1226 return std::optional<CharUnits>(ElementSize);
1231 std::optional<CharUnits> ArrayElementBaseSize =
1232 GetElementBaseSize(ArrayBaseFD->getType());
1233 if (!ArrayElementBaseSize)
1236 std::optional<CharUnits> CastedArrayElementBaseSize = ArrayElementBaseSize;
1238 CastedArrayElementBaseSize = GetElementBaseSize(CastedArrayElementTy);
1239 if (!CastedArrayElementBaseSize)
1247 Value *Count, *Index;
1249 *
this, ME, ArrayBaseFD, CountFD, Idx, ResType, IsSigned);
1254 auto *ArrayElementSize = llvm::ConstantInt::get(
1255 ResType, ArrayElementBaseSize->getQuantity(), IsSigned);
1258 auto *CastedArrayElementSize = llvm::ConstantInt::get(
1259 ResType, CastedArrayElementBaseSize->getQuantity(), IsSigned);
1262 Value *ArraySize =
Builder.CreateMul(Count, ArrayElementSize,
"array_size",
1263 !IsSigned, IsSigned);
1271 Value *IndexSize =
Builder.CreateMul(Index, CastedArrayElementSize,
1272 "index_size", !IsSigned, IsSigned);
1276 Builder.CreateSub(
Result, IndexSize,
"result", !IsSigned, IsSigned);
1282llvm::Value *CodeGenFunction::emitCountedByMemberSize(
1284 QualType CastedArrayElementTy,
unsigned Type, llvm::IntegerType *ResType) {
1293 const FieldDecl *FlexibleArrayMemberFD =
nullptr;
1296 Ctx, FD, FD->getType(),
getLangOpts().getStrictFlexArraysLevel(),
1298 FlexibleArrayMemberFD = FD;
1302 if (!FlexibleArrayMemberFD ||
1404 Value *FieldOffset =
nullptr;
1405 if (FlexibleArrayMemberFD != FD) {
1410 llvm::ConstantInt::get(ResType, *Offset / CharWidth, IsSigned);
1415 Value *Count, *Index;
1417 *
this, ME, FlexibleArrayMemberFD, CountFD, Idx, ResType, IsSigned);
1424 auto *FlexibleArrayMemberElementSize =
1425 llvm::ConstantInt::get(ResType, BaseSize.
getQuantity(), IsSigned);
1428 Value *FlexibleArrayMemberSize =
1429 Builder.CreateMul(Count, FlexibleArrayMemberElementSize,
1430 "flexible_array_member_size", !IsSigned, IsSigned);
1433 if (FlexibleArrayMemberFD == FD) {
1437 llvm::ConstantInt *CastedFlexibleArrayMemberElementSize =
1438 FlexibleArrayMemberElementSize;
1439 if (!CastedArrayElementTy.
isNull() &&
1443 CastedFlexibleArrayMemberElementSize =
1444 llvm::ConstantInt::get(ResType, BaseSize.
getQuantity(), IsSigned);
1449 Builder.CreateMul(Index, CastedFlexibleArrayMemberElementSize,
1450 "index_size", !IsSigned, IsSigned);
1453 Result =
Builder.CreateSub(FlexibleArrayMemberSize, IndexSize,
"result",
1454 !IsSigned, IsSigned);
1457 Result = FlexibleArrayMemberSize;
1463 TypeSize
Size = Layout.getTypeSizeInBits(StructTy);
1464 Value *SizeofStruct =
1465 llvm::ConstantInt::get(ResType,
Size.getKnownMinValue() / CharWidth);
1470 if (!CastedArrayElementTy.
isNull() &&
1479 llvm::ConstantInt *CastedFieldElementSize =
1480 llvm::ConstantInt::get(ResType, BaseSize.
getQuantity(), IsSigned);
1484 "field_offset", !IsSigned, IsSigned);
1485 FieldOffset =
Builder.CreateAdd(FieldOffset, Mul);
1489 Value *OffsetDiff =
Builder.CreateSub(SizeofStruct, FieldOffset,
1490 "offset_diff", !IsSigned, IsSigned);
1493 Result =
Builder.CreateAdd(FlexibleArrayMemberSize, OffsetDiff,
"result");
1509CodeGenFunction::emitBuiltinObjectSize(
const Expr *
E,
unsigned Type,
1510 llvm::IntegerType *ResType,
1511 llvm::Value *EmittedE,
bool IsDynamic) {
1515 auto *Param = dyn_cast<ParmVarDecl>(
D->getDecl());
1516 auto *PS =
D->getDecl()->
getAttr<PassObjectSizeAttr>();
1517 if (Param !=
nullptr && PS !=
nullptr &&
1519 auto Iter = SizeArguments.find(Param);
1520 assert(
Iter != SizeArguments.end());
1523 auto DIter = LocalDeclMap.find(
D);
1524 assert(DIter != LocalDeclMap.end());
1538 assert(Ptr->
getType()->isPointerTy() &&
1539 "Non-pointer passed to __builtin_object_size?");
1544 if (
Value *
V = emitCountedBySize(
E, Ptr,
Type, ResType))
1562 enum InterlockingKind : uint8_t {
1571 InterlockingKind Interlocking;
1574 static BitTest decodeBitTestBuiltin(
unsigned BuiltinID);
1579BitTest BitTest::decodeBitTestBuiltin(
unsigned BuiltinID) {
1580 switch (BuiltinID) {
1582 case Builtin::BI_bittest:
1583 return {TestOnly, Unlocked,
false};
1584 case Builtin::BI_bittestandcomplement:
1585 return {Complement, Unlocked,
false};
1586 case Builtin::BI_bittestandreset:
1587 return {
Reset, Unlocked,
false};
1588 case Builtin::BI_bittestandset:
1589 return {
Set, Unlocked,
false};
1590 case Builtin::BI_interlockedbittestandreset:
1591 return {
Reset, Sequential,
false};
1592 case Builtin::BI_interlockedbittestandset:
1593 return {
Set, Sequential,
false};
1596 case Builtin::BI_bittest64:
1597 return {TestOnly, Unlocked,
true};
1598 case Builtin::BI_bittestandcomplement64:
1599 return {Complement, Unlocked,
true};
1600 case Builtin::BI_bittestandreset64:
1601 return {
Reset, Unlocked,
true};
1602 case Builtin::BI_bittestandset64:
1603 return {
Set, Unlocked,
true};
1604 case Builtin::BI_interlockedbittestandreset64:
1605 return {
Reset, Sequential,
true};
1606 case Builtin::BI_interlockedbittestandset64:
1607 return {
Set, Sequential,
true};
1610 case Builtin::BI_interlockedbittestandset_acq:
1611 return {
Set, Acquire,
false};
1612 case Builtin::BI_interlockedbittestandset_rel:
1613 return {
Set, Release,
false};
1614 case Builtin::BI_interlockedbittestandset_nf:
1615 return {
Set, NoFence,
false};
1616 case Builtin::BI_interlockedbittestandreset_acq:
1617 return {
Reset, Acquire,
false};
1618 case Builtin::BI_interlockedbittestandreset_rel:
1619 return {
Reset, Release,
false};
1620 case Builtin::BI_interlockedbittestandreset_nf:
1621 return {
Reset, NoFence,
false};
1622 case Builtin::BI_interlockedbittestandreset64_acq:
1623 return {
Reset, Acquire,
false};
1624 case Builtin::BI_interlockedbittestandreset64_rel:
1625 return {
Reset, Release,
false};
1626 case Builtin::BI_interlockedbittestandreset64_nf:
1627 return {
Reset, NoFence,
false};
1628 case Builtin::BI_interlockedbittestandset64_acq:
1629 return {
Set, Acquire,
false};
1630 case Builtin::BI_interlockedbittestandset64_rel:
1631 return {
Set, Release,
false};
1632 case Builtin::BI_interlockedbittestandset64_nf:
1633 return {
Set, NoFence,
false};
1635 llvm_unreachable(
"expected only bittest intrinsics");
1640 case BitTest::TestOnly:
return '\0';
1641 case BitTest::Complement:
return 'c';
1642 case BitTest::Reset:
return 'r';
1643 case BitTest::Set:
return 's';
1645 llvm_unreachable(
"invalid action");
1653 char SizeSuffix = BT.Is64Bit ?
'q' :
'l';
1657 raw_svector_ostream AsmOS(
Asm);
1658 if (BT.Interlocking != BitTest::Unlocked)
1663 AsmOS << SizeSuffix <<
" $2, ($1)";
1666 std::string Constraints =
"={@ccc},r,r,~{cc},~{memory}";
1668 if (!MachineClobbers.empty()) {
1670 Constraints += MachineClobbers;
1672 llvm::IntegerType *IntType = llvm::IntegerType::get(
1675 llvm::FunctionType *FTy =
1676 llvm::FunctionType::get(CGF.
Int8Ty, {CGF.UnqualPtrTy, IntType},
false);
1678 llvm::InlineAsm *IA =
1679 llvm::InlineAsm::get(FTy,
Asm, Constraints,
true);
1680 return CGF.
Builder.CreateCall(IA, {BitBase, BitPos});
1683static llvm::AtomicOrdering
1686 case BitTest::Unlocked:
return llvm::AtomicOrdering::NotAtomic;
1687 case BitTest::Sequential:
return llvm::AtomicOrdering::SequentiallyConsistent;
1688 case BitTest::Acquire:
return llvm::AtomicOrdering::Acquire;
1689 case BitTest::Release:
return llvm::AtomicOrdering::Release;
1690 case BitTest::NoFence:
return llvm::AtomicOrdering::Monotonic;
1692 llvm_unreachable(
"invalid interlocking");
1697 llvm::Type *ArgType = ArgValue->getType();
1702 if (
auto *VT = dyn_cast<llvm::FixedVectorType>(ArgType);
1703 VT && VT->getElementType()->isIntegerTy(1)) {
1704 llvm::Type *StorageType =
1705 llvm::Type::getIntNTy(CGF.
getLLVMContext(), VT->getNumElements());
1706 ArgValue = CGF.
Builder.CreateBitCast(ArgValue, StorageType);
1722 BitTest BT = BitTest::decodeBitTestBuiltin(BuiltinID);
1734 BitPos, llvm::ConstantInt::get(BitPos->
getType(), 3),
"bittest.byteidx");
1736 "bittest.byteaddr"),
1740 llvm::ConstantInt::get(CGF.
Int8Ty, 0x7));
1743 Value *Mask =
nullptr;
1744 if (BT.Action != BitTest::TestOnly) {
1745 Mask = CGF.
Builder.CreateShl(llvm::ConstantInt::get(CGF.
Int8Ty, 1), PosLow,
1752 Value *OldByte =
nullptr;
1753 if (Ordering != llvm::AtomicOrdering::NotAtomic) {
1756 llvm::AtomicRMWInst::BinOp RMWOp = llvm::AtomicRMWInst::Or;
1757 if (BT.Action == BitTest::Reset) {
1758 Mask = CGF.
Builder.CreateNot(Mask);
1759 RMWOp = llvm::AtomicRMWInst::And;
1765 Value *NewByte =
nullptr;
1766 switch (BT.Action) {
1767 case BitTest::TestOnly:
1770 case BitTest::Complement:
1771 NewByte = CGF.
Builder.CreateXor(OldByte, Mask);
1773 case BitTest::Reset:
1774 NewByte = CGF.
Builder.CreateAnd(OldByte, CGF.
Builder.CreateNot(Mask));
1777 NewByte = CGF.
Builder.CreateOr(OldByte, Mask);
1786 Value *ShiftedByte = CGF.
Builder.CreateLShr(OldByte, PosLow,
"bittest.shr");
1788 ShiftedByte, llvm::ConstantInt::get(CGF.
Int8Ty, 1),
"bittest.res");
1792enum class MSVCSetJmpKind {
1804 llvm::Value *Arg1 =
nullptr;
1805 llvm::Type *Arg1Ty =
nullptr;
1807 bool IsVarArg =
false;
1808 if (SJKind == MSVCSetJmpKind::_setjmp3) {
1811 Arg1 = llvm::ConstantInt::get(CGF.
IntTy, 0);
1814 Name = SJKind == MSVCSetJmpKind::_setjmp ?
"_setjmp" :
"_setjmpex";
1817 Arg1 = CGF.
Builder.CreateCall(
1820 Arg1 = CGF.
Builder.CreateCall(
1822 llvm::ConstantInt::get(CGF.
Int32Ty, 0));
1826 llvm::Type *ArgTypes[2] = {CGF.
Int8PtrTy, Arg1Ty};
1827 llvm::AttributeList ReturnsTwiceAttr = llvm::AttributeList::get(
1829 llvm::Attribute::ReturnsTwice);
1831 llvm::FunctionType::get(CGF.
IntTy, ArgTypes, IsVarArg), Name,
1832 ReturnsTwiceAttr,
true);
1834 llvm::Value *Buf = CGF.
Builder.CreateBitOrPointerCast(
1836 llvm::Value *Args[] = {Buf, Arg1};
1838 CB->setAttributes(ReturnsTwiceAttr);
1845 switch (BuiltinID) {
1851 llvm::Type *ArgType = ArgValue->
getType();
1855 Value *ArgZero = llvm::Constant::getNullValue(ArgType);
1856 Value *ResZero = llvm::Constant::getNullValue(ResultType);
1857 Value *ResOne = llvm::ConstantInt::get(ResultType, 1);
1862 PHINode *
Result =
Builder.CreatePHI(ResultType, 2,
"bitscan_result");
1865 Value *IsZero =
Builder.CreateICmpEQ(ArgValue, ArgZero);
1867 Builder.CreateCondBr(IsZero, End, NotZero);
1870 Builder.SetInsertPoint(NotZero);
1875 ZeroCount =
Builder.CreateIntCast(ZeroCount, IndexType,
false);
1878 unsigned ArgWidth = cast<llvm::IntegerType>(ArgType)->getBitWidth();
1879 Value *ArgTypeLastIndex = llvm::ConstantInt::get(IndexType, ArgWidth - 1);
1883 ZeroCount =
Builder.CreateIntCast(ZeroCount, IndexType,
false);
1884 Value *Index =
Builder.CreateNSWSub(ArgTypeLastIndex, ZeroCount);
1888 Result->addIncoming(ResOne, NotZero);
1907 AtomicOrdering::Acquire);
1910 AtomicOrdering::Release);
1913 AtomicOrdering::Monotonic);
1916 AtomicOrdering::Acquire);
1919 AtomicOrdering::Release);
1922 AtomicOrdering::Monotonic);
1933 *
this,
E, AtomicOrdering::SequentiallyConsistent);
1942 AtomicOrdering::Acquire);
1945 AtomicOrdering::Release);
1948 AtomicOrdering::Monotonic);
1951 AtomicOrdering::Acquire);
1954 AtomicOrdering::Release);
1957 AtomicOrdering::Monotonic);
1960 AtomicOrdering::Acquire);
1963 AtomicOrdering::Release);
1966 AtomicOrdering::Monotonic);
1990 StringRef
Asm, Constraints;
1995 case llvm::Triple::x86:
1996 case llvm::Triple::x86_64:
1998 Constraints =
"{cx}";
2000 case llvm::Triple::thumb:
2002 Constraints =
"{r0}";
2004 case llvm::Triple::aarch64:
2005 Asm =
"brk #0xF003";
2006 Constraints =
"{w0}";
2008 llvm::FunctionType *FTy = llvm::FunctionType::get(
VoidTy, {
Int32Ty},
false);
2009 llvm::InlineAsm *IA =
2010 llvm::InlineAsm::get(FTy,
Asm, Constraints,
true);
2011 llvm::AttributeList NoReturnAttr = llvm::AttributeList::get(
2013 llvm::Attribute::NoReturn);
2015 CI->setAttributes(NoReturnAttr);
2019 llvm_unreachable(
"Incorrect MSVC intrinsic!");
2025 CallObjCArcUse(llvm::Value *
object) : object(object) {}
2026 llvm::Value *object;
2037 "Unsupported builtin check kind");
2043 auto CheckOrdinal = SanitizerKind::SO_Builtin;
2044 auto CheckHandler = SanitizerHandler::InvalidBuiltin;
2047 ArgValue, llvm::Constant::getNullValue(ArgValue->
getType()));
2048 EmitCheck(std::make_pair(Cond, CheckOrdinal), CheckHandler,
2050 llvm::ConstantInt::get(
Builder.getInt8Ty(), Kind)},
2060 auto CheckOrdinal = SanitizerKind::SO_Builtin;
2061 auto CheckHandler = SanitizerHandler::InvalidBuiltin;
2064 std::make_pair(ArgValue, CheckOrdinal), CheckHandler,
2072 return CGF.
Builder.CreateBinaryIntrinsic(
2073 Intrinsic::abs, ArgValue,
2074 ConstantInt::get(CGF.
Builder.getInt1Ty(), HasNSW));
2078 bool SanitizeOverflow) {
2082 if (
const auto *VCI = dyn_cast<llvm::ConstantInt>(ArgValue)) {
2083 if (!VCI->isMinSignedValue())
2084 return EmitAbs(CGF, ArgValue,
true);
2089 if (SanitizeOverflow) {
2090 Ordinals.push_back(SanitizerKind::SO_SignedIntegerOverflow);
2091 CheckHandler = SanitizerHandler::NegateOverflow;
2093 CheckHandler = SanitizerHandler::SubOverflow;
2097 Constant *
Zero = Constant::getNullValue(ArgValue->
getType());
2098 Value *ResultAndOverflow = CGF.
Builder.CreateBinaryIntrinsic(
2099 Intrinsic::ssub_with_overflow,
Zero, ArgValue);
2102 CGF.
Builder.CreateExtractValue(ResultAndOverflow, 1));
2105 if (SanitizeOverflow) {
2106 CGF.
EmitCheck({{NotOverflow, SanitizerKind::SO_SignedIntegerOverflow}},
2115 return CGF.
Builder.CreateSelect(CmpResult,
Result, ArgValue,
"abs");
2120 QualType UnsignedTy =
C.getIntTypeForBitwidth(Size * 8,
false);
2121 return C.getCanonicalType(UnsignedTy);
2131 raw_svector_ostream OS(Name);
2132 OS <<
"__os_log_helper";
2136 for (
const auto &Item : Layout.
Items)
2137 OS <<
"_" <<
int(Item.getSizeByte()) <<
"_"
2138 <<
int(Item.getDescriptorByte());
2141 if (llvm::Function *F =
CGM.
getModule().getFunction(Name))
2151 for (
unsigned int I = 0,
E = Layout.
Items.size(); I <
E; ++I) {
2152 char Size = Layout.
Items[I].getSizeByte();
2159 &Ctx.
Idents.
get(std::string(
"arg") + llvm::to_string(I)), ArgTy,
2161 ArgTys.emplace_back(ArgTy);
2172 llvm::Function *Fn = llvm::Function::Create(
2173 FuncTy, llvm::GlobalValue::LinkOnceODRLinkage, Name, &
CGM.
getModule());
2174 Fn->setVisibility(llvm::GlobalValue::HiddenVisibility);
2177 Fn->setDoesNotThrow();
2181 Fn->addFnAttr(llvm::Attribute::NoInline);
2199 for (
const auto &Item : Layout.
Items) {
2201 Builder.getInt8(Item.getDescriptorByte()),
2204 Builder.getInt8(Item.getSizeByte()),
2208 if (!Size.getQuantity())
2225 assert(
E.getNumArgs() >= 2 &&
2226 "__builtin_os_log_format takes at least 2 arguments");
2236 for (
const auto &Item : Layout.
Items) {
2237 int Size = Item.getSizeByte();
2241 llvm::Value *ArgVal;
2245 for (
unsigned I = 0,
E = Item.getMaskType().size(); I <
E; ++I)
2246 Val |= ((uint64_t)Item.getMaskType()[I]) << I * 8;
2247 ArgVal = llvm::Constant::getIntegerValue(
Int64Ty, llvm::APInt(64, Val));
2248 }
else if (
const Expr *TheExpr = Item.getExpr()) {
2254 auto LifetimeExtendObject = [&](
const Expr *
E) {
2262 if (isa<CallExpr>(
E) || isa<ObjCMessageExpr>(
E))
2267 if (TheExpr->getType()->isObjCRetainableType() &&
2268 getLangOpts().ObjCAutoRefCount && LifetimeExtendObject(TheExpr)) {
2270 "Only scalar can be a ObjC retainable type");
2271 if (!isa<Constant>(ArgVal)) {
2285 pushCleanupAfterFullExpr<CallObjCArcUse>(Cleanup, ArgVal);
2289 ArgVal =
Builder.getInt32(Item.getConstValue().getQuantity());
2292 unsigned ArgValSize =
2296 ArgVal =
Builder.CreateBitOrPointerCast(ArgVal,
IntTy);
2312 unsigned BuiltinID, WidthAndSignedness Op1Info, WidthAndSignedness Op2Info,
2313 WidthAndSignedness ResultInfo) {
2314 return BuiltinID == Builtin::BI__builtin_mul_overflow &&
2315 Op1Info.Width == Op2Info.Width && Op2Info.Width == ResultInfo.Width &&
2316 !Op1Info.Signed && !Op2Info.Signed && ResultInfo.Signed;
2321 const clang::Expr *Op2, WidthAndSignedness Op2Info,
2323 WidthAndSignedness ResultInfo) {
2325 Builtin::BI__builtin_mul_overflow, Op1Info, Op2Info, ResultInfo) &&
2326 "Cannot specialize this multiply");
2331 llvm::Value *HasOverflow;
2333 CGF, Intrinsic::umul_with_overflow, V1, V2, HasOverflow);
2338 auto IntMax = llvm::APInt::getSignedMaxValue(ResultInfo.Width);
2339 llvm::Value *IntMaxValue = llvm::ConstantInt::get(
Result->getType(), IntMax);
2341 llvm::Value *IntMaxOverflow = CGF.
Builder.CreateICmpUGT(
Result, IntMaxValue);
2342 HasOverflow = CGF.
Builder.CreateOr(HasOverflow, IntMaxOverflow);
2354 WidthAndSignedness Op1Info,
2355 WidthAndSignedness Op2Info,
2356 WidthAndSignedness ResultInfo) {
2357 return BuiltinID == Builtin::BI__builtin_mul_overflow &&
2358 std::max(Op1Info.Width, Op2Info.Width) >= ResultInfo.Width &&
2359 Op1Info.Signed != Op2Info.Signed;
2366 WidthAndSignedness Op1Info,
const clang::Expr *Op2,
2367 WidthAndSignedness Op2Info,
2369 WidthAndSignedness ResultInfo) {
2371 Op2Info, ResultInfo) &&
2372 "Not a mixed-sign multipliction we can specialize");
2375 const clang::Expr *SignedOp = Op1Info.Signed ? Op1 : Op2;
2376 const clang::Expr *UnsignedOp = Op1Info.Signed ? Op2 : Op1;
2379 unsigned SignedOpWidth = Op1Info.Signed ? Op1Info.Width : Op2Info.Width;
2380 unsigned UnsignedOpWidth = Op1Info.Signed ? Op2Info.Width : Op1Info.Width;
2383 if (SignedOpWidth < UnsignedOpWidth)
2385 if (UnsignedOpWidth < SignedOpWidth)
2388 llvm::Type *OpTy =
Signed->getType();
2389 llvm::Value *
Zero = llvm::Constant::getNullValue(OpTy);
2392 unsigned OpWidth = std::max(Op1Info.Width, Op2Info.Width);
2397 llvm::Value *AbsSigned =
2398 CGF.
Builder.CreateSelect(IsNegative, AbsOfNegative,
Signed);
2401 llvm::Value *UnsignedOverflow;
2402 llvm::Value *UnsignedResult =
2406 llvm::Value *Overflow, *
Result;
2407 if (ResultInfo.Signed) {
2411 llvm::APInt::getSignedMaxValue(ResultInfo.Width).zext(OpWidth);
2412 llvm::Value *MaxResult =
2413 CGF.
Builder.CreateAdd(llvm::ConstantInt::get(OpTy, IntMax),
2414 CGF.
Builder.CreateZExt(IsNegative, OpTy));
2415 llvm::Value *SignedOverflow =
2416 CGF.
Builder.CreateICmpUGT(UnsignedResult, MaxResult);
2417 Overflow = CGF.
Builder.CreateOr(UnsignedOverflow, SignedOverflow);
2420 llvm::Value *NegativeResult = CGF.
Builder.CreateNeg(UnsignedResult);
2421 llvm::Value *SignedResult =
2422 CGF.
Builder.CreateSelect(IsNegative, NegativeResult, UnsignedResult);
2426 llvm::Value *Underflow = CGF.
Builder.CreateAnd(
2427 IsNegative, CGF.
Builder.CreateIsNotNull(UnsignedResult));
2428 Overflow = CGF.
Builder.CreateOr(UnsignedOverflow, Underflow);
2429 if (ResultInfo.Width < OpWidth) {
2431 llvm::APInt::getMaxValue(ResultInfo.Width).zext(OpWidth);
2432 llvm::Value *TruncOverflow = CGF.
Builder.CreateICmpUGT(
2433 UnsignedResult, llvm::ConstantInt::get(OpTy, IntMax));
2434 Overflow = CGF.
Builder.CreateOr(Overflow, TruncOverflow);
2439 IsNegative, CGF.
Builder.CreateNeg(UnsignedResult), UnsignedResult);
2443 assert(Overflow &&
Result &&
"Missing overflow or result");
2454 llvm::SmallPtrSetImpl<const Decl *> &Seen) {
2463 if (!Seen.insert(
Record).second)
2466 assert(
Record->hasDefinition() &&
2467 "Incomplete types should already be diagnosed");
2469 if (
Record->isDynamicClass())
2494 llvm::Type *Ty = Src->getType();
2495 ShiftAmt =
Builder.CreateIntCast(ShiftAmt, Ty,
false);
2498 unsigned IID = IsRotateRight ? Intrinsic::fshr : Intrinsic::fshl;
2505 switch (BuiltinID) {
2506#define MUTATE_LDBL(func) \
2507 case Builtin::BI__builtin_##func##l: \
2508 return Builtin::BI__builtin_##func##f128;
2577 if (CGF.
Builder.getIsFPConstrained() &&
2578 CGF.
Builder.getDefaultConstrainedExcept() != fp::ebIgnore) {
2590 auto UBF = CGF->
CGM.
getModule().getOrInsertFunction(Name, FnTy);
2593 for (
auto &&FormalTy : FnTy->params())
2594 Args.push_back(llvm::PoisonValue::get(FormalTy));
2602 assert(!
getContext().BuiltinInfo.isImmediate(BuiltinID) &&
2603 "Should not codegen for consteval builtins");
2610 !
Result.hasSideEffects()) {
2614 if (
Result.Val.isFloat())
2623 if (
getTarget().getTriple().isPPC64() &&
2624 &
getTarget().getLongDoubleFormat() == &llvm::APFloat::IEEEquad())
2631 const unsigned BuiltinIDIfNoAsmLabel =
2632 FD->
hasAttr<AsmLabelAttr>() ? 0 : BuiltinID;
2634 std::optional<bool> ErrnoOverriden;
2638 if (
E->hasStoredFPFeatures()) {
2640 if (OP.hasMathErrnoOverride())
2641 ErrnoOverriden = OP.getMathErrnoOverride();
2650 bool ErrnoOverridenToFalseWithOpt =
2651 ErrnoOverriden.has_value() && !ErrnoOverriden.value() && !OptNone &&
2669 switch (BuiltinID) {
2670 case Builtin::BI__builtin_fma:
2671 case Builtin::BI__builtin_fmaf:
2672 case Builtin::BI__builtin_fmal:
2673 case Builtin::BI__builtin_fmaf16:
2674 case Builtin::BIfma:
2675 case Builtin::BIfmaf:
2676 case Builtin::BIfmal: {
2678 if (Trip.isGNUEnvironment() || Trip.isOSMSVCRT())
2686 bool ConstWithoutErrnoAndExceptions =
2688 bool ConstWithoutExceptions =
2706 bool ConstWithoutErrnoOrExceptions =
2707 ConstWithoutErrnoAndExceptions || ConstWithoutExceptions;
2708 bool GenerateIntrinsics =
2709 (ConstAlways && !OptNone) ||
2711 !(ErrnoOverriden.has_value() && ErrnoOverriden.value()) && !OptNone);
2712 if (!GenerateIntrinsics) {
2713 GenerateIntrinsics =
2714 ConstWithoutErrnoOrExceptions && !ConstWithoutErrnoAndExceptions;
2715 if (!GenerateIntrinsics)
2716 GenerateIntrinsics =
2717 ConstWithoutErrnoOrExceptions &&
2719 !(ErrnoOverriden.has_value() && ErrnoOverriden.value()) && !OptNone);
2720 if (!GenerateIntrinsics)
2721 GenerateIntrinsics =
2722 ConstWithoutErrnoOrExceptions && ErrnoOverridenToFalseWithOpt;
2724 if (GenerateIntrinsics) {
2725 switch (BuiltinIDIfNoAsmLabel) {
2726 case Builtin::BIacos:
2727 case Builtin::BIacosf:
2728 case Builtin::BIacosl:
2729 case Builtin::BI__builtin_acos:
2730 case Builtin::BI__builtin_acosf:
2731 case Builtin::BI__builtin_acosf16:
2732 case Builtin::BI__builtin_acosl:
2733 case Builtin::BI__builtin_acosf128:
2735 *
this,
E, Intrinsic::acos, Intrinsic::experimental_constrained_acos));
2737 case Builtin::BIasin:
2738 case Builtin::BIasinf:
2739 case Builtin::BIasinl:
2740 case Builtin::BI__builtin_asin:
2741 case Builtin::BI__builtin_asinf:
2742 case Builtin::BI__builtin_asinf16:
2743 case Builtin::BI__builtin_asinl:
2744 case Builtin::BI__builtin_asinf128:
2746 *
this,
E, Intrinsic::asin, Intrinsic::experimental_constrained_asin));
2748 case Builtin::BIatan:
2749 case Builtin::BIatanf:
2750 case Builtin::BIatanl:
2751 case Builtin::BI__builtin_atan:
2752 case Builtin::BI__builtin_atanf:
2753 case Builtin::BI__builtin_atanf16:
2754 case Builtin::BI__builtin_atanl:
2755 case Builtin::BI__builtin_atanf128:
2757 *
this,
E, Intrinsic::atan, Intrinsic::experimental_constrained_atan));
2759 case Builtin::BIatan2:
2760 case Builtin::BIatan2f:
2761 case Builtin::BIatan2l:
2762 case Builtin::BI__builtin_atan2:
2763 case Builtin::BI__builtin_atan2f:
2764 case Builtin::BI__builtin_atan2f16:
2765 case Builtin::BI__builtin_atan2l:
2766 case Builtin::BI__builtin_atan2f128:
2768 *
this,
E, Intrinsic::atan2,
2769 Intrinsic::experimental_constrained_atan2));
2771 case Builtin::BIceil:
2772 case Builtin::BIceilf:
2773 case Builtin::BIceill:
2774 case Builtin::BI__builtin_ceil:
2775 case Builtin::BI__builtin_ceilf:
2776 case Builtin::BI__builtin_ceilf16:
2777 case Builtin::BI__builtin_ceill:
2778 case Builtin::BI__builtin_ceilf128:
2781 Intrinsic::experimental_constrained_ceil));
2783 case Builtin::BIcopysign:
2784 case Builtin::BIcopysignf:
2785 case Builtin::BIcopysignl:
2786 case Builtin::BI__builtin_copysign:
2787 case Builtin::BI__builtin_copysignf:
2788 case Builtin::BI__builtin_copysignf16:
2789 case Builtin::BI__builtin_copysignl:
2790 case Builtin::BI__builtin_copysignf128:
2792 emitBuiltinWithOneOverloadedType<2>(*
this,
E, Intrinsic::copysign));
2794 case Builtin::BIcos:
2795 case Builtin::BIcosf:
2796 case Builtin::BIcosl:
2797 case Builtin::BI__builtin_cos:
2798 case Builtin::BI__builtin_cosf:
2799 case Builtin::BI__builtin_cosf16:
2800 case Builtin::BI__builtin_cosl:
2801 case Builtin::BI__builtin_cosf128:
2804 Intrinsic::experimental_constrained_cos));
2806 case Builtin::BIcosh:
2807 case Builtin::BIcoshf:
2808 case Builtin::BIcoshl:
2809 case Builtin::BI__builtin_cosh:
2810 case Builtin::BI__builtin_coshf:
2811 case Builtin::BI__builtin_coshf16:
2812 case Builtin::BI__builtin_coshl:
2813 case Builtin::BI__builtin_coshf128:
2815 *
this,
E, Intrinsic::cosh, Intrinsic::experimental_constrained_cosh));
2817 case Builtin::BIexp:
2818 case Builtin::BIexpf:
2819 case Builtin::BIexpl:
2820 case Builtin::BI__builtin_exp:
2821 case Builtin::BI__builtin_expf:
2822 case Builtin::BI__builtin_expf16:
2823 case Builtin::BI__builtin_expl:
2824 case Builtin::BI__builtin_expf128:
2827 Intrinsic::experimental_constrained_exp));
2829 case Builtin::BIexp2:
2830 case Builtin::BIexp2f:
2831 case Builtin::BIexp2l:
2832 case Builtin::BI__builtin_exp2:
2833 case Builtin::BI__builtin_exp2f:
2834 case Builtin::BI__builtin_exp2f16:
2835 case Builtin::BI__builtin_exp2l:
2836 case Builtin::BI__builtin_exp2f128:
2839 Intrinsic::experimental_constrained_exp2));
2840 case Builtin::BI__builtin_exp10:
2841 case Builtin::BI__builtin_exp10f:
2842 case Builtin::BI__builtin_exp10f16:
2843 case Builtin::BI__builtin_exp10l:
2844 case Builtin::BI__builtin_exp10f128: {
2846 if (
Builder.getIsFPConstrained())
2849 emitBuiltinWithOneOverloadedType<1>(*
this,
E, Intrinsic::exp10));
2851 case Builtin::BIfabs:
2852 case Builtin::BIfabsf:
2853 case Builtin::BIfabsl:
2854 case Builtin::BI__builtin_fabs:
2855 case Builtin::BI__builtin_fabsf:
2856 case Builtin::BI__builtin_fabsf16:
2857 case Builtin::BI__builtin_fabsl:
2858 case Builtin::BI__builtin_fabsf128:
2860 emitBuiltinWithOneOverloadedType<1>(*
this,
E, Intrinsic::fabs));
2862 case Builtin::BIfloor:
2863 case Builtin::BIfloorf:
2864 case Builtin::BIfloorl:
2865 case Builtin::BI__builtin_floor:
2866 case Builtin::BI__builtin_floorf:
2867 case Builtin::BI__builtin_floorf16:
2868 case Builtin::BI__builtin_floorl:
2869 case Builtin::BI__builtin_floorf128:
2872 Intrinsic::experimental_constrained_floor));
2874 case Builtin::BIfma:
2875 case Builtin::BIfmaf:
2876 case Builtin::BIfmal:
2877 case Builtin::BI__builtin_fma:
2878 case Builtin::BI__builtin_fmaf:
2879 case Builtin::BI__builtin_fmaf16:
2880 case Builtin::BI__builtin_fmal:
2881 case Builtin::BI__builtin_fmaf128:
2884 Intrinsic::experimental_constrained_fma));
2886 case Builtin::BIfmax:
2887 case Builtin::BIfmaxf:
2888 case Builtin::BIfmaxl:
2889 case Builtin::BI__builtin_fmax:
2890 case Builtin::BI__builtin_fmaxf:
2891 case Builtin::BI__builtin_fmaxf16:
2892 case Builtin::BI__builtin_fmaxl:
2893 case Builtin::BI__builtin_fmaxf128:
2896 Intrinsic::experimental_constrained_maxnum));
2898 case Builtin::BIfmin:
2899 case Builtin::BIfminf:
2900 case Builtin::BIfminl:
2901 case Builtin::BI__builtin_fmin:
2902 case Builtin::BI__builtin_fminf:
2903 case Builtin::BI__builtin_fminf16:
2904 case Builtin::BI__builtin_fminl:
2905 case Builtin::BI__builtin_fminf128:
2908 Intrinsic::experimental_constrained_minnum));
2910 case Builtin::BIfmaximum_num:
2911 case Builtin::BIfmaximum_numf:
2912 case Builtin::BIfmaximum_numl:
2913 case Builtin::BI__builtin_fmaximum_num:
2914 case Builtin::BI__builtin_fmaximum_numf:
2915 case Builtin::BI__builtin_fmaximum_numf16:
2916 case Builtin::BI__builtin_fmaximum_numl:
2917 case Builtin::BI__builtin_fmaximum_numf128:
2919 emitBuiltinWithOneOverloadedType<2>(*
this,
E, Intrinsic::maximumnum));
2921 case Builtin::BIfminimum_num:
2922 case Builtin::BIfminimum_numf:
2923 case Builtin::BIfminimum_numl:
2924 case Builtin::BI__builtin_fminimum_num:
2925 case Builtin::BI__builtin_fminimum_numf:
2926 case Builtin::BI__builtin_fminimum_numf16:
2927 case Builtin::BI__builtin_fminimum_numl:
2928 case Builtin::BI__builtin_fminimum_numf128:
2930 emitBuiltinWithOneOverloadedType<2>(*
this,
E, Intrinsic::minimumnum));
2934 case Builtin::BIfmod:
2935 case Builtin::BIfmodf:
2936 case Builtin::BIfmodl:
2937 case Builtin::BI__builtin_fmod:
2938 case Builtin::BI__builtin_fmodf:
2939 case Builtin::BI__builtin_fmodf16:
2940 case Builtin::BI__builtin_fmodl:
2941 case Builtin::BI__builtin_fmodf128:
2942 case Builtin::BI__builtin_elementwise_fmod: {
2949 case Builtin::BIlog:
2950 case Builtin::BIlogf:
2951 case Builtin::BIlogl:
2952 case Builtin::BI__builtin_log:
2953 case Builtin::BI__builtin_logf:
2954 case Builtin::BI__builtin_logf16:
2955 case Builtin::BI__builtin_logl:
2956 case Builtin::BI__builtin_logf128:
2959 Intrinsic::experimental_constrained_log));
2961 case Builtin::BIlog10:
2962 case Builtin::BIlog10f:
2963 case Builtin::BIlog10l:
2964 case Builtin::BI__builtin_log10:
2965 case Builtin::BI__builtin_log10f:
2966 case Builtin::BI__builtin_log10f16:
2967 case Builtin::BI__builtin_log10l:
2968 case Builtin::BI__builtin_log10f128:
2971 Intrinsic::experimental_constrained_log10));
2973 case Builtin::BIlog2:
2974 case Builtin::BIlog2f:
2975 case Builtin::BIlog2l:
2976 case Builtin::BI__builtin_log2:
2977 case Builtin::BI__builtin_log2f:
2978 case Builtin::BI__builtin_log2f16:
2979 case Builtin::BI__builtin_log2l:
2980 case Builtin::BI__builtin_log2f128:
2983 Intrinsic::experimental_constrained_log2));
2985 case Builtin::BInearbyint:
2986 case Builtin::BInearbyintf:
2987 case Builtin::BInearbyintl:
2988 case Builtin::BI__builtin_nearbyint:
2989 case Builtin::BI__builtin_nearbyintf:
2990 case Builtin::BI__builtin_nearbyintl:
2991 case Builtin::BI__builtin_nearbyintf128:
2993 Intrinsic::nearbyint,
2994 Intrinsic::experimental_constrained_nearbyint));
2996 case Builtin::BIpow:
2997 case Builtin::BIpowf:
2998 case Builtin::BIpowl:
2999 case Builtin::BI__builtin_pow:
3000 case Builtin::BI__builtin_powf:
3001 case Builtin::BI__builtin_powf16:
3002 case Builtin::BI__builtin_powl:
3003 case Builtin::BI__builtin_powf128:
3006 Intrinsic::experimental_constrained_pow));
3008 case Builtin::BIrint:
3009 case Builtin::BIrintf:
3010 case Builtin::BIrintl:
3011 case Builtin::BI__builtin_rint:
3012 case Builtin::BI__builtin_rintf:
3013 case Builtin::BI__builtin_rintf16:
3014 case Builtin::BI__builtin_rintl:
3015 case Builtin::BI__builtin_rintf128:
3018 Intrinsic::experimental_constrained_rint));
3020 case Builtin::BIround:
3021 case Builtin::BIroundf:
3022 case Builtin::BIroundl:
3023 case Builtin::BI__builtin_round:
3024 case Builtin::BI__builtin_roundf:
3025 case Builtin::BI__builtin_roundf16:
3026 case Builtin::BI__builtin_roundl:
3027 case Builtin::BI__builtin_roundf128:
3030 Intrinsic::experimental_constrained_round));
3032 case Builtin::BIroundeven:
3033 case Builtin::BIroundevenf:
3034 case Builtin::BIroundevenl:
3035 case Builtin::BI__builtin_roundeven:
3036 case Builtin::BI__builtin_roundevenf:
3037 case Builtin::BI__builtin_roundevenf16:
3038 case Builtin::BI__builtin_roundevenl:
3039 case Builtin::BI__builtin_roundevenf128:
3041 Intrinsic::roundeven,
3042 Intrinsic::experimental_constrained_roundeven));
3044 case Builtin::BIsin:
3045 case Builtin::BIsinf:
3046 case Builtin::BIsinl:
3047 case Builtin::BI__builtin_sin:
3048 case Builtin::BI__builtin_sinf:
3049 case Builtin::BI__builtin_sinf16:
3050 case Builtin::BI__builtin_sinl:
3051 case Builtin::BI__builtin_sinf128:
3054 Intrinsic::experimental_constrained_sin));
3056 case Builtin::BIsinh:
3057 case Builtin::BIsinhf:
3058 case Builtin::BIsinhl:
3059 case Builtin::BI__builtin_sinh:
3060 case Builtin::BI__builtin_sinhf:
3061 case Builtin::BI__builtin_sinhf16:
3062 case Builtin::BI__builtin_sinhl:
3063 case Builtin::BI__builtin_sinhf128:
3065 *
this,
E, Intrinsic::sinh, Intrinsic::experimental_constrained_sinh));
3067 case Builtin::BI__builtin_sincospi:
3068 case Builtin::BI__builtin_sincospif:
3069 case Builtin::BI__builtin_sincospil:
3070 if (
Builder.getIsFPConstrained())
3075 case Builtin::BIsincos:
3076 case Builtin::BIsincosf:
3077 case Builtin::BIsincosl:
3078 case Builtin::BI__builtin_sincos:
3079 case Builtin::BI__builtin_sincosf:
3080 case Builtin::BI__builtin_sincosf16:
3081 case Builtin::BI__builtin_sincosl:
3082 case Builtin::BI__builtin_sincosf128:
3083 if (
Builder.getIsFPConstrained())
3088 case Builtin::BIsqrt:
3089 case Builtin::BIsqrtf:
3090 case Builtin::BIsqrtl:
3091 case Builtin::BI__builtin_sqrt:
3092 case Builtin::BI__builtin_sqrtf:
3093 case Builtin::BI__builtin_sqrtf16:
3094 case Builtin::BI__builtin_sqrtl:
3095 case Builtin::BI__builtin_sqrtf128:
3096 case Builtin::BI__builtin_elementwise_sqrt: {
3098 *
this,
E, Intrinsic::sqrt, Intrinsic::experimental_constrained_sqrt);
3103 case Builtin::BItan:
3104 case Builtin::BItanf:
3105 case Builtin::BItanl:
3106 case Builtin::BI__builtin_tan:
3107 case Builtin::BI__builtin_tanf:
3108 case Builtin::BI__builtin_tanf16:
3109 case Builtin::BI__builtin_tanl:
3110 case Builtin::BI__builtin_tanf128:
3112 *
this,
E, Intrinsic::tan, Intrinsic::experimental_constrained_tan));
3114 case Builtin::BItanh:
3115 case Builtin::BItanhf:
3116 case Builtin::BItanhl:
3117 case Builtin::BI__builtin_tanh:
3118 case Builtin::BI__builtin_tanhf:
3119 case Builtin::BI__builtin_tanhf16:
3120 case Builtin::BI__builtin_tanhl:
3121 case Builtin::BI__builtin_tanhf128:
3123 *
this,
E, Intrinsic::tanh, Intrinsic::experimental_constrained_tanh));
3125 case Builtin::BItrunc:
3126 case Builtin::BItruncf:
3127 case Builtin::BItruncl:
3128 case Builtin::BI__builtin_trunc:
3129 case Builtin::BI__builtin_truncf:
3130 case Builtin::BI__builtin_truncf16:
3131 case Builtin::BI__builtin_truncl:
3132 case Builtin::BI__builtin_truncf128:
3135 Intrinsic::experimental_constrained_trunc));
3137 case Builtin::BIlround:
3138 case Builtin::BIlroundf:
3139 case Builtin::BIlroundl:
3140 case Builtin::BI__builtin_lround:
3141 case Builtin::BI__builtin_lroundf:
3142 case Builtin::BI__builtin_lroundl:
3143 case Builtin::BI__builtin_lroundf128:
3145 *
this,
E, Intrinsic::lround,
3146 Intrinsic::experimental_constrained_lround));
3148 case Builtin::BIllround:
3149 case Builtin::BIllroundf:
3150 case Builtin::BIllroundl:
3151 case Builtin::BI__builtin_llround:
3152 case Builtin::BI__builtin_llroundf:
3153 case Builtin::BI__builtin_llroundl:
3154 case Builtin::BI__builtin_llroundf128:
3156 *
this,
E, Intrinsic::llround,
3157 Intrinsic::experimental_constrained_llround));
3159 case Builtin::BIlrint:
3160 case Builtin::BIlrintf:
3161 case Builtin::BIlrintl:
3162 case Builtin::BI__builtin_lrint:
3163 case Builtin::BI__builtin_lrintf:
3164 case Builtin::BI__builtin_lrintl:
3165 case Builtin::BI__builtin_lrintf128:
3167 *
this,
E, Intrinsic::lrint,
3168 Intrinsic::experimental_constrained_lrint));
3170 case Builtin::BIllrint:
3171 case Builtin::BIllrintf:
3172 case Builtin::BIllrintl:
3173 case Builtin::BI__builtin_llrint:
3174 case Builtin::BI__builtin_llrintf:
3175 case Builtin::BI__builtin_llrintl:
3176 case Builtin::BI__builtin_llrintf128:
3178 *
this,
E, Intrinsic::llrint,
3179 Intrinsic::experimental_constrained_llrint));
3180 case Builtin::BI__builtin_ldexp:
3181 case Builtin::BI__builtin_ldexpf:
3182 case Builtin::BI__builtin_ldexpl:
3183 case Builtin::BI__builtin_ldexpf16:
3184 case Builtin::BI__builtin_ldexpf128: {
3186 *
this,
E, Intrinsic::ldexp,
3187 Intrinsic::experimental_constrained_ldexp));
3197 Value *Val = A.emitRawPointer(*
this);
3203 SkippedChecks.
set(SanitizerKind::All);
3204 SkippedChecks.
clear(SanitizerKind::Alignment);
3207 if (
auto *CE = dyn_cast<ImplicitCastExpr>(Arg))
3208 if (CE->getCastKind() == CK_BitCast)
3209 Arg = CE->getSubExpr();
3215 switch (BuiltinIDIfNoAsmLabel) {
3217 case Builtin::BI__builtin___CFStringMakeConstantString:
3218 case Builtin::BI__builtin___NSStringMakeConstantString:
3220 case Builtin::BI__builtin_stdarg_start:
3221 case Builtin::BI__builtin_va_start:
3222 case Builtin::BI__va_start:
3223 case Builtin::BI__builtin_c23_va_start:
3224 case Builtin::BI__builtin_va_end:
3228 BuiltinID != Builtin::BI__builtin_va_end);
3230 case Builtin::BI__builtin_va_copy: {
3237 case Builtin::BIabs:
3238 case Builtin::BIlabs:
3239 case Builtin::BIllabs:
3240 case Builtin::BI__builtin_abs:
3241 case Builtin::BI__builtin_labs:
3242 case Builtin::BI__builtin_llabs: {
3243 bool SanitizeOverflow =
SanOpts.
has(SanitizerKind::SignedIntegerOverflow);
3246 switch (
getLangOpts().getSignedOverflowBehavior()) {
3251 if (!SanitizeOverflow) {
3263 case Builtin::BI__builtin_complex: {
3268 case Builtin::BI__builtin_conj:
3269 case Builtin::BI__builtin_conjf:
3270 case Builtin::BI__builtin_conjl:
3271 case Builtin::BIconj:
3272 case Builtin::BIconjf:
3273 case Builtin::BIconjl: {
3275 Value *Real = ComplexVal.first;
3276 Value *Imag = ComplexVal.second;
3277 Imag =
Builder.CreateFNeg(Imag,
"neg");
3280 case Builtin::BI__builtin_creal:
3281 case Builtin::BI__builtin_crealf:
3282 case Builtin::BI__builtin_creall:
3283 case Builtin::BIcreal:
3284 case Builtin::BIcrealf:
3285 case Builtin::BIcreall: {
3290 case Builtin::BI__builtin_preserve_access_index: {
3311 case Builtin::BI__builtin_cimag:
3312 case Builtin::BI__builtin_cimagf:
3313 case Builtin::BI__builtin_cimagl:
3314 case Builtin::BIcimag:
3315 case Builtin::BIcimagf:
3316 case Builtin::BIcimagl: {
3321 case Builtin::BI__builtin_clrsb:
3322 case Builtin::BI__builtin_clrsbl:
3323 case Builtin::BI__builtin_clrsbll: {
3327 llvm::Type *ArgType = ArgValue->
getType();
3331 Value *
Zero = llvm::Constant::getNullValue(ArgType);
3334 Value *Tmp =
Builder.CreateSelect(IsNeg, Inverse, ArgValue);
3341 case Builtin::BI__builtin_ctzs:
3342 case Builtin::BI__builtin_ctz:
3343 case Builtin::BI__builtin_ctzl:
3344 case Builtin::BI__builtin_ctzll:
3345 case Builtin::BI__builtin_ctzg:
3346 case Builtin::BI__builtin_elementwise_cttz: {
3348 (BuiltinIDIfNoAsmLabel == Builtin::BI__builtin_ctzg ||
3349 BuiltinIDIfNoAsmLabel == Builtin::BI__builtin_elementwise_cttz) &&
3350 E->getNumArgs() > 1;
3356 llvm::Type *ArgType = ArgValue->
getType();
3362 HasFallback ||
getTarget().isCLZForZeroUndef() ||
3363 BuiltinIDIfNoAsmLabel == Builtin::BI__builtin_elementwise_cttz);
3365 if (
Result->getType() != ResultType)
3371 Value *
Zero = Constant::getNullValue(ArgType);
3374 Value *ResultOrFallback =
3375 Builder.CreateSelect(IsZero, FallbackValue,
Result,
"ctzg");
3378 case Builtin::BI__builtin_clzs:
3379 case Builtin::BI__builtin_clz:
3380 case Builtin::BI__builtin_clzl:
3381 case Builtin::BI__builtin_clzll:
3382 case Builtin::BI__builtin_clzg:
3383 case Builtin::BI__builtin_elementwise_ctlz: {
3385 (BuiltinIDIfNoAsmLabel == Builtin::BI__builtin_clzg ||
3386 BuiltinIDIfNoAsmLabel == Builtin::BI__builtin_elementwise_ctlz) &&
3387 E->getNumArgs() > 1;
3393 llvm::Type *ArgType = ArgValue->
getType();
3399 HasFallback ||
getTarget().isCLZForZeroUndef() ||
3400 BuiltinIDIfNoAsmLabel == Builtin::BI__builtin_elementwise_ctlz);
3402 if (
Result->getType() != ResultType)
3408 Value *
Zero = Constant::getNullValue(ArgType);
3411 Value *ResultOrFallback =
3412 Builder.CreateSelect(IsZero, FallbackValue,
Result,
"clzg");
3415 case Builtin::BI__builtin_ffs:
3416 case Builtin::BI__builtin_ffsl:
3417 case Builtin::BI__builtin_ffsll: {
3421 llvm::Type *ArgType = ArgValue->
getType();
3426 Builder.CreateAdd(
Builder.CreateCall(F, {ArgValue, Builder.getTrue()}),
3427 llvm::ConstantInt::get(ArgType, 1));
3428 Value *
Zero = llvm::Constant::getNullValue(ArgType);
3431 if (
Result->getType() != ResultType)
3436 case Builtin::BI__builtin_parity:
3437 case Builtin::BI__builtin_parityl:
3438 case Builtin::BI__builtin_parityll: {
3442 llvm::Type *ArgType = ArgValue->
getType();
3448 if (
Result->getType() != ResultType)
3453 case Builtin::BI__lzcnt16:
3454 case Builtin::BI__lzcnt:
3455 case Builtin::BI__lzcnt64: {
3458 llvm::Type *ArgType = ArgValue->
getType();
3463 if (
Result->getType() != ResultType)
3468 case Builtin::BI__popcnt16:
3469 case Builtin::BI__popcnt:
3470 case Builtin::BI__popcnt64:
3471 case Builtin::BI__builtin_popcount:
3472 case Builtin::BI__builtin_popcountl:
3473 case Builtin::BI__builtin_popcountll:
3474 case Builtin::BI__builtin_popcountg: {
3477 llvm::Type *ArgType = ArgValue->
getType();
3482 if (
Result->getType() != ResultType)
3487 case Builtin::BI__builtin_unpredictable: {
3493 case Builtin::BI__builtin_expect: {
3495 llvm::Type *ArgType = ArgValue->
getType();
3506 Builder.CreateCall(FnExpect, {ArgValue, ExpectedValue},
"expval");
3509 case Builtin::BI__builtin_expect_with_probability: {
3511 llvm::Type *ArgType = ArgValue->
getType();
3514 llvm::APFloat Probability(0.0);
3515 const Expr *ProbArg =
E->getArg(2);
3517 assert(EvalSucceed &&
"probability should be able to evaluate as float");
3519 bool LoseInfo =
false;
3520 Probability.convert(llvm::APFloat::IEEEdouble(),
3521 llvm::RoundingMode::Dynamic, &LoseInfo);
3523 Constant *Confidence = ConstantFP::get(Ty, Probability);
3533 FnExpect, {ArgValue, ExpectedValue, Confidence},
"expval");
3536 case Builtin::BI__builtin_assume_aligned: {
3537 const Expr *Ptr =
E->getArg(0);
3539 Value *OffsetValue =
3543 ConstantInt *AlignmentCI = cast<ConstantInt>(AlignmentValue);
3544 if (AlignmentCI->getValue().ugt(llvm::Value::MaximumAlignment))
3545 AlignmentCI = ConstantInt::get(AlignmentCI->getIntegerType(),
3546 llvm::Value::MaximumAlignment);
3550 AlignmentCI, OffsetValue);
3553 case Builtin::BI__builtin_assume_dereferenceable: {
3554 const Expr *Ptr =
E->getArg(0);
3555 const Expr *Size =
E->getArg(1);
3561 Builder.CreateDereferenceableAssumption(PtrValue, SizeValue);
3564 case Builtin::BI__assume:
3565 case Builtin::BI__builtin_assume: {
3571 Builder.CreateCall(FnAssume, ArgValue);
3574 case Builtin::BI__builtin_assume_separate_storage: {
3575 const Expr *Arg0 =
E->getArg(0);
3576 const Expr *Arg1 =
E->getArg(1);
3581 Value *Values[] = {Value0, Value1};
3582 OperandBundleDefT<Value *> OBD(
"separate_storage", Values);
3586 case Builtin::BI__builtin_allow_runtime_check: {
3592 llvm::MetadataAsValue::get(Ctx, llvm::MDString::get(Ctx, Kind)));
3595 case Builtin::BI__arithmetic_fence: {
3599 llvm::FastMathFlags FMF =
Builder.getFastMathFlags();
3600 bool isArithmeticFenceEnabled =
3601 FMF.allowReassoc() &&
3605 if (isArithmeticFenceEnabled) {
3608 Value *Real =
Builder.CreateArithmeticFence(ComplexVal.first,
3610 Value *Imag =
Builder.CreateArithmeticFence(ComplexVal.second,
3615 Value *Real = ComplexVal.first;
3616 Value *Imag = ComplexVal.second;
3620 if (isArithmeticFenceEnabled)
3625 case Builtin::BI__builtin_bswap16:
3626 case Builtin::BI__builtin_bswap32:
3627 case Builtin::BI__builtin_bswap64:
3628 case Builtin::BI_byteswap_ushort:
3629 case Builtin::BI_byteswap_ulong:
3630 case Builtin::BI_byteswap_uint64: {
3632 emitBuiltinWithOneOverloadedType<1>(*
this,
E, Intrinsic::bswap));
3634 case Builtin::BI__builtin_bitreverse8:
3635 case Builtin::BI__builtin_bitreverse16:
3636 case Builtin::BI__builtin_bitreverse32:
3637 case Builtin::BI__builtin_bitreverse64: {
3639 emitBuiltinWithOneOverloadedType<1>(*
this,
E, Intrinsic::bitreverse));
3641 case Builtin::BI__builtin_rotateleft8:
3642 case Builtin::BI__builtin_rotateleft16:
3643 case Builtin::BI__builtin_rotateleft32:
3644 case Builtin::BI__builtin_rotateleft64:
3645 case Builtin::BI_rotl8:
3646 case Builtin::BI_rotl16:
3647 case Builtin::BI_rotl:
3648 case Builtin::BI_lrotl:
3649 case Builtin::BI_rotl64:
3652 case Builtin::BI__builtin_rotateright8:
3653 case Builtin::BI__builtin_rotateright16:
3654 case Builtin::BI__builtin_rotateright32:
3655 case Builtin::BI__builtin_rotateright64:
3656 case Builtin::BI_rotr8:
3657 case Builtin::BI_rotr16:
3658 case Builtin::BI_rotr:
3659 case Builtin::BI_lrotr:
3660 case Builtin::BI_rotr64:
3663 case Builtin::BI__builtin_constant_p: {
3666 const Expr *Arg =
E->getArg(0);
3674 return RValue::get(ConstantInt::get(ResultType, 0));
3679 return RValue::get(ConstantInt::get(ResultType, 0));
3691 if (
Result->getType() != ResultType)
3695 case Builtin::BI__builtin_dynamic_object_size:
3696 case Builtin::BI__builtin_object_size: {
3703 bool IsDynamic = BuiltinID == Builtin::BI__builtin_dynamic_object_size;
3705 nullptr, IsDynamic));
3707 case Builtin::BI__builtin_counted_by_ref: {
3709 llvm::Value *
Result = llvm::ConstantPointerNull::get(
3714 if (
auto *UO = dyn_cast<UnaryOperator>(Arg);
3715 UO && UO->getOpcode() == UO_AddrOf) {
3718 if (
auto *ASE = dyn_cast<ArraySubscriptExpr>(Arg))
3722 if (
const MemberExpr *ME = dyn_cast_if_present<MemberExpr>(Arg)) {
3730 llvm::report_fatal_error(
"Cannot find the counted_by 'count' field");
3736 case Builtin::BI__builtin_prefetch: {
3740 llvm::ConstantInt::get(
Int32Ty, 0);
3742 llvm::ConstantInt::get(
Int32Ty, 3);
3748 case Builtin::BI__builtin_readcyclecounter: {
3752 case Builtin::BI__builtin_readsteadycounter: {
3756 case Builtin::BI__builtin___clear_cache: {
3762 case Builtin::BI__builtin_trap:
3765 case Builtin::BI__builtin_verbose_trap: {
3766 llvm::DILocation *TrapLocation =
Builder.getCurrentDebugLocation();
3777 case Builtin::BI__debugbreak:
3780 case Builtin::BI__builtin_unreachable: {
3789 case Builtin::BI__builtin_powi:
3790 case Builtin::BI__builtin_powif:
3791 case Builtin::BI__builtin_powil: {
3795 if (
Builder.getIsFPConstrained()) {
3805 { Src0->getType(), Src1->getType() });
3808 case Builtin::BI__builtin_frexpl: {
3812 if (&
getTarget().getLongDoubleFormat() == &llvm::APFloat::PPCDoubleDouble())
3816 case Builtin::BI__builtin_frexp:
3817 case Builtin::BI__builtin_frexpf:
3818 case Builtin::BI__builtin_frexpf128:
3819 case Builtin::BI__builtin_frexpf16:
3821 case Builtin::BImodf:
3822 case Builtin::BImodff:
3823 case Builtin::BImodfl:
3824 case Builtin::BI__builtin_modf:
3825 case Builtin::BI__builtin_modff:
3826 case Builtin::BI__builtin_modfl:
3827 if (
Builder.getIsFPConstrained())
3830 case Builtin::BI__builtin_isgreater:
3831 case Builtin::BI__builtin_isgreaterequal:
3832 case Builtin::BI__builtin_isless:
3833 case Builtin::BI__builtin_islessequal:
3834 case Builtin::BI__builtin_islessgreater:
3835 case Builtin::BI__builtin_isunordered: {
3842 switch (BuiltinID) {
3843 default: llvm_unreachable(
"Unknown ordered comparison");
3844 case Builtin::BI__builtin_isgreater:
3845 LHS =
Builder.CreateFCmpOGT(LHS, RHS,
"cmp");
3847 case Builtin::BI__builtin_isgreaterequal:
3848 LHS =
Builder.CreateFCmpOGE(LHS, RHS,
"cmp");
3850 case Builtin::BI__builtin_isless:
3851 LHS =
Builder.CreateFCmpOLT(LHS, RHS,
"cmp");
3853 case Builtin::BI__builtin_islessequal:
3854 LHS =
Builder.CreateFCmpOLE(LHS, RHS,
"cmp");
3856 case Builtin::BI__builtin_islessgreater:
3857 LHS =
Builder.CreateFCmpONE(LHS, RHS,
"cmp");
3859 case Builtin::BI__builtin_isunordered:
3860 LHS =
Builder.CreateFCmpUNO(LHS, RHS,
"cmp");
3867 case Builtin::BI__builtin_isnan: {
3877 case Builtin::BI__builtin_issignaling: {
3885 case Builtin::BI__builtin_isinf: {
3895 case Builtin::BIfinite:
3896 case Builtin::BI__finite:
3897 case Builtin::BIfinitef:
3898 case Builtin::BI__finitef:
3899 case Builtin::BIfinitel:
3900 case Builtin::BI__finitel:
3901 case Builtin::BI__builtin_isfinite: {
3911 case Builtin::BI__builtin_isnormal: {
3919 case Builtin::BI__builtin_issubnormal: {
3923 Builder.CreateZExt(
Builder.createIsFPClass(
V, FPClassTest::fcSubnormal),
3927 case Builtin::BI__builtin_iszero: {
3935 case Builtin::BI__builtin_isfpclass: {
3939 uint64_t Test =
Result.Val.getInt().getLimitedValue();
3946 case Builtin::BI__builtin_nondeterministic_value: {
3955 case Builtin::BI__builtin_elementwise_abs: {
3960 QT = VecTy->getElementType();
3964 nullptr,
"elt.abs");
3966 Result = emitBuiltinWithOneOverloadedType<1>(*
this,
E, Intrinsic::fabs,
3971 case Builtin::BI__builtin_elementwise_acos:
3972 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
3973 *
this,
E, Intrinsic::acos,
"elt.acos"));
3974 case Builtin::BI__builtin_elementwise_asin:
3975 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
3976 *
this,
E, Intrinsic::asin,
"elt.asin"));
3977 case Builtin::BI__builtin_elementwise_atan:
3978 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
3979 *
this,
E, Intrinsic::atan,
"elt.atan"));
3980 case Builtin::BI__builtin_elementwise_atan2:
3981 return RValue::get(emitBuiltinWithOneOverloadedType<2>(
3982 *
this,
E, Intrinsic::atan2,
"elt.atan2"));
3983 case Builtin::BI__builtin_elementwise_ceil:
3984 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
3985 *
this,
E, Intrinsic::ceil,
"elt.ceil"));
3986 case Builtin::BI__builtin_elementwise_exp:
3987 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
3988 *
this,
E, Intrinsic::exp,
"elt.exp"));
3989 case Builtin::BI__builtin_elementwise_exp2:
3990 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
3991 *
this,
E, Intrinsic::exp2,
"elt.exp2"));
3992 case Builtin::BI__builtin_elementwise_exp10:
3993 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
3994 *
this,
E, Intrinsic::exp10,
"elt.exp10"));
3995 case Builtin::BI__builtin_elementwise_log:
3996 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
3997 *
this,
E, Intrinsic::log,
"elt.log"));
3998 case Builtin::BI__builtin_elementwise_log2:
3999 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4000 *
this,
E, Intrinsic::log2,
"elt.log2"));
4001 case Builtin::BI__builtin_elementwise_log10:
4002 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4003 *
this,
E, Intrinsic::log10,
"elt.log10"));
4004 case Builtin::BI__builtin_elementwise_pow: {
4006 emitBuiltinWithOneOverloadedType<2>(*
this,
E, Intrinsic::pow));
4008 case Builtin::BI__builtin_elementwise_bitreverse:
4009 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4010 *
this,
E, Intrinsic::bitreverse,
"elt.bitreverse"));
4011 case Builtin::BI__builtin_elementwise_cos:
4012 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4013 *
this,
E, Intrinsic::cos,
"elt.cos"));
4014 case Builtin::BI__builtin_elementwise_cosh:
4015 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4016 *
this,
E, Intrinsic::cosh,
"elt.cosh"));
4017 case Builtin::BI__builtin_elementwise_floor:
4018 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4019 *
this,
E, Intrinsic::floor,
"elt.floor"));
4020 case Builtin::BI__builtin_elementwise_popcount:
4021 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4022 *
this,
E, Intrinsic::ctpop,
"elt.ctpop"));
4023 case Builtin::BI__builtin_elementwise_roundeven:
4024 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4025 *
this,
E, Intrinsic::roundeven,
"elt.roundeven"));
4026 case Builtin::BI__builtin_elementwise_round:
4027 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4028 *
this,
E, Intrinsic::round,
"elt.round"));
4029 case Builtin::BI__builtin_elementwise_rint:
4030 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4031 *
this,
E, Intrinsic::rint,
"elt.rint"));
4032 case Builtin::BI__builtin_elementwise_nearbyint:
4033 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4034 *
this,
E, Intrinsic::nearbyint,
"elt.nearbyint"));
4035 case Builtin::BI__builtin_elementwise_sin:
4036 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4037 *
this,
E, Intrinsic::sin,
"elt.sin"));
4038 case Builtin::BI__builtin_elementwise_sinh:
4039 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4040 *
this,
E, Intrinsic::sinh,
"elt.sinh"));
4041 case Builtin::BI__builtin_elementwise_tan:
4042 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4043 *
this,
E, Intrinsic::tan,
"elt.tan"));
4044 case Builtin::BI__builtin_elementwise_tanh:
4045 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4046 *
this,
E, Intrinsic::tanh,
"elt.tanh"));
4047 case Builtin::BI__builtin_elementwise_trunc:
4048 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4049 *
this,
E, Intrinsic::trunc,
"elt.trunc"));
4050 case Builtin::BI__builtin_elementwise_canonicalize:
4051 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4052 *
this,
E, Intrinsic::canonicalize,
"elt.canonicalize"));
4053 case Builtin::BI__builtin_elementwise_copysign:
4055 emitBuiltinWithOneOverloadedType<2>(*
this,
E, Intrinsic::copysign));
4056 case Builtin::BI__builtin_elementwise_fma:
4058 emitBuiltinWithOneOverloadedType<3>(*
this,
E, Intrinsic::fma));
4059 case Builtin::BI__builtin_elementwise_fshl:
4061 emitBuiltinWithOneOverloadedType<3>(*
this,
E, Intrinsic::fshl));
4062 case Builtin::BI__builtin_elementwise_fshr:
4064 emitBuiltinWithOneOverloadedType<3>(*
this,
E, Intrinsic::fshr));
4066 case Builtin::BI__builtin_elementwise_add_sat:
4067 case Builtin::BI__builtin_elementwise_sub_sat: {
4071 assert(Op0->
getType()->isIntOrIntVectorTy() &&
"integer type expected");
4074 Ty = VecTy->getElementType();
4077 if (BuiltinIDIfNoAsmLabel == Builtin::BI__builtin_elementwise_add_sat)
4078 Opc = IsSigned ? Intrinsic::sadd_sat : Intrinsic::uadd_sat;
4080 Opc = IsSigned ? Intrinsic::ssub_sat : Intrinsic::usub_sat;
4081 Result =
Builder.CreateBinaryIntrinsic(Opc, Op0, Op1,
nullptr,
"elt.sat");
4085 case Builtin::BI__builtin_elementwise_max: {
4089 if (Op0->
getType()->isIntOrIntVectorTy()) {
4092 Ty = VecTy->getElementType();
4095 Op1,
nullptr,
"elt.max");
4097 Result =
Builder.CreateMaxNum(Op0, Op1,
nullptr,
"elt.max");
4100 case Builtin::BI__builtin_elementwise_min: {
4104 if (Op0->
getType()->isIntOrIntVectorTy()) {
4107 Ty = VecTy->getElementType();
4110 Op1,
nullptr,
"elt.min");
4112 Result =
Builder.CreateMinNum(Op0, Op1,
nullptr,
"elt.min");
4116 case Builtin::BI__builtin_elementwise_maxnum: {
4120 Op1,
nullptr,
"elt.maxnum");
4124 case Builtin::BI__builtin_elementwise_minnum: {
4128 Op1,
nullptr,
"elt.minnum");
4132 case Builtin::BI__builtin_elementwise_maximum: {
4136 nullptr,
"elt.maximum");
4140 case Builtin::BI__builtin_elementwise_minimum: {
4144 nullptr,
"elt.minimum");
4148 case Builtin::BI__builtin_elementwise_maximumnum: {
4152 Intrinsic::maximumnum, Op0, Op1,
nullptr,
"elt.maximumnum");
4156 case Builtin::BI__builtin_elementwise_minimumnum: {
4160 Intrinsic::minimumnum, Op0, Op1,
nullptr,
"elt.minimumnum");
4164 case Builtin::BI__builtin_reduce_max: {
4165 auto GetIntrinsicID = [
this](
QualType QT) {
4167 QT = VecTy->getElementType();
4168 else if (QT->isSizelessVectorType())
4171 if (QT->isSignedIntegerType())
4172 return Intrinsic::vector_reduce_smax;
4173 if (QT->isUnsignedIntegerType())
4174 return Intrinsic::vector_reduce_umax;
4175 assert(QT->isFloatingType() &&
"must have a float here");
4176 return Intrinsic::vector_reduce_fmax;
4178 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4179 *
this,
E, GetIntrinsicID(
E->getArg(0)->
getType()),
"rdx.min"));
4182 case Builtin::BI__builtin_reduce_min: {
4183 auto GetIntrinsicID = [
this](
QualType QT) {
4185 QT = VecTy->getElementType();
4186 else if (QT->isSizelessVectorType())
4189 if (QT->isSignedIntegerType())
4190 return Intrinsic::vector_reduce_smin;
4191 if (QT->isUnsignedIntegerType())
4192 return Intrinsic::vector_reduce_umin;
4193 assert(QT->isFloatingType() &&
"must have a float here");
4194 return Intrinsic::vector_reduce_fmin;
4197 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4198 *
this,
E, GetIntrinsicID(
E->getArg(0)->
getType()),
"rdx.min"));
4201 case Builtin::BI__builtin_reduce_add:
4202 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4203 *
this,
E, Intrinsic::vector_reduce_add,
"rdx.add"));
4204 case Builtin::BI__builtin_reduce_mul:
4205 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4206 *
this,
E, Intrinsic::vector_reduce_mul,
"rdx.mul"));
4207 case Builtin::BI__builtin_reduce_xor:
4208 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4209 *
this,
E, Intrinsic::vector_reduce_xor,
"rdx.xor"));
4210 case Builtin::BI__builtin_reduce_or:
4211 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4212 *
this,
E, Intrinsic::vector_reduce_or,
"rdx.or"));
4213 case Builtin::BI__builtin_reduce_and:
4214 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4215 *
this,
E, Intrinsic::vector_reduce_and,
"rdx.and"));
4216 case Builtin::BI__builtin_reduce_maximum:
4217 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4218 *
this,
E, Intrinsic::vector_reduce_fmaximum,
"rdx.maximum"));
4219 case Builtin::BI__builtin_reduce_minimum:
4220 return RValue::get(emitBuiltinWithOneOverloadedType<1>(
4221 *
this,
E, Intrinsic::vector_reduce_fminimum,
"rdx.minimum"));
4223 case Builtin::BI__builtin_matrix_transpose: {
4227 Value *
Result = MB.CreateMatrixTranspose(MatValue, MatrixTy->getNumRows(),
4228 MatrixTy->getNumColumns());
4232 case Builtin::BI__builtin_matrix_column_major_load: {
4238 assert(PtrTy &&
"arg0 must be of pointer type");
4248 ResultTy->getNumRows(), ResultTy->getNumColumns(),
"matrix");
4252 case Builtin::BI__builtin_matrix_column_major_store: {
4260 assert(PtrTy &&
"arg1 must be of pointer type");
4269 MatrixTy->getNumRows(), MatrixTy->getNumColumns());
4274 case Builtin::BI__builtin_masked_load:
4275 case Builtin::BI__builtin_masked_expand_load: {
4281 llvm::Value *AlignVal =
4284 llvm::Value *PassThru = llvm::PoisonValue::get(RetTy);
4285 if (
E->getNumArgs() > 2)
4289 if (BuiltinID == Builtin::BI__builtin_masked_load) {
4293 Builder.CreateCall(F, {Ptr, AlignVal, Mask, PassThru},
"masked_load");
4297 Builder.CreateCall(F, {Ptr, Mask, PassThru},
"masked_expand_load");
4301 case Builtin::BI__builtin_masked_store:
4302 case Builtin::BI__builtin_masked_compress_store: {
4309 llvm::Type *PtrTy = Ptr->getType();
4312 llvm::Value *AlignVal =
4315 if (BuiltinID == Builtin::BI__builtin_masked_store) {
4318 Builder.CreateCall(F, {Val, Ptr, AlignVal, Mask});
4322 Builder.CreateCall(F, {Val, Ptr, Mask});
4327 case Builtin::BI__builtin_isinf_sign: {
4334 AbsArg, ConstantFP::getInfinity(Arg->
getType()),
"isinf");
4340 Value *NegativeOne = ConstantInt::get(
IntTy, -1);
4341 Value *SignResult =
Builder.CreateSelect(IsNeg, NegativeOne, One);
4346 case Builtin::BI__builtin_flt_rounds: {
4351 if (
Result->getType() != ResultType)
4357 case Builtin::BI__builtin_set_flt_rounds: {
4365 case Builtin::BI__builtin_fpclassify: {
4377 "fpclassify_result");
4381 Value *IsZero =
Builder.CreateFCmpOEQ(
V, Constant::getNullValue(Ty),
4385 Builder.CreateCondBr(IsZero, End, NotZero);
4389 Builder.SetInsertPoint(NotZero);
4393 Builder.CreateCondBr(IsNan, End, NotNan);
4394 Result->addIncoming(NanLiteral, NotZero);
4397 Builder.SetInsertPoint(NotNan);
4400 Builder.CreateFCmpOEQ(VAbs, ConstantFP::getInfinity(
V->getType()),
4404 Builder.CreateCondBr(IsInf, End, NotInf);
4405 Result->addIncoming(InfLiteral, NotNan);
4408 Builder.SetInsertPoint(NotInf);
4409 APFloat Smallest = APFloat::getSmallestNormalized(
4412 Builder.CreateFCmpUGE(VAbs, ConstantFP::get(
V->getContext(), Smallest),
4414 Value *NormalResult =
4418 Result->addIncoming(NormalResult, NotInf);
4431 case Builtin::BIalloca:
4432 case Builtin::BI_alloca:
4433 case Builtin::BI__builtin_alloca_uninitialized:
4434 case Builtin::BI__builtin_alloca: {
4438 const Align SuitableAlignmentInBytes =
4442 AllocaInst *AI =
Builder.CreateAlloca(
Builder.getInt8Ty(), Size);
4443 AI->setAlignment(SuitableAlignmentInBytes);
4444 if (BuiltinID != Builtin::BI__builtin_alloca_uninitialized)
4456 case Builtin::BI__builtin_alloca_with_align_uninitialized:
4457 case Builtin::BI__builtin_alloca_with_align: {
4460 auto *AlignmentInBitsCI = cast<ConstantInt>(AlignmentInBitsValue);
4461 unsigned AlignmentInBits = AlignmentInBitsCI->getZExtValue();
4462 const Align AlignmentInBytes =
4464 AllocaInst *AI =
Builder.CreateAlloca(
Builder.getInt8Ty(), Size);
4465 AI->setAlignment(AlignmentInBytes);
4466 if (BuiltinID != Builtin::BI__builtin_alloca_with_align_uninitialized)
4478 case Builtin::BIbzero:
4479 case Builtin::BI__builtin_bzero: {
4489 case Builtin::BIbcopy:
4490 case Builtin::BI__builtin_bcopy: {
4505 case Builtin::BImemcpy:
4506 case Builtin::BI__builtin_memcpy:
4507 case Builtin::BImempcpy:
4508 case Builtin::BI__builtin_mempcpy: {
4512 EmitArgCheck(
TCK_Store, Dest,
E->getArg(0), 0);
4513 EmitArgCheck(
TCK_Load, Src,
E->getArg(1), 1);
4516 if (BuiltinID == Builtin::BImempcpy ||
4517 BuiltinID == Builtin::BI__builtin_mempcpy)
4524 case Builtin::BI__builtin_memcpy_inline: {
4529 EmitArgCheck(
TCK_Store, Dest,
E->getArg(0), 0);
4530 EmitArgCheck(
TCK_Load, Src,
E->getArg(1), 1);
4536 case Builtin::BI__builtin_char_memchr:
4537 BuiltinID = Builtin::BI__builtin_memchr;
4540 case Builtin::BI__builtin___memcpy_chk: {
4546 llvm::APSInt Size = SizeResult.
Val.
getInt();
4547 llvm::APSInt DstSize = DstSizeResult.
Val.
getInt();
4548 if (Size.ugt(DstSize))
4552 Value *SizeVal = llvm::ConstantInt::get(
Builder.getContext(), Size);
4558 case Builtin::BI__builtin_objc_memmove_collectable: {
4563 DestAddr, SrcAddr, SizeVal);
4567 case Builtin::BI__builtin___memmove_chk: {
4573 llvm::APSInt Size = SizeResult.
Val.
getInt();
4574 llvm::APSInt DstSize = DstSizeResult.
Val.
getInt();
4575 if (Size.ugt(DstSize))
4579 Value *SizeVal = llvm::ConstantInt::get(
Builder.getContext(), Size);
4585 case Builtin::BI__builtin_trivially_relocate:
4586 case Builtin::BImemmove:
4587 case Builtin::BI__builtin_memmove: {
4591 if (BuiltinIDIfNoAsmLabel == Builtin::BI__builtin_trivially_relocate)
4599 EmitArgCheck(
TCK_Store, Dest,
E->getArg(0), 0);
4600 EmitArgCheck(
TCK_Load, Src,
E->getArg(1), 1);
4605 case Builtin::BImemset:
4606 case Builtin::BI__builtin_memset: {
4617 case Builtin::BI__builtin_memset_inline: {
4630 case Builtin::BI__builtin___memset_chk: {
4636 llvm::APSInt Size = SizeResult.
Val.
getInt();
4637 llvm::APSInt DstSize = DstSizeResult.
Val.
getInt();
4638 if (Size.ugt(DstSize))
4643 Value *SizeVal = llvm::ConstantInt::get(
Builder.getContext(), Size);
4648 case Builtin::BI__builtin_wmemchr: {
4651 if (!
getTarget().getTriple().isOSMSVCRT())
4659 BasicBlock *Entry =
Builder.GetInsertBlock();
4664 Builder.CreateCondBr(SizeEq0, Exit, CmpEq);
4668 StrPhi->addIncoming(Str, Entry);
4670 SizePhi->addIncoming(Size, Entry);
4674 Value *FoundChr =
Builder.CreateConstInBoundsGEP1_32(WCharTy, StrPhi, 0);
4676 Builder.CreateCondBr(StrEqChr, Exit, Next);
4679 Value *NextStr =
Builder.CreateConstInBoundsGEP1_32(WCharTy, StrPhi, 1);
4681 Value *NextSizeEq0 =
4682 Builder.CreateICmpEQ(NextSize, ConstantInt::get(
SizeTy, 0));
4683 Builder.CreateCondBr(NextSizeEq0, Exit, CmpEq);
4684 StrPhi->addIncoming(NextStr, Next);
4685 SizePhi->addIncoming(NextSize, Next);
4689 Ret->addIncoming(llvm::Constant::getNullValue(Str->
getType()), Entry);
4690 Ret->addIncoming(llvm::Constant::getNullValue(Str->
getType()), Next);
4691 Ret->addIncoming(FoundChr, CmpEq);
4694 case Builtin::BI__builtin_wmemcmp: {
4697 if (!
getTarget().getTriple().isOSMSVCRT())
4706 BasicBlock *Entry =
Builder.GetInsertBlock();
4712 Builder.CreateCondBr(SizeEq0, Exit, CmpGT);
4716 DstPhi->addIncoming(Dst, Entry);
4718 SrcPhi->addIncoming(Src, Entry);
4720 SizePhi->addIncoming(Size, Entry);
4726 Builder.CreateCondBr(DstGtSrc, Exit, CmpLT);
4730 Builder.CreateCondBr(DstLtSrc, Exit, Next);
4733 Value *NextDst =
Builder.CreateConstInBoundsGEP1_32(WCharTy, DstPhi, 1);
4734 Value *NextSrc =
Builder.CreateConstInBoundsGEP1_32(WCharTy, SrcPhi, 1);
4736 Value *NextSizeEq0 =
4737 Builder.CreateICmpEQ(NextSize, ConstantInt::get(
SizeTy, 0));
4738 Builder.CreateCondBr(NextSizeEq0, Exit, CmpGT);
4739 DstPhi->addIncoming(NextDst, Next);
4740 SrcPhi->addIncoming(NextSrc, Next);
4741 SizePhi->addIncoming(NextSize, Next);
4745 Ret->addIncoming(ConstantInt::get(
IntTy, 0), Entry);
4746 Ret->addIncoming(ConstantInt::get(
IntTy, 1), CmpGT);
4747 Ret->addIncoming(ConstantInt::get(
IntTy, -1), CmpLT);
4748 Ret->addIncoming(ConstantInt::get(
IntTy, 0), Next);
4751 case Builtin::BI__builtin_dwarf_cfa: {
4764 llvm::ConstantInt::get(
Int32Ty, Offset)));
4766 case Builtin::BI__builtin_return_address: {
4772 case Builtin::BI_ReturnAddress: {
4776 case Builtin::BI__builtin_frame_address: {
4782 case Builtin::BI__builtin_extract_return_addr: {
4787 case Builtin::BI__builtin_frob_return_addr: {
4792 case Builtin::BI__builtin_dwarf_sp_column: {
4793 llvm::IntegerType *Ty
4802 case Builtin::BI__builtin_init_dwarf_reg_size_table: {
4808 case Builtin::BI__builtin_eh_return: {
4812 llvm::IntegerType *
IntTy = cast<llvm::IntegerType>(Int->getType());
4813 assert((
IntTy->getBitWidth() == 32 ||
IntTy->getBitWidth() == 64) &&
4814 "LLVM's __builtin_eh_return only supports 32- and 64-bit variants");
4817 : Intrinsic::eh_return_i64);
4818 Builder.CreateCall(F, {Int, Ptr});
4826 case Builtin::BI__builtin_unwind_init: {
4831 case Builtin::BI__builtin_extend_pointer: {
4856 case Builtin::BI__builtin_setjmp: {
4860 if (
getTarget().getTriple().getArch() == llvm::Triple::systemz) {
4871 ConstantInt::get(
Int32Ty, 0));
4885 case Builtin::BI__builtin_longjmp: {
4899 case Builtin::BI__builtin_launder: {
4900 const Expr *Arg =
E->getArg(0);
4908 case Builtin::BI__sync_fetch_and_add:
4909 case Builtin::BI__sync_fetch_and_sub:
4910 case Builtin::BI__sync_fetch_and_or:
4911 case Builtin::BI__sync_fetch_and_and:
4912 case Builtin::BI__sync_fetch_and_xor:
4913 case Builtin::BI__sync_fetch_and_nand:
4914 case Builtin::BI__sync_add_and_fetch:
4915 case Builtin::BI__sync_sub_and_fetch:
4916 case Builtin::BI__sync_and_and_fetch:
4917 case Builtin::BI__sync_or_and_fetch:
4918 case Builtin::BI__sync_xor_and_fetch:
4919 case Builtin::BI__sync_nand_and_fetch:
4920 case Builtin::BI__sync_val_compare_and_swap:
4921 case Builtin::BI__sync_bool_compare_and_swap:
4922 case Builtin::BI__sync_lock_test_and_set:
4923 case Builtin::BI__sync_lock_release:
4924 case Builtin::BI__sync_swap:
4925 llvm_unreachable(
"Shouldn't make it through sema");
4926 case Builtin::BI__sync_fetch_and_add_1:
4927 case Builtin::BI__sync_fetch_and_add_2:
4928 case Builtin::BI__sync_fetch_and_add_4:
4929 case Builtin::BI__sync_fetch_and_add_8:
4930 case Builtin::BI__sync_fetch_and_add_16:
4932 case Builtin::BI__sync_fetch_and_sub_1:
4933 case Builtin::BI__sync_fetch_and_sub_2:
4934 case Builtin::BI__sync_fetch_and_sub_4:
4935 case Builtin::BI__sync_fetch_and_sub_8:
4936 case Builtin::BI__sync_fetch_and_sub_16:
4938 case Builtin::BI__sync_fetch_and_or_1:
4939 case Builtin::BI__sync_fetch_and_or_2:
4940 case Builtin::BI__sync_fetch_and_or_4:
4941 case Builtin::BI__sync_fetch_and_or_8:
4942 case Builtin::BI__sync_fetch_and_or_16:
4944 case Builtin::BI__sync_fetch_and_and_1:
4945 case Builtin::BI__sync_fetch_and_and_2:
4946 case Builtin::BI__sync_fetch_and_and_4:
4947 case Builtin::BI__sync_fetch_and_and_8:
4948 case Builtin::BI__sync_fetch_and_and_16:
4950 case Builtin::BI__sync_fetch_and_xor_1:
4951 case Builtin::BI__sync_fetch_and_xor_2:
4952 case Builtin::BI__sync_fetch_and_xor_4:
4953 case Builtin::BI__sync_fetch_and_xor_8:
4954 case Builtin::BI__sync_fetch_and_xor_16:
4956 case Builtin::BI__sync_fetch_and_nand_1:
4957 case Builtin::BI__sync_fetch_and_nand_2:
4958 case Builtin::BI__sync_fetch_and_nand_4:
4959 case Builtin::BI__sync_fetch_and_nand_8:
4960 case Builtin::BI__sync_fetch_and_nand_16:
4964 case Builtin::BI__sync_fetch_and_min:
4966 case Builtin::BI__sync_fetch_and_max:
4968 case Builtin::BI__sync_fetch_and_umin:
4970 case Builtin::BI__sync_fetch_and_umax:
4973 case Builtin::BI__sync_add_and_fetch_1:
4974 case Builtin::BI__sync_add_and_fetch_2:
4975 case Builtin::BI__sync_add_and_fetch_4:
4976 case Builtin::BI__sync_add_and_fetch_8:
4977 case Builtin::BI__sync_add_and_fetch_16:
4979 llvm::Instruction::Add);
4980 case Builtin::BI__sync_sub_and_fetch_1:
4981 case Builtin::BI__sync_sub_and_fetch_2:
4982 case Builtin::BI__sync_sub_and_fetch_4:
4983 case Builtin::BI__sync_sub_and_fetch_8:
4984 case Builtin::BI__sync_sub_and_fetch_16:
4986 llvm::Instruction::Sub);
4987 case Builtin::BI__sync_and_and_fetch_1:
4988 case Builtin::BI__sync_and_and_fetch_2:
4989 case Builtin::BI__sync_and_and_fetch_4:
4990 case Builtin::BI__sync_and_and_fetch_8:
4991 case Builtin::BI__sync_and_and_fetch_16:
4993 llvm::Instruction::And);
4994 case Builtin::BI__sync_or_and_fetch_1:
4995 case Builtin::BI__sync_or_and_fetch_2:
4996 case Builtin::BI__sync_or_and_fetch_4:
4997 case Builtin::BI__sync_or_and_fetch_8:
4998 case Builtin::BI__sync_or_and_fetch_16:
5000 llvm::Instruction::Or);
5001 case Builtin::BI__sync_xor_and_fetch_1:
5002 case Builtin::BI__sync_xor_and_fetch_2:
5003 case Builtin::BI__sync_xor_and_fetch_4:
5004 case Builtin::BI__sync_xor_and_fetch_8:
5005 case Builtin::BI__sync_xor_and_fetch_16:
5007 llvm::Instruction::Xor);
5008 case Builtin::BI__sync_nand_and_fetch_1:
5009 case Builtin::BI__sync_nand_and_fetch_2:
5010 case Builtin::BI__sync_nand_and_fetch_4:
5011 case Builtin::BI__sync_nand_and_fetch_8:
5012 case Builtin::BI__sync_nand_and_fetch_16:
5014 llvm::Instruction::And,
true);
5016 case Builtin::BI__sync_val_compare_and_swap_1:
5017 case Builtin::BI__sync_val_compare_and_swap_2:
5018 case Builtin::BI__sync_val_compare_and_swap_4:
5019 case Builtin::BI__sync_val_compare_and_swap_8:
5020 case Builtin::BI__sync_val_compare_and_swap_16:
5023 case Builtin::BI__sync_bool_compare_and_swap_1:
5024 case Builtin::BI__sync_bool_compare_and_swap_2:
5025 case Builtin::BI__sync_bool_compare_and_swap_4:
5026 case Builtin::BI__sync_bool_compare_and_swap_8:
5027 case Builtin::BI__sync_bool_compare_and_swap_16:
5030 case Builtin::BI__sync_swap_1:
5031 case Builtin::BI__sync_swap_2:
5032 case Builtin::BI__sync_swap_4:
5033 case Builtin::BI__sync_swap_8:
5034 case Builtin::BI__sync_swap_16:
5037 case Builtin::BI__sync_lock_test_and_set_1:
5038 case Builtin::BI__sync_lock_test_and_set_2:
5039 case Builtin::BI__sync_lock_test_and_set_4:
5040 case Builtin::BI__sync_lock_test_and_set_8:
5041 case Builtin::BI__sync_lock_test_and_set_16:
5044 case Builtin::BI__sync_lock_release_1:
5045 case Builtin::BI__sync_lock_release_2:
5046 case Builtin::BI__sync_lock_release_4:
5047 case Builtin::BI__sync_lock_release_8:
5048 case Builtin::BI__sync_lock_release_16: {
5054 llvm::StoreInst *Store =
5056 Store->setAtomic(llvm::AtomicOrdering::Release);
5060 case Builtin::BI__sync_synchronize: {
5068 Builder.CreateFence(llvm::AtomicOrdering::SequentiallyConsistent);
5072 case Builtin::BI__builtin_nontemporal_load:
5074 case Builtin::BI__builtin_nontemporal_store:
5076 case Builtin::BI__c11_atomic_is_lock_free:
5077 case Builtin::BI__atomic_is_lock_free: {
5081 const char *LibCallName =
"__atomic_is_lock_free";
5085 if (BuiltinID == Builtin::BI__atomic_is_lock_free)
5099 case Builtin::BI__atomic_thread_fence:
5100 case Builtin::BI__atomic_signal_fence:
5101 case Builtin::BI__c11_atomic_thread_fence:
5102 case Builtin::BI__c11_atomic_signal_fence: {
5103 llvm::SyncScope::ID SSID;
5104 if (BuiltinID == Builtin::BI__atomic_signal_fence ||
5105 BuiltinID == Builtin::BI__c11_atomic_signal_fence)
5106 SSID = llvm::SyncScope::SingleThread;
5108 SSID = llvm::SyncScope::System;
5110 if (isa<llvm::ConstantInt>(Order)) {
5111 int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
5118 Builder.CreateFence(llvm::AtomicOrdering::Acquire, SSID);
5121 Builder.CreateFence(llvm::AtomicOrdering::Release, SSID);
5124 Builder.CreateFence(llvm::AtomicOrdering::AcquireRelease, SSID);
5127 Builder.CreateFence(llvm::AtomicOrdering::SequentiallyConsistent, SSID);
5133 llvm::BasicBlock *AcquireBB, *ReleaseBB, *AcqRelBB, *SeqCstBB;
5140 Order =
Builder.CreateIntCast(Order,
Builder.getInt32Ty(),
false);
5141 llvm::SwitchInst *SI =
Builder.CreateSwitch(Order, ContBB);
5143 Builder.SetInsertPoint(AcquireBB);
5144 Builder.CreateFence(llvm::AtomicOrdering::Acquire, SSID);
5146 SI->addCase(
Builder.getInt32(1), AcquireBB);
5147 SI->addCase(
Builder.getInt32(2), AcquireBB);
5149 Builder.SetInsertPoint(ReleaseBB);
5150 Builder.CreateFence(llvm::AtomicOrdering::Release, SSID);
5152 SI->addCase(
Builder.getInt32(3), ReleaseBB);
5154 Builder.SetInsertPoint(AcqRelBB);
5155 Builder.CreateFence(llvm::AtomicOrdering::AcquireRelease, SSID);
5157 SI->addCase(
Builder.getInt32(4), AcqRelBB);
5159 Builder.SetInsertPoint(SeqCstBB);
5160 Builder.CreateFence(llvm::AtomicOrdering::SequentiallyConsistent, SSID);
5162 SI->addCase(
Builder.getInt32(5), SeqCstBB);
5164 Builder.SetInsertPoint(ContBB);
5167 case Builtin::BI__scoped_atomic_thread_fence: {
5172 auto Ord = dyn_cast<llvm::ConstantInt>(Order);
5173 auto Scp = dyn_cast<llvm::ConstantInt>(
Scope);
5175 SyncScope SS = ScopeModel->isValid(Scp->getZExtValue())
5176 ? ScopeModel->map(Scp->getZExtValue())
5177 : ScopeModel->map(ScopeModel->getFallBackValue());
5178 switch (Ord->getZExtValue()) {
5185 llvm::AtomicOrdering::Acquire,
5187 llvm::AtomicOrdering::Acquire,
5192 llvm::AtomicOrdering::Release,
5194 llvm::AtomicOrdering::Release,
5198 Builder.CreateFence(llvm::AtomicOrdering::AcquireRelease,
5201 llvm::AtomicOrdering::AcquireRelease,
5205 Builder.CreateFence(llvm::AtomicOrdering::SequentiallyConsistent,
5208 llvm::AtomicOrdering::SequentiallyConsistent,
5220 switch (Ord->getZExtValue()) {
5223 ContBB->eraseFromParent();
5227 OrderBBs.emplace_back(
Builder.GetInsertBlock(),
5228 llvm::AtomicOrdering::Acquire);
5231 OrderBBs.emplace_back(
Builder.GetInsertBlock(),
5232 llvm::AtomicOrdering::Release);
5235 OrderBBs.emplace_back(
Builder.GetInsertBlock(),
5236 llvm::AtomicOrdering::AcquireRelease);
5239 OrderBBs.emplace_back(
Builder.GetInsertBlock(),
5240 llvm::AtomicOrdering::SequentiallyConsistent);
5249 Order =
Builder.CreateIntCast(Order,
Builder.getInt32Ty(),
false);
5250 llvm::SwitchInst *SI =
Builder.CreateSwitch(Order, ContBB);
5251 SI->addCase(
Builder.getInt32(1), AcquireBB);
5252 SI->addCase(
Builder.getInt32(2), AcquireBB);
5253 SI->addCase(
Builder.getInt32(3), ReleaseBB);
5254 SI->addCase(
Builder.getInt32(4), AcqRelBB);
5255 SI->addCase(
Builder.getInt32(5), SeqCstBB);
5257 OrderBBs.emplace_back(AcquireBB, llvm::AtomicOrdering::Acquire);
5258 OrderBBs.emplace_back(ReleaseBB, llvm::AtomicOrdering::Release);
5259 OrderBBs.emplace_back(AcqRelBB, llvm::AtomicOrdering::AcquireRelease);
5260 OrderBBs.emplace_back(SeqCstBB,
5261 llvm::AtomicOrdering::SequentiallyConsistent);
5264 for (
auto &[OrderBB, Ordering] : OrderBBs) {
5265 Builder.SetInsertPoint(OrderBB);
5267 SyncScope SS = ScopeModel->isValid(Scp->getZExtValue())
5268 ? ScopeModel->map(Scp->getZExtValue())
5269 : ScopeModel->map(ScopeModel->getFallBackValue());
5275 llvm::DenseMap<unsigned, llvm::BasicBlock *> BBs;
5276 for (
unsigned Scp : ScopeModel->getRuntimeValues())
5280 llvm::SwitchInst *SI =
Builder.CreateSwitch(SC, ContBB);
5281 for (
unsigned Scp : ScopeModel->getRuntimeValues()) {
5283 SI->addCase(
Builder.getInt32(Scp), B);
5294 Builder.SetInsertPoint(ContBB);
5298 case Builtin::BI__builtin_signbit:
5299 case Builtin::BI__builtin_signbitf:
5300 case Builtin::BI__builtin_signbitl: {
5305 case Builtin::BI__warn_memset_zero_len:
5307 case Builtin::BI__annotation: {
5310 for (
const Expr *Arg :
E->arguments()) {
5311 const auto *Str = cast<StringLiteral>(Arg->IgnoreParenCasts());
5312 assert(Str->getCharByteWidth() == 2);
5313 StringRef WideBytes = Str->getBytes();
5314 std::string StrUtf8;
5315 if (!convertUTF16ToUTF8String(
5316 ArrayRef(WideBytes.data(), WideBytes.size()), StrUtf8)) {
5320 Strings.push_back(llvm::MDString::get(
getLLVMContext(), StrUtf8));
5324 llvm::Function *F =
CGM.
getIntrinsic(Intrinsic::codeview_annotation, {});
5329 case Builtin::BI__builtin_annotation: {
5337 StringRef Str = cast<StringLiteral>(AnnotationStrExpr)->getString();
5341 case Builtin::BI__builtin_addcb:
5342 case Builtin::BI__builtin_addcs:
5343 case Builtin::BI__builtin_addc:
5344 case Builtin::BI__builtin_addcl:
5345 case Builtin::BI__builtin_addcll:
5346 case Builtin::BI__builtin_subcb:
5347 case Builtin::BI__builtin_subcs:
5348 case Builtin::BI__builtin_subc:
5349 case Builtin::BI__builtin_subcl:
5350 case Builtin::BI__builtin_subcll: {
5376 Intrinsic::ID IntrinsicId;
5377 switch (BuiltinID) {
5378 default: llvm_unreachable(
"Unknown multiprecision builtin id.");
5379 case Builtin::BI__builtin_addcb:
5380 case Builtin::BI__builtin_addcs:
5381 case Builtin::BI__builtin_addc:
5382 case Builtin::BI__builtin_addcl:
5383 case Builtin::BI__builtin_addcll:
5384 IntrinsicId = Intrinsic::uadd_with_overflow;
5386 case Builtin::BI__builtin_subcb:
5387 case Builtin::BI__builtin_subcs:
5388 case Builtin::BI__builtin_subc:
5389 case Builtin::BI__builtin_subcl:
5390 case Builtin::BI__builtin_subcll:
5391 IntrinsicId = Intrinsic::usub_with_overflow;
5396 llvm::Value *Carry1;
5399 llvm::Value *Carry2;
5401 Sum1, Carryin, Carry2);
5402 llvm::Value *CarryOut =
Builder.CreateZExt(
Builder.CreateOr(Carry1, Carry2),
5408 case Builtin::BI__builtin_add_overflow:
5409 case Builtin::BI__builtin_sub_overflow:
5410 case Builtin::BI__builtin_mul_overflow: {
5418 WidthAndSignedness LeftInfo =
5420 WidthAndSignedness RightInfo =
5422 WidthAndSignedness ResultInfo =
5429 RightInfo, ResultArg, ResultQTy,
5435 *
this, LeftArg, LeftInfo, RightArg, RightInfo, ResultArg, ResultQTy,
5438 WidthAndSignedness EncompassingInfo =
5441 llvm::Type *EncompassingLLVMTy =
5446 Intrinsic::ID IntrinsicId;
5447 switch (BuiltinID) {
5449 llvm_unreachable(
"Unknown overflow builtin id.");
5450 case Builtin::BI__builtin_add_overflow:
5451 IntrinsicId = EncompassingInfo.Signed ? Intrinsic::sadd_with_overflow
5452 : Intrinsic::uadd_with_overflow;
5454 case Builtin::BI__builtin_sub_overflow:
5455 IntrinsicId = EncompassingInfo.Signed ? Intrinsic::ssub_with_overflow
5456 : Intrinsic::usub_with_overflow;
5458 case Builtin::BI__builtin_mul_overflow:
5459 IntrinsicId = EncompassingInfo.Signed ? Intrinsic::smul_with_overflow
5460 : Intrinsic::umul_with_overflow;
5469 Left =
Builder.CreateIntCast(Left, EncompassingLLVMTy, LeftInfo.Signed);
5470 Right =
Builder.CreateIntCast(Right, EncompassingLLVMTy, RightInfo.Signed);
5473 llvm::Value *Overflow, *
Result;
5476 if (EncompassingInfo.Width > ResultInfo.Width) {
5479 llvm::Value *ResultTrunc =
Builder.CreateTrunc(
Result, ResultLLVMTy);
5483 llvm::Value *ResultTruncExt =
Builder.CreateIntCast(
5484 ResultTrunc, EncompassingLLVMTy, ResultInfo.Signed);
5485 llvm::Value *TruncationOverflow =
5488 Overflow =
Builder.CreateOr(Overflow, TruncationOverflow);
5500 case Builtin::BI__builtin_uadd_overflow:
5501 case Builtin::BI__builtin_uaddl_overflow:
5502 case Builtin::BI__builtin_uaddll_overflow:
5503 case Builtin::BI__builtin_usub_overflow:
5504 case Builtin::BI__builtin_usubl_overflow:
5505 case Builtin::BI__builtin_usubll_overflow:
5506 case Builtin::BI__builtin_umul_overflow:
5507 case Builtin::BI__builtin_umull_overflow:
5508 case Builtin::BI__builtin_umulll_overflow:
5509 case Builtin::BI__builtin_sadd_overflow:
5510 case Builtin::BI__builtin_saddl_overflow:
5511 case Builtin::BI__builtin_saddll_overflow:
5512 case Builtin::BI__builtin_ssub_overflow:
5513 case Builtin::BI__builtin_ssubl_overflow:
5514 case Builtin::BI__builtin_ssubll_overflow:
5515 case Builtin::BI__builtin_smul_overflow:
5516 case Builtin::BI__builtin_smull_overflow:
5517 case Builtin::BI__builtin_smulll_overflow: {
5527 Intrinsic::ID IntrinsicId;
5528 switch (BuiltinID) {
5529 default: llvm_unreachable(
"Unknown overflow builtin id.");
5530 case Builtin::BI__builtin_uadd_overflow:
5531 case Builtin::BI__builtin_uaddl_overflow:
5532 case Builtin::BI__builtin_uaddll_overflow:
5533 IntrinsicId = Intrinsic::uadd_with_overflow;
5535 case Builtin::BI__builtin_usub_overflow:
5536 case Builtin::BI__builtin_usubl_overflow:
5537 case Builtin::BI__builtin_usubll_overflow:
5538 IntrinsicId = Intrinsic::usub_with_overflow;
5540 case Builtin::BI__builtin_umul_overflow:
5541 case Builtin::BI__builtin_umull_overflow:
5542 case Builtin::BI__builtin_umulll_overflow:
5543 IntrinsicId = Intrinsic::umul_with_overflow;
5545 case Builtin::BI__builtin_sadd_overflow:
5546 case Builtin::BI__builtin_saddl_overflow:
5547 case Builtin::BI__builtin_saddll_overflow:
5548 IntrinsicId = Intrinsic::sadd_with_overflow;
5550 case Builtin::BI__builtin_ssub_overflow:
5551 case Builtin::BI__builtin_ssubl_overflow:
5552 case Builtin::BI__builtin_ssubll_overflow:
5553 IntrinsicId = Intrinsic::ssub_with_overflow;
5555 case Builtin::BI__builtin_smul_overflow:
5556 case Builtin::BI__builtin_smull_overflow:
5557 case Builtin::BI__builtin_smulll_overflow:
5558 IntrinsicId = Intrinsic::smul_with_overflow;
5569 case Builtin::BIaddressof:
5570 case Builtin::BI__addressof:
5571 case Builtin::BI__builtin_addressof:
5573 case Builtin::BI__builtin_function_start:
5576 case Builtin::BI__builtin_operator_new:
5579 case Builtin::BI__builtin_operator_delete:
5584 case Builtin::BI__builtin_is_aligned:
5586 case Builtin::BI__builtin_align_up:
5588 case Builtin::BI__builtin_align_down:
5591 case Builtin::BI__noop:
5594 case Builtin::BI__builtin_call_with_static_chain: {
5596 const Expr *Chain =
E->getArg(1);
5601 case Builtin::BI_InterlockedExchange8:
5602 case Builtin::BI_InterlockedExchange16:
5603 case Builtin::BI_InterlockedExchange:
5604 case Builtin::BI_InterlockedExchangePointer:
5607 case Builtin::BI_InterlockedCompareExchangePointer:
5610 case Builtin::BI_InterlockedCompareExchangePointer_nf:
5613 case Builtin::BI_InterlockedCompareExchange8:
5614 case Builtin::BI_InterlockedCompareExchange16:
5615 case Builtin::BI_InterlockedCompareExchange:
5616 case Builtin::BI_InterlockedCompareExchange64:
5618 case Builtin::BI_InterlockedIncrement16:
5619 case Builtin::BI_InterlockedIncrement:
5622 case Builtin::BI_InterlockedDecrement16:
5623 case Builtin::BI_InterlockedDecrement:
5626 case Builtin::BI_InterlockedAnd8:
5627 case Builtin::BI_InterlockedAnd16:
5628 case Builtin::BI_InterlockedAnd:
5630 case Builtin::BI_InterlockedExchangeAdd8:
5631 case Builtin::BI_InterlockedExchangeAdd16:
5632 case Builtin::BI_InterlockedExchangeAdd:
5635 case Builtin::BI_InterlockedExchangeSub8:
5636 case Builtin::BI_InterlockedExchangeSub16:
5637 case Builtin::BI_InterlockedExchangeSub:
5640 case Builtin::BI_InterlockedOr8:
5641 case Builtin::BI_InterlockedOr16:
5642 case Builtin::BI_InterlockedOr:
5644 case Builtin::BI_InterlockedXor8:
5645 case Builtin::BI_InterlockedXor16:
5646 case Builtin::BI_InterlockedXor:
5649 case Builtin::BI_bittest64:
5650 case Builtin::BI_bittest:
5651 case Builtin::BI_bittestandcomplement64:
5652 case Builtin::BI_bittestandcomplement:
5653 case Builtin::BI_bittestandreset64:
5654 case Builtin::BI_bittestandreset:
5655 case Builtin::BI_bittestandset64:
5656 case Builtin::BI_bittestandset:
5657 case Builtin::BI_interlockedbittestandreset:
5658 case Builtin::BI_interlockedbittestandreset64:
5659 case Builtin::BI_interlockedbittestandreset64_acq:
5660 case Builtin::BI_interlockedbittestandreset64_rel:
5661 case Builtin::BI_interlockedbittestandreset64_nf:
5662 case Builtin::BI_interlockedbittestandset64:
5663 case Builtin::BI_interlockedbittestandset64_acq:
5664 case Builtin::BI_interlockedbittestandset64_rel:
5665 case Builtin::BI_interlockedbittestandset64_nf:
5666 case Builtin::BI_interlockedbittestandset:
5667 case Builtin::BI_interlockedbittestandset_acq:
5668 case Builtin::BI_interlockedbittestandset_rel:
5669 case Builtin::BI_interlockedbittestandset_nf:
5670 case Builtin::BI_interlockedbittestandreset_acq:
5671 case Builtin::BI_interlockedbittestandreset_rel:
5672 case Builtin::BI_interlockedbittestandreset_nf:
5677 case Builtin::BI__iso_volatile_load8:
5678 case Builtin::BI__iso_volatile_load16:
5679 case Builtin::BI__iso_volatile_load32:
5680 case Builtin::BI__iso_volatile_load64:
5682 case Builtin::BI__iso_volatile_store8:
5683 case Builtin::BI__iso_volatile_store16:
5684 case Builtin::BI__iso_volatile_store32:
5685 case Builtin::BI__iso_volatile_store64:
5688 case Builtin::BI__builtin_ptrauth_sign_constant:
5691 case Builtin::BI__builtin_ptrauth_auth:
5692 case Builtin::BI__builtin_ptrauth_auth_and_resign:
5693 case Builtin::BI__builtin_ptrauth_blend_discriminator:
5694 case Builtin::BI__builtin_ptrauth_sign_generic_data:
5695 case Builtin::BI__builtin_ptrauth_sign_unauthenticated:
5696 case Builtin::BI__builtin_ptrauth_strip: {
5699 for (
auto argExpr :
E->arguments())
5703 llvm::Type *OrigValueType = Args[0]->getType();
5704 if (OrigValueType->isPointerTy())
5707 switch (BuiltinID) {
5708 case Builtin::BI__builtin_ptrauth_auth_and_resign:
5709 if (Args[4]->getType()->isPointerTy())
5713 case Builtin::BI__builtin_ptrauth_auth:
5714 case Builtin::BI__builtin_ptrauth_sign_unauthenticated:
5715 if (Args[2]->getType()->isPointerTy())
5719 case Builtin::BI__builtin_ptrauth_sign_generic_data:
5720 if (Args[1]->getType()->isPointerTy())
5724 case Builtin::BI__builtin_ptrauth_blend_discriminator:
5725 case Builtin::BI__builtin_ptrauth_strip:
5730 auto IntrinsicID = [&]() ->
unsigned {
5731 switch (BuiltinID) {
5732 case Builtin::BI__builtin_ptrauth_auth:
5733 return Intrinsic::ptrauth_auth;
5734 case Builtin::BI__builtin_ptrauth_auth_and_resign:
5735 return Intrinsic::ptrauth_resign;
5736 case Builtin::BI__builtin_ptrauth_blend_discriminator:
5737 return Intrinsic::ptrauth_blend;
5738 case Builtin::BI__builtin_ptrauth_sign_generic_data:
5739 return Intrinsic::ptrauth_sign_generic;
5740 case Builtin::BI__builtin_ptrauth_sign_unauthenticated:
5741 return Intrinsic::ptrauth_sign;
5742 case Builtin::BI__builtin_ptrauth_strip:
5743 return Intrinsic::ptrauth_strip;
5745 llvm_unreachable(
"bad ptrauth intrinsic");
5750 if (BuiltinID != Builtin::BI__builtin_ptrauth_sign_generic_data &&
5751 BuiltinID != Builtin::BI__builtin_ptrauth_blend_discriminator &&
5752 OrigValueType->isPointerTy()) {
5758 case Builtin::BI__builtin_get_vtable_pointer: {
5764 assert(ThisAddress.isValid());
5765 llvm::Value *VTablePointer =
5770 case Builtin::BI__exception_code:
5771 case Builtin::BI_exception_code:
5773 case Builtin::BI__exception_info:
5774 case Builtin::BI_exception_info:
5776 case Builtin::BI__abnormal_termination:
5777 case Builtin::BI_abnormal_termination:
5779 case Builtin::BI_setjmpex:
5780 if (
getTarget().getTriple().isOSMSVCRT() &&
E->getNumArgs() == 1 &&
5784 case Builtin::BI_setjmp:
5785 if (
getTarget().getTriple().isOSMSVCRT() &&
E->getNumArgs() == 1 &&
5787 if (
getTarget().getTriple().getArch() == llvm::Triple::x86)
5789 else if (
getTarget().getTriple().getArch() == llvm::Triple::aarch64)
5796 case Builtin::BImove:
5797 case Builtin::BImove_if_noexcept:
5798 case Builtin::BIforward:
5799 case Builtin::BIforward_like:
5800 case Builtin::BIas_const:
5802 case Builtin::BI__GetExceptionInfo: {
5803 if (llvm::GlobalVariable *GV =
5809 case Builtin::BI__fastfail:
5812 case Builtin::BI__builtin_coro_id:
5814 case Builtin::BI__builtin_coro_promise:
5816 case Builtin::BI__builtin_coro_resume:
5819 case Builtin::BI__builtin_coro_frame:
5821 case Builtin::BI__builtin_coro_noop:
5823 case Builtin::BI__builtin_coro_free:
5825 case Builtin::BI__builtin_coro_destroy:
5828 case Builtin::BI__builtin_coro_done:
5830 case Builtin::BI__builtin_coro_alloc:
5832 case Builtin::BI__builtin_coro_begin:
5834 case Builtin::BI__builtin_coro_end:
5836 case Builtin::BI__builtin_coro_suspend:
5838 case Builtin::BI__builtin_coro_size:
5840 case Builtin::BI__builtin_coro_align:
5844 case Builtin::BIread_pipe:
5845 case Builtin::BIwrite_pipe: {
5853 unsigned GenericAS =
5855 llvm::Type *I8PTy = llvm::PointerType::get(
getLLVMContext(), GenericAS);
5858 if (2U ==
E->getNumArgs()) {
5859 const char *Name = (BuiltinID == Builtin::BIread_pipe) ?
"__read_pipe_2"
5864 llvm::FunctionType *FTy = llvm::FunctionType::get(
Int32Ty, ArgTys,
false);
5868 {Arg0, ACast, PacketSize, PacketAlign}));
5870 assert(4 ==
E->getNumArgs() &&
5871 "Illegal number of parameters to pipe function");
5872 const char *Name = (BuiltinID == Builtin::BIread_pipe) ?
"__read_pipe_4"
5875 llvm::Type *ArgTys[] = {Arg0->
getType(), Arg1->getType(),
Int32Ty, I8PTy,
5879 llvm::FunctionType *FTy = llvm::FunctionType::get(
Int32Ty, ArgTys,
false);
5887 {Arg0, Arg1, Arg2, ACast, PacketSize, PacketAlign}));
5892 case Builtin::BIreserve_read_pipe:
5893 case Builtin::BIreserve_write_pipe:
5894 case Builtin::BIwork_group_reserve_read_pipe:
5895 case Builtin::BIwork_group_reserve_write_pipe:
5896 case Builtin::BIsub_group_reserve_read_pipe:
5897 case Builtin::BIsub_group_reserve_write_pipe: {
5900 if (BuiltinID == Builtin::BIreserve_read_pipe)
5901 Name =
"__reserve_read_pipe";
5902 else if (BuiltinID == Builtin::BIreserve_write_pipe)
5903 Name =
"__reserve_write_pipe";
5904 else if (BuiltinID == Builtin::BIwork_group_reserve_read_pipe)
5905 Name =
"__work_group_reserve_read_pipe";
5906 else if (BuiltinID == Builtin::BIwork_group_reserve_write_pipe)
5907 Name =
"__work_group_reserve_write_pipe";
5908 else if (BuiltinID == Builtin::BIsub_group_reserve_read_pipe)
5909 Name =
"__sub_group_reserve_read_pipe";
5911 Name =
"__sub_group_reserve_write_pipe";
5922 llvm::FunctionType *FTy =
5923 llvm::FunctionType::get(ReservedIDTy, ArgTys,
false);
5926 if (Arg1->getType() !=
Int32Ty)
5929 {Arg0, Arg1, PacketSize, PacketAlign}));
5933 case Builtin::BIcommit_read_pipe:
5934 case Builtin::BIcommit_write_pipe:
5935 case Builtin::BIwork_group_commit_read_pipe:
5936 case Builtin::BIwork_group_commit_write_pipe:
5937 case Builtin::BIsub_group_commit_read_pipe:
5938 case Builtin::BIsub_group_commit_write_pipe: {
5940 if (BuiltinID == Builtin::BIcommit_read_pipe)
5941 Name =
"__commit_read_pipe";
5942 else if (BuiltinID == Builtin::BIcommit_write_pipe)
5943 Name =
"__commit_write_pipe";
5944 else if (BuiltinID == Builtin::BIwork_group_commit_read_pipe)
5945 Name =
"__work_group_commit_read_pipe";
5946 else if (BuiltinID == Builtin::BIwork_group_commit_write_pipe)
5947 Name =
"__work_group_commit_write_pipe";
5948 else if (BuiltinID == Builtin::BIsub_group_commit_read_pipe)
5949 Name =
"__sub_group_commit_read_pipe";
5951 Name =
"__sub_group_commit_write_pipe";
5961 llvm::FunctionType *FTy = llvm::FunctionType::get(
5965 {Arg0, Arg1, PacketSize, PacketAlign}));
5968 case Builtin::BIget_pipe_num_packets:
5969 case Builtin::BIget_pipe_max_packets: {
5970 const char *BaseName;
5972 if (BuiltinID == Builtin::BIget_pipe_num_packets)
5973 BaseName =
"__get_pipe_num_packets";
5975 BaseName =
"__get_pipe_max_packets";
5976 std::string Name = std::string(BaseName) +
5977 std::string(PipeTy->isReadOnly() ?
"_ro" :
"_wo");
5985 llvm::FunctionType *FTy = llvm::FunctionType::get(
Int32Ty, ArgTys,
false);
5988 {Arg0, PacketSize, PacketAlign}));
5992 case Builtin::BIto_global:
5993 case Builtin::BIto_local:
5994 case Builtin::BIto_private: {
5996 auto NewArgT = llvm::PointerType::get(
5999 auto NewRetT = llvm::PointerType::get(
6003 auto FTy = llvm::FunctionType::get(NewRetT, {NewArgT},
false);
6004 llvm::Value *NewArg;
6005 if (Arg0->getType()->getPointerAddressSpace() !=
6006 NewArgT->getPointerAddressSpace())
6009 NewArg =
Builder.CreateBitOrPointerCast(Arg0, NewArgT);
6010 auto NewName = std::string(
"__") +
E->getDirectCallee()->getName().str();
6025 case Builtin::BIenqueue_kernel: {
6027 unsigned NumArgs =
E->getNumArgs();
6030 llvm::Type *GenericVoidPtrTy =
Builder.getPtrTy(
6049 llvm::Type *RangePtrTy =
Range->getType();
6054 Name =
"__enqueue_kernel_basic";
6055 llvm::Type *ArgTys[] = {QueueTy,
Int32Ty, RangePtrTy, GenericVoidPtrTy,
6057 llvm::FunctionType *FTy = llvm::FunctionType::get(
Int32Ty, ArgTys,
false);
6062 Builder.CreatePointerCast(Info.KernelHandle, GenericVoidPtrTy);
6063 llvm::Value *
Block =
6064 Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
6067 {Queue, Flags, Range, Kernel, Block});
6070 assert(NumArgs >= 5 &&
"Invalid enqueue_kernel signature");
6074 auto CreateArrayForSizeVar =
6075 [=](
unsigned First) -> std::pair<llvm::Value *, llvm::Value *> {
6076 llvm::APInt ArraySize(32, NumArgs -
First);
6078 getContext().getSizeType(), ArraySize,
nullptr,
6082 llvm::Value *TmpPtr = Tmp.getPointer();
6087 llvm::Value *Alloca = TmpPtr->stripPointerCasts();
6088 llvm::Value *ElemPtr;
6092 auto *
Zero = llvm::ConstantInt::get(
IntTy, 0);
6093 for (
unsigned I =
First; I < NumArgs; ++I) {
6094 auto *Index = llvm::ConstantInt::get(
IntTy, I -
First);
6106 return {ElemPtr, Alloca};
6112 Name =
"__enqueue_kernel_varargs";
6116 Builder.CreatePointerCast(Info.KernelHandle, GenericVoidPtrTy);
6117 auto *
Block =
Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
6118 auto [ElemPtr, TmpPtr] = CreateArrayForSizeVar(4);
6122 llvm::Value *
const Args[] = {Queue, Flags,
6126 llvm::Type *
const ArgTys[] = {
6127 QueueTy,
IntTy, RangePtrTy, GenericVoidPtrTy,
6128 GenericVoidPtrTy,
IntTy, ElemPtr->getType()};
6130 llvm::FunctionType *FTy = llvm::FunctionType::get(
Int32Ty, ArgTys,
false);
6138 llvm::PointerType *PtrTy = llvm::PointerType::get(
6142 llvm::Value *NumEvents =
6148 llvm::Value *EventWaitList =
nullptr;
6151 EventWaitList = llvm::ConstantPointerNull::get(PtrTy);
6158 EventWaitList =
Builder.CreatePointerCast(EventWaitList, PtrTy);
6160 llvm::Value *EventRet =
nullptr;
6163 EventRet = llvm::ConstantPointerNull::get(PtrTy);
6172 Builder.CreatePointerCast(Info.KernelHandle, GenericVoidPtrTy);
6173 llvm::Value *
Block =
6174 Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
6176 std::vector<llvm::Type *> ArgTys = {
6178 PtrTy, PtrTy, GenericVoidPtrTy, GenericVoidPtrTy};
6180 std::vector<llvm::Value *> Args = {Queue, Flags,
Range,
6181 NumEvents, EventWaitList, EventRet,
6186 Name =
"__enqueue_kernel_basic_events";
6187 llvm::FunctionType *FTy =
6188 llvm::FunctionType::get(
Int32Ty, ArgTys,
false);
6194 Args.push_back(ConstantInt::get(
Int32Ty, NumArgs - 7));
6196 Name =
"__enqueue_kernel_events_varargs";
6198 auto [ElemPtr, TmpPtr] = CreateArrayForSizeVar(7);
6199 Args.push_back(ElemPtr);
6200 ArgTys.push_back(ElemPtr->getType());
6202 llvm::FunctionType *FTy = llvm::FunctionType::get(
Int32Ty, ArgTys,
false);
6208 llvm_unreachable(
"Unexpected enqueue_kernel signature");
6212 case Builtin::BIget_kernel_work_group_size: {
6213 llvm::Type *GenericVoidPtrTy =
Builder.getPtrTy(
6218 Builder.CreatePointerCast(Info.KernelHandle, GenericVoidPtrTy);
6219 Value *Arg =
Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
6222 llvm::FunctionType::get(
IntTy, {GenericVoidPtrTy, GenericVoidPtrTy},
6224 "__get_kernel_work_group_size_impl"),
6227 case Builtin::BIget_kernel_preferred_work_group_size_multiple: {
6228 llvm::Type *GenericVoidPtrTy =
Builder.getPtrTy(
6233 Builder.CreatePointerCast(Info.KernelHandle, GenericVoidPtrTy);
6234 Value *Arg =
Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
6237 llvm::FunctionType::get(
IntTy, {GenericVoidPtrTy, GenericVoidPtrTy},
6239 "__get_kernel_preferred_work_group_size_multiple_impl"),
6242 case Builtin::BIget_kernel_max_sub_group_size_for_ndrange:
6243 case Builtin::BIget_kernel_sub_group_count_for_ndrange: {
6244 llvm::Type *GenericVoidPtrTy =
Builder.getPtrTy(
6251 Builder.CreatePointerCast(Info.KernelHandle, GenericVoidPtrTy);
6254 BuiltinID == Builtin::BIget_kernel_max_sub_group_size_for_ndrange
6255 ?
"__get_kernel_max_sub_group_size_for_ndrange_impl"
6256 :
"__get_kernel_sub_group_count_for_ndrange_impl";
6259 llvm::FunctionType::get(
6260 IntTy, {NDRange->getType(), GenericVoidPtrTy, GenericVoidPtrTy},
6263 {NDRange, Kernel, Block}));
6265 case Builtin::BI__builtin_store_half:
6266 case Builtin::BI__builtin_store_halff: {
6273 case Builtin::BI__builtin_load_half: {
6278 case Builtin::BI__builtin_load_halff: {
6283 case Builtin::BI__builtin_printf:
6284 case Builtin::BIprintf:
6285 if (
getTarget().getTriple().isNVPTX() ||
6288 getTarget().getTriple().getVendor() == Triple::VendorType::AMD)) {
6291 if ((
getTarget().getTriple().isAMDGCN() ||
6298 case Builtin::BI__builtin_canonicalize:
6299 case Builtin::BI__builtin_canonicalizef:
6300 case Builtin::BI__builtin_canonicalizef16:
6301 case Builtin::BI__builtin_canonicalizel:
6303 emitBuiltinWithOneOverloadedType<1>(*
this,
E, Intrinsic::canonicalize));
6305 case Builtin::BI__builtin_thread_pointer: {
6306 if (!
getContext().getTargetInfo().isTLSSupported())
6310 {GlobalsInt8PtrTy}, {}));
6312 case Builtin::BI__builtin_os_log_format:
6315 case Builtin::BI__xray_customevent: {
6328 auto FTy = F->getFunctionType();
6329 auto Arg0 =
E->getArg(0);
6331 auto Arg0Ty = Arg0->getType();
6332 auto PTy0 = FTy->getParamType(0);
6333 if (PTy0 != Arg0Val->getType()) {
6334 if (Arg0Ty->isArrayType())
6337 Arg0Val =
Builder.CreatePointerCast(Arg0Val, PTy0);
6340 auto PTy1 = FTy->getParamType(1);
6341 if (PTy1 != Arg1->getType())
6342 Arg1 =
Builder.CreateTruncOrBitCast(Arg1, PTy1);
6346 case Builtin::BI__xray_typedevent: {
6362 auto FTy = F->getFunctionType();
6364 auto PTy0 = FTy->getParamType(0);
6365 if (PTy0 != Arg0->getType())
6366 Arg0 =
Builder.CreateTruncOrBitCast(Arg0, PTy0);
6367 auto Arg1 =
E->getArg(1);
6369 auto Arg1Ty = Arg1->getType();
6370 auto PTy1 = FTy->getParamType(1);
6371 if (PTy1 != Arg1Val->getType()) {
6372 if (Arg1Ty->isArrayType())
6375 Arg1Val =
Builder.CreatePointerCast(Arg1Val, PTy1);
6378 auto PTy2 = FTy->getParamType(2);
6379 if (PTy2 != Arg2->getType())
6380 Arg2 =
Builder.CreateTruncOrBitCast(Arg2, PTy2);
6384 case Builtin::BI__builtin_ms_va_start:
6385 case Builtin::BI__builtin_ms_va_end:
6388 BuiltinID == Builtin::BI__builtin_ms_va_start));
6390 case Builtin::BI__builtin_ms_va_copy: {
6407 case Builtin::BI__builtin_get_device_side_mangled_name: {
6420 BI.isLibFunction(BuiltinID))
6426 if (BI.isPredefinedLibFunction(BuiltinID))
6436 if (
unsigned VectorWidth =
getContext().BuiltinInfo.getRequiredVectorWidth(BuiltinID))
6437 LargestVectorWidth = std::max(LargestVectorWidth, VectorWidth);
6441 Intrinsic::ID IntrinsicID = Intrinsic::not_intrinsic;
6443 llvm::Triple::getArchTypePrefix(
getTarget().getTriple().getArch());
6444 if (!Prefix.empty()) {
6445 IntrinsicID = Intrinsic::getIntrinsicForClangBuiltin(Prefix.data(), Name);
6446 if (IntrinsicID == Intrinsic::not_intrinsic && Prefix ==
"spv" &&
6447 getTarget().getTriple().getOS() == llvm::Triple::OSType::AMDHSA)
6448 IntrinsicID = Intrinsic::getIntrinsicForClangBuiltin(
"amdgcn", Name);
6452 if (IntrinsicID == Intrinsic::not_intrinsic)
6453 IntrinsicID = Intrinsic::getIntrinsicForMSBuiltin(Prefix.data(), Name);
6456 if (IntrinsicID != Intrinsic::not_intrinsic) {
6461 unsigned ICEArguments = 0;
6467 llvm::FunctionType *FTy = F->getFunctionType();
6469 for (
unsigned i = 0, e =
E->getNumArgs(); i != e; ++i) {
6473 llvm::Type *PTy = FTy->getParamType(i);
6474 if (PTy != ArgValue->
getType()) {
6476 if (
auto *PtrTy = dyn_cast<llvm::PointerType>(PTy)) {
6477 if (PtrTy->getAddressSpace() !=
6478 ArgValue->
getType()->getPointerAddressSpace()) {
6481 PtrTy->getAddressSpace()));
6487 if (PTy->isX86_AMXTy())
6488 ArgValue =
Builder.CreateIntrinsic(Intrinsic::x86_cast_vector_to_tile,
6489 {ArgValue->
getType()}, {ArgValue});
6491 ArgValue =
Builder.CreateBitCast(ArgValue, PTy);
6494 Args.push_back(ArgValue);
6500 llvm::Type *RetTy =
VoidTy;
6504 if (RetTy !=
V->getType()) {
6506 if (
auto *PtrTy = dyn_cast<llvm::PointerType>(RetTy)) {
6507 if (PtrTy->getAddressSpace() !=
V->getType()->getPointerAddressSpace()) {
6510 PtrTy->getAddressSpace()));
6516 if (
V->getType()->isX86_AMXTy())
6517 V =
Builder.CreateIntrinsic(Intrinsic::x86_cast_tile_to_vector, {RetTy},
6523 if (RetTy->isVoidTy())
6543 if (
V->getType()->isVoidTy())
6550 llvm_unreachable(
"No current target builtin returns complex");
6552 llvm_unreachable(
"Bad evaluation kind in EmitBuiltinExpr");
6559 if (
V->getType()->isVoidTy())
6566 llvm_unreachable(
"No current hlsl builtin returns complex");
6568 llvm_unreachable(
"Bad evaluation kind in EmitBuiltinExpr");
6581struct BuiltinAlignArgs {
6582 llvm::Value *Src =
nullptr;
6583 llvm::Type *SrcType =
nullptr;
6584 llvm::Value *Alignment =
nullptr;
6585 llvm::Value *Mask =
nullptr;
6586 llvm::IntegerType *IntType =
nullptr;
6594 SrcType = Src->getType();
6595 if (SrcType->isPointerTy()) {
6596 IntType = IntegerType::get(
6600 assert(SrcType->isIntegerTy());
6601 IntType = cast<llvm::IntegerType>(SrcType);
6604 Alignment = CGF.
Builder.CreateZExtOrTrunc(Alignment, IntType,
"alignment");
6605 auto *One = llvm::ConstantInt::get(IntType, 1);
6606 Mask = CGF.
Builder.CreateSub(Alignment, One,
"mask");
6613 BuiltinAlignArgs Args(
E, *
this);
6614 llvm::Value *SrcAddress = Args.Src;
6615 if (Args.SrcType->isPointerTy())
6617 Builder.CreateBitOrPointerCast(Args.Src, Args.IntType,
"src_addr");
6619 Builder.CreateAnd(SrcAddress, Args.Mask,
"set_bits"),
6620 llvm::Constant::getNullValue(Args.IntType),
"is_aligned"));
6627 BuiltinAlignArgs Args(
E, *
this);
6628 llvm::Value *SrcForMask = Args.Src;
6634 if (Args.Src->getType()->isPointerTy()) {
6644 SrcForMask =
Builder.CreateAdd(SrcForMask, Args.Mask,
"over_boundary");
6648 llvm::Value *InvertedMask =
Builder.CreateNot(Args.Mask,
"inverted_mask");
6649 llvm::Value *
Result =
nullptr;
6650 if (Args.Src->getType()->isPointerTy()) {
6652 Intrinsic::ptrmask, {Args.SrcType, Args.IntType},
6653 {SrcForMask, InvertedMask},
nullptr,
"aligned_result");
6655 Result =
Builder.CreateAnd(SrcForMask, InvertedMask,
"aligned_result");
6657 assert(
Result->getType() == Args.SrcType);
static char bitActionToX86BTCode(BitTest::ActionKind A)
static Value * EmitAtomicCmpXchg128ForMSIntrin(CodeGenFunction &CGF, const CallExpr *E, AtomicOrdering SuccessOrdering)
static void emitSincosBuiltin(CodeGenFunction &CGF, const CallExpr *E, Intrinsic::ID IntrinsicID)
static CanQualType getOSLogArgType(ASTContext &C, int Size)
Get the argument type for arguments to os_log_helper.
static Value * EmitOverflowCheckedAbs(CodeGenFunction &CGF, const CallExpr *E, bool SanitizeOverflow)
static llvm::Value * EmitBitCountExpr(CodeGenFunction &CGF, const Expr *E)
static Value * tryUseTestFPKind(CodeGenFunction &CGF, unsigned BuiltinID, Value *V)
static bool areBOSTypesCompatible(int From, int To)
Checks if using the result of __builtin_object_size(p, From) in place of __builtin_object_size(p,...
static std::pair< llvm::Value *, llvm::Value * > GetCountFieldAndIndex(CodeGenFunction &CGF, const MemberExpr *ME, const FieldDecl *ArrayFD, const FieldDecl *CountFD, const Expr *Idx, llvm::IntegerType *ResType, bool IsSigned)
Value * EmitFromInt(CodeGenFunction &CGF, llvm::Value *V, QualType T, llvm::Type *ResultType)
static bool TypeRequiresBuiltinLaunderImp(const ASTContext &Ctx, QualType Ty, llvm::SmallPtrSetImpl< const Decl * > &Seen)
static Value * EmitAtomicIncrementValue(CodeGenFunction &CGF, const CallExpr *E, AtomicOrdering Ordering=AtomicOrdering::SequentiallyConsistent)
static RValue EmitMSVCRTSetJmp(CodeGenFunction &CGF, MSVCSetJmpKind SJKind, const CallExpr *E)
MSVC handles setjmp a bit differently on different platforms.
#define MUTATE_LDBL(func)
static Value * emitMaybeConstrainedFPToIntRoundBuiltin(CodeGenFunction &CGF, const CallExpr *E, unsigned IntrinsicID, unsigned ConstrainedIntrinsicID)
static bool TypeRequiresBuiltinLaunder(CodeGenModule &CGM, QualType Ty)
Determine if the specified type requires laundering by checking if it is a dynamic class type or cont...
static Value * EmitISOVolatileLoad(CodeGenFunction &CGF, const CallExpr *E)
static struct WidthAndSignedness EncompassingIntegerType(ArrayRef< struct WidthAndSignedness > Types)
static Value * EmitTargetArchBuiltinExpr(CodeGenFunction *CGF, unsigned BuiltinID, const CallExpr *E, ReturnValueSlot ReturnValue, llvm::Triple::ArchType Arch)
static Value * emitBinaryMaybeConstrainedFPBuiltin(CodeGenFunction &CGF, const CallExpr *E, unsigned IntrinsicID, unsigned ConstrainedIntrinsicID)
static RValue EmitBinaryAtomicPost(CodeGenFunction &CGF, llvm::AtomicRMWInst::BinOp Kind, const CallExpr *E, Instruction::BinaryOps Op, bool Invert=false)
Utility to insert an atomic instruction based Intrinsic::ID and the expression node,...
static bool HasNoIndirectArgumentsOrResults(CGFunctionInfo const &FnInfo)
Checks no arguments or results are passed indirectly in the ABI (i.e.
Value * EmitToInt(CodeGenFunction &CGF, llvm::Value *V, QualType T, llvm::IntegerType *IntType)
Emit the conversions required to turn the given value into an integer of the given size.
static Value * emitBinaryExpMaybeConstrainedFPBuiltin(CodeGenFunction &CGF, const CallExpr *E, Intrinsic::ID IntrinsicID, Intrinsic::ID ConstrainedIntrinsicID)
static llvm::Value * EmitBitTestIntrinsic(CodeGenFunction &CGF, unsigned BuiltinID, const CallExpr *E)
Emit a _bittest* intrinsic.
static Value * EmitSignBit(CodeGenFunction &CGF, Value *V)
Emit the computation of the sign bit for a floating point value.
static Value * EmitFAbs(CodeGenFunction &CGF, Value *V)
EmitFAbs - Emit a call to @llvm.fabs().
static llvm::Value * EmitPositiveResultOrZero(CodeGenFunction &CGF, llvm::Value *Res, llvm::Value *Index, llvm::IntegerType *ResType, bool IsSigned)
static bool shouldEmitBuiltinAsIR(unsigned BuiltinID, const Builtin::Context &BI, const CodeGenFunction &CGF)
Some builtins do not have library implementation on some targets and are instead emitted as LLVM IRs ...
static bool isSpecialUnsignedMultiplySignedResult(unsigned BuiltinID, WidthAndSignedness Op1Info, WidthAndSignedness Op2Info, WidthAndSignedness ResultInfo)
static llvm::Value * getDefaultBuiltinObjectSizeResult(unsigned Type, llvm::IntegerType *ResType)
static Value * emitTernaryMaybeConstrainedFPBuiltin(CodeGenFunction &CGF, const CallExpr *E, unsigned IntrinsicID, unsigned ConstrainedIntrinsicID)
static WidthAndSignedness getIntegerWidthAndSignedness(const clang::ASTContext &context, const clang::QualType Type)
static RValue EmitCheckedMixedSignMultiply(CodeGenFunction &CGF, const clang::Expr *Op1, WidthAndSignedness Op1Info, const clang::Expr *Op2, WidthAndSignedness Op2Info, const clang::Expr *ResultArg, QualType ResultQTy, WidthAndSignedness ResultInfo)
Emit a checked mixed-sign multiply.
static unsigned mutateLongDoubleBuiltin(unsigned BuiltinID)
Value * MakeAtomicCmpXchgValue(CodeGenFunction &CGF, const CallExpr *E, bool ReturnBool)
Utility to insert an atomic cmpxchg instruction.
static RValue EmitBinaryAtomic(CodeGenFunction &CGF, llvm::AtomicRMWInst::BinOp Kind, const CallExpr *E)
static void initializeAlloca(CodeGenFunction &CGF, AllocaInst *AI, Value *Size, Align AlignmentInBytes)
static Value * EmitAtomicCmpXchgForMSIntrin(CodeGenFunction &CGF, const CallExpr *E, AtomicOrdering SuccessOrdering=AtomicOrdering::SequentiallyConsistent)
This function should be invoked to emit atomic cmpxchg for Microsoft's _InterlockedCompareExchange* i...
static bool isSpecialMixedSignMultiply(unsigned BuiltinID, WidthAndSignedness Op1Info, WidthAndSignedness Op2Info, WidthAndSignedness ResultInfo)
Determine if a binop is a checked mixed-sign multiply we can specialize.
static Value * emitFrexpBuiltin(CodeGenFunction &CGF, const CallExpr *E, Intrinsic::ID IntrinsicID)
static llvm::Value * emitModfBuiltin(CodeGenFunction &CGF, const CallExpr *E, Intrinsic::ID IntrinsicID)
static Value * EmitNontemporalStore(CodeGenFunction &CGF, const CallExpr *E)
static const FieldDecl * FindFlexibleArrayMemberField(CodeGenFunction &CGF, ASTContext &Ctx, const RecordDecl *RD)
Find a struct's flexible array member.
static Value * EmitISOVolatileStore(CodeGenFunction &CGF, const CallExpr *E)
static RValue EmitHipStdParUnsupportedBuiltin(CodeGenFunction *CGF, const FunctionDecl *FD)
static llvm::Value * EmitX86BitTestIntrinsic(CodeGenFunction &CGF, BitTest BT, const CallExpr *E, Value *BitBase, Value *BitPos)
static RValue EmitCheckedUnsignedMultiplySignedResult(CodeGenFunction &CGF, const clang::Expr *Op1, WidthAndSignedness Op1Info, const clang::Expr *Op2, WidthAndSignedness Op2Info, const clang::Expr *ResultArg, QualType ResultQTy, WidthAndSignedness ResultInfo)
Address CheckAtomicAlignment(CodeGenFunction &CGF, const CallExpr *E)
static Value * EmitNontemporalLoad(CodeGenFunction &CGF, const CallExpr *E)
static llvm::AtomicOrdering getBitTestAtomicOrdering(BitTest::InterlockingKind I)
static bool GetFieldOffset(ASTContext &Ctx, const RecordDecl *RD, const FieldDecl *FD, int64_t &Offset)
Calculate the offset of a struct field.
Value * MakeBinaryAtomicValue(CodeGenFunction &CGF, llvm::AtomicRMWInst::BinOp Kind, const CallExpr *E, AtomicOrdering Ordering)
Utility to insert an atomic instruction based on Intrinsic::ID and the expression node.
llvm::Value * EmitOverflowIntrinsic(CodeGenFunction &CGF, const Intrinsic::ID IntrinsicID, llvm::Value *X, llvm::Value *Y, llvm::Value *&Carry)
Emit a call to llvm.
static Value * EmitAbs(CodeGenFunction &CGF, Value *ArgValue, bool HasNSW)
static Value * EmitAtomicDecrementValue(CodeGenFunction &CGF, const CallExpr *E, AtomicOrdering Ordering=AtomicOrdering::SequentiallyConsistent)
static RValue emitUnaryMaybeConstrainedFPBuiltin(CIRGenFunction &cgf, const CallExpr &e)
static RValue emitLibraryCall(CIRGenFunction &cgf, const FunctionDecl *fd, const CallExpr *e, mlir::Operation *calleeValue)
llvm::MachO::Target Target
llvm::MachO::Record Record
static QualType getPointeeType(const MemRegion *R)
__DEVICE__ float modf(float __x, float *__iptr)
__DEVICE__ double nan(const char *)
Holds long-lived AST nodes (such as types and decls) that can be referred to throughout the semantic ...
CharUnits getTypeAlignInChars(QualType T) const
Return the ABI-specified alignment of a (complete) type T, in characters.
unsigned getIntWidth(QualType T) const
const ASTRecordLayout & getASTRecordLayout(const RecordDecl *D) const
Get or compute information about the layout of the specified record (struct/union/class) D,...
Builtin::Context & BuiltinInfo
QualType getConstantArrayType(QualType EltTy, const llvm::APInt &ArySize, const Expr *SizeExpr, ArraySizeModifier ASM, unsigned IndexTypeQuals) const
Return the unique reference to the type for a constant array of the specified element type.
QualType getBaseElementType(const ArrayType *VAT) const
Return the innermost element type of an array type.
QualType getObjCIdType() const
Represents the Objective-CC id type.
bool hasSameUnqualifiedType(QualType T1, QualType T2) const
Determine whether the given types are equivalent after cvr-qualifiers have been removed.
const ArrayType * getAsArrayType(QualType T) const
Type Query functions.
uint64_t getTypeSize(QualType T) const
Return the size of the specified (complete) type T, in bits.
CharUnits getTypeSizeInChars(QualType T) const
Return the size of the specified (complete) type T, in characters.
QualType GetBuiltinType(unsigned ID, GetBuiltinTypeError &Error, unsigned *IntegerConstantArgs=nullptr) const
Return the type for the specified builtin.
const TargetInfo & getTargetInfo() const
CharUnits toCharUnitsFromBits(int64_t BitSize) const
Convert a size in bits to a size in characters.
unsigned getTargetAddressSpace(LangAS AS) const
uint64_t getCharWidth() const
Return the size of the character type, in bits.
ASTRecordLayout - This class contains layout information for one RecordDecl, which is a struct/union/...
uint64_t getFieldOffset(unsigned FieldNo) const
getFieldOffset - Get the offset of the given field index, in bits.
ArraySubscriptExpr - [C99 6.5.2.1] Array Subscripting.
Represents an array type, per C99 6.7.5.2 - Array Declarators.
QualType getElementType() const
static std::unique_ptr< AtomicScopeModel > create(AtomicScopeModelKind K)
Create an atomic scope model by AtomicScopeModelKind.
A builtin binary operation expression such as "x + y" or "x <= y".
static bool isCommaOp(Opcode Opc)
Holds information about both target-independent and target-specific builtins, allowing easy queries b...
bool isLibFunction(unsigned ID) const
Return true if this is a builtin for a libc/libm function, with a "__builtin_" prefix (e....
bool isConstWithoutErrnoAndExceptions(unsigned ID) const
Return true if this function has no side effects and doesn't read memory, except for possibly errno o...
std::string getName(unsigned ID) const
Return the identifier name for the specified builtin, e.g.
bool isConstWithoutExceptions(unsigned ID) const
bool isConst(unsigned ID) const
Return true if this function has no side effects and doesn't read memory.
Represents a C++ struct/union/class.
CallExpr - Represents a function call (C99 6.5.2.2, C++ [expr.call]).
CastExpr - Base class for type casts, including both implicit casts (ImplicitCastExpr) and explicit c...
CharUnits - This is an opaque type for sizes expressed in character units.
bool isZero() const
isZero - Test whether the quantity equals zero.
llvm::Align getAsAlign() const
getAsAlign - Returns Quantity as a valid llvm::Align, Beware llvm::Align assumes power of two 8-bit b...
QuantityType getQuantity() const
getQuantity - Get the raw integer representation of this quantity.
static CharUnits One()
One - Construct a CharUnits quantity of one.
static CharUnits fromQuantity(QuantityType Quantity)
fromQuantity - Construct a CharUnits quantity from a raw integer type.
XRayInstrSet XRayInstrumentationBundle
Set of XRay instrumentation kinds to emit.
ABIArgInfo - Helper class to encapsulate information about how a specific C type should be passed to ...
Like RawAddress, an abstract representation of an aligned address, but the pointer contained in this ...
llvm::Value * emitRawPointer(CodeGenFunction &CGF) const
Return the pointer contained in this class after authenticating it and adding offset to it if necessa...
CharUnits getAlignment() const
llvm::Type * getElementType() const
Return the type of the values stored in this address.
Address withElementType(llvm::Type *ElemTy) const
Return address with different element type, but same pointer and alignment.
Address withAlignment(CharUnits NewAlignment) const
Return address with different alignment, but same pointer and element type.
llvm::PointerType * getType() const
Return the type of the pointer value.
A scoped helper to set the current debug location to the specified location or preferred location of ...
static ApplyDebugLocation CreateArtificial(CodeGenFunction &CGF)
Apply TemporaryLocation if it is valid.
static ApplyDebugLocation CreateEmpty(CodeGenFunction &CGF)
Set the IRBuilder to not attach debug locations.
llvm::StoreInst * CreateStore(llvm::Value *Val, Address Addr, bool IsVolatile=false)
llvm::StoreInst * CreateAlignedStore(llvm::Value *Val, llvm::Value *Addr, CharUnits Align, bool IsVolatile=false)
Address CreateGEP(CodeGenFunction &CGF, Address Addr, llvm::Value *Index, const llvm::Twine &Name="")
llvm::CallInst * CreateMemMove(Address Dest, Address Src, llvm::Value *Size, bool IsVolatile=false)
llvm::CallInst * CreateMemCpyInline(Address Dest, Address Src, uint64_t Size)
llvm::AtomicRMWInst * CreateAtomicRMW(llvm::AtomicRMWInst::BinOp Op, Address Addr, llvm::Value *Val, llvm::AtomicOrdering Ordering, llvm::SyncScope::ID SSID=llvm::SyncScope::System)
llvm::CallInst * CreateMemSetInline(Address Dest, llvm::Value *Value, uint64_t Size)
llvm::CallInst * CreateMemSet(Address Dest, llvm::Value *Value, llvm::Value *Size, bool IsVolatile=false)
llvm::AtomicCmpXchgInst * CreateAtomicCmpXchg(Address Addr, llvm::Value *Cmp, llvm::Value *New, llvm::AtomicOrdering SuccessOrdering, llvm::AtomicOrdering FailureOrdering, llvm::SyncScope::ID SSID=llvm::SyncScope::System)
llvm::LoadInst * CreateLoad(Address Addr, const llvm::Twine &Name="")
Address CreateConstByteGEP(Address Addr, CharUnits Offset, const llvm::Twine &Name="")
Address CreateLaunderInvariantGroup(Address Addr)
llvm::CallInst * CreateMemCpy(Address Dest, Address Src, llvm::Value *Size, bool IsVolatile=false)
llvm::LoadInst * CreateAlignedLoad(llvm::Type *Ty, llvm::Value *Addr, CharUnits Align, const llvm::Twine &Name="")
Address CreateAddrSpaceCast(Address Addr, llvm::Type *Ty, llvm::Type *ElementTy, const llvm::Twine &Name="")
Address CreateConstInBoundsGEP(Address Addr, uint64_t Index, const llvm::Twine &Name="")
Given addr = T* ... produce name = getelementptr inbounds addr, i64 index where i64 is actually the t...
Address CreateInBoundsGEP(Address Addr, ArrayRef< llvm::Value * > IdxList, llvm::Type *ElementType, CharUnits Align, const Twine &Name="")
virtual std::string getDeviceSideName(const NamedDecl *ND)=0
Returns function or variable name on device side even if the current compilation is for host.
virtual llvm::GlobalVariable * getThrowInfo(QualType T)
All available information about a concrete callee.
static CGCallee forDirect(llvm::Constant *functionPtr, const CGCalleeInfo &abstractInfo=CGCalleeInfo())
llvm::DILocation * CreateTrapFailureMessageFor(llvm::DebugLoc TrapLocation, StringRef Category, StringRef FailureMsg)
Create a debug location from TrapLocation that adds an artificial inline frame where the frame name i...
CGFunctionInfo - Class to encapsulate the information about a function definition.
ABIArgInfo & getReturnInfo()
MutableArrayRef< ArgInfo > arguments()
virtual void EmitGCMemmoveCollectable(CodeGen::CodeGenFunction &CGF, Address DestPtr, Address SrcPtr, llvm::Value *Size)=0
EnqueuedBlockInfo emitOpenCLEnqueuedBlock(CodeGenFunction &CGF, const Expr *E)
virtual llvm::Value * getPipeElemAlign(const Expr *PipeArg)
virtual llvm::Value * getPipeElemSize(const Expr *PipeArg)
llvm::StructType * getLLVMType() const
Return the "complete object" LLVM type associated with this record.
CallArgList - Type for representing both the value and type of arguments in a call.
void add(RValue rvalue, QualType type)
CodeGenFunction - This class organizes the per-function state that is used while generating LLVM code...
RValue EmitAMDGPUDevicePrintfCallExpr(const CallExpr *E)
llvm::Value * GetVTablePtr(Address This, llvm::Type *VTableTy, const CXXRecordDecl *VTableClass, VTableAuthMode AuthMode=VTableAuthMode::Authenticate)
GetVTablePtr - Return the Value of the vtable pointer member pointed to by This.
RValue EmitNVPTXDevicePrintfCallExpr(const CallExpr *E)
RValue EmitCoroutineIntrinsic(const CallExpr *E, unsigned int IID)
llvm::Value * EmitScalarOrConstFoldImmArg(unsigned ICEArguments, unsigned Idx, const CallExpr *E)
SanitizerSet SanOpts
Sanitizers enabled for this function.
void checkTargetFeatures(const CallExpr *E, const FunctionDecl *TargetDecl)
llvm::Value * GetCountedByFieldExprGEP(const Expr *Base, const FieldDecl *FD, const FieldDecl *CountDecl)
llvm::Type * ConvertType(QualType T)
void addInstToNewSourceAtom(llvm::Instruction *KeyInstruction, llvm::Value *Backup)
Add KeyInstruction and an optional Backup instruction to a new atom group (See ApplyAtomGroup for mor...
BuiltinCheckKind
Specifies which type of sanitizer check to apply when handling a particular builtin.
llvm::Value * EmitSystemZBuiltinExpr(unsigned BuiltinID, const CallExpr *E)
llvm::CallBase * EmitRuntimeCallOrInvoke(llvm::FunctionCallee callee, ArrayRef< llvm::Value * > args, const Twine &name="")
Emits a call or invoke instruction to the given runtime function.
llvm::Value * EmitSEHAbnormalTermination()
llvm::Value * EmitARCRetain(QualType type, llvm::Value *value)
Produce the code to do a retain.
CleanupKind getARCCleanupKind()
Retrieves the default cleanup kind for an ARC cleanup.
llvm::Value * EmitVAStartEnd(llvm::Value *ArgValue, bool IsStart)
Emits a call to an LLVM variable-argument intrinsic, either llvm.va_start or llvm....
llvm::Value * EmitAMDGPUBuiltinExpr(unsigned BuiltinID, const CallExpr *E)
llvm::Constant * EmitCheckSourceLocation(SourceLocation Loc)
Emit a description of a source location in a format suitable for passing to a runtime sanitizer handl...
void SetSqrtFPAccuracy(llvm::Value *Val)
Set the minimum required accuracy of the given sqrt operation based on CodeGenOpts.
RValue emitBuiltinOSLogFormat(const CallExpr &E)
Emit IR for __builtin_os_log_format.
llvm::BasicBlock * createBasicBlock(const Twine &name="", llvm::Function *parent=nullptr, llvm::BasicBlock *before=nullptr)
createBasicBlock - Create an LLVM basic block.
llvm::Function * generateBuiltinOSLogHelperFunction(const analyze_os_log::OSLogBufferLayout &Layout, CharUnits BufferAlignment)
const LangOptions & getLangOpts() const
LValue MakeNaturalAlignAddrLValue(llvm::Value *V, QualType T, KnownNonNull_t IsKnownNonNull=NotKnownNonNull)
Address makeNaturalAddressForPointer(llvm::Value *Ptr, QualType T, CharUnits Alignment=CharUnits::Zero(), bool ForPointeeType=false, LValueBaseInfo *BaseInfo=nullptr, TBAAAccessInfo *TBAAInfo=nullptr, KnownNonNull_t IsKnownNonNull=NotKnownNonNull)
Construct an address with the natural alignment of T.
TypeCheckKind
Situations in which we might emit a check for the suitability of a pointer or glvalue.
@ TCK_Store
Checking the destination of a store. Must be suitably sized and aligned.
@ TCK_Load
Checking the operand of a load. Must be suitably sized and aligned.
llvm::Value * EmitRISCVBuiltinExpr(unsigned BuiltinID, const CallExpr *E, ReturnValueSlot ReturnValue)
llvm::Value * EmitCheckedArgForBuiltin(const Expr *E, BuiltinCheckKind Kind)
Emits an argument for a call to a builtin.
llvm::Constant * EmitCheckTypeDescriptor(QualType T)
Emit a description of a type in a format suitable for passing to a runtime sanitizer handler.
void EmitNonNullArgCheck(RValue RV, QualType ArgType, SourceLocation ArgLoc, AbstractCallee AC, unsigned ParmNum)
Create a check for a function parameter that may potentially be declared as non-null.
const TargetInfo & getTarget() const
RValue emitRotate(const CallExpr *E, bool IsRotateRight)
llvm::Value * EmitAnnotationCall(llvm::Function *AnnotationFn, llvm::Value *AnnotatedVal, StringRef AnnotationStr, SourceLocation Location, const AnnotateAttr *Attr)
Emit an annotation call (intrinsic).
llvm::Value * EmitARMBuiltinExpr(unsigned BuiltinID, const CallExpr *E, ReturnValueSlot ReturnValue, llvm::Triple::ArchType Arch)
CGCallee EmitCallee(const Expr *E)
llvm::Value * EmitBPFBuiltinExpr(unsigned BuiltinID, const CallExpr *E)
bool AlwaysEmitXRayCustomEvents() const
AlwaysEmitXRayCustomEvents - Return true if we must unconditionally emit XRay custom event handling c...
void StartFunction(GlobalDecl GD, QualType RetTy, llvm::Function *Fn, const CGFunctionInfo &FnInfo, const FunctionArgList &Args, SourceLocation Loc=SourceLocation(), SourceLocation StartLoc=SourceLocation())
Emit code for the start of a function.
LValue EmitAggExprToLValue(const Expr *E)
EmitAggExprToLValue - Emit the computation of the specified expression of aggregate type into a tempo...
llvm::Value * EvaluateExprAsBool(const Expr *E)
EvaluateExprAsBool - Perform the usual unary conversions on the specified expression and compare the ...
llvm::Value * EmitPPCBuiltinExpr(unsigned BuiltinID, const CallExpr *E)
void EmitCheck(ArrayRef< std::pair< llvm::Value *, SanitizerKind::SanitizerOrdinal > > Checked, SanitizerHandler Check, ArrayRef< llvm::Constant * > StaticArgs, ArrayRef< llvm::Value * > DynamicArgs, const TrapReason *TR=nullptr)
Create a basic block that will either trap or call a handler function in the UBSan runtime with the p...
bool AlwaysEmitXRayTypedEvents() const
AlwaysEmitXRayTypedEvents - Return true if clang must unconditionally emit XRay typed event handling ...
CGDebugInfo * getDebugInfo()
llvm::Value * getTypeSize(QualType Ty)
Returns calculated size of the specified type.
bool EmitLifetimeStart(llvm::Value *Addr)
Emit a lifetime.begin marker if some criteria are satisfied.
llvm::Value * EmitSEHExceptionCode()
llvm::Value * EmitToMemory(llvm::Value *Value, QualType Ty)
EmitToMemory - Change a scalar value from its value representation to its in-memory representation.
ComplexPairTy EmitComplexExpr(const Expr *E, bool IgnoreReal=false, bool IgnoreImag=false)
EmitComplexExpr - Emit the computation of the specified expression of complex type,...
RValue EmitCall(const CGFunctionInfo &CallInfo, const CGCallee &Callee, ReturnValueSlot ReturnValue, const CallArgList &Args, llvm::CallBase **CallOrInvoke, bool IsMustTail, SourceLocation Loc, bool IsVirtualFunctionPointerThunk=false)
EmitCall - Generate a call of the given function, expecting the given result type,...
const TargetCodeGenInfo & getTargetHooks() const
RValue EmitBuiltinAlignTo(const CallExpr *E, bool AlignUp)
Emit IR for __builtin_align_up/__builtin_align_down.
void EmitLifetimeEnd(llvm::Value *Addr)
llvm::Value * EmitWebAssemblyBuiltinExpr(unsigned BuiltinID, const CallExpr *E)
bool IsInPreservedAIRegion
True if CodeGen currently emits code inside preserved access index region.
ASTContext & getContext() const
llvm::Value * EmitDirectXBuiltinExpr(unsigned BuiltinID, const CallExpr *E)
llvm::Value * EmitAArch64BuiltinExpr(unsigned BuiltinID, const CallExpr *E, llvm::Triple::ArchType Arch)
llvm::Value * EmitMSVCBuiltinExpr(MSVCIntrin BuiltinID, const CallExpr *E)
llvm::Value * EmitLoadOfScalar(Address Addr, bool Volatile, QualType Ty, SourceLocation Loc, AlignmentSource Source=AlignmentSource::Type, bool isNontemporal=false)
EmitLoadOfScalar - Load a scalar value from an address, taking care to appropriately convert from the...
const Decl * CurFuncDecl
CurFuncDecl - Holds the Decl for the current outermost non-closure context.
Address EmitArrayToPointerDecay(const Expr *Array, LValueBaseInfo *BaseInfo=nullptr, TBAAAccessInfo *TBAAInfo=nullptr)
void pushLifetimeExtendedDestroy(CleanupKind kind, Address addr, QualType type, Destroyer *destroyer, bool useEHCleanupForArray)
llvm::Value * EmitSPIRVBuiltinExpr(unsigned BuiltinID, const CallExpr *E)
RValue EmitBuiltinExpr(const GlobalDecl GD, unsigned BuiltinID, const CallExpr *E, ReturnValueSlot ReturnValue)
Address EmitVAListRef(const Expr *E)
RValue GetUndefRValue(QualType Ty)
GetUndefRValue - Get an appropriate 'undef' rvalue for the given type.
RValue EmitBuiltinIsAligned(const CallExpr *E)
Emit IR for __builtin_is_aligned.
RValue EmitBuiltinNewDeleteCall(const FunctionProtoType *Type, const CallExpr *TheCallExpr, bool IsDelete)
llvm::CallInst * EmitRuntimeCall(llvm::FunctionCallee callee, const Twine &name="")
llvm::Value * EmitHexagonBuiltinExpr(unsigned BuiltinID, const CallExpr *E)
CodeGenTypes & getTypes() const
llvm::Value * EmitX86BuiltinExpr(unsigned BuiltinID, const CallExpr *E)
static TypeEvaluationKind getEvaluationKind(QualType T)
getEvaluationKind - Return the TypeEvaluationKind of QualType T.
@ _InterlockedExchangeAdd_rel
@ _InterlockedIncrement_acq
@ _InterlockedExchange_nf
@ _InterlockedIncrement_nf
@ _InterlockedExchange_acq
@ _InterlockedCompareExchange128_rel
@ _InterlockedCompareExchange128_acq
@ _InterlockedCompareExchange_acq
@ _InterlockedExchangeAdd_nf
@ _InterlockedCompareExchange_nf
@ _InterlockedDecrement_rel
@ _InterlockedExchangeSub
@ _InterlockedExchangeAdd_acq
@ _InterlockedIncrement_rel
@ _InterlockedCompareExchange128_nf
@ _InterlockedCompareExchange128
@ _InterlockedExchange_rel
@ _InterlockedCompareExchange
@ _InterlockedDecrement_nf
@ _InterlockedExchangeAdd
@ _InterlockedDecrement_acq
@ _InterlockedCompareExchange_rel
void EmitTypeCheck(TypeCheckKind TCK, SourceLocation Loc, LValue LV, QualType Type, SanitizerSet SkippedChecks=SanitizerSet(), llvm::Value *ArraySize=nullptr)
Address EmitPointerWithAlignment(const Expr *Addr, LValueBaseInfo *BaseInfo=nullptr, TBAAAccessInfo *TBAAInfo=nullptr, KnownNonNull_t IsKnownNonNull=NotKnownNonNull)
EmitPointerWithAlignment - Given an expression with a pointer type, emit the value and compute our be...
RawAddress CreateMemTemp(QualType T, const Twine &Name="tmp", RawAddress *Alloca=nullptr)
CreateMemTemp - Create a temporary memory object of the given type, with appropriate alignment and cas...
llvm::Value * EmitCheckedInBoundsGEP(llvm::Type *ElemTy, llvm::Value *Ptr, ArrayRef< llvm::Value * > IdxList, bool SignedIndices, bool IsSubtraction, SourceLocation Loc, const Twine &Name="")
Same as IRBuilder::CreateInBoundsGEP, but additionally emits a check to detect undefined behavior whe...
Address EmitMSVAListRef(const Expr *E)
Emit a "reference" to a __builtin_ms_va_list; this is always the value of the expression,...
llvm::Value * EmitScalarExpr(const Expr *E, bool IgnoreResultAssign=false)
EmitScalarExpr - Emit the computation of the specified expression of LLVM scalar type,...
llvm::CallInst * EmitTrapCall(llvm::Intrinsic::ID IntrID)
Emit a call to trap or debugtrap and attach function attribute "trap-func-name" if specified.
LValue MakeAddrLValue(Address Addr, QualType T, AlignmentSource Source=AlignmentSource::Type)
void EmitTrapCheck(llvm::Value *Checked, SanitizerHandler CheckHandlerID, bool NoMerge=false, const TrapReason *TR=nullptr)
Create a basic block that will call the trap intrinsic, and emit a conditional branch to it,...
void FinishFunction(SourceLocation EndLoc=SourceLocation())
FinishFunction - Complete IR generation of the current function.
llvm::Value * EmitFromMemory(llvm::Value *Value, QualType Ty)
EmitFromMemory - Change a scalar value from its memory representation to its value representation.
llvm::Value * EmitCheckedArgForAssume(const Expr *E)
Emits an argument for a call to a __builtin_assume.
llvm::Value * EmitLoadOfCountedByField(const Expr *Base, const FieldDecl *FD, const FieldDecl *CountDecl)
Build an expression accessing the "counted_by" field.
Address GetAddrOfLocalVar(const VarDecl *VD)
GetAddrOfLocalVar - Return the address of a local variable.
llvm::Value * EmitNVPTXBuiltinExpr(unsigned BuiltinID, const CallExpr *E)
void EmitUnreachable(SourceLocation Loc)
Emit a reached-unreachable diagnostic if Loc is valid and runtime checking is enabled.
void ErrorUnsupported(const Stmt *S, const char *Type)
ErrorUnsupported - Print out an error that codegen doesn't support the specified stmt yet.
std::pair< llvm::Value *, llvm::Value * > ComplexPairTy
Address ReturnValue
ReturnValue - The temporary alloca to hold the return value.
LValue EmitLValue(const Expr *E, KnownNonNull_t IsKnownNonNull=NotKnownNonNull)
EmitLValue - Emit code to compute a designator that specifies the location of the expression.
bool ShouldXRayInstrumentFunction() const
ShouldXRayInstrument - Return true if the current function should be instrumented with XRay nop sleds...
llvm::LLVMContext & getLLVMContext()
static Destroyer destroyARCStrongPrecise
llvm::Value * EmitTargetBuiltinExpr(unsigned BuiltinID, const CallExpr *E, ReturnValueSlot ReturnValue)
EmitTargetBuiltinExpr - Emit the given builtin call.
void emitAlignmentAssumption(llvm::Value *PtrValue, QualType Ty, SourceLocation Loc, SourceLocation AssumptionLoc, llvm::Value *Alignment, llvm::Value *OffsetValue=nullptr)
llvm::Value * EmitSEHExceptionInfo()
llvm::Value * EmitHLSLBuiltinExpr(unsigned BuiltinID, const CallExpr *E, ReturnValueSlot ReturnValue)
void EmitARCIntrinsicUse(ArrayRef< llvm::Value * > values)
Given a number of pointers, inform the optimizer that they're being intrinsically used up until this ...
void EmitStoreOfScalar(llvm::Value *Value, Address Addr, bool Volatile, QualType Ty, AlignmentSource Source=AlignmentSource::Type, bool isInit=false, bool isNontemporal=false)
EmitStoreOfScalar - Store a scalar value to an address, taking care to appropriately convert from the...
void EmitBlock(llvm::BasicBlock *BB, bool IsFinished=false)
EmitBlock - Emit the given block.
This class organizes the cross-function state that is used while generating LLVM code.
llvm::Module & getModule() const
llvm::FunctionCallee CreateRuntimeFunction(llvm::FunctionType *Ty, StringRef Name, llvm::AttributeList ExtraAttrs=llvm::AttributeList(), bool Local=false, bool AssumeConvergent=false)
Create or return a runtime function declaration with the specified type and name.
llvm::Constant * getRawFunctionPointer(GlobalDecl GD, llvm::Type *Ty=nullptr)
Return a function pointer for a reference to the given function.
llvm::Constant * getBuiltinLibFunction(const FunctionDecl *FD, unsigned BuiltinID)
Given a builtin id for a function like "__builtin_fabsf", return a Function* for "fabsf".
DiagnosticsEngine & getDiags() const
void ErrorUnsupported(const Stmt *S, const char *Type)
Print out an error that codegen doesn't support the specified stmt yet.
const LangOptions & getLangOpts() const
CGCUDARuntime & getCUDARuntime()
Return a reference to the configured CUDA runtime.
CharUnits getNaturalTypeAlignment(QualType T, LValueBaseInfo *BaseInfo=nullptr, TBAAAccessInfo *TBAAInfo=nullptr, bool forPointeeType=false)
CodeGenTypes & getTypes()
CGOpenCLRuntime & getOpenCLRuntime()
Return a reference to the configured OpenCL runtime.
const TargetInfo & getTarget() const
const llvm::DataLayout & getDataLayout() const
void Error(SourceLocation loc, StringRef error)
Emit a general error that something can't be done.
CGCXXABI & getCXXABI() const
llvm::Constant * GetFunctionStart(const ValueDecl *Decl)
const llvm::Triple & getTriple() const
void DecorateInstructionWithTBAA(llvm::Instruction *Inst, TBAAAccessInfo TBAAInfo)
DecorateInstructionWithTBAA - Decorate the instruction with a TBAA tag.
TBAAAccessInfo getTBAAAccessInfo(QualType AccessType)
getTBAAAccessInfo - Get TBAA information that describes an access to an object of the given type.
ASTContext & getContext() const
const TargetCodeGenInfo & getTargetCodeGenInfo()
const CodeGenOptions & getCodeGenOpts() const
StringRef getMangledName(GlobalDecl GD)
llvm::LLVMContext & getLLVMContext()
llvm::Function * getIntrinsic(unsigned IID, ArrayRef< llvm::Type * > Tys={})
CGObjCRuntime & getObjCRuntime()
Return a reference to the configured Objective-C runtime.
void SetLLVMFunctionAttributes(GlobalDecl GD, const CGFunctionInfo &Info, llvm::Function *F, bool IsThunk)
Set the LLVM function attributes (sext, zext, etc).
void SetLLVMFunctionAttributesForDefinition(const Decl *D, llvm::Function *F)
Set the LLVM function attributes which only apply to a function definition.
ConstantAddress GetAddrOfConstantCString(const std::string &Str, const char *GlobalName=nullptr)
Returns a pointer to a character array containing the literal and a terminating '\0' character.
llvm::Type * ConvertType(QualType T)
ConvertType - Convert type T into a llvm::Type.
llvm::FunctionType * GetFunctionType(const CGFunctionInfo &Info)
GetFunctionType - Get the LLVM function type for.
const CGFunctionInfo & arrangeBuiltinFunctionDeclaration(QualType resultType, const FunctionArgList &args)
A builtin function is a freestanding function using the default C conventions.
const CGRecordLayout & getCGRecordLayout(const RecordDecl *)
getCGRecordLayout - Return record layout info for the given record decl.
const CGFunctionInfo & arrangeBuiltinFunctionCall(QualType resultType, const CallArgList &args)
llvm::Constant * emitAbstract(const Expr *E, QualType T)
Emit the result of the given expression as an abstract constant, asserting that it succeeded.
Information for lazily generating a cleanup.
FunctionArgList - Type for representing both the decl and type of parameters to a function.
LValue - This represents an lvalue reference.
llvm::Value * getPointer(CodeGenFunction &CGF) const
Address getAddress() const
void setNontemporal(bool Value)
RValue - This trivial value class is used to represent the result of an expression that is evaluated.
static RValue getIgnored()
static RValue get(llvm::Value *V)
static RValue getAggregate(Address addr, bool isVolatile=false)
Convert an Address to an RValue.
static RValue getComplex(llvm::Value *V1, llvm::Value *V2)
An abstract representation of an aligned address.
static RawAddress invalid()
ReturnValueSlot - Contains the address where the return value of a function can be stored,...
virtual bool supportsLibCall() const
supportsLibCall - Query to whether or not target supports all lib calls.
virtual llvm::Value * encodeReturnAddress(CodeGen::CodeGenFunction &CGF, llvm::Value *Address) const
Performs the code-generation required to convert the address of an instruction into a return address ...
virtual llvm::Value * decodeReturnAddress(CodeGen::CodeGenFunction &CGF, llvm::Value *Address) const
Performs the code-generation required to convert a return address as stored by the system into the ac...
virtual int getDwarfEHStackPointer(CodeGen::CodeGenModule &M) const
Determines the DWARF register number for the stack pointer, for exception-handling purposes.
virtual llvm::Value * testFPKind(llvm::Value *V, unsigned BuiltinID, CGBuilderTy &Builder, CodeGenModule &CGM) const
Performs a target specific test of a floating point value for things like IsNaN, Infinity,...
Complex values, per C99 6.2.5p11.
ConstStmtVisitor - This class implements a simple visitor for Stmt subclasses.
Represents a concrete matrix type with constant number of rows and columns.
Represents a sugar type with __counted_by or __sized_by annotations, including their _or_null variant...
DynamicCountPointerKind getKind() const
RecordDecl * getOuterLexicalRecordContext()
Retrieve the outermost lexically enclosing record context.
Decl - This represents one declaration (or definition), e.g.
static bool isFlexibleArrayMemberLike(const ASTContext &Context, const Decl *D, QualType Ty, LangOptions::StrictFlexArraysLevelKind StrictFlexArraysLevel, bool IgnoreTemplateOrMacroSubstitution)
Whether it resembles a flexible array member.
bool isImplicit() const
isImplicit - Indicates whether the declaration was implicitly generated by the implementation.
FunctionDecl * getAsFunction() LLVM_READONLY
Returns the function itself, or the templated function if this is a function template.
DeclContext * getDeclContext()
Concrete class used by the front-end to report problems and issues.
DiagnosticBuilder Report(SourceLocation Loc, unsigned DiagID)
Issue the message to the client.
This represents one expression.
bool EvaluateAsInt(EvalResult &Result, const ASTContext &Ctx, SideEffectsKind AllowSideEffects=SE_NoSideEffects, bool InConstantContext=false) const
EvaluateAsInt - Return true if this is a constant which we can fold and convert to an integer,...
Expr * IgnoreParenNoopCasts(const ASTContext &Ctx) LLVM_READONLY
Skip past any parentheses and casts which do not change the value (including ptr->int casts of the sa...
Expr * IgnoreParenCasts() LLVM_READONLY
Skip past any parentheses and casts which might surround this expression until reaching a fixed point...
llvm::APSInt EvaluateKnownConstInt(const ASTContext &Ctx, SmallVectorImpl< PartialDiagnosticAt > *Diag=nullptr) const
EvaluateKnownConstInt - Call EvaluateAsRValue and return the folded integer.
Expr * IgnoreParenImpCasts() LLVM_READONLY
Skip past any parentheses and implicit casts which might surround this expression until reaching a fi...
bool EvaluateAsFloat(llvm::APFloat &Result, const ASTContext &Ctx, SideEffectsKind AllowSideEffects=SE_NoSideEffects, bool InConstantContext=false) const
EvaluateAsFloat - Return true if this is a constant which we can fold and convert to a floating point...
@ NPC_ValueDependentIsNotNull
Specifies that a value-dependent expression should be considered to never be a null pointer constant.
bool EvaluateAsRValue(EvalResult &Result, const ASTContext &Ctx, bool InConstantContext=false) const
EvaluateAsRValue - Return true if this is a constant which we can fold to an rvalue using any crazy t...
bool HasSideEffects(const ASTContext &Ctx, bool IncludePossibleEffects=true) const
HasSideEffects - This routine returns true for all those expressions which have any effect other than...
std::optional< std::string > tryEvaluateString(ASTContext &Ctx) const
If the current Expr can be evaluated to a pointer to a null-terminated constant string,...
Expr * IgnoreImpCasts() LLVM_READONLY
Skip past any implicit casts which might surround this expression until reaching a fixed point.
NullPointerConstantKind isNullPointerConstant(ASTContext &Ctx, NullPointerConstantValueDependence NPC) const
isNullPointerConstant - C99 6.3.2.3p3 - Test if this reduces down to a Null pointer constant.
SourceLocation getExprLoc() const LLVM_READONLY
getExprLoc - Return the preferred location for the arrow when diagnosing a problem with a generic exp...
bool tryEvaluateObjectSize(uint64_t &Result, ASTContext &Ctx, unsigned Type) const
If the current Expr is a pointer, this will try to statically determine the number of bytes available...
const ValueDecl * getAsBuiltinConstantDeclRef(const ASTContext &Context) const
If this expression is an unambiguous reference to a single declaration, in the style of __builtin_fun...
Represents difference between two FPOptions values.
LangOptions::FPExceptionModeKind getExceptionMode() const
Represents a member of a struct/union/class.
const FieldDecl * findCountedByField() const
Find the FieldDecl specified in a FAM's "counted_by" attribute.
Represents a function declaration or definition.
const ParmVarDecl * getParamDecl(unsigned i) const
unsigned getBuiltinID(bool ConsiderWrapperFunctions=false) const
Returns a value indicating whether this function corresponds to a builtin function.
Represents a prototype with parameter type info, e.g.
GlobalDecl - represents a global declaration.
const Decl * getDecl() const
IdentifierInfo & get(StringRef Name)
Return the identifier token info for the specified named identifier.
ImplicitCastExpr - Allows us to explicitly represent implicit type conversions, which have no direct ...
static ImplicitParamDecl * Create(ASTContext &C, DeclContext *DC, SourceLocation IdLoc, IdentifierInfo *Id, QualType T, ImplicitParamKind ParamKind)
Create implicit parameter.
StrictFlexArraysLevelKind
MemberExpr - [C99 6.5.2.3] Structure and Union Members.
ValueDecl * getMemberDecl() const
Retrieve the member declaration to which this expression refers.
std::string getNameAsString() const
Get a human-readable name for the declaration, even if it is one of the special kinds of names (C++ c...
ParenExpr - This represents a parenthesized expression, e.g.
PointerType - C99 6.7.5.1 - Pointer Declarators.
A (possibly-)qualified type.
bool isVolatileQualified() const
Determine whether this type is volatile-qualified.
bool isNull() const
Return true if this QualType doesn't point to a type yet.
LangAS getAddressSpace() const
Return the address space of this type.
Represents a struct/union/class.
field_range fields() const
Scope - A scope is a transient data structure that is used while parsing the program.
Encodes a location in the source.
SourceLocation getBeginLoc() const LLVM_READONLY
Exposes information about the current target.
const llvm::Triple & getTriple() const
Returns the target triple of the primary target.
virtual bool checkArithmeticFenceSupported() const
Controls if __arithmetic_fence is supported in the targeted backend.
unsigned getSuitableAlign() const
Return the alignment that is the largest alignment ever used for any scalar/SIMD data type on the tar...
virtual std::string_view getClobbers() const =0
Returns a string of target-specific clobbers, in LLVM format.
The base class of the type hierarchy.
bool isBlockPointerType() const
bool isSignedIntegerType() const
Return true if this is an integer type that is signed, according to C99 6.2.5p4 [char,...
bool isComplexType() const
isComplexType() does not include complex integers (a GCC extension).
CXXRecordDecl * getAsCXXRecordDecl() const
Retrieves the CXXRecordDecl that this type refers to, either because the type is a RecordType or beca...
bool isCountAttributedType() const
bool isPointerType() const
bool isIntegerType() const
isIntegerType() does not include complex integers (a GCC extension).
const T * castAs() const
Member-template castAs<specific type>.
const CXXRecordDecl * getPointeeCXXRecordDecl() const
If this is a pointer or reference to a RecordType, return the CXXRecordDecl that the type refers to.
QualType getPointeeType() const
If this is a pointer, ObjC object pointer, or block pointer, this returns the respective pointee.
bool isIntegralOrEnumerationType() const
Determine whether this type is an integral or enumeration type.
bool isObjCObjectPointerType() const
bool isFloatingType() const
const T * getAs() const
Member-template getAs<specific type>'.
UnaryOperator - This represents the unary-expression's (except sizeof and alignof),...
Represents a GCC generic vector type.
SmallVector< OSLogBufferItem, 4 > Items
unsigned char getNumArgsByte() const
unsigned char getSummaryByte() const
Defines the clang::TargetInfo interface.
llvm::Constant * initializationPatternFor(CodeGenModule &, llvm::Type *)
TypeEvaluationKind
The kind of evaluation to perform on values of a particular type.
@ EHCleanup
Denotes a cleanup that should run when a scope is exited using exceptional control flow (a throw stat...
constexpr XRayInstrMask Typed
constexpr XRayInstrMask Custom
bool computeOSLogBufferLayout(clang::ASTContext &Ctx, const clang::CallExpr *E, OSLogBufferLayout &layout)
bool Mul(InterpState &S, CodePtr OpPC)
The JSON file list parser is used to communicate input to InstallAPI.
@ Success
Annotation was successful.
@ Asm
Assembly: we accept this only so that we can preprocess it.
@ Result
The result type of a method or function.
LangAS
Defines the address space values used by the address space qualifier of QualType.
const FunctionProtoType * T
SyncScope
Defines sync scope values used internally by clang.
llvm::StringRef getAsString(SyncScope S)
@ Other
Other implicit parameter.
Diagnostic wrappers for TextAPI types for error reporting.
llvm::PointerType * VoidPtrTy
llvm::IntegerType * Int64Ty
llvm::PointerType * ConstGlobalsPtrTy
void* in the address space for constant globals
llvm::IntegerType * Int8Ty
i8, i16, i32, and i64
llvm::IntegerType * SizeTy
llvm::IntegerType * Int32Ty
llvm::IntegerType * IntPtrTy
llvm::IntegerType * IntTy
int
llvm::PointerType * Int8PtrTy
llvm::PointerType * UnqualPtrTy
llvm::PointerType * AllocaInt8PtrTy
LangAS getASTAllocaAddressSpace() const
EvalResult is a struct with detailed info about an evaluated expression.
APValue Val
Val - This is the value the expression can be folded to.
void clear(SanitizerMask K=SanitizerKind::All)
Disable the sanitizers specified in K.
void set(SanitizerMask K, bool Value)
Enable or disable a certain (single) sanitizer.
bool has(SanitizerMask K) const
Check if a certain (single) sanitizer is enabled.
bool has(XRayInstrMask K) const
#define scalbln(__x, __y)
#define copysign(__x, __y)
#define remquo(__x, __y, __z)
#define nextafter(__x, __y)
#define nexttoward(__x, __y)
#define remainder(__x, __y)
#define fma(__x, __y, __z)