author     Angelos Mouzakitis <a.mouzakitis@virtualopensystems.com>  2023-10-10 14:33:42 +0000
committer  Angelos Mouzakitis <a.mouzakitis@virtualopensystems.com>  2023-10-10 14:33:42 +0000
commit     af1a266670d040d2f4083ff309d732d648afba2a (patch)
tree       2fc46203448ddcc6f81546d379abfaeb323575e9 /capstone/suite/synctools/tablegen/include/llvm
parent     e02cda008591317b1625707ff8e115a4841aa889 (diff)
Add submodule dependency files (HEAD, master)
Change-Id: Iaf8d18082d3991dec7c0ebbea540f092188eb4ec
Diffstat (limited to 'capstone/suite/synctools/tablegen/include/llvm')
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/CodeGen/SDNodeProperties.td | 34
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/CodeGen/ValueTypes.td | 169
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/Attributes.td | 239
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/DebugInfoFlags.def | 64
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/Instruction.def | 231
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/Intrinsics.td | 1010
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsAArch64.td | 669
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsAMDGPU.td | 1340
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsARM.td | 770
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsBPF.td | 24
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsHexagon.td | 10975
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsMips.td | 1771
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsNVVM.td | 4047
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsPowerPC.td | 1164
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsSystemZ.td | 431
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsWebAssembly.td | 67
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsX86.td | 5215
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsXCore.td | 121
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/Metadata.def | 126
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/RuntimeLibcalls.def | 527
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/IR/Value.def | 117
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/TableGen/SearchableTable.td | 136
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/Target/GenericOpcodes.td | 672
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/Target/GlobalISel/RegisterBank.td | 16
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/Target/GlobalISel/SelectionDAGCompat.td | 131
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/Target/GlobalISel/Target.td | 61
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/Target/Target.td | 1556
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/Target/TargetCallingConv.td | 187
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/Target/TargetInstrPredicate.td | 197
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/Target/TargetItinerary.td | 152
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/Target/TargetSchedule.td | 553
-rw-r--r-- capstone/suite/synctools/tablegen/include/llvm/Target/TargetSelectionDAG.td | 1335
32 files changed, 34107 insertions, 0 deletions
diff --git a/capstone/suite/synctools/tablegen/include/llvm/CodeGen/SDNodeProperties.td b/capstone/suite/synctools/tablegen/include/llvm/CodeGen/SDNodeProperties.td
new file mode 100644
index 000000000..83bbab2fd
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/CodeGen/SDNodeProperties.td
@@ -0,0 +1,34 @@
+//===- SDNodeProperties.td - Common code for DAG isels ---*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+
+class SDNodeProperty;
+
+// Selection DAG Pattern Operations
+class SDPatternOperator {
+ list<SDNodeProperty> Properties = [];
+}
+
+//===----------------------------------------------------------------------===//
+// Selection DAG Node Properties.
+//
+// Note: These are hard coded into tblgen.
+//
+def SDNPCommutative : SDNodeProperty; // X op Y == Y op X
+def SDNPAssociative : SDNodeProperty; // (X op Y) op Z == X op (Y op Z)
+def SDNPHasChain : SDNodeProperty; // R/W chain operand and result
+def SDNPOutGlue : SDNodeProperty; // Write a flag result
+def SDNPInGlue : SDNodeProperty; // Read a flag operand
+def SDNPOptInGlue : SDNodeProperty; // Optionally read a flag operand
+def SDNPMayStore : SDNodeProperty; // May write to memory, sets 'mayStore'.
+def SDNPMayLoad : SDNodeProperty; // May read memory, sets 'mayLoad'.
+def SDNPSideEffect : SDNodeProperty; // Sets 'HasUnmodelledSideEffects'.
+def SDNPMemOperand : SDNodeProperty; // Touches memory, has assoc MemOperand
+def SDNPVariadic : SDNodeProperty; // Node has variable arguments.
+def SDNPWantRoot : SDNodeProperty; // ComplexPattern gets the root of match
+def SDNPWantParent : SDNodeProperty; // ComplexPattern gets the parent
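
[Editor's note] SDPatternOperator is the shared base for anything that can appear in an instruction-selection pattern (both SDNodes and the intrinsics defined later in this diff derive from it), and the Properties list is how a definition opts into the flags above. A minimal hedged sketch of attaching properties to a derived definition; the name my_commutative_op is invented for illustration, while in-tree the SDNode class in Target/TargetSelectionDAG.td is what actually sets this field:

// Hypothetical pattern operator that tblgen would treat as commutative and
// associative when matching DAG patterns.
def my_commutative_op : SDPatternOperator {
  let Properties = [SDNPCommutative, SDNPAssociative];
}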
diff --git a/capstone/suite/synctools/tablegen/include/llvm/CodeGen/ValueTypes.td b/capstone/suite/synctools/tablegen/include/llvm/CodeGen/ValueTypes.td
new file mode 100644
index 000000000..0abb4ece1
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/CodeGen/ValueTypes.td
@@ -0,0 +1,169 @@
+//===- ValueTypes.td - ValueType definitions ---------------*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// Value types - These values correspond to the register types defined in the
+// MachineValueTypes.h file. If you update anything here, you must update it
+// there as well!
+//
+//===----------------------------------------------------------------------===//
+
+class ValueType<int size, int value> {
+ string Namespace = "MVT";
+ int Size = size;
+ int Value = value;
+}
+
+def OtherVT: ValueType<0 , 1>; // "Other" value
+def i1 : ValueType<1 , 2>; // One bit boolean value
+def i8 : ValueType<8 , 3>; // 8-bit integer value
+def i16 : ValueType<16 , 4>; // 16-bit integer value
+def i32 : ValueType<32 , 5>; // 32-bit integer value
+def i64 : ValueType<64 , 6>; // 64-bit integer value
+def i128 : ValueType<128, 7>; // 128-bit integer value
+def f16 : ValueType<16 , 8>; // 16-bit floating point value
+def f32 : ValueType<32 , 9>; // 32-bit floating point value
+def f64 : ValueType<64 , 10>; // 64-bit floating point value
+def f80 : ValueType<80 , 11>; // 80-bit floating point value
+def f128 : ValueType<128, 12>; // 128-bit floating point value
+def ppcf128: ValueType<128, 13>; // PPC 128-bit floating point value
+
+def v1i1 : ValueType<1 , 14>; // 1 x i1 vector value
+def v2i1 : ValueType<2 , 15>; // 2 x i1 vector value
+def v4i1 : ValueType<4 , 16>; // 4 x i1 vector value
+def v8i1 : ValueType<8 , 17>; // 8 x i1 vector value
+def v16i1 : ValueType<16, 18>; // 16 x i1 vector value
+def v32i1 : ValueType<32 , 19>; // 32 x i1 vector value
+def v64i1 : ValueType<64 , 20>; // 64 x i1 vector value
+def v128i1 : ValueType<128, 21>; // 128 x i1 vector value
+def v512i1 : ValueType<512, 22>; // 512 x i1 vector value
+def v1024i1: ValueType<1024,23>; //1024 x i1 vector value
+
+def v1i8 : ValueType<8, 24>; // 1 x i8 vector value
+def v2i8 : ValueType<16 , 25>; // 2 x i8 vector value
+def v4i8 : ValueType<32 , 26>; // 4 x i8 vector value
+def v8i8 : ValueType<64 , 27>; // 8 x i8 vector value
+def v16i8 : ValueType<128, 28>; // 16 x i8 vector value
+def v32i8 : ValueType<256, 29>; // 32 x i8 vector value
+def v64i8 : ValueType<512, 30>; // 64 x i8 vector value
+def v128i8 : ValueType<1024,31>; //128 x i8 vector value
+def v256i8 : ValueType<2048,32>; //256 x i8 vector value
+
+def v1i16 : ValueType<16 , 33>; // 1 x i16 vector value
+def v2i16 : ValueType<32 , 34>; // 2 x i16 vector value
+def v4i16 : ValueType<64 , 35>; // 4 x i16 vector value
+def v8i16 : ValueType<128, 36>; // 8 x i16 vector value
+def v16i16 : ValueType<256, 37>; // 16 x i16 vector value
+def v32i16 : ValueType<512, 38>; // 32 x i16 vector value
+def v64i16 : ValueType<1024,39>; // 64 x i16 vector value
+def v128i16: ValueType<2048,40>; //128 x i16 vector value
+
+def v1i32 : ValueType<32 , 41>; // 1 x i32 vector value
+def v2i32 : ValueType<64 , 42>; // 2 x i32 vector value
+def v4i32 : ValueType<128, 43>; // 4 x i32 vector value
+def v8i32 : ValueType<256, 44>; // 8 x i32 vector value
+def v16i32 : ValueType<512, 45>; // 16 x i32 vector value
+def v32i32 : ValueType<1024,46>; // 32 x i32 vector value
+def v64i32 : ValueType<2048,47>; // 64 x i32 vector value
+
+def v1i64 : ValueType<64 , 48>; // 1 x i64 vector value
+def v2i64 : ValueType<128, 49>; // 2 x i64 vector value
+def v4i64 : ValueType<256, 50>; // 4 x i64 vector value
+def v8i64 : ValueType<512, 51>; // 8 x i64 vector value
+def v16i64 : ValueType<1024,52>; // 16 x i64 vector value
+def v32i64 : ValueType<2048,53>; // 32 x i64 vector value
+
+def v1i128 : ValueType<128, 54>; // 1 x i128 vector value
+
+def nxv1i1 : ValueType<1, 55>; // n x 1 x i1 vector value
+def nxv2i1 : ValueType<2, 56>; // n x 2 x i1 vector value
+def nxv4i1 : ValueType<4, 57>; // n x 4 x i1 vector value
+def nxv8i1 : ValueType<8, 58>; // n x 8 x i1 vector value
+def nxv16i1 : ValueType<16, 59>; // n x 16 x i1 vector value
+def nxv32i1 : ValueType<32, 60>; // n x 32 x i1 vector value
+
+def nxv1i8 : ValueType<8, 61>; // n x 1 x i8 vector value
+def nxv2i8 : ValueType<16, 62>; // n x 2 x i8 vector value
+def nxv4i8 : ValueType<32, 63>; // n x 4 x i8 vector value
+def nxv8i8 : ValueType<64, 64>; // n x 8 x i8 vector value
+def nxv16i8 : ValueType<128, 65>; // n x 16 x i8 vector value
+def nxv32i8 : ValueType<256, 66>; // n x 32 x i8 vector value
+
+def nxv1i16 : ValueType<16, 67>; // n x 1 x i16 vector value
+def nxv2i16 : ValueType<32, 68>; // n x 2 x i16 vector value
+def nxv4i16 : ValueType<64, 69>; // n x 4 x i16 vector value
+def nxv8i16 : ValueType<128, 70>; // n x 8 x i16 vector value
+def nxv16i16: ValueType<256, 71>; // n x 16 x i16 vector value
+def nxv32i16: ValueType<512, 72>; // n x 32 x i16 vector value
+
+def nxv1i32 : ValueType<32, 73>; // n x 1 x i32 vector value
+def nxv2i32 : ValueType<64, 74>; // n x 2 x i32 vector value
+def nxv4i32 : ValueType<128, 75>; // n x 4 x i32 vector value
+def nxv8i32 : ValueType<256, 76>; // n x 8 x i32 vector value
+def nxv16i32: ValueType<512, 77>; // n x 16 x i32 vector value
+def nxv32i32: ValueType<1024,78>; // n x 32 x i32 vector value
+
+def nxv1i64 : ValueType<64, 79>; // n x 1 x i64 vector value
+def nxv2i64 : ValueType<128, 80>; // n x 2 x i64 vector value
+def nxv4i64 : ValueType<256, 81>; // n x 4 x i64 vector value
+def nxv8i64 : ValueType<512, 82>; // n x 8 x i64 vector value
+def nxv16i64: ValueType<1024,83>; // n x 16 x i64 vector value
+def nxv32i64: ValueType<2048,84>; // n x 32 x i64 vector value
+
+def v2f16 : ValueType<32 , 85>; // 2 x f16 vector value
+def v4f16 : ValueType<64 , 86>; // 4 x f16 vector value
+def v8f16 : ValueType<128, 87>; // 8 x f16 vector value
+def v1f32 : ValueType<32 , 88>; // 1 x f32 vector value
+def v2f32 : ValueType<64 , 89>; // 2 x f32 vector value
+def v4f32 : ValueType<128, 90>; // 4 x f32 vector value
+def v8f32 : ValueType<256, 91>; // 8 x f32 vector value
+def v16f32 : ValueType<512, 92>; // 16 x f32 vector value
+def v1f64 : ValueType<64, 93>; // 1 x f64 vector value
+def v2f64 : ValueType<128, 94>; // 2 x f64 vector value
+def v4f64 : ValueType<256, 95>; // 4 x f64 vector value
+def v8f64 : ValueType<512, 96>; // 8 x f64 vector value
+
+def nxv2f16 : ValueType<32 , 97>; // n x 2 x f16 vector value
+def nxv4f16 : ValueType<64 , 98>; // n x 4 x f16 vector value
+def nxv8f16 : ValueType<128, 99>; // n x 8 x f16 vector value
+def nxv1f32 : ValueType<32 , 100>; // n x 1 x f32 vector value
+def nxv2f32 : ValueType<64 , 101>; // n x 2 x f32 vector value
+def nxv4f32 : ValueType<128, 102>; // n x 4 x f32 vector value
+def nxv8f32 : ValueType<256, 103>; // n x 8 x f32 vector value
+def nxv16f32 : ValueType<512, 104>; // n x 16 x f32 vector value
+def nxv1f64 : ValueType<64, 105>; // n x 1 x f64 vector value
+def nxv2f64 : ValueType<128, 106>; // n x 2 x f64 vector value
+def nxv4f64 : ValueType<256, 107>; // n x 4 x f64 vector value
+def nxv8f64 : ValueType<512, 108>; // n x 8 x f64 vector value
+
+def x86mmx : ValueType<64 , 109>; // X86 MMX value
+def FlagVT : ValueType<0 , 110>; // Pre-RA sched glue
+def isVoid : ValueType<0 , 111>; // Produces no value
+def untyped: ValueType<8 , 112>; // Produces an untyped value
+def ExceptRef: ValueType<0, 113>; // WebAssembly's except_ref type
+def token : ValueType<0 , 248>; // TokenTy
+def MetadataVT: ValueType<0, 249>; // Metadata
+
+// Pseudo valuetype mapped to the current pointer size to any address space.
+// Should only be used in TableGen.
+def iPTRAny : ValueType<0, 250>;
+
+// Pseudo valuetype to represent "vector of any size"
+def vAny : ValueType<0 , 251>;
+
+// Pseudo valuetype to represent "float of any format"
+def fAny : ValueType<0 , 252>;
+
+// Pseudo valuetype to represent "integer of any bit width"
+def iAny : ValueType<0 , 253>;
+
+// Pseudo valuetype mapped to the current pointer size.
+def iPTR : ValueType<0 , 254>;
+
+// Pseudo valuetype to represent "any type of any size".
+def Any : ValueType<0 , 255>;
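
[Editor's note] Every entry above follows the same encoding: Size is the total width in bits (for fixed vectors, lane count times element width) and Value is the numeric MVT identifier that, as the header comment warns, must stay in sync with MachineValueTypes.h. A hedged sketch of what one more fixed vector type would look like; v3i32_example and the Value 120 are invented here and are not a real MVT:

// 3 x i32 occupies 3 * 32 = 96 bits; the second operand is the MVT number,
// which would also need a matching entry in MachineValueTypes.h.
def v3i32_example : ValueType<96, 120>;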
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/Attributes.td b/capstone/suite/synctools/tablegen/include/llvm/IR/Attributes.td
new file mode 100644
index 000000000..39978c41a
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/Attributes.td
@@ -0,0 +1,239 @@
+/// Attribute base class.
+class Attr<string S> {
+ // String representation of this attribute in the IR.
+ string AttrString = S;
+}
+
+/// Enum attribute.
+class EnumAttr<string S> : Attr<S>;
+
+/// StringBool attribute.
+class StrBoolAttr<string S> : Attr<S>;
+
+/// Target-independent enum attributes.
+
+/// Alignment of parameter (5 bits) stored as log2 of alignment with +1 bias.
+/// 0 means unaligned (different from align(1)).
+def Alignment : EnumAttr<"align">;
+
+/// The result of the function is guaranteed to point to a number of bytes that
+/// we can determine if we know the value of the function's arguments.
+def AllocSize : EnumAttr<"allocsize">;
+
+/// inline=always.
+def AlwaysInline : EnumAttr<"alwaysinline">;
+
+/// Function can access memory only using pointers based on its arguments.
+def ArgMemOnly : EnumAttr<"argmemonly">;
+
+/// Callee is recognized as a builtin, despite nobuiltin attribute on its
+/// declaration.
+def Builtin : EnumAttr<"builtin">;
+
+/// Pass structure by value.
+def ByVal : EnumAttr<"byval">;
+
+/// Marks function as being in a cold path.
+def Cold : EnumAttr<"cold">;
+
+/// Can only be moved to control-equivalent blocks.
+def Convergent : EnumAttr<"convergent">;
+
+/// Pointer is known to be dereferenceable.
+def Dereferenceable : EnumAttr<"dereferenceable">;
+
+/// Pointer is either null or dereferenceable.
+def DereferenceableOrNull : EnumAttr<"dereferenceable_or_null">;
+
+/// Function may only access memory that is inaccessible from IR.
+def InaccessibleMemOnly : EnumAttr<"inaccessiblememonly">;
+
+/// Function may only access memory that is either inaccessible from the IR,
+/// or pointed to by its pointer arguments.
+def InaccessibleMemOrArgMemOnly : EnumAttr<"inaccessiblemem_or_argmemonly">;
+
+/// Pass structure in an alloca.
+def InAlloca : EnumAttr<"inalloca">;
+
+/// Source said inlining was desirable.
+def InlineHint : EnumAttr<"inlinehint">;
+
+/// Force argument to be passed in register.
+def InReg : EnumAttr<"inreg">;
+
+/// Build jump-instruction tables and replace refs.
+def JumpTable : EnumAttr<"jumptable">;
+
+/// Function must be optimized for size first.
+def MinSize : EnumAttr<"minsize">;
+
+/// Naked function.
+def Naked : EnumAttr<"naked">;
+
+/// Nested function static chain.
+def Nest : EnumAttr<"nest">;
+
+/// Considered to not alias after call.
+def NoAlias : EnumAttr<"noalias">;
+
+/// Callee isn't recognized as a builtin.
+def NoBuiltin : EnumAttr<"nobuiltin">;
+
+/// Function creates no aliases of pointer.
+def NoCapture : EnumAttr<"nocapture">;
+
+/// Call cannot be duplicated.
+def NoDuplicate : EnumAttr<"noduplicate">;
+
+/// Disable implicit floating point insts.
+def NoImplicitFloat : EnumAttr<"noimplicitfloat">;
+
+/// inline=never.
+def NoInline : EnumAttr<"noinline">;
+
+/// Function is called early and/or often, so lazy binding isn't worthwhile.
+def NonLazyBind : EnumAttr<"nonlazybind">;
+
+/// Pointer is known to be not null.
+def NonNull : EnumAttr<"nonnull">;
+
+/// The function does not recurse.
+def NoRecurse : EnumAttr<"norecurse">;
+
+/// Disable redzone.
+def NoRedZone : EnumAttr<"noredzone">;
+
+/// Mark the function as not returning.
+def NoReturn : EnumAttr<"noreturn">;
+
+/// Disable Indirect Branch Tracking.
+def NoCfCheck : EnumAttr<"nocf_check">;
+
+/// Function doesn't unwind stack.
+def NoUnwind : EnumAttr<"nounwind">;
+
+/// Select optimizations for best fuzzing signal.
+def OptForFuzzing : EnumAttr<"optforfuzzing">;
+
+/// opt_size.
+def OptimizeForSize : EnumAttr<"optsize">;
+
+/// Function must not be optimized.
+def OptimizeNone : EnumAttr<"optnone">;
+
+/// Function does not access memory.
+def ReadNone : EnumAttr<"readnone">;
+
+/// Function only reads from memory.
+def ReadOnly : EnumAttr<"readonly">;
+
+/// Return value is always equal to this argument.
+def Returned : EnumAttr<"returned">;
+
+/// Function can return twice.
+def ReturnsTwice : EnumAttr<"returns_twice">;
+
+/// Safe Stack protection.
+def SafeStack : EnumAttr<"safestack">;
+
+/// Shadow Call Stack protection.
+def ShadowCallStack : EnumAttr<"shadowcallstack">;
+
+/// Sign extended before/after call.
+def SExt : EnumAttr<"signext">;
+
+/// Alignment of stack for function (3 bits) stored as log2 of alignment with
+/// +1 bias; 0 means unaligned (different from alignstack=(1)).
+def StackAlignment : EnumAttr<"alignstack">;
+
+/// Function can be speculated.
+def Speculatable : EnumAttr<"speculatable">;
+
+/// Stack protection.
+def StackProtect : EnumAttr<"ssp">;
+
+/// Stack protection required.
+def StackProtectReq : EnumAttr<"sspreq">;
+
+/// Strong Stack protection.
+def StackProtectStrong : EnumAttr<"sspstrong">;
+
+/// Function was called in a scope requiring strict floating point semantics.
+def StrictFP : EnumAttr<"strictfp">;
+
+/// Hidden pointer to structure to return.
+def StructRet : EnumAttr<"sret">;
+
+/// AddressSanitizer is on.
+def SanitizeAddress : EnumAttr<"sanitize_address">;
+
+/// ThreadSanitizer is on.
+def SanitizeThread : EnumAttr<"sanitize_thread">;
+
+/// MemorySanitizer is on.
+def SanitizeMemory : EnumAttr<"sanitize_memory">;
+
+/// HWAddressSanitizer is on.
+def SanitizeHWAddress : EnumAttr<"sanitize_hwaddress">;
+
+/// Argument is swift error.
+def SwiftError : EnumAttr<"swifterror">;
+
+/// Argument is swift self/context.
+def SwiftSelf : EnumAttr<"swiftself">;
+
+/// Function must be in an unwind table.
+def UWTable : EnumAttr<"uwtable">;
+
+/// Function only writes to memory.
+def WriteOnly : EnumAttr<"writeonly">;
+
+/// Zero extended before/after call.
+def ZExt : EnumAttr<"zeroext">;
+
+/// Target-independent string attributes.
+def LessPreciseFPMAD : StrBoolAttr<"less-precise-fpmad">;
+def NoInfsFPMath : StrBoolAttr<"no-infs-fp-math">;
+def NoNansFPMath : StrBoolAttr<"no-nans-fp-math">;
+def UnsafeFPMath : StrBoolAttr<"unsafe-fp-math">;
+def NoJumpTables : StrBoolAttr<"no-jump-tables">;
+def ProfileSampleAccurate : StrBoolAttr<"profile-sample-accurate">;
+
+class CompatRule<string F> {
+ // The name of the function called to check the attribute of the caller and
+ // callee and decide whether inlining should be allowed. The function's
+ // signature must match "bool(const Function&, const Function &)", where the
+ // first parameter is the reference to the caller and the second parameter is
+ // the reference to the callee. It must return false if the attributes of the
+ // caller and callee are incompatible, and true otherwise.
+ string CompatFunc = F;
+}
+
+def : CompatRule<"isEqual<SanitizeAddressAttr>">;
+def : CompatRule<"isEqual<SanitizeThreadAttr>">;
+def : CompatRule<"isEqual<SanitizeMemoryAttr>">;
+def : CompatRule<"isEqual<SanitizeHWAddressAttr>">;
+def : CompatRule<"isEqual<SafeStackAttr>">;
+def : CompatRule<"isEqual<ShadowCallStackAttr>">;
+
+class MergeRule<string F> {
+ // The name of the function called to merge the attributes of the caller and
+ // callee. The function's signature must match
+ // "void(Function&, const Function &)", where the first parameter is the
+ // reference to the caller and the second parameter is the reference to the
+ // callee.
+ string MergeFunc = F;
+}
+
+def : MergeRule<"setAND<LessPreciseFPMADAttr>">;
+def : MergeRule<"setAND<NoInfsFPMathAttr>">;
+def : MergeRule<"setAND<NoNansFPMathAttr>">;
+def : MergeRule<"setAND<UnsafeFPMathAttr>">;
+def : MergeRule<"setOR<NoImplicitFloatAttr>">;
+def : MergeRule<"setOR<NoJumpTablesAttr>">;
+def : MergeRule<"setOR<ProfileSampleAccurateAttr>">;
+def : MergeRule<"adjustCallerSSPLevel">;
+def : MergeRule<"adjustCallerStackProbes">;
+def : MergeRule<"adjustCallerStackProbeSize">;
+def : MergeRule<"adjustMinLegalVectorWidth">;
+def : MergeRule<"adjustNullPointerValidAttr">;
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/DebugInfoFlags.def b/capstone/suite/synctools/tablegen/include/llvm/IR/DebugInfoFlags.def
new file mode 100644
index 000000000..b1f5fac64
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/DebugInfoFlags.def
@@ -0,0 +1,64 @@
+//===- llvm/IR/DebugInfoFlags.def - Debug info flag definitions -*- C++ -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// Macros for running through debug info flags.
+//
+//===----------------------------------------------------------------------===//
+
+// TODO: Add other DW-based macros.
+#ifndef HANDLE_DI_FLAG
+#error "Missing macro definition of HANDLE_DI_FLAG"
+#endif
+
+HANDLE_DI_FLAG(0, Zero) // Use it as zero value.
+ // For example: void foo(DIFlags Flags = FlagZero).
+HANDLE_DI_FLAG(1, Private)
+HANDLE_DI_FLAG(2, Protected)
+HANDLE_DI_FLAG(3, Public)
+HANDLE_DI_FLAG((1 << 2), FwdDecl)
+HANDLE_DI_FLAG((1 << 3), AppleBlock)
+HANDLE_DI_FLAG((1 << 4), BlockByrefStruct)
+HANDLE_DI_FLAG((1 << 5), Virtual)
+HANDLE_DI_FLAG((1 << 6), Artificial)
+HANDLE_DI_FLAG((1 << 7), Explicit)
+HANDLE_DI_FLAG((1 << 8), Prototyped)
+HANDLE_DI_FLAG((1 << 9), ObjcClassComplete)
+HANDLE_DI_FLAG((1 << 10), ObjectPointer)
+HANDLE_DI_FLAG((1 << 11), Vector)
+HANDLE_DI_FLAG((1 << 12), StaticMember)
+HANDLE_DI_FLAG((1 << 13), LValueReference)
+HANDLE_DI_FLAG((1 << 14), RValueReference)
+// 15 was formerly ExternalTypeRef, but this was never used.
+HANDLE_DI_FLAG((1 << 15), Reserved)
+HANDLE_DI_FLAG((1 << 16), SingleInheritance)
+HANDLE_DI_FLAG((2 << 16), MultipleInheritance)
+HANDLE_DI_FLAG((3 << 16), VirtualInheritance)
+HANDLE_DI_FLAG((1 << 18), IntroducedVirtual)
+HANDLE_DI_FLAG((1 << 19), BitField)
+HANDLE_DI_FLAG((1 << 20), NoReturn)
+HANDLE_DI_FLAG((1 << 21), MainSubprogram)
+HANDLE_DI_FLAG((1 << 22), TypePassByValue)
+HANDLE_DI_FLAG((1 << 23), TypePassByReference)
+HANDLE_DI_FLAG((1 << 24), FixedEnum)
+HANDLE_DI_FLAG((1 << 25), Thunk)
+HANDLE_DI_FLAG((1 << 26), Trivial)
+
+// To avoid needing a dedicated value for IndirectVirtualBase, we use
+// the bitwise or of Virtual and FwdDecl, which does not otherwise
+// make sense for inheritance.
+HANDLE_DI_FLAG((1 << 2) | (1 << 5), IndirectVirtualBase)
+
+#ifdef DI_FLAG_LARGEST_NEEDED
+// Intended to be used with ADT/BitmaskEnum.h.
+// NOTE: This must always equal the largest flag; check this when adding new flags.
+HANDLE_DI_FLAG((1 << 26), Largest)
+#undef DI_FLAG_LARGEST_NEEDED
+#endif
+
+#undef HANDLE_DI_FLAG
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/Instruction.def b/capstone/suite/synctools/tablegen/include/llvm/IR/Instruction.def
new file mode 100644
index 000000000..86617299c
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/Instruction.def
@@ -0,0 +1,231 @@
+//===-- llvm/Instruction.def - File that describes Instructions -*- C++ -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file contains descriptions of the various LLVM instructions. This is
+// used as a central place for enumerating the different instructions and
+// should eventually be the place to put comments about the instructions.
+//
+//===----------------------------------------------------------------------===//
+
+// NOTE: NO INCLUDE GUARD DESIRED!
+
+// Provide definitions of macros so that users of this file do not have to
+// define everything to use it...
+//
+#ifndef FIRST_TERM_INST
+#define FIRST_TERM_INST(num)
+#endif
+#ifndef HANDLE_TERM_INST
+#ifndef HANDLE_INST
+#define HANDLE_TERM_INST(num, opcode, Class)
+#else
+#define HANDLE_TERM_INST(num, opcode, Class) HANDLE_INST(num, opcode, Class)
+#endif
+#endif
+#ifndef LAST_TERM_INST
+#define LAST_TERM_INST(num)
+#endif
+
+#ifndef FIRST_BINARY_INST
+#define FIRST_BINARY_INST(num)
+#endif
+#ifndef HANDLE_BINARY_INST
+#ifndef HANDLE_INST
+#define HANDLE_BINARY_INST(num, opcode, instclass)
+#else
+#define HANDLE_BINARY_INST(num, opcode, Class) HANDLE_INST(num, opcode, Class)
+#endif
+#endif
+#ifndef LAST_BINARY_INST
+#define LAST_BINARY_INST(num)
+#endif
+
+#ifndef FIRST_MEMORY_INST
+#define FIRST_MEMORY_INST(num)
+#endif
+#ifndef HANDLE_MEMORY_INST
+#ifndef HANDLE_INST
+#define HANDLE_MEMORY_INST(num, opcode, Class)
+#else
+#define HANDLE_MEMORY_INST(num, opcode, Class) HANDLE_INST(num, opcode, Class)
+#endif
+#endif
+#ifndef LAST_MEMORY_INST
+#define LAST_MEMORY_INST(num)
+#endif
+
+#ifndef FIRST_CAST_INST
+#define FIRST_CAST_INST(num)
+#endif
+#ifndef HANDLE_CAST_INST
+#ifndef HANDLE_INST
+#define HANDLE_CAST_INST(num, opcode, Class)
+#else
+#define HANDLE_CAST_INST(num, opcode, Class) HANDLE_INST(num, opcode, Class)
+#endif
+#endif
+#ifndef LAST_CAST_INST
+#define LAST_CAST_INST(num)
+#endif
+
+#ifndef FIRST_FUNCLETPAD_INST
+#define FIRST_FUNCLETPAD_INST(num)
+#endif
+#ifndef HANDLE_FUNCLETPAD_INST
+#ifndef HANDLE_INST
+#define HANDLE_FUNCLETPAD_INST(num, opcode, Class)
+#else
+#define HANDLE_FUNCLETPAD_INST(num, opcode, Class) HANDLE_INST(num, opcode, Class)
+#endif
+#endif
+#ifndef LAST_FUNCLETPAD_INST
+#define LAST_FUNCLETPAD_INST(num)
+#endif
+
+#ifndef FIRST_OTHER_INST
+#define FIRST_OTHER_INST(num)
+#endif
+#ifndef HANDLE_OTHER_INST
+#ifndef HANDLE_INST
+#define HANDLE_OTHER_INST(num, opcode, Class)
+#else
+#define HANDLE_OTHER_INST(num, opcode, Class) HANDLE_INST(num, opcode, Class)
+#endif
+#endif
+#ifndef LAST_OTHER_INST
+#define LAST_OTHER_INST(num)
+#endif
+
+#ifndef HANDLE_USER_INST
+#define HANDLE_USER_INST(num, opc, Class) HANDLE_OTHER_INST(num, opc, Class)
+#endif
+
+// Terminator Instructions - These instructions are used to terminate a basic
+// block of the program. Every basic block must end with one of these
+// instructions for it to be a well formed basic block.
+//
+ FIRST_TERM_INST ( 1)
+HANDLE_TERM_INST ( 1, Ret , ReturnInst)
+HANDLE_TERM_INST ( 2, Br , BranchInst)
+HANDLE_TERM_INST ( 3, Switch , SwitchInst)
+HANDLE_TERM_INST ( 4, IndirectBr , IndirectBrInst)
+HANDLE_TERM_INST ( 5, Invoke , InvokeInst)
+HANDLE_TERM_INST ( 6, Resume , ResumeInst)
+HANDLE_TERM_INST ( 7, Unreachable , UnreachableInst)
+HANDLE_TERM_INST ( 8, CleanupRet , CleanupReturnInst)
+HANDLE_TERM_INST ( 9, CatchRet , CatchReturnInst)
+HANDLE_TERM_INST (10, CatchSwitch , CatchSwitchInst)
+ LAST_TERM_INST (10)
+
+// Standard binary operators...
+ FIRST_BINARY_INST(11)
+HANDLE_BINARY_INST(11, Add , BinaryOperator)
+HANDLE_BINARY_INST(12, FAdd , BinaryOperator)
+HANDLE_BINARY_INST(13, Sub , BinaryOperator)
+HANDLE_BINARY_INST(14, FSub , BinaryOperator)
+HANDLE_BINARY_INST(15, Mul , BinaryOperator)
+HANDLE_BINARY_INST(16, FMul , BinaryOperator)
+HANDLE_BINARY_INST(17, UDiv , BinaryOperator)
+HANDLE_BINARY_INST(18, SDiv , BinaryOperator)
+HANDLE_BINARY_INST(19, FDiv , BinaryOperator)
+HANDLE_BINARY_INST(20, URem , BinaryOperator)
+HANDLE_BINARY_INST(21, SRem , BinaryOperator)
+HANDLE_BINARY_INST(22, FRem , BinaryOperator)
+
+// Logical operators (integer operands)
+HANDLE_BINARY_INST(23, Shl , BinaryOperator) // Shift left (logical)
+HANDLE_BINARY_INST(24, LShr , BinaryOperator) // Shift right (logical)
+HANDLE_BINARY_INST(25, AShr , BinaryOperator) // Shift right (arithmetic)
+HANDLE_BINARY_INST(26, And , BinaryOperator)
+HANDLE_BINARY_INST(27, Or , BinaryOperator)
+HANDLE_BINARY_INST(28, Xor , BinaryOperator)
+ LAST_BINARY_INST(28)
+
+// Memory operators...
+ FIRST_MEMORY_INST(29)
+HANDLE_MEMORY_INST(29, Alloca, AllocaInst) // Stack management
+HANDLE_MEMORY_INST(30, Load , LoadInst ) // Memory manipulation instrs
+HANDLE_MEMORY_INST(31, Store , StoreInst )
+HANDLE_MEMORY_INST(32, GetElementPtr, GetElementPtrInst)
+HANDLE_MEMORY_INST(33, Fence , FenceInst )
+HANDLE_MEMORY_INST(34, AtomicCmpXchg , AtomicCmpXchgInst )
+HANDLE_MEMORY_INST(35, AtomicRMW , AtomicRMWInst )
+ LAST_MEMORY_INST(35)
+
+// Cast operators ...
+// NOTE: The order matters here because CastInst::isEliminableCastPair
+// NOTE: (see Instructions.cpp) encodes a table based on this ordering.
+ FIRST_CAST_INST(36)
+HANDLE_CAST_INST(36, Trunc , TruncInst ) // Truncate integers
+HANDLE_CAST_INST(37, ZExt , ZExtInst ) // Zero extend integers
+HANDLE_CAST_INST(38, SExt , SExtInst ) // Sign extend integers
+HANDLE_CAST_INST(39, FPToUI , FPToUIInst ) // floating point -> UInt
+HANDLE_CAST_INST(40, FPToSI , FPToSIInst ) // floating point -> SInt
+HANDLE_CAST_INST(41, UIToFP , UIToFPInst ) // UInt -> floating point
+HANDLE_CAST_INST(42, SIToFP , SIToFPInst ) // SInt -> floating point
+HANDLE_CAST_INST(43, FPTrunc , FPTruncInst ) // Truncate floating point
+HANDLE_CAST_INST(44, FPExt , FPExtInst ) // Extend floating point
+HANDLE_CAST_INST(45, PtrToInt, PtrToIntInst) // Pointer -> Integer
+HANDLE_CAST_INST(46, IntToPtr, IntToPtrInst) // Integer -> Pointer
+HANDLE_CAST_INST(47, BitCast , BitCastInst ) // Type cast
+HANDLE_CAST_INST(48, AddrSpaceCast, AddrSpaceCastInst) // addrspace cast
+ LAST_CAST_INST(48)
+
+ FIRST_FUNCLETPAD_INST(49)
+HANDLE_FUNCLETPAD_INST(49, CleanupPad, CleanupPadInst)
+HANDLE_FUNCLETPAD_INST(50, CatchPad , CatchPadInst)
+ LAST_FUNCLETPAD_INST(50)
+
+// Other operators...
+ FIRST_OTHER_INST(51)
+HANDLE_OTHER_INST(51, ICmp , ICmpInst ) // Integer comparison instruction
+HANDLE_OTHER_INST(52, FCmp , FCmpInst ) // Floating point comparison instr.
+HANDLE_OTHER_INST(53, PHI , PHINode ) // PHI node instruction
+HANDLE_OTHER_INST(54, Call , CallInst ) // Call a function
+HANDLE_OTHER_INST(55, Select , SelectInst ) // select instruction
+HANDLE_USER_INST (56, UserOp1, Instruction) // May be used internally in a pass
+HANDLE_USER_INST (57, UserOp2, Instruction) // Internal to passes only
+HANDLE_OTHER_INST(58, VAArg , VAArgInst ) // vaarg instruction
+HANDLE_OTHER_INST(59, ExtractElement, ExtractElementInst)// extract from vector
+HANDLE_OTHER_INST(60, InsertElement, InsertElementInst) // insert into vector
+HANDLE_OTHER_INST(61, ShuffleVector, ShuffleVectorInst) // shuffle two vectors.
+HANDLE_OTHER_INST(62, ExtractValue, ExtractValueInst)// extract from aggregate
+HANDLE_OTHER_INST(63, InsertValue, InsertValueInst) // insert into aggregate
+HANDLE_OTHER_INST(64, LandingPad, LandingPadInst) // Landing pad instruction.
+ LAST_OTHER_INST(64)
+
+#undef FIRST_TERM_INST
+#undef HANDLE_TERM_INST
+#undef LAST_TERM_INST
+
+#undef FIRST_BINARY_INST
+#undef HANDLE_BINARY_INST
+#undef LAST_BINARY_INST
+
+#undef FIRST_MEMORY_INST
+#undef HANDLE_MEMORY_INST
+#undef LAST_MEMORY_INST
+
+#undef FIRST_CAST_INST
+#undef HANDLE_CAST_INST
+#undef LAST_CAST_INST
+
+#undef FIRST_FUNCLETPAD_INST
+#undef HANDLE_FUNCLETPAD_INST
+#undef LAST_FUNCLETPAD_INST
+
+#undef FIRST_OTHER_INST
+#undef HANDLE_OTHER_INST
+#undef LAST_OTHER_INST
+
+#undef HANDLE_USER_INST
+
+#ifdef HANDLE_INST
+#undef HANDLE_INST
+#endif
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/Intrinsics.td b/capstone/suite/synctools/tablegen/include/llvm/IR/Intrinsics.td
new file mode 100644
index 000000000..0cec754dd
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/Intrinsics.td
@@ -0,0 +1,1010 @@
+//===- Intrinsics.td - Defines all LLVM intrinsics ---------*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines properties of all LLVM intrinsics.
+//
+//===----------------------------------------------------------------------===//
+
+include "llvm/CodeGen/ValueTypes.td"
+include "llvm/CodeGen/SDNodeProperties.td"
+
+//===----------------------------------------------------------------------===//
+// Properties we keep track of for intrinsics.
+//===----------------------------------------------------------------------===//
+
+class IntrinsicProperty;
+
+// Intr*Mem - Memory properties. If no property is set, the worst case
+// is assumed (it may read and write any memory it can get access to and it may
+// have other side effects).
+
+// IntrNoMem - The intrinsic does not access memory or have any other side
+// effects. It may be CSE'd, deleted if dead, etc.
+def IntrNoMem : IntrinsicProperty;
+
+// IntrReadMem - This intrinsic only reads from memory. It does not write to
+// memory and has no other side effects. Therefore, it cannot be moved across
+// potentially aliasing stores. However, it can be reordered otherwise and can
+// be deleted if dead.
+def IntrReadMem : IntrinsicProperty;
+
+// IntrWriteMem - This intrinsic only writes to memory, but does not read from
+// memory, and has no other side effects. This means dead stores before calls
+// to this intrinsic may be removed.
+def IntrWriteMem : IntrinsicProperty;
+
+// IntrArgMemOnly - This intrinsic only accesses memory that its pointer-typed
+// argument(s) points to, but may access an unspecified amount. Other than
+// reads from and (possibly volatile) writes to memory, it has no side effects.
+def IntrArgMemOnly : IntrinsicProperty;
+
+// IntrInaccessibleMemOnly -- This intrinsic only accesses memory that is not
+// accessible by the module being compiled. This is a weaker form of IntrNoMem.
+def IntrInaccessibleMemOnly : IntrinsicProperty;
+
+// IntrInaccessibleMemOrArgMemOnly -- This intrinsic only accesses memory that
+// its pointer-typed arguments point to or memory that is not accessible
+// by the module being compiled. This is a weaker form of IntrArgMemOnly.
+def IntrInaccessibleMemOrArgMemOnly : IntrinsicProperty;
+
+// Commutative - This intrinsic is commutative: X op Y == Y op X.
+def Commutative : IntrinsicProperty;
+
+// Throws - This intrinsic can throw.
+def Throws : IntrinsicProperty;
+
+// NoCapture - The specified argument pointer is not captured by the intrinsic.
+class NoCapture<int argNo> : IntrinsicProperty {
+ int ArgNo = argNo;
+}
+
+// Returned - The specified argument is always the return value of the
+// intrinsic.
+class Returned<int argNo> : IntrinsicProperty {
+ int ArgNo = argNo;
+}
+
+// ReadOnly - The specified argument pointer is not written to through the
+// pointer by the intrinsic.
+class ReadOnly<int argNo> : IntrinsicProperty {
+ int ArgNo = argNo;
+}
+
+// WriteOnly - The intrinsic does not read memory through the specified
+// argument pointer.
+class WriteOnly<int argNo> : IntrinsicProperty {
+ int ArgNo = argNo;
+}
+
+// ReadNone - The specified argument pointer is not dereferenced by the
+// intrinsic.
+class ReadNone<int argNo> : IntrinsicProperty {
+ int ArgNo = argNo;
+}
+
+def IntrNoReturn : IntrinsicProperty;
+
+// IntrNoDuplicate - Calls to this intrinsic cannot be duplicated.
+// Parallels the noduplicate attribute on LLVM IR functions.
+def IntrNoDuplicate : IntrinsicProperty;
+
+// IntrConvergent - Calls to this intrinsic are convergent and may not be made
+// control-dependent on any additional values.
+// Parallels the convergent attribute on LLVM IR functions.
+def IntrConvergent : IntrinsicProperty;
+
+// This property indicates that the intrinsic is safe to speculate.
+def IntrSpeculatable : IntrinsicProperty;
+
+// This property can be used to override the 'has no other side effects'
+// language of the IntrNoMem, IntrReadMem, IntrWriteMem, and IntrArgMemOnly
+// intrinsic properties. By default, intrinsics are assumed to have side
+// effects, so this property is only necessary if you have defined one of
+// the memory properties listed above.
+// For this property, 'side effects' has the same meaning as 'side effects'
+// defined by the hasSideEffects property of the TableGen Instruction class.
+def IntrHasSideEffects : IntrinsicProperty;
+
+//===----------------------------------------------------------------------===//
+// Types used by intrinsics.
+//===----------------------------------------------------------------------===//
+
+class LLVMType<ValueType vt> {
+ ValueType VT = vt;
+ int isAny = 0;
+}
+
+class LLVMQualPointerType<LLVMType elty, int addrspace>
+ : LLVMType<iPTR>{
+ LLVMType ElTy = elty;
+ int AddrSpace = addrspace;
+}
+
+class LLVMPointerType<LLVMType elty>
+ : LLVMQualPointerType<elty, 0>;
+
+class LLVMAnyPointerType<LLVMType elty>
+ : LLVMType<iPTRAny>{
+ LLVMType ElTy = elty;
+
+ let isAny = 1;
+}
+
+// Match the type of another intrinsic parameter. Number is an index into the
+// list of overloaded types for the intrinsic, excluding all the fixed types.
+// The Number value must refer to a previously listed type. For example:
+// Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_anyfloat_ty, LLVMMatchType<0>]>
+// has two overloaded types, the 2nd and 3rd arguments. LLVMMatchType<0>
+// refers to the first overloaded type, which is the 2nd argument.
+class LLVMMatchType<int num>
+ : LLVMType<OtherVT>{
+ int Number = num;
+}
+
+// Match the type of another intrinsic parameter that is expected to be based on
+// an integral type (i.e. either iN or <N x iM>), but change the scalar size to
+// be twice as wide or half as wide as the other type. This is only useful when
+// the intrinsic is overloaded, so the matched type should be declared as iAny.
+class LLVMExtendedType<int num> : LLVMMatchType<num>;
+class LLVMTruncatedType<int num> : LLVMMatchType<num>;
+class LLVMVectorSameWidth<int num, LLVMType elty>
+ : LLVMMatchType<num> {
+ ValueType ElTy = elty.VT;
+}
+class LLVMPointerTo<int num> : LLVMMatchType<num>;
+class LLVMPointerToElt<int num> : LLVMMatchType<num>;
+class LLVMVectorOfAnyPointersToElt<int num> : LLVMMatchType<num>;
+
+// Match the type of another intrinsic parameter that is expected to be a
+// vector type, but change the element count to be half as many
+class LLVMHalfElementsVectorType<int num> : LLVMMatchType<num>;
+
+def llvm_void_ty : LLVMType<isVoid>;
+let isAny = 1 in {
+ def llvm_any_ty : LLVMType<Any>;
+ def llvm_anyint_ty : LLVMType<iAny>;
+ def llvm_anyfloat_ty : LLVMType<fAny>;
+ def llvm_anyvector_ty : LLVMType<vAny>;
+}
+def llvm_i1_ty : LLVMType<i1>;
+def llvm_i8_ty : LLVMType<i8>;
+def llvm_i16_ty : LLVMType<i16>;
+def llvm_i32_ty : LLVMType<i32>;
+def llvm_i64_ty : LLVMType<i64>;
+def llvm_half_ty : LLVMType<f16>;
+def llvm_float_ty : LLVMType<f32>;
+def llvm_double_ty : LLVMType<f64>;
+def llvm_f80_ty : LLVMType<f80>;
+def llvm_f128_ty : LLVMType<f128>;
+def llvm_ppcf128_ty : LLVMType<ppcf128>;
+def llvm_ptr_ty : LLVMPointerType<llvm_i8_ty>; // i8*
+def llvm_ptrptr_ty : LLVMPointerType<llvm_ptr_ty>; // i8**
+def llvm_anyptr_ty : LLVMAnyPointerType<llvm_i8_ty>; // (space)i8*
+def llvm_empty_ty : LLVMType<OtherVT>; // { }
+def llvm_descriptor_ty : LLVMPointerType<llvm_empty_ty>; // { }*
+def llvm_metadata_ty : LLVMType<MetadataVT>; // !{...}
+def llvm_token_ty : LLVMType<token>; // token
+
+def llvm_x86mmx_ty : LLVMType<x86mmx>;
+def llvm_ptrx86mmx_ty : LLVMPointerType<llvm_x86mmx_ty>; // <1 x i64>*
+
+def llvm_v2i1_ty : LLVMType<v2i1>; // 2 x i1
+def llvm_v4i1_ty : LLVMType<v4i1>; // 4 x i1
+def llvm_v8i1_ty : LLVMType<v8i1>; // 8 x i1
+def llvm_v16i1_ty : LLVMType<v16i1>; // 16 x i1
+def llvm_v32i1_ty : LLVMType<v32i1>; // 32 x i1
+def llvm_v64i1_ty : LLVMType<v64i1>; // 64 x i1
+def llvm_v512i1_ty : LLVMType<v512i1>; // 512 x i1
+def llvm_v1024i1_ty : LLVMType<v1024i1>; //1024 x i1
+
+def llvm_v1i8_ty : LLVMType<v1i8>; // 1 x i8
+def llvm_v2i8_ty : LLVMType<v2i8>; // 2 x i8
+def llvm_v4i8_ty : LLVMType<v4i8>; // 4 x i8
+def llvm_v8i8_ty : LLVMType<v8i8>; // 8 x i8
+def llvm_v16i8_ty : LLVMType<v16i8>; // 16 x i8
+def llvm_v32i8_ty : LLVMType<v32i8>; // 32 x i8
+def llvm_v64i8_ty : LLVMType<v64i8>; // 64 x i8
+def llvm_v128i8_ty : LLVMType<v128i8>; //128 x i8
+def llvm_v256i8_ty : LLVMType<v256i8>; //256 x i8
+
+def llvm_v1i16_ty : LLVMType<v1i16>; // 1 x i16
+def llvm_v2i16_ty : LLVMType<v2i16>; // 2 x i16
+def llvm_v4i16_ty : LLVMType<v4i16>; // 4 x i16
+def llvm_v8i16_ty : LLVMType<v8i16>; // 8 x i16
+def llvm_v16i16_ty : LLVMType<v16i16>; // 16 x i16
+def llvm_v32i16_ty : LLVMType<v32i16>; // 32 x i16
+def llvm_v64i16_ty : LLVMType<v64i16>; // 64 x i16
+def llvm_v128i16_ty : LLVMType<v128i16>; //128 x i16
+
+def llvm_v1i32_ty : LLVMType<v1i32>; // 1 x i32
+def llvm_v2i32_ty : LLVMType<v2i32>; // 2 x i32
+def llvm_v4i32_ty : LLVMType<v4i32>; // 4 x i32
+def llvm_v8i32_ty : LLVMType<v8i32>; // 8 x i32
+def llvm_v16i32_ty : LLVMType<v16i32>; // 16 x i32
+def llvm_v32i32_ty : LLVMType<v32i32>; // 32 x i32
+def llvm_v64i32_ty : LLVMType<v64i32>; // 64 x i32
+
+def llvm_v1i64_ty : LLVMType<v1i64>; // 1 x i64
+def llvm_v2i64_ty : LLVMType<v2i64>; // 2 x i64
+def llvm_v4i64_ty : LLVMType<v4i64>; // 4 x i64
+def llvm_v8i64_ty : LLVMType<v8i64>; // 8 x i64
+def llvm_v16i64_ty : LLVMType<v16i64>; // 16 x i64
+def llvm_v32i64_ty : LLVMType<v32i64>; // 32 x i64
+
+def llvm_v1i128_ty : LLVMType<v1i128>; // 1 x i128
+
+def llvm_v2f16_ty : LLVMType<v2f16>; // 2 x half (__fp16)
+def llvm_v4f16_ty : LLVMType<v4f16>; // 4 x half (__fp16)
+def llvm_v8f16_ty : LLVMType<v8f16>; // 8 x half (__fp16)
+def llvm_v1f32_ty : LLVMType<v1f32>; // 1 x float
+def llvm_v2f32_ty : LLVMType<v2f32>; // 2 x float
+def llvm_v4f32_ty : LLVMType<v4f32>; // 4 x float
+def llvm_v8f32_ty : LLVMType<v8f32>; // 8 x float
+def llvm_v16f32_ty : LLVMType<v16f32>; // 16 x float
+def llvm_v1f64_ty : LLVMType<v1f64>; // 1 x double
+def llvm_v2f64_ty : LLVMType<v2f64>; // 2 x double
+def llvm_v4f64_ty : LLVMType<v4f64>; // 4 x double
+def llvm_v8f64_ty : LLVMType<v8f64>; // 8 x double
+
+def llvm_vararg_ty : LLVMType<isVoid>; // this means vararg here
+
+//===----------------------------------------------------------------------===//
+// Intrinsic Definitions.
+//===----------------------------------------------------------------------===//
+
+// Intrinsic class - This is used to define one LLVM intrinsic. The name of the
+// intrinsic definition should start with "int_", then match the LLVM intrinsic
+// name with the "llvm." prefix removed, and all "."s turned into "_"s. For
+// example, llvm.bswap.i16 -> int_bswap_i16.
+//
+// * RetTypes is a list containing the return types expected for the
+// intrinsic.
+// * ParamTypes is a list containing the parameter types expected for the
+// intrinsic.
+// * Properties can be set to describe the behavior of the intrinsic.
+//
+class Intrinsic<list<LLVMType> ret_types,
+ list<LLVMType> param_types = [],
+ list<IntrinsicProperty> intr_properties = [],
+ string name = "",
+ list<SDNodeProperty> sd_properties = []> : SDPatternOperator {
+ string LLVMName = name;
+ string TargetPrefix = ""; // Set to a prefix for target-specific intrinsics.
+ list<LLVMType> RetTypes = ret_types;
+ list<LLVMType> ParamTypes = param_types;
+ list<IntrinsicProperty> IntrProperties = intr_properties;
+ let Properties = sd_properties;
+
+ bit isTarget = 0;
+}
+
+/// GCCBuiltin - If this intrinsic exactly corresponds to a GCC builtin, this
+/// specifies the name of the builtin. This provides automatic CBE and CFE
+/// support.
+class GCCBuiltin<string name> {
+ string GCCBuiltinName = name;
+}
+
+class MSBuiltin<string name> {
+ string MSBuiltinName = name;
+}
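
[Editor's note] Putting the pieces above together, a hedged sketch of a complete intrinsic definition; int_my_example, its signature, and the builtin name are invented for illustration, and per the naming rule described above it would correspond to an intrinsic named llvm.my.example:

// Returns the same overloaded integer type as its first operand
// (LLVMMatchType<0> refers to overloaded type 0, here llvm_anyint_ty),
// takes a pointer it does not capture, and accesses no memory.
def int_my_example
    : Intrinsic<[llvm_anyint_ty], [LLVMMatchType<0>, llvm_ptr_ty],
                [IntrNoMem, NoCapture<1>]>,
      GCCBuiltin<"__builtin_my_example">;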
+
+
+//===--------------- Variable Argument Handling Intrinsics ----------------===//
+//
+
+def int_vastart : Intrinsic<[], [llvm_ptr_ty], [], "llvm.va_start">;
+def int_vacopy : Intrinsic<[], [llvm_ptr_ty, llvm_ptr_ty], [],
+ "llvm.va_copy">;
+def int_vaend : Intrinsic<[], [llvm_ptr_ty], [], "llvm.va_end">;
+
+//===------------------- Garbage Collection Intrinsics --------------------===//
+//
+def int_gcroot : Intrinsic<[],
+ [llvm_ptrptr_ty, llvm_ptr_ty]>;
+def int_gcread : Intrinsic<[llvm_ptr_ty],
+ [llvm_ptr_ty, llvm_ptrptr_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+def int_gcwrite : Intrinsic<[],
+ [llvm_ptr_ty, llvm_ptr_ty, llvm_ptrptr_ty],
+ [IntrArgMemOnly, NoCapture<1>, NoCapture<2>]>;
+
+//===--------------------- Code Generator Intrinsics ----------------------===//
+//
+def int_returnaddress : Intrinsic<[llvm_ptr_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_addressofreturnaddress : Intrinsic<[llvm_ptr_ty], [], [IntrNoMem]>;
+def int_frameaddress : Intrinsic<[llvm_ptr_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_read_register : Intrinsic<[llvm_anyint_ty], [llvm_metadata_ty],
+ [IntrReadMem], "llvm.read_register">;
+def int_write_register : Intrinsic<[], [llvm_metadata_ty, llvm_anyint_ty],
+ [], "llvm.write_register">;
+
+// Gets the address of the local variable area. This is typically a copy of the
+// stack, frame, or base pointer depending on the type of prologue.
+def int_localaddress : Intrinsic<[llvm_ptr_ty], [], [IntrNoMem]>;
+
+// Escapes local variables to allow access from other functions.
+def int_localescape : Intrinsic<[], [llvm_vararg_ty]>;
+
+// Given a function and the localaddress of a parent frame, returns a pointer
+// to an escaped allocation indicated by the index.
+def int_localrecover : Intrinsic<[llvm_ptr_ty],
+ [llvm_ptr_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+// Note: we treat stacksave/stackrestore as writemem because we don't otherwise
+// model their dependencies on allocas.
+def int_stacksave : Intrinsic<[llvm_ptr_ty]>,
+ GCCBuiltin<"__builtin_stack_save">;
+def int_stackrestore : Intrinsic<[], [llvm_ptr_ty]>,
+ GCCBuiltin<"__builtin_stack_restore">;
+
+def int_get_dynamic_area_offset : Intrinsic<[llvm_anyint_ty]>;
+
+def int_thread_pointer : Intrinsic<[llvm_ptr_ty], [], [IntrNoMem]>,
+ GCCBuiltin<"__builtin_thread_pointer">;
+
+// IntrInaccessibleMemOrArgMemOnly is a little more pessimistic than strictly
+// necessary for prefetch, however it does conveniently prevent the prefetch
+// from being reordered overly much with respect to nearby access to the same
+// memory while not impeding optimization.
+def int_prefetch
+ : Intrinsic<[], [ llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty ],
+ [ IntrInaccessibleMemOrArgMemOnly, ReadOnly<0>, NoCapture<0> ]>;
+def int_pcmarker : Intrinsic<[], [llvm_i32_ty]>;
+
+def int_readcyclecounter : Intrinsic<[llvm_i64_ty]>;
+
+// The assume intrinsic is marked as arbitrarily writing so that proper
+// control dependencies will be maintained.
+def int_assume : Intrinsic<[], [llvm_i1_ty], []>;
+
+// Stack Protector Intrinsic - The stackprotector intrinsic writes the stack
+// guard to the correct place on the stack frame.
+def int_stackprotector : Intrinsic<[], [llvm_ptr_ty, llvm_ptrptr_ty], []>;
+def int_stackguard : Intrinsic<[llvm_ptr_ty], [], []>;
+
+// A counter increment for instrumentation based profiling.
+def int_instrprof_increment : Intrinsic<[],
+ [llvm_ptr_ty, llvm_i64_ty,
+ llvm_i32_ty, llvm_i32_ty],
+ []>;
+
+// A counter increment with step for instrumentation based profiling.
+def int_instrprof_increment_step : Intrinsic<[],
+ [llvm_ptr_ty, llvm_i64_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i64_ty],
+ []>;
+
+// A call to profile runtime for value profiling of target expressions
+// through instrumentation based profiling.
+def int_instrprof_value_profile : Intrinsic<[],
+ [llvm_ptr_ty, llvm_i64_ty,
+ llvm_i64_ty, llvm_i32_ty,
+ llvm_i32_ty],
+ []>;
+
+//===------------------- Standard C Library Intrinsics --------------------===//
+//
+
+def int_memcpy : Intrinsic<[],
+ [llvm_anyptr_ty, llvm_anyptr_ty, llvm_anyint_ty,
+ llvm_i1_ty],
+ [IntrArgMemOnly, NoCapture<0>, NoCapture<1>,
+ WriteOnly<0>, ReadOnly<1>]>;
+def int_memmove : Intrinsic<[],
+ [llvm_anyptr_ty, llvm_anyptr_ty, llvm_anyint_ty,
+ llvm_i1_ty],
+ [IntrArgMemOnly, NoCapture<0>, NoCapture<1>,
+ ReadOnly<1>]>;
+def int_memset : Intrinsic<[],
+ [llvm_anyptr_ty, llvm_i8_ty, llvm_anyint_ty,
+ llvm_i1_ty],
+ [IntrArgMemOnly, NoCapture<0>, WriteOnly<0>]>;
+
+// FIXME: Add version of these floating point intrinsics which allow non-default
+// rounding modes and FP exception handling.
+
+let IntrProperties = [IntrNoMem, IntrSpeculatable] in {
+ def int_fma : Intrinsic<[llvm_anyfloat_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>,
+ LLVMMatchType<0>]>;
+ def int_fmuladd : Intrinsic<[llvm_anyfloat_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>,
+ LLVMMatchType<0>]>;
+
+ // These functions do not read memory, but are sensitive to the
+ // rounding mode. LLVM purposely does not model changes to the FP
+ // environment so they can be treated as readnone.
+ def int_sqrt : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_powi : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>, llvm_i32_ty]>;
+ def int_sin : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_cos : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_pow : Intrinsic<[llvm_anyfloat_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>]>;
+ def int_log : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_log10: Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_log2 : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_exp : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_exp2 : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_fabs : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_copysign : Intrinsic<[llvm_anyfloat_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>]>;
+ def int_floor : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_ceil : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_trunc : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_rint : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_nearbyint : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_round : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_canonicalize : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>],
+ [IntrNoMem]>;
+}
+
+def int_minnum : Intrinsic<[llvm_anyfloat_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem, IntrSpeculatable, Commutative]
+>;
+def int_maxnum : Intrinsic<[llvm_anyfloat_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem, IntrSpeculatable, Commutative]
+>;
+
+// NOTE: these are internal interfaces.
+def int_setjmp : Intrinsic<[llvm_i32_ty], [llvm_ptr_ty]>;
+def int_longjmp : Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty], [IntrNoReturn]>;
+def int_sigsetjmp : Intrinsic<[llvm_i32_ty] , [llvm_ptr_ty, llvm_i32_ty]>;
+def int_siglongjmp : Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty], [IntrNoReturn]>;
+
+// Internal interface for object size checking
+def int_objectsize : Intrinsic<[llvm_anyint_ty],
+ [llvm_anyptr_ty, llvm_i1_ty, llvm_i1_ty],
+ [IntrNoMem, IntrSpeculatable]>,
+ GCCBuiltin<"__builtin_object_size">;
+
+//===--------------- Constrained Floating Point Intrinsics ----------------===//
+//
+
+let IntrProperties = [IntrInaccessibleMemOnly] in {
+ def int_experimental_constrained_fadd : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+ def int_experimental_constrained_fsub : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+ def int_experimental_constrained_fmul : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+ def int_experimental_constrained_fdiv : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+ def int_experimental_constrained_frem : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+
+ def int_experimental_constrained_fma : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ LLVMMatchType<0>,
+ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+
+ // These intrinsics are sensitive to the rounding mode so we need constrained
+ // versions of each of them. When strict rounding and exception control are
+ // not required the non-constrained versions of these intrinsics should be
+ // used.
+ def int_experimental_constrained_sqrt : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+ def int_experimental_constrained_powi : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ llvm_i32_ty,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+ def int_experimental_constrained_sin : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+ def int_experimental_constrained_cos : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+ def int_experimental_constrained_pow : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+ def int_experimental_constrained_log : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+ def int_experimental_constrained_log10: Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+ def int_experimental_constrained_log2 : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+ def int_experimental_constrained_exp : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+ def int_experimental_constrained_exp2 : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+ def int_experimental_constrained_rint : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+ def int_experimental_constrained_nearbyint : Intrinsic<[ llvm_anyfloat_ty ],
+ [ LLVMMatchType<0>,
+ llvm_metadata_ty,
+ llvm_metadata_ty ]>;
+}
+// FIXME: Add intrinsics for fcmp, fptrunc, fpext, fptoui and fptosi.
+// FIXME: Add intrinsics for fabs, copysign, floor, ceil, trunc and round?
+
+
+//===------------------------- Expect Intrinsics --------------------------===//
+//
+def int_expect : Intrinsic<[llvm_anyint_ty], [LLVMMatchType<0>,
+ LLVMMatchType<0>], [IntrNoMem]>;
+
+//===-------------------- Bit Manipulation Intrinsics ---------------------===//
+//
+
+// None of these intrinsics accesses memory at all.
+let IntrProperties = [IntrNoMem, IntrSpeculatable] in {
+ def int_bswap: Intrinsic<[llvm_anyint_ty], [LLVMMatchType<0>]>;
+ def int_ctpop: Intrinsic<[llvm_anyint_ty], [LLVMMatchType<0>]>;
+ def int_ctlz : Intrinsic<[llvm_anyint_ty], [LLVMMatchType<0>, llvm_i1_ty]>;
+ def int_cttz : Intrinsic<[llvm_anyint_ty], [LLVMMatchType<0>, llvm_i1_ty]>;
+ def int_bitreverse : Intrinsic<[llvm_anyint_ty], [LLVMMatchType<0>]>;
+ def int_fshl : Intrinsic<[llvm_anyint_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>]>;
+ def int_fshr : Intrinsic<[llvm_anyint_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>]>;
+}
+
+//===------------------------ Debugger Intrinsics -------------------------===//
+//
+
+// None of these intrinsics accesses memory at all...but that doesn't
+// mean the optimizers can change them aggressively. Special handling is
+// needed in a few places. These synthetic intrinsics have no
+// side-effects and just mark information about their operands.
+let IntrProperties = [IntrNoMem, IntrSpeculatable] in {
+ def int_dbg_declare : Intrinsic<[],
+ [llvm_metadata_ty,
+ llvm_metadata_ty,
+ llvm_metadata_ty]>;
+ def int_dbg_value : Intrinsic<[],
+ [llvm_metadata_ty,
+ llvm_metadata_ty,
+ llvm_metadata_ty]>;
+ def int_dbg_addr : Intrinsic<[],
+ [llvm_metadata_ty,
+ llvm_metadata_ty,
+ llvm_metadata_ty]>;
+ def int_dbg_label : Intrinsic<[],
+ [llvm_metadata_ty]>;
+}
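+// Illustrative IR usage (a sketch): all operands are passed as metadata,
+// e.g. for dbg.value, where !12 would be a DILocalVariable:
+//   call void @llvm.dbg.value(metadata i32 %x, metadata !12,
+//                             metadata !DIExpression())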
+
+//===------------------ Exception Handling Intrinsics----------------------===//
+//
+
+// The result of eh.typeid.for depends on the enclosing function, but inside a
+// given function it is 'const' and may be CSE'd etc.
+def int_eh_typeid_for : Intrinsic<[llvm_i32_ty], [llvm_ptr_ty], [IntrNoMem]>;
+
+def int_eh_return_i32 : Intrinsic<[], [llvm_i32_ty, llvm_ptr_ty]>;
+def int_eh_return_i64 : Intrinsic<[], [llvm_i64_ty, llvm_ptr_ty]>;
+
+// eh.exceptionpointer returns the pointer to the exception caught by
+// the given `catchpad`.
+def int_eh_exceptionpointer : Intrinsic<[llvm_anyptr_ty], [llvm_token_ty],
+ [IntrNoMem]>;
+
+// Gets the exception code from a catchpad token. Only used on some platforms.
+def int_eh_exceptioncode : Intrinsic<[llvm_i32_ty], [llvm_token_ty], [IntrNoMem]>;
+
+// __builtin_unwind_init is an undocumented GCC intrinsic that causes all
+// callee-saved registers to be saved and restored (regardless of whether they
+// are used) in the calling function. It is used by libgcc_eh.
+def int_eh_unwind_init: Intrinsic<[]>,
+ GCCBuiltin<"__builtin_unwind_init">;
+
+def int_eh_dwarf_cfa : Intrinsic<[llvm_ptr_ty], [llvm_i32_ty]>;
+
+let IntrProperties = [IntrNoMem] in {
+ def int_eh_sjlj_lsda : Intrinsic<[llvm_ptr_ty]>;
+ def int_eh_sjlj_callsite : Intrinsic<[], [llvm_i32_ty]>;
+}
+def int_eh_sjlj_functioncontext : Intrinsic<[], [llvm_ptr_ty]>;
+def int_eh_sjlj_setjmp : Intrinsic<[llvm_i32_ty], [llvm_ptr_ty]>;
+def int_eh_sjlj_longjmp : Intrinsic<[], [llvm_ptr_ty], [IntrNoReturn]>;
+def int_eh_sjlj_setup_dispatch : Intrinsic<[], []>;
+
+//===---------------- Generic Variable Attribute Intrinsics----------------===//
+//
+def int_var_annotation : Intrinsic<[],
+ [llvm_ptr_ty, llvm_ptr_ty,
+ llvm_ptr_ty, llvm_i32_ty],
+ [], "llvm.var.annotation">;
+def int_ptr_annotation : Intrinsic<[LLVMAnyPointerType<llvm_anyint_ty>],
+ [LLVMMatchType<0>, llvm_ptr_ty, llvm_ptr_ty,
+ llvm_i32_ty],
+ [], "llvm.ptr.annotation">;
+def int_annotation : Intrinsic<[llvm_anyint_ty],
+ [LLVMMatchType<0>, llvm_ptr_ty,
+ llvm_ptr_ty, llvm_i32_ty],
+ [], "llvm.annotation">;
+
+// Annotates the current program point with metadata strings which are emitted
+// as CodeView debug info records. This is expensive, as it disables inlining
+// and is modelled as having side effects.
+def int_codeview_annotation : Intrinsic<[], [llvm_metadata_ty],
+ [IntrInaccessibleMemOnly, IntrNoDuplicate],
+ "llvm.codeview.annotation">;
+
+//===------------------------ Trampoline Intrinsics -----------------------===//
+//
+def int_init_trampoline : Intrinsic<[],
+ [llvm_ptr_ty, llvm_ptr_ty, llvm_ptr_ty],
+ [IntrArgMemOnly, NoCapture<0>]>,
+ GCCBuiltin<"__builtin_init_trampoline">;
+
+def int_adjust_trampoline : Intrinsic<[llvm_ptr_ty], [llvm_ptr_ty],
+ [IntrReadMem, IntrArgMemOnly]>,
+ GCCBuiltin<"__builtin_adjust_trampoline">;
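+// Illustrative IR usage (a sketch): a trampoline is first initialized in a
+// writable, executable memory block and the callable pointer is then
+// obtained from it:
+//   call void @llvm.init.trampoline(i8* %tramp, i8* %func, i8* %nval)
+//   %p = call i8* @llvm.adjust.trampoline(i8* %tramp)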
+
+//===------------------------ Overflow Intrinsics -------------------------===//
+//
+
+// Expose the carry/overflow flag from add, subtract and multiply operations
+// on two integer operands.
+def int_sadd_with_overflow : Intrinsic<[llvm_anyint_ty, llvm_i1_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem, IntrSpeculatable]>;
+def int_uadd_with_overflow : Intrinsic<[llvm_anyint_ty, llvm_i1_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem, IntrSpeculatable]>;
+
+def int_ssub_with_overflow : Intrinsic<[llvm_anyint_ty, llvm_i1_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem, IntrSpeculatable]>;
+def int_usub_with_overflow : Intrinsic<[llvm_anyint_ty, llvm_i1_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem, IntrSpeculatable]>;
+
+def int_smul_with_overflow : Intrinsic<[llvm_anyint_ty, llvm_i1_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem, IntrSpeculatable]>;
+def int_umul_with_overflow : Intrinsic<[llvm_anyint_ty, llvm_i1_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem, IntrSpeculatable]>;
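+// Illustrative IR usage (a sketch, assuming the i32 overload): the second
+// element of the result reports whether the operation wrapped:
+//   %res = call { i32, i1 } @llvm.sadd.with.overflow.i32(i32 %a, i32 %b)
+//   %sum = extractvalue { i32, i1 } %res, 0
+//   %ovf = extractvalue { i32, i1 } %res, 1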
+
+//===------------------------- Memory Use Markers -------------------------===//
+//
+def int_lifetime_start : Intrinsic<[],
+ [llvm_i64_ty, llvm_anyptr_ty],
+ [IntrArgMemOnly, NoCapture<1>]>;
+def int_lifetime_end : Intrinsic<[],
+ [llvm_i64_ty, llvm_anyptr_ty],
+ [IntrArgMemOnly, NoCapture<1>]>;
+def int_invariant_start : Intrinsic<[llvm_descriptor_ty],
+ [llvm_i64_ty, llvm_anyptr_ty],
+ [IntrArgMemOnly, NoCapture<1>]>;
+def int_invariant_end : Intrinsic<[],
+ [llvm_descriptor_ty, llvm_i64_ty,
+ llvm_anyptr_ty],
+ [IntrArgMemOnly, NoCapture<2>]>;
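+// Illustrative IR usage (a sketch, assuming a 16-byte i8* object): the
+// markers bracket the range over which the memory is considered live:
+//   call void @llvm.lifetime.start.p0i8(i64 16, i8* %buf)
+//   ...
+//   call void @llvm.lifetime.end.p0i8(i64 16, i8* %buf)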
+
+// launder.invariant.group can't be marked with 'readnone' (IntrNoMem),
+// because it would cause CSE of two barriers with the same argument.
+// Inaccessiblememonly says that the barrier doesn't read the argument,
+// but it changes state not accessible to this module. This way
+// we can DSE through the barrier because it doesn't read the value
+// after store. Although the barrier doesn't modify any memory it
+// can't be marked as readonly, because it would be possible to
+// CSE two barriers with a store in between.
+// The argument also can't be marked with the 'returned' attribute, because
+// that would remove the barrier.
+// Note that it is still experimental, which means that its semantics
+// might change in the future.
+def int_launder_invariant_group : Intrinsic<[llvm_anyptr_ty],
+ [LLVMMatchType<0>],
+ [IntrInaccessibleMemOnly, IntrSpeculatable]>;
+
+
+def int_strip_invariant_group : Intrinsic<[llvm_anyptr_ty],
+ [LLVMMatchType<0>],
+ [IntrSpeculatable, IntrNoMem]>;
+
+//===------------------------ Stackmap Intrinsics -------------------------===//
+//
+def int_experimental_stackmap : Intrinsic<[],
+ [llvm_i64_ty, llvm_i32_ty, llvm_vararg_ty],
+ [Throws]>;
+def int_experimental_patchpoint_void : Intrinsic<[],
+ [llvm_i64_ty, llvm_i32_ty,
+ llvm_ptr_ty, llvm_i32_ty,
+ llvm_vararg_ty],
+ [Throws]>;
+def int_experimental_patchpoint_i64 : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty,
+ llvm_ptr_ty, llvm_i32_ty,
+ llvm_vararg_ty],
+ [Throws]>;
+
+
+//===------------------------ Garbage Collection Intrinsics ---------------===//
+// These are documented in docs/Statepoint.rst
+
+def int_experimental_gc_statepoint : Intrinsic<[llvm_token_ty],
+ [llvm_i64_ty, llvm_i32_ty,
+ llvm_anyptr_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_vararg_ty],
+ [Throws]>;
+
+def int_experimental_gc_result : Intrinsic<[llvm_any_ty], [llvm_token_ty],
+ [IntrReadMem]>;
+def int_experimental_gc_relocate : Intrinsic<[llvm_any_ty],
+ [llvm_token_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrReadMem]>;
+
+//===------------------------ Coroutine Intrinsics ---------------===//
+// These are documented in docs/Coroutines.rst
+
+// Coroutine Structure Intrinsics.
+
+def int_coro_id : Intrinsic<[llvm_token_ty], [llvm_i32_ty, llvm_ptr_ty,
+ llvm_ptr_ty, llvm_ptr_ty],
+ [IntrArgMemOnly, IntrReadMem,
+ ReadNone<1>, ReadOnly<2>, NoCapture<2>]>;
+def int_coro_alloc : Intrinsic<[llvm_i1_ty], [llvm_token_ty], []>;
+def int_coro_begin : Intrinsic<[llvm_ptr_ty], [llvm_token_ty, llvm_ptr_ty],
+ [WriteOnly<1>]>;
+
+def int_coro_free : Intrinsic<[llvm_ptr_ty], [llvm_token_ty, llvm_ptr_ty],
+ [IntrReadMem, IntrArgMemOnly, ReadOnly<1>,
+ NoCapture<1>]>;
+def int_coro_end : Intrinsic<[llvm_i1_ty], [llvm_ptr_ty, llvm_i1_ty], []>;
+
+def int_coro_frame : Intrinsic<[llvm_ptr_ty], [], [IntrNoMem]>;
+def int_coro_noop : Intrinsic<[llvm_ptr_ty], [], [IntrNoMem]>;
+def int_coro_size : Intrinsic<[llvm_anyint_ty], [], [IntrNoMem]>;
+
+def int_coro_save : Intrinsic<[llvm_token_ty], [llvm_ptr_ty], []>;
+def int_coro_suspend : Intrinsic<[llvm_i8_ty], [llvm_token_ty, llvm_i1_ty], []>;
+
+def int_coro_param : Intrinsic<[llvm_i1_ty], [llvm_ptr_ty, llvm_ptr_ty],
+ [IntrNoMem, ReadNone<0>, ReadNone<1>]>;
+
+// Coroutine Manipulation Intrinsics.
+
+def int_coro_resume : Intrinsic<[], [llvm_ptr_ty], [Throws]>;
+def int_coro_destroy : Intrinsic<[], [llvm_ptr_ty], [Throws]>;
+def int_coro_done : Intrinsic<[llvm_i1_ty], [llvm_ptr_ty],
+ [IntrArgMemOnly, ReadOnly<0>, NoCapture<0>]>;
+def int_coro_promise : Intrinsic<[llvm_ptr_ty],
+ [llvm_ptr_ty, llvm_i32_ty, llvm_i1_ty],
+ [IntrNoMem, NoCapture<0>]>;
+
+// Coroutine Lowering Intrinsics. Used internally by coroutine passes.
+
+def int_coro_subfn_addr : Intrinsic<[llvm_ptr_ty], [llvm_ptr_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly, ReadOnly<0>,
+ NoCapture<0>]>;
+
+//===-------------------------- Other Intrinsics --------------------------===//
+//
+def int_flt_rounds : Intrinsic<[llvm_i32_ty]>,
+ GCCBuiltin<"__builtin_flt_rounds">;
+def int_trap : Intrinsic<[], [], [IntrNoReturn]>,
+ GCCBuiltin<"__builtin_trap">;
+def int_debugtrap : Intrinsic<[]>,
+ GCCBuiltin<"__builtin_debugtrap">;
+
+// Support for dynamic deoptimization (or de-specialization)
+def int_experimental_deoptimize : Intrinsic<[llvm_any_ty], [llvm_vararg_ty],
+ [Throws]>;
+
+// Support for speculative runtime guards
+def int_experimental_guard : Intrinsic<[], [llvm_i1_ty, llvm_vararg_ty],
+ [Throws]>;
+
+// NOP: calls/invokes to this intrinsic are removed by codegen
+def int_donothing : Intrinsic<[], [], [IntrNoMem]>;
+
+// This intrinsic has no actual effect, though it is treated by the optimizer
+// as having opaque side effects. This may be inserted into loops to ensure
+// that they are not removed even if they turn out to be empty, for languages
+// which specify that infinite loops must be preserved.
+def int_sideeffect : Intrinsic<[], [], [IntrInaccessibleMemOnly]>;
+
+// Intrinsics to support the half-precision floating-point format
+let IntrProperties = [IntrNoMem] in {
+def int_convert_to_fp16 : Intrinsic<[llvm_i16_ty], [llvm_anyfloat_ty]>;
+def int_convert_from_fp16 : Intrinsic<[llvm_anyfloat_ty], [llvm_i16_ty]>;
+}
+
+// Clear cache intrinsic; the default lowering is to ignore it (i.e. emit
+// nothing). It maps to void __clear_cache() on supporting platforms.
+def int_clear_cache : Intrinsic<[], [llvm_ptr_ty, llvm_ptr_ty],
+ [], "llvm.clear_cache">;
+
+//===-------------------------- Masked Intrinsics -------------------------===//
+//
+def int_masked_store : Intrinsic<[], [llvm_anyvector_ty,
+ LLVMAnyPointerType<LLVMMatchType<0>>,
+ llvm_i32_ty,
+ LLVMVectorSameWidth<0, llvm_i1_ty>],
+ [IntrArgMemOnly]>;
+
+def int_masked_load : Intrinsic<[llvm_anyvector_ty],
+ [LLVMAnyPointerType<LLVMMatchType<0>>, llvm_i32_ty,
+ LLVMVectorSameWidth<0, llvm_i1_ty>, LLVMMatchType<0>],
+ [IntrReadMem, IntrArgMemOnly]>;
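+// Illustrative IR usage (a sketch, assuming a <4 x i32> overload): lanes
+// disabled by the mask take their value from the passthru operand:
+//   %v = call <4 x i32> @llvm.masked.load.v4i32.p0v4i32(<4 x i32>* %p,
+//            i32 4, <4 x i1> %mask, <4 x i32> %passthru)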
+
+def int_masked_gather: Intrinsic<[llvm_anyvector_ty],
+ [LLVMVectorOfAnyPointersToElt<0>, llvm_i32_ty,
+ LLVMVectorSameWidth<0, llvm_i1_ty>,
+ LLVMMatchType<0>],
+ [IntrReadMem]>;
+
+def int_masked_scatter: Intrinsic<[],
+ [llvm_anyvector_ty,
+ LLVMVectorOfAnyPointersToElt<0>, llvm_i32_ty,
+ LLVMVectorSameWidth<0, llvm_i1_ty>]>;
+
+def int_masked_expandload: Intrinsic<[llvm_anyvector_ty],
+ [LLVMPointerToElt<0>,
+ LLVMVectorSameWidth<0, llvm_i1_ty>,
+ LLVMMatchType<0>],
+ [IntrReadMem]>;
+
+def int_masked_compressstore: Intrinsic<[],
+ [llvm_anyvector_ty,
+ LLVMPointerToElt<0>,
+ LLVMVectorSameWidth<0, llvm_i1_ty>],
+ [IntrArgMemOnly]>;
+
+// Test whether a pointer is associated with a type metadata identifier.
+def int_type_test : Intrinsic<[llvm_i1_ty], [llvm_ptr_ty, llvm_metadata_ty],
+ [IntrNoMem]>;
+
+// Safely loads a function pointer from a virtual table pointer using type metadata.
+def int_type_checked_load : Intrinsic<[llvm_ptr_ty, llvm_i1_ty],
+ [llvm_ptr_ty, llvm_i32_ty, llvm_metadata_ty],
+ [IntrNoMem]>;
+
+// Create a branch funnel that implements an indirect call to a limited set of
+// callees. This needs to be a musttail call.
+def int_icall_branch_funnel : Intrinsic<[], [llvm_vararg_ty], []>;
+
+def int_load_relative: Intrinsic<[llvm_ptr_ty], [llvm_ptr_ty, llvm_anyint_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+// XRay intrinsics
+//===----------------------------------------------------------------------===//
+// Custom event logging for XRay.
+// Takes a pointer to a string and the length of the string.
+def int_xray_customevent : Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty],
+ [NoCapture<0>, ReadOnly<0>, IntrWriteMem]>;
+// Typed event logging for XRay.
+// Takes a numeric type tag, a pointer to a string and the length of the string.
+def int_xray_typedevent : Intrinsic<[], [llvm_i16_ty, llvm_ptr_ty, llvm_i32_ty],
+ [NoCapture<1>, ReadOnly<1>, IntrWriteMem]>;
+//===----------------------------------------------------------------------===//
+
+//===------ Memory intrinsics with element-wise atomicity guarantees ------===//
+//
+
+// @llvm.memcpy.element.unordered.atomic.*(dest, src, length, elementsize)
+def int_memcpy_element_unordered_atomic
+ : Intrinsic<[],
+ [
+ llvm_anyptr_ty, llvm_anyptr_ty, llvm_anyint_ty, llvm_i32_ty
+ ],
+ [
+ IntrArgMemOnly, NoCapture<0>, NoCapture<1>, WriteOnly<0>,
+ ReadOnly<1>
+ ]>;
+
+// @llvm.memmove.element.unordered.atomic.*(dest, src, length, elementsize)
+def int_memmove_element_unordered_atomic
+ : Intrinsic<[],
+ [
+ llvm_anyptr_ty, llvm_anyptr_ty, llvm_anyint_ty, llvm_i32_ty
+ ],
+ [
+ IntrArgMemOnly, NoCapture<0>, NoCapture<1>, WriteOnly<0>,
+ ReadOnly<1>
+ ]>;
+
+// @llvm.memset.element.unordered.atomic.*(dest, value, length, elementsize)
+def int_memset_element_unordered_atomic
+ : Intrinsic<[], [ llvm_anyptr_ty, llvm_i8_ty, llvm_anyint_ty, llvm_i32_ty ],
+ [ IntrArgMemOnly, NoCapture<0>, WriteOnly<0> ]>;
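+// Illustrative IR usage (a sketch, assuming i8* operands and an i32 length):
+// the trailing element size must be a constant that divides the length:
+//   call void @llvm.memcpy.element.unordered.atomic.p0i8.p0i8.i32(
+//            i8* align 4 %dst, i8* align 4 %src, i32 %len, i32 4)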
+
+//===------------------------ Reduction Intrinsics ------------------------===//
+//
+def int_experimental_vector_reduce_fadd : Intrinsic<[llvm_anyfloat_ty],
+ [llvm_anyfloat_ty,
+ llvm_anyvector_ty],
+ [IntrNoMem]>;
+def int_experimental_vector_reduce_fmul : Intrinsic<[llvm_anyfloat_ty],
+ [llvm_anyfloat_ty,
+ llvm_anyvector_ty],
+ [IntrNoMem]>;
+def int_experimental_vector_reduce_add : Intrinsic<[llvm_anyint_ty],
+ [llvm_anyvector_ty],
+ [IntrNoMem]>;
+def int_experimental_vector_reduce_mul : Intrinsic<[llvm_anyint_ty],
+ [llvm_anyvector_ty],
+ [IntrNoMem]>;
+def int_experimental_vector_reduce_and : Intrinsic<[llvm_anyint_ty],
+ [llvm_anyvector_ty],
+ [IntrNoMem]>;
+def int_experimental_vector_reduce_or : Intrinsic<[llvm_anyint_ty],
+ [llvm_anyvector_ty],
+ [IntrNoMem]>;
+def int_experimental_vector_reduce_xor : Intrinsic<[llvm_anyint_ty],
+ [llvm_anyvector_ty],
+ [IntrNoMem]>;
+def int_experimental_vector_reduce_smax : Intrinsic<[llvm_anyint_ty],
+ [llvm_anyvector_ty],
+ [IntrNoMem]>;
+def int_experimental_vector_reduce_smin : Intrinsic<[llvm_anyint_ty],
+ [llvm_anyvector_ty],
+ [IntrNoMem]>;
+def int_experimental_vector_reduce_umax : Intrinsic<[llvm_anyint_ty],
+ [llvm_anyvector_ty],
+ [IntrNoMem]>;
+def int_experimental_vector_reduce_umin : Intrinsic<[llvm_anyint_ty],
+ [llvm_anyvector_ty],
+ [IntrNoMem]>;
+def int_experimental_vector_reduce_fmax : Intrinsic<[llvm_anyfloat_ty],
+ [llvm_anyvector_ty],
+ [IntrNoMem]>;
+def int_experimental_vector_reduce_fmin : Intrinsic<[llvm_anyfloat_ty],
+ [llvm_anyvector_ty],
+ [IntrNoMem]>;
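+// Illustrative IR usage (a sketch; the exact mangled suffix depends on the
+// overloaded result and vector types):
+//   %sum = call i32 @llvm.experimental.vector.reduce.add.i32.v4i32(<4 x i32> %v)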
+
+//===----- Intrinsics that are used to provide predicate information -----===//
+
+def int_ssa_copy : Intrinsic<[llvm_any_ty], [LLVMMatchType<0>],
+ [IntrNoMem, Returned<0>]>;
+//===----------------------------------------------------------------------===//
+// Target-specific intrinsics
+//===----------------------------------------------------------------------===//
+
+include "llvm/IR/IntrinsicsPowerPC.td"
+include "llvm/IR/IntrinsicsX86.td"
+include "llvm/IR/IntrinsicsARM.td"
+include "llvm/IR/IntrinsicsAArch64.td"
+include "llvm/IR/IntrinsicsXCore.td"
+include "llvm/IR/IntrinsicsHexagon.td"
+include "llvm/IR/IntrinsicsNVVM.td"
+include "llvm/IR/IntrinsicsMips.td"
+include "llvm/IR/IntrinsicsAMDGPU.td"
+include "llvm/IR/IntrinsicsBPF.td"
+include "llvm/IR/IntrinsicsSystemZ.td"
+include "llvm/IR/IntrinsicsWebAssembly.td"
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsAArch64.td b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsAArch64.td
new file mode 100644
index 000000000..688e863c1
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsAArch64.td
@@ -0,0 +1,669 @@
+//===- IntrinsicsAARCH64.td - Defines AARCH64 intrinsics ---*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines all of the AARCH64-specific intrinsics.
+//
+//===----------------------------------------------------------------------===//
+
+let TargetPrefix = "aarch64" in {
+
+def int_aarch64_ldxr : Intrinsic<[llvm_i64_ty], [llvm_anyptr_ty]>;
+def int_aarch64_ldaxr : Intrinsic<[llvm_i64_ty], [llvm_anyptr_ty]>;
+def int_aarch64_stxr : Intrinsic<[llvm_i32_ty], [llvm_i64_ty, llvm_anyptr_ty]>;
+def int_aarch64_stlxr : Intrinsic<[llvm_i32_ty], [llvm_i64_ty, llvm_anyptr_ty]>;
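+// Illustrative IR usage (a sketch, assuming an i8* overload): a
+// load-exclusive/store-exclusive pair, where stxr returns 0 on success:
+//   %old = call i64 @llvm.aarch64.ldxr.p0i8(i8* %addr)
+//   %status = call i32 @llvm.aarch64.stxr.p0i8(i64 %new, i8* %addr)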
+
+def int_aarch64_ldxp : Intrinsic<[llvm_i64_ty, llvm_i64_ty], [llvm_ptr_ty]>;
+def int_aarch64_ldaxp : Intrinsic<[llvm_i64_ty, llvm_i64_ty], [llvm_ptr_ty]>;
+def int_aarch64_stxp : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_ptr_ty]>;
+def int_aarch64_stlxp : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_ptr_ty]>;
+
+def int_aarch64_clrex : Intrinsic<[]>;
+
+def int_aarch64_sdiv : Intrinsic<[llvm_anyint_ty], [LLVMMatchType<0>,
+ LLVMMatchType<0>], [IntrNoMem]>;
+def int_aarch64_udiv : Intrinsic<[llvm_anyint_ty], [LLVMMatchType<0>,
+ LLVMMatchType<0>], [IntrNoMem]>;
+
+//===----------------------------------------------------------------------===//
+// HINT
+
+def int_aarch64_hint : Intrinsic<[], [llvm_i32_ty]>;
+
+//===----------------------------------------------------------------------===//
+// Data Barrier Instructions
+
+def int_aarch64_dmb : GCCBuiltin<"__builtin_arm_dmb">, MSBuiltin<"__dmb">, Intrinsic<[], [llvm_i32_ty]>;
+def int_aarch64_dsb : GCCBuiltin<"__builtin_arm_dsb">, MSBuiltin<"__dsb">, Intrinsic<[], [llvm_i32_ty]>;
+def int_aarch64_isb : GCCBuiltin<"__builtin_arm_isb">, MSBuiltin<"__isb">, Intrinsic<[], [llvm_i32_ty]>;
+
+}
+
+//===----------------------------------------------------------------------===//
+// Advanced SIMD (NEON)
+
+let TargetPrefix = "aarch64" in { // All intrinsics start with "llvm.aarch64.".
+ class AdvSIMD_2Scalar_Float_Intrinsic
+ : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem]>;
+
+ class AdvSIMD_FPToIntRounding_Intrinsic
+ : Intrinsic<[llvm_anyint_ty], [llvm_anyfloat_ty], [IntrNoMem]>;
+
+ class AdvSIMD_1IntArg_Intrinsic
+ : Intrinsic<[llvm_anyint_ty], [LLVMMatchType<0>], [IntrNoMem]>;
+ class AdvSIMD_1FloatArg_Intrinsic
+ : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>], [IntrNoMem]>;
+ class AdvSIMD_1VectorArg_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [LLVMMatchType<0>], [IntrNoMem]>;
+ class AdvSIMD_1VectorArg_Expand_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+ class AdvSIMD_1VectorArg_Long_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [LLVMTruncatedType<0>], [IntrNoMem]>;
+ class AdvSIMD_1IntArg_Narrow_Intrinsic
+ : Intrinsic<[llvm_anyint_ty], [llvm_anyint_ty], [IntrNoMem]>;
+ class AdvSIMD_1VectorArg_Narrow_Intrinsic
+ : Intrinsic<[llvm_anyint_ty], [LLVMExtendedType<0>], [IntrNoMem]>;
+ class AdvSIMD_1VectorArg_Int_Across_Intrinsic
+ : Intrinsic<[llvm_anyint_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+ class AdvSIMD_1VectorArg_Float_Across_Intrinsic
+ : Intrinsic<[llvm_anyfloat_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+
+ class AdvSIMD_2IntArg_Intrinsic
+ : Intrinsic<[llvm_anyint_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem]>;
+ class AdvSIMD_2FloatArg_Intrinsic
+ : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem]>;
+ class AdvSIMD_2VectorArg_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem]>;
+ class AdvSIMD_2VectorArg_Compare_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty, LLVMMatchType<1>],
+ [IntrNoMem]>;
+ class AdvSIMD_2Arg_FloatCompare_Intrinsic
+ : Intrinsic<[llvm_anyint_ty], [llvm_anyfloat_ty, LLVMMatchType<1>],
+ [IntrNoMem]>;
+ class AdvSIMD_2VectorArg_Long_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMTruncatedType<0>, LLVMTruncatedType<0>],
+ [IntrNoMem]>;
+ class AdvSIMD_2VectorArg_Wide_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, LLVMTruncatedType<0>],
+ [IntrNoMem]>;
+ class AdvSIMD_2VectorArg_Narrow_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMExtendedType<0>, LLVMExtendedType<0>],
+ [IntrNoMem]>;
+ class AdvSIMD_2Arg_Scalar_Narrow_Intrinsic
+ : Intrinsic<[llvm_anyint_ty],
+ [LLVMExtendedType<0>, llvm_i32_ty],
+ [IntrNoMem]>;
+ class AdvSIMD_2VectorArg_Scalar_Expand_BySize_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [llvm_anyvector_ty],
+ [IntrNoMem]>;
+ class AdvSIMD_2VectorArg_Scalar_Wide_BySize_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMTruncatedType<0>],
+ [IntrNoMem]>;
+ class AdvSIMD_2VectorArg_Scalar_Wide_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMTruncatedType<0>, llvm_i32_ty],
+ [IntrNoMem]>;
+ class AdvSIMD_2VectorArg_Tied_Narrow_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMHalfElementsVectorType<0>, llvm_anyvector_ty],
+ [IntrNoMem]>;
+
+ class AdvSIMD_3VectorArg_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem]>;
+ class AdvSIMD_3VectorArg_Scalar_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, llvm_i32_ty],
+ [IntrNoMem]>;
+ class AdvSIMD_3VectorArg_Tied_Narrow_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMHalfElementsVectorType<0>, llvm_anyvector_ty,
+ LLVMMatchType<1>], [IntrNoMem]>;
+ class AdvSIMD_3VectorArg_Scalar_Tied_Narrow_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMHalfElementsVectorType<0>, llvm_anyvector_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ class AdvSIMD_CvtFxToFP_Intrinsic
+ : Intrinsic<[llvm_anyfloat_ty], [llvm_anyint_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ class AdvSIMD_CvtFPToFx_Intrinsic
+ : Intrinsic<[llvm_anyint_ty], [llvm_anyfloat_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ class AdvSIMD_1Arg_Intrinsic
+ : Intrinsic<[llvm_any_ty], [LLVMMatchType<0>], [IntrNoMem]>;
+
+ class AdvSIMD_Dot_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>],
+ [IntrNoMem]>;
+}
+
+// Arithmetic ops
+
+let TargetPrefix = "aarch64", IntrProperties = [IntrNoMem] in {
+ // Vector Add Across Lanes
+ def int_aarch64_neon_saddv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
+ def int_aarch64_neon_uaddv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
+ def int_aarch64_neon_faddv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;
+
+ // Vector Long Add Across Lanes
+ def int_aarch64_neon_saddlv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
+ def int_aarch64_neon_uaddlv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
+
+ // Vector Halving Add
+ def int_aarch64_neon_shadd : AdvSIMD_2VectorArg_Intrinsic;
+ def int_aarch64_neon_uhadd : AdvSIMD_2VectorArg_Intrinsic;
+
+ // Vector Rounding Halving Add
+ def int_aarch64_neon_srhadd : AdvSIMD_2VectorArg_Intrinsic;
+ def int_aarch64_neon_urhadd : AdvSIMD_2VectorArg_Intrinsic;
+
+ // Vector Saturating Add
+ def int_aarch64_neon_sqadd : AdvSIMD_2IntArg_Intrinsic;
+ def int_aarch64_neon_suqadd : AdvSIMD_2IntArg_Intrinsic;
+ def int_aarch64_neon_usqadd : AdvSIMD_2IntArg_Intrinsic;
+ def int_aarch64_neon_uqadd : AdvSIMD_2IntArg_Intrinsic;
+
+ // Vector Add High-Half
+ // FIXME: this is a legacy intrinsic for aarch64_simd.h. Remove it when that
+ // header is no longer supported.
+ def int_aarch64_neon_addhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;
+
+ // Vector Rounding Add High-Half
+ def int_aarch64_neon_raddhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;
+
+ // Vector Saturating Doubling Multiply High
+ def int_aarch64_neon_sqdmulh : AdvSIMD_2IntArg_Intrinsic;
+
+ // Vector Saturating Rounding Doubling Multiply High
+ def int_aarch64_neon_sqrdmulh : AdvSIMD_2IntArg_Intrinsic;
+
+  // Vector Polynomial Multiply
+ def int_aarch64_neon_pmul : AdvSIMD_2VectorArg_Intrinsic;
+
+ // Vector Long Multiply
+ def int_aarch64_neon_smull : AdvSIMD_2VectorArg_Long_Intrinsic;
+ def int_aarch64_neon_umull : AdvSIMD_2VectorArg_Long_Intrinsic;
+ def int_aarch64_neon_pmull : AdvSIMD_2VectorArg_Long_Intrinsic;
+
+ // 64-bit polynomial multiply really returns an i128, which is not legal. Fake
+ // it with a v16i8.
+ def int_aarch64_neon_pmull64 :
+ Intrinsic<[llvm_v16i8_ty], [llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;
+
+ // Vector Extending Multiply
+ def int_aarch64_neon_fmulx : AdvSIMD_2FloatArg_Intrinsic {
+ let IntrProperties = [IntrNoMem, Commutative];
+ }
+
+ // Vector Saturating Doubling Long Multiply
+ def int_aarch64_neon_sqdmull : AdvSIMD_2VectorArg_Long_Intrinsic;
+ def int_aarch64_neon_sqdmulls_scalar
+ : Intrinsic<[llvm_i64_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ // Vector Halving Subtract
+ def int_aarch64_neon_shsub : AdvSIMD_2VectorArg_Intrinsic;
+ def int_aarch64_neon_uhsub : AdvSIMD_2VectorArg_Intrinsic;
+
+ // Vector Saturating Subtract
+ def int_aarch64_neon_sqsub : AdvSIMD_2IntArg_Intrinsic;
+ def int_aarch64_neon_uqsub : AdvSIMD_2IntArg_Intrinsic;
+
+ // Vector Subtract High-Half
+ // FIXME: this is a legacy intrinsic for aarch64_simd.h. Remove it when that
+ // header is no longer supported.
+ def int_aarch64_neon_subhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;
+
+ // Vector Rounding Subtract High-Half
+ def int_aarch64_neon_rsubhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;
+
+ // Vector Compare Absolute Greater-than-or-equal
+ def int_aarch64_neon_facge : AdvSIMD_2Arg_FloatCompare_Intrinsic;
+
+ // Vector Compare Absolute Greater-than
+ def int_aarch64_neon_facgt : AdvSIMD_2Arg_FloatCompare_Intrinsic;
+
+ // Vector Absolute Difference
+ def int_aarch64_neon_sabd : AdvSIMD_2VectorArg_Intrinsic;
+ def int_aarch64_neon_uabd : AdvSIMD_2VectorArg_Intrinsic;
+ def int_aarch64_neon_fabd : AdvSIMD_2VectorArg_Intrinsic;
+
+ // Scalar Absolute Difference
+ def int_aarch64_sisd_fabd : AdvSIMD_2Scalar_Float_Intrinsic;
+
+ // Vector Max
+ def int_aarch64_neon_smax : AdvSIMD_2VectorArg_Intrinsic;
+ def int_aarch64_neon_umax : AdvSIMD_2VectorArg_Intrinsic;
+ def int_aarch64_neon_fmax : AdvSIMD_2FloatArg_Intrinsic;
+ def int_aarch64_neon_fmaxnmp : AdvSIMD_2VectorArg_Intrinsic;
+
+ // Vector Max Across Lanes
+ def int_aarch64_neon_smaxv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
+ def int_aarch64_neon_umaxv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
+ def int_aarch64_neon_fmaxv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;
+ def int_aarch64_neon_fmaxnmv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;
+
+ // Vector Min
+ def int_aarch64_neon_smin : AdvSIMD_2VectorArg_Intrinsic;
+ def int_aarch64_neon_umin : AdvSIMD_2VectorArg_Intrinsic;
+ def int_aarch64_neon_fmin : AdvSIMD_2FloatArg_Intrinsic;
+ def int_aarch64_neon_fminnmp : AdvSIMD_2VectorArg_Intrinsic;
+
+ // Vector Min/Max Number
+ def int_aarch64_neon_fminnm : AdvSIMD_2FloatArg_Intrinsic;
+ def int_aarch64_neon_fmaxnm : AdvSIMD_2FloatArg_Intrinsic;
+
+ // Vector Min Across Lanes
+ def int_aarch64_neon_sminv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
+ def int_aarch64_neon_uminv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
+ def int_aarch64_neon_fminv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;
+ def int_aarch64_neon_fminnmv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;
+
+ // Pairwise Add
+ def int_aarch64_neon_addp : AdvSIMD_2VectorArg_Intrinsic;
+
+ // Long Pairwise Add
+ // FIXME: In theory, we shouldn't need intrinsics for saddlp or
+ // uaddlp, but tblgen's type inference currently can't handle the
+ // pattern fragments this ends up generating.
+ def int_aarch64_neon_saddlp : AdvSIMD_1VectorArg_Expand_Intrinsic;
+ def int_aarch64_neon_uaddlp : AdvSIMD_1VectorArg_Expand_Intrinsic;
+
+ // Folding Maximum
+ def int_aarch64_neon_smaxp : AdvSIMD_2VectorArg_Intrinsic;
+ def int_aarch64_neon_umaxp : AdvSIMD_2VectorArg_Intrinsic;
+ def int_aarch64_neon_fmaxp : AdvSIMD_2VectorArg_Intrinsic;
+
+ // Folding Minimum
+ def int_aarch64_neon_sminp : AdvSIMD_2VectorArg_Intrinsic;
+ def int_aarch64_neon_uminp : AdvSIMD_2VectorArg_Intrinsic;
+ def int_aarch64_neon_fminp : AdvSIMD_2VectorArg_Intrinsic;
+
+ // Reciprocal Estimate/Step
+ def int_aarch64_neon_frecps : AdvSIMD_2FloatArg_Intrinsic;
+ def int_aarch64_neon_frsqrts : AdvSIMD_2FloatArg_Intrinsic;
+
+ // Reciprocal Exponent
+ def int_aarch64_neon_frecpx : AdvSIMD_1FloatArg_Intrinsic;
+
+ // Vector Saturating Shift Left
+ def int_aarch64_neon_sqshl : AdvSIMD_2IntArg_Intrinsic;
+ def int_aarch64_neon_uqshl : AdvSIMD_2IntArg_Intrinsic;
+
+ // Vector Rounding Shift Left
+ def int_aarch64_neon_srshl : AdvSIMD_2IntArg_Intrinsic;
+ def int_aarch64_neon_urshl : AdvSIMD_2IntArg_Intrinsic;
+
+ // Vector Saturating Rounding Shift Left
+ def int_aarch64_neon_sqrshl : AdvSIMD_2IntArg_Intrinsic;
+ def int_aarch64_neon_uqrshl : AdvSIMD_2IntArg_Intrinsic;
+
+ // Vector Signed->Unsigned Shift Left by Constant
+ def int_aarch64_neon_sqshlu : AdvSIMD_2IntArg_Intrinsic;
+
+ // Vector Signed->Unsigned Narrowing Saturating Shift Right by Constant
+ def int_aarch64_neon_sqshrun : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;
+
+ // Vector Signed->Unsigned Rounding Narrowing Saturating Shift Right by Const
+ def int_aarch64_neon_sqrshrun : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;
+
+ // Vector Narrowing Shift Right by Constant
+ def int_aarch64_neon_sqshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;
+ def int_aarch64_neon_uqshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;
+
+ // Vector Rounding Narrowing Shift Right by Constant
+ def int_aarch64_neon_rshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;
+
+ // Vector Rounding Narrowing Saturating Shift Right by Constant
+ def int_aarch64_neon_sqrshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;
+ def int_aarch64_neon_uqrshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;
+
+ // Vector Shift Left
+ def int_aarch64_neon_sshl : AdvSIMD_2IntArg_Intrinsic;
+ def int_aarch64_neon_ushl : AdvSIMD_2IntArg_Intrinsic;
+
+ // Vector Widening Shift Left by Constant
+ def int_aarch64_neon_shll : AdvSIMD_2VectorArg_Scalar_Wide_BySize_Intrinsic;
+ def int_aarch64_neon_sshll : AdvSIMD_2VectorArg_Scalar_Wide_Intrinsic;
+ def int_aarch64_neon_ushll : AdvSIMD_2VectorArg_Scalar_Wide_Intrinsic;
+
+ // Vector Shift Right by Constant and Insert
+ def int_aarch64_neon_vsri : AdvSIMD_3VectorArg_Scalar_Intrinsic;
+
+ // Vector Shift Left by Constant and Insert
+ def int_aarch64_neon_vsli : AdvSIMD_3VectorArg_Scalar_Intrinsic;
+
+ // Vector Saturating Narrow
+ def int_aarch64_neon_scalar_sqxtn: AdvSIMD_1IntArg_Narrow_Intrinsic;
+ def int_aarch64_neon_scalar_uqxtn : AdvSIMD_1IntArg_Narrow_Intrinsic;
+ def int_aarch64_neon_sqxtn : AdvSIMD_1VectorArg_Narrow_Intrinsic;
+ def int_aarch64_neon_uqxtn : AdvSIMD_1VectorArg_Narrow_Intrinsic;
+
+ // Vector Saturating Extract and Unsigned Narrow
+ def int_aarch64_neon_scalar_sqxtun : AdvSIMD_1IntArg_Narrow_Intrinsic;
+ def int_aarch64_neon_sqxtun : AdvSIMD_1VectorArg_Narrow_Intrinsic;
+
+ // Vector Absolute Value
+ def int_aarch64_neon_abs : AdvSIMD_1Arg_Intrinsic;
+
+ // Vector Saturating Absolute Value
+ def int_aarch64_neon_sqabs : AdvSIMD_1IntArg_Intrinsic;
+
+ // Vector Saturating Negation
+ def int_aarch64_neon_sqneg : AdvSIMD_1IntArg_Intrinsic;
+
+ // Vector Count Leading Sign Bits
+ def int_aarch64_neon_cls : AdvSIMD_1VectorArg_Intrinsic;
+
+ // Vector Reciprocal Estimate
+ def int_aarch64_neon_urecpe : AdvSIMD_1VectorArg_Intrinsic;
+ def int_aarch64_neon_frecpe : AdvSIMD_1FloatArg_Intrinsic;
+
+ // Vector Square Root Estimate
+ def int_aarch64_neon_ursqrte : AdvSIMD_1VectorArg_Intrinsic;
+ def int_aarch64_neon_frsqrte : AdvSIMD_1FloatArg_Intrinsic;
+
+ // Vector Bitwise Reverse
+ def int_aarch64_neon_rbit : AdvSIMD_1VectorArg_Intrinsic;
+
+ // Vector Conversions Between Half-Precision and Single-Precision.
+ def int_aarch64_neon_vcvtfp2hf
+ : Intrinsic<[llvm_v4i16_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_aarch64_neon_vcvthf2fp
+ : Intrinsic<[llvm_v4f32_ty], [llvm_v4i16_ty], [IntrNoMem]>;
+
+ // Vector Conversions Between Floating-point and Fixed-point.
+ def int_aarch64_neon_vcvtfp2fxs : AdvSIMD_CvtFPToFx_Intrinsic;
+ def int_aarch64_neon_vcvtfp2fxu : AdvSIMD_CvtFPToFx_Intrinsic;
+ def int_aarch64_neon_vcvtfxs2fp : AdvSIMD_CvtFxToFP_Intrinsic;
+ def int_aarch64_neon_vcvtfxu2fp : AdvSIMD_CvtFxToFP_Intrinsic;
+
+ // Vector FP->Int Conversions
+ def int_aarch64_neon_fcvtas : AdvSIMD_FPToIntRounding_Intrinsic;
+ def int_aarch64_neon_fcvtau : AdvSIMD_FPToIntRounding_Intrinsic;
+ def int_aarch64_neon_fcvtms : AdvSIMD_FPToIntRounding_Intrinsic;
+ def int_aarch64_neon_fcvtmu : AdvSIMD_FPToIntRounding_Intrinsic;
+ def int_aarch64_neon_fcvtns : AdvSIMD_FPToIntRounding_Intrinsic;
+ def int_aarch64_neon_fcvtnu : AdvSIMD_FPToIntRounding_Intrinsic;
+ def int_aarch64_neon_fcvtps : AdvSIMD_FPToIntRounding_Intrinsic;
+ def int_aarch64_neon_fcvtpu : AdvSIMD_FPToIntRounding_Intrinsic;
+ def int_aarch64_neon_fcvtzs : AdvSIMD_FPToIntRounding_Intrinsic;
+ def int_aarch64_neon_fcvtzu : AdvSIMD_FPToIntRounding_Intrinsic;
+
+  // Vector FP Rounding: only the round-to-nearest (ties to even) variant has
+  // no corresponding generic intrinsic.
+ def int_aarch64_neon_frintn : AdvSIMD_1FloatArg_Intrinsic;
+
+ // Scalar FP->Int conversions
+
+ // Vector FP Inexact Narrowing
+ def int_aarch64_neon_fcvtxn : AdvSIMD_1VectorArg_Expand_Intrinsic;
+
+ // Scalar FP Inexact Narrowing
+ def int_aarch64_sisd_fcvtxn : Intrinsic<[llvm_float_ty], [llvm_double_ty],
+ [IntrNoMem]>;
+
+ // v8.2-A Dot Product
+ def int_aarch64_neon_udot : AdvSIMD_Dot_Intrinsic;
+ def int_aarch64_neon_sdot : AdvSIMD_Dot_Intrinsic;
+}
+
+let TargetPrefix = "aarch64" in { // All intrinsics start with "llvm.aarch64.".
+ class AdvSIMD_2Vector2Index_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [llvm_anyvector_ty, llvm_i64_ty, LLVMMatchType<0>, llvm_i64_ty],
+ [IntrNoMem]>;
+}
+
+// Vector element to element moves
+def int_aarch64_neon_vcopy_lane: AdvSIMD_2Vector2Index_Intrinsic;
+
+let TargetPrefix = "aarch64" in { // All intrinsics start with "llvm.aarch64.".
+ class AdvSIMD_1Vec_Load_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [LLVMAnyPointerType<LLVMMatchType<0>>],
+ [IntrReadMem, IntrArgMemOnly]>;
+ class AdvSIMD_1Vec_Store_Lane_Intrinsic
+ : Intrinsic<[], [llvm_anyvector_ty, llvm_i64_ty, llvm_anyptr_ty],
+ [IntrArgMemOnly, NoCapture<2>]>;
+
+ class AdvSIMD_2Vec_Load_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>],
+ [LLVMAnyPointerType<LLVMMatchType<0>>],
+ [IntrReadMem, IntrArgMemOnly]>;
+ class AdvSIMD_2Vec_Load_Lane_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>],
+ [LLVMMatchType<0>, LLVMMatchType<0>,
+ llvm_i64_ty, llvm_anyptr_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ class AdvSIMD_2Vec_Store_Intrinsic
+ : Intrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMAnyPointerType<LLVMMatchType<0>>],
+ [IntrArgMemOnly, NoCapture<2>]>;
+ class AdvSIMD_2Vec_Store_Lane_Intrinsic
+ : Intrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
+ llvm_i64_ty, llvm_anyptr_ty],
+ [IntrArgMemOnly, NoCapture<3>]>;
+
+ class AdvSIMD_3Vec_Load_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>, LLVMMatchType<0>],
+ [LLVMAnyPointerType<LLVMMatchType<0>>],
+ [IntrReadMem, IntrArgMemOnly]>;
+ class AdvSIMD_3Vec_Load_Lane_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>, LLVMMatchType<0>],
+ [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>,
+ llvm_i64_ty, llvm_anyptr_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ class AdvSIMD_3Vec_Store_Intrinsic
+ : Intrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>, LLVMAnyPointerType<LLVMMatchType<0>>],
+ [IntrArgMemOnly, NoCapture<3>]>;
+ class AdvSIMD_3Vec_Store_Lane_Intrinsic
+ : Intrinsic<[], [llvm_anyvector_ty,
+ LLVMMatchType<0>, LLVMMatchType<0>,
+ llvm_i64_ty, llvm_anyptr_ty],
+ [IntrArgMemOnly, NoCapture<4>]>;
+
+ class AdvSIMD_4Vec_Load_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>, LLVMMatchType<0>],
+ [LLVMAnyPointerType<LLVMMatchType<0>>],
+ [IntrReadMem, IntrArgMemOnly]>;
+ class AdvSIMD_4Vec_Load_Lane_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>, LLVMMatchType<0>],
+ [LLVMMatchType<0>, LLVMMatchType<0>,
+ LLVMMatchType<0>, LLVMMatchType<0>,
+ llvm_i64_ty, llvm_anyptr_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ class AdvSIMD_4Vec_Store_Intrinsic
+ : Intrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>, LLVMMatchType<0>,
+ LLVMAnyPointerType<LLVMMatchType<0>>],
+ [IntrArgMemOnly, NoCapture<4>]>;
+ class AdvSIMD_4Vec_Store_Lane_Intrinsic
+ : Intrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>, LLVMMatchType<0>,
+ llvm_i64_ty, llvm_anyptr_ty],
+ [IntrArgMemOnly, NoCapture<5>]>;
+}
+
+// Memory ops
+
+def int_aarch64_neon_ld1x2 : AdvSIMD_2Vec_Load_Intrinsic;
+def int_aarch64_neon_ld1x3 : AdvSIMD_3Vec_Load_Intrinsic;
+def int_aarch64_neon_ld1x4 : AdvSIMD_4Vec_Load_Intrinsic;
+
+def int_aarch64_neon_st1x2 : AdvSIMD_2Vec_Store_Intrinsic;
+def int_aarch64_neon_st1x3 : AdvSIMD_3Vec_Store_Intrinsic;
+def int_aarch64_neon_st1x4 : AdvSIMD_4Vec_Store_Intrinsic;
+
+def int_aarch64_neon_ld2 : AdvSIMD_2Vec_Load_Intrinsic;
+def int_aarch64_neon_ld3 : AdvSIMD_3Vec_Load_Intrinsic;
+def int_aarch64_neon_ld4 : AdvSIMD_4Vec_Load_Intrinsic;
+
+def int_aarch64_neon_ld2lane : AdvSIMD_2Vec_Load_Lane_Intrinsic;
+def int_aarch64_neon_ld3lane : AdvSIMD_3Vec_Load_Lane_Intrinsic;
+def int_aarch64_neon_ld4lane : AdvSIMD_4Vec_Load_Lane_Intrinsic;
+
+def int_aarch64_neon_ld2r : AdvSIMD_2Vec_Load_Intrinsic;
+def int_aarch64_neon_ld3r : AdvSIMD_3Vec_Load_Intrinsic;
+def int_aarch64_neon_ld4r : AdvSIMD_4Vec_Load_Intrinsic;
+
+def int_aarch64_neon_st2 : AdvSIMD_2Vec_Store_Intrinsic;
+def int_aarch64_neon_st3 : AdvSIMD_3Vec_Store_Intrinsic;
+def int_aarch64_neon_st4 : AdvSIMD_4Vec_Store_Intrinsic;
+
+def int_aarch64_neon_st2lane : AdvSIMD_2Vec_Store_Lane_Intrinsic;
+def int_aarch64_neon_st3lane : AdvSIMD_3Vec_Store_Lane_Intrinsic;
+def int_aarch64_neon_st4lane : AdvSIMD_4Vec_Store_Lane_Intrinsic;
+
+let TargetPrefix = "aarch64" in { // All intrinsics start with "llvm.aarch64.".
+ class AdvSIMD_Tbl1_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [llvm_v16i8_ty, LLVMMatchType<0>],
+ [IntrNoMem]>;
+ class AdvSIMD_Tbl2_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, LLVMMatchType<0>], [IntrNoMem]>;
+ class AdvSIMD_Tbl3_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty,
+ LLVMMatchType<0>],
+ [IntrNoMem]>;
+ class AdvSIMD_Tbl4_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty,
+ LLVMMatchType<0>],
+ [IntrNoMem]>;
+
+ class AdvSIMD_Tbx1_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_v16i8_ty, LLVMMatchType<0>],
+ [IntrNoMem]>;
+ class AdvSIMD_Tbx2_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_v16i8_ty, llvm_v16i8_ty,
+ LLVMMatchType<0>],
+ [IntrNoMem]>;
+ class AdvSIMD_Tbx3_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_v16i8_ty, llvm_v16i8_ty,
+ llvm_v16i8_ty, LLVMMatchType<0>],
+ [IntrNoMem]>;
+ class AdvSIMD_Tbx4_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_v16i8_ty, llvm_v16i8_ty,
+ llvm_v16i8_ty, llvm_v16i8_ty, LLVMMatchType<0>],
+ [IntrNoMem]>;
+}
+def int_aarch64_neon_tbl1 : AdvSIMD_Tbl1_Intrinsic;
+def int_aarch64_neon_tbl2 : AdvSIMD_Tbl2_Intrinsic;
+def int_aarch64_neon_tbl3 : AdvSIMD_Tbl3_Intrinsic;
+def int_aarch64_neon_tbl4 : AdvSIMD_Tbl4_Intrinsic;
+
+def int_aarch64_neon_tbx1 : AdvSIMD_Tbx1_Intrinsic;
+def int_aarch64_neon_tbx2 : AdvSIMD_Tbx2_Intrinsic;
+def int_aarch64_neon_tbx3 : AdvSIMD_Tbx3_Intrinsic;
+def int_aarch64_neon_tbx4 : AdvSIMD_Tbx4_Intrinsic;
+
+let TargetPrefix = "aarch64" in {
+ class FPCR_Get_Intrinsic
+ : Intrinsic<[llvm_i64_ty], [], [IntrNoMem]>;
+}
+
+// FPCR
+def int_aarch64_get_fpcr : FPCR_Get_Intrinsic;
+
+let TargetPrefix = "aarch64" in {
+ class Crypto_AES_DataKey_Intrinsic
+ : Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+
+ class Crypto_AES_Data_Intrinsic
+ : Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+
+ // SHA intrinsic taking 5 words of the hash (v4i32, i32) and 4 of the schedule
+ // (v4i32).
+ class Crypto_SHA_5Hash4Schedule_Intrinsic
+ : Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+  // SHA intrinsic taking 1 word of the hash (i32).
+ class Crypto_SHA_1Hash_Intrinsic
+ : Intrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrNoMem]>;
+
+ // SHA intrinsic taking 8 words of the schedule
+ class Crypto_SHA_8Schedule_Intrinsic
+ : Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+ // SHA intrinsic taking 12 words of the schedule
+ class Crypto_SHA_12Schedule_Intrinsic
+ : Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+ // SHA intrinsic taking 8 words of the hash and 4 of the schedule.
+ class Crypto_SHA_8Hash4Schedule_Intrinsic
+ : Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+}
+
+// AES
+def int_aarch64_crypto_aese : Crypto_AES_DataKey_Intrinsic;
+def int_aarch64_crypto_aesd : Crypto_AES_DataKey_Intrinsic;
+def int_aarch64_crypto_aesmc : Crypto_AES_Data_Intrinsic;
+def int_aarch64_crypto_aesimc : Crypto_AES_Data_Intrinsic;
+
+// SHA1
+def int_aarch64_crypto_sha1c : Crypto_SHA_5Hash4Schedule_Intrinsic;
+def int_aarch64_crypto_sha1p : Crypto_SHA_5Hash4Schedule_Intrinsic;
+def int_aarch64_crypto_sha1m : Crypto_SHA_5Hash4Schedule_Intrinsic;
+def int_aarch64_crypto_sha1h : Crypto_SHA_1Hash_Intrinsic;
+
+def int_aarch64_crypto_sha1su0 : Crypto_SHA_12Schedule_Intrinsic;
+def int_aarch64_crypto_sha1su1 : Crypto_SHA_8Schedule_Intrinsic;
+
+// SHA256
+def int_aarch64_crypto_sha256h : Crypto_SHA_8Hash4Schedule_Intrinsic;
+def int_aarch64_crypto_sha256h2 : Crypto_SHA_8Hash4Schedule_Intrinsic;
+def int_aarch64_crypto_sha256su0 : Crypto_SHA_8Schedule_Intrinsic;
+def int_aarch64_crypto_sha256su1 : Crypto_SHA_12Schedule_Intrinsic;
+
+//===----------------------------------------------------------------------===//
+// CRC32
+
+let TargetPrefix = "aarch64" in {
+
+def int_aarch64_crc32b : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_aarch64_crc32cb : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_aarch64_crc32h : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_aarch64_crc32ch : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_aarch64_crc32w : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_aarch64_crc32cw : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_aarch64_crc32x : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+def int_aarch64_crc32cx : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+}
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsAMDGPU.td b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsAMDGPU.td
new file mode 100644
index 000000000..9f361410b
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsAMDGPU.td
@@ -0,0 +1,1340 @@
+//===- IntrinsicsAMDGPU.td - Defines AMDGPU intrinsics -----*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines all of the AMDGPU-specific (R600 and GCN) intrinsics.
+//
+//===----------------------------------------------------------------------===//
+
+class AMDGPUReadPreloadRegisterIntrinsic
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem, IntrSpeculatable]>;
+
+class AMDGPUReadPreloadRegisterIntrinsicNamed<string name>
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem, IntrSpeculatable]>, GCCBuiltin<name>;
+
+// Used to tag image and resource intrinsics with information used to generate
+// mem operands.
+class AMDGPURsrcIntrinsic<int rsrcarg, bit isimage = 0> {
+ int RsrcArg = rsrcarg;
+ bit IsImage = isimage;
+}
+
+let TargetPrefix = "r600" in {
+
+multiclass AMDGPUReadPreloadRegisterIntrinsic_xyz {
+ def _x : AMDGPUReadPreloadRegisterIntrinsic;
+ def _y : AMDGPUReadPreloadRegisterIntrinsic;
+ def _z : AMDGPUReadPreloadRegisterIntrinsic;
+}
+
+multiclass AMDGPUReadPreloadRegisterIntrinsic_xyz_named<string prefix> {
+ def _x : AMDGPUReadPreloadRegisterIntrinsicNamed<!strconcat(prefix, "_x")>;
+ def _y : AMDGPUReadPreloadRegisterIntrinsicNamed<!strconcat(prefix, "_y")>;
+ def _z : AMDGPUReadPreloadRegisterIntrinsicNamed<!strconcat(prefix, "_z")>;
+}
+
+defm int_r600_read_global_size : AMDGPUReadPreloadRegisterIntrinsic_xyz_named
+ <"__builtin_r600_read_global_size">;
+defm int_r600_read_ngroups : AMDGPUReadPreloadRegisterIntrinsic_xyz_named
+ <"__builtin_r600_read_ngroups">;
+defm int_r600_read_tgid : AMDGPUReadPreloadRegisterIntrinsic_xyz_named
+ <"__builtin_r600_read_tgid">;
+
+defm int_r600_read_local_size : AMDGPUReadPreloadRegisterIntrinsic_xyz;
+defm int_r600_read_tidig : AMDGPUReadPreloadRegisterIntrinsic_xyz;
+
+def int_r600_group_barrier : GCCBuiltin<"__builtin_r600_group_barrier">,
+ Intrinsic<[], [], [IntrConvergent]>;
+
+// AS 7 is PARAM_I_ADDRESS, used for kernel arguments
+def int_r600_implicitarg_ptr :
+ GCCBuiltin<"__builtin_r600_implicitarg_ptr">,
+ Intrinsic<[LLVMQualPointerType<llvm_i8_ty, 7>], [],
+ [IntrNoMem, IntrSpeculatable]>;
+
+def int_r600_rat_store_typed :
+ // 1st parameter: Data
+ // 2nd parameter: Index
+ // 3rd parameter: Constant RAT ID
+ Intrinsic<[], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_i32_ty], []>,
+ GCCBuiltin<"__builtin_r600_rat_store_typed">;
+
+def int_r600_recipsqrt_ieee : Intrinsic<
+ [llvm_anyfloat_ty], [LLVMMatchType<0>], [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_r600_recipsqrt_clamped : Intrinsic<
+ [llvm_anyfloat_ty], [LLVMMatchType<0>], [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_r600_cube : Intrinsic<
+ [llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_r600_store_stream_output : Intrinsic<
+ [], [llvm_v4f32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], []
+>;
+
+class TextureIntrinsicFloatInput : Intrinsic<[llvm_v4f32_ty], [
+ llvm_v4f32_ty, // Coord
+ llvm_i32_ty, // offset_x
+ llvm_i32_ty, // offset_y,
+ llvm_i32_ty, // offset_z,
+ llvm_i32_ty, // resource_id
+ llvm_i32_ty, // samplerid
+ llvm_i32_ty, // coord_type_x
+ llvm_i32_ty, // coord_type_y
+ llvm_i32_ty, // coord_type_z
+ llvm_i32_ty], // coord_type_w
+ [IntrNoMem]
+>;
+
+class TextureIntrinsicInt32Input : Intrinsic<[llvm_v4i32_ty], [
+ llvm_v4i32_ty, // Coord
+ llvm_i32_ty, // offset_x
+ llvm_i32_ty, // offset_y,
+ llvm_i32_ty, // offset_z,
+ llvm_i32_ty, // resource_id
+ llvm_i32_ty, // samplerid
+ llvm_i32_ty, // coord_type_x
+ llvm_i32_ty, // coord_type_y
+ llvm_i32_ty, // coord_type_z
+ llvm_i32_ty], // coord_type_w
+ [IntrNoMem]
+>;
+
+def int_r600_store_swizzle :
+ Intrinsic<[], [llvm_v4f32_ty, llvm_i32_ty, llvm_i32_ty], []
+>;
+
+def int_r600_tex : TextureIntrinsicFloatInput;
+def int_r600_texc : TextureIntrinsicFloatInput;
+def int_r600_txl : TextureIntrinsicFloatInput;
+def int_r600_txlc : TextureIntrinsicFloatInput;
+def int_r600_txb : TextureIntrinsicFloatInput;
+def int_r600_txbc : TextureIntrinsicFloatInput;
+def int_r600_txf : TextureIntrinsicInt32Input;
+def int_r600_txq : TextureIntrinsicInt32Input;
+def int_r600_ddx : TextureIntrinsicFloatInput;
+def int_r600_ddy : TextureIntrinsicFloatInput;
+
+def int_r600_dot4 : Intrinsic<[llvm_float_ty],
+ [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_r600_kill : Intrinsic<[], [llvm_float_ty], []>;
+
+} // End TargetPrefix = "r600"
+
+let TargetPrefix = "amdgcn" in {
+
+//===----------------------------------------------------------------------===//
+// ABI Special Intrinsics
+//===----------------------------------------------------------------------===//
+
+defm int_amdgcn_workitem_id : AMDGPUReadPreloadRegisterIntrinsic_xyz;
+defm int_amdgcn_workgroup_id : AMDGPUReadPreloadRegisterIntrinsic_xyz_named
+ <"__builtin_amdgcn_workgroup_id">;
+
+def int_amdgcn_dispatch_ptr :
+ GCCBuiltin<"__builtin_amdgcn_dispatch_ptr">,
+ Intrinsic<[LLVMQualPointerType<llvm_i8_ty, 4>], [],
+ [IntrNoMem, IntrSpeculatable]>;
+
+def int_amdgcn_queue_ptr :
+ GCCBuiltin<"__builtin_amdgcn_queue_ptr">,
+ Intrinsic<[LLVMQualPointerType<llvm_i8_ty, 4>], [],
+ [IntrNoMem, IntrSpeculatable]>;
+
+def int_amdgcn_kernarg_segment_ptr :
+ GCCBuiltin<"__builtin_amdgcn_kernarg_segment_ptr">,
+ Intrinsic<[LLVMQualPointerType<llvm_i8_ty, 4>], [],
+ [IntrNoMem, IntrSpeculatable]>;
+
+def int_amdgcn_implicitarg_ptr :
+ GCCBuiltin<"__builtin_amdgcn_implicitarg_ptr">,
+ Intrinsic<[LLVMQualPointerType<llvm_i8_ty, 4>], [],
+ [IntrNoMem, IntrSpeculatable]>;
+
+def int_amdgcn_groupstaticsize :
+ GCCBuiltin<"__builtin_amdgcn_groupstaticsize">,
+ Intrinsic<[llvm_i32_ty], [], [IntrNoMem, IntrSpeculatable]>;
+
+def int_amdgcn_dispatch_id :
+ GCCBuiltin<"__builtin_amdgcn_dispatch_id">,
+ Intrinsic<[llvm_i64_ty], [], [IntrNoMem, IntrSpeculatable]>;
+
+def int_amdgcn_implicit_buffer_ptr :
+ GCCBuiltin<"__builtin_amdgcn_implicit_buffer_ptr">,
+ Intrinsic<[LLVMQualPointerType<llvm_i8_ty, 4>], [],
+ [IntrNoMem, IntrSpeculatable]>;
+
+// Set EXEC to the 64-bit value given.
+// This is always moved to the beginning of the basic block.
+def int_amdgcn_init_exec : Intrinsic<[],
+ [llvm_i64_ty], // 64-bit literal constant
+ [IntrConvergent]>;
+
+// Set EXEC according to a thread count packed in an SGPR input:
+// thread_count = (input >> bitoffset) & 0x7f;
+// This is always moved to the beginning of the basic block.
+def int_amdgcn_init_exec_from_input : Intrinsic<[],
+ [llvm_i32_ty, // 32-bit SGPR input
+ llvm_i32_ty], // bit offset of the thread count
+ [IntrConvergent]>;
+
+
+//===----------------------------------------------------------------------===//
+// Instruction Intrinsics
+//===----------------------------------------------------------------------===//
+
+// The first parameter is the s_sendmsg immediate (i16);
+// the second one is copied to m0.
+def int_amdgcn_s_sendmsg : GCCBuiltin<"__builtin_amdgcn_s_sendmsg">,
+ Intrinsic <[], [llvm_i32_ty, llvm_i32_ty], []>;
+def int_amdgcn_s_sendmsghalt : GCCBuiltin<"__builtin_amdgcn_s_sendmsghalt">,
+ Intrinsic <[], [llvm_i32_ty, llvm_i32_ty], []>;
+
+def int_amdgcn_s_barrier : GCCBuiltin<"__builtin_amdgcn_s_barrier">,
+ Intrinsic<[], [], [IntrConvergent]>;
+
+def int_amdgcn_wave_barrier : GCCBuiltin<"__builtin_amdgcn_wave_barrier">,
+ Intrinsic<[], [], [IntrConvergent]>;
+
+def int_amdgcn_s_waitcnt : GCCBuiltin<"__builtin_amdgcn_s_waitcnt">,
+ Intrinsic<[], [llvm_i32_ty], []>;
+
+def int_amdgcn_div_scale : Intrinsic<
+ // 1st parameter: Numerator
+ // 2nd parameter: Denominator
+  // 3rd parameter: Constant to select between the first and
+  // second operand (0 = first, 1 = second).
+ [llvm_anyfloat_ty, llvm_i1_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, llvm_i1_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_div_fmas : Intrinsic<[llvm_anyfloat_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>, llvm_i1_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_div_fixup : Intrinsic<[llvm_anyfloat_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_trig_preop : Intrinsic<
+ [llvm_anyfloat_ty], [LLVMMatchType<0>, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_sin : Intrinsic<
+ [llvm_anyfloat_ty], [LLVMMatchType<0>],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_cos : Intrinsic<
+ [llvm_anyfloat_ty], [LLVMMatchType<0>], [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_log_clamp : Intrinsic<
+ [llvm_anyfloat_ty], [LLVMMatchType<0>], [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_fmul_legacy : GCCBuiltin<"__builtin_amdgcn_fmul_legacy">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_rcp : Intrinsic<
+ [llvm_anyfloat_ty], [LLVMMatchType<0>], [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_rcp_legacy : GCCBuiltin<"__builtin_amdgcn_rcp_legacy">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_rsq : Intrinsic<
+ [llvm_anyfloat_ty], [LLVMMatchType<0>], [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_rsq_legacy : GCCBuiltin<"__builtin_amdgcn_rsq_legacy">,
+ Intrinsic<
+ [llvm_float_ty], [llvm_float_ty], [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_rsq_clamp : Intrinsic<
+ [llvm_anyfloat_ty], [LLVMMatchType<0>], [IntrNoMem, IntrSpeculatable]>;
+
+def int_amdgcn_ldexp : Intrinsic<
+ [llvm_anyfloat_ty], [LLVMMatchType<0>, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_frexp_mant : Intrinsic<
+ [llvm_anyfloat_ty], [LLVMMatchType<0>], [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_frexp_exp : Intrinsic<
+ [llvm_anyint_ty], [llvm_anyfloat_ty], [IntrNoMem, IntrSpeculatable]
+>;
+
+// v_fract is buggy on SI/CI. It mishandles infinities, may return 1.0
+// and always uses rtz, so is not suitable for implementing the OpenCL
+// fract function. It should be ok on VI.
+def int_amdgcn_fract : Intrinsic<
+ [llvm_anyfloat_ty], [LLVMMatchType<0>], [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_cvt_pkrtz : Intrinsic<
+ [llvm_v2f16_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_cvt_pknorm_i16 : Intrinsic<
+ [llvm_v2i16_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_cvt_pknorm_u16 : Intrinsic<
+ [llvm_v2i16_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_cvt_pk_i16 : Intrinsic<
+ [llvm_v2i16_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_cvt_pk_u16 : Intrinsic<
+ [llvm_v2i16_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_class : Intrinsic<
+ [llvm_i1_ty], [llvm_anyfloat_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_fmed3 : GCCBuiltin<"__builtin_amdgcn_fmed3">,
+ Intrinsic<[llvm_anyfloat_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_cubeid : GCCBuiltin<"__builtin_amdgcn_cubeid">,
+ Intrinsic<[llvm_float_ty],
+ [llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_cubema : GCCBuiltin<"__builtin_amdgcn_cubema">,
+ Intrinsic<[llvm_float_ty],
+ [llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_cubesc : GCCBuiltin<"__builtin_amdgcn_cubesc">,
+ Intrinsic<[llvm_float_ty],
+ [llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_cubetc : GCCBuiltin<"__builtin_amdgcn_cubetc">,
+ Intrinsic<[llvm_float_ty],
+ [llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+// v_ffbh_i32, as opposed to v_ffbh_u32. For v_ffbh_u32, llvm.ctlz
+// should be used.
+def int_amdgcn_sffbh :
+ Intrinsic<[llvm_anyint_ty], [LLVMMatchType<0>],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+// v_mad_f32|f16/v_mac_f32|f16, selected regardless of denorm support.
+def int_amdgcn_fmad_ftz :
+ Intrinsic<[llvm_anyfloat_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+// Fields should mirror atomicrmw
+class AMDGPUAtomicIncIntrin : Intrinsic<[llvm_anyint_ty],
+ [llvm_anyptr_ty,
+ LLVMMatchType<0>,
+ llvm_i32_ty, // ordering
+ llvm_i32_ty, // scope
+ llvm_i1_ty], // isVolatile
+ [IntrArgMemOnly, NoCapture<0>], "",
+ [SDNPMemOperand]
+>;
+
+def int_amdgcn_atomic_inc : AMDGPUAtomicIncIntrin;
+def int_amdgcn_atomic_dec : AMDGPUAtomicIncIntrin;
+
+class AMDGPULDSF32Intrin<string clang_builtin> :
+ GCCBuiltin<clang_builtin>,
+ Intrinsic<[llvm_float_ty],
+ [LLVMQualPointerType<llvm_float_ty, 3>,
+ llvm_float_ty,
+ llvm_i32_ty, // ordering
+ llvm_i32_ty, // scope
+ llvm_i1_ty], // isVolatile
+ [IntrArgMemOnly, NoCapture<0>]
+>;
+
+def int_amdgcn_ds_fadd : AMDGPULDSF32Intrin<"__builtin_amdgcn_ds_faddf">;
+def int_amdgcn_ds_fmin : AMDGPULDSF32Intrin<"__builtin_amdgcn_ds_fminf">;
+def int_amdgcn_ds_fmax : AMDGPULDSF32Intrin<"__builtin_amdgcn_ds_fmaxf">;
+
+} // TargetPrefix = "amdgcn"
+
+// New-style image intrinsics
+
+//////////////////////////////////////////////////////////////////////////
+// Dimension-aware image intrinsics framework
+//////////////////////////////////////////////////////////////////////////
+
+// Helper class to represent (type, name) combinations of arguments. The
+// argument names are explanatory and used as DAG operand names for codegen
+// pattern matching.
+class AMDGPUArg<LLVMType ty, string name> {
+ LLVMType Type = ty;
+ string Name = name;
+}
+
+// Return [AMDGPUArg<basety, names[0]>, AMDGPUArg<LLVMMatchType<0>, names[1]>, ...]
+class makeArgList<list<string> names, LLVMType basety> {
+ list<AMDGPUArg> ret =
+ !listconcat([AMDGPUArg<basety, names[0]>],
+ !foreach(name, !tail(names), AMDGPUArg<LLVMMatchType<0>, name>));
+}
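+// For example, makeArgList<["s", "t"], llvm_anyfloat_ty>.ret is
+// [AMDGPUArg<llvm_anyfloat_ty, "s">, AMDGPUArg<LLVMMatchType<0>, "t">].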
+
+// Return arglist, with LLVMMatchType's references shifted by 'shift'.
+class arglistmatchshift<list<AMDGPUArg> arglist, int shift> {
+ list<AMDGPUArg> ret =
+ !foreach(arg, arglist,
+ !if(!isa<LLVMMatchType>(arg.Type),
+ AMDGPUArg<LLVMMatchType<!add(!cast<LLVMMatchType>(arg.Type).Number, shift)>,
+ arg.Name>,
+ arg));
+}
+
+// Return the concatenation of the given arglists. LLVMMatchType's are adjusted
+// accordingly, and shifted by an additional 'shift'.
+class arglistconcat<list<list<AMDGPUArg>> arglists, int shift = 0> {
+ list<AMDGPUArg> ret =
+ !foldl([]<AMDGPUArg>, arglists, lhs, rhs,
+ !listconcat(
+ lhs,
+ arglistmatchshift<rhs,
+ !add(shift, !foldl(0, lhs, a, b,
+ !add(a, b.Type.isAny)))>.ret));
+}
+
+// Represent texture/image types / dimensionality.
+class AMDGPUDimProps<string name, list<string> coord_names, list<string> slice_names> {
+ AMDGPUDimProps Dim = !cast<AMDGPUDimProps>(NAME);
+ string Name = name; // e.g. "2darraymsaa"
+ bit DA = 0; // DA bit in MIMG encoding
+
+ list<AMDGPUArg> CoordSliceArgs =
+ makeArgList<!listconcat(coord_names, slice_names), llvm_anyfloat_ty>.ret;
+ list<AMDGPUArg> CoordSliceIntArgs =
+ makeArgList<!listconcat(coord_names, slice_names), llvm_anyint_ty>.ret;
+ list<AMDGPUArg> GradientArgs =
+ makeArgList<!listconcat(!foreach(name, coord_names, "d" # name # "dh"),
+ !foreach(name, coord_names, "d" # name # "dv")),
+ llvm_anyfloat_ty>.ret;
+
+ bits<8> NumCoords = !size(CoordSliceArgs);
+ bits<8> NumGradients = !size(GradientArgs);
+}
+
+def AMDGPUDim1D : AMDGPUDimProps<"1d", ["s"], []>;
+def AMDGPUDim2D : AMDGPUDimProps<"2d", ["s", "t"], []>;
+def AMDGPUDim3D : AMDGPUDimProps<"3d", ["s", "t", "r"], []>;
+let DA = 1 in {
+ def AMDGPUDimCube : AMDGPUDimProps<"cube", ["s", "t"], ["face"]>;
+ def AMDGPUDim1DArray : AMDGPUDimProps<"1darray", ["s"], ["slice"]>;
+ def AMDGPUDim2DArray : AMDGPUDimProps<"2darray", ["s", "t"], ["slice"]>;
+}
+def AMDGPUDim2DMsaa : AMDGPUDimProps<"2dmsaa", ["s", "t"], ["fragid"]>;
+let DA = 1 in {
+ def AMDGPUDim2DArrayMsaa : AMDGPUDimProps<"2darraymsaa", ["s", "t"], ["slice", "fragid"]>;
+}
+
+def AMDGPUDims {
+ list<AMDGPUDimProps> NoMsaa = [AMDGPUDim1D, AMDGPUDim2D, AMDGPUDim3D,
+ AMDGPUDimCube, AMDGPUDim1DArray,
+ AMDGPUDim2DArray];
+ list<AMDGPUDimProps> Msaa = [AMDGPUDim2DMsaa, AMDGPUDim2DArrayMsaa];
+ list<AMDGPUDimProps> All = !listconcat(NoMsaa, Msaa);
+}
+
+// Represent sample variants, i.e. _C, _O, _B, ... and combinations thereof.
+class AMDGPUSampleVariant<string ucmod, string lcmod, list<AMDGPUArg> extra_addr> {
+ string UpperCaseMod = ucmod;
+ string LowerCaseMod = lcmod;
+
+ // {offset} {bias} {z-compare}
+ list<AMDGPUArg> ExtraAddrArgs = extra_addr;
+ bit Gradients = 0;
+
+ // Name of the {lod} or {clamp} argument that is appended to the coordinates,
+ // if any.
+ string LodOrClamp = "";
+}
+
+// AMDGPUSampleVariants: all variants supported by IMAGE_SAMPLE
+// AMDGPUSampleVariantsNoGradients: variants supported by IMAGE_GATHER4
+defset list<AMDGPUSampleVariant> AMDGPUSampleVariants = {
+ multiclass AMDGPUSampleHelper_Offset<string ucmod, string lcmod,
+ list<AMDGPUArg> extra_addr> {
+ def NAME#lcmod : AMDGPUSampleVariant<ucmod, lcmod, extra_addr>;
+ def NAME#lcmod#_o : AMDGPUSampleVariant<
+ ucmod#"_O", lcmod#"_o", !listconcat([AMDGPUArg<llvm_i32_ty, "offset">], extra_addr)>;
+ }
+
+ multiclass AMDGPUSampleHelper_Compare<string ucmod, string lcmod,
+ list<AMDGPUArg> extra_addr> {
+ defm NAME : AMDGPUSampleHelper_Offset<ucmod, lcmod, extra_addr>;
+ defm NAME : AMDGPUSampleHelper_Offset<
+ "_C"#ucmod, "_c"#lcmod, !listconcat(extra_addr, [AMDGPUArg<llvm_float_ty, "zcompare">])>;
+ }
+
+ multiclass AMDGPUSampleHelper_Clamp<string ucmod, string lcmod,
+ list<AMDGPUArg> extra_addr> {
+ defm NAME : AMDGPUSampleHelper_Compare<ucmod, lcmod, extra_addr>;
+ let LodOrClamp = "clamp" in
+ defm NAME : AMDGPUSampleHelper_Compare<ucmod#"_CL", lcmod#"_cl", extra_addr>;
+ }
+
+ defset list<AMDGPUSampleVariant> AMDGPUSampleVariantsNoGradients = {
+ defm AMDGPUSample : AMDGPUSampleHelper_Clamp<"", "", []>;
+ defm AMDGPUSample : AMDGPUSampleHelper_Clamp<
+ "_B", "_b", [AMDGPUArg<llvm_anyfloat_ty, "bias">]>;
+ let LodOrClamp = "lod" in
+ defm AMDGPUSample : AMDGPUSampleHelper_Compare<"_L", "_l", []>;
+ defm AMDGPUSample : AMDGPUSampleHelper_Compare<"_LZ", "_lz", []>;
+ }
+
+ let Gradients = 1 in {
+ defm AMDGPUSample : AMDGPUSampleHelper_Clamp<"_D", "_d", []>;
+ defm AMDGPUSample : AMDGPUSampleHelper_Clamp<"_CD", "_cd", []>;
+ }
+}
+
+// Helper class to capture the profile of a dimension-aware image intrinsic.
+// This information is used to generate the intrinsic's type and to inform
+// codegen pattern matching.
+class AMDGPUDimProfile<string opmod,
+ AMDGPUDimProps dim> {
+ AMDGPUDimProps Dim = dim;
+ string OpMod = opmod; // the corresponding instruction is named IMAGE_OpMod
+
+  // These are intended to be overwritten by subclasses
+ bit IsSample = 0;
+ bit IsAtomic = 0;
+ list<LLVMType> RetTypes = [];
+ list<AMDGPUArg> DataArgs = [];
+ list<AMDGPUArg> ExtraAddrArgs = [];
+ bit Gradients = 0;
+ string LodClampMip = "";
+
+ int NumRetAndDataAnyTypes =
+ !foldl(0, !listconcat(RetTypes, !foreach(arg, DataArgs, arg.Type)), a, b,
+ !add(a, b.isAny));
+
+ list<AMDGPUArg> AddrArgs =
+ arglistconcat<[ExtraAddrArgs,
+ !if(Gradients, dim.GradientArgs, []),
+ !listconcat(!if(IsSample, dim.CoordSliceArgs, dim.CoordSliceIntArgs),
+ !if(!eq(LodClampMip, ""),
+ []<AMDGPUArg>,
+ [AMDGPUArg<LLVMMatchType<0>, LodClampMip>]))],
+ NumRetAndDataAnyTypes>.ret;
+ list<LLVMType> AddrTypes = !foreach(arg, AddrArgs, arg.Type);
+ list<AMDGPUArg> AddrDefaultArgs =
+ !foreach(arg, AddrArgs,
+ AMDGPUArg<!if(!or(arg.Type.isAny, !isa<LLVMMatchType>(arg.Type)),
+ !if(IsSample, llvm_float_ty, llvm_i32_ty), arg.Type),
+ arg.Name>);
+ list<AMDGPUArg> AddrA16Args =
+ !foreach(arg, AddrArgs,
+ AMDGPUArg<!if(!or(arg.Type.isAny, !isa<LLVMMatchType>(arg.Type)),
+ !if(IsSample, llvm_half_ty, llvm_i16_ty), arg.Type),
+ arg.Name>);
+}
+
+class AMDGPUDimProfileCopy<AMDGPUDimProfile base> : AMDGPUDimProfile<base.OpMod, base.Dim> {
+ let IsSample = base.IsSample;
+ let IsAtomic = base.IsAtomic;
+ let RetTypes = base.RetTypes;
+ let DataArgs = base.DataArgs;
+ let ExtraAddrArgs = base.ExtraAddrArgs;
+ let Gradients = base.Gradients;
+ let LodClampMip = base.LodClampMip;
+}
+
+class AMDGPUDimSampleProfile<string opmod,
+ AMDGPUDimProps dim,
+ AMDGPUSampleVariant sample> : AMDGPUDimProfile<opmod, dim> {
+ let IsSample = 1;
+ let RetTypes = [llvm_anyfloat_ty];
+ let ExtraAddrArgs = sample.ExtraAddrArgs;
+ let Gradients = sample.Gradients;
+ let LodClampMip = sample.LodOrClamp;
+}
+
+class AMDGPUDimNoSampleProfile<string opmod,
+ AMDGPUDimProps dim,
+ list<LLVMType> retty,
+ list<AMDGPUArg> dataargs,
+ bit Mip = 0> : AMDGPUDimProfile<opmod, dim> {
+ let RetTypes = retty;
+ let DataArgs = dataargs;
+ let LodClampMip = !if(Mip, "mip", "");
+}
+
+class AMDGPUDimAtomicProfile<string opmod,
+ AMDGPUDimProps dim,
+ list<AMDGPUArg> dataargs> : AMDGPUDimProfile<opmod, dim> {
+ let RetTypes = [llvm_anyint_ty];
+ let DataArgs = dataargs;
+ let IsAtomic = 1;
+}
+
+class AMDGPUDimGetResInfoProfile<AMDGPUDimProps dim> : AMDGPUDimProfile<"GET_RESINFO", dim> {
+ let RetTypes = [llvm_anyfloat_ty];
+ let DataArgs = [];
+ let AddrArgs = [AMDGPUArg<llvm_anyint_ty, "mip">];
+ let LodClampMip = "mip";
+}
+
+// All dimension-aware intrinsics are derived from this class.
+class AMDGPUImageDimIntrinsic<AMDGPUDimProfile P_,
+ list<IntrinsicProperty> props,
+ list<SDNodeProperty> sdnodeprops> : Intrinsic<
+ P_.RetTypes, // vdata(VGPR) -- for load/atomic-with-return
+ !listconcat(
+ !foreach(arg, P_.DataArgs, arg.Type), // vdata(VGPR) -- for store/atomic
+ !if(P_.IsAtomic, [], [llvm_i32_ty]), // dmask(imm)
+ P_.AddrTypes, // vaddr(VGPR)
+ [llvm_v8i32_ty], // rsrc(SGPR)
+ !if(P_.IsSample, [llvm_v4i32_ty, // samp(SGPR)
+ llvm_i1_ty], []), // unorm(imm)
+ [llvm_i32_ty, // texfailctrl(imm; bit 0 = tfe, bit 1 = lwe)
+ llvm_i32_ty]), // cachepolicy(imm; bit 0 = glc, bit 1 = slc)
+ props, "", sdnodeprops>,
+ AMDGPURsrcIntrinsic<!add(!size(P_.DataArgs), !size(P_.AddrTypes),
+ !if(P_.IsAtomic, 0, 1)), 1> {
+ AMDGPUDimProfile P = P_;
+
+ AMDGPUImageDimIntrinsic Intr = !cast<AMDGPUImageDimIntrinsic>(NAME);
+
+ let TargetPrefix = "amdgcn";
+}
+
+// Marker class for intrinsics with a DMask that determines the returned
+// channels.
+class AMDGPUImageDMaskIntrinsic;
+
+defset list<AMDGPUImageDimIntrinsic> AMDGPUImageDimIntrinsics = {
+
+ //////////////////////////////////////////////////////////////////////////
+ // Load and store intrinsics
+ //////////////////////////////////////////////////////////////////////////
+ multiclass AMDGPUImageDimIntrinsicsNoMsaa<string opmod,
+ list<LLVMType> retty,
+ list<AMDGPUArg> dataargs,
+ list<IntrinsicProperty> props,
+ list<SDNodeProperty> sdnodeprops,
+ bit Mip = 0> {
+ foreach dim = AMDGPUDims.NoMsaa in {
+ def !strconcat(NAME, "_", dim.Name)
+ : AMDGPUImageDimIntrinsic<
+ AMDGPUDimNoSampleProfile<opmod, dim, retty, dataargs, Mip>,
+ props, sdnodeprops>;
+ }
+ }
+
+ multiclass AMDGPUImageDimIntrinsicsAll<string opmod,
+ list<LLVMType> retty,
+ list<AMDGPUArg> dataargs,
+ list<IntrinsicProperty> props,
+ list<SDNodeProperty> sdnodeprops,
+ bit Mip = 0> {
+ foreach dim = AMDGPUDims.All in {
+ def !strconcat(NAME, "_", dim.Name)
+ : AMDGPUImageDimIntrinsic<
+ AMDGPUDimNoSampleProfile<opmod, dim, retty, dataargs, Mip>,
+ props, sdnodeprops>;
+ }
+ }
+
+ defm int_amdgcn_image_load
+ : AMDGPUImageDimIntrinsicsAll<"LOAD", [llvm_anyfloat_ty], [], [IntrReadMem],
+ [SDNPMemOperand]>,
+ AMDGPUImageDMaskIntrinsic;
+ defm int_amdgcn_image_load_mip
+ : AMDGPUImageDimIntrinsicsNoMsaa<"LOAD_MIP", [llvm_anyfloat_ty], [],
+ [IntrReadMem], [SDNPMemOperand], 1>,
+ AMDGPUImageDMaskIntrinsic;
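+  // Illustrative IR call for one generated intrinsic (operands are
+  // placeholders; the name suffix encodes the overloaded result and
+  // coordinate types):
+  //   %v = call <4 x float> @llvm.amdgcn.image.load.1d.v4f32.i32(
+  //            i32 15, i32 %s, <8 x i32> %rsrc, i32 0, i32 0)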
+
+ defm int_amdgcn_image_store : AMDGPUImageDimIntrinsicsAll<
+ "STORE", [], [AMDGPUArg<llvm_anyfloat_ty, "vdata">],
+ [IntrWriteMem], [SDNPMemOperand]>;
+ defm int_amdgcn_image_store_mip : AMDGPUImageDimIntrinsicsNoMsaa<
+ "STORE_MIP", [], [AMDGPUArg<llvm_anyfloat_ty, "vdata">],
+ [IntrWriteMem], [SDNPMemOperand], 1>;
+
+ //////////////////////////////////////////////////////////////////////////
+ // sample and getlod intrinsics
+ //////////////////////////////////////////////////////////////////////////
+ multiclass AMDGPUImageDimSampleDims<string opmod,
+ AMDGPUSampleVariant sample,
+ bit NoMem = 0> {
+ foreach dim = AMDGPUDims.NoMsaa in {
+ def !strconcat(NAME, "_", dim.Name) : AMDGPUImageDimIntrinsic<
+ AMDGPUDimSampleProfile<opmod, dim, sample>,
+ !if(NoMem, [IntrNoMem], [IntrReadMem]),
+ !if(NoMem, [], [SDNPMemOperand])>;
+ }
+ }
+
+ foreach sample = AMDGPUSampleVariants in {
+ defm int_amdgcn_image_sample # sample.LowerCaseMod
+ : AMDGPUImageDimSampleDims<"SAMPLE" # sample.UpperCaseMod, sample>,
+ AMDGPUImageDMaskIntrinsic;
+ }
+
+ defm int_amdgcn_image_getlod
+ : AMDGPUImageDimSampleDims<"GET_LOD", AMDGPUSample, 1>,
+ AMDGPUImageDMaskIntrinsic;
+
+ //////////////////////////////////////////////////////////////////////////
+ // getresinfo intrinsics
+ //////////////////////////////////////////////////////////////////////////
+ foreach dim = AMDGPUDims.All in {
+ def !strconcat("int_amdgcn_image_getresinfo_", dim.Name)
+ : AMDGPUImageDimIntrinsic<AMDGPUDimGetResInfoProfile<dim>, [IntrNoMem], []>,
+ AMDGPUImageDMaskIntrinsic;
+ }
+
+ //////////////////////////////////////////////////////////////////////////
+ // gather4 intrinsics
+ //////////////////////////////////////////////////////////////////////////
+ foreach sample = AMDGPUSampleVariantsNoGradients in {
+ foreach dim = [AMDGPUDim2D, AMDGPUDimCube, AMDGPUDim2DArray] in {
+ def int_amdgcn_image_gather4 # sample.LowerCaseMod # _ # dim.Name:
+ AMDGPUImageDimIntrinsic<
+ AMDGPUDimSampleProfile<"GATHER4" # sample.UpperCaseMod, dim, sample>,
+ [IntrReadMem], [SDNPMemOperand]>;
+ }
+ }
+}
+
+//////////////////////////////////////////////////////////////////////////
+// atomic intrinsics
+//////////////////////////////////////////////////////////////////////////
+defset list<AMDGPUImageDimIntrinsic> AMDGPUImageDimAtomicIntrinsics = {
+ multiclass AMDGPUImageDimAtomicX<string opmod, list<AMDGPUArg> dataargs> {
+ foreach dim = AMDGPUDims.All in {
+ def !strconcat(NAME, "_", dim.Name)
+ : AMDGPUImageDimIntrinsic<
+ AMDGPUDimAtomicProfile<opmod, dim, dataargs>,
+ [], [SDNPMemOperand]>;
+ }
+ }
+
+ multiclass AMDGPUImageDimAtomic<string opmod> {
+ defm "" : AMDGPUImageDimAtomicX<opmod, [AMDGPUArg<LLVMMatchType<0>, "vdata">]>;
+ }
+
+ defm int_amdgcn_image_atomic_swap : AMDGPUImageDimAtomic<"ATOMIC_SWAP">;
+ defm int_amdgcn_image_atomic_add : AMDGPUImageDimAtomic<"ATOMIC_ADD">;
+ defm int_amdgcn_image_atomic_sub : AMDGPUImageDimAtomic<"ATOMIC_SUB">;
+ defm int_amdgcn_image_atomic_smin : AMDGPUImageDimAtomic<"ATOMIC_SMIN">;
+ defm int_amdgcn_image_atomic_umin : AMDGPUImageDimAtomic<"ATOMIC_UMIN">;
+ defm int_amdgcn_image_atomic_smax : AMDGPUImageDimAtomic<"ATOMIC_SMAX">;
+ defm int_amdgcn_image_atomic_umax : AMDGPUImageDimAtomic<"ATOMIC_UMAX">;
+ defm int_amdgcn_image_atomic_and : AMDGPUImageDimAtomic<"ATOMIC_AND">;
+ defm int_amdgcn_image_atomic_or : AMDGPUImageDimAtomic<"ATOMIC_OR">;
+ defm int_amdgcn_image_atomic_xor : AMDGPUImageDimAtomic<"ATOMIC_XOR">;
+
+ // TODO: INC/DEC are weird: they seem to have a vdata argument in hardware,
+ // even though it clearly shouldn't be needed
+ defm int_amdgcn_image_atomic_inc : AMDGPUImageDimAtomic<"ATOMIC_INC">;
+ defm int_amdgcn_image_atomic_dec : AMDGPUImageDimAtomic<"ATOMIC_DEC">;
+
+ defm int_amdgcn_image_atomic_cmpswap :
+ AMDGPUImageDimAtomicX<"ATOMIC_CMPSWAP", [AMDGPUArg<LLVMMatchType<0>, "src">,
+ AMDGPUArg<LLVMMatchType<0>, "cmp">]>;
+}
+
+//////////////////////////////////////////////////////////////////////////
+// Buffer intrinsics
+//////////////////////////////////////////////////////////////////////////
+
+let TargetPrefix = "amdgcn" in {
+
+defset list<AMDGPURsrcIntrinsic> AMDGPUBufferIntrinsics = {
+
+class AMDGPUBufferLoad : Intrinsic <
+ [llvm_anyfloat_ty],
+ [llvm_v4i32_ty, // rsrc(SGPR)
+ llvm_i32_ty, // vindex(VGPR)
+ llvm_i32_ty, // offset(SGPR/VGPR/imm)
+ llvm_i1_ty, // glc(imm)
+ llvm_i1_ty], // slc(imm)
+ [IntrReadMem], "", [SDNPMemOperand]>,
+ AMDGPURsrcIntrinsic<0>;
+def int_amdgcn_buffer_load_format : AMDGPUBufferLoad;
+def int_amdgcn_buffer_load : AMDGPUBufferLoad;
+
+class AMDGPUBufferStore : Intrinsic <
+ [],
+ [llvm_anyfloat_ty, // vdata(VGPR) -- can currently only select f32, v2f32, v4f32
+ llvm_v4i32_ty, // rsrc(SGPR)
+ llvm_i32_ty, // vindex(VGPR)
+ llvm_i32_ty, // offset(SGPR/VGPR/imm)
+ llvm_i1_ty, // glc(imm)
+ llvm_i1_ty], // slc(imm)
+ [IntrWriteMem], "", [SDNPMemOperand]>,
+ AMDGPURsrcIntrinsic<1>;
+def int_amdgcn_buffer_store_format : AMDGPUBufferStore;
+def int_amdgcn_buffer_store : AMDGPUBufferStore;
+
+def int_amdgcn_tbuffer_load : Intrinsic <
+ [llvm_any_ty], // overloaded for types f32/i32, v2f32/v2i32, v4f32/v4i32
+ [llvm_v4i32_ty, // rsrc(SGPR)
+ llvm_i32_ty, // vindex(VGPR)
+ llvm_i32_ty, // voffset(VGPR)
+ llvm_i32_ty, // soffset(SGPR)
+ llvm_i32_ty, // offset(imm)
+ llvm_i32_ty, // dfmt(imm)
+ llvm_i32_ty, // nfmt(imm)
+ llvm_i1_ty, // glc(imm)
+ llvm_i1_ty], // slc(imm)
+ [IntrReadMem], "", [SDNPMemOperand]>,
+ AMDGPURsrcIntrinsic<0>;
+
+def int_amdgcn_tbuffer_store : Intrinsic <
+ [],
+ [llvm_any_ty, // vdata(VGPR), overloaded for types f32/i32, v2f32/v2i32, v4f32/v4i32
+ llvm_v4i32_ty, // rsrc(SGPR)
+ llvm_i32_ty, // vindex(VGPR)
+ llvm_i32_ty, // voffset(VGPR)
+ llvm_i32_ty, // soffset(SGPR)
+ llvm_i32_ty, // offset(imm)
+ llvm_i32_ty, // dfmt(imm)
+ llvm_i32_ty, // nfmt(imm)
+ llvm_i1_ty, // glc(imm)
+ llvm_i1_ty], // slc(imm)
+ [IntrWriteMem], "", [SDNPMemOperand]>,
+ AMDGPURsrcIntrinsic<1>;
+
+class AMDGPUBufferAtomic : Intrinsic <
+ [llvm_i32_ty],
+ [llvm_i32_ty, // vdata(VGPR)
+ llvm_v4i32_ty, // rsrc(SGPR)
+ llvm_i32_ty, // vindex(VGPR)
+ llvm_i32_ty, // offset(SGPR/VGPR/imm)
+ llvm_i1_ty], // slc(imm)
+ [], "", [SDNPMemOperand]>,
+ AMDGPURsrcIntrinsic<1, 0>;
+def int_amdgcn_buffer_atomic_swap : AMDGPUBufferAtomic;
+def int_amdgcn_buffer_atomic_add : AMDGPUBufferAtomic;
+def int_amdgcn_buffer_atomic_sub : AMDGPUBufferAtomic;
+def int_amdgcn_buffer_atomic_smin : AMDGPUBufferAtomic;
+def int_amdgcn_buffer_atomic_umin : AMDGPUBufferAtomic;
+def int_amdgcn_buffer_atomic_smax : AMDGPUBufferAtomic;
+def int_amdgcn_buffer_atomic_umax : AMDGPUBufferAtomic;
+def int_amdgcn_buffer_atomic_and : AMDGPUBufferAtomic;
+def int_amdgcn_buffer_atomic_or : AMDGPUBufferAtomic;
+def int_amdgcn_buffer_atomic_xor : AMDGPUBufferAtomic;
+def int_amdgcn_buffer_atomic_cmpswap : Intrinsic<
+ [llvm_i32_ty],
+ [llvm_i32_ty, // src(VGPR)
+ llvm_i32_ty, // cmp(VGPR)
+ llvm_v4i32_ty, // rsrc(SGPR)
+ llvm_i32_ty, // vindex(VGPR)
+ llvm_i32_ty, // offset(SGPR/VGPR/imm)
+ llvm_i1_ty], // slc(imm)
+ [], "", [SDNPMemOperand]>,
+ AMDGPURsrcIntrinsic<2, 0>;
+
+} // defset AMDGPUBufferIntrinsics
+
+// Uses that do not set the done bit should set IntrWriteMem on the
+// call site.
+def int_amdgcn_exp : Intrinsic <[], [
+ llvm_i32_ty, // tgt,
+ llvm_i32_ty, // en
+ llvm_any_ty, // src0 (f32 or i32)
+ LLVMMatchType<0>, // src1
+ LLVMMatchType<0>, // src2
+ LLVMMatchType<0>, // src3
+ llvm_i1_ty, // done
+ llvm_i1_ty // vm
+ ],
+ []
+>;
+
+// exp with compr bit set.
+def int_amdgcn_exp_compr : Intrinsic <[], [
+ llvm_i32_ty, // tgt,
+ llvm_i32_ty, // en
+ llvm_anyvector_ty, // src0 (v2f16 or v2i16)
+ LLVMMatchType<0>, // src1
+ llvm_i1_ty, // done
+ llvm_i1_ty], // vm
+ []
+>;
+
+def int_amdgcn_buffer_wbinvl1_sc :
+ GCCBuiltin<"__builtin_amdgcn_buffer_wbinvl1_sc">,
+ Intrinsic<[], [], []>;
+
+def int_amdgcn_buffer_wbinvl1 :
+ GCCBuiltin<"__builtin_amdgcn_buffer_wbinvl1">,
+ Intrinsic<[], [], []>;
+
+def int_amdgcn_s_dcache_inv :
+ GCCBuiltin<"__builtin_amdgcn_s_dcache_inv">,
+ Intrinsic<[], [], []>;
+
+def int_amdgcn_s_memtime :
+ GCCBuiltin<"__builtin_amdgcn_s_memtime">,
+ Intrinsic<[llvm_i64_ty], [], [IntrReadMem]>;
+
+def int_amdgcn_s_sleep :
+ GCCBuiltin<"__builtin_amdgcn_s_sleep">,
+ Intrinsic<[], [llvm_i32_ty], []> {
+}
+
+def int_amdgcn_s_incperflevel :
+ GCCBuiltin<"__builtin_amdgcn_s_incperflevel">,
+ Intrinsic<[], [llvm_i32_ty], []> {
+}
+
+def int_amdgcn_s_decperflevel :
+ GCCBuiltin<"__builtin_amdgcn_s_decperflevel">,
+ Intrinsic<[], [llvm_i32_ty], []> {
+}
+
+def int_amdgcn_s_getreg :
+ GCCBuiltin<"__builtin_amdgcn_s_getreg">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty],
+ [IntrReadMem, IntrSpeculatable]
+>;
+
+// int_amdgcn_s_getpc is provided to allow a specific style of position
+// independent code to determine the high part of its address when it is
+// known (through convention) that the code and any data of interest do
+// not cross a 4GB address boundary. Use for any other purpose may not
+// produce the desired results as optimizations may cause code movement,
+// especially as we explicitly use IntrNoMem to allow optimizations.
+def int_amdgcn_s_getpc :
+ GCCBuiltin<"__builtin_amdgcn_s_getpc">,
+ Intrinsic<[llvm_i64_ty], [], [IntrNoMem, IntrSpeculatable]>;
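+// Illustrative IR call:
+//   %pc = call i64 @llvm.amdgcn.s.getpc()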
+
+// __builtin_amdgcn_interp_mov <param>, <attr_chan>, <attr>, <m0>
+// param values: 0 = P10, 1 = P20, 2 = P0
+def int_amdgcn_interp_mov :
+ GCCBuiltin<"__builtin_amdgcn_interp_mov">,
+ Intrinsic<[llvm_float_ty],
+ [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]>;
+
+// __builtin_amdgcn_interp_p1 <i>, <attr_chan>, <attr>, <m0>
+// This intrinsic reads from lds, but the memory values are constant,
+// so it behaves like IntrNoMem.
+def int_amdgcn_interp_p1 :
+ GCCBuiltin<"__builtin_amdgcn_interp_p1">,
+ Intrinsic<[llvm_float_ty],
+ [llvm_float_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]>;
+
+// __builtin_amdgcn_interp_p2 <p1>, <j>, <attr_chan>, <attr>, <m0>
+def int_amdgcn_interp_p2 :
+ GCCBuiltin<"__builtin_amdgcn_interp_p2">,
+ Intrinsic<[llvm_float_ty],
+ [llvm_float_ty, llvm_float_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]>;
+// See int_amdgcn_interp_p1 for why this is IntrNoMem.
+
+// Pixel shaders only: whether the current pixel is live (i.e. not a helper
+// invocation for derivative computation).
+def int_amdgcn_ps_live : Intrinsic <
+ [llvm_i1_ty],
+ [],
+ [IntrNoMem]>;
+
+def int_amdgcn_mbcnt_lo :
+ GCCBuiltin<"__builtin_amdgcn_mbcnt_lo">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_amdgcn_mbcnt_hi :
+ GCCBuiltin<"__builtin_amdgcn_mbcnt_hi">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+
+// llvm.amdgcn.ds.swizzle src offset
+def int_amdgcn_ds_swizzle :
+ GCCBuiltin<"__builtin_amdgcn_ds_swizzle">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem, IntrConvergent]>;
+
+def int_amdgcn_ubfe : Intrinsic<[llvm_anyint_ty],
+ [LLVMMatchType<0>, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_sbfe : Intrinsic<[llvm_anyint_ty],
+ [LLVMMatchType<0>, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_lerp :
+ GCCBuiltin<"__builtin_amdgcn_lerp">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_sad_u8 :
+ GCCBuiltin<"__builtin_amdgcn_sad_u8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_msad_u8 :
+ GCCBuiltin<"__builtin_amdgcn_msad_u8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_sad_hi_u8 :
+ GCCBuiltin<"__builtin_amdgcn_sad_hi_u8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_sad_u16 :
+ GCCBuiltin<"__builtin_amdgcn_sad_u16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_qsad_pk_u16_u8 :
+ GCCBuiltin<"__builtin_amdgcn_qsad_pk_u16_u8">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i32_ty, llvm_i64_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_mqsad_pk_u16_u8 :
+ GCCBuiltin<"__builtin_amdgcn_mqsad_pk_u16_u8">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i32_ty, llvm_i64_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_mqsad_u32_u8 :
+ GCCBuiltin<"__builtin_amdgcn_mqsad_u32_u8">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_i64_ty, llvm_i32_ty, llvm_v4i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_cvt_pk_u8_f32 :
+ GCCBuiltin<"__builtin_amdgcn_cvt_pk_u8_f32">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_icmp :
+ Intrinsic<[llvm_i64_ty], [llvm_anyint_ty, LLVMMatchType<0>, llvm_i32_ty],
+ [IntrNoMem, IntrConvergent]>;
+
+def int_amdgcn_fcmp :
+ Intrinsic<[llvm_i64_ty], [llvm_anyfloat_ty, LLVMMatchType<0>, llvm_i32_ty],
+ [IntrNoMem, IntrConvergent]>;
+
+def int_amdgcn_readfirstlane :
+ GCCBuiltin<"__builtin_amdgcn_readfirstlane">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrNoMem, IntrConvergent]>;
+
+// The lane argument must be uniform across the currently active threads of the
+// current wave. Otherwise, the result is undefined.
+def int_amdgcn_readlane :
+ GCCBuiltin<"__builtin_amdgcn_readlane">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem, IntrConvergent]>;
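+// Illustrative IR call (operands are placeholders; %lane must be uniform):
+//   %v = call i32 @llvm.amdgcn.readlane(i32 %value, i32 %lane)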
+
+// The value to write and lane select arguments must be uniform across the
+// currently active threads of the current wave. Otherwise, the result is
+// undefined.
+def int_amdgcn_writelane :
+ GCCBuiltin<"__builtin_amdgcn_writelane">,
+ Intrinsic<[llvm_i32_ty], [
+ llvm_i32_ty, // uniform value to write: returned by the selected lane
+ llvm_i32_ty, // uniform lane select
+ llvm_i32_ty // returned by all lanes other than the selected one
+ ],
+ [IntrNoMem, IntrConvergent]
+>;
+
+def int_amdgcn_alignbit : Intrinsic<[llvm_i32_ty],
+ [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+def int_amdgcn_alignbyte : Intrinsic<[llvm_i32_ty],
+ [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
+
+
+// Copies the source value to the destination value, with the guarantee that
+// the source value is computed as if the entire program were executed in WQM.
+def int_amdgcn_wqm : Intrinsic<[llvm_any_ty],
+ [LLVMMatchType<0>], [IntrNoMem, IntrSpeculatable]
+>;
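+// Illustrative IR call (the name suffix follows the overloaded type; the
+// operand is a placeholder):
+//   %v = call float @llvm.amdgcn.wqm.f32(float %x)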
+
+// Return true if at least one thread within the pixel quad passes true into
+// the function.
+def int_amdgcn_wqm_vote : Intrinsic<[llvm_i1_ty],
+ [llvm_i1_ty], [IntrNoMem, IntrConvergent]
+>;
+
+// If false, set EXEC=0 for the current thread until the end of program.
+def int_amdgcn_kill : Intrinsic<[], [llvm_i1_ty], []>;
+
+// Copies the active channels of the source value to the destination value,
+// with the guarantee that the source value is computed as if the entire
+// program were executed in Whole Wavefront Mode, i.e. with all channels
+// enabled, with a few exceptions: phi nodes that require WWM return an
+// undefined value.
+def int_amdgcn_wwm : Intrinsic<[llvm_any_ty],
+ [LLVMMatchType<0>], [IntrNoMem, IntrSpeculatable]
+>;
+
+// Given a value, copies it while setting all the inactive lanes to a given
+// value. Note that OpenGL helper lanes are considered active, so if the
+// program ever uses WQM, then the instruction and the first source will be
+// computed in WQM.
+def int_amdgcn_set_inactive :
+ Intrinsic<[llvm_anyint_ty],
+ [LLVMMatchType<0>, // value to be copied
+ LLVMMatchType<0>], // value for the inactive lanes to take
+ [IntrNoMem, IntrConvergent]>;
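+// Illustrative IR call (operands are placeholders):
+//   %v = call i32 @llvm.amdgcn.set.inactive.i32(i32 %value, i32 %inactive)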
+
+//===----------------------------------------------------------------------===//
+// CI+ Intrinsics
+//===----------------------------------------------------------------------===//
+
+def int_amdgcn_s_dcache_inv_vol :
+ GCCBuiltin<"__builtin_amdgcn_s_dcache_inv_vol">,
+ Intrinsic<[], [], []>;
+
+def int_amdgcn_buffer_wbinvl1_vol :
+ GCCBuiltin<"__builtin_amdgcn_buffer_wbinvl1_vol">,
+ Intrinsic<[], [], []>;
+
+//===----------------------------------------------------------------------===//
+// VI Intrinsics
+//===----------------------------------------------------------------------===//
+
+// llvm.amdgcn.mov.dpp.i32 <src> <dpp_ctrl> <row_mask> <bank_mask> <bound_ctrl>
+def int_amdgcn_mov_dpp :
+ Intrinsic<[llvm_anyint_ty],
+ [LLVMMatchType<0>, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i1_ty], [IntrNoMem, IntrConvergent]>;
+
+// llvm.amdgcn.update.dpp.i32 <old> <src> <dpp_ctrl> <row_mask> <bank_mask> <bound_ctrl>
+// Should be equivalent to:
+// v_mov_b32 <dest> <old>
+// v_mov_b32 <dest> <src> <dpp_ctrl> <row_mask> <bank_mask> <bound_ctrl>
+def int_amdgcn_update_dpp :
+ Intrinsic<[llvm_anyint_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i1_ty], [IntrNoMem, IntrConvergent]>;
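+// Illustrative IR call (operands and immediate values are placeholders):
+//   %v = call i32 @llvm.amdgcn.update.dpp.i32(i32 %old, i32 %src,
+//            i32 0, i32 15, i32 15, i1 false)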
+
+def int_amdgcn_s_dcache_wb :
+ GCCBuiltin<"__builtin_amdgcn_s_dcache_wb">,
+ Intrinsic<[], [], []>;
+
+def int_amdgcn_s_dcache_wb_vol :
+ GCCBuiltin<"__builtin_amdgcn_s_dcache_wb_vol">,
+ Intrinsic<[], [], []>;
+
+def int_amdgcn_s_memrealtime :
+ GCCBuiltin<"__builtin_amdgcn_s_memrealtime">,
+ Intrinsic<[llvm_i64_ty], [], [IntrReadMem]>;
+
+// llvm.amdgcn.ds.permute <index> <src>
+def int_amdgcn_ds_permute :
+ GCCBuiltin<"__builtin_amdgcn_ds_permute">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem, IntrConvergent]>;
+
+// llvm.amdgcn.ds.bpermute <index> <src>
+def int_amdgcn_ds_bpermute :
+ GCCBuiltin<"__builtin_amdgcn_ds_bpermute">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem, IntrConvergent]>;
+
+//===----------------------------------------------------------------------===//
+// Deep learning intrinsics.
+//===----------------------------------------------------------------------===//
+
+// f32 %r = llvm.amdgcn.fdot2(v2f16 %a, v2f16 %b, f32 %c, i1 %clamp)
+// %r = %a[0] * %b[0] + %a[1] * %b[1] + %c
+def int_amdgcn_fdot2 :
+ GCCBuiltin<"__builtin_amdgcn_fdot2">,
+ Intrinsic<
+ [llvm_float_ty], // %r
+ [
+ llvm_v2f16_ty, // %a
+ llvm_v2f16_ty, // %b
+ llvm_float_ty, // %c
+ llvm_i1_ty // %clamp
+ ],
+ [IntrNoMem, IntrSpeculatable]
+ >;
+
+// i32 %r = llvm.amdgcn.sdot2(v2i16 %a, v2i16 %b, i32 %c, i1 %clamp)
+// %r = %a[0] * %b[0] + %a[1] * %b[1] + %c
+def int_amdgcn_sdot2 :
+ GCCBuiltin<"__builtin_amdgcn_sdot2">,
+ Intrinsic<
+ [llvm_i32_ty], // %r
+ [
+ llvm_v2i16_ty, // %a
+ llvm_v2i16_ty, // %b
+ llvm_i32_ty, // %c
+ llvm_i1_ty // %clamp
+ ],
+ [IntrNoMem, IntrSpeculatable]
+ >;
+
+// u32 %r = llvm.amdgcn.udot2(v2u16 %a, v2u16 %b, u32 %c, i1 %clamp)
+// %r = %a[0] * %b[0] + %a[1] * %b[1] + %c
+def int_amdgcn_udot2 :
+ GCCBuiltin<"__builtin_amdgcn_udot2">,
+ Intrinsic<
+ [llvm_i32_ty], // %r
+ [
+ llvm_v2i16_ty, // %a
+ llvm_v2i16_ty, // %b
+ llvm_i32_ty, // %c
+ llvm_i1_ty // %clamp
+ ],
+ [IntrNoMem, IntrSpeculatable]
+ >;
+
+// i32 %r = llvm.amdgcn.sdot4(v4i8 (as i32) %a, v4i8 (as i32) %b, i32 %c, i1 %clamp)
+// %r = %a[0] * %b[0] + %a[1] * %b[1] + %a[2] * %b[2] + %a[3] * %b[3] + %c
+def int_amdgcn_sdot4 :
+ GCCBuiltin<"__builtin_amdgcn_sdot4">,
+ Intrinsic<
+ [llvm_i32_ty], // %r
+ [
+ llvm_i32_ty, // %a
+ llvm_i32_ty, // %b
+ llvm_i32_ty, // %c
+ llvm_i1_ty // %clamp
+ ],
+ [IntrNoMem, IntrSpeculatable]
+ >;
+
+// u32 %r = llvm.amdgcn.udot4(v4u8 (as u32) %a, v4u8 (as u32) %b, u32 %c, i1 %clamp)
+// %r = %a[0] * %b[0] + %a[1] * %b[1] + %a[2] * %b[2] + %a[3] * %b[3] + %c
+def int_amdgcn_udot4 :
+ GCCBuiltin<"__builtin_amdgcn_udot4">,
+ Intrinsic<
+ [llvm_i32_ty], // %r
+ [
+ llvm_i32_ty, // %a
+ llvm_i32_ty, // %b
+ llvm_i32_ty, // %c
+ llvm_i1_ty // %clamp
+ ],
+ [IntrNoMem, IntrSpeculatable]
+ >;
+
+// i32 %r = llvm.amdgcn.sdot8(v8i4 (as i32) %a, v8i4 (as i32) %b, i32 %c, i1 %clamp)
+// %r = %a[0] * %b[0] + %a[1] * %b[1] + %a[2] * %b[2] + %a[3] * %b[3] +
+// %a[4] * %b[4] + %a[5] * %b[5] + %a[6] * %b[6] + %a[7] * %b[7] + %c
+def int_amdgcn_sdot8 :
+ GCCBuiltin<"__builtin_amdgcn_sdot8">,
+ Intrinsic<
+ [llvm_i32_ty], // %r
+ [
+ llvm_i32_ty, // %a
+ llvm_i32_ty, // %b
+ llvm_i32_ty, // %c
+ llvm_i1_ty // %clamp
+ ],
+ [IntrNoMem, IntrSpeculatable]
+ >;
+
+// u32 %r = llvm.amdgcn.udot8(v8u4 (as u32) %a, v8u4 (as u32) %b, u32 %c, i1 %clamp)
+// %r = %a[0] * %b[0] + %a[1] * %b[1] + %a[2] * %b[2] + %a[3] * %b[3] +
+// %a[4] * %b[4] + %a[5] * %b[5] + %a[6] * %b[6] + %a[7] * %b[7] + %c
+def int_amdgcn_udot8 :
+ GCCBuiltin<"__builtin_amdgcn_udot8">,
+ Intrinsic<
+ [llvm_i32_ty], // %r
+ [
+ llvm_i32_ty, // %a
+ llvm_i32_ty, // %b
+ llvm_i32_ty, // %c
+ llvm_i1_ty // %clamp
+ ],
+ [IntrNoMem, IntrSpeculatable]
+ >;
+
+//===----------------------------------------------------------------------===//
+// Special Intrinsics for backend internal use only. No frontend
+// should emit calls to these.
+//===----------------------------------------------------------------------===//
+def int_amdgcn_if : Intrinsic<[llvm_i1_ty, llvm_i64_ty],
+ [llvm_i1_ty], [IntrConvergent]
+>;
+
+def int_amdgcn_else : Intrinsic<[llvm_i1_ty, llvm_i64_ty],
+ [llvm_i64_ty], [IntrConvergent]
+>;
+
+def int_amdgcn_break : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty], [IntrNoMem, IntrConvergent]
+>;
+
+def int_amdgcn_if_break : Intrinsic<[llvm_i64_ty],
+ [llvm_i1_ty, llvm_i64_ty], [IntrNoMem, IntrConvergent]
+>;
+
+def int_amdgcn_else_break : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i64_ty], [IntrNoMem, IntrConvergent]
+>;
+
+def int_amdgcn_loop : Intrinsic<[llvm_i1_ty],
+ [llvm_i64_ty], [IntrConvergent]
+>;
+
+def int_amdgcn_end_cf : Intrinsic<[], [llvm_i64_ty], [IntrConvergent]>;
+
+// Represent unreachable in a divergent region.
+def int_amdgcn_unreachable : Intrinsic<[], [], [IntrConvergent]>;
+
+// Emit 2.5 ulp, no denormal division. Should only be inserted by
+// pass based on !fpmath metadata.
+def int_amdgcn_fdiv_fast : Intrinsic<
+ [llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, IntrSpeculatable]
+>;
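+// Illustrative IR call (operands are placeholders):
+//   %q = call float @llvm.amdgcn.fdiv.fast(float %a, float %b)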
+}
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsARM.td b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsARM.td
new file mode 100644
index 000000000..4e11f9c29
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsARM.td
@@ -0,0 +1,770 @@
+//===- IntrinsicsARM.td - Defines ARM intrinsics -----------*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines all of the ARM-specific intrinsics.
+//
+//===----------------------------------------------------------------------===//
+
+
+//===----------------------------------------------------------------------===//
+// TLS
+
+let TargetPrefix = "arm" in { // All intrinsics start with "llvm.arm.".
+
+// A space-consuming intrinsic primarily for testing ARMConstantIslands. The
+// first argument is the number of bytes this "instruction" takes up, the second
+// argument and the return value are essentially chains, used to force ordering
+// during ISel.
+def int_arm_space : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], []>;
+
+// 16-bit multiplications
+def int_arm_smulbb : GCCBuiltin<"__builtin_arm_smulbb">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_smulbt : GCCBuiltin<"__builtin_arm_smulbt">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_smultb : GCCBuiltin<"__builtin_arm_smultb">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_smultt : GCCBuiltin<"__builtin_arm_smultt">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_smulwb : GCCBuiltin<"__builtin_arm_smulwb">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_smulwt : GCCBuiltin<"__builtin_arm_smulwt">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+
+//===----------------------------------------------------------------------===//
+// Saturating Arithmetic
+
+def int_arm_qadd : GCCBuiltin<"__builtin_arm_qadd">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_arm_qsub : GCCBuiltin<"__builtin_arm_qsub">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_ssat : GCCBuiltin<"__builtin_arm_ssat">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_usat : GCCBuiltin<"__builtin_arm_usat">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+
+// Accumulating multiplications
+def int_arm_smlabb : GCCBuiltin<"__builtin_arm_smlabb">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_smlabt : GCCBuiltin<"__builtin_arm_smlabt">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_smlatb : GCCBuiltin<"__builtin_arm_smlatb">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_smlatt : GCCBuiltin<"__builtin_arm_smlatt">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_smlawb : GCCBuiltin<"__builtin_arm_smlawb">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_smlawt : GCCBuiltin<"__builtin_arm_smlawt">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+// Parallel 16-bit saturation
+def int_arm_ssat16 : GCCBuiltin<"__builtin_arm_ssat16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_usat16 : GCCBuiltin<"__builtin_arm_usat16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+
+// Packing and unpacking
+def int_arm_sxtab16 : GCCBuiltin<"__builtin_arm_sxtab16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_sxtb16 : GCCBuiltin<"__builtin_arm_sxtb16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_arm_uxtab16 : GCCBuiltin<"__builtin_arm_uxtab16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_uxtb16 : GCCBuiltin<"__builtin_arm_uxtb16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrNoMem]>;
+
+// Parallel selection, reads the GE flags.
+def int_arm_sel : GCCBuiltin<"__builtin_arm_sel">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrReadMem]>;
+
+// Parallel 8-bit addition and subtraction
+def int_arm_qadd8 : GCCBuiltin<"__builtin_arm_qadd8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_qsub8 : GCCBuiltin<"__builtin_arm_qsub8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+// Writes to the GE bits.
+def int_arm_sadd8 : GCCBuiltin<"__builtin_arm_sadd8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], []>;
+def int_arm_shadd8 : GCCBuiltin<"__builtin_arm_shadd8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_shsub8 : GCCBuiltin<"__builtin_arm_shsub8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+// Writes to the GE bits.
+def int_arm_ssub8 : GCCBuiltin<"__builtin_arm_ssub8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], []>;
+// Writes to the GE bits.
+def int_arm_uadd8 : GCCBuiltin<"__builtin_arm_uadd8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], []>;
+def int_arm_uhadd8 : GCCBuiltin<"__builtin_arm_uhadd8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_uhsub8 : GCCBuiltin<"__builtin_arm_uhsub8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_uqadd8 : GCCBuiltin<"__builtin_arm_uqadd8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_uqsub8 : GCCBuiltin<"__builtin_arm_uqsub8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+// Writes to the GE bits.
+def int_arm_usub8 : GCCBuiltin<"__builtin_arm_usub8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], []>;
+
+// Sum of 8-bit absolute differences
+def int_arm_usad8 : GCCBuiltin<"__builtin_arm_usad8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_usada8 : GCCBuiltin<"__builtin_arm_usada8">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+// Parallel 16-bit addition and subtraction
+def int_arm_qadd16 : GCCBuiltin<"__builtin_arm_qadd16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_qasx : GCCBuiltin<"__builtin_arm_qasx">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_qsax : GCCBuiltin<"__builtin_arm_qsax">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_qsub16 : GCCBuiltin<"__builtin_arm_qsub16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+// Writes to the GE bits.
+def int_arm_sadd16 : GCCBuiltin<"__builtin_arm_sadd16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], []>;
+// Writes to the GE bits.
+def int_arm_sasx : GCCBuiltin<"__builtin_arm_sasx">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], []>;
+def int_arm_shadd16 : GCCBuiltin<"__builtin_arm_shadd16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_shasx : GCCBuiltin<"__builtin_arm_shasx">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_shsax : GCCBuiltin<"__builtin_arm_shsax">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_shsub16 : GCCBuiltin<"__builtin_arm_shsub16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+// Writes to the GE bits.
+def int_arm_ssax : GCCBuiltin<"__builtin_arm_ssax">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], []>;
+// Writes to the GE bits.
+def int_arm_ssub16 : GCCBuiltin<"__builtin_arm_ssub16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], []>;
+// Writes to the GE bits.
+def int_arm_uadd16 : GCCBuiltin<"__builtin_arm_uadd16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], []>;
+// Writes to the GE bits.
+def int_arm_uasx : GCCBuiltin<"__builtin_arm_uasx">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], []>;
+def int_arm_uhadd16 : GCCBuiltin<"__builtin_arm_uhadd16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_uhasx : GCCBuiltin<"__builtin_arm_uhasx">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_uhsax : GCCBuiltin<"__builtin_arm_uhsax">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_uhsub16 : GCCBuiltin<"__builtin_arm_uhsub16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_uqadd16 : GCCBuiltin<"__builtin_arm_uqadd16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_uqasx : GCCBuiltin<"__builtin_arm_uqasx">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_uqsax : GCCBuiltin<"__builtin_arm_uqsax">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_uqsub16 : GCCBuiltin<"__builtin_arm_uqsub16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+// Writes to the GE bits.
+def int_arm_usax : GCCBuiltin<"__builtin_arm_usax">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], []>;
+// Writes to the GE bits.
+def int_arm_usub16 : GCCBuiltin<"__builtin_arm_usub16">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], []>;
+
+// Parallel 16-bit multiplication
+def int_arm_smlad : GCCBuiltin<"__builtin_arm_smlad">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_smladx : GCCBuiltin<"__builtin_arm_smladx">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_smlald : GCCBuiltin<"__builtin_arm_smlald">,
+ Intrinsic<[llvm_i64_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+def int_arm_smlaldx : GCCBuiltin<"__builtin_arm_smlaldx">,
+ Intrinsic<[llvm_i64_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+def int_arm_smlsd : GCCBuiltin<"__builtin_arm_smlsd">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_smlsdx : GCCBuiltin<"__builtin_arm_smlsdx">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_smlsld : GCCBuiltin<"__builtin_arm_smlsld">,
+ Intrinsic<[llvm_i64_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+def int_arm_smlsldx : GCCBuiltin<"__builtin_arm_smlsldx">,
+ Intrinsic<[llvm_i64_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+def int_arm_smuad : GCCBuiltin<"__builtin_arm_smuad">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_smuadx : GCCBuiltin<"__builtin_arm_smuadx">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_smusd : GCCBuiltin<"__builtin_arm_smusd">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_arm_smusdx : GCCBuiltin<"__builtin_arm_smusdx">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+
+
+//===----------------------------------------------------------------------===//
+// Load, Store and Clear exclusive
+
+def int_arm_ldrex : Intrinsic<[llvm_i32_ty], [llvm_anyptr_ty]>;
+def int_arm_strex : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_anyptr_ty]>;
+
+def int_arm_ldaex : Intrinsic<[llvm_i32_ty], [llvm_anyptr_ty]>;
+def int_arm_stlex : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_anyptr_ty]>;
+
+def int_arm_clrex : Intrinsic<[]>;
+
+def int_arm_strexd : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty,
+ llvm_ptr_ty]>;
+def int_arm_ldrexd : Intrinsic<[llvm_i32_ty, llvm_i32_ty], [llvm_ptr_ty]>;
+
+def int_arm_stlexd : Intrinsic<[llvm_i32_ty],
+ [llvm_i32_ty, llvm_i32_ty, llvm_ptr_ty]>;
+def int_arm_ldaexd : Intrinsic<[llvm_i32_ty, llvm_i32_ty], [llvm_ptr_ty]>;
+
+//===----------------------------------------------------------------------===//
+// Data barrier instructions
+def int_arm_dmb : GCCBuiltin<"__builtin_arm_dmb">, MSBuiltin<"__dmb">,
+ Intrinsic<[], [llvm_i32_ty]>;
+def int_arm_dsb : GCCBuiltin<"__builtin_arm_dsb">, MSBuiltin<"__dsb">,
+ Intrinsic<[], [llvm_i32_ty]>;
+def int_arm_isb : GCCBuiltin<"__builtin_arm_isb">, MSBuiltin<"__isb">,
+ Intrinsic<[], [llvm_i32_ty]>;
+
+//===----------------------------------------------------------------------===//
+// VFP
+
+def int_arm_get_fpscr : GCCBuiltin<"__builtin_arm_get_fpscr">,
+ Intrinsic<[llvm_i32_ty], [], []>;
+def int_arm_set_fpscr : GCCBuiltin<"__builtin_arm_set_fpscr">,
+ Intrinsic<[], [llvm_i32_ty], []>;
+def int_arm_vcvtr : Intrinsic<[llvm_float_ty], [llvm_anyfloat_ty],
+ [IntrNoMem]>;
+def int_arm_vcvtru : Intrinsic<[llvm_float_ty], [llvm_anyfloat_ty],
+ [IntrNoMem]>;
+
+//===----------------------------------------------------------------------===//
+// Coprocessor
+
+def int_arm_ldc : GCCBuiltin<"__builtin_arm_ldc">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_ptr_ty], []>;
+def int_arm_ldcl : GCCBuiltin<"__builtin_arm_ldcl">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_ptr_ty], []>;
+def int_arm_ldc2 : GCCBuiltin<"__builtin_arm_ldc2">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_ptr_ty], []>;
+def int_arm_ldc2l : GCCBuiltin<"__builtin_arm_ldc2l">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_ptr_ty], []>;
+
+def int_arm_stc : GCCBuiltin<"__builtin_arm_stc">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_ptr_ty], []>;
+def int_arm_stcl : GCCBuiltin<"__builtin_arm_stcl">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_ptr_ty], []>;
+def int_arm_stc2 : GCCBuiltin<"__builtin_arm_stc2">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_ptr_ty], []>;
+def int_arm_stc2l : GCCBuiltin<"__builtin_arm_stc2l">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_ptr_ty], []>;
+
+// Move to coprocessor
+def int_arm_mcr : GCCBuiltin<"__builtin_arm_mcr">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], []>;
+def int_arm_mcr2 : GCCBuiltin<"__builtin_arm_mcr2">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], []>;
+
+// Move from coprocessor
+def int_arm_mrc : GCCBuiltin<"__builtin_arm_mrc">,
+ MSBuiltin<"_MoveFromCoprocessor">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], []>;
+def int_arm_mrc2 : GCCBuiltin<"__builtin_arm_mrc2">,
+ MSBuiltin<"_MoveFromCoprocessor2">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], []>;
+
+// Coprocessor data processing
+def int_arm_cdp : GCCBuiltin<"__builtin_arm_cdp">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], []>;
+def int_arm_cdp2 : GCCBuiltin<"__builtin_arm_cdp2">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], []>;
+
+// Move from two registers to coprocessor
+def int_arm_mcrr : Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], []>;
+def int_arm_mcrr2 : Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], []>;
+
+def int_arm_mrrc : Intrinsic<[llvm_i32_ty, llvm_i32_ty], [llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], []>;
+def int_arm_mrrc2 : Intrinsic<[llvm_i32_ty, llvm_i32_ty], [llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], []>;
+
+//===----------------------------------------------------------------------===//
+// CRC32
+
+def int_arm_crc32b : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_crc32cb : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_crc32h : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_crc32ch : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_crc32w : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_crc32cw : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+//===----------------------------------------------------------------------===//
+// HINT
+
+def int_arm_hint : Intrinsic<[], [llvm_i32_ty]>;
+def int_arm_dbg : Intrinsic<[], [llvm_i32_ty]>;
+
+//===----------------------------------------------------------------------===//
+// UND (reserved undefined sequence)
+
+def int_arm_undefined : Intrinsic<[], [llvm_i32_ty]>;
+
+//===----------------------------------------------------------------------===//
+// Advanced SIMD (NEON)
+
+// The following classes do not correspond directly to GCC builtins.
+class Neon_1Arg_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [LLVMMatchType<0>], [IntrNoMem]>;
+class Neon_1Arg_Narrow_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [LLVMExtendedType<0>], [IntrNoMem]>;
+class Neon_2Arg_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem]>;
+class Neon_2Arg_Narrow_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [LLVMExtendedType<0>, LLVMExtendedType<0>],
+ [IntrNoMem]>;
+class Neon_2Arg_Long_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [LLVMTruncatedType<0>, LLVMTruncatedType<0>],
+ [IntrNoMem]>;
+class Neon_3Arg_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem]>;
+class Neon_3Arg_Long_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, LLVMTruncatedType<0>, LLVMTruncatedType<0>],
+ [IntrNoMem]>;
+
+class Neon_1FloatArg_Intrinsic
+ : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>], [IntrNoMem]>;
+
+class Neon_CvtFxToFP_Intrinsic
+ : Intrinsic<[llvm_anyfloat_ty], [llvm_anyint_ty, llvm_i32_ty], [IntrNoMem]>;
+class Neon_CvtFPToFx_Intrinsic
+ : Intrinsic<[llvm_anyint_ty], [llvm_anyfloat_ty, llvm_i32_ty], [IntrNoMem]>;
+class Neon_CvtFPtoInt_1Arg_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+
+class Neon_Compare_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty, LLVMMatchType<1>],
+ [IntrNoMem]>;
+
+// The table operands for VTBL and VTBX consist of 1 to 4 v8i8 vectors.
+// Besides the table, VTBL has one other v8i8 argument and VTBX has two.
+// Overall, the classes range from 2 to 6 v8i8 arguments.
+class Neon_Tbl2Arg_Intrinsic
+ : Intrinsic<[llvm_v8i8_ty],
+ [llvm_v8i8_ty, llvm_v8i8_ty], [IntrNoMem]>;
+class Neon_Tbl3Arg_Intrinsic
+ : Intrinsic<[llvm_v8i8_ty],
+ [llvm_v8i8_ty, llvm_v8i8_ty, llvm_v8i8_ty], [IntrNoMem]>;
+class Neon_Tbl4Arg_Intrinsic
+ : Intrinsic<[llvm_v8i8_ty],
+ [llvm_v8i8_ty, llvm_v8i8_ty, llvm_v8i8_ty, llvm_v8i8_ty],
+ [IntrNoMem]>;
+class Neon_Tbl5Arg_Intrinsic
+ : Intrinsic<[llvm_v8i8_ty],
+ [llvm_v8i8_ty, llvm_v8i8_ty, llvm_v8i8_ty, llvm_v8i8_ty,
+ llvm_v8i8_ty], [IntrNoMem]>;
+class Neon_Tbl6Arg_Intrinsic
+ : Intrinsic<[llvm_v8i8_ty],
+ [llvm_v8i8_ty, llvm_v8i8_ty, llvm_v8i8_ty, llvm_v8i8_ty,
+ llvm_v8i8_ty, llvm_v8i8_ty], [IntrNoMem]>;
+
+// Arithmetic ops
+
+let IntrProperties = [IntrNoMem, Commutative] in {
+
+ // Vector Add.
+ def int_arm_neon_vhadds : Neon_2Arg_Intrinsic;
+ def int_arm_neon_vhaddu : Neon_2Arg_Intrinsic;
+ def int_arm_neon_vrhadds : Neon_2Arg_Intrinsic;
+ def int_arm_neon_vrhaddu : Neon_2Arg_Intrinsic;
+ def int_arm_neon_vqadds : Neon_2Arg_Intrinsic;
+ def int_arm_neon_vqaddu : Neon_2Arg_Intrinsic;
+ def int_arm_neon_vraddhn : Neon_2Arg_Narrow_Intrinsic;
+
+ // Vector Multiply.
+ def int_arm_neon_vmulp : Neon_2Arg_Intrinsic;
+ def int_arm_neon_vqdmulh : Neon_2Arg_Intrinsic;
+ def int_arm_neon_vqrdmulh : Neon_2Arg_Intrinsic;
+ def int_arm_neon_vmulls : Neon_2Arg_Long_Intrinsic;
+ def int_arm_neon_vmullu : Neon_2Arg_Long_Intrinsic;
+ def int_arm_neon_vmullp : Neon_2Arg_Long_Intrinsic;
+ def int_arm_neon_vqdmull : Neon_2Arg_Long_Intrinsic;
+
+ // Vector Maximum.
+ def int_arm_neon_vmaxs : Neon_2Arg_Intrinsic;
+ def int_arm_neon_vmaxu : Neon_2Arg_Intrinsic;
+ def int_arm_neon_vmaxnm : Neon_2Arg_Intrinsic;
+
+ // Vector Minimum.
+ def int_arm_neon_vmins : Neon_2Arg_Intrinsic;
+ def int_arm_neon_vminu : Neon_2Arg_Intrinsic;
+ def int_arm_neon_vminnm : Neon_2Arg_Intrinsic;
+
+ // Vector Reciprocal Step.
+ def int_arm_neon_vrecps : Neon_2Arg_Intrinsic;
+
+ // Vector Reciprocal Square Root Step.
+ def int_arm_neon_vrsqrts : Neon_2Arg_Intrinsic;
+}
+
+// Vector Subtract.
+def int_arm_neon_vhsubs : Neon_2Arg_Intrinsic;
+def int_arm_neon_vhsubu : Neon_2Arg_Intrinsic;
+def int_arm_neon_vqsubs : Neon_2Arg_Intrinsic;
+def int_arm_neon_vqsubu : Neon_2Arg_Intrinsic;
+def int_arm_neon_vrsubhn : Neon_2Arg_Narrow_Intrinsic;
+
+// Vector Absolute Compare.
+def int_arm_neon_vacge : Neon_Compare_Intrinsic;
+def int_arm_neon_vacgt : Neon_Compare_Intrinsic;
+
+// Vector Absolute Differences.
+def int_arm_neon_vabds : Neon_2Arg_Intrinsic;
+def int_arm_neon_vabdu : Neon_2Arg_Intrinsic;
+
+// Vector Pairwise Add.
+def int_arm_neon_vpadd : Neon_2Arg_Intrinsic;
+
+// Vector Pairwise Add Long.
+// Note: This is different from the other "long" NEON intrinsics because
+// the result vector has half as many elements as the source vector.
+// The source and destination vector types must be specified separately.
+def int_arm_neon_vpaddls : Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty],
+ [IntrNoMem]>;
+def int_arm_neon_vpaddlu : Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty],
+ [IntrNoMem]>;
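+// Illustrative IR call (both vector types are mangled into the name; the
+// operand is a placeholder):
+//   %r = call <4 x i16> @llvm.arm.neon.vpaddls.v4i16.v8i8(<8 x i8> %a)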
+
+// Vector Pairwise Add and Accumulate Long.
+// Note: This is similar to vpaddl but the destination vector also appears
+// as the first argument.
+def int_arm_neon_vpadals : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_anyvector_ty],
+ [IntrNoMem]>;
+def int_arm_neon_vpadalu : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_anyvector_ty],
+ [IntrNoMem]>;
+
+// Vector Pairwise Maximum and Minimum.
+def int_arm_neon_vpmaxs : Neon_2Arg_Intrinsic;
+def int_arm_neon_vpmaxu : Neon_2Arg_Intrinsic;
+def int_arm_neon_vpmins : Neon_2Arg_Intrinsic;
+def int_arm_neon_vpminu : Neon_2Arg_Intrinsic;
+
+// Vector Shifts:
+//
+// The various saturating and rounding vector shift operations need to be
+// represented by intrinsics in LLVM, and even the basic VSHL variable shift
+// operation cannot be safely translated to LLVM's shift operators. VSHL can
+// be used for both left and right shifts, or even combinations of the two,
+// depending on the signs of the shift amounts. It also has well-defined
+// behavior for shift amounts that LLVM leaves undefined. Only basic shifts
+// by constants can be represented with LLVM's shift operators.
+//
+// The shift counts for these intrinsics are always vectors, even for constant
+// shifts, where the constant is replicated. For consistency with VSHL (and
+// other variable shift instructions), left shifts have positive shift counts
+// and right shifts have negative shift counts. This convention is also used
+// for constant right shift intrinsics, and to help preserve sanity, the
+// intrinsic names use "shift" instead of either "shl" or "shr". Where
+// applicable, signed and unsigned versions of the intrinsics are
+// distinguished with "s" and "u" suffixes. A few NEON shift instructions,
+// such as VQSHLU, take signed operands but produce unsigned results; these
+// use a "su" suffix.
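+//
+// For example, a uniform right shift by 3 on a v8i8 vector could be written
+// as (operand names are placeholders):
+//   %r = call <8 x i8> @llvm.arm.neon.vshifts.v8i8(<8 x i8> %a,
+//            <8 x i8> <i8 -3, i8 -3, i8 -3, i8 -3, i8 -3, i8 -3, i8 -3, i8 -3>)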
+
+// Vector Shift.
+def int_arm_neon_vshifts : Neon_2Arg_Intrinsic;
+def int_arm_neon_vshiftu : Neon_2Arg_Intrinsic;
+
+// Vector Rounding Shift.
+def int_arm_neon_vrshifts : Neon_2Arg_Intrinsic;
+def int_arm_neon_vrshiftu : Neon_2Arg_Intrinsic;
+def int_arm_neon_vrshiftn : Neon_2Arg_Narrow_Intrinsic;
+
+// Vector Saturating Shift.
+def int_arm_neon_vqshifts : Neon_2Arg_Intrinsic;
+def int_arm_neon_vqshiftu : Neon_2Arg_Intrinsic;
+def int_arm_neon_vqshiftsu : Neon_2Arg_Intrinsic;
+def int_arm_neon_vqshiftns : Neon_2Arg_Narrow_Intrinsic;
+def int_arm_neon_vqshiftnu : Neon_2Arg_Narrow_Intrinsic;
+def int_arm_neon_vqshiftnsu : Neon_2Arg_Narrow_Intrinsic;
+
+// Vector Saturating Rounding Shift.
+def int_arm_neon_vqrshifts : Neon_2Arg_Intrinsic;
+def int_arm_neon_vqrshiftu : Neon_2Arg_Intrinsic;
+def int_arm_neon_vqrshiftns : Neon_2Arg_Narrow_Intrinsic;
+def int_arm_neon_vqrshiftnu : Neon_2Arg_Narrow_Intrinsic;
+def int_arm_neon_vqrshiftnsu : Neon_2Arg_Narrow_Intrinsic;
+
+// Vector Shift and Insert.
+def int_arm_neon_vshiftins : Neon_3Arg_Intrinsic;
+
+// Vector Absolute Value and Saturating Absolute Value.
+def int_arm_neon_vabs : Neon_1Arg_Intrinsic;
+def int_arm_neon_vqabs : Neon_1Arg_Intrinsic;
+
+// Vector Saturating Negate.
+def int_arm_neon_vqneg : Neon_1Arg_Intrinsic;
+
+// Vector Count Leading Sign/Zero Bits.
+def int_arm_neon_vcls : Neon_1Arg_Intrinsic;
+
+// Vector Reciprocal Estimate.
+def int_arm_neon_vrecpe : Neon_1Arg_Intrinsic;
+
+// Vector Reciprocal Square Root Estimate.
+def int_arm_neon_vrsqrte : Neon_1Arg_Intrinsic;
+
+// Vector Conversions Between Floating-point and Integer
+def int_arm_neon_vcvtau : Neon_CvtFPtoInt_1Arg_Intrinsic;
+def int_arm_neon_vcvtas : Neon_CvtFPtoInt_1Arg_Intrinsic;
+def int_arm_neon_vcvtnu : Neon_CvtFPtoInt_1Arg_Intrinsic;
+def int_arm_neon_vcvtns : Neon_CvtFPtoInt_1Arg_Intrinsic;
+def int_arm_neon_vcvtpu : Neon_CvtFPtoInt_1Arg_Intrinsic;
+def int_arm_neon_vcvtps : Neon_CvtFPtoInt_1Arg_Intrinsic;
+def int_arm_neon_vcvtmu : Neon_CvtFPtoInt_1Arg_Intrinsic;
+def int_arm_neon_vcvtms : Neon_CvtFPtoInt_1Arg_Intrinsic;
+
+// Vector Conversions Between Floating-point and Fixed-point.
+def int_arm_neon_vcvtfp2fxs : Neon_CvtFPToFx_Intrinsic;
+def int_arm_neon_vcvtfp2fxu : Neon_CvtFPToFx_Intrinsic;
+def int_arm_neon_vcvtfxs2fp : Neon_CvtFxToFP_Intrinsic;
+def int_arm_neon_vcvtfxu2fp : Neon_CvtFxToFP_Intrinsic;
+
+// Vector Conversions Between Half-Precision and Single-Precision.
+def int_arm_neon_vcvtfp2hf
+ : Intrinsic<[llvm_v4i16_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_arm_neon_vcvthf2fp
+ : Intrinsic<[llvm_v4f32_ty], [llvm_v4i16_ty], [IntrNoMem]>;
+
+// Narrowing Saturating Vector Moves.
+def int_arm_neon_vqmovns : Neon_1Arg_Narrow_Intrinsic;
+def int_arm_neon_vqmovnu : Neon_1Arg_Narrow_Intrinsic;
+def int_arm_neon_vqmovnsu : Neon_1Arg_Narrow_Intrinsic;
+
+// Vector Table Lookup.
+// The first 1-4 arguments are the table.
+def int_arm_neon_vtbl1 : Neon_Tbl2Arg_Intrinsic;
+def int_arm_neon_vtbl2 : Neon_Tbl3Arg_Intrinsic;
+def int_arm_neon_vtbl3 : Neon_Tbl4Arg_Intrinsic;
+def int_arm_neon_vtbl4 : Neon_Tbl5Arg_Intrinsic;
+
+// Vector Table Extension.
+// Some elements of the destination vector may not be updated, so the original
+// value of that vector is passed as the first argument. The next 1-4
+// arguments after that are the table.
+def int_arm_neon_vtbx1 : Neon_Tbl3Arg_Intrinsic;
+def int_arm_neon_vtbx2 : Neon_Tbl4Arg_Intrinsic;
+def int_arm_neon_vtbx3 : Neon_Tbl5Arg_Intrinsic;
+def int_arm_neon_vtbx4 : Neon_Tbl6Arg_Intrinsic;
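+//
+// Illustrative operand layout (editorial note): vtbl2 takes
+// (table0, table1, indices) and vtbx2 takes (dest, table0, table1, indices);
+// the Neon_TblNArg class names above count the total number of arguments,
+// including the index vector.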
+
+// Vector and Scalar Rounding.
+def int_arm_neon_vrintn : Neon_1FloatArg_Intrinsic;
+def int_arm_neon_vrintx : Neon_1Arg_Intrinsic;
+def int_arm_neon_vrinta : Neon_1Arg_Intrinsic;
+def int_arm_neon_vrintz : Neon_1Arg_Intrinsic;
+def int_arm_neon_vrintm : Neon_1Arg_Intrinsic;
+def int_arm_neon_vrintp : Neon_1Arg_Intrinsic;
+
+// De-interleaving vector loads from N-element structures.
+// Source operands are the address and alignment.
+def int_arm_neon_vld1 : Intrinsic<[llvm_anyvector_ty],
+ [llvm_anyptr_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+def int_arm_neon_vld2 : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>],
+ [llvm_anyptr_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+def int_arm_neon_vld3 : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>],
+ [llvm_anyptr_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+def int_arm_neon_vld4 : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>, LLVMMatchType<0>],
+ [llvm_anyptr_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+def int_arm_neon_vld1x2 : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>],
+ [LLVMAnyPointerType<LLVMMatchType<0>>],
+ [IntrReadMem, IntrArgMemOnly]>;
+def int_arm_neon_vld1x3 : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>],
+ [LLVMAnyPointerType<LLVMMatchType<0>>],
+ [IntrReadMem, IntrArgMemOnly]>;
+def int_arm_neon_vld1x4 : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>, LLVMMatchType<0>],
+ [LLVMAnyPointerType<LLVMMatchType<0>>],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+// Vector load N-element structure to one lane.
+// Source operands are: the address, the N input vectors (since only one
+// lane is assigned), the lane number, and the alignment.
+def int_arm_neon_vld2lane : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>],
+ [llvm_anyptr_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>, llvm_i32_ty,
+ llvm_i32_ty], [IntrReadMem, IntrArgMemOnly]>;
+def int_arm_neon_vld3lane : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>],
+ [llvm_anyptr_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>, LLVMMatchType<0>,
+ llvm_i32_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+def int_arm_neon_vld4lane : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>, LLVMMatchType<0>],
+ [llvm_anyptr_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>, LLVMMatchType<0>,
+ LLVMMatchType<0>, llvm_i32_ty,
+ llvm_i32_ty], [IntrReadMem, IntrArgMemOnly]>;
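+//
+// For example (editorial note): a two-vector lane load of <4 x i16> has the
+// shape (ptr, <4 x i16> v1, <4 x i16> v2, i32 lane, i32 align) and returns
+// the pair of updated vectors, matching the operand order described above.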
+
+// Vector load N-element structure to all lanes.
+// Source operands are the address and alignment.
+def int_arm_neon_vld2dup : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>],
+ [llvm_anyptr_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+def int_arm_neon_vld3dup : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>],
+ [llvm_anyptr_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+def int_arm_neon_vld4dup : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>, LLVMMatchType<0>],
+ [llvm_anyptr_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+// Interleaving vector stores from N-element structures.
+// Source operands are: the address, the N vectors, and the alignment.
+def int_arm_neon_vst1 : Intrinsic<[],
+ [llvm_anyptr_ty, llvm_anyvector_ty,
+ llvm_i32_ty], [IntrArgMemOnly]>;
+def int_arm_neon_vst2 : Intrinsic<[],
+ [llvm_anyptr_ty, llvm_anyvector_ty,
+ LLVMMatchType<1>, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+def int_arm_neon_vst3 : Intrinsic<[],
+ [llvm_anyptr_ty, llvm_anyvector_ty,
+ LLVMMatchType<1>, LLVMMatchType<1>,
+ llvm_i32_ty], [IntrArgMemOnly]>;
+def int_arm_neon_vst4 : Intrinsic<[],
+ [llvm_anyptr_ty, llvm_anyvector_ty,
+ LLVMMatchType<1>, LLVMMatchType<1>,
+ LLVMMatchType<1>, llvm_i32_ty],
+ [IntrArgMemOnly]>;
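+//
+// For example (editorial note): a vst2 of <8 x i8> data has the shape
+// (ptr, <8 x i8> v1, <8 x i8> v2, i32 align) and produces no result,
+// matching the operand order described above.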
+
+def int_arm_neon_vst1x2 : Intrinsic<[],
+ [llvm_anyptr_ty, llvm_anyvector_ty,
+ LLVMMatchType<1>],
+ [IntrArgMemOnly, NoCapture<0>]>;
+def int_arm_neon_vst1x3 : Intrinsic<[],
+ [llvm_anyptr_ty, llvm_anyvector_ty,
+ LLVMMatchType<1>, LLVMMatchType<1>],
+ [IntrArgMemOnly, NoCapture<0>]>;
+def int_arm_neon_vst1x4 : Intrinsic<[],
+ [llvm_anyptr_ty, llvm_anyvector_ty,
+ LLVMMatchType<1>, LLVMMatchType<1>,
+ LLVMMatchType<1>],
+ [IntrArgMemOnly, NoCapture<0>]>;
+
+// Vector store N-element structure from one lane.
+// Source operands are: the address, the N vectors, the lane number, and
+// the alignment.
+def int_arm_neon_vst2lane : Intrinsic<[],
+ [llvm_anyptr_ty, llvm_anyvector_ty,
+ LLVMMatchType<1>, llvm_i32_ty,
+ llvm_i32_ty], [IntrArgMemOnly]>;
+def int_arm_neon_vst3lane : Intrinsic<[],
+ [llvm_anyptr_ty, llvm_anyvector_ty,
+ LLVMMatchType<1>, LLVMMatchType<1>,
+ llvm_i32_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+def int_arm_neon_vst4lane : Intrinsic<[],
+ [llvm_anyptr_ty, llvm_anyvector_ty,
+ LLVMMatchType<1>, LLVMMatchType<1>,
+ LLVMMatchType<1>, llvm_i32_ty,
+ llvm_i32_ty], [IntrArgMemOnly]>;
+
+// Vector bitwise select.
+def int_arm_neon_vbsl : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem]>;
+
+
+// Crypto instructions
+class AES_1Arg_Intrinsic : Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i8_ty], [IntrNoMem]>;
+class AES_2Arg_Intrinsic : Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+
+class SHA_1Arg_Intrinsic : Intrinsic<[llvm_i32_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+class SHA_2Arg_Intrinsic : Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+class SHA_3Arg_i32_Intrinsic : Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+class SHA_3Arg_v4i32_Intrinsic : Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty,llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_arm_neon_aesd : AES_2Arg_Intrinsic;
+def int_arm_neon_aese : AES_2Arg_Intrinsic;
+def int_arm_neon_aesimc : AES_1Arg_Intrinsic;
+def int_arm_neon_aesmc : AES_1Arg_Intrinsic;
+def int_arm_neon_sha1h : SHA_1Arg_Intrinsic;
+def int_arm_neon_sha1su1 : SHA_2Arg_Intrinsic;
+def int_arm_neon_sha256su0 : SHA_2Arg_Intrinsic;
+def int_arm_neon_sha1c : SHA_3Arg_i32_Intrinsic;
+def int_arm_neon_sha1m : SHA_3Arg_i32_Intrinsic;
+def int_arm_neon_sha1p : SHA_3Arg_i32_Intrinsic;
+def int_arm_neon_sha1su0: SHA_3Arg_v4i32_Intrinsic;
+def int_arm_neon_sha256h: SHA_3Arg_v4i32_Intrinsic;
+def int_arm_neon_sha256h2: SHA_3Arg_v4i32_Intrinsic;
+def int_arm_neon_sha256su1: SHA_3Arg_v4i32_Intrinsic;
+
+// Armv8.2-A dot product instructions
+class Neon_Dot_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_anyvector_ty,
+ LLVMMatchType<1>],
+ [IntrNoMem]>;
+def int_arm_neon_udot : Neon_Dot_Intrinsic;
+def int_arm_neon_sdot : Neon_Dot_Intrinsic;
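+//
+// As a hedged example (editorial note): for the 128-bit forms the overloaded
+// types resolve to a <4 x i32> accumulator and two <16 x i8> inputs, i.e.
+// (<4 x i32>, <16 x i8>, <16 x i8>) for both udot and sdot.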
+
+
+} // end TargetPrefix
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsBPF.td b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsBPF.td
new file mode 100644
index 000000000..94eca8e40
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsBPF.td
@@ -0,0 +1,24 @@
+//===- IntrinsicsBPF.td - Defines BPF intrinsics -----------*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines all of the BPF-specific intrinsics.
+//
+//===----------------------------------------------------------------------===//
+
+// Specialized loads from packet
+let TargetPrefix = "bpf" in { // All intrinsics start with "llvm.bpf."
+ def int_bpf_load_byte : GCCBuiltin<"__builtin_bpf_load_byte">,
+ Intrinsic<[llvm_i64_ty], [llvm_ptr_ty, llvm_i64_ty], [IntrReadMem]>;
+ def int_bpf_load_half : GCCBuiltin<"__builtin_bpf_load_half">,
+ Intrinsic<[llvm_i64_ty], [llvm_ptr_ty, llvm_i64_ty], [IntrReadMem]>;
+ def int_bpf_load_word : GCCBuiltin<"__builtin_bpf_load_word">,
+ Intrinsic<[llvm_i64_ty], [llvm_ptr_ty, llvm_i64_ty], [IntrReadMem]>;
+ def int_bpf_pseudo : GCCBuiltin<"__builtin_bpf_pseudo">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty]>;
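+  // A hedged C-level usage sketch (editorial note, not part of the original
+  // file): unsigned long long b = __builtin_bpf_load_byte(pkt, off);
+  // each builtin takes a packet pointer plus a 64-bit offset and returns the
+  // loaded value as a 64-bit integer.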
+}
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsHexagon.td b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsHexagon.td
new file mode 100644
index 000000000..25f4215d6
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsHexagon.td
@@ -0,0 +1,10975 @@
+//===- IntrinsicsHexagon.td - Defines Hexagon intrinsics ---*- tablegen -*-===//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines all of the Hexagon-specific intrinsics.
+//
+//===----------------------------------------------------------------------===//
+
+//===----------------------------------------------------------------------===//
+// Definitions for all Hexagon intrinsics.
+//
+// All Hexagon intrinsics start with "llvm.hexagon.".
+let TargetPrefix = "hexagon" in {
+ /// Hexagon_Intrinsic - Base class for all Hexagon intrinsics.
+ class Hexagon_Intrinsic<string GCCIntSuffix, list<LLVMType> ret_types,
+ list<LLVMType> param_types,
+ list<IntrinsicProperty> properties>
+ : GCCBuiltin<!strconcat("__builtin_", GCCIntSuffix)>,
+ Intrinsic<ret_types, param_types, properties>;
+
+ /// Hexagon_NonGCC_Intrinsic - Base class for bitcode convertible Hexagon
+ /// intrinsics.
+ class Hexagon_NonGCC_Intrinsic<list<LLVMType> ret_types,
+ list<LLVMType> param_types,
+ list<IntrinsicProperty> properties>
+ : Intrinsic<ret_types, param_types, properties>;
+}
+
+//===----------------------------------------------------------------------===//
+//
+// DEF_FUNCTION_TYPE_1(QI_ftype_MEM,BT_BOOL,BT_PTR) ->
+// Hexagon_qi_mem_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_qi_mem_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i1_ty], [llvm_ptr_ty],
+ [IntrNoMem]>;
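+//
+// Editorial note on the naming scheme used below: the helper class names
+// encode <result>_<params> using the builtin type letters from the
+// DEF_FUNCTION_TYPE comments: qi is the predicate/bool type (i1, often
+// passed as i32), hi is i16, si is i32, di is i64, usi/udi are the unsigned
+// variants, sf is float, df is double, and mem is a pointer.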
+//
+// DEF_FUNCTION_TYPE_1(HI_ftype_SI,BT_I16,BT_INT) ->
+// Hexagon_hi_si_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_hi_si_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i16_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_1(SI_ftype_SI,BT_INT,BT_INT) ->
+// Hexagon_si_si_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_si_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_1(DI_ftype_SI,BT_LONGLONG,BT_INT) ->
+// Hexagon_di_si_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_si_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_1(SI_ftype_DI,BT_INT,BT_LONGLONG) ->
+// Hexagon_si_di_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_di_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i64_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_1(DI_ftype_DI,BT_LONGLONG,BT_LONGLONG) ->
+// Hexagon_di_di_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_di_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i64_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_1(QI_ftype_QI,BT_BOOL,BT_BOOL) ->
+// Hexagon_qi_qi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_qi_qi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i1_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_1(QI_ftype_SI,BT_BOOL,BT_INT) ->
+// Hexagon_qi_si_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_qi_si_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i1_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_1(DI_ftype_QI,BT_LONGLONG,BT_BOOL) ->
+// Hexagon_di_qi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_qi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_1(SI_ftype_QI,BT_INT,BT_BOOL) ->
+// Hexagon_si_qi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_qi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(QI_ftype_SISI,BT_BOOL,BT_INT,BT_INT) ->
+// Hexagon_qi_sisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_qi_sisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i1_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(void_ftype_SISI,BT_VOID,BT_INT,BT_INT) ->
+// Hexagon_void_sisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_void_sisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_void_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(SI_ftype_SISI,BT_INT,BT_INT,BT_INT) ->
+// Hexagon_si_sisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_sisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(USI_ftype_SISI,BT_UINT,BT_INT,BT_INT) ->
+// Hexagon_usi_sisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_usi_sisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(DI_ftype_SISI,BT_LONGLONG,BT_INT,BT_INT) ->
+// Hexagon_di_sisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_sisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(UDI_ftype_SISI,BT_ULONGLONG,BT_INT,BT_INT) ->
+// Hexagon_udi_sisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_udi_sisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(DI_ftype_SIDI,BT_LONGLONG,BT_INT,BT_LONGLONG) ->
+// Hexagon_di_sidi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_sidi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i32_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(DI_ftype_DISI,BT_LONGLONG,BT_LONGLONG,BT_INT) ->
+// Hexagon_di_disi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_disi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(SI_ftype_SIDI,BT_INT,BT_INT,BT_LONGLONG) ->
+// Hexagon_si_sidi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_sidi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i32_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(SI_ftype_DIDI,BT_INT,BT_LONGLONG,BT_LONGLONG) ->
+// Hexagon_si_didi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_didi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i64_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(DI_ftype_DIDI,BT_LONGLONG,BT_LONGLONG,BT_LONGLONG) ->
+// Hexagon_di_didi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_didi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(UDI_ftype_DIDI,BT_ULONGLONG,BT_LONGLONG,BT_LONGLONG) ->
+// Hexagon_udi_didi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_udi_didi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(SI_ftype_DISI,BT_INT,BT_LONGLONG,BT_INT) ->
+// Hexagon_si_disi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_disi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(QI_ftype_DIDI,BT_BOOL,BT_LONGLONG,BT_LONGLONG) ->
+// Hexagon_qi_didi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_qi_didi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i1_ty], [llvm_i64_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(QI_ftype_SIDI,BT_BOOL,BT_INT,BT_LONGLONG) ->
+// Hexagon_qi_sidi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_qi_sidi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i1_ty], [llvm_i32_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(QI_ftype_DISI,BT_BOOL,BT_LONGLONG,BT_INT) ->
+// Hexagon_qi_disi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_qi_disi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i1_ty], [llvm_i64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(QI_ftype_QIQI,BT_BOOL,BT_BOOL,BT_BOOL) ->
+// Hexagon_qi_qiqi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_qi_qiqi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i1_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_3(QI_ftype_QIQIQI,BT_BOOL,BT_BOOL,BT_BOOL,BT_BOOL) ->
+// Hexagon_qi_qiqiqi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_qi_qiqiqi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i1_ty], [llvm_i1_ty, llvm_i1_ty, llvm_i1_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(SI_ftype_QIQI,BT_INT,BT_BOOL,BT_BOOL) ->
+// Hexagon_si_qiqi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_qiqi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_2(SI_ftype_QISI,BT_INT,BT_BOOL,BT_INT) ->
+// Hexagon_si_qisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_qisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i1_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_3(void_ftype_SISISI,BT_VOID,BT_INT,BT_INT,BT_INT) ->
+// Hexagon_void_sisisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_void_sisisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_void_ty], [llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_3(SI_ftype_SISISI,BT_INT,BT_INT,BT_INT,BT_INT) ->
+// Hexagon_si_sisisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_sisisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_3(DI_ftype_SISISI,BT_LONGLONG,BT_INT,BT_INT,BT_INT) ->
+// Hexagon_di_sisisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_sisisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_3(SI_ftype_DISISI,BT_INT,BT_LONGLONG,BT_INT,BT_INT) ->
+// Hexagon_si_disisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_disisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i64_ty, llvm_i32_ty,
+ llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_3(DI_ftype_DISISI,BT_LONGLONG,BT_LONGLONG,BT_INT,BT_INT) ->
+// Hexagon_di_disisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_disisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i64_ty, llvm_i32_ty,
+ llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_3(SI_ftype_SIDISI,BT_INT,BT_INT,BT_LONGLONG,BT_INT) ->
+// Hexagon_si_sidisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_sidisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i32_ty, llvm_i64_ty,
+ llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_3(DI_ftype_DIDISI,BT_LONGLONG,BT_LONGLONG,
+// BT_LONGLONG,BT_INT) ->
+// Hexagon_di_didisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_didisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty,
+ llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_3(SI_ftype_SIDIDI,BT_INT,BT_INT,BT_LONGLONG,BT_LONGLONG) ->
+// Hexagon_si_sididi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_sididi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i32_ty, llvm_i64_ty,
+ llvm_i64_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_3(DI_ftype_DIDIDI,BT_LONGLONG,BT_LONGLONG,BT_LONGLONG,
+// BT_LONGLONG) ->
+// Hexagon_di_dididi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_dididi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty,
+ llvm_i64_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_3(SI_ftype_SISIDI,BT_INT,BT_INT,BT_INT,BT_LONGLONG) ->
+// Hexagon_si_sisidi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_sisidi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty,
+ llvm_i64_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_3(SI_ftype_QISISI,BT_INT,BT_BOOL,BT_INT,BT_INT) ->
+// Hexagon_si_qisisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_qisisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_3(DI_ftype_QISISI,BT_LONGLONG,BT_BOOL,BT_INT,BT_INT) ->
+// Hexagon_di_qisisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_qisisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i1_ty, llvm_i32_ty,
+ llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_3(DI_ftype_QIDIDI,BT_LONGLONG,BT_BOOL,BT_LONGLONG,
+// BT_LONGLONG) ->
+// Hexagon_di_qididi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_qididi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i32_ty, llvm_i64_ty,
+ llvm_i64_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_3(DI_ftype_DIDIQI,BT_LONGLONG,BT_LONGLONG,BT_LONGLONG,
+// BT_BOOL) ->
+// Hexagon_di_didiqi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_didiqi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty,
+ llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_4(SI_ftype_SISISISI,BT_INT,BT_INT,BT_INT,BT_INT,BT_INT) ->
+// Hexagon_si_sisisisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_sisisisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// DEF_FUNCTION_TYPE_4(DI_ftype_DIDISISI,BT_LONGLONG,BT_LONGLONG,
+// BT_LONGLONG,BT_INT,BT_INT) ->
+// Hexagon_di_didisisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_didisisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty,
+ llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+class Hexagon_mem_memmemsi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_ptr_ty], [llvm_ptr_ty, llvm_ptr_ty,
+ llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+class Hexagon_mem_memsisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_ptr_ty], [llvm_ptr_ty, llvm_i32_ty,
+ llvm_i32_ty],
+ [IntrWriteMem]>;
+
+class Hexagon_mem_memdisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_ptr_ty], [llvm_ptr_ty, llvm_i64_ty,
+ llvm_i32_ty],
+ [IntrWriteMem]>;
+
+class Hexagon_mem_memmemsisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_ptr_ty], [llvm_ptr_ty, llvm_ptr_ty,
+ llvm_i32_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+class Hexagon_mem_memsisisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_ptr_ty], [llvm_ptr_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty],
+ [IntrWriteMem]>;
+
+class Hexagon_mem_memdisisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_ptr_ty], [llvm_ptr_ty, llvm_i64_ty,
+ llvm_i32_ty, llvm_i32_ty],
+ [IntrWriteMem]>;
+
+class Hexagon_v256_v256v256_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v8i32_ty], [llvm_v8i32_ty, llvm_v8i32_ty],
+ [IntrArgMemOnly]>;
+
+//
+// Hexagon_sf_si_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_sf_si_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_float_ty], [llvm_i32_ty],
+ [IntrNoMem, Throws]>;
+//
+// Hexagon_sf_df_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_sf_df_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_float_ty], [llvm_double_ty],
+ [IntrNoMem]>;
+//
+// Hexagon_sf_di_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_sf_di_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_float_ty], [llvm_i64_ty],
+ [IntrNoMem]>;
+//
+// Hexagon_df_sf_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_df_sf_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_double_ty], [llvm_float_ty],
+ [IntrNoMem]>;
+//
+// Hexagon_di_sf_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_sf_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_float_ty],
+ [IntrNoMem]>;
+//
+// Hexagon_sf_sf_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_sf_sf_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_float_ty], [llvm_float_ty],
+ [IntrNoMem]>;
+//
+// Hexagon_si_sf_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_sf_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_float_ty],
+ [IntrNoMem]>;
+//
+// Hexagon_si_df_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_df_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_double_ty],
+ [IntrNoMem]>;
+//
+// Hexagon_sf_sfsf_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_sf_sfsf_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Throws]>;
+//
+// Hexagon_si_sfsf_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_sfsf_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Throws]>;
+//
+// Hexagon_si_sfsi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_sfsi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_float_ty, llvm_i32_ty],
+ [IntrNoMem, Throws]>;
+//
+// Hexagon_qi_sfqi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_qi_sfqi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i1_ty], [llvm_float_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// Hexagon_sf_sfsfsf_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_sf_sfsfsf_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_float_ty], [llvm_float_ty, llvm_float_ty,
+ llvm_float_ty],
+ [IntrNoMem, Throws]>;
+//
+// Hexagon_sf_sfsfsfqi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_sf_sfsfsfqi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_float_ty], [llvm_float_ty, llvm_float_ty,
+ llvm_float_ty,
+ llvm_i32_ty],
+ [IntrNoMem, Throws]>;
+//
+// Hexagon_di_dididisi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_dididisi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty,
+ llvm_i64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+//
+// Hexagon_df_si_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_df_si_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_double_ty], [llvm_i32_ty],
+ [IntrNoMem, Throws]>;
+//
+// Hexagon_df_di_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_df_di_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_double_ty], [llvm_i64_ty],
+ [IntrNoMem]>;
+//
+// Hexagon_di_df_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_di_df_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_double_ty],
+ [IntrNoMem]>;
+//
+// Hexagon_df_df_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_df_df_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_double_ty], [llvm_double_ty],
+ [IntrNoMem]>;
+//
+// Hexagon_df_dfdf_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_df_dfdf_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Throws]>;
+//
+// Hexagon_si_dfdf_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_dfdf_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Throws]>;
+//
+// Hexagon_si_dfsi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_si_dfsi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_double_ty, llvm_i32_ty],
+ [IntrNoMem, Throws]>;
+//
+//
+// Hexagon_df_dfdfdf_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_df_dfdfdf_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_double_ty], [llvm_double_ty, llvm_double_ty,
+ llvm_double_ty],
+ [IntrNoMem, Throws]>;
+//
+// Hexagon_df_dfdfdfqi_Intrinsic<string GCCIntSuffix>
+//
+class Hexagon_df_dfdfdfqi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_double_ty], [llvm_double_ty, llvm_double_ty,
+ llvm_double_ty,
+ llvm_i32_ty],
+ [IntrNoMem, Throws]>;
+
+
+// The defs below will not be auto-generated,
+// so make sure you don't overwrite them.
+//
+// BUILTIN_INFO_NONCONST(circ_ldd,PTR_ftype_PTRPTRSISI,4)
+//
+def int_hexagon_circ_ldd :
+Hexagon_mem_memmemsisi_Intrinsic<"circ_ldd">;
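+//
+// Editorial note: through the !strconcat in the Hexagon_Intrinsic base class
+// above, this def carries GCCBuiltin<"__builtin_circ_ldd">; the remaining
+// circular loads and stores below map to their builtins the same way.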
+//
+// BUILTIN_INFO_NONCONST(circ_ldw,PTR_ftype_PTRPTRSISI,4)
+//
+def int_hexagon_circ_ldw :
+Hexagon_mem_memmemsisi_Intrinsic<"circ_ldw">;
+//
+// BUILTIN_INFO_NONCONST(circ_ldh,PTR_ftype_PTRPTRSISI,4)
+//
+def int_hexagon_circ_ldh :
+Hexagon_mem_memmemsisi_Intrinsic<"circ_ldh">;
+//
+// BUILTIN_INFO_NONCONST(circ_lduh,PTR_ftype_PTRPTRSISI,4)
+//
+def int_hexagon_circ_lduh :
+Hexagon_mem_memmemsisi_Intrinsic<"circ_lduh">;
+//
+// BUILTIN_INFO_NONCONST(circ_ldb,PTR_ftype_PTRPTRSISI,4)
+//
+def int_hexagon_circ_ldb :
+Hexagon_mem_memmemsisi_Intrinsic<"circ_ldb">;
+//
+// BUILTIN_INFO_NONCONST(circ_ldub,PTR_ftype_PTRPTRSISI,4)
+//
+def int_hexagon_circ_ldub :
+Hexagon_mem_memmemsisi_Intrinsic<"circ_ldub">;
+
+//
+// BUILTIN_INFO_NONCONST(circ_std,PTR_ftype_PTRDISISI,4)
+//
+def int_hexagon_circ_std :
+Hexagon_mem_memdisisi_Intrinsic<"circ_std">;
+//
+// BUILTIN_INFO_NONCONST(circ_stw,PTR_ftype_PTRSISISI,4)
+//
+def int_hexagon_circ_stw :
+Hexagon_mem_memsisisi_Intrinsic<"circ_stw">;
+//
+// BUILTIN_INFO_NONCONST(circ_sth,PTR_ftype_PTRSISISI,4)
+//
+def int_hexagon_circ_sth :
+Hexagon_mem_memsisisi_Intrinsic<"circ_sth">;
+//
+// BUILTIN_INFO_NONCONST(circ_sthhi,PTR_ftype_PTRSISISI,4)
+//
+def int_hexagon_circ_sthhi :
+Hexagon_mem_memsisisi_Intrinsic<"circ_sthhi">;
+//
+// BUILTIN_INFO_NONCONST(circ_stb,PTR_ftype_PTRSISISI,4)
+//
+def int_hexagon_circ_stb :
+Hexagon_mem_memsisisi_Intrinsic<"circ_stb">;
+
+
+def int_hexagon_mm256i_vaddw :
+Hexagon_v256_v256v256_Intrinsic<"_mm256i_vaddw">;
+
+
+// The defs above will not be auto-generated,
+// so make sure you don't overwrite them.
+//
+// BUILTIN_INFO(HEXAGON.C2_cmpeq,QI_ftype_SISI,2)
+//
+def int_hexagon_C2_cmpeq :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_cmpeq">;
+//
+// BUILTIN_INFO(HEXAGON.C2_cmpgt,QI_ftype_SISI,2)
+//
+def int_hexagon_C2_cmpgt :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_cmpgt">;
+//
+// BUILTIN_INFO(HEXAGON.C2_cmpgtu,QI_ftype_SISI,2)
+//
+def int_hexagon_C2_cmpgtu :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_cmpgtu">;
+//
+// BUILTIN_INFO(HEXAGON.C2_cmpeqp,QI_ftype_DIDI,2)
+//
+def int_hexagon_C2_cmpeqp :
+Hexagon_si_didi_Intrinsic<"HEXAGON_C2_cmpeqp">;
+//
+// BUILTIN_INFO(HEXAGON.C2_cmpgtp,QI_ftype_DIDI,2)
+//
+def int_hexagon_C2_cmpgtp :
+Hexagon_si_didi_Intrinsic<"HEXAGON_C2_cmpgtp">;
+//
+// BUILTIN_INFO(HEXAGON.C2_cmpgtup,QI_ftype_DIDI,2)
+//
+def int_hexagon_C2_cmpgtup :
+Hexagon_si_didi_Intrinsic<"HEXAGON_C2_cmpgtup">;
+//
+// BUILTIN_INFO(HEXAGON.A4_rcmpeqi,SI_ftype_SISI,2)
+//
+def int_hexagon_A4_rcmpeqi :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_rcmpeqi">;
+//
+// BUILTIN_INFO(HEXAGON.A4_rcmpneqi,SI_ftype_SISI,2)
+//
+def int_hexagon_A4_rcmpneqi :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_rcmpneqi">;
+//
+// BUILTIN_INFO(HEXAGON.A4_rcmpeq,SI_ftype_SISI,2)
+//
+def int_hexagon_A4_rcmpeq :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_rcmpeq">;
+//
+// BUILTIN_INFO(HEXAGON.A4_rcmpneq,SI_ftype_SISI,2)
+//
+def int_hexagon_A4_rcmpneq :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_rcmpneq">;
+//
+// BUILTIN_INFO(HEXAGON.C2_bitsset,QI_ftype_SISI,2)
+//
+def int_hexagon_C2_bitsset :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_bitsset">;
+//
+// BUILTIN_INFO(HEXAGON.C2_bitsclr,QI_ftype_SISI,2)
+//
+def int_hexagon_C2_bitsclr :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_bitsclr">;
+//
+// BUILTIN_INFO(HEXAGON.C4_nbitsset,QI_ftype_SISI,2)
+//
+def int_hexagon_C4_nbitsset :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C4_nbitsset">;
+//
+// BUILTIN_INFO(HEXAGON.C4_nbitsclr,QI_ftype_SISI,2)
+//
+def int_hexagon_C4_nbitsclr :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C4_nbitsclr">;
+//
+// BUILTIN_INFO(HEXAGON.C2_cmpeqi,QI_ftype_SISI,2)
+//
+def int_hexagon_C2_cmpeqi :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_cmpeqi">;
+//
+// BUILTIN_INFO(HEXAGON.C2_cmpgti,QI_ftype_SISI,2)
+//
+def int_hexagon_C2_cmpgti :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_cmpgti">;
+//
+// BUILTIN_INFO(HEXAGON.C2_cmpgtui,QI_ftype_SISI,2)
+//
+def int_hexagon_C2_cmpgtui :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_cmpgtui">;
+//
+// BUILTIN_INFO(HEXAGON.C2_cmpgei,QI_ftype_SISI,2)
+//
+def int_hexagon_C2_cmpgei :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_cmpgei">;
+//
+// BUILTIN_INFO(HEXAGON.C2_cmpgeui,QI_ftype_SISI,2)
+//
+def int_hexagon_C2_cmpgeui :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_cmpgeui">;
+//
+// BUILTIN_INFO(HEXAGON.C2_cmplt,QI_ftype_SISI,2)
+//
+def int_hexagon_C2_cmplt :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_cmplt">;
+//
+// BUILTIN_INFO(HEXAGON.C2_cmpltu,QI_ftype_SISI,2)
+//
+def int_hexagon_C2_cmpltu :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_cmpltu">;
+//
+// BUILTIN_INFO(HEXAGON.C2_bitsclri,QI_ftype_SISI,2)
+//
+def int_hexagon_C2_bitsclri :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_bitsclri">;
+//
+// BUILTIN_INFO(HEXAGON.C4_nbitsclri,QI_ftype_SISI,2)
+//
+def int_hexagon_C4_nbitsclri :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C4_nbitsclri">;
+//
+// BUILTIN_INFO(HEXAGON.C4_cmpneqi,QI_ftype_SISI,2)
+//
+def int_hexagon_C4_cmpneqi :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C4_cmpneqi">;
+//
+// BUILTIN_INFO(HEXAGON.C4_cmpltei,QI_ftype_SISI,2)
+//
+def int_hexagon_C4_cmpltei :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C4_cmpltei">;
+//
+// BUILTIN_INFO(HEXAGON.C4_cmplteui,QI_ftype_SISI,2)
+//
+def int_hexagon_C4_cmplteui :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C4_cmplteui">;
+//
+// BUILTIN_INFO(HEXAGON.C4_cmpneq,QI_ftype_SISI,2)
+//
+def int_hexagon_C4_cmpneq :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C4_cmpneq">;
+//
+// BUILTIN_INFO(HEXAGON.C4_cmplte,QI_ftype_SISI,2)
+//
+def int_hexagon_C4_cmplte :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C4_cmplte">;
+//
+// BUILTIN_INFO(HEXAGON.C4_cmplteu,QI_ftype_SISI,2)
+//
+def int_hexagon_C4_cmplteu :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C4_cmplteu">;
+//
+// BUILTIN_INFO(HEXAGON.C2_and,QI_ftype_QIQI,2)
+//
+def int_hexagon_C2_and :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_and">;
+//
+// BUILTIN_INFO(HEXAGON.C2_or,QI_ftype_QIQI,2)
+//
+def int_hexagon_C2_or :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_or">;
+//
+// BUILTIN_INFO(HEXAGON.C2_xor,QI_ftype_QIQI,2)
+//
+def int_hexagon_C2_xor :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_xor">;
+//
+// BUILTIN_INFO(HEXAGON.C2_andn,QI_ftype_QIQI,2)
+//
+def int_hexagon_C2_andn :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_andn">;
+//
+// BUILTIN_INFO(HEXAGON.C2_not,QI_ftype_QI,1)
+//
+def int_hexagon_C2_not :
+Hexagon_si_si_Intrinsic<"HEXAGON_C2_not">;
+//
+// BUILTIN_INFO(HEXAGON.C2_orn,QI_ftype_QIQI,2)
+//
+def int_hexagon_C2_orn :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_C2_orn">;
+//
+// BUILTIN_INFO(HEXAGON.C4_and_and,QI_ftype_QIQIQI,3)
+//
+def int_hexagon_C4_and_and :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_C4_and_and">;
+//
+// BUILTIN_INFO(HEXAGON.C4_and_or,QI_ftype_QIQIQI,3)
+//
+def int_hexagon_C4_and_or :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_C4_and_or">;
+//
+// BUILTIN_INFO(HEXAGON.C4_or_and,QI_ftype_QIQIQI,3)
+//
+def int_hexagon_C4_or_and :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_C4_or_and">;
+//
+// BUILTIN_INFO(HEXAGON.C4_or_or,QI_ftype_QIQIQI,3)
+//
+def int_hexagon_C4_or_or :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_C4_or_or">;
+//
+// BUILTIN_INFO(HEXAGON.C4_and_andn,QI_ftype_QIQIQI,3)
+//
+def int_hexagon_C4_and_andn :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_C4_and_andn">;
+//
+// BUILTIN_INFO(HEXAGON.C4_and_orn,QI_ftype_QIQIQI,3)
+//
+def int_hexagon_C4_and_orn :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_C4_and_orn">;
+//
+// BUILTIN_INFO(HEXAGON.C4_or_andn,QI_ftype_QIQIQI,3)
+//
+def int_hexagon_C4_or_andn :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_C4_or_andn">;
+//
+// BUILTIN_INFO(HEXAGON.C4_or_orn,QI_ftype_QIQIQI,3)
+//
+def int_hexagon_C4_or_orn :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_C4_or_orn">;
+//
+// BUILTIN_INFO(HEXAGON.C2_pxfer_map,QI_ftype_QI,1)
+//
+def int_hexagon_C2_pxfer_map :
+Hexagon_si_qi_Intrinsic<"HEXAGON_C2_pxfer_map">;
+//
+// BUILTIN_INFO(HEXAGON.C2_any8,QI_ftype_QI,1)
+//
+def int_hexagon_C2_any8 :
+Hexagon_si_qi_Intrinsic<"HEXAGON_C2_any8">;
+//
+// BUILTIN_INFO(HEXAGON.C2_all8,QI_ftype_QI,1)
+//
+def int_hexagon_C2_all8 :
+Hexagon_si_qi_Intrinsic<"HEXAGON_C2_all8">;
+//
+// BUILTIN_INFO(HEXAGON.C2_vitpack,SI_ftype_QIQI,2)
+//
+def int_hexagon_C2_vitpack :
+Hexagon_si_qiqi_Intrinsic<"HEXAGON_C2_vitpack">;
+//
+// BUILTIN_INFO(HEXAGON.C2_mux,SI_ftype_QISISI,3)
+//
+def int_hexagon_C2_mux :
+Hexagon_si_qisisi_Intrinsic<"HEXAGON_C2_mux">;
+//
+// BUILTIN_INFO(HEXAGON.C2_muxii,SI_ftype_QISISI,3)
+//
+def int_hexagon_C2_muxii :
+Hexagon_si_qisisi_Intrinsic<"HEXAGON_C2_muxii">;
+//
+// BUILTIN_INFO(HEXAGON.C2_muxir,SI_ftype_QISISI,3)
+//
+def int_hexagon_C2_muxir :
+Hexagon_si_qisisi_Intrinsic<"HEXAGON_C2_muxir">;
+//
+// BUILTIN_INFO(HEXAGON.C2_muxri,SI_ftype_QISISI,3)
+//
+def int_hexagon_C2_muxri :
+Hexagon_si_qisisi_Intrinsic<"HEXAGON_C2_muxri">;
+//
+// BUILTIN_INFO(HEXAGON.C2_vmux,DI_ftype_QIDIDI,3)
+//
+def int_hexagon_C2_vmux :
+Hexagon_di_qididi_Intrinsic<"HEXAGON_C2_vmux">;
+//
+// BUILTIN_INFO(HEXAGON.C2_mask,DI_ftype_QI,1)
+//
+def int_hexagon_C2_mask :
+Hexagon_di_qi_Intrinsic<"HEXAGON_C2_mask">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vcmpbeq,QI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vcmpbeq :
+Hexagon_si_didi_Intrinsic<"HEXAGON_A2_vcmpbeq">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vcmpbeqi,QI_ftype_DISI,2)
+//
+def int_hexagon_A4_vcmpbeqi :
+Hexagon_si_disi_Intrinsic<"HEXAGON_A4_vcmpbeqi">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vcmpbeq_any,QI_ftype_DIDI,2)
+//
+def int_hexagon_A4_vcmpbeq_any :
+Hexagon_si_didi_Intrinsic<"HEXAGON_A4_vcmpbeq_any">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vcmpbgtu,QI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vcmpbgtu :
+Hexagon_si_didi_Intrinsic<"HEXAGON_A2_vcmpbgtu">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vcmpbgtui,QI_ftype_DISI,2)
+//
+def int_hexagon_A4_vcmpbgtui :
+Hexagon_si_disi_Intrinsic<"HEXAGON_A4_vcmpbgtui">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vcmpbgt,QI_ftype_DIDI,2)
+//
+def int_hexagon_A4_vcmpbgt :
+Hexagon_si_didi_Intrinsic<"HEXAGON_A4_vcmpbgt">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vcmpbgti,QI_ftype_DISI,2)
+//
+def int_hexagon_A4_vcmpbgti :
+Hexagon_si_disi_Intrinsic<"HEXAGON_A4_vcmpbgti">;
+//
+// BUILTIN_INFO(HEXAGON.A4_cmpbeq,QI_ftype_SISI,2)
+//
+def int_hexagon_A4_cmpbeq :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_cmpbeq">;
+//
+// BUILTIN_INFO(HEXAGON.A4_cmpbeqi,QI_ftype_SISI,2)
+//
+def int_hexagon_A4_cmpbeqi :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_cmpbeqi">;
+//
+// BUILTIN_INFO(HEXAGON.A4_cmpbgtu,QI_ftype_SISI,2)
+//
+def int_hexagon_A4_cmpbgtu :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_cmpbgtu">;
+//
+// BUILTIN_INFO(HEXAGON.A4_cmpbgtui,QI_ftype_SISI,2)
+//
+def int_hexagon_A4_cmpbgtui :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_cmpbgtui">;
+//
+// BUILTIN_INFO(HEXAGON.A4_cmpbgt,QI_ftype_SISI,2)
+//
+def int_hexagon_A4_cmpbgt :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_cmpbgt">;
+//
+// BUILTIN_INFO(HEXAGON.A4_cmpbgti,QI_ftype_SISI,2)
+//
+def int_hexagon_A4_cmpbgti :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_cmpbgti">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vcmpheq,QI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vcmpheq :
+Hexagon_si_didi_Intrinsic<"HEXAGON_A2_vcmpheq">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vcmphgt,QI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vcmphgt :
+Hexagon_si_didi_Intrinsic<"HEXAGON_A2_vcmphgt">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vcmphgtu,QI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vcmphgtu :
+Hexagon_si_didi_Intrinsic<"HEXAGON_A2_vcmphgtu">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vcmpheqi,QI_ftype_DISI,2)
+//
+def int_hexagon_A4_vcmpheqi :
+Hexagon_si_disi_Intrinsic<"HEXAGON_A4_vcmpheqi">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vcmphgti,QI_ftype_DISI,2)
+//
+def int_hexagon_A4_vcmphgti :
+Hexagon_si_disi_Intrinsic<"HEXAGON_A4_vcmphgti">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vcmphgtui,QI_ftype_DISI,2)
+//
+def int_hexagon_A4_vcmphgtui :
+Hexagon_si_disi_Intrinsic<"HEXAGON_A4_vcmphgtui">;
+//
+// BUILTIN_INFO(HEXAGON.A4_cmpheq,QI_ftype_SISI,2)
+//
+def int_hexagon_A4_cmpheq :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_cmpheq">;
+//
+// BUILTIN_INFO(HEXAGON.A4_cmphgt,QI_ftype_SISI,2)
+//
+def int_hexagon_A4_cmphgt :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_cmphgt">;
+//
+// BUILTIN_INFO(HEXAGON.A4_cmphgtu,QI_ftype_SISI,2)
+//
+def int_hexagon_A4_cmphgtu :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_cmphgtu">;
+//
+// BUILTIN_INFO(HEXAGON.A4_cmpheqi,QI_ftype_SISI,2)
+//
+def int_hexagon_A4_cmpheqi :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_cmpheqi">;
+//
+// BUILTIN_INFO(HEXAGON.A4_cmphgti,QI_ftype_SISI,2)
+//
+def int_hexagon_A4_cmphgti :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_cmphgti">;
+//
+// BUILTIN_INFO(HEXAGON.A4_cmphgtui,QI_ftype_SISI,2)
+//
+def int_hexagon_A4_cmphgtui :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_cmphgtui">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vcmpweq,QI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vcmpweq :
+Hexagon_si_didi_Intrinsic<"HEXAGON_A2_vcmpweq">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vcmpwgt,QI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vcmpwgt :
+Hexagon_si_didi_Intrinsic<"HEXAGON_A2_vcmpwgt">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vcmpwgtu,QI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vcmpwgtu :
+Hexagon_si_didi_Intrinsic<"HEXAGON_A2_vcmpwgtu">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vcmpweqi,QI_ftype_DISI,2)
+//
+def int_hexagon_A4_vcmpweqi :
+Hexagon_si_disi_Intrinsic<"HEXAGON_A4_vcmpweqi">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vcmpwgti,QI_ftype_DISI,2)
+//
+def int_hexagon_A4_vcmpwgti :
+Hexagon_si_disi_Intrinsic<"HEXAGON_A4_vcmpwgti">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vcmpwgtui,QI_ftype_DISI,2)
+//
+def int_hexagon_A4_vcmpwgtui :
+Hexagon_si_disi_Intrinsic<"HEXAGON_A4_vcmpwgtui">;
+//
+// BUILTIN_INFO(HEXAGON.A4_boundscheck,QI_ftype_SIDI,2)
+//
+def int_hexagon_A4_boundscheck :
+Hexagon_si_sidi_Intrinsic<"HEXAGON_A4_boundscheck">;
+//
+// BUILTIN_INFO(HEXAGON.A4_tlbmatch,QI_ftype_DISI,2)
+//
+def int_hexagon_A4_tlbmatch :
+Hexagon_si_disi_Intrinsic<"HEXAGON_A4_tlbmatch">;
+//
+// BUILTIN_INFO(HEXAGON.C2_tfrpr,SI_ftype_QI,1)
+//
+def int_hexagon_C2_tfrpr :
+Hexagon_si_qi_Intrinsic<"HEXAGON_C2_tfrpr">;
+//
+// BUILTIN_INFO(HEXAGON.C2_tfrrp,QI_ftype_SI,1)
+//
+def int_hexagon_C2_tfrrp :
+Hexagon_si_si_Intrinsic<"HEXAGON_C2_tfrrp">;
+//
+// BUILTIN_INFO(HEXAGON.C4_fastcorner9,QI_ftype_QIQI,2)
+//
+def int_hexagon_C4_fastcorner9 :
+Hexagon_si_qiqi_Intrinsic<"HEXAGON_C4_fastcorner9">;
+//
+// BUILTIN_INFO(HEXAGON.C4_fastcorner9_not,QI_ftype_QIQI,2)
+//
+def int_hexagon_C4_fastcorner9_not :
+Hexagon_si_qiqi_Intrinsic<"HEXAGON_C4_fastcorner9_not">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_hh_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_hh_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_hh_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_hh_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_hl_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_hl_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_hl_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_hl_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_lh_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_lh_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_lh_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_lh_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_ll_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_ll_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_ll_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_ll_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_hh_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_hh_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_hh_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_hh_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_hl_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_hl_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_hl_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_hl_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_lh_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_lh_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_lh_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_lh_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_ll_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_ll_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_ll_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_ll_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_sat_hh_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_sat_hh_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_sat_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_sat_hh_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_sat_hh_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_sat_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_sat_hl_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_sat_hl_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_sat_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_sat_hl_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_sat_hl_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_sat_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_sat_lh_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_sat_lh_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_sat_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_sat_lh_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_sat_lh_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_sat_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_sat_ll_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_sat_ll_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_sat_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_acc_sat_ll_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_acc_sat_ll_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_acc_sat_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_sat_hh_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_sat_hh_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_sat_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_sat_hh_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_sat_hh_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_sat_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_sat_hl_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_sat_hl_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_sat_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_sat_hl_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_sat_hl_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_sat_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_sat_lh_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_sat_lh_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_sat_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_sat_lh_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_sat_lh_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_sat_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_sat_ll_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_sat_ll_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_sat_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_nac_sat_ll_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpy_nac_sat_ll_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpy_nac_sat_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_hh_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_hh_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_hh_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_hh_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_hl_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_hl_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_hl_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_hl_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_lh_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_lh_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_lh_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_lh_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_ll_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_ll_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_ll_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_ll_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_hh_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_hh_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_hh_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_hh_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_hl_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_hl_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_hl_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_hl_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_lh_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_lh_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_lh_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_lh_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_ll_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_ll_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_ll_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_ll_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_rnd_hh_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_rnd_hh_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_rnd_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_rnd_hh_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_rnd_hh_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_rnd_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_rnd_hl_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_rnd_hl_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_rnd_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_rnd_hl_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_rnd_hl_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_rnd_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_rnd_lh_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_rnd_lh_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_rnd_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_rnd_lh_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_rnd_lh_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_rnd_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_rnd_ll_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_rnd_ll_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_rnd_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_rnd_ll_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_rnd_ll_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_rnd_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_rnd_hh_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_rnd_hh_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_rnd_hh_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_rnd_hh_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_rnd_hl_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_rnd_hl_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_rnd_hl_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_rnd_hl_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_rnd_lh_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_rnd_lh_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_rnd_lh_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_rnd_lh_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_rnd_ll_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_rnd_ll_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_sat_rnd_ll_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_sat_rnd_ll_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_acc_hh_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_acc_hh_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_acc_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_acc_hh_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_acc_hh_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_acc_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_acc_hl_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_acc_hl_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_acc_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_acc_hl_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_acc_hl_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_acc_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_acc_lh_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_acc_lh_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_acc_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_acc_lh_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_acc_lh_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_acc_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_acc_ll_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_acc_ll_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_acc_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_acc_ll_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_acc_ll_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_acc_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_nac_hh_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_nac_hh_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_nac_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_nac_hh_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_nac_hh_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_nac_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_nac_hl_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_nac_hl_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_nac_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_nac_hl_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_nac_hl_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_nac_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_nac_lh_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_nac_lh_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_nac_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_nac_lh_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_nac_lh_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_nac_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_nac_ll_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_nac_ll_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_nac_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_nac_ll_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyd_nac_ll_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyd_nac_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_hh_s0,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_hh_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_hh_s1,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_hh_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_hl_s0,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_hl_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_hl_s1,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_hl_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_lh_s0,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_lh_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_lh_s1,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_lh_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_ll_s0,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_ll_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_ll_s1,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_ll_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_rnd_hh_s0,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_rnd_hh_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_rnd_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_rnd_hh_s1,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_rnd_hh_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_rnd_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_rnd_hl_s0,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_rnd_hl_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_rnd_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_rnd_hl_s1,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_rnd_hl_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_rnd_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_rnd_lh_s0,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_rnd_lh_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_rnd_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_rnd_lh_s1,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_rnd_lh_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_rnd_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_rnd_ll_s0,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_rnd_ll_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_rnd_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyd_rnd_ll_s1,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyd_rnd_ll_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyd_rnd_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_acc_hh_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_acc_hh_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_acc_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_acc_hh_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_acc_hh_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_acc_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_acc_hl_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_acc_hl_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_acc_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_acc_hl_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_acc_hl_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_acc_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_acc_lh_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_acc_lh_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_acc_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_acc_lh_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_acc_lh_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_acc_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_acc_ll_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_acc_ll_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_acc_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_acc_ll_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_acc_ll_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_acc_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_nac_hh_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_nac_hh_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_nac_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_nac_hh_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_nac_hh_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_nac_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_nac_hl_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_nac_hl_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_nac_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_nac_hl_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_nac_hl_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_nac_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_nac_lh_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_nac_lh_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_nac_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_nac_lh_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_nac_lh_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_nac_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_nac_ll_s0,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_nac_ll_s0 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_nac_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_nac_ll_s1,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_mpyu_nac_ll_s1 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_mpyu_nac_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_hh_s0,USI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyu_hh_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpyu_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_hh_s1,USI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyu_hh_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpyu_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_hl_s0,USI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyu_hl_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpyu_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_hl_s1,USI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyu_hl_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpyu_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_lh_s0,USI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyu_lh_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpyu_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_lh_s1,USI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyu_lh_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpyu_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_ll_s0,USI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyu_ll_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpyu_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_ll_s1,USI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyu_ll_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpyu_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_acc_hh_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_acc_hh_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_acc_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_acc_hh_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_acc_hh_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_acc_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_acc_hl_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_acc_hl_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_acc_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_acc_hl_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_acc_hl_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_acc_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_acc_lh_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_acc_lh_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_acc_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_acc_lh_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_acc_lh_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_acc_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_acc_ll_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_acc_ll_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_acc_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_acc_ll_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_acc_ll_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_acc_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_nac_hh_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_nac_hh_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_nac_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_nac_hh_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_nac_hh_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_nac_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_nac_hl_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_nac_hl_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_nac_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_nac_hl_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_nac_hl_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_nac_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_nac_lh_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_nac_lh_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_nac_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_nac_lh_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_nac_lh_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_nac_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_nac_ll_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_nac_ll_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_nac_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_nac_ll_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_mpyud_nac_ll_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_mpyud_nac_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_hh_s0,UDI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyud_hh_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyud_hh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_hh_s1,UDI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyud_hh_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyud_hh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_hl_s0,UDI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyud_hl_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyud_hl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_hl_s1,UDI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyud_hl_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyud_hl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_lh_s0,UDI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyud_lh_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyud_lh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_lh_s1,UDI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyud_lh_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyud_lh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_ll_s0,UDI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyud_ll_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyud_ll_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyud_ll_s1,UDI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyud_ll_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_mpyud_ll_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpysmi,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpysmi :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpysmi">;
+//
+// BUILTIN_INFO(HEXAGON.M2_macsip,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_macsip :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_macsip">;
+//
+// BUILTIN_INFO(HEXAGON.M2_macsin,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_macsin :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_macsin">;
+//
+// BUILTIN_INFO(HEXAGON.M2_dpmpyss_s0,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_dpmpyss_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_dpmpyss_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_dpmpyss_acc_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_dpmpyss_acc_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_dpmpyss_acc_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_dpmpyss_nac_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_dpmpyss_nac_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_dpmpyss_nac_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_dpmpyuu_s0,UDI_ftype_SISI,2)
+//
+def int_hexagon_M2_dpmpyuu_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_dpmpyuu_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_dpmpyuu_acc_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_dpmpyuu_acc_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_dpmpyuu_acc_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_dpmpyuu_nac_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_dpmpyuu_nac_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_dpmpyuu_nac_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_up,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_up :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_up">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_up_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_up_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_up_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpy_up_s1_sat,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpy_up_s1_sat :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpy_up_s1_sat">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyu_up,USI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyu_up :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpyu_up">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpysu_up,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpysu_up :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpysu_up">;
+//
+// BUILTIN_INFO(HEXAGON.M2_dpmpyss_rnd_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_dpmpyss_rnd_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_dpmpyss_rnd_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M4_mac_up_s1_sat,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_mac_up_s1_sat :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_mac_up_s1_sat">;
+//
+// BUILTIN_INFO(HEXAGON.M4_nac_up_s1_sat,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_nac_up_s1_sat :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_nac_up_s1_sat">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyi,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyi :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpyi">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mpyui,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_mpyui :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_mpyui">;
+//
+// BUILTIN_INFO(HEXAGON.M2_maci,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_maci :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_maci">;
+//
+// BUILTIN_INFO(HEXAGON.M2_acci,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_acci :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_acci">;
+//
+// BUILTIN_INFO(HEXAGON.M2_accii,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_accii :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_accii">;
+//
+// BUILTIN_INFO(HEXAGON.M2_nacci,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_nacci :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_nacci">;
+//
+// BUILTIN_INFO(HEXAGON.M2_naccii,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_naccii :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_naccii">;
+//
+// BUILTIN_INFO(HEXAGON.M2_subacc,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_subacc :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_subacc">;
+//
+// BUILTIN_INFO(HEXAGON.M4_mpyrr_addr,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_mpyrr_addr :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_mpyrr_addr">;
+//
+// BUILTIN_INFO(HEXAGON.M4_mpyri_addr_u2,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_mpyri_addr_u2 :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_mpyri_addr_u2">;
+//
+// BUILTIN_INFO(HEXAGON.M4_mpyri_addr,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_mpyri_addr :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_mpyri_addr">;
+//
+// BUILTIN_INFO(HEXAGON.M4_mpyri_addi,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_mpyri_addi :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_mpyri_addi">;
+//
+// BUILTIN_INFO(HEXAGON.M4_mpyrr_addi,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_mpyrr_addi :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_mpyrr_addi">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmpy2s_s0,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_vmpy2s_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_vmpy2s_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmpy2s_s1,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_vmpy2s_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_vmpy2s_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmac2s_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_vmac2s_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_vmac2s_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmac2s_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_vmac2s_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_vmac2s_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmpy2su_s0,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_vmpy2su_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_vmpy2su_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmpy2su_s1,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_vmpy2su_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_vmpy2su_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmac2su_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_vmac2su_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_vmac2su_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmac2su_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_vmac2su_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_vmac2su_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmpy2s_s0pack,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_vmpy2s_s0pack :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_vmpy2s_s0pack">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmpy2s_s1pack,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_vmpy2s_s1pack :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_vmpy2s_s1pack">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmac2,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_vmac2 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_vmac2">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmpy2es_s0,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vmpy2es_s0 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_vmpy2es_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmpy2es_s1,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vmpy2es_s1 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_vmpy2es_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmac2es_s0,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_vmac2es_s0 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_vmac2es_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmac2es_s1,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_vmac2es_s1 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_vmac2es_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vmac2es,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_vmac2es :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_vmac2es">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vrmac_s0,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_vrmac_s0 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_vrmac_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vrmpy_s0,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vrmpy_s0 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_vrmpy_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vdmpyrs_s0,SI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vdmpyrs_s0 :
+Hexagon_si_didi_Intrinsic<"HEXAGON_M2_vdmpyrs_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vdmpyrs_s1,SI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vdmpyrs_s1 :
+Hexagon_si_didi_Intrinsic<"HEXAGON_M2_vdmpyrs_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M5_vrmpybuu,DI_ftype_DIDI,2)
+//
+def int_hexagon_M5_vrmpybuu :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M5_vrmpybuu">;
+//
+// BUILTIN_INFO(HEXAGON.M5_vrmacbuu,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M5_vrmacbuu :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M5_vrmacbuu">;
+//
+// BUILTIN_INFO(HEXAGON.M5_vrmpybsu,DI_ftype_DIDI,2)
+//
+def int_hexagon_M5_vrmpybsu :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M5_vrmpybsu">;
+//
+// BUILTIN_INFO(HEXAGON.M5_vrmacbsu,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M5_vrmacbsu :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M5_vrmacbsu">;
+//
+// BUILTIN_INFO(HEXAGON.M5_vmpybuu,DI_ftype_SISI,2)
+//
+def int_hexagon_M5_vmpybuu :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M5_vmpybuu">;
+//
+// BUILTIN_INFO(HEXAGON.M5_vmpybsu,DI_ftype_SISI,2)
+//
+def int_hexagon_M5_vmpybsu :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M5_vmpybsu">;
+//
+// BUILTIN_INFO(HEXAGON.M5_vmacbuu,DI_ftype_DISISI,3)
+//
+def int_hexagon_M5_vmacbuu :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M5_vmacbuu">;
+//
+// BUILTIN_INFO(HEXAGON.M5_vmacbsu,DI_ftype_DISISI,3)
+//
+def int_hexagon_M5_vmacbsu :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M5_vmacbsu">;
+//
+// BUILTIN_INFO(HEXAGON.M5_vdmpybsu,DI_ftype_DIDI,2)
+//
+def int_hexagon_M5_vdmpybsu :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M5_vdmpybsu">;
+//
+// BUILTIN_INFO(HEXAGON.M5_vdmacbsu,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M5_vdmacbsu :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M5_vdmacbsu">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vdmacs_s0,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_vdmacs_s0 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_vdmacs_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vdmacs_s1,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_vdmacs_s1 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_vdmacs_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vdmpys_s0,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vdmpys_s0 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_vdmpys_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vdmpys_s1,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vdmpys_s1 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_vdmpys_s1">;
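+//
+// The M2_cmpy*/M2_cmac* defs below cover the complex-multiply family. As a
+// rough reading of the mnemonics (an informal note, not normative): a
+// trailing "c" conjugates the second operand, "rs" rounds and saturates the
+// product down to a single 32-bit result, and the s0/s1 suffix used across
+// this file selects no shift versus a left shift by one before saturation.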
+//
+// BUILTIN_INFO(HEXAGON.M2_cmpyrs_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_cmpyrs_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_cmpyrs_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cmpyrs_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_cmpyrs_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_cmpyrs_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cmpyrsc_s0,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_cmpyrsc_s0 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_cmpyrsc_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cmpyrsc_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_cmpyrsc_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_cmpyrsc_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cmacs_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_cmacs_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_cmacs_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cmacs_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_cmacs_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_cmacs_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cmacsc_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_cmacsc_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_cmacsc_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cmacsc_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_cmacsc_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_cmacsc_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cmpys_s0,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_cmpys_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_cmpys_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cmpys_s1,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_cmpys_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_cmpys_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cmpysc_s0,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_cmpysc_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_cmpysc_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cmpysc_s1,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_cmpysc_s1 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_cmpysc_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cnacs_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_cnacs_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_cnacs_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cnacs_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_cnacs_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_cnacs_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cnacsc_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_cnacsc_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_cnacsc_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cnacsc_s1,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_cnacsc_s1 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_cnacsc_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vrcmpys_s1,DI_ftype_DISI,2)
+//
+def int_hexagon_M2_vrcmpys_s1 :
+Hexagon_di_disi_Intrinsic<"HEXAGON_M2_vrcmpys_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vrcmpys_acc_s1,DI_ftype_DIDISI,3)
+//
+def int_hexagon_M2_vrcmpys_acc_s1 :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_M2_vrcmpys_acc_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vrcmpys_s1rp,SI_ftype_DISI,2)
+//
+def int_hexagon_M2_vrcmpys_s1rp :
+Hexagon_si_disi_Intrinsic<"HEXAGON_M2_vrcmpys_s1rp">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmacls_s0,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmacls_s0 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmacls_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmacls_s1,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmacls_s1 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmacls_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmachs_s0,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmachs_s0 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmachs_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmachs_s1,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmachs_s1 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmachs_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyl_s0,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyl_s0 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyl_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyl_s1,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyl_s1 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyh_s0,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyh_s0 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyh_s1,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyh_s1 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmacls_rs0,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmacls_rs0 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmacls_rs0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmacls_rs1,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmacls_rs1 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmacls_rs1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmachs_rs0,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmachs_rs0 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmachs_rs0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmachs_rs1,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmachs_rs1 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmachs_rs1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyl_rs0,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyl_rs0 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyl_rs0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyl_rs1,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyl_rs1 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyl_rs1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyh_rs0,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyh_rs0 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyh_rs0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyh_rs1,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyh_rs1 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyh_rs1">;
+//
+// BUILTIN_INFO(HEXAGON.M4_vrmpyeh_s0,DI_ftype_DIDI,2)
+//
+def int_hexagon_M4_vrmpyeh_s0 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M4_vrmpyeh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M4_vrmpyeh_s1,DI_ftype_DIDI,2)
+//
+def int_hexagon_M4_vrmpyeh_s1 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M4_vrmpyeh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M4_vrmpyeh_acc_s0,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M4_vrmpyeh_acc_s0 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M4_vrmpyeh_acc_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M4_vrmpyeh_acc_s1,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M4_vrmpyeh_acc_s1 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M4_vrmpyeh_acc_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M4_vrmpyoh_s0,DI_ftype_DIDI,2)
+//
+def int_hexagon_M4_vrmpyoh_s0 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M4_vrmpyoh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M4_vrmpyoh_s1,DI_ftype_DIDI,2)
+//
+def int_hexagon_M4_vrmpyoh_s1 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M4_vrmpyoh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M4_vrmpyoh_acc_s0,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M4_vrmpyoh_acc_s0 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M4_vrmpyoh_acc_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M4_vrmpyoh_acc_s1,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M4_vrmpyoh_acc_s1 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M4_vrmpyoh_acc_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_hmmpyl_rs1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_hmmpyl_rs1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_hmmpyl_rs1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_hmmpyh_rs1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_hmmpyh_rs1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_hmmpyh_rs1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_hmmpyl_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_hmmpyl_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_hmmpyl_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_hmmpyh_s1,SI_ftype_SISI,2)
+//
+def int_hexagon_M2_hmmpyh_s1 :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_M2_hmmpyh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmaculs_s0,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmaculs_s0 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmaculs_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmaculs_s1,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmaculs_s1 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmaculs_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmacuhs_s0,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmacuhs_s0 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmacuhs_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmacuhs_s1,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmacuhs_s1 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmacuhs_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyul_s0,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyul_s0 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyul_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyul_s1,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyul_s1 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyul_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyuh_s0,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyuh_s0 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyuh_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyuh_s1,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyuh_s1 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyuh_s1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmaculs_rs0,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmaculs_rs0 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmaculs_rs0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmaculs_rs1,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmaculs_rs1 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmaculs_rs1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmacuhs_rs0,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmacuhs_rs0 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmacuhs_rs0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmacuhs_rs1,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_mmacuhs_rs1 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_mmacuhs_rs1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyul_rs0,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyul_rs0 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyul_rs0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyul_rs1,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyul_rs1 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyul_rs1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyuh_rs0,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyuh_rs0 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyuh_rs0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_mmpyuh_rs1,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_mmpyuh_rs1 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_mmpyuh_rs1">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vrcmaci_s0,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_vrcmaci_s0 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_vrcmaci_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vrcmacr_s0,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_vrcmacr_s0 :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_vrcmacr_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vrcmaci_s0c,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_vrcmaci_s0c :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_vrcmaci_s0c">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vrcmacr_s0c,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_vrcmacr_s0c :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_vrcmacr_s0c">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cmaci_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_cmaci_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_cmaci_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cmacr_s0,DI_ftype_DISISI,3)
+//
+def int_hexagon_M2_cmacr_s0 :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M2_cmacr_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vrcmpyi_s0,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vrcmpyi_s0 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_vrcmpyi_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vrcmpyr_s0,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vrcmpyr_s0 :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_vrcmpyr_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vrcmpyi_s0c,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vrcmpyi_s0c :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_vrcmpyi_s0c">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vrcmpyr_s0c,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vrcmpyr_s0c :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_vrcmpyr_s0c">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cmpyi_s0,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_cmpyi_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_cmpyi_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M2_cmpyr_s0,DI_ftype_SISI,2)
+//
+def int_hexagon_M2_cmpyr_s0 :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M2_cmpyr_s0">;
+//
+// BUILTIN_INFO(HEXAGON.M4_cmpyi_wh,SI_ftype_DISI,2)
+//
+def int_hexagon_M4_cmpyi_wh :
+Hexagon_si_disi_Intrinsic<"HEXAGON_M4_cmpyi_wh">;
+//
+// BUILTIN_INFO(HEXAGON.M4_cmpyr_wh,SI_ftype_DISI,2)
+//
+def int_hexagon_M4_cmpyr_wh :
+Hexagon_si_disi_Intrinsic<"HEXAGON_M4_cmpyr_wh">;
+//
+// BUILTIN_INFO(HEXAGON.M4_cmpyi_whc,SI_ftype_DISI,2)
+//
+def int_hexagon_M4_cmpyi_whc :
+Hexagon_si_disi_Intrinsic<"HEXAGON_M4_cmpyi_whc">;
+//
+// BUILTIN_INFO(HEXAGON.M4_cmpyr_whc,SI_ftype_DISI,2)
+//
+def int_hexagon_M4_cmpyr_whc :
+Hexagon_si_disi_Intrinsic<"HEXAGON_M4_cmpyr_whc">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vcmpy_s0_sat_i,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vcmpy_s0_sat_i :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_vcmpy_s0_sat_i">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vcmpy_s0_sat_r,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vcmpy_s0_sat_r :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_vcmpy_s0_sat_r">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vcmpy_s1_sat_i,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vcmpy_s1_sat_i :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_vcmpy_s1_sat_i">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vcmpy_s1_sat_r,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vcmpy_s1_sat_r :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_vcmpy_s1_sat_r">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vcmac_s0_sat_i,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_vcmac_s0_sat_i :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_vcmac_s0_sat_i">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vcmac_s0_sat_r,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M2_vcmac_s0_sat_r :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M2_vcmac_s0_sat_r">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vcrotate,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_vcrotate :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_vcrotate">;
+//
+// BUILTIN_INFO(HEXAGON.S4_vrcrotate_acc,DI_ftype_DIDISISI,4)
+//
+def int_hexagon_S4_vrcrotate_acc :
+Hexagon_di_didisisi_Intrinsic<"HEXAGON_S4_vrcrotate_acc">;
+//
+// BUILTIN_INFO(HEXAGON.S4_vrcrotate,DI_ftype_DISISI,3)
+//
+def int_hexagon_S4_vrcrotate :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_S4_vrcrotate">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vcnegh,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_vcnegh :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_vcnegh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vrcnegh,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_vrcnegh :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_vrcnegh">;
+//
+// BUILTIN_INFO(HEXAGON.M4_pmpyw,DI_ftype_SISI,2)
+//
+def int_hexagon_M4_pmpyw :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M4_pmpyw">;
+//
+// BUILTIN_INFO(HEXAGON.M4_vpmpyh,DI_ftype_SISI,2)
+//
+def int_hexagon_M4_vpmpyh :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_M4_vpmpyh">;
+//
+// BUILTIN_INFO(HEXAGON.M4_pmpyw_acc,DI_ftype_DISISI,3)
+//
+def int_hexagon_M4_pmpyw_acc :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M4_pmpyw_acc">;
+//
+// BUILTIN_INFO(HEXAGON.M4_vpmpyh_acc,DI_ftype_DISISI,3)
+//
+def int_hexagon_M4_vpmpyh_acc :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_M4_vpmpyh_acc">;
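+//
+// From here the list moves from the multiply-unit builtins (M2/M4/M5
+// prefixes) to the ALU builtins (A2/A4/A5), with a few shift/permute
+// (S2/S4) helpers interleaved where the builtin ordering places them; the
+// prefix roughly names the Hexagon instruction class and the architecture
+// revision that introduced the operation.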
+//
+// BUILTIN_INFO(HEXAGON.A2_add,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_add :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_add">;
+//
+// BUILTIN_INFO(HEXAGON.A2_sub,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_sub :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_sub">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addsat,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_addsat :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_addsat">;
+//
+// BUILTIN_INFO(HEXAGON.A2_subsat,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_subsat :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_subsat">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addi,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_addi :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_addi">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addh_l16_ll,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_addh_l16_ll :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_addh_l16_ll">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addh_l16_hl,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_addh_l16_hl :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_addh_l16_hl">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addh_l16_sat_ll,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_addh_l16_sat_ll :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_addh_l16_sat_ll">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addh_l16_sat_hl,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_addh_l16_sat_hl :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_addh_l16_sat_hl">;
+//
+// BUILTIN_INFO(HEXAGON.A2_subh_l16_ll,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_subh_l16_ll :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_subh_l16_ll">;
+//
+// BUILTIN_INFO(HEXAGON.A2_subh_l16_hl,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_subh_l16_hl :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_subh_l16_hl">;
+//
+// BUILTIN_INFO(HEXAGON.A2_subh_l16_sat_ll,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_subh_l16_sat_ll :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_subh_l16_sat_ll">;
+//
+// BUILTIN_INFO(HEXAGON.A2_subh_l16_sat_hl,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_subh_l16_sat_hl :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_subh_l16_sat_hl">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addh_h16_ll,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_addh_h16_ll :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_addh_h16_ll">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addh_h16_lh,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_addh_h16_lh :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_addh_h16_lh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addh_h16_hl,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_addh_h16_hl :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_addh_h16_hl">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addh_h16_hh,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_addh_h16_hh :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_addh_h16_hh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addh_h16_sat_ll,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_addh_h16_sat_ll :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_addh_h16_sat_ll">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addh_h16_sat_lh,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_addh_h16_sat_lh :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_addh_h16_sat_lh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addh_h16_sat_hl,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_addh_h16_sat_hl :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_addh_h16_sat_hl">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addh_h16_sat_hh,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_addh_h16_sat_hh :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_addh_h16_sat_hh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_subh_h16_ll,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_subh_h16_ll :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_subh_h16_ll">;
+//
+// BUILTIN_INFO(HEXAGON.A2_subh_h16_lh,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_subh_h16_lh :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_subh_h16_lh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_subh_h16_hl,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_subh_h16_hl :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_subh_h16_hl">;
+//
+// BUILTIN_INFO(HEXAGON.A2_subh_h16_hh,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_subh_h16_hh :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_subh_h16_hh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_subh_h16_sat_ll,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_subh_h16_sat_ll :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_subh_h16_sat_ll">;
+//
+// BUILTIN_INFO(HEXAGON.A2_subh_h16_sat_lh,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_subh_h16_sat_lh :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_subh_h16_sat_lh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_subh_h16_sat_hl,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_subh_h16_sat_hl :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_subh_h16_sat_hl">;
+//
+// BUILTIN_INFO(HEXAGON.A2_subh_h16_sat_hh,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_subh_h16_sat_hh :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_subh_h16_sat_hh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_aslh,SI_ftype_SI,1)
+//
+def int_hexagon_A2_aslh :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_aslh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_asrh,SI_ftype_SI,1)
+//
+def int_hexagon_A2_asrh :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_asrh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addp,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_addp :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_addp">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addpsat,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_addpsat :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_addpsat">;
+//
+// BUILTIN_INFO(HEXAGON.A2_addsp,DI_ftype_SIDI,2)
+//
+def int_hexagon_A2_addsp :
+Hexagon_di_sidi_Intrinsic<"HEXAGON_A2_addsp">;
+//
+// BUILTIN_INFO(HEXAGON.A2_subp,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_subp :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_subp">;
+//
+// BUILTIN_INFO(HEXAGON.A2_neg,SI_ftype_SI,1)
+//
+def int_hexagon_A2_neg :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_neg">;
+//
+// BUILTIN_INFO(HEXAGON.A2_negsat,SI_ftype_SI,1)
+//
+def int_hexagon_A2_negsat :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_negsat">;
+//
+// BUILTIN_INFO(HEXAGON.A2_abs,SI_ftype_SI,1)
+//
+def int_hexagon_A2_abs :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_abs">;
+//
+// BUILTIN_INFO(HEXAGON.A2_abssat,SI_ftype_SI,1)
+//
+def int_hexagon_A2_abssat :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_abssat">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vconj,DI_ftype_DI,1)
+//
+def int_hexagon_A2_vconj :
+Hexagon_di_di_Intrinsic<"HEXAGON_A2_vconj">;
+//
+// BUILTIN_INFO(HEXAGON.A2_negp,DI_ftype_DI,1)
+//
+def int_hexagon_A2_negp :
+Hexagon_di_di_Intrinsic<"HEXAGON_A2_negp">;
+//
+// BUILTIN_INFO(HEXAGON.A2_absp,DI_ftype_DI,1)
+//
+def int_hexagon_A2_absp :
+Hexagon_di_di_Intrinsic<"HEXAGON_A2_absp">;
+//
+// BUILTIN_INFO(HEXAGON.A2_max,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_max :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_max">;
+//
+// BUILTIN_INFO(HEXAGON.A2_maxu,USI_ftype_SISI,2)
+//
+def int_hexagon_A2_maxu :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_maxu">;
+//
+// BUILTIN_INFO(HEXAGON.A2_min,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_min :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_min">;
+//
+// BUILTIN_INFO(HEXAGON.A2_minu,USI_ftype_SISI,2)
+//
+def int_hexagon_A2_minu :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_minu">;
+//
+// BUILTIN_INFO(HEXAGON.A2_maxp,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_maxp :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_maxp">;
+//
+// BUILTIN_INFO(HEXAGON.A2_maxup,UDI_ftype_DIDI,2)
+//
+def int_hexagon_A2_maxup :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_maxup">;
+//
+// BUILTIN_INFO(HEXAGON.A2_minp,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_minp :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_minp">;
+//
+// BUILTIN_INFO(HEXAGON.A2_minup,UDI_ftype_DIDI,2)
+//
+def int_hexagon_A2_minup :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_minup">;
+//
+// BUILTIN_INFO(HEXAGON.A2_tfr,SI_ftype_SI,1)
+//
+def int_hexagon_A2_tfr :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_tfr">;
+//
+// BUILTIN_INFO(HEXAGON.A2_tfrsi,SI_ftype_SI,1)
+//
+def int_hexagon_A2_tfrsi :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_tfrsi">;
+//
+// BUILTIN_INFO(HEXAGON.A2_tfrp,DI_ftype_DI,1)
+//
+def int_hexagon_A2_tfrp :
+Hexagon_di_di_Intrinsic<"HEXAGON_A2_tfrp">;
+//
+// BUILTIN_INFO(HEXAGON.A2_tfrpi,DI_ftype_SI,1)
+//
+def int_hexagon_A2_tfrpi :
+Hexagon_di_si_Intrinsic<"HEXAGON_A2_tfrpi">;
+//
+// BUILTIN_INFO(HEXAGON.A2_zxtb,SI_ftype_SI,1)
+//
+def int_hexagon_A2_zxtb :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_zxtb">;
+//
+// BUILTIN_INFO(HEXAGON.A2_sxtb,SI_ftype_SI,1)
+//
+def int_hexagon_A2_sxtb :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_sxtb">;
+//
+// BUILTIN_INFO(HEXAGON.A2_zxth,SI_ftype_SI,1)
+//
+def int_hexagon_A2_zxth :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_zxth">;
+//
+// BUILTIN_INFO(HEXAGON.A2_sxth,SI_ftype_SI,1)
+//
+def int_hexagon_A2_sxth :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_sxth">;
+//
+// BUILTIN_INFO(HEXAGON.A2_combinew,DI_ftype_SISI,2)
+//
+def int_hexagon_A2_combinew :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_A2_combinew">;
+//
+// BUILTIN_INFO(HEXAGON.A4_combineri,DI_ftype_SISI,2)
+//
+def int_hexagon_A4_combineri :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_A4_combineri">;
+//
+// BUILTIN_INFO(HEXAGON.A4_combineir,DI_ftype_SISI,2)
+//
+def int_hexagon_A4_combineir :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_A4_combineir">;
+//
+// BUILTIN_INFO(HEXAGON.A2_combineii,DI_ftype_SISI,2)
+//
+def int_hexagon_A2_combineii :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_A2_combineii">;
+//
+// BUILTIN_INFO(HEXAGON.A2_combine_hh,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_combine_hh :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_combine_hh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_combine_hl,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_combine_hl :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_combine_hl">;
+//
+// BUILTIN_INFO(HEXAGON.A2_combine_lh,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_combine_lh :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_combine_lh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_combine_ll,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_combine_ll :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_combine_ll">;
+//
+// BUILTIN_INFO(HEXAGON.A2_tfril,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_tfril :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_tfril">;
+//
+// BUILTIN_INFO(HEXAGON.A2_tfrih,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_tfrih :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_tfrih">;
+//
+// BUILTIN_INFO(HEXAGON.A2_and,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_and :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_and">;
+//
+// BUILTIN_INFO(HEXAGON.A2_or,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_or :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_or">;
+//
+// BUILTIN_INFO(HEXAGON.A2_xor,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_xor :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_xor">;
+//
+// BUILTIN_INFO(HEXAGON.A2_not,SI_ftype_SI,1)
+//
+def int_hexagon_A2_not :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_not">;
+//
+// BUILTIN_INFO(HEXAGON.M2_xor_xacc,SI_ftype_SISISI,3)
+//
+def int_hexagon_M2_xor_xacc :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M2_xor_xacc">;
+//
+// BUILTIN_INFO(HEXAGON.M4_xor_xacc,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_M4_xor_xacc :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_M4_xor_xacc">;
+//
+// BUILTIN_INFO(HEXAGON.A4_andn,SI_ftype_SISI,2)
+//
+def int_hexagon_A4_andn :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_andn">;
+//
+// BUILTIN_INFO(HEXAGON.A4_orn,SI_ftype_SISI,2)
+//
+def int_hexagon_A4_orn :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_orn">;
+//
+// BUILTIN_INFO(HEXAGON.A4_andnp,DI_ftype_DIDI,2)
+//
+def int_hexagon_A4_andnp :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A4_andnp">;
+//
+// BUILTIN_INFO(HEXAGON.A4_ornp,DI_ftype_DIDI,2)
+//
+def int_hexagon_A4_ornp :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A4_ornp">;
+//
+// BUILTIN_INFO(HEXAGON.S4_addaddi,SI_ftype_SISISI,3)
+//
+def int_hexagon_S4_addaddi :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S4_addaddi">;
+//
+// BUILTIN_INFO(HEXAGON.S4_subaddi,SI_ftype_SISISI,3)
+//
+def int_hexagon_S4_subaddi :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S4_subaddi">;
+//
+// BUILTIN_INFO(HEXAGON.M4_and_and,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_and_and :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_and_and">;
+//
+// BUILTIN_INFO(HEXAGON.M4_and_andn,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_and_andn :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_and_andn">;
+//
+// BUILTIN_INFO(HEXAGON.M4_and_or,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_and_or :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_and_or">;
+//
+// BUILTIN_INFO(HEXAGON.M4_and_xor,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_and_xor :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_and_xor">;
+//
+// BUILTIN_INFO(HEXAGON.M4_or_and,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_or_and :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_or_and">;
+//
+// BUILTIN_INFO(HEXAGON.M4_or_andn,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_or_andn :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_or_andn">;
+//
+// BUILTIN_INFO(HEXAGON.M4_or_or,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_or_or :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_or_or">;
+//
+// BUILTIN_INFO(HEXAGON.M4_or_xor,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_or_xor :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_or_xor">;
+//
+// BUILTIN_INFO(HEXAGON.S4_or_andix,SI_ftype_SISISI,3)
+//
+def int_hexagon_S4_or_andix :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S4_or_andix">;
+//
+// BUILTIN_INFO(HEXAGON.S4_or_andi,SI_ftype_SISISI,3)
+//
+def int_hexagon_S4_or_andi :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S4_or_andi">;
+//
+// BUILTIN_INFO(HEXAGON.S4_or_ori,SI_ftype_SISISI,3)
+//
+def int_hexagon_S4_or_ori :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S4_or_ori">;
+//
+// BUILTIN_INFO(HEXAGON.M4_xor_and,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_xor_and :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_xor_and">;
+//
+// BUILTIN_INFO(HEXAGON.M4_xor_or,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_xor_or :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_xor_or">;
+//
+// BUILTIN_INFO(HEXAGON.M4_xor_andn,SI_ftype_SISISI,3)
+//
+def int_hexagon_M4_xor_andn :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_M4_xor_andn">;
+//
+// BUILTIN_INFO(HEXAGON.A2_subri,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_subri :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_subri">;
+//
+// BUILTIN_INFO(HEXAGON.A2_andir,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_andir :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_andir">;
+//
+// BUILTIN_INFO(HEXAGON.A2_orir,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_orir :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_orir">;
+//
+// BUILTIN_INFO(HEXAGON.A2_andp,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_andp :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_andp">;
+//
+// BUILTIN_INFO(HEXAGON.A2_orp,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_orp :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_orp">;
+//
+// BUILTIN_INFO(HEXAGON.A2_xorp,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_xorp :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_xorp">;
+//
+// BUILTIN_INFO(HEXAGON.A2_notp,DI_ftype_DI,1)
+//
+def int_hexagon_A2_notp :
+Hexagon_di_di_Intrinsic<"HEXAGON_A2_notp">;
+//
+// BUILTIN_INFO(HEXAGON.A2_sxtw,DI_ftype_SI,1)
+//
+def int_hexagon_A2_sxtw :
+Hexagon_di_si_Intrinsic<"HEXAGON_A2_sxtw">;
+//
+// BUILTIN_INFO(HEXAGON.A2_sat,SI_ftype_DI,1)
+//
+def int_hexagon_A2_sat :
+Hexagon_si_di_Intrinsic<"HEXAGON_A2_sat">;
+//
+// BUILTIN_INFO(HEXAGON.A2_roundsat,SI_ftype_DI,1)
+//
+def int_hexagon_A2_roundsat :
+Hexagon_si_di_Intrinsic<"HEXAGON_A2_roundsat">;
+//
+// BUILTIN_INFO(HEXAGON.A2_sath,SI_ftype_SI,1)
+//
+def int_hexagon_A2_sath :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_sath">;
+//
+// BUILTIN_INFO(HEXAGON.A2_satuh,SI_ftype_SI,1)
+//
+def int_hexagon_A2_satuh :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_satuh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_satub,SI_ftype_SI,1)
+//
+def int_hexagon_A2_satub :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_satub">;
+//
+// BUILTIN_INFO(HEXAGON.A2_satb,SI_ftype_SI,1)
+//
+def int_hexagon_A2_satb :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_satb">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vaddub,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vaddub :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vaddub">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vaddb_map,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vaddb_map :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vaddb_map">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vaddubs,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vaddubs :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vaddubs">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vaddh,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vaddh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vaddh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vaddhs,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vaddhs :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vaddhs">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vadduhs,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vadduhs :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vadduhs">;
+//
+// BUILTIN_INFO(HEXAGON.A5_vaddhubs,SI_ftype_DIDI,2)
+//
+def int_hexagon_A5_vaddhubs :
+Hexagon_si_didi_Intrinsic<"HEXAGON_A5_vaddhubs">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vaddw,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vaddw :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vaddw">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vaddws,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vaddws :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vaddws">;
+//
+// BUILTIN_INFO(HEXAGON.S4_vxaddsubw,DI_ftype_DIDI,2)
+//
+def int_hexagon_S4_vxaddsubw :
+Hexagon_di_didi_Intrinsic<"HEXAGON_S4_vxaddsubw">;
+//
+// BUILTIN_INFO(HEXAGON.S4_vxsubaddw,DI_ftype_DIDI,2)
+//
+def int_hexagon_S4_vxsubaddw :
+Hexagon_di_didi_Intrinsic<"HEXAGON_S4_vxsubaddw">;
+//
+// BUILTIN_INFO(HEXAGON.S4_vxaddsubh,DI_ftype_DIDI,2)
+//
+def int_hexagon_S4_vxaddsubh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_S4_vxaddsubh">;
+//
+// BUILTIN_INFO(HEXAGON.S4_vxsubaddh,DI_ftype_DIDI,2)
+//
+def int_hexagon_S4_vxsubaddh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_S4_vxsubaddh">;
+//
+// BUILTIN_INFO(HEXAGON.S4_vxaddsubhr,DI_ftype_DIDI,2)
+//
+def int_hexagon_S4_vxaddsubhr :
+Hexagon_di_didi_Intrinsic<"HEXAGON_S4_vxaddsubhr">;
+//
+// BUILTIN_INFO(HEXAGON.S4_vxsubaddhr,DI_ftype_DIDI,2)
+//
+def int_hexagon_S4_vxsubaddhr :
+Hexagon_di_didi_Intrinsic<"HEXAGON_S4_vxsubaddhr">;
+//
+// BUILTIN_INFO(HEXAGON.A2_svavgh,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_svavgh :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_svavgh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_svavghs,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_svavghs :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_svavghs">;
+//
+// BUILTIN_INFO(HEXAGON.A2_svnavgh,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_svnavgh :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_svnavgh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_svaddh,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_svaddh :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_svaddh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_svaddhs,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_svaddhs :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_svaddhs">;
+//
+// BUILTIN_INFO(HEXAGON.A2_svadduhs,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_svadduhs :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_svadduhs">;
+//
+// BUILTIN_INFO(HEXAGON.A2_svsubh,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_svsubh :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_svsubh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_svsubhs,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_svsubhs :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_svsubhs">;
+//
+// BUILTIN_INFO(HEXAGON.A2_svsubuhs,SI_ftype_SISI,2)
+//
+def int_hexagon_A2_svsubuhs :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A2_svsubuhs">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vraddub,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vraddub :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vraddub">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vraddub_acc,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_A2_vraddub_acc :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_A2_vraddub_acc">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vraddh,SI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vraddh :
+Hexagon_si_didi_Intrinsic<"HEXAGON_M2_vraddh">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vradduh,SI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vradduh :
+Hexagon_si_didi_Intrinsic<"HEXAGON_M2_vradduh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vsubub,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vsubub :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vsubub">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vsubb_map,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vsubb_map :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vsubb_map">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vsububs,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vsububs :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vsububs">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vsubh,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vsubh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vsubh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vsubhs,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vsubhs :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vsubhs">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vsubuhs,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vsubuhs :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vsubuhs">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vsubw,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vsubw :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vsubw">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vsubws,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vsubws :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vsubws">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vabsh,DI_ftype_DI,1)
+//
+def int_hexagon_A2_vabsh :
+Hexagon_di_di_Intrinsic<"HEXAGON_A2_vabsh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vabshsat,DI_ftype_DI,1)
+//
+def int_hexagon_A2_vabshsat :
+Hexagon_di_di_Intrinsic<"HEXAGON_A2_vabshsat">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vabsw,DI_ftype_DI,1)
+//
+def int_hexagon_A2_vabsw :
+Hexagon_di_di_Intrinsic<"HEXAGON_A2_vabsw">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vabswsat,DI_ftype_DI,1)
+//
+def int_hexagon_A2_vabswsat :
+Hexagon_di_di_Intrinsic<"HEXAGON_A2_vabswsat">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vabsdiffw,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vabsdiffw :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_vabsdiffw">;
+//
+// BUILTIN_INFO(HEXAGON.M2_vabsdiffh,DI_ftype_DIDI,2)
+//
+def int_hexagon_M2_vabsdiffh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_M2_vabsdiffh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vrsadub,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vrsadub :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vrsadub">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vrsadub_acc,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_A2_vrsadub_acc :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_A2_vrsadub_acc">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vavgub,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vavgub :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vavgub">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vavguh,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vavguh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vavguh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vavgh,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vavgh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vavgh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vnavgh,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vnavgh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vnavgh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vavgw,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vavgw :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vavgw">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vnavgw,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vnavgw :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vnavgw">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vavgwr,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vavgwr :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vavgwr">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vnavgwr,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vnavgwr :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vnavgwr">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vavgwcr,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vavgwcr :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vavgwcr">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vnavgwcr,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vnavgwcr :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vnavgwcr">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vavghcr,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vavghcr :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vavghcr">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vnavghcr,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vnavghcr :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vnavghcr">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vavguw,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vavguw :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vavguw">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vavguwr,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vavguwr :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vavguwr">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vavgubr,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vavgubr :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vavgubr">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vavguhr,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vavguhr :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vavguhr">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vavghr,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vavghr :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vavghr">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vnavghr,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vnavghr :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vnavghr">;
+//
+// BUILTIN_INFO(HEXAGON.A4_round_ri,SI_ftype_SISI,2)
+//
+def int_hexagon_A4_round_ri :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_round_ri">;
+//
+// BUILTIN_INFO(HEXAGON.A4_round_rr,SI_ftype_SISI,2)
+//
+def int_hexagon_A4_round_rr :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_round_rr">;
+//
+// BUILTIN_INFO(HEXAGON.A4_round_ri_sat,SI_ftype_SISI,2)
+//
+def int_hexagon_A4_round_ri_sat :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_round_ri_sat">;
+//
+// BUILTIN_INFO(HEXAGON.A4_round_rr_sat,SI_ftype_SISI,2)
+//
+def int_hexagon_A4_round_rr_sat :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_round_rr_sat">;
+//
+// BUILTIN_INFO(HEXAGON.A4_cround_ri,SI_ftype_SISI,2)
+//
+def int_hexagon_A4_cround_ri :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_cround_ri">;
+//
+// BUILTIN_INFO(HEXAGON.A4_cround_rr,SI_ftype_SISI,2)
+//
+def int_hexagon_A4_cround_rr :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_cround_rr">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vrminh,DI_ftype_DIDISI,3)
+//
+def int_hexagon_A4_vrminh :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_A4_vrminh">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vrmaxh,DI_ftype_DIDISI,3)
+//
+def int_hexagon_A4_vrmaxh :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_A4_vrmaxh">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vrminuh,DI_ftype_DIDISI,3)
+//
+def int_hexagon_A4_vrminuh :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_A4_vrminuh">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vrmaxuh,DI_ftype_DIDISI,3)
+//
+def int_hexagon_A4_vrmaxuh :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_A4_vrmaxuh">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vrminw,DI_ftype_DIDISI,3)
+//
+def int_hexagon_A4_vrminw :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_A4_vrminw">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vrmaxw,DI_ftype_DIDISI,3)
+//
+def int_hexagon_A4_vrmaxw :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_A4_vrmaxw">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vrminuw,DI_ftype_DIDISI,3)
+//
+def int_hexagon_A4_vrminuw :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_A4_vrminuw">;
+//
+// BUILTIN_INFO(HEXAGON.A4_vrmaxuw,DI_ftype_DIDISI,3)
+//
+def int_hexagon_A4_vrmaxuw :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_A4_vrmaxuw">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vminb,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vminb :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vminb">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vmaxb,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vmaxb :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vmaxb">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vminub,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vminub :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vminub">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vmaxub,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vmaxub :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vmaxub">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vminh,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vminh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vminh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vmaxh,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vmaxh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vmaxh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vminuh,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vminuh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vminuh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vmaxuh,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vmaxuh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vmaxuh">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vminw,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vminw :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vminw">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vmaxw,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vmaxw :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vmaxw">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vminuw,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vminuw :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vminuw">;
+//
+// BUILTIN_INFO(HEXAGON.A2_vmaxuw,DI_ftype_DIDI,2)
+//
+def int_hexagon_A2_vmaxuw :
+Hexagon_di_didi_Intrinsic<"HEXAGON_A2_vmaxuw">;
+//
+// BUILTIN_INFO(HEXAGON.A4_modwrapu,SI_ftype_SISI,2)
+//
+def int_hexagon_A4_modwrapu :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_A4_modwrapu">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sfadd,SF_ftype_SFSF,2)
+//
+def int_hexagon_F2_sfadd :
+Hexagon_sf_sfsf_Intrinsic<"HEXAGON_F2_sfadd">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sfsub,SF_ftype_SFSF,2)
+//
+def int_hexagon_F2_sfsub :
+Hexagon_sf_sfsf_Intrinsic<"HEXAGON_F2_sfsub">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sfmpy,SF_ftype_SFSF,2)
+//
+def int_hexagon_F2_sfmpy :
+Hexagon_sf_sfsf_Intrinsic<"HEXAGON_F2_sfmpy">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sffma,SF_ftype_SFSFSF,3)
+//
+def int_hexagon_F2_sffma :
+Hexagon_sf_sfsfsf_Intrinsic<"HEXAGON_F2_sffma">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sffma_sc,SF_ftype_SFSFSFQI,4)
+//
+def int_hexagon_F2_sffma_sc :
+Hexagon_sf_sfsfsfqi_Intrinsic<"HEXAGON_F2_sffma_sc">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sffms,SF_ftype_SFSFSF,3)
+//
+def int_hexagon_F2_sffms :
+Hexagon_sf_sfsfsf_Intrinsic<"HEXAGON_F2_sffms">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sffma_lib,SF_ftype_SFSFSF,3)
+//
+def int_hexagon_F2_sffma_lib :
+Hexagon_sf_sfsfsf_Intrinsic<"HEXAGON_F2_sffma_lib">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sffms_lib,SF_ftype_SFSFSF,3)
+//
+def int_hexagon_F2_sffms_lib :
+Hexagon_sf_sfsfsf_Intrinsic<"HEXAGON_F2_sffms_lib">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sfcmpeq,QI_ftype_SFSF,2)
+//
+def int_hexagon_F2_sfcmpeq :
+Hexagon_si_sfsf_Intrinsic<"HEXAGON_F2_sfcmpeq">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sfcmpgt,QI_ftype_SFSF,2)
+//
+def int_hexagon_F2_sfcmpgt :
+Hexagon_si_sfsf_Intrinsic<"HEXAGON_F2_sfcmpgt">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sfcmpge,QI_ftype_SFSF,2)
+//
+def int_hexagon_F2_sfcmpge :
+Hexagon_si_sfsf_Intrinsic<"HEXAGON_F2_sfcmpge">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sfcmpuo,QI_ftype_SFSF,2)
+//
+def int_hexagon_F2_sfcmpuo :
+Hexagon_si_sfsf_Intrinsic<"HEXAGON_F2_sfcmpuo">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sfmax,SF_ftype_SFSF,2)
+//
+def int_hexagon_F2_sfmax :
+Hexagon_sf_sfsf_Intrinsic<"HEXAGON_F2_sfmax">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sfmin,SF_ftype_SFSF,2)
+//
+def int_hexagon_F2_sfmin :
+Hexagon_sf_sfsf_Intrinsic<"HEXAGON_F2_sfmin">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sfclass,QI_ftype_SFSI,2)
+//
+def int_hexagon_F2_sfclass :
+Hexagon_si_sfsi_Intrinsic<"HEXAGON_F2_sfclass">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sfimm_p,SF_ftype_SI,1)
+//
+def int_hexagon_F2_sfimm_p :
+Hexagon_sf_si_Intrinsic<"HEXAGON_F2_sfimm_p">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sfimm_n,SF_ftype_SI,1)
+//
+def int_hexagon_F2_sfimm_n :
+Hexagon_sf_si_Intrinsic<"HEXAGON_F2_sfimm_n">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sffixupn,SF_ftype_SFSF,2)
+//
+def int_hexagon_F2_sffixupn :
+Hexagon_sf_sfsf_Intrinsic<"HEXAGON_F2_sffixupn">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sffixupd,SF_ftype_SFSF,2)
+//
+def int_hexagon_F2_sffixupd :
+Hexagon_sf_sfsf_Intrinsic<"HEXAGON_F2_sffixupd">;
+//
+// BUILTIN_INFO(HEXAGON.F2_sffixupr,SF_ftype_SF,1)
+//
+def int_hexagon_F2_sffixupr :
+Hexagon_sf_sf_Intrinsic<"HEXAGON_F2_sffixupr">;
+//
+// BUILTIN_INFO(HEXAGON.F2_dfcmpeq,QI_ftype_DFDF,2)
+//
+def int_hexagon_F2_dfcmpeq :
+Hexagon_si_dfdf_Intrinsic<"HEXAGON_F2_dfcmpeq">;
+//
+// BUILTIN_INFO(HEXAGON.F2_dfcmpgt,QI_ftype_DFDF,2)
+//
+def int_hexagon_F2_dfcmpgt :
+Hexagon_si_dfdf_Intrinsic<"HEXAGON_F2_dfcmpgt">;
+//
+// BUILTIN_INFO(HEXAGON.F2_dfcmpge,QI_ftype_DFDF,2)
+//
+def int_hexagon_F2_dfcmpge :
+Hexagon_si_dfdf_Intrinsic<"HEXAGON_F2_dfcmpge">;
+//
+// BUILTIN_INFO(HEXAGON.F2_dfcmpuo,QI_ftype_DFDF,2)
+//
+def int_hexagon_F2_dfcmpuo :
+Hexagon_si_dfdf_Intrinsic<"HEXAGON_F2_dfcmpuo">;
+//
+// BUILTIN_INFO(HEXAGON.F2_dfclass,QI_ftype_DFSI,2)
+//
+def int_hexagon_F2_dfclass :
+Hexagon_si_dfsi_Intrinsic<"HEXAGON_F2_dfclass">;
+//
+// BUILTIN_INFO(HEXAGON.F2_dfimm_p,DF_ftype_SI,1)
+//
+def int_hexagon_F2_dfimm_p :
+Hexagon_df_si_Intrinsic<"HEXAGON_F2_dfimm_p">;
+//
+// BUILTIN_INFO(HEXAGON.F2_dfimm_n,DF_ftype_SI,1)
+//
+def int_hexagon_F2_dfimm_n :
+Hexagon_df_si_Intrinsic<"HEXAGON_F2_dfimm_n">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_sf2df,DF_ftype_SF,1)
+//
+def int_hexagon_F2_conv_sf2df :
+Hexagon_df_sf_Intrinsic<"HEXAGON_F2_conv_sf2df">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_df2sf,SF_ftype_DF,1)
+//
+def int_hexagon_F2_conv_df2sf :
+Hexagon_sf_df_Intrinsic<"HEXAGON_F2_conv_df2sf">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_uw2sf,SF_ftype_SI,1)
+//
+def int_hexagon_F2_conv_uw2sf :
+Hexagon_sf_si_Intrinsic<"HEXAGON_F2_conv_uw2sf">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_uw2df,DF_ftype_SI,1)
+//
+def int_hexagon_F2_conv_uw2df :
+Hexagon_df_si_Intrinsic<"HEXAGON_F2_conv_uw2df">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_w2sf,SF_ftype_SI,1)
+//
+def int_hexagon_F2_conv_w2sf :
+Hexagon_sf_si_Intrinsic<"HEXAGON_F2_conv_w2sf">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_w2df,DF_ftype_SI,1)
+//
+def int_hexagon_F2_conv_w2df :
+Hexagon_df_si_Intrinsic<"HEXAGON_F2_conv_w2df">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_ud2sf,SF_ftype_DI,1)
+//
+def int_hexagon_F2_conv_ud2sf :
+Hexagon_sf_di_Intrinsic<"HEXAGON_F2_conv_ud2sf">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_ud2df,DF_ftype_DI,1)
+//
+def int_hexagon_F2_conv_ud2df :
+Hexagon_df_di_Intrinsic<"HEXAGON_F2_conv_ud2df">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_d2sf,SF_ftype_DI,1)
+//
+def int_hexagon_F2_conv_d2sf :
+Hexagon_sf_di_Intrinsic<"HEXAGON_F2_conv_d2sf">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_d2df,DF_ftype_DI,1)
+//
+def int_hexagon_F2_conv_d2df :
+Hexagon_df_di_Intrinsic<"HEXAGON_F2_conv_d2df">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_sf2uw,SI_ftype_SF,1)
+//
+def int_hexagon_F2_conv_sf2uw :
+Hexagon_si_sf_Intrinsic<"HEXAGON_F2_conv_sf2uw">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_sf2w,SI_ftype_SF,1)
+//
+def int_hexagon_F2_conv_sf2w :
+Hexagon_si_sf_Intrinsic<"HEXAGON_F2_conv_sf2w">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_sf2ud,DI_ftype_SF,1)
+//
+def int_hexagon_F2_conv_sf2ud :
+Hexagon_di_sf_Intrinsic<"HEXAGON_F2_conv_sf2ud">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_sf2d,DI_ftype_SF,1)
+//
+def int_hexagon_F2_conv_sf2d :
+Hexagon_di_sf_Intrinsic<"HEXAGON_F2_conv_sf2d">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_df2uw,SI_ftype_DF,1)
+//
+def int_hexagon_F2_conv_df2uw :
+Hexagon_si_df_Intrinsic<"HEXAGON_F2_conv_df2uw">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_df2w,SI_ftype_DF,1)
+//
+def int_hexagon_F2_conv_df2w :
+Hexagon_si_df_Intrinsic<"HEXAGON_F2_conv_df2w">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_df2ud,DI_ftype_DF,1)
+//
+def int_hexagon_F2_conv_df2ud :
+Hexagon_di_df_Intrinsic<"HEXAGON_F2_conv_df2ud">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_df2d,DI_ftype_DF,1)
+//
+def int_hexagon_F2_conv_df2d :
+Hexagon_di_df_Intrinsic<"HEXAGON_F2_conv_df2d">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_sf2uw_chop,SI_ftype_SF,1)
+//
+def int_hexagon_F2_conv_sf2uw_chop :
+Hexagon_si_sf_Intrinsic<"HEXAGON_F2_conv_sf2uw_chop">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_sf2w_chop,SI_ftype_SF,1)
+//
+def int_hexagon_F2_conv_sf2w_chop :
+Hexagon_si_sf_Intrinsic<"HEXAGON_F2_conv_sf2w_chop">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_sf2ud_chop,DI_ftype_SF,1)
+//
+def int_hexagon_F2_conv_sf2ud_chop :
+Hexagon_di_sf_Intrinsic<"HEXAGON_F2_conv_sf2ud_chop">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_sf2d_chop,DI_ftype_SF,1)
+//
+def int_hexagon_F2_conv_sf2d_chop :
+Hexagon_di_sf_Intrinsic<"HEXAGON_F2_conv_sf2d_chop">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_df2uw_chop,SI_ftype_DF,1)
+//
+def int_hexagon_F2_conv_df2uw_chop :
+Hexagon_si_df_Intrinsic<"HEXAGON_F2_conv_df2uw_chop">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_df2w_chop,SI_ftype_DF,1)
+//
+def int_hexagon_F2_conv_df2w_chop :
+Hexagon_si_df_Intrinsic<"HEXAGON_F2_conv_df2w_chop">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_df2ud_chop,DI_ftype_DF,1)
+//
+def int_hexagon_F2_conv_df2ud_chop :
+Hexagon_di_df_Intrinsic<"HEXAGON_F2_conv_df2ud_chop">;
+//
+// BUILTIN_INFO(HEXAGON.F2_conv_df2d_chop,DI_ftype_DF,1)
+//
+def int_hexagon_F2_conv_df2d_chop :
+Hexagon_di_df_Intrinsic<"HEXAGON_F2_conv_df2d_chop">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_r_r,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_asr_r_r :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_asr_r_r">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_r_r,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_asl_r_r :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_asl_r_r">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_r_r,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_lsr_r_r :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_lsr_r_r">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsl_r_r,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_lsl_r_r :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_lsl_r_r">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_r_p,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_asr_r_p :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_asr_r_p">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_r_p,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_asl_r_p :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_asl_r_p">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_r_p,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_lsr_r_p :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_lsr_r_p">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsl_r_p,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_lsl_r_p :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_lsl_r_p">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_r_r_acc,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asr_r_r_acc :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asr_r_r_acc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_r_r_acc,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asl_r_r_acc :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asl_r_r_acc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_r_r_acc,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_lsr_r_r_acc :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_lsr_r_r_acc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsl_r_r_acc,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_lsl_r_r_acc :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_lsl_r_r_acc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_r_p_acc,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asr_r_p_acc :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asr_r_p_acc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_r_p_acc,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asl_r_p_acc :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asl_r_p_acc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_r_p_acc,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_lsr_r_p_acc :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_lsr_r_p_acc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsl_r_p_acc,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_lsl_r_p_acc :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_lsl_r_p_acc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_r_r_nac,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asr_r_r_nac :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asr_r_r_nac">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_r_r_nac,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asl_r_r_nac :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asl_r_r_nac">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_r_r_nac,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_lsr_r_r_nac :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_lsr_r_r_nac">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsl_r_r_nac,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_lsl_r_r_nac :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_lsl_r_r_nac">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_r_p_nac,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asr_r_p_nac :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asr_r_p_nac">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_r_p_nac,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asl_r_p_nac :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asl_r_p_nac">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_r_p_nac,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_lsr_r_p_nac :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_lsr_r_p_nac">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsl_r_p_nac,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_lsl_r_p_nac :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_lsl_r_p_nac">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_r_r_and,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asr_r_r_and :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asr_r_r_and">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_r_r_and,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asl_r_r_and :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asl_r_r_and">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_r_r_and,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_lsr_r_r_and :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_lsr_r_r_and">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsl_r_r_and,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_lsl_r_r_and :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_lsl_r_r_and">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_r_r_or,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asr_r_r_or :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asr_r_r_or">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_r_r_or,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asl_r_r_or :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asl_r_r_or">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_r_r_or,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_lsr_r_r_or :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_lsr_r_r_or">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsl_r_r_or,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_lsl_r_r_or :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_lsl_r_r_or">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_r_p_and,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asr_r_p_and :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asr_r_p_and">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_r_p_and,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asl_r_p_and :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asl_r_p_and">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_r_p_and,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_lsr_r_p_and :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_lsr_r_p_and">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsl_r_p_and,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_lsl_r_p_and :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_lsl_r_p_and">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_r_p_or,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asr_r_p_or :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asr_r_p_or">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_r_p_or,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asl_r_p_or :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asl_r_p_or">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_r_p_or,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_lsr_r_p_or :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_lsr_r_p_or">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsl_r_p_or,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_lsl_r_p_or :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_lsl_r_p_or">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_r_p_xor,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asr_r_p_xor :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asr_r_p_xor">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_r_p_xor,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asl_r_p_xor :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asl_r_p_xor">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_r_p_xor,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_lsr_r_p_xor :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_lsr_r_p_xor">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsl_r_p_xor,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_lsl_r_p_xor :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_lsl_r_p_xor">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_r_r_sat,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_asr_r_r_sat :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_asr_r_r_sat">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_r_r_sat,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_asl_r_r_sat :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_asl_r_r_sat">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_r,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_asr_i_r :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_asr_i_r">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_i_r,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_lsr_i_r :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_lsr_i_r">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_i_r,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_asl_i_r :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_asl_i_r">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_p,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_asr_i_p :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_asr_i_p">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_i_p,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_lsr_i_p :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_lsr_i_p">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_i_p,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_asl_i_p :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_asl_i_p">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_r_acc,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asr_i_r_acc :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asr_i_r_acc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_i_r_acc,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_lsr_i_r_acc :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_lsr_i_r_acc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_i_r_acc,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asl_i_r_acc :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asl_i_r_acc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_p_acc,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asr_i_p_acc :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asr_i_p_acc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_i_p_acc,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_lsr_i_p_acc :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_lsr_i_p_acc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_i_p_acc,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asl_i_p_acc :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asl_i_p_acc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_r_nac,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asr_i_r_nac :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asr_i_r_nac">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_i_r_nac,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_lsr_i_r_nac :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_lsr_i_r_nac">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_i_r_nac,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asl_i_r_nac :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asl_i_r_nac">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_p_nac,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asr_i_p_nac :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asr_i_p_nac">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_i_p_nac,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_lsr_i_p_nac :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_lsr_i_p_nac">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_i_p_nac,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asl_i_p_nac :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asl_i_p_nac">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_i_r_xacc,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_lsr_i_r_xacc :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_lsr_i_r_xacc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_i_r_xacc,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asl_i_r_xacc :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asl_i_r_xacc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_i_p_xacc,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_lsr_i_p_xacc :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_lsr_i_p_xacc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_i_p_xacc,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asl_i_p_xacc :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asl_i_p_xacc">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_r_and,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asr_i_r_and :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asr_i_r_and">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_i_r_and,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_lsr_i_r_and :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_lsr_i_r_and">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_i_r_and,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asl_i_r_and :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asl_i_r_and">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_r_or,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asr_i_r_or :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asr_i_r_or">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_i_r_or,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_lsr_i_r_or :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_lsr_i_r_or">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_i_r_or,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_asl_i_r_or :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_asl_i_r_or">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_p_and,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asr_i_p_and :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asr_i_p_and">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_i_p_and,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_lsr_i_p_and :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_lsr_i_p_and">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_i_p_and,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asl_i_p_and :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asl_i_p_and">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_p_or,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asr_i_p_or :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asr_i_p_or">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_i_p_or,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_lsr_i_p_or :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_lsr_i_p_or">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_i_p_or,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_asl_i_p_or :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_asl_i_p_or">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_i_r_sat,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_asl_i_r_sat :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_asl_i_r_sat">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_r_rnd,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_asr_i_r_rnd :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_asr_i_r_rnd">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_r_rnd_goodsyntax,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_asr_i_r_rnd_goodsyntax :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_asr_i_r_rnd_goodsyntax">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_p_rnd,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_asr_i_p_rnd :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_asr_i_p_rnd">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_p_rnd_goodsyntax,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_asr_i_p_rnd_goodsyntax :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_asr_i_p_rnd_goodsyntax">;
+//
+// BUILTIN_INFO(HEXAGON.S4_lsli,SI_ftype_SISI,2)
+//
+def int_hexagon_S4_lsli :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S4_lsli">;
+//
+// BUILTIN_INFO(HEXAGON.S2_addasl_rrri,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_addasl_rrri :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_addasl_rrri">;
+//
+// BUILTIN_INFO(HEXAGON.S4_andi_asl_ri,SI_ftype_SISISI,3)
+//
+def int_hexagon_S4_andi_asl_ri :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S4_andi_asl_ri">;
+//
+// BUILTIN_INFO(HEXAGON.S4_ori_asl_ri,SI_ftype_SISISI,3)
+//
+def int_hexagon_S4_ori_asl_ri :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S4_ori_asl_ri">;
+//
+// BUILTIN_INFO(HEXAGON.S4_addi_asl_ri,SI_ftype_SISISI,3)
+//
+def int_hexagon_S4_addi_asl_ri :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S4_addi_asl_ri">;
+//
+// BUILTIN_INFO(HEXAGON.S4_subi_asl_ri,SI_ftype_SISISI,3)
+//
+def int_hexagon_S4_subi_asl_ri :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S4_subi_asl_ri">;
+//
+// BUILTIN_INFO(HEXAGON.S4_andi_lsr_ri,SI_ftype_SISISI,3)
+//
+def int_hexagon_S4_andi_lsr_ri :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S4_andi_lsr_ri">;
+//
+// BUILTIN_INFO(HEXAGON.S4_ori_lsr_ri,SI_ftype_SISISI,3)
+//
+def int_hexagon_S4_ori_lsr_ri :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S4_ori_lsr_ri">;
+//
+// BUILTIN_INFO(HEXAGON.S4_addi_lsr_ri,SI_ftype_SISISI,3)
+//
+def int_hexagon_S4_addi_lsr_ri :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S4_addi_lsr_ri">;
+//
+// BUILTIN_INFO(HEXAGON.S4_subi_lsr_ri,SI_ftype_SISISI,3)
+//
+def int_hexagon_S4_subi_lsr_ri :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S4_subi_lsr_ri">;
+//
+// BUILTIN_INFO(HEXAGON.S2_valignib,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_valignib :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_valignib">;
+//
+// BUILTIN_INFO(HEXAGON.S2_valignrb,DI_ftype_DIDIQI,3)
+//
+def int_hexagon_S2_valignrb :
+Hexagon_di_didiqi_Intrinsic<"HEXAGON_S2_valignrb">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vspliceib,DI_ftype_DIDISI,3)
+//
+def int_hexagon_S2_vspliceib :
+Hexagon_di_didisi_Intrinsic<"HEXAGON_S2_vspliceib">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vsplicerb,DI_ftype_DIDIQI,3)
+//
+def int_hexagon_S2_vsplicerb :
+Hexagon_di_didiqi_Intrinsic<"HEXAGON_S2_vsplicerb">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vsplatrh,DI_ftype_SI,1)
+//
+def int_hexagon_S2_vsplatrh :
+Hexagon_di_si_Intrinsic<"HEXAGON_S2_vsplatrh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vsplatrb,SI_ftype_SI,1)
+//
+def int_hexagon_S2_vsplatrb :
+Hexagon_si_si_Intrinsic<"HEXAGON_S2_vsplatrb">;
+//
+// BUILTIN_INFO(HEXAGON.S2_insert,SI_ftype_SISISISI,4)
+//
+def int_hexagon_S2_insert :
+Hexagon_si_sisisisi_Intrinsic<"HEXAGON_S2_insert">;
+//
+// BUILTIN_INFO(HEXAGON.S2_tableidxb_goodsyntax,SI_ftype_SISISISI,4)
+//
+def int_hexagon_S2_tableidxb_goodsyntax :
+Hexagon_si_sisisisi_Intrinsic<"HEXAGON_S2_tableidxb_goodsyntax">;
+//
+// BUILTIN_INFO(HEXAGON.S2_tableidxh_goodsyntax,SI_ftype_SISISISI,4)
+//
+def int_hexagon_S2_tableidxh_goodsyntax :
+Hexagon_si_sisisisi_Intrinsic<"HEXAGON_S2_tableidxh_goodsyntax">;
+//
+// BUILTIN_INFO(HEXAGON.S2_tableidxw_goodsyntax,SI_ftype_SISISISI,4)
+//
+def int_hexagon_S2_tableidxw_goodsyntax :
+Hexagon_si_sisisisi_Intrinsic<"HEXAGON_S2_tableidxw_goodsyntax">;
+//
+// BUILTIN_INFO(HEXAGON.S2_tableidxd_goodsyntax,SI_ftype_SISISISI,4)
+//
+def int_hexagon_S2_tableidxd_goodsyntax :
+Hexagon_si_sisisisi_Intrinsic<"HEXAGON_S2_tableidxd_goodsyntax">;
+//
+// BUILTIN_INFO(HEXAGON.A4_bitspliti,DI_ftype_SISI,2)
+//
+def int_hexagon_A4_bitspliti :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_A4_bitspliti">;
+//
+// BUILTIN_INFO(HEXAGON.A4_bitsplit,DI_ftype_SISI,2)
+//
+def int_hexagon_A4_bitsplit :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_A4_bitsplit">;
+//
+// BUILTIN_INFO(HEXAGON.S4_extract,SI_ftype_SISISI,3)
+//
+def int_hexagon_S4_extract :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S4_extract">;
+//
+// BUILTIN_INFO(HEXAGON.S2_extractu,SI_ftype_SISISI,3)
+//
+def int_hexagon_S2_extractu :
+Hexagon_si_sisisi_Intrinsic<"HEXAGON_S2_extractu">;
+//
+// BUILTIN_INFO(HEXAGON.S2_insertp,DI_ftype_DIDISISI,4)
+//
+def int_hexagon_S2_insertp :
+Hexagon_di_didisisi_Intrinsic<"HEXAGON_S2_insertp">;
+//
+// BUILTIN_INFO(HEXAGON.S4_extractp,DI_ftype_DISISI,3)
+//
+def int_hexagon_S4_extractp :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_S4_extractp">;
+//
+// BUILTIN_INFO(HEXAGON.S2_extractup,DI_ftype_DISISI,3)
+//
+def int_hexagon_S2_extractup :
+Hexagon_di_disisi_Intrinsic<"HEXAGON_S2_extractup">;
+//
+// BUILTIN_INFO(HEXAGON.S2_insert_rp,SI_ftype_SISIDI,3)
+//
+def int_hexagon_S2_insert_rp :
+Hexagon_si_sisidi_Intrinsic<"HEXAGON_S2_insert_rp">;
+//
+// BUILTIN_INFO(HEXAGON.S4_extract_rp,SI_ftype_SIDI,2)
+//
+def int_hexagon_S4_extract_rp :
+Hexagon_si_sidi_Intrinsic<"HEXAGON_S4_extract_rp">;
+//
+// BUILTIN_INFO(HEXAGON.S2_extractu_rp,SI_ftype_SIDI,2)
+//
+def int_hexagon_S2_extractu_rp :
+Hexagon_si_sidi_Intrinsic<"HEXAGON_S2_extractu_rp">;
+//
+// BUILTIN_INFO(HEXAGON.S2_insertp_rp,DI_ftype_DIDIDI,3)
+//
+def int_hexagon_S2_insertp_rp :
+Hexagon_di_dididi_Intrinsic<"HEXAGON_S2_insertp_rp">;
+//
+// BUILTIN_INFO(HEXAGON.S4_extractp_rp,DI_ftype_DIDI,2)
+//
+def int_hexagon_S4_extractp_rp :
+Hexagon_di_didi_Intrinsic<"HEXAGON_S4_extractp_rp">;
+//
+// BUILTIN_INFO(HEXAGON.S2_extractup_rp,DI_ftype_DIDI,2)
+//
+def int_hexagon_S2_extractup_rp :
+Hexagon_di_didi_Intrinsic<"HEXAGON_S2_extractup_rp">;
+//
+// BUILTIN_INFO(HEXAGON.S2_tstbit_i,QI_ftype_SISI,2)
+//
+def int_hexagon_S2_tstbit_i :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_tstbit_i">;
+//
+// BUILTIN_INFO(HEXAGON.S4_ntstbit_i,QI_ftype_SISI,2)
+//
+def int_hexagon_S4_ntstbit_i :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S4_ntstbit_i">;
+//
+// BUILTIN_INFO(HEXAGON.S2_setbit_i,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_setbit_i :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_setbit_i">;
+//
+// BUILTIN_INFO(HEXAGON.S2_togglebit_i,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_togglebit_i :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_togglebit_i">;
+//
+// BUILTIN_INFO(HEXAGON.S2_clrbit_i,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_clrbit_i :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_clrbit_i">;
+//
+// BUILTIN_INFO(HEXAGON.S2_tstbit_r,QI_ftype_SISI,2)
+//
+def int_hexagon_S2_tstbit_r :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_tstbit_r">;
+//
+// BUILTIN_INFO(HEXAGON.S4_ntstbit_r,QI_ftype_SISI,2)
+//
+def int_hexagon_S4_ntstbit_r :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S4_ntstbit_r">;
+//
+// BUILTIN_INFO(HEXAGON.S2_setbit_r,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_setbit_r :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_setbit_r">;
+//
+// BUILTIN_INFO(HEXAGON.S2_togglebit_r,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_togglebit_r :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_togglebit_r">;
+//
+// BUILTIN_INFO(HEXAGON.S2_clrbit_r,SI_ftype_SISI,2)
+//
+def int_hexagon_S2_clrbit_r :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S2_clrbit_r">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_vh,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_asr_i_vh :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_asr_i_vh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_i_vh,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_lsr_i_vh :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_lsr_i_vh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_i_vh,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_asl_i_vh :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_asl_i_vh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_r_vh,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_asr_r_vh :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_asr_r_vh">;
+//
+// BUILTIN_INFO(HEXAGON.S5_asrhub_rnd_sat_goodsyntax,SI_ftype_DISI,2)
+//
+def int_hexagon_S5_asrhub_rnd_sat_goodsyntax :
+Hexagon_si_disi_Intrinsic<"HEXAGON_S5_asrhub_rnd_sat_goodsyntax">;
+//
+// BUILTIN_INFO(HEXAGON.S5_asrhub_sat,SI_ftype_DISI,2)
+//
+def int_hexagon_S5_asrhub_sat :
+Hexagon_si_disi_Intrinsic<"HEXAGON_S5_asrhub_sat">;
+//
+// BUILTIN_INFO(HEXAGON.S5_vasrhrnd_goodsyntax,DI_ftype_DISI,2)
+//
+def int_hexagon_S5_vasrhrnd_goodsyntax :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S5_vasrhrnd_goodsyntax">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_r_vh,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_asl_r_vh :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_asl_r_vh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_r_vh,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_lsr_r_vh :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_lsr_r_vh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsl_r_vh,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_lsl_r_vh :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_lsl_r_vh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_vw,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_asr_i_vw :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_asr_i_vw">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_i_svw_trun,SI_ftype_DISI,2)
+//
+def int_hexagon_S2_asr_i_svw_trun :
+Hexagon_si_disi_Intrinsic<"HEXAGON_S2_asr_i_svw_trun">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_r_svw_trun,SI_ftype_DISI,2)
+//
+def int_hexagon_S2_asr_r_svw_trun :
+Hexagon_si_disi_Intrinsic<"HEXAGON_S2_asr_r_svw_trun">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_i_vw,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_lsr_i_vw :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_lsr_i_vw">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_i_vw,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_asl_i_vw :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_asl_i_vw">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asr_r_vw,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_asr_r_vw :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_asr_r_vw">;
+//
+// BUILTIN_INFO(HEXAGON.S2_asl_r_vw,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_asl_r_vw :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_asl_r_vw">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsr_r_vw,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_lsr_r_vw :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_lsr_r_vw">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lsl_r_vw,DI_ftype_DISI,2)
+//
+def int_hexagon_S2_lsl_r_vw :
+Hexagon_di_disi_Intrinsic<"HEXAGON_S2_lsl_r_vw">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vrndpackwh,SI_ftype_DI,1)
+//
+def int_hexagon_S2_vrndpackwh :
+Hexagon_si_di_Intrinsic<"HEXAGON_S2_vrndpackwh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vrndpackwhs,SI_ftype_DI,1)
+//
+def int_hexagon_S2_vrndpackwhs :
+Hexagon_si_di_Intrinsic<"HEXAGON_S2_vrndpackwhs">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vsxtbh,DI_ftype_SI,1)
+//
+def int_hexagon_S2_vsxtbh :
+Hexagon_di_si_Intrinsic<"HEXAGON_S2_vsxtbh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vzxtbh,DI_ftype_SI,1)
+//
+def int_hexagon_S2_vzxtbh :
+Hexagon_di_si_Intrinsic<"HEXAGON_S2_vzxtbh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vsathub,SI_ftype_DI,1)
+//
+def int_hexagon_S2_vsathub :
+Hexagon_si_di_Intrinsic<"HEXAGON_S2_vsathub">;
+//
+// BUILTIN_INFO(HEXAGON.S2_svsathub,SI_ftype_SI,1)
+//
+def int_hexagon_S2_svsathub :
+Hexagon_si_si_Intrinsic<"HEXAGON_S2_svsathub">;
+//
+// BUILTIN_INFO(HEXAGON.S2_svsathb,SI_ftype_SI,1)
+//
+def int_hexagon_S2_svsathb :
+Hexagon_si_si_Intrinsic<"HEXAGON_S2_svsathb">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vsathb,SI_ftype_DI,1)
+//
+def int_hexagon_S2_vsathb :
+Hexagon_si_di_Intrinsic<"HEXAGON_S2_vsathb">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vtrunohb,SI_ftype_DI,1)
+//
+def int_hexagon_S2_vtrunohb :
+Hexagon_si_di_Intrinsic<"HEXAGON_S2_vtrunohb">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vtrunewh,DI_ftype_DIDI,2)
+//
+def int_hexagon_S2_vtrunewh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_S2_vtrunewh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vtrunowh,DI_ftype_DIDI,2)
+//
+def int_hexagon_S2_vtrunowh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_S2_vtrunowh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vtrunehb,SI_ftype_DI,1)
+//
+def int_hexagon_S2_vtrunehb :
+Hexagon_si_di_Intrinsic<"HEXAGON_S2_vtrunehb">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vsxthw,DI_ftype_SI,1)
+//
+def int_hexagon_S2_vsxthw :
+Hexagon_di_si_Intrinsic<"HEXAGON_S2_vsxthw">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vzxthw,DI_ftype_SI,1)
+//
+def int_hexagon_S2_vzxthw :
+Hexagon_di_si_Intrinsic<"HEXAGON_S2_vzxthw">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vsatwh,SI_ftype_DI,1)
+//
+def int_hexagon_S2_vsatwh :
+Hexagon_si_di_Intrinsic<"HEXAGON_S2_vsatwh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vsatwuh,SI_ftype_DI,1)
+//
+def int_hexagon_S2_vsatwuh :
+Hexagon_si_di_Intrinsic<"HEXAGON_S2_vsatwuh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_packhl,DI_ftype_SISI,2)
+//
+def int_hexagon_S2_packhl :
+Hexagon_di_sisi_Intrinsic<"HEXAGON_S2_packhl">;
+//
+// BUILTIN_INFO(HEXAGON.A2_swiz,SI_ftype_SI,1)
+//
+def int_hexagon_A2_swiz :
+Hexagon_si_si_Intrinsic<"HEXAGON_A2_swiz">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vsathub_nopack,DI_ftype_DI,1)
+//
+def int_hexagon_S2_vsathub_nopack :
+Hexagon_di_di_Intrinsic<"HEXAGON_S2_vsathub_nopack">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vsathb_nopack,DI_ftype_DI,1)
+//
+def int_hexagon_S2_vsathb_nopack :
+Hexagon_di_di_Intrinsic<"HEXAGON_S2_vsathb_nopack">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vsatwh_nopack,DI_ftype_DI,1)
+//
+def int_hexagon_S2_vsatwh_nopack :
+Hexagon_di_di_Intrinsic<"HEXAGON_S2_vsatwh_nopack">;
+//
+// BUILTIN_INFO(HEXAGON.S2_vsatwuh_nopack,DI_ftype_DI,1)
+//
+def int_hexagon_S2_vsatwuh_nopack :
+Hexagon_di_di_Intrinsic<"HEXAGON_S2_vsatwuh_nopack">;
+//
+// BUILTIN_INFO(HEXAGON.S2_shuffob,DI_ftype_DIDI,2)
+//
+def int_hexagon_S2_shuffob :
+Hexagon_di_didi_Intrinsic<"HEXAGON_S2_shuffob">;
+//
+// BUILTIN_INFO(HEXAGON.S2_shuffeb,DI_ftype_DIDI,2)
+//
+def int_hexagon_S2_shuffeb :
+Hexagon_di_didi_Intrinsic<"HEXAGON_S2_shuffeb">;
+//
+// BUILTIN_INFO(HEXAGON.S2_shuffoh,DI_ftype_DIDI,2)
+//
+def int_hexagon_S2_shuffoh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_S2_shuffoh">;
+//
+// BUILTIN_INFO(HEXAGON.S2_shuffeh,DI_ftype_DIDI,2)
+//
+def int_hexagon_S2_shuffeh :
+Hexagon_di_didi_Intrinsic<"HEXAGON_S2_shuffeh">;
+//
+// BUILTIN_INFO(HEXAGON.S5_popcountp,SI_ftype_DI,1)
+//
+def int_hexagon_S5_popcountp :
+Hexagon_si_di_Intrinsic<"HEXAGON_S5_popcountp">;
+//
+// BUILTIN_INFO(HEXAGON.S4_parity,SI_ftype_SISI,2)
+//
+def int_hexagon_S4_parity :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S4_parity">;
+//
+// BUILTIN_INFO(HEXAGON.S2_parityp,SI_ftype_DIDI,2)
+//
+def int_hexagon_S2_parityp :
+Hexagon_si_didi_Intrinsic<"HEXAGON_S2_parityp">;
+//
+// BUILTIN_INFO(HEXAGON.S2_lfsp,DI_ftype_DIDI,2)
+//
+def int_hexagon_S2_lfsp :
+Hexagon_di_didi_Intrinsic<"HEXAGON_S2_lfsp">;
+//
+// BUILTIN_INFO(HEXAGON.S2_clbnorm,SI_ftype_SI,1)
+//
+def int_hexagon_S2_clbnorm :
+Hexagon_si_si_Intrinsic<"HEXAGON_S2_clbnorm">;
+//
+// BUILTIN_INFO(HEXAGON.S4_clbaddi,SI_ftype_SISI,2)
+//
+def int_hexagon_S4_clbaddi :
+Hexagon_si_sisi_Intrinsic<"HEXAGON_S4_clbaddi">;
+//
+// BUILTIN_INFO(HEXAGON.S4_clbpnorm,SI_ftype_DI,1)
+//
+def int_hexagon_S4_clbpnorm :
+Hexagon_si_di_Intrinsic<"HEXAGON_S4_clbpnorm">;
+//
+// BUILTIN_INFO(HEXAGON.S4_clbpaddi,SI_ftype_DISI,2)
+//
+def int_hexagon_S4_clbpaddi :
+Hexagon_si_disi_Intrinsic<"HEXAGON_S4_clbpaddi">;
+//
+// BUILTIN_INFO(HEXAGON.S2_clb,SI_ftype_SI,1)
+//
+def int_hexagon_S2_clb :
+Hexagon_si_si_Intrinsic<"HEXAGON_S2_clb">;
+//
+// BUILTIN_INFO(HEXAGON.S2_cl0,SI_ftype_SI,1)
+//
+def int_hexagon_S2_cl0 :
+Hexagon_si_si_Intrinsic<"HEXAGON_S2_cl0">;
+//
+// BUILTIN_INFO(HEXAGON.S2_cl1,SI_ftype_SI,1)
+//
+def int_hexagon_S2_cl1 :
+Hexagon_si_si_Intrinsic<"HEXAGON_S2_cl1">;
+//
+// BUILTIN_INFO(HEXAGON.S2_clbp,SI_ftype_DI,1)
+//
+def int_hexagon_S2_clbp :
+Hexagon_si_di_Intrinsic<"HEXAGON_S2_clbp">;
+//
+// BUILTIN_INFO(HEXAGON.S2_cl0p,SI_ftype_DI,1)
+//
+def int_hexagon_S2_cl0p :
+Hexagon_si_di_Intrinsic<"HEXAGON_S2_cl0p">;
+//
+// BUILTIN_INFO(HEXAGON.S2_cl1p,SI_ftype_DI,1)
+//
+def int_hexagon_S2_cl1p :
+Hexagon_si_di_Intrinsic<"HEXAGON_S2_cl1p">;
+//
+// BUILTIN_INFO(HEXAGON.S2_brev,SI_ftype_SI,1)
+//
+def int_hexagon_S2_brev :
+Hexagon_si_si_Intrinsic<"HEXAGON_S2_brev">;
+//
+// BUILTIN_INFO(HEXAGON.S2_brevp,DI_ftype_DI,1)
+//
+def int_hexagon_S2_brevp :
+Hexagon_di_di_Intrinsic<"HEXAGON_S2_brevp">;
+//
+// BUILTIN_INFO(HEXAGON.S2_ct0,SI_ftype_SI,1)
+//
+def int_hexagon_S2_ct0 :
+Hexagon_si_si_Intrinsic<"HEXAGON_S2_ct0">;
+//
+// BUILTIN_INFO(HEXAGON.S2_ct1,SI_ftype_SI,1)
+//
+def int_hexagon_S2_ct1 :
+Hexagon_si_si_Intrinsic<"HEXAGON_S2_ct1">;
+//
+// BUILTIN_INFO(HEXAGON.S2_ct0p,SI_ftype_DI,1)
+//
+def int_hexagon_S2_ct0p :
+Hexagon_si_di_Intrinsic<"HEXAGON_S2_ct0p">;
+//
+// BUILTIN_INFO(HEXAGON.S2_ct1p,SI_ftype_DI,1)
+//
+def int_hexagon_S2_ct1p :
+Hexagon_si_di_Intrinsic<"HEXAGON_S2_ct1p">;
+//
+// BUILTIN_INFO(HEXAGON.S2_interleave,DI_ftype_DI,1)
+//
+def int_hexagon_S2_interleave :
+Hexagon_di_di_Intrinsic<"HEXAGON_S2_interleave">;
+//
+// BUILTIN_INFO(HEXAGON.S2_deinterleave,DI_ftype_DI,1)
+//
+def int_hexagon_S2_deinterleave :
+Hexagon_di_di_Intrinsic<"HEXAGON_S2_deinterleave">;
+
+//
+// BUILTIN_INFO(HEXAGON.dcfetch_A,v_ftype_DI*,1)
+//
+def int_hexagon_prefetch :
+Hexagon_Intrinsic<"HEXAGON_prefetch", [], [llvm_ptr_ty], []>;
+def int_hexagon_Y2_dccleana :
+Hexagon_Intrinsic<"HEXAGON_Y2_dccleana", [], [llvm_ptr_ty], []>;
+def int_hexagon_Y2_dccleaninva :
+Hexagon_Intrinsic<"HEXAGON_Y2_dccleaninva", [], [llvm_ptr_ty], []>;
+def int_hexagon_Y2_dcinva :
+Hexagon_Intrinsic<"HEXAGON_Y2_dcinva", [], [llvm_ptr_ty], []>;
+def int_hexagon_Y2_dczeroa :
+Hexagon_Intrinsic<"HEXAGON_Y2_dczeroa", [], [llvm_ptr_ty],
+ [IntrWriteMem, IntrArgMemOnly, IntrHasSideEffects]>;
+def int_hexagon_Y4_l2fetch :
+Hexagon_Intrinsic<"HEXAGON_Y4_l2fetch", [], [llvm_ptr_ty, llvm_i32_ty], []>;
+def int_hexagon_Y5_l2fetch :
+Hexagon_Intrinsic<"HEXAGON_Y5_l2fetch", [], [llvm_ptr_ty, llvm_i64_ty], []>;
+
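+// Pointer-to-i32 and pointer-to-i64 aliases, used below to give the locked
+// load/store intrinsics typed pointer operands.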
+def llvm_ptr32_ty : LLVMPointerType<llvm_i32_ty>;
+def llvm_ptr64_ty : LLVMPointerType<llvm_i64_ty>;
+
+// Mark locked loads as read/write to prevent any accidental reordering.
+def int_hexagon_L2_loadw_locked :
+Hexagon_Intrinsic<"HEXAGON_L2_loadw_locked", [llvm_i32_ty], [llvm_ptr32_ty],
+ [IntrArgMemOnly, NoCapture<0>]>;
+def int_hexagon_L4_loadd_locked :
+Hexagon_Intrinsic<"HEXAGON_L4_loadd_locked", [llvm_i64_ty], [llvm_ptr64_ty],
+ [IntrArgMemOnly, NoCapture<0>]>;
+
+def int_hexagon_S2_storew_locked :
+Hexagon_Intrinsic<"HEXAGON_S2_storew_locked", [llvm_i32_ty],
+ [llvm_ptr32_ty, llvm_i32_ty], [IntrArgMemOnly, NoCapture<0>]>;
+def int_hexagon_S4_stored_locked :
+Hexagon_Intrinsic<"HEXAGON_S4_stored_locked", [llvm_i32_ty],
+ [llvm_ptr64_ty, llvm_i64_ty], [IntrArgMemOnly, NoCapture<0>]>;
+
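+// A usage sketch for the locked (load-locked / store-conditional) intrinsics
+// above, assuming Clang's usual convention of exposing each GCCIntSuffix as
+// __builtin_<suffix> and assuming the store builtin returns non-zero when the
+// conditional store succeeds; the exact builtin signatures are not verified
+// here, so treat this as illustrative only.
+//
+//   static inline void atomic_add_word(volatile int *p, int v) {
+//     int old;
+//     do {
+//       old = __builtin_HEXAGON_L2_loadw_locked((int *)p);        // memw_locked(p)
+//     } while (!__builtin_HEXAGON_S2_storew_locked((int *)p, old + v));
+//   }
+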
+// V60
+
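+// Naming convention for the HVX (V60) wrapper classes and intrinsics below:
+// the vNNN components give vector widths in bits, modeled as i32 vectors
+// (v512 = llvm_v16i32_ty, v1024 = llvm_v32i32_ty, v2048 = llvm_v64i32_ty),
+// while v64i and v128i map to the predicate types llvm_v512i1_ty and
+// llvm_v1024i1_ty. Names carrying the _128B suffix are the 128-byte-vector
+// (double-width) variants of the corresponding 64-byte-mode intrinsics.
+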
+class Hexagon_v2048v2048_Intrinsic_T<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty],
+ [IntrNoMem]>;
+
+// tag : V6_hi_W
+// tag : V6_lo_W
+class Hexagon_v512v1024_Intrinsic_T<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+// tag : V6_hi_W_128B
+// tag : V6_lo_W_128B
+class Hexagon_v1024v2048_Intrinsic_T<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v64i32_ty],
+ [IntrNoMem]>;
+
+class Hexagon_v1024v1024_Intrinsic_T<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+// BUILTIN_INFO(HEXAGON.V6_hi_W,VI_ftype_VI,1)
+// tag : V6_hi
+def int_hexagon_V6_hi :
+Hexagon_v512v1024_Intrinsic_T<"HEXAGON_V6_hi">;
+
+// BUILTIN_INFO(HEXAGON.V6_lo_W,VI_ftype_VI,1)
+// tag : V6_lo
+def int_hexagon_V6_lo :
+Hexagon_v512v1024_Intrinsic_T<"HEXAGON_V6_lo">;
+
+// BUILTIN_INFO(HEXAGON.V6_hi_W,VI_ftype_VI,1)
+// tag : V6_hi_128B
+def int_hexagon_V6_hi_128B :
+Hexagon_v1024v2048_Intrinsic_T<"HEXAGON_V6_hi_128B">;
+
+// BUILTIN_INFO(HEXAGON.V6_lo_W,VI_ftype_VI,1)
+// tag : V6_lo_128B
+def int_hexagon_V6_lo_128B :
+Hexagon_v1024v2048_Intrinsic_T<"HEXAGON_V6_lo_128B">;
+
+// BUILTIN_INFO(HEXAGON.V6_vassignp,VI_ftype_VI,1)
+// tag : V6_vassignp
+def int_hexagon_V6_vassignp :
+Hexagon_v1024v1024_Intrinsic_T<"HEXAGON_V6_vassignp">;
+
+// BUILTIN_INFO(HEXAGON.V6_vassignp,VI_ftype_VI,1)
+// tag : V6_vassignp_128B
+def int_hexagon_V6_vassignp_128B :
+Hexagon_v2048v2048_Intrinsic_T<"HEXAGON_V6_vassignp_128B">;
+
+
+//
+// Hexagon_iii_Intrinsic<string GCCIntSuffix>
+// tag : S6_rol_i_r
+class Hexagon_iii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_LLiLLii_Intrinsic<string GCCIntSuffix>
+// tag : S6_rol_i_p
+class Hexagon_LLiLLii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i64_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_iiii_Intrinsic<string GCCIntSuffix>
+// tag : S6_rol_i_r_acc
+class Hexagon_iiii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i32_ty,llvm_i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_LLiLLiLLii_Intrinsic<string GCCIntSuffix>
+// tag : S6_rol_i_p_acc
+class Hexagon_LLiLLiLLii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i64_ty,llvm_i64_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v512v512v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_valignb
+class Hexagon_v512v512v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v1024v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_valignb_128B
+class Hexagon_v1024v1024v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v512v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vror
+class Hexagon_v512v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vror_128B
+class Hexagon_v1024v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vunpackub
+class Hexagon_v1024v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v2048v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vunpackub_128B
+class Hexagon_v2048v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v1024v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vunpackob
+class Hexagon_v1024v1024v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v2048v2048v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vunpackob_128B
+class Hexagon_v2048v2048v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v512v512v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vpackeb
+class Hexagon_v512v512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v1024v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vpackeb_128B
+class Hexagon_v1024v1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v2048v2048i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vdmpybus_dv_128B
+class Hexagon_v2048v2048i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v2048v2048v2048i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vdmpybus_dv_acc_128B
+class Hexagon_v2048v2048v2048i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v64i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v512v512v512v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vdmpyhvsat_acc
+class Hexagon_v512v512v512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v1024v1024v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vdmpyhvsat_acc_128B
+class Hexagon_v1024v1024v1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v512v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vdmpyhisat
+class Hexagon_v512v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v2048i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vdmpyhisat_128B
+class Hexagon_v1024v2048i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v64i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v512v512v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vdmpyhisat_acc
+class Hexagon_v512v512v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v1024v2048i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vdmpyhisat_acc_128B
+class Hexagon_v1024v1024v2048i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v64i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v1024ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_vrmpyubi
+class Hexagon_v1024v1024ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v2048v2048ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_vrmpyubi_128B
+class Hexagon_v2048v2048ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v1024v1024ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_vrmpyubi_acc
+class Hexagon_v1024v1024v1024ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v2048v2048v2048ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_vrmpyubi_acc_128B
+class Hexagon_v2048v2048v2048ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v64i32_ty,llvm_i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v2048v2048v2048_Intrinsic<string GCCIntSuffix>
+// tag : V6_vaddb_dv_128B
+class Hexagon_v2048v2048v2048_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v64i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v512v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vaddubh
+class Hexagon_v1024v512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v2048v1024v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vaddubh_128B
+class Hexagon_v2048v1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vd0
+class Hexagon_v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vd0_128B
+class Hexagon_v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v512v64iv512v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vaddbq
+class Hexagon_v512v64iv512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v512i1_ty,llvm_v16i32_ty,llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v128iv1024v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vaddbq_128B
+class Hexagon_v1024v128iv1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v1024i1_ty,llvm_v32i32_ty,llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v512v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vabsh
+class Hexagon_v512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vabsh_128B
+class Hexagon_v1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v1024v512v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpybv_acc
+class Hexagon_v1024v1024v512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v16i32_ty,llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v2048v2048v1024v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpybv_acc_128B
+class Hexagon_v2048v2048v1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty,llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpyub
+class Hexagon_v1024v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v2048v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpyub_128B
+class Hexagon_v2048v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v1024v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpyub_acc
+class Hexagon_v1024v1024v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v2048v2048v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpyub_acc_128B
+class Hexagon_v2048v2048v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v512v64ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_vandqrt
+class Hexagon_v512v64ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v512i1_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v128ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_vandqrt_128B
+class Hexagon_v1024v128ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v1024i1_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v512v512v64ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_vandqrt_acc
+class Hexagon_v512v512v64ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v512i1_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v1024v128ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_vandqrt_acc_128B
+class Hexagon_v1024v1024v128ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v1024i1_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v64iv512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vandvrt
+class Hexagon_v64iv512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v512i1_ty], [llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v128iv1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vandvrt_128B
+class Hexagon_v128iv1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v1024i1_ty], [llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v64iv64iv512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vandvrt_acc
+class Hexagon_v64iv64iv512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v512i1_ty], [llvm_v512i1_ty,llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v128iv128iv1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vandvrt_acc_128B
+class Hexagon_v128iv128iv1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v1024i1_ty], [llvm_v1024i1_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v64iv512v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vgtw
+class Hexagon_v64iv512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v512i1_ty], [llvm_v16i32_ty,llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v128iv1024v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vgtw_128B
+class Hexagon_v128iv1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v1024i1_ty], [llvm_v32i32_ty,llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v64iv64iv512v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vgtw_and
+class Hexagon_v64iv64iv512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v512i1_ty], [llvm_v512i1_ty,llvm_v16i32_ty,llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v128iv128iv1024v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vgtw_and_128B
+class Hexagon_v128iv128iv1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v1024i1_ty], [llvm_v1024i1_ty,llvm_v32i32_ty,llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v64iv64iv64i_Intrinsic<string GCCIntSuffix>
+// tag : V6_pred_or
+class Hexagon_v64iv64iv64i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v512i1_ty], [llvm_v512i1_ty,llvm_v512i1_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v128iv128iv128i_Intrinsic<string GCCIntSuffix>
+// tag : V6_pred_or_128B
+class Hexagon_v128iv128iv128i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v1024i1_ty], [llvm_v1024i1_ty,llvm_v1024i1_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v64iv64i_Intrinsic<string GCCIntSuffix>
+// tag : V6_pred_not
+class Hexagon_v64iv64i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v512i1_ty], [llvm_v512i1_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v128iv128i_Intrinsic<string GCCIntSuffix>
+// tag : V6_pred_not_128B
+class Hexagon_v128iv128i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v1024i1_ty], [llvm_v1024i1_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v64ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_pred_scalar2
+class Hexagon_v64ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v512i1_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v128ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_pred_scalar2_128B
+class Hexagon_v128ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v1024i1_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v64iv512v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vswap
+class Hexagon_v1024v64iv512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v512i1_ty,llvm_v16i32_ty,llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v2048v128iv1024v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vswap_128B
+class Hexagon_v2048v128iv1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v1024i1_ty,llvm_v32i32_ty,llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v512v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vshuffvdd
+class Hexagon_v1024v512v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v2048v1024v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vshuffvdd_128B
+class Hexagon_v2048v1024v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_iv512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_extractw
+class Hexagon_iv512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_iv1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_extractw_128B
+class Hexagon_iv1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_lvsplatw
+class Hexagon_v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_lvsplatw_128B
+class Hexagon_v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v512v512v512v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vlutvvb_oracc
+class Hexagon_v512v512v512v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v1024v1024v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vlutvvb_oracc_128B
+class Hexagon_v1024v1024v1024v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v1024v1024v512v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vlutvwh_oracc
+class Hexagon_v1024v1024v512v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v16i32_ty,llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_v2048v2048v1024v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vlutvwh_oracc_128B
+class Hexagon_v2048v2048v1024v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_vv64ivmemv512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vS32b_qpred_ai
+class Hexagon_vv64ivmemv512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_v512i1_ty,llvm_ptr_ty,llvm_v16i32_ty],
+ [IntrArgMemOnly]>;
+
+//
+// Hexagon_vv128ivmemv1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vS32b_qpred_ai_128B
+class Hexagon_vv128ivmemv1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_v1024i1_ty,llvm_ptr_ty,llvm_v32i32_ty],
+ [IntrArgMemOnly]>;
+
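+// The type string in each class name lists the return type followed by the
+// operand types, as can be read off the definitions above: v512, v1024 and
+// v2048 are HVX vectors of that bit width (llvm_v16i32_ty, llvm_v32i32_ty,
+// llvm_v64i32_ty), v64i and v128i are the predicate types llvm_v512i1_ty and
+// llvm_v1024i1_ty, i is llvm_i32_ty, and LLi is llvm_i64_ty. Classes that
+// take llvm_ptr_ty return no value and are marked IntrArgMemOnly rather than
+// IntrNoMem, since they access memory through that pointer argument. The
+// BUILTIN_INFO signature strings below follow the same convention (return
+// type first, e.g. SI_ftype_SISI is i32 (i32, i32)), with the trailing
+// number giving the operand count.
+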
+//
+// BUILTIN_INFO(HEXAGON.S6_rol_i_r,SI_ftype_SISI,2)
+// tag : S6_rol_i_r
+def int_hexagon_S6_rol_i_r :
+Hexagon_iii_Intrinsic<"HEXAGON_S6_rol_i_r">;
+
+//
+// BUILTIN_INFO(HEXAGON.S6_rol_i_p,DI_ftype_DISI,2)
+// tag : S6_rol_i_p
+def int_hexagon_S6_rol_i_p :
+Hexagon_LLiLLii_Intrinsic<"HEXAGON_S6_rol_i_p">;
+
+//
+// BUILTIN_INFO(HEXAGON.S6_rol_i_r_acc,SI_ftype_SISISI,3)
+// tag : S6_rol_i_r_acc
+def int_hexagon_S6_rol_i_r_acc :
+Hexagon_iiii_Intrinsic<"HEXAGON_S6_rol_i_r_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.S6_rol_i_p_acc,DI_ftype_DIDISI,3)
+// tag : S6_rol_i_p_acc
+def int_hexagon_S6_rol_i_p_acc :
+Hexagon_LLiLLiLLii_Intrinsic<"HEXAGON_S6_rol_i_p_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.S6_rol_i_r_nac,SI_ftype_SISISI,3)
+// tag : S6_rol_i_r_nac
+def int_hexagon_S6_rol_i_r_nac :
+Hexagon_iiii_Intrinsic<"HEXAGON_S6_rol_i_r_nac">;
+
+//
+// BUILTIN_INFO(HEXAGON.S6_rol_i_p_nac,DI_ftype_DIDISI,3)
+// tag : S6_rol_i_p_nac
+def int_hexagon_S6_rol_i_p_nac :
+Hexagon_LLiLLiLLii_Intrinsic<"HEXAGON_S6_rol_i_p_nac">;
+
+//
+// BUILTIN_INFO(HEXAGON.S6_rol_i_r_xacc,SI_ftype_SISISI,3)
+// tag : S6_rol_i_r_xacc
+def int_hexagon_S6_rol_i_r_xacc :
+Hexagon_iiii_Intrinsic<"HEXAGON_S6_rol_i_r_xacc">;
+
+//
+// BUILTIN_INFO(HEXAGON.S6_rol_i_p_xacc,DI_ftype_DIDISI,3)
+// tag : S6_rol_i_p_xacc
+def int_hexagon_S6_rol_i_p_xacc :
+Hexagon_LLiLLiLLii_Intrinsic<"HEXAGON_S6_rol_i_p_xacc">;
+
+//
+// BUILTIN_INFO(HEXAGON.S6_rol_i_r_and,SI_ftype_SISISI,3)
+// tag : S6_rol_i_r_and
+def int_hexagon_S6_rol_i_r_and :
+Hexagon_iiii_Intrinsic<"HEXAGON_S6_rol_i_r_and">;
+
+//
+// BUILTIN_INFO(HEXAGON.S6_rol_i_r_or,SI_ftype_SISISI,3)
+// tag : S6_rol_i_r_or
+def int_hexagon_S6_rol_i_r_or :
+Hexagon_iiii_Intrinsic<"HEXAGON_S6_rol_i_r_or">;
+
+//
+// BUILTIN_INFO(HEXAGON.S6_rol_i_p_and,DI_ftype_DIDISI,3)
+// tag : S6_rol_i_p_and
+def int_hexagon_S6_rol_i_p_and :
+Hexagon_LLiLLiLLii_Intrinsic<"HEXAGON_S6_rol_i_p_and">;
+
+//
+// BUILTIN_INFO(HEXAGON.S6_rol_i_p_or,DI_ftype_DIDISI,3)
+// tag : S6_rol_i_p_or
+def int_hexagon_S6_rol_i_p_or :
+Hexagon_LLiLLiLLii_Intrinsic<"HEXAGON_S6_rol_i_p_or">;
+
+//
+// BUILTIN_INFO(HEXAGON.S2_cabacencbin,DI_ftype_DIDIQI,3)
+// tag : S2_cabacencbin
+def int_hexagon_S2_cabacencbin :
+Hexagon_LLiLLiLLii_Intrinsic<"HEXAGON_S2_cabacencbin">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_valignb,VI_ftype_VIVISI,3)
+// tag : V6_valignb
+def int_hexagon_V6_valignb :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_valignb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_valignb_128B,VI_ftype_VIVISI,3)
+// tag : V6_valignb_128B
+def int_hexagon_V6_valignb_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_valignb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlalignb,VI_ftype_VIVISI,3)
+// tag : V6_vlalignb
+def int_hexagon_V6_vlalignb :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vlalignb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlalignb_128B,VI_ftype_VIVISI,3)
+// tag : V6_vlalignb_128B
+def int_hexagon_V6_vlalignb_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vlalignb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_valignbi,VI_ftype_VIVISI,3)
+// tag : V6_valignbi
+def int_hexagon_V6_valignbi :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_valignbi">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_valignbi_128B,VI_ftype_VIVISI,3)
+// tag : V6_valignbi_128B
+def int_hexagon_V6_valignbi_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_valignbi_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlalignbi,VI_ftype_VIVISI,3)
+// tag : V6_vlalignbi
+def int_hexagon_V6_vlalignbi :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vlalignbi">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlalignbi_128B,VI_ftype_VIVISI,3)
+// tag : V6_vlalignbi_128B
+def int_hexagon_V6_vlalignbi_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vlalignbi_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vror,VI_ftype_VISI,2)
+// tag : V6_vror
+def int_hexagon_V6_vror :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vror">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vror_128B,VI_ftype_VISI,2)
+// tag : V6_vror_128B
+def int_hexagon_V6_vror_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vror_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vunpackub,VD_ftype_VI,1)
+// tag : V6_vunpackub
+def int_hexagon_V6_vunpackub :
+Hexagon_v1024v512_Intrinsic<"HEXAGON_V6_vunpackub">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vunpackub_128B,VD_ftype_VI,1)
+// tag : V6_vunpackub_128B
+def int_hexagon_V6_vunpackub_128B :
+Hexagon_v2048v1024_Intrinsic<"HEXAGON_V6_vunpackub_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vunpackb,VD_ftype_VI,1)
+// tag : V6_vunpackb
+def int_hexagon_V6_vunpackb :
+Hexagon_v1024v512_Intrinsic<"HEXAGON_V6_vunpackb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vunpackb_128B,VD_ftype_VI,1)
+// tag : V6_vunpackb_128B
+def int_hexagon_V6_vunpackb_128B :
+Hexagon_v2048v1024_Intrinsic<"HEXAGON_V6_vunpackb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vunpackuh,VD_ftype_VI,1)
+// tag : V6_vunpackuh
+def int_hexagon_V6_vunpackuh :
+Hexagon_v1024v512_Intrinsic<"HEXAGON_V6_vunpackuh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vunpackuh_128B,VD_ftype_VI,1)
+// tag : V6_vunpackuh_128B
+def int_hexagon_V6_vunpackuh_128B :
+Hexagon_v2048v1024_Intrinsic<"HEXAGON_V6_vunpackuh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vunpackh,VD_ftype_VI,1)
+// tag : V6_vunpackh
+def int_hexagon_V6_vunpackh :
+Hexagon_v1024v512_Intrinsic<"HEXAGON_V6_vunpackh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vunpackh_128B,VD_ftype_VI,1)
+// tag : V6_vunpackh_128B
+def int_hexagon_V6_vunpackh_128B :
+Hexagon_v2048v1024_Intrinsic<"HEXAGON_V6_vunpackh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vunpackob,VD_ftype_VDVI,2)
+// tag : V6_vunpackob
+def int_hexagon_V6_vunpackob :
+Hexagon_v1024v1024v512_Intrinsic<"HEXAGON_V6_vunpackob">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vunpackob_128B,VD_ftype_VDVI,2)
+// tag : V6_vunpackob_128B
+def int_hexagon_V6_vunpackob_128B :
+Hexagon_v2048v2048v1024_Intrinsic<"HEXAGON_V6_vunpackob_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vunpackoh,VD_ftype_VDVI,2)
+// tag : V6_vunpackoh
+def int_hexagon_V6_vunpackoh :
+Hexagon_v1024v1024v512_Intrinsic<"HEXAGON_V6_vunpackoh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vunpackoh_128B,VD_ftype_VDVI,2)
+// tag : V6_vunpackoh_128B
+def int_hexagon_V6_vunpackoh_128B :
+Hexagon_v2048v2048v1024_Intrinsic<"HEXAGON_V6_vunpackoh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackeb,VI_ftype_VIVI,2)
+// tag : V6_vpackeb
+def int_hexagon_V6_vpackeb :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vpackeb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackeb_128B,VI_ftype_VIVI,2)
+// tag : V6_vpackeb_128B
+def int_hexagon_V6_vpackeb_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vpackeb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackeh,VI_ftype_VIVI,2)
+// tag : V6_vpackeh
+def int_hexagon_V6_vpackeh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vpackeh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackeh_128B,VI_ftype_VIVI,2)
+// tag : V6_vpackeh_128B
+def int_hexagon_V6_vpackeh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vpackeh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackob,VI_ftype_VIVI,2)
+// tag : V6_vpackob
+def int_hexagon_V6_vpackob :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vpackob">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackob_128B,VI_ftype_VIVI,2)
+// tag : V6_vpackob_128B
+def int_hexagon_V6_vpackob_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vpackob_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackoh,VI_ftype_VIVI,2)
+// tag : V6_vpackoh
+def int_hexagon_V6_vpackoh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vpackoh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackoh_128B,VI_ftype_VIVI,2)
+// tag : V6_vpackoh_128B
+def int_hexagon_V6_vpackoh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vpackoh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackhub_sat,VI_ftype_VIVI,2)
+// tag : V6_vpackhub_sat
+def int_hexagon_V6_vpackhub_sat :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vpackhub_sat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackhub_sat_128B,VI_ftype_VIVI,2)
+// tag : V6_vpackhub_sat_128B
+def int_hexagon_V6_vpackhub_sat_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vpackhub_sat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackhb_sat,VI_ftype_VIVI,2)
+// tag : V6_vpackhb_sat
+def int_hexagon_V6_vpackhb_sat :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vpackhb_sat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackhb_sat_128B,VI_ftype_VIVI,2)
+// tag : V6_vpackhb_sat_128B
+def int_hexagon_V6_vpackhb_sat_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vpackhb_sat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackwuh_sat,VI_ftype_VIVI,2)
+// tag : V6_vpackwuh_sat
+def int_hexagon_V6_vpackwuh_sat :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vpackwuh_sat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackwuh_sat_128B,VI_ftype_VIVI,2)
+// tag : V6_vpackwuh_sat_128B
+def int_hexagon_V6_vpackwuh_sat_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vpackwuh_sat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackwh_sat,VI_ftype_VIVI,2)
+// tag : V6_vpackwh_sat
+def int_hexagon_V6_vpackwh_sat :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vpackwh_sat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpackwh_sat_128B,VI_ftype_VIVI,2)
+// tag : V6_vpackwh_sat_128B
+def int_hexagon_V6_vpackwh_sat_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vpackwh_sat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vzb,VD_ftype_VI,1)
+// tag : V6_vzb
+def int_hexagon_V6_vzb :
+Hexagon_v1024v512_Intrinsic<"HEXAGON_V6_vzb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vzb_128B,VD_ftype_VI,1)
+// tag : V6_vzb_128B
+def int_hexagon_V6_vzb_128B :
+Hexagon_v2048v1024_Intrinsic<"HEXAGON_V6_vzb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsb,VD_ftype_VI,1)
+// tag : V6_vsb
+def int_hexagon_V6_vsb :
+Hexagon_v1024v512_Intrinsic<"HEXAGON_V6_vsb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsb_128B,VD_ftype_VI,1)
+// tag : V6_vsb_128B
+def int_hexagon_V6_vsb_128B :
+Hexagon_v2048v1024_Intrinsic<"HEXAGON_V6_vsb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vzh,VD_ftype_VI,1)
+// tag : V6_vzh
+def int_hexagon_V6_vzh :
+Hexagon_v1024v512_Intrinsic<"HEXAGON_V6_vzh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vzh_128B,VD_ftype_VI,1)
+// tag : V6_vzh_128B
+def int_hexagon_V6_vzh_128B :
+Hexagon_v2048v1024_Intrinsic<"HEXAGON_V6_vzh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsh,VD_ftype_VI,1)
+// tag : V6_vsh
+def int_hexagon_V6_vsh :
+Hexagon_v1024v512_Intrinsic<"HEXAGON_V6_vsh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsh_128B,VD_ftype_VI,1)
+// tag : V6_vsh_128B
+def int_hexagon_V6_vsh_128B :
+Hexagon_v2048v1024_Intrinsic<"HEXAGON_V6_vsh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpybus,VI_ftype_VISI,2)
+// tag : V6_vdmpybus
+def int_hexagon_V6_vdmpybus :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vdmpybus">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpybus_128B,VI_ftype_VISI,2)
+// tag : V6_vdmpybus_128B
+def int_hexagon_V6_vdmpybus_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vdmpybus_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpybus_acc,VI_ftype_VIVISI,3)
+// tag : V6_vdmpybus_acc
+def int_hexagon_V6_vdmpybus_acc :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vdmpybus_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpybus_acc_128B,VI_ftype_VIVISI,3)
+// tag : V6_vdmpybus_acc_128B
+def int_hexagon_V6_vdmpybus_acc_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vdmpybus_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpybus_dv,VD_ftype_VDSI,2)
+// tag : V6_vdmpybus_dv
+def int_hexagon_V6_vdmpybus_dv :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vdmpybus_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpybus_dv_128B,VD_ftype_VDSI,2)
+// tag : V6_vdmpybus_dv_128B
+def int_hexagon_V6_vdmpybus_dv_128B :
+Hexagon_v2048v2048i_Intrinsic<"HEXAGON_V6_vdmpybus_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpybus_dv_acc,VD_ftype_VDVDSI,3)
+// tag : V6_vdmpybus_dv_acc
+def int_hexagon_V6_vdmpybus_dv_acc :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vdmpybus_dv_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpybus_dv_acc_128B,VD_ftype_VDVDSI,3)
+// tag : V6_vdmpybus_dv_acc_128B
+def int_hexagon_V6_vdmpybus_dv_acc_128B :
+Hexagon_v2048v2048v2048i_Intrinsic<"HEXAGON_V6_vdmpybus_dv_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhb,VI_ftype_VISI,2)
+// tag : V6_vdmpyhb
+def int_hexagon_V6_vdmpyhb :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vdmpyhb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhb_128B,VI_ftype_VISI,2)
+// tag : V6_vdmpyhb_128B
+def int_hexagon_V6_vdmpyhb_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vdmpyhb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhb_acc,VI_ftype_VIVISI,3)
+// tag : V6_vdmpyhb_acc
+def int_hexagon_V6_vdmpyhb_acc :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vdmpyhb_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhb_acc_128B,VI_ftype_VIVISI,3)
+// tag : V6_vdmpyhb_acc_128B
+def int_hexagon_V6_vdmpyhb_acc_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vdmpyhb_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhb_dv,VD_ftype_VDSI,2)
+// tag : V6_vdmpyhb_dv
+def int_hexagon_V6_vdmpyhb_dv :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vdmpyhb_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhb_dv_128B,VD_ftype_VDSI,2)
+// tag : V6_vdmpyhb_dv_128B
+def int_hexagon_V6_vdmpyhb_dv_128B :
+Hexagon_v2048v2048i_Intrinsic<"HEXAGON_V6_vdmpyhb_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhb_dv_acc,VD_ftype_VDVDSI,3)
+// tag : V6_vdmpyhb_dv_acc
+def int_hexagon_V6_vdmpyhb_dv_acc :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vdmpyhb_dv_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhb_dv_acc_128B,VD_ftype_VDVDSI,3)
+// tag : V6_vdmpyhb_dv_acc_128B
+def int_hexagon_V6_vdmpyhb_dv_acc_128B :
+Hexagon_v2048v2048v2048i_Intrinsic<"HEXAGON_V6_vdmpyhb_dv_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhvsat,VI_ftype_VIVI,2)
+// tag : V6_vdmpyhvsat
+def int_hexagon_V6_vdmpyhvsat :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vdmpyhvsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhvsat_128B,VI_ftype_VIVI,2)
+// tag : V6_vdmpyhvsat_128B
+def int_hexagon_V6_vdmpyhvsat_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vdmpyhvsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhvsat_acc,VI_ftype_VIVIVI,3)
+// tag : V6_vdmpyhvsat_acc
+def int_hexagon_V6_vdmpyhvsat_acc :
+Hexagon_v512v512v512v512_Intrinsic<"HEXAGON_V6_vdmpyhvsat_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhvsat_acc_128B,VI_ftype_VIVIVI,3)
+// tag : V6_vdmpyhvsat_acc_128B
+def int_hexagon_V6_vdmpyhvsat_acc_128B :
+Hexagon_v1024v1024v1024v1024_Intrinsic<"HEXAGON_V6_vdmpyhvsat_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhsat,VI_ftype_VISI,2)
+// tag : V6_vdmpyhsat
+def int_hexagon_V6_vdmpyhsat :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vdmpyhsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhsat_128B,VI_ftype_VISI,2)
+// tag : V6_vdmpyhsat_128B
+def int_hexagon_V6_vdmpyhsat_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vdmpyhsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhsat_acc,VI_ftype_VIVISI,3)
+// tag : V6_vdmpyhsat_acc
+def int_hexagon_V6_vdmpyhsat_acc :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vdmpyhsat_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhsat_acc_128B,VI_ftype_VIVISI,3)
+// tag : V6_vdmpyhsat_acc_128B
+def int_hexagon_V6_vdmpyhsat_acc_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vdmpyhsat_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhisat,VI_ftype_VDSI,2)
+// tag : V6_vdmpyhisat
+def int_hexagon_V6_vdmpyhisat :
+Hexagon_v512v1024i_Intrinsic<"HEXAGON_V6_vdmpyhisat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhisat_128B,VI_ftype_VDSI,2)
+// tag : V6_vdmpyhisat_128B
+def int_hexagon_V6_vdmpyhisat_128B :
+Hexagon_v1024v2048i_Intrinsic<"HEXAGON_V6_vdmpyhisat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhisat_acc,VI_ftype_VIVDSI,3)
+// tag : V6_vdmpyhisat_acc
+def int_hexagon_V6_vdmpyhisat_acc :
+Hexagon_v512v512v1024i_Intrinsic<"HEXAGON_V6_vdmpyhisat_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhisat_acc_128B,VI_ftype_VIVDSI,3)
+// tag : V6_vdmpyhisat_acc_128B
+def int_hexagon_V6_vdmpyhisat_acc_128B :
+Hexagon_v1024v1024v2048i_Intrinsic<"HEXAGON_V6_vdmpyhisat_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhsusat,VI_ftype_VISI,2)
+// tag : V6_vdmpyhsusat
+def int_hexagon_V6_vdmpyhsusat :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vdmpyhsusat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhsusat_128B,VI_ftype_VISI,2)
+// tag : V6_vdmpyhsusat_128B
+def int_hexagon_V6_vdmpyhsusat_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vdmpyhsusat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhsusat_acc,VI_ftype_VIVISI,3)
+// tag : V6_vdmpyhsusat_acc
+def int_hexagon_V6_vdmpyhsusat_acc :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vdmpyhsusat_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhsusat_acc_128B,VI_ftype_VIVISI,3)
+// tag : V6_vdmpyhsusat_acc_128B
+def int_hexagon_V6_vdmpyhsusat_acc_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vdmpyhsusat_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhsuisat,VI_ftype_VDSI,2)
+// tag : V6_vdmpyhsuisat
+def int_hexagon_V6_vdmpyhsuisat :
+Hexagon_v512v1024i_Intrinsic<"HEXAGON_V6_vdmpyhsuisat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhsuisat_128B,VI_ftype_VDSI,2)
+// tag : V6_vdmpyhsuisat_128B
+def int_hexagon_V6_vdmpyhsuisat_128B :
+Hexagon_v1024v2048i_Intrinsic<"HEXAGON_V6_vdmpyhsuisat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhsuisat_acc,VI_ftype_VIVDSI,3)
+// tag : V6_vdmpyhsuisat_acc
+def int_hexagon_V6_vdmpyhsuisat_acc :
+Hexagon_v512v512v1024i_Intrinsic<"HEXAGON_V6_vdmpyhsuisat_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdmpyhsuisat_acc_128B,VI_ftype_VIVDSI,3)
+// tag : V6_vdmpyhsuisat_acc_128B
+def int_hexagon_V6_vdmpyhsuisat_acc_128B :
+Hexagon_v1024v1024v2048i_Intrinsic<"HEXAGON_V6_vdmpyhsuisat_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vtmpyb,VD_ftype_VDSI,2)
+// tag : V6_vtmpyb
+def int_hexagon_V6_vtmpyb :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vtmpyb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vtmpyb_128B,VD_ftype_VDSI,2)
+// tag : V6_vtmpyb_128B
+def int_hexagon_V6_vtmpyb_128B :
+Hexagon_v2048v2048i_Intrinsic<"HEXAGON_V6_vtmpyb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vtmpyb_acc,VD_ftype_VDVDSI,3)
+// tag : V6_vtmpyb_acc
+def int_hexagon_V6_vtmpyb_acc :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vtmpyb_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vtmpyb_acc_128B,VD_ftype_VDVDSI,3)
+// tag : V6_vtmpyb_acc_128B
+def int_hexagon_V6_vtmpyb_acc_128B :
+Hexagon_v2048v2048v2048i_Intrinsic<"HEXAGON_V6_vtmpyb_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vtmpybus,VD_ftype_VDSI,2)
+// tag : V6_vtmpybus
+def int_hexagon_V6_vtmpybus :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vtmpybus">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vtmpybus_128B,VD_ftype_VDSI,2)
+// tag : V6_vtmpybus_128B
+def int_hexagon_V6_vtmpybus_128B :
+Hexagon_v2048v2048i_Intrinsic<"HEXAGON_V6_vtmpybus_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vtmpybus_acc,VD_ftype_VDVDSI,3)
+// tag : V6_vtmpybus_acc
+def int_hexagon_V6_vtmpybus_acc :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vtmpybus_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vtmpybus_acc_128B,VD_ftype_VDVDSI,3)
+// tag : V6_vtmpybus_acc_128B
+def int_hexagon_V6_vtmpybus_acc_128B :
+Hexagon_v2048v2048v2048i_Intrinsic<"HEXAGON_V6_vtmpybus_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vtmpyhb,VD_ftype_VDSI,2)
+// tag : V6_vtmpyhb
+def int_hexagon_V6_vtmpyhb :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vtmpyhb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vtmpyhb_128B,VD_ftype_VDSI,2)
+// tag : V6_vtmpyhb_128B
+def int_hexagon_V6_vtmpyhb_128B :
+Hexagon_v2048v2048i_Intrinsic<"HEXAGON_V6_vtmpyhb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vtmpyhb_acc,VD_ftype_VDVDSI,3)
+// tag : V6_vtmpyhb_acc
+def int_hexagon_V6_vtmpyhb_acc :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vtmpyhb_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vtmpyhb_acc_128B,VD_ftype_VDVDSI,3)
+// tag : V6_vtmpyhb_acc_128B
+def int_hexagon_V6_vtmpyhb_acc_128B :
+Hexagon_v2048v2048v2048i_Intrinsic<"HEXAGON_V6_vtmpyhb_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyub,VI_ftype_VISI,2)
+// tag : V6_vrmpyub
+def int_hexagon_V6_vrmpyub :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vrmpyub">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyub_128B,VI_ftype_VISI,2)
+// tag : V6_vrmpyub_128B
+def int_hexagon_V6_vrmpyub_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vrmpyub_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyub_acc,VI_ftype_VIVISI,3)
+// tag : V6_vrmpyub_acc
+def int_hexagon_V6_vrmpyub_acc :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vrmpyub_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyub_acc_128B,VI_ftype_VIVISI,3)
+// tag : V6_vrmpyub_acc_128B
+def int_hexagon_V6_vrmpyub_acc_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vrmpyub_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyubv,VI_ftype_VIVI,2)
+// tag : V6_vrmpyubv
+def int_hexagon_V6_vrmpyubv :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vrmpyubv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyubv_128B,VI_ftype_VIVI,2)
+// tag : V6_vrmpyubv_128B
+def int_hexagon_V6_vrmpyubv_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vrmpyubv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyubv_acc,VI_ftype_VIVIVI,3)
+// tag : V6_vrmpyubv_acc
+def int_hexagon_V6_vrmpyubv_acc :
+Hexagon_v512v512v512v512_Intrinsic<"HEXAGON_V6_vrmpyubv_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyubv_acc_128B,VI_ftype_VIVIVI,3)
+// tag : V6_vrmpyubv_acc_128B
+def int_hexagon_V6_vrmpyubv_acc_128B :
+Hexagon_v1024v1024v1024v1024_Intrinsic<"HEXAGON_V6_vrmpyubv_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybv,VI_ftype_VIVI,2)
+// tag : V6_vrmpybv
+def int_hexagon_V6_vrmpybv :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vrmpybv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybv_128B,VI_ftype_VIVI,2)
+// tag : V6_vrmpybv_128B
+def int_hexagon_V6_vrmpybv_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vrmpybv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybv_acc,VI_ftype_VIVIVI,3)
+// tag : V6_vrmpybv_acc
+def int_hexagon_V6_vrmpybv_acc :
+Hexagon_v512v512v512v512_Intrinsic<"HEXAGON_V6_vrmpybv_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybv_acc_128B,VI_ftype_VIVIVI,3)
+// tag : V6_vrmpybv_acc_128B
+def int_hexagon_V6_vrmpybv_acc_128B :
+Hexagon_v1024v1024v1024v1024_Intrinsic<"HEXAGON_V6_vrmpybv_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyubi,VD_ftype_VDSISI,3)
+// tag : V6_vrmpyubi
+def int_hexagon_V6_vrmpyubi :
+Hexagon_v1024v1024ii_Intrinsic<"HEXAGON_V6_vrmpyubi">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyubi_128B,VD_ftype_VDSISI,3)
+// tag : V6_vrmpyubi_128B
+def int_hexagon_V6_vrmpyubi_128B :
+Hexagon_v2048v2048ii_Intrinsic<"HEXAGON_V6_vrmpyubi_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyubi_acc,VD_ftype_VDVDSISI,4)
+// tag : V6_vrmpyubi_acc
+def int_hexagon_V6_vrmpyubi_acc :
+Hexagon_v1024v1024v1024ii_Intrinsic<"HEXAGON_V6_vrmpyubi_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyubi_acc_128B,VD_ftype_VDVDSISI,4)
+// tag : V6_vrmpyubi_acc_128B
+def int_hexagon_V6_vrmpyubi_acc_128B :
+Hexagon_v2048v2048v2048ii_Intrinsic<"HEXAGON_V6_vrmpyubi_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybus,VI_ftype_VISI,2)
+// tag : V6_vrmpybus
+def int_hexagon_V6_vrmpybus :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vrmpybus">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybus_128B,VI_ftype_VISI,2)
+// tag : V6_vrmpybus_128B
+def int_hexagon_V6_vrmpybus_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vrmpybus_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybus_acc,VI_ftype_VIVISI,3)
+// tag : V6_vrmpybus_acc
+def int_hexagon_V6_vrmpybus_acc :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vrmpybus_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybus_acc_128B,VI_ftype_VIVISI,3)
+// tag : V6_vrmpybus_acc_128B
+def int_hexagon_V6_vrmpybus_acc_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vrmpybus_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybusi,VD_ftype_VDSISI,3)
+// tag : V6_vrmpybusi
+def int_hexagon_V6_vrmpybusi :
+Hexagon_v1024v1024ii_Intrinsic<"HEXAGON_V6_vrmpybusi">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybusi_128B,VD_ftype_VDSISI,3)
+// tag : V6_vrmpybusi_128B
+def int_hexagon_V6_vrmpybusi_128B :
+Hexagon_v2048v2048ii_Intrinsic<"HEXAGON_V6_vrmpybusi_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybusi_acc,VD_ftype_VDVDSISI,4)
+// tag : V6_vrmpybusi_acc
+def int_hexagon_V6_vrmpybusi_acc :
+Hexagon_v1024v1024v1024ii_Intrinsic<"HEXAGON_V6_vrmpybusi_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybusi_acc_128B,VD_ftype_VDVDSISI,4)
+// tag : V6_vrmpybusi_acc_128B
+def int_hexagon_V6_vrmpybusi_acc_128B :
+Hexagon_v2048v2048v2048ii_Intrinsic<"HEXAGON_V6_vrmpybusi_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybusv,VI_ftype_VIVI,2)
+// tag : V6_vrmpybusv
+def int_hexagon_V6_vrmpybusv :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vrmpybusv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybusv_128B,VI_ftype_VIVI,2)
+// tag : V6_vrmpybusv_128B
+def int_hexagon_V6_vrmpybusv_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vrmpybusv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybusv_acc,VI_ftype_VIVIVI,3)
+// tag : V6_vrmpybusv_acc
+def int_hexagon_V6_vrmpybusv_acc :
+Hexagon_v512v512v512v512_Intrinsic<"HEXAGON_V6_vrmpybusv_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybusv_acc_128B,VI_ftype_VIVIVI,3)
+// tag : V6_vrmpybusv_acc_128B
+def int_hexagon_V6_vrmpybusv_acc_128B :
+Hexagon_v1024v1024v1024v1024_Intrinsic<"HEXAGON_V6_vrmpybusv_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdsaduh,VD_ftype_VDSI,2)
+// tag : V6_vdsaduh
+def int_hexagon_V6_vdsaduh :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vdsaduh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdsaduh_128B,VD_ftype_VDSI,2)
+// tag : V6_vdsaduh_128B
+def int_hexagon_V6_vdsaduh_128B :
+Hexagon_v2048v2048i_Intrinsic<"HEXAGON_V6_vdsaduh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdsaduh_acc,VD_ftype_VDVDSI,3)
+// tag : V6_vdsaduh_acc
+def int_hexagon_V6_vdsaduh_acc :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vdsaduh_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdsaduh_acc_128B,VD_ftype_VDVDSI,3)
+// tag : V6_vdsaduh_acc_128B
+def int_hexagon_V6_vdsaduh_acc_128B :
+Hexagon_v2048v2048v2048i_Intrinsic<"HEXAGON_V6_vdsaduh_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrsadubi,VD_ftype_VDSISI,3)
+// tag : V6_vrsadubi
+def int_hexagon_V6_vrsadubi :
+Hexagon_v1024v1024ii_Intrinsic<"HEXAGON_V6_vrsadubi">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrsadubi_128B,VD_ftype_VDSISI,3)
+// tag : V6_vrsadubi_128B
+def int_hexagon_V6_vrsadubi_128B :
+Hexagon_v2048v2048ii_Intrinsic<"HEXAGON_V6_vrsadubi_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrsadubi_acc,VD_ftype_VDVDSISI,4)
+// tag : V6_vrsadubi_acc
+def int_hexagon_V6_vrsadubi_acc :
+Hexagon_v1024v1024v1024ii_Intrinsic<"HEXAGON_V6_vrsadubi_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrsadubi_acc_128B,VD_ftype_VDVDSISI,4)
+// tag : V6_vrsadubi_acc_128B
+def int_hexagon_V6_vrsadubi_acc_128B :
+Hexagon_v2048v2048v2048ii_Intrinsic<"HEXAGON_V6_vrsadubi_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrw,VI_ftype_VISI,2)
+// tag : V6_vasrw
+def int_hexagon_V6_vasrw :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vasrw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrw_128B,VI_ftype_VISI,2)
+// tag : V6_vasrw_128B
+def int_hexagon_V6_vasrw_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vasrw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaslw,VI_ftype_VISI,2)
+// tag : V6_vaslw
+def int_hexagon_V6_vaslw :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vaslw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaslw_128B,VI_ftype_VISI,2)
+// tag : V6_vaslw_128B
+def int_hexagon_V6_vaslw_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vaslw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlsrw,VI_ftype_VISI,2)
+// tag : V6_vlsrw
+def int_hexagon_V6_vlsrw :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vlsrw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlsrw_128B,VI_ftype_VISI,2)
+// tag : V6_vlsrw_128B
+def int_hexagon_V6_vlsrw_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vlsrw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrwv,VI_ftype_VIVI,2)
+// tag : V6_vasrwv
+def int_hexagon_V6_vasrwv :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vasrwv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrwv_128B,VI_ftype_VIVI,2)
+// tag : V6_vasrwv_128B
+def int_hexagon_V6_vasrwv_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vasrwv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaslwv,VI_ftype_VIVI,2)
+// tag : V6_vaslwv
+def int_hexagon_V6_vaslwv :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vaslwv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaslwv_128B,VI_ftype_VIVI,2)
+// tag : V6_vaslwv_128B
+def int_hexagon_V6_vaslwv_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaslwv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlsrwv,VI_ftype_VIVI,2)
+// tag : V6_vlsrwv
+def int_hexagon_V6_vlsrwv :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vlsrwv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlsrwv_128B,VI_ftype_VIVI,2)
+// tag : V6_vlsrwv_128B
+def int_hexagon_V6_vlsrwv_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vlsrwv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrh,VI_ftype_VISI,2)
+// tag : V6_vasrh
+def int_hexagon_V6_vasrh :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vasrh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrh_128B,VI_ftype_VISI,2)
+// tag : V6_vasrh_128B
+def int_hexagon_V6_vasrh_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vasrh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaslh,VI_ftype_VISI,2)
+// tag : V6_vaslh
+def int_hexagon_V6_vaslh :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vaslh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaslh_128B,VI_ftype_VISI,2)
+// tag : V6_vaslh_128B
+def int_hexagon_V6_vaslh_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vaslh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlsrh,VI_ftype_VISI,2)
+// tag : V6_vlsrh
+def int_hexagon_V6_vlsrh :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vlsrh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlsrh_128B,VI_ftype_VISI,2)
+// tag : V6_vlsrh_128B
+def int_hexagon_V6_vlsrh_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vlsrh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrhv,VI_ftype_VIVI,2)
+// tag : V6_vasrhv
+def int_hexagon_V6_vasrhv :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vasrhv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrhv_128B,VI_ftype_VIVI,2)
+// tag : V6_vasrhv_128B
+def int_hexagon_V6_vasrhv_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vasrhv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaslhv,VI_ftype_VIVI,2)
+// tag : V6_vaslhv
+def int_hexagon_V6_vaslhv :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vaslhv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaslhv_128B,VI_ftype_VIVI,2)
+// tag : V6_vaslhv_128B
+def int_hexagon_V6_vaslhv_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaslhv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlsrhv,VI_ftype_VIVI,2)
+// tag : V6_vlsrhv
+def int_hexagon_V6_vlsrhv :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vlsrhv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlsrhv_128B,VI_ftype_VIVI,2)
+// tag : V6_vlsrhv_128B
+def int_hexagon_V6_vlsrhv_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vlsrhv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrwh,VI_ftype_VIVISI,3)
+// tag : V6_vasrwh
+def int_hexagon_V6_vasrwh :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vasrwh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrwh_128B,VI_ftype_VIVISI,3)
+// tag : V6_vasrwh_128B
+def int_hexagon_V6_vasrwh_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vasrwh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrwhsat,VI_ftype_VIVISI,3)
+// tag : V6_vasrwhsat
+def int_hexagon_V6_vasrwhsat :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vasrwhsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrwhsat_128B,VI_ftype_VIVISI,3)
+// tag : V6_vasrwhsat_128B
+def int_hexagon_V6_vasrwhsat_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vasrwhsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrwhrndsat,VI_ftype_VIVISI,3)
+// tag : V6_vasrwhrndsat
+def int_hexagon_V6_vasrwhrndsat :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vasrwhrndsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrwhrndsat_128B,VI_ftype_VIVISI,3)
+// tag : V6_vasrwhrndsat_128B
+def int_hexagon_V6_vasrwhrndsat_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vasrwhrndsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrwuhsat,VI_ftype_VIVISI,3)
+// tag : V6_vasrwuhsat
+def int_hexagon_V6_vasrwuhsat :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vasrwuhsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrwuhsat_128B,VI_ftype_VIVISI,3)
+// tag : V6_vasrwuhsat_128B
+def int_hexagon_V6_vasrwuhsat_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vasrwuhsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vroundwh,VI_ftype_VIVI,2)
+// tag : V6_vroundwh
+def int_hexagon_V6_vroundwh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vroundwh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vroundwh_128B,VI_ftype_VIVI,2)
+// tag : V6_vroundwh_128B
+def int_hexagon_V6_vroundwh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vroundwh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vroundwuh,VI_ftype_VIVI,2)
+// tag : V6_vroundwuh
+def int_hexagon_V6_vroundwuh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vroundwuh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vroundwuh_128B,VI_ftype_VIVI,2)
+// tag : V6_vroundwuh_128B
+def int_hexagon_V6_vroundwuh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vroundwuh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrhubsat,VI_ftype_VIVISI,3)
+// tag : V6_vasrhubsat
+def int_hexagon_V6_vasrhubsat :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vasrhubsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrhubsat_128B,VI_ftype_VIVISI,3)
+// tag : V6_vasrhubsat_128B
+def int_hexagon_V6_vasrhubsat_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vasrhubsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrhubrndsat,VI_ftype_VIVISI,3)
+// tag : V6_vasrhubrndsat
+def int_hexagon_V6_vasrhubrndsat :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vasrhubrndsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrhubrndsat_128B,VI_ftype_VIVISI,3)
+// tag : V6_vasrhubrndsat_128B
+def int_hexagon_V6_vasrhubrndsat_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vasrhubrndsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrhbrndsat,VI_ftype_VIVISI,3)
+// tag : V6_vasrhbrndsat
+def int_hexagon_V6_vasrhbrndsat :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vasrhbrndsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrhbrndsat_128B,VI_ftype_VIVISI,3)
+// tag : V6_vasrhbrndsat_128B
+def int_hexagon_V6_vasrhbrndsat_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vasrhbrndsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vroundhb,VI_ftype_VIVI,2)
+// tag : V6_vroundhb
+def int_hexagon_V6_vroundhb :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vroundhb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vroundhb_128B,VI_ftype_VIVI,2)
+// tag : V6_vroundhb_128B
+def int_hexagon_V6_vroundhb_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vroundhb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vroundhub,VI_ftype_VIVI,2)
+// tag : V6_vroundhub
+def int_hexagon_V6_vroundhub :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vroundhub">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vroundhub_128B,VI_ftype_VIVI,2)
+// tag : V6_vroundhub_128B
+def int_hexagon_V6_vroundhub_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vroundhub_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaslw_acc,VI_ftype_VIVISI,3)
+// tag : V6_vaslw_acc
+def int_hexagon_V6_vaslw_acc :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vaslw_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaslw_acc_128B,VI_ftype_VIVISI,3)
+// tag : V6_vaslw_acc_128B
+def int_hexagon_V6_vaslw_acc_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vaslw_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrw_acc,VI_ftype_VIVISI,3)
+// tag : V6_vasrw_acc
+def int_hexagon_V6_vasrw_acc :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vasrw_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrw_acc_128B,VI_ftype_VIVISI,3)
+// tag : V6_vasrw_acc_128B
+def int_hexagon_V6_vasrw_acc_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vasrw_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddb,VI_ftype_VIVI,2)
+// tag : V6_vaddb
+def int_hexagon_V6_vaddb :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vaddb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddb_128B,VI_ftype_VIVI,2)
+// tag : V6_vaddb_128B
+def int_hexagon_V6_vaddb_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubb,VI_ftype_VIVI,2)
+// tag : V6_vsubb
+def int_hexagon_V6_vsubb :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vsubb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubb_128B,VI_ftype_VIVI,2)
+// tag : V6_vsubb_128B
+def int_hexagon_V6_vsubb_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddb_dv,VD_ftype_VDVD,2)
+// tag : V6_vaddb_dv
+def int_hexagon_V6_vaddb_dv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddb_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddb_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vaddb_dv_128B
+def int_hexagon_V6_vaddb_dv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vaddb_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubb_dv,VD_ftype_VDVD,2)
+// tag : V6_vsubb_dv
+def int_hexagon_V6_vsubb_dv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubb_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubb_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vsubb_dv_128B
+def int_hexagon_V6_vsubb_dv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vsubb_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddh,VI_ftype_VIVI,2)
+// tag : V6_vaddh
+def int_hexagon_V6_vaddh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vaddh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddh_128B,VI_ftype_VIVI,2)
+// tag : V6_vaddh_128B
+def int_hexagon_V6_vaddh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubh,VI_ftype_VIVI,2)
+// tag : V6_vsubh
+def int_hexagon_V6_vsubh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vsubh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubh_128B,VI_ftype_VIVI,2)
+// tag : V6_vsubh_128B
+def int_hexagon_V6_vsubh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddh_dv,VD_ftype_VDVD,2)
+// tag : V6_vaddh_dv
+def int_hexagon_V6_vaddh_dv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddh_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddh_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vaddh_dv_128B
+def int_hexagon_V6_vaddh_dv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vaddh_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubh_dv,VD_ftype_VDVD,2)
+// tag : V6_vsubh_dv
+def int_hexagon_V6_vsubh_dv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubh_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubh_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vsubh_dv_128B
+def int_hexagon_V6_vsubh_dv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vsubh_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddw,VI_ftype_VIVI,2)
+// tag : V6_vaddw
+def int_hexagon_V6_vaddw :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vaddw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddw_128B,VI_ftype_VIVI,2)
+// tag : V6_vaddw_128B
+def int_hexagon_V6_vaddw_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubw,VI_ftype_VIVI,2)
+// tag : V6_vsubw
+def int_hexagon_V6_vsubw :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vsubw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubw_128B,VI_ftype_VIVI,2)
+// tag : V6_vsubw_128B
+def int_hexagon_V6_vsubw_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddw_dv,VD_ftype_VDVD,2)
+// tag : V6_vaddw_dv
+def int_hexagon_V6_vaddw_dv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddw_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddw_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vaddw_dv_128B
+def int_hexagon_V6_vaddw_dv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vaddw_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubw_dv,VD_ftype_VDVD,2)
+// tag : V6_vsubw_dv
+def int_hexagon_V6_vsubw_dv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubw_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubw_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vsubw_dv_128B
+def int_hexagon_V6_vsubw_dv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vsubw_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddubsat,VI_ftype_VIVI,2)
+// tag : V6_vaddubsat
+def int_hexagon_V6_vaddubsat :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vaddubsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddubsat_128B,VI_ftype_VIVI,2)
+// tag : V6_vaddubsat_128B
+def int_hexagon_V6_vaddubsat_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddubsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddubsat_dv,VD_ftype_VDVD,2)
+// tag : V6_vaddubsat_dv
+def int_hexagon_V6_vaddubsat_dv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddubsat_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddubsat_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vaddubsat_dv_128B
+def int_hexagon_V6_vaddubsat_dv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vaddubsat_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsububsat,VI_ftype_VIVI,2)
+// tag : V6_vsububsat
+def int_hexagon_V6_vsububsat :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vsububsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsububsat_128B,VI_ftype_VIVI,2)
+// tag : V6_vsububsat_128B
+def int_hexagon_V6_vsububsat_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsububsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsububsat_dv,VD_ftype_VDVD,2)
+// tag : V6_vsububsat_dv
+def int_hexagon_V6_vsububsat_dv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsububsat_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsububsat_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vsububsat_dv_128B
+def int_hexagon_V6_vsububsat_dv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vsububsat_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vadduhsat,VI_ftype_VIVI,2)
+// tag : V6_vadduhsat
+def int_hexagon_V6_vadduhsat :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vadduhsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vadduhsat_128B,VI_ftype_VIVI,2)
+// tag : V6_vadduhsat_128B
+def int_hexagon_V6_vadduhsat_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vadduhsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vadduhsat_dv,VD_ftype_VDVD,2)
+// tag : V6_vadduhsat_dv
+def int_hexagon_V6_vadduhsat_dv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vadduhsat_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vadduhsat_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vadduhsat_dv_128B
+def int_hexagon_V6_vadduhsat_dv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vadduhsat_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubuhsat,VI_ftype_VIVI,2)
+// tag : V6_vsubuhsat
+def int_hexagon_V6_vsubuhsat :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vsubuhsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubuhsat_128B,VI_ftype_VIVI,2)
+// tag : V6_vsubuhsat_128B
+def int_hexagon_V6_vsubuhsat_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubuhsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubuhsat_dv,VD_ftype_VDVD,2)
+// tag : V6_vsubuhsat_dv
+def int_hexagon_V6_vsubuhsat_dv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubuhsat_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubuhsat_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vsubuhsat_dv_128B
+def int_hexagon_V6_vsubuhsat_dv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vsubuhsat_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddhsat,VI_ftype_VIVI,2)
+// tag : V6_vaddhsat
+def int_hexagon_V6_vaddhsat :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vaddhsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddhsat_128B,VI_ftype_VIVI,2)
+// tag : V6_vaddhsat_128B
+def int_hexagon_V6_vaddhsat_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddhsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddhsat_dv,VD_ftype_VDVD,2)
+// tag : V6_vaddhsat_dv
+def int_hexagon_V6_vaddhsat_dv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddhsat_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddhsat_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vaddhsat_dv_128B
+def int_hexagon_V6_vaddhsat_dv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vaddhsat_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubhsat,VI_ftype_VIVI,2)
+// tag : V6_vsubhsat
+def int_hexagon_V6_vsubhsat :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vsubhsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubhsat_128B,VI_ftype_VIVI,2)
+// tag : V6_vsubhsat_128B
+def int_hexagon_V6_vsubhsat_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubhsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubhsat_dv,VD_ftype_VDVD,2)
+// tag : V6_vsubhsat_dv
+def int_hexagon_V6_vsubhsat_dv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubhsat_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubhsat_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vsubhsat_dv_128B
+def int_hexagon_V6_vsubhsat_dv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vsubhsat_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddwsat,VI_ftype_VIVI,2)
+// tag : V6_vaddwsat
+def int_hexagon_V6_vaddwsat :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vaddwsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddwsat_128B,VI_ftype_VIVI,2)
+// tag : V6_vaddwsat_128B
+def int_hexagon_V6_vaddwsat_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddwsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddwsat_dv,VD_ftype_VDVD,2)
+// tag : V6_vaddwsat_dv
+def int_hexagon_V6_vaddwsat_dv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddwsat_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddwsat_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vaddwsat_dv_128B
+def int_hexagon_V6_vaddwsat_dv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vaddwsat_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubwsat,VI_ftype_VIVI,2)
+// tag : V6_vsubwsat
+def int_hexagon_V6_vsubwsat :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vsubwsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubwsat_128B,VI_ftype_VIVI,2)
+// tag : V6_vsubwsat_128B
+def int_hexagon_V6_vsubwsat_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubwsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubwsat_dv,VD_ftype_VDVD,2)
+// tag : V6_vsubwsat_dv
+def int_hexagon_V6_vsubwsat_dv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubwsat_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubwsat_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vsubwsat_dv_128B
+def int_hexagon_V6_vsubwsat_dv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vsubwsat_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavgub,VI_ftype_VIVI,2)
+// tag : V6_vavgub
+def int_hexagon_V6_vavgub :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vavgub">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavgub_128B,VI_ftype_VIVI,2)
+// tag : V6_vavgub_128B
+def int_hexagon_V6_vavgub_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vavgub_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavgubrnd,VI_ftype_VIVI,2)
+// tag : V6_vavgubrnd
+def int_hexagon_V6_vavgubrnd :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vavgubrnd">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavgubrnd_128B,VI_ftype_VIVI,2)
+// tag : V6_vavgubrnd_128B
+def int_hexagon_V6_vavgubrnd_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vavgubrnd_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavguh,VI_ftype_VIVI,2)
+// tag : V6_vavguh
+def int_hexagon_V6_vavguh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vavguh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavguh_128B,VI_ftype_VIVI,2)
+// tag : V6_vavguh_128B
+def int_hexagon_V6_vavguh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vavguh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavguhrnd,VI_ftype_VIVI,2)
+// tag : V6_vavguhrnd
+def int_hexagon_V6_vavguhrnd :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vavguhrnd">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavguhrnd_128B,VI_ftype_VIVI,2)
+// tag : V6_vavguhrnd_128B
+def int_hexagon_V6_vavguhrnd_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vavguhrnd_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavgh,VI_ftype_VIVI,2)
+// tag : V6_vavgh
+def int_hexagon_V6_vavgh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vavgh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavgh_128B,VI_ftype_VIVI,2)
+// tag : V6_vavgh_128B
+def int_hexagon_V6_vavgh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vavgh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavghrnd,VI_ftype_VIVI,2)
+// tag : V6_vavghrnd
+def int_hexagon_V6_vavghrnd :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vavghrnd">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavghrnd_128B,VI_ftype_VIVI,2)
+// tag : V6_vavghrnd_128B
+def int_hexagon_V6_vavghrnd_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vavghrnd_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vnavgh,VI_ftype_VIVI,2)
+// tag : V6_vnavgh
+def int_hexagon_V6_vnavgh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vnavgh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vnavgh_128B,VI_ftype_VIVI,2)
+// tag : V6_vnavgh_128B
+def int_hexagon_V6_vnavgh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vnavgh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavgw,VI_ftype_VIVI,2)
+// tag : V6_vavgw
+def int_hexagon_V6_vavgw :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vavgw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavgw_128B,VI_ftype_VIVI,2)
+// tag : V6_vavgw_128B
+def int_hexagon_V6_vavgw_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vavgw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavgwrnd,VI_ftype_VIVI,2)
+// tag : V6_vavgwrnd
+def int_hexagon_V6_vavgwrnd :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vavgwrnd">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavgwrnd_128B,VI_ftype_VIVI,2)
+// tag : V6_vavgwrnd_128B
+def int_hexagon_V6_vavgwrnd_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vavgwrnd_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vnavgw,VI_ftype_VIVI,2)
+// tag : V6_vnavgw
+def int_hexagon_V6_vnavgw :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vnavgw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vnavgw_128B,VI_ftype_VIVI,2)
+// tag : V6_vnavgw_128B
+def int_hexagon_V6_vnavgw_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vnavgw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsdiffub,VI_ftype_VIVI,2)
+// tag : V6_vabsdiffub
+def int_hexagon_V6_vabsdiffub :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vabsdiffub">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsdiffub_128B,VI_ftype_VIVI,2)
+// tag : V6_vabsdiffub_128B
+def int_hexagon_V6_vabsdiffub_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vabsdiffub_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsdiffuh,VI_ftype_VIVI,2)
+// tag : V6_vabsdiffuh
+def int_hexagon_V6_vabsdiffuh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vabsdiffuh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsdiffuh_128B,VI_ftype_VIVI,2)
+// tag : V6_vabsdiffuh_128B
+def int_hexagon_V6_vabsdiffuh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vabsdiffuh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsdiffh,VI_ftype_VIVI,2)
+// tag : V6_vabsdiffh
+def int_hexagon_V6_vabsdiffh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vabsdiffh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsdiffh_128B,VI_ftype_VIVI,2)
+// tag : V6_vabsdiffh_128B
+def int_hexagon_V6_vabsdiffh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vabsdiffh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsdiffw,VI_ftype_VIVI,2)
+// tag : V6_vabsdiffw
+def int_hexagon_V6_vabsdiffw :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vabsdiffw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsdiffw_128B,VI_ftype_VIVI,2)
+// tag : V6_vabsdiffw_128B
+def int_hexagon_V6_vabsdiffw_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vabsdiffw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vnavgub,VI_ftype_VIVI,2)
+// tag : V6_vnavgub
+def int_hexagon_V6_vnavgub :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vnavgub">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vnavgub_128B,VI_ftype_VIVI,2)
+// tag : V6_vnavgub_128B
+def int_hexagon_V6_vnavgub_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vnavgub_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddubh,VD_ftype_VIVI,2)
+// tag : V6_vaddubh
+def int_hexagon_V6_vaddubh :
+Hexagon_v1024v512v512_Intrinsic<"HEXAGON_V6_vaddubh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddubh_128B,VD_ftype_VIVI,2)
+// tag : V6_vaddubh_128B
+def int_hexagon_V6_vaddubh_128B :
+Hexagon_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vaddubh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsububh,VD_ftype_VIVI,2)
+// tag : V6_vsububh
+def int_hexagon_V6_vsububh :
+Hexagon_v1024v512v512_Intrinsic<"HEXAGON_V6_vsububh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsububh_128B,VD_ftype_VIVI,2)
+// tag : V6_vsububh_128B
+def int_hexagon_V6_vsububh_128B :
+Hexagon_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vsububh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddhw,VD_ftype_VIVI,2)
+// tag : V6_vaddhw
+def int_hexagon_V6_vaddhw :
+Hexagon_v1024v512v512_Intrinsic<"HEXAGON_V6_vaddhw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddhw_128B,VD_ftype_VIVI,2)
+// tag : V6_vaddhw_128B
+def int_hexagon_V6_vaddhw_128B :
+Hexagon_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vaddhw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubhw,VD_ftype_VIVI,2)
+// tag : V6_vsubhw
+def int_hexagon_V6_vsubhw :
+Hexagon_v1024v512v512_Intrinsic<"HEXAGON_V6_vsubhw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubhw_128B,VD_ftype_VIVI,2)
+// tag : V6_vsubhw_128B
+def int_hexagon_V6_vsubhw_128B :
+Hexagon_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vsubhw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vadduhw,VD_ftype_VIVI,2)
+// tag : V6_vadduhw
+def int_hexagon_V6_vadduhw :
+Hexagon_v1024v512v512_Intrinsic<"HEXAGON_V6_vadduhw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vadduhw_128B,VD_ftype_VIVI,2)
+// tag : V6_vadduhw_128B
+def int_hexagon_V6_vadduhw_128B :
+Hexagon_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vadduhw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubuhw,VD_ftype_VIVI,2)
+// tag : V6_vsubuhw
+def int_hexagon_V6_vsubuhw :
+Hexagon_v1024v512v512_Intrinsic<"HEXAGON_V6_vsubuhw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubuhw_128B,VD_ftype_VIVI,2)
+// tag : V6_vsubuhw_128B
+def int_hexagon_V6_vsubuhw_128B :
+Hexagon_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vsubuhw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vd0,VI_ftype_,0)
+// tag : V6_vd0
+def int_hexagon_V6_vd0 :
+Hexagon_v512_Intrinsic<"HEXAGON_V6_vd0">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vd0_128B,VI_ftype_,0)
+// tag : V6_vd0_128B
+def int_hexagon_V6_vd0_128B :
+Hexagon_v1024_Intrinsic<"HEXAGON_V6_vd0_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddbq,VI_ftype_QVVIVI,3)
+// tag : V6_vaddbq
+def int_hexagon_V6_vaddbq :
+Hexagon_v512v64iv512v512_Intrinsic<"HEXAGON_V6_vaddbq">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddbq_128B,VI_ftype_QVVIVI,3)
+// tag : V6_vaddbq_128B
+def int_hexagon_V6_vaddbq_128B :
+Hexagon_v1024v128iv1024v1024_Intrinsic<"HEXAGON_V6_vaddbq_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubbq,VI_ftype_QVVIVI,3)
+// tag : V6_vsubbq
+def int_hexagon_V6_vsubbq :
+Hexagon_v512v64iv512v512_Intrinsic<"HEXAGON_V6_vsubbq">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubbq_128B,VI_ftype_QVVIVI,3)
+// tag : V6_vsubbq_128B
+def int_hexagon_V6_vsubbq_128B :
+Hexagon_v1024v128iv1024v1024_Intrinsic<"HEXAGON_V6_vsubbq_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddbnq,VI_ftype_QVVIVI,3)
+// tag : V6_vaddbnq
+def int_hexagon_V6_vaddbnq :
+Hexagon_v512v64iv512v512_Intrinsic<"HEXAGON_V6_vaddbnq">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddbnq_128B,VI_ftype_QVVIVI,3)
+// tag : V6_vaddbnq_128B
+def int_hexagon_V6_vaddbnq_128B :
+Hexagon_v1024v128iv1024v1024_Intrinsic<"HEXAGON_V6_vaddbnq_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubbnq,VI_ftype_QVVIVI,3)
+// tag : V6_vsubbnq
+def int_hexagon_V6_vsubbnq :
+Hexagon_v512v64iv512v512_Intrinsic<"HEXAGON_V6_vsubbnq">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubbnq_128B,VI_ftype_QVVIVI,3)
+// tag : V6_vsubbnq_128B
+def int_hexagon_V6_vsubbnq_128B :
+Hexagon_v1024v128iv1024v1024_Intrinsic<"HEXAGON_V6_vsubbnq_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddhq,VI_ftype_QVVIVI,3)
+// tag : V6_vaddhq
+def int_hexagon_V6_vaddhq :
+Hexagon_v512v64iv512v512_Intrinsic<"HEXAGON_V6_vaddhq">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddhq_128B,VI_ftype_QVVIVI,3)
+// tag : V6_vaddhq_128B
+def int_hexagon_V6_vaddhq_128B :
+Hexagon_v1024v128iv1024v1024_Intrinsic<"HEXAGON_V6_vaddhq_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubhq,VI_ftype_QVVIVI,3)
+// tag : V6_vsubhq
+def int_hexagon_V6_vsubhq :
+Hexagon_v512v64iv512v512_Intrinsic<"HEXAGON_V6_vsubhq">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubhq_128B,VI_ftype_QVVIVI,3)
+// tag : V6_vsubhq_128B
+def int_hexagon_V6_vsubhq_128B :
+Hexagon_v1024v128iv1024v1024_Intrinsic<"HEXAGON_V6_vsubhq_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddhnq,VI_ftype_QVVIVI,3)
+// tag : V6_vaddhnq
+def int_hexagon_V6_vaddhnq :
+Hexagon_v512v64iv512v512_Intrinsic<"HEXAGON_V6_vaddhnq">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddhnq_128B,VI_ftype_QVVIVI,3)
+// tag : V6_vaddhnq_128B
+def int_hexagon_V6_vaddhnq_128B :
+Hexagon_v1024v128iv1024v1024_Intrinsic<"HEXAGON_V6_vaddhnq_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubhnq,VI_ftype_QVVIVI,3)
+// tag : V6_vsubhnq
+def int_hexagon_V6_vsubhnq :
+Hexagon_v512v64iv512v512_Intrinsic<"HEXAGON_V6_vsubhnq">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubhnq_128B,VI_ftype_QVVIVI,3)
+// tag : V6_vsubhnq_128B
+def int_hexagon_V6_vsubhnq_128B :
+Hexagon_v1024v128iv1024v1024_Intrinsic<"HEXAGON_V6_vsubhnq_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddwq,VI_ftype_QVVIVI,3)
+// tag : V6_vaddwq
+def int_hexagon_V6_vaddwq :
+Hexagon_v512v64iv512v512_Intrinsic<"HEXAGON_V6_vaddwq">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddwq_128B,VI_ftype_QVVIVI,3)
+// tag : V6_vaddwq_128B
+def int_hexagon_V6_vaddwq_128B :
+Hexagon_v1024v128iv1024v1024_Intrinsic<"HEXAGON_V6_vaddwq_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubwq,VI_ftype_QVVIVI,3)
+// tag : V6_vsubwq
+def int_hexagon_V6_vsubwq :
+Hexagon_v512v64iv512v512_Intrinsic<"HEXAGON_V6_vsubwq">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubwq_128B,VI_ftype_QVVIVI,3)
+// tag : V6_vsubwq_128B
+def int_hexagon_V6_vsubwq_128B :
+Hexagon_v1024v128iv1024v1024_Intrinsic<"HEXAGON_V6_vsubwq_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddwnq,VI_ftype_QVVIVI,3)
+// tag : V6_vaddwnq
+def int_hexagon_V6_vaddwnq :
+Hexagon_v512v64iv512v512_Intrinsic<"HEXAGON_V6_vaddwnq">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddwnq_128B,VI_ftype_QVVIVI,3)
+// tag : V6_vaddwnq_128B
+def int_hexagon_V6_vaddwnq_128B :
+Hexagon_v1024v128iv1024v1024_Intrinsic<"HEXAGON_V6_vaddwnq_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubwnq,VI_ftype_QVVIVI,3)
+// tag : V6_vsubwnq
+def int_hexagon_V6_vsubwnq :
+Hexagon_v512v64iv512v512_Intrinsic<"HEXAGON_V6_vsubwnq">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubwnq_128B,VI_ftype_QVVIVI,3)
+// tag : V6_vsubwnq_128B
+def int_hexagon_V6_vsubwnq_128B :
+Hexagon_v1024v128iv1024v1024_Intrinsic<"HEXAGON_V6_vsubwnq_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsh,VI_ftype_VI,1)
+// tag : V6_vabsh
+def int_hexagon_V6_vabsh :
+Hexagon_v512v512_Intrinsic<"HEXAGON_V6_vabsh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsh_128B,VI_ftype_VI,1)
+// tag : V6_vabsh_128B
+def int_hexagon_V6_vabsh_128B :
+Hexagon_v1024v1024_Intrinsic<"HEXAGON_V6_vabsh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsh_sat,VI_ftype_VI,1)
+// tag : V6_vabsh_sat
+def int_hexagon_V6_vabsh_sat :
+Hexagon_v512v512_Intrinsic<"HEXAGON_V6_vabsh_sat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsh_sat_128B,VI_ftype_VI,1)
+// tag : V6_vabsh_sat_128B
+def int_hexagon_V6_vabsh_sat_128B :
+Hexagon_v1024v1024_Intrinsic<"HEXAGON_V6_vabsh_sat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsw,VI_ftype_VI,1)
+// tag : V6_vabsw
+def int_hexagon_V6_vabsw :
+Hexagon_v512v512_Intrinsic<"HEXAGON_V6_vabsw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsw_128B,VI_ftype_VI,1)
+// tag : V6_vabsw_128B
+def int_hexagon_V6_vabsw_128B :
+Hexagon_v1024v1024_Intrinsic<"HEXAGON_V6_vabsw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsw_sat,VI_ftype_VI,1)
+// tag : V6_vabsw_sat
+def int_hexagon_V6_vabsw_sat :
+Hexagon_v512v512_Intrinsic<"HEXAGON_V6_vabsw_sat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsw_sat_128B,VI_ftype_VI,1)
+// tag : V6_vabsw_sat_128B
+def int_hexagon_V6_vabsw_sat_128B :
+Hexagon_v1024v1024_Intrinsic<"HEXAGON_V6_vabsw_sat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpybv,VD_ftype_VIVI,2)
+// tag : V6_vmpybv
+def int_hexagon_V6_vmpybv :
+Hexagon_v1024v512v512_Intrinsic<"HEXAGON_V6_vmpybv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpybv_128B,VD_ftype_VIVI,2)
+// tag : V6_vmpybv_128B
+def int_hexagon_V6_vmpybv_128B :
+Hexagon_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vmpybv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpybv_acc,VD_ftype_VDVIVI,3)
+// tag : V6_vmpybv_acc
+def int_hexagon_V6_vmpybv_acc :
+Hexagon_v1024v1024v512v512_Intrinsic<"HEXAGON_V6_vmpybv_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpybv_acc_128B,VD_ftype_VDVIVI,3)
+// tag : V6_vmpybv_acc_128B
+def int_hexagon_V6_vmpybv_acc_128B :
+Hexagon_v2048v2048v1024v1024_Intrinsic<"HEXAGON_V6_vmpybv_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyubv,VD_ftype_VIVI,2)
+// tag : V6_vmpyubv
+def int_hexagon_V6_vmpyubv :
+Hexagon_v1024v512v512_Intrinsic<"HEXAGON_V6_vmpyubv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyubv_128B,VD_ftype_VIVI,2)
+// tag : V6_vmpyubv_128B
+def int_hexagon_V6_vmpyubv_128B :
+Hexagon_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vmpyubv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyubv_acc,VD_ftype_VDVIVI,3)
+// tag : V6_vmpyubv_acc
+def int_hexagon_V6_vmpyubv_acc :
+Hexagon_v1024v1024v512v512_Intrinsic<"HEXAGON_V6_vmpyubv_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyubv_acc_128B,VD_ftype_VDVIVI,3)
+// tag : V6_vmpyubv_acc_128B
+def int_hexagon_V6_vmpyubv_acc_128B :
+Hexagon_v2048v2048v1024v1024_Intrinsic<"HEXAGON_V6_vmpyubv_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpybusv,VD_ftype_VIVI,2)
+// tag : V6_vmpybusv
+def int_hexagon_V6_vmpybusv :
+Hexagon_v1024v512v512_Intrinsic<"HEXAGON_V6_vmpybusv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpybusv_128B,VD_ftype_VIVI,2)
+// tag : V6_vmpybusv_128B
+def int_hexagon_V6_vmpybusv_128B :
+Hexagon_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vmpybusv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpybusv_acc,VD_ftype_VDVIVI,3)
+// tag : V6_vmpybusv_acc
+def int_hexagon_V6_vmpybusv_acc :
+Hexagon_v1024v1024v512v512_Intrinsic<"HEXAGON_V6_vmpybusv_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpybusv_acc_128B,VD_ftype_VDVIVI,3)
+// tag : V6_vmpybusv_acc_128B
+def int_hexagon_V6_vmpybusv_acc_128B :
+Hexagon_v2048v2048v1024v1024_Intrinsic<"HEXAGON_V6_vmpybusv_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpabusv,VD_ftype_VDVD,2)
+// tag : V6_vmpabusv
+def int_hexagon_V6_vmpabusv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmpabusv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpabusv_128B,VD_ftype_VDVD,2)
+// tag : V6_vmpabusv_128B
+def int_hexagon_V6_vmpabusv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vmpabusv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpabuuv,VD_ftype_VDVD,2)
+// tag : V6_vmpabuuv
+def int_hexagon_V6_vmpabuuv :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmpabuuv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpabuuv_128B,VD_ftype_VDVD,2)
+// tag : V6_vmpabuuv_128B
+def int_hexagon_V6_vmpabuuv_128B :
+Hexagon_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vmpabuuv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhv,VD_ftype_VIVI,2)
+// tag : V6_vmpyhv
+def int_hexagon_V6_vmpyhv :
+Hexagon_v1024v512v512_Intrinsic<"HEXAGON_V6_vmpyhv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhv_128B,VD_ftype_VIVI,2)
+// tag : V6_vmpyhv_128B
+def int_hexagon_V6_vmpyhv_128B :
+Hexagon_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vmpyhv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhv_acc,VD_ftype_VDVIVI,3)
+// tag : V6_vmpyhv_acc
+def int_hexagon_V6_vmpyhv_acc :
+Hexagon_v1024v1024v512v512_Intrinsic<"HEXAGON_V6_vmpyhv_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhv_acc_128B,VD_ftype_VDVIVI,3)
+// tag : V6_vmpyhv_acc_128B
+def int_hexagon_V6_vmpyhv_acc_128B :
+Hexagon_v2048v2048v1024v1024_Intrinsic<"HEXAGON_V6_vmpyhv_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyuhv,VD_ftype_VIVI,2)
+// tag : V6_vmpyuhv
+def int_hexagon_V6_vmpyuhv :
+Hexagon_v1024v512v512_Intrinsic<"HEXAGON_V6_vmpyuhv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyuhv_128B,VD_ftype_VIVI,2)
+// tag : V6_vmpyuhv_128B
+def int_hexagon_V6_vmpyuhv_128B :
+Hexagon_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vmpyuhv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyuhv_acc,VD_ftype_VDVIVI,3)
+// tag : V6_vmpyuhv_acc
+def int_hexagon_V6_vmpyuhv_acc :
+Hexagon_v1024v1024v512v512_Intrinsic<"HEXAGON_V6_vmpyuhv_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyuhv_acc_128B,VD_ftype_VDVIVI,3)
+// tag : V6_vmpyuhv_acc_128B
+def int_hexagon_V6_vmpyuhv_acc_128B :
+Hexagon_v2048v2048v1024v1024_Intrinsic<"HEXAGON_V6_vmpyuhv_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhvsrs,VI_ftype_VIVI,2)
+// tag : V6_vmpyhvsrs
+def int_hexagon_V6_vmpyhvsrs :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vmpyhvsrs">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhvsrs_128B,VI_ftype_VIVI,2)
+// tag : V6_vmpyhvsrs_128B
+def int_hexagon_V6_vmpyhvsrs_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmpyhvsrs_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhus,VD_ftype_VIVI,2)
+// tag : V6_vmpyhus
+def int_hexagon_V6_vmpyhus :
+Hexagon_v1024v512v512_Intrinsic<"HEXAGON_V6_vmpyhus">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhus_128B,VD_ftype_VIVI,2)
+// tag : V6_vmpyhus_128B
+def int_hexagon_V6_vmpyhus_128B :
+Hexagon_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vmpyhus_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhus_acc,VD_ftype_VDVIVI,3)
+// tag : V6_vmpyhus_acc
+def int_hexagon_V6_vmpyhus_acc :
+Hexagon_v1024v1024v512v512_Intrinsic<"HEXAGON_V6_vmpyhus_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhus_acc_128B,VD_ftype_VDVIVI,3)
+// tag : V6_vmpyhus_acc_128B
+def int_hexagon_V6_vmpyhus_acc_128B :
+Hexagon_v2048v2048v1024v1024_Intrinsic<"HEXAGON_V6_vmpyhus_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyih,VI_ftype_VIVI,2)
+// tag : V6_vmpyih
+def int_hexagon_V6_vmpyih :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vmpyih">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyih_128B,VI_ftype_VIVI,2)
+// tag : V6_vmpyih_128B
+def int_hexagon_V6_vmpyih_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmpyih_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyih_acc,VI_ftype_VIVIVI,3)
+// tag : V6_vmpyih_acc
+def int_hexagon_V6_vmpyih_acc :
+Hexagon_v512v512v512v512_Intrinsic<"HEXAGON_V6_vmpyih_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyih_acc_128B,VI_ftype_VIVIVI,3)
+// tag : V6_vmpyih_acc_128B
+def int_hexagon_V6_vmpyih_acc_128B :
+Hexagon_v1024v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmpyih_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyewuh,VI_ftype_VIVI,2)
+// tag : V6_vmpyewuh
+def int_hexagon_V6_vmpyewuh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vmpyewuh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyewuh_128B,VI_ftype_VIVI,2)
+// tag : V6_vmpyewuh_128B
+def int_hexagon_V6_vmpyewuh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmpyewuh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyowh,VI_ftype_VIVI,2)
+// tag : V6_vmpyowh
+def int_hexagon_V6_vmpyowh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vmpyowh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyowh_128B,VI_ftype_VIVI,2)
+// tag : V6_vmpyowh_128B
+def int_hexagon_V6_vmpyowh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmpyowh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyowh_rnd,VI_ftype_VIVI,2)
+// tag : V6_vmpyowh_rnd
+def int_hexagon_V6_vmpyowh_rnd :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vmpyowh_rnd">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyowh_rnd_128B,VI_ftype_VIVI,2)
+// tag : V6_vmpyowh_rnd_128B
+def int_hexagon_V6_vmpyowh_rnd_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmpyowh_rnd_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyowh_sacc,VI_ftype_VIVIVI,3)
+// tag : V6_vmpyowh_sacc
+def int_hexagon_V6_vmpyowh_sacc :
+Hexagon_v512v512v512v512_Intrinsic<"HEXAGON_V6_vmpyowh_sacc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyowh_sacc_128B,VI_ftype_VIVIVI,3)
+// tag : V6_vmpyowh_sacc_128B
+def int_hexagon_V6_vmpyowh_sacc_128B :
+Hexagon_v1024v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmpyowh_sacc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyowh_rnd_sacc,VI_ftype_VIVIVI,3)
+// tag : V6_vmpyowh_rnd_sacc
+def int_hexagon_V6_vmpyowh_rnd_sacc :
+Hexagon_v512v512v512v512_Intrinsic<"HEXAGON_V6_vmpyowh_rnd_sacc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyowh_rnd_sacc_128B,VI_ftype_VIVIVI,3)
+// tag : V6_vmpyowh_rnd_sacc_128B
+def int_hexagon_V6_vmpyowh_rnd_sacc_128B :
+Hexagon_v1024v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmpyowh_rnd_sacc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyieoh,VI_ftype_VIVI,2)
+// tag : V6_vmpyieoh
+def int_hexagon_V6_vmpyieoh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vmpyieoh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyieoh_128B,VI_ftype_VIVI,2)
+// tag : V6_vmpyieoh_128B
+def int_hexagon_V6_vmpyieoh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmpyieoh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiewuh,VI_ftype_VIVI,2)
+// tag : V6_vmpyiewuh
+def int_hexagon_V6_vmpyiewuh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vmpyiewuh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiewuh_128B,VI_ftype_VIVI,2)
+// tag : V6_vmpyiewuh_128B
+def int_hexagon_V6_vmpyiewuh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmpyiewuh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiowh,VI_ftype_VIVI,2)
+// tag : V6_vmpyiowh
+def int_hexagon_V6_vmpyiowh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vmpyiowh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiowh_128B,VI_ftype_VIVI,2)
+// tag : V6_vmpyiowh_128B
+def int_hexagon_V6_vmpyiowh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmpyiowh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiewh_acc,VI_ftype_VIVIVI,3)
+// tag : V6_vmpyiewh_acc
+def int_hexagon_V6_vmpyiewh_acc :
+Hexagon_v512v512v512v512_Intrinsic<"HEXAGON_V6_vmpyiewh_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiewh_acc_128B,VI_ftype_VIVIVI,3)
+// tag : V6_vmpyiewh_acc_128B
+def int_hexagon_V6_vmpyiewh_acc_128B :
+Hexagon_v1024v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmpyiewh_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiewuh_acc,VI_ftype_VIVIVI,3)
+// tag : V6_vmpyiewuh_acc
+def int_hexagon_V6_vmpyiewuh_acc :
+Hexagon_v512v512v512v512_Intrinsic<"HEXAGON_V6_vmpyiewuh_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiewuh_acc_128B,VI_ftype_VIVIVI,3)
+// tag : V6_vmpyiewuh_acc_128B
+def int_hexagon_V6_vmpyiewuh_acc_128B :
+Hexagon_v1024v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmpyiewuh_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyub,VD_ftype_VISI,2)
+// tag : V6_vmpyub
+def int_hexagon_V6_vmpyub :
+Hexagon_v1024v512i_Intrinsic<"HEXAGON_V6_vmpyub">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyub_128B,VD_ftype_VISI,2)
+// tag : V6_vmpyub_128B
+def int_hexagon_V6_vmpyub_128B :
+Hexagon_v2048v1024i_Intrinsic<"HEXAGON_V6_vmpyub_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyub_acc,VD_ftype_VDVISI,3)
+// tag : V6_vmpyub_acc
+def int_hexagon_V6_vmpyub_acc :
+Hexagon_v1024v1024v512i_Intrinsic<"HEXAGON_V6_vmpyub_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyub_acc_128B,VD_ftype_VDVISI,3)
+// tag : V6_vmpyub_acc_128B
+def int_hexagon_V6_vmpyub_acc_128B :
+Hexagon_v2048v2048v1024i_Intrinsic<"HEXAGON_V6_vmpyub_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpybus,VD_ftype_VISI,2)
+// tag : V6_vmpybus
+def int_hexagon_V6_vmpybus :
+Hexagon_v1024v512i_Intrinsic<"HEXAGON_V6_vmpybus">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpybus_128B,VD_ftype_VISI,2)
+// tag : V6_vmpybus_128B
+def int_hexagon_V6_vmpybus_128B :
+Hexagon_v2048v1024i_Intrinsic<"HEXAGON_V6_vmpybus_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpybus_acc,VD_ftype_VDVISI,3)
+// tag : V6_vmpybus_acc
+def int_hexagon_V6_vmpybus_acc :
+Hexagon_v1024v1024v512i_Intrinsic<"HEXAGON_V6_vmpybus_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpybus_acc_128B,VD_ftype_VDVISI,3)
+// tag : V6_vmpybus_acc_128B
+def int_hexagon_V6_vmpybus_acc_128B :
+Hexagon_v2048v2048v1024i_Intrinsic<"HEXAGON_V6_vmpybus_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpabus,VD_ftype_VDSI,2)
+// tag : V6_vmpabus
+def int_hexagon_V6_vmpabus :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vmpabus">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpabus_128B,VD_ftype_VDSI,2)
+// tag : V6_vmpabus_128B
+def int_hexagon_V6_vmpabus_128B :
+Hexagon_v2048v2048i_Intrinsic<"HEXAGON_V6_vmpabus_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpabus_acc,VD_ftype_VDVDSI,3)
+// tag : V6_vmpabus_acc
+def int_hexagon_V6_vmpabus_acc :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vmpabus_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpabus_acc_128B,VD_ftype_VDVDSI,3)
+// tag : V6_vmpabus_acc_128B
+def int_hexagon_V6_vmpabus_acc_128B :
+Hexagon_v2048v2048v2048i_Intrinsic<"HEXAGON_V6_vmpabus_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpahb,VD_ftype_VDSI,2)
+// tag : V6_vmpahb
+def int_hexagon_V6_vmpahb :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vmpahb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpahb_128B,VD_ftype_VDSI,2)
+// tag : V6_vmpahb_128B
+def int_hexagon_V6_vmpahb_128B :
+Hexagon_v2048v2048i_Intrinsic<"HEXAGON_V6_vmpahb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpahb_acc,VD_ftype_VDVDSI,3)
+// tag : V6_vmpahb_acc
+def int_hexagon_V6_vmpahb_acc :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vmpahb_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpahb_acc_128B,VD_ftype_VDVDSI,3)
+// tag : V6_vmpahb_acc_128B
+def int_hexagon_V6_vmpahb_acc_128B :
+Hexagon_v2048v2048v2048i_Intrinsic<"HEXAGON_V6_vmpahb_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyh,VD_ftype_VISI,2)
+// tag : V6_vmpyh
+def int_hexagon_V6_vmpyh :
+Hexagon_v1024v512i_Intrinsic<"HEXAGON_V6_vmpyh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyh_128B,VD_ftype_VISI,2)
+// tag : V6_vmpyh_128B
+def int_hexagon_V6_vmpyh_128B :
+Hexagon_v2048v1024i_Intrinsic<"HEXAGON_V6_vmpyh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhsat_acc,VD_ftype_VDVISI,3)
+// tag : V6_vmpyhsat_acc
+def int_hexagon_V6_vmpyhsat_acc :
+Hexagon_v1024v1024v512i_Intrinsic<"HEXAGON_V6_vmpyhsat_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhsat_acc_128B,VD_ftype_VDVISI,3)
+// tag : V6_vmpyhsat_acc_128B
+def int_hexagon_V6_vmpyhsat_acc_128B :
+Hexagon_v2048v2048v1024i_Intrinsic<"HEXAGON_V6_vmpyhsat_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhss,VI_ftype_VISI,2)
+// tag : V6_vmpyhss
+def int_hexagon_V6_vmpyhss :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vmpyhss">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhss_128B,VI_ftype_VISI,2)
+// tag : V6_vmpyhss_128B
+def int_hexagon_V6_vmpyhss_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vmpyhss_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhsrs,VI_ftype_VISI,2)
+// tag : V6_vmpyhsrs
+def int_hexagon_V6_vmpyhsrs :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vmpyhsrs">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyhsrs_128B,VI_ftype_VISI,2)
+// tag : V6_vmpyhsrs_128B
+def int_hexagon_V6_vmpyhsrs_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vmpyhsrs_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyuh,VD_ftype_VISI,2)
+// tag : V6_vmpyuh
+def int_hexagon_V6_vmpyuh :
+Hexagon_v1024v512i_Intrinsic<"HEXAGON_V6_vmpyuh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyuh_128B,VD_ftype_VISI,2)
+// tag : V6_vmpyuh_128B
+def int_hexagon_V6_vmpyuh_128B :
+Hexagon_v2048v1024i_Intrinsic<"HEXAGON_V6_vmpyuh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyuh_acc,VD_ftype_VDVISI,3)
+// tag : V6_vmpyuh_acc
+def int_hexagon_V6_vmpyuh_acc :
+Hexagon_v1024v1024v512i_Intrinsic<"HEXAGON_V6_vmpyuh_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyuh_acc_128B,VD_ftype_VDVISI,3)
+// tag : V6_vmpyuh_acc_128B
+def int_hexagon_V6_vmpyuh_acc_128B :
+Hexagon_v2048v2048v1024i_Intrinsic<"HEXAGON_V6_vmpyuh_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyihb,VI_ftype_VISI,2)
+// tag : V6_vmpyihb
+def int_hexagon_V6_vmpyihb :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vmpyihb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyihb_128B,VI_ftype_VISI,2)
+// tag : V6_vmpyihb_128B
+def int_hexagon_V6_vmpyihb_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vmpyihb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyihb_acc,VI_ftype_VIVISI,3)
+// tag : V6_vmpyihb_acc
+def int_hexagon_V6_vmpyihb_acc :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vmpyihb_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyihb_acc_128B,VI_ftype_VIVISI,3)
+// tag : V6_vmpyihb_acc_128B
+def int_hexagon_V6_vmpyihb_acc_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vmpyihb_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiwb,VI_ftype_VISI,2)
+// tag : V6_vmpyiwb
+def int_hexagon_V6_vmpyiwb :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vmpyiwb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiwb_128B,VI_ftype_VISI,2)
+// tag : V6_vmpyiwb_128B
+def int_hexagon_V6_vmpyiwb_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vmpyiwb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiwb_acc,VI_ftype_VIVISI,3)
+// tag : V6_vmpyiwb_acc
+def int_hexagon_V6_vmpyiwb_acc :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vmpyiwb_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiwb_acc_128B,VI_ftype_VIVISI,3)
+// tag : V6_vmpyiwb_acc_128B
+def int_hexagon_V6_vmpyiwb_acc_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vmpyiwb_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiwh,VI_ftype_VISI,2)
+// tag : V6_vmpyiwh
+def int_hexagon_V6_vmpyiwh :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vmpyiwh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiwh_128B,VI_ftype_VISI,2)
+// tag : V6_vmpyiwh_128B
+def int_hexagon_V6_vmpyiwh_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vmpyiwh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiwh_acc,VI_ftype_VIVISI,3)
+// tag : V6_vmpyiwh_acc
+def int_hexagon_V6_vmpyiwh_acc :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vmpyiwh_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiwh_acc_128B,VI_ftype_VIVISI,3)
+// tag : V6_vmpyiwh_acc_128B
+def int_hexagon_V6_vmpyiwh_acc_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vmpyiwh_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vand,VI_ftype_VIVI,2)
+// tag : V6_vand
+def int_hexagon_V6_vand :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vand">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vand_128B,VI_ftype_VIVI,2)
+// tag : V6_vand_128B
+def int_hexagon_V6_vand_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vand_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vor,VI_ftype_VIVI,2)
+// tag : V6_vor
+def int_hexagon_V6_vor :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vor">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vor_128B,VI_ftype_VIVI,2)
+// tag : V6_vor_128B
+def int_hexagon_V6_vor_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vor_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vxor,VI_ftype_VIVI,2)
+// tag : V6_vxor
+def int_hexagon_V6_vxor :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vxor">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vxor_128B,VI_ftype_VIVI,2)
+// tag : V6_vxor_128B
+def int_hexagon_V6_vxor_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vxor_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vnot,VI_ftype_VI,1)
+// tag : V6_vnot
+def int_hexagon_V6_vnot :
+Hexagon_v512v512_Intrinsic<"HEXAGON_V6_vnot">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vnot_128B,VI_ftype_VI,1)
+// tag : V6_vnot_128B
+def int_hexagon_V6_vnot_128B :
+Hexagon_v1024v1024_Intrinsic<"HEXAGON_V6_vnot_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandqrt,VI_ftype_QVSI,2)
+// tag : V6_vandqrt
+def int_hexagon_V6_vandqrt :
+Hexagon_v512v64ii_Intrinsic<"HEXAGON_V6_vandqrt">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandqrt_128B,VI_ftype_QVSI,2)
+// tag : V6_vandqrt_128B
+def int_hexagon_V6_vandqrt_128B :
+Hexagon_v1024v128ii_Intrinsic<"HEXAGON_V6_vandqrt_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandqrt_acc,VI_ftype_VIQVSI,3)
+// tag : V6_vandqrt_acc
+def int_hexagon_V6_vandqrt_acc :
+Hexagon_v512v512v64ii_Intrinsic<"HEXAGON_V6_vandqrt_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandqrt_acc_128B,VI_ftype_VIQVSI,3)
+// tag : V6_vandqrt_acc_128B
+def int_hexagon_V6_vandqrt_acc_128B :
+Hexagon_v1024v1024v128ii_Intrinsic<"HEXAGON_V6_vandqrt_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandvrt,QV_ftype_VISI,2)
+// tag : V6_vandvrt
+def int_hexagon_V6_vandvrt :
+Hexagon_v64iv512i_Intrinsic<"HEXAGON_V6_vandvrt">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandvrt_128B,QV_ftype_VISI,2)
+// tag : V6_vandvrt_128B
+def int_hexagon_V6_vandvrt_128B :
+Hexagon_v128iv1024i_Intrinsic<"HEXAGON_V6_vandvrt_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandvrt_acc,QV_ftype_QVVISI,3)
+// tag : V6_vandvrt_acc
+def int_hexagon_V6_vandvrt_acc :
+Hexagon_v64iv64iv512i_Intrinsic<"HEXAGON_V6_vandvrt_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandvrt_acc_128B,QV_ftype_QVVISI,3)
+// tag : V6_vandvrt_acc_128B
+def int_hexagon_V6_vandvrt_acc_128B :
+Hexagon_v128iv128iv1024i_Intrinsic<"HEXAGON_V6_vandvrt_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtw,QV_ftype_VIVI,2)
+// tag : V6_vgtw
+def int_hexagon_V6_vgtw :
+Hexagon_v64iv512v512_Intrinsic<"HEXAGON_V6_vgtw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtw_128B,QV_ftype_VIVI,2)
+// tag : V6_vgtw_128B
+def int_hexagon_V6_vgtw_128B :
+Hexagon_v128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtw_and,QV_ftype_QVVIVI,3)
+// tag : V6_vgtw_and
+def int_hexagon_V6_vgtw_and :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgtw_and">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtw_and_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgtw_and_128B
+def int_hexagon_V6_vgtw_and_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtw_and_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtw_or,QV_ftype_QVVIVI,3)
+// tag : V6_vgtw_or
+def int_hexagon_V6_vgtw_or :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgtw_or">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtw_or_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgtw_or_128B
+def int_hexagon_V6_vgtw_or_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtw_or_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtw_xor,QV_ftype_QVVIVI,3)
+// tag : V6_vgtw_xor
+def int_hexagon_V6_vgtw_xor :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgtw_xor">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtw_xor_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgtw_xor_128B
+def int_hexagon_V6_vgtw_xor_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtw_xor_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqw,QV_ftype_VIVI,2)
+// tag : V6_veqw
+def int_hexagon_V6_veqw :
+Hexagon_v64iv512v512_Intrinsic<"HEXAGON_V6_veqw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqw_128B,QV_ftype_VIVI,2)
+// tag : V6_veqw_128B
+def int_hexagon_V6_veqw_128B :
+Hexagon_v128iv1024v1024_Intrinsic<"HEXAGON_V6_veqw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqw_and,QV_ftype_QVVIVI,3)
+// tag : V6_veqw_and
+def int_hexagon_V6_veqw_and :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_veqw_and">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqw_and_128B,QV_ftype_QVVIVI,3)
+// tag : V6_veqw_and_128B
+def int_hexagon_V6_veqw_and_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_veqw_and_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqw_or,QV_ftype_QVVIVI,3)
+// tag : V6_veqw_or
+def int_hexagon_V6_veqw_or :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_veqw_or">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqw_or_128B,QV_ftype_QVVIVI,3)
+// tag : V6_veqw_or_128B
+def int_hexagon_V6_veqw_or_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_veqw_or_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqw_xor,QV_ftype_QVVIVI,3)
+// tag : V6_veqw_xor
+def int_hexagon_V6_veqw_xor :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_veqw_xor">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqw_xor_128B,QV_ftype_QVVIVI,3)
+// tag : V6_veqw_xor_128B
+def int_hexagon_V6_veqw_xor_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_veqw_xor_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgth,QV_ftype_VIVI,2)
+// tag : V6_vgth
+def int_hexagon_V6_vgth :
+Hexagon_v64iv512v512_Intrinsic<"HEXAGON_V6_vgth">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgth_128B,QV_ftype_VIVI,2)
+// tag : V6_vgth_128B
+def int_hexagon_V6_vgth_128B :
+Hexagon_v128iv1024v1024_Intrinsic<"HEXAGON_V6_vgth_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgth_and,QV_ftype_QVVIVI,3)
+// tag : V6_vgth_and
+def int_hexagon_V6_vgth_and :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgth_and">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgth_and_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgth_and_128B
+def int_hexagon_V6_vgth_and_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgth_and_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgth_or,QV_ftype_QVVIVI,3)
+// tag : V6_vgth_or
+def int_hexagon_V6_vgth_or :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgth_or">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgth_or_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgth_or_128B
+def int_hexagon_V6_vgth_or_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgth_or_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgth_xor,QV_ftype_QVVIVI,3)
+// tag : V6_vgth_xor
+def int_hexagon_V6_vgth_xor :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgth_xor">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgth_xor_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgth_xor_128B
+def int_hexagon_V6_vgth_xor_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgth_xor_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqh,QV_ftype_VIVI,2)
+// tag : V6_veqh
+def int_hexagon_V6_veqh :
+Hexagon_v64iv512v512_Intrinsic<"HEXAGON_V6_veqh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqh_128B,QV_ftype_VIVI,2)
+// tag : V6_veqh_128B
+def int_hexagon_V6_veqh_128B :
+Hexagon_v128iv1024v1024_Intrinsic<"HEXAGON_V6_veqh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqh_and,QV_ftype_QVVIVI,3)
+// tag : V6_veqh_and
+def int_hexagon_V6_veqh_and :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_veqh_and">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqh_and_128B,QV_ftype_QVVIVI,3)
+// tag : V6_veqh_and_128B
+def int_hexagon_V6_veqh_and_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_veqh_and_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqh_or,QV_ftype_QVVIVI,3)
+// tag : V6_veqh_or
+def int_hexagon_V6_veqh_or :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_veqh_or">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqh_or_128B,QV_ftype_QVVIVI,3)
+// tag : V6_veqh_or_128B
+def int_hexagon_V6_veqh_or_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_veqh_or_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqh_xor,QV_ftype_QVVIVI,3)
+// tag : V6_veqh_xor
+def int_hexagon_V6_veqh_xor :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_veqh_xor">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqh_xor_128B,QV_ftype_QVVIVI,3)
+// tag : V6_veqh_xor_128B
+def int_hexagon_V6_veqh_xor_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_veqh_xor_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtb,QV_ftype_VIVI,2)
+// tag : V6_vgtb
+def int_hexagon_V6_vgtb :
+Hexagon_v64iv512v512_Intrinsic<"HEXAGON_V6_vgtb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtb_128B,QV_ftype_VIVI,2)
+// tag : V6_vgtb_128B
+def int_hexagon_V6_vgtb_128B :
+Hexagon_v128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtb_and,QV_ftype_QVVIVI,3)
+// tag : V6_vgtb_and
+def int_hexagon_V6_vgtb_and :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgtb_and">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtb_and_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgtb_and_128B
+def int_hexagon_V6_vgtb_and_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtb_and_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtb_or,QV_ftype_QVVIVI,3)
+// tag : V6_vgtb_or
+def int_hexagon_V6_vgtb_or :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgtb_or">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtb_or_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgtb_or_128B
+def int_hexagon_V6_vgtb_or_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtb_or_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtb_xor,QV_ftype_QVVIVI,3)
+// tag : V6_vgtb_xor
+def int_hexagon_V6_vgtb_xor :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgtb_xor">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtb_xor_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgtb_xor_128B
+def int_hexagon_V6_vgtb_xor_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtb_xor_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqb,QV_ftype_VIVI,2)
+// tag : V6_veqb
+def int_hexagon_V6_veqb :
+Hexagon_v64iv512v512_Intrinsic<"HEXAGON_V6_veqb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqb_128B,QV_ftype_VIVI,2)
+// tag : V6_veqb_128B
+def int_hexagon_V6_veqb_128B :
+Hexagon_v128iv1024v1024_Intrinsic<"HEXAGON_V6_veqb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqb_and,QV_ftype_QVVIVI,3)
+// tag : V6_veqb_and
+def int_hexagon_V6_veqb_and :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_veqb_and">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqb_and_128B,QV_ftype_QVVIVI,3)
+// tag : V6_veqb_and_128B
+def int_hexagon_V6_veqb_and_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_veqb_and_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqb_or,QV_ftype_QVVIVI,3)
+// tag : V6_veqb_or
+def int_hexagon_V6_veqb_or :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_veqb_or">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqb_or_128B,QV_ftype_QVVIVI,3)
+// tag : V6_veqb_or_128B
+def int_hexagon_V6_veqb_or_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_veqb_or_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqb_xor,QV_ftype_QVVIVI,3)
+// tag : V6_veqb_xor
+def int_hexagon_V6_veqb_xor :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_veqb_xor">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_veqb_xor_128B,QV_ftype_QVVIVI,3)
+// tag : V6_veqb_xor_128B
+def int_hexagon_V6_veqb_xor_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_veqb_xor_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuw,QV_ftype_VIVI,2)
+// tag : V6_vgtuw
+def int_hexagon_V6_vgtuw :
+Hexagon_v64iv512v512_Intrinsic<"HEXAGON_V6_vgtuw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuw_128B,QV_ftype_VIVI,2)
+// tag : V6_vgtuw_128B
+def int_hexagon_V6_vgtuw_128B :
+Hexagon_v128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtuw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuw_and,QV_ftype_QVVIVI,3)
+// tag : V6_vgtuw_and
+def int_hexagon_V6_vgtuw_and :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgtuw_and">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuw_and_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgtuw_and_128B
+def int_hexagon_V6_vgtuw_and_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtuw_and_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuw_or,QV_ftype_QVVIVI,3)
+// tag : V6_vgtuw_or
+def int_hexagon_V6_vgtuw_or :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgtuw_or">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuw_or_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgtuw_or_128B
+def int_hexagon_V6_vgtuw_or_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtuw_or_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuw_xor,QV_ftype_QVVIVI,3)
+// tag : V6_vgtuw_xor
+def int_hexagon_V6_vgtuw_xor :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgtuw_xor">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuw_xor_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgtuw_xor_128B
+def int_hexagon_V6_vgtuw_xor_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtuw_xor_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuh,QV_ftype_VIVI,2)
+// tag : V6_vgtuh
+def int_hexagon_V6_vgtuh :
+Hexagon_v64iv512v512_Intrinsic<"HEXAGON_V6_vgtuh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuh_128B,QV_ftype_VIVI,2)
+// tag : V6_vgtuh_128B
+def int_hexagon_V6_vgtuh_128B :
+Hexagon_v128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtuh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuh_and,QV_ftype_QVVIVI,3)
+// tag : V6_vgtuh_and
+def int_hexagon_V6_vgtuh_and :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgtuh_and">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuh_and_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgtuh_and_128B
+def int_hexagon_V6_vgtuh_and_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtuh_and_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuh_or,QV_ftype_QVVIVI,3)
+// tag : V6_vgtuh_or
+def int_hexagon_V6_vgtuh_or :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgtuh_or">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuh_or_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgtuh_or_128B
+def int_hexagon_V6_vgtuh_or_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtuh_or_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuh_xor,QV_ftype_QVVIVI,3)
+// tag : V6_vgtuh_xor
+def int_hexagon_V6_vgtuh_xor :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgtuh_xor">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtuh_xor_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgtuh_xor_128B
+def int_hexagon_V6_vgtuh_xor_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtuh_xor_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtub,QV_ftype_VIVI,2)
+// tag : V6_vgtub
+def int_hexagon_V6_vgtub :
+Hexagon_v64iv512v512_Intrinsic<"HEXAGON_V6_vgtub">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtub_128B,QV_ftype_VIVI,2)
+// tag : V6_vgtub_128B
+def int_hexagon_V6_vgtub_128B :
+Hexagon_v128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtub_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtub_and,QV_ftype_QVVIVI,3)
+// tag : V6_vgtub_and
+def int_hexagon_V6_vgtub_and :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgtub_and">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtub_and_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgtub_and_128B
+def int_hexagon_V6_vgtub_and_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtub_and_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtub_or,QV_ftype_QVVIVI,3)
+// tag : V6_vgtub_or
+def int_hexagon_V6_vgtub_or :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgtub_or">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtub_or_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgtub_or_128B
+def int_hexagon_V6_vgtub_or_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtub_or_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtub_xor,QV_ftype_QVVIVI,3)
+// tag : V6_vgtub_xor
+def int_hexagon_V6_vgtub_xor :
+Hexagon_v64iv64iv512v512_Intrinsic<"HEXAGON_V6_vgtub_xor">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vgtub_xor_128B,QV_ftype_QVVIVI,3)
+// tag : V6_vgtub_xor_128B
+def int_hexagon_V6_vgtub_xor_128B :
+Hexagon_v128iv128iv1024v1024_Intrinsic<"HEXAGON_V6_vgtub_xor_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_or,QV_ftype_QVQV,2)
+// tag : V6_pred_or
+def int_hexagon_V6_pred_or :
+Hexagon_v64iv64iv64i_Intrinsic<"HEXAGON_V6_pred_or">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_or_128B,QV_ftype_QVQV,2)
+// tag : V6_pred_or_128B
+def int_hexagon_V6_pred_or_128B :
+Hexagon_v128iv128iv128i_Intrinsic<"HEXAGON_V6_pred_or_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_and,QV_ftype_QVQV,2)
+// tag : V6_pred_and
+def int_hexagon_V6_pred_and :
+Hexagon_v64iv64iv64i_Intrinsic<"HEXAGON_V6_pred_and">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_and_128B,QV_ftype_QVQV,2)
+// tag : V6_pred_and_128B
+def int_hexagon_V6_pred_and_128B :
+Hexagon_v128iv128iv128i_Intrinsic<"HEXAGON_V6_pred_and_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_not,QV_ftype_QV,1)
+// tag : V6_pred_not
+def int_hexagon_V6_pred_not :
+Hexagon_v64iv64i_Intrinsic<"HEXAGON_V6_pred_not">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_not_128B,QV_ftype_QV,1)
+// tag : V6_pred_not_128B
+def int_hexagon_V6_pred_not_128B :
+Hexagon_v128iv128i_Intrinsic<"HEXAGON_V6_pred_not_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_xor,QV_ftype_QVQV,2)
+// tag : V6_pred_xor
+def int_hexagon_V6_pred_xor :
+Hexagon_v64iv64iv64i_Intrinsic<"HEXAGON_V6_pred_xor">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_xor_128B,QV_ftype_QVQV,2)
+// tag : V6_pred_xor_128B
+def int_hexagon_V6_pred_xor_128B :
+Hexagon_v128iv128iv128i_Intrinsic<"HEXAGON_V6_pred_xor_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_and_n,QV_ftype_QVQV,2)
+// tag : V6_pred_and_n
+def int_hexagon_V6_pred_and_n :
+Hexagon_v64iv64iv64i_Intrinsic<"HEXAGON_V6_pred_and_n">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_and_n_128B,QV_ftype_QVQV,2)
+// tag : V6_pred_and_n_128B
+def int_hexagon_V6_pred_and_n_128B :
+Hexagon_v128iv128iv128i_Intrinsic<"HEXAGON_V6_pred_and_n_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_or_n,QV_ftype_QVQV,2)
+// tag : V6_pred_or_n
+def int_hexagon_V6_pred_or_n :
+Hexagon_v64iv64iv64i_Intrinsic<"HEXAGON_V6_pred_or_n">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_or_n_128B,QV_ftype_QVQV,2)
+// tag : V6_pred_or_n_128B
+def int_hexagon_V6_pred_or_n_128B :
+Hexagon_v128iv128iv128i_Intrinsic<"HEXAGON_V6_pred_or_n_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_scalar2,QV_ftype_SI,1)
+// tag : V6_pred_scalar2
+def int_hexagon_V6_pred_scalar2 :
+Hexagon_v64ii_Intrinsic<"HEXAGON_V6_pred_scalar2">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_scalar2_128B,QV_ftype_SI,1)
+// tag : V6_pred_scalar2_128B
+def int_hexagon_V6_pred_scalar2_128B :
+Hexagon_v128ii_Intrinsic<"HEXAGON_V6_pred_scalar2_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmux,VI_ftype_QVVIVI,3)
+// tag : V6_vmux
+def int_hexagon_V6_vmux :
+Hexagon_v512v64iv512v512_Intrinsic<"HEXAGON_V6_vmux">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmux_128B,VI_ftype_QVVIVI,3)
+// tag : V6_vmux_128B
+def int_hexagon_V6_vmux_128B :
+Hexagon_v1024v128iv1024v1024_Intrinsic<"HEXAGON_V6_vmux_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vswap,VD_ftype_QVVIVI,3)
+// tag : V6_vswap
+def int_hexagon_V6_vswap :
+Hexagon_v1024v64iv512v512_Intrinsic<"HEXAGON_V6_vswap">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vswap_128B,VD_ftype_QVVIVI,3)
+// tag : V6_vswap_128B
+def int_hexagon_V6_vswap_128B :
+Hexagon_v2048v128iv1024v1024_Intrinsic<"HEXAGON_V6_vswap_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmaxub,VI_ftype_VIVI,2)
+// tag : V6_vmaxub
+def int_hexagon_V6_vmaxub :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vmaxub">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmaxub_128B,VI_ftype_VIVI,2)
+// tag : V6_vmaxub_128B
+def int_hexagon_V6_vmaxub_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmaxub_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vminub,VI_ftype_VIVI,2)
+// tag : V6_vminub
+def int_hexagon_V6_vminub :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vminub">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vminub_128B,VI_ftype_VIVI,2)
+// tag : V6_vminub_128B
+def int_hexagon_V6_vminub_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vminub_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmaxuh,VI_ftype_VIVI,2)
+// tag : V6_vmaxuh
+def int_hexagon_V6_vmaxuh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vmaxuh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmaxuh_128B,VI_ftype_VIVI,2)
+// tag : V6_vmaxuh_128B
+def int_hexagon_V6_vmaxuh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmaxuh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vminuh,VI_ftype_VIVI,2)
+// tag : V6_vminuh
+def int_hexagon_V6_vminuh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vminuh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vminuh_128B,VI_ftype_VIVI,2)
+// tag : V6_vminuh_128B
+def int_hexagon_V6_vminuh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vminuh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmaxh,VI_ftype_VIVI,2)
+// tag : V6_vmaxh
+def int_hexagon_V6_vmaxh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vmaxh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmaxh_128B,VI_ftype_VIVI,2)
+// tag : V6_vmaxh_128B
+def int_hexagon_V6_vmaxh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmaxh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vminh,VI_ftype_VIVI,2)
+// tag : V6_vminh
+def int_hexagon_V6_vminh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vminh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vminh_128B,VI_ftype_VIVI,2)
+// tag : V6_vminh_128B
+def int_hexagon_V6_vminh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vminh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmaxw,VI_ftype_VIVI,2)
+// tag : V6_vmaxw
+def int_hexagon_V6_vmaxw :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vmaxw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmaxw_128B,VI_ftype_VIVI,2)
+// tag : V6_vmaxw_128B
+def int_hexagon_V6_vmaxw_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmaxw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vminw,VI_ftype_VIVI,2)
+// tag : V6_vminw
+def int_hexagon_V6_vminw :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vminw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vminw_128B,VI_ftype_VIVI,2)
+// tag : V6_vminw_128B
+def int_hexagon_V6_vminw_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vminw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsathub,VI_ftype_VIVI,2)
+// tag : V6_vsathub
+def int_hexagon_V6_vsathub :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vsathub">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsathub_128B,VI_ftype_VIVI,2)
+// tag : V6_vsathub_128B
+def int_hexagon_V6_vsathub_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsathub_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsatwh,VI_ftype_VIVI,2)
+// tag : V6_vsatwh
+def int_hexagon_V6_vsatwh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vsatwh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsatwh_128B,VI_ftype_VIVI,2)
+// tag : V6_vsatwh_128B
+def int_hexagon_V6_vsatwh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsatwh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshuffeb,VI_ftype_VIVI,2)
+// tag : V6_vshuffeb
+def int_hexagon_V6_vshuffeb :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vshuffeb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshuffeb_128B,VI_ftype_VIVI,2)
+// tag : V6_vshuffeb_128B
+def int_hexagon_V6_vshuffeb_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vshuffeb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshuffob,VI_ftype_VIVI,2)
+// tag : V6_vshuffob
+def int_hexagon_V6_vshuffob :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vshuffob">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshuffob_128B,VI_ftype_VIVI,2)
+// tag : V6_vshuffob_128B
+def int_hexagon_V6_vshuffob_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vshuffob_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshufeh,VI_ftype_VIVI,2)
+// tag : V6_vshufeh
+def int_hexagon_V6_vshufeh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vshufeh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshufeh_128B,VI_ftype_VIVI,2)
+// tag : V6_vshufeh_128B
+def int_hexagon_V6_vshufeh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vshufeh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshufoh,VI_ftype_VIVI,2)
+// tag : V6_vshufoh
+def int_hexagon_V6_vshufoh :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vshufoh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshufoh_128B,VI_ftype_VIVI,2)
+// tag : V6_vshufoh_128B
+def int_hexagon_V6_vshufoh_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vshufoh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshuffvdd,VD_ftype_VIVISI,3)
+// tag : V6_vshuffvdd
+def int_hexagon_V6_vshuffvdd :
+Hexagon_v1024v512v512i_Intrinsic<"HEXAGON_V6_vshuffvdd">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshuffvdd_128B,VD_ftype_VIVISI,3)
+// tag : V6_vshuffvdd_128B
+def int_hexagon_V6_vshuffvdd_128B :
+Hexagon_v2048v1024v1024i_Intrinsic<"HEXAGON_V6_vshuffvdd_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdealvdd,VD_ftype_VIVISI,3)
+// tag : V6_vdealvdd
+def int_hexagon_V6_vdealvdd :
+Hexagon_v1024v512v512i_Intrinsic<"HEXAGON_V6_vdealvdd">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdealvdd_128B,VD_ftype_VIVISI,3)
+// tag : V6_vdealvdd_128B
+def int_hexagon_V6_vdealvdd_128B :
+Hexagon_v2048v1024v1024i_Intrinsic<"HEXAGON_V6_vdealvdd_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshufoeh,VD_ftype_VIVI,2)
+// tag : V6_vshufoeh
+def int_hexagon_V6_vshufoeh :
+Hexagon_v1024v512v512_Intrinsic<"HEXAGON_V6_vshufoeh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshufoeh_128B,VD_ftype_VIVI,2)
+// tag : V6_vshufoeh_128B
+def int_hexagon_V6_vshufoeh_128B :
+Hexagon_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vshufoeh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshufoeb,VD_ftype_VIVI,2)
+// tag : V6_vshufoeb
+def int_hexagon_V6_vshufoeb :
+Hexagon_v1024v512v512_Intrinsic<"HEXAGON_V6_vshufoeb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshufoeb_128B,VD_ftype_VIVI,2)
+// tag : V6_vshufoeb_128B
+def int_hexagon_V6_vshufoeb_128B :
+Hexagon_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vshufoeb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdealh,VI_ftype_VI,1)
+// tag : V6_vdealh
+def int_hexagon_V6_vdealh :
+Hexagon_v512v512_Intrinsic<"HEXAGON_V6_vdealh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdealh_128B,VI_ftype_VI,1)
+// tag : V6_vdealh_128B
+def int_hexagon_V6_vdealh_128B :
+Hexagon_v1024v1024_Intrinsic<"HEXAGON_V6_vdealh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdealb,VI_ftype_VI,1)
+// tag : V6_vdealb
+def int_hexagon_V6_vdealb :
+Hexagon_v512v512_Intrinsic<"HEXAGON_V6_vdealb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdealb_128B,VI_ftype_VI,1)
+// tag : V6_vdealb_128B
+def int_hexagon_V6_vdealb_128B :
+Hexagon_v1024v1024_Intrinsic<"HEXAGON_V6_vdealb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdealb4w,VI_ftype_VIVI,2)
+// tag : V6_vdealb4w
+def int_hexagon_V6_vdealb4w :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vdealb4w">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdealb4w_128B,VI_ftype_VIVI,2)
+// tag : V6_vdealb4w_128B
+def int_hexagon_V6_vdealb4w_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vdealb4w_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshuffh,VI_ftype_VI,1)
+// tag : V6_vshuffh
+def int_hexagon_V6_vshuffh :
+Hexagon_v512v512_Intrinsic<"HEXAGON_V6_vshuffh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshuffh_128B,VI_ftype_VI,1)
+// tag : V6_vshuffh_128B
+def int_hexagon_V6_vshuffh_128B :
+Hexagon_v1024v1024_Intrinsic<"HEXAGON_V6_vshuffh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshuffb,VI_ftype_VI,1)
+// tag : V6_vshuffb
+def int_hexagon_V6_vshuffb :
+Hexagon_v512v512_Intrinsic<"HEXAGON_V6_vshuffb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vshuffb_128B,VI_ftype_VI,1)
+// tag : V6_vshuffb_128B
+def int_hexagon_V6_vshuffb_128B :
+Hexagon_v1024v1024_Intrinsic<"HEXAGON_V6_vshuffb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_extractw,SI_ftype_VISI,2)
+// tag : V6_extractw
+def int_hexagon_V6_extractw :
+Hexagon_iv512i_Intrinsic<"HEXAGON_V6_extractw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_extractw_128B,SI_ftype_VISI,2)
+// tag : V6_extractw_128B
+def int_hexagon_V6_extractw_128B :
+Hexagon_iv1024i_Intrinsic<"HEXAGON_V6_extractw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vinsertwr,VI_ftype_VISI,2)
+// tag : V6_vinsertwr
+def int_hexagon_V6_vinsertwr :
+Hexagon_v512v512i_Intrinsic<"HEXAGON_V6_vinsertwr">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vinsertwr_128B,VI_ftype_VISI,2)
+// tag : V6_vinsertwr_128B
+def int_hexagon_V6_vinsertwr_128B :
+Hexagon_v1024v1024i_Intrinsic<"HEXAGON_V6_vinsertwr_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_lvsplatw,VI_ftype_SI,1)
+// tag : V6_lvsplatw
+def int_hexagon_V6_lvsplatw :
+Hexagon_v512i_Intrinsic<"HEXAGON_V6_lvsplatw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_lvsplatw_128B,VI_ftype_SI,1)
+// tag : V6_lvsplatw_128B
+def int_hexagon_V6_lvsplatw_128B :
+Hexagon_v1024i_Intrinsic<"HEXAGON_V6_lvsplatw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vassign,VI_ftype_VI,1)
+// tag : V6_vassign
+def int_hexagon_V6_vassign :
+Hexagon_v512v512_Intrinsic<"HEXAGON_V6_vassign">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vassign_128B,VI_ftype_VI,1)
+// tag : V6_vassign_128B
+def int_hexagon_V6_vassign_128B :
+Hexagon_v1024v1024_Intrinsic<"HEXAGON_V6_vassign_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vcombine,VD_ftype_VIVI,2)
+// tag : V6_vcombine
+def int_hexagon_V6_vcombine :
+Hexagon_v1024v512v512_Intrinsic<"HEXAGON_V6_vcombine">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vcombine_128B,VD_ftype_VIVI,2)
+// tag : V6_vcombine_128B
+def int_hexagon_V6_vcombine_128B :
+Hexagon_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vcombine_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdelta,VI_ftype_VIVI,2)
+// tag : V6_vdelta
+def int_hexagon_V6_vdelta :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vdelta">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdelta_128B,VI_ftype_VIVI,2)
+// tag : V6_vdelta_128B
+def int_hexagon_V6_vdelta_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vdelta_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrdelta,VI_ftype_VIVI,2)
+// tag : V6_vrdelta
+def int_hexagon_V6_vrdelta :
+Hexagon_v512v512v512_Intrinsic<"HEXAGON_V6_vrdelta">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrdelta_128B,VI_ftype_VIVI,2)
+// tag : V6_vrdelta_128B
+def int_hexagon_V6_vrdelta_128B :
+Hexagon_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vrdelta_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vcl0w,VI_ftype_VI,1)
+// tag : V6_vcl0w
+def int_hexagon_V6_vcl0w :
+Hexagon_v512v512_Intrinsic<"HEXAGON_V6_vcl0w">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vcl0w_128B,VI_ftype_VI,1)
+// tag : V6_vcl0w_128B
+def int_hexagon_V6_vcl0w_128B :
+Hexagon_v1024v1024_Intrinsic<"HEXAGON_V6_vcl0w_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vcl0h,VI_ftype_VI,1)
+// tag : V6_vcl0h
+def int_hexagon_V6_vcl0h :
+Hexagon_v512v512_Intrinsic<"HEXAGON_V6_vcl0h">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vcl0h_128B,VI_ftype_VI,1)
+// tag : V6_vcl0h_128B
+def int_hexagon_V6_vcl0h_128B :
+Hexagon_v1024v1024_Intrinsic<"HEXAGON_V6_vcl0h_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vnormamtw,VI_ftype_VI,1)
+// tag : V6_vnormamtw
+def int_hexagon_V6_vnormamtw :
+Hexagon_v512v512_Intrinsic<"HEXAGON_V6_vnormamtw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vnormamtw_128B,VI_ftype_VI,1)
+// tag : V6_vnormamtw_128B
+def int_hexagon_V6_vnormamtw_128B :
+Hexagon_v1024v1024_Intrinsic<"HEXAGON_V6_vnormamtw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vnormamth,VI_ftype_VI,1)
+// tag : V6_vnormamth
+def int_hexagon_V6_vnormamth :
+Hexagon_v512v512_Intrinsic<"HEXAGON_V6_vnormamth">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vnormamth_128B,VI_ftype_VI,1)
+// tag : V6_vnormamth_128B
+def int_hexagon_V6_vnormamth_128B :
+Hexagon_v1024v1024_Intrinsic<"HEXAGON_V6_vnormamth_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpopcounth,VI_ftype_VI,1)
+// tag : V6_vpopcounth
+def int_hexagon_V6_vpopcounth :
+Hexagon_v512v512_Intrinsic<"HEXAGON_V6_vpopcounth">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vpopcounth_128B,VI_ftype_VI,1)
+// tag : V6_vpopcounth_128B
+def int_hexagon_V6_vpopcounth_128B :
+Hexagon_v1024v1024_Intrinsic<"HEXAGON_V6_vpopcounth_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvvb,VI_ftype_VIVISI,3)
+// tag : V6_vlutvvb
+def int_hexagon_V6_vlutvvb :
+Hexagon_v512v512v512i_Intrinsic<"HEXAGON_V6_vlutvvb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvvb_128B,VI_ftype_VIVISI,3)
+// tag : V6_vlutvvb_128B
+def int_hexagon_V6_vlutvvb_128B :
+Hexagon_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vlutvvb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvvb_oracc,VI_ftype_VIVIVISI,4)
+// tag : V6_vlutvvb_oracc
+def int_hexagon_V6_vlutvvb_oracc :
+Hexagon_v512v512v512v512i_Intrinsic<"HEXAGON_V6_vlutvvb_oracc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvvb_oracc_128B,VI_ftype_VIVIVISI,4)
+// tag : V6_vlutvvb_oracc_128B
+def int_hexagon_V6_vlutvvb_oracc_128B :
+Hexagon_v1024v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vlutvvb_oracc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvwh,VD_ftype_VIVISI,3)
+// tag : V6_vlutvwh
+def int_hexagon_V6_vlutvwh :
+Hexagon_v1024v512v512i_Intrinsic<"HEXAGON_V6_vlutvwh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvwh_128B,VD_ftype_VIVISI,3)
+// tag : V6_vlutvwh_128B
+def int_hexagon_V6_vlutvwh_128B :
+Hexagon_v2048v1024v1024i_Intrinsic<"HEXAGON_V6_vlutvwh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvwh_oracc,VD_ftype_VDVIVISI,4)
+// tag : V6_vlutvwh_oracc
+def int_hexagon_V6_vlutvwh_oracc :
+Hexagon_v1024v1024v512v512i_Intrinsic<"HEXAGON_V6_vlutvwh_oracc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvwh_oracc_128B,VD_ftype_VDVIVISI,4)
+// tag : V6_vlutvwh_oracc_128B
+def int_hexagon_V6_vlutvwh_oracc_128B :
+Hexagon_v2048v2048v1024v1024i_Intrinsic<"HEXAGON_V6_vlutvwh_oracc_128B">;
+
+//
+// Masked vector stores
+//
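+// Each store comes in predicated (qpred) and negated-predicate (nqpred)
+// forms, plus non-temporal (nt) variants, in both the 64-byte and the
+// 128-byte (_128B) HVX modes. As the class names suggest, they take a vector
+// predicate, the destination pointer and the vector to store, and return
+// nothing.
+//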
+def int_hexagon_V6_vS32b_qpred_ai :
+Hexagon_vv64ivmemv512_Intrinsic<"HEXAGON_V6_vS32b_qpred_ai">;
+
+def int_hexagon_V6_vS32b_nqpred_ai :
+Hexagon_vv64ivmemv512_Intrinsic<"HEXAGON_V6_vS32b_nqpred_ai">;
+
+def int_hexagon_V6_vS32b_nt_qpred_ai :
+Hexagon_vv64ivmemv512_Intrinsic<"HEXAGON_V6_vS32b_nt_qpred_ai">;
+
+def int_hexagon_V6_vS32b_nt_nqpred_ai :
+Hexagon_vv64ivmemv512_Intrinsic<"HEXAGON_V6_vS32b_nt_nqpred_ai">;
+
+def int_hexagon_V6_vS32b_qpred_ai_128B :
+Hexagon_vv128ivmemv1024_Intrinsic<"HEXAGON_V6_vS32b_qpred_ai_128B">;
+
+def int_hexagon_V6_vS32b_nqpred_ai_128B :
+Hexagon_vv128ivmemv1024_Intrinsic<"HEXAGON_V6_vS32b_nqpred_ai_128B">;
+
+def int_hexagon_V6_vS32b_nt_qpred_ai_128B :
+Hexagon_vv128ivmemv1024_Intrinsic<"HEXAGON_V6_vS32b_nt_qpred_ai_128B">;
+
+def int_hexagon_V6_vS32b_nt_nqpred_ai_128B :
+Hexagon_vv128ivmemv1024_Intrinsic<"HEXAGON_V6_vS32b_nt_nqpred_ai_128B">;
+
+def int_hexagon_V6_vmaskedstoreq :
+Hexagon_vv64ivmemv512_Intrinsic<"HEXAGON_V6_vmaskedstoreq">;
+
+def int_hexagon_V6_vmaskedstorenq :
+Hexagon_vv64ivmemv512_Intrinsic<"HEXAGON_V6_vmaskedstorenq">;
+
+def int_hexagon_V6_vmaskedstorentq :
+Hexagon_vv64ivmemv512_Intrinsic<"HEXAGON_V6_vmaskedstorentq">;
+
+def int_hexagon_V6_vmaskedstorentnq :
+Hexagon_vv64ivmemv512_Intrinsic<"HEXAGON_V6_vmaskedstorentnq">;
+
+def int_hexagon_V6_vmaskedstoreq_128B :
+Hexagon_vv128ivmemv1024_Intrinsic<"HEXAGON_V6_vmaskedstoreq_128B">;
+
+def int_hexagon_V6_vmaskedstorenq_128B :
+Hexagon_vv128ivmemv1024_Intrinsic<"HEXAGON_V6_vmaskedstorenq_128B">;
+
+def int_hexagon_V6_vmaskedstorentq_128B :
+Hexagon_vv128ivmemv1024_Intrinsic<"HEXAGON_V6_vmaskedstorentq_128B">;
+
+def int_hexagon_V6_vmaskedstorentnq_128B :
+Hexagon_vv128ivmemv1024_Intrinsic<"HEXAGON_V6_vmaskedstorentnq_128B">;
+
+multiclass Hexagon_custom_circ_ld_Intrinsic<LLVMType ElTy> {
+ def NAME#_pci : Hexagon_NonGCC_Intrinsic<
+ [ElTy, llvm_ptr_ty],
+ [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty, llvm_ptr_ty],
+ [IntrArgMemOnly, NoCapture<3>]>;
+ def NAME#_pcr : Hexagon_NonGCC_Intrinsic<
+ [ElTy, llvm_ptr_ty], [llvm_ptr_ty, llvm_i32_ty, llvm_ptr_ty],
+ [IntrArgMemOnly, NoCapture<2>]>;
+}
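+
+// Each defm below expands into a NAME_pci and a NAME_pcr intrinsic, matching
+// the ISA's post-increment circular addressing modes with an immediate or an
+// M-register increment. Both variants return the loaded element together with
+// the updated address (the [ElTy, llvm_ptr_ty] result list); IntrArgMemOnly
+// and NoCapture on the trailing pointer limit their memory effects to the
+// pointer arguments.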
+
+defm int_hexagon_L2_loadrub : Hexagon_custom_circ_ld_Intrinsic<llvm_i32_ty>;
+defm int_hexagon_L2_loadrb : Hexagon_custom_circ_ld_Intrinsic<llvm_i32_ty>;
+defm int_hexagon_L2_loadruh : Hexagon_custom_circ_ld_Intrinsic<llvm_i32_ty>;
+defm int_hexagon_L2_loadrh : Hexagon_custom_circ_ld_Intrinsic<llvm_i32_ty>;
+defm int_hexagon_L2_loadri : Hexagon_custom_circ_ld_Intrinsic<llvm_i32_ty>;
+defm int_hexagon_L2_loadrd : Hexagon_custom_circ_ld_Intrinsic<llvm_i64_ty>;
+
+multiclass Hexagon_custom_circ_st_Intrinsic<LLVMType ElTy> {
+ def NAME#_pci : Hexagon_NonGCC_Intrinsic<
+ [llvm_ptr_ty],
+ [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty, ElTy, llvm_ptr_ty],
+ [IntrArgMemOnly, NoCapture<4>]>;
+ def NAME#_pcr : Hexagon_NonGCC_Intrinsic<
+ [llvm_ptr_ty], [llvm_ptr_ty, llvm_i32_ty, ElTy, llvm_ptr_ty],
+ [IntrArgMemOnly, NoCapture<3>]>;
+}
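+
+// The store counterparts return only the updated address; the value to store
+// is passed as the ElTy operand immediately before the trailing pointer.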
+
+defm int_hexagon_S2_storerb : Hexagon_custom_circ_st_Intrinsic<llvm_i32_ty>;
+defm int_hexagon_S2_storerh : Hexagon_custom_circ_st_Intrinsic<llvm_i32_ty>;
+defm int_hexagon_S2_storerf : Hexagon_custom_circ_st_Intrinsic<llvm_i32_ty>;
+defm int_hexagon_S2_storeri : Hexagon_custom_circ_st_Intrinsic<llvm_i32_ty>;
+defm int_hexagon_S2_storerd : Hexagon_custom_circ_st_Intrinsic<llvm_i64_ty>;
+
+// The front-end emits the intrinsic call with only two arguments. The third
+// argument of the builtin is already used by the front-end to write to memory
+// by generating a store.
+class Hexagon_custom_brev_ld_Intrinsic<LLVMType ElTy>
+ : Hexagon_NonGCC_Intrinsic<
+ [ElTy, llvm_ptr_ty], [llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadMem]>;
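+
+// In IR terms this means the intrinsic receives just a pointer and an i32
+// operand and produces two results, the loaded value and a pointer (the
+// updated address); the builtin's third argument stays in the front-end,
+// which uses it for the store it generates.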
+
+def int_hexagon_L2_loadrub_pbr : Hexagon_custom_brev_ld_Intrinsic<llvm_i32_ty>;
+def int_hexagon_L2_loadrb_pbr : Hexagon_custom_brev_ld_Intrinsic<llvm_i32_ty>;
+def int_hexagon_L2_loadruh_pbr : Hexagon_custom_brev_ld_Intrinsic<llvm_i32_ty>;
+def int_hexagon_L2_loadrh_pbr : Hexagon_custom_brev_ld_Intrinsic<llvm_i32_ty>;
+def int_hexagon_L2_loadri_pbr : Hexagon_custom_brev_ld_Intrinsic<llvm_i32_ty>;
+def int_hexagon_L2_loadrd_pbr : Hexagon_custom_brev_ld_Intrinsic<llvm_i64_ty>;
+
+def int_hexagon_S2_storerb_pbr : Hexagon_mem_memsisi_Intrinsic<"brev_stb">;
+def int_hexagon_S2_storerh_pbr : Hexagon_mem_memsisi_Intrinsic<"brev_sth">;
+def int_hexagon_S2_storerf_pbr : Hexagon_mem_memsisi_Intrinsic<"brev_sthhi">;
+def int_hexagon_S2_storeri_pbr : Hexagon_mem_memsisi_Intrinsic<"brev_stw">;
+def int_hexagon_S2_storerd_pbr : Hexagon_mem_memdisi_Intrinsic<"brev_std">;
+
+
+///
+/// HexagonV62 intrinsics
+///
+
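+// In the class names below, v512 stands for a 16 x i32 HVX vector
+// (llvm_v16i32_ty), v1024 for 32 x i32, v2048 for 64 x i32, v64i and v128i
+// for the 512-bit and 1024-bit i1 predicate vectors (llvm_v512i1_ty and
+// llvm_v1024i1_ty), i for i32 and LLi for i64; the first element is the
+// return type and the rest are the operand types.
+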
+//
+// Hexagon_LLiLLiLLi_Intrinsic<string GCCIntSuffix>
+// tag : M6_vabsdiffb
+class Hexagon_LLiLLiLLi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i64_ty,llvm_i64_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_LLii_Intrinsic<string GCCIntSuffix>
+// tag : S6_vsplatrbp
+class Hexagon_LLii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i64_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v512v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vlsrb
+class Hexagon_V62_v512v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v1024v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vlsrb_128B
+class Hexagon_V62_v1024v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v512v512v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vasrwuhrndsat
+class Hexagon_V62_v512v512v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v1024v1024v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vasrwuhrndsat_128B
+class Hexagon_V62_v1024v1024v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v512v512v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vrounduwuh
+class Hexagon_V62_v512v512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v1024v1024v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vrounduwuh_128B
+class Hexagon_V62_v1024v1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v2048v2048v2048_Intrinsic<string GCCIntSuffix>
+// tag : V6_vadduwsat_dv_128B
+class Hexagon_V62_v2048v2048v2048_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v64i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v1024v1024v512v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vaddhw_acc
+class Hexagon_V62_v1024v1024v512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v16i32_ty,llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v2048v2048v1024v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vaddhw_acc_128B
+class Hexagon_V62_v2048v2048v1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty,llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v1024v512v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpyewuh_64
+class Hexagon_V62_v1024v512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v2048v1024v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpyewuh_64_128B
+class Hexagon_V62_v2048v1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v2048v2048i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpauhb_128B
+class Hexagon_V62_v2048v2048i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v2048v2048v2048i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpauhb_acc_128B
+class Hexagon_V62_v2048v2048v2048i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v64i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v512v64ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_vandnqrt
+class Hexagon_V62_v512v64ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v512i1_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v1024v128ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_vandnqrt_128B
+class Hexagon_V62_v1024v128ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v1024i1_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v512v512v64ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_vandnqrt_acc
+class Hexagon_V62_v512v512v64ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v512i1_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v1024v1024v128ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_vandnqrt_acc_128B
+class Hexagon_V62_v1024v1024v128ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v1024i1_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v512v64iv512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vandvqv
+class Hexagon_V62_v512v64iv512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v512i1_ty,llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v1024v128iv1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vandvqv_128B
+class Hexagon_V62_v1024v128iv1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v1024i1_ty,llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v64ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_pred_scalar2v2
+class Hexagon_V62_v64ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v512i1_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v128ii_Intrinsic<string GCCIntSuffix>
+// tag : V6_pred_scalar2v2_128B
+class Hexagon_V62_v128ii_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v1024i1_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v64iv64iv64i_Intrinsic<string GCCIntSuffix>
+// tag : V6_shuffeqw
+class Hexagon_V62_v64iv64iv64i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v512i1_ty], [llvm_v512i1_ty,llvm_v512i1_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v128iv128iv128i_Intrinsic<string GCCIntSuffix>
+// tag : V6_shuffeqw_128B
+class Hexagon_V62_v128iv128iv128i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v1024i1_ty], [llvm_v1024i1_ty,llvm_v1024i1_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_lvsplath
+class Hexagon_V62_v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_lvsplath_128B
+class Hexagon_V62_v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v512v512v512v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vlutvvb_oracci
+class Hexagon_V62_v512v512v512v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v1024v1024v1024v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vlutvvb_oracci_128B
+class Hexagon_V62_v1024v1024v1024v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v1024v512v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vlutvwhi
+class Hexagon_V62_v1024v512v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v2048v1024v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vlutvwhi_128B
+class Hexagon_V62_v2048v1024v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v1024v1024v512v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vlutvwh_oracci
+class Hexagon_V62_v1024v1024v512v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v16i32_ty,llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V62_v2048v2048v1024v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vlutvwh_oracci_128B
+class Hexagon_V62_v2048v2048v1024v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+// Hexagon_v512v64iv512v512v64i_Intrinsic<string GCCIntSuffix>
+// tag: V6_vaddcarry
+class Hexagon_v512v64iv512v512v64i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty, llvm_v512i1_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_v512i1_ty],
+ [IntrNoMem]>;
+
+// Hexagon_v1024v128iv1024v1024v128i_Intrinsic<string GCCIntSuffix>
+// tag: V6_vaddcarry_128B
+class Hexagon_v1024v128iv1024v1024v128i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty, llvm_v1024i1_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_v1024i1_ty],
+ [IntrNoMem]>;
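+
+// The carry classes above return two values, the result vector and the
+// carry-out predicate, mirroring the carry-in predicate passed as the last
+// operand.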
+
+
+//
+// BUILTIN_INFO(HEXAGON.M6_vabsdiffb,DI_ftype_DIDI,2)
+// tag : M6_vabsdiffb
+def int_hexagon_M6_vabsdiffb :
+Hexagon_LLiLLiLLi_Intrinsic<"HEXAGON_M6_vabsdiffb">;
+
+//
+// BUILTIN_INFO(HEXAGON.M6_vabsdiffub,DI_ftype_DIDI,2)
+// tag : M6_vabsdiffub
+def int_hexagon_M6_vabsdiffub :
+Hexagon_LLiLLiLLi_Intrinsic<"HEXAGON_M6_vabsdiffub">;
+
+//
+// BUILTIN_INFO(HEXAGON.S6_vtrunehb_ppp,DI_ftype_DIDI,2)
+// tag : S6_vtrunehb_ppp
+def int_hexagon_S6_vtrunehb_ppp :
+Hexagon_LLiLLiLLi_Intrinsic<"HEXAGON_S6_vtrunehb_ppp">;
+
+//
+// BUILTIN_INFO(HEXAGON.S6_vtrunohb_ppp,DI_ftype_DIDI,2)
+// tag : S6_vtrunohb_ppp
+def int_hexagon_S6_vtrunohb_ppp :
+Hexagon_LLiLLiLLi_Intrinsic<"HEXAGON_S6_vtrunohb_ppp">;
+
+//
+// BUILTIN_INFO(HEXAGON.S6_vsplatrbp,DI_ftype_SI,1)
+// tag : S6_vsplatrbp
+def int_hexagon_S6_vsplatrbp :
+Hexagon_LLii_Intrinsic<"HEXAGON_S6_vsplatrbp">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlsrb,VI_ftype_VISI,2)
+// tag : V6_vlsrb
+def int_hexagon_V6_vlsrb :
+Hexagon_V62_v512v512i_Intrinsic<"HEXAGON_V6_vlsrb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlsrb_128B,VI_ftype_VISI,2)
+// tag : V6_vlsrb_128B
+def int_hexagon_V6_vlsrb_128B :
+Hexagon_V62_v1024v1024i_Intrinsic<"HEXAGON_V6_vlsrb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrwuhrndsat,VI_ftype_VIVISI,3)
+// tag : V6_vasrwuhrndsat
+def int_hexagon_V6_vasrwuhrndsat :
+Hexagon_V62_v512v512v512i_Intrinsic<"HEXAGON_V6_vasrwuhrndsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrwuhrndsat_128B,VI_ftype_VIVISI,3)
+// tag : V6_vasrwuhrndsat_128B
+def int_hexagon_V6_vasrwuhrndsat_128B :
+Hexagon_V62_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vasrwuhrndsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasruwuhrndsat,VI_ftype_VIVISI,3)
+// tag : V6_vasruwuhrndsat
+def int_hexagon_V6_vasruwuhrndsat :
+Hexagon_V62_v512v512v512i_Intrinsic<"HEXAGON_V6_vasruwuhrndsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasruwuhrndsat_128B,VI_ftype_VIVISI,3)
+// tag : V6_vasruwuhrndsat_128B
+def int_hexagon_V6_vasruwuhrndsat_128B :
+Hexagon_V62_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vasruwuhrndsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrhbsat,VI_ftype_VIVISI,3)
+// tag : V6_vasrhbsat
+def int_hexagon_V6_vasrhbsat :
+Hexagon_V62_v512v512v512i_Intrinsic<"HEXAGON_V6_vasrhbsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrhbsat_128B,VI_ftype_VIVISI,3)
+// tag : V6_vasrhbsat_128B
+def int_hexagon_V6_vasrhbsat_128B :
+Hexagon_V62_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vasrhbsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrounduwuh,VI_ftype_VIVI,2)
+// tag : V6_vrounduwuh
+def int_hexagon_V6_vrounduwuh :
+Hexagon_V62_v512v512v512_Intrinsic<"HEXAGON_V6_vrounduwuh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrounduwuh_128B,VI_ftype_VIVI,2)
+// tag : V6_vrounduwuh_128B
+def int_hexagon_V6_vrounduwuh_128B :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vrounduwuh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrounduhub,VI_ftype_VIVI,2)
+// tag : V6_vrounduhub
+def int_hexagon_V6_vrounduhub :
+Hexagon_V62_v512v512v512_Intrinsic<"HEXAGON_V6_vrounduhub">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrounduhub_128B,VI_ftype_VIVI,2)
+// tag : V6_vrounduhub_128B
+def int_hexagon_V6_vrounduhub_128B :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vrounduhub_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vadduwsat,VI_ftype_VIVI,2)
+// tag : V6_vadduwsat
+def int_hexagon_V6_vadduwsat :
+Hexagon_V62_v512v512v512_Intrinsic<"HEXAGON_V6_vadduwsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vadduwsat_128B,VI_ftype_VIVI,2)
+// tag : V6_vadduwsat_128B
+def int_hexagon_V6_vadduwsat_128B :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vadduwsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vadduwsat_dv,VD_ftype_VDVD,2)
+// tag : V6_vadduwsat_dv
+def int_hexagon_V6_vadduwsat_dv :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vadduwsat_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vadduwsat_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vadduwsat_dv_128B
+def int_hexagon_V6_vadduwsat_dv_128B :
+Hexagon_V62_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vadduwsat_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubuwsat,VI_ftype_VIVI,2)
+// tag : V6_vsubuwsat
+def int_hexagon_V6_vsubuwsat :
+Hexagon_V62_v512v512v512_Intrinsic<"HEXAGON_V6_vsubuwsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubuwsat_128B,VI_ftype_VIVI,2)
+// tag : V6_vsubuwsat_128B
+def int_hexagon_V6_vsubuwsat_128B :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubuwsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubuwsat_dv,VD_ftype_VDVD,2)
+// tag : V6_vsubuwsat_dv
+def int_hexagon_V6_vsubuwsat_dv :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubuwsat_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubuwsat_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vsubuwsat_dv_128B
+def int_hexagon_V6_vsubuwsat_dv_128B :
+Hexagon_V62_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vsubuwsat_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddbsat,VI_ftype_VIVI,2)
+// tag : V6_vaddbsat
+def int_hexagon_V6_vaddbsat :
+Hexagon_V62_v512v512v512_Intrinsic<"HEXAGON_V6_vaddbsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddbsat_128B,VI_ftype_VIVI,2)
+// tag : V6_vaddbsat_128B
+def int_hexagon_V6_vaddbsat_128B :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddbsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddbsat_dv,VD_ftype_VDVD,2)
+// tag : V6_vaddbsat_dv
+def int_hexagon_V6_vaddbsat_dv :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddbsat_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddbsat_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vaddbsat_dv_128B
+def int_hexagon_V6_vaddbsat_dv_128B :
+Hexagon_V62_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vaddbsat_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubbsat,VI_ftype_VIVI,2)
+// tag : V6_vsubbsat
+def int_hexagon_V6_vsubbsat :
+Hexagon_V62_v512v512v512_Intrinsic<"HEXAGON_V6_vsubbsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubbsat_128B,VI_ftype_VIVI,2)
+// tag : V6_vsubbsat_128B
+def int_hexagon_V6_vsubbsat_128B :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubbsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubbsat_dv,VD_ftype_VDVD,2)
+// tag : V6_vsubbsat_dv
+def int_hexagon_V6_vsubbsat_dv :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubbsat_dv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubbsat_dv_128B,VD_ftype_VDVD,2)
+// tag : V6_vsubbsat_dv_128B
+def int_hexagon_V6_vsubbsat_dv_128B :
+Hexagon_V62_v2048v2048v2048_Intrinsic<"HEXAGON_V6_vsubbsat_dv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddububb_sat,VI_ftype_VIVI,2)
+// tag : V6_vaddububb_sat
+def int_hexagon_V6_vaddububb_sat :
+Hexagon_V62_v512v512v512_Intrinsic<"HEXAGON_V6_vaddububb_sat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddububb_sat_128B,VI_ftype_VIVI,2)
+// tag : V6_vaddububb_sat_128B
+def int_hexagon_V6_vaddububb_sat_128B :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddububb_sat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubububb_sat,VI_ftype_VIVI,2)
+// tag : V6_vsubububb_sat
+def int_hexagon_V6_vsubububb_sat :
+Hexagon_V62_v512v512v512_Intrinsic<"HEXAGON_V6_vsubububb_sat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubububb_sat_128B,VI_ftype_VIVI,2)
+// tag : V6_vsubububb_sat_128B
+def int_hexagon_V6_vsubububb_sat_128B :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsubububb_sat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddhw_acc,VD_ftype_VDVIVI,3)
+// tag : V6_vaddhw_acc
+def int_hexagon_V6_vaddhw_acc :
+Hexagon_V62_v1024v1024v512v512_Intrinsic<"HEXAGON_V6_vaddhw_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddhw_acc_128B,VD_ftype_VDVIVI,3)
+// tag : V6_vaddhw_acc_128B
+def int_hexagon_V6_vaddhw_acc_128B :
+Hexagon_V62_v2048v2048v1024v1024_Intrinsic<"HEXAGON_V6_vaddhw_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vadduhw_acc,VD_ftype_VDVIVI,3)
+// tag : V6_vadduhw_acc
+def int_hexagon_V6_vadduhw_acc :
+Hexagon_V62_v1024v1024v512v512_Intrinsic<"HEXAGON_V6_vadduhw_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vadduhw_acc_128B,VD_ftype_VDVIVI,3)
+// tag : V6_vadduhw_acc_128B
+def int_hexagon_V6_vadduhw_acc_128B :
+Hexagon_V62_v2048v2048v1024v1024_Intrinsic<"HEXAGON_V6_vadduhw_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddubh_acc,VD_ftype_VDVIVI,3)
+// tag : V6_vaddubh_acc
+def int_hexagon_V6_vaddubh_acc :
+Hexagon_V62_v1024v1024v512v512_Intrinsic<"HEXAGON_V6_vaddubh_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddubh_acc_128B,VD_ftype_VDVIVI,3)
+// tag : V6_vaddubh_acc_128B
+def int_hexagon_V6_vaddubh_acc_128B :
+Hexagon_V62_v2048v2048v1024v1024_Intrinsic<"HEXAGON_V6_vaddubh_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyewuh_64,VD_ftype_VIVI,2)
+// tag : V6_vmpyewuh_64
+def int_hexagon_V6_vmpyewuh_64 :
+Hexagon_V62_v1024v512v512_Intrinsic<"HEXAGON_V6_vmpyewuh_64">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyewuh_64_128B,VD_ftype_VIVI,2)
+// tag : V6_vmpyewuh_64_128B
+def int_hexagon_V6_vmpyewuh_64_128B :
+Hexagon_V62_v2048v1024v1024_Intrinsic<"HEXAGON_V6_vmpyewuh_64_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyowh_64_acc,VD_ftype_VDVIVI,3)
+// tag : V6_vmpyowh_64_acc
+def int_hexagon_V6_vmpyowh_64_acc :
+Hexagon_V62_v1024v1024v512v512_Intrinsic<"HEXAGON_V6_vmpyowh_64_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyowh_64_acc_128B,VD_ftype_VDVIVI,3)
+// tag : V6_vmpyowh_64_acc_128B
+def int_hexagon_V6_vmpyowh_64_acc_128B :
+Hexagon_V62_v2048v2048v1024v1024_Intrinsic<"HEXAGON_V6_vmpyowh_64_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpauhb,VD_ftype_VDSI,2)
+// tag : V6_vmpauhb
+def int_hexagon_V6_vmpauhb :
+Hexagon_V62_v1024v1024i_Intrinsic<"HEXAGON_V6_vmpauhb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpauhb_128B,VD_ftype_VDSI,2)
+// tag : V6_vmpauhb_128B
+def int_hexagon_V6_vmpauhb_128B :
+Hexagon_V62_v2048v2048i_Intrinsic<"HEXAGON_V6_vmpauhb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpauhb_acc,VD_ftype_VDVDSI,3)
+// tag : V6_vmpauhb_acc
+def int_hexagon_V6_vmpauhb_acc :
+Hexagon_V62_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vmpauhb_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpauhb_acc_128B,VD_ftype_VDVDSI,3)
+// tag : V6_vmpauhb_acc_128B
+def int_hexagon_V6_vmpauhb_acc_128B :
+Hexagon_V62_v2048v2048v2048i_Intrinsic<"HEXAGON_V6_vmpauhb_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiwub,VI_ftype_VISI,2)
+// tag : V6_vmpyiwub
+def int_hexagon_V6_vmpyiwub :
+Hexagon_V62_v512v512i_Intrinsic<"HEXAGON_V6_vmpyiwub">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiwub_128B,VI_ftype_VISI,2)
+// tag : V6_vmpyiwub_128B
+def int_hexagon_V6_vmpyiwub_128B :
+Hexagon_V62_v1024v1024i_Intrinsic<"HEXAGON_V6_vmpyiwub_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiwub_acc,VI_ftype_VIVISI,3)
+// tag : V6_vmpyiwub_acc
+def int_hexagon_V6_vmpyiwub_acc :
+Hexagon_V62_v512v512v512i_Intrinsic<"HEXAGON_V6_vmpyiwub_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyiwub_acc_128B,VI_ftype_VIVISI,3)
+// tag : V6_vmpyiwub_acc_128B
+def int_hexagon_V6_vmpyiwub_acc_128B :
+Hexagon_V62_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vmpyiwub_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandnqrt,VI_ftype_QVSI,2)
+// tag : V6_vandnqrt
+def int_hexagon_V6_vandnqrt :
+Hexagon_V62_v512v64ii_Intrinsic<"HEXAGON_V6_vandnqrt">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandnqrt_128B,VI_ftype_QVSI,2)
+// tag : V6_vandnqrt_128B
+def int_hexagon_V6_vandnqrt_128B :
+Hexagon_V62_v1024v128ii_Intrinsic<"HEXAGON_V6_vandnqrt_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandnqrt_acc,VI_ftype_VIQVSI,3)
+// tag : V6_vandnqrt_acc
+def int_hexagon_V6_vandnqrt_acc :
+Hexagon_V62_v512v512v64ii_Intrinsic<"HEXAGON_V6_vandnqrt_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandnqrt_acc_128B,VI_ftype_VIQVSI,3)
+// tag : V6_vandnqrt_acc_128B
+def int_hexagon_V6_vandnqrt_acc_128B :
+Hexagon_V62_v1024v1024v128ii_Intrinsic<"HEXAGON_V6_vandnqrt_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandvqv,VI_ftype_QVVI,2)
+// tag : V6_vandvqv
+def int_hexagon_V6_vandvqv :
+Hexagon_V62_v512v64iv512_Intrinsic<"HEXAGON_V6_vandvqv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandvqv_128B,VI_ftype_QVVI,2)
+// tag : V6_vandvqv_128B
+def int_hexagon_V6_vandvqv_128B :
+Hexagon_V62_v1024v128iv1024_Intrinsic<"HEXAGON_V6_vandvqv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandvnqv,VI_ftype_QVVI,2)
+// tag : V6_vandvnqv
+def int_hexagon_V6_vandvnqv :
+Hexagon_V62_v512v64iv512_Intrinsic<"HEXAGON_V6_vandvnqv">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vandvnqv_128B,VI_ftype_QVVI,2)
+// tag : V6_vandvnqv_128B
+def int_hexagon_V6_vandvnqv_128B :
+Hexagon_V62_v1024v128iv1024_Intrinsic<"HEXAGON_V6_vandvnqv_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_scalar2v2,QV_ftype_SI,1)
+// tag : V6_pred_scalar2v2
+def int_hexagon_V6_pred_scalar2v2 :
+Hexagon_V62_v64ii_Intrinsic<"HEXAGON_V6_pred_scalar2v2">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_pred_scalar2v2_128B,QV_ftype_SI,1)
+// tag : V6_pred_scalar2v2_128B
+def int_hexagon_V6_pred_scalar2v2_128B :
+Hexagon_V62_v128ii_Intrinsic<"HEXAGON_V6_pred_scalar2v2_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_shuffeqw,QV_ftype_QVQV,2)
+// tag : V6_shuffeqw
+def int_hexagon_V6_shuffeqw :
+Hexagon_V62_v64iv64iv64i_Intrinsic<"HEXAGON_V6_shuffeqw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_shuffeqw_128B,QV_ftype_QVQV,2)
+// tag : V6_shuffeqw_128B
+def int_hexagon_V6_shuffeqw_128B :
+Hexagon_V62_v128iv128iv128i_Intrinsic<"HEXAGON_V6_shuffeqw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_shuffeqh,QV_ftype_QVQV,2)
+// tag : V6_shuffeqh
+def int_hexagon_V6_shuffeqh :
+Hexagon_V62_v64iv64iv64i_Intrinsic<"HEXAGON_V6_shuffeqh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_shuffeqh_128B,QV_ftype_QVQV,2)
+// tag : V6_shuffeqh_128B
+def int_hexagon_V6_shuffeqh_128B :
+Hexagon_V62_v128iv128iv128i_Intrinsic<"HEXAGON_V6_shuffeqh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmaxb,VI_ftype_VIVI,2)
+// tag : V6_vmaxb
+def int_hexagon_V6_vmaxb :
+Hexagon_V62_v512v512v512_Intrinsic<"HEXAGON_V6_vmaxb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmaxb_128B,VI_ftype_VIVI,2)
+// tag : V6_vmaxb_128B
+def int_hexagon_V6_vmaxb_128B :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vmaxb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vminb,VI_ftype_VIVI,2)
+// tag : V6_vminb
+def int_hexagon_V6_vminb :
+Hexagon_V62_v512v512v512_Intrinsic<"HEXAGON_V6_vminb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vminb_128B,VI_ftype_VIVI,2)
+// tag : V6_vminb_128B
+def int_hexagon_V6_vminb_128B :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vminb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsatuwuh,VI_ftype_VIVI,2)
+// tag : V6_vsatuwuh
+def int_hexagon_V6_vsatuwuh :
+Hexagon_V62_v512v512v512_Intrinsic<"HEXAGON_V6_vsatuwuh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsatuwuh_128B,VI_ftype_VIVI,2)
+// tag : V6_vsatuwuh_128B
+def int_hexagon_V6_vsatuwuh_128B :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vsatuwuh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_lvsplath,VI_ftype_SI,1)
+// tag : V6_lvsplath
+def int_hexagon_V6_lvsplath :
+Hexagon_V62_v512i_Intrinsic<"HEXAGON_V6_lvsplath">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_lvsplath_128B,VI_ftype_SI,1)
+// tag : V6_lvsplath_128B
+def int_hexagon_V6_lvsplath_128B :
+Hexagon_V62_v1024i_Intrinsic<"HEXAGON_V6_lvsplath_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_lvsplatb,VI_ftype_SI,1)
+// tag : V6_lvsplatb
+def int_hexagon_V6_lvsplatb :
+Hexagon_V62_v512i_Intrinsic<"HEXAGON_V6_lvsplatb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_lvsplatb_128B,VI_ftype_SI,1)
+// tag : V6_lvsplatb_128B
+def int_hexagon_V6_lvsplatb_128B :
+Hexagon_V62_v1024i_Intrinsic<"HEXAGON_V6_lvsplatb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddclbw,VI_ftype_VIVI,2)
+// tag : V6_vaddclbw
+def int_hexagon_V6_vaddclbw :
+Hexagon_V62_v512v512v512_Intrinsic<"HEXAGON_V6_vaddclbw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddclbw_128B,VI_ftype_VIVI,2)
+// tag : V6_vaddclbw_128B
+def int_hexagon_V6_vaddclbw_128B :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddclbw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddclbh,VI_ftype_VIVI,2)
+// tag : V6_vaddclbh
+def int_hexagon_V6_vaddclbh :
+Hexagon_V62_v512v512v512_Intrinsic<"HEXAGON_V6_vaddclbh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddclbh_128B,VI_ftype_VIVI,2)
+// tag : V6_vaddclbh_128B
+def int_hexagon_V6_vaddclbh_128B :
+Hexagon_V62_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vaddclbh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvvbi,VI_ftype_VIVISI,3)
+// tag : V6_vlutvvbi
+def int_hexagon_V6_vlutvvbi :
+Hexagon_V62_v512v512v512i_Intrinsic<"HEXAGON_V6_vlutvvbi">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvvbi_128B,VI_ftype_VIVISI,3)
+// tag : V6_vlutvvbi_128B
+def int_hexagon_V6_vlutvvbi_128B :
+Hexagon_V62_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vlutvvbi_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvvb_oracci,VI_ftype_VIVIVISI,4)
+// tag : V6_vlutvvb_oracci
+def int_hexagon_V6_vlutvvb_oracci :
+Hexagon_V62_v512v512v512v512i_Intrinsic<"HEXAGON_V6_vlutvvb_oracci">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvvb_oracci_128B,VI_ftype_VIVIVISI,4)
+// tag : V6_vlutvvb_oracci_128B
+def int_hexagon_V6_vlutvvb_oracci_128B :
+Hexagon_V62_v1024v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vlutvvb_oracci_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvwhi,VD_ftype_VIVISI,3)
+// tag : V6_vlutvwhi
+def int_hexagon_V6_vlutvwhi :
+Hexagon_V62_v1024v512v512i_Intrinsic<"HEXAGON_V6_vlutvwhi">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvwhi_128B,VD_ftype_VIVISI,3)
+// tag : V6_vlutvwhi_128B
+def int_hexagon_V6_vlutvwhi_128B :
+Hexagon_V62_v2048v1024v1024i_Intrinsic<"HEXAGON_V6_vlutvwhi_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvwh_oracci,VD_ftype_VDVIVISI,4)
+// tag : V6_vlutvwh_oracci
+def int_hexagon_V6_vlutvwh_oracci :
+Hexagon_V62_v1024v1024v512v512i_Intrinsic<"HEXAGON_V6_vlutvwh_oracci">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvwh_oracci_128B,VD_ftype_VDVIVISI,4)
+// tag : V6_vlutvwh_oracci_128B
+def int_hexagon_V6_vlutvwh_oracci_128B :
+Hexagon_V62_v2048v2048v1024v1024i_Intrinsic<"HEXAGON_V6_vlutvwh_oracci_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvvb_nm,VI_ftype_VIVISI,3)
+// tag : V6_vlutvvb_nm
+def int_hexagon_V6_vlutvvb_nm :
+Hexagon_V62_v512v512v512i_Intrinsic<"HEXAGON_V6_vlutvvb_nm">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvvb_nm_128B,VI_ftype_VIVISI,3)
+// tag : V6_vlutvvb_nm_128B
+def int_hexagon_V6_vlutvvb_nm_128B :
+Hexagon_V62_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vlutvvb_nm_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvwh_nm,VD_ftype_VIVISI,3)
+// tag : V6_vlutvwh_nm
+def int_hexagon_V6_vlutvwh_nm :
+Hexagon_V62_v1024v512v512i_Intrinsic<"HEXAGON_V6_vlutvwh_nm">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlutvwh_nm_128B,VD_ftype_VIVISI,3)
+// tag : V6_vlutvwh_nm_128B
+def int_hexagon_V6_vlutvwh_nm_128B :
+Hexagon_V62_v2048v1024v1024i_Intrinsic<"HEXAGON_V6_vlutvwh_nm_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddcarry,VI_ftype_VIVIQV,3)
+// tag: V6_vaddcarry
+def int_hexagon_V6_vaddcarry :
+Hexagon_v512v64iv512v512v64i_Intrinsic<"HEXAGON_V6_vaddcarry">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaddcarry_128B,VI_ftype_VIVIQV,3)
+// tag: V6_vaddcarry_128B
+def int_hexagon_V6_vaddcarry_128B :
+Hexagon_v1024v128iv1024v1024v128i_Intrinsic<"HEXAGON_V6_vaddcarry_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubcarry,VI_ftype_VIVIQV,3)
+// tag: V6_vsubcarry
+def int_hexagon_V6_vsubcarry :
+Hexagon_v512v64iv512v512v64i_Intrinsic<"HEXAGON_V6_vsubcarry">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vsubcarry_128B,VI_ftype_VIVIQV,3)
+// tag: V6_vsubcarry_128B
+def int_hexagon_V6_vsubcarry_128B :
+Hexagon_v1024v128iv1024v1024v128i_Intrinsic<"HEXAGON_V6_vsubcarry_128B">;
+
+
+///
+/// HexagonV65 intrinsics
+///
+
+//
+// Hexagon_V65_iLLiLLi_Intrinsic<string GCCIntSuffix>
+// tag : A6_vcmpbeq_notany
+class Hexagon_V65_iLLiLLi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [llvm_i64_ty,llvm_i64_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v1024v512LLi_Intrinsic<string GCCIntSuffix>
+// tag : V6_vrmpyub_rtt
+class Hexagon_V65_v1024v512LLi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v16i32_ty,llvm_i64_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v2048v1024LLi_Intrinsic<string GCCIntSuffix>
+// tag : V6_vrmpyub_rtt_128B
+class Hexagon_V65_v2048v1024LLi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v32i32_ty,llvm_i64_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v1024v1024v512LLi_Intrinsic<string GCCIntSuffix>
+// tag : V6_vrmpyub_rtt_acc
+class Hexagon_V65_v1024v1024v512LLi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v16i32_ty,llvm_i64_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v2048v2048v1024LLi_Intrinsic<string GCCIntSuffix>
+// tag : V6_vrmpyub_rtt_acc_128B
+class Hexagon_V65_v2048v2048v1024LLi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty,llvm_i64_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v512v512v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vasruwuhsat
+class Hexagon_V65_v512v512v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v1024v1024v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vasruwuhsat_128B
+class Hexagon_V65_v1024v1024v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v512v512v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vavguw
+class Hexagon_V65_v512v512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v1024v1024v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vavguw_128B
+class Hexagon_V65_v1024v1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v512v512_Intrinsic<string GCCIntSuffix>
+// tag : V6_vabsb
+class Hexagon_V65_v512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v1024v1024_Intrinsic<string GCCIntSuffix>
+// tag : V6_vabsb_128B
+class Hexagon_V65_v1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v1024v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpabuu
+class Hexagon_V65_v1024v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v2048v2048i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpabuu_128B
+class Hexagon_V65_v2048v2048i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v2048v2048v2048i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpabuu_acc_128B
+class Hexagon_V65_v2048v2048v2048i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v64i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v1024v1024v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpyh_acc
+class Hexagon_V65_v1024v1024v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v2048v2048v1024i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpyh_acc_128B
+class Hexagon_V65_v2048v2048v1024i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v512v512v512LLi_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpahhsat
+class Hexagon_V65_v512v512v512LLi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_i64_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v1024v1024v1024LLi_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpahhsat_128B
+class Hexagon_V65_v1024v1024v1024LLi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_i64_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v512v512LLi_Intrinsic<string GCCIntSuffix>
+// tag : V6_vlut4
+class Hexagon_V65_v512v512LLi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_i64_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v1024v1024LLi_Intrinsic<string GCCIntSuffix>
+// tag : V6_vlut4_128B
+class Hexagon_V65_v1024v1024LLi_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_i64_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v512v512i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vmpyuhe
+class Hexagon_V65_v512v512i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v512v64i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vprefixqb
+class Hexagon_V65_v512v64i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v512i1_ty],
+ [IntrNoMem]>;
+
+//
+// Hexagon_V65_v1024v128i_Intrinsic<string GCCIntSuffix>
+// tag : V6_vprefixqb_128B
+class Hexagon_V65_v1024v128i_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v1024i1_ty],
+ [IntrNoMem]>;
+
+//
+// BUILTIN_INFO(HEXAGON.A6_vcmpbeq_notany,QI_ftype_DIDI,2)
+// tag : A6_vcmpbeq_notany
+def int_hexagon_A6_vcmpbeq_notany :
+Hexagon_V65_iLLiLLi_Intrinsic<"HEXAGON_A6_vcmpbeq_notany">;
+
+//
+// BUILTIN_INFO(HEXAGON.A6_vcmpbeq_notany_128B,QI_ftype_DIDI,2)
+// tag : A6_vcmpbeq_notany_128B
+def int_hexagon_A6_vcmpbeq_notany_128B :
+Hexagon_V65_iLLiLLi_Intrinsic<"HEXAGON_A6_vcmpbeq_notany_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyub_rtt,VD_ftype_VIDI,2)
+// tag : V6_vrmpyub_rtt
+def int_hexagon_V6_vrmpyub_rtt :
+Hexagon_V65_v1024v512LLi_Intrinsic<"HEXAGON_V6_vrmpyub_rtt">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyub_rtt_128B,VD_ftype_VIDI,2)
+// tag : V6_vrmpyub_rtt_128B
+def int_hexagon_V6_vrmpyub_rtt_128B :
+Hexagon_V65_v2048v1024LLi_Intrinsic<"HEXAGON_V6_vrmpyub_rtt_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyub_rtt_acc,VD_ftype_VDVIDI,3)
+// tag : V6_vrmpyub_rtt_acc
+def int_hexagon_V6_vrmpyub_rtt_acc :
+Hexagon_V65_v1024v1024v512LLi_Intrinsic<"HEXAGON_V6_vrmpyub_rtt_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpyub_rtt_acc_128B,VD_ftype_VDVIDI,3)
+// tag : V6_vrmpyub_rtt_acc_128B
+def int_hexagon_V6_vrmpyub_rtt_acc_128B :
+Hexagon_V65_v2048v2048v1024LLi_Intrinsic<"HEXAGON_V6_vrmpyub_rtt_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybub_rtt,VD_ftype_VIDI,2)
+// tag : V6_vrmpybub_rtt
+def int_hexagon_V6_vrmpybub_rtt :
+Hexagon_V65_v1024v512LLi_Intrinsic<"HEXAGON_V6_vrmpybub_rtt">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybub_rtt_128B,VD_ftype_VIDI,2)
+// tag : V6_vrmpybub_rtt_128B
+def int_hexagon_V6_vrmpybub_rtt_128B :
+Hexagon_V65_v2048v1024LLi_Intrinsic<"HEXAGON_V6_vrmpybub_rtt_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybub_rtt_acc,VD_ftype_VDVIDI,3)
+// tag : V6_vrmpybub_rtt_acc
+def int_hexagon_V6_vrmpybub_rtt_acc :
+Hexagon_V65_v1024v1024v512LLi_Intrinsic<"HEXAGON_V6_vrmpybub_rtt_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vrmpybub_rtt_acc_128B,VD_ftype_VDVIDI,3)
+// tag : V6_vrmpybub_rtt_acc_128B
+def int_hexagon_V6_vrmpybub_rtt_acc_128B :
+Hexagon_V65_v2048v2048v1024LLi_Intrinsic<"HEXAGON_V6_vrmpybub_rtt_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasruwuhsat,VI_ftype_VIVISI,3)
+// tag : V6_vasruwuhsat
+def int_hexagon_V6_vasruwuhsat :
+Hexagon_V65_v512v512v512i_Intrinsic<"HEXAGON_V6_vasruwuhsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasruwuhsat_128B,VI_ftype_VIVISI,3)
+// tag : V6_vasruwuhsat_128B
+def int_hexagon_V6_vasruwuhsat_128B :
+Hexagon_V65_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vasruwuhsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasruhubsat,VI_ftype_VIVISI,3)
+// tag : V6_vasruhubsat
+def int_hexagon_V6_vasruhubsat :
+Hexagon_V65_v512v512v512i_Intrinsic<"HEXAGON_V6_vasruhubsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasruhubsat_128B,VI_ftype_VIVISI,3)
+// tag : V6_vasruhubsat_128B
+def int_hexagon_V6_vasruhubsat_128B :
+Hexagon_V65_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vasruhubsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasruhubrndsat,VI_ftype_VIVISI,3)
+// tag : V6_vasruhubrndsat
+def int_hexagon_V6_vasruhubrndsat :
+Hexagon_V65_v512v512v512i_Intrinsic<"HEXAGON_V6_vasruhubrndsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasruhubrndsat_128B,VI_ftype_VIVISI,3)
+// tag : V6_vasruhubrndsat_128B
+def int_hexagon_V6_vasruhubrndsat_128B :
+Hexagon_V65_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vasruhubrndsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaslh_acc,VI_ftype_VIVISI,3)
+// tag : V6_vaslh_acc
+def int_hexagon_V6_vaslh_acc :
+Hexagon_V65_v512v512v512i_Intrinsic<"HEXAGON_V6_vaslh_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vaslh_acc_128B,VI_ftype_VIVISI,3)
+// tag : V6_vaslh_acc_128B
+def int_hexagon_V6_vaslh_acc_128B :
+Hexagon_V65_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vaslh_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrh_acc,VI_ftype_VIVISI,3)
+// tag : V6_vasrh_acc
+def int_hexagon_V6_vasrh_acc :
+Hexagon_V65_v512v512v512i_Intrinsic<"HEXAGON_V6_vasrh_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vasrh_acc_128B,VI_ftype_VIVISI,3)
+// tag : V6_vasrh_acc_128B
+def int_hexagon_V6_vasrh_acc_128B :
+Hexagon_V65_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vasrh_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavguw,VI_ftype_VIVI,2)
+// tag : V6_vavguw
+def int_hexagon_V6_vavguw :
+Hexagon_V65_v512v512v512_Intrinsic<"HEXAGON_V6_vavguw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavguw_128B,VI_ftype_VIVI,2)
+// tag : V6_vavguw_128B
+def int_hexagon_V6_vavguw_128B :
+Hexagon_V65_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vavguw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavguwrnd,VI_ftype_VIVI,2)
+// tag : V6_vavguwrnd
+def int_hexagon_V6_vavguwrnd :
+Hexagon_V65_v512v512v512_Intrinsic<"HEXAGON_V6_vavguwrnd">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavguwrnd_128B,VI_ftype_VIVI,2)
+// tag : V6_vavguwrnd_128B
+def int_hexagon_V6_vavguwrnd_128B :
+Hexagon_V65_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vavguwrnd_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavgb,VI_ftype_VIVI,2)
+// tag : V6_vavgb
+def int_hexagon_V6_vavgb :
+Hexagon_V65_v512v512v512_Intrinsic<"HEXAGON_V6_vavgb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavgb_128B,VI_ftype_VIVI,2)
+// tag : V6_vavgb_128B
+def int_hexagon_V6_vavgb_128B :
+Hexagon_V65_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vavgb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavgbrnd,VI_ftype_VIVI,2)
+// tag : V6_vavgbrnd
+def int_hexagon_V6_vavgbrnd :
+Hexagon_V65_v512v512v512_Intrinsic<"HEXAGON_V6_vavgbrnd">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vavgbrnd_128B,VI_ftype_VIVI,2)
+// tag : V6_vavgbrnd_128B
+def int_hexagon_V6_vavgbrnd_128B :
+Hexagon_V65_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vavgbrnd_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vnavgb,VI_ftype_VIVI,2)
+// tag : V6_vnavgb
+def int_hexagon_V6_vnavgb :
+Hexagon_V65_v512v512v512_Intrinsic<"HEXAGON_V6_vnavgb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vnavgb_128B,VI_ftype_VIVI,2)
+// tag : V6_vnavgb_128B
+def int_hexagon_V6_vnavgb_128B :
+Hexagon_V65_v1024v1024v1024_Intrinsic<"HEXAGON_V6_vnavgb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsb,VI_ftype_VI,1)
+// tag : V6_vabsb
+def int_hexagon_V6_vabsb :
+Hexagon_V65_v512v512_Intrinsic<"HEXAGON_V6_vabsb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsb_128B,VI_ftype_VI,1)
+// tag : V6_vabsb_128B
+def int_hexagon_V6_vabsb_128B :
+Hexagon_V65_v1024v1024_Intrinsic<"HEXAGON_V6_vabsb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsb_sat,VI_ftype_VI,1)
+// tag : V6_vabsb_sat
+def int_hexagon_V6_vabsb_sat :
+Hexagon_V65_v512v512_Intrinsic<"HEXAGON_V6_vabsb_sat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vabsb_sat_128B,VI_ftype_VI,1)
+// tag : V6_vabsb_sat_128B
+def int_hexagon_V6_vabsb_sat_128B :
+Hexagon_V65_v1024v1024_Intrinsic<"HEXAGON_V6_vabsb_sat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpabuu,VD_ftype_VDSI,2)
+// tag : V6_vmpabuu
+def int_hexagon_V6_vmpabuu :
+Hexagon_V65_v1024v1024i_Intrinsic<"HEXAGON_V6_vmpabuu">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpabuu_128B,VD_ftype_VDSI,2)
+// tag : V6_vmpabuu_128B
+def int_hexagon_V6_vmpabuu_128B :
+Hexagon_V65_v2048v2048i_Intrinsic<"HEXAGON_V6_vmpabuu_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpabuu_acc,VD_ftype_VDVDSI,3)
+// tag : V6_vmpabuu_acc
+def int_hexagon_V6_vmpabuu_acc :
+Hexagon_V65_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vmpabuu_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpabuu_acc_128B,VD_ftype_VDVDSI,3)
+// tag : V6_vmpabuu_acc_128B
+def int_hexagon_V6_vmpabuu_acc_128B :
+Hexagon_V65_v2048v2048v2048i_Intrinsic<"HEXAGON_V6_vmpabuu_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyh_acc,VD_ftype_VDVISI,3)
+// tag : V6_vmpyh_acc
+def int_hexagon_V6_vmpyh_acc :
+Hexagon_V65_v1024v1024v512i_Intrinsic<"HEXAGON_V6_vmpyh_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyh_acc_128B,VD_ftype_VDVISI,3)
+// tag : V6_vmpyh_acc_128B
+def int_hexagon_V6_vmpyh_acc_128B :
+Hexagon_V65_v2048v2048v1024i_Intrinsic<"HEXAGON_V6_vmpyh_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpahhsat,VI_ftype_VIVIDI,3)
+// tag : V6_vmpahhsat
+def int_hexagon_V6_vmpahhsat :
+Hexagon_V65_v512v512v512LLi_Intrinsic<"HEXAGON_V6_vmpahhsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpahhsat_128B,VI_ftype_VIVIDI,3)
+// tag : V6_vmpahhsat_128B
+def int_hexagon_V6_vmpahhsat_128B :
+Hexagon_V65_v1024v1024v1024LLi_Intrinsic<"HEXAGON_V6_vmpahhsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpauhuhsat,VI_ftype_VIVIDI,3)
+// tag : V6_vmpauhuhsat
+def int_hexagon_V6_vmpauhuhsat :
+Hexagon_V65_v512v512v512LLi_Intrinsic<"HEXAGON_V6_vmpauhuhsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpauhuhsat_128B,VI_ftype_VIVIDI,3)
+// tag : V6_vmpauhuhsat_128B
+def int_hexagon_V6_vmpauhuhsat_128B :
+Hexagon_V65_v1024v1024v1024LLi_Intrinsic<"HEXAGON_V6_vmpauhuhsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpsuhuhsat,VI_ftype_VIVIDI,3)
+// tag : V6_vmpsuhuhsat
+def int_hexagon_V6_vmpsuhuhsat :
+Hexagon_V65_v512v512v512LLi_Intrinsic<"HEXAGON_V6_vmpsuhuhsat">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpsuhuhsat_128B,VI_ftype_VIVIDI,3)
+// tag : V6_vmpsuhuhsat_128B
+def int_hexagon_V6_vmpsuhuhsat_128B :
+Hexagon_V65_v1024v1024v1024LLi_Intrinsic<"HEXAGON_V6_vmpsuhuhsat_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlut4,VI_ftype_VIDI,2)
+// tag : V6_vlut4
+def int_hexagon_V6_vlut4 :
+Hexagon_V65_v512v512LLi_Intrinsic<"HEXAGON_V6_vlut4">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vlut4_128B,VI_ftype_VIDI,2)
+// tag : V6_vlut4_128B
+def int_hexagon_V6_vlut4_128B :
+Hexagon_V65_v1024v1024LLi_Intrinsic<"HEXAGON_V6_vlut4_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyuhe,VI_ftype_VISI,2)
+// tag : V6_vmpyuhe
+def int_hexagon_V6_vmpyuhe :
+Hexagon_V65_v512v512i_Intrinsic<"HEXAGON_V6_vmpyuhe">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyuhe_128B,VI_ftype_VISI,2)
+// tag : V6_vmpyuhe_128B
+def int_hexagon_V6_vmpyuhe_128B :
+Hexagon_V65_v1024v1024i_Intrinsic<"HEXAGON_V6_vmpyuhe_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyuhe_acc,VI_ftype_VIVISI,3)
+// tag : V6_vmpyuhe_acc
+def int_hexagon_V6_vmpyuhe_acc :
+Hexagon_V65_v512v512v512i_Intrinsic<"HEXAGON_V6_vmpyuhe_acc">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vmpyuhe_acc_128B,VI_ftype_VIVISI,3)
+// tag : V6_vmpyuhe_acc_128B
+def int_hexagon_V6_vmpyuhe_acc_128B :
+Hexagon_V65_v1024v1024v1024i_Intrinsic<"HEXAGON_V6_vmpyuhe_acc_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vprefixqb,VI_ftype_QV,1)
+// tag : V6_vprefixqb
+def int_hexagon_V6_vprefixqb :
+Hexagon_V65_v512v64i_Intrinsic<"HEXAGON_V6_vprefixqb">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vprefixqb_128B,VI_ftype_QV,1)
+// tag : V6_vprefixqb_128B
+def int_hexagon_V6_vprefixqb_128B :
+Hexagon_V65_v1024v128i_Intrinsic<"HEXAGON_V6_vprefixqb_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vprefixqh,VI_ftype_QV,1)
+// tag : V6_vprefixqh
+def int_hexagon_V6_vprefixqh :
+Hexagon_V65_v512v64i_Intrinsic<"HEXAGON_V6_vprefixqh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vprefixqh_128B,VI_ftype_QV,1)
+// tag : V6_vprefixqh_128B
+def int_hexagon_V6_vprefixqh_128B :
+Hexagon_V65_v1024v128i_Intrinsic<"HEXAGON_V6_vprefixqh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vprefixqw,VI_ftype_QV,1)
+// tag : V6_vprefixqw
+def int_hexagon_V6_vprefixqw :
+Hexagon_V65_v512v64i_Intrinsic<"HEXAGON_V6_vprefixqw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vprefixqw_128B,VI_ftype_QV,1)
+// tag : V6_vprefixqw_128B
+def int_hexagon_V6_vprefixqw_128B :
+Hexagon_V65_v1024v128i_Intrinsic<"HEXAGON_V6_vprefixqw_128B">;
+
+
+// The scatter/gather intrinsics below are not generated from iset.py. Make
+// sure you don't overwrite them.
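+// The gather intrinsics return nothing: they write the gathered elements
+// through their leading pointer operand (hence IntrArgMemOnly), take a pair
+// of i32 operands (base address and region size/modifier of the memory being
+// gathered from) plus a vector of element offsets, and the masked (q) forms
+// add a vector predicate.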
+class Hexagon_V65_vvmemiiv512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_ptr_ty,llvm_i32_ty,llvm_i32_ty,
+ llvm_v16i32_ty],
+ [IntrArgMemOnly]>;
+
+class Hexagon_V65_vvmemiiv1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_ptr_ty,llvm_i32_ty,llvm_i32_ty,
+ llvm_v32i32_ty],
+ [IntrArgMemOnly]>;
+
+class Hexagon_V65_vvmemiiv2048_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_ptr_ty,llvm_i32_ty,llvm_i32_ty,
+ llvm_v64i32_ty],
+ [IntrArgMemOnly]>;
+
+class Hexagon_V65_vvmemv64iiiv512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_ptr_ty,llvm_v512i1_ty,llvm_i32_ty,
+ llvm_i32_ty,llvm_v16i32_ty],
+ [IntrArgMemOnly]>;
+
+class Hexagon_V65_vvmemv128iiiv1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_ptr_ty,llvm_v1024i1_ty,llvm_i32_ty,
+ llvm_i32_ty,llvm_v32i32_ty],
+ [IntrArgMemOnly]>;
+
+class Hexagon_V65_vvmemv64iiiv1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_ptr_ty,llvm_v512i1_ty,llvm_i32_ty,
+ llvm_i32_ty,llvm_v32i32_ty],
+ [IntrArgMemOnly]>;
+
+class Hexagon_V65_vvmemv128iiiv2048_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_ptr_ty,llvm_v1024i1_ty,llvm_i32_ty,
+ llvm_i32_ty,llvm_v64i32_ty],
+ [IntrArgMemOnly]>;
+
+def int_hexagon_V6_vgathermw :
+Hexagon_V65_vvmemiiv512_Intrinsic<"HEXAGON_V6_vgathermw">;
+
+def int_hexagon_V6_vgathermw_128B :
+Hexagon_V65_vvmemiiv1024_Intrinsic<"HEXAGON_V6_vgathermw_128B">;
+
+def int_hexagon_V6_vgathermh :
+Hexagon_V65_vvmemiiv512_Intrinsic<"HEXAGON_V6_vgathermh">;
+
+def int_hexagon_V6_vgathermh_128B :
+Hexagon_V65_vvmemiiv1024_Intrinsic<"HEXAGON_V6_vgathermh_128B">;
+
+def int_hexagon_V6_vgathermhw :
+Hexagon_V65_vvmemiiv1024_Intrinsic<"HEXAGON_V6_vgathermhw">;
+
+def int_hexagon_V6_vgathermhw_128B :
+Hexagon_V65_vvmemiiv2048_Intrinsic<"HEXAGON_V6_vgathermhw_128B">;
+
+def int_hexagon_V6_vgathermwq :
+Hexagon_V65_vvmemv64iiiv512_Intrinsic<"HEXAGON_V6_vgathermwq">;
+
+def int_hexagon_V6_vgathermwq_128B :
+Hexagon_V65_vvmemv128iiiv1024_Intrinsic<"HEXAGON_V6_vgathermwq_128B">;
+
+def int_hexagon_V6_vgathermhq :
+Hexagon_V65_vvmemv64iiiv512_Intrinsic<"HEXAGON_V6_vgathermhq">;
+
+def int_hexagon_V6_vgathermhq_128B :
+Hexagon_V65_vvmemv128iiiv1024_Intrinsic<"HEXAGON_V6_vgathermhq_128B">;
+
+def int_hexagon_V6_vgathermhwq :
+Hexagon_V65_vvmemv64iiiv1024_Intrinsic<"HEXAGON_V6_vgathermhwq">;
+
+def int_hexagon_V6_vgathermhwq_128B :
+Hexagon_V65_vvmemv128iiiv2048_Intrinsic<"HEXAGON_V6_vgathermhwq_128B">;
+
+class Hexagon_V65_viiv512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_i32_ty,llvm_i32_ty,
+ llvm_v16i32_ty,llvm_v16i32_ty],
+ [IntrWriteMem]>;
+
+class Hexagon_V65_viiv1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_i32_ty,llvm_i32_ty,
+ llvm_v32i32_ty,llvm_v32i32_ty],
+ [IntrWriteMem]>;
+
+class Hexagon_V65_vv64iiiv512v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_v512i1_ty,llvm_i32_ty,
+ llvm_i32_ty,llvm_v16i32_ty,
+ llvm_v16i32_ty],
+ [IntrWriteMem]>;
+
+class Hexagon_V65_vv128iiiv1024v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_v1024i1_ty,llvm_i32_ty,
+ llvm_i32_ty,llvm_v32i32_ty,
+ llvm_v32i32_ty],
+ [IntrWriteMem]>;
+
+class Hexagon_V65_viiv1024v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_i32_ty,llvm_i32_ty,
+ llvm_v32i32_ty,llvm_v16i32_ty],
+ [IntrWriteMem]>;
+
+class Hexagon_V65_viiv2048v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_i32_ty,llvm_i32_ty,
+ llvm_v64i32_ty,llvm_v32i32_ty],
+ [IntrWriteMem]>;
+
+class Hexagon_V65_vv64iiiv1024v512_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_v512i1_ty,llvm_i32_ty,
+ llvm_i32_ty,llvm_v32i32_ty,
+ llvm_v16i32_ty],
+ [IntrWriteMem]>;
+
+class Hexagon_V65_vv128iiiv2048v1024_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_v1024i1_ty,llvm_i32_ty,
+ llvm_i32_ty,llvm_v64i32_ty,
+ llvm_v32i32_ty],
+ [IntrWriteMem]>;
+
+class Hexagon_V65_v2048_Intrinsic<string GCCIntSuffix>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [],
+ [IntrNoMem]>;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermw,v_ftype_SISIVIVI,4)
+// tag : V6_vscattermw
+def int_hexagon_V6_vscattermw :
+Hexagon_V65_viiv512v512_Intrinsic<"HEXAGON_V6_vscattermw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermw_128B,v_ftype_SISIVIVI,4)
+// tag : V6_vscattermw_128B
+def int_hexagon_V6_vscattermw_128B :
+Hexagon_V65_viiv1024v1024_Intrinsic<"HEXAGON_V6_vscattermw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermh,v_ftype_SISIVIVI,4)
+// tag : V6_vscattermh
+def int_hexagon_V6_vscattermh :
+Hexagon_V65_viiv512v512_Intrinsic<"HEXAGON_V6_vscattermh">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermh_128B,v_ftype_SISIVIVI,4)
+// tag : V6_vscattermh_128B
+def int_hexagon_V6_vscattermh_128B :
+Hexagon_V65_viiv1024v1024_Intrinsic<"HEXAGON_V6_vscattermh_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermw_add,v_ftype_SISIVIVI,4)
+// tag : V6_vscattermw_add
+def int_hexagon_V6_vscattermw_add :
+Hexagon_V65_viiv512v512_Intrinsic<"HEXAGON_V6_vscattermw_add">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermw_add_128B,v_ftype_SISIVIVI,4)
+// tag : V6_vscattermw_add_128B
+def int_hexagon_V6_vscattermw_add_128B :
+Hexagon_V65_viiv1024v1024_Intrinsic<"HEXAGON_V6_vscattermw_add_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermh_add,v_ftype_SISIVIVI,4)
+// tag : V6_vscattermh_add
+def int_hexagon_V6_vscattermh_add :
+Hexagon_V65_viiv512v512_Intrinsic<"HEXAGON_V6_vscattermh_add">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermh_add_128B,v_ftype_SISIVIVI,4)
+// tag : V6_vscattermh_add_128B
+def int_hexagon_V6_vscattermh_add_128B :
+Hexagon_V65_viiv1024v1024_Intrinsic<"HEXAGON_V6_vscattermh_add_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermwq,v_ftype_QVSISIVIVI,5)
+// tag : V6_vscattermwq
+def int_hexagon_V6_vscattermwq :
+Hexagon_V65_vv64iiiv512v512_Intrinsic<"HEXAGON_V6_vscattermwq">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermwq_128B,v_ftype_QVSISIVIVI,5)
+// tag : V6_vscattermwq_128B
+def int_hexagon_V6_vscattermwq_128B :
+Hexagon_V65_vv128iiiv1024v1024_Intrinsic<"HEXAGON_V6_vscattermwq_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermhq,v_ftype_QVSISIVIVI,5)
+// tag : V6_vscattermhq
+def int_hexagon_V6_vscattermhq :
+Hexagon_V65_vv64iiiv512v512_Intrinsic<"HEXAGON_V6_vscattermhq">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermhq_128B,v_ftype_QVSISIVIVI,5)
+// tag : V6_vscattermhq_128B
+def int_hexagon_V6_vscattermhq_128B :
+Hexagon_V65_vv128iiiv1024v1024_Intrinsic<"HEXAGON_V6_vscattermhq_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermhw,v_ftype_SISIVDVI,4)
+// tag : V6_vscattermhw
+def int_hexagon_V6_vscattermhw :
+Hexagon_V65_viiv1024v512_Intrinsic<"HEXAGON_V6_vscattermhw">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermhw_128B,v_ftype_SISIVDVI,4)
+// tag : V6_vscattermhw_128B
+def int_hexagon_V6_vscattermhw_128B :
+Hexagon_V65_viiv2048v1024_Intrinsic<"HEXAGON_V6_vscattermhw_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermhwq,v_ftype_QVSISIVDVI,5)
+// tag : V6_vscattermhwq
+def int_hexagon_V6_vscattermhwq :
+Hexagon_V65_vv64iiiv1024v512_Intrinsic<"HEXAGON_V6_vscattermhwq">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermhwq_128B,v_ftype_QVSISIVDVI,5)
+// tag : V6_vscattermhwq_128B
+def int_hexagon_V6_vscattermhwq_128B :
+Hexagon_V65_vv128iiiv2048v1024_Intrinsic<"HEXAGON_V6_vscattermhwq_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermhw_add,v_ftype_SISIVDVI,4)
+// tag : V6_vscattermhw_add
+def int_hexagon_V6_vscattermhw_add :
+Hexagon_V65_viiv1024v512_Intrinsic<"HEXAGON_V6_vscattermhw_add">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vscattermhw_add_128B,v_ftype_SISIVDVI,4)
+// tag : V6_vscattermhw_add_128B
+def int_hexagon_V6_vscattermhw_add_128B :
+Hexagon_V65_viiv2048v1024_Intrinsic<"HEXAGON_V6_vscattermhw_add_128B">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdd0,VD_ftype_,0)
+// tag : V6_vdd0
+def int_hexagon_V6_vdd0 :
+Hexagon_v1024_Intrinsic<"HEXAGON_V6_vdd0">;
+
+//
+// BUILTIN_INFO(HEXAGON.V6_vdd0_128B,VD_ftype_,0)
+// tag : V6_vdd0_128B
+def int_hexagon_V6_vdd0_128B :
+Hexagon_V65_v2048_Intrinsic<"HEXAGON_V6_vdd0_128B">;
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsMips.td b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsMips.td
new file mode 100644
index 000000000..421a79be4
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsMips.td
@@ -0,0 +1,1771 @@
+//===- IntrinsicsMips.td - Defines Mips intrinsics ---------*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines all of the MIPS-specific intrinsics.
+//
+//===----------------------------------------------------------------------===//
+
+//===----------------------------------------------------------------------===//
+// MIPS DSP data types
+def mips_v2q15_ty: LLVMType<v2i16>;
+def mips_v4q7_ty: LLVMType<v4i8>;
+def mips_q31_ty: LLVMType<i32>;
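+// Note: the q15/q31/q7 names refer to the DSP ASE's fractional fixed-point
+// formats (Q15 in each 16-bit lane, Q31 in a 32-bit word, Q7 in each 8-bit
+// lane); at the IR level they are just the integer vector/scalar types above.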
+
+let TargetPrefix = "mips" in { // All intrinsics start with "llvm.mips.".
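+// For example, int_mips_addu_qb below is the IR intrinsic "llvm.mips.addu.qb"
+// and is exposed in C as __builtin_mips_addu_qb.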
+
+//===----------------------------------------------------------------------===//
+// MIPS DSP Rev 1
+
+//===----------------------------------------------------------------------===//
+// Addition/subtraction
+
+def int_mips_addu_qb : GCCBuiltin<"__builtin_mips_addu_qb">,
+ Intrinsic<[llvm_v4i8_ty], [llvm_v4i8_ty, llvm_v4i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_addu_s_qb : GCCBuiltin<"__builtin_mips_addu_s_qb">,
+ Intrinsic<[llvm_v4i8_ty], [llvm_v4i8_ty, llvm_v4i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_subu_qb : GCCBuiltin<"__builtin_mips_subu_qb">,
+ Intrinsic<[llvm_v4i8_ty], [llvm_v4i8_ty, llvm_v4i8_ty], [IntrNoMem]>;
+def int_mips_subu_s_qb : GCCBuiltin<"__builtin_mips_subu_s_qb">,
+ Intrinsic<[llvm_v4i8_ty], [llvm_v4i8_ty, llvm_v4i8_ty], [IntrNoMem]>;
+
+def int_mips_addq_ph : GCCBuiltin<"__builtin_mips_addq_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, mips_v2q15_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_addq_s_ph : GCCBuiltin<"__builtin_mips_addq_s_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, mips_v2q15_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_subq_ph : GCCBuiltin<"__builtin_mips_subq_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, mips_v2q15_ty], [IntrNoMem]>;
+def int_mips_subq_s_ph : GCCBuiltin<"__builtin_mips_subq_s_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, mips_v2q15_ty], [IntrNoMem]>;
+
+def int_mips_madd: GCCBuiltin<"__builtin_mips_madd">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, Commutative]>;
+def int_mips_maddu: GCCBuiltin<"__builtin_mips_maddu">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, Commutative]>;
+
+def int_mips_msub: GCCBuiltin<"__builtin_mips_msub">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_msubu: GCCBuiltin<"__builtin_mips_msubu">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_addq_s_w: GCCBuiltin<"__builtin_mips_addq_s_w">,
+ Intrinsic<[mips_q31_ty], [mips_q31_ty, mips_q31_ty], [Commutative]>;
+def int_mips_subq_s_w: GCCBuiltin<"__builtin_mips_subq_s_w">,
+ Intrinsic<[mips_q31_ty], [mips_q31_ty, mips_q31_ty], []>;
+
+def int_mips_addsc: GCCBuiltin<"__builtin_mips_addsc">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [Commutative]>;
+def int_mips_addwc: GCCBuiltin<"__builtin_mips_addwc">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [Commutative]>;
+
+def int_mips_modsub: GCCBuiltin<"__builtin_mips_modsub">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_raddu_w_qb: GCCBuiltin<"__builtin_mips_raddu_w_qb">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i8_ty], [IntrNoMem]>;
+
+//===----------------------------------------------------------------------===//
+// Absolute value
+
+def int_mips_absq_s_ph: GCCBuiltin<"__builtin_mips_absq_s_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty], []>;
+def int_mips_absq_s_w: GCCBuiltin<"__builtin_mips_absq_s_w">,
+ Intrinsic<[mips_q31_ty], [mips_q31_ty], []>;
+
+//===----------------------------------------------------------------------===//
+// Precision reduce/expand
+
+def int_mips_precrq_qb_ph: GCCBuiltin<"__builtin_mips_precrq_qb_ph">,
+ Intrinsic<[llvm_v4i8_ty], [mips_v2q15_ty, mips_v2q15_ty], [IntrNoMem]>;
+def int_mips_precrqu_s_qb_ph: GCCBuiltin<"__builtin_mips_precrqu_s_qb_ph">,
+ Intrinsic<[llvm_v4i8_ty], [mips_v2q15_ty, mips_v2q15_ty], []>;
+def int_mips_precrq_ph_w: GCCBuiltin<"__builtin_mips_precrq_ph_w">,
+ Intrinsic<[mips_v2q15_ty], [mips_q31_ty, mips_q31_ty], [IntrNoMem]>;
+def int_mips_precrq_rs_ph_w: GCCBuiltin<"__builtin_mips_precrq_rs_ph_w">,
+ Intrinsic<[mips_v2q15_ty], [mips_q31_ty, mips_q31_ty], []>;
+def int_mips_preceq_w_phl: GCCBuiltin<"__builtin_mips_preceq_w_phl">,
+ Intrinsic<[mips_q31_ty], [mips_v2q15_ty], [IntrNoMem]>;
+def int_mips_preceq_w_phr: GCCBuiltin<"__builtin_mips_preceq_w_phr">,
+ Intrinsic<[mips_q31_ty], [mips_v2q15_ty], [IntrNoMem]>;
+def int_mips_precequ_ph_qbl: GCCBuiltin<"__builtin_mips_precequ_ph_qbl">,
+ Intrinsic<[mips_v2q15_ty], [llvm_v4i8_ty], [IntrNoMem]>;
+def int_mips_precequ_ph_qbr: GCCBuiltin<"__builtin_mips_precequ_ph_qbr">,
+ Intrinsic<[mips_v2q15_ty], [llvm_v4i8_ty], [IntrNoMem]>;
+def int_mips_precequ_ph_qbla: GCCBuiltin<"__builtin_mips_precequ_ph_qbla">,
+ Intrinsic<[mips_v2q15_ty], [llvm_v4i8_ty], [IntrNoMem]>;
+def int_mips_precequ_ph_qbra: GCCBuiltin<"__builtin_mips_precequ_ph_qbra">,
+ Intrinsic<[mips_v2q15_ty], [llvm_v4i8_ty], [IntrNoMem]>;
+def int_mips_preceu_ph_qbl: GCCBuiltin<"__builtin_mips_preceu_ph_qbl">,
+ Intrinsic<[mips_v2q15_ty], [llvm_v4i8_ty], [IntrNoMem]>;
+def int_mips_preceu_ph_qbr: GCCBuiltin<"__builtin_mips_preceu_ph_qbr">,
+ Intrinsic<[mips_v2q15_ty], [llvm_v4i8_ty], [IntrNoMem]>;
+def int_mips_preceu_ph_qbla: GCCBuiltin<"__builtin_mips_preceu_ph_qbla">,
+ Intrinsic<[mips_v2q15_ty], [llvm_v4i8_ty], [IntrNoMem]>;
+def int_mips_preceu_ph_qbra: GCCBuiltin<"__builtin_mips_preceu_ph_qbra">,
+ Intrinsic<[mips_v2q15_ty], [llvm_v4i8_ty], [IntrNoMem]>;
+
+//===----------------------------------------------------------------------===//
+// Shift
+
+def int_mips_shll_qb: GCCBuiltin<"__builtin_mips_shll_qb">,
+ Intrinsic<[llvm_v4i8_ty], [llvm_v4i8_ty, llvm_i32_ty], []>;
+def int_mips_shrl_qb: GCCBuiltin<"__builtin_mips_shrl_qb">,
+ Intrinsic<[llvm_v4i8_ty], [llvm_v4i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_shll_ph: GCCBuiltin<"__builtin_mips_shll_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, llvm_i32_ty], []>;
+def int_mips_shll_s_ph: GCCBuiltin<"__builtin_mips_shll_s_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, llvm_i32_ty], []>;
+def int_mips_shra_ph: GCCBuiltin<"__builtin_mips_shra_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_shra_r_ph: GCCBuiltin<"__builtin_mips_shra_r_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_shll_s_w: GCCBuiltin<"__builtin_mips_shll_s_w">,
+ Intrinsic<[mips_q31_ty], [mips_q31_ty, llvm_i32_ty], []>;
+def int_mips_shra_r_w: GCCBuiltin<"__builtin_mips_shra_r_w">,
+ Intrinsic<[mips_q31_ty], [mips_q31_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_shilo: GCCBuiltin<"__builtin_mips_shilo">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+//===----------------------------------------------------------------------===//
+// Multiplication
+
+def int_mips_muleu_s_ph_qbl: GCCBuiltin<"__builtin_mips_muleu_s_ph_qbl">,
+ Intrinsic<[mips_v2q15_ty], [llvm_v4i8_ty, mips_v2q15_ty], []>;
+def int_mips_muleu_s_ph_qbr: GCCBuiltin<"__builtin_mips_muleu_s_ph_qbr">,
+ Intrinsic<[mips_v2q15_ty], [llvm_v4i8_ty, mips_v2q15_ty], []>;
+def int_mips_mulq_rs_ph: GCCBuiltin<"__builtin_mips_mulq_rs_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, mips_v2q15_ty], [Commutative]>;
+def int_mips_muleq_s_w_phl: GCCBuiltin<"__builtin_mips_muleq_s_w_phl">,
+ Intrinsic<[mips_q31_ty], [mips_v2q15_ty, mips_v2q15_ty], [Commutative]>;
+def int_mips_muleq_s_w_phr: GCCBuiltin<"__builtin_mips_muleq_s_w_phr">,
+ Intrinsic<[mips_q31_ty], [mips_v2q15_ty, mips_v2q15_ty], [Commutative]>;
+def int_mips_mulsaq_s_w_ph: GCCBuiltin<"__builtin_mips_mulsaq_s_w_ph">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, mips_v2q15_ty, mips_v2q15_ty], []>;
+def int_mips_maq_s_w_phl: GCCBuiltin<"__builtin_mips_maq_s_w_phl">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, mips_v2q15_ty, mips_v2q15_ty], []>;
+def int_mips_maq_s_w_phr: GCCBuiltin<"__builtin_mips_maq_s_w_phr">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, mips_v2q15_ty, mips_v2q15_ty], []>;
+def int_mips_maq_sa_w_phl: GCCBuiltin<"__builtin_mips_maq_sa_w_phl">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, mips_v2q15_ty, mips_v2q15_ty], []>;
+def int_mips_maq_sa_w_phr: GCCBuiltin<"__builtin_mips_maq_sa_w_phr">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, mips_v2q15_ty, mips_v2q15_ty], []>;
+def int_mips_mult: GCCBuiltin<"__builtin_mips_mult">,
+ Intrinsic<[llvm_i64_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, Commutative]>;
+def int_mips_multu: GCCBuiltin<"__builtin_mips_multu">,
+ Intrinsic<[llvm_i64_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, Commutative]>;
+
+//===----------------------------------------------------------------------===//
+// Dot product with accumulate/subtract
+
+def int_mips_dpau_h_qbl: GCCBuiltin<"__builtin_mips_dpau_h_qbl">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_v4i8_ty, llvm_v4i8_ty],
+ [IntrNoMem]>;
+def int_mips_dpau_h_qbr: GCCBuiltin<"__builtin_mips_dpau_h_qbr">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_v4i8_ty, llvm_v4i8_ty],
+ [IntrNoMem]>;
+def int_mips_dpsu_h_qbl: GCCBuiltin<"__builtin_mips_dpsu_h_qbl">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_v4i8_ty, llvm_v4i8_ty],
+ [IntrNoMem]>;
+def int_mips_dpsu_h_qbr: GCCBuiltin<"__builtin_mips_dpsu_h_qbr">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_v4i8_ty, llvm_v4i8_ty],
+ [IntrNoMem]>;
+def int_mips_dpaq_s_w_ph: GCCBuiltin<"__builtin_mips_dpaq_s_w_ph">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, mips_v2q15_ty, mips_v2q15_ty], []>;
+def int_mips_dpsq_s_w_ph: GCCBuiltin<"__builtin_mips_dpsq_s_w_ph">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, mips_v2q15_ty, mips_v2q15_ty], []>;
+def int_mips_dpaq_sa_l_w: GCCBuiltin<"__builtin_mips_dpaq_sa_l_w">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, mips_q31_ty, mips_q31_ty], []>;
+def int_mips_dpsq_sa_l_w: GCCBuiltin<"__builtin_mips_dpsq_sa_l_w">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, mips_q31_ty, mips_q31_ty], []>;
+
+//===----------------------------------------------------------------------===//
+// Comparison
+
+def int_mips_cmpu_eq_qb: GCCBuiltin<"__builtin_mips_cmpu_eq_qb">,
+ Intrinsic<[], [llvm_v4i8_ty, llvm_v4i8_ty], [Commutative]>;
+def int_mips_cmpu_lt_qb: GCCBuiltin<"__builtin_mips_cmpu_lt_qb">,
+ Intrinsic<[], [llvm_v4i8_ty, llvm_v4i8_ty], []>;
+def int_mips_cmpu_le_qb: GCCBuiltin<"__builtin_mips_cmpu_le_qb">,
+ Intrinsic<[], [llvm_v4i8_ty, llvm_v4i8_ty], []>;
+def int_mips_cmpgu_eq_qb: GCCBuiltin<"__builtin_mips_cmpgu_eq_qb">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i8_ty, llvm_v4i8_ty], [Commutative]>;
+def int_mips_cmpgu_lt_qb: GCCBuiltin<"__builtin_mips_cmpgu_lt_qb">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i8_ty, llvm_v4i8_ty], []>;
+def int_mips_cmpgu_le_qb: GCCBuiltin<"__builtin_mips_cmpgu_le_qb">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i8_ty, llvm_v4i8_ty], []>;
+def int_mips_cmp_eq_ph: GCCBuiltin<"__builtin_mips_cmp_eq_ph">,
+ Intrinsic<[], [mips_v2q15_ty, mips_v2q15_ty], [Commutative]>;
+def int_mips_cmp_lt_ph: GCCBuiltin<"__builtin_mips_cmp_lt_ph">,
+ Intrinsic<[], [mips_v2q15_ty, mips_v2q15_ty], []>;
+def int_mips_cmp_le_ph: GCCBuiltin<"__builtin_mips_cmp_le_ph">,
+ Intrinsic<[], [mips_v2q15_ty, mips_v2q15_ty], []>;
+
+//===----------------------------------------------------------------------===//
+// Extracting
+
+def int_mips_extr_s_h: GCCBuiltin<"__builtin_mips_extr_s_h">,
+ Intrinsic<[llvm_i32_ty], [llvm_i64_ty, llvm_i32_ty], []>;
+def int_mips_extr_w: GCCBuiltin<"__builtin_mips_extr_w">,
+ Intrinsic<[llvm_i32_ty], [llvm_i64_ty, llvm_i32_ty], []>;
+def int_mips_extr_rs_w: GCCBuiltin<"__builtin_mips_extr_rs_w">,
+ Intrinsic<[llvm_i32_ty], [llvm_i64_ty, llvm_i32_ty], []>;
+def int_mips_extr_r_w: GCCBuiltin<"__builtin_mips_extr_r_w">,
+ Intrinsic<[llvm_i32_ty], [llvm_i64_ty, llvm_i32_ty], []>;
+def int_mips_extp: GCCBuiltin<"__builtin_mips_extp">,
+ Intrinsic<[llvm_i32_ty], [llvm_i64_ty, llvm_i32_ty], []>;
+def int_mips_extpdp: GCCBuiltin<"__builtin_mips_extpdp">,
+ Intrinsic<[llvm_i32_ty], [llvm_i64_ty, llvm_i32_ty], []>;
+
+//===----------------------------------------------------------------------===//
+// Misc
+
+def int_mips_wrdsp: GCCBuiltin<"__builtin_mips_wrdsp">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty], []>;
+def int_mips_rddsp: GCCBuiltin<"__builtin_mips_rddsp">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrReadMem]>;
+
+def int_mips_insv: GCCBuiltin<"__builtin_mips_insv">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrReadMem]>;
+def int_mips_bitrev: GCCBuiltin<"__builtin_mips_bitrev">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_packrl_ph: GCCBuiltin<"__builtin_mips_packrl_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, mips_v2q15_ty], [IntrNoMem]>;
+
+def int_mips_repl_qb: GCCBuiltin<"__builtin_mips_repl_qb">,
+ Intrinsic<[llvm_v4i8_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_mips_repl_ph: GCCBuiltin<"__builtin_mips_repl_ph">,
+ Intrinsic<[mips_v2q15_ty], [llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_pick_qb: GCCBuiltin<"__builtin_mips_pick_qb">,
+ Intrinsic<[llvm_v4i8_ty], [llvm_v4i8_ty, llvm_v4i8_ty], [IntrReadMem]>;
+def int_mips_pick_ph: GCCBuiltin<"__builtin_mips_pick_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, mips_v2q15_ty], [IntrReadMem]>;
+
+def int_mips_mthlip: GCCBuiltin<"__builtin_mips_mthlip">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i32_ty], []>;
+
+def int_mips_bposge32: GCCBuiltin<"__builtin_mips_bposge32">,
+ Intrinsic<[llvm_i32_ty], [], [IntrReadMem]>;
+
+def int_mips_lbux: GCCBuiltin<"__builtin_mips_lbux">,
+ Intrinsic<[llvm_i32_ty], [llvm_ptr_ty, llvm_i32_ty], [IntrReadMem, IntrArgMemOnly]>;
+def int_mips_lhx: GCCBuiltin<"__builtin_mips_lhx">,
+ Intrinsic<[llvm_i32_ty], [llvm_ptr_ty, llvm_i32_ty], [IntrReadMem, IntrArgMemOnly]>;
+def int_mips_lwx: GCCBuiltin<"__builtin_mips_lwx">,
+ Intrinsic<[llvm_i32_ty], [llvm_ptr_ty, llvm_i32_ty], [IntrReadMem, IntrArgMemOnly]>;
+
+//===----------------------------------------------------------------------===//
+// MIPS DSP Rev 2
+
+def int_mips_absq_s_qb: GCCBuiltin<"__builtin_mips_absq_s_qb">,
+ Intrinsic<[mips_v4q7_ty], [mips_v4q7_ty], []>;
+
+def int_mips_addqh_ph: GCCBuiltin<"__builtin_mips_addqh_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, mips_v2q15_ty],
+ [IntrNoMem, Commutative]>;
+def int_mips_addqh_r_ph: GCCBuiltin<"__builtin_mips_addqh_r_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, mips_v2q15_ty],
+ [IntrNoMem, Commutative]>;
+def int_mips_addqh_w: GCCBuiltin<"__builtin_mips_addqh_w">,
+ Intrinsic<[mips_q31_ty], [mips_q31_ty, mips_q31_ty],
+ [IntrNoMem, Commutative]>;
+def int_mips_addqh_r_w: GCCBuiltin<"__builtin_mips_addqh_r_w">,
+ Intrinsic<[mips_q31_ty], [mips_q31_ty, mips_q31_ty],
+ [IntrNoMem, Commutative]>;
+
+def int_mips_addu_ph: GCCBuiltin<"__builtin_mips_addu_ph">,
+ Intrinsic<[llvm_v2i16_ty], [llvm_v2i16_ty, llvm_v2i16_ty], [Commutative]>;
+def int_mips_addu_s_ph: GCCBuiltin<"__builtin_mips_addu_s_ph">,
+ Intrinsic<[llvm_v2i16_ty], [llvm_v2i16_ty, llvm_v2i16_ty], [Commutative]>;
+
+def int_mips_adduh_qb: GCCBuiltin<"__builtin_mips_adduh_qb">,
+ Intrinsic<[llvm_v4i8_ty], [llvm_v4i8_ty, llvm_v4i8_ty],
+ [IntrNoMem, Commutative]>;
+def int_mips_adduh_r_qb: GCCBuiltin<"__builtin_mips_adduh_r_qb">,
+ Intrinsic<[llvm_v4i8_ty], [llvm_v4i8_ty, llvm_v4i8_ty],
+ [IntrNoMem, Commutative]>;
+
+def int_mips_append: GCCBuiltin<"__builtin_mips_append">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_balign: GCCBuiltin<"__builtin_mips_balign">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_cmpgdu_eq_qb: GCCBuiltin<"__builtin_mips_cmpgdu_eq_qb">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i8_ty, llvm_v4i8_ty], [Commutative]>;
+def int_mips_cmpgdu_lt_qb: GCCBuiltin<"__builtin_mips_cmpgdu_lt_qb">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i8_ty, llvm_v4i8_ty], []>;
+def int_mips_cmpgdu_le_qb: GCCBuiltin<"__builtin_mips_cmpgdu_le_qb">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i8_ty, llvm_v4i8_ty], []>;
+
+def int_mips_dpa_w_ph: GCCBuiltin<"__builtin_mips_dpa_w_ph">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_v2i16_ty, llvm_v2i16_ty],
+ [IntrNoMem]>;
+def int_mips_dps_w_ph: GCCBuiltin<"__builtin_mips_dps_w_ph">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_v2i16_ty, llvm_v2i16_ty],
+ [IntrNoMem]>;
+
+def int_mips_dpaqx_s_w_ph: GCCBuiltin<"__builtin_mips_dpaqx_s_w_ph">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, mips_v2q15_ty, mips_v2q15_ty], []>;
+def int_mips_dpaqx_sa_w_ph: GCCBuiltin<"__builtin_mips_dpaqx_sa_w_ph">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, mips_v2q15_ty, mips_v2q15_ty], []>;
+def int_mips_dpax_w_ph: GCCBuiltin<"__builtin_mips_dpax_w_ph">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_v2i16_ty, llvm_v2i16_ty],
+ [IntrNoMem]>;
+def int_mips_dpsx_w_ph: GCCBuiltin<"__builtin_mips_dpsx_w_ph">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_v2i16_ty, llvm_v2i16_ty],
+ [IntrNoMem]>;
+def int_mips_dpsqx_s_w_ph: GCCBuiltin<"__builtin_mips_dpsqx_s_w_ph">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, mips_v2q15_ty, mips_v2q15_ty], []>;
+def int_mips_dpsqx_sa_w_ph: GCCBuiltin<"__builtin_mips_dpsqx_sa_w_ph">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, mips_v2q15_ty, mips_v2q15_ty], []>;
+
+def int_mips_mul_ph: GCCBuiltin<"__builtin_mips_mul_ph">,
+ Intrinsic<[llvm_v2i16_ty], [llvm_v2i16_ty, llvm_v2i16_ty], [Commutative]>;
+def int_mips_mul_s_ph: GCCBuiltin<"__builtin_mips_mul_s_ph">,
+ Intrinsic<[llvm_v2i16_ty], [llvm_v2i16_ty, llvm_v2i16_ty], [Commutative]>;
+
+def int_mips_mulq_rs_w: GCCBuiltin<"__builtin_mips_mulq_rs_w">,
+ Intrinsic<[mips_q31_ty], [mips_q31_ty, mips_q31_ty], [Commutative]>;
+def int_mips_mulq_s_ph: GCCBuiltin<"__builtin_mips_mulq_s_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, mips_v2q15_ty], [Commutative]>;
+def int_mips_mulq_s_w: GCCBuiltin<"__builtin_mips_mulq_s_w">,
+ Intrinsic<[mips_q31_ty], [mips_q31_ty, mips_q31_ty], [Commutative]>;
+def int_mips_mulsa_w_ph: GCCBuiltin<"__builtin_mips_mulsa_w_ph">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_v2i16_ty, llvm_v2i16_ty],
+ [IntrNoMem]>;
+
+def int_mips_precr_qb_ph: GCCBuiltin<"__builtin_mips_precr_qb_ph">,
+ Intrinsic<[llvm_v4i8_ty], [llvm_v2i16_ty, llvm_v2i16_ty], []>;
+def int_mips_precr_sra_ph_w: GCCBuiltin<"__builtin_mips_precr_sra_ph_w">,
+ Intrinsic<[llvm_v2i16_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_precr_sra_r_ph_w: GCCBuiltin<"__builtin_mips_precr_sra_r_ph_w">,
+ Intrinsic<[llvm_v2i16_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_prepend: GCCBuiltin<"__builtin_mips_prepend">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_shra_qb: GCCBuiltin<"__builtin_mips_shra_qb">,
+ Intrinsic<[llvm_v4i8_ty], [llvm_v4i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_shra_r_qb: GCCBuiltin<"__builtin_mips_shra_r_qb">,
+ Intrinsic<[llvm_v4i8_ty], [llvm_v4i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_shrl_ph: GCCBuiltin<"__builtin_mips_shrl_ph">,
+ Intrinsic<[llvm_v2i16_ty], [llvm_v2i16_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_subqh_ph: GCCBuiltin<"__builtin_mips_subqh_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, mips_v2q15_ty], [IntrNoMem]>;
+def int_mips_subqh_r_ph: GCCBuiltin<"__builtin_mips_subqh_r_ph">,
+ Intrinsic<[mips_v2q15_ty], [mips_v2q15_ty, mips_v2q15_ty], [IntrNoMem]>;
+def int_mips_subqh_w: GCCBuiltin<"__builtin_mips_subqh_w">,
+ Intrinsic<[mips_q31_ty], [mips_q31_ty, mips_q31_ty], [IntrNoMem]>;
+def int_mips_subqh_r_w: GCCBuiltin<"__builtin_mips_subqh_r_w">,
+ Intrinsic<[mips_q31_ty], [mips_q31_ty, mips_q31_ty], [IntrNoMem]>;
+
+def int_mips_subu_ph: GCCBuiltin<"__builtin_mips_subu_ph">,
+ Intrinsic<[llvm_v2i16_ty], [llvm_v2i16_ty, llvm_v2i16_ty], []>;
+def int_mips_subu_s_ph: GCCBuiltin<"__builtin_mips_subu_s_ph">,
+ Intrinsic<[llvm_v2i16_ty], [llvm_v2i16_ty, llvm_v2i16_ty], []>;
+
+def int_mips_subuh_qb: GCCBuiltin<"__builtin_mips_subuh_qb">,
+ Intrinsic<[llvm_v4i8_ty], [llvm_v4i8_ty, llvm_v4i8_ty], [IntrNoMem]>;
+def int_mips_subuh_r_qb: GCCBuiltin<"__builtin_mips_subuh_r_qb">,
+ Intrinsic<[llvm_v4i8_ty], [llvm_v4i8_ty, llvm_v4i8_ty], [IntrNoMem]>;
+
+//===----------------------------------------------------------------------===//
+// MIPS MSA
+
+//===----------------------------------------------------------------------===//
+// Addition/subtraction
+
+def int_mips_add_a_b : GCCBuiltin<"__builtin_msa_add_a_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_add_a_h : GCCBuiltin<"__builtin_msa_add_a_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_add_a_w : GCCBuiltin<"__builtin_msa_add_a_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_add_a_d : GCCBuiltin<"__builtin_msa_add_a_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_adds_a_b : GCCBuiltin<"__builtin_msa_adds_a_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_a_h : GCCBuiltin<"__builtin_msa_adds_a_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_a_w : GCCBuiltin<"__builtin_msa_adds_a_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_a_d : GCCBuiltin<"__builtin_msa_adds_a_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_adds_s_b : GCCBuiltin<"__builtin_msa_adds_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_s_h : GCCBuiltin<"__builtin_msa_adds_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_s_w : GCCBuiltin<"__builtin_msa_adds_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_s_d : GCCBuiltin<"__builtin_msa_adds_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_adds_u_b : GCCBuiltin<"__builtin_msa_adds_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_u_h : GCCBuiltin<"__builtin_msa_adds_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_u_w : GCCBuiltin<"__builtin_msa_adds_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_u_d : GCCBuiltin<"__builtin_msa_adds_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_addv_b : GCCBuiltin<"__builtin_msa_addv_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_addv_h : GCCBuiltin<"__builtin_msa_addv_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_addv_w : GCCBuiltin<"__builtin_msa_addv_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_addv_d : GCCBuiltin<"__builtin_msa_addv_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_addvi_b : GCCBuiltin<"__builtin_msa_addvi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_addvi_h : GCCBuiltin<"__builtin_msa_addvi_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_addvi_w : GCCBuiltin<"__builtin_msa_addvi_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_addvi_d : GCCBuiltin<"__builtin_msa_addvi_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_and_v : GCCBuiltin<"__builtin_msa_and_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+
+def int_mips_andi_b : GCCBuiltin<"__builtin_msa_andi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_asub_s_b : GCCBuiltin<"__builtin_msa_asub_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_asub_s_h : GCCBuiltin<"__builtin_msa_asub_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_asub_s_w : GCCBuiltin<"__builtin_msa_asub_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_asub_s_d : GCCBuiltin<"__builtin_msa_asub_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_asub_u_b : GCCBuiltin<"__builtin_msa_asub_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_asub_u_h : GCCBuiltin<"__builtin_msa_asub_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_asub_u_w : GCCBuiltin<"__builtin_msa_asub_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_asub_u_d : GCCBuiltin<"__builtin_msa_asub_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_ave_s_b : GCCBuiltin<"__builtin_msa_ave_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_ave_s_h : GCCBuiltin<"__builtin_msa_ave_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_ave_s_w : GCCBuiltin<"__builtin_msa_ave_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_ave_s_d : GCCBuiltin<"__builtin_msa_ave_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_ave_u_b : GCCBuiltin<"__builtin_msa_ave_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_ave_u_h : GCCBuiltin<"__builtin_msa_ave_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_ave_u_w : GCCBuiltin<"__builtin_msa_ave_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_ave_u_d : GCCBuiltin<"__builtin_msa_ave_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_aver_s_b : GCCBuiltin<"__builtin_msa_aver_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_aver_s_h : GCCBuiltin<"__builtin_msa_aver_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_aver_s_w : GCCBuiltin<"__builtin_msa_aver_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_aver_s_d : GCCBuiltin<"__builtin_msa_aver_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_aver_u_b : GCCBuiltin<"__builtin_msa_aver_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_aver_u_h : GCCBuiltin<"__builtin_msa_aver_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_aver_u_w : GCCBuiltin<"__builtin_msa_aver_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_aver_u_d : GCCBuiltin<"__builtin_msa_aver_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_bclr_b : GCCBuiltin<"__builtin_msa_bclr_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_bclr_h : GCCBuiltin<"__builtin_msa_bclr_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_bclr_w : GCCBuiltin<"__builtin_msa_bclr_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_bclr_d : GCCBuiltin<"__builtin_msa_bclr_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_bclri_b : GCCBuiltin<"__builtin_msa_bclri_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bclri_h : GCCBuiltin<"__builtin_msa_bclri_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bclri_w : GCCBuiltin<"__builtin_msa_bclri_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bclri_d : GCCBuiltin<"__builtin_msa_bclri_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_binsl_b : GCCBuiltin<"__builtin_msa_binsl_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_binsl_h : GCCBuiltin<"__builtin_msa_binsl_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_binsl_w : GCCBuiltin<"__builtin_msa_binsl_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsl_d : GCCBuiltin<"__builtin_msa_binsl_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+def int_mips_binsli_b : GCCBuiltin<"__builtin_msa_binsli_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsli_h : GCCBuiltin<"__builtin_msa_binsli_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsli_w : GCCBuiltin<"__builtin_msa_binsli_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsli_d : GCCBuiltin<"__builtin_msa_binsli_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_binsr_b : GCCBuiltin<"__builtin_msa_binsr_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_binsr_h : GCCBuiltin<"__builtin_msa_binsr_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_binsr_w : GCCBuiltin<"__builtin_msa_binsr_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsr_d : GCCBuiltin<"__builtin_msa_binsr_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+def int_mips_binsri_b : GCCBuiltin<"__builtin_msa_binsri_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsri_h : GCCBuiltin<"__builtin_msa_binsri_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsri_w : GCCBuiltin<"__builtin_msa_binsri_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsri_d : GCCBuiltin<"__builtin_msa_binsri_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_bmnz_v : GCCBuiltin<"__builtin_msa_bmnz_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+
+def int_mips_bmnzi_b : GCCBuiltin<"__builtin_msa_bmnzi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_bmz_v : GCCBuiltin<"__builtin_msa_bmz_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+
+def int_mips_bmzi_b : GCCBuiltin<"__builtin_msa_bmzi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_bneg_b : GCCBuiltin<"__builtin_msa_bneg_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_bneg_h : GCCBuiltin<"__builtin_msa_bneg_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_bneg_w : GCCBuiltin<"__builtin_msa_bneg_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_bneg_d : GCCBuiltin<"__builtin_msa_bneg_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_bnegi_b : GCCBuiltin<"__builtin_msa_bnegi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bnegi_h : GCCBuiltin<"__builtin_msa_bnegi_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bnegi_w : GCCBuiltin<"__builtin_msa_bnegi_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bnegi_d : GCCBuiltin<"__builtin_msa_bnegi_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_bnz_b : GCCBuiltin<"__builtin_msa_bnz_b">,
+ Intrinsic<[llvm_i32_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_bnz_h : GCCBuiltin<"__builtin_msa_bnz_h">,
+ Intrinsic<[llvm_i32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_bnz_w : GCCBuiltin<"__builtin_msa_bnz_w">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_bnz_d : GCCBuiltin<"__builtin_msa_bnz_d">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_bnz_v : GCCBuiltin<"__builtin_msa_bnz_v">,
+ Intrinsic<[llvm_i32_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+
+def int_mips_bsel_v : GCCBuiltin<"__builtin_msa_bsel_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+
+def int_mips_bseli_b : GCCBuiltin<"__builtin_msa_bseli_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_bset_b : GCCBuiltin<"__builtin_msa_bset_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_bset_h : GCCBuiltin<"__builtin_msa_bset_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_bset_w : GCCBuiltin<"__builtin_msa_bset_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_bset_d : GCCBuiltin<"__builtin_msa_bset_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_bseti_b : GCCBuiltin<"__builtin_msa_bseti_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bseti_h : GCCBuiltin<"__builtin_msa_bseti_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bseti_w : GCCBuiltin<"__builtin_msa_bseti_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bseti_d : GCCBuiltin<"__builtin_msa_bseti_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_bz_b : GCCBuiltin<"__builtin_msa_bz_b">,
+ Intrinsic<[llvm_i32_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_bz_h : GCCBuiltin<"__builtin_msa_bz_h">,
+ Intrinsic<[llvm_i32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_bz_w : GCCBuiltin<"__builtin_msa_bz_w">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_bz_d : GCCBuiltin<"__builtin_msa_bz_d">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_bz_v : GCCBuiltin<"__builtin_msa_bz_v">,
+ Intrinsic<[llvm_i32_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+
+def int_mips_ceq_b : GCCBuiltin<"__builtin_msa_ceq_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_ceq_h : GCCBuiltin<"__builtin_msa_ceq_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_ceq_w : GCCBuiltin<"__builtin_msa_ceq_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_ceq_d : GCCBuiltin<"__builtin_msa_ceq_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_ceqi_b : GCCBuiltin<"__builtin_msa_ceqi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_ceqi_h : GCCBuiltin<"__builtin_msa_ceqi_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_ceqi_w : GCCBuiltin<"__builtin_msa_ceqi_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_ceqi_d : GCCBuiltin<"__builtin_msa_ceqi_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_cfcmsa : GCCBuiltin<"__builtin_msa_cfcmsa">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], []>;
+
+def int_mips_cle_s_b : GCCBuiltin<"__builtin_msa_cle_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_cle_s_h : GCCBuiltin<"__builtin_msa_cle_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_cle_s_w : GCCBuiltin<"__builtin_msa_cle_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_cle_s_d : GCCBuiltin<"__builtin_msa_cle_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_cle_u_b : GCCBuiltin<"__builtin_msa_cle_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_cle_u_h : GCCBuiltin<"__builtin_msa_cle_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_cle_u_w : GCCBuiltin<"__builtin_msa_cle_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_cle_u_d : GCCBuiltin<"__builtin_msa_cle_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_clei_s_b : GCCBuiltin<"__builtin_msa_clei_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clei_s_h : GCCBuiltin<"__builtin_msa_clei_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clei_s_w : GCCBuiltin<"__builtin_msa_clei_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clei_s_d : GCCBuiltin<"__builtin_msa_clei_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_clei_u_b : GCCBuiltin<"__builtin_msa_clei_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clei_u_h : GCCBuiltin<"__builtin_msa_clei_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clei_u_w : GCCBuiltin<"__builtin_msa_clei_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clei_u_d : GCCBuiltin<"__builtin_msa_clei_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_clt_s_b : GCCBuiltin<"__builtin_msa_clt_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_clt_s_h : GCCBuiltin<"__builtin_msa_clt_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_clt_s_w : GCCBuiltin<"__builtin_msa_clt_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_clt_s_d : GCCBuiltin<"__builtin_msa_clt_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_clt_u_b : GCCBuiltin<"__builtin_msa_clt_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_clt_u_h : GCCBuiltin<"__builtin_msa_clt_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_clt_u_w : GCCBuiltin<"__builtin_msa_clt_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_clt_u_d : GCCBuiltin<"__builtin_msa_clt_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_clti_s_b : GCCBuiltin<"__builtin_msa_clti_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clti_s_h : GCCBuiltin<"__builtin_msa_clti_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clti_s_w : GCCBuiltin<"__builtin_msa_clti_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clti_s_d : GCCBuiltin<"__builtin_msa_clti_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_clti_u_b : GCCBuiltin<"__builtin_msa_clti_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clti_u_h : GCCBuiltin<"__builtin_msa_clti_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clti_u_w : GCCBuiltin<"__builtin_msa_clti_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clti_u_d : GCCBuiltin<"__builtin_msa_clti_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_copy_s_b : GCCBuiltin<"__builtin_msa_copy_s_b">,
+ Intrinsic<[llvm_i32_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_copy_s_h : GCCBuiltin<"__builtin_msa_copy_s_h">,
+ Intrinsic<[llvm_i32_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_copy_s_w : GCCBuiltin<"__builtin_msa_copy_s_w">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_copy_s_d : GCCBuiltin<"__builtin_msa_copy_s_d">,
+ Intrinsic<[llvm_i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_copy_u_b : GCCBuiltin<"__builtin_msa_copy_u_b">,
+ Intrinsic<[llvm_i32_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_copy_u_h : GCCBuiltin<"__builtin_msa_copy_u_h">,
+ Intrinsic<[llvm_i32_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_copy_u_w : GCCBuiltin<"__builtin_msa_copy_u_w">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_copy_u_d : GCCBuiltin<"__builtin_msa_copy_u_d">,
+ Intrinsic<[llvm_i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_ctcmsa : GCCBuiltin<"__builtin_msa_ctcmsa">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty], []>;
+
+def int_mips_div_s_b : GCCBuiltin<"__builtin_msa_div_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_div_s_h : GCCBuiltin<"__builtin_msa_div_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_div_s_w : GCCBuiltin<"__builtin_msa_div_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_div_s_d : GCCBuiltin<"__builtin_msa_div_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_div_u_b : GCCBuiltin<"__builtin_msa_div_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_div_u_h : GCCBuiltin<"__builtin_msa_div_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_div_u_w : GCCBuiltin<"__builtin_msa_div_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_div_u_d : GCCBuiltin<"__builtin_msa_div_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+// This instruction is part of the MSA spec but it does not share the
+// __builtin_msa prefix because it operates on GP registers.
+def int_mips_dlsa : GCCBuiltin<"__builtin_mips_dlsa">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
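+// (For reference, dlsa performs a scaled add on 64-bit GP registers; the
+// final i32 operand is an immediate shift amount.)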
+
+def int_mips_dotp_s_h : GCCBuiltin<"__builtin_msa_dotp_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_dotp_s_w : GCCBuiltin<"__builtin_msa_dotp_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_dotp_s_d : GCCBuiltin<"__builtin_msa_dotp_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_dotp_u_h : GCCBuiltin<"__builtin_msa_dotp_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_dotp_u_w : GCCBuiltin<"__builtin_msa_dotp_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_dotp_u_d : GCCBuiltin<"__builtin_msa_dotp_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_dpadd_s_h : GCCBuiltin<"__builtin_msa_dpadd_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_dpadd_s_w : GCCBuiltin<"__builtin_msa_dpadd_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_dpadd_s_d : GCCBuiltin<"__builtin_msa_dpadd_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_dpadd_u_h : GCCBuiltin<"__builtin_msa_dpadd_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_dpadd_u_w : GCCBuiltin<"__builtin_msa_dpadd_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_dpadd_u_d : GCCBuiltin<"__builtin_msa_dpadd_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_dpsub_s_h : GCCBuiltin<"__builtin_msa_dpsub_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_dpsub_s_w : GCCBuiltin<"__builtin_msa_dpsub_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_dpsub_s_d : GCCBuiltin<"__builtin_msa_dpsub_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_dpsub_u_h : GCCBuiltin<"__builtin_msa_dpsub_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_dpsub_u_w : GCCBuiltin<"__builtin_msa_dpsub_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_dpsub_u_d : GCCBuiltin<"__builtin_msa_dpsub_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_fadd_w : GCCBuiltin<"__builtin_msa_fadd_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fadd_d : GCCBuiltin<"__builtin_msa_fadd_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcaf_w : GCCBuiltin<"__builtin_msa_fcaf_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcaf_d : GCCBuiltin<"__builtin_msa_fcaf_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fceq_w : GCCBuiltin<"__builtin_msa_fceq_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fceq_d : GCCBuiltin<"__builtin_msa_fceq_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcle_w : GCCBuiltin<"__builtin_msa_fcle_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcle_d : GCCBuiltin<"__builtin_msa_fcle_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fclt_w : GCCBuiltin<"__builtin_msa_fclt_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fclt_d : GCCBuiltin<"__builtin_msa_fclt_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fclass_w : GCCBuiltin<"__builtin_msa_fclass_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fclass_d : GCCBuiltin<"__builtin_msa_fclass_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcne_w : GCCBuiltin<"__builtin_msa_fcne_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcne_d : GCCBuiltin<"__builtin_msa_fcne_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcor_w : GCCBuiltin<"__builtin_msa_fcor_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcor_d : GCCBuiltin<"__builtin_msa_fcor_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcueq_w : GCCBuiltin<"__builtin_msa_fcueq_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcueq_d : GCCBuiltin<"__builtin_msa_fcueq_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcule_w : GCCBuiltin<"__builtin_msa_fcule_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcule_d : GCCBuiltin<"__builtin_msa_fcule_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcult_w : GCCBuiltin<"__builtin_msa_fcult_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcult_d : GCCBuiltin<"__builtin_msa_fcult_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcun_w : GCCBuiltin<"__builtin_msa_fcun_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcun_d : GCCBuiltin<"__builtin_msa_fcun_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcune_w : GCCBuiltin<"__builtin_msa_fcune_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcune_d : GCCBuiltin<"__builtin_msa_fcune_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fdiv_w : GCCBuiltin<"__builtin_msa_fdiv_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fdiv_d : GCCBuiltin<"__builtin_msa_fdiv_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fexdo_h : GCCBuiltin<"__builtin_msa_fexdo_h">,
+ Intrinsic<[llvm_v8f16_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fexdo_w : GCCBuiltin<"__builtin_msa_fexdo_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fexp2_w : GCCBuiltin<"__builtin_msa_fexp2_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_fexp2_d : GCCBuiltin<"__builtin_msa_fexp2_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_fexupl_w : GCCBuiltin<"__builtin_msa_fexupl_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v8f16_ty], [IntrNoMem]>;
+def int_mips_fexupl_d : GCCBuiltin<"__builtin_msa_fexupl_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+
+def int_mips_fexupr_w : GCCBuiltin<"__builtin_msa_fexupr_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v8f16_ty], [IntrNoMem]>;
+def int_mips_fexupr_d : GCCBuiltin<"__builtin_msa_fexupr_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+
+def int_mips_ffint_s_w : GCCBuiltin<"__builtin_msa_ffint_s_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_ffint_s_d : GCCBuiltin<"__builtin_msa_ffint_s_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_ffint_u_w : GCCBuiltin<"__builtin_msa_ffint_u_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_ffint_u_d : GCCBuiltin<"__builtin_msa_ffint_u_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_ffql_w : GCCBuiltin<"__builtin_msa_ffql_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_ffql_d : GCCBuiltin<"__builtin_msa_ffql_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_ffqr_w : GCCBuiltin<"__builtin_msa_ffqr_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_ffqr_d : GCCBuiltin<"__builtin_msa_ffqr_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_fill_b : GCCBuiltin<"__builtin_msa_fill_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_mips_fill_h : GCCBuiltin<"__builtin_msa_fill_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_mips_fill_w : GCCBuiltin<"__builtin_msa_fill_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_mips_fill_d : GCCBuiltin<"__builtin_msa_fill_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_i64_ty], [IntrNoMem]>;
+
+def int_mips_flog2_w : GCCBuiltin<"__builtin_msa_flog2_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_flog2_d : GCCBuiltin<"__builtin_msa_flog2_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fmadd_w : GCCBuiltin<"__builtin_msa_fmadd_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty],
+ [IntrNoMem]>;
+def int_mips_fmadd_d : GCCBuiltin<"__builtin_msa_fmadd_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty, llvm_v2f64_ty],
+ [IntrNoMem]>;
+
+def int_mips_fmax_w : GCCBuiltin<"__builtin_msa_fmax_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fmax_d : GCCBuiltin<"__builtin_msa_fmax_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fmax_a_w : GCCBuiltin<"__builtin_msa_fmax_a_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fmax_a_d : GCCBuiltin<"__builtin_msa_fmax_a_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fmin_w : GCCBuiltin<"__builtin_msa_fmin_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fmin_d : GCCBuiltin<"__builtin_msa_fmin_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fmin_a_w : GCCBuiltin<"__builtin_msa_fmin_a_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fmin_a_d : GCCBuiltin<"__builtin_msa_fmin_a_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fmsub_w : GCCBuiltin<"__builtin_msa_fmsub_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty],
+ [IntrNoMem]>;
+def int_mips_fmsub_d : GCCBuiltin<"__builtin_msa_fmsub_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty, llvm_v2f64_ty],
+ [IntrNoMem]>;
+
+def int_mips_fmul_w : GCCBuiltin<"__builtin_msa_fmul_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fmul_d : GCCBuiltin<"__builtin_msa_fmul_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_frint_w : GCCBuiltin<"__builtin_msa_frint_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_frint_d : GCCBuiltin<"__builtin_msa_frint_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_frcp_w : GCCBuiltin<"__builtin_msa_frcp_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_frcp_d : GCCBuiltin<"__builtin_msa_frcp_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_frsqrt_w : GCCBuiltin<"__builtin_msa_frsqrt_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_frsqrt_d : GCCBuiltin<"__builtin_msa_frsqrt_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsaf_w : GCCBuiltin<"__builtin_msa_fsaf_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsaf_d : GCCBuiltin<"__builtin_msa_fsaf_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fseq_w : GCCBuiltin<"__builtin_msa_fseq_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fseq_d : GCCBuiltin<"__builtin_msa_fseq_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsle_w : GCCBuiltin<"__builtin_msa_fsle_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsle_d : GCCBuiltin<"__builtin_msa_fsle_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fslt_w : GCCBuiltin<"__builtin_msa_fslt_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fslt_d : GCCBuiltin<"__builtin_msa_fslt_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsne_w : GCCBuiltin<"__builtin_msa_fsne_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsne_d : GCCBuiltin<"__builtin_msa_fsne_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsor_w : GCCBuiltin<"__builtin_msa_fsor_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsor_d : GCCBuiltin<"__builtin_msa_fsor_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsqrt_w : GCCBuiltin<"__builtin_msa_fsqrt_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsqrt_d : GCCBuiltin<"__builtin_msa_fsqrt_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsub_w : GCCBuiltin<"__builtin_msa_fsub_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsub_d : GCCBuiltin<"__builtin_msa_fsub_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsueq_w : GCCBuiltin<"__builtin_msa_fsueq_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsueq_d : GCCBuiltin<"__builtin_msa_fsueq_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsule_w : GCCBuiltin<"__builtin_msa_fsule_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsule_d : GCCBuiltin<"__builtin_msa_fsule_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsult_w : GCCBuiltin<"__builtin_msa_fsult_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsult_d : GCCBuiltin<"__builtin_msa_fsult_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsun_w : GCCBuiltin<"__builtin_msa_fsun_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsun_d : GCCBuiltin<"__builtin_msa_fsun_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsune_w : GCCBuiltin<"__builtin_msa_fsune_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsune_d : GCCBuiltin<"__builtin_msa_fsune_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_ftint_s_w : GCCBuiltin<"__builtin_msa_ftint_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_ftint_s_d : GCCBuiltin<"__builtin_msa_ftint_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_ftint_u_w : GCCBuiltin<"__builtin_msa_ftint_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_ftint_u_d : GCCBuiltin<"__builtin_msa_ftint_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_ftq_h : GCCBuiltin<"__builtin_msa_ftq_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_ftq_w : GCCBuiltin<"__builtin_msa_ftq_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_ftrunc_s_w : GCCBuiltin<"__builtin_msa_ftrunc_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_ftrunc_s_d : GCCBuiltin<"__builtin_msa_ftrunc_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_ftrunc_u_w : GCCBuiltin<"__builtin_msa_ftrunc_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_ftrunc_u_d : GCCBuiltin<"__builtin_msa_ftrunc_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_hadd_s_h : GCCBuiltin<"__builtin_msa_hadd_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_hadd_s_w : GCCBuiltin<"__builtin_msa_hadd_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_hadd_s_d : GCCBuiltin<"__builtin_msa_hadd_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_hadd_u_h : GCCBuiltin<"__builtin_msa_hadd_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_hadd_u_w : GCCBuiltin<"__builtin_msa_hadd_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_hadd_u_d : GCCBuiltin<"__builtin_msa_hadd_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_hsub_s_h : GCCBuiltin<"__builtin_msa_hsub_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_hsub_s_w : GCCBuiltin<"__builtin_msa_hsub_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_hsub_s_d : GCCBuiltin<"__builtin_msa_hsub_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_hsub_u_h : GCCBuiltin<"__builtin_msa_hsub_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_hsub_u_w : GCCBuiltin<"__builtin_msa_hsub_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_hsub_u_d : GCCBuiltin<"__builtin_msa_hsub_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_ilvev_b : GCCBuiltin<"__builtin_msa_ilvev_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_ilvev_h : GCCBuiltin<"__builtin_msa_ilvev_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_ilvev_w : GCCBuiltin<"__builtin_msa_ilvev_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_ilvev_d : GCCBuiltin<"__builtin_msa_ilvev_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_ilvl_b : GCCBuiltin<"__builtin_msa_ilvl_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_ilvl_h : GCCBuiltin<"__builtin_msa_ilvl_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_ilvl_w : GCCBuiltin<"__builtin_msa_ilvl_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_ilvl_d : GCCBuiltin<"__builtin_msa_ilvl_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_ilvod_b : GCCBuiltin<"__builtin_msa_ilvod_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_ilvod_h : GCCBuiltin<"__builtin_msa_ilvod_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_ilvod_w : GCCBuiltin<"__builtin_msa_ilvod_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_ilvod_d : GCCBuiltin<"__builtin_msa_ilvod_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_ilvr_b : GCCBuiltin<"__builtin_msa_ilvr_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_ilvr_h : GCCBuiltin<"__builtin_msa_ilvr_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_ilvr_w : GCCBuiltin<"__builtin_msa_ilvr_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_ilvr_d : GCCBuiltin<"__builtin_msa_ilvr_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_insert_b : GCCBuiltin<"__builtin_msa_insert_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_insert_h : GCCBuiltin<"__builtin_msa_insert_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_insert_w : GCCBuiltin<"__builtin_msa_insert_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_insert_d : GCCBuiltin<"__builtin_msa_insert_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+
+def int_mips_insve_b : GCCBuiltin<"__builtin_msa_insve_b">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i8_ty, llvm_i32_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_insve_h : GCCBuiltin<"__builtin_msa_insve_h">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v8i16_ty, llvm_i32_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_insve_w : GCCBuiltin<"__builtin_msa_insve_w">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+def int_mips_insve_d : GCCBuiltin<"__builtin_msa_insve_d">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2i64_ty, llvm_i32_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+def int_mips_ld_b : GCCBuiltin<"__builtin_msa_ld_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+def int_mips_ld_h : GCCBuiltin<"__builtin_msa_ld_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+def int_mips_ld_w : GCCBuiltin<"__builtin_msa_ld_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+def int_mips_ld_d : GCCBuiltin<"__builtin_msa_ld_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+def int_mips_ldi_b : GCCBuiltin<"__builtin_msa_ldi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_mips_ldi_h : GCCBuiltin<"__builtin_msa_ldi_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_mips_ldi_w : GCCBuiltin<"__builtin_msa_ldi_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_mips_ldi_d : GCCBuiltin<"__builtin_msa_ldi_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_i32_ty], [IntrNoMem]>;
+
+// This instruction is part of the MSA spec but it does not share the
+// __builtin_msa prefix because it operates on the GPR registers.
+def int_mips_lsa : GCCBuiltin<"__builtin_mips_lsa">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_madd_q_h : GCCBuiltin<"__builtin_msa_madd_q_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_madd_q_w : GCCBuiltin<"__builtin_msa_madd_q_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_maddr_q_h : GCCBuiltin<"__builtin_msa_maddr_q_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_maddr_q_w : GCCBuiltin<"__builtin_msa_maddr_q_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_maddv_b : GCCBuiltin<"__builtin_msa_maddv_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_maddv_h : GCCBuiltin<"__builtin_msa_maddv_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_maddv_w : GCCBuiltin<"__builtin_msa_maddv_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+def int_mips_maddv_d : GCCBuiltin<"__builtin_msa_maddv_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+def int_mips_max_a_b : GCCBuiltin<"__builtin_msa_max_a_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_max_a_h : GCCBuiltin<"__builtin_msa_max_a_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_max_a_w : GCCBuiltin<"__builtin_msa_max_a_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_max_a_d : GCCBuiltin<"__builtin_msa_max_a_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_max_s_b : GCCBuiltin<"__builtin_msa_max_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_max_s_h : GCCBuiltin<"__builtin_msa_max_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_max_s_w : GCCBuiltin<"__builtin_msa_max_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_max_s_d : GCCBuiltin<"__builtin_msa_max_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_max_u_b : GCCBuiltin<"__builtin_msa_max_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_max_u_h : GCCBuiltin<"__builtin_msa_max_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_max_u_w : GCCBuiltin<"__builtin_msa_max_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_max_u_d : GCCBuiltin<"__builtin_msa_max_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_maxi_s_b : GCCBuiltin<"__builtin_msa_maxi_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_maxi_s_h : GCCBuiltin<"__builtin_msa_maxi_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_maxi_s_w : GCCBuiltin<"__builtin_msa_maxi_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_maxi_s_d : GCCBuiltin<"__builtin_msa_maxi_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_maxi_u_b : GCCBuiltin<"__builtin_msa_maxi_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_maxi_u_h : GCCBuiltin<"__builtin_msa_maxi_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_maxi_u_w : GCCBuiltin<"__builtin_msa_maxi_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_maxi_u_d : GCCBuiltin<"__builtin_msa_maxi_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_min_a_b : GCCBuiltin<"__builtin_msa_min_a_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_min_a_h : GCCBuiltin<"__builtin_msa_min_a_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_min_a_w : GCCBuiltin<"__builtin_msa_min_a_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_min_a_d : GCCBuiltin<"__builtin_msa_min_a_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_min_s_b : GCCBuiltin<"__builtin_msa_min_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_min_s_h : GCCBuiltin<"__builtin_msa_min_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_min_s_w : GCCBuiltin<"__builtin_msa_min_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_min_s_d : GCCBuiltin<"__builtin_msa_min_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_min_u_b : GCCBuiltin<"__builtin_msa_min_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_min_u_h : GCCBuiltin<"__builtin_msa_min_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_min_u_w : GCCBuiltin<"__builtin_msa_min_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_min_u_d : GCCBuiltin<"__builtin_msa_min_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_mini_s_b : GCCBuiltin<"__builtin_msa_mini_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_mini_s_h : GCCBuiltin<"__builtin_msa_mini_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_mini_s_w : GCCBuiltin<"__builtin_msa_mini_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_mini_s_d : GCCBuiltin<"__builtin_msa_mini_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_mini_u_b : GCCBuiltin<"__builtin_msa_mini_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_mini_u_h : GCCBuiltin<"__builtin_msa_mini_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_mini_u_w : GCCBuiltin<"__builtin_msa_mini_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_mini_u_d : GCCBuiltin<"__builtin_msa_mini_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_mod_s_b : GCCBuiltin<"__builtin_msa_mod_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_mod_s_h : GCCBuiltin<"__builtin_msa_mod_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_mod_s_w : GCCBuiltin<"__builtin_msa_mod_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_mod_s_d : GCCBuiltin<"__builtin_msa_mod_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_mod_u_b : GCCBuiltin<"__builtin_msa_mod_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_mod_u_h : GCCBuiltin<"__builtin_msa_mod_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_mod_u_w : GCCBuiltin<"__builtin_msa_mod_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_mod_u_d : GCCBuiltin<"__builtin_msa_mod_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_move_v : GCCBuiltin<"__builtin_msa_move_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+
+def int_mips_msub_q_h : GCCBuiltin<"__builtin_msa_msub_q_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_msub_q_w : GCCBuiltin<"__builtin_msa_msub_q_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_msubr_q_h : GCCBuiltin<"__builtin_msa_msubr_q_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_msubr_q_w : GCCBuiltin<"__builtin_msa_msubr_q_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_msubv_b : GCCBuiltin<"__builtin_msa_msubv_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_msubv_h : GCCBuiltin<"__builtin_msa_msubv_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_msubv_w : GCCBuiltin<"__builtin_msa_msubv_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+def int_mips_msubv_d : GCCBuiltin<"__builtin_msa_msubv_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+def int_mips_mul_q_h : GCCBuiltin<"__builtin_msa_mul_q_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_mul_q_w : GCCBuiltin<"__builtin_msa_mul_q_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_mulr_q_h : GCCBuiltin<"__builtin_msa_mulr_q_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_mulr_q_w : GCCBuiltin<"__builtin_msa_mulr_q_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_mulv_b : GCCBuiltin<"__builtin_msa_mulv_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_mulv_h : GCCBuiltin<"__builtin_msa_mulv_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_mulv_w : GCCBuiltin<"__builtin_msa_mulv_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_mulv_d : GCCBuiltin<"__builtin_msa_mulv_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_nloc_b : GCCBuiltin<"__builtin_msa_nloc_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_nloc_h : GCCBuiltin<"__builtin_msa_nloc_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_nloc_w : GCCBuiltin<"__builtin_msa_nloc_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_nloc_d : GCCBuiltin<"__builtin_msa_nloc_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_nlzc_b : GCCBuiltin<"__builtin_msa_nlzc_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_nlzc_h : GCCBuiltin<"__builtin_msa_nlzc_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_nlzc_w : GCCBuiltin<"__builtin_msa_nlzc_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_nlzc_d : GCCBuiltin<"__builtin_msa_nlzc_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_nor_v : GCCBuiltin<"__builtin_msa_nor_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+
+def int_mips_nori_b : GCCBuiltin<"__builtin_msa_nori_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_or_v : GCCBuiltin<"__builtin_msa_or_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+
+def int_mips_ori_b : GCCBuiltin<"__builtin_msa_ori_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_pckev_b : GCCBuiltin<"__builtin_msa_pckev_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_pckev_h : GCCBuiltin<"__builtin_msa_pckev_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_pckev_w : GCCBuiltin<"__builtin_msa_pckev_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_pckev_d : GCCBuiltin<"__builtin_msa_pckev_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_pckod_b : GCCBuiltin<"__builtin_msa_pckod_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_pckod_h : GCCBuiltin<"__builtin_msa_pckod_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_pckod_w : GCCBuiltin<"__builtin_msa_pckod_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_pckod_d : GCCBuiltin<"__builtin_msa_pckod_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_pcnt_b : GCCBuiltin<"__builtin_msa_pcnt_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_pcnt_h : GCCBuiltin<"__builtin_msa_pcnt_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_pcnt_w : GCCBuiltin<"__builtin_msa_pcnt_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_pcnt_d : GCCBuiltin<"__builtin_msa_pcnt_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_sat_s_b : GCCBuiltin<"__builtin_msa_sat_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sat_s_h : GCCBuiltin<"__builtin_msa_sat_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sat_s_w : GCCBuiltin<"__builtin_msa_sat_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sat_s_d : GCCBuiltin<"__builtin_msa_sat_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_sat_u_b : GCCBuiltin<"__builtin_msa_sat_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sat_u_h : GCCBuiltin<"__builtin_msa_sat_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sat_u_w : GCCBuiltin<"__builtin_msa_sat_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sat_u_d : GCCBuiltin<"__builtin_msa_sat_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_shf_b : GCCBuiltin<"__builtin_msa_shf_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_shf_h : GCCBuiltin<"__builtin_msa_shf_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_shf_w : GCCBuiltin<"__builtin_msa_shf_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_sld_b : GCCBuiltin<"__builtin_msa_sld_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sld_h : GCCBuiltin<"__builtin_msa_sld_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sld_w : GCCBuiltin<"__builtin_msa_sld_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sld_d : GCCBuiltin<"__builtin_msa_sld_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_sldi_b : GCCBuiltin<"__builtin_msa_sldi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_sldi_h : GCCBuiltin<"__builtin_msa_sldi_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_sldi_w : GCCBuiltin<"__builtin_msa_sldi_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_sldi_d : GCCBuiltin<"__builtin_msa_sldi_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_sll_b : GCCBuiltin<"__builtin_msa_sll_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_sll_h : GCCBuiltin<"__builtin_msa_sll_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_sll_w : GCCBuiltin<"__builtin_msa_sll_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_sll_d : GCCBuiltin<"__builtin_msa_sll_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_slli_b : GCCBuiltin<"__builtin_msa_slli_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_slli_h : GCCBuiltin<"__builtin_msa_slli_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_slli_w : GCCBuiltin<"__builtin_msa_slli_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_slli_d : GCCBuiltin<"__builtin_msa_slli_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_splat_b : GCCBuiltin<"__builtin_msa_splat_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_splat_h : GCCBuiltin<"__builtin_msa_splat_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_splat_w : GCCBuiltin<"__builtin_msa_splat_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_splat_d : GCCBuiltin<"__builtin_msa_splat_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_splati_b : GCCBuiltin<"__builtin_msa_splati_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_splati_h : GCCBuiltin<"__builtin_msa_splati_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_splati_w : GCCBuiltin<"__builtin_msa_splati_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_splati_d : GCCBuiltin<"__builtin_msa_splati_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_sra_b : GCCBuiltin<"__builtin_msa_sra_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_sra_h : GCCBuiltin<"__builtin_msa_sra_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_sra_w : GCCBuiltin<"__builtin_msa_sra_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_sra_d : GCCBuiltin<"__builtin_msa_sra_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_srai_b : GCCBuiltin<"__builtin_msa_srai_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srai_h : GCCBuiltin<"__builtin_msa_srai_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srai_w : GCCBuiltin<"__builtin_msa_srai_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srai_d : GCCBuiltin<"__builtin_msa_srai_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_srar_b : GCCBuiltin<"__builtin_msa_srar_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_srar_h : GCCBuiltin<"__builtin_msa_srar_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_srar_w : GCCBuiltin<"__builtin_msa_srar_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_srar_d : GCCBuiltin<"__builtin_msa_srar_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_srari_b : GCCBuiltin<"__builtin_msa_srari_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srari_h : GCCBuiltin<"__builtin_msa_srari_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srari_w : GCCBuiltin<"__builtin_msa_srari_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srari_d : GCCBuiltin<"__builtin_msa_srari_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_srl_b : GCCBuiltin<"__builtin_msa_srl_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_srl_h : GCCBuiltin<"__builtin_msa_srl_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_srl_w : GCCBuiltin<"__builtin_msa_srl_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_srl_d : GCCBuiltin<"__builtin_msa_srl_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_srli_b : GCCBuiltin<"__builtin_msa_srli_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srli_h : GCCBuiltin<"__builtin_msa_srli_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srli_w : GCCBuiltin<"__builtin_msa_srli_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srli_d : GCCBuiltin<"__builtin_msa_srli_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_srlr_b : GCCBuiltin<"__builtin_msa_srlr_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_srlr_h : GCCBuiltin<"__builtin_msa_srlr_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_srlr_w : GCCBuiltin<"__builtin_msa_srlr_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_srlr_d : GCCBuiltin<"__builtin_msa_srlr_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_srlri_b : GCCBuiltin<"__builtin_msa_srlri_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srlri_h : GCCBuiltin<"__builtin_msa_srlri_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srlri_w : GCCBuiltin<"__builtin_msa_srlri_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srlri_d : GCCBuiltin<"__builtin_msa_srlri_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_st_b : GCCBuiltin<"__builtin_msa_st_b">,
+ Intrinsic<[], [llvm_v16i8_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+def int_mips_st_h : GCCBuiltin<"__builtin_msa_st_h">,
+ Intrinsic<[], [llvm_v8i16_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+def int_mips_st_w : GCCBuiltin<"__builtin_msa_st_w">,
+ Intrinsic<[], [llvm_v4i32_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+def int_mips_st_d : GCCBuiltin<"__builtin_msa_st_d">,
+ Intrinsic<[], [llvm_v2i64_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+def int_mips_subs_s_b : GCCBuiltin<"__builtin_msa_subs_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_subs_s_h : GCCBuiltin<"__builtin_msa_subs_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_subs_s_w : GCCBuiltin<"__builtin_msa_subs_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_subs_s_d : GCCBuiltin<"__builtin_msa_subs_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_subs_u_b : GCCBuiltin<"__builtin_msa_subs_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_subs_u_h : GCCBuiltin<"__builtin_msa_subs_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_subs_u_w : GCCBuiltin<"__builtin_msa_subs_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_subs_u_d : GCCBuiltin<"__builtin_msa_subs_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_subsus_u_b : GCCBuiltin<"__builtin_msa_subsus_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_subsus_u_h : GCCBuiltin<"__builtin_msa_subsus_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_subsus_u_w : GCCBuiltin<"__builtin_msa_subsus_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_subsus_u_d : GCCBuiltin<"__builtin_msa_subsus_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_subsuu_s_b : GCCBuiltin<"__builtin_msa_subsuu_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_subsuu_s_h : GCCBuiltin<"__builtin_msa_subsuu_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_subsuu_s_w : GCCBuiltin<"__builtin_msa_subsuu_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_subsuu_s_d : GCCBuiltin<"__builtin_msa_subsuu_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_subv_b : GCCBuiltin<"__builtin_msa_subv_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_subv_h : GCCBuiltin<"__builtin_msa_subv_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_subv_w : GCCBuiltin<"__builtin_msa_subv_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_subv_d : GCCBuiltin<"__builtin_msa_subv_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_subvi_b : GCCBuiltin<"__builtin_msa_subvi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_subvi_h : GCCBuiltin<"__builtin_msa_subvi_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_subvi_w : GCCBuiltin<"__builtin_msa_subvi_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_subvi_d : GCCBuiltin<"__builtin_msa_subvi_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_vshf_b : GCCBuiltin<"__builtin_msa_vshf_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_vshf_h : GCCBuiltin<"__builtin_msa_vshf_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_vshf_w : GCCBuiltin<"__builtin_msa_vshf_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+def int_mips_vshf_d : GCCBuiltin<"__builtin_msa_vshf_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+def int_mips_xor_v : GCCBuiltin<"__builtin_msa_xor_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+
+def int_mips_xori_b : GCCBuiltin<"__builtin_msa_xori_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+}
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsNVVM.td b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsNVVM.td
new file mode 100644
index 000000000..7f694f689
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsNVVM.td
@@ -0,0 +1,4047 @@
+//===- IntrinsicsNVVM.td - Defines NVVM intrinsics ---------*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines all of the NVVM-specific intrinsics for use with NVPTX.
+//
+//===----------------------------------------------------------------------===//
+
+// The following intrinsics were once defined here, but are now auto-upgraded
+// to target-generic LLVM intrinsics.
+//
+// * llvm.nvvm.brev32 --> llvm.bitreverse.i32
+// * llvm.nvvm.brev64 --> llvm.bitreverse.i64
+// * llvm.nvvm.clz.i --> llvm.ctlz.i32
+// * llvm.nvvm.clz.ll --> trunc i64 llvm.ctlz.i64(x) to i32
+// * llvm.nvvm.popc.i --> llvm.ctpop.i32
+// * llvm.nvvm.popc.ll --> trunc i64 llvm.ctpop.i64 to i32
+// * llvm.nvvm.abs.i --> select(x >= -x, x, -x)
+// * llvm.nvvm.abs.ll --> ibid.
+// * llvm.nvvm.max.i --> select(x sge y, x, y)
+// * llvm.nvvm.max.ll --> ibid.
+// * llvm.nvvm.max.ui --> select(x uge y, x, y)
+// * llvm.nvvm.max.ull --> ibid.
+// * llvm.nvvm.min.i --> select(x sle y, x, y)
+// * llvm.nvvm.min.ll --> ibid.
+// * llvm.nvvm.min.ui --> select(x ule y, x, y)
+// * llvm.nvvm.min.ull --> ibid.
+// * llvm.nvvm.h2f --> llvm.convert.to.fp16.f32
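+//
+// As a rough illustration of that upgrade (value names below are
+// placeholders, not taken from this file), a legacy call such as
+//   %m = call i32 @llvm.nvvm.max.i(i32 %x, i32 %y)
+// is rewritten into the generic IR listed above:
+//   %c = icmp sge i32 %x, %y
+//   %m = select i1 %c, i32 %x, i32 %y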
+
+def llvm_anyi64ptr_ty : LLVMAnyPointerType<llvm_i64_ty>; // (space)i64*
+
+//
+// MISC
+//
+
+let TargetPrefix = "nvvm" in {
+ def int_nvvm_prmt : GCCBuiltin<"__nvvm_prmt">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, Commutative]>;
+
+//
+// Min Max
+//
+
+ def int_nvvm_fmin_f : GCCBuiltin<"__nvvm_fmin_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_fmin_ftz_f : GCCBuiltin<"__nvvm_fmin_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+
+ def int_nvvm_fmax_f : GCCBuiltin<"__nvvm_fmax_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_fmax_ftz_f : GCCBuiltin<"__nvvm_fmax_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+
+ def int_nvvm_fmin_d : GCCBuiltin<"__nvvm_fmin_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_fmax_d : GCCBuiltin<"__nvvm_fmax_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+
+//
+// Multiplication
+//
+
+ def int_nvvm_mulhi_i : GCCBuiltin<"__nvvm_mulhi_i">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_mulhi_ui : GCCBuiltin<"__nvvm_mulhi_ui">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, Commutative]>;
+
+ def int_nvvm_mulhi_ll : GCCBuiltin<"__nvvm_mulhi_ll">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_mulhi_ull : GCCBuiltin<"__nvvm_mulhi_ull">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty],
+ [IntrNoMem, Commutative]>;
+
+ def int_nvvm_mul_rn_ftz_f : GCCBuiltin<"__nvvm_mul_rn_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_mul_rn_f : GCCBuiltin<"__nvvm_mul_rn_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_mul_rz_ftz_f : GCCBuiltin<"__nvvm_mul_rz_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_mul_rz_f : GCCBuiltin<"__nvvm_mul_rz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_mul_rm_ftz_f : GCCBuiltin<"__nvvm_mul_rm_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_mul_rm_f : GCCBuiltin<"__nvvm_mul_rm_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_mul_rp_ftz_f : GCCBuiltin<"__nvvm_mul_rp_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_mul_rp_f : GCCBuiltin<"__nvvm_mul_rp_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+
+ def int_nvvm_mul_rn_d : GCCBuiltin<"__nvvm_mul_rn_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_mul_rz_d : GCCBuiltin<"__nvvm_mul_rz_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_mul_rm_d : GCCBuiltin<"__nvvm_mul_rm_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_mul_rp_d : GCCBuiltin<"__nvvm_mul_rp_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+
+ def int_nvvm_mul24_i : GCCBuiltin<"__nvvm_mul24_i">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_mul24_ui : GCCBuiltin<"__nvvm_mul24_ui">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, Commutative]>;
+
+//
+// Div
+//
+
+ def int_nvvm_div_approx_ftz_f : GCCBuiltin<"__nvvm_div_approx_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_div_approx_f : GCCBuiltin<"__nvvm_div_approx_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+
+ def int_nvvm_div_rn_ftz_f : GCCBuiltin<"__nvvm_div_rn_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_div_rn_f : GCCBuiltin<"__nvvm_div_rn_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+
+ def int_nvvm_div_rz_ftz_f : GCCBuiltin<"__nvvm_div_rz_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_div_rz_f : GCCBuiltin<"__nvvm_div_rz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+
+ def int_nvvm_div_rm_ftz_f : GCCBuiltin<"__nvvm_div_rm_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_div_rm_f : GCCBuiltin<"__nvvm_div_rm_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+
+ def int_nvvm_div_rp_ftz_f : GCCBuiltin<"__nvvm_div_rp_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_div_rp_f : GCCBuiltin<"__nvvm_div_rp_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+
+ def int_nvvm_div_rn_d : GCCBuiltin<"__nvvm_div_rn_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_div_rz_d : GCCBuiltin<"__nvvm_div_rz_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_div_rm_d : GCCBuiltin<"__nvvm_div_rm_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_div_rp_d : GCCBuiltin<"__nvvm_div_rp_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+
+//
+// Sad
+//
+
+ def int_nvvm_sad_i : GCCBuiltin<"__nvvm_sad_i">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_sad_ui : GCCBuiltin<"__nvvm_sad_ui">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, Commutative]>;
+
+//
+// Floor Ceil
+//
+
+ def int_nvvm_floor_ftz_f : GCCBuiltin<"__nvvm_floor_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_floor_f : GCCBuiltin<"__nvvm_floor_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_floor_d : GCCBuiltin<"__nvvm_floor_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+
+ def int_nvvm_ceil_ftz_f : GCCBuiltin<"__nvvm_ceil_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_ceil_f : GCCBuiltin<"__nvvm_ceil_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_ceil_d : GCCBuiltin<"__nvvm_ceil_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+
+//
+// Abs
+//
+
+ def int_nvvm_fabs_ftz_f : GCCBuiltin<"__nvvm_fabs_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_fabs_f : GCCBuiltin<"__nvvm_fabs_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_fabs_d : GCCBuiltin<"__nvvm_fabs_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+
+//
+// Round
+//
+
+ def int_nvvm_round_ftz_f : GCCBuiltin<"__nvvm_round_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_round_f : GCCBuiltin<"__nvvm_round_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+
+ def int_nvvm_round_d : GCCBuiltin<"__nvvm_round_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+
+//
+// Trunc
+//
+
+ def int_nvvm_trunc_ftz_f : GCCBuiltin<"__nvvm_trunc_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_trunc_f : GCCBuiltin<"__nvvm_trunc_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+
+ def int_nvvm_trunc_d : GCCBuiltin<"__nvvm_trunc_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+
+//
+// Saturate
+//
+
+ def int_nvvm_saturate_ftz_f : GCCBuiltin<"__nvvm_saturate_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_saturate_f : GCCBuiltin<"__nvvm_saturate_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+
+ def int_nvvm_saturate_d : GCCBuiltin<"__nvvm_saturate_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+
+//
+// Exp2 Log2
+//
+
+ def int_nvvm_ex2_approx_ftz_f : GCCBuiltin<"__nvvm_ex2_approx_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_ex2_approx_f : GCCBuiltin<"__nvvm_ex2_approx_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_ex2_approx_d : GCCBuiltin<"__nvvm_ex2_approx_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+
+ def int_nvvm_lg2_approx_ftz_f : GCCBuiltin<"__nvvm_lg2_approx_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_lg2_approx_f : GCCBuiltin<"__nvvm_lg2_approx_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_lg2_approx_d : GCCBuiltin<"__nvvm_lg2_approx_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+
+//
+// Sin Cos
+//
+
+ def int_nvvm_sin_approx_ftz_f : GCCBuiltin<"__nvvm_sin_approx_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_sin_approx_f : GCCBuiltin<"__nvvm_sin_approx_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+
+ def int_nvvm_cos_approx_ftz_f : GCCBuiltin<"__nvvm_cos_approx_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_cos_approx_f : GCCBuiltin<"__nvvm_cos_approx_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+
+//
+// Fma
+//
+
+ def int_nvvm_fma_rn_ftz_f : GCCBuiltin<"__nvvm_fma_rn_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_fma_rn_f : GCCBuiltin<"__nvvm_fma_rn_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_fma_rz_ftz_f : GCCBuiltin<"__nvvm_fma_rz_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_fma_rz_f : GCCBuiltin<"__nvvm_fma_rz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_fma_rm_ftz_f : GCCBuiltin<"__nvvm_fma_rm_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_fma_rm_f : GCCBuiltin<"__nvvm_fma_rm_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_fma_rp_ftz_f : GCCBuiltin<"__nvvm_fma_rp_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_fma_rp_f : GCCBuiltin<"__nvvm_fma_rp_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+
+ def int_nvvm_fma_rn_d : GCCBuiltin<"__nvvm_fma_rn_d">,
+ Intrinsic<[llvm_double_ty],
+ [llvm_double_ty, llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_fma_rz_d : GCCBuiltin<"__nvvm_fma_rz_d">,
+ Intrinsic<[llvm_double_ty],
+ [llvm_double_ty, llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_fma_rm_d : GCCBuiltin<"__nvvm_fma_rm_d">,
+ Intrinsic<[llvm_double_ty],
+ [llvm_double_ty, llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_fma_rp_d : GCCBuiltin<"__nvvm_fma_rp_d">,
+ Intrinsic<[llvm_double_ty],
+ [llvm_double_ty, llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+
+//
+// Rcp
+//
+
+ def int_nvvm_rcp_rn_ftz_f : GCCBuiltin<"__nvvm_rcp_rn_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_rcp_rn_f : GCCBuiltin<"__nvvm_rcp_rn_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_rcp_rz_ftz_f : GCCBuiltin<"__nvvm_rcp_rz_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_rcp_rz_f : GCCBuiltin<"__nvvm_rcp_rz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_rcp_rm_ftz_f : GCCBuiltin<"__nvvm_rcp_rm_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_rcp_rm_f : GCCBuiltin<"__nvvm_rcp_rm_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_rcp_rp_ftz_f : GCCBuiltin<"__nvvm_rcp_rp_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_rcp_rp_f : GCCBuiltin<"__nvvm_rcp_rp_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+
+ def int_nvvm_rcp_rn_d : GCCBuiltin<"__nvvm_rcp_rn_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_rcp_rz_d : GCCBuiltin<"__nvvm_rcp_rz_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_rcp_rm_d : GCCBuiltin<"__nvvm_rcp_rm_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_rcp_rp_d : GCCBuiltin<"__nvvm_rcp_rp_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+
+ def int_nvvm_rcp_approx_ftz_d : GCCBuiltin<"__nvvm_rcp_approx_ftz_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+
+//
+// Sqrt
+//
+
+ def int_nvvm_sqrt_f : GCCBuiltin<"__nvvm_sqrt_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_sqrt_rn_ftz_f : GCCBuiltin<"__nvvm_sqrt_rn_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_sqrt_rn_f : GCCBuiltin<"__nvvm_sqrt_rn_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_sqrt_rz_ftz_f : GCCBuiltin<"__nvvm_sqrt_rz_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_sqrt_rz_f : GCCBuiltin<"__nvvm_sqrt_rz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_sqrt_rm_ftz_f : GCCBuiltin<"__nvvm_sqrt_rm_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_sqrt_rm_f : GCCBuiltin<"__nvvm_sqrt_rm_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_sqrt_rp_ftz_f : GCCBuiltin<"__nvvm_sqrt_rp_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_sqrt_rp_f : GCCBuiltin<"__nvvm_sqrt_rp_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_sqrt_approx_ftz_f : GCCBuiltin<"__nvvm_sqrt_approx_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_sqrt_approx_f : GCCBuiltin<"__nvvm_sqrt_approx_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+
+ def int_nvvm_sqrt_rn_d : GCCBuiltin<"__nvvm_sqrt_rn_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_sqrt_rz_d : GCCBuiltin<"__nvvm_sqrt_rz_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_sqrt_rm_d : GCCBuiltin<"__nvvm_sqrt_rm_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_sqrt_rp_d : GCCBuiltin<"__nvvm_sqrt_rp_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+
+//
+// Rsqrt
+//
+
+ def int_nvvm_rsqrt_approx_ftz_f : GCCBuiltin<"__nvvm_rsqrt_approx_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_rsqrt_approx_f : GCCBuiltin<"__nvvm_rsqrt_approx_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_rsqrt_approx_d : GCCBuiltin<"__nvvm_rsqrt_approx_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+
+//
+// Add
+//
+
+ def int_nvvm_add_rn_ftz_f : GCCBuiltin<"__nvvm_add_rn_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_add_rn_f : GCCBuiltin<"__nvvm_add_rn_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_add_rz_ftz_f : GCCBuiltin<"__nvvm_add_rz_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_add_rz_f : GCCBuiltin<"__nvvm_add_rz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_add_rm_ftz_f : GCCBuiltin<"__nvvm_add_rm_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_add_rm_f : GCCBuiltin<"__nvvm_add_rm_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_add_rp_ftz_f : GCCBuiltin<"__nvvm_add_rp_ftz_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_add_rp_f : GCCBuiltin<"__nvvm_add_rp_f">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty],
+ [IntrNoMem, Commutative]>;
+
+ def int_nvvm_add_rn_d : GCCBuiltin<"__nvvm_add_rn_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_add_rz_d : GCCBuiltin<"__nvvm_add_rz_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_add_rm_d : GCCBuiltin<"__nvvm_add_rm_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+ def int_nvvm_add_rp_d : GCCBuiltin<"__nvvm_add_rp_d">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem, Commutative]>;
+
+//
+// Convert
+//
+
+ def int_nvvm_d2f_rn_ftz : GCCBuiltin<"__nvvm_d2f_rn_ftz">,
+ Intrinsic<[llvm_float_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2f_rn : GCCBuiltin<"__nvvm_d2f_rn">,
+ Intrinsic<[llvm_float_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2f_rz_ftz : GCCBuiltin<"__nvvm_d2f_rz_ftz">,
+ Intrinsic<[llvm_float_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2f_rz : GCCBuiltin<"__nvvm_d2f_rz">,
+ Intrinsic<[llvm_float_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2f_rm_ftz : GCCBuiltin<"__nvvm_d2f_rm_ftz">,
+ Intrinsic<[llvm_float_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2f_rm : GCCBuiltin<"__nvvm_d2f_rm">,
+ Intrinsic<[llvm_float_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2f_rp_ftz : GCCBuiltin<"__nvvm_d2f_rp_ftz">,
+ Intrinsic<[llvm_float_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2f_rp : GCCBuiltin<"__nvvm_d2f_rp">,
+ Intrinsic<[llvm_float_ty], [llvm_double_ty], [IntrNoMem]>;
+
+ def int_nvvm_d2i_rn : GCCBuiltin<"__nvvm_d2i_rn">,
+ Intrinsic<[llvm_i32_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2i_rz : GCCBuiltin<"__nvvm_d2i_rz">,
+ Intrinsic<[llvm_i32_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2i_rm : GCCBuiltin<"__nvvm_d2i_rm">,
+ Intrinsic<[llvm_i32_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2i_rp : GCCBuiltin<"__nvvm_d2i_rp">,
+ Intrinsic<[llvm_i32_ty], [llvm_double_ty], [IntrNoMem]>;
+
+ def int_nvvm_d2ui_rn : GCCBuiltin<"__nvvm_d2ui_rn">,
+ Intrinsic<[llvm_i32_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2ui_rz : GCCBuiltin<"__nvvm_d2ui_rz">,
+ Intrinsic<[llvm_i32_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2ui_rm : GCCBuiltin<"__nvvm_d2ui_rm">,
+ Intrinsic<[llvm_i32_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2ui_rp : GCCBuiltin<"__nvvm_d2ui_rp">,
+ Intrinsic<[llvm_i32_ty], [llvm_double_ty], [IntrNoMem]>;
+
+ def int_nvvm_i2d_rn : GCCBuiltin<"__nvvm_i2d_rn">,
+ Intrinsic<[llvm_double_ty], [llvm_i32_ty], [IntrNoMem]>;
+ def int_nvvm_i2d_rz : GCCBuiltin<"__nvvm_i2d_rz">,
+ Intrinsic<[llvm_double_ty], [llvm_i32_ty], [IntrNoMem]>;
+ def int_nvvm_i2d_rm : GCCBuiltin<"__nvvm_i2d_rm">,
+ Intrinsic<[llvm_double_ty], [llvm_i32_ty], [IntrNoMem]>;
+ def int_nvvm_i2d_rp : GCCBuiltin<"__nvvm_i2d_rp">,
+ Intrinsic<[llvm_double_ty], [llvm_i32_ty], [IntrNoMem]>;
+
+ def int_nvvm_ui2d_rn : GCCBuiltin<"__nvvm_ui2d_rn">,
+ Intrinsic<[llvm_double_ty], [llvm_i32_ty], [IntrNoMem]>;
+ def int_nvvm_ui2d_rz : GCCBuiltin<"__nvvm_ui2d_rz">,
+ Intrinsic<[llvm_double_ty], [llvm_i32_ty], [IntrNoMem]>;
+ def int_nvvm_ui2d_rm : GCCBuiltin<"__nvvm_ui2d_rm">,
+ Intrinsic<[llvm_double_ty], [llvm_i32_ty], [IntrNoMem]>;
+ def int_nvvm_ui2d_rp : GCCBuiltin<"__nvvm_ui2d_rp">,
+ Intrinsic<[llvm_double_ty], [llvm_i32_ty], [IntrNoMem]>;
+
+ def int_nvvm_f2i_rn_ftz : GCCBuiltin<"__nvvm_f2i_rn_ftz">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2i_rn : GCCBuiltin<"__nvvm_f2i_rn">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2i_rz_ftz : GCCBuiltin<"__nvvm_f2i_rz_ftz">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2i_rz : GCCBuiltin<"__nvvm_f2i_rz">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2i_rm_ftz : GCCBuiltin<"__nvvm_f2i_rm_ftz">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2i_rm : GCCBuiltin<"__nvvm_f2i_rm">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2i_rp_ftz : GCCBuiltin<"__nvvm_f2i_rp_ftz">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2i_rp : GCCBuiltin<"__nvvm_f2i_rp">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+
+ def int_nvvm_f2ui_rn_ftz : GCCBuiltin<"__nvvm_f2ui_rn_ftz">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ui_rn : GCCBuiltin<"__nvvm_f2ui_rn">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ui_rz_ftz : GCCBuiltin<"__nvvm_f2ui_rz_ftz">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ui_rz : GCCBuiltin<"__nvvm_f2ui_rz">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ui_rm_ftz : GCCBuiltin<"__nvvm_f2ui_rm_ftz">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ui_rm : GCCBuiltin<"__nvvm_f2ui_rm">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ui_rp_ftz : GCCBuiltin<"__nvvm_f2ui_rp_ftz">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ui_rp : GCCBuiltin<"__nvvm_f2ui_rp">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+
+ def int_nvvm_i2f_rn : GCCBuiltin<"__nvvm_i2f_rn">,
+ Intrinsic<[llvm_float_ty], [llvm_i32_ty], [IntrNoMem]>;
+ def int_nvvm_i2f_rz : GCCBuiltin<"__nvvm_i2f_rz">,
+ Intrinsic<[llvm_float_ty], [llvm_i32_ty], [IntrNoMem]>;
+ def int_nvvm_i2f_rm : GCCBuiltin<"__nvvm_i2f_rm">,
+ Intrinsic<[llvm_float_ty], [llvm_i32_ty], [IntrNoMem]>;
+ def int_nvvm_i2f_rp : GCCBuiltin<"__nvvm_i2f_rp">,
+ Intrinsic<[llvm_float_ty], [llvm_i32_ty], [IntrNoMem]>;
+
+ def int_nvvm_ui2f_rn : GCCBuiltin<"__nvvm_ui2f_rn">,
+ Intrinsic<[llvm_float_ty], [llvm_i32_ty], [IntrNoMem]>;
+ def int_nvvm_ui2f_rz : GCCBuiltin<"__nvvm_ui2f_rz">,
+ Intrinsic<[llvm_float_ty], [llvm_i32_ty], [IntrNoMem]>;
+ def int_nvvm_ui2f_rm : GCCBuiltin<"__nvvm_ui2f_rm">,
+ Intrinsic<[llvm_float_ty], [llvm_i32_ty], [IntrNoMem]>;
+ def int_nvvm_ui2f_rp : GCCBuiltin<"__nvvm_ui2f_rp">,
+ Intrinsic<[llvm_float_ty], [llvm_i32_ty], [IntrNoMem]>;
+
+ def int_nvvm_lohi_i2d : GCCBuiltin<"__nvvm_lohi_i2d">,
+ Intrinsic<[llvm_double_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, Commutative]>;
+
+ def int_nvvm_d2i_lo : GCCBuiltin<"__nvvm_d2i_lo">,
+ Intrinsic<[llvm_i32_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2i_hi : GCCBuiltin<"__nvvm_d2i_hi">,
+ Intrinsic<[llvm_i32_ty], [llvm_double_ty], [IntrNoMem]>;
+
+ def int_nvvm_f2ll_rn_ftz : GCCBuiltin<"__nvvm_f2ll_rn_ftz">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ll_rn : GCCBuiltin<"__nvvm_f2ll_rn">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ll_rz_ftz : GCCBuiltin<"__nvvm_f2ll_rz_ftz">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ll_rz : GCCBuiltin<"__nvvm_f2ll_rz">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ll_rm_ftz : GCCBuiltin<"__nvvm_f2ll_rm_ftz">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ll_rm : GCCBuiltin<"__nvvm_f2ll_rm">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ll_rp_ftz : GCCBuiltin<"__nvvm_f2ll_rp_ftz">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ll_rp : GCCBuiltin<"__nvvm_f2ll_rp">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+
+ def int_nvvm_f2ull_rn_ftz : GCCBuiltin<"__nvvm_f2ull_rn_ftz">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ull_rn : GCCBuiltin<"__nvvm_f2ull_rn">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ull_rz_ftz : GCCBuiltin<"__nvvm_f2ull_rz_ftz">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ull_rz : GCCBuiltin<"__nvvm_f2ull_rz">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ull_rm_ftz : GCCBuiltin<"__nvvm_f2ull_rm_ftz">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ull_rm : GCCBuiltin<"__nvvm_f2ull_rm">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ull_rp_ftz : GCCBuiltin<"__nvvm_f2ull_rp_ftz">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2ull_rp : GCCBuiltin<"__nvvm_f2ull_rp">,
+ Intrinsic<[llvm_i64_ty], [llvm_float_ty], [IntrNoMem]>;
+
+ def int_nvvm_d2ll_rn : GCCBuiltin<"__nvvm_d2ll_rn">,
+ Intrinsic<[llvm_i64_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2ll_rz : GCCBuiltin<"__nvvm_d2ll_rz">,
+ Intrinsic<[llvm_i64_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2ll_rm : GCCBuiltin<"__nvvm_d2ll_rm">,
+ Intrinsic<[llvm_i64_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2ll_rp : GCCBuiltin<"__nvvm_d2ll_rp">,
+ Intrinsic<[llvm_i64_ty], [llvm_double_ty], [IntrNoMem]>;
+
+ def int_nvvm_d2ull_rn : GCCBuiltin<"__nvvm_d2ull_rn">,
+ Intrinsic<[llvm_i64_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2ull_rz : GCCBuiltin<"__nvvm_d2ull_rz">,
+ Intrinsic<[llvm_i64_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2ull_rm : GCCBuiltin<"__nvvm_d2ull_rm">,
+ Intrinsic<[llvm_i64_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_nvvm_d2ull_rp : GCCBuiltin<"__nvvm_d2ull_rp">,
+ Intrinsic<[llvm_i64_ty], [llvm_double_ty], [IntrNoMem]>;
+
+ def int_nvvm_ll2f_rn : GCCBuiltin<"__nvvm_ll2f_rn">,
+ Intrinsic<[llvm_float_ty], [llvm_i64_ty], [IntrNoMem]>;
+ def int_nvvm_ll2f_rz : GCCBuiltin<"__nvvm_ll2f_rz">,
+ Intrinsic<[llvm_float_ty], [llvm_i64_ty], [IntrNoMem]>;
+ def int_nvvm_ll2f_rm : GCCBuiltin<"__nvvm_ll2f_rm">,
+ Intrinsic<[llvm_float_ty], [llvm_i64_ty], [IntrNoMem]>;
+ def int_nvvm_ll2f_rp : GCCBuiltin<"__nvvm_ll2f_rp">,
+ Intrinsic<[llvm_float_ty], [llvm_i64_ty], [IntrNoMem]>;
+ def int_nvvm_ull2f_rn : GCCBuiltin<"__nvvm_ull2f_rn">,
+ Intrinsic<[llvm_float_ty], [llvm_i64_ty], [IntrNoMem]>;
+ def int_nvvm_ull2f_rz : GCCBuiltin<"__nvvm_ull2f_rz">,
+ Intrinsic<[llvm_float_ty], [llvm_i64_ty], [IntrNoMem]>;
+ def int_nvvm_ull2f_rm : GCCBuiltin<"__nvvm_ull2f_rm">,
+ Intrinsic<[llvm_float_ty], [llvm_i64_ty], [IntrNoMem]>;
+ def int_nvvm_ull2f_rp : GCCBuiltin<"__nvvm_ull2f_rp">,
+ Intrinsic<[llvm_float_ty], [llvm_i64_ty], [IntrNoMem]>;
+
+ def int_nvvm_ll2d_rn : GCCBuiltin<"__nvvm_ll2d_rn">,
+ Intrinsic<[llvm_double_ty], [llvm_i64_ty], [IntrNoMem]>;
+ def int_nvvm_ll2d_rz : GCCBuiltin<"__nvvm_ll2d_rz">,
+ Intrinsic<[llvm_double_ty], [llvm_i64_ty], [IntrNoMem]>;
+ def int_nvvm_ll2d_rm : GCCBuiltin<"__nvvm_ll2d_rm">,
+ Intrinsic<[llvm_double_ty], [llvm_i64_ty], [IntrNoMem]>;
+ def int_nvvm_ll2d_rp : GCCBuiltin<"__nvvm_ll2d_rp">,
+ Intrinsic<[llvm_double_ty], [llvm_i64_ty], [IntrNoMem]>;
+ def int_nvvm_ull2d_rn : GCCBuiltin<"__nvvm_ull2d_rn">,
+ Intrinsic<[llvm_double_ty], [llvm_i64_ty], [IntrNoMem]>;
+ def int_nvvm_ull2d_rz : GCCBuiltin<"__nvvm_ull2d_rz">,
+ Intrinsic<[llvm_double_ty], [llvm_i64_ty], [IntrNoMem]>;
+ def int_nvvm_ull2d_rm : GCCBuiltin<"__nvvm_ull2d_rm">,
+ Intrinsic<[llvm_double_ty], [llvm_i64_ty], [IntrNoMem]>;
+ def int_nvvm_ull2d_rp : GCCBuiltin<"__nvvm_ull2d_rp">,
+ Intrinsic<[llvm_double_ty], [llvm_i64_ty], [IntrNoMem]>;
+
+ def int_nvvm_f2h_rn_ftz : GCCBuiltin<"__nvvm_f2h_rn_ftz">,
+ Intrinsic<[llvm_i16_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_f2h_rn : GCCBuiltin<"__nvvm_f2h_rn">,
+ Intrinsic<[llvm_i16_ty], [llvm_float_ty], [IntrNoMem]>;
+
+//
+// Bitcast
+//
+
+ def int_nvvm_bitcast_f2i : GCCBuiltin<"__nvvm_bitcast_f2i">,
+ Intrinsic<[llvm_i32_ty], [llvm_float_ty], [IntrNoMem]>;
+ def int_nvvm_bitcast_i2f : GCCBuiltin<"__nvvm_bitcast_i2f">,
+ Intrinsic<[llvm_float_ty], [llvm_i32_ty], [IntrNoMem]>;
+
+ def int_nvvm_bitcast_ll2d : GCCBuiltin<"__nvvm_bitcast_ll2d">,
+ Intrinsic<[llvm_double_ty], [llvm_i64_ty], [IntrNoMem]>;
+ def int_nvvm_bitcast_d2ll : GCCBuiltin<"__nvvm_bitcast_d2ll">,
+ Intrinsic<[llvm_i64_ty], [llvm_double_ty], [IntrNoMem]>;
+
+// FNS (PTX fns: find the n-th set bit)
+
+ def int_nvvm_fns : GCCBuiltin<"__nvvm_fns">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+// Atomic operations not available as generic LLVM atomic instructions.
+ def int_nvvm_atomic_load_add_f32 : Intrinsic<[llvm_float_ty],
+ [LLVMAnyPointerType<llvm_float_ty>, llvm_float_ty],
+ [IntrArgMemOnly, NoCapture<0>]>;
+ // Atomic add of f64 requires sm_60.
+ def int_nvvm_atomic_load_add_f64 : Intrinsic<[llvm_double_ty],
+ [LLVMAnyPointerType<llvm_double_ty>, llvm_double_ty],
+ [IntrArgMemOnly, NoCapture<0>]>;
+
+ def int_nvvm_atomic_load_inc_32 : Intrinsic<[llvm_i32_ty],
+ [LLVMAnyPointerType<llvm_i32_ty>, llvm_i32_ty],
+ [IntrArgMemOnly, NoCapture<0>]>;
+ def int_nvvm_atomic_load_dec_32 : Intrinsic<[llvm_i32_ty],
+ [LLVMAnyPointerType<llvm_i32_ty>, llvm_i32_ty],
+ [IntrArgMemOnly, NoCapture<0>]>;
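+
+ // A minimal IR sketch of the overloaded f32 atomic add, assuming a generic
+ // float pointer %p and a float %v (the .p0f32 suffix being the usual overload
+ // mangling for that pointer type):
+ //
+ //   %old = call float @llvm.nvvm.atomic.load.add.f32.p0f32(float* %p, float %v)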
+
+ class SCOPED_ATOMIC2_impl<LLVMType elty>
+ : Intrinsic<[elty],
+ [LLVMAnyPointerType<LLVMMatchType<0>>, LLVMMatchType<0>],
+ [IntrArgMemOnly, NoCapture<0>]>;
+ class SCOPED_ATOMIC3_impl<LLVMType elty>
+ : Intrinsic<[elty],
+ [LLVMAnyPointerType<LLVMMatchType<0>>, LLVMMatchType<0>,
+ LLVMMatchType<0>],
+ [IntrArgMemOnly, NoCapture<0>]>;
+
+ multiclass PTXAtomicWithScope2<LLVMType elty> {
+ def _cta : SCOPED_ATOMIC2_impl<elty>;
+ def _sys : SCOPED_ATOMIC2_impl<elty>;
+ }
+ multiclass PTXAtomicWithScope3<LLVMType elty> {
+ def _cta : SCOPED_ATOMIC3_impl<elty>;
+ def _sys : SCOPED_ATOMIC3_impl<elty>;
+ }
+ multiclass PTXAtomicWithScope2_fi {
+ defm _f: PTXAtomicWithScope2<llvm_anyfloat_ty>;
+ defm _i: PTXAtomicWithScope2<llvm_anyint_ty>;
+ }
+ defm int_nvvm_atomic_add_gen : PTXAtomicWithScope2_fi;
+ defm int_nvvm_atomic_inc_gen_i : PTXAtomicWithScope2<llvm_anyint_ty>;
+ defm int_nvvm_atomic_dec_gen_i : PTXAtomicWithScope2<llvm_anyint_ty>;
+ defm int_nvvm_atomic_exch_gen_i: PTXAtomicWithScope2<llvm_anyint_ty>;
+ defm int_nvvm_atomic_xor_gen_i : PTXAtomicWithScope2<llvm_anyint_ty>;
+ defm int_nvvm_atomic_max_gen_i : PTXAtomicWithScope2<llvm_anyint_ty>;
+ defm int_nvvm_atomic_min_gen_i : PTXAtomicWithScope2<llvm_anyint_ty>;
+ defm int_nvvm_atomic_or_gen_i : PTXAtomicWithScope2<llvm_anyint_ty>;
+ defm int_nvvm_atomic_and_gen_i : PTXAtomicWithScope2<llvm_anyint_ty>;
+ defm int_nvvm_atomic_cas_gen_i : PTXAtomicWithScope3<llvm_anyint_ty>;
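+
+ // Expansion sketch: each defm above concatenates the multiclass suffixes onto
+ // the record name, so "defm int_nvvm_atomic_add_gen : PTXAtomicWithScope2_fi"
+ // yields int_nvvm_atomic_add_gen_f_cta, _f_sys, _i_cta and _i_sys, i.e. the
+ // overloaded intrinsics llvm.nvvm.atomic.add.gen.f.cta, .f.sys, .i.cta and
+ // .i.sys.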
+
+// Bar.Sync
+
+ // The builtin for "bar.sync 0" is called __syncthreads. Unlike most of the
+ // intrinsics in this file, this one is a user-facing API.
+ def int_nvvm_barrier0 : GCCBuiltin<"__syncthreads">,
+ Intrinsic<[], [], [IntrConvergent]>;
+ // Synchronize all threads in the CTA at barrier 'n'.
+ def int_nvvm_barrier_n : GCCBuiltin<"__nvvm_bar_n">,
+ Intrinsic<[], [llvm_i32_ty], [IntrConvergent]>;
+ // Synchronize 'm' threads (arg 2), where 'm' is a multiple of the warp
+ // size, at barrier 'n' (arg 1) in the CTA.
+ def int_nvvm_barrier : GCCBuiltin<"__nvvm_bar">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty], [IntrConvergent]>;
+ def int_nvvm_barrier0_popc : GCCBuiltin<"__nvvm_bar0_popc">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrConvergent]>;
+ def int_nvvm_barrier0_and : GCCBuiltin<"__nvvm_bar0_and">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrConvergent]>;
+ def int_nvvm_barrier0_or : GCCBuiltin<"__nvvm_bar0_or">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrConvergent]>;
+
+ def int_nvvm_bar_sync :
+ Intrinsic<[], [llvm_i32_ty], [IntrConvergent]>,
+ GCCBuiltin<"__nvvm_bar_sync">;
+ def int_nvvm_bar_warp_sync :
+ Intrinsic<[], [llvm_i32_ty], [IntrConvergent]>,
+ GCCBuiltin<"__nvvm_bar_warp_sync">;
+
+ // barrier.sync id[, cnt]
+ def int_nvvm_barrier_sync :
+ Intrinsic<[], [llvm_i32_ty], [IntrConvergent]>,
+ GCCBuiltin<"__nvvm_barrier_sync">;
+ def int_nvvm_barrier_sync_cnt :
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty], [IntrConvergent]>,
+ GCCBuiltin<"__nvvm_barrier_sync_cnt">;
+
+ // Membar
+ def int_nvvm_membar_cta : GCCBuiltin<"__nvvm_membar_cta">,
+ Intrinsic<[], [], []>;
+ def int_nvvm_membar_gl : GCCBuiltin<"__nvvm_membar_gl">,
+ Intrinsic<[], [], []>;
+ def int_nvvm_membar_sys : GCCBuiltin<"__nvvm_membar_sys">,
+ Intrinsic<[], [], []>;
+
+// Generated within nvvm. Use for ldu on sm_20 or later. Second arg is the
+// pointer's alignment.
+def int_nvvm_ldu_global_i : Intrinsic<[llvm_anyint_ty],
+ [LLVMAnyPointerType<LLVMMatchType<0>>, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly, NoCapture<0>],
+ "llvm.nvvm.ldu.global.i">;
+def int_nvvm_ldu_global_f : Intrinsic<[llvm_anyfloat_ty],
+ [LLVMAnyPointerType<LLVMMatchType<0>>, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly, NoCapture<0>],
+ "llvm.nvvm.ldu.global.f">;
+def int_nvvm_ldu_global_p : Intrinsic<[llvm_anyptr_ty],
+ [LLVMAnyPointerType<LLVMMatchType<0>>, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly, NoCapture<0>],
+ "llvm.nvvm.ldu.global.p">;
+
+// Generated within nvvm. Use for ldg on sm_35 or later. Second arg is the
+// pointer's alignment.
+def int_nvvm_ldg_global_i : Intrinsic<[llvm_anyint_ty],
+ [LLVMAnyPointerType<LLVMMatchType<0>>, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly, NoCapture<0>],
+ "llvm.nvvm.ldg.global.i">;
+def int_nvvm_ldg_global_f : Intrinsic<[llvm_anyfloat_ty],
+ [LLVMAnyPointerType<LLVMMatchType<0>>, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly, NoCapture<0>],
+ "llvm.nvvm.ldg.global.f">;
+def int_nvvm_ldg_global_p : Intrinsic<[llvm_anyptr_ty],
+ [LLVMAnyPointerType<LLVMMatchType<0>>, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly, NoCapture<0>],
+ "llvm.nvvm.ldg.global.p">;
+
+// Address space conversion
+// - These intrinsics convert a pointer between a specific address space and
+//   the generic address space (a usage sketch follows the definitions below).
+// - The input pointer and output pointer must have the same pointee type; only
+//   the address space may differ. (This restriction is not enforced here, as
+//   there is currently no way to describe it.)
+// - This complements the LLVM bitcast, which casts one type of pointer to
+//   another type of pointer while the address space remains the same.
+def int_nvvm_ptr_local_to_gen: Intrinsic<[llvm_anyptr_ty],
+ [llvm_anyptr_ty], [IntrNoMem],
+ "llvm.nvvm.ptr.local.to.gen">;
+def int_nvvm_ptr_shared_to_gen: Intrinsic<[llvm_anyptr_ty],
+ [llvm_anyptr_ty], [IntrNoMem],
+ "llvm.nvvm.ptr.shared.to.gen">;
+def int_nvvm_ptr_global_to_gen: Intrinsic<[llvm_anyptr_ty],
+ [llvm_anyptr_ty], [IntrNoMem],
+ "llvm.nvvm.ptr.global.to.gen">;
+def int_nvvm_ptr_constant_to_gen: Intrinsic<[llvm_anyptr_ty],
+ [llvm_anyptr_ty], [IntrNoMem],
+ "llvm.nvvm.ptr.constant.to.gen">;
+
+def int_nvvm_ptr_gen_to_global: Intrinsic<[llvm_anyptr_ty],
+ [llvm_anyptr_ty], [IntrNoMem],
+ "llvm.nvvm.ptr.gen.to.global">;
+def int_nvvm_ptr_gen_to_shared: Intrinsic<[llvm_anyptr_ty],
+ [llvm_anyptr_ty], [IntrNoMem],
+ "llvm.nvvm.ptr.gen.to.shared">;
+def int_nvvm_ptr_gen_to_local: Intrinsic<[llvm_anyptr_ty],
+ [llvm_anyptr_ty], [IntrNoMem],
+ "llvm.nvvm.ptr.gen.to.local">;
+def int_nvvm_ptr_gen_to_constant: Intrinsic<[llvm_anyptr_ty],
+ [llvm_anyptr_ty], [IntrNoMem],
+ "llvm.nvvm.ptr.gen.to.constant">;
+
+// Used internally by nvvm to help address space optimization and PTX code
+// generation. This is for parameters that are passed to kernel functions by
+// pointer with the 'byval' attribute.
+def int_nvvm_ptr_gen_to_param: Intrinsic<[llvm_anyptr_ty],
+ [llvm_anyptr_ty],
+ [IntrNoMem],
+ "llvm.nvvm.ptr.gen.to.param">;
+
+// Move intrinsics, used in nvvm internally
+
+def int_nvvm_move_i16 : Intrinsic<[llvm_i16_ty], [llvm_i16_ty], [IntrNoMem],
+ "llvm.nvvm.move.i16">;
+def int_nvvm_move_i32 : Intrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrNoMem],
+ "llvm.nvvm.move.i32">;
+def int_nvvm_move_i64 : Intrinsic<[llvm_i64_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.move.i64">;
+def int_nvvm_move_float : Intrinsic<[llvm_float_ty], [llvm_float_ty],
+ [IntrNoMem], "llvm.nvvm.move.float">;
+def int_nvvm_move_double : Intrinsic<[llvm_double_ty], [llvm_double_ty],
+ [IntrNoMem], "llvm.nvvm.move.double">;
+def int_nvvm_move_ptr : Intrinsic<[llvm_anyptr_ty], [llvm_anyptr_ty],
+ [IntrNoMem, NoCapture<0>], "llvm.nvvm.move.ptr">;
+
+
+// For getting the handle from a texture or surface variable
+def int_nvvm_texsurf_handle
+ : Intrinsic<[llvm_i64_ty], [llvm_metadata_ty, llvm_anyi64ptr_ty],
+ [IntrNoMem], "llvm.nvvm.texsurf.handle">;
+def int_nvvm_texsurf_handle_internal
+ : Intrinsic<[llvm_i64_ty], [llvm_anyptr_ty],
+ [IntrNoMem], "llvm.nvvm.texsurf.handle.internal">;
+
+/// Error / Warn
+def int_nvvm_compiler_error :
+ Intrinsic<[], [llvm_anyptr_ty], [], "llvm.nvvm.compiler.error">;
+def int_nvvm_compiler_warn :
+ Intrinsic<[], [llvm_anyptr_ty], [], "llvm.nvvm.compiler.warn">;
+
+def int_nvvm_reflect :
+ Intrinsic<[llvm_i32_ty], [llvm_anyptr_ty], [IntrNoMem], "llvm.nvvm.reflect">;
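+
+// A minimal sketch of a reflect query: the NVVMReflect pass folds the call to
+// a constant chosen by the string argument. Assuming a module-level string
+// @str holding "__CUDA_FTZ" and the pointer-overloaded name:
+//
+//   %ftz = call i32 @llvm.nvvm.reflect.p0i8(
+//              i8* getelementptr ([11 x i8], [11 x i8]* @str, i32 0, i32 0))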
+
+// isspacep.{const, global, local, shared}
+def int_nvvm_isspacep_const
+ : Intrinsic<[llvm_i1_ty], [llvm_ptr_ty], [IntrNoMem],
+ "llvm.nvvm.isspacep.const">,
+ GCCBuiltin<"__nvvm_isspacep_const">;
+def int_nvvm_isspacep_global
+ : Intrinsic<[llvm_i1_ty], [llvm_ptr_ty], [IntrNoMem],
+ "llvm.nvvm.isspacep.global">,
+ GCCBuiltin<"__nvvm_isspacep_global">;
+def int_nvvm_isspacep_local
+ : Intrinsic<[llvm_i1_ty], [llvm_ptr_ty], [IntrNoMem],
+ "llvm.nvvm.isspacep.local">,
+ GCCBuiltin<"__nvvm_isspacep_local">;
+def int_nvvm_isspacep_shared
+ : Intrinsic<[llvm_i1_ty], [llvm_ptr_ty], [IntrNoMem],
+ "llvm.nvvm.isspacep.shared">,
+ GCCBuiltin<"__nvvm_isspacep_shared">;
+
+// Environment register read
+def int_nvvm_read_ptx_sreg_envreg0
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg0">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg0">;
+def int_nvvm_read_ptx_sreg_envreg1
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg1">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg1">;
+def int_nvvm_read_ptx_sreg_envreg2
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg2">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg2">;
+def int_nvvm_read_ptx_sreg_envreg3
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg3">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg3">;
+def int_nvvm_read_ptx_sreg_envreg4
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg4">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg4">;
+def int_nvvm_read_ptx_sreg_envreg5
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg5">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg5">;
+def int_nvvm_read_ptx_sreg_envreg6
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg6">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg6">;
+def int_nvvm_read_ptx_sreg_envreg7
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg7">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg7">;
+def int_nvvm_read_ptx_sreg_envreg8
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg8">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg8">;
+def int_nvvm_read_ptx_sreg_envreg9
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg9">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg9">;
+def int_nvvm_read_ptx_sreg_envreg10
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg10">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg10">;
+def int_nvvm_read_ptx_sreg_envreg11
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg11">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg11">;
+def int_nvvm_read_ptx_sreg_envreg12
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg12">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg12">;
+def int_nvvm_read_ptx_sreg_envreg13
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg13">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg13">;
+def int_nvvm_read_ptx_sreg_envreg14
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg14">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg14">;
+def int_nvvm_read_ptx_sreg_envreg15
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg15">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg15">;
+def int_nvvm_read_ptx_sreg_envreg16
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg16">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg16">;
+def int_nvvm_read_ptx_sreg_envreg17
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg17">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg17">;
+def int_nvvm_read_ptx_sreg_envreg18
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg18">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg18">;
+def int_nvvm_read_ptx_sreg_envreg19
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg19">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg19">;
+def int_nvvm_read_ptx_sreg_envreg20
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg20">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg20">;
+def int_nvvm_read_ptx_sreg_envreg21
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg21">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg21">;
+def int_nvvm_read_ptx_sreg_envreg22
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg22">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg22">;
+def int_nvvm_read_ptx_sreg_envreg23
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg23">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg23">;
+def int_nvvm_read_ptx_sreg_envreg24
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg24">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg24">;
+def int_nvvm_read_ptx_sreg_envreg25
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg25">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg25">;
+def int_nvvm_read_ptx_sreg_envreg26
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg26">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg26">;
+def int_nvvm_read_ptx_sreg_envreg27
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg27">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg27">;
+def int_nvvm_read_ptx_sreg_envreg28
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg28">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg28">;
+def int_nvvm_read_ptx_sreg_envreg29
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg29">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg29">;
+def int_nvvm_read_ptx_sreg_envreg30
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg30">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg30">;
+def int_nvvm_read_ptx_sreg_envreg31
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem],
+ "llvm.nvvm.read.ptx.sreg.envreg31">,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_envreg31">;
+
+
+// Texture Fetch
+// texmode_independent
+def int_nvvm_tex_1d_v4f32_s32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.1d.v4f32.s32">;
+def int_nvvm_tex_1d_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.v4f32.f32">;
+def int_nvvm_tex_1d_level_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.level.v4f32.f32">;
+def int_nvvm_tex_1d_grad_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.grad.v4f32.f32">;
+def int_nvvm_tex_1d_v4s32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.1d.v4s32.s32">;
+def int_nvvm_tex_1d_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.v4s32.f32">;
+def int_nvvm_tex_1d_level_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.level.v4s32.f32">;
+def int_nvvm_tex_1d_grad_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.grad.v4s32.f32">;
+def int_nvvm_tex_1d_v4u32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.1d.v4u32.s32">;
+def int_nvvm_tex_1d_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.v4u32.f32">;
+def int_nvvm_tex_1d_level_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.level.v4u32.f32">;
+def int_nvvm_tex_1d_grad_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.grad.v4u32.f32">;
+
+def int_nvvm_tex_1d_array_v4f32_s32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.1d.array.v4f32.s32">;
+def int_nvvm_tex_1d_array_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.array.v4f32.f32">;
+def int_nvvm_tex_1d_array_level_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.array.level.v4f32.f32">;
+def int_nvvm_tex_1d_array_grad_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.array.grad.v4f32.f32">;
+def int_nvvm_tex_1d_array_v4s32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.1d.array.v4s32.s32">;
+def int_nvvm_tex_1d_array_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.array.v4s32.f32">;
+def int_nvvm_tex_1d_array_level_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.array.level.v4s32.f32">;
+def int_nvvm_tex_1d_array_grad_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.array.grad.v4s32.f32">;
+def int_nvvm_tex_1d_array_v4u32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.1d.array.v4u32.s32">;
+def int_nvvm_tex_1d_array_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.array.v4u32.f32">;
+def int_nvvm_tex_1d_array_level_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.array.level.v4u32.f32">;
+def int_nvvm_tex_1d_array_grad_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.1d.array.grad.v4u32.f32">;
+
+def int_nvvm_tex_2d_v4f32_s32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.2d.v4f32.s32">;
+def int_nvvm_tex_2d_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.v4f32.f32">;
+def int_nvvm_tex_2d_level_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.level.v4f32.f32">;
+def int_nvvm_tex_2d_grad_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.grad.v4f32.f32">;
+def int_nvvm_tex_2d_v4s32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.2d.v4s32.s32">;
+def int_nvvm_tex_2d_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.v4s32.f32">;
+def int_nvvm_tex_2d_level_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.level.v4s32.f32">;
+def int_nvvm_tex_2d_grad_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.grad.v4s32.f32">;
+def int_nvvm_tex_2d_v4u32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.2d.v4u32.s32">;
+def int_nvvm_tex_2d_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.v4u32.f32">;
+def int_nvvm_tex_2d_level_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.level.v4u32.f32">;
+def int_nvvm_tex_2d_grad_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.grad.v4u32.f32">;
+
+def int_nvvm_tex_2d_array_v4f32_s32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty], [],
+ "llvm.nvvm.tex.2d.array.v4f32.s32">;
+def int_nvvm_tex_2d_array_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.array.v4f32.f32">;
+def int_nvvm_tex_2d_array_level_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.array.level.v4f32.f32">;
+def int_nvvm_tex_2d_array_grad_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.array.grad.v4f32.f32">;
+def int_nvvm_tex_2d_array_v4s32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty], [],
+ "llvm.nvvm.tex.2d.array.v4s32.s32">;
+def int_nvvm_tex_2d_array_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.array.v4s32.f32">;
+def int_nvvm_tex_2d_array_level_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.array.level.v4s32.f32">;
+def int_nvvm_tex_2d_array_grad_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.array.grad.v4s32.f32">;
+def int_nvvm_tex_2d_array_v4u32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty], [],
+ "llvm.nvvm.tex.2d.array.v4u32.s32">;
+def int_nvvm_tex_2d_array_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.array.v4u32.f32">;
+def int_nvvm_tex_2d_array_level_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.array.level.v4u32.f32">;
+def int_nvvm_tex_2d_array_grad_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.2d.array.grad.v4u32.f32">;
+
+def int_nvvm_tex_3d_v4f32_s32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [], "llvm.nvvm.tex.3d.v4f32.s32">;
+def int_nvvm_tex_3d_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.3d.v4f32.f32">;
+def int_nvvm_tex_3d_level_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.3d.level.v4f32.f32">;
+def int_nvvm_tex_3d_grad_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.3d.grad.v4f32.f32">;
+def int_nvvm_tex_3d_v4s32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [], "llvm.nvvm.tex.3d.v4s32.s32">;
+def int_nvvm_tex_3d_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.3d.v4s32.f32">;
+def int_nvvm_tex_3d_level_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.3d.level.v4s32.f32">;
+def int_nvvm_tex_3d_grad_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.3d.grad.v4s32.f32">;
+def int_nvvm_tex_3d_v4u32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [], "llvm.nvvm.tex.3d.v4u32.s32">;
+def int_nvvm_tex_3d_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.3d.v4u32.f32">;
+def int_nvvm_tex_3d_level_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.3d.level.v4u32.f32">;
+def int_nvvm_tex_3d_grad_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.3d.grad.v4u32.f32">;
+
+def int_nvvm_tex_cube_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.cube.v4f32.f32">;
+def int_nvvm_tex_cube_level_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.cube.level.v4f32.f32">;
+def int_nvvm_tex_cube_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.cube.v4s32.f32">;
+def int_nvvm_tex_cube_level_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.cube.level.v4s32.f32">;
+def int_nvvm_tex_cube_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.cube.v4u32.f32">;
+def int_nvvm_tex_cube_level_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.cube.level.v4u32.f32">;
+
+def int_nvvm_tex_cube_array_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.cube.array.v4f32.f32">;
+def int_nvvm_tex_cube_array_level_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.cube.array.level.v4f32.f32">;
+def int_nvvm_tex_cube_array_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.cube.array.v4s32.f32">;
+def int_nvvm_tex_cube_array_level_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.cube.array.level.v4s32.f32">;
+def int_nvvm_tex_cube_array_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.cube.array.v4u32.f32">;
+def int_nvvm_tex_cube_array_level_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.cube.array.level.v4u32.f32">;
+
+def int_nvvm_tld4_r_2d_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.r.2d.v4f32.f32">;
+def int_nvvm_tld4_g_2d_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.g.2d.v4f32.f32">;
+def int_nvvm_tld4_b_2d_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.b.2d.v4f32.f32">;
+def int_nvvm_tld4_a_2d_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.a.2d.v4f32.f32">;
+def int_nvvm_tld4_r_2d_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.r.2d.v4s32.f32">;
+def int_nvvm_tld4_g_2d_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.g.2d.v4s32.f32">;
+def int_nvvm_tld4_b_2d_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.b.2d.v4s32.f32">;
+def int_nvvm_tld4_a_2d_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.a.2d.v4s32.f32">;
+def int_nvvm_tld4_r_2d_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.r.2d.v4u32.f32">;
+def int_nvvm_tld4_g_2d_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.g.2d.v4u32.f32">;
+def int_nvvm_tld4_b_2d_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.b.2d.v4u32.f32">;
+def int_nvvm_tld4_a_2d_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.a.2d.v4u32.f32">;
+
+
+// texmode_unified
+def int_nvvm_tex_unified_1d_v4f32_s32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.unified.1d.v4f32.s32">;
+def int_nvvm_tex_unified_1d_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.v4f32.f32">;
+def int_nvvm_tex_unified_1d_level_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.level.v4f32.f32">;
+def int_nvvm_tex_unified_1d_grad_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.grad.v4f32.f32">;
+def int_nvvm_tex_unified_1d_v4s32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.unified.1d.v4s32.s32">;
+def int_nvvm_tex_unified_1d_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.v4s32.f32">;
+def int_nvvm_tex_unified_1d_level_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.level.v4s32.f32">;
+def int_nvvm_tex_unified_1d_grad_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.grad.v4s32.f32">;
+def int_nvvm_tex_unified_1d_v4u32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.unified.1d.v4u32.s32">;
+def int_nvvm_tex_unified_1d_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.v4u32.f32">;
+def int_nvvm_tex_unified_1d_level_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.level.v4u32.f32">;
+def int_nvvm_tex_unified_1d_grad_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.grad.v4u32.f32">;
+
+def int_nvvm_tex_unified_1d_array_v4f32_s32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.unified.1d.array.v4f32.s32">;
+def int_nvvm_tex_unified_1d_array_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.array.v4f32.f32">;
+def int_nvvm_tex_unified_1d_array_level_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.array.level.v4f32.f32">;
+def int_nvvm_tex_unified_1d_array_grad_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.array.grad.v4f32.f32">;
+def int_nvvm_tex_unified_1d_array_v4s32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.unified.1d.array.v4s32.s32">;
+def int_nvvm_tex_unified_1d_array_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.array.v4s32.f32">;
+def int_nvvm_tex_unified_1d_array_level_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.array.level.v4s32.f32">;
+def int_nvvm_tex_unified_1d_array_grad_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.array.grad.v4s32.f32">;
+def int_nvvm_tex_unified_1d_array_v4u32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.unified.1d.array.v4u32.s32">;
+def int_nvvm_tex_unified_1d_array_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.array.v4u32.f32">;
+def int_nvvm_tex_unified_1d_array_level_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.array.level.v4u32.f32">;
+def int_nvvm_tex_unified_1d_array_grad_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.1d.array.grad.v4u32.f32">;
+
+def int_nvvm_tex_unified_2d_v4f32_s32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.unified.2d.v4f32.s32">;
+def int_nvvm_tex_unified_2d_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.v4f32.f32">;
+def int_nvvm_tex_unified_2d_level_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.level.v4f32.f32">;
+def int_nvvm_tex_unified_2d_grad_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.grad.v4f32.f32">;
+def int_nvvm_tex_unified_2d_v4s32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.unified.2d.v4s32.s32">;
+def int_nvvm_tex_unified_2d_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.v4s32.f32">;
+def int_nvvm_tex_unified_2d_level_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.level.v4s32.f32">;
+def int_nvvm_tex_unified_2d_grad_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.grad.v4s32.f32">;
+def int_nvvm_tex_unified_2d_v4u32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.tex.unified.2d.v4u32.s32">;
+def int_nvvm_tex_unified_2d_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.v4u32.f32">;
+def int_nvvm_tex_unified_2d_level_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.level.v4u32.f32">;
+def int_nvvm_tex_unified_2d_grad_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.grad.v4u32.f32">;
+
+def int_nvvm_tex_unified_2d_array_v4f32_s32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty], [],
+ "llvm.nvvm.tex.unified.2d.array.v4f32.s32">;
+def int_nvvm_tex_unified_2d_array_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.array.v4f32.f32">;
+def int_nvvm_tex_unified_2d_array_level_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.array.level.v4f32.f32">;
+def int_nvvm_tex_unified_2d_array_grad_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.array.grad.v4f32.f32">;
+def int_nvvm_tex_unified_2d_array_v4s32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty], [],
+ "llvm.nvvm.tex.unified.2d.array.v4s32.s32">;
+def int_nvvm_tex_unified_2d_array_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.array.v4s32.f32">;
+def int_nvvm_tex_unified_2d_array_level_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.array.level.v4s32.f32">;
+def int_nvvm_tex_unified_2d_array_grad_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.array.grad.v4s32.f32">;
+def int_nvvm_tex_unified_2d_array_v4u32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty], [],
+ "llvm.nvvm.tex.unified.2d.array.v4u32.s32">;
+def int_nvvm_tex_unified_2d_array_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.array.v4u32.f32">;
+def int_nvvm_tex_unified_2d_array_level_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.array.level.v4u32.f32">;
+def int_nvvm_tex_unified_2d_array_grad_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.2d.array.grad.v4u32.f32">;
+
+def int_nvvm_tex_unified_3d_v4f32_s32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [], "llvm.nvvm.tex.unified.3d.v4f32.s32">;
+def int_nvvm_tex_unified_3d_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.3d.v4f32.f32">;
+def int_nvvm_tex_unified_3d_level_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.3d.level.v4f32.f32">;
+def int_nvvm_tex_unified_3d_grad_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.3d.grad.v4f32.f32">;
+def int_nvvm_tex_unified_3d_v4s32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [], "llvm.nvvm.tex.unified.3d.v4s32.s32">;
+def int_nvvm_tex_unified_3d_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.3d.v4s32.f32">;
+def int_nvvm_tex_unified_3d_level_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.3d.level.v4s32.f32">;
+def int_nvvm_tex_unified_3d_grad_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.3d.grad.v4s32.f32">;
+def int_nvvm_tex_unified_3d_v4u32_s32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [], "llvm.nvvm.tex.unified.3d.v4u32.s32">;
+def int_nvvm_tex_unified_3d_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.3d.v4u32.f32">;
+def int_nvvm_tex_unified_3d_level_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.3d.level.v4u32.f32">;
+def int_nvvm_tex_unified_3d_grad_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.3d.grad.v4u32.f32">;
+
+def int_nvvm_tex_unified_cube_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.cube.v4f32.f32">;
+def int_nvvm_tex_unified_cube_level_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.cube.level.v4f32.f32">;
+def int_nvvm_tex_unified_cube_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.cube.v4s32.f32">;
+def int_nvvm_tex_unified_cube_level_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.cube.level.v4s32.f32">;
+def int_nvvm_tex_unified_cube_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.cube.v4u32.f32">;
+def int_nvvm_tex_unified_cube_level_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.cube.level.v4u32.f32">;
+
+def int_nvvm_tex_unified_cube_array_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i32_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.cube.array.v4f32.f32">;
+def int_nvvm_tex_unified_cube_array_level_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_i32_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.cube.array.level.v4f32.f32">;
+def int_nvvm_tex_unified_cube_array_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.cube.array.v4s32.f32">;
+def int_nvvm_tex_unified_cube_array_level_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.cube.array.level.v4s32.f32">;
+def int_nvvm_tex_unified_cube_array_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.cube.array.v4u32.f32">;
+def int_nvvm_tex_unified_cube_array_level_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty,
+ llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tex.unified.cube.array.level.v4u32.f32">;
+
+def int_nvvm_tld4_unified_r_2d_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.unified.r.2d.v4f32.f32">;
+def int_nvvm_tld4_unified_g_2d_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.unified.g.2d.v4f32.f32">;
+def int_nvvm_tld4_unified_b_2d_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.unified.b.2d.v4f32.f32">;
+def int_nvvm_tld4_unified_a_2d_v4f32_f32
+ : Intrinsic<[llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.unified.a.2d.v4f32.f32">;
+def int_nvvm_tld4_unified_r_2d_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.unified.r.2d.v4s32.f32">;
+def int_nvvm_tld4_unified_g_2d_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.unified.g.2d.v4s32.f32">;
+def int_nvvm_tld4_unified_b_2d_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.unified.b.2d.v4s32.f32">;
+def int_nvvm_tld4_unified_a_2d_v4s32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.unified.a.2d.v4s32.f32">;
+def int_nvvm_tld4_unified_r_2d_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.unified.r.2d.v4u32.f32">;
+def int_nvvm_tld4_unified_g_2d_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.unified.g.2d.v4u32.f32">;
+def int_nvvm_tld4_unified_b_2d_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.unified.b.2d.v4u32.f32">;
+def int_nvvm_tld4_unified_a_2d_v4u32_f32
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_float_ty, llvm_float_ty], [],
+ "llvm.nvvm.tld4.unified.a.2d.v4u32.f32">;
+
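Note: every def in the unified-texture and tld4 blocks above follows the same shape: four scalar results, the opaque i64 texture handle as the first operand, then the coordinate (plus LOD or gradient, for the level/grad forms) operands, with the intrinsic string mirroring the def name with dots in place of underscores. A minimal TableGen sketch of that shape, using a hypothetical helper class that is not part of this file:

// Hypothetical helper, not defined in this file: common shape of the
// llvm.nvvm.tex.unified.* defs above (four results of one scalar type,
// i64 handle followed by the coordinate operands).
class NVVMUnifiedTexSketch<string IntrName, LLVMType RetTy, list<LLVMType> Coords>
  : Intrinsic<[RetTy, RetTy, RetTy, RetTy],
              !listconcat([llvm_i64_ty], Coords), [],
              IntrName>;
// Example instantiation, equivalent to int_nvvm_tex_unified_1d_v4u32_s32 above
// (left commented out so it does not duplicate the existing def):
// def int_nvvm_tex_unified_1d_v4u32_s32_sketch
//   : NVVMUnifiedTexSketch<"llvm.nvvm.tex.unified.1d.v4u32.s32",
//                          llvm_i32_ty, [llvm_i32_ty]>;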
+
+//===- Surface Load -------------------------------------------------------===//
+// .clamp variants
+def int_nvvm_suld_1d_i8_clamp
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.i8.clamp">;
+def int_nvvm_suld_1d_i16_clamp
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.i16.clamp">;
+def int_nvvm_suld_1d_i32_clamp
+ : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.i32.clamp">;
+def int_nvvm_suld_1d_i64_clamp
+ : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.i64.clamp">;
+def int_nvvm_suld_1d_v2i8_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v2i8.clamp">;
+def int_nvvm_suld_1d_v2i16_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v2i16.clamp">;
+def int_nvvm_suld_1d_v2i32_clamp
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v2i32.clamp">;
+def int_nvvm_suld_1d_v2i64_clamp
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v2i64.clamp">;
+def int_nvvm_suld_1d_v4i8_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v4i8.clamp">;
+def int_nvvm_suld_1d_v4i16_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v4i16.clamp">;
+def int_nvvm_suld_1d_v4i32_clamp
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v4i32.clamp">;
+
+def int_nvvm_suld_1d_array_i8_clamp
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.i8.clamp">;
+def int_nvvm_suld_1d_array_i16_clamp
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.i16.clamp">;
+def int_nvvm_suld_1d_array_i32_clamp
+ : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.i32.clamp">;
+def int_nvvm_suld_1d_array_i64_clamp
+ : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.i64.clamp">;
+def int_nvvm_suld_1d_array_v2i8_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v2i8.clamp">;
+def int_nvvm_suld_1d_array_v2i16_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v2i16.clamp">;
+def int_nvvm_suld_1d_array_v2i32_clamp
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v2i32.clamp">;
+def int_nvvm_suld_1d_array_v2i64_clamp
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v2i64.clamp">;
+def int_nvvm_suld_1d_array_v4i8_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v4i8.clamp">;
+def int_nvvm_suld_1d_array_v4i16_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v4i16.clamp">;
+def int_nvvm_suld_1d_array_v4i32_clamp
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v4i32.clamp">;
+
+def int_nvvm_suld_2d_i8_clamp
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.i8.clamp">;
+def int_nvvm_suld_2d_i16_clamp
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.i16.clamp">;
+def int_nvvm_suld_2d_i32_clamp
+ : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.i32.clamp">;
+def int_nvvm_suld_2d_i64_clamp
+ : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.i64.clamp">;
+def int_nvvm_suld_2d_v2i8_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v2i8.clamp">;
+def int_nvvm_suld_2d_v2i16_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v2i16.clamp">;
+def int_nvvm_suld_2d_v2i32_clamp
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v2i32.clamp">;
+def int_nvvm_suld_2d_v2i64_clamp
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v2i64.clamp">;
+def int_nvvm_suld_2d_v4i8_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v4i8.clamp">;
+def int_nvvm_suld_2d_v4i16_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v4i16.clamp">;
+def int_nvvm_suld_2d_v4i32_clamp
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v4i32.clamp">;
+
+def int_nvvm_suld_2d_array_i8_clamp
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.i8.clamp">;
+def int_nvvm_suld_2d_array_i16_clamp
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.i16.clamp">;
+def int_nvvm_suld_2d_array_i32_clamp
+ : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.i32.clamp">;
+def int_nvvm_suld_2d_array_i64_clamp
+ : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.i64.clamp">;
+def int_nvvm_suld_2d_array_v2i8_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v2i8.clamp">;
+def int_nvvm_suld_2d_array_v2i16_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v2i16.clamp">;
+def int_nvvm_suld_2d_array_v2i32_clamp
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v2i32.clamp">;
+def int_nvvm_suld_2d_array_v2i64_clamp
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v2i64.clamp">;
+def int_nvvm_suld_2d_array_v4i8_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v4i8.clamp">;
+def int_nvvm_suld_2d_array_v4i16_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v4i16.clamp">;
+def int_nvvm_suld_2d_array_v4i32_clamp
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v4i32.clamp">;
+
+def int_nvvm_suld_3d_i8_clamp
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.i8.clamp">;
+def int_nvvm_suld_3d_i16_clamp
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.i16.clamp">;
+def int_nvvm_suld_3d_i32_clamp
+ : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.i32.clamp">;
+def int_nvvm_suld_3d_i64_clamp
+ : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.i64.clamp">;
+def int_nvvm_suld_3d_v2i8_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v2i8.clamp">;
+def int_nvvm_suld_3d_v2i16_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v2i16.clamp">;
+def int_nvvm_suld_3d_v2i32_clamp
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v2i32.clamp">;
+def int_nvvm_suld_3d_v2i64_clamp
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v2i64.clamp">;
+def int_nvvm_suld_3d_v4i8_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v4i8.clamp">;
+def int_nvvm_suld_3d_v4i16_clamp
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v4i16.clamp">;
+def int_nvvm_suld_3d_v4i32_clamp
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v4i32.clamp">;
+
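Note: the .trap and .zero surface-load blocks below repeat the same geometry/element combinations as the .clamp block above, changing only the trailing mode token in the def name and in the intrinsic string. A hedged sketch of that repetition as a hypothetical TableGen multiclass (not used by this file, which spells every def out explicitly):

// Hypothetical multiclass, not defined in this file: expands one surface-load
// geometry/element combination into its .clamp, .trap and .zero modes.
multiclass SULDModesSketch<string Name, list<LLVMType> Rets, list<LLVMType> Args> {
  def _clamp : Intrinsic<Rets, Args, [], "llvm.nvvm.suld." # Name # ".clamp">;
  def _trap  : Intrinsic<Rets, Args, [], "llvm.nvvm.suld." # Name # ".trap">;
  def _zero  : Intrinsic<Rets, Args, [], "llvm.nvvm.suld." # Name # ".zero">;
}
// Example instantiation; it would produce defs named like int_nvvm_suld_1d_i32_clamp,
// so it is left commented out to avoid clashing with the existing ones:
// defm int_nvvm_suld_1d_i32 : SULDModesSketch<"1d.i32", [llvm_i32_ty],
//                                             [llvm_i64_ty, llvm_i32_ty]>;
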
+// .trap variants
+def int_nvvm_suld_1d_i8_trap
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.i8.trap">;
+def int_nvvm_suld_1d_i16_trap
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.i16.trap">;
+def int_nvvm_suld_1d_i32_trap
+ : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.i32.trap">;
+def int_nvvm_suld_1d_i64_trap
+ : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.i64.trap">;
+def int_nvvm_suld_1d_v2i8_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v2i8.trap">;
+def int_nvvm_suld_1d_v2i16_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v2i16.trap">;
+def int_nvvm_suld_1d_v2i32_trap
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v2i32.trap">;
+def int_nvvm_suld_1d_v2i64_trap
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v2i64.trap">;
+def int_nvvm_suld_1d_v4i8_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v4i8.trap">;
+def int_nvvm_suld_1d_v4i16_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v4i16.trap">;
+def int_nvvm_suld_1d_v4i32_trap
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v4i32.trap">;
+
+def int_nvvm_suld_1d_array_i8_trap
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.i8.trap">;
+def int_nvvm_suld_1d_array_i16_trap
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.i16.trap">;
+def int_nvvm_suld_1d_array_i32_trap
+ : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.i32.trap">;
+def int_nvvm_suld_1d_array_i64_trap
+ : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.i64.trap">;
+def int_nvvm_suld_1d_array_v2i8_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v2i8.trap">;
+def int_nvvm_suld_1d_array_v2i16_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v2i16.trap">;
+def int_nvvm_suld_1d_array_v2i32_trap
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v2i32.trap">;
+def int_nvvm_suld_1d_array_v2i64_trap
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v2i64.trap">;
+def int_nvvm_suld_1d_array_v4i8_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v4i8.trap">;
+def int_nvvm_suld_1d_array_v4i16_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v4i16.trap">;
+def int_nvvm_suld_1d_array_v4i32_trap
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v4i32.trap">;
+
+def int_nvvm_suld_2d_i8_trap
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.i8.trap">;
+def int_nvvm_suld_2d_i16_trap
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.i16.trap">;
+def int_nvvm_suld_2d_i32_trap
+ : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.i32.trap">;
+def int_nvvm_suld_2d_i64_trap
+ : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.i64.trap">;
+def int_nvvm_suld_2d_v2i8_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v2i8.trap">;
+def int_nvvm_suld_2d_v2i16_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v2i16.trap">;
+def int_nvvm_suld_2d_v2i32_trap
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v2i32.trap">;
+def int_nvvm_suld_2d_v2i64_trap
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v2i64.trap">;
+def int_nvvm_suld_2d_v4i8_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v4i8.trap">;
+def int_nvvm_suld_2d_v4i16_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v4i16.trap">;
+def int_nvvm_suld_2d_v4i32_trap
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v4i32.trap">;
+
+def int_nvvm_suld_2d_array_i8_trap
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.i8.trap">;
+def int_nvvm_suld_2d_array_i16_trap
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.i16.trap">;
+def int_nvvm_suld_2d_array_i32_trap
+ : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.i32.trap">;
+def int_nvvm_suld_2d_array_i64_trap
+ : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.i64.trap">;
+def int_nvvm_suld_2d_array_v2i8_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v2i8.trap">;
+def int_nvvm_suld_2d_array_v2i16_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v2i16.trap">;
+def int_nvvm_suld_2d_array_v2i32_trap
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v2i32.trap">;
+def int_nvvm_suld_2d_array_v2i64_trap
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v2i64.trap">;
+def int_nvvm_suld_2d_array_v4i8_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v4i8.trap">;
+def int_nvvm_suld_2d_array_v4i16_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v4i16.trap">;
+def int_nvvm_suld_2d_array_v4i32_trap
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v4i32.trap">;
+
+def int_nvvm_suld_3d_i8_trap
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.i8.trap">;
+def int_nvvm_suld_3d_i16_trap
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.i16.trap">;
+def int_nvvm_suld_3d_i32_trap
+ : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.i32.trap">;
+def int_nvvm_suld_3d_i64_trap
+ : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.i64.trap">;
+def int_nvvm_suld_3d_v2i8_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v2i8.trap">;
+def int_nvvm_suld_3d_v2i16_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v2i16.trap">;
+def int_nvvm_suld_3d_v2i32_trap
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v2i32.trap">;
+def int_nvvm_suld_3d_v2i64_trap
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v2i64.trap">;
+def int_nvvm_suld_3d_v4i8_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v4i8.trap">;
+def int_nvvm_suld_3d_v4i16_trap
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v4i16.trap">;
+def int_nvvm_suld_3d_v4i32_trap
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v4i32.trap">;
+
+// .zero variants
+def int_nvvm_suld_1d_i8_zero
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.i8.zero">;
+def int_nvvm_suld_1d_i16_zero
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.i16.zero">;
+def int_nvvm_suld_1d_i32_zero
+ : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.i32.zero">;
+def int_nvvm_suld_1d_i64_zero
+ : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.i64.zero">;
+def int_nvvm_suld_1d_v2i8_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v2i8.zero">;
+def int_nvvm_suld_1d_v2i16_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v2i16.zero">;
+def int_nvvm_suld_1d_v2i32_zero
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v2i32.zero">;
+def int_nvvm_suld_1d_v2i64_zero
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v2i64.zero">;
+def int_nvvm_suld_1d_v4i8_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v4i8.zero">;
+def int_nvvm_suld_1d_v4i16_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v4i16.zero">;
+def int_nvvm_suld_1d_v4i32_zero
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.v4i32.zero">;
+
+def int_nvvm_suld_1d_array_i8_zero
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.i8.zero">;
+def int_nvvm_suld_1d_array_i16_zero
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.i16.zero">;
+def int_nvvm_suld_1d_array_i32_zero
+ : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.i32.zero">;
+def int_nvvm_suld_1d_array_i64_zero
+ : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.i64.zero">;
+def int_nvvm_suld_1d_array_v2i8_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v2i8.zero">;
+def int_nvvm_suld_1d_array_v2i16_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v2i16.zero">;
+def int_nvvm_suld_1d_array_v2i32_zero
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v2i32.zero">;
+def int_nvvm_suld_1d_array_v2i64_zero
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v2i64.zero">;
+def int_nvvm_suld_1d_array_v4i8_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v4i8.zero">;
+def int_nvvm_suld_1d_array_v4i16_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v4i16.zero">;
+def int_nvvm_suld_1d_array_v4i32_zero
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.1d.array.v4i32.zero">;
+
+def int_nvvm_suld_2d_i8_zero
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.i8.zero">;
+def int_nvvm_suld_2d_i16_zero
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.i16.zero">;
+def int_nvvm_suld_2d_i32_zero
+ : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.i32.zero">;
+def int_nvvm_suld_2d_i64_zero
+ : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.i64.zero">;
+def int_nvvm_suld_2d_v2i8_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v2i8.zero">;
+def int_nvvm_suld_2d_v2i16_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v2i16.zero">;
+def int_nvvm_suld_2d_v2i32_zero
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v2i32.zero">;
+def int_nvvm_suld_2d_v2i64_zero
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v2i64.zero">;
+def int_nvvm_suld_2d_v4i8_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v4i8.zero">;
+def int_nvvm_suld_2d_v4i16_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v4i16.zero">;
+def int_nvvm_suld_2d_v4i32_zero
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.v4i32.zero">;
+
+def int_nvvm_suld_2d_array_i8_zero
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.i8.zero">;
+def int_nvvm_suld_2d_array_i16_zero
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.i16.zero">;
+def int_nvvm_suld_2d_array_i32_zero
+ : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.i32.zero">;
+def int_nvvm_suld_2d_array_i64_zero
+ : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.i64.zero">;
+def int_nvvm_suld_2d_array_v2i8_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v2i8.zero">;
+def int_nvvm_suld_2d_array_v2i16_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v2i16.zero">;
+def int_nvvm_suld_2d_array_v2i32_zero
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v2i32.zero">;
+def int_nvvm_suld_2d_array_v2i64_zero
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v2i64.zero">;
+def int_nvvm_suld_2d_array_v4i8_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v4i8.zero">;
+def int_nvvm_suld_2d_array_v4i16_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v4i16.zero">;
+def int_nvvm_suld_2d_array_v4i32_zero
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.2d.array.v4i32.zero">;
+
+def int_nvvm_suld_3d_i8_zero
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.i8.zero">;
+def int_nvvm_suld_3d_i16_zero
+ : Intrinsic<[llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.i16.zero">;
+def int_nvvm_suld_3d_i32_zero
+ : Intrinsic<[llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.i32.zero">;
+def int_nvvm_suld_3d_i64_zero
+ : Intrinsic<[llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.i64.zero">;
+def int_nvvm_suld_3d_v2i8_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v2i8.zero">;
+def int_nvvm_suld_3d_v2i16_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v2i16.zero">;
+def int_nvvm_suld_3d_v2i32_zero
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v2i32.zero">;
+def int_nvvm_suld_3d_v2i64_zero
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v2i64.zero">;
+def int_nvvm_suld_3d_v4i8_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v4i8.zero">;
+def int_nvvm_suld_3d_v4i16_zero
+ : Intrinsic<[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v4i16.zero">;
+def int_nvvm_suld_3d_v4i32_zero
+ : Intrinsic<[llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.suld.3d.v4i32.zero">;
+
+//===- Texture Query ------------------------------------------------------===//
+
+def int_nvvm_txq_channel_order
+ : Intrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.txq.channel.order">,
+ GCCBuiltin<"__nvvm_txq_channel_order">;
+def int_nvvm_txq_channel_data_type
+ : Intrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.txq.channel.data.type">,
+ GCCBuiltin<"__nvvm_txq_channel_data_type">;
+def int_nvvm_txq_width
+ : Intrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.txq.width">,
+ GCCBuiltin<"__nvvm_txq_width">;
+def int_nvvm_txq_height
+ : Intrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.txq.height">,
+ GCCBuiltin<"__nvvm_txq_height">;
+def int_nvvm_txq_depth
+ : Intrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.txq.depth">,
+ GCCBuiltin<"__nvvm_txq_depth">;
+def int_nvvm_txq_array_size
+ : Intrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.txq.array.size">,
+ GCCBuiltin<"__nvvm_txq_array_size">;
+def int_nvvm_txq_num_samples
+ : Intrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.txq.num.samples">,
+ GCCBuiltin<"__nvvm_txq_num_samples">;
+def int_nvvm_txq_num_mipmap_levels
+ : Intrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.txq.num.mipmap.levels">,
+ GCCBuiltin<"__nvvm_txq_num_mipmap_levels">;
+
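Note: each texture-query def above reads one property of the opaque i64 texture handle, returns it as i32, carries IntrNoMem, and names a matching __nvvm_txq_* GCC builtin; the surface queries below follow the identical pattern with suq/__nvvm_suq_* names. A minimal sketch of the shared shape as a hypothetical helper class (both name strings are simply passed through):

// Hypothetical helper, not defined in this file: common shape of the
// texture/surface query intrinsics (i64 handle in, i32 property out,
// no memory access, attached GCC builtin).
class NVVMQuerySketch<string IntrName, string BuiltinName>
  : Intrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem], IntrName>,
    GCCBuiltin<BuiltinName>;
// Example instantiation, equivalent to int_nvvm_txq_width above
// (left commented out so it does not duplicate the existing def):
// def int_nvvm_txq_width_sketch
//   : NVVMQuerySketch<"llvm.nvvm.txq.width", "__nvvm_txq_width">;
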
+//===- Surface Query ------------------------------------------------------===//
+
+def int_nvvm_suq_channel_order
+ : Intrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.suq.channel.order">,
+ GCCBuiltin<"__nvvm_suq_channel_order">;
+def int_nvvm_suq_channel_data_type
+ : Intrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.suq.channel.data.type">,
+ GCCBuiltin<"__nvvm_suq_channel_data_type">;
+def int_nvvm_suq_width
+ : Intrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.suq.width">,
+ GCCBuiltin<"__nvvm_suq_width">;
+def int_nvvm_suq_height
+ : Intrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.suq.height">,
+ GCCBuiltin<"__nvvm_suq_height">;
+def int_nvvm_suq_depth
+ : Intrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.suq.depth">,
+ GCCBuiltin<"__nvvm_suq_depth">;
+def int_nvvm_suq_array_size
+ : Intrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.suq.array.size">,
+ GCCBuiltin<"__nvvm_suq_array_size">;
+
+
+//===- Handle Query -------------------------------------------------------===//
+
+def int_nvvm_istypep_sampler
+ : Intrinsic<[llvm_i1_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.istypep.sampler">,
+ GCCBuiltin<"__nvvm_istypep_sampler">;
+def int_nvvm_istypep_surface
+ : Intrinsic<[llvm_i1_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.istypep.surface">,
+ GCCBuiltin<"__nvvm_istypep_surface">;
+def int_nvvm_istypep_texture
+ : Intrinsic<[llvm_i1_ty], [llvm_i64_ty], [IntrNoMem],
+ "llvm.nvvm.istypep.texture">,
+ GCCBuiltin<"__nvvm_istypep_texture">;
+
+
+
+//===- Surface Stores -----------------------------------------------------===//
+
+// Unformatted
+// .clamp variants
+def int_nvvm_sust_b_1d_i8_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.i8.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_i8_clamp">;
+def int_nvvm_sust_b_1d_i16_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.i16.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_i16_clamp">;
+def int_nvvm_sust_b_1d_i32_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.i32.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_i32_clamp">;
+def int_nvvm_sust_b_1d_i64_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.1d.i64.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_i64_clamp">;
+def int_nvvm_sust_b_1d_v2i8_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.v2i8.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v2i8_clamp">;
+def int_nvvm_sust_b_1d_v2i16_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.v2i16.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v2i16_clamp">;
+def int_nvvm_sust_b_1d_v2i32_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.v2i32.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v2i32_clamp">;
+def int_nvvm_sust_b_1d_v2i64_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i64_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.1d.v2i64.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v2i64_clamp">;
+def int_nvvm_sust_b_1d_v4i8_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.v4i8.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v4i8_clamp">;
+def int_nvvm_sust_b_1d_v4i16_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.v4i16.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v4i16_clamp">;
+def int_nvvm_sust_b_1d_v4i32_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.v4i32.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v4i32_clamp">;
+
+
+def int_nvvm_sust_b_1d_array_i8_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.i8.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_i8_clamp">;
+def int_nvvm_sust_b_1d_array_i16_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.i16.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_i16_clamp">;
+def int_nvvm_sust_b_1d_array_i32_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.array.i32.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_i32_clamp">;
+def int_nvvm_sust_b_1d_array_i64_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.1d.array.i64.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_i64_clamp">;
+def int_nvvm_sust_b_1d_array_v2i8_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v2i8.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v2i8_clamp">;
+def int_nvvm_sust_b_1d_array_v2i16_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v2i16.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v2i16_clamp">;
+def int_nvvm_sust_b_1d_array_v2i32_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v2i32.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v2i32_clamp">;
+def int_nvvm_sust_b_1d_array_v2i64_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i64_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v2i64.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v2i64_clamp">;
+def int_nvvm_sust_b_1d_array_v4i8_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v4i8.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v4i8_clamp">;
+def int_nvvm_sust_b_1d_array_v4i16_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v4i16.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v4i16_clamp">;
+def int_nvvm_sust_b_1d_array_v4i32_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v4i32.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v4i32_clamp">;
+
+
+def int_nvvm_sust_b_2d_i8_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.i8.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_i8_clamp">;
+def int_nvvm_sust_b_2d_i16_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.i16.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_i16_clamp">;
+def int_nvvm_sust_b_2d_i32_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.i32.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_i32_clamp">;
+def int_nvvm_sust_b_2d_i64_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.2d.i64.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_i64_clamp">;
+def int_nvvm_sust_b_2d_v2i8_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.v2i8.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v2i8_clamp">;
+def int_nvvm_sust_b_2d_v2i16_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.v2i16.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v2i16_clamp">;
+def int_nvvm_sust_b_2d_v2i32_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.v2i32.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v2i32_clamp">;
+def int_nvvm_sust_b_2d_v2i64_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i64_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.2d.v2i64.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v2i64_clamp">;
+def int_nvvm_sust_b_2d_v4i8_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.v4i8.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v4i8_clamp">;
+def int_nvvm_sust_b_2d_v4i16_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.v4i16.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v4i16_clamp">;
+def int_nvvm_sust_b_2d_v4i32_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.v4i32.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v4i32_clamp">;
+
+
+def int_nvvm_sust_b_2d_array_i8_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.i8.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_i8_clamp">;
+def int_nvvm_sust_b_2d_array_i16_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.i16.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_i16_clamp">;
+def int_nvvm_sust_b_2d_array_i32_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.array.i32.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_i32_clamp">;
+def int_nvvm_sust_b_2d_array_i64_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.2d.array.i64.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_i64_clamp">;
+def int_nvvm_sust_b_2d_array_v2i8_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v2i8.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v2i8_clamp">;
+def int_nvvm_sust_b_2d_array_v2i16_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v2i16.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v2i16_clamp">;
+def int_nvvm_sust_b_2d_array_v2i32_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v2i32.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v2i32_clamp">;
+def int_nvvm_sust_b_2d_array_v2i64_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i64_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v2i64.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v2i64_clamp">;
+def int_nvvm_sust_b_2d_array_v4i8_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v4i8.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v4i8_clamp">;
+def int_nvvm_sust_b_2d_array_v4i16_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v4i16.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v4i16_clamp">;
+def int_nvvm_sust_b_2d_array_v4i32_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v4i32.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v4i32_clamp">;
+
+
+def int_nvvm_sust_b_3d_i8_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.i8.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_3d_i8_clamp">;
+def int_nvvm_sust_b_3d_i16_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.i16.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_3d_i16_clamp">;
+def int_nvvm_sust_b_3d_i32_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.3d.i32.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_3d_i32_clamp">;
+def int_nvvm_sust_b_3d_i64_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.3d.i64.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_3d_i64_clamp">;
+def int_nvvm_sust_b_3d_v2i8_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.v2i8.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v2i8_clamp">;
+def int_nvvm_sust_b_3d_v2i16_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.v2i16.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v2i16_clamp">;
+def int_nvvm_sust_b_3d_v2i32_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.3d.v2i32.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v2i32_clamp">;
+def int_nvvm_sust_b_3d_v2i64_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i64_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.3d.v2i64.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v2i64_clamp">;
+def int_nvvm_sust_b_3d_v4i8_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.v4i8.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v4i8_clamp">;
+def int_nvvm_sust_b_3d_v4i16_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.v4i16.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v4i16_clamp">;
+def int_nvvm_sust_b_3d_v4i32_clamp
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.3d.v4i32.clamp">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v4i32_clamp">;
+
+
+// .trap variant
+def int_nvvm_sust_b_1d_i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.i8.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_i8_trap">;
+def int_nvvm_sust_b_1d_i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.i16.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_i16_trap">;
+def int_nvvm_sust_b_1d_i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.i32.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_i32_trap">;
+def int_nvvm_sust_b_1d_i64_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.1d.i64.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_i64_trap">;
+def int_nvvm_sust_b_1d_v2i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.v2i8.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v2i8_trap">;
+def int_nvvm_sust_b_1d_v2i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.v2i16.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v2i16_trap">;
+def int_nvvm_sust_b_1d_v2i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.v2i32.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v2i32_trap">;
+def int_nvvm_sust_b_1d_v2i64_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i64_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.1d.v2i64.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v2i64_trap">;
+def int_nvvm_sust_b_1d_v4i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.v4i8.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v4i8_trap">;
+def int_nvvm_sust_b_1d_v4i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.v4i16.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v4i16_trap">;
+def int_nvvm_sust_b_1d_v4i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.v4i32.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v4i32_trap">;
+
+
+def int_nvvm_sust_b_1d_array_i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.i8.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_i8_trap">;
+def int_nvvm_sust_b_1d_array_i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.i16.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_i16_trap">;
+def int_nvvm_sust_b_1d_array_i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.array.i32.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_i32_trap">;
+def int_nvvm_sust_b_1d_array_i64_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.1d.array.i64.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_i64_trap">;
+def int_nvvm_sust_b_1d_array_v2i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v2i8.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v2i8_trap">;
+def int_nvvm_sust_b_1d_array_v2i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v2i16.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v2i16_trap">;
+def int_nvvm_sust_b_1d_array_v2i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v2i32.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v2i32_trap">;
+def int_nvvm_sust_b_1d_array_v2i64_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i64_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v2i64.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v2i64_trap">;
+def int_nvvm_sust_b_1d_array_v4i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v4i8.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v4i8_trap">;
+def int_nvvm_sust_b_1d_array_v4i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v4i16.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v4i16_trap">;
+def int_nvvm_sust_b_1d_array_v4i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v4i32.trap">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v4i32_trap">;
+
+
+def int_nvvm_sust_b_2d_i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.i8.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_i8_trap">;
+def int_nvvm_sust_b_2d_i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.i16.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_i16_trap">;
+def int_nvvm_sust_b_2d_i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.i32.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_i32_trap">;
+def int_nvvm_sust_b_2d_i64_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.2d.i64.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_i64_trap">;
+def int_nvvm_sust_b_2d_v2i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.v2i8.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v2i8_trap">;
+def int_nvvm_sust_b_2d_v2i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.v2i16.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v2i16_trap">;
+def int_nvvm_sust_b_2d_v2i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.v2i32.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v2i32_trap">;
+def int_nvvm_sust_b_2d_v2i64_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i64_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.2d.v2i64.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v2i64_trap">;
+def int_nvvm_sust_b_2d_v4i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.v4i8.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v4i8_trap">;
+def int_nvvm_sust_b_2d_v4i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.v4i16.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v4i16_trap">;
+def int_nvvm_sust_b_2d_v4i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.v4i32.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v4i32_trap">;
+
+
+def int_nvvm_sust_b_2d_array_i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.i8.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_i8_trap">;
+def int_nvvm_sust_b_2d_array_i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.i16.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_i16_trap">;
+def int_nvvm_sust_b_2d_array_i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.array.i32.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_i32_trap">;
+def int_nvvm_sust_b_2d_array_i64_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.2d.array.i64.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_i64_trap">;
+def int_nvvm_sust_b_2d_array_v2i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v2i8.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v2i8_trap">;
+def int_nvvm_sust_b_2d_array_v2i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v2i16.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v2i16_trap">;
+def int_nvvm_sust_b_2d_array_v2i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v2i32.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v2i32_trap">;
+def int_nvvm_sust_b_2d_array_v2i64_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i64_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v2i64.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v2i64_trap">;
+def int_nvvm_sust_b_2d_array_v4i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v4i8.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v4i8_trap">;
+def int_nvvm_sust_b_2d_array_v4i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v4i16.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v4i16_trap">;
+def int_nvvm_sust_b_2d_array_v4i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v4i32.trap">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v4i32_trap">;
+
+
+def int_nvvm_sust_b_3d_i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.i8.trap">,
+ GCCBuiltin<"__nvvm_sust_b_3d_i8_trap">;
+def int_nvvm_sust_b_3d_i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.i16.trap">,
+ GCCBuiltin<"__nvvm_sust_b_3d_i16_trap">;
+def int_nvvm_sust_b_3d_i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.3d.i32.trap">,
+ GCCBuiltin<"__nvvm_sust_b_3d_i32_trap">;
+def int_nvvm_sust_b_3d_i64_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.3d.i64.trap">,
+ GCCBuiltin<"__nvvm_sust_b_3d_i64_trap">;
+def int_nvvm_sust_b_3d_v2i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.v2i8.trap">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v2i8_trap">;
+def int_nvvm_sust_b_3d_v2i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.v2i16.trap">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v2i16_trap">;
+def int_nvvm_sust_b_3d_v2i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.3d.v2i32.trap">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v2i32_trap">;
+def int_nvvm_sust_b_3d_v2i64_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i64_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.3d.v2i64.trap">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v2i64_trap">;
+def int_nvvm_sust_b_3d_v4i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.v4i8.trap">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v4i8_trap">;
+def int_nvvm_sust_b_3d_v4i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.v4i16.trap">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v4i16_trap">;
+def int_nvvm_sust_b_3d_v4i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.3d.v4i32.trap">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v4i32_trap">;
+
+
+// .zero variant
+def int_nvvm_sust_b_1d_i8_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.i8.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_i8_zero">;
+def int_nvvm_sust_b_1d_i16_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.i16.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_i16_zero">;
+def int_nvvm_sust_b_1d_i32_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.i32.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_i32_zero">;
+def int_nvvm_sust_b_1d_i64_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.1d.i64.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_i64_zero">;
+def int_nvvm_sust_b_1d_v2i8_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.v2i8.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v2i8_zero">;
+def int_nvvm_sust_b_1d_v2i16_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.v2i16.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v2i16_zero">;
+def int_nvvm_sust_b_1d_v2i32_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.v2i32.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v2i32_zero">;
+def int_nvvm_sust_b_1d_v2i64_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i64_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.1d.v2i64.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v2i64_zero">;
+def int_nvvm_sust_b_1d_v4i8_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.v4i8.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v4i8_zero">;
+def int_nvvm_sust_b_1d_v4i16_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.v4i16.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v4i16_zero">;
+def int_nvvm_sust_b_1d_v4i32_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.v4i32.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_v4i32_zero">;
+
+
+def int_nvvm_sust_b_1d_array_i8_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.i8.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_i8_zero">;
+def int_nvvm_sust_b_1d_array_i16_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.i16.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_i16_zero">;
+def int_nvvm_sust_b_1d_array_i32_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.array.i32.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_i32_zero">;
+def int_nvvm_sust_b_1d_array_i64_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.1d.array.i64.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_i64_zero">;
+def int_nvvm_sust_b_1d_array_v2i8_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v2i8.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v2i8_zero">;
+def int_nvvm_sust_b_1d_array_v2i16_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v2i16.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v2i16_zero">;
+def int_nvvm_sust_b_1d_array_v2i32_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v2i32.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v2i32_zero">;
+def int_nvvm_sust_b_1d_array_v2i64_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i64_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v2i64.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v2i64_zero">;
+def int_nvvm_sust_b_1d_array_v4i8_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v4i8.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v4i8_zero">;
+def int_nvvm_sust_b_1d_array_v4i16_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v4i16.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v4i16_zero">;
+def int_nvvm_sust_b_1d_array_v4i32_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.1d.array.v4i32.zero">,
+ GCCBuiltin<"__nvvm_sust_b_1d_array_v4i32_zero">;
+
+
+def int_nvvm_sust_b_2d_i8_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.i8.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_i8_zero">;
+def int_nvvm_sust_b_2d_i16_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.i16.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_i16_zero">;
+def int_nvvm_sust_b_2d_i32_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.i32.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_i32_zero">;
+def int_nvvm_sust_b_2d_i64_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.2d.i64.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_i64_zero">;
+def int_nvvm_sust_b_2d_v2i8_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.v2i8.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v2i8_zero">;
+def int_nvvm_sust_b_2d_v2i16_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.v2i16.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v2i16_zero">;
+def int_nvvm_sust_b_2d_v2i32_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.v2i32.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v2i32_zero">;
+def int_nvvm_sust_b_2d_v2i64_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i64_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.2d.v2i64.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v2i64_zero">;
+def int_nvvm_sust_b_2d_v4i8_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.v4i8.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v4i8_zero">;
+def int_nvvm_sust_b_2d_v4i16_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.v4i16.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v4i16_zero">;
+def int_nvvm_sust_b_2d_v4i32_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.v4i32.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_v4i32_zero">;
+
+
+def int_nvvm_sust_b_2d_array_i8_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.i8.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_i8_zero">;
+def int_nvvm_sust_b_2d_array_i16_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.i16.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_i16_zero">;
+def int_nvvm_sust_b_2d_array_i32_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.array.i32.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_i32_zero">;
+def int_nvvm_sust_b_2d_array_i64_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.2d.array.i64.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_i64_zero">;
+def int_nvvm_sust_b_2d_array_v2i8_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v2i8.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v2i8_zero">;
+def int_nvvm_sust_b_2d_array_v2i16_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v2i16.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v2i16_zero">;
+def int_nvvm_sust_b_2d_array_v2i32_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v2i32.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v2i32_zero">;
+def int_nvvm_sust_b_2d_array_v2i64_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i64_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v2i64.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v2i64_zero">;
+def int_nvvm_sust_b_2d_array_v4i8_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v4i8.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v4i8_zero">;
+def int_nvvm_sust_b_2d_array_v4i16_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v4i16.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v4i16_zero">;
+def int_nvvm_sust_b_2d_array_v4i32_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.2d.array.v4i32.zero">,
+ GCCBuiltin<"__nvvm_sust_b_2d_array_v4i32_zero">;
+
+
+def int_nvvm_sust_b_3d_i8_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.i8.zero">,
+ GCCBuiltin<"__nvvm_sust_b_3d_i8_zero">;
+def int_nvvm_sust_b_3d_i16_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.i16.zero">,
+ GCCBuiltin<"__nvvm_sust_b_3d_i16_zero">;
+def int_nvvm_sust_b_3d_i32_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.3d.i32.zero">,
+ GCCBuiltin<"__nvvm_sust_b_3d_i32_zero">;
+def int_nvvm_sust_b_3d_i64_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.3d.i64.zero">,
+ GCCBuiltin<"__nvvm_sust_b_3d_i64_zero">;
+def int_nvvm_sust_b_3d_v2i8_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.v2i8.zero">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v2i8_zero">;
+def int_nvvm_sust_b_3d_v2i16_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.v2i16.zero">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v2i16_zero">;
+def int_nvvm_sust_b_3d_v2i32_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.3d.v2i32.zero">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v2i32_zero">;
+def int_nvvm_sust_b_3d_v2i64_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i64_ty, llvm_i64_ty], [],
+ "llvm.nvvm.sust.b.3d.v2i64.zero">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v2i64_zero">;
+def int_nvvm_sust_b_3d_v4i8_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.v4i8.zero">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v4i8_zero">;
+def int_nvvm_sust_b_3d_v4i16_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.b.3d.v4i16.zero">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v4i16_zero">;
+def int_nvvm_sust_b_3d_v4i32_zero
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.b.3d.v4i32.zero">,
+ GCCBuiltin<"__nvvm_sust_b_3d_v4i32_zero">;
+
+
+
+// Formatted surface stores (sust.p): unlike the raw sust.b forms above, the
+// data is converted to the surface's declared format on store.
+
+def int_nvvm_sust_p_1d_i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.1d.i8.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_i8_trap">;
+def int_nvvm_sust_p_1d_i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.1d.i16.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_i16_trap">;
+def int_nvvm_sust_p_1d_i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.p.1d.i32.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_i32_trap">;
+def int_nvvm_sust_p_1d_v2i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.1d.v2i8.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_v2i8_trap">;
+def int_nvvm_sust_p_1d_v2i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.1d.v2i16.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_v2i16_trap">;
+def int_nvvm_sust_p_1d_v2i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.p.1d.v2i32.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_v2i32_trap">;
+def int_nvvm_sust_p_1d_v4i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.1d.v4i8.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_v4i8_trap">;
+def int_nvvm_sust_p_1d_v4i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.1d.v4i16.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_v4i16_trap">;
+def int_nvvm_sust_p_1d_v4i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.p.1d.v4i32.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_v4i32_trap">;
+
+
+def int_nvvm_sust_p_1d_array_i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.1d.array.i8.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_array_i8_trap">;
+def int_nvvm_sust_p_1d_array_i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.1d.array.i16.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_array_i16_trap">;
+def int_nvvm_sust_p_1d_array_i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.p.1d.array.i32.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_array_i32_trap">;
+def int_nvvm_sust_p_1d_array_v2i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.1d.array.v2i8.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_array_v2i8_trap">;
+def int_nvvm_sust_p_1d_array_v2i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.1d.array.v2i16.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_array_v2i16_trap">;
+def int_nvvm_sust_p_1d_array_v2i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.p.1d.array.v2i32.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_array_v2i32_trap">;
+def int_nvvm_sust_p_1d_array_v4i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.1d.array.v4i8.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_array_v4i8_trap">;
+def int_nvvm_sust_p_1d_array_v4i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.1d.array.v4i16.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_array_v4i16_trap">;
+def int_nvvm_sust_p_1d_array_v4i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.p.1d.array.v4i32.trap">,
+ GCCBuiltin<"__nvvm_sust_p_1d_array_v4i32_trap">;
+
+
+def int_nvvm_sust_p_2d_i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.2d.i8.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_i8_trap">;
+def int_nvvm_sust_p_2d_i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.2d.i16.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_i16_trap">;
+def int_nvvm_sust_p_2d_i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.p.2d.i32.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_i32_trap">;
+def int_nvvm_sust_p_2d_v2i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.2d.v2i8.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_v2i8_trap">;
+def int_nvvm_sust_p_2d_v2i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.2d.v2i16.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_v2i16_trap">;
+def int_nvvm_sust_p_2d_v2i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.p.2d.v2i32.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_v2i32_trap">;
+def int_nvvm_sust_p_2d_v4i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.2d.v4i8.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_v4i8_trap">;
+def int_nvvm_sust_p_2d_v4i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i16_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.2d.v4i16.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_v4i16_trap">;
+def int_nvvm_sust_p_2d_v4i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.p.2d.v4i32.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_v4i32_trap">;
+
+
+def int_nvvm_sust_p_2d_array_i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.2d.array.i8.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_array_i8_trap">;
+def int_nvvm_sust_p_2d_array_i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.2d.array.i16.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_array_i16_trap">;
+def int_nvvm_sust_p_2d_array_i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.p.2d.array.i32.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_array_i32_trap">;
+def int_nvvm_sust_p_2d_array_v2i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.2d.array.v2i8.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_array_v2i8_trap">;
+def int_nvvm_sust_p_2d_array_v2i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.2d.array.v2i16.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_array_v2i16_trap">;
+def int_nvvm_sust_p_2d_array_v2i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.p.2d.array.v2i32.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_array_v2i32_trap">;
+def int_nvvm_sust_p_2d_array_v4i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.2d.array.v4i8.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_array_v4i8_trap">;
+def int_nvvm_sust_p_2d_array_v4i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.2d.array.v4i16.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_array_v4i16_trap">;
+def int_nvvm_sust_p_2d_array_v4i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.p.2d.array.v4i32.trap">,
+ GCCBuiltin<"__nvvm_sust_p_2d_array_v4i32_trap">;
+
+
+def int_nvvm_sust_p_3d_i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.3d.i8.trap">,
+ GCCBuiltin<"__nvvm_sust_p_3d_i8_trap">;
+def int_nvvm_sust_p_3d_i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.3d.i16.trap">,
+ GCCBuiltin<"__nvvm_sust_p_3d_i16_trap">;
+def int_nvvm_sust_p_3d_i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.p.3d.i32.trap">,
+ GCCBuiltin<"__nvvm_sust_p_3d_i32_trap">;
+def int_nvvm_sust_p_3d_v2i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.3d.v2i8.trap">,
+ GCCBuiltin<"__nvvm_sust_p_3d_v2i8_trap">;
+def int_nvvm_sust_p_3d_v2i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.3d.v2i16.trap">,
+ GCCBuiltin<"__nvvm_sust_p_3d_v2i16_trap">;
+def int_nvvm_sust_p_3d_v2i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.p.3d.v2i32.trap">,
+ GCCBuiltin<"__nvvm_sust_p_3d_v2i32_trap">;
+def int_nvvm_sust_p_3d_v4i8_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.3d.v4i8.trap">,
+ GCCBuiltin<"__nvvm_sust_p_3d_v4i8_trap">;
+def int_nvvm_sust_p_3d_v4i16_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i16_ty, llvm_i16_ty, llvm_i16_ty], [],
+ "llvm.nvvm.sust.p.3d.v4i16.trap">,
+ GCCBuiltin<"__nvvm_sust_p_3d_v4i16_trap">;
+def int_nvvm_sust_p_3d_v4i32_trap
+ : Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [],
+ "llvm.nvvm.sust.p.3d.v4i32.trap">,
+ GCCBuiltin<"__nvvm_sust_p_3d_v4i32_trap">;
+
+
+def int_nvvm_rotate_b32
+ : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem], "llvm.nvvm.rotate.b32">,
+ GCCBuiltin<"__nvvm_rotate_b32">;
+
+def int_nvvm_rotate_b64
+  : Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i32_ty],
+ [IntrNoMem], "llvm.nvvm.rotate.b64">,
+ GCCBuiltin<"__nvvm_rotate_b64">;
+
+def int_nvvm_rotate_right_b64
+ : Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i32_ty],
+ [IntrNoMem], "llvm.nvvm.rotate.right.b64">,
+ GCCBuiltin<"__nvvm_rotate_right_b64">;
+
+def int_nvvm_swap_lo_hi_b64
+ : Intrinsic<[llvm_i64_ty], [llvm_i64_ty],
+ [IntrNoMem], "llvm.nvvm.swap.lo.hi.b64">,
+ GCCBuiltin<"__nvvm_swap_lo_hi_b64">;
+
+
+// Accessing special registers.
+multiclass PTXReadSRegIntrinsic_v4i32<string regname> {
+// FIXME: Do we need the 128-bit integer type version?
+// def _r64 : Intrinsic<[llvm_i128_ty], [], [IntrNoMem]>;
+
+// FIXME: Enable this once v4i32 support is enabled in the back-end.
+// def _v4i32 : Intrinsic<[llvm_v4i32_ty], [], [IntrNoMem]>;
+
+ def _x : Intrinsic<[llvm_i32_ty], [], [IntrNoMem]>,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_" # regname # "_x">;
+ def _y : Intrinsic<[llvm_i32_ty], [], [IntrNoMem]>,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_" # regname # "_y">;
+ def _z : Intrinsic<[llvm_i32_ty], [], [IntrNoMem]>,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_" # regname # "_z">;
+ def _w : Intrinsic<[llvm_i32_ty], [], [IntrNoMem]>,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_" # regname # "_w">;
+}
+
+class PTXReadSRegIntrinsic_r32<string name>
+ : Intrinsic<[llvm_i32_ty], [], [IntrNoMem]>,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_" # name>;
+
+class PTXReadSRegIntrinsic_r64<string name>
+ : Intrinsic<[llvm_i64_ty], [], [IntrNoMem]>,
+ GCCBuiltin<"__nvvm_read_ptx_sreg_" # name>;
+
+defm int_nvvm_read_ptx_sreg_tid : PTXReadSRegIntrinsic_v4i32<"tid">;
+defm int_nvvm_read_ptx_sreg_ntid : PTXReadSRegIntrinsic_v4i32<"ntid">;
+
+def int_nvvm_read_ptx_sreg_laneid : PTXReadSRegIntrinsic_r32<"laneid">;
+def int_nvvm_read_ptx_sreg_warpid : PTXReadSRegIntrinsic_r32<"warpid">;
+def int_nvvm_read_ptx_sreg_nwarpid : PTXReadSRegIntrinsic_r32<"nwarpid">;
+
+defm int_nvvm_read_ptx_sreg_ctaid : PTXReadSRegIntrinsic_v4i32<"ctaid">;
+defm int_nvvm_read_ptx_sreg_nctaid : PTXReadSRegIntrinsic_v4i32<"nctaid">;
+
+def int_nvvm_read_ptx_sreg_smid : PTXReadSRegIntrinsic_r32<"smid">;
+def int_nvvm_read_ptx_sreg_nsmid : PTXReadSRegIntrinsic_r32<"nsmid">;
+def int_nvvm_read_ptx_sreg_gridid : PTXReadSRegIntrinsic_r32<"gridid">;
+
+def int_nvvm_read_ptx_sreg_lanemask_eq :
+ PTXReadSRegIntrinsic_r32<"lanemask_eq">;
+def int_nvvm_read_ptx_sreg_lanemask_le :
+ PTXReadSRegIntrinsic_r32<"lanemask_le">;
+def int_nvvm_read_ptx_sreg_lanemask_lt :
+ PTXReadSRegIntrinsic_r32<"lanemask_lt">;
+def int_nvvm_read_ptx_sreg_lanemask_ge :
+ PTXReadSRegIntrinsic_r32<"lanemask_ge">;
+def int_nvvm_read_ptx_sreg_lanemask_gt :
+ PTXReadSRegIntrinsic_r32<"lanemask_gt">;
+
+def int_nvvm_read_ptx_sreg_clock : PTXReadSRegIntrinsic_r32<"clock">;
+def int_nvvm_read_ptx_sreg_clock64 : PTXReadSRegIntrinsic_r64<"clock64">;
+
+def int_nvvm_read_ptx_sreg_pm0 : PTXReadSRegIntrinsic_r32<"pm0">;
+def int_nvvm_read_ptx_sreg_pm1 : PTXReadSRegIntrinsic_r32<"pm1">;
+def int_nvvm_read_ptx_sreg_pm2 : PTXReadSRegIntrinsic_r32<"pm2">;
+def int_nvvm_read_ptx_sreg_pm3 : PTXReadSRegIntrinsic_r32<"pm3">;
+
+def int_nvvm_read_ptx_sreg_warpsize : PTXReadSRegIntrinsic_r32<"warpsize">;
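+
+// Usage sketch: each record above exposes one PTX special register through a
+// __nvvm_read_ptx_sreg_* builtin; threadIdx/blockIdx in CUDA ultimately lower to
+// these. A minimal sketch computing a global thread id from the builtins defined
+// by the multiclasses above:
+//
+//   __device__ unsigned global_thread_id() {
+//     return __nvvm_read_ptx_sreg_ctaid_x() * __nvvm_read_ptx_sreg_ntid_x() +
+//            __nvvm_read_ptx_sreg_tid_x();
+//   }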
+
+//
+// SHUFFLE
+//
+
+// shfl.down.b32 dest, val, offset, mask_and_clamp
+def int_nvvm_shfl_down_i32 :
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.down.i32">,
+ GCCBuiltin<"__nvvm_shfl_down_i32">;
+def int_nvvm_shfl_down_f32 :
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.down.f32">,
+ GCCBuiltin<"__nvvm_shfl_down_f32">;
+
+// shfl.up.b32 dest, val, offset, mask_and_clamp
+def int_nvvm_shfl_up_i32 :
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.up.i32">,
+ GCCBuiltin<"__nvvm_shfl_up_i32">;
+def int_nvvm_shfl_up_f32 :
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.up.f32">,
+ GCCBuiltin<"__nvvm_shfl_up_f32">;
+
+// shfl.bfly.b32 dest, val, offset, mask_and_clamp
+def int_nvvm_shfl_bfly_i32 :
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.bfly.i32">,
+ GCCBuiltin<"__nvvm_shfl_bfly_i32">;
+def int_nvvm_shfl_bfly_f32 :
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.bfly.f32">,
+ GCCBuiltin<"__nvvm_shfl_bfly_f32">;
+
+// shfl.idx.b32 dest, val, lane, mask_and_clamp
+def int_nvvm_shfl_idx_i32 :
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.idx.i32">,
+ GCCBuiltin<"__nvvm_shfl_idx_i32">;
+def int_nvvm_shfl_idx_f32 :
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.idx.f32">,
+ GCCBuiltin<"__nvvm_shfl_idx_f32">;
+
+// Synchronizing shfl variants, available since CUDA 9.
+// On sm_70 these don't have to be convergent, so we may eventually want to
+// implement a non-convergent variant of these intrinsics.
+
+// shfl.sync.down.b32 dest, threadmask, val, offset, mask_and_clamp
+def int_nvvm_shfl_sync_down_i32 :
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.sync.down.i32">,
+ GCCBuiltin<"__nvvm_shfl_sync_down_i32">;
+def int_nvvm_shfl_sync_down_f32 :
+ Intrinsic<[llvm_float_ty], [llvm_i32_ty, llvm_float_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.sync.down.f32">,
+ GCCBuiltin<"__nvvm_shfl_sync_down_f32">;
+
+// shfl.sync.up.b32 dest, threadmask, val, offset, mask_and_clamp
+def int_nvvm_shfl_sync_up_i32 :
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.sync.up.i32">,
+ GCCBuiltin<"__nvvm_shfl_sync_up_i32">;
+def int_nvvm_shfl_sync_up_f32 :
+ Intrinsic<[llvm_float_ty], [llvm_i32_ty, llvm_float_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.sync.up.f32">,
+ GCCBuiltin<"__nvvm_shfl_sync_up_f32">;
+
+// shfl.sync.bfly.b32 dest, threadmask, val, offset, mask_and_clamp
+def int_nvvm_shfl_sync_bfly_i32 :
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.sync.bfly.i32">,
+ GCCBuiltin<"__nvvm_shfl_sync_bfly_i32">;
+def int_nvvm_shfl_sync_bfly_f32 :
+ Intrinsic<[llvm_float_ty], [llvm_i32_ty, llvm_float_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.sync.bfly.f32">,
+ GCCBuiltin<"__nvvm_shfl_sync_bfly_f32">;
+
+// shfl.sync.idx.b32 dest, threadmask, val, lane, mask_and_clamp
+def int_nvvm_shfl_sync_idx_i32 :
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.sync.idx.i32">,
+ GCCBuiltin<"__nvvm_shfl_sync_idx_i32">;
+def int_nvvm_shfl_sync_idx_f32 :
+ Intrinsic<[llvm_float_ty], [llvm_i32_ty, llvm_float_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.shfl.sync.idx.f32">,
+ GCCBuiltin<"__nvvm_shfl_sync_idx_f32">;
+
+//
+// VOTE
+//
+
+// vote.all pred
+def int_nvvm_vote_all :
+ Intrinsic<[llvm_i1_ty], [llvm_i1_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.vote.all">,
+ GCCBuiltin<"__nvvm_vote_all">;
+// vote.any pred
+def int_nvvm_vote_any :
+ Intrinsic<[llvm_i1_ty], [llvm_i1_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.vote.any">,
+ GCCBuiltin<"__nvvm_vote_any">;
+// vote.uni pred
+def int_nvvm_vote_uni :
+ Intrinsic<[llvm_i1_ty], [llvm_i1_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.vote.uni">,
+ GCCBuiltin<"__nvvm_vote_uni">;
+// vote.ballot pred
+def int_nvvm_vote_ballot :
+ Intrinsic<[llvm_i32_ty], [llvm_i1_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.vote.ballot">,
+ GCCBuiltin<"__nvvm_vote_ballot">;
+
+//
+// VOTE.SYNC
+//
+
+// vote.sync.all mask, pred
+def int_nvvm_vote_all_sync :
+ Intrinsic<[llvm_i1_ty], [llvm_i32_ty, llvm_i1_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.vote.all.sync">,
+ GCCBuiltin<"__nvvm_vote_all_sync">;
+// vote.sync.any mask, pred
+def int_nvvm_vote_any_sync :
+ Intrinsic<[llvm_i1_ty], [llvm_i32_ty, llvm_i1_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.vote.any.sync">,
+ GCCBuiltin<"__nvvm_vote_any_sync">;
+// vote.sync.uni mask, pred
+def int_nvvm_vote_uni_sync :
+ Intrinsic<[llvm_i1_ty], [llvm_i32_ty, llvm_i1_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.vote.uni.sync">,
+ GCCBuiltin<"__nvvm_vote_uni_sync">;
+// vote.sync.ballot mask, pred
+def int_nvvm_vote_ballot_sync :
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i1_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.vote.ballot.sync">,
+ GCCBuiltin<"__nvvm_vote_ballot_sync">;
+
+//
+// MATCH.SYNC
+//
+// match.any.sync.b32 mask, value
+def int_nvvm_match_any_sync_i32 :
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.match.any.sync.i32">,
+ GCCBuiltin<"__nvvm_match_any_sync_i32">;
+// match.any.sync.b64 mask, value
+def int_nvvm_match_any_sync_i64 :
+ Intrinsic<[llvm_i64_ty], [llvm_i32_ty, llvm_i64_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.match.any.sync.i64">,
+ GCCBuiltin<"__nvvm_match_any_sync_i64">;
+
+// The match.all instruction has two variants -- one returns a single value, the
+// other returns a pair {value, predicate}. We currently only implement the
+// latter, as that is the variant exposed by the CUDA API.
+
+// match.all.sync.b32p mask, value
+def int_nvvm_match_all_sync_i32p :
+ Intrinsic<[llvm_i32_ty, llvm_i1_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.match.all.sync.i32p">;
+// match.all.sync.b64p mask, value
+def int_nvvm_match_all_sync_i64p :
+ Intrinsic<[llvm_i64_ty, llvm_i1_ty], [llvm_i32_ty, llvm_i64_ty],
+ [IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.match.all.sync.i64p">;
+
+//
+// WMMA instructions
+//
+
+// WMMA.LOAD
+class NVVM_WMMA_LD_GALSTS<string Geometry, string Abc, string Layout,
+ string Type, LLVMType regty, int WithStride>
+ : Intrinsic<!if(!eq(Abc#Type,"cf16"),
+ [regty, regty, regty, regty],
+ [regty, regty, regty, regty,
+ regty, regty, regty, regty]),
+ !if(WithStride, [llvm_anyptr_ty, llvm_i32_ty], [llvm_anyptr_ty]),
+ [IntrReadMem, IntrArgMemOnly, ReadOnly<0>, NoCapture<0>],
+ "llvm.nvvm.wmma."
+ # Geometry
+ # ".load"
+ # "." # Abc
+ # "." # Layout
+ # !if(WithStride, ".stride", "")
+ # "." # Type>;
+
+multiclass NVVM_WMMA_LD_GALT<string Geometry, string Abc, string Layout,
+ string Type, LLVMType regty> {
+ def _stride: NVVM_WMMA_LD_GALSTS<Geometry, Abc, Layout, Type, regty, 1>;
+ def NAME : NVVM_WMMA_LD_GALSTS<Geometry, Abc, Layout, Type, regty, 0>;
+}
+
+multiclass NVVM_WMMA_LD_GAT<string Geometry, string Abc,
+ string Type, LLVMType regty> {
+ defm _row: NVVM_WMMA_LD_GALT<Geometry, Abc, "row", Type, regty>;
+ defm _col: NVVM_WMMA_LD_GALT<Geometry, Abc, "col", Type, regty>;
+}
+
+multiclass NVVM_WMMA_LD_G<string Geometry> {
+ defm _a_f16: NVVM_WMMA_LD_GAT<Geometry, "a", "f16", llvm_v2f16_ty>;
+ defm _b_f16: NVVM_WMMA_LD_GAT<Geometry, "b", "f16", llvm_v2f16_ty>;
+ defm _c_f16: NVVM_WMMA_LD_GAT<Geometry, "c", "f16", llvm_v2f16_ty>;
+ defm _c_f32: NVVM_WMMA_LD_GAT<Geometry, "c", "f32", llvm_float_ty>;
+}
+
+multiclass NVVM_WMMA_LD {
+ defm _m32n8k16_load: NVVM_WMMA_LD_G<"m32n8k16">;
+ defm _m16n16k16_load: NVVM_WMMA_LD_G<"m16n16k16">;
+ defm _m8n32k16_load: NVVM_WMMA_LD_G<"m8n32k16">;
+}
+
+defm int_nvvm_wmma: NVVM_WMMA_LD;
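+
+// Expansion sketch: the defm stack above builds one record per combination of
+// geometry, fragment, layout, stride and element type by concatenating suffixes.
+// For example, int_nvvm_wmma_m16n16k16_load_a_f16_row_stride ends up named
+// llvm.nvvm.wmma.m16n16k16.load.a.row.stride.f16; per NVVM_WMMA_LD_GALSTS it takes
+// a pointer plus an i32 stride and returns eight v2f16 values (the "a" fragment),
+// while the c.f16 loads return only four. At the CUDA level these are normally
+// reached through the nvcuda::wmma fragment API rather than called directly.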
+
+// WMMA.STORE.D
+class NVVM_WMMA_STD_GLSTS<string Geometry, string Layout,
+ string Type, LLVMType regty, int WithStride,
+ // This is only used to create a typed empty array we
+ // need to pass to !if below.
+                          list<LLVMType> Empty = []>
+ : Intrinsic<[],
+ !listconcat(
+ [llvm_anyptr_ty],
+ !if(!eq(Type,"f16"),
+ [regty, regty, regty, regty],
+ [regty, regty, regty, regty,
+ regty, regty, regty, regty]),
+ !if(WithStride, [llvm_i32_ty], Empty)),
+ [IntrWriteMem, IntrArgMemOnly, WriteOnly<0>, NoCapture<0>],
+ "llvm.nvvm.wmma."
+ # Geometry
+ # ".store.d"
+ # "." # Layout
+ # !if(WithStride, ".stride", "")
+ # "." # Type>;
+
+multiclass NVVM_WMMA_STD_GLT<string Geometry, string Layout,
+ string Type, LLVMType regty> {
+ def _stride: NVVM_WMMA_STD_GLSTS<Geometry, Layout, Type, regty, 1>;
+ def NAME: NVVM_WMMA_STD_GLSTS<Geometry, Layout, Type, regty, 0>;
+}
+
+multiclass NVVM_WMMA_STD_GT<string Geometry, string Type, LLVMType regty> {
+ defm _row: NVVM_WMMA_STD_GLT<Geometry, "row", Type, regty>;
+ defm _col: NVVM_WMMA_STD_GLT<Geometry, "col", Type, regty>;
+}
+multiclass NVVM_WMMA_STD_G<string Geometry> {
+ defm _d_f16: NVVM_WMMA_STD_GT<Geometry, "f16", llvm_v2f16_ty>;
+ defm _d_f32: NVVM_WMMA_STD_GT<Geometry, "f32", llvm_float_ty>;
+}
+
+multiclass NVVM_WMMA_STD {
+ defm _m32n8k16_store: NVVM_WMMA_STD_G<"m32n8k16">;
+ defm _m16n16k16_store: NVVM_WMMA_STD_G<"m16n16k16">;
+ defm _m8n32k16_store: NVVM_WMMA_STD_G<"m8n32k16">;
+}
+
+defm int_nvvm_wmma: NVVM_WMMA_STD;
+
+// WMMA.MMA
+class NVVM_WMMA_MMA_GABDCS<string Geometry,
+ string ALayout, string BLayout,
+ string DType, LLVMType d_regty,
+ string CType, LLVMType c_regty,
+ string Satfinite = "">
+ : Intrinsic<!if(!eq(DType,"f16"),
+ [d_regty, d_regty, d_regty, d_regty],
+ [d_regty, d_regty, d_regty, d_regty,
+ d_regty, d_regty, d_regty, d_regty]),
+ !listconcat(
+ [// A
+ llvm_v2f16_ty, llvm_v2f16_ty, llvm_v2f16_ty, llvm_v2f16_ty,
+ llvm_v2f16_ty, llvm_v2f16_ty, llvm_v2f16_ty, llvm_v2f16_ty,
+ // B
+ llvm_v2f16_ty, llvm_v2f16_ty, llvm_v2f16_ty, llvm_v2f16_ty,
+ llvm_v2f16_ty, llvm_v2f16_ty, llvm_v2f16_ty, llvm_v2f16_ty],
+ !if(!eq(CType,"f16"),
+ [c_regty, c_regty, c_regty, c_regty],
+ [c_regty, c_regty, c_regty, c_regty,
+ c_regty, c_regty, c_regty, c_regty])),
+ [IntrNoMem],
+ "llvm.nvvm.wmma."
+ # Geometry
+ # ".mma"
+ # "." # ALayout
+ # "." # BLayout
+ # "." # DType
+ # "." # CType
+ # Satfinite> {
+}
+
+multiclass NVVM_WMMA_MMA_GABDC<string Geometry, string ALayout, string BLayout,
+ string DType, LLVMType d_regty,
+ string CType, LLVMType c_regty> {
+ def NAME : NVVM_WMMA_MMA_GABDCS<Geometry, ALayout, BLayout,
+ DType, d_regty, CType, c_regty>;
+ def _satfinite: NVVM_WMMA_MMA_GABDCS<Geometry, ALayout, BLayout,
+ DType, d_regty, CType, c_regty,".satfinite">;
+}
+
+multiclass NVVM_WMMA_MMA_GABD<string Geometry, string ALayout, string BLayout,
+ string DType, LLVMType d_regty> {
+ defm _f16: NVVM_WMMA_MMA_GABDC<Geometry, ALayout, BLayout, DType, d_regty,
+ "f16", llvm_v2f16_ty>;
+ defm _f32: NVVM_WMMA_MMA_GABDC<Geometry, ALayout, BLayout, DType, d_regty,
+ "f32", llvm_float_ty>;
+}
+
+multiclass NVVM_WMMA_MMA_GAB<string Geometry, string ALayout, string BLayout> {
+ defm _f16: NVVM_WMMA_MMA_GABD<Geometry, ALayout, BLayout, "f16", llvm_v2f16_ty>;
+ defm _f32: NVVM_WMMA_MMA_GABD<Geometry, ALayout, BLayout, "f32", llvm_float_ty>;
+}
+
+multiclass NVVM_WMMA_MMA_GA<string Geometry, string ALayout> {
+ defm _col: NVVM_WMMA_MMA_GAB<Geometry, ALayout, "col">;
+ defm _row: NVVM_WMMA_MMA_GAB<Geometry, ALayout, "row">;
+}
+
+multiclass NVVM_WMMA_MMA_G<string Geometry> {
+ defm _col: NVVM_WMMA_MMA_GA<Geometry, "col">;
+ defm _row: NVVM_WMMA_MMA_GA<Geometry, "row">;
+}
+
+multiclass NVVM_WMMA_MMA {
+ defm _m32n8k16_mma : NVVM_WMMA_MMA_G<"m32n8k16">;
+ defm _m16n16k16_mma : NVVM_WMMA_MMA_G<"m16n16k16">;
+ defm _m8n32k16_mma : NVVM_WMMA_MMA_G<"m8n32k16">;
+}
+
+defm int_nvvm_wmma : NVVM_WMMA_MMA;
+
+} // let TargetPrefix = "nvvm"
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsPowerPC.td b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsPowerPC.td
new file mode 100644
index 000000000..3433aaa40
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsPowerPC.td
@@ -0,0 +1,1164 @@
+//===- IntrinsicsPowerPC.td - Defines PowerPC intrinsics ---*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines all of the PowerPC-specific intrinsics.
+//
+//===----------------------------------------------------------------------===//
+
+//===----------------------------------------------------------------------===//
+// Definitions for all PowerPC intrinsics.
+//
+
+// Non-altivec intrinsics.
+let TargetPrefix = "ppc" in { // All intrinsics start with "llvm.ppc.".
+  // dcba/dcbf/dcbi/dcbst/dcbt/dcbz/dcbzl (PPC970) instructions.
+ def int_ppc_dcba : Intrinsic<[], [llvm_ptr_ty], []>;
+ def int_ppc_dcbf : Intrinsic<[], [llvm_ptr_ty], []>;
+ def int_ppc_dcbi : Intrinsic<[], [llvm_ptr_ty], []>;
+ def int_ppc_dcbst : Intrinsic<[], [llvm_ptr_ty], []>;
+ def int_ppc_dcbt : Intrinsic<[], [llvm_ptr_ty],
+ [IntrArgMemOnly, NoCapture<0>]>;
+ def int_ppc_dcbtst: Intrinsic<[], [llvm_ptr_ty],
+ [IntrArgMemOnly, NoCapture<0>]>;
+ def int_ppc_dcbz : Intrinsic<[], [llvm_ptr_ty], []>;
+ def int_ppc_dcbzl : Intrinsic<[], [llvm_ptr_ty], []>;
+
+  // sync instruction (i.e. sync 0, a.k.a. hwsync)
+ def int_ppc_sync : Intrinsic<[], [], []>;
+ // lwsync is sync 1
+ def int_ppc_lwsync : Intrinsic<[], [], []>;
+
+  // Intrinsics used to generate ctr-based loops. These should only be
+  // generated by the PowerPC backend!
+  // The branch intrinsic is marked as NoDuplicate because loop rotation will
+  // attempt to duplicate it, forming loops where a block reachable from one
+  // instance of it can contain another.
+ def int_ppc_mtctr : Intrinsic<[], [llvm_anyint_ty], []>;
+ def int_ppc_is_decremented_ctr_nonzero :
+ Intrinsic<[llvm_i1_ty], [], [IntrNoDuplicate]>;
+
+ // Intrinsics for [double]word extended forms of divide instructions
+ def int_ppc_divwe : GCCBuiltin<"__builtin_divwe">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_divweu : GCCBuiltin<"__builtin_divweu">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_divde : GCCBuiltin<"__builtin_divde">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+ def int_ppc_divdeu : GCCBuiltin<"__builtin_divdeu">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+
+ // Bit permute doubleword
+ def int_ppc_bpermd : GCCBuiltin<"__builtin_bpermd">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+
+  def int_ppc_truncf128_round_to_odd
+      : GCCBuiltin<"__builtin_truncf128_round_to_odd">,
+        Intrinsic<[llvm_double_ty], [llvm_f128_ty], [IntrNoMem]>;
+  def int_ppc_sqrtf128_round_to_odd
+      : GCCBuiltin<"__builtin_sqrtf128_round_to_odd">,
+        Intrinsic<[llvm_f128_ty], [llvm_f128_ty], [IntrNoMem]>;
+  def int_ppc_addf128_round_to_odd
+      : GCCBuiltin<"__builtin_addf128_round_to_odd">,
+        Intrinsic<[llvm_f128_ty], [llvm_f128_ty, llvm_f128_ty], [IntrNoMem]>;
+  def int_ppc_subf128_round_to_odd
+      : GCCBuiltin<"__builtin_subf128_round_to_odd">,
+        Intrinsic<[llvm_f128_ty], [llvm_f128_ty, llvm_f128_ty], [IntrNoMem]>;
+  def int_ppc_mulf128_round_to_odd
+      : GCCBuiltin<"__builtin_mulf128_round_to_odd">,
+        Intrinsic<[llvm_f128_ty], [llvm_f128_ty, llvm_f128_ty], [IntrNoMem]>;
+  def int_ppc_divf128_round_to_odd
+      : GCCBuiltin<"__builtin_divf128_round_to_odd">,
+        Intrinsic<[llvm_f128_ty], [llvm_f128_ty, llvm_f128_ty], [IntrNoMem]>;
+  def int_ppc_fmaf128_round_to_odd
+      : GCCBuiltin<"__builtin_fmaf128_round_to_odd">,
+        Intrinsic<[llvm_f128_ty], [llvm_f128_ty, llvm_f128_ty, llvm_f128_ty], [IntrNoMem]>;
+
+}
+
+
+let TargetPrefix = "ppc" in { // All PPC intrinsics start with "llvm.ppc.".
+ /// PowerPC_Vec_Intrinsic - Base class for all altivec intrinsics.
+ class PowerPC_Vec_Intrinsic<string GCCIntSuffix, list<LLVMType> ret_types,
+ list<LLVMType> param_types,
+ list<IntrinsicProperty> properties>
+ : GCCBuiltin<!strconcat("__builtin_altivec_", GCCIntSuffix)>,
+ Intrinsic<ret_types, param_types, properties>;
+
+ /// PowerPC_VSX_Intrinsic - Base class for all VSX intrinsics.
+ class PowerPC_VSX_Intrinsic<string GCCIntSuffix, list<LLVMType> ret_types,
+ list<LLVMType> param_types,
+ list<IntrinsicProperty> properties>
+ : GCCBuiltin<!strconcat("__builtin_vsx_", GCCIntSuffix)>,
+ Intrinsic<ret_types, param_types, properties>;
+}
+
+//===----------------------------------------------------------------------===//
+// PowerPC Altivec Intrinsic Class Definitions.
+//
+
+/// PowerPC_Vec_FF_Intrinsic - A PowerPC intrinsic that takes one v4f32
+/// vector and returns one. These intrinsics have no side effects.
+class PowerPC_Vec_FF_Intrinsic<string GCCIntSuffix>
+ : PowerPC_Vec_Intrinsic<GCCIntSuffix,
+ [llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+
+/// PowerPC_Vec_FFF_Intrinsic - A PowerPC intrinsic that takes two v4f32
+/// vectors and returns one. These intrinsics have no side effects.
+class PowerPC_Vec_FFF_Intrinsic<string GCCIntSuffix>
+ : PowerPC_Vec_Intrinsic<GCCIntSuffix,
+ [llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty],
+ [IntrNoMem]>;
+
+/// PowerPC_Vec_BBB_Intrinsic - A PowerPC intrinsic that takes two v16i8
+/// vectors and returns one. These intrinsics have no side effects.
+class PowerPC_Vec_BBB_Intrinsic<string GCCIntSuffix>
+ : PowerPC_Vec_Intrinsic<GCCIntSuffix,
+ [llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+
+/// PowerPC_Vec_HHH_Intrinsic - A PowerPC intrinsic that takes two v8i16
+/// vectors and returns one. These intrinsics have no side effects.
+class PowerPC_Vec_HHH_Intrinsic<string GCCIntSuffix>
+ : PowerPC_Vec_Intrinsic<GCCIntSuffix,
+ [llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+
+/// PowerPC_Vec_WWW_Intrinsic - A PowerPC intrinsic that takes two v4i32
+/// vectors and returns one. These intrinsics have no side effects.
+class PowerPC_Vec_WWW_Intrinsic<string GCCIntSuffix>
+ : PowerPC_Vec_Intrinsic<GCCIntSuffix,
+ [llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+/// PowerPC_Vec_DDD_Intrinsic - A PowerPC intrinsic that takes two v2i64
+/// vectors and returns one. These intrinsics have no side effects.
+class PowerPC_Vec_DDD_Intrinsic<string GCCIntSuffix>
+ : PowerPC_Vec_Intrinsic<GCCIntSuffix,
+ [llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+/// PowerPC_Vec_QQQ_Intrinsic - A PowerPC intrinsic that takes two v1i128
+/// vectors and returns one. These intrinsics have no side effects.
+class PowerPC_Vec_QQQ_Intrinsic<string GCCIntSuffix>
+ : PowerPC_Vec_Intrinsic<GCCIntSuffix,
+ [llvm_v1i128_ty], [llvm_v1i128_ty, llvm_v1i128_ty],
+ [IntrNoMem]>;
+
+//===----------------------------------------------------------------------===//
+// PowerPC VSX Intrinsic Class Definitions.
+//
+
+/// PowerPC_VSX_Vec_DDD_Intrinsic - A PowerPC intrinsic that takes two v2f64
+/// vectors and returns one. These intrinsics have no side effects.
+class PowerPC_VSX_Vec_DDD_Intrinsic<string GCCIntSuffix>
+ : PowerPC_VSX_Intrinsic<GCCIntSuffix,
+ [llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty],
+ [IntrNoMem]>;
+
+/// PowerPC_VSX_Vec_FFF_Intrinsic - A PowerPC intrinsic that takes two v4f32
+/// vectors and returns one. These intrinsics have no side effects.
+class PowerPC_VSX_Vec_FFF_Intrinsic<string GCCIntSuffix>
+ : PowerPC_VSX_Intrinsic<GCCIntSuffix,
+ [llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty],
+ [IntrNoMem]>;
+
+/// PowerPC_VSX_Sca_DDD_Intrinsic - A PowerPC intrinsic that takes two f64
+/// scalars and returns one. These intrinsics have no side effects.
+class PowerPC_VSX_Sca_DDD_Intrinsic<string GCCIntSuffix>
+ : PowerPC_VSX_Intrinsic<GCCIntSuffix,
+ [llvm_double_ty], [llvm_double_ty, llvm_double_ty],
+ [IntrNoMem]>;
+
+//===----------------------------------------------------------------------===//
+// PowerPC Altivec Intrinsic Definitions.
+
+let TargetPrefix = "ppc" in { // All intrinsics start with "llvm.ppc.".
+ // Data Stream Control.
+ def int_ppc_altivec_dss : GCCBuiltin<"__builtin_altivec_dss">,
+ Intrinsic<[], [llvm_i32_ty], []>;
+ def int_ppc_altivec_dssall : GCCBuiltin<"__builtin_altivec_dssall">,
+ Intrinsic<[], [], []>;
+ def int_ppc_altivec_dst : GCCBuiltin<"__builtin_altivec_dst">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty],
+ []>;
+ def int_ppc_altivec_dstt : GCCBuiltin<"__builtin_altivec_dstt">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty],
+ []>;
+ def int_ppc_altivec_dstst : GCCBuiltin<"__builtin_altivec_dstst">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty],
+ []>;
+ def int_ppc_altivec_dststt : GCCBuiltin<"__builtin_altivec_dststt">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty],
+ []>;
+
+ // VSCR access.
+ def int_ppc_altivec_mfvscr : GCCBuiltin<"__builtin_altivec_mfvscr">,
+ Intrinsic<[llvm_v8i16_ty], [], [IntrReadMem]>;
+ def int_ppc_altivec_mtvscr : GCCBuiltin<"__builtin_altivec_mtvscr">,
+ Intrinsic<[], [llvm_v4i32_ty], []>;
+
+
+ // Loads. These don't map directly to GCC builtins because they represent the
+ // source address with a single pointer.
+ def int_ppc_altivec_lvx :
+ Intrinsic<[llvm_v4i32_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
+ def int_ppc_altivec_lvxl :
+ Intrinsic<[llvm_v4i32_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
+ def int_ppc_altivec_lvebx :
+ Intrinsic<[llvm_v16i8_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
+ def int_ppc_altivec_lvehx :
+ Intrinsic<[llvm_v8i16_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
+ def int_ppc_altivec_lvewx :
+ Intrinsic<[llvm_v4i32_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
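+
+  // A minimal IR-level sketch, assuming llvm_ptr_ty lowers to i8*: the lvx
+  // record above corresponds to a declaration like
+  //   declare <4 x i32> @llvm.ppc.altivec.lvx(i8*)
+  // i.e. the whole address is one pointer operand, whereas the GCC builtins
+  // split it into a separate offset and base.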
+
+  // Stores. These don't map directly to GCC builtins because they represent
+  // the destination address with a single pointer.
+ def int_ppc_altivec_stvx :
+ Intrinsic<[], [llvm_v4i32_ty, llvm_ptr_ty],
+ [IntrWriteMem, IntrArgMemOnly]>;
+ def int_ppc_altivec_stvxl :
+ Intrinsic<[], [llvm_v4i32_ty, llvm_ptr_ty],
+ [IntrWriteMem, IntrArgMemOnly]>;
+ def int_ppc_altivec_stvebx :
+ Intrinsic<[], [llvm_v16i8_ty, llvm_ptr_ty],
+ [IntrWriteMem, IntrArgMemOnly]>;
+ def int_ppc_altivec_stvehx :
+ Intrinsic<[], [llvm_v8i16_ty, llvm_ptr_ty],
+ [IntrWriteMem, IntrArgMemOnly]>;
+ def int_ppc_altivec_stvewx :
+ Intrinsic<[], [llvm_v4i32_ty, llvm_ptr_ty],
+ [IntrWriteMem, IntrArgMemOnly]>;
+
+ // Comparisons setting a vector.
+ def int_ppc_altivec_vcmpbfp : GCCBuiltin<"__builtin_altivec_vcmpbfp">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpeqfp : GCCBuiltin<"__builtin_altivec_vcmpeqfp">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgefp : GCCBuiltin<"__builtin_altivec_vcmpgefp">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtfp : GCCBuiltin<"__builtin_altivec_vcmpgtfp">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty],
+ [IntrNoMem]>;
+
+ def int_ppc_altivec_vcmpequd : GCCBuiltin<"__builtin_altivec_vcmpequd">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtsd : GCCBuiltin<"__builtin_altivec_vcmpgtsd">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtud : GCCBuiltin<"__builtin_altivec_vcmpgtud">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+ def int_ppc_altivec_vcmpequw : GCCBuiltin<"__builtin_altivec_vcmpequw">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtsw : GCCBuiltin<"__builtin_altivec_vcmpgtsw">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtuw : GCCBuiltin<"__builtin_altivec_vcmpgtuw">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpnew : GCCBuiltin<"__builtin_altivec_vcmpnew">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpnezw : GCCBuiltin<"__builtin_altivec_vcmpnezw">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+ def int_ppc_altivec_vcmpequh : GCCBuiltin<"__builtin_altivec_vcmpequh">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtsh : GCCBuiltin<"__builtin_altivec_vcmpgtsh">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtuh : GCCBuiltin<"__builtin_altivec_vcmpgtuh">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpneh : GCCBuiltin<"__builtin_altivec_vcmpneh">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpnezh : GCCBuiltin<"__builtin_altivec_vcmpnezh">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+
+ def int_ppc_altivec_vcmpequb : GCCBuiltin<"__builtin_altivec_vcmpequb">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtsb : GCCBuiltin<"__builtin_altivec_vcmpgtsb">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtub : GCCBuiltin<"__builtin_altivec_vcmpgtub">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpneb : GCCBuiltin<"__builtin_altivec_vcmpneb">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpnezb : GCCBuiltin<"__builtin_altivec_vcmpnezb">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+
+ // Predicate Comparisons. The first operand specifies interpretation of CR6.
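+  // A minimal sketch of the resulting IR shape (the exact meaning of the i32
+  // CR6 selector is target-defined):
+  //   declare i32 @llvm.ppc.altivec.vcmpeqfp.p(i32, <4 x float>, <4 x float>)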
+ def int_ppc_altivec_vcmpbfp_p : GCCBuiltin<"__builtin_altivec_vcmpbfp_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v4f32_ty,llvm_v4f32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpeqfp_p : GCCBuiltin<"__builtin_altivec_vcmpeqfp_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v4f32_ty,llvm_v4f32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgefp_p : GCCBuiltin<"__builtin_altivec_vcmpgefp_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v4f32_ty,llvm_v4f32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtfp_p : GCCBuiltin<"__builtin_altivec_vcmpgtfp_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v4f32_ty,llvm_v4f32_ty],
+ [IntrNoMem]>;
+
+ def int_ppc_altivec_vcmpequd_p : GCCBuiltin<"__builtin_altivec_vcmpequd_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v2i64_ty,llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtsd_p : GCCBuiltin<"__builtin_altivec_vcmpgtsd_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v2i64_ty,llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtud_p : GCCBuiltin<"__builtin_altivec_vcmpgtud_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v2i64_ty,llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+ def int_ppc_altivec_vcmpequw_p : GCCBuiltin<"__builtin_altivec_vcmpequw_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v4i32_ty,llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtsw_p : GCCBuiltin<"__builtin_altivec_vcmpgtsw_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v4i32_ty,llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtuw_p : GCCBuiltin<"__builtin_altivec_vcmpgtuw_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v4i32_ty,llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpnew_p : GCCBuiltin<"__builtin_altivec_vcmpnew_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v4i32_ty,llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpnezw_p : GCCBuiltin<"__builtin_altivec_vcmpnezw_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v4i32_ty,llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+ def int_ppc_altivec_vcmpequh_p : GCCBuiltin<"__builtin_altivec_vcmpequh_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v8i16_ty,llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtsh_p : GCCBuiltin<"__builtin_altivec_vcmpgtsh_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v8i16_ty,llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtuh_p : GCCBuiltin<"__builtin_altivec_vcmpgtuh_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v8i16_ty,llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpneh_p : GCCBuiltin<"__builtin_altivec_vcmpneh_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v8i16_ty,llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpnezh_p : GCCBuiltin<"__builtin_altivec_vcmpnezh_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v8i16_ty,llvm_v8i16_ty],
+ [IntrNoMem]>;
+
+ def int_ppc_altivec_vcmpequb_p : GCCBuiltin<"__builtin_altivec_vcmpequb_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v16i8_ty,llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtsb_p : GCCBuiltin<"__builtin_altivec_vcmpgtsb_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v16i8_ty,llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpgtub_p : GCCBuiltin<"__builtin_altivec_vcmpgtub_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v16i8_ty,llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpneb_p : GCCBuiltin<"__builtin_altivec_vcmpneb_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v16i8_ty,llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcmpnezb_p : GCCBuiltin<"__builtin_altivec_vcmpnezb_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v16i8_ty,llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vclzlsbb : GCCBuiltin<"__builtin_altivec_vclzlsbb">,
+ Intrinsic<[llvm_i32_ty],[llvm_v16i8_ty],[IntrNoMem]>;
+ def int_ppc_altivec_vctzlsbb : GCCBuiltin<"__builtin_altivec_vctzlsbb">,
+ Intrinsic<[llvm_i32_ty],[llvm_v16i8_ty],[IntrNoMem]>;
+ def int_ppc_altivec_vprtybw : GCCBuiltin<"__builtin_altivec_vprtybw">,
+ Intrinsic<[llvm_v4i32_ty],[llvm_v4i32_ty],[IntrNoMem]>;
+ def int_ppc_altivec_vprtybd : GCCBuiltin<"__builtin_altivec_vprtybd">,
+ Intrinsic<[llvm_v2i64_ty],[llvm_v2i64_ty],[IntrNoMem]>;
+ def int_ppc_altivec_vprtybq : GCCBuiltin<"__builtin_altivec_vprtybq">,
+ Intrinsic<[llvm_v1i128_ty],[llvm_v1i128_ty],[IntrNoMem]>;
+
+}
+
+// Vector average.
+def int_ppc_altivec_vavgsb : PowerPC_Vec_BBB_Intrinsic<"vavgsb">;
+def int_ppc_altivec_vavgsh : PowerPC_Vec_HHH_Intrinsic<"vavgsh">;
+def int_ppc_altivec_vavgsw : PowerPC_Vec_WWW_Intrinsic<"vavgsw">;
+def int_ppc_altivec_vavgub : PowerPC_Vec_BBB_Intrinsic<"vavgub">;
+def int_ppc_altivec_vavguh : PowerPC_Vec_HHH_Intrinsic<"vavguh">;
+def int_ppc_altivec_vavguw : PowerPC_Vec_WWW_Intrinsic<"vavguw">;
+
+// Vector maximum.
+def int_ppc_altivec_vmaxfp : PowerPC_Vec_FFF_Intrinsic<"vmaxfp">;
+def int_ppc_altivec_vmaxsb : PowerPC_Vec_BBB_Intrinsic<"vmaxsb">;
+def int_ppc_altivec_vmaxsh : PowerPC_Vec_HHH_Intrinsic<"vmaxsh">;
+def int_ppc_altivec_vmaxsw : PowerPC_Vec_WWW_Intrinsic<"vmaxsw">;
+def int_ppc_altivec_vmaxsd : PowerPC_Vec_DDD_Intrinsic<"vmaxsd">;
+def int_ppc_altivec_vmaxub : PowerPC_Vec_BBB_Intrinsic<"vmaxub">;
+def int_ppc_altivec_vmaxuh : PowerPC_Vec_HHH_Intrinsic<"vmaxuh">;
+def int_ppc_altivec_vmaxuw : PowerPC_Vec_WWW_Intrinsic<"vmaxuw">;
+def int_ppc_altivec_vmaxud : PowerPC_Vec_DDD_Intrinsic<"vmaxud">;
+
+// Vector minimum.
+def int_ppc_altivec_vminfp : PowerPC_Vec_FFF_Intrinsic<"vminfp">;
+def int_ppc_altivec_vminsb : PowerPC_Vec_BBB_Intrinsic<"vminsb">;
+def int_ppc_altivec_vminsh : PowerPC_Vec_HHH_Intrinsic<"vminsh">;
+def int_ppc_altivec_vminsw : PowerPC_Vec_WWW_Intrinsic<"vminsw">;
+def int_ppc_altivec_vminsd : PowerPC_Vec_DDD_Intrinsic<"vminsd">;
+def int_ppc_altivec_vminub : PowerPC_Vec_BBB_Intrinsic<"vminub">;
+def int_ppc_altivec_vminuh : PowerPC_Vec_HHH_Intrinsic<"vminuh">;
+def int_ppc_altivec_vminuw : PowerPC_Vec_WWW_Intrinsic<"vminuw">;
+def int_ppc_altivec_vminud : PowerPC_Vec_DDD_Intrinsic<"vminud">;
+
+// Saturating adds.
+def int_ppc_altivec_vaddubs : PowerPC_Vec_BBB_Intrinsic<"vaddubs">;
+def int_ppc_altivec_vaddsbs : PowerPC_Vec_BBB_Intrinsic<"vaddsbs">;
+def int_ppc_altivec_vadduhs : PowerPC_Vec_HHH_Intrinsic<"vadduhs">;
+def int_ppc_altivec_vaddshs : PowerPC_Vec_HHH_Intrinsic<"vaddshs">;
+def int_ppc_altivec_vadduws : PowerPC_Vec_WWW_Intrinsic<"vadduws">;
+def int_ppc_altivec_vaddsws : PowerPC_Vec_WWW_Intrinsic<"vaddsws">;
+def int_ppc_altivec_vaddcuw : PowerPC_Vec_WWW_Intrinsic<"vaddcuw">;
+def int_ppc_altivec_vaddcuq : PowerPC_Vec_QQQ_Intrinsic<"vaddcuq">;
+
+// Saturating subs.
+def int_ppc_altivec_vsububs : PowerPC_Vec_BBB_Intrinsic<"vsububs">;
+def int_ppc_altivec_vsubsbs : PowerPC_Vec_BBB_Intrinsic<"vsubsbs">;
+def int_ppc_altivec_vsubuhs : PowerPC_Vec_HHH_Intrinsic<"vsubuhs">;
+def int_ppc_altivec_vsubshs : PowerPC_Vec_HHH_Intrinsic<"vsubshs">;
+def int_ppc_altivec_vsubuws : PowerPC_Vec_WWW_Intrinsic<"vsubuws">;
+def int_ppc_altivec_vsubsws : PowerPC_Vec_WWW_Intrinsic<"vsubsws">;
+def int_ppc_altivec_vsubcuw : PowerPC_Vec_WWW_Intrinsic<"vsubcuw">;
+def int_ppc_altivec_vsubcuq : PowerPC_Vec_QQQ_Intrinsic<"vsubcuq">;
+
+let TargetPrefix = "ppc" in { // All PPC intrinsics start with "llvm.ppc.".
+ // Saturating multiply-adds.
+ def int_ppc_altivec_vmhaddshs : GCCBuiltin<"__builtin_altivec_vmhaddshs">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vmhraddshs : GCCBuiltin<"__builtin_altivec_vmhraddshs">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+
+ def int_ppc_altivec_vmaddfp : GCCBuiltin<"__builtin_altivec_vmaddfp">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vnmsubfp : GCCBuiltin<"__builtin_altivec_vnmsubfp">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+
+  // Vector Multiply Sum Instructions.
+ def int_ppc_altivec_vmsummbm : GCCBuiltin<"__builtin_altivec_vmsummbm">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v16i8_ty, llvm_v16i8_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vmsumshm : GCCBuiltin<"__builtin_altivec_vmsumshm">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vmsumshs : GCCBuiltin<"__builtin_altivec_vmsumshs">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vmsumubm : GCCBuiltin<"__builtin_altivec_vmsumubm">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v16i8_ty, llvm_v16i8_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vmsumuhm : GCCBuiltin<"__builtin_altivec_vmsumuhm">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vmsumuhs : GCCBuiltin<"__builtin_altivec_vmsumuhs">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+
+  // Vector Multiply Instructions.
+ def int_ppc_altivec_vmulesb : GCCBuiltin<"__builtin_altivec_vmulesb">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vmulesh : GCCBuiltin<"__builtin_altivec_vmulesh">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vmulesw : GCCBuiltin<"__builtin_altivec_vmulesw">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vmuleub : GCCBuiltin<"__builtin_altivec_vmuleub">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vmuleuh : GCCBuiltin<"__builtin_altivec_vmuleuh">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vmuleuw : GCCBuiltin<"__builtin_altivec_vmuleuw">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+ def int_ppc_altivec_vmulosb : GCCBuiltin<"__builtin_altivec_vmulosb">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vmulosh : GCCBuiltin<"__builtin_altivec_vmulosh">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vmulosw : GCCBuiltin<"__builtin_altivec_vmulosw">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vmuloub : GCCBuiltin<"__builtin_altivec_vmuloub">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vmulouh : GCCBuiltin<"__builtin_altivec_vmulouh">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vmulouw : GCCBuiltin<"__builtin_altivec_vmulouw">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+  // Vector Sum Instructions.
+ def int_ppc_altivec_vsumsws : GCCBuiltin<"__builtin_altivec_vsumsws">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vsum2sws : GCCBuiltin<"__builtin_altivec_vsum2sws">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vsum4sbs : GCCBuiltin<"__builtin_altivec_vsum4sbs">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v16i8_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vsum4shs : GCCBuiltin<"__builtin_altivec_vsum4shs">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vsum4ubs : GCCBuiltin<"__builtin_altivec_vsum4ubs">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v16i8_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+ // Other multiplies.
+ def int_ppc_altivec_vmladduhm : GCCBuiltin<"__builtin_altivec_vmladduhm">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+
+ // Packs.
+ def int_ppc_altivec_vpkpx : GCCBuiltin<"__builtin_altivec_vpkpx">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vpkshss : GCCBuiltin<"__builtin_altivec_vpkshss">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vpkshus : GCCBuiltin<"__builtin_altivec_vpkshus">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vpkswss : GCCBuiltin<"__builtin_altivec_vpkswss">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vpkswus : GCCBuiltin<"__builtin_altivec_vpkswus">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vpksdss : GCCBuiltin<"__builtin_altivec_vpksdss">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vpksdus : GCCBuiltin<"__builtin_altivec_vpksdus">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ // vpkuhum is lowered to a shuffle.
+ def int_ppc_altivec_vpkuhus : GCCBuiltin<"__builtin_altivec_vpkuhus">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ // vpkuwum is lowered to a shuffle.
+ def int_ppc_altivec_vpkuwus : GCCBuiltin<"__builtin_altivec_vpkuwus">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ // vpkudum is lowered to a shuffle.
+ def int_ppc_altivec_vpkudus : GCCBuiltin<"__builtin_altivec_vpkudus">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+ // Unpacks.
+ def int_ppc_altivec_vupkhpx : GCCBuiltin<"__builtin_altivec_vupkhpx">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vupkhsb : GCCBuiltin<"__builtin_altivec_vupkhsb">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vupkhsh : GCCBuiltin<"__builtin_altivec_vupkhsh">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vupkhsw : GCCBuiltin<"__builtin_altivec_vupkhsw">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vupklpx : GCCBuiltin<"__builtin_altivec_vupklpx">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vupklsb : GCCBuiltin<"__builtin_altivec_vupklsb">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vupklsh : GCCBuiltin<"__builtin_altivec_vupklsh">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vupklsw : GCCBuiltin<"__builtin_altivec_vupklsw">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+
+
+ // FP <-> integer conversion.
+ def int_ppc_altivec_vcfsx : GCCBuiltin<"__builtin_altivec_vcfsx">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vcfux : GCCBuiltin<"__builtin_altivec_vcfux">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vctsxs : GCCBuiltin<"__builtin_altivec_vctsxs">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vctuxs : GCCBuiltin<"__builtin_altivec_vctuxs">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_ppc_altivec_vrfim : GCCBuiltin<"__builtin_altivec_vrfim">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vrfin : GCCBuiltin<"__builtin_altivec_vrfin">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vrfip : GCCBuiltin<"__builtin_altivec_vrfip">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vrfiz : GCCBuiltin<"__builtin_altivec_vrfiz">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+
+ // Add Extended Quadword
+ def int_ppc_altivec_vaddeuqm : GCCBuiltin<"__builtin_altivec_vaddeuqm">,
+ Intrinsic<[llvm_v1i128_ty],
+ [llvm_v1i128_ty, llvm_v1i128_ty, llvm_v1i128_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vaddecuq : GCCBuiltin<"__builtin_altivec_vaddecuq">,
+ Intrinsic<[llvm_v1i128_ty],
+ [llvm_v1i128_ty, llvm_v1i128_ty, llvm_v1i128_ty],
+ [IntrNoMem]>;
+
+ // Sub Extended Quadword
+ def int_ppc_altivec_vsubeuqm : GCCBuiltin<"__builtin_altivec_vsubeuqm">,
+ Intrinsic<[llvm_v1i128_ty],
+ [llvm_v1i128_ty, llvm_v1i128_ty, llvm_v1i128_ty],
+ [IntrNoMem]>;
+ def int_ppc_altivec_vsubecuq : GCCBuiltin<"__builtin_altivec_vsubecuq">,
+ Intrinsic<[llvm_v1i128_ty],
+ [llvm_v1i128_ty, llvm_v1i128_ty, llvm_v1i128_ty],
+ [IntrNoMem]>;
+}
+
+def int_ppc_altivec_vsl : PowerPC_Vec_WWW_Intrinsic<"vsl">;
+def int_ppc_altivec_vslo : PowerPC_Vec_WWW_Intrinsic<"vslo">;
+
+def int_ppc_altivec_vslb : PowerPC_Vec_BBB_Intrinsic<"vslb">;
+def int_ppc_altivec_vslv : PowerPC_Vec_BBB_Intrinsic<"vslv">;
+def int_ppc_altivec_vsrv : PowerPC_Vec_BBB_Intrinsic<"vsrv">;
+def int_ppc_altivec_vslh : PowerPC_Vec_HHH_Intrinsic<"vslh">;
+def int_ppc_altivec_vslw : PowerPC_Vec_WWW_Intrinsic<"vslw">;
+
+// Right Shifts.
+def int_ppc_altivec_vsr : PowerPC_Vec_WWW_Intrinsic<"vsr">;
+def int_ppc_altivec_vsro : PowerPC_Vec_WWW_Intrinsic<"vsro">;
+
+def int_ppc_altivec_vsrb : PowerPC_Vec_BBB_Intrinsic<"vsrb">;
+def int_ppc_altivec_vsrh : PowerPC_Vec_HHH_Intrinsic<"vsrh">;
+def int_ppc_altivec_vsrw : PowerPC_Vec_WWW_Intrinsic<"vsrw">;
+def int_ppc_altivec_vsrab : PowerPC_Vec_BBB_Intrinsic<"vsrab">;
+def int_ppc_altivec_vsrah : PowerPC_Vec_HHH_Intrinsic<"vsrah">;
+def int_ppc_altivec_vsraw : PowerPC_Vec_WWW_Intrinsic<"vsraw">;
+
+// Rotates.
+def int_ppc_altivec_vrlb : PowerPC_Vec_BBB_Intrinsic<"vrlb">;
+def int_ppc_altivec_vrlh : PowerPC_Vec_HHH_Intrinsic<"vrlh">;
+def int_ppc_altivec_vrlw : PowerPC_Vec_WWW_Intrinsic<"vrlw">;
+def int_ppc_altivec_vrld : PowerPC_Vec_DDD_Intrinsic<"vrld">;
+
+let TargetPrefix = "ppc" in { // All PPC intrinsics start with "llvm.ppc.".
+ // Miscellaneous.
+ def int_ppc_altivec_lvsl :
+ Intrinsic<[llvm_v16i8_ty], [llvm_ptr_ty], [IntrNoMem]>;
+ def int_ppc_altivec_lvsr :
+ Intrinsic<[llvm_v16i8_ty], [llvm_ptr_ty], [IntrNoMem]>;
+
+ def int_ppc_altivec_vperm : GCCBuiltin<"__builtin_altivec_vperm_4si">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+ llvm_v4i32_ty, llvm_v16i8_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vsel : GCCBuiltin<"__builtin_altivec_vsel_4si">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+ llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vgbbd : GCCBuiltin<"__builtin_altivec_vgbbd">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+ def int_ppc_altivec_vbpermq : GCCBuiltin<"__builtin_altivec_vbpermq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+}
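+
+// A minimal IR-level sketch of the vperm record above: the permute control is
+// a byte vector, so a call looks roughly like
+//   %r = call <4 x i32> @llvm.ppc.altivec.vperm(<4 x i32> %a, <4 x i32> %b,
+//                                               <16 x i8> %mask)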
+
+def int_ppc_altivec_vexptefp : PowerPC_Vec_FF_Intrinsic<"vexptefp">;
+def int_ppc_altivec_vlogefp : PowerPC_Vec_FF_Intrinsic<"vlogefp">;
+def int_ppc_altivec_vrefp : PowerPC_Vec_FF_Intrinsic<"vrefp">;
+def int_ppc_altivec_vrsqrtefp : PowerPC_Vec_FF_Intrinsic<"vrsqrtefp">;
+
+// Power8 Intrinsics
+// Crypto
+let TargetPrefix = "ppc" in { // All PPC intrinsics start with "llvm.ppc.".
+ def int_ppc_altivec_crypto_vsbox :
+ GCCBuiltin<"__builtin_altivec_crypto_vsbox">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+ def int_ppc_altivec_crypto_vpermxor :
+ GCCBuiltin<"__builtin_altivec_crypto_vpermxor">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty,
+ llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+
+  def int_ppc_altivec_crypto_vshasigmad :
+            GCCBuiltin<"__builtin_altivec_crypto_vshasigmad">,
+            Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty,
+                       llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+  def int_ppc_altivec_crypto_vshasigmaw :
+            GCCBuiltin<"__builtin_altivec_crypto_vshasigmaw">,
+            Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+                       llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+}
+def int_ppc_altivec_crypto_vcipher :
+ PowerPC_Vec_DDD_Intrinsic<"crypto_vcipher">;
+def int_ppc_altivec_crypto_vcipherlast :
+ PowerPC_Vec_DDD_Intrinsic<"crypto_vcipherlast">;
+def int_ppc_altivec_crypto_vncipher :
+ PowerPC_Vec_DDD_Intrinsic<"crypto_vncipher">;
+def int_ppc_altivec_crypto_vncipherlast :
+ PowerPC_Vec_DDD_Intrinsic<"crypto_vncipherlast">;
+def int_ppc_altivec_crypto_vpmsumb :
+ PowerPC_Vec_BBB_Intrinsic<"crypto_vpmsumb">;
+def int_ppc_altivec_crypto_vpmsumh :
+ PowerPC_Vec_HHH_Intrinsic<"crypto_vpmsumh">;
+def int_ppc_altivec_crypto_vpmsumw :
+ PowerPC_Vec_WWW_Intrinsic<"crypto_vpmsumw">;
+def int_ppc_altivec_crypto_vpmsumd :
+ PowerPC_Vec_DDD_Intrinsic<"crypto_vpmsumd">;
+
+// Absolute Difference intrinsics
+def int_ppc_altivec_vabsdub : PowerPC_Vec_BBB_Intrinsic<"vabsdub">;
+def int_ppc_altivec_vabsduh : PowerPC_Vec_HHH_Intrinsic<"vabsduh">;
+def int_ppc_altivec_vabsduw : PowerPC_Vec_WWW_Intrinsic<"vabsduw">;
+
+// Vector rotates
+def int_ppc_altivec_vrlwnm :
+ PowerPC_Vec_Intrinsic<"vrlwnm", [llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_ppc_altivec_vrlwmi :
+ PowerPC_Vec_Intrinsic<"vrlwmi", [llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+def int_ppc_altivec_vrldnm :
+ PowerPC_Vec_Intrinsic<"vrldnm", [llvm_v2i64_ty],
+ [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+def int_ppc_altivec_vrldmi :
+ PowerPC_Vec_Intrinsic<"vrldmi", [llvm_v2i64_ty],
+ [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+//===----------------------------------------------------------------------===//
+// PowerPC VSX Intrinsic Definitions.
+
+let TargetPrefix = "ppc" in { // All intrinsics start with "llvm.ppc.".
+
+// Vector load.
+def int_ppc_vsx_lxvw4x :
+ Intrinsic<[llvm_v4i32_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
+def int_ppc_vsx_lxvd2x :
+ Intrinsic<[llvm_v2f64_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
+def int_ppc_vsx_lxvw4x_be :
+ Intrinsic<[llvm_v4i32_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
+def int_ppc_vsx_lxvd2x_be :
+ Intrinsic<[llvm_v2f64_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
+def int_ppc_vsx_lxvl :
+ Intrinsic<[llvm_v4i32_ty], [llvm_ptr_ty, llvm_i64_ty], [IntrReadMem,
+ IntrArgMemOnly]>;
+def int_ppc_vsx_lxvll :
+ Intrinsic<[llvm_v4i32_ty], [llvm_ptr_ty, llvm_i64_ty], [IntrReadMem,
+ IntrArgMemOnly]>;
+def int_ppc_vsx_stxvl :
+ Intrinsic<[], [llvm_v4i32_ty, llvm_ptr_ty, llvm_i64_ty],
+ [IntrWriteMem, IntrArgMemOnly]>;
+def int_ppc_vsx_stxvll :
+ Intrinsic<[], [llvm_v4i32_ty, llvm_ptr_ty, llvm_i64_ty],
+ [IntrWriteMem, IntrArgMemOnly]>;
+
+// Vector store.
+def int_ppc_vsx_stxvw4x : Intrinsic<[], [llvm_v4i32_ty, llvm_ptr_ty],
+ [IntrWriteMem, IntrArgMemOnly]>;
+def int_ppc_vsx_stxvd2x : Intrinsic<[], [llvm_v2f64_ty, llvm_ptr_ty],
+ [IntrWriteMem, IntrArgMemOnly]>;
+def int_ppc_vsx_stxvw4x_be : Intrinsic<[], [llvm_v4i32_ty, llvm_ptr_ty],
+ [IntrWriteMem, IntrArgMemOnly]>;
+def int_ppc_vsx_stxvd2x_be : Intrinsic<[], [llvm_v2f64_ty, llvm_ptr_ty],
+ [IntrWriteMem, IntrArgMemOnly]>;
+// Vector and scalar maximum.
+def int_ppc_vsx_xvmaxdp : PowerPC_VSX_Vec_DDD_Intrinsic<"xvmaxdp">;
+def int_ppc_vsx_xvmaxsp : PowerPC_VSX_Vec_FFF_Intrinsic<"xvmaxsp">;
+def int_ppc_vsx_xsmaxdp : PowerPC_VSX_Sca_DDD_Intrinsic<"xsmaxdp">;
+
+// Vector and scalar minimum.
+def int_ppc_vsx_xvmindp : PowerPC_VSX_Vec_DDD_Intrinsic<"xvmindp">;
+def int_ppc_vsx_xvminsp : PowerPC_VSX_Vec_FFF_Intrinsic<"xvminsp">;
+def int_ppc_vsx_xsmindp : PowerPC_VSX_Sca_DDD_Intrinsic<"xsmindp">;
+
+// Vector divide.
+def int_ppc_vsx_xvdivdp : PowerPC_VSX_Vec_DDD_Intrinsic<"xvdivdp">;
+def int_ppc_vsx_xvdivsp : PowerPC_VSX_Vec_FFF_Intrinsic<"xvdivsp">;
+
+// Vector round-to-infinity (ceil)
+def int_ppc_vsx_xvrspip :
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvrdpip :
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+// Vector reciprocal estimate
+def int_ppc_vsx_xvresp : GCCBuiltin<"__builtin_vsx_xvresp">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvredp : GCCBuiltin<"__builtin_vsx_xvredp">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+// Vector rsqrte
+def int_ppc_vsx_xvrsqrtesp : GCCBuiltin<"__builtin_vsx_xvrsqrtesp">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvrsqrtedp : GCCBuiltin<"__builtin_vsx_xvrsqrtedp">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+// Vector compare
+def int_ppc_vsx_xvcmpeqdp :
+ PowerPC_VSX_Intrinsic<"xvcmpeqdp", [llvm_v2i64_ty],
+ [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcmpeqdp_p : GCCBuiltin<"__builtin_vsx_xvcmpeqdp_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v2f64_ty,llvm_v2f64_ty],
+ [IntrNoMem]>;
+def int_ppc_vsx_xvcmpeqsp :
+ PowerPC_VSX_Intrinsic<"xvcmpeqsp", [llvm_v4i32_ty],
+ [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcmpeqsp_p : GCCBuiltin<"__builtin_vsx_xvcmpeqsp_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v4f32_ty,llvm_v4f32_ty],
+ [IntrNoMem]>;
+def int_ppc_vsx_xvcmpgedp :
+ PowerPC_VSX_Intrinsic<"xvcmpgedp", [llvm_v2i64_ty],
+ [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcmpgedp_p : GCCBuiltin<"__builtin_vsx_xvcmpgedp_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v2f64_ty,llvm_v2f64_ty],
+ [IntrNoMem]>;
+def int_ppc_vsx_xvcmpgesp :
+ PowerPC_VSX_Intrinsic<"xvcmpgesp", [llvm_v4i32_ty],
+ [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcmpgesp_p : GCCBuiltin<"__builtin_vsx_xvcmpgesp_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v4f32_ty,llvm_v4f32_ty],
+ [IntrNoMem]>;
+def int_ppc_vsx_xvcmpgtdp :
+ PowerPC_VSX_Intrinsic<"xvcmpgtdp", [llvm_v2i64_ty],
+ [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcmpgtdp_p : GCCBuiltin<"__builtin_vsx_xvcmpgtdp_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v2f64_ty,llvm_v2f64_ty],
+ [IntrNoMem]>;
+def int_ppc_vsx_xvcmpgtsp :
+ PowerPC_VSX_Intrinsic<"xvcmpgtsp", [llvm_v4i32_ty],
+ [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcmpgtsp_p : GCCBuiltin<"__builtin_vsx_xvcmpgtsp_p">,
+ Intrinsic<[llvm_i32_ty],[llvm_i32_ty,llvm_v4f32_ty,llvm_v4f32_ty],
+ [IntrNoMem]>;
+def int_ppc_vsx_xxleqv :
+ PowerPC_VSX_Intrinsic<"xxleqv", [llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xviexpdp :
+ PowerPC_VSX_Intrinsic<"xviexpdp",[llvm_v2f64_ty],
+ [llvm_v2i64_ty, llvm_v2i64_ty],[IntrNoMem]>;
+def int_ppc_vsx_xviexpsp :
+ PowerPC_VSX_Intrinsic<"xviexpsp",[llvm_v4f32_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty],[IntrNoMem]>;
+def int_ppc_vsx_xvcvdpsxws :
+ PowerPC_VSX_Intrinsic<"xvcvdpsxws", [llvm_v4i32_ty],
+ [llvm_v2f64_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcvdpuxws :
+ PowerPC_VSX_Intrinsic<"xvcvdpuxws", [llvm_v4i32_ty],
+ [llvm_v2f64_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcvsxwdp :
+ PowerPC_VSX_Intrinsic<"xvcvsxwdp", [llvm_v2f64_ty],
+ [llvm_v4i32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcvuxwdp :
+ PowerPC_VSX_Intrinsic<"xvcvuxwdp", [llvm_v2f64_ty],
+ [llvm_v4i32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcvspdp :
+ PowerPC_VSX_Intrinsic<"xvcvspdp", [llvm_v2f64_ty],
+ [llvm_v4f32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcvsxdsp :
+ PowerPC_VSX_Intrinsic<"xvcvsxdsp", [llvm_v4f32_ty],
+ [llvm_v2i64_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcvuxdsp :
+ PowerPC_VSX_Intrinsic<"xvcvuxdsp", [llvm_v4f32_ty],
+ [llvm_v2i64_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcvdpsp :
+ PowerPC_VSX_Intrinsic<"xvcvdpsp", [llvm_v4f32_ty],
+ [llvm_v2f64_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcvsphp :
+ PowerPC_VSX_Intrinsic<"xvcvsphp", [llvm_v4f32_ty],
+ [llvm_v4f32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvxexpdp :
+ PowerPC_VSX_Intrinsic<"xvxexpdp", [llvm_v2i64_ty],
+ [llvm_v2f64_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvxexpsp :
+ PowerPC_VSX_Intrinsic<"xvxexpsp", [llvm_v4i32_ty],
+ [llvm_v4f32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvxsigdp :
+ PowerPC_VSX_Intrinsic<"xvxsigdp", [llvm_v2i64_ty],
+ [llvm_v2f64_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvxsigsp :
+ PowerPC_VSX_Intrinsic<"xvxsigsp", [llvm_v4i32_ty],
+ [llvm_v4f32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvtstdcdp :
+ PowerPC_VSX_Intrinsic<"xvtstdcdp", [llvm_v2i64_ty],
+ [llvm_v2f64_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvtstdcsp :
+ PowerPC_VSX_Intrinsic<"xvtstdcsp", [llvm_v4i32_ty],
+ [llvm_v4f32_ty,llvm_i32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcvhpsp :
+ PowerPC_VSX_Intrinsic<"xvcvhpsp", [llvm_v4f32_ty],
+ [llvm_v8i16_ty],[IntrNoMem]>;
+def int_ppc_vsx_xxextractuw :
+ PowerPC_VSX_Intrinsic<"xxextractuw",[llvm_v2i64_ty],
+ [llvm_v2i64_ty,llvm_i32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xxinsertw :
+ PowerPC_VSX_Intrinsic<"xxinsertw",[llvm_v4i32_ty],
+ [llvm_v4i32_ty,llvm_v2i64_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// PowerPC QPX Intrinsics.
+//
+
+let TargetPrefix = "ppc" in { // All PPC intrinsics start with "llvm.ppc.".
+ /// PowerPC_QPX_Intrinsic - Base class for all QPX intrinsics.
+ class PowerPC_QPX_Intrinsic<string GCCIntSuffix, list<LLVMType> ret_types,
+ list<LLVMType> param_types,
+ list<IntrinsicProperty> properties>
+ : GCCBuiltin<!strconcat("__builtin_qpx_", GCCIntSuffix)>,
+ Intrinsic<ret_types, param_types, properties>;
+}
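+
+// A minimal usage sketch: a later record such as
+//   def int_ppc_qpx_qvfadd : PowerPC_QPX_FFF_Intrinsic<"qvfadd">;
+// yields the intrinsic llvm.ppc.qpx.qvfadd, matched to __builtin_qpx_qvfadd.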
+
+//===----------------------------------------------------------------------===//
+// PowerPC QPX Intrinsic Class Definitions.
+//
+
+/// PowerPC_QPX_FF_Intrinsic - A PowerPC intrinsic that takes one v4f64
+/// vector and returns one. These intrinsics have no side effects.
+class PowerPC_QPX_FF_Intrinsic<string GCCIntSuffix>
+ : PowerPC_QPX_Intrinsic<GCCIntSuffix,
+ [llvm_v4f64_ty], [llvm_v4f64_ty], [IntrNoMem]>;
+
+/// PowerPC_QPX_FFF_Intrinsic - A PowerPC intrinsic that takes two v4f64
+/// vectors and returns one. These intrinsics have no side effects.
+class PowerPC_QPX_FFF_Intrinsic<string GCCIntSuffix>
+ : PowerPC_QPX_Intrinsic<GCCIntSuffix,
+ [llvm_v4f64_ty], [llvm_v4f64_ty, llvm_v4f64_ty],
+ [IntrNoMem]>;
+
+/// PowerPC_QPX_FFFF_Intrinsic - A PowerPC intrinsic that takes three v4f64
+/// vectors and returns one. These intrinsics have no side effects.
+class PowerPC_QPX_FFFF_Intrinsic<string GCCIntSuffix>
+ : PowerPC_QPX_Intrinsic<GCCIntSuffix,
+ [llvm_v4f64_ty],
+ [llvm_v4f64_ty, llvm_v4f64_ty, llvm_v4f64_ty],
+ [IntrNoMem]>;
+
+/// PowerPC_QPX_Load_Intrinsic - A PowerPC intrinsic that takes a pointer
+/// and returns a v4f64.
+class PowerPC_QPX_Load_Intrinsic<string GCCIntSuffix>
+ : PowerPC_QPX_Intrinsic<GCCIntSuffix,
+ [llvm_v4f64_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
+
+/// PowerPC_QPX_LoadPerm_Intrinsic - A PowerPC intrinsic that takes a pointer
+/// and returns a v4f64 permutation.
+class PowerPC_QPX_LoadPerm_Intrinsic<string GCCIntSuffix>
+ : PowerPC_QPX_Intrinsic<GCCIntSuffix,
+ [llvm_v4f64_ty], [llvm_ptr_ty], [IntrNoMem]>;
+
+/// PowerPC_QPX_Store_Intrinsic - A PowerPC intrinsic that takes a pointer
+/// and stores a v4f64.
+class PowerPC_QPX_Store_Intrinsic<string GCCIntSuffix>
+ : PowerPC_QPX_Intrinsic<GCCIntSuffix,
+ [], [llvm_v4f64_ty, llvm_ptr_ty],
+ [IntrWriteMem, IntrArgMemOnly]>;
+
+//===----------------------------------------------------------------------===//
+// PowerPC QPX Intrinsic Definitions.
+
+let TargetPrefix = "ppc" in { // All intrinsics start with "llvm.ppc.".
+ // Add Instructions
+ def int_ppc_qpx_qvfadd : PowerPC_QPX_FFF_Intrinsic<"qvfadd">;
+ def int_ppc_qpx_qvfadds : PowerPC_QPX_FFF_Intrinsic<"qvfadds">;
+ def int_ppc_qpx_qvfsub : PowerPC_QPX_FFF_Intrinsic<"qvfsub">;
+ def int_ppc_qpx_qvfsubs : PowerPC_QPX_FFF_Intrinsic<"qvfsubs">;
+
+ // Estimate Instructions
+ def int_ppc_qpx_qvfre : PowerPC_QPX_FF_Intrinsic<"qvfre">;
+ def int_ppc_qpx_qvfres : PowerPC_QPX_FF_Intrinsic<"qvfres">;
+ def int_ppc_qpx_qvfrsqrte : PowerPC_QPX_FF_Intrinsic<"qvfrsqrte">;
+ def int_ppc_qpx_qvfrsqrtes : PowerPC_QPX_FF_Intrinsic<"qvfrsqrtes">;
+
+ // Multiply Instructions
+ def int_ppc_qpx_qvfmul : PowerPC_QPX_FFF_Intrinsic<"qvfmul">;
+ def int_ppc_qpx_qvfmuls : PowerPC_QPX_FFF_Intrinsic<"qvfmuls">;
+ def int_ppc_qpx_qvfxmul : PowerPC_QPX_FFF_Intrinsic<"qvfxmul">;
+ def int_ppc_qpx_qvfxmuls : PowerPC_QPX_FFF_Intrinsic<"qvfxmuls">;
+
+ // Multiply-add instructions
+ def int_ppc_qpx_qvfmadd : PowerPC_QPX_FFFF_Intrinsic<"qvfmadd">;
+ def int_ppc_qpx_qvfmadds : PowerPC_QPX_FFFF_Intrinsic<"qvfmadds">;
+ def int_ppc_qpx_qvfnmadd : PowerPC_QPX_FFFF_Intrinsic<"qvfnmadd">;
+ def int_ppc_qpx_qvfnmadds : PowerPC_QPX_FFFF_Intrinsic<"qvfnmadds">;
+ def int_ppc_qpx_qvfmsub : PowerPC_QPX_FFFF_Intrinsic<"qvfmsub">;
+ def int_ppc_qpx_qvfmsubs : PowerPC_QPX_FFFF_Intrinsic<"qvfmsubs">;
+ def int_ppc_qpx_qvfnmsub : PowerPC_QPX_FFFF_Intrinsic<"qvfnmsub">;
+ def int_ppc_qpx_qvfnmsubs : PowerPC_QPX_FFFF_Intrinsic<"qvfnmsubs">;
+ def int_ppc_qpx_qvfxmadd : PowerPC_QPX_FFFF_Intrinsic<"qvfxmadd">;
+ def int_ppc_qpx_qvfxmadds : PowerPC_QPX_FFFF_Intrinsic<"qvfxmadds">;
+ def int_ppc_qpx_qvfxxnpmadd : PowerPC_QPX_FFFF_Intrinsic<"qvfxxnpmadd">;
+ def int_ppc_qpx_qvfxxnpmadds : PowerPC_QPX_FFFF_Intrinsic<"qvfxxnpmadds">;
+ def int_ppc_qpx_qvfxxcpnmadd : PowerPC_QPX_FFFF_Intrinsic<"qvfxxcpnmadd">;
+ def int_ppc_qpx_qvfxxcpnmadds : PowerPC_QPX_FFFF_Intrinsic<"qvfxxcpnmadds">;
+ def int_ppc_qpx_qvfxxmadd : PowerPC_QPX_FFFF_Intrinsic<"qvfxxmadd">;
+ def int_ppc_qpx_qvfxxmadds : PowerPC_QPX_FFFF_Intrinsic<"qvfxxmadds">;
+
+ // Select Instruction
+ def int_ppc_qpx_qvfsel : PowerPC_QPX_FFFF_Intrinsic<"qvfsel">;
+
+ // Permute Instruction
+ def int_ppc_qpx_qvfperm : PowerPC_QPX_FFFF_Intrinsic<"qvfperm">;
+
+ // Convert and Round Instructions
+ def int_ppc_qpx_qvfctid : PowerPC_QPX_FF_Intrinsic<"qvfctid">;
+ def int_ppc_qpx_qvfctidu : PowerPC_QPX_FF_Intrinsic<"qvfctidu">;
+ def int_ppc_qpx_qvfctidz : PowerPC_QPX_FF_Intrinsic<"qvfctidz">;
+ def int_ppc_qpx_qvfctiduz : PowerPC_QPX_FF_Intrinsic<"qvfctiduz">;
+ def int_ppc_qpx_qvfctiw : PowerPC_QPX_FF_Intrinsic<"qvfctiw">;
+ def int_ppc_qpx_qvfctiwu : PowerPC_QPX_FF_Intrinsic<"qvfctiwu">;
+ def int_ppc_qpx_qvfctiwz : PowerPC_QPX_FF_Intrinsic<"qvfctiwz">;
+ def int_ppc_qpx_qvfctiwuz : PowerPC_QPX_FF_Intrinsic<"qvfctiwuz">;
+ def int_ppc_qpx_qvfcfid : PowerPC_QPX_FF_Intrinsic<"qvfcfid">;
+ def int_ppc_qpx_qvfcfidu : PowerPC_QPX_FF_Intrinsic<"qvfcfidu">;
+ def int_ppc_qpx_qvfcfids : PowerPC_QPX_FF_Intrinsic<"qvfcfids">;
+ def int_ppc_qpx_qvfcfidus : PowerPC_QPX_FF_Intrinsic<"qvfcfidus">;
+ def int_ppc_qpx_qvfrsp : PowerPC_QPX_FF_Intrinsic<"qvfrsp">;
+ def int_ppc_qpx_qvfriz : PowerPC_QPX_FF_Intrinsic<"qvfriz">;
+ def int_ppc_qpx_qvfrin : PowerPC_QPX_FF_Intrinsic<"qvfrin">;
+ def int_ppc_qpx_qvfrip : PowerPC_QPX_FF_Intrinsic<"qvfrip">;
+ def int_ppc_qpx_qvfrim : PowerPC_QPX_FF_Intrinsic<"qvfrim">;
+
+ // Move Instructions
+ def int_ppc_qpx_qvfneg : PowerPC_QPX_FF_Intrinsic<"qvfneg">;
+ def int_ppc_qpx_qvfabs : PowerPC_QPX_FF_Intrinsic<"qvfabs">;
+ def int_ppc_qpx_qvfnabs : PowerPC_QPX_FF_Intrinsic<"qvfnabs">;
+ def int_ppc_qpx_qvfcpsgn : PowerPC_QPX_FFF_Intrinsic<"qvfcpsgn">;
+
+ // Compare Instructions
+ def int_ppc_qpx_qvftstnan : PowerPC_QPX_FFF_Intrinsic<"qvftstnan">;
+ def int_ppc_qpx_qvfcmplt : PowerPC_QPX_FFF_Intrinsic<"qvfcmplt">;
+ def int_ppc_qpx_qvfcmpgt : PowerPC_QPX_FFF_Intrinsic<"qvfcmpgt">;
+ def int_ppc_qpx_qvfcmpeq : PowerPC_QPX_FFF_Intrinsic<"qvfcmpeq">;
+
+ // Load instructions
+ def int_ppc_qpx_qvlfd : PowerPC_QPX_Load_Intrinsic<"qvlfd">;
+ def int_ppc_qpx_qvlfda : PowerPC_QPX_Load_Intrinsic<"qvlfda">;
+ def int_ppc_qpx_qvlfs : PowerPC_QPX_Load_Intrinsic<"qvlfs">;
+ def int_ppc_qpx_qvlfsa : PowerPC_QPX_Load_Intrinsic<"qvlfsa">;
+
+ def int_ppc_qpx_qvlfcda : PowerPC_QPX_Load_Intrinsic<"qvlfcda">;
+ def int_ppc_qpx_qvlfcd : PowerPC_QPX_Load_Intrinsic<"qvlfcd">;
+ def int_ppc_qpx_qvlfcsa : PowerPC_QPX_Load_Intrinsic<"qvlfcsa">;
+ def int_ppc_qpx_qvlfcs : PowerPC_QPX_Load_Intrinsic<"qvlfcs">;
+ def int_ppc_qpx_qvlfiwaa : PowerPC_QPX_Load_Intrinsic<"qvlfiwaa">;
+ def int_ppc_qpx_qvlfiwa : PowerPC_QPX_Load_Intrinsic<"qvlfiwa">;
+ def int_ppc_qpx_qvlfiwza : PowerPC_QPX_Load_Intrinsic<"qvlfiwza">;
+ def int_ppc_qpx_qvlfiwz : PowerPC_QPX_Load_Intrinsic<"qvlfiwz">;
+
+ def int_ppc_qpx_qvlpcld : PowerPC_QPX_LoadPerm_Intrinsic<"qvlpcld">;
+ def int_ppc_qpx_qvlpcls : PowerPC_QPX_LoadPerm_Intrinsic<"qvlpcls">;
+ def int_ppc_qpx_qvlpcrd : PowerPC_QPX_LoadPerm_Intrinsic<"qvlpcrd">;
+ def int_ppc_qpx_qvlpcrs : PowerPC_QPX_LoadPerm_Intrinsic<"qvlpcrs">;
+
+ // Store instructions
+ def int_ppc_qpx_qvstfd : PowerPC_QPX_Store_Intrinsic<"qvstfd">;
+ def int_ppc_qpx_qvstfda : PowerPC_QPX_Store_Intrinsic<"qvstfda">;
+ def int_ppc_qpx_qvstfs : PowerPC_QPX_Store_Intrinsic<"qvstfs">;
+ def int_ppc_qpx_qvstfsa : PowerPC_QPX_Store_Intrinsic<"qvstfsa">;
+
+ def int_ppc_qpx_qvstfcda : PowerPC_QPX_Store_Intrinsic<"qvstfcda">;
+ def int_ppc_qpx_qvstfcd : PowerPC_QPX_Store_Intrinsic<"qvstfcd">;
+ def int_ppc_qpx_qvstfcsa : PowerPC_QPX_Store_Intrinsic<"qvstfcsa">;
+ def int_ppc_qpx_qvstfcs : PowerPC_QPX_Store_Intrinsic<"qvstfcs">;
+ def int_ppc_qpx_qvstfiwa : PowerPC_QPX_Store_Intrinsic<"qvstfiwa">;
+ def int_ppc_qpx_qvstfiw : PowerPC_QPX_Store_Intrinsic<"qvstfiw">;
+
+ // Logical and permutation formation
+ def int_ppc_qpx_qvflogical : PowerPC_QPX_Intrinsic<"qvflogical",
+ [llvm_v4f64_ty],
+ [llvm_v4f64_ty, llvm_v4f64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_ppc_qpx_qvgpci : PowerPC_QPX_Intrinsic<"qvgpci",
+ [llvm_v4f64_ty], [llvm_i32_ty], [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// PowerPC HTM Intrinsic Definitions.
+
+let TargetPrefix = "ppc" in { // All intrinsics start with "llvm.ppc.".
+
+def int_ppc_tbegin : GCCBuiltin<"__builtin_tbegin">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], []>;
+def int_ppc_tend : GCCBuiltin<"__builtin_tend">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], []>;
+
+def int_ppc_tabort : GCCBuiltin<"__builtin_tabort">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], []>;
+def int_ppc_tabortwc : GCCBuiltin<"__builtin_tabortwc">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], []>;
+def int_ppc_tabortwci : GCCBuiltin<"__builtin_tabortwci">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], []>;
+def int_ppc_tabortdc : GCCBuiltin<"__builtin_tabortdc">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], []>;
+def int_ppc_tabortdci : GCCBuiltin<"__builtin_tabortdci">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], []>;
+
+def int_ppc_tcheck : GCCBuiltin<"__builtin_tcheck">,
+ Intrinsic<[llvm_i32_ty], [], []>;
+def int_ppc_treclaim : GCCBuiltin<"__builtin_treclaim">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], []>;
+def int_ppc_trechkpt : GCCBuiltin<"__builtin_trechkpt">,
+ Intrinsic<[llvm_i32_ty], [], []>;
+def int_ppc_tsr : GCCBuiltin<"__builtin_tsr">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], []>;
+
+def int_ppc_get_texasr : GCCBuiltin<"__builtin_get_texasr">,
+ Intrinsic<[llvm_i64_ty], [], []>;
+def int_ppc_get_texasru : GCCBuiltin<"__builtin_get_texasru">,
+ Intrinsic<[llvm_i64_ty], [], []>;
+def int_ppc_get_tfhar : GCCBuiltin<"__builtin_get_tfhar">,
+ Intrinsic<[llvm_i64_ty], [], []>;
+def int_ppc_get_tfiar : GCCBuiltin<"__builtin_get_tfiar">,
+ Intrinsic<[llvm_i64_ty], [], []>;
+
+def int_ppc_set_texasr : GCCBuiltin<"__builtin_set_texasr">,
+ Intrinsic<[], [llvm_i64_ty], []>;
+def int_ppc_set_texasru : GCCBuiltin<"__builtin_set_texasru">,
+ Intrinsic<[], [llvm_i64_ty], []>;
+def int_ppc_set_tfhar : GCCBuiltin<"__builtin_set_tfhar">,
+ Intrinsic<[], [llvm_i64_ty], []>;
+def int_ppc_set_tfiar : GCCBuiltin<"__builtin_set_tfiar">,
+ Intrinsic<[], [llvm_i64_ty], []>;
+
+// Extended mnemonics
+def int_ppc_tendall : GCCBuiltin<"__builtin_tendall">,
+ Intrinsic<[llvm_i32_ty], [], []>;
+def int_ppc_tresume : GCCBuiltin<"__builtin_tresume">,
+ Intrinsic<[llvm_i32_ty], [], []>;
+def int_ppc_tsuspend : GCCBuiltin<"__builtin_tsuspend">,
+ Intrinsic<[llvm_i32_ty], [], []>;
+
+def int_ppc_ttest : GCCBuiltin<"__builtin_ttest">,
+ Intrinsic<[llvm_i64_ty], [], []>;
+
+def int_ppc_cfence : Intrinsic<[], [llvm_anyint_ty], []>;
+}
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsSystemZ.td b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsSystemZ.td
new file mode 100644
index 000000000..caa2ec209
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsSystemZ.td
@@ -0,0 +1,431 @@
+//===- IntrinsicsSystemZ.td - Defines SystemZ intrinsics ---*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines all of the SystemZ-specific intrinsics.
+//
+//===----------------------------------------------------------------------===//
+
+class SystemZUnaryConv<string name, LLVMType result, LLVMType arg>
+ : GCCBuiltin<"__builtin_s390_" ## name>,
+ Intrinsic<[result], [arg], [IntrNoMem]>;
+
+class SystemZUnary<string name, LLVMType type>
+ : SystemZUnaryConv<name, type, type>;
+
+class SystemZUnaryConvCC<LLVMType result, LLVMType arg>
+ : Intrinsic<[result, llvm_i32_ty], [arg], [IntrNoMem]>;
+
+class SystemZUnaryCC<LLVMType type>
+ : SystemZUnaryConvCC<type, type>;
+
+class SystemZBinaryConv<string name, LLVMType result, LLVMType arg>
+ : GCCBuiltin<"__builtin_s390_" ## name>,
+ Intrinsic<[result], [arg, arg], [IntrNoMem]>;
+
+class SystemZBinary<string name, LLVMType type>
+ : SystemZBinaryConv<name, type, type>;
+
+class SystemZBinaryInt<string name, LLVMType type>
+ : GCCBuiltin<"__builtin_s390_" ## name>,
+ Intrinsic<[type], [type, llvm_i32_ty], [IntrNoMem]>;
+
+class SystemZBinaryConvCC<LLVMType result, LLVMType arg>
+ : Intrinsic<[result, llvm_i32_ty], [arg, arg], [IntrNoMem]>;
+
+class SystemZBinaryConvIntCC<LLVMType result, LLVMType arg>
+ : Intrinsic<[result, llvm_i32_ty], [arg, llvm_i32_ty], [IntrNoMem]>;
+
+class SystemZBinaryCC<LLVMType type>
+ : SystemZBinaryConvCC<type, type>;
+
+class SystemZTernaryConv<string name, LLVMType result, LLVMType arg>
+ : GCCBuiltin<"__builtin_s390_" ## name>,
+ Intrinsic<[result], [arg, arg, result], [IntrNoMem]>;
+
+class SystemZTernary<string name, LLVMType type>
+ : SystemZTernaryConv<name, type, type>;
+
+class SystemZTernaryInt<string name, LLVMType type>
+ : GCCBuiltin<"__builtin_s390_" ## name>,
+ Intrinsic<[type], [type, type, llvm_i32_ty], [IntrNoMem]>;
+
+class SystemZTernaryIntCC<LLVMType type>
+ : Intrinsic<[type, llvm_i32_ty], [type, type, llvm_i32_ty], [IntrNoMem]>;
+
+class SystemZQuaternaryInt<string name, LLVMType type>
+ : GCCBuiltin<"__builtin_s390_" ## name>,
+ Intrinsic<[type], [type, type, type, llvm_i32_ty], [IntrNoMem]>;
+
+class SystemZQuaternaryIntCC<LLVMType type>
+ : Intrinsic<[type, llvm_i32_ty], [type, type, type, llvm_i32_ty],
+ [IntrNoMem]>;
+
+multiclass SystemZUnaryExtBHF<string name> {
+ def b : SystemZUnaryConv<name##"b", llvm_v8i16_ty, llvm_v16i8_ty>;
+ def h : SystemZUnaryConv<name##"h", llvm_v4i32_ty, llvm_v8i16_ty>;
+ def f : SystemZUnaryConv<name##"f", llvm_v2i64_ty, llvm_v4i32_ty>;
+}
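+
+// A minimal expansion sketch, assuming standard defm name concatenation:
+//   defm int_s390_vuph : SystemZUnaryExtBHF<"vuph">;
+// (which appears further below) creates int_s390_vuphb, int_s390_vuphh and
+// int_s390_vuphf, i.e. the intrinsics llvm.s390.vuph{b,h,f} tied to the
+// builtins __builtin_s390_vuph{b,h,f}.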
+
+multiclass SystemZUnaryExtBHWF<string name> {
+ def b : SystemZUnaryConv<name##"b", llvm_v8i16_ty, llvm_v16i8_ty>;
+ def hw : SystemZUnaryConv<name##"hw", llvm_v4i32_ty, llvm_v8i16_ty>;
+ def f : SystemZUnaryConv<name##"f", llvm_v2i64_ty, llvm_v4i32_ty>;
+}
+
+multiclass SystemZUnaryBHF<string name> {
+ def b : SystemZUnary<name##"b", llvm_v16i8_ty>;
+ def h : SystemZUnary<name##"h", llvm_v8i16_ty>;
+ def f : SystemZUnary<name##"f", llvm_v4i32_ty>;
+}
+
+multiclass SystemZUnaryBHFG<string name> : SystemZUnaryBHF<name> {
+ def g : SystemZUnary<name##"g", llvm_v2i64_ty>;
+}
+
+multiclass SystemZUnaryCCBHF {
+ def bs : SystemZUnaryCC<llvm_v16i8_ty>;
+ def hs : SystemZUnaryCC<llvm_v8i16_ty>;
+ def fs : SystemZUnaryCC<llvm_v4i32_ty>;
+}
+
+multiclass SystemZBinaryTruncHFG<string name> {
+ def h : SystemZBinaryConv<name##"h", llvm_v16i8_ty, llvm_v8i16_ty>;
+ def f : SystemZBinaryConv<name##"f", llvm_v8i16_ty, llvm_v4i32_ty>;
+ def g : SystemZBinaryConv<name##"g", llvm_v4i32_ty, llvm_v2i64_ty>;
+}
+
+multiclass SystemZBinaryTruncCCHFG {
+ def hs : SystemZBinaryConvCC<llvm_v16i8_ty, llvm_v8i16_ty>;
+ def fs : SystemZBinaryConvCC<llvm_v8i16_ty, llvm_v4i32_ty>;
+ def gs : SystemZBinaryConvCC<llvm_v4i32_ty, llvm_v2i64_ty>;
+}
+
+multiclass SystemZBinaryExtBHF<string name> {
+ def b : SystemZBinaryConv<name##"b", llvm_v8i16_ty, llvm_v16i8_ty>;
+ def h : SystemZBinaryConv<name##"h", llvm_v4i32_ty, llvm_v8i16_ty>;
+ def f : SystemZBinaryConv<name##"f", llvm_v2i64_ty, llvm_v4i32_ty>;
+}
+
+multiclass SystemZBinaryExtBHFG<string name> : SystemZBinaryExtBHF<name> {
+ def g : SystemZBinaryConv<name##"g", llvm_v16i8_ty, llvm_v2i64_ty>;
+}
+
+multiclass SystemZBinaryBHF<string name> {
+ def b : SystemZBinary<name##"b", llvm_v16i8_ty>;
+ def h : SystemZBinary<name##"h", llvm_v8i16_ty>;
+ def f : SystemZBinary<name##"f", llvm_v4i32_ty>;
+}
+
+multiclass SystemZBinaryBHFG<string name> : SystemZBinaryBHF<name> {
+ def g : SystemZBinary<name##"g", llvm_v2i64_ty>;
+}
+
+multiclass SystemZBinaryIntBHFG<string name> {
+ def b : SystemZBinaryInt<name##"b", llvm_v16i8_ty>;
+ def h : SystemZBinaryInt<name##"h", llvm_v8i16_ty>;
+ def f : SystemZBinaryInt<name##"f", llvm_v4i32_ty>;
+ def g : SystemZBinaryInt<name##"g", llvm_v2i64_ty>;
+}
+
+multiclass SystemZBinaryCCBHF {
+ def bs : SystemZBinaryCC<llvm_v16i8_ty>;
+ def hs : SystemZBinaryCC<llvm_v8i16_ty>;
+ def fs : SystemZBinaryCC<llvm_v4i32_ty>;
+}
+
+multiclass SystemZCompareBHFG<string name> {
+ def bs : SystemZBinaryCC<llvm_v16i8_ty>;
+ def hs : SystemZBinaryCC<llvm_v8i16_ty>;
+ def fs : SystemZBinaryCC<llvm_v4i32_ty>;
+ def gs : SystemZBinaryCC<llvm_v2i64_ty>;
+}
+
+multiclass SystemZTernaryExtBHF<string name> {
+ def b : SystemZTernaryConv<name##"b", llvm_v8i16_ty, llvm_v16i8_ty>;
+ def h : SystemZTernaryConv<name##"h", llvm_v4i32_ty, llvm_v8i16_ty>;
+ def f : SystemZTernaryConv<name##"f", llvm_v2i64_ty, llvm_v4i32_ty>;
+}
+
+multiclass SystemZTernaryExtBHFG<string name> : SystemZTernaryExtBHF<name> {
+ def g : SystemZTernaryConv<name##"g", llvm_v16i8_ty, llvm_v2i64_ty>;
+}
+
+multiclass SystemZTernaryBHF<string name> {
+ def b : SystemZTernary<name##"b", llvm_v16i8_ty>;
+ def h : SystemZTernary<name##"h", llvm_v8i16_ty>;
+ def f : SystemZTernary<name##"f", llvm_v4i32_ty>;
+}
+
+multiclass SystemZTernaryIntBHF<string name> {
+ def b : SystemZTernaryInt<name##"b", llvm_v16i8_ty>;
+ def h : SystemZTernaryInt<name##"h", llvm_v8i16_ty>;
+ def f : SystemZTernaryInt<name##"f", llvm_v4i32_ty>;
+}
+
+multiclass SystemZTernaryIntCCBHF {
+ def bs : SystemZTernaryIntCC<llvm_v16i8_ty>;
+ def hs : SystemZTernaryIntCC<llvm_v8i16_ty>;
+ def fs : SystemZTernaryIntCC<llvm_v4i32_ty>;
+}
+
+multiclass SystemZQuaternaryIntBHF<string name> {
+ def b : SystemZQuaternaryInt<name##"b", llvm_v16i8_ty>;
+ def h : SystemZQuaternaryInt<name##"h", llvm_v8i16_ty>;
+ def f : SystemZQuaternaryInt<name##"f", llvm_v4i32_ty>;
+}
+
+multiclass SystemZQuaternaryIntBHFG<string name> : SystemZQuaternaryIntBHF<name> {
+ def g : SystemZQuaternaryInt<name##"g", llvm_v2i64_ty>;
+}
+
+multiclass SystemZQuaternaryIntCCBHF {
+ def bs : SystemZQuaternaryIntCC<llvm_v16i8_ty>;
+ def hs : SystemZQuaternaryIntCC<llvm_v8i16_ty>;
+ def fs : SystemZQuaternaryIntCC<llvm_v4i32_ty>;
+}
+
+//===----------------------------------------------------------------------===//
+//
+// Transactional-execution intrinsics
+//
+//===----------------------------------------------------------------------===//
+
+let TargetPrefix = "s390" in {
+ def int_s390_tbegin : Intrinsic<[llvm_i32_ty], [llvm_ptr_ty, llvm_i32_ty],
+ [IntrNoDuplicate, IntrWriteMem]>;
+
+ def int_s390_tbegin_nofloat : Intrinsic<[llvm_i32_ty],
+ [llvm_ptr_ty, llvm_i32_ty],
+ [IntrNoDuplicate, IntrWriteMem]>;
+
+ def int_s390_tbeginc : Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty],
+ [IntrNoDuplicate, IntrWriteMem]>;
+
+ def int_s390_tabort : Intrinsic<[], [llvm_i64_ty],
+ [IntrNoReturn, Throws, IntrWriteMem]>;
+
+ def int_s390_tend : GCCBuiltin<"__builtin_tend">,
+ Intrinsic<[llvm_i32_ty], []>;
+
+ def int_s390_etnd : GCCBuiltin<"__builtin_tx_nesting_depth">,
+ Intrinsic<[llvm_i32_ty], [], [IntrNoMem]>;
+
+ def int_s390_ntstg : Intrinsic<[], [llvm_i64_ty, llvm_ptr64_ty],
+ [IntrArgMemOnly, IntrWriteMem]>;
+
+ def int_s390_ppa_txassist : GCCBuiltin<"__builtin_tx_assist">,
+ Intrinsic<[], [llvm_i32_ty]>;
+}
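+
+// A minimal IR-level sketch of the tbegin record above, assuming llvm_ptr_ty
+// lowers to i8*:
+//   declare i32 @llvm.s390.tbegin(i8*, i32)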
+
+//===----------------------------------------------------------------------===//
+//
+// Vector intrinsics
+//
+//===----------------------------------------------------------------------===//
+
+let TargetPrefix = "s390" in {
+ def int_s390_lcbb : GCCBuiltin<"__builtin_s390_lcbb">,
+ Intrinsic<[llvm_i32_ty], [llvm_ptr_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_s390_vlbb : GCCBuiltin<"__builtin_s390_vlbb">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_s390_vll : GCCBuiltin<"__builtin_s390_vll">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_i32_ty, llvm_ptr_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_s390_vpdi : GCCBuiltin<"__builtin_s390_vpdi">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2i64_ty, llvm_v2i64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_s390_vperm : GCCBuiltin<"__builtin_s390_vperm">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+
+ defm int_s390_vpks : SystemZBinaryTruncHFG<"vpks">;
+ defm int_s390_vpks : SystemZBinaryTruncCCHFG;
+
+ defm int_s390_vpkls : SystemZBinaryTruncHFG<"vpkls">;
+ defm int_s390_vpkls : SystemZBinaryTruncCCHFG;
+
+ def int_s390_vstl : GCCBuiltin<"__builtin_s390_vstl">,
+ Intrinsic<[], [llvm_v16i8_ty, llvm_i32_ty, llvm_ptr_ty],
+ [IntrArgMemOnly, IntrWriteMem]>;
+
+ defm int_s390_vupl : SystemZUnaryExtBHWF<"vupl">;
+ defm int_s390_vupll : SystemZUnaryExtBHF<"vupll">;
+
+ defm int_s390_vuph : SystemZUnaryExtBHF<"vuph">;
+ defm int_s390_vuplh : SystemZUnaryExtBHF<"vuplh">;
+
+ defm int_s390_vacc : SystemZBinaryBHFG<"vacc">;
+
+ def int_s390_vaq : SystemZBinary<"vaq", llvm_v16i8_ty>;
+ def int_s390_vacq : SystemZTernary<"vacq", llvm_v16i8_ty>;
+ def int_s390_vaccq : SystemZBinary<"vaccq", llvm_v16i8_ty>;
+ def int_s390_vacccq : SystemZTernary<"vacccq", llvm_v16i8_ty>;
+
+ defm int_s390_vavg : SystemZBinaryBHFG<"vavg">;
+ defm int_s390_vavgl : SystemZBinaryBHFG<"vavgl">;
+
+ def int_s390_vcksm : SystemZBinary<"vcksm", llvm_v4i32_ty>;
+
+ defm int_s390_vgfm : SystemZBinaryExtBHFG<"vgfm">;
+ defm int_s390_vgfma : SystemZTernaryExtBHFG<"vgfma">;
+
+ defm int_s390_vmah : SystemZTernaryBHF<"vmah">;
+ defm int_s390_vmalh : SystemZTernaryBHF<"vmalh">;
+ defm int_s390_vmae : SystemZTernaryExtBHF<"vmae">;
+ defm int_s390_vmale : SystemZTernaryExtBHF<"vmale">;
+ defm int_s390_vmao : SystemZTernaryExtBHF<"vmao">;
+ defm int_s390_vmalo : SystemZTernaryExtBHF<"vmalo">;
+
+ defm int_s390_vmh : SystemZBinaryBHF<"vmh">;
+ defm int_s390_vmlh : SystemZBinaryBHF<"vmlh">;
+ defm int_s390_vme : SystemZBinaryExtBHF<"vme">;
+ defm int_s390_vmle : SystemZBinaryExtBHF<"vmle">;
+ defm int_s390_vmo : SystemZBinaryExtBHF<"vmo">;
+ defm int_s390_vmlo : SystemZBinaryExtBHF<"vmlo">;
+
+ defm int_s390_verllv : SystemZBinaryBHFG<"verllv">;
+ defm int_s390_verll : SystemZBinaryIntBHFG<"verll">;
+ defm int_s390_verim : SystemZQuaternaryIntBHFG<"verim">;
+
+ def int_s390_vsl : SystemZBinary<"vsl", llvm_v16i8_ty>;
+ def int_s390_vslb : SystemZBinary<"vslb", llvm_v16i8_ty>;
+ def int_s390_vsra : SystemZBinary<"vsra", llvm_v16i8_ty>;
+ def int_s390_vsrab : SystemZBinary<"vsrab", llvm_v16i8_ty>;
+ def int_s390_vsrl : SystemZBinary<"vsrl", llvm_v16i8_ty>;
+ def int_s390_vsrlb : SystemZBinary<"vsrlb", llvm_v16i8_ty>;
+
+ def int_s390_vsldb : GCCBuiltin<"__builtin_s390_vsldb">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ defm int_s390_vscbi : SystemZBinaryBHFG<"vscbi">;
+
+ def int_s390_vsq : SystemZBinary<"vsq", llvm_v16i8_ty>;
+ def int_s390_vsbiq : SystemZTernary<"vsbiq", llvm_v16i8_ty>;
+ def int_s390_vscbiq : SystemZBinary<"vscbiq", llvm_v16i8_ty>;
+ def int_s390_vsbcbiq : SystemZTernary<"vsbcbiq", llvm_v16i8_ty>;
+
+ def int_s390_vsumb : SystemZBinaryConv<"vsumb", llvm_v4i32_ty, llvm_v16i8_ty>;
+ def int_s390_vsumh : SystemZBinaryConv<"vsumh", llvm_v4i32_ty, llvm_v8i16_ty>;
+
+ def int_s390_vsumgh : SystemZBinaryConv<"vsumgh", llvm_v2i64_ty,
+ llvm_v8i16_ty>;
+ def int_s390_vsumgf : SystemZBinaryConv<"vsumgf", llvm_v2i64_ty,
+ llvm_v4i32_ty>;
+
+ def int_s390_vsumqf : SystemZBinaryConv<"vsumqf", llvm_v16i8_ty,
+ llvm_v4i32_ty>;
+ def int_s390_vsumqg : SystemZBinaryConv<"vsumqg", llvm_v16i8_ty,
+ llvm_v2i64_ty>;
+
+ def int_s390_vtm : SystemZBinaryConv<"vtm", llvm_i32_ty, llvm_v16i8_ty>;
+
+ defm int_s390_vceq : SystemZCompareBHFG<"vceq">;
+ defm int_s390_vch : SystemZCompareBHFG<"vch">;
+ defm int_s390_vchl : SystemZCompareBHFG<"vchl">;
+
+ defm int_s390_vfae : SystemZTernaryIntBHF<"vfae">;
+ defm int_s390_vfae : SystemZTernaryIntCCBHF;
+ defm int_s390_vfaez : SystemZTernaryIntBHF<"vfaez">;
+ defm int_s390_vfaez : SystemZTernaryIntCCBHF;
+
+ defm int_s390_vfee : SystemZBinaryBHF<"vfee">;
+ defm int_s390_vfee : SystemZBinaryCCBHF;
+ defm int_s390_vfeez : SystemZBinaryBHF<"vfeez">;
+ defm int_s390_vfeez : SystemZBinaryCCBHF;
+
+ defm int_s390_vfene : SystemZBinaryBHF<"vfene">;
+ defm int_s390_vfene : SystemZBinaryCCBHF;
+ defm int_s390_vfenez : SystemZBinaryBHF<"vfenez">;
+ defm int_s390_vfenez : SystemZBinaryCCBHF;
+
+ defm int_s390_vistr : SystemZUnaryBHF<"vistr">;
+ defm int_s390_vistr : SystemZUnaryCCBHF;
+
+ defm int_s390_vstrc : SystemZQuaternaryIntBHF<"vstrc">;
+ defm int_s390_vstrc : SystemZQuaternaryIntCCBHF;
+ defm int_s390_vstrcz : SystemZQuaternaryIntBHF<"vstrcz">;
+ defm int_s390_vstrcz : SystemZQuaternaryIntCCBHF;
+
+ def int_s390_vfcedbs : SystemZBinaryConvCC<llvm_v2i64_ty, llvm_v2f64_ty>;
+ def int_s390_vfchdbs : SystemZBinaryConvCC<llvm_v2i64_ty, llvm_v2f64_ty>;
+ def int_s390_vfchedbs : SystemZBinaryConvCC<llvm_v2i64_ty, llvm_v2f64_ty>;
+
+ def int_s390_vftcidb : SystemZBinaryConvIntCC<llvm_v2i64_ty, llvm_v2f64_ty>;
+
+ def int_s390_vfidb : Intrinsic<[llvm_v2f64_ty],
+ [llvm_v2f64_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ // Instructions from the Vector Enhancements Facility 1
+ def int_s390_vbperm : SystemZBinaryConv<"vbperm", llvm_v2i64_ty,
+ llvm_v16i8_ty>;
+
+ def int_s390_vmslg : GCCBuiltin<"__builtin_s390_vmslg">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v16i8_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+
+ def int_s390_vfmaxdb : Intrinsic<[llvm_v2f64_ty],
+ [llvm_v2f64_ty, llvm_v2f64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_s390_vfmindb : Intrinsic<[llvm_v2f64_ty],
+ [llvm_v2f64_ty, llvm_v2f64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_s390_vfmaxsb : Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_v4f32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_s390_vfminsb : Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_v4f32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_s390_vfcesbs : SystemZBinaryConvCC<llvm_v4i32_ty, llvm_v4f32_ty>;
+ def int_s390_vfchsbs : SystemZBinaryConvCC<llvm_v4i32_ty, llvm_v4f32_ty>;
+ def int_s390_vfchesbs : SystemZBinaryConvCC<llvm_v4i32_ty, llvm_v4f32_ty>;
+
+ def int_s390_vftcisb : SystemZBinaryConvIntCC<llvm_v4i32_ty, llvm_v4f32_ty>;
+
+ def int_s390_vfisb : Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ // Instructions from the Vector Packed Decimal Facility
+ def int_s390_vlrl : GCCBuiltin<"__builtin_s390_vlrl">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_i32_ty, llvm_ptr_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_s390_vstrl : GCCBuiltin<"__builtin_s390_vstrl">,
+ Intrinsic<[], [llvm_v16i8_ty, llvm_i32_ty, llvm_ptr_ty],
+ [IntrArgMemOnly, IntrWriteMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+//
+// Misc intrinsics
+//
+//===----------------------------------------------------------------------===//
+
+let TargetPrefix = "s390" in {
+ def int_s390_sfpc : GCCBuiltin<"__builtin_s390_sfpc">,
+ Intrinsic<[], [llvm_i32_ty], []>;
+ def int_s390_efpc : GCCBuiltin<"__builtin_s390_efpc">,
+ Intrinsic<[llvm_i32_ty], [], []>;
+
+ def int_s390_tdc : Intrinsic<[llvm_i32_ty], [llvm_anyfloat_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+}
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsWebAssembly.td b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsWebAssembly.td
new file mode 100644
index 000000000..7afc755a1
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsWebAssembly.td
@@ -0,0 +1,67 @@
+//===- IntrinsicsWebAssembly.td - Defines wasm intrinsics --*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+///
+/// \file
+/// This file defines all of the WebAssembly-specific intrinsics.
+///
+//===----------------------------------------------------------------------===//
+
+let TargetPrefix = "wasm" in { // All intrinsics start with "llvm.wasm.".
+
+// Query the current memory size, and increase the current memory size.
+// Note that memory.size is not IntrNoMem because it must be sequenced with
+// respect to memory.grow calls.
+def int_wasm_memory_size : Intrinsic<[llvm_anyint_ty],
+ [llvm_i32_ty],
+ [IntrReadMem]>;
+def int_wasm_memory_grow : Intrinsic<[llvm_anyint_ty],
+ [llvm_i32_ty, LLVMMatchType<0>],
+ []>;
+
+// These are the old names.
+def int_wasm_mem_size : Intrinsic<[llvm_anyint_ty],
+ [llvm_i32_ty],
+ [IntrReadMem]>;
+def int_wasm_mem_grow : Intrinsic<[llvm_anyint_ty],
+ [llvm_i32_ty, LLVMMatchType<0>],
+ []>;
+
+// These are the old old names. They also lack the immediate field.
+def int_wasm_current_memory : Intrinsic<[llvm_anyint_ty], [], [IntrReadMem]>;
+def int_wasm_grow_memory : Intrinsic<[llvm_anyint_ty], [LLVMMatchType<0>], []>;
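From C, the current-generation pair is reachable through Clang builtins that lower to llvm.wasm.memory.size / llvm.wasm.memory.grow. A minimal sketch; the __builtin_wasm_memory_size / __builtin_wasm_memory_grow spellings and their leading memory-index argument are assumptions based on Clang of the same era, not defined in this file:

    #include <stddef.h>

    /* Grow linear memory 0 by `delta` 64 KiB pages; return the old size in
       pages, or (size_t)-1 if the request was refused. */
    size_t grow_by(size_t delta) {
      size_t before = __builtin_wasm_memory_size(0);            /* memory.size */
      if (__builtin_wasm_memory_grow(0, delta) == (size_t)-1)   /* memory.grow */
        return (size_t)-1;
      return before;
    }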
+
+//===----------------------------------------------------------------------===//
+// Exception handling intrinsics
+//===----------------------------------------------------------------------===//
+
+// throw / rethrow
+def int_wasm_throw : Intrinsic<[], [llvm_i32_ty, llvm_ptr_ty],
+ [Throws, IntrNoReturn]>;
+def int_wasm_rethrow : Intrinsic<[], [], [Throws, IntrNoReturn]>;
+
+// Since wasm does not use landingpad instructions, these intrinsics return
+// the exception pointer and selector values until they are lowered in
+// WasmEHPrepare.
+def int_wasm_get_exception : Intrinsic<[llvm_ptr_ty], [llvm_token_ty],
+ [IntrHasSideEffects]>;
+def int_wasm_get_ehselector : Intrinsic<[llvm_i32_ty], [llvm_token_ty],
+ [IntrHasSideEffects]>;
+
+// wasm.catch returns the pointer to the exception object caught by the wasm
+// 'catch' instruction.
+def int_wasm_catch : Intrinsic<[llvm_ptr_ty], [llvm_i32_ty],
+ [IntrHasSideEffects]>;
+
+// WebAssembly EH must maintain the landingpads in the order assigned to them
+// by the WasmEHPrepare pass in order to generate the landingpad table in
+// EHStreamer. This intrinsic is used to give them their indices in
+// WasmEHPrepare.
+def int_wasm_landingpad_index: Intrinsic<[], [llvm_i32_ty], [IntrNoMem]>;
+
+// Returns the LSDA address of the current function.
+def int_wasm_lsda : Intrinsic<[llvm_ptr_ty], [], [IntrNoMem]>;
+}
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsX86.td b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsX86.td
new file mode 100644
index 000000000..905afc130
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsX86.td
@@ -0,0 +1,5215 @@
+//===- IntrinsicsX86.td - Defines X86 intrinsics -----------*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines all of the X86-specific intrinsics.
+//
+//===----------------------------------------------------------------------===//
+
+//===----------------------------------------------------------------------===//
+// Interrupt traps
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_int : Intrinsic<[], [llvm_i8_ty]>;
+}
+
+//===----------------------------------------------------------------------===//
+// SEH intrinsics for Windows
+let TargetPrefix = "x86" in {
+ def int_x86_seh_lsda : Intrinsic<[llvm_ptr_ty], [llvm_ptr_ty], [IntrNoMem]>;
+
+ // Marks the EH registration node created in LLVM IR prior to code generation.
+ def int_x86_seh_ehregnode : Intrinsic<[], [llvm_ptr_ty], []>;
+
+ // Marks the EH guard slot node created in LLVM IR prior to code generation.
+ def int_x86_seh_ehguard : Intrinsic<[], [llvm_ptr_ty], []>;
+
+ // Given a pointer to the end of an EH registration object, returns the true
+ // parent frame address that can be used with llvm.localrecover.
+ def int_x86_seh_recoverfp : Intrinsic<[llvm_ptr_ty],
+ [llvm_ptr_ty, llvm_ptr_ty],
+ [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// FLAGS.
+let TargetPrefix = "x86" in {
+ def int_x86_flags_read_u32 : GCCBuiltin<"__builtin_ia32_readeflags_u32">,
+ Intrinsic<[llvm_i32_ty], [], []>;
+ def int_x86_flags_read_u64 : GCCBuiltin<"__builtin_ia32_readeflags_u64">,
+ Intrinsic<[llvm_i64_ty], [], []>;
+ def int_x86_flags_write_u32 : GCCBuiltin<"__builtin_ia32_writeeflags_u32">,
+ Intrinsic<[], [llvm_i32_ty], []>;
+ def int_x86_flags_write_u64 : GCCBuiltin<"__builtin_ia32_writeeflags_u64">,
+ Intrinsic<[], [llvm_i64_ty], []>;
+}
+
+//===----------------------------------------------------------------------===//
+// Read Time Stamp Counter.
+let TargetPrefix = "x86" in {
+ def int_x86_rdtsc : GCCBuiltin<"__builtin_ia32_rdtsc">,
+ Intrinsic<[llvm_i64_ty], [], []>;
+ def int_x86_rdtscp : GCCBuiltin<"__builtin_ia32_rdtscp">,
+ Intrinsic<[llvm_i64_ty], [llvm_ptr_ty], [IntrArgMemOnly]>;
+}
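Both counters are reachable directly through the __builtin_ia32_* names listed above. A minimal C sketch of timing a region; rdtscp additionally stores the contents of IA32_TSC_AUX through its pointer argument, and this is a sketch rather than a calibrated benchmark:

    #include <stdint.h>

    uint64_t cycles_for(void (*fn)(void)) {
      unsigned int aux;                             /* receives IA32_TSC_AUX */
      uint64_t start = __builtin_ia32_rdtsc();      /* llvm.x86.rdtsc */
      fn();
      uint64_t end = __builtin_ia32_rdtscp(&aux);   /* llvm.x86.rdtscp */
      return end - start;
    }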
+
+// Read Performance-Monitoring Counter.
+let TargetPrefix = "x86" in {
+ def int_x86_rdpmc : GCCBuiltin<"__builtin_ia32_rdpmc">,
+ Intrinsic<[llvm_i64_ty], [llvm_i32_ty], []>;
+}
+
+// Read processor ID.
+let TargetPrefix = "x86" in {
+ def int_x86_rdpid : GCCBuiltin<"__builtin_ia32_rdpid">,
+ Intrinsic<[llvm_i32_ty], [], []>;
+}
+
+//===----------------------------------------------------------------------===//
+// CET SS
+let TargetPrefix = "x86" in {
+ def int_x86_incsspd : GCCBuiltin<"__builtin_ia32_incsspd">,
+ Intrinsic<[], [llvm_i32_ty], []>;
+ def int_x86_incsspq : GCCBuiltin<"__builtin_ia32_incsspq">,
+ Intrinsic<[], [llvm_i64_ty], []>;
+ def int_x86_rdsspd : GCCBuiltin<"__builtin_ia32_rdsspd">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], []>;
+ def int_x86_rdsspq : GCCBuiltin<"__builtin_ia32_rdsspq">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty], []>;
+ def int_x86_saveprevssp : GCCBuiltin<"__builtin_ia32_saveprevssp">,
+ Intrinsic<[], [], []>;
+ def int_x86_rstorssp : GCCBuiltin<"__builtin_ia32_rstorssp">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+ def int_x86_wrssd : GCCBuiltin<"__builtin_ia32_wrssd">,
+ Intrinsic<[], [llvm_i32_ty, llvm_ptr_ty], []>;
+ def int_x86_wrssq : GCCBuiltin<"__builtin_ia32_wrssq">,
+ Intrinsic<[], [llvm_i64_ty, llvm_ptr_ty], []>;
+ def int_x86_wrussd : GCCBuiltin<"__builtin_ia32_wrussd">,
+ Intrinsic<[], [llvm_i32_ty, llvm_ptr_ty], []>;
+ def int_x86_wrussq : GCCBuiltin<"__builtin_ia32_wrussq">,
+ Intrinsic<[], [llvm_i64_ty, llvm_ptr_ty], []>;
+ def int_x86_setssbsy : GCCBuiltin<"__builtin_ia32_setssbsy">,
+ Intrinsic<[], [], []>;
+ def int_x86_clrssbsy : GCCBuiltin<"__builtin_ia32_clrssbsy">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+}
+
+//===----------------------------------------------------------------------===//
+// 3DNow!
+
+let TargetPrefix = "x86" in {
+ def int_x86_3dnow_pavgusb : GCCBuiltin<"__builtin_ia32_pavgusb">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnow_pf2id : GCCBuiltin<"__builtin_ia32_pf2id">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_3dnow_pfacc : GCCBuiltin<"__builtin_ia32_pfacc">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnow_pfadd : GCCBuiltin<"__builtin_ia32_pfadd">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnow_pfcmpeq : GCCBuiltin<"__builtin_ia32_pfcmpeq">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnow_pfcmpge : GCCBuiltin<"__builtin_ia32_pfcmpge">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnow_pfcmpgt : GCCBuiltin<"__builtin_ia32_pfcmpgt">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnow_pfmax : GCCBuiltin<"__builtin_ia32_pfmax">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnow_pfmin : GCCBuiltin<"__builtin_ia32_pfmin">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnow_pfmul : GCCBuiltin<"__builtin_ia32_pfmul">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnow_pfrcp : GCCBuiltin<"__builtin_ia32_pfrcp">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_3dnow_pfrcpit1 : GCCBuiltin<"__builtin_ia32_pfrcpit1">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnow_pfrcpit2 : GCCBuiltin<"__builtin_ia32_pfrcpit2">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnow_pfrsqrt : GCCBuiltin<"__builtin_ia32_pfrsqrt">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_3dnow_pfrsqit1 : GCCBuiltin<"__builtin_ia32_pfrsqit1">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnow_pfsub : GCCBuiltin<"__builtin_ia32_pfsub">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnow_pfsubr : GCCBuiltin<"__builtin_ia32_pfsubr">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnow_pi2fd : GCCBuiltin<"__builtin_ia32_pi2fd">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_3dnow_pmulhrw : GCCBuiltin<"__builtin_ia32_pmulhrw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// 3DNow! extensions
+
+let TargetPrefix = "x86" in {
+ def int_x86_3dnowa_pf2iw : GCCBuiltin<"__builtin_ia32_pf2iw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_3dnowa_pfnacc : GCCBuiltin<"__builtin_ia32_pfnacc">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnowa_pfpnacc : GCCBuiltin<"__builtin_ia32_pfpnacc">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_3dnowa_pi2fw : GCCBuiltin<"__builtin_ia32_pi2fw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_3dnowa_pswapd :
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty], [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// SSE1
+
+// Arithmetic ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse_rcp_ss : GCCBuiltin<"__builtin_ia32_rcpss">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty],
+ [IntrNoMem]>;
+ def int_x86_sse_rcp_ps : GCCBuiltin<"__builtin_ia32_rcpps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty],
+ [IntrNoMem]>;
+ def int_x86_sse_rsqrt_ss : GCCBuiltin<"__builtin_ia32_rsqrtss">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty],
+ [IntrNoMem]>;
+ def int_x86_sse_rsqrt_ps : GCCBuiltin<"__builtin_ia32_rsqrtps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty],
+ [IntrNoMem]>;
+ def int_x86_sse_min_ss : GCCBuiltin<"__builtin_ia32_minss">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_min_ps : GCCBuiltin<"__builtin_ia32_minps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_max_ss : GCCBuiltin<"__builtin_ia32_maxss">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_max_ps : GCCBuiltin<"__builtin_ia32_maxps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+}
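These correspond to the <xmmintrin.h> wrappers. A minimal C sketch that clamps a vector and takes the fast reciprocal square root; rcpps/rsqrtps are approximate (about 12 bits of precision), which is why they remain intrinsics instead of plain IR arithmetic:

    #include <xmmintrin.h>

    __m128 clamp_rsqrt(__m128 v, __m128 lo, __m128 hi) {
      __m128 clamped = _mm_min_ps(_mm_max_ps(v, lo), hi); /* llvm.x86.sse.max.ps / min.ps */
      return _mm_rsqrt_ps(clamped);                       /* llvm.x86.sse.rsqrt.ps */
    }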
+
+// Comparison ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse_cmp_ss : GCCBuiltin<"__builtin_ia32_cmpss">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty, llvm_i8_ty], [IntrNoMem]>;
+ // NOTE: This comparison intrinsic is not used by clang as long as the
+ // distinction in signaling behaviour is not implemented.
+ def int_x86_sse_cmp_ps :
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_sse_comieq_ss : GCCBuiltin<"__builtin_ia32_comieq">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_comilt_ss : GCCBuiltin<"__builtin_ia32_comilt">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_comile_ss : GCCBuiltin<"__builtin_ia32_comile">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_comigt_ss : GCCBuiltin<"__builtin_ia32_comigt">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_comige_ss : GCCBuiltin<"__builtin_ia32_comige">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_comineq_ss : GCCBuiltin<"__builtin_ia32_comineq">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_ucomieq_ss : GCCBuiltin<"__builtin_ia32_ucomieq">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_ucomilt_ss : GCCBuiltin<"__builtin_ia32_ucomilt">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_ucomile_ss : GCCBuiltin<"__builtin_ia32_ucomile">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_ucomigt_ss : GCCBuiltin<"__builtin_ia32_ucomigt">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_ucomige_ss : GCCBuiltin<"__builtin_ia32_ucomige">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_ucomineq_ss : GCCBuiltin<"__builtin_ia32_ucomineq">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+}
+
+
+// Conversion ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse_cvtss2si : GCCBuiltin<"__builtin_ia32_cvtss2si">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_cvtss2si64 : GCCBuiltin<"__builtin_ia32_cvtss2si64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_cvttss2si : GCCBuiltin<"__builtin_ia32_cvttss2si">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_cvttss2si64 : GCCBuiltin<"__builtin_ia32_cvttss2si64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+
+ def int_x86_sse_cvtps2pi : GCCBuiltin<"__builtin_ia32_cvtps2pi">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_cvttps2pi: GCCBuiltin<"__builtin_ia32_cvttps2pi">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse_cvtpi2ps : GCCBuiltin<"__builtin_ia32_cvtpi2ps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+}
+
+// Cacheability support ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse_sfence : GCCBuiltin<"__builtin_ia32_sfence">,
+ Intrinsic<[], [], []>;
+}
+
+// Control register.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse_stmxcsr :
+ Intrinsic<[], [llvm_ptr_ty], []>;
+ def int_x86_sse_ldmxcsr :
+ Intrinsic<[], [llvm_ptr_ty], []>;
+}
+
+// Misc.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse_movmsk_ps : GCCBuiltin<"__builtin_ia32_movmskps">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// SSE2
+
+// FP arithmetic ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse2_min_sd : GCCBuiltin<"__builtin_ia32_minsd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_min_pd : GCCBuiltin<"__builtin_ia32_minpd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_max_sd : GCCBuiltin<"__builtin_ia32_maxsd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_max_pd : GCCBuiltin<"__builtin_ia32_maxpd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+}
+
+// FP comparison ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse2_cmp_sd : GCCBuiltin<"__builtin_ia32_cmpsd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty, llvm_i8_ty], [IntrNoMem]>;
+ // NOTE: This comparison intrinsic is not used by clang as long as the
+ // distinction in signaling behaviour is not implemented.
+ def int_x86_sse2_cmp_pd :
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_sse2_comieq_sd : GCCBuiltin<"__builtin_ia32_comisdeq">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_comilt_sd : GCCBuiltin<"__builtin_ia32_comisdlt">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_comile_sd : GCCBuiltin<"__builtin_ia32_comisdle">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_comigt_sd : GCCBuiltin<"__builtin_ia32_comisdgt">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_comige_sd : GCCBuiltin<"__builtin_ia32_comisdge">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_comineq_sd : GCCBuiltin<"__builtin_ia32_comisdneq">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_ucomieq_sd : GCCBuiltin<"__builtin_ia32_ucomisdeq">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_ucomilt_sd : GCCBuiltin<"__builtin_ia32_ucomisdlt">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_ucomile_sd : GCCBuiltin<"__builtin_ia32_ucomisdle">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_ucomigt_sd : GCCBuiltin<"__builtin_ia32_ucomisdgt">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_ucomige_sd : GCCBuiltin<"__builtin_ia32_ucomisdge">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_ucomineq_sd : GCCBuiltin<"__builtin_ia32_ucomisdneq">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+}
+
+// Integer arithmetic ops.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse2_padds_b : GCCBuiltin<"__builtin_ia32_paddsb128">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty,
+ llvm_v16i8_ty], [IntrNoMem, Commutative]>;
+ def int_x86_sse2_padds_w : GCCBuiltin<"__builtin_ia32_paddsw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem, Commutative]>;
+ def int_x86_sse2_paddus_b : GCCBuiltin<"__builtin_ia32_paddusb128">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty,
+ llvm_v16i8_ty], [IntrNoMem, Commutative]>;
+ def int_x86_sse2_paddus_w : GCCBuiltin<"__builtin_ia32_paddusw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem, Commutative]>;
+ def int_x86_sse2_psubs_b : GCCBuiltin<"__builtin_ia32_psubsb128">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty,
+ llvm_v16i8_ty], [IntrNoMem]>;
+ def int_x86_sse2_psubs_w : GCCBuiltin<"__builtin_ia32_psubsw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_sse2_psubus_b : GCCBuiltin<"__builtin_ia32_psubusb128">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty,
+ llvm_v16i8_ty], [IntrNoMem]>;
+ def int_x86_sse2_psubus_w : GCCBuiltin<"__builtin_ia32_psubusw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_sse2_pmulhu_w : GCCBuiltin<"__builtin_ia32_pmulhuw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem, Commutative]>;
+ def int_x86_sse2_pmulh_w : GCCBuiltin<"__builtin_ia32_pmulhw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem, Commutative]>;
+ def int_x86_sse2_pmadd_wd : GCCBuiltin<"__builtin_ia32_pmaddwd128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem, Commutative]>;
+ def int_x86_sse2_psad_bw : GCCBuiltin<"__builtin_ia32_psadbw128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v16i8_ty,
+ llvm_v16i8_ty], [IntrNoMem, Commutative]>;
+}
+
+// Integer shift ops.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse2_psll_w : GCCBuiltin<"__builtin_ia32_psllw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_sse2_psll_d : GCCBuiltin<"__builtin_ia32_pslld128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_sse2_psll_q : GCCBuiltin<"__builtin_ia32_psllq128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty,
+ llvm_v2i64_ty], [IntrNoMem]>;
+ def int_x86_sse2_psrl_w : GCCBuiltin<"__builtin_ia32_psrlw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_sse2_psrl_d : GCCBuiltin<"__builtin_ia32_psrld128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_sse2_psrl_q : GCCBuiltin<"__builtin_ia32_psrlq128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty,
+ llvm_v2i64_ty], [IntrNoMem]>;
+ def int_x86_sse2_psra_w : GCCBuiltin<"__builtin_ia32_psraw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_sse2_psra_d : GCCBuiltin<"__builtin_ia32_psrad128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+
+ def int_x86_sse2_pslli_w : GCCBuiltin<"__builtin_ia32_psllwi128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_sse2_pslli_d : GCCBuiltin<"__builtin_ia32_pslldi128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_sse2_pslli_q : GCCBuiltin<"__builtin_ia32_psllqi128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_sse2_psrli_w : GCCBuiltin<"__builtin_ia32_psrlwi128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_sse2_psrli_d : GCCBuiltin<"__builtin_ia32_psrldi128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_sse2_psrli_q : GCCBuiltin<"__builtin_ia32_psrlqi128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_sse2_psrai_w : GCCBuiltin<"__builtin_ia32_psrawi128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_sse2_psrai_d : GCCBuiltin<"__builtin_ia32_psradi128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+}
+
+// Conversion ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse2_cvtpd2dq : GCCBuiltin<"__builtin_ia32_cvtpd2dq">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_cvttpd2dq : GCCBuiltin<"__builtin_ia32_cvttpd2dq">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_cvtpd2ps : GCCBuiltin<"__builtin_ia32_cvtpd2ps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_cvtps2dq : GCCBuiltin<"__builtin_ia32_cvtps2dq">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse2_cvttps2dq : GCCBuiltin<"__builtin_ia32_cvttps2dq">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse2_cvtsd2si : GCCBuiltin<"__builtin_ia32_cvtsd2si">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_cvtsd2si64 : GCCBuiltin<"__builtin_ia32_cvtsd2si64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_cvttsd2si : GCCBuiltin<"__builtin_ia32_cvttsd2si">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_cvttsd2si64 : GCCBuiltin<"__builtin_ia32_cvttsd2si64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_cvtsd2ss : GCCBuiltin<"__builtin_ia32_cvtsd2ss">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse_cvtpd2pi : GCCBuiltin<"__builtin_ia32_cvtpd2pi">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse_cvttpd2pi: GCCBuiltin<"__builtin_ia32_cvttpd2pi">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse_cvtpi2pd : GCCBuiltin<"__builtin_ia32_cvtpi2pd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_x86mmx_ty], [IntrNoMem]>;
+}
+
+// Misc.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse2_packsswb_128 : GCCBuiltin<"__builtin_ia32_packsswb128">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_sse2_packssdw_128 : GCCBuiltin<"__builtin_ia32_packssdw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v4i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_sse2_packuswb_128 : GCCBuiltin<"__builtin_ia32_packuswb128">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_sse2_movmsk_pd : GCCBuiltin<"__builtin_ia32_movmskpd">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse2_pmovmskb_128 : GCCBuiltin<"__builtin_ia32_pmovmskb128">,
+ Intrinsic<[llvm_i32_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+ def int_x86_sse2_maskmov_dqu : GCCBuiltin<"__builtin_ia32_maskmovdqu">,
+ Intrinsic<[], [llvm_v16i8_ty,
+ llvm_v16i8_ty, llvm_ptr_ty], []>;
+ def int_x86_sse2_clflush : GCCBuiltin<"__builtin_ia32_clflush">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+ def int_x86_sse2_lfence : GCCBuiltin<"__builtin_ia32_lfence">,
+ Intrinsic<[], [], []>;
+ def int_x86_sse2_mfence : GCCBuiltin<"__builtin_ia32_mfence">,
+ Intrinsic<[], [], []>;
+ def int_x86_sse2_pause : GCCBuiltin<"__builtin_ia32_pause">,
+ Intrinsic<[], [], []>;
+}
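The cache and ordering intrinsics at the end of this block are exposed through <emmintrin.h>. A minimal C sketch of a flush-plus-fence sequence and a spin-wait hint; whether clflush followed by mfence is sufficient for a particular persistence or coherence protocol is outside the scope of this sketch:

    #include <emmintrin.h>

    void flush_line(const void *p) {
      _mm_clflush(p);   /* llvm.x86.sse2.clflush */
      _mm_mfence();     /* llvm.x86.sse2.mfence  */
    }

    void spin_until_set(volatile int *flag) {
      while (!*flag)
        _mm_pause();    /* llvm.x86.sse2.pause: polite spin-wait hint */
    }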
+
+//===----------------------------------------------------------------------===//
+// SSE3
+
+// Addition / subtraction ops.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse3_addsub_ps : GCCBuiltin<"__builtin_ia32_addsubps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse3_addsub_pd : GCCBuiltin<"__builtin_ia32_addsubpd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+}
+
+// Horizontal ops.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse3_hadd_ps : GCCBuiltin<"__builtin_ia32_haddps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse3_hadd_pd : GCCBuiltin<"__builtin_ia32_haddpd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_sse3_hsub_ps : GCCBuiltin<"__builtin_ia32_hsubps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_sse3_hsub_pd : GCCBuiltin<"__builtin_ia32_hsubpd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+}
+
+// Specialized unaligned load.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse3_ldu_dq : GCCBuiltin<"__builtin_ia32_lddqu">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_ptr_ty], [IntrReadMem]>;
+}
+
+// Thread synchronization ops.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse3_monitor : GCCBuiltin<"__builtin_ia32_monitor">,
+ Intrinsic<[], [llvm_ptr_ty,
+ llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_sse3_mwait : GCCBuiltin<"__builtin_ia32_mwait">,
+ Intrinsic<[], [llvm_i32_ty,
+ llvm_i32_ty], []>;
+}
+
+//===----------------------------------------------------------------------===//
+// SSSE3
+
+// Horizontal arithmetic ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_ssse3_phadd_w : GCCBuiltin<"__builtin_ia32_phaddw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_ssse3_phadd_w_128 : GCCBuiltin<"__builtin_ia32_phaddw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+
+ def int_x86_ssse3_phadd_d : GCCBuiltin<"__builtin_ia32_phaddd">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_ssse3_phadd_d_128 : GCCBuiltin<"__builtin_ia32_phaddd128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+
+ def int_x86_ssse3_phadd_sw : GCCBuiltin<"__builtin_ia32_phaddsw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_ssse3_phadd_sw_128 : GCCBuiltin<"__builtin_ia32_phaddsw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+
+ def int_x86_ssse3_phsub_w : GCCBuiltin<"__builtin_ia32_phsubw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_ssse3_phsub_w_128 : GCCBuiltin<"__builtin_ia32_phsubw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+
+ def int_x86_ssse3_phsub_d : GCCBuiltin<"__builtin_ia32_phsubd">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_ssse3_phsub_d_128 : GCCBuiltin<"__builtin_ia32_phsubd128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+
+ def int_x86_ssse3_phsub_sw : GCCBuiltin<"__builtin_ia32_phsubsw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_ssse3_phsub_sw_128 : GCCBuiltin<"__builtin_ia32_phsubsw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+
+ def int_x86_ssse3_pmadd_ub_sw : GCCBuiltin<"__builtin_ia32_pmaddubsw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_ssse3_pmadd_ub_sw_128 : GCCBuiltin<"__builtin_ia32_pmaddubsw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty,
+ llvm_v16i8_ty], [IntrNoMem]>;
+}
+
+// Packed multiply high with round and scale
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_ssse3_pmul_hr_sw : GCCBuiltin<"__builtin_ia32_pmulhrsw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+ def int_x86_ssse3_pmul_hr_sw_128 : GCCBuiltin<"__builtin_ia32_pmulhrsw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem, Commutative]>;
+}
+
+// Shuffle ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_ssse3_pshuf_b : GCCBuiltin<"__builtin_ia32_pshufb">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_ssse3_pshuf_b_128 : GCCBuiltin<"__builtin_ia32_pshufb128">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty,
+ llvm_v16i8_ty], [IntrNoMem]>;
+ def int_x86_sse_pshuf_w : GCCBuiltin<"__builtin_ia32_pshufw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+}
+
+// Sign ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_ssse3_psign_b : GCCBuiltin<"__builtin_ia32_psignb">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_ssse3_psign_b_128 : GCCBuiltin<"__builtin_ia32_psignb128">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty,
+ llvm_v16i8_ty], [IntrNoMem]>;
+
+ def int_x86_ssse3_psign_w : GCCBuiltin<"__builtin_ia32_psignw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_ssse3_psign_w_128 : GCCBuiltin<"__builtin_ia32_psignw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+
+ def int_x86_ssse3_psign_d : GCCBuiltin<"__builtin_ia32_psignd">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_ssse3_psign_d_128 : GCCBuiltin<"__builtin_ia32_psignd128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+}
+
+// Absolute value ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_ssse3_pabs_b : GCCBuiltin<"__builtin_ia32_pabsb">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty], [IntrNoMem]>;
+
+ def int_x86_ssse3_pabs_w : GCCBuiltin<"__builtin_ia32_pabsw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty], [IntrNoMem]>;
+
+ def int_x86_ssse3_pabs_d : GCCBuiltin<"__builtin_ia32_pabsd">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty], [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// SSE4.1
+
+// FP rounding ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse41_round_ss : GCCBuiltin<"__builtin_ia32_roundss">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_sse41_round_ps : GCCBuiltin<"__builtin_ia32_roundps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_sse41_round_sd : GCCBuiltin<"__builtin_ia32_roundsd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_sse41_round_pd : GCCBuiltin<"__builtin_ia32_roundpd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+}
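The trailing immediate selects the rounding mode and exception-suppression bits. A minimal C sketch using the <smmintrin.h> wrappers and the standard _MM_FROUND_* macros:

    #include <smmintrin.h>

    __m128 round_nearest(__m128 v) {
      /* llvm.x86.sse41.round.ps: round to nearest even, suppress exceptions */
      return _mm_round_ps(v, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
    }

    __m128d floor_pd(__m128d v) {
      return _mm_floor_pd(v);   /* roundpd with the toward-negative-infinity mode */
    }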
+
+// Vector min element
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse41_phminposuw : GCCBuiltin<"__builtin_ia32_phminposuw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty],
+ [IntrNoMem]>;
+}
+
+// Advanced Encryption Standard (AES) Instructions
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_aesni_aesimc : GCCBuiltin<"__builtin_ia32_aesimc128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+ def int_x86_aesni_aesenc : GCCBuiltin<"__builtin_ia32_aesenc128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_x86_aesni_aesenc_256 : GCCBuiltin<"__builtin_ia32_aesenc256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty],
+ [IntrNoMem]>;
+ def int_x86_aesni_aesenc_512 : GCCBuiltin<"__builtin_ia32_aesenc512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty],
+ [IntrNoMem]>;
+
+ def int_x86_aesni_aesenclast : GCCBuiltin<"__builtin_ia32_aesenclast128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_x86_aesni_aesenclast_256 :
+ GCCBuiltin<"__builtin_ia32_aesenclast256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty],
+ [IntrNoMem]>;
+ def int_x86_aesni_aesenclast_512 :
+ GCCBuiltin<"__builtin_ia32_aesenclast512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty],
+ [IntrNoMem]>;
+
+ def int_x86_aesni_aesdec : GCCBuiltin<"__builtin_ia32_aesdec128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_x86_aesni_aesdec_256 : GCCBuiltin<"__builtin_ia32_aesdec256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty],
+ [IntrNoMem]>;
+ def int_x86_aesni_aesdec_512 : GCCBuiltin<"__builtin_ia32_aesdec512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty],
+ [IntrNoMem]>;
+
+ def int_x86_aesni_aesdeclast : GCCBuiltin<"__builtin_ia32_aesdeclast128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_x86_aesni_aesdeclast_256 :
+ GCCBuiltin<"__builtin_ia32_aesdeclast256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty],
+ [IntrNoMem]>;
+ def int_x86_aesni_aesdeclast_512 :
+ GCCBuiltin<"__builtin_ia32_aesdeclast512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty],
+ [IntrNoMem]>;
+
+ def int_x86_aesni_aeskeygenassist :
+ GCCBuiltin<"__builtin_ia32_aeskeygenassist128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+}
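The 128-bit forms correspond to the <wmmintrin.h> intrinsics. A minimal C sketch of two AES rounds; a real implementation derives the round keys with aeskeygenassist and runs 10/12/14 rounds, finishing with aesenclast:

    #include <wmmintrin.h>

    __m128i aes_two_rounds(__m128i state, __m128i rk1, __m128i rk2) {
      state = _mm_aesenc_si128(state, rk1);       /* llvm.x86.aesni.aesenc */
      state = _mm_aesenclast_si128(state, rk2);   /* llvm.x86.aesni.aesenclast */
      return state;
    }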
+
+// PCLMUL instructions
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_pclmulqdq : GCCBuiltin<"__builtin_ia32_pclmulqdq128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_pclmulqdq_256 : GCCBuiltin<"__builtin_ia32_pclmulqdq256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_pclmulqdq_512 : GCCBuiltin<"__builtin_ia32_pclmulqdq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+}
+
+// Vector pack
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse41_packusdw : GCCBuiltin<"__builtin_ia32_packusdw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+}
+
+// Vector insert
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse41_insertps : GCCBuiltin<"__builtin_ia32_insertps128">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+}
+
+// Vector blend
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse41_pblendvb : GCCBuiltin<"__builtin_ia32_pblendvb128">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty,llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse41_blendvpd : GCCBuiltin<"__builtin_ia32_blendvpd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,llvm_v2f64_ty],
+ [IntrNoMem]>;
+ def int_x86_sse41_blendvps : GCCBuiltin<"__builtin_ia32_blendvps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,llvm_v4f32_ty],
+ [IntrNoMem]>;
+}
+
+// Vector dot product
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse41_dppd : GCCBuiltin<"__builtin_ia32_dppd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty, llvm_i8_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_sse41_dpps : GCCBuiltin<"__builtin_ia32_dpps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty, llvm_i8_ty],
+ [IntrNoMem, Commutative]>;
+}
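The 8-bit immediate selects which lanes participate (high nibble) and which lanes receive the sum (low nibble). A minimal C sketch of a four-element dot product:

    #include <smmintrin.h>

    float dot4(__m128 a, __m128 b) {
      /* 0xF1: multiply all four lanes, place the sum in lane 0 only */
      __m128 d = _mm_dp_ps(a, b, 0xF1);   /* llvm.x86.sse41.dpps */
      return _mm_cvtss_f32(d);
    }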
+
+// Vector sum of absolute differences
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse41_mpsadbw : GCCBuiltin<"__builtin_ia32_mpsadbw128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty,llvm_i8_ty],
+ [IntrNoMem, Commutative]>;
+}
+
+// Test instruction with bitwise comparison.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse41_ptestz : GCCBuiltin<"__builtin_ia32_ptestz128">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_x86_sse41_ptestc : GCCBuiltin<"__builtin_ia32_ptestc128">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_x86_sse41_ptestnzc : GCCBuiltin<"__builtin_ia32_ptestnzc128">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// SSE4.2
+
+// Miscellaneous
+// CRC Instruction
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse42_crc32_32_8 : GCCBuiltin<"__builtin_ia32_crc32qi">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_crc32_32_16 : GCCBuiltin<"__builtin_ia32_crc32hi">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i16_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_crc32_32_32 : GCCBuiltin<"__builtin_ia32_crc32si">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_crc32_64_64 : GCCBuiltin<"__builtin_ia32_crc32di">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+}
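These accumulate a CRC-32C (Castagnoli polynomial), not the zlib CRC-32. A minimal C sketch folding a buffer byte-by-byte through the <nmmintrin.h> wrapper, using the conventional all-ones initial value and final inversion:

    #include <nmmintrin.h>
    #include <stddef.h>
    #include <stdint.h>

    uint32_t crc32c(const uint8_t *buf, size_t len) {
      uint32_t crc = ~0u;
      for (size_t i = 0; i < len; i++)
        crc = _mm_crc32_u8(crc, buf[i]);   /* llvm.x86.sse42.crc32.32.8 */
      return ~crc;
    }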
+
+// String/text processing ops.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse42_pcmpistrm128 : GCCBuiltin<"__builtin_ia32_pcmpistrm128">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_pcmpistri128 : GCCBuiltin<"__builtin_ia32_pcmpistri128">,
+ Intrinsic<[llvm_i32_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_pcmpistria128 : GCCBuiltin<"__builtin_ia32_pcmpistria128">,
+ Intrinsic<[llvm_i32_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_pcmpistric128 : GCCBuiltin<"__builtin_ia32_pcmpistric128">,
+ Intrinsic<[llvm_i32_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_pcmpistrio128 : GCCBuiltin<"__builtin_ia32_pcmpistrio128">,
+ Intrinsic<[llvm_i32_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_pcmpistris128 : GCCBuiltin<"__builtin_ia32_pcmpistris128">,
+ Intrinsic<[llvm_i32_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_pcmpistriz128 : GCCBuiltin<"__builtin_ia32_pcmpistriz128">,
+ Intrinsic<[llvm_i32_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_pcmpestrm128 : GCCBuiltin<"__builtin_ia32_pcmpestrm128">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i8_ty, llvm_i32_ty, llvm_v16i8_ty, llvm_i32_ty,
+ llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_pcmpestri128 : GCCBuiltin<"__builtin_ia32_pcmpestri128">,
+ Intrinsic<[llvm_i32_ty],
+ [llvm_v16i8_ty, llvm_i32_ty, llvm_v16i8_ty, llvm_i32_ty,
+ llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_pcmpestria128 : GCCBuiltin<"__builtin_ia32_pcmpestria128">,
+ Intrinsic<[llvm_i32_ty],
+ [llvm_v16i8_ty, llvm_i32_ty, llvm_v16i8_ty, llvm_i32_ty,
+ llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_pcmpestric128 : GCCBuiltin<"__builtin_ia32_pcmpestric128">,
+ Intrinsic<[llvm_i32_ty],
+ [llvm_v16i8_ty, llvm_i32_ty, llvm_v16i8_ty, llvm_i32_ty,
+ llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_pcmpestrio128 : GCCBuiltin<"__builtin_ia32_pcmpestrio128">,
+ Intrinsic<[llvm_i32_ty],
+ [llvm_v16i8_ty, llvm_i32_ty, llvm_v16i8_ty, llvm_i32_ty,
+ llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_pcmpestris128 : GCCBuiltin<"__builtin_ia32_pcmpestris128">,
+ Intrinsic<[llvm_i32_ty],
+ [llvm_v16i8_ty, llvm_i32_ty, llvm_v16i8_ty, llvm_i32_ty,
+ llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse42_pcmpestriz128 : GCCBuiltin<"__builtin_ia32_pcmpestriz128">,
+ Intrinsic<[llvm_i32_ty],
+ [llvm_v16i8_ty, llvm_i32_ty, llvm_v16i8_ty, llvm_i32_ty,
+ llvm_i8_ty],
+ [IntrNoMem]>;
+}
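The immediate operand packs the element width, comparison mode, and polarity. A minimal C sketch using _mm_cmpistri from <nmmintrin.h> to find the first byte of a 16-byte chunk that appears in a set of needle bytes (the instruction returns 16 when nothing matches):

    #include <nmmintrin.h>

    /* Index of the first byte in `chunk` that occurs in `needles`, else 16. */
    int first_match(__m128i needles, __m128i chunk) {
      return _mm_cmpistri(needles, chunk,
                          _SIDD_UBYTE_OPS | _SIDD_CMP_EQUAL_ANY |
                          _SIDD_LEAST_SIGNIFICANT);  /* llvm.x86.sse42.pcmpistri128 */
    }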
+
+//===----------------------------------------------------------------------===//
+// SSE4A
+
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_sse4a_extrqi : GCCBuiltin<"__builtin_ia32_extrqi">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sse4a_extrq : GCCBuiltin<"__builtin_ia32_extrq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v16i8_ty], [IntrNoMem]>;
+
+ def int_x86_sse4a_insertqi : GCCBuiltin<"__builtin_ia32_insertqi">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty,
+ llvm_i8_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_sse4a_insertq : GCCBuiltin<"__builtin_ia32_insertq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// AVX
+
+// Arithmetic ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx_addsub_pd_256 : GCCBuiltin<"__builtin_ia32_addsubpd256">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty,
+ llvm_v4f64_ty], [IntrNoMem]>;
+ def int_x86_avx_addsub_ps_256 : GCCBuiltin<"__builtin_ia32_addsubps256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty,
+ llvm_v8f32_ty], [IntrNoMem]>;
+ def int_x86_avx_max_pd_256 : GCCBuiltin<"__builtin_ia32_maxpd256">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty,
+ llvm_v4f64_ty], [IntrNoMem]>;
+ def int_x86_avx_max_ps_256 : GCCBuiltin<"__builtin_ia32_maxps256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty,
+ llvm_v8f32_ty], [IntrNoMem]>;
+ def int_x86_avx_min_pd_256 : GCCBuiltin<"__builtin_ia32_minpd256">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty,
+ llvm_v4f64_ty], [IntrNoMem]>;
+ def int_x86_avx_min_ps_256 : GCCBuiltin<"__builtin_ia32_minps256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty,
+ llvm_v8f32_ty], [IntrNoMem]>;
+
+ def int_x86_avx_rsqrt_ps_256 : GCCBuiltin<"__builtin_ia32_rsqrtps256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty], [IntrNoMem]>;
+
+ def int_x86_avx_rcp_ps_256 : GCCBuiltin<"__builtin_ia32_rcpps256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty], [IntrNoMem]>;
+
+ def int_x86_avx_round_pd_256 : GCCBuiltin<"__builtin_ia32_roundpd256">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx_round_ps_256 : GCCBuiltin<"__builtin_ia32_roundps256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+}
+
+// Horizontal ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx_hadd_pd_256 : GCCBuiltin<"__builtin_ia32_haddpd256">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty,
+ llvm_v4f64_ty], [IntrNoMem]>;
+ def int_x86_avx_hsub_ps_256 : GCCBuiltin<"__builtin_ia32_hsubps256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty,
+ llvm_v8f32_ty], [IntrNoMem]>;
+ def int_x86_avx_hsub_pd_256 : GCCBuiltin<"__builtin_ia32_hsubpd256">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty,
+ llvm_v4f64_ty], [IntrNoMem]>;
+ def int_x86_avx_hadd_ps_256 : GCCBuiltin<"__builtin_ia32_haddps256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty,
+ llvm_v8f32_ty], [IntrNoMem]>;
+}
+
+// Vector permutation
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx_vpermilvar_pd : GCCBuiltin<"__builtin_ia32_vpermilvarpd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty,
+ llvm_v2i64_ty], [IntrNoMem]>;
+ def int_x86_avx_vpermilvar_ps : GCCBuiltin<"__builtin_ia32_vpermilvarps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx_vpermilvar_pd_256 :
+ GCCBuiltin<"__builtin_ia32_vpermilvarpd256">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty, llvm_v4i64_ty], [IntrNoMem]>;
+ def int_x86_avx_vpermilvar_ps_256 :
+ GCCBuiltin<"__builtin_ia32_vpermilvarps256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty, llvm_v8i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_d_128 :
+ GCCBuiltin<"__builtin_ia32_vpermi2vard128">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_d_256 :
+ GCCBuiltin<"__builtin_ia32_vpermi2vard256">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8i32_ty, llvm_v8i32_ty, llvm_v8i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_d_512 :
+ GCCBuiltin<"__builtin_ia32_vpermi2vard512">,
+ Intrinsic<[llvm_v16i32_ty],
+ [llvm_v16i32_ty, llvm_v16i32_ty, llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_hi_128 :
+ GCCBuiltin<"__builtin_ia32_vpermi2varhi128">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_hi_256 :
+ GCCBuiltin<"__builtin_ia32_vpermi2varhi256">,
+ Intrinsic<[llvm_v16i16_ty],
+ [llvm_v16i16_ty, llvm_v16i16_ty, llvm_v16i16_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_hi_512 :
+ GCCBuiltin<"__builtin_ia32_vpermi2varhi512">,
+ Intrinsic<[llvm_v32i16_ty],
+ [llvm_v32i16_ty, llvm_v32i16_ty, llvm_v32i16_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_pd_128 :
+ GCCBuiltin<"__builtin_ia32_vpermi2varpd128">,
+ Intrinsic<[llvm_v2f64_ty],
+ [llvm_v2f64_ty, llvm_v2i64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_pd_256 :
+ GCCBuiltin<"__builtin_ia32_vpermi2varpd256">,
+ Intrinsic<[llvm_v4f64_ty],
+ [llvm_v4f64_ty, llvm_v4i64_ty, llvm_v4f64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_pd_512 :
+ GCCBuiltin<"__builtin_ia32_vpermi2varpd512">,
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8f64_ty, llvm_v8i64_ty, llvm_v8f64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_ps_128 :
+ GCCBuiltin<"__builtin_ia32_vpermi2varps128">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_v4i32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_ps_256 :
+ GCCBuiltin<"__builtin_ia32_vpermi2varps256">,
+ Intrinsic<[llvm_v8f32_ty],
+ [llvm_v8f32_ty, llvm_v8i32_ty, llvm_v8f32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_ps_512 :
+ GCCBuiltin<"__builtin_ia32_vpermi2varps512">,
+ Intrinsic<[llvm_v16f32_ty],
+ [llvm_v16f32_ty, llvm_v16i32_ty, llvm_v16f32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_q_128 :
+ GCCBuiltin<"__builtin_ia32_vpermi2varq128">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_q_256 :
+ GCCBuiltin<"__builtin_ia32_vpermi2varq256">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4i64_ty, llvm_v4i64_ty, llvm_v4i64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_q_512 :
+ GCCBuiltin<"__builtin_ia32_vpermi2varq512">,
+ Intrinsic<[llvm_v8i64_ty],
+ [llvm_v8i64_ty, llvm_v8i64_ty, llvm_v8i64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_qi_128 :
+ GCCBuiltin<"__builtin_ia32_vpermi2varqi128">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_qi_256 :
+ GCCBuiltin<"__builtin_ia32_vpermi2varqi256">,
+ Intrinsic<[llvm_v32i8_ty],
+ [llvm_v32i8_ty, llvm_v32i8_ty, llvm_v32i8_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpermi2var_qi_512 :
+ GCCBuiltin<"__builtin_ia32_vpermi2varqi512">,
+ Intrinsic<[llvm_v64i8_ty],
+ [llvm_v64i8_ty, llvm_v64i8_ty, llvm_v64i8_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpermilvar_pd_512 :
+ GCCBuiltin<"__builtin_ia32_vpermilvarpd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8i64_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_vpermilvar_ps_512 :
+ GCCBuiltin<"__builtin_ia32_vpermilvarps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_pshuf_b_512 :
+ GCCBuiltin<"__builtin_ia32_pshufb512">,
+ Intrinsic<[llvm_v64i8_ty], [llvm_v64i8_ty, llvm_v64i8_ty],
+ [IntrNoMem]>;
+
+}
+
+// GFNI Instructions
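+// Illustrative note: these intrinsics treat each byte as an element of
+// GF(2^8) reduced modulo x^8 + x^4 + x^3 + x + 1. vgf2p8affineqb multiplies
+// each source byte, viewed as a bit vector, by an 8x8 bit matrix taken from
+// the matching qword of the second operand and XORs in the immediate byte;
+// the "inv" form first inverts the source byte in GF(2^8), and vgf2p8mulb
+// multiplies corresponding bytes in the field. A typical IR call (sketch,
+// value names hypothetical):
+//   %r = call <16 x i8> @llvm.x86.vgf2p8mulb.128(<16 x i8> %a, <16 x i8> %b)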
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_vgf2p8affineinvqb_128 :
+ GCCBuiltin<"__builtin_ia32_vgf2p8affineinvqb_v16qi">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_vgf2p8affineinvqb_256 :
+ GCCBuiltin<"__builtin_ia32_vgf2p8affineinvqb_v32qi">,
+ Intrinsic<[llvm_v32i8_ty],
+ [llvm_v32i8_ty, llvm_v32i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_vgf2p8affineinvqb_512 :
+ GCCBuiltin<"__builtin_ia32_vgf2p8affineinvqb_v64qi">,
+ Intrinsic<[llvm_v64i8_ty],
+ [llvm_v64i8_ty, llvm_v64i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_vgf2p8affineqb_128 :
+ GCCBuiltin<"__builtin_ia32_vgf2p8affineqb_v16qi">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_vgf2p8affineqb_256 :
+ GCCBuiltin<"__builtin_ia32_vgf2p8affineqb_v32qi">,
+ Intrinsic<[llvm_v32i8_ty],
+ [llvm_v32i8_ty, llvm_v32i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_vgf2p8affineqb_512 :
+ GCCBuiltin<"__builtin_ia32_vgf2p8affineqb_v64qi">,
+ Intrinsic<[llvm_v64i8_ty],
+ [llvm_v64i8_ty, llvm_v64i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_vgf2p8mulb_128 :
+ GCCBuiltin<"__builtin_ia32_vgf2p8mulb_v16qi">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_x86_vgf2p8mulb_256 :
+ GCCBuiltin<"__builtin_ia32_vgf2p8mulb_v32qi">,
+ Intrinsic<[llvm_v32i8_ty],
+ [llvm_v32i8_ty, llvm_v32i8_ty],
+ [IntrNoMem]>;
+ def int_x86_vgf2p8mulb_512 :
+ GCCBuiltin<"__builtin_ia32_vgf2p8mulb_v64qi">,
+ Intrinsic<[llvm_v64i8_ty],
+ [llvm_v64i8_ty, llvm_v64i8_ty],
+ [IntrNoMem]>;
+}
+
+// Vector blend
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx_blendv_pd_256 : GCCBuiltin<"__builtin_ia32_blendvpd256">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty,
+ llvm_v4f64_ty, llvm_v4f64_ty], [IntrNoMem]>;
+ def int_x86_avx_blendv_ps_256 : GCCBuiltin<"__builtin_ia32_blendvps256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty,
+ llvm_v8f32_ty, llvm_v8f32_ty], [IntrNoMem]>;
+}
+
+// Vector dot product
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx_dp_ps_256 : GCCBuiltin<"__builtin_ia32_dpps256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty,
+ llvm_v8f32_ty, llvm_i8_ty], [IntrNoMem, Commutative]>;
+}
+
+// Vector compare
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx_cmp_pd_256 :
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty,
+ llvm_v4f64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx_cmp_ps_256 :
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty,
+ llvm_v8f32_ty, llvm_i8_ty], [IntrNoMem]>;
+}
+
+// Vector convert
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx_cvt_pd2_ps_256 : GCCBuiltin<"__builtin_ia32_cvtpd2ps256">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f64_ty], [IntrNoMem]>;
+ def int_x86_avx_cvt_ps2dq_256 : GCCBuiltin<"__builtin_ia32_cvtps2dq256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8f32_ty], [IntrNoMem]>;
+ def int_x86_avx_cvtt_pd2dq_256 : GCCBuiltin<"__builtin_ia32_cvttpd2dq256">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f64_ty], [IntrNoMem]>;
+ def int_x86_avx_cvt_pd2dq_256 : GCCBuiltin<"__builtin_ia32_cvtpd2dq256">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f64_ty], [IntrNoMem]>;
+ def int_x86_avx_cvtt_ps2dq_256 : GCCBuiltin<"__builtin_ia32_cvttps2dq256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8f32_ty], [IntrNoMem]>;
+}
+
+// Vector bit test
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx_vtestz_pd : GCCBuiltin<"__builtin_ia32_vtestzpd">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_avx_vtestc_pd : GCCBuiltin<"__builtin_ia32_vtestcpd">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_avx_vtestnzc_pd : GCCBuiltin<"__builtin_ia32_vtestnzcpd">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_avx_vtestz_ps : GCCBuiltin<"__builtin_ia32_vtestzps">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_avx_vtestc_ps : GCCBuiltin<"__builtin_ia32_vtestcps">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_avx_vtestnzc_ps : GCCBuiltin<"__builtin_ia32_vtestnzcps">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_avx_vtestz_pd_256 : GCCBuiltin<"__builtin_ia32_vtestzpd256">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f64_ty,
+ llvm_v4f64_ty], [IntrNoMem]>;
+ def int_x86_avx_vtestc_pd_256 : GCCBuiltin<"__builtin_ia32_vtestcpd256">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f64_ty,
+ llvm_v4f64_ty], [IntrNoMem]>;
+ def int_x86_avx_vtestnzc_pd_256 : GCCBuiltin<"__builtin_ia32_vtestnzcpd256">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f64_ty,
+ llvm_v4f64_ty], [IntrNoMem]>;
+ def int_x86_avx_vtestz_ps_256 : GCCBuiltin<"__builtin_ia32_vtestzps256">,
+ Intrinsic<[llvm_i32_ty], [llvm_v8f32_ty,
+ llvm_v8f32_ty], [IntrNoMem]>;
+ def int_x86_avx_vtestc_ps_256 : GCCBuiltin<"__builtin_ia32_vtestcps256">,
+ Intrinsic<[llvm_i32_ty], [llvm_v8f32_ty,
+ llvm_v8f32_ty], [IntrNoMem]>;
+ def int_x86_avx_vtestnzc_ps_256 : GCCBuiltin<"__builtin_ia32_vtestnzcps256">,
+ Intrinsic<[llvm_i32_ty], [llvm_v8f32_ty,
+ llvm_v8f32_ty], [IntrNoMem]>;
+ def int_x86_avx_ptestz_256 : GCCBuiltin<"__builtin_ia32_ptestz256">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i64_ty,
+ llvm_v4i64_ty], [IntrNoMem]>;
+ def int_x86_avx_ptestc_256 : GCCBuiltin<"__builtin_ia32_ptestc256">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i64_ty,
+ llvm_v4i64_ty], [IntrNoMem]>;
+ def int_x86_avx_ptestnzc_256 : GCCBuiltin<"__builtin_ia32_ptestnzc256">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i64_ty,
+ llvm_v4i64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_fpclass_pd_128 :
+ Intrinsic<[llvm_v2i1_ty], [llvm_v2f64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_fpclass_pd_256 :
+ Intrinsic<[llvm_v4i1_ty], [llvm_v4f64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_fpclass_pd_512 :
+ Intrinsic<[llvm_v8i1_ty], [llvm_v8f64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_fpclass_ps_128 :
+ Intrinsic<[llvm_v4i1_ty], [llvm_v4f32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_fpclass_ps_256 :
+ Intrinsic<[llvm_v8i1_ty], [llvm_v8f32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_fpclass_ps_512 :
+ Intrinsic<[llvm_v16i1_ty], [llvm_v16f32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_fpclass_sd :
+ GCCBuiltin<"__builtin_ia32_fpclasssd_mask">,
+ Intrinsic<[llvm_i8_ty], [llvm_v2f64_ty, llvm_i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_fpclass_ss :
+ GCCBuiltin<"__builtin_ia32_fpclassss_mask">,
+ Intrinsic<[llvm_i8_ty], [llvm_v4f32_ty, llvm_i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+}
+
+// Vector extract sign mask
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx_movmsk_pd_256 : GCCBuiltin<"__builtin_ia32_movmskpd256">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f64_ty], [IntrNoMem]>;
+ def int_x86_avx_movmsk_ps_256 : GCCBuiltin<"__builtin_ia32_movmskps256">,
+ Intrinsic<[llvm_i32_ty], [llvm_v8f32_ty], [IntrNoMem]>;
+}
+
+// Vector zero
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx_vzeroall : GCCBuiltin<"__builtin_ia32_vzeroall">,
+ Intrinsic<[], [], []>;
+ def int_x86_avx_vzeroupper : GCCBuiltin<"__builtin_ia32_vzeroupper">,
+ Intrinsic<[], [], []>;
+}
+
+// SIMD load ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx_ldu_dq_256 : GCCBuiltin<"__builtin_ia32_lddqu256">,
+ Intrinsic<[llvm_v32i8_ty], [llvm_ptr_ty], [IntrReadMem]>;
+}
+
+// Conditional load ops
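+// Illustrative note: these masked loads (and the masked stores further down)
+// test only the sign bit of each mask element; lanes whose mask bit is clear
+// are zeroed on load and left untouched in memory on store. A typical IR
+// call (sketch, value names hypothetical):
+//   %v = call <4 x float> @llvm.x86.avx.maskload.ps(i8* %p, <4 x i32> %mask)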
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx_maskload_pd : GCCBuiltin<"__builtin_ia32_maskloadpd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_ptr_ty, llvm_v2i64_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx_maskload_ps : GCCBuiltin<"__builtin_ia32_maskloadps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_ptr_ty, llvm_v4i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx_maskload_pd_256 : GCCBuiltin<"__builtin_ia32_maskloadpd256">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_ptr_ty, llvm_v4i64_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx_maskload_ps_256 : GCCBuiltin<"__builtin_ia32_maskloadps256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_ptr_ty, llvm_v8i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+}
+
+// Conditional store ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx_maskstore_pd : GCCBuiltin<"__builtin_ia32_maskstorepd">,
+ Intrinsic<[], [llvm_ptr_ty,
+ llvm_v2i64_ty, llvm_v2f64_ty], [IntrArgMemOnly]>;
+ def int_x86_avx_maskstore_ps : GCCBuiltin<"__builtin_ia32_maskstoreps">,
+ Intrinsic<[], [llvm_ptr_ty,
+ llvm_v4i32_ty, llvm_v4f32_ty], [IntrArgMemOnly]>;
+ def int_x86_avx_maskstore_pd_256 :
+ GCCBuiltin<"__builtin_ia32_maskstorepd256">,
+ Intrinsic<[], [llvm_ptr_ty,
+ llvm_v4i64_ty, llvm_v4f64_ty], [IntrArgMemOnly]>;
+ def int_x86_avx_maskstore_ps_256 :
+ GCCBuiltin<"__builtin_ia32_maskstoreps256">,
+ Intrinsic<[], [llvm_ptr_ty,
+ llvm_v8i32_ty, llvm_v8f32_ty], [IntrArgMemOnly]>;
+}
+
+// BITALG bits shuffle
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_mask_vpshufbitqmb_128 :
+ GCCBuiltin<"__builtin_ia32_vpshufbitqmb128_mask">,
+ Intrinsic<[llvm_i16_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_vpshufbitqmb_256 :
+ GCCBuiltin<"__builtin_ia32_vpshufbitqmb256_mask">,
+ Intrinsic<[llvm_i32_ty],
+ [llvm_v32i8_ty, llvm_v32i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_vpshufbitqmb_512 :
+ GCCBuiltin<"__builtin_ia32_vpshufbitqmb512_mask">,
+ Intrinsic<[llvm_i64_ty],
+ [llvm_v64i8_ty, llvm_v64i8_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// AVX2
+
+// Integer arithmetic ops.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx2_padds_b : GCCBuiltin<"__builtin_ia32_paddsb256">,
+ Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty,
+ llvm_v32i8_ty], [IntrNoMem, Commutative]>;
+ def int_x86_avx2_padds_w : GCCBuiltin<"__builtin_ia32_paddsw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem, Commutative]>;
+ def int_x86_avx2_paddus_b : GCCBuiltin<"__builtin_ia32_paddusb256">,
+ Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty,
+ llvm_v32i8_ty], [IntrNoMem, Commutative]>;
+ def int_x86_avx2_paddus_w : GCCBuiltin<"__builtin_ia32_paddusw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem, Commutative]>;
+ def int_x86_avx2_psubs_b : GCCBuiltin<"__builtin_ia32_psubsb256">,
+ Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty,
+ llvm_v32i8_ty], [IntrNoMem]>;
+ def int_x86_avx2_psubs_w : GCCBuiltin<"__builtin_ia32_psubsw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem]>;
+ def int_x86_avx2_psubus_b : GCCBuiltin<"__builtin_ia32_psubusb256">,
+ Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty,
+ llvm_v32i8_ty], [IntrNoMem]>;
+ def int_x86_avx2_psubus_w : GCCBuiltin<"__builtin_ia32_psubusw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem]>;
+ def int_x86_avx2_pmulhu_w : GCCBuiltin<"__builtin_ia32_pmulhuw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem, Commutative]>;
+ def int_x86_avx2_pmulh_w : GCCBuiltin<"__builtin_ia32_pmulhw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem, Commutative]>;
+ def int_x86_avx2_pmadd_wd : GCCBuiltin<"__builtin_ia32_pmaddwd256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem, Commutative]>;
+ def int_x86_avx2_psad_bw : GCCBuiltin<"__builtin_ia32_psadbw256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v32i8_ty,
+ llvm_v32i8_ty], [IntrNoMem, Commutative]>;
+}
+
+// Integer shift ops.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx2_psll_w : GCCBuiltin<"__builtin_ia32_psllw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_avx2_psll_d : GCCBuiltin<"__builtin_ia32_pslld256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_avx2_psll_q : GCCBuiltin<"__builtin_ia32_psllq256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty,
+ llvm_v2i64_ty], [IntrNoMem]>;
+ def int_x86_avx2_psrl_w : GCCBuiltin<"__builtin_ia32_psrlw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_avx2_psrl_d : GCCBuiltin<"__builtin_ia32_psrld256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_avx2_psrl_q : GCCBuiltin<"__builtin_ia32_psrlq256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty,
+ llvm_v2i64_ty], [IntrNoMem]>;
+ def int_x86_avx2_psra_w : GCCBuiltin<"__builtin_ia32_psraw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_avx2_psra_d : GCCBuiltin<"__builtin_ia32_psrad256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx2_pslli_w : GCCBuiltin<"__builtin_ia32_psllwi256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx2_pslli_d : GCCBuiltin<"__builtin_ia32_pslldi256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx2_pslli_q : GCCBuiltin<"__builtin_ia32_psllqi256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx2_psrli_w : GCCBuiltin<"__builtin_ia32_psrlwi256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx2_psrli_d : GCCBuiltin<"__builtin_ia32_psrldi256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx2_psrli_q : GCCBuiltin<"__builtin_ia32_psrlqi256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx2_psrai_w : GCCBuiltin<"__builtin_ia32_psrawi256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx2_psrai_d : GCCBuiltin<"__builtin_ia32_psradi256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_psra_q_128 : GCCBuiltin<"__builtin_ia32_psraq128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty,
+ llvm_v2i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_psra_q_256 : GCCBuiltin<"__builtin_ia32_psraq256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty,
+ llvm_v2i64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_psrai_q_128 : GCCBuiltin<"__builtin_ia32_psraqi128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_psrai_q_256 : GCCBuiltin<"__builtin_ia32_psraqi256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_psll_w_512 : GCCBuiltin<"__builtin_ia32_psllw512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_psll_d_512 : GCCBuiltin<"__builtin_ia32_pslld512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_psll_q_512 : GCCBuiltin<"__builtin_ia32_psllq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_v2i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_psrl_w_512 : GCCBuiltin<"__builtin_ia32_psrlw512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_psrl_d_512 : GCCBuiltin<"__builtin_ia32_psrld512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_psrl_q_512 : GCCBuiltin<"__builtin_ia32_psrlq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_v2i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_psra_w_512 : GCCBuiltin<"__builtin_ia32_psraw512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_psra_d_512 : GCCBuiltin<"__builtin_ia32_psrad512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_psra_q_512 : GCCBuiltin<"__builtin_ia32_psraq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_v2i64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_pslli_w_512 : GCCBuiltin<"__builtin_ia32_psllwi512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_pslli_d_512 : GCCBuiltin<"__builtin_ia32_pslldi512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_pslli_q_512 : GCCBuiltin<"__builtin_ia32_psllqi512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_psrli_w_512 : GCCBuiltin<"__builtin_ia32_psrlwi512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_psrli_d_512 : GCCBuiltin<"__builtin_ia32_psrldi512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_psrli_q_512 : GCCBuiltin<"__builtin_ia32_psrlqi512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_psrai_w_512 : GCCBuiltin<"__builtin_ia32_psrawi512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_psrai_d_512 : GCCBuiltin<"__builtin_ia32_psradi512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_psrai_q_512 : GCCBuiltin<"__builtin_ia32_psraqi512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+
+  def int_x86_avx512_mask_pmultishift_qb_128 :
+ GCCBuiltin<"__builtin_ia32_vpmultishiftqb128_mask">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty,
+ llvm_v16i8_ty, llvm_v16i8_ty, llvm_i16_ty], [IntrNoMem]>;
+  def int_x86_avx512_mask_pmultishift_qb_256 :
+ GCCBuiltin<"__builtin_ia32_vpmultishiftqb256_mask">,
+ Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty,
+ llvm_v32i8_ty, llvm_v32i8_ty, llvm_i32_ty], [IntrNoMem]>;
+  def int_x86_avx512_mask_pmultishift_qb_512 :
+ GCCBuiltin<"__builtin_ia32_vpmultishiftqb512_mask">,
+ Intrinsic<[llvm_v64i8_ty], [llvm_v64i8_ty,
+ llvm_v64i8_ty, llvm_v64i8_ty, llvm_i64_ty], [IntrNoMem]>;
+}
+
+// Pack ops.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx2_packsswb : GCCBuiltin<"__builtin_ia32_packsswb256">,
+ Intrinsic<[llvm_v32i8_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem]>;
+ def int_x86_avx2_packssdw : GCCBuiltin<"__builtin_ia32_packssdw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v8i32_ty,
+ llvm_v8i32_ty], [IntrNoMem]>;
+ def int_x86_avx2_packuswb : GCCBuiltin<"__builtin_ia32_packuswb256">,
+ Intrinsic<[llvm_v32i8_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem]>;
+ def int_x86_avx2_packusdw : GCCBuiltin<"__builtin_ia32_packusdw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v8i32_ty,
+ llvm_v8i32_ty], [IntrNoMem]>;
+}
+
+// Horizontal arithmetic ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx2_phadd_w : GCCBuiltin<"__builtin_ia32_phaddw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem]>;
+ def int_x86_avx2_phadd_d : GCCBuiltin<"__builtin_ia32_phaddd256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty,
+ llvm_v8i32_ty], [IntrNoMem]>;
+ def int_x86_avx2_phadd_sw : GCCBuiltin<"__builtin_ia32_phaddsw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem]>;
+ def int_x86_avx2_phsub_w : GCCBuiltin<"__builtin_ia32_phsubw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem]>;
+ def int_x86_avx2_phsub_d : GCCBuiltin<"__builtin_ia32_phsubd256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty,
+ llvm_v8i32_ty], [IntrNoMem]>;
+ def int_x86_avx2_phsub_sw : GCCBuiltin<"__builtin_ia32_phsubsw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem]>;
+ def int_x86_avx2_pmadd_ub_sw : GCCBuiltin<"__builtin_ia32_pmaddubsw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v32i8_ty,
+ llvm_v32i8_ty], [IntrNoMem]>;
+}
+
+// Sign ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx2_psign_b : GCCBuiltin<"__builtin_ia32_psignb256">,
+ Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty,
+ llvm_v32i8_ty], [IntrNoMem]>;
+ def int_x86_avx2_psign_w : GCCBuiltin<"__builtin_ia32_psignw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem]>;
+ def int_x86_avx2_psign_d : GCCBuiltin<"__builtin_ia32_psignd256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty,
+ llvm_v8i32_ty], [IntrNoMem]>;
+}
+
+// Packed multiply high with round and scale
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx2_pmul_hr_sw : GCCBuiltin<"__builtin_ia32_pmulhrsw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem, Commutative]>;
+ def int_x86_avx512_pmul_hr_sw_512 : GCCBuiltin<"__builtin_ia32_pmulhrsw512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty,
+ llvm_v32i16_ty], [IntrNoMem, Commutative]>;
+}
+
+// Vector blend
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx2_pblendvb : GCCBuiltin<"__builtin_ia32_pblendvb256">,
+ Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty, llvm_v32i8_ty,
+ llvm_v32i8_ty], [IntrNoMem]>;
+}
+
+// Vector permutation
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx2_permd : GCCBuiltin<"__builtin_ia32_permvarsi256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_v8i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx2_permps : GCCBuiltin<"__builtin_ia32_permvarsf256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty, llvm_v8i32_ty],
+ [IntrNoMem]>;
+}
+
+// Conditional load ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx2_maskload_d : GCCBuiltin<"__builtin_ia32_maskloadd">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_ptr_ty, llvm_v4i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_maskload_q : GCCBuiltin<"__builtin_ia32_maskloadq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_ptr_ty, llvm_v2i64_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_maskload_d_256 : GCCBuiltin<"__builtin_ia32_maskloadd256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_ptr_ty, llvm_v8i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_maskload_q_256 : GCCBuiltin<"__builtin_ia32_maskloadq256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_ptr_ty, llvm_v4i64_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+}
+
+// Conditional store ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx2_maskstore_d : GCCBuiltin<"__builtin_ia32_maskstored">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx2_maskstore_q : GCCBuiltin<"__builtin_ia32_maskstoreq">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx2_maskstore_d_256 :
+ GCCBuiltin<"__builtin_ia32_maskstored256">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_v8i32_ty, llvm_v8i32_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx2_maskstore_q_256 :
+ GCCBuiltin<"__builtin_ia32_maskstoreq256">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_v4i64_ty, llvm_v4i64_ty],
+ [IntrArgMemOnly]>;
+}
+
+// Variable bit shift ops
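+// Illustrative note: these shifts take a per-element shift count rather than
+// a single scalar count; for psllv/psrlv a count of the element width or more
+// yields zero, while psrav fills with the sign bit. The prol/pror rotates at
+// the end of this block take an immediate count and the prolv/prorv forms a
+// per-element count. A typical IR call (sketch, value names hypothetical):
+//   %r = call <4 x i32> @llvm.x86.avx2.psllv.d(<4 x i32> %a, <4 x i32> %cnt)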
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx2_psllv_d : GCCBuiltin<"__builtin_ia32_psllv4si">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx2_psllv_d_256 : GCCBuiltin<"__builtin_ia32_psllv8si">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_v8i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx2_psllv_q : GCCBuiltin<"__builtin_ia32_psllv2di">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_x86_avx2_psllv_q_256 : GCCBuiltin<"__builtin_ia32_psllv4di">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_psllv_d_512 : GCCBuiltin<"__builtin_ia32_psllv16si">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_psllv_q_512 : GCCBuiltin<"__builtin_ia32_psllv8di">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx2_psrlv_d : GCCBuiltin<"__builtin_ia32_psrlv4si">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx2_psrlv_d_256 : GCCBuiltin<"__builtin_ia32_psrlv8si">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_v8i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx2_psrlv_q : GCCBuiltin<"__builtin_ia32_psrlv2di">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_x86_avx2_psrlv_q_256 : GCCBuiltin<"__builtin_ia32_psrlv4di">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_psrlv_d_512 : GCCBuiltin<"__builtin_ia32_psrlv16si">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_psrlv_q_512 : GCCBuiltin<"__builtin_ia32_psrlv8di">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx2_psrav_d : GCCBuiltin<"__builtin_ia32_psrav4si">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx2_psrav_d_256 : GCCBuiltin<"__builtin_ia32_psrav8si">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_v8i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_psrav_d_512 : GCCBuiltin<"__builtin_ia32_psrav16si">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_psrav_q_128 : GCCBuiltin<"__builtin_ia32_psravq128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_psrav_q_256 : GCCBuiltin<"__builtin_ia32_psravq256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_psrav_q_512 : GCCBuiltin<"__builtin_ia32_psrav8di">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_psllv_w_128 : GCCBuiltin<"__builtin_ia32_psllv8hi">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_psllv_w_256 : GCCBuiltin<"__builtin_ia32_psllv16hi">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty, llvm_v16i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_psllv_w_512 : GCCBuiltin<"__builtin_ia32_psllv32hi">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty, llvm_v32i16_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_psrlv_w_128 : GCCBuiltin<"__builtin_ia32_psrlv8hi">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_psrlv_w_256 : GCCBuiltin<"__builtin_ia32_psrlv16hi">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty, llvm_v16i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_psrlv_w_512 : GCCBuiltin<"__builtin_ia32_psrlv32hi">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty, llvm_v32i16_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_psrav_w_128 : GCCBuiltin<"__builtin_ia32_psrav8hi">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_psrav_w_256 : GCCBuiltin<"__builtin_ia32_psrav16hi">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty, llvm_v16i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_psrav_w_512 : GCCBuiltin<"__builtin_ia32_psrav32hi">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty, llvm_v32i16_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_prorv_d_128 : GCCBuiltin<"__builtin_ia32_prorvd128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_prorv_d_256 : GCCBuiltin<"__builtin_ia32_prorvd256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty,
+ llvm_v8i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_prorv_d_512 : GCCBuiltin<"__builtin_ia32_prorvd512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_v16i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_prorv_q_128 : GCCBuiltin<"__builtin_ia32_prorvq128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty,
+ llvm_v2i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_prorv_q_256 : GCCBuiltin<"__builtin_ia32_prorvq256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty,
+ llvm_v4i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_prorv_q_512 : GCCBuiltin<"__builtin_ia32_prorvq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_v8i64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_prol_d_128 : GCCBuiltin<"__builtin_ia32_prold128">,
+          Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_prol_d_256 : GCCBuiltin<"__builtin_ia32_prold256">,
+          Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_prol_d_512 : GCCBuiltin<"__builtin_ia32_prold512">,
+          Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_prol_q_128 : GCCBuiltin<"__builtin_ia32_prolq128">,
+          Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_prol_q_256 : GCCBuiltin<"__builtin_ia32_prolq256">,
+          Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_prol_q_512 : GCCBuiltin<"__builtin_ia32_prolq512">,
+          Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_prolv_d_128 : GCCBuiltin<"__builtin_ia32_prolvd128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_prolv_d_256 : GCCBuiltin<"__builtin_ia32_prolvd256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty,
+ llvm_v8i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_prolv_d_512 : GCCBuiltin<"__builtin_ia32_prolvd512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_v16i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_prolv_q_128 : GCCBuiltin<"__builtin_ia32_prolvq128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty,
+ llvm_v2i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_prolv_q_256 : GCCBuiltin<"__builtin_ia32_prolvq256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty,
+ llvm_v4i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_prolv_q_512 : GCCBuiltin<"__builtin_ia32_prolvq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_v8i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_pror_d_128 : GCCBuiltin<"__builtin_ia32_prord128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_pror_d_256 : GCCBuiltin<"__builtin_ia32_prord256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_pror_d_512 : GCCBuiltin<"__builtin_ia32_prord512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_pror_q_128 : GCCBuiltin<"__builtin_ia32_prorq128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_pror_q_256 : GCCBuiltin<"__builtin_ia32_prorq256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_pror_q_512 : GCCBuiltin<"__builtin_ia32_prorq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+
+}
+
+// Gather ops
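+// Illustrative note: each gather takes (pass-through, base pointer, index
+// vector, mask, scale); the scale must be an immediate 1, 2, 4 or 8, and
+// lanes whose mask element has the sign bit clear keep the pass-through
+// value instead of being loaded. A typical IR call (sketch, value names
+// hypothetical):
+//   %r = call <2 x double> @llvm.x86.avx2.gather.d.pd(<2 x double> %src,
+//            i8* %base, <4 x i32> %idx, <2 x double> %mask, i8 8)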
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx2_gather_d_pd : GCCBuiltin<"__builtin_ia32_gatherd_pd">,
+ Intrinsic<[llvm_v2f64_ty],
+ [llvm_v2f64_ty, llvm_ptr_ty, llvm_v4i32_ty, llvm_v2f64_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_gather_d_pd_256 : GCCBuiltin<"__builtin_ia32_gatherd_pd256">,
+ Intrinsic<[llvm_v4f64_ty],
+ [llvm_v4f64_ty, llvm_ptr_ty, llvm_v4i32_ty, llvm_v4f64_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_gather_q_pd : GCCBuiltin<"__builtin_ia32_gatherq_pd">,
+ Intrinsic<[llvm_v2f64_ty],
+ [llvm_v2f64_ty, llvm_ptr_ty, llvm_v2i64_ty, llvm_v2f64_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_gather_q_pd_256 : GCCBuiltin<"__builtin_ia32_gatherq_pd256">,
+ Intrinsic<[llvm_v4f64_ty],
+ [llvm_v4f64_ty, llvm_ptr_ty, llvm_v4i64_ty, llvm_v4f64_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_gather_d_ps : GCCBuiltin<"__builtin_ia32_gatherd_ps">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_ptr_ty, llvm_v4i32_ty, llvm_v4f32_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_gather_d_ps_256 : GCCBuiltin<"__builtin_ia32_gatherd_ps256">,
+ Intrinsic<[llvm_v8f32_ty],
+ [llvm_v8f32_ty, llvm_ptr_ty, llvm_v8i32_ty, llvm_v8f32_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_gather_q_ps : GCCBuiltin<"__builtin_ia32_gatherq_ps">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_ptr_ty, llvm_v2i64_ty, llvm_v4f32_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_gather_q_ps_256 : GCCBuiltin<"__builtin_ia32_gatherq_ps256">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_ptr_ty, llvm_v4i64_ty, llvm_v4f32_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx2_gather_d_q : GCCBuiltin<"__builtin_ia32_gatherd_q">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2i64_ty, llvm_ptr_ty, llvm_v4i32_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_gather_d_q_256 : GCCBuiltin<"__builtin_ia32_gatherd_q256">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4i64_ty, llvm_ptr_ty, llvm_v4i32_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_gather_q_q : GCCBuiltin<"__builtin_ia32_gatherq_q">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2i64_ty, llvm_ptr_ty, llvm_v2i64_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_gather_q_q_256 : GCCBuiltin<"__builtin_ia32_gatherq_q256">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4i64_ty, llvm_ptr_ty, llvm_v4i64_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_gather_d_d : GCCBuiltin<"__builtin_ia32_gatherd_d">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_ptr_ty, llvm_v4i32_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_gather_d_d_256 : GCCBuiltin<"__builtin_ia32_gatherd_d256">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8i32_ty, llvm_ptr_ty, llvm_v8i32_ty, llvm_v8i32_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_gather_q_d : GCCBuiltin<"__builtin_ia32_gatherq_d">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_ptr_ty, llvm_v2i64_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx2_gather_q_d_256 : GCCBuiltin<"__builtin_ia32_gatherq_d256">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_ptr_ty, llvm_v4i64_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+}
+
+// Misc.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx2_pmovmskb : GCCBuiltin<"__builtin_ia32_pmovmskb256">,
+ Intrinsic<[llvm_i32_ty], [llvm_v32i8_ty], [IntrNoMem]>;
+ def int_x86_avx2_pshuf_b : GCCBuiltin<"__builtin_ia32_pshufb256">,
+ Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty,
+ llvm_v32i8_ty], [IntrNoMem]>;
+ def int_x86_avx2_mpsadbw : GCCBuiltin<"__builtin_ia32_mpsadbw256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v32i8_ty, llvm_v32i8_ty,
+ llvm_i8_ty], [IntrNoMem, Commutative]>;
+}
+
+//===----------------------------------------------------------------------===//
+// FMA3 and FMA4
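+// Illustrative note: the trailing i32 operand on the 512-bit and scalar
+// vfmadd/vfmaddsub intrinsics below is a rounding-control immediate (4
+// selects the current MXCSR rounding mode). The vpmadd52l/h intrinsics
+// multiply the low 52 bits of each unsigned 64-bit lane and add the low or
+// high half of the 104-bit product to the accumulator. A typical IR call
+// (sketch, value names hypothetical):
+//   %r = call <8 x double> @llvm.x86.avx512.vfmadd.pd.512(<8 x double> %a,
+//            <8 x double> %b, <8 x double> %c, i32 4)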
+
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_vfmadd_pd_512 :
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8f64_ty, llvm_v8f64_ty, llvm_v8f64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_vfmadd_ps_512 :
+ Intrinsic<[llvm_v16f32_ty],
+ [llvm_v16f32_ty, llvm_v16f32_ty, llvm_v16f32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ // TODO: Can we use 2 vfmadds+shufflevector?
+ def int_x86_avx512_vfmaddsub_pd_512 :
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8f64_ty, llvm_v8f64_ty, llvm_v8f64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_vfmaddsub_ps_512 :
+ Intrinsic<[llvm_v16f32_ty],
+ [llvm_v16f32_ty, llvm_v16f32_ty, llvm_v16f32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_vfmadd_f64 :
+ Intrinsic<[llvm_double_ty],
+ [llvm_double_ty, llvm_double_ty, llvm_double_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_vfmadd_f32 :
+ Intrinsic<[llvm_float_ty],
+ [llvm_float_ty, llvm_float_ty, llvm_float_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_vpmadd52h_uq_128 :
+ GCCBuiltin<"__builtin_ia32_vpmadd52huq128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty,
+ llvm_v2i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpmadd52l_uq_128 :
+ GCCBuiltin<"__builtin_ia32_vpmadd52luq128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty,
+ llvm_v2i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpmadd52h_uq_256 :
+ GCCBuiltin<"__builtin_ia32_vpmadd52huq256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty,
+ llvm_v4i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpmadd52l_uq_256 :
+ GCCBuiltin<"__builtin_ia32_vpmadd52luq256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty,
+ llvm_v4i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpmadd52h_uq_512 :
+ GCCBuiltin<"__builtin_ia32_vpmadd52huq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty,
+ llvm_v8i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpmadd52l_uq_512 :
+ GCCBuiltin<"__builtin_ia32_vpmadd52luq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty,
+ llvm_v8i64_ty], [IntrNoMem]>;
+}
+
+// VNNI
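+// Illustrative note: the operand order is (accumulator, a, b). vpdpbusd
+// multiplies unsigned bytes of a with signed bytes of b, sums each group of
+// four adjacent products and adds the sum to the matching 32-bit lane of the
+// accumulator; vpdpwssd does the same for pairs of signed 16-bit elements,
+// and the "s"-suffixed forms saturate the accumulation. A typical IR call
+// (sketch, value names hypothetical):
+//   %r = call <16 x i32> @llvm.x86.avx512.vpdpbusd.512(<16 x i32> %acc,
+//            <16 x i32> %a, <16 x i32> %b)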
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_vpdpbusd_128 :
+ GCCBuiltin<"__builtin_ia32_vpdpbusd128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpdpbusd_256 :
+ GCCBuiltin<"__builtin_ia32_vpdpbusd256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_v8i32_ty,
+ llvm_v8i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpdpbusd_512 :
+ GCCBuiltin<"__builtin_ia32_vpdpbusd512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty,
+ llvm_v16i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpdpbusds_128 :
+ GCCBuiltin<"__builtin_ia32_vpdpbusds128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpdpbusds_256 :
+ GCCBuiltin<"__builtin_ia32_vpdpbusds256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_v8i32_ty,
+ llvm_v8i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpdpbusds_512 :
+ GCCBuiltin<"__builtin_ia32_vpdpbusds512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty,
+ llvm_v16i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpdpwssd_128 :
+ GCCBuiltin<"__builtin_ia32_vpdpwssd128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpdpwssd_256 :
+ GCCBuiltin<"__builtin_ia32_vpdpwssd256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_v8i32_ty,
+ llvm_v8i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpdpwssd_512 :
+ GCCBuiltin<"__builtin_ia32_vpdpwssd512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty,
+ llvm_v16i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpdpwssds_128 :
+ GCCBuiltin<"__builtin_ia32_vpdpwssds128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty,
+ llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpdpwssds_256 :
+ GCCBuiltin<"__builtin_ia32_vpdpwssds256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_v8i32_ty,
+ llvm_v8i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpdpwssds_512 :
+ GCCBuiltin<"__builtin_ia32_vpdpwssds512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty,
+ llvm_v16i32_ty], [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// XOP
+
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_xop_vpermil2pd : GCCBuiltin<"__builtin_ia32_vpermil2pd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_v2i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_xop_vpermil2pd_256 :
+ GCCBuiltin<"__builtin_ia32_vpermil2pd256">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty, llvm_v4f64_ty,
+ llvm_v4i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_xop_vpermil2ps : GCCBuiltin<"__builtin_ia32_vpermil2ps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_xop_vpermil2ps_256 :
+ GCCBuiltin<"__builtin_ia32_vpermil2ps256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty, llvm_v8f32_ty,
+ llvm_v8i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_xop_vfrcz_pd : GCCBuiltin<"__builtin_ia32_vfrczpd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_xop_vfrcz_ps : GCCBuiltin<"__builtin_ia32_vfrczps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_xop_vfrcz_sd : GCCBuiltin<"__builtin_ia32_vfrczsd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_xop_vfrcz_ss : GCCBuiltin<"__builtin_ia32_vfrczss">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_xop_vfrcz_pd_256 : GCCBuiltin<"__builtin_ia32_vfrczpd256">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty], [IntrNoMem]>;
+ def int_x86_xop_vfrcz_ps_256 : GCCBuiltin<"__builtin_ia32_vfrczps256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty], [IntrNoMem]>;
+
+ def int_x86_xop_vpcomb : GCCBuiltin<"__builtin_ia32_vpcomb">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_xop_vpcomw : GCCBuiltin<"__builtin_ia32_vpcomw">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_xop_vpcomd : GCCBuiltin<"__builtin_ia32_vpcomd">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_xop_vpcomq : GCCBuiltin<"__builtin_ia32_vpcomq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_xop_vpcomub : GCCBuiltin<"__builtin_ia32_vpcomub">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_xop_vpcomuw : GCCBuiltin<"__builtin_ia32_vpcomuw">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_xop_vpcomud : GCCBuiltin<"__builtin_ia32_vpcomud">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_xop_vpcomuq : GCCBuiltin<"__builtin_ia32_vpcomuq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+
+ def int_x86_xop_vphaddbd :
+ GCCBuiltin<"__builtin_ia32_vphaddbd">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+ def int_x86_xop_vphaddbq :
+ GCCBuiltin<"__builtin_ia32_vphaddbq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+ def int_x86_xop_vphaddbw :
+ GCCBuiltin<"__builtin_ia32_vphaddbw">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+ def int_x86_xop_vphadddq :
+ GCCBuiltin<"__builtin_ia32_vphadddq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_xop_vphaddubd :
+ GCCBuiltin<"__builtin_ia32_vphaddubd">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+ def int_x86_xop_vphaddubq :
+ GCCBuiltin<"__builtin_ia32_vphaddubq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+ def int_x86_xop_vphaddubw :
+ GCCBuiltin<"__builtin_ia32_vphaddubw">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+ def int_x86_xop_vphaddudq :
+ GCCBuiltin<"__builtin_ia32_vphaddudq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_xop_vphadduwd :
+ GCCBuiltin<"__builtin_ia32_vphadduwd">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_xop_vphadduwq :
+ GCCBuiltin<"__builtin_ia32_vphadduwq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_xop_vphaddwd :
+ GCCBuiltin<"__builtin_ia32_vphaddwd">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_xop_vphaddwq :
+ GCCBuiltin<"__builtin_ia32_vphaddwq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_xop_vphsubbw :
+ GCCBuiltin<"__builtin_ia32_vphsubbw">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+ def int_x86_xop_vphsubdq :
+ GCCBuiltin<"__builtin_ia32_vphsubdq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_xop_vphsubwd :
+ GCCBuiltin<"__builtin_ia32_vphsubwd">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_xop_vpmacsdd :
+ GCCBuiltin<"__builtin_ia32_vpmacsdd">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_xop_vpmacsdqh :
+ GCCBuiltin<"__builtin_ia32_vpmacsdqh">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v2i64_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_xop_vpmacsdql :
+ GCCBuiltin<"__builtin_ia32_vpmacsdql">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v2i64_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_xop_vpmacssdd :
+ GCCBuiltin<"__builtin_ia32_vpmacssdd">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_xop_vpmacssdqh :
+ GCCBuiltin<"__builtin_ia32_vpmacssdqh">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v2i64_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_xop_vpmacssdql :
+ GCCBuiltin<"__builtin_ia32_vpmacssdql">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v2i64_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_xop_vpmacsswd :
+ GCCBuiltin<"__builtin_ia32_vpmacsswd">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v4i32_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_xop_vpmacssww :
+ GCCBuiltin<"__builtin_ia32_vpmacssww">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_xop_vpmacswd :
+ GCCBuiltin<"__builtin_ia32_vpmacswd">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v4i32_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_xop_vpmacsww :
+ GCCBuiltin<"__builtin_ia32_vpmacsww">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_xop_vpmadcsswd :
+ GCCBuiltin<"__builtin_ia32_vpmadcsswd">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v4i32_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_xop_vpmadcswd :
+ GCCBuiltin<"__builtin_ia32_vpmadcswd">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v4i32_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_xop_vpperm :
+ GCCBuiltin<"__builtin_ia32_vpperm">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_xop_vprotb : GCCBuiltin<"__builtin_ia32_vprotb">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_x86_xop_vprotd : GCCBuiltin<"__builtin_ia32_vprotd">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_x86_xop_vprotq : GCCBuiltin<"__builtin_ia32_vprotq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_x86_xop_vprotw : GCCBuiltin<"__builtin_ia32_vprotw">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_x86_xop_vprotbi : GCCBuiltin<"__builtin_ia32_vprotbi">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_xop_vprotdi : GCCBuiltin<"__builtin_ia32_vprotdi">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_xop_vprotqi : GCCBuiltin<"__builtin_ia32_vprotqi">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_xop_vprotwi : GCCBuiltin<"__builtin_ia32_vprotwi">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_xop_vpshab :
+ GCCBuiltin<"__builtin_ia32_vpshab">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_x86_xop_vpshad :
+ GCCBuiltin<"__builtin_ia32_vpshad">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_x86_xop_vpshaq :
+ GCCBuiltin<"__builtin_ia32_vpshaq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_x86_xop_vpshaw :
+ GCCBuiltin<"__builtin_ia32_vpshaw">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_x86_xop_vpshlb :
+ GCCBuiltin<"__builtin_ia32_vpshlb">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_x86_xop_vpshld :
+ GCCBuiltin<"__builtin_ia32_vpshld">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_x86_xop_vpshlq :
+ GCCBuiltin<"__builtin_ia32_vpshlq">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+ def int_x86_xop_vpshlw :
+ GCCBuiltin<"__builtin_ia32_vpshlw">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// LWP
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_llwpcb :
+ GCCBuiltin<"__builtin_ia32_llwpcb">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+ def int_x86_slwpcb :
+ GCCBuiltin<"__builtin_ia32_slwpcb">,
+ Intrinsic<[llvm_ptr_ty], [], []>;
+ def int_x86_lwpins32 :
+ GCCBuiltin<"__builtin_ia32_lwpins32">,
+ Intrinsic<[llvm_i8_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_lwpins64 :
+ GCCBuiltin<"__builtin_ia32_lwpins64">,
+ Intrinsic<[llvm_i8_ty], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_lwpval32 :
+ GCCBuiltin<"__builtin_ia32_lwpval32">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_lwpval64 :
+ GCCBuiltin<"__builtin_ia32_lwpval64">,
+ Intrinsic<[], [llvm_i64_ty, llvm_i32_ty, llvm_i32_ty], []>;
+}
+
+//===----------------------------------------------------------------------===//
+// MMX
+
+// Empty MMX state op.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_mmx_emms : GCCBuiltin<"__builtin_ia32_emms">,
+ Intrinsic<[], [], []>;
+ def int_x86_mmx_femms : GCCBuiltin<"__builtin_ia32_femms">,
+ Intrinsic<[], [], []>;
+}
+
+// Integer arithmetic ops.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ // Addition
+ def int_x86_mmx_padd_b : GCCBuiltin<"__builtin_ia32_paddb">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_mmx_padd_w : GCCBuiltin<"__builtin_ia32_paddw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_mmx_padd_d : GCCBuiltin<"__builtin_ia32_paddd">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_mmx_padd_q : GCCBuiltin<"__builtin_ia32_paddq">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem, Commutative]>;
+
+ def int_x86_mmx_padds_b : GCCBuiltin<"__builtin_ia32_paddsb">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+ def int_x86_mmx_padds_w : GCCBuiltin<"__builtin_ia32_paddsw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+
+ def int_x86_mmx_paddus_b : GCCBuiltin<"__builtin_ia32_paddusb">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+ def int_x86_mmx_paddus_w : GCCBuiltin<"__builtin_ia32_paddusw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+
+ // Subtraction
+ def int_x86_mmx_psub_b : GCCBuiltin<"__builtin_ia32_psubb">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_mmx_psub_w : GCCBuiltin<"__builtin_ia32_psubw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_mmx_psub_d : GCCBuiltin<"__builtin_ia32_psubd">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_mmx_psub_q : GCCBuiltin<"__builtin_ia32_psubq">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+
+ def int_x86_mmx_psubs_b : GCCBuiltin<"__builtin_ia32_psubsb">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_mmx_psubs_w : GCCBuiltin<"__builtin_ia32_psubsw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+
+ def int_x86_mmx_psubus_b : GCCBuiltin<"__builtin_ia32_psubusb">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_mmx_psubus_w : GCCBuiltin<"__builtin_ia32_psubusw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+
+ // Multiplication
+ def int_x86_mmx_pmulh_w : GCCBuiltin<"__builtin_ia32_pmulhw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+ def int_x86_mmx_pmull_w : GCCBuiltin<"__builtin_ia32_pmullw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+ def int_x86_mmx_pmulhu_w : GCCBuiltin<"__builtin_ia32_pmulhuw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+ def int_x86_mmx_pmulu_dq : GCCBuiltin<"__builtin_ia32_pmuludq">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+ def int_x86_mmx_pmadd_wd : GCCBuiltin<"__builtin_ia32_pmaddwd">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+
+ // Bitwise operations
+ def int_x86_mmx_pand : GCCBuiltin<"__builtin_ia32_pand">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_mmx_pandn : GCCBuiltin<"__builtin_ia32_pandn">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_mmx_por : GCCBuiltin<"__builtin_ia32_por">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem, Commutative]>;
+ def int_x86_mmx_pxor : GCCBuiltin<"__builtin_ia32_pxor">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem, Commutative]>;
+
+ // Averages
+ def int_x86_mmx_pavg_b : GCCBuiltin<"__builtin_ia32_pavgb">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+ def int_x86_mmx_pavg_w : GCCBuiltin<"__builtin_ia32_pavgw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+
+ // Maximum
+ def int_x86_mmx_pmaxu_b : GCCBuiltin<"__builtin_ia32_pmaxub">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+ def int_x86_mmx_pmaxs_w : GCCBuiltin<"__builtin_ia32_pmaxsw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+
+ // Minimum
+ def int_x86_mmx_pminu_b : GCCBuiltin<"__builtin_ia32_pminub">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+ def int_x86_mmx_pmins_w : GCCBuiltin<"__builtin_ia32_pminsw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+
+ // Packed sum of absolute differences
+ def int_x86_mmx_psad_bw : GCCBuiltin<"__builtin_ia32_psadbw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+}
+
+// Integer shift ops.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ // Shift left logical
+ def int_x86_mmx_psll_w : GCCBuiltin<"__builtin_ia32_psllw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_mmx_psll_d : GCCBuiltin<"__builtin_ia32_pslld">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_mmx_psll_q : GCCBuiltin<"__builtin_ia32_psllq">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+
+ def int_x86_mmx_psrl_w : GCCBuiltin<"__builtin_ia32_psrlw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_mmx_psrl_d : GCCBuiltin<"__builtin_ia32_psrld">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_mmx_psrl_q : GCCBuiltin<"__builtin_ia32_psrlq">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+
+ def int_x86_mmx_psra_w : GCCBuiltin<"__builtin_ia32_psraw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_mmx_psra_d : GCCBuiltin<"__builtin_ia32_psrad">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+
+ def int_x86_mmx_pslli_w : GCCBuiltin<"__builtin_ia32_psllwi">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_mmx_pslli_d : GCCBuiltin<"__builtin_ia32_pslldi">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_mmx_pslli_q : GCCBuiltin<"__builtin_ia32_psllqi">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_mmx_psrli_w : GCCBuiltin<"__builtin_ia32_psrlwi">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_mmx_psrli_d : GCCBuiltin<"__builtin_ia32_psrldi">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_mmx_psrli_q : GCCBuiltin<"__builtin_ia32_psrlqi">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_mmx_psrai_w : GCCBuiltin<"__builtin_ia32_psrawi">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_mmx_psrai_d : GCCBuiltin<"__builtin_ia32_psradi">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+}
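Illustration of the distinction above (wrapper names assumed from the usual <mmintrin.h>, not taken from this file): the psll/psrl/psra forms take the shift count in a second MMX register, while the pslli/psrli/psrai forms take an i32 immediate count. A minimal sketch:

    #include <mmintrin.h>

    __m64 shift_words(__m64 v, __m64 vcount) {
        __m64 a = _mm_sll_pi16(v, vcount); /* llvm.x86.mmx.psll.w: count in an MMX register */
        __m64 b = _mm_slli_pi16(v, 3);     /* llvm.x86.mmx.pslli.w: immediate count */
        return _mm_or_si64(a, b);          /* callers issue _mm_empty() before x87 code */
    }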
+// Permute
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_permvar_df_256 : GCCBuiltin<"__builtin_ia32_permvardf256">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty,
+ llvm_v4i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_permvar_df_512 : GCCBuiltin<"__builtin_ia32_permvardf512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty,
+ llvm_v8i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_permvar_di_256 : GCCBuiltin<"__builtin_ia32_permvardi256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty,
+ llvm_v4i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_permvar_di_512 : GCCBuiltin<"__builtin_ia32_permvardi512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_v8i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_permvar_hi_128 : GCCBuiltin<"__builtin_ia32_permvarhi128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty,
+ llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_permvar_hi_256 : GCCBuiltin<"__builtin_ia32_permvarhi256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty,
+ llvm_v16i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_permvar_hi_512 : GCCBuiltin<"__builtin_ia32_permvarhi512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty,
+ llvm_v32i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_permvar_qi_128 : GCCBuiltin<"__builtin_ia32_permvarqi128">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty,
+ llvm_v16i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_permvar_qi_256 : GCCBuiltin<"__builtin_ia32_permvarqi256">,
+ Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty,
+ llvm_v32i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_permvar_qi_512 : GCCBuiltin<"__builtin_ia32_permvarqi512">,
+ Intrinsic<[llvm_v64i8_ty], [llvm_v64i8_ty,
+ llvm_v64i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_permvar_sf_512 : GCCBuiltin<"__builtin_ia32_permvarsf512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty,
+ llvm_v16i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_permvar_si_512 : GCCBuiltin<"__builtin_ia32_permvarsi512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_v16i32_ty], [IntrNoMem]>;
+}
+// Pack ops.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_mmx_packsswb : GCCBuiltin<"__builtin_ia32_packsswb">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_mmx_packssdw : GCCBuiltin<"__builtin_ia32_packssdw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_mmx_packuswb : GCCBuiltin<"__builtin_ia32_packuswb">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+}
+
+// Unpacking ops.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_mmx_punpckhbw : GCCBuiltin<"__builtin_ia32_punpckhbw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_mmx_punpckhwd : GCCBuiltin<"__builtin_ia32_punpckhwd">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_mmx_punpckhdq : GCCBuiltin<"__builtin_ia32_punpckhdq">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_mmx_punpcklbw : GCCBuiltin<"__builtin_ia32_punpcklbw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_mmx_punpcklwd : GCCBuiltin<"__builtin_ia32_punpcklwd">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+ def int_x86_mmx_punpckldq : GCCBuiltin<"__builtin_ia32_punpckldq">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty, llvm_x86mmx_ty],
+ [IntrNoMem]>;
+}
+
+// Integer comparison ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_mmx_pcmpeq_b : GCCBuiltin<"__builtin_ia32_pcmpeqb">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+ def int_x86_mmx_pcmpeq_w : GCCBuiltin<"__builtin_ia32_pcmpeqw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+ def int_x86_mmx_pcmpeq_d : GCCBuiltin<"__builtin_ia32_pcmpeqd">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem, Commutative]>;
+
+ def int_x86_mmx_pcmpgt_b : GCCBuiltin<"__builtin_ia32_pcmpgtb">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_mmx_pcmpgt_w : GCCBuiltin<"__builtin_ia32_pcmpgtw">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+ def int_x86_mmx_pcmpgt_d : GCCBuiltin<"__builtin_ia32_pcmpgtd">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty], [IntrNoMem]>;
+}
+
+// Misc.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_mmx_maskmovq : GCCBuiltin<"__builtin_ia32_maskmovq">,
+ Intrinsic<[], [llvm_x86mmx_ty, llvm_x86mmx_ty, llvm_ptr_ty], []>;
+
+ def int_x86_mmx_pmovmskb : GCCBuiltin<"__builtin_ia32_pmovmskb">,
+ Intrinsic<[llvm_i32_ty], [llvm_x86mmx_ty], [IntrNoMem]>;
+
+ def int_x86_mmx_movnt_dq : GCCBuiltin<"__builtin_ia32_movntq">,
+ Intrinsic<[], [llvm_ptrx86mmx_ty, llvm_x86mmx_ty], []>;
+
+ def int_x86_mmx_palignr_b : GCCBuiltin<"__builtin_ia32_palignr">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_x86mmx_ty, llvm_i8_ty], [IntrNoMem]>;
+
+ def int_x86_mmx_pextr_w : GCCBuiltin<"__builtin_ia32_vec_ext_v4hi">,
+ Intrinsic<[llvm_i32_ty], [llvm_x86mmx_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_mmx_pinsr_w : GCCBuiltin<"__builtin_ia32_vec_set_v4hi">,
+ Intrinsic<[llvm_x86mmx_ty], [llvm_x86mmx_ty,
+ llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+}
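Two of the definitions above in context (wrapper names assumed from <xmmintrin.h>, not from this file): maskmovq is a byte-masked, non-temporal store, which is why it carries no IntrNoMem attribute, while pmovmskb only reads its operand.

    #include <xmmintrin.h>

    int store_selected(__m64 data, __m64 mask, char *dst) {
        _mm_maskmove_si64(data, mask, dst); /* llvm.x86.mmx.maskmovq: writes memory */
        return _mm_movemask_pi8(mask);      /* llvm.x86.mmx.pmovmskb: IntrNoMem */
    }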
+
+//===----------------------------------------------------------------------===//
+// BMI
+
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_bmi_bextr_32 : GCCBuiltin<"__builtin_ia32_bextr_u32">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_bmi_bextr_64 : GCCBuiltin<"__builtin_ia32_bextr_u64">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;
+ def int_x86_bmi_bzhi_32 : GCCBuiltin<"__builtin_ia32_bzhi_si">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_bmi_bzhi_64 : GCCBuiltin<"__builtin_ia32_bzhi_di">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;
+ def int_x86_bmi_pdep_32 : GCCBuiltin<"__builtin_ia32_pdep_si">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_bmi_pdep_64 : GCCBuiltin<"__builtin_ia32_pdep_di">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;
+ def int_x86_bmi_pext_32 : GCCBuiltin<"__builtin_ia32_pext_si">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_bmi_pext_64 : GCCBuiltin<"__builtin_ia32_pext_di">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;
+}
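A short sketch of how the BMI2 bit-deposit definition above is typically reached from C, assuming the <immintrin.h> wrapper names (compile with -mbmi2); the names are an assumption, not part of this file:

    #include <immintrin.h>

    /* Interleave the low 16 bits of x and y into a 32-bit Morton code. */
    unsigned int morton2d(unsigned int x, unsigned int y) {
        return _pdep_u32(x, 0x55555555u)  /* llvm.x86.bmi.pdep.32 */
             | _pdep_u32(y, 0xAAAAAAAAu);
    }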
+
+//===----------------------------------------------------------------------===//
+// FS/GS Base
+
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_rdfsbase_32 : GCCBuiltin<"__builtin_ia32_rdfsbase32">,
+ Intrinsic<[llvm_i32_ty], []>;
+ def int_x86_rdgsbase_32 : GCCBuiltin<"__builtin_ia32_rdgsbase32">,
+ Intrinsic<[llvm_i32_ty], []>;
+ def int_x86_rdfsbase_64 : GCCBuiltin<"__builtin_ia32_rdfsbase64">,
+ Intrinsic<[llvm_i64_ty], []>;
+ def int_x86_rdgsbase_64 : GCCBuiltin<"__builtin_ia32_rdgsbase64">,
+ Intrinsic<[llvm_i64_ty], []>;
+ def int_x86_wrfsbase_32 : GCCBuiltin<"__builtin_ia32_wrfsbase32">,
+ Intrinsic<[], [llvm_i32_ty]>;
+ def int_x86_wrgsbase_32 : GCCBuiltin<"__builtin_ia32_wrgsbase32">,
+ Intrinsic<[], [llvm_i32_ty]>;
+ def int_x86_wrfsbase_64 : GCCBuiltin<"__builtin_ia32_wrfsbase64">,
+ Intrinsic<[], [llvm_i64_ty]>;
+ def int_x86_wrgsbase_64 : GCCBuiltin<"__builtin_ia32_wrgsbase64">,
+ Intrinsic<[], [llvm_i64_ty]>;
+}
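For context, a minimal sketch of the FS/GS-base definitions above as used from C, assuming the <immintrin.h> wrappers (requires OS-enabled FSGSBASE and -mfsgsbase):

    #include <immintrin.h>

    unsigned long long swap_gs_base(unsigned long long new_base) {
        unsigned long long old = _readgsbase_u64(); /* llvm.x86.rdgsbase.64 */
        _writegsbase_u64(new_base);                 /* llvm.x86.wrgsbase.64 */
        return old;
    }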
+
+//===----------------------------------------------------------------------===//
+// FXSR
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_fxrstor : GCCBuiltin<"__builtin_ia32_fxrstor">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+ def int_x86_fxrstor64 : GCCBuiltin<"__builtin_ia32_fxrstor64">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+ def int_x86_fxsave : GCCBuiltin<"__builtin_ia32_fxsave">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+ def int_x86_fxsave64 : GCCBuiltin<"__builtin_ia32_fxsave64">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+}
+
+//===----------------------------------------------------------------------===//
+// XSAVE
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_xsave :
+ Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_xsave64 :
+ Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_xrstor :
+ Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_xrstor64 :
+ Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_xsaveopt :
+ Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_xsaveopt64 :
+ Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_xrstors :
+ Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_xrstors64 :
+ Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_xsavec :
+ Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_xsavec64 :
+ Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_xsaves :
+ Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_xsaves64 :
+ Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_xgetbv :
+ Intrinsic<[llvm_i64_ty], [llvm_i32_ty], []>;
+ def int_x86_xsetbv :
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], []>;
+}
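Note that these XSAVE-family definitions take the requested-feature bitmap as two i32 halves, whereas the usual C wrappers pass a single 64-bit mask. A minimal sketch, assuming the <immintrin.h> names (an assumption, not part of this file):

    #include <immintrin.h>

    /* Save the state components currently enabled in XCR0 into 'area'
       (64-byte aligned and large enough for this CPU's state layout). */
    void save_extended_state(void *area) {
        unsigned long long mask = _xgetbv(0); /* llvm.x86.xgetbv */
        _xsave(area, mask);                   /* llvm.x86.xsave */
    }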
+
+//===----------------------------------------------------------------------===//
+// CLFLUSHOPT and CLWB
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_clflushopt : GCCBuiltin<"__builtin_ia32_clflushopt">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+
+ def int_x86_clwb : GCCBuiltin<"__builtin_ia32_clwb">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+}
+
+//===----------------------------------------------------------------------===//
+// Protection key (PKU) support
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_rdpkru : GCCBuiltin<"__builtin_ia32_rdpkru">,
+ Intrinsic<[llvm_i32_ty], [], []>;
+ def int_x86_wrpkru : GCCBuiltin<"__builtin_ia32_wrpkru">,
+ Intrinsic<[], [llvm_i32_ty], []>;
+}
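Illustrative use of the two protection-key definitions above, assuming the <immintrin.h> wrappers; PKRU holds two bits per key (access-disable, write-disable):

    #include <immintrin.h>

    /* Revoke write access for protection key 1 in the current thread. */
    void deny_writes_key1(void) {
        unsigned int pkru = _rdpkru_u32();   /* llvm.x86.rdpkru */
        _wrpkru(pkru | (0x2u << (2 * 1)));   /* llvm.x86.wrpkru: set WD bit of key 1 */
    }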
+//===----------------------------------------------------------------------===//
+// Half float conversion
+
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_vcvtph2ps_128 : GCCBuiltin<"__builtin_ia32_vcvtph2ps">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_vcvtph2ps_256 : GCCBuiltin<"__builtin_ia32_vcvtph2ps256">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+ def int_x86_vcvtps2ph_128 : GCCBuiltin<"__builtin_ia32_vcvtps2ph">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v4f32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_vcvtps2ph_256 : GCCBuiltin<"__builtin_ia32_vcvtps2ph256">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8f32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_vcvtph2ps_512 : GCCBuiltin<"__builtin_ia32_vcvtph2ps512_mask">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16i16_ty, llvm_v16f32_ty,
+ llvm_i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vcvtph2ps_256 : GCCBuiltin<"__builtin_ia32_vcvtph2ps256_mask">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8i16_ty, llvm_v8f32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vcvtph2ps_128 : GCCBuiltin<"__builtin_ia32_vcvtph2ps_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v8i16_ty, llvm_v4f32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vcvtps2ph_512 : GCCBuiltin<"__builtin_ia32_vcvtps2ph512_mask">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16f32_ty, llvm_i32_ty,
+ llvm_v16i16_ty, llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vcvtps2ph_256 : GCCBuiltin<"__builtin_ia32_vcvtps2ph256_mask">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8f32_ty, llvm_i32_ty,
+ llvm_v8i16_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vcvtps2ph_128 : GCCBuiltin<"__builtin_ia32_vcvtps2ph_mask">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v4f32_ty, llvm_i32_ty,
+ llvm_v8i16_ty, llvm_i8_ty], [IntrNoMem]>;
+}
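A minimal round trip through the two unmasked F16C definitions above, assuming the usual <immintrin.h> wrappers (compile with -mf16c); the immediate passed on the ps-to-ph direction is the rounding control that appears as the trailing llvm_i32_ty:

    #include <immintrin.h>

    __m128 roundtrip_f16(__m128 v) {
        __m128i h = _mm_cvtps_ph(v, _MM_FROUND_TO_NEAREST_INT); /* llvm.x86.vcvtps2ph.128 */
        return _mm_cvtph_ps(h);                                 /* llvm.x86.vcvtph2ps.128 */
    }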
+
+//===----------------------------------------------------------------------===//
+// TBM
+
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_tbm_bextri_u32 : GCCBuiltin<"__builtin_ia32_bextri_u32">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_tbm_bextri_u64 : GCCBuiltin<"__builtin_ia32_bextri_u64">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// RDRAND intrinsics - Return a random value and whether it is valid.
+// RDSEED intrinsics - Return a NIST SP800-90B & C compliant random value and
+// whether it is valid.
+
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ // These are declared side-effecting so they don't get eliminated by CSE or
+ // LICM.
+ def int_x86_rdrand_16 : Intrinsic<[llvm_i16_ty, llvm_i32_ty], [], []>;
+ def int_x86_rdrand_32 : Intrinsic<[llvm_i32_ty, llvm_i32_ty], [], []>;
+ def int_x86_rdrand_64 : Intrinsic<[llvm_i64_ty, llvm_i32_ty], [], []>;
+ def int_x86_rdseed_16 : Intrinsic<[llvm_i16_ty, llvm_i32_ty], [], []>;
+ def int_x86_rdseed_32 : Intrinsic<[llvm_i32_ty, llvm_i32_ty], [], []>;
+ def int_x86_rdseed_64 : Intrinsic<[llvm_i64_ty, llvm_i32_ty], [], []>;
+}
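The two-element result {value, validity flag} is what drives the usual retry loop at the C level; a sketch assuming the <immintrin.h> wrapper (compile with -mrdrnd):

    #include <immintrin.h>

    /* Returns 1 and fills *out on success; RDRAND may transiently fail,
       so callers retry a bounded number of times. */
    int get_random_u32(unsigned int *out) {
        for (int i = 0; i < 10; ++i)
            if (_rdrand32_step(out))  /* llvm.x86.rdrand.32: {value, valid flag} */
                return 1;
        return 0;
    }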
+
+//===----------------------------------------------------------------------===//
+// ADX
+
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_addcarryx_u32: GCCBuiltin<"__builtin_ia32_addcarryx_u32">,
+ Intrinsic<[llvm_i8_ty], [llvm_i8_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_ptr_ty], [IntrArgMemOnly]>;
+ def int_x86_addcarryx_u64: GCCBuiltin<"__builtin_ia32_addcarryx_u64">,
+ Intrinsic<[llvm_i8_ty], [llvm_i8_ty, llvm_i64_ty, llvm_i64_ty,
+ llvm_ptr_ty], [IntrArgMemOnly]>;
+ def int_x86_addcarry_u32: GCCBuiltin<"__builtin_ia32_addcarry_u32">,
+ Intrinsic<[llvm_i8_ty], [llvm_i8_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_ptr_ty], [IntrArgMemOnly]>;
+ def int_x86_addcarry_u64: GCCBuiltin<"__builtin_ia32_addcarry_u64">,
+ Intrinsic<[llvm_i8_ty], [llvm_i8_ty, llvm_i64_ty, llvm_i64_ty,
+ llvm_ptr_ty], [IntrArgMemOnly]>;
+ def int_x86_subborrow_u32: GCCBuiltin<"__builtin_ia32_subborrow_u32">,
+ Intrinsic<[llvm_i8_ty], [llvm_i8_ty, llvm_i32_ty, llvm_i32_ty,
+ llvm_ptr_ty], [IntrArgMemOnly]>;
+ def int_x86_subborrow_u64: GCCBuiltin<"__builtin_ia32_subborrow_u64">,
+ Intrinsic<[llvm_i8_ty], [llvm_i8_ty, llvm_i64_ty, llvm_i64_ty,
+ llvm_ptr_ty], [IntrArgMemOnly]>;
+}
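The i8 operand and i8 result carry the flag in and out, which is what lets these be chained for multi-word arithmetic; a sketch assuming the <immintrin.h> wrapper on a 64-bit target (the name is an assumption, not from this file):

    #include <immintrin.h>

    /* 128-bit addition from two 64-bit limbs, carry chained through the flag. */
    void add128(const unsigned long long a[2], const unsigned long long b[2],
                unsigned long long out[2]) {
        unsigned char c = _addcarry_u64(0, a[0], b[0], &out[0]); /* llvm.x86.addcarry.u64 */
        (void)_addcarry_u64(c, a[1], b[1], &out[1]);
    }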
+
+//===----------------------------------------------------------------------===//
+// RTM intrinsics. Transactional Memory support.
+
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_xbegin : GCCBuiltin<"__builtin_ia32_xbegin">,
+ Intrinsic<[llvm_i32_ty], [], []>;
+ def int_x86_xend : GCCBuiltin<"__builtin_ia32_xend">,
+ Intrinsic<[], [], []>;
+ def int_x86_xabort : GCCBuiltin<"__builtin_ia32_xabort">,
+ Intrinsic<[], [llvm_i8_ty], []>;
+ def int_x86_xtest : GCCBuiltin<"__builtin_ia32_xtest">,
+ Intrinsic<[llvm_i32_ty], [], []>;
+}
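A skeleton of how the RTM definitions above are driven from C, assuming the <immintrin.h> wrappers (compile with -mrtm); _xbegin returns _XBEGIN_STARTED only inside a successfully started transaction:

    #include <immintrin.h>

    int try_transactional_increment(int *counter) {
        unsigned int status = _xbegin();   /* llvm.x86.xbegin */
        if (status == _XBEGIN_STARTED) {
            ++*counter;
            _xend();                       /* llvm.x86.xend */
            return 1;
        }
        return 0;                          /* aborted; status holds the abort code */
    }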
+
+//===----------------------------------------------------------------------===//
+// AVX512
+
+// Conversion ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_cvttss2si : GCCBuiltin<"__builtin_ia32_vcvttss2si32">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvttss2si64 : GCCBuiltin<"__builtin_ia32_vcvttss2si64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v4f32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvttss2usi : GCCBuiltin<"__builtin_ia32_vcvttss2usi32">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvttss2usi64 : GCCBuiltin<"__builtin_ia32_vcvttss2usi64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v4f32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvtusi2ss : GCCBuiltin<"__builtin_ia32_cvtusi2ss32">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvtusi642ss : GCCBuiltin<"__builtin_ia32_cvtusi2ss64">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_i64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvttsd2si : GCCBuiltin<"__builtin_ia32_vcvttsd2si32">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvttsd2si64 : GCCBuiltin<"__builtin_ia32_vcvttsd2si64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v2f64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvttsd2usi : GCCBuiltin<"__builtin_ia32_vcvttsd2usi32">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvttsd2usi64 : GCCBuiltin<"__builtin_ia32_vcvttsd2usi64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v2f64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvtusi642sd : GCCBuiltin<"__builtin_ia32_cvtusi2sd64">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty,
+ llvm_i64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vcvtss2usi32 : GCCBuiltin<"__builtin_ia32_vcvtss2usi32">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vcvtss2usi64 : GCCBuiltin<"__builtin_ia32_vcvtss2usi64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v4f32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vcvtss2si32 : GCCBuiltin<"__builtin_ia32_vcvtss2si32">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vcvtss2si64 : GCCBuiltin<"__builtin_ia32_vcvtss2si64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v4f32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vcvtsd2usi32 : GCCBuiltin<"__builtin_ia32_vcvtsd2usi32">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vcvtsd2usi64 : GCCBuiltin<"__builtin_ia32_vcvtsd2usi64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v2f64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vcvtsd2si32 : GCCBuiltin<"__builtin_ia32_vcvtsd2si32">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vcvtsd2si64 : GCCBuiltin<"__builtin_ia32_vcvtsd2si64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v2f64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvtsi2ss32 : GCCBuiltin<"__builtin_ia32_cvtsi2ss32">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvtsi2ss64 : GCCBuiltin<"__builtin_ia32_cvtsi2ss64">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_i64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvtsi2sd64 : GCCBuiltin<"__builtin_ia32_cvtsi2sd64">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty,
+ llvm_i64_ty, llvm_i32_ty], [IntrNoMem]>;
+}
+
+// Pack ops.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_packsswb_512 : GCCBuiltin<"__builtin_ia32_packsswb512">,
+ Intrinsic<[llvm_v64i8_ty], [llvm_v32i16_ty, llvm_v32i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_packssdw_512 : GCCBuiltin<"__builtin_ia32_packssdw512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v16i32_ty, llvm_v16i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_packuswb_512 : GCCBuiltin<"__builtin_ia32_packuswb512">,
+ Intrinsic<[llvm_v64i8_ty], [llvm_v32i16_ty, llvm_v32i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_packusdw_512 : GCCBuiltin<"__builtin_ia32_packusdw512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v16i32_ty, llvm_v16i32_ty],
+ [IntrNoMem]>;
+}
+
+// Vector convert
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_mask_cvtdq2ps_512 :
+ GCCBuiltin<"__builtin_ia32_cvtdq2ps512_mask">,
+ Intrinsic<[llvm_v16f32_ty],
+ [llvm_v16i32_ty, llvm_v16f32_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtpd2dq_128 :
+ GCCBuiltin<"__builtin_ia32_cvtpd2dq128_mask">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v2f64_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtpd2dq_512 :
+ GCCBuiltin<"__builtin_ia32_cvtpd2dq512_mask">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8f64_ty, llvm_v8i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtpd2ps_512 :
+ GCCBuiltin<"__builtin_ia32_cvtpd2ps512_mask">,
+ Intrinsic<[llvm_v8f32_ty],
+ [llvm_v8f64_ty, llvm_v8f32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtsd2ss_round :
+ GCCBuiltin<"__builtin_ia32_cvtsd2ss_round_mask">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_v2f64_ty, llvm_v4f32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtss2sd_round :
+ GCCBuiltin<"__builtin_ia32_cvtss2sd_round_mask">,
+ Intrinsic<[llvm_v2f64_ty],
+ [llvm_v2f64_ty, llvm_v4f32_ty, llvm_v2f64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtpd2ps :
+ GCCBuiltin<"__builtin_ia32_cvtpd2ps_mask">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v2f64_ty, llvm_v4f32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtpd2qq_128 :
+ GCCBuiltin<"__builtin_ia32_cvtpd2qq128_mask">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2f64_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtpd2qq_256 :
+ GCCBuiltin<"__builtin_ia32_cvtpd2qq256_mask">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4f64_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtpd2qq_512 :
+ GCCBuiltin<"__builtin_ia32_cvtpd2qq512_mask">,
+ Intrinsic<[llvm_v8i64_ty],
+ [llvm_v8f64_ty, llvm_v8i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtpd2udq_128 :
+ GCCBuiltin<"__builtin_ia32_cvtpd2udq128_mask">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v2f64_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtpd2udq_256 :
+ GCCBuiltin<"__builtin_ia32_cvtpd2udq256_mask">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4f64_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtpd2udq_512 :
+ GCCBuiltin<"__builtin_ia32_cvtpd2udq512_mask">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8f64_ty, llvm_v8i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtpd2uqq_128 :
+ GCCBuiltin<"__builtin_ia32_cvtpd2uqq128_mask">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2f64_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtpd2uqq_256 :
+ GCCBuiltin<"__builtin_ia32_cvtpd2uqq256_mask">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4f64_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtpd2uqq_512 :
+ GCCBuiltin<"__builtin_ia32_cvtpd2uqq512_mask">,
+ Intrinsic<[llvm_v8i64_ty],
+ [llvm_v8f64_ty, llvm_v8i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtps2dq_128 :
+ GCCBuiltin<"__builtin_ia32_cvtps2dq128_mask">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4f32_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtps2dq_256 :
+ GCCBuiltin<"__builtin_ia32_cvtps2dq256_mask">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8f32_ty, llvm_v8i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtps2dq_512 :
+ GCCBuiltin<"__builtin_ia32_cvtps2dq512_mask">,
+ Intrinsic<[llvm_v16i32_ty],
+ [llvm_v16f32_ty, llvm_v16i32_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtps2pd_512 :
+ GCCBuiltin<"__builtin_ia32_cvtps2pd512_mask">,
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8f32_ty, llvm_v8f64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtps2qq_128 :
+ GCCBuiltin<"__builtin_ia32_cvtps2qq128_mask">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v4f32_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtps2qq_256 :
+ GCCBuiltin<"__builtin_ia32_cvtps2qq256_mask">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4f32_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtps2qq_512 :
+ GCCBuiltin<"__builtin_ia32_cvtps2qq512_mask">,
+ Intrinsic<[llvm_v8i64_ty],
+ [llvm_v8f32_ty, llvm_v8i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtps2udq_128 :
+ GCCBuiltin<"__builtin_ia32_cvtps2udq128_mask">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4f32_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtps2udq_256 :
+ GCCBuiltin<"__builtin_ia32_cvtps2udq256_mask">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8f32_ty, llvm_v8i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtps2udq_512 :
+ GCCBuiltin<"__builtin_ia32_cvtps2udq512_mask">,
+ Intrinsic<[llvm_v16i32_ty],
+ [llvm_v16f32_ty, llvm_v16i32_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtps2uqq_128 :
+ GCCBuiltin<"__builtin_ia32_cvtps2uqq128_mask">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v4f32_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtps2uqq_256 :
+ GCCBuiltin<"__builtin_ia32_cvtps2uqq256_mask">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4f32_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtps2uqq_512 :
+ GCCBuiltin<"__builtin_ia32_cvtps2uqq512_mask">,
+ Intrinsic<[llvm_v8i64_ty],
+ [llvm_v8f32_ty, llvm_v8i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtqq2pd_512 :
+ GCCBuiltin<"__builtin_ia32_cvtqq2pd512_mask">,
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8i64_ty, llvm_v8f64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtqq2ps_128 :
+ GCCBuiltin<"__builtin_ia32_cvtqq2ps128_mask">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v2i64_ty, llvm_v4f32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtqq2ps_256 :
+ GCCBuiltin<"__builtin_ia32_cvtqq2ps256_mask">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4i64_ty, llvm_v4f32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtqq2ps_512 :
+ GCCBuiltin<"__builtin_ia32_cvtqq2ps512_mask">,
+ Intrinsic<[llvm_v8f32_ty],
+ [llvm_v8i64_ty, llvm_v8f32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttpd2dq_128 :
+ GCCBuiltin<"__builtin_ia32_cvttpd2dq128_mask">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v2f64_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttpd2dq_512 :
+ GCCBuiltin<"__builtin_ia32_cvttpd2dq512_mask">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8f64_ty, llvm_v8i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttpd2qq_128 :
+ GCCBuiltin<"__builtin_ia32_cvttpd2qq128_mask">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2f64_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttpd2qq_256 :
+ GCCBuiltin<"__builtin_ia32_cvttpd2qq256_mask">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4f64_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttpd2qq_512 :
+ GCCBuiltin<"__builtin_ia32_cvttpd2qq512_mask">,
+ Intrinsic<[llvm_v8i64_ty],
+ [llvm_v8f64_ty, llvm_v8i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttpd2udq_128 :
+ GCCBuiltin<"__builtin_ia32_cvttpd2udq128_mask">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v2f64_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttpd2udq_256 :
+ GCCBuiltin<"__builtin_ia32_cvttpd2udq256_mask">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4f64_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttpd2udq_512 :
+ GCCBuiltin<"__builtin_ia32_cvttpd2udq512_mask">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8f64_ty, llvm_v8i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttpd2uqq_128 :
+ GCCBuiltin<"__builtin_ia32_cvttpd2uqq128_mask">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2f64_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttpd2uqq_256 :
+ GCCBuiltin<"__builtin_ia32_cvttpd2uqq256_mask">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4f64_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttpd2uqq_512 :
+ GCCBuiltin<"__builtin_ia32_cvttpd2uqq512_mask">,
+ Intrinsic<[llvm_v8i64_ty],
+ [llvm_v8f64_ty, llvm_v8i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttps2dq_512 :
+ GCCBuiltin<"__builtin_ia32_cvttps2dq512_mask">,
+ Intrinsic<[llvm_v16i32_ty],
+ [llvm_v16f32_ty, llvm_v16i32_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttps2qq_128 :
+ GCCBuiltin<"__builtin_ia32_cvttps2qq128_mask">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v4f32_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttps2qq_256 :
+ GCCBuiltin<"__builtin_ia32_cvttps2qq256_mask">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4f32_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttps2qq_512 :
+ GCCBuiltin<"__builtin_ia32_cvttps2qq512_mask">,
+ Intrinsic<[llvm_v8i64_ty],
+ [llvm_v8f32_ty, llvm_v8i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttps2udq_128 :
+ GCCBuiltin<"__builtin_ia32_cvttps2udq128_mask">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4f32_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttps2udq_256 :
+ GCCBuiltin<"__builtin_ia32_cvttps2udq256_mask">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8f32_ty, llvm_v8i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttps2udq_512 :
+ GCCBuiltin<"__builtin_ia32_cvttps2udq512_mask">,
+ Intrinsic<[llvm_v16i32_ty],
+ [llvm_v16f32_ty, llvm_v16i32_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttps2uqq_128 :
+ GCCBuiltin<"__builtin_ia32_cvttps2uqq128_mask">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v4f32_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttps2uqq_256 :
+ GCCBuiltin<"__builtin_ia32_cvttps2uqq256_mask">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4f32_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvttps2uqq_512 :
+ GCCBuiltin<"__builtin_ia32_cvttps2uqq512_mask">,
+ Intrinsic<[llvm_v8i64_ty],
+ [llvm_v8f32_ty, llvm_v8i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtudq2ps_512 :
+ GCCBuiltin<"__builtin_ia32_cvtudq2ps512_mask">,
+ Intrinsic<[llvm_v16f32_ty],
+ [llvm_v16i32_ty, llvm_v16f32_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtuqq2pd_512 :
+ GCCBuiltin<"__builtin_ia32_cvtuqq2pd512_mask">,
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8i64_ty, llvm_v8f64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtuqq2ps_128 :
+ GCCBuiltin<"__builtin_ia32_cvtuqq2ps128_mask">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v2i64_ty, llvm_v4f32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtuqq2ps_256 :
+ GCCBuiltin<"__builtin_ia32_cvtuqq2ps256_mask">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4i64_ty, llvm_v4f32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cvtuqq2ps_512 :
+ GCCBuiltin<"__builtin_ia32_cvtuqq2ps512_mask">,
+ Intrinsic<[llvm_v8f32_ty],
+ [llvm_v8i64_ty, llvm_v8f32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_rndscale_pd_128 : GCCBuiltin<"__builtin_ia32_rndscalepd_128_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_i32_ty,
+ llvm_v2f64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_rndscale_pd_256 : GCCBuiltin<"__builtin_ia32_rndscalepd_256_mask">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty, llvm_i32_ty,
+ llvm_v4f64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_rndscale_pd_512 : GCCBuiltin<"__builtin_ia32_rndscalepd_mask">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_i32_ty, llvm_v8f64_ty,
+ llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_rndscale_ps_128 : GCCBuiltin<"__builtin_ia32_rndscaleps_128_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_i32_ty,
+ llvm_v4f32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_rndscale_ps_256 : GCCBuiltin<"__builtin_ia32_rndscaleps_256_mask">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty, llvm_i32_ty,
+ llvm_v8f32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_rndscale_ps_512 : GCCBuiltin<"__builtin_ia32_rndscaleps_mask">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_i32_ty, llvm_v16f32_ty,
+ llvm_i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_reduce_pd_128 : GCCBuiltin<"__builtin_ia32_reducepd128_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_i32_ty,
+ llvm_v2f64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_reduce_pd_256 : GCCBuiltin<"__builtin_ia32_reducepd256_mask">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty, llvm_i32_ty,
+ llvm_v4f64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_reduce_pd_512 : GCCBuiltin<"__builtin_ia32_reducepd512_mask">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_i32_ty, llvm_v8f64_ty,
+ llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_reduce_ps_128 : GCCBuiltin<"__builtin_ia32_reduceps128_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_i32_ty,
+ llvm_v4f32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_reduce_ps_256 : GCCBuiltin<"__builtin_ia32_reduceps256_mask">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty, llvm_i32_ty,
+ llvm_v8f32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_reduce_ps_512 : GCCBuiltin<"__builtin_ia32_reduceps512_mask">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_i32_ty, llvm_v16f32_ty,
+ llvm_i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_range_pd_128 : GCCBuiltin<"__builtin_ia32_rangepd128_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty, llvm_i32_ty,
+ llvm_v2f64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_range_pd_256 : GCCBuiltin<"__builtin_ia32_rangepd256_mask">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty, llvm_v4f64_ty, llvm_i32_ty,
+ llvm_v4f64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_range_pd_512 : GCCBuiltin<"__builtin_ia32_rangepd512_mask">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty, llvm_i32_ty,
+ llvm_v8f64_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_range_ps_128 : GCCBuiltin<"__builtin_ia32_rangeps128_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty, llvm_i32_ty,
+ llvm_v4f32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_range_ps_256 : GCCBuiltin<"__builtin_ia32_rangeps256_mask">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty, llvm_v8f32_ty, llvm_i32_ty,
+ llvm_v8f32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_range_ps_512 : GCCBuiltin<"__builtin_ia32_rangeps512_mask">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty, llvm_i32_ty,
+ llvm_v16f32_ty, llvm_i16_ty, llvm_i32_ty], [IntrNoMem]>;
+}
+
+// Vector load with broadcast
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_broadcastmw_512 :
+ GCCBuiltin<"__builtin_ia32_broadcastmw512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_broadcastmw_256 :
+ GCCBuiltin<"__builtin_ia32_broadcastmw256">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_broadcastmw_128 :
+ GCCBuiltin<"__builtin_ia32_broadcastmw128">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_broadcastmb_512 :
+ GCCBuiltin<"__builtin_ia32_broadcastmb512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_broadcastmb_256 :
+ GCCBuiltin<"__builtin_ia32_broadcastmb256">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_broadcastmb_128 :
+ GCCBuiltin<"__builtin_ia32_broadcastmb128">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_i8_ty], [IntrNoMem]>;
+}
+
+// Arithmetic ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+
+ def int_x86_avx512_add_ps_512 : GCCBuiltin<"__builtin_ia32_addps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_add_pd_512 : GCCBuiltin<"__builtin_ia32_addpd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_sub_ps_512 : GCCBuiltin<"__builtin_ia32_subps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_sub_pd_512 : GCCBuiltin<"__builtin_ia32_subpd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mul_ps_512 : GCCBuiltin<"__builtin_ia32_mulps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mul_pd_512 : GCCBuiltin<"__builtin_ia32_mulpd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_div_ps_512 : GCCBuiltin<"__builtin_ia32_divps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_div_pd_512 : GCCBuiltin<"__builtin_ia32_divpd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_max_ps_512 : GCCBuiltin<"__builtin_ia32_maxps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_max_pd_512 : GCCBuiltin<"__builtin_ia32_maxpd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_min_ps_512 : GCCBuiltin<"__builtin_ia32_minps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_min_pd_512 : GCCBuiltin<"__builtin_ia32_minpd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_add_ss_round : GCCBuiltin<"__builtin_ia32_addss_round_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_v4f32_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_div_ss_round : GCCBuiltin<"__builtin_ia32_divss_round_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_v4f32_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_mul_ss_round : GCCBuiltin<"__builtin_ia32_mulss_round_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_v4f32_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_sub_ss_round : GCCBuiltin<"__builtin_ia32_subss_round_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_v4f32_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_max_ss_round : GCCBuiltin<"__builtin_ia32_maxss_round_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_v4f32_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_min_ss_round : GCCBuiltin<"__builtin_ia32_minss_round_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_v4f32_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_add_sd_round : GCCBuiltin<"__builtin_ia32_addsd_round_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_v2f64_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_div_sd_round : GCCBuiltin<"__builtin_ia32_divsd_round_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_v2f64_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_mul_sd_round : GCCBuiltin<"__builtin_ia32_mulsd_round_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_v2f64_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_sub_sd_round : GCCBuiltin<"__builtin_ia32_subsd_round_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_v2f64_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_max_sd_round : GCCBuiltin<"__builtin_ia32_maxsd_round_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_v2f64_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_min_sd_round : GCCBuiltin<"__builtin_ia32_minsd_round_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_v2f64_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_rndscale_ss : GCCBuiltin<"__builtin_ia32_rndscaless_round_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i8_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_rndscale_sd : GCCBuiltin<"__builtin_ia32_rndscalesd_round_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i8_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_range_ss : GCCBuiltin<"__builtin_ia32_rangess128_round_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i8_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_range_sd : GCCBuiltin<"__builtin_ia32_rangesd128_round_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i8_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_reduce_ss : GCCBuiltin<"__builtin_ia32_reducess_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i8_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_reduce_sd : GCCBuiltin<"__builtin_ia32_reducesd_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i8_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_scalef_sd : GCCBuiltin<"__builtin_ia32_scalefsd_round_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_v2f64_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_scalef_ss : GCCBuiltin<"__builtin_ia32_scalefss_round_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_v4f32_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_scalef_pd_128 : GCCBuiltin<"__builtin_ia32_scalefpd128_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_v2f64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_scalef_pd_256 : GCCBuiltin<"__builtin_ia32_scalefpd256_mask">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty, llvm_v4f64_ty,
+ llvm_v4f64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_scalef_pd_512 : GCCBuiltin<"__builtin_ia32_scalefpd512_mask">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_v8f64_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_scalef_ps_128 : GCCBuiltin<"__builtin_ia32_scalefps128_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_v4f32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_scalef_ps_256 : GCCBuiltin<"__builtin_ia32_scalefps256_mask">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty, llvm_v8f32_ty,
+ llvm_v8f32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_scalef_ps_512 : GCCBuiltin<"__builtin_ia32_scalefps512_mask">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_v16f32_ty, llvm_i16_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_sqrt_ss :
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_sqrt_sd :
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_sqrt_pd_512 :
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_sqrt_ps_512 :
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_fixupimm_pd_128 :
+ GCCBuiltin<"__builtin_ia32_fixupimmpd128_mask">,
+ Intrinsic<[llvm_v2f64_ty],
+ [llvm_v2f64_ty, llvm_v2f64_ty, llvm_v2i64_ty, llvm_i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_maskz_fixupimm_pd_128 :
+ GCCBuiltin<"__builtin_ia32_fixupimmpd128_maskz">,
+ Intrinsic<[llvm_v2f64_ty],
+ [llvm_v2f64_ty, llvm_v2f64_ty, llvm_v2i64_ty, llvm_i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_fixupimm_pd_256 :
+ GCCBuiltin<"__builtin_ia32_fixupimmpd256_mask">,
+ Intrinsic<[llvm_v4f64_ty],
+ [llvm_v4f64_ty, llvm_v4f64_ty, llvm_v4i64_ty, llvm_i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_maskz_fixupimm_pd_256 :
+ GCCBuiltin<"__builtin_ia32_fixupimmpd256_maskz">,
+ Intrinsic<[llvm_v4f64_ty],
+ [llvm_v4f64_ty, llvm_v4f64_ty, llvm_v4i64_ty, llvm_i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_fixupimm_pd_512 :
+ GCCBuiltin<"__builtin_ia32_fixupimmpd512_mask">,
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8f64_ty, llvm_v8f64_ty, llvm_v8i64_ty, llvm_i32_ty, llvm_i8_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_fixupimm_pd_512 :
+ GCCBuiltin<"__builtin_ia32_fixupimmpd512_maskz">,
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8f64_ty, llvm_v8f64_ty, llvm_v8i64_ty, llvm_i32_ty, llvm_i8_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_fixupimm_ps_128 :
+ GCCBuiltin<"__builtin_ia32_fixupimmps128_mask">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4i32_ty, llvm_i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_maskz_fixupimm_ps_128 :
+ GCCBuiltin<"__builtin_ia32_fixupimmps128_maskz">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4i32_ty, llvm_i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_fixupimm_ps_256 :
+ GCCBuiltin<"__builtin_ia32_fixupimmps256_mask">,
+ Intrinsic<[llvm_v8f32_ty],
+ [llvm_v8f32_ty, llvm_v8f32_ty, llvm_v8i32_ty, llvm_i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_maskz_fixupimm_ps_256 :
+ GCCBuiltin<"__builtin_ia32_fixupimmps256_maskz">,
+ Intrinsic<[llvm_v8f32_ty],
+ [llvm_v8f32_ty, llvm_v8f32_ty, llvm_v8i32_ty, llvm_i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_fixupimm_ps_512 :
+ GCCBuiltin<"__builtin_ia32_fixupimmps512_mask">,
+ Intrinsic<[llvm_v16f32_ty],
+ [llvm_v16f32_ty, llvm_v16f32_ty, llvm_v16i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_fixupimm_ps_512 :
+ GCCBuiltin<"__builtin_ia32_fixupimmps512_maskz">,
+ Intrinsic<[llvm_v16f32_ty],
+ [llvm_v16f32_ty, llvm_v16f32_ty, llvm_v16i32_ty, llvm_i32_ty,
+ llvm_i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_fixupimm_sd :
+ GCCBuiltin<"__builtin_ia32_fixupimmsd_mask">,
+ Intrinsic<[llvm_v2f64_ty],
+ [llvm_v2f64_ty, llvm_v2f64_ty, llvm_v2i64_ty, llvm_i32_ty, llvm_i8_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_fixupimm_sd :
+ GCCBuiltin<"__builtin_ia32_fixupimmsd_maskz">,
+ Intrinsic<[llvm_v2f64_ty],
+ [llvm_v2f64_ty, llvm_v2f64_ty, llvm_v2i64_ty, llvm_i32_ty, llvm_i8_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_fixupimm_ss :
+ GCCBuiltin<"__builtin_ia32_fixupimmss_mask">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4i32_ty, llvm_i32_ty, llvm_i8_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_fixupimm_ss :
+ GCCBuiltin<"__builtin_ia32_fixupimmss_maskz">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4i32_ty, llvm_i32_ty, llvm_i8_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_getexp_pd_128 : GCCBuiltin<"__builtin_ia32_getexppd128_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_getexp_pd_256 : GCCBuiltin<"__builtin_ia32_getexppd256_mask">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty, llvm_v4f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_getexp_pd_512 : GCCBuiltin<"__builtin_ia32_getexppd512_mask">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_getexp_ps_128 : GCCBuiltin<"__builtin_ia32_getexpps128_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_getexp_ps_256 : GCCBuiltin<"__builtin_ia32_getexpps256_mask">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty, llvm_v8f32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_getexp_ps_512 : GCCBuiltin<"__builtin_ia32_getexpps512_mask">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_i16_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_getexp_ss : GCCBuiltin<"__builtin_ia32_getexpss128_round_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_getexp_sd : GCCBuiltin<"__builtin_ia32_getexpsd128_round_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_getmant_pd_128 :
+ GCCBuiltin<"__builtin_ia32_getmantpd128_mask">,
+ Intrinsic<[llvm_v2f64_ty],
+ [llvm_v2f64_ty, llvm_i32_ty, llvm_v2f64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_getmant_pd_256 :
+ GCCBuiltin<"__builtin_ia32_getmantpd256_mask">,
+ Intrinsic<[llvm_v4f64_ty],
+ [llvm_v4f64_ty, llvm_i32_ty, llvm_v4f64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_getmant_pd_512 :
+ GCCBuiltin<"__builtin_ia32_getmantpd512_mask">,
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8f64_ty, llvm_i32_ty, llvm_v8f64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_getmant_ps_128 :
+ GCCBuiltin<"__builtin_ia32_getmantps128_mask">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_i32_ty, llvm_v4f32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_getmant_ps_256 :
+ GCCBuiltin<"__builtin_ia32_getmantps256_mask">,
+ Intrinsic<[llvm_v8f32_ty],
+ [llvm_v8f32_ty, llvm_i32_ty, llvm_v8f32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_getmant_ps_512 :
+ GCCBuiltin<"__builtin_ia32_getmantps512_mask">,
+ Intrinsic<[llvm_v16f32_ty],
+ [llvm_v16f32_ty, llvm_i32_ty, llvm_v16f32_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_getmant_ss :
+ GCCBuiltin<"__builtin_ia32_getmantss_round_mask">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_v4f32_ty, llvm_i32_ty, llvm_v4f32_ty,
+ llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_getmant_sd :
+ GCCBuiltin<"__builtin_ia32_getmantsd_round_mask">,
+ Intrinsic<[llvm_v2f64_ty],
+ [llvm_v2f64_ty, llvm_v2f64_ty, llvm_i32_ty, llvm_v2f64_ty,
+ llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_rsqrt14_ss : GCCBuiltin<"__builtin_ia32_rsqrt14ss_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_rsqrt14_sd : GCCBuiltin<"__builtin_ia32_rsqrt14sd_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_rsqrt14_pd_128 : GCCBuiltin<"__builtin_ia32_rsqrt14pd128_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_rsqrt14_pd_256 : GCCBuiltin<"__builtin_ia32_rsqrt14pd256_mask">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty, llvm_v4f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_rsqrt14_pd_512 : GCCBuiltin<"__builtin_ia32_rsqrt14pd512_mask">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_rsqrt14_ps_128 : GCCBuiltin<"__builtin_ia32_rsqrt14ps128_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_rsqrt14_ps_256 : GCCBuiltin<"__builtin_ia32_rsqrt14ps256_mask">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty, llvm_v8f32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_rsqrt14_ps_512 : GCCBuiltin<"__builtin_ia32_rsqrt14ps512_mask">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_rcp14_ss : GCCBuiltin<"__builtin_ia32_rcp14ss_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_rcp14_sd : GCCBuiltin<"__builtin_ia32_rcp14sd_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_rcp14_pd_128 : GCCBuiltin<"__builtin_ia32_rcp14pd128_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_rcp14_pd_256 : GCCBuiltin<"__builtin_ia32_rcp14pd256_mask">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty, llvm_v4f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_rcp14_pd_512 : GCCBuiltin<"__builtin_ia32_rcp14pd512_mask">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_rcp14_ps_128 : GCCBuiltin<"__builtin_ia32_rcp14ps128_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_rcp14_ps_256 : GCCBuiltin<"__builtin_ia32_rcp14ps256_mask">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty, llvm_v8f32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_rcp14_ps_512 : GCCBuiltin<"__builtin_ia32_rcp14ps512_mask">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_i16_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_rcp28_ps : GCCBuiltin<"__builtin_ia32_rcp28ps_mask">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_rcp28_pd : GCCBuiltin<"__builtin_ia32_rcp28pd_mask">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_exp2_ps : GCCBuiltin<"__builtin_ia32_exp2ps_mask">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_exp2_pd : GCCBuiltin<"__builtin_ia32_exp2pd_mask">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_rcp28_ss : GCCBuiltin<"__builtin_ia32_rcp28ss_round_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_v4f32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rcp28_sd : GCCBuiltin<"__builtin_ia32_rcp28sd_round_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_v2f64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rsqrt28_ps : GCCBuiltin<"__builtin_ia32_rsqrt28ps_mask">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rsqrt28_pd : GCCBuiltin<"__builtin_ia32_rsqrt28pd_mask">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rsqrt28_ss : GCCBuiltin<"__builtin_ia32_rsqrt28ss_round_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_v4f32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rsqrt28_sd : GCCBuiltin<"__builtin_ia32_rsqrt28sd_round_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_v2f64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_psad_bw_512 : GCCBuiltin<"__builtin_ia32_psadbw512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v64i8_ty, llvm_v64i8_ty],
+ [IntrNoMem, Commutative]>;
+}
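The trailing llvm_i32_ty on the 512-bit arithmetic definitions above is the embedded rounding control; from C it is usually supplied through the *_round_* wrappers in <immintrin.h> (names assumed, not part of this file), where a static rounding mode is combined with suppress-all-exceptions:

    #include <immintrin.h>

    __m512d add_round_toward_zero(__m512d a, __m512d b) {
        return _mm512_add_round_pd(a, b,   /* llvm.x86.avx512.add.pd.512 */
                                   _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
    }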
+// Integer arithmetic ops
+let TargetPrefix = "x86" in {
+ def int_x86_avx512_mask_padds_b_128 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty,
+ llvm_v16i8_ty, llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_padds_b_256 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty, llvm_v32i8_ty,
+ llvm_v32i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_padds_b_512 : GCCBuiltin<"__builtin_ia32_paddsb512_mask">,
+ Intrinsic<[llvm_v64i8_ty], [llvm_v64i8_ty, llvm_v64i8_ty,
+ llvm_v64i8_ty, llvm_i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_padds_w_128 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_v8i16_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_padds_w_256 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty, llvm_v16i16_ty,
+ llvm_v16i16_ty, llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_padds_w_512 : GCCBuiltin<"__builtin_ia32_paddsw512_mask">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty, llvm_v32i16_ty,
+ llvm_v32i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_paddus_b_128 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty,
+ llvm_v16i8_ty, llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_paddus_b_256 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty, llvm_v32i8_ty,
+ llvm_v32i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_paddus_b_512 : GCCBuiltin<"__builtin_ia32_paddusb512_mask">,
+ Intrinsic<[llvm_v64i8_ty], [llvm_v64i8_ty, llvm_v64i8_ty,
+ llvm_v64i8_ty, llvm_i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_paddus_w_128 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_v8i16_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_paddus_w_256 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty, llvm_v16i16_ty,
+ llvm_v16i16_ty, llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_paddus_w_512 : GCCBuiltin<"__builtin_ia32_paddusw512_mask">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty, llvm_v32i16_ty,
+ llvm_v32i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_psubs_b_128 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty,
+ llvm_v16i8_ty, llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_psubs_b_256 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty, llvm_v32i8_ty,
+ llvm_v32i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_psubs_b_512 : GCCBuiltin<"__builtin_ia32_psubsb512_mask">,
+ Intrinsic<[llvm_v64i8_ty], [llvm_v64i8_ty, llvm_v64i8_ty,
+ llvm_v64i8_ty, llvm_i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_psubs_w_128 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_v8i16_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_psubs_w_256 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty, llvm_v16i16_ty,
+ llvm_v16i16_ty, llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_psubs_w_512 : GCCBuiltin<"__builtin_ia32_psubsw512_mask">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty, llvm_v32i16_ty,
+ llvm_v32i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_psubus_b_128 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty,
+ llvm_v16i8_ty, llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_psubus_b_256 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty, llvm_v32i8_ty,
+ llvm_v32i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_psubus_b_512 : GCCBuiltin<"__builtin_ia32_psubusb512_mask">,
+ Intrinsic<[llvm_v64i8_ty], [llvm_v64i8_ty, llvm_v64i8_ty,
+ llvm_v64i8_ty, llvm_i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_psubus_w_128 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_v8i16_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_psubus_w_256 : // FIXME: remove this intrinsic
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty, llvm_v16i16_ty,
+ llvm_v16i16_ty, llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_psubus_w_512 : GCCBuiltin<"__builtin_ia32_psubusw512_mask">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty, llvm_v32i16_ty,
+ llvm_v32i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_pmulhu_w_512 : GCCBuiltin<"__builtin_ia32_pmulhuw512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty,
+ llvm_v32i16_ty], [IntrNoMem, Commutative]>;
+ def int_x86_avx512_pmulh_w_512 : GCCBuiltin<"__builtin_ia32_pmulhw512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty,
+ llvm_v32i16_ty], [IntrNoMem, Commutative]>;
+ def int_x86_avx512_pmaddw_d_512 : GCCBuiltin<"__builtin_ia32_pmaddwd512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v32i16_ty,
+ llvm_v32i16_ty], [IntrNoMem, Commutative]>;
+ def int_x86_avx512_pmaddubs_w_512 : GCCBuiltin<"__builtin_ia32_pmaddubsw512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v64i8_ty,
+ llvm_v64i8_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_dbpsadbw_128 :
+ GCCBuiltin<"__builtin_ia32_dbpsadbw128">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_dbpsadbw_256 :
+ GCCBuiltin<"__builtin_ia32_dbpsadbw256">,
+ Intrinsic<[llvm_v16i16_ty],
+ [llvm_v32i8_ty, llvm_v32i8_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_dbpsadbw_512 :
+ GCCBuiltin<"__builtin_ia32_dbpsadbw512">,
+ Intrinsic<[llvm_v32i16_ty],
+ [llvm_v64i8_ty, llvm_v64i8_ty, llvm_i32_ty], [IntrNoMem]>;
+}
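+
+// The mask_padds/paddus/psubs/psubus definitions above take (a, b, passthrough,
+// mask) and are equivalent to a saturating add/sub followed by a mask select
+// (hence the FIXMEs). A minimal C sketch of that shape, assuming the Intel-style
+// helper _mm_mask_adds_epi8 from <immintrin.h> (not declared here):
+//
+//   #include <immintrin.h>            // compile with -mavx512bw -mavx512vl
+//   __m128i masked_adds_b(__m128i a, __m128i b, __m128i passthru, __mmask16 k) {
+//     return _mm_mask_adds_epi8(passthru, k, a, b);  // lanes with k==0 keep passthru
+//   }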
+
+// Gather and Scatter ops
+let TargetPrefix = "x86" in {
+ def int_x86_avx512_gather_dpd_512 : GCCBuiltin<"__builtin_ia32_gathersiv8df">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_ptr_ty,
+ llvm_v8i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx512_gather_dps_512 : GCCBuiltin<"__builtin_ia32_gathersiv16sf">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_ptr_ty,
+ llvm_v16i32_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx512_gather_qpd_512 : GCCBuiltin<"__builtin_ia32_gatherdiv8df">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_ptr_ty,
+ llvm_v8i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx512_gather_qps_512 : GCCBuiltin<"__builtin_ia32_gatherdiv16sf">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty, llvm_ptr_ty,
+ llvm_v8i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+
+ def int_x86_avx512_gather_dpq_512 : GCCBuiltin<"__builtin_ia32_gathersiv8di">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_ptr_ty,
+ llvm_v8i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx512_gather_dpi_512 : GCCBuiltin<"__builtin_ia32_gathersiv16si">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_ptr_ty,
+ llvm_v16i32_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx512_gather_qpq_512 : GCCBuiltin<"__builtin_ia32_gatherdiv8di">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_ptr_ty,
+ llvm_v8i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+ def int_x86_avx512_gather_qpi_512 : GCCBuiltin<"__builtin_ia32_gatherdiv16si">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_ptr_ty,
+ llvm_v8i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3div2_df :
+ GCCBuiltin<"__builtin_ia32_gather3div2df">,
+ Intrinsic<[llvm_v2f64_ty],
+ [llvm_v2f64_ty, llvm_ptr_ty, llvm_v2i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3div2_di :
+ GCCBuiltin<"__builtin_ia32_gather3div2di">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2i64_ty, llvm_ptr_ty, llvm_v2i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3div4_df :
+ GCCBuiltin<"__builtin_ia32_gather3div4df">,
+ Intrinsic<[llvm_v4f64_ty],
+ [llvm_v4f64_ty, llvm_ptr_ty, llvm_v4i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3div4_di :
+ GCCBuiltin<"__builtin_ia32_gather3div4di">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4i64_ty, llvm_ptr_ty, llvm_v4i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3div4_sf :
+ GCCBuiltin<"__builtin_ia32_gather3div4sf">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_ptr_ty, llvm_v2i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3div4_si :
+ GCCBuiltin<"__builtin_ia32_gather3div4si">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_ptr_ty, llvm_v2i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3div8_sf :
+ GCCBuiltin<"__builtin_ia32_gather3div8sf">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_ptr_ty, llvm_v4i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3div8_si :
+ GCCBuiltin<"__builtin_ia32_gather3div8si">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_ptr_ty, llvm_v4i64_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3siv2_df :
+ GCCBuiltin<"__builtin_ia32_gather3siv2df">,
+ Intrinsic<[llvm_v2f64_ty],
+ [llvm_v2f64_ty, llvm_ptr_ty, llvm_v4i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3siv2_di :
+ GCCBuiltin<"__builtin_ia32_gather3siv2di">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2i64_ty, llvm_ptr_ty, llvm_v4i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3siv4_df :
+ GCCBuiltin<"__builtin_ia32_gather3siv4df">,
+ Intrinsic<[llvm_v4f64_ty],
+ [llvm_v4f64_ty, llvm_ptr_ty, llvm_v4i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3siv4_di :
+ GCCBuiltin<"__builtin_ia32_gather3siv4di">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4i64_ty, llvm_ptr_ty, llvm_v4i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3siv4_sf :
+ GCCBuiltin<"__builtin_ia32_gather3siv4sf">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v4f32_ty, llvm_ptr_ty, llvm_v4i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3siv4_si :
+ GCCBuiltin<"__builtin_ia32_gather3siv4si">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_ptr_ty, llvm_v4i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3siv8_sf :
+ GCCBuiltin<"__builtin_ia32_gather3siv8sf">,
+ Intrinsic<[llvm_v8f32_ty],
+ [llvm_v8f32_ty, llvm_ptr_ty, llvm_v8i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+ def int_x86_avx512_gather3siv8_si :
+ GCCBuiltin<"__builtin_ia32_gather3siv8si">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8i32_ty, llvm_ptr_ty, llvm_v8i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+
+// scatter
+ def int_x86_avx512_scatter_dpd_512 : GCCBuiltin<"__builtin_ia32_scattersiv8df">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i8_ty,
+ llvm_v8i32_ty, llvm_v8f64_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_scatter_dps_512 : GCCBuiltin<"__builtin_ia32_scattersiv16sf">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i16_ty,
+ llvm_v16i32_ty, llvm_v16f32_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_scatter_qpd_512 : GCCBuiltin<"__builtin_ia32_scatterdiv8df">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i8_ty,
+ llvm_v8i64_ty, llvm_v8f64_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_scatter_qps_512 : GCCBuiltin<"__builtin_ia32_scatterdiv16sf">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i8_ty,
+ llvm_v8i64_ty, llvm_v8f32_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+
+ def int_x86_avx512_scatter_dpq_512 : GCCBuiltin<"__builtin_ia32_scattersiv8di">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i8_ty,
+ llvm_v8i32_ty, llvm_v8i64_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_scatter_dpi_512 : GCCBuiltin<"__builtin_ia32_scattersiv16si">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i16_ty,
+ llvm_v16i32_ty, llvm_v16i32_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_scatter_qpq_512 : GCCBuiltin<"__builtin_ia32_scatterdiv8di">,
+                Intrinsic<[], [llvm_ptr_ty, llvm_i8_ty, llvm_v8i64_ty, llvm_v8i64_ty,
+ llvm_i32_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_scatter_qpi_512 : GCCBuiltin<"__builtin_ia32_scatterdiv16si">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i8_ty, llvm_v8i64_ty, llvm_v8i32_ty,
+ llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scatterdiv2_df :
+ GCCBuiltin<"__builtin_ia32_scatterdiv2df">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v2i64_ty, llvm_v2f64_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scatterdiv2_di :
+ GCCBuiltin<"__builtin_ia32_scatterdiv2di">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v2i64_ty, llvm_v2i64_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scatterdiv4_df :
+ GCCBuiltin<"__builtin_ia32_scatterdiv4df">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v4i64_ty, llvm_v4f64_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scatterdiv4_di :
+ GCCBuiltin<"__builtin_ia32_scatterdiv4di">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v4i64_ty, llvm_v4i64_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scatterdiv4_sf :
+ GCCBuiltin<"__builtin_ia32_scatterdiv4sf">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v2i64_ty, llvm_v4f32_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scatterdiv4_si :
+ GCCBuiltin<"__builtin_ia32_scatterdiv4si">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v2i64_ty, llvm_v4i32_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scatterdiv8_sf :
+ GCCBuiltin<"__builtin_ia32_scatterdiv8sf">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v4i64_ty, llvm_v4f32_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scatterdiv8_si :
+ GCCBuiltin<"__builtin_ia32_scatterdiv8si">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v4i64_ty, llvm_v4i32_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scattersiv2_df :
+ GCCBuiltin<"__builtin_ia32_scattersiv2df">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v4i32_ty, llvm_v2f64_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scattersiv2_di :
+ GCCBuiltin<"__builtin_ia32_scattersiv2di">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v4i32_ty, llvm_v2i64_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scattersiv4_df :
+ GCCBuiltin<"__builtin_ia32_scattersiv4df">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v4i32_ty, llvm_v4f64_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scattersiv4_di :
+ GCCBuiltin<"__builtin_ia32_scattersiv4di">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v4i32_ty, llvm_v4i64_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scattersiv4_sf :
+ GCCBuiltin<"__builtin_ia32_scattersiv4sf">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v4i32_ty, llvm_v4f32_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scattersiv4_si :
+ GCCBuiltin<"__builtin_ia32_scattersiv4si">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v4i32_ty, llvm_v4i32_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scattersiv8_sf :
+ GCCBuiltin<"__builtin_ia32_scattersiv8sf">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v8i32_ty, llvm_v8f32_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ def int_x86_avx512_scattersiv8_si :
+ GCCBuiltin<"__builtin_ia32_scattersiv8si">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_i8_ty, llvm_v8i32_ty, llvm_v8i32_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+
+ // gather prefetch
+ def int_x86_avx512_gatherpf_dpd_512 : GCCBuiltin<"__builtin_ia32_gatherpfdpd">,
+ Intrinsic<[], [llvm_i8_ty, llvm_v8i32_ty, llvm_ptr_ty,
+ llvm_i32_ty, llvm_i32_ty], [IntrArgMemOnly]>;
+ def int_x86_avx512_gatherpf_dps_512 : GCCBuiltin<"__builtin_ia32_gatherpfdps">,
+ Intrinsic<[], [llvm_i16_ty, llvm_v16i32_ty, llvm_ptr_ty,
+ llvm_i32_ty, llvm_i32_ty], [IntrArgMemOnly]>;
+ def int_x86_avx512_gatherpf_qpd_512 : GCCBuiltin<"__builtin_ia32_gatherpfqpd">,
+ Intrinsic<[], [llvm_i8_ty, llvm_v8i64_ty, llvm_ptr_ty,
+ llvm_i32_ty, llvm_i32_ty], [IntrArgMemOnly]>;
+ def int_x86_avx512_gatherpf_qps_512 : GCCBuiltin<"__builtin_ia32_gatherpfqps">,
+ Intrinsic<[], [llvm_i8_ty, llvm_v8i64_ty, llvm_ptr_ty,
+ llvm_i32_ty, llvm_i32_ty], [IntrArgMemOnly]>;
+
+ // scatter prefetch
+ def int_x86_avx512_scatterpf_dpd_512 : GCCBuiltin<"__builtin_ia32_scatterpfdpd">,
+ Intrinsic<[], [llvm_i8_ty, llvm_v8i32_ty, llvm_ptr_ty,
+ llvm_i32_ty, llvm_i32_ty], [IntrArgMemOnly]>;
+ def int_x86_avx512_scatterpf_dps_512 : GCCBuiltin<"__builtin_ia32_scatterpfdps">,
+ Intrinsic<[], [llvm_i16_ty, llvm_v16i32_ty, llvm_ptr_ty,
+ llvm_i32_ty, llvm_i32_ty], [IntrArgMemOnly]>;
+ def int_x86_avx512_scatterpf_qpd_512 : GCCBuiltin<"__builtin_ia32_scatterpfqpd">,
+ Intrinsic<[], [llvm_i8_ty, llvm_v8i64_ty, llvm_ptr_ty,
+ llvm_i32_ty, llvm_i32_ty], [IntrArgMemOnly]>;
+ def int_x86_avx512_scatterpf_qps_512 : GCCBuiltin<"__builtin_ia32_scatterpfqps">,
+ Intrinsic<[], [llvm_i8_ty, llvm_v8i64_ty, llvm_ptr_ty,
+ llvm_i32_ty, llvm_i32_ty], [IntrArgMemOnly]>;
+}
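+
+// Operand order differs between the two families above: gathers take
+// (src, base pointer, index vector, mask, scale) and return the loaded vector,
+// while scatters take (base pointer, mask, index vector, values, scale) and
+// return nothing. A minimal C sketch for the dpd_512 pair, assuming the
+// Intel-style helpers from <immintrin.h> (not declared here):
+//
+//   #include <immintrin.h>            // compile with -mavx512f
+//   __m512d gather8(const double *base, __m256i idx, __mmask8 k, __m512d src) {
+//     return _mm512_mask_i32gather_pd(src, k, idx, base, 8);  // scale = sizeof(double)
+//   }
+//   void scatter8(double *base, __m256i idx, __mmask8 k, __m512d v) {
+//     _mm512_mask_i32scatter_pd(base, k, idx, v, 8);
+//   }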
+
+// AVX-512 conflict detection instructions (VPCONFLICTD/VPCONFLICTQ)
+let TargetPrefix = "x86" in {
+ def int_x86_avx512_mask_conflict_d_128 :
+ GCCBuiltin<"__builtin_ia32_vpconflictsi_128_mask">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_conflict_d_256 :
+ GCCBuiltin<"__builtin_ia32_vpconflictsi_256_mask">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8i32_ty, llvm_v8i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_conflict_d_512 :
+ GCCBuiltin<"__builtin_ia32_vpconflictsi_512_mask">,
+ Intrinsic<[llvm_v16i32_ty],
+ [llvm_v16i32_ty, llvm_v16i32_ty, llvm_i16_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mask_conflict_q_128 :
+ GCCBuiltin<"__builtin_ia32_vpconflictdi_128_mask">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2i64_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_conflict_q_256 :
+ GCCBuiltin<"__builtin_ia32_vpconflictdi_256_mask">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4i64_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_conflict_q_512 :
+ GCCBuiltin<"__builtin_ia32_vpconflictdi_512_mask">,
+ Intrinsic<[llvm_v8i64_ty],
+ [llvm_v8i64_ty, llvm_v8i64_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+}
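+
+// Each conflict result element is a bit mask identifying which lower-numbered
+// elements hold the same value (zero means the value has not been seen yet),
+// the building block for vectorizing loops with possible index collisions.
+// Sketch assuming the Intel-style helper _mm512_conflict_epi32 from
+// <immintrin.h> (not declared here), which reaches the masked form above:
+//
+//   #include <immintrin.h>            // compile with -mavx512cd
+//   __m512i conflicts(__m512i x) {
+//     return _mm512_conflict_epi32(x);  // lane i: bit j set iff j < i and x[j] == x[i]
+//   }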
+
+// Compares
+let TargetPrefix = "x86" in {
+  // Scalar compares with predicate and SAE operands
+ def int_x86_avx512_vcomi_sd : GCCBuiltin<"__builtin_ia32_vcomisd">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty,
+ llvm_v2f64_ty, llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vcomi_ss : GCCBuiltin<"__builtin_ia32_vcomiss">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty,
+ llvm_v4f32_ty, llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+}
+
+// Compress, Expand
+let TargetPrefix = "x86" in {
+ def int_x86_avx512_mask_compress_ps_512 :
+ GCCBuiltin<"__builtin_ia32_compresssf512_mask">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_compress_pd_512 :
+ GCCBuiltin<"__builtin_ia32_compressdf512_mask">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_compress_ps_256 :
+ GCCBuiltin<"__builtin_ia32_compresssf256_mask">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty, llvm_v8f32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_compress_pd_256 :
+ GCCBuiltin<"__builtin_ia32_compressdf256_mask">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty, llvm_v4f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_compress_ps_128 :
+ GCCBuiltin<"__builtin_ia32_compresssf128_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_compress_pd_128 :
+ GCCBuiltin<"__builtin_ia32_compressdf128_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_compress_d_512 :
+ GCCBuiltin<"__builtin_ia32_compresssi512_mask">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty,
+ llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_compress_q_512 :
+ GCCBuiltin<"__builtin_ia32_compressdi512_mask">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_compress_d_256 :
+ GCCBuiltin<"__builtin_ia32_compresssi256_mask">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_v8i32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_compress_q_256 :
+ GCCBuiltin<"__builtin_ia32_compressdi256_mask">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_compress_d_128 :
+ GCCBuiltin<"__builtin_ia32_compresssi128_mask">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_compress_q_128 :
+ GCCBuiltin<"__builtin_ia32_compressdi128_mask">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_compress_b_512 :
+ GCCBuiltin<"__builtin_ia32_compressqi512_mask">,
+ Intrinsic<[llvm_v64i8_ty], [llvm_v64i8_ty, llvm_v64i8_ty,
+ llvm_i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_compress_w_512 :
+ GCCBuiltin<"__builtin_ia32_compresshi512_mask">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty, llvm_v32i16_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_compress_b_256 :
+ GCCBuiltin<"__builtin_ia32_compressqi256_mask">,
+ Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty, llvm_v32i8_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_compress_w_256 :
+ GCCBuiltin<"__builtin_ia32_compresshi256_mask">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty, llvm_v16i16_ty,
+ llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_compress_b_128 :
+ GCCBuiltin<"__builtin_ia32_compressqi128_mask">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty,
+ llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_compress_w_128 :
+ GCCBuiltin<"__builtin_ia32_compresshi128_mask">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+
+// expand
+ def int_x86_avx512_mask_expand_ps_512 :
+ GCCBuiltin<"__builtin_ia32_expandsf512_mask">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_expand_pd_512 :
+ GCCBuiltin<"__builtin_ia32_expanddf512_mask">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_expand_ps_256 :
+ GCCBuiltin<"__builtin_ia32_expandsf256_mask">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty, llvm_v8f32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_expand_pd_256 :
+ GCCBuiltin<"__builtin_ia32_expanddf256_mask">,
+ Intrinsic<[llvm_v4f64_ty], [llvm_v4f64_ty, llvm_v4f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_expand_ps_128 :
+ GCCBuiltin<"__builtin_ia32_expandsf128_mask">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_expand_pd_128 :
+ GCCBuiltin<"__builtin_ia32_expanddf128_mask">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_expand_d_512 :
+ GCCBuiltin<"__builtin_ia32_expandsi512_mask">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty,
+ llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_expand_q_512 :
+ GCCBuiltin<"__builtin_ia32_expanddi512_mask">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_expand_d_256 :
+ GCCBuiltin<"__builtin_ia32_expandsi256_mask">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_v8i32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_expand_q_256 :
+ GCCBuiltin<"__builtin_ia32_expanddi256_mask">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_expand_d_128 :
+ GCCBuiltin<"__builtin_ia32_expandsi128_mask">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_expand_q_128 :
+ GCCBuiltin<"__builtin_ia32_expanddi128_mask">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_expand_b_512 :
+ GCCBuiltin<"__builtin_ia32_expandqi512_mask">,
+ Intrinsic<[llvm_v64i8_ty], [llvm_v64i8_ty, llvm_v64i8_ty,
+ llvm_i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_expand_w_512 :
+ GCCBuiltin<"__builtin_ia32_expandhi512_mask">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty, llvm_v32i16_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_expand_b_256 :
+ GCCBuiltin<"__builtin_ia32_expandqi256_mask">,
+ Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty, llvm_v32i8_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_expand_w_256 :
+ GCCBuiltin<"__builtin_ia32_expandhi256_mask">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty, llvm_v16i16_ty,
+ llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_expand_b_128 :
+ GCCBuiltin<"__builtin_ia32_expandqi128_mask">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty,
+ llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_expand_w_128 :
+ GCCBuiltin<"__builtin_ia32_expandhi128_mask">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_i8_ty], [IntrNoMem]>;
+}
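+
+// Compress packs the mask-selected elements contiguously into the low lanes of
+// the result, taking the remaining lanes from the passthrough operand; expand is
+// the inverse, spreading the low source elements out to the mask-selected lanes.
+// Sketch assuming the Intel-style helpers from <immintrin.h> (not declared here):
+//
+//   #include <immintrin.h>            // compile with -mavx512f
+//   __m512 pack_selected(__m512 src, __mmask16 k, __m512 a) {
+//     return _mm512_mask_compress_ps(src, k, a);  // selected lanes of a, packed low
+//   }
+//   __m512 unpack_selected(__m512 src, __mmask16 k, __m512 a) {
+//     return _mm512_mask_expand_ps(src, k, a);    // low lanes of a spread to set mask bits
+//   }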
+
+// VBMI2 Concat & Shift
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_vpshld_q_512 :
+ GCCBuiltin<"__builtin_ia32_vpshldq512">,
+ Intrinsic<[llvm_v8i64_ty],
+ [llvm_v8i64_ty, llvm_v8i64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpshld_q_256 :
+ GCCBuiltin<"__builtin_ia32_vpshldq256">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4i64_ty, llvm_v4i64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpshld_q_128 :
+ GCCBuiltin<"__builtin_ia32_vpshldq128">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2i64_ty, llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpshld_d_512 :
+ GCCBuiltin<"__builtin_ia32_vpshldd512">,
+ Intrinsic<[llvm_v16i32_ty],
+ [llvm_v16i32_ty, llvm_v16i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpshld_d_256 :
+ GCCBuiltin<"__builtin_ia32_vpshldd256">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8i32_ty, llvm_v8i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpshld_d_128 :
+ GCCBuiltin<"__builtin_ia32_vpshldd128">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpshld_w_512 :
+ GCCBuiltin<"__builtin_ia32_vpshldw512">,
+ Intrinsic<[llvm_v32i16_ty],
+ [llvm_v32i16_ty, llvm_v32i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpshld_w_256 :
+ GCCBuiltin<"__builtin_ia32_vpshldw256">,
+ Intrinsic<[llvm_v16i16_ty],
+ [llvm_v16i16_ty, llvm_v16i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpshld_w_128 :
+ GCCBuiltin<"__builtin_ia32_vpshldw128">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v8i16_ty, llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpshrd_q_512 :
+ GCCBuiltin<"__builtin_ia32_vpshrdq512">,
+ Intrinsic<[llvm_v8i64_ty],
+ [llvm_v8i64_ty, llvm_v8i64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpshrd_q_256 :
+ GCCBuiltin<"__builtin_ia32_vpshrdq256">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4i64_ty, llvm_v4i64_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpshrd_q_128 :
+ GCCBuiltin<"__builtin_ia32_vpshrdq128">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2i64_ty, llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpshrd_d_512 :
+ GCCBuiltin<"__builtin_ia32_vpshrdd512">,
+ Intrinsic<[llvm_v16i32_ty],
+ [llvm_v16i32_ty, llvm_v16i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpshrd_d_256 :
+ GCCBuiltin<"__builtin_ia32_vpshrdd256">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8i32_ty, llvm_v8i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpshrd_d_128 :
+ GCCBuiltin<"__builtin_ia32_vpshrdd128">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vpshrd_w_512 :
+ GCCBuiltin<"__builtin_ia32_vpshrdw512">,
+ Intrinsic<[llvm_v32i16_ty],
+ [llvm_v32i16_ty, llvm_v32i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpshrd_w_256 :
+ GCCBuiltin<"__builtin_ia32_vpshrdw256">,
+ Intrinsic<[llvm_v16i16_ty],
+ [llvm_v16i16_ty, llvm_v16i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_vpshrd_w_128 :
+ GCCBuiltin<"__builtin_ia32_vpshrdw128">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v8i16_ty, llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_vpshldv_w_128 :
+ GCCBuiltin<"__builtin_ia32_vpshldvw128_mask">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_v8i16_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshldv_w_128 :
+ GCCBuiltin<"__builtin_ia32_vpshldvw128_maskz">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_v8i16_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vpshldv_w_256 :
+ GCCBuiltin<"__builtin_ia32_vpshldvw256_mask">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty, llvm_v16i16_ty,
+ llvm_v16i16_ty, llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshldv_w_256 :
+ GCCBuiltin<"__builtin_ia32_vpshldvw256_maskz">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty, llvm_v16i16_ty,
+ llvm_v16i16_ty, llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vpshldv_w_512 :
+ GCCBuiltin<"__builtin_ia32_vpshldvw512_mask">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty, llvm_v32i16_ty,
+ llvm_v32i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshldv_w_512 :
+ GCCBuiltin<"__builtin_ia32_vpshldvw512_maskz">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty, llvm_v32i16_ty,
+ llvm_v32i16_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_vpshldv_q_128 :
+ GCCBuiltin<"__builtin_ia32_vpshldvq128_mask">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty,
+ llvm_v2i64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshldv_q_128 :
+ GCCBuiltin<"__builtin_ia32_vpshldvq128_maskz">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty,
+ llvm_v2i64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vpshldv_q_256 :
+ GCCBuiltin<"__builtin_ia32_vpshldvq256_mask">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty,
+ llvm_v4i64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshldv_q_256 :
+ GCCBuiltin<"__builtin_ia32_vpshldvq256_maskz">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty,
+ llvm_v4i64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vpshldv_q_512 :
+ GCCBuiltin<"__builtin_ia32_vpshldvq512_mask">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty,
+ llvm_v8i64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshldv_q_512 :
+ GCCBuiltin<"__builtin_ia32_vpshldvq512_maskz">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty,
+ llvm_v8i64_ty, llvm_i8_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_vpshldv_d_128 :
+ GCCBuiltin<"__builtin_ia32_vpshldvd128_mask">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty,
+ llvm_v4i32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshldv_d_128 :
+ GCCBuiltin<"__builtin_ia32_vpshldvd128_maskz">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty,
+ llvm_v4i32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vpshldv_d_256 :
+ GCCBuiltin<"__builtin_ia32_vpshldvd256_mask">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_v8i32_ty,
+ llvm_v8i32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshldv_d_256 :
+ GCCBuiltin<"__builtin_ia32_vpshldvd256_maskz">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_v8i32_ty,
+ llvm_v8i32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vpshldv_d_512 :
+ GCCBuiltin<"__builtin_ia32_vpshldvd512_mask">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty,
+ llvm_v16i32_ty, llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshldv_d_512 :
+ GCCBuiltin<"__builtin_ia32_vpshldvd512_maskz">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty,
+ llvm_v16i32_ty, llvm_i16_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_vpshrdv_w_128 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvw128_mask">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_v8i16_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshrdv_w_128 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvw128_maskz">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty,
+ llvm_v8i16_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vpshrdv_w_256 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvw256_mask">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty, llvm_v16i16_ty,
+ llvm_v16i16_ty, llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshrdv_w_256 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvw256_maskz">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16i16_ty, llvm_v16i16_ty,
+ llvm_v16i16_ty, llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vpshrdv_w_512 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvw512_mask">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty, llvm_v32i16_ty,
+ llvm_v32i16_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshrdv_w_512 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvw512_maskz">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32i16_ty, llvm_v32i16_ty,
+ llvm_v32i16_ty, llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_vpshrdv_q_128 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvq128_mask">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty,
+ llvm_v2i64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshrdv_q_128 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvq128_maskz">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty,
+ llvm_v2i64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vpshrdv_q_256 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvq256_mask">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty,
+ llvm_v4i64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshrdv_q_256 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvq256_maskz">,
+ Intrinsic<[llvm_v4i64_ty], [llvm_v4i64_ty, llvm_v4i64_ty,
+ llvm_v4i64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vpshrdv_q_512 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvq512_mask">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty,
+ llvm_v8i64_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshrdv_q_512 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvq512_maskz">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty,
+ llvm_v8i64_ty, llvm_i8_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_vpshrdv_d_128 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvd128_mask">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty,
+ llvm_v4i32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshrdv_d_128 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvd128_maskz">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty,
+ llvm_v4i32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vpshrdv_d_256 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvd256_mask">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_v8i32_ty,
+ llvm_v8i32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshrdv_d_256 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvd256_maskz">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_v8i32_ty,
+ llvm_v8i32_ty, llvm_i8_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_vpshrdv_d_512 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvd512_mask">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty,
+ llvm_v16i32_ty, llvm_i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_maskz_vpshrdv_d_512 :
+ GCCBuiltin<"__builtin_ia32_vpshrdvd512_maskz">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty,
+ llvm_v16i32_ty, llvm_i16_ty], [IntrNoMem]>;
+}
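+
+// VPSHLD/VPSHRD form a double-width value from each pair of elements, shift it
+// by the immediate and return one half (a per-lane funnel shift); the
+// VPSHLDV/VPSHRDV forms take per-element shift counts, with mask/maskz variants
+// selecting or zeroing lanes. Sketch assuming the Intel-style helper
+// _mm512_shldi_epi64 from <immintrin.h> (not declared here):
+//
+//   #include <immintrin.h>            // compile with -mavx512vbmi2
+//   __m512i funnel_shl13(__m512i a, __m512i b) {
+//     return _mm512_shldi_epi64(a, b, 13);  // a shifted left by 13, low bits filled from b
+//   }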
+
+// truncate
+let TargetPrefix = "x86" in {
+ def int_x86_avx512_mask_pmov_qb_128 :
+ GCCBuiltin<"__builtin_ia32_pmovqb128_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v2i64_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_qb_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovqb128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_qb_128 :
+ GCCBuiltin<"__builtin_ia32_pmovsqb128_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v2i64_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_qb_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovsqb128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_qb_128 :
+ GCCBuiltin<"__builtin_ia32_pmovusqb128_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v2i64_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_qb_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovusqb128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_qb_256 :
+ GCCBuiltin<"__builtin_ia32_pmovqb256_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v4i64_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_qb_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovqb256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_qb_256 :
+ GCCBuiltin<"__builtin_ia32_pmovsqb256_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v4i64_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_qb_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovsqb256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_qb_256 :
+ GCCBuiltin<"__builtin_ia32_pmovusqb256_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v4i64_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_qb_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovusqb256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_qb_512 :
+ GCCBuiltin<"__builtin_ia32_pmovqb512_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v8i64_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_qb_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovqb512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_qb_512 :
+ GCCBuiltin<"__builtin_ia32_pmovsqb512_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v8i64_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_qb_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovsqb512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_qb_512 :
+ GCCBuiltin<"__builtin_ia32_pmovusqb512_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v8i64_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_qb_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovusqb512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_qw_128 :
+ GCCBuiltin<"__builtin_ia32_pmovqw128_mask">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v2i64_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_qw_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovqw128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_qw_128 :
+ GCCBuiltin<"__builtin_ia32_pmovsqw128_mask">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v2i64_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_qw_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovsqw128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_qw_128 :
+ GCCBuiltin<"__builtin_ia32_pmovusqw128_mask">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v2i64_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_qw_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovusqw128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_qw_256 :
+ GCCBuiltin<"__builtin_ia32_pmovqw256_mask">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v4i64_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_qw_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovqw256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_qw_256 :
+ GCCBuiltin<"__builtin_ia32_pmovsqw256_mask">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v4i64_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_qw_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovsqw256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_qw_256 :
+ GCCBuiltin<"__builtin_ia32_pmovusqw256_mask">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v4i64_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_qw_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovusqw256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_qw_512 :
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v8i64_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_qw_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovqw512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_qw_512 :
+ GCCBuiltin<"__builtin_ia32_pmovsqw512_mask">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v8i64_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_qw_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovsqw512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_qw_512 :
+ GCCBuiltin<"__builtin_ia32_pmovusqw512_mask">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v8i64_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_qw_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovusqw512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_qd_128 :
+ GCCBuiltin<"__builtin_ia32_pmovqd128_mask">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v2i64_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_qd_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovqd128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_qd_128 :
+ GCCBuiltin<"__builtin_ia32_pmovsqd128_mask">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v2i64_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_qd_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovsqd128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_qd_128 :
+ GCCBuiltin<"__builtin_ia32_pmovusqd128_mask">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v2i64_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_qd_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovusqd128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v2i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_qd_256 : // FIXME: Replace with trunc+select.
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i64_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_qd_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovqd256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_qd_256 :
+ GCCBuiltin<"__builtin_ia32_pmovsqd256_mask">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i64_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_qd_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovsqd256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_qd_256 :
+ GCCBuiltin<"__builtin_ia32_pmovusqd256_mask">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i64_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_qd_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovusqd256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v4i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_qd_512 : // FIXME: Replace with trunc+select.
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8i64_ty, llvm_v8i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_qd_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovqd512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_qd_512 :
+ GCCBuiltin<"__builtin_ia32_pmovsqd512_mask">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8i64_ty, llvm_v8i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_qd_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovsqd512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_qd_512 :
+ GCCBuiltin<"__builtin_ia32_pmovusqd512_mask">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8i64_ty, llvm_v8i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_qd_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovusqd512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i64_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_db_128 :
+ GCCBuiltin<"__builtin_ia32_pmovdb128_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v4i32_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_db_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovdb128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_db_128 :
+ GCCBuiltin<"__builtin_ia32_pmovsdb128_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v4i32_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_db_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovsdb128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_db_128 :
+ GCCBuiltin<"__builtin_ia32_pmovusdb128_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v4i32_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_db_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovusdb128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_db_256 :
+ GCCBuiltin<"__builtin_ia32_pmovdb256_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v8i32_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_db_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovdb256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i32_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_db_256 :
+ GCCBuiltin<"__builtin_ia32_pmovsdb256_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v8i32_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_db_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovsdb256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i32_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_db_256 :
+ GCCBuiltin<"__builtin_ia32_pmovusdb256_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v8i32_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_db_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovusdb256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i32_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_db_512 :
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i32_ty, llvm_v16i8_ty, llvm_i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_db_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovdb512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v16i32_ty, llvm_i16_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_db_512 :
+ GCCBuiltin<"__builtin_ia32_pmovsdb512_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i32_ty, llvm_v16i8_ty, llvm_i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_db_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovsdb512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v16i32_ty, llvm_i16_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_db_512 :
+ GCCBuiltin<"__builtin_ia32_pmovusdb512_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i32_ty, llvm_v16i8_ty, llvm_i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_db_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovusdb512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v16i32_ty, llvm_i16_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_dw_128 :
+ GCCBuiltin<"__builtin_ia32_pmovdw128_mask">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v4i32_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_dw_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovdw128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_dw_128 :
+ GCCBuiltin<"__builtin_ia32_pmovsdw128_mask">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v4i32_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_dw_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovsdw128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_dw_128 :
+ GCCBuiltin<"__builtin_ia32_pmovusdw128_mask">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v4i32_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_dw_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovusdw128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_dw_256 :
+ GCCBuiltin<"__builtin_ia32_pmovdw256_mask">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v8i32_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_dw_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovdw256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i32_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_dw_256 :
+ GCCBuiltin<"__builtin_ia32_pmovsdw256_mask">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v8i32_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_dw_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovsdw256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i32_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_dw_256 :
+ GCCBuiltin<"__builtin_ia32_pmovusdw256_mask">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v8i32_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_dw_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovusdw256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i32_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_dw_512 :
+ Intrinsic<[llvm_v16i16_ty],
+ [llvm_v16i32_ty, llvm_v16i16_ty, llvm_i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_dw_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovdw512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v16i32_ty, llvm_i16_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_dw_512 :
+ GCCBuiltin<"__builtin_ia32_pmovsdw512_mask">,
+ Intrinsic<[llvm_v16i16_ty],
+ [llvm_v16i32_ty, llvm_v16i16_ty, llvm_i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_dw_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovsdw512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v16i32_ty, llvm_i16_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_dw_512 :
+ GCCBuiltin<"__builtin_ia32_pmovusdw512_mask">,
+ Intrinsic<[llvm_v16i16_ty],
+ [llvm_v16i32_ty, llvm_v16i16_ty, llvm_i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_dw_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovusdw512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v16i32_ty, llvm_i16_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_wb_128 :
+ GCCBuiltin<"__builtin_ia32_pmovwb128_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v8i16_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_wb_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovwb128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_wb_128 :
+ GCCBuiltin<"__builtin_ia32_pmovswb128_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v8i16_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_wb_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovswb128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_wb_128 :
+ GCCBuiltin<"__builtin_ia32_pmovuswb128_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v8i16_ty, llvm_v16i8_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_wb_mem_128 :
+ GCCBuiltin<"__builtin_ia32_pmovuswb128mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_wb_256 : // FIXME: Replace with trunc+select.
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i16_ty, llvm_v16i8_ty, llvm_i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_wb_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovwb256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v16i16_ty, llvm_i16_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_wb_256 :
+ GCCBuiltin<"__builtin_ia32_pmovswb256_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i16_ty, llvm_v16i8_ty, llvm_i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_wb_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovswb256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v16i16_ty, llvm_i16_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_wb_256 :
+ GCCBuiltin<"__builtin_ia32_pmovuswb256_mask">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i16_ty, llvm_v16i8_ty, llvm_i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_wb_mem_256 :
+ GCCBuiltin<"__builtin_ia32_pmovuswb256mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v16i16_ty, llvm_i16_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmov_wb_512 : // FIXME: Replace with trunc+select.
+ Intrinsic<[llvm_v32i8_ty],
+ [llvm_v32i16_ty, llvm_v32i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmov_wb_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovwb512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v32i16_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovs_wb_512 :
+ GCCBuiltin<"__builtin_ia32_pmovswb512_mask">,
+ Intrinsic<[llvm_v32i8_ty],
+ [llvm_v32i16_ty, llvm_v32i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovs_wb_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovswb512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v32i16_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+ def int_x86_avx512_mask_pmovus_wb_512 :
+ GCCBuiltin<"__builtin_ia32_pmovuswb512_mask">,
+ Intrinsic<[llvm_v32i8_ty],
+ [llvm_v32i16_ty, llvm_v32i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mask_pmovus_wb_mem_512 :
+ GCCBuiltin<"__builtin_ia32_pmovuswb512mem_mask">,
+ Intrinsic<[],
+ [llvm_ptr_ty, llvm_v32i16_ty, llvm_i32_ty],
+ [IntrArgMemOnly]>;
+}
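+
+// The pmov family narrows each element: plain pmov truncates, pmovs saturates as
+// signed and pmovus saturates as unsigned, while the *_mem_* variants store the
+// (mask-selected) narrowed elements directly to memory. Sketch for the q->w case,
+// assuming the Intel-style helpers from <immintrin.h> (not declared here):
+//
+//   #include <immintrin.h>            // compile with -mavx512f
+//   __m128i narrow_trunc(__m512i v) { return _mm512_cvtepi64_epi16(v);   }  // VPMOVQW
+//   __m128i narrow_ssat(__m512i v)  { return _mm512_cvtsepi64_epi16(v);  }  // VPMOVSQW
+//   __m128i narrow_usat(__m512i v)  { return _mm512_cvtusepi64_epi16(v); }  // VPMOVUSQW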
+
+// Bitwise ternary logic
+let TargetPrefix = "x86" in {
+ def int_x86_avx512_pternlog_d_128 :
+ GCCBuiltin<"__builtin_ia32_pternlogd128">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_pternlog_d_256 :
+ GCCBuiltin<"__builtin_ia32_pternlogd256">,
+ Intrinsic<[llvm_v8i32_ty],
+ [llvm_v8i32_ty, llvm_v8i32_ty, llvm_v8i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_pternlog_d_512 :
+ GCCBuiltin<"__builtin_ia32_pternlogd512">,
+ Intrinsic<[llvm_v16i32_ty],
+ [llvm_v16i32_ty, llvm_v16i32_ty, llvm_v16i32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_pternlog_q_128 :
+ GCCBuiltin<"__builtin_ia32_pternlogq128">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_pternlog_q_256 :
+ GCCBuiltin<"__builtin_ia32_pternlogq256">,
+ Intrinsic<[llvm_v4i64_ty],
+ [llvm_v4i64_ty, llvm_v4i64_ty, llvm_v4i64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_pternlog_q_512 :
+ GCCBuiltin<"__builtin_ia32_pternlogq512">,
+ Intrinsic<[llvm_v8i64_ty],
+ [llvm_v8i64_ty, llvm_v8i64_ty, llvm_v8i64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+}
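+
+// The trailing i32 immediate of vpternlog is an 8-bit truth table: at every bit
+// position the three source bits form a 3-bit index, and the corresponding bit
+// of the immediate supplies the result, so 0x96 encodes a three-way XOR. Sketch
+// assuming the Intel-style helper _mm512_ternarylogic_epi32 from <immintrin.h>
+// (not declared here):
+//
+//   #include <immintrin.h>            // compile with -mavx512f
+//   __m512i xor3(__m512i a, __m512i b, __m512i c) {
+//     return _mm512_ternarylogic_epi32(a, b, c, 0x96);  // a ^ b ^ c in one instruction
+//   }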
+
+// Misc.
+let TargetPrefix = "x86" in {
+  // NOTE: Clang does not use these comparison intrinsics until the distinction
+  //       in signaling behaviour is implemented.
+ def int_x86_avx512_cmp_ps_512 :
+ Intrinsic<[llvm_v16i1_ty], [llvm_v16f32_ty, llvm_v16f32_ty,
+ llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cmp_pd_512 :
+ Intrinsic<[llvm_v8i1_ty], [llvm_v8f64_ty, llvm_v8f64_ty,
+ llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cmp_ps_256 :
+ Intrinsic<[llvm_v8i1_ty], [llvm_v8f32_ty, llvm_v8f32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cmp_pd_256 :
+ Intrinsic<[llvm_v4i1_ty], [llvm_v4f64_ty, llvm_v4f64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cmp_ps_128 :
+ Intrinsic<[llvm_v4i1_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cmp_pd_128 :
+ Intrinsic<[llvm_v2i1_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_mask_cmp_ss :
+ GCCBuiltin<"__builtin_ia32_cmpss_mask">,
+ Intrinsic<[llvm_i8_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i32_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_mask_cmp_sd :
+ GCCBuiltin<"__builtin_ia32_cmpsd_mask">,
+ Intrinsic<[llvm_i8_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i32_ty, llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// SHA intrinsics
+let TargetPrefix = "x86" in {
+ def int_x86_sha1rnds4 : GCCBuiltin<"__builtin_ia32_sha1rnds4">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sha1nexte : GCCBuiltin<"__builtin_ia32_sha1nexte">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_sha1msg1 : GCCBuiltin<"__builtin_ia32_sha1msg1">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_sha1msg2 : GCCBuiltin<"__builtin_ia32_sha1msg2">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_sha256rnds2 : GCCBuiltin<"__builtin_ia32_sha256rnds2">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_x86_sha256msg1 : GCCBuiltin<"__builtin_ia32_sha256msg1">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_sha256msg2 : GCCBuiltin<"__builtin_ia32_sha256msg2">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// Thread synchronization ops with timer.
+let TargetPrefix = "x86" in {
+ def int_x86_monitorx
+ : GCCBuiltin<"__builtin_ia32_monitorx">,
+ Intrinsic<[], [ llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty ], []>;
+ def int_x86_mwaitx
+ : GCCBuiltin<"__builtin_ia32_mwaitx">,
+ Intrinsic<[], [ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty ], []>;
+}
+
+//===----------------------------------------------------------------------===//
+// Cache-line zero
+let TargetPrefix = "x86" in {
+ def int_x86_clzero : GCCBuiltin<"__builtin_ia32_clzero">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+}
+
+//===----------------------------------------------------------------------===//
+// Cache write back intrinsics
+
+let TargetPrefix = "x86" in {
+ // Write back and invalidate
+ def int_x86_wbinvd : GCCBuiltin<"__builtin_ia32_wbinvd">,
+ Intrinsic<[], [], []>;
+
+ // Write back no-invalidate
+ def int_x86_wbnoinvd : GCCBuiltin<"__builtin_ia32_wbnoinvd">,
+ Intrinsic<[], [], []>;
+}
+
+//===----------------------------------------------------------------------===//
+// Cache-line demote
+
+let TargetPrefix = "x86" in {
+ def int_x86_cldemote : GCCBuiltin<"__builtin_ia32_cldemote">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+}
+
+//===----------------------------------------------------------------------===//
+// Wait and pause enhancements
+let TargetPrefix = "x86" in {
+ def int_x86_umonitor : GCCBuiltin<"__builtin_ia32_umonitor">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+ def int_x86_umwait : GCCBuiltin<"__builtin_ia32_umwait">,
+ Intrinsic<[llvm_i8_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], []>;
+ def int_x86_tpause : GCCBuiltin<"__builtin_ia32_tpause">,
+ Intrinsic<[llvm_i8_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], []>;
+}
+
+//===----------------------------------------------------------------------===//
+// Direct Move Instructions
+
+let TargetPrefix = "x86" in {
+ def int_x86_directstore32 : GCCBuiltin<"__builtin_ia32_directstore_u32">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty], []>;
+ def int_x86_directstore64 : GCCBuiltin<"__builtin_ia32_directstore_u64">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i64_ty], []>;
+ def int_x86_movdir64b : GCCBuiltin<"__builtin_ia32_movdir64b">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_ptr_ty], []>;
+}
+
+//===----------------------------------------------------------------------===//
+// PTWrite - Write data to processor trace packet
+
+let TargetPrefix = "x86" in {
+ def int_x86_ptwrite32 : GCCBuiltin<"__builtin_ia32_ptwrite32">,
+ Intrinsic<[], [llvm_i32_ty], []>;
+ def int_x86_ptwrite64 : GCCBuiltin<"__builtin_ia32_ptwrite64">,
+ Intrinsic<[], [llvm_i64_ty], []>;
+}
+
+//===----------------------------------------------------------------------===//
+// INVPCID - Invalidate Process-Context Identifier
+
+let TargetPrefix = "x86" in {
+ def int_x86_invpcid : GCCBuiltin<"__builtin_ia32_invpcid">,
+ Intrinsic<[], [llvm_i32_ty, llvm_ptr_ty], []>;
+}
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsXCore.td b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsXCore.td
new file mode 100644
index 000000000..b614e1ed6
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/IntrinsicsXCore.td
@@ -0,0 +1,121 @@
+//==- IntrinsicsXCore.td - XCore intrinsics -*- tablegen -*-==//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines all of the XCore-specific intrinsics.
+//
+//===----------------------------------------------------------------------===//
+
+let TargetPrefix = "xcore" in { // All intrinsics start with "llvm.xcore.".
+ // Miscellaneous instructions.
+ def int_xcore_bitrev : Intrinsic<[llvm_i32_ty],[llvm_i32_ty],[IntrNoMem]>,
+ GCCBuiltin<"__builtin_bitrev">;
+ def int_xcore_crc8 : Intrinsic<[llvm_i32_ty, llvm_i32_ty],
+ [llvm_i32_ty,llvm_i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_xcore_crc32 : Intrinsic<[llvm_i32_ty],
+ [llvm_i32_ty,llvm_i32_ty,llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_xcore_sext : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_xcore_zext : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+ def int_xcore_getid : Intrinsic<[llvm_i32_ty],[],[IntrNoMem]>,
+ GCCBuiltin<"__builtin_getid">;
+ def int_xcore_getps : Intrinsic<[llvm_i32_ty],[llvm_i32_ty]>,
+ GCCBuiltin<"__builtin_getps">;
+ def int_xcore_setps : Intrinsic<[],[llvm_i32_ty, llvm_i32_ty]>,
+ GCCBuiltin<"__builtin_setps">;
+ def int_xcore_geted : Intrinsic<[llvm_i32_ty],[]>;
+ def int_xcore_getet : Intrinsic<[llvm_i32_ty],[]>;
+ def int_xcore_setsr : Intrinsic<[],[llvm_i32_ty]>;
+ def int_xcore_clrsr : Intrinsic<[],[llvm_i32_ty]>;
+
+ // Resource instructions.
+ def int_xcore_getr : Intrinsic<[llvm_anyptr_ty],[llvm_i32_ty]>;
+ def int_xcore_freer : Intrinsic<[],[llvm_anyptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_in : Intrinsic<[llvm_i32_ty],[llvm_anyptr_ty],[NoCapture<0>]>;
+ def int_xcore_int : Intrinsic<[llvm_i32_ty],[llvm_anyptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_inct : Intrinsic<[llvm_i32_ty],[llvm_anyptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_out : Intrinsic<[],[llvm_anyptr_ty, llvm_i32_ty],
+ [NoCapture<0>]>;
+ def int_xcore_outt : Intrinsic<[],[llvm_anyptr_ty, llvm_i32_ty],
+ [NoCapture<0>]>;
+ def int_xcore_outct : Intrinsic<[],[llvm_anyptr_ty, llvm_i32_ty],
+ [NoCapture<0>]>;
+ def int_xcore_chkct : Intrinsic<[],[llvm_anyptr_ty, llvm_i32_ty],
+ [NoCapture<0>]>;
+ def int_xcore_testct : Intrinsic<[llvm_i32_ty],[llvm_anyptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_testwct : Intrinsic<[llvm_i32_ty],[llvm_anyptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_setd : Intrinsic<[],[llvm_anyptr_ty, llvm_i32_ty],
+ [NoCapture<0>]>;
+ def int_xcore_setc : Intrinsic<[],[llvm_anyptr_ty, llvm_i32_ty],
+ [NoCapture<0>]>;
+ def int_xcore_inshr : Intrinsic<[llvm_i32_ty],[llvm_anyptr_ty, llvm_i32_ty],
+ [NoCapture<0>]>;
+ def int_xcore_outshr : Intrinsic<[llvm_i32_ty],[llvm_anyptr_ty, llvm_i32_ty],
+ [NoCapture<0>]>;
+ def int_xcore_setpt : Intrinsic<[],[llvm_anyptr_ty, llvm_i32_ty],
+ [NoCapture<0>]>;
+ def int_xcore_clrpt : Intrinsic<[],[llvm_anyptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_getts : Intrinsic<[llvm_i32_ty],[llvm_anyptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_syncr : Intrinsic<[],[llvm_anyptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_settw : Intrinsic<[],[llvm_anyptr_ty, llvm_i32_ty],
+ [NoCapture<0>]>;
+ def int_xcore_setv : Intrinsic<[],[llvm_anyptr_ty, llvm_ptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_setev : Intrinsic<[],[llvm_anyptr_ty, llvm_ptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_eeu : Intrinsic<[],[llvm_anyptr_ty], [NoCapture<0>]>;
+ def int_xcore_edu : Intrinsic<[],[llvm_anyptr_ty], [NoCapture<0>]>;
+ def int_xcore_setclk : Intrinsic<[],[llvm_anyptr_ty, llvm_anyptr_ty],
+ [NoCapture<0>, NoCapture<1>]>;
+ def int_xcore_setrdy : Intrinsic<[],[llvm_anyptr_ty, llvm_anyptr_ty],
+ [NoCapture<0>, NoCapture<1>]>;
+ def int_xcore_setpsc : Intrinsic<[],[llvm_anyptr_ty, llvm_i32_ty],
+ [NoCapture<0>]>;
+ def int_xcore_peek : Intrinsic<[llvm_i32_ty],[llvm_anyptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_endin : Intrinsic<[llvm_i32_ty],[llvm_anyptr_ty],
+ [NoCapture<0>]>;
+
+ // Intrinsics for events.
+ def int_xcore_waitevent : Intrinsic<[llvm_ptr_ty],[], [IntrReadMem]>;
+
+  // If any of the resources owned by the thread are ready, this returns the
+  // vector of one of the ready resources. If no resources owned by the thread
+  // are ready, the operand passed to the intrinsic is returned.
+ def int_xcore_checkevent : Intrinsic<[llvm_ptr_ty],[llvm_ptr_ty]>;
+
+ def int_xcore_clre : Intrinsic<[],[],[]>;
+
+ // Intrinsics for threads.
+ def int_xcore_getst : Intrinsic <[llvm_anyptr_ty],[llvm_anyptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_msync : Intrinsic <[],[llvm_anyptr_ty], [NoCapture<0>]>;
+ def int_xcore_ssync : Intrinsic <[],[]>;
+ def int_xcore_mjoin : Intrinsic <[],[llvm_anyptr_ty], [NoCapture<0>]>;
+ def int_xcore_initsp : Intrinsic <[],[llvm_anyptr_ty, llvm_ptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_initpc : Intrinsic <[],[llvm_anyptr_ty, llvm_ptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_initlr : Intrinsic <[],[llvm_anyptr_ty, llvm_ptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_initcp : Intrinsic <[],[llvm_anyptr_ty, llvm_ptr_ty],
+ [NoCapture<0>]>;
+ def int_xcore_initdp : Intrinsic <[],[llvm_anyptr_ty, llvm_ptr_ty],
+ [NoCapture<0>]>;
+}
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/Metadata.def b/capstone/suite/synctools/tablegen/include/llvm/IR/Metadata.def
new file mode 100644
index 000000000..70a03f28b
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/Metadata.def
@@ -0,0 +1,126 @@
+//===- llvm/IR/Metadata.def - Metadata definitions --------------*- C++ -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// Macros for running through all types of metadata.
+//
+//===----------------------------------------------------------------------===//
+
+#if !(defined HANDLE_METADATA || defined HANDLE_METADATA_LEAF || \
+ defined HANDLE_METADATA_BRANCH || defined HANDLE_MDNODE_LEAF || \
+ defined HANDLE_MDNODE_LEAF_UNIQUABLE || defined HANDLE_MDNODE_BRANCH || \
+ defined HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE || \
+ defined HANDLE_SPECIALIZED_MDNODE_LEAF || \
+ defined HANDLE_SPECIALIZED_MDNODE_BRANCH)
+#error "Missing macro definition of HANDLE_METADATA*"
+#endif
+
+// Handler for all types of metadata.
+#ifndef HANDLE_METADATA
+#define HANDLE_METADATA(CLASS)
+#endif
+
+// Handler for leaf nodes in the class hierarchy.
+#ifndef HANDLE_METADATA_LEAF
+#define HANDLE_METADATA_LEAF(CLASS) HANDLE_METADATA(CLASS)
+#endif
+
+// Handler for non-leaf nodes in the class hierarchy.
+#ifndef HANDLE_METADATA_BRANCH
+#define HANDLE_METADATA_BRANCH(CLASS) HANDLE_METADATA(CLASS)
+#endif
+
+// Handler for specialized and uniquable leaf nodes under MDNode. Defers to
+// HANDLE_MDNODE_LEAF_UNIQUABLE if it's defined, otherwise to
+// HANDLE_SPECIALIZED_MDNODE_LEAF.
+#ifndef HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE
+#ifdef HANDLE_MDNODE_LEAF_UNIQUABLE
+#define HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(CLASS) \
+ HANDLE_MDNODE_LEAF_UNIQUABLE(CLASS)
+#else
+#define HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(CLASS) \
+ HANDLE_SPECIALIZED_MDNODE_LEAF(CLASS)
+#endif
+#endif
+
+// Handler for uniquable leaf nodes under MDNode.
+#ifndef HANDLE_MDNODE_LEAF_UNIQUABLE
+#define HANDLE_MDNODE_LEAF_UNIQUABLE(CLASS) HANDLE_MDNODE_LEAF(CLASS)
+#endif
+
+// Handler for leaf nodes under MDNode.
+#ifndef HANDLE_MDNODE_LEAF
+#define HANDLE_MDNODE_LEAF(CLASS) HANDLE_METADATA_LEAF(CLASS)
+#endif
+
+// Handler for non-leaf nodes under MDNode.
+#ifndef HANDLE_MDNODE_BRANCH
+#define HANDLE_MDNODE_BRANCH(CLASS) HANDLE_METADATA_BRANCH(CLASS)
+#endif
+
+// Handler for specialized leaf nodes under MDNode.
+#ifndef HANDLE_SPECIALIZED_MDNODE_LEAF
+#define HANDLE_SPECIALIZED_MDNODE_LEAF(CLASS) HANDLE_MDNODE_LEAF(CLASS)
+#endif
+
+// Handler for specialized non-leaf nodes under MDNode.
+#ifndef HANDLE_SPECIALIZED_MDNODE_BRANCH
+#define HANDLE_SPECIALIZED_MDNODE_BRANCH(CLASS) HANDLE_MDNODE_BRANCH(CLASS)
+#endif
+
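+// Illustrative usage sketch (not part of the upstream header): because the
+// specialized handlers above default to the generic ones, defining only
+// HANDLE_MDNODE_LEAF_UNIQUABLE is enough to see every uniquable node, whether
+// specialized (e.g. DILocation) or not (e.g. MDTuple):
+//
+//   #define HANDLE_MDNODE_LEAF_UNIQUABLE(CLASS) visitUniquable##CLASS();
+//   #include "llvm/IR/Metadata.def"
+//
+// where the visitUniquable* functions stand for hypothetical per-class
+// visitors.
+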
+HANDLE_METADATA_LEAF(MDString)
+HANDLE_METADATA_BRANCH(ValueAsMetadata)
+HANDLE_METADATA_LEAF(ConstantAsMetadata)
+HANDLE_METADATA_LEAF(LocalAsMetadata)
+HANDLE_METADATA_LEAF(DistinctMDOperandPlaceholder)
+HANDLE_MDNODE_BRANCH(MDNode)
+HANDLE_MDNODE_LEAF_UNIQUABLE(MDTuple)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DILocation)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIExpression)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIGlobalVariableExpression)
+HANDLE_SPECIALIZED_MDNODE_BRANCH(DINode)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(GenericDINode)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DISubrange)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIEnumerator)
+HANDLE_SPECIALIZED_MDNODE_BRANCH(DIScope)
+HANDLE_SPECIALIZED_MDNODE_BRANCH(DIType)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIBasicType)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIDerivedType)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DICompositeType)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DISubroutineType)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIFile)
+HANDLE_SPECIALIZED_MDNODE_LEAF(DICompileUnit)
+HANDLE_SPECIALIZED_MDNODE_BRANCH(DILocalScope)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DISubprogram)
+HANDLE_SPECIALIZED_MDNODE_BRANCH(DILexicalBlockBase)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DILexicalBlock)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DILexicalBlockFile)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DINamespace)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIModule)
+HANDLE_SPECIALIZED_MDNODE_BRANCH(DITemplateParameter)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DITemplateTypeParameter)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DITemplateValueParameter)
+HANDLE_SPECIALIZED_MDNODE_BRANCH(DIVariable)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIGlobalVariable)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DILocalVariable)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DILabel)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIObjCProperty)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIImportedEntity)
+HANDLE_SPECIALIZED_MDNODE_BRANCH(DIMacroNode)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIMacro)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIMacroFile)
+
+#undef HANDLE_METADATA
+#undef HANDLE_METADATA_LEAF
+#undef HANDLE_METADATA_BRANCH
+#undef HANDLE_MDNODE_LEAF
+#undef HANDLE_MDNODE_LEAF_UNIQUABLE
+#undef HANDLE_MDNODE_BRANCH
+#undef HANDLE_SPECIALIZED_MDNODE_LEAF
+#undef HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE
+#undef HANDLE_SPECIALIZED_MDNODE_BRANCH
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/RuntimeLibcalls.def b/capstone/suite/synctools/tablegen/include/llvm/IR/RuntimeLibcalls.def
new file mode 100644
index 000000000..7ed90d959
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/RuntimeLibcalls.def
@@ -0,0 +1,527 @@
+//===-- llvm/RuntimeLibcalls.def - File that describes libcalls -*- C++ -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines all of the runtime library calls the backend can emit.
+// The various long double types cannot be merged, because 80-bit library
+// functions use "xf" and 128-bit use "tf".
+//
+// When adding PPCF128 functions here, note that their names generally need
+// to be overridden for Darwin with the xxx$LDBL128 form. See
+// PPCISelLowering.cpp.
+//
+//===----------------------------------------------------------------------===//
+
+// NOTE: NO INCLUDE GUARD DESIRED!
+
+// Provide definitions of macros so that users of this file do not have to
+// define everything to use it...
+
+// Declare the enumerator for each libcall, along with its default name. Some
+// libcalls have different names on particular OSes or architectures. These
+// are set in InitLibcallNames() in TargetLoweringBase.cpp and/or by targets
+// using TargetLoweringBase::setLibcallName()
+#ifndef HANDLE_LIBCALL
+#error "HANDLE_LIBCALL must be defined"
+#endif
+
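+// Illustrative usage sketch: like the other .def files, this one is expanded
+// by defining HANDLE_LIBCALL before including it, e.g. to build the libcall
+// enumeration:
+//
+//   enum Libcall {
+//   #define HANDLE_LIBCALL(code, name) code,
+//   #include "llvm/IR/RuntimeLibcalls.def"
+//   };
+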
+// Integer
+HANDLE_LIBCALL(SHL_I16, "__ashlhi3")
+HANDLE_LIBCALL(SHL_I32, "__ashlsi3")
+HANDLE_LIBCALL(SHL_I64, "__ashldi3")
+HANDLE_LIBCALL(SHL_I128, "__ashlti3")
+HANDLE_LIBCALL(SRL_I16, "__lshrhi3")
+HANDLE_LIBCALL(SRL_I32, "__lshrsi3")
+HANDLE_LIBCALL(SRL_I64, "__lshrdi3")
+HANDLE_LIBCALL(SRL_I128, "__lshrti3")
+HANDLE_LIBCALL(SRA_I16, "__ashrhi3")
+HANDLE_LIBCALL(SRA_I32, "__ashrsi3")
+HANDLE_LIBCALL(SRA_I64, "__ashrdi3")
+HANDLE_LIBCALL(SRA_I128, "__ashrti3")
+HANDLE_LIBCALL(MUL_I8, "__mulqi3")
+HANDLE_LIBCALL(MUL_I16, "__mulhi3")
+HANDLE_LIBCALL(MUL_I32, "__mulsi3")
+HANDLE_LIBCALL(MUL_I64, "__muldi3")
+HANDLE_LIBCALL(MUL_I128, "__multi3")
+HANDLE_LIBCALL(MULO_I32, "__mulosi4")
+HANDLE_LIBCALL(MULO_I64, "__mulodi4")
+HANDLE_LIBCALL(MULO_I128, "__muloti4")
+HANDLE_LIBCALL(SDIV_I8, "__divqi3")
+HANDLE_LIBCALL(SDIV_I16, "__divhi3")
+HANDLE_LIBCALL(SDIV_I32, "__divsi3")
+HANDLE_LIBCALL(SDIV_I64, "__divdi3")
+HANDLE_LIBCALL(SDIV_I128, "__divti3")
+HANDLE_LIBCALL(UDIV_I8, "__udivqi3")
+HANDLE_LIBCALL(UDIV_I16, "__udivhi3")
+HANDLE_LIBCALL(UDIV_I32, "__udivsi3")
+HANDLE_LIBCALL(UDIV_I64, "__udivdi3")
+HANDLE_LIBCALL(UDIV_I128, "__udivti3")
+HANDLE_LIBCALL(SREM_I8, "__modqi3")
+HANDLE_LIBCALL(SREM_I16, "__modhi3")
+HANDLE_LIBCALL(SREM_I32, "__modsi3")
+HANDLE_LIBCALL(SREM_I64, "__moddi3")
+HANDLE_LIBCALL(SREM_I128, "__modti3")
+HANDLE_LIBCALL(UREM_I8, "__umodqi3")
+HANDLE_LIBCALL(UREM_I16, "__umodhi3")
+HANDLE_LIBCALL(UREM_I32, "__umodsi3")
+HANDLE_LIBCALL(UREM_I64, "__umoddi3")
+HANDLE_LIBCALL(UREM_I128, "__umodti3")
+HANDLE_LIBCALL(SDIVREM_I8, nullptr)
+HANDLE_LIBCALL(SDIVREM_I16, nullptr)
+HANDLE_LIBCALL(SDIVREM_I32, nullptr)
+HANDLE_LIBCALL(SDIVREM_I64, nullptr)
+HANDLE_LIBCALL(SDIVREM_I128, nullptr)
+HANDLE_LIBCALL(UDIVREM_I8, nullptr)
+HANDLE_LIBCALL(UDIVREM_I16, nullptr)
+HANDLE_LIBCALL(UDIVREM_I32, nullptr)
+HANDLE_LIBCALL(UDIVREM_I64, nullptr)
+HANDLE_LIBCALL(UDIVREM_I128, nullptr)
+HANDLE_LIBCALL(NEG_I32, "__negsi2")
+HANDLE_LIBCALL(NEG_I64, "__negdi2")
+
+// Floating-point
+HANDLE_LIBCALL(ADD_F32, "__addsf3")
+HANDLE_LIBCALL(ADD_F64, "__adddf3")
+HANDLE_LIBCALL(ADD_F80, "__addxf3")
+HANDLE_LIBCALL(ADD_F128, "__addtf3")
+HANDLE_LIBCALL(ADD_PPCF128, "__gcc_qadd")
+HANDLE_LIBCALL(SUB_F32, "__subsf3")
+HANDLE_LIBCALL(SUB_F64, "__subdf3")
+HANDLE_LIBCALL(SUB_F80, "__subxf3")
+HANDLE_LIBCALL(SUB_F128, "__subtf3")
+HANDLE_LIBCALL(SUB_PPCF128, "__gcc_qsub")
+HANDLE_LIBCALL(MUL_F32, "__mulsf3")
+HANDLE_LIBCALL(MUL_F64, "__muldf3")
+HANDLE_LIBCALL(MUL_F80, "__mulxf3")
+HANDLE_LIBCALL(MUL_F128, "__multf3")
+HANDLE_LIBCALL(MUL_PPCF128, "__gcc_qmul")
+HANDLE_LIBCALL(DIV_F32, "__divsf3")
+HANDLE_LIBCALL(DIV_F64, "__divdf3")
+HANDLE_LIBCALL(DIV_F80, "__divxf3")
+HANDLE_LIBCALL(DIV_F128, "__divtf3")
+HANDLE_LIBCALL(DIV_PPCF128, "__gcc_qdiv")
+HANDLE_LIBCALL(REM_F32, "fmodf")
+HANDLE_LIBCALL(REM_F64, "fmod")
+HANDLE_LIBCALL(REM_F80, "fmodl")
+HANDLE_LIBCALL(REM_F128, "fmodl")
+HANDLE_LIBCALL(REM_PPCF128, "fmodl")
+HANDLE_LIBCALL(FMA_F32, "fmaf")
+HANDLE_LIBCALL(FMA_F64, "fma")
+HANDLE_LIBCALL(FMA_F80, "fmal")
+HANDLE_LIBCALL(FMA_F128, "fmal")
+HANDLE_LIBCALL(FMA_PPCF128, "fmal")
+HANDLE_LIBCALL(POWI_F32, "__powisf2")
+HANDLE_LIBCALL(POWI_F64, "__powidf2")
+HANDLE_LIBCALL(POWI_F80, "__powixf2")
+HANDLE_LIBCALL(POWI_F128, "__powitf2")
+HANDLE_LIBCALL(POWI_PPCF128, "__powitf2")
+HANDLE_LIBCALL(SQRT_F32, "sqrtf")
+HANDLE_LIBCALL(SQRT_F64, "sqrt")
+HANDLE_LIBCALL(SQRT_F80, "sqrtl")
+HANDLE_LIBCALL(SQRT_F128, "sqrtl")
+HANDLE_LIBCALL(SQRT_PPCF128, "sqrtl")
+HANDLE_LIBCALL(LOG_F32, "logf")
+HANDLE_LIBCALL(LOG_F64, "log")
+HANDLE_LIBCALL(LOG_F80, "logl")
+HANDLE_LIBCALL(LOG_F128, "logl")
+HANDLE_LIBCALL(LOG_PPCF128, "logl")
+HANDLE_LIBCALL(LOG_FINITE_F32, "__logf_finite")
+HANDLE_LIBCALL(LOG_FINITE_F64, "__log_finite")
+HANDLE_LIBCALL(LOG_FINITE_F80, "__logl_finite")
+HANDLE_LIBCALL(LOG_FINITE_F128, "__logl_finite")
+HANDLE_LIBCALL(LOG_FINITE_PPCF128, "__logl_finite")
+HANDLE_LIBCALL(LOG2_F32, "log2f")
+HANDLE_LIBCALL(LOG2_F64, "log2")
+HANDLE_LIBCALL(LOG2_F80, "log2l")
+HANDLE_LIBCALL(LOG2_F128, "log2l")
+HANDLE_LIBCALL(LOG2_PPCF128, "log2l")
+HANDLE_LIBCALL(LOG2_FINITE_F32, "__log2f_finite")
+HANDLE_LIBCALL(LOG2_FINITE_F64, "__log2_finite")
+HANDLE_LIBCALL(LOG2_FINITE_F80, "__log2l_finite")
+HANDLE_LIBCALL(LOG2_FINITE_F128, "__log2l_finite")
+HANDLE_LIBCALL(LOG2_FINITE_PPCF128, "__log2l_finite")
+HANDLE_LIBCALL(LOG10_F32, "log10f")
+HANDLE_LIBCALL(LOG10_F64, "log10")
+HANDLE_LIBCALL(LOG10_F80, "log10l")
+HANDLE_LIBCALL(LOG10_F128, "log10l")
+HANDLE_LIBCALL(LOG10_PPCF128, "log10l")
+HANDLE_LIBCALL(LOG10_FINITE_F32, "__log10f_finite")
+HANDLE_LIBCALL(LOG10_FINITE_F64, "__log10_finite")
+HANDLE_LIBCALL(LOG10_FINITE_F80, "__log10l_finite")
+HANDLE_LIBCALL(LOG10_FINITE_F128, "__log10l_finite")
+HANDLE_LIBCALL(LOG10_FINITE_PPCF128, "__log10l_finite")
+HANDLE_LIBCALL(EXP_F32, "expf")
+HANDLE_LIBCALL(EXP_F64, "exp")
+HANDLE_LIBCALL(EXP_F80, "expl")
+HANDLE_LIBCALL(EXP_F128, "expl")
+HANDLE_LIBCALL(EXP_PPCF128, "expl")
+HANDLE_LIBCALL(EXP_FINITE_F32, "__expf_finite")
+HANDLE_LIBCALL(EXP_FINITE_F64, "__exp_finite")
+HANDLE_LIBCALL(EXP_FINITE_F80, "__expl_finite")
+HANDLE_LIBCALL(EXP_FINITE_F128, "__expl_finite")
+HANDLE_LIBCALL(EXP_FINITE_PPCF128, "__expl_finite")
+HANDLE_LIBCALL(EXP2_F32, "exp2f")
+HANDLE_LIBCALL(EXP2_F64, "exp2")
+HANDLE_LIBCALL(EXP2_F80, "exp2l")
+HANDLE_LIBCALL(EXP2_F128, "exp2l")
+HANDLE_LIBCALL(EXP2_PPCF128, "exp2l")
+HANDLE_LIBCALL(EXP2_FINITE_F32, "__exp2f_finite")
+HANDLE_LIBCALL(EXP2_FINITE_F64, "__exp2_finite")
+HANDLE_LIBCALL(EXP2_FINITE_F80, "__exp2l_finite")
+HANDLE_LIBCALL(EXP2_FINITE_F128, "__exp2l_finite")
+HANDLE_LIBCALL(EXP2_FINITE_PPCF128, "__exp2l_finite")
+HANDLE_LIBCALL(SIN_F32, "sinf")
+HANDLE_LIBCALL(SIN_F64, "sin")
+HANDLE_LIBCALL(SIN_F80, "sinl")
+HANDLE_LIBCALL(SIN_F128, "sinl")
+HANDLE_LIBCALL(SIN_PPCF128, "sinl")
+HANDLE_LIBCALL(COS_F32, "cosf")
+HANDLE_LIBCALL(COS_F64, "cos")
+HANDLE_LIBCALL(COS_F80, "cosl")
+HANDLE_LIBCALL(COS_F128, "cosl")
+HANDLE_LIBCALL(COS_PPCF128, "cosl")
+HANDLE_LIBCALL(SINCOS_F32, nullptr)
+HANDLE_LIBCALL(SINCOS_F64, nullptr)
+HANDLE_LIBCALL(SINCOS_F80, nullptr)
+HANDLE_LIBCALL(SINCOS_F128, nullptr)
+HANDLE_LIBCALL(SINCOS_PPCF128, nullptr)
+HANDLE_LIBCALL(SINCOS_STRET_F32, nullptr)
+HANDLE_LIBCALL(SINCOS_STRET_F64, nullptr)
+HANDLE_LIBCALL(POW_F32, "powf")
+HANDLE_LIBCALL(POW_F64, "pow")
+HANDLE_LIBCALL(POW_F80, "powl")
+HANDLE_LIBCALL(POW_F128, "powl")
+HANDLE_LIBCALL(POW_PPCF128, "powl")
+HANDLE_LIBCALL(POW_FINITE_F32, "__powf_finite")
+HANDLE_LIBCALL(POW_FINITE_F64, "__pow_finite")
+HANDLE_LIBCALL(POW_FINITE_F80, "__powl_finite")
+HANDLE_LIBCALL(POW_FINITE_F128, "__powl_finite")
+HANDLE_LIBCALL(POW_FINITE_PPCF128, "__powl_finite")
+HANDLE_LIBCALL(CEIL_F32, "ceilf")
+HANDLE_LIBCALL(CEIL_F64, "ceil")
+HANDLE_LIBCALL(CEIL_F80, "ceill")
+HANDLE_LIBCALL(CEIL_F128, "ceill")
+HANDLE_LIBCALL(CEIL_PPCF128, "ceill")
+HANDLE_LIBCALL(TRUNC_F32, "truncf")
+HANDLE_LIBCALL(TRUNC_F64, "trunc")
+HANDLE_LIBCALL(TRUNC_F80, "truncl")
+HANDLE_LIBCALL(TRUNC_F128, "truncl")
+HANDLE_LIBCALL(TRUNC_PPCF128, "truncl")
+HANDLE_LIBCALL(RINT_F32, "rintf")
+HANDLE_LIBCALL(RINT_F64, "rint")
+HANDLE_LIBCALL(RINT_F80, "rintl")
+HANDLE_LIBCALL(RINT_F128, "rintl")
+HANDLE_LIBCALL(RINT_PPCF128, "rintl")
+HANDLE_LIBCALL(NEARBYINT_F32, "nearbyintf")
+HANDLE_LIBCALL(NEARBYINT_F64, "nearbyint")
+HANDLE_LIBCALL(NEARBYINT_F80, "nearbyintl")
+HANDLE_LIBCALL(NEARBYINT_F128, "nearbyintl")
+HANDLE_LIBCALL(NEARBYINT_PPCF128, "nearbyintl")
+HANDLE_LIBCALL(ROUND_F32, "roundf")
+HANDLE_LIBCALL(ROUND_F64, "round")
+HANDLE_LIBCALL(ROUND_F80, "roundl")
+HANDLE_LIBCALL(ROUND_F128, "roundl")
+HANDLE_LIBCALL(ROUND_PPCF128, "roundl")
+HANDLE_LIBCALL(FLOOR_F32, "floorf")
+HANDLE_LIBCALL(FLOOR_F64, "floor")
+HANDLE_LIBCALL(FLOOR_F80, "floorl")
+HANDLE_LIBCALL(FLOOR_F128, "floorl")
+HANDLE_LIBCALL(FLOOR_PPCF128, "floorl")
+HANDLE_LIBCALL(COPYSIGN_F32, "copysignf")
+HANDLE_LIBCALL(COPYSIGN_F64, "copysign")
+HANDLE_LIBCALL(COPYSIGN_F80, "copysignl")
+HANDLE_LIBCALL(COPYSIGN_F128, "copysignl")
+HANDLE_LIBCALL(COPYSIGN_PPCF128, "copysignl")
+HANDLE_LIBCALL(FMIN_F32, "fminf")
+HANDLE_LIBCALL(FMIN_F64, "fmin")
+HANDLE_LIBCALL(FMIN_F80, "fminl")
+HANDLE_LIBCALL(FMIN_F128, "fminl")
+HANDLE_LIBCALL(FMIN_PPCF128, "fminl")
+HANDLE_LIBCALL(FMAX_F32, "fmaxf")
+HANDLE_LIBCALL(FMAX_F64, "fmax")
+HANDLE_LIBCALL(FMAX_F80, "fmaxl")
+HANDLE_LIBCALL(FMAX_F128, "fmaxl")
+HANDLE_LIBCALL(FMAX_PPCF128, "fmaxl")
+
+// Conversion
+HANDLE_LIBCALL(FPEXT_F32_PPCF128, "__gcc_stoq")
+HANDLE_LIBCALL(FPEXT_F64_PPCF128, "__gcc_dtoq")
+HANDLE_LIBCALL(FPEXT_F80_F128, "__extendxftf2")
+HANDLE_LIBCALL(FPEXT_F64_F128, "__extenddftf2")
+HANDLE_LIBCALL(FPEXT_F32_F128, "__extendsftf2")
+HANDLE_LIBCALL(FPEXT_F32_F64, "__extendsfdf2")
+HANDLE_LIBCALL(FPEXT_F16_F32, "__gnu_h2f_ieee")
+HANDLE_LIBCALL(FPROUND_F32_F16, "__gnu_f2h_ieee")
+HANDLE_LIBCALL(FPROUND_F64_F16, "__truncdfhf2")
+HANDLE_LIBCALL(FPROUND_F80_F16, "__truncxfhf2")
+HANDLE_LIBCALL(FPROUND_F128_F16, "__trunctfhf2")
+HANDLE_LIBCALL(FPROUND_PPCF128_F16, "__trunctfhf2")
+HANDLE_LIBCALL(FPROUND_F64_F32, "__truncdfsf2")
+HANDLE_LIBCALL(FPROUND_F80_F32, "__truncxfsf2")
+HANDLE_LIBCALL(FPROUND_F128_F32, "__trunctfsf2")
+HANDLE_LIBCALL(FPROUND_PPCF128_F32, "__gcc_qtos")
+HANDLE_LIBCALL(FPROUND_F80_F64, "__truncxfdf2")
+HANDLE_LIBCALL(FPROUND_F128_F64, "__trunctfdf2")
+HANDLE_LIBCALL(FPROUND_PPCF128_F64, "__gcc_qtod")
+HANDLE_LIBCALL(FPROUND_F128_F80, "__trunctfxf2")
+HANDLE_LIBCALL(FPTOSINT_F32_I32, "__fixsfsi")
+HANDLE_LIBCALL(FPTOSINT_F32_I64, "__fixsfdi")
+HANDLE_LIBCALL(FPTOSINT_F32_I128, "__fixsfti")
+HANDLE_LIBCALL(FPTOSINT_F64_I32, "__fixdfsi")
+HANDLE_LIBCALL(FPTOSINT_F64_I64, "__fixdfdi")
+HANDLE_LIBCALL(FPTOSINT_F64_I128, "__fixdfti")
+HANDLE_LIBCALL(FPTOSINT_F80_I32, "__fixxfsi")
+HANDLE_LIBCALL(FPTOSINT_F80_I64, "__fixxfdi")
+HANDLE_LIBCALL(FPTOSINT_F80_I128, "__fixxfti")
+HANDLE_LIBCALL(FPTOSINT_F128_I32, "__fixtfsi")
+HANDLE_LIBCALL(FPTOSINT_F128_I64, "__fixtfdi")
+HANDLE_LIBCALL(FPTOSINT_F128_I128, "__fixtfti")
+HANDLE_LIBCALL(FPTOSINT_PPCF128_I32, "__gcc_qtou")
+HANDLE_LIBCALL(FPTOSINT_PPCF128_I64, "__fixtfdi")
+HANDLE_LIBCALL(FPTOSINT_PPCF128_I128, "__fixtfti")
+HANDLE_LIBCALL(FPTOUINT_F32_I32, "__fixunssfsi")
+HANDLE_LIBCALL(FPTOUINT_F32_I64, "__fixunssfdi")
+HANDLE_LIBCALL(FPTOUINT_F32_I128, "__fixunssfti")
+HANDLE_LIBCALL(FPTOUINT_F64_I32, "__fixunsdfsi")
+HANDLE_LIBCALL(FPTOUINT_F64_I64, "__fixunsdfdi")
+HANDLE_LIBCALL(FPTOUINT_F64_I128, "__fixunsdfti")
+HANDLE_LIBCALL(FPTOUINT_F80_I32, "__fixunsxfsi")
+HANDLE_LIBCALL(FPTOUINT_F80_I64, "__fixunsxfdi")
+HANDLE_LIBCALL(FPTOUINT_F80_I128, "__fixunsxfti")
+HANDLE_LIBCALL(FPTOUINT_F128_I32, "__fixunstfsi")
+HANDLE_LIBCALL(FPTOUINT_F128_I64, "__fixunstfdi")
+HANDLE_LIBCALL(FPTOUINT_F128_I128, "__fixunstfti")
+HANDLE_LIBCALL(FPTOUINT_PPCF128_I32, "__fixunstfsi")
+HANDLE_LIBCALL(FPTOUINT_PPCF128_I64, "__fixunstfdi")
+HANDLE_LIBCALL(FPTOUINT_PPCF128_I128, "__fixunstfti")
+HANDLE_LIBCALL(SINTTOFP_I32_F32, "__floatsisf")
+HANDLE_LIBCALL(SINTTOFP_I32_F64, "__floatsidf")
+HANDLE_LIBCALL(SINTTOFP_I32_F80, "__floatsixf")
+HANDLE_LIBCALL(SINTTOFP_I32_F128, "__floatsitf")
+HANDLE_LIBCALL(SINTTOFP_I32_PPCF128, "__gcc_itoq")
+HANDLE_LIBCALL(SINTTOFP_I64_F32, "__floatdisf")
+HANDLE_LIBCALL(SINTTOFP_I64_F64, "__floatdidf")
+HANDLE_LIBCALL(SINTTOFP_I64_F80, "__floatdixf")
+HANDLE_LIBCALL(SINTTOFP_I64_F128, "__floatditf")
+HANDLE_LIBCALL(SINTTOFP_I64_PPCF128, "__floatditf")
+HANDLE_LIBCALL(SINTTOFP_I128_F32, "__floattisf")
+HANDLE_LIBCALL(SINTTOFP_I128_F64, "__floattidf")
+HANDLE_LIBCALL(SINTTOFP_I128_F80, "__floattixf")
+HANDLE_LIBCALL(SINTTOFP_I128_F128, "__floattitf")
+HANDLE_LIBCALL(SINTTOFP_I128_PPCF128, "__floattitf")
+HANDLE_LIBCALL(UINTTOFP_I32_F32, "__floatunsisf")
+HANDLE_LIBCALL(UINTTOFP_I32_F64, "__floatunsidf")
+HANDLE_LIBCALL(UINTTOFP_I32_F80, "__floatunsixf")
+HANDLE_LIBCALL(UINTTOFP_I32_F128, "__floatunsitf")
+HANDLE_LIBCALL(UINTTOFP_I32_PPCF128, "__gcc_utoq")
+HANDLE_LIBCALL(UINTTOFP_I64_F32, "__floatundisf")
+HANDLE_LIBCALL(UINTTOFP_I64_F64, "__floatundidf")
+HANDLE_LIBCALL(UINTTOFP_I64_F80, "__floatundixf")
+HANDLE_LIBCALL(UINTTOFP_I64_F128, "__floatunditf")
+HANDLE_LIBCALL(UINTTOFP_I64_PPCF128, "__floatunditf")
+HANDLE_LIBCALL(UINTTOFP_I128_F32, "__floatuntisf")
+HANDLE_LIBCALL(UINTTOFP_I128_F64, "__floatuntidf")
+HANDLE_LIBCALL(UINTTOFP_I128_F80, "__floatuntixf")
+HANDLE_LIBCALL(UINTTOFP_I128_F128, "__floatuntitf")
+HANDLE_LIBCALL(UINTTOFP_I128_PPCF128, "__floatuntitf")
+
+// Comparison
+HANDLE_LIBCALL(OEQ_F32, "__eqsf2")
+HANDLE_LIBCALL(OEQ_F64, "__eqdf2")
+HANDLE_LIBCALL(OEQ_F128, "__eqtf2")
+HANDLE_LIBCALL(OEQ_PPCF128, "__gcc_qeq")
+HANDLE_LIBCALL(UNE_F32, "__nesf2")
+HANDLE_LIBCALL(UNE_F64, "__nedf2")
+HANDLE_LIBCALL(UNE_F128, "__netf2")
+HANDLE_LIBCALL(UNE_PPCF128, "__gcc_qne")
+HANDLE_LIBCALL(OGE_F32, "__gesf2")
+HANDLE_LIBCALL(OGE_F64, "__gedf2")
+HANDLE_LIBCALL(OGE_F128, "__getf2")
+HANDLE_LIBCALL(OGE_PPCF128, "__gcc_qge")
+HANDLE_LIBCALL(OLT_F32, "__ltsf2")
+HANDLE_LIBCALL(OLT_F64, "__ltdf2")
+HANDLE_LIBCALL(OLT_F128, "__lttf2")
+HANDLE_LIBCALL(OLT_PPCF128, "__gcc_qlt")
+HANDLE_LIBCALL(OLE_F32, "__lesf2")
+HANDLE_LIBCALL(OLE_F64, "__ledf2")
+HANDLE_LIBCALL(OLE_F128, "__letf2")
+HANDLE_LIBCALL(OLE_PPCF128, "__gcc_qle")
+HANDLE_LIBCALL(OGT_F32, "__gtsf2")
+HANDLE_LIBCALL(OGT_F64, "__gtdf2")
+HANDLE_LIBCALL(OGT_F128, "__gttf2")
+HANDLE_LIBCALL(OGT_PPCF128, "__gcc_qgt")
+HANDLE_LIBCALL(UO_F32, "__unordsf2")
+HANDLE_LIBCALL(UO_F64, "__unorddf2")
+HANDLE_LIBCALL(UO_F128, "__unordtf2")
+HANDLE_LIBCALL(UO_PPCF128, "__gcc_qunord")
+HANDLE_LIBCALL(O_F32, "__unordsf2")
+HANDLE_LIBCALL(O_F64, "__unorddf2")
+HANDLE_LIBCALL(O_F128, "__unordtf2")
+HANDLE_LIBCALL(O_PPCF128, "__gcc_qunord")
+
+// Memory
+HANDLE_LIBCALL(MEMCPY, "memcpy")
+HANDLE_LIBCALL(MEMMOVE, "memmove")
+HANDLE_LIBCALL(MEMSET, "memset")
+HANDLE_LIBCALL(BZERO, nullptr)
+
+// Element-wise unordered-atomic memory of different sizes
+HANDLE_LIBCALL(MEMCPY_ELEMENT_UNORDERED_ATOMIC_1, "__llvm_memcpy_element_unordered_atomic_1")
+HANDLE_LIBCALL(MEMCPY_ELEMENT_UNORDERED_ATOMIC_2, "__llvm_memcpy_element_unordered_atomic_2")
+HANDLE_LIBCALL(MEMCPY_ELEMENT_UNORDERED_ATOMIC_4, "__llvm_memcpy_element_unordered_atomic_4")
+HANDLE_LIBCALL(MEMCPY_ELEMENT_UNORDERED_ATOMIC_8, "__llvm_memcpy_element_unordered_atomic_8")
+HANDLE_LIBCALL(MEMCPY_ELEMENT_UNORDERED_ATOMIC_16, "__llvm_memcpy_element_unordered_atomic_16")
+HANDLE_LIBCALL(MEMMOVE_ELEMENT_UNORDERED_ATOMIC_1, "__llvm_memmove_element_unordered_atomic_1")
+HANDLE_LIBCALL(MEMMOVE_ELEMENT_UNORDERED_ATOMIC_2, "__llvm_memmove_element_unordered_atomic_2")
+HANDLE_LIBCALL(MEMMOVE_ELEMENT_UNORDERED_ATOMIC_4, "__llvm_memmove_element_unordered_atomic_4")
+HANDLE_LIBCALL(MEMMOVE_ELEMENT_UNORDERED_ATOMIC_8, "__llvm_memmove_element_unordered_atomic_8")
+HANDLE_LIBCALL(MEMMOVE_ELEMENT_UNORDERED_ATOMIC_16, "__llvm_memmove_element_unordered_atomic_16")
+HANDLE_LIBCALL(MEMSET_ELEMENT_UNORDERED_ATOMIC_1, "__llvm_memset_element_unordered_atomic_1")
+HANDLE_LIBCALL(MEMSET_ELEMENT_UNORDERED_ATOMIC_2, "__llvm_memset_element_unordered_atomic_2")
+HANDLE_LIBCALL(MEMSET_ELEMENT_UNORDERED_ATOMIC_4, "__llvm_memset_element_unordered_atomic_4")
+HANDLE_LIBCALL(MEMSET_ELEMENT_UNORDERED_ATOMIC_8, "__llvm_memset_element_unordered_atomic_8")
+HANDLE_LIBCALL(MEMSET_ELEMENT_UNORDERED_ATOMIC_16, "__llvm_memset_element_unordered_atomic_16")
+
+// Exception handling
+HANDLE_LIBCALL(UNWIND_RESUME, "_Unwind_Resume")
+
+// Note: there are two sets of atomics libcalls; see
+// <https://llvm.org/docs/Atomics.html> for more info on the
+// difference between them.
+
+// Atomic '__sync_*' libcalls.
+HANDLE_LIBCALL(SYNC_VAL_COMPARE_AND_SWAP_1, "__sync_val_compare_and_swap_1")
+HANDLE_LIBCALL(SYNC_VAL_COMPARE_AND_SWAP_2, "__sync_val_compare_and_swap_2")
+HANDLE_LIBCALL(SYNC_VAL_COMPARE_AND_SWAP_4, "__sync_val_compare_and_swap_4")
+HANDLE_LIBCALL(SYNC_VAL_COMPARE_AND_SWAP_8, "__sync_val_compare_and_swap_8")
+HANDLE_LIBCALL(SYNC_VAL_COMPARE_AND_SWAP_16, "__sync_val_compare_and_swap_16")
+HANDLE_LIBCALL(SYNC_LOCK_TEST_AND_SET_1, "__sync_lock_test_and_set_1")
+HANDLE_LIBCALL(SYNC_LOCK_TEST_AND_SET_2, "__sync_lock_test_and_set_2")
+HANDLE_LIBCALL(SYNC_LOCK_TEST_AND_SET_4, "__sync_lock_test_and_set_4")
+HANDLE_LIBCALL(SYNC_LOCK_TEST_AND_SET_8, "__sync_lock_test_and_set_8")
+HANDLE_LIBCALL(SYNC_LOCK_TEST_AND_SET_16, "__sync_lock_test_and_set_16")
+HANDLE_LIBCALL(SYNC_FETCH_AND_ADD_1, "__sync_fetch_and_add_1")
+HANDLE_LIBCALL(SYNC_FETCH_AND_ADD_2, "__sync_fetch_and_add_2")
+HANDLE_LIBCALL(SYNC_FETCH_AND_ADD_4, "__sync_fetch_and_add_4")
+HANDLE_LIBCALL(SYNC_FETCH_AND_ADD_8, "__sync_fetch_and_add_8")
+HANDLE_LIBCALL(SYNC_FETCH_AND_ADD_16, "__sync_fetch_and_add_16")
+HANDLE_LIBCALL(SYNC_FETCH_AND_SUB_1, "__sync_fetch_and_sub_1")
+HANDLE_LIBCALL(SYNC_FETCH_AND_SUB_2, "__sync_fetch_and_sub_2")
+HANDLE_LIBCALL(SYNC_FETCH_AND_SUB_4, "__sync_fetch_and_sub_4")
+HANDLE_LIBCALL(SYNC_FETCH_AND_SUB_8, "__sync_fetch_and_sub_8")
+HANDLE_LIBCALL(SYNC_FETCH_AND_SUB_16, "__sync_fetch_and_sub_16")
+HANDLE_LIBCALL(SYNC_FETCH_AND_AND_1, "__sync_fetch_and_and_1")
+HANDLE_LIBCALL(SYNC_FETCH_AND_AND_2, "__sync_fetch_and_and_2")
+HANDLE_LIBCALL(SYNC_FETCH_AND_AND_4, "__sync_fetch_and_and_4")
+HANDLE_LIBCALL(SYNC_FETCH_AND_AND_8, "__sync_fetch_and_and_8")
+HANDLE_LIBCALL(SYNC_FETCH_AND_AND_16, "__sync_fetch_and_and_16")
+HANDLE_LIBCALL(SYNC_FETCH_AND_OR_1, "__sync_fetch_and_or_1")
+HANDLE_LIBCALL(SYNC_FETCH_AND_OR_2, "__sync_fetch_and_or_2")
+HANDLE_LIBCALL(SYNC_FETCH_AND_OR_4, "__sync_fetch_and_or_4")
+HANDLE_LIBCALL(SYNC_FETCH_AND_OR_8, "__sync_fetch_and_or_8")
+HANDLE_LIBCALL(SYNC_FETCH_AND_OR_16, "__sync_fetch_and_or_16")
+HANDLE_LIBCALL(SYNC_FETCH_AND_XOR_1, "__sync_fetch_and_xor_1")
+HANDLE_LIBCALL(SYNC_FETCH_AND_XOR_2, "__sync_fetch_and_xor_2")
+HANDLE_LIBCALL(SYNC_FETCH_AND_XOR_4, "__sync_fetch_and_xor_4")
+HANDLE_LIBCALL(SYNC_FETCH_AND_XOR_8, "__sync_fetch_and_xor_8")
+HANDLE_LIBCALL(SYNC_FETCH_AND_XOR_16, "__sync_fetch_and_xor_16")
+HANDLE_LIBCALL(SYNC_FETCH_AND_NAND_1, "__sync_fetch_and_nand_1")
+HANDLE_LIBCALL(SYNC_FETCH_AND_NAND_2, "__sync_fetch_and_nand_2")
+HANDLE_LIBCALL(SYNC_FETCH_AND_NAND_4, "__sync_fetch_and_nand_4")
+HANDLE_LIBCALL(SYNC_FETCH_AND_NAND_8, "__sync_fetch_and_nand_8")
+HANDLE_LIBCALL(SYNC_FETCH_AND_NAND_16, "__sync_fetch_and_nand_16")
+HANDLE_LIBCALL(SYNC_FETCH_AND_MAX_1, "__sync_fetch_and_max_1")
+HANDLE_LIBCALL(SYNC_FETCH_AND_MAX_2, "__sync_fetch_and_max_2")
+HANDLE_LIBCALL(SYNC_FETCH_AND_MAX_4, "__sync_fetch_and_max_4")
+HANDLE_LIBCALL(SYNC_FETCH_AND_MAX_8, "__sync_fetch_and_max_8")
+HANDLE_LIBCALL(SYNC_FETCH_AND_MAX_16, "__sync_fetch_and_max_16")
+HANDLE_LIBCALL(SYNC_FETCH_AND_UMAX_1, "__sync_fetch_and_umax_1")
+HANDLE_LIBCALL(SYNC_FETCH_AND_UMAX_2, "__sync_fetch_and_umax_2")
+HANDLE_LIBCALL(SYNC_FETCH_AND_UMAX_4, "__sync_fetch_and_umax_4")
+HANDLE_LIBCALL(SYNC_FETCH_AND_UMAX_8, "__sync_fetch_and_umax_8")
+HANDLE_LIBCALL(SYNC_FETCH_AND_UMAX_16, "__sync_fetch_and_umax_16")
+HANDLE_LIBCALL(SYNC_FETCH_AND_MIN_1, "__sync_fetch_and_min_1")
+HANDLE_LIBCALL(SYNC_FETCH_AND_MIN_2, "__sync_fetch_and_min_2")
+HANDLE_LIBCALL(SYNC_FETCH_AND_MIN_4, "__sync_fetch_and_min_4")
+HANDLE_LIBCALL(SYNC_FETCH_AND_MIN_8, "__sync_fetch_and_min_8")
+HANDLE_LIBCALL(SYNC_FETCH_AND_MIN_16, "__sync_fetch_and_min_16")
+HANDLE_LIBCALL(SYNC_FETCH_AND_UMIN_1, "__sync_fetch_and_umin_1")
+HANDLE_LIBCALL(SYNC_FETCH_AND_UMIN_2, "__sync_fetch_and_umin_2")
+HANDLE_LIBCALL(SYNC_FETCH_AND_UMIN_4, "__sync_fetch_and_umin_4")
+HANDLE_LIBCALL(SYNC_FETCH_AND_UMIN_8, "__sync_fetch_and_umin_8")
+HANDLE_LIBCALL(SYNC_FETCH_AND_UMIN_16, "__sync_fetch_and_umin_16")
+
+// Atomic '__atomic_*' libcalls.
+HANDLE_LIBCALL(ATOMIC_LOAD, "__atomic_load")
+HANDLE_LIBCALL(ATOMIC_LOAD_1, "__atomic_load_1")
+HANDLE_LIBCALL(ATOMIC_LOAD_2, "__atomic_load_2")
+HANDLE_LIBCALL(ATOMIC_LOAD_4, "__atomic_load_4")
+HANDLE_LIBCALL(ATOMIC_LOAD_8, "__atomic_load_8")
+HANDLE_LIBCALL(ATOMIC_LOAD_16, "__atomic_load_16")
+
+HANDLE_LIBCALL(ATOMIC_STORE, "__atomic_store")
+HANDLE_LIBCALL(ATOMIC_STORE_1, "__atomic_store_1")
+HANDLE_LIBCALL(ATOMIC_STORE_2, "__atomic_store_2")
+HANDLE_LIBCALL(ATOMIC_STORE_4, "__atomic_store_4")
+HANDLE_LIBCALL(ATOMIC_STORE_8, "__atomic_store_8")
+HANDLE_LIBCALL(ATOMIC_STORE_16, "__atomic_store_16")
+
+HANDLE_LIBCALL(ATOMIC_EXCHANGE, "__atomic_exchange")
+HANDLE_LIBCALL(ATOMIC_EXCHANGE_1, "__atomic_exchange_1")
+HANDLE_LIBCALL(ATOMIC_EXCHANGE_2, "__atomic_exchange_2")
+HANDLE_LIBCALL(ATOMIC_EXCHANGE_4, "__atomic_exchange_4")
+HANDLE_LIBCALL(ATOMIC_EXCHANGE_8, "__atomic_exchange_8")
+HANDLE_LIBCALL(ATOMIC_EXCHANGE_16, "__atomic_exchange_16")
+
+HANDLE_LIBCALL(ATOMIC_COMPARE_EXCHANGE, "__atomic_compare_exchange")
+HANDLE_LIBCALL(ATOMIC_COMPARE_EXCHANGE_1, "__atomic_compare_exchange_1")
+HANDLE_LIBCALL(ATOMIC_COMPARE_EXCHANGE_2, "__atomic_compare_exchange_2")
+HANDLE_LIBCALL(ATOMIC_COMPARE_EXCHANGE_4, "__atomic_compare_exchange_4")
+HANDLE_LIBCALL(ATOMIC_COMPARE_EXCHANGE_8, "__atomic_compare_exchange_8")
+HANDLE_LIBCALL(ATOMIC_COMPARE_EXCHANGE_16, "__atomic_compare_exchange_16")
+
+HANDLE_LIBCALL(ATOMIC_FETCH_ADD_1, "__atomic_fetch_add_1")
+HANDLE_LIBCALL(ATOMIC_FETCH_ADD_2, "__atomic_fetch_add_2")
+HANDLE_LIBCALL(ATOMIC_FETCH_ADD_4, "__atomic_fetch_add_4")
+HANDLE_LIBCALL(ATOMIC_FETCH_ADD_8, "__atomic_fetch_add_8")
+HANDLE_LIBCALL(ATOMIC_FETCH_ADD_16, "__atomic_fetch_add_16")
+HANDLE_LIBCALL(ATOMIC_FETCH_SUB_1, "__atomic_fetch_sub_1")
+HANDLE_LIBCALL(ATOMIC_FETCH_SUB_2, "__atomic_fetch_sub_2")
+HANDLE_LIBCALL(ATOMIC_FETCH_SUB_4, "__atomic_fetch_sub_4")
+HANDLE_LIBCALL(ATOMIC_FETCH_SUB_8, "__atomic_fetch_sub_8")
+HANDLE_LIBCALL(ATOMIC_FETCH_SUB_16, "__atomic_fetch_sub_16")
+HANDLE_LIBCALL(ATOMIC_FETCH_AND_1, "__atomic_fetch_and_1")
+HANDLE_LIBCALL(ATOMIC_FETCH_AND_2, "__atomic_fetch_and_2")
+HANDLE_LIBCALL(ATOMIC_FETCH_AND_4, "__atomic_fetch_and_4")
+HANDLE_LIBCALL(ATOMIC_FETCH_AND_8, "__atomic_fetch_and_8")
+HANDLE_LIBCALL(ATOMIC_FETCH_AND_16, "__atomic_fetch_and_16")
+HANDLE_LIBCALL(ATOMIC_FETCH_OR_1, "__atomic_fetch_or_1")
+HANDLE_LIBCALL(ATOMIC_FETCH_OR_2, "__atomic_fetch_or_2")
+HANDLE_LIBCALL(ATOMIC_FETCH_OR_4, "__atomic_fetch_or_4")
+HANDLE_LIBCALL(ATOMIC_FETCH_OR_8, "__atomic_fetch_or_8")
+HANDLE_LIBCALL(ATOMIC_FETCH_OR_16, "__atomic_fetch_or_16")
+HANDLE_LIBCALL(ATOMIC_FETCH_XOR_1, "__atomic_fetch_xor_1")
+HANDLE_LIBCALL(ATOMIC_FETCH_XOR_2, "__atomic_fetch_xor_2")
+HANDLE_LIBCALL(ATOMIC_FETCH_XOR_4, "__atomic_fetch_xor_4")
+HANDLE_LIBCALL(ATOMIC_FETCH_XOR_8, "__atomic_fetch_xor_8")
+HANDLE_LIBCALL(ATOMIC_FETCH_XOR_16, "__atomic_fetch_xor_16")
+HANDLE_LIBCALL(ATOMIC_FETCH_NAND_1, "__atomic_fetch_nand_1")
+HANDLE_LIBCALL(ATOMIC_FETCH_NAND_2, "__atomic_fetch_nand_2")
+HANDLE_LIBCALL(ATOMIC_FETCH_NAND_4, "__atomic_fetch_nand_4")
+HANDLE_LIBCALL(ATOMIC_FETCH_NAND_8, "__atomic_fetch_nand_8")
+HANDLE_LIBCALL(ATOMIC_FETCH_NAND_16, "__atomic_fetch_nand_16")
+
+// Stack Protector Fail
+HANDLE_LIBCALL(STACKPROTECTOR_CHECK_FAIL, "__stack_chk_fail")
+
+// Deoptimization
+HANDLE_LIBCALL(DEOPTIMIZE, "__llvm_deoptimize")
+
+HANDLE_LIBCALL(UNKNOWN_LIBCALL, nullptr)
+
+#undef HANDLE_LIBCALL
diff --git a/capstone/suite/synctools/tablegen/include/llvm/IR/Value.def b/capstone/suite/synctools/tablegen/include/llvm/IR/Value.def
new file mode 100644
index 000000000..e2ddba0aa
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/IR/Value.def
@@ -0,0 +1,117 @@
+//===-------- llvm/IR/Value.def - File that describes Values -------*- C++ -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file contains descriptions of the various LLVM values. This is
+// used as a central place for enumerating the different values.
+//
+//===----------------------------------------------------------------------===//
+
+// NOTE: NO INCLUDE GUARD DESIRED!
+
+// Provide definitions of macros so that users of this file do not have to
+// define everything to use it...
+//
+#if !(defined HANDLE_GLOBAL_VALUE || defined HANDLE_CONSTANT || \
+ defined HANDLE_INSTRUCTION || defined HANDLE_INLINE_ASM_VALUE || \
+ defined HANDLE_METADATA_VALUE || defined HANDLE_VALUE || \
+ defined HANDLE_CONSTANT_MARKER || defined HANDLE_MEMORY_VALUE)
+#error "Missing macro definition of HANDLE_VALUE*"
+#endif
+
+#ifndef HANDLE_MEMORY_VALUE
+#define HANDLE_MEMORY_VALUE(ValueName) HANDLE_VALUE(ValueName)
+#endif
+
+#ifndef HANDLE_GLOBAL_VALUE
+#define HANDLE_GLOBAL_VALUE(ValueName) HANDLE_CONSTANT(ValueName)
+#endif
+
+#ifndef HANDLE_CONSTANT
+#define HANDLE_CONSTANT(ValueName) HANDLE_VALUE(ValueName)
+#endif
+
+#ifndef HANDLE_INSTRUCTION
+#define HANDLE_INSTRUCTION(ValueName) HANDLE_VALUE(ValueName)
+#endif
+
+#ifndef HANDLE_INLINE_ASM_VALUE
+#define HANDLE_INLINE_ASM_VALUE(ValueName) HANDLE_VALUE(ValueName)
+#endif
+
+#ifndef HANDLE_METADATA_VALUE
+#define HANDLE_METADATA_VALUE(ValueName) HANDLE_VALUE(ValueName)
+#endif
+
+#ifndef HANDLE_VALUE
+#define HANDLE_VALUE(ValueName)
+#endif
+
+#ifndef HANDLE_CONSTANT_MARKER
+#define HANDLE_CONSTANT_MARKER(MarkerName, ValueName)
+#endif
+
+// Having the constants come first makes the range check for isa<Constant>
+// faster and smaller by one operation.
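+// (With the constants enumerated first, starting at value ID 0, the check can
+// reduce to a single unsigned "ID <= ConstantLastVal" comparison.)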
+
+// Constant
+HANDLE_GLOBAL_VALUE(Function)
+HANDLE_GLOBAL_VALUE(GlobalAlias)
+HANDLE_GLOBAL_VALUE(GlobalIFunc)
+HANDLE_GLOBAL_VALUE(GlobalVariable)
+HANDLE_CONSTANT(BlockAddress)
+HANDLE_CONSTANT(ConstantExpr)
+
+// ConstantAggregate.
+HANDLE_CONSTANT(ConstantArray)
+HANDLE_CONSTANT(ConstantStruct)
+HANDLE_CONSTANT(ConstantVector)
+
+// ConstantData.
+HANDLE_CONSTANT(UndefValue)
+HANDLE_CONSTANT(ConstantAggregateZero)
+HANDLE_CONSTANT(ConstantDataArray)
+HANDLE_CONSTANT(ConstantDataVector)
+HANDLE_CONSTANT(ConstantInt)
+HANDLE_CONSTANT(ConstantFP)
+HANDLE_CONSTANT(ConstantPointerNull)
+HANDLE_CONSTANT(ConstantTokenNone)
+
+HANDLE_CONSTANT_MARKER(ConstantFirstVal, Function)
+HANDLE_CONSTANT_MARKER(ConstantLastVal, ConstantTokenNone)
+HANDLE_CONSTANT_MARKER(ConstantDataFirstVal, UndefValue)
+HANDLE_CONSTANT_MARKER(ConstantDataLastVal, ConstantTokenNone)
+HANDLE_CONSTANT_MARKER(ConstantAggregateFirstVal, ConstantArray)
+HANDLE_CONSTANT_MARKER(ConstantAggregateLastVal, ConstantVector)
+
+HANDLE_VALUE(Argument)
+HANDLE_VALUE(BasicBlock)
+
+
+HANDLE_METADATA_VALUE(MetadataAsValue)
+HANDLE_INLINE_ASM_VALUE(InlineAsm)
+
+// FIXME: It's awkward that Value.def knows about classes in Analysis. While
+// this doesn't introduce a strict link or include dependency, we should remove
+// the circular dependency eventually.
+HANDLE_MEMORY_VALUE(MemoryUse)
+HANDLE_MEMORY_VALUE(MemoryDef)
+HANDLE_MEMORY_VALUE(MemoryPhi)
+
+HANDLE_INSTRUCTION(Instruction)
+// Enum values starting at InstructionVal are used for Instructions;
+// don't add new values here!
+
+#undef HANDLE_MEMORY_VALUE
+#undef HANDLE_GLOBAL_VALUE
+#undef HANDLE_CONSTANT
+#undef HANDLE_INSTRUCTION
+#undef HANDLE_METADATA_VALUE
+#undef HANDLE_INLINE_ASM_VALUE
+#undef HANDLE_VALUE
+#undef HANDLE_CONSTANT_MARKER
diff --git a/capstone/suite/synctools/tablegen/include/llvm/TableGen/SearchableTable.td b/capstone/suite/synctools/tablegen/include/llvm/TableGen/SearchableTable.td
new file mode 100644
index 000000000..1089d363e
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/TableGen/SearchableTable.td
@@ -0,0 +1,136 @@
+//===- SearchableTable.td ----------------------------------*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines the key top-level classes needed to produce a reasonably
+// generic table that can be binary-searched. Three types of objects can be
+// defined using the classes in this file:
+//
+// 1. (Generic) Enums. By instantiating the GenericEnum class once, an enum with
+// the name of the def is generated. It is guarded by the preprocessor define
+// GET_name_DECL, where name is the name of the def.
+//
+// 2. (Generic) Tables and search indices. By instantiating the GenericTable
+// class once, a table with the name of the instantiating def is generated and
+// guarded by the GET_name_IMPL preprocessor guard.
+//
+// Both a primary key and additional secondary keys / search indices can also
+// be defined, which result in the generation of lookup functions. Their
+// declarations and definitions are all guarded by GET_name_DECL and
+// GET_name_IMPL, respectively, where name is the name of the underlying table.
+//
+// See AArch64SystemOperands.td and its generated header for example uses.
+//
+//===----------------------------------------------------------------------===//
+
+// Define a record derived from this class to generate a generic enum.
+//
+// The name of the record is used as the type name of the C++ enum.
+class GenericEnum {
+ // Name of a TableGen class. The enum will have one entry for each record
+ // that derives from that class.
+ string FilterClass;
+
+ // (Optional) Name of a field that is present in all collected records and
+ // contains the name of enum entries.
+ //
+ // If NameField is not set, the record names will be used instead.
+ string NameField;
+
+ // (Optional) Name of a field that is present in all collected records and
+ // contains the numerical value of enum entries.
+ //
+ // If ValueField is not set, enum values will be assigned automatically,
+ // starting at 0, according to a lexicographical sort of the entry names.
+ string ValueField;
+}
+
+// Define a record derived from this class to generate a generic table. This
+// table can have a searchable primary key, and it can also be referenced by
+// external search indices.
+//
+// The name of the record is used as the name of the global primary array of
+// entries of the table in C++.
+class GenericTable {
+ // Name of a class. The table will have one entry for each record that
+ // derives from that class.
+ string FilterClass;
+
+ // Name of the C++ struct/class type that holds table entries. The
+ // declaration of this type is not generated automatically.
+ string CppTypeName = FilterClass;
+
+ // List of the names of fields of collected records that contain the data for
+ // table entries, in the order that is used for initialization in C++.
+ //
+ // For each field of the table named XXX, TableGen will look for a value
+ // called TypeOf_XXX and use that as a more detailed description of the
+ // type of the field if present. This is required for fields whose type
+ // cannot be deduced automatically, such as enum fields. For example:
+ //
+ // def MyEnum : GenericEnum {
+ // let FilterClass = "MyEnum";
+ // ...
+ // }
+ //
+ // class MyTableEntry {
+ // MyEnum V;
+ // ...
+ // }
+ //
+ // def MyTable : GenericTable {
+ // let FilterClass = "MyTableEntry";
+ // let Fields = ["V", ...];
+ // GenericEnum TypeOf_V = MyEnum;
+ // }
+ //
+ // Fields of type bit, bits<N>, string, Intrinsic, and Instruction (or
+ // derived classes of those) are supported natively.
+ //
+ // Additionally, fields of type `code` can appear, where the value is used
+ // verbatim as an initializer. However, these fields cannot be used as
+ // search keys.
+ list<string> Fields;
+
+ // (Optional) List of fields that make up the primary key.
+ list<string> PrimaryKey;
+
+ // (Optional) Name of the primary key search function.
+ string PrimaryKeyName;
+
+ // See SearchIndex.EarlyOut
+ bit PrimaryKeyEarlyOut = 0;
+}
+
+// Define a record derived from this class to generate an additional search
+// index for a generic table that has been defined earlier.
+//
+// The name of the record will be used as the name of the C++ lookup function.
+class SearchIndex {
+ // Table that this search index refers to.
+ GenericTable Table;
+
+ // List of fields that make up the key.
+ list<string> Key;
+
+ // If true, the lookup function will check the first field of the key against
+ // the minimum and maximum values in the index before entering the binary
+ // search. This is convenient for tables that add extended data for a subset
+ // of a larger enum-based space, e.g. extended data about a subset of
+ // instructions.
+ //
+ // Can only be used when the first field is an integral (non-string) type.
+ bit EarlyOut = 0;
+}
+
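+// Illustrative sketch (not part of the upstream file): a secondary search
+// index over the hypothetical MyTable from the comment above, keyed on "V".
+// The record name becomes the name of the generated C++ lookup function.
+//
+// def lookupMyTableEntryByV : SearchIndex {
+//   let Table = MyTable;
+//   let Key = ["V"];
+// }
+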
+// Legacy table type with integrated enum.
+class SearchableTable {
+ list<string> SearchableFields;
+ string EnumNameField = "Name";
+ string EnumValueField;
+}
diff --git a/capstone/suite/synctools/tablegen/include/llvm/Target/GenericOpcodes.td b/capstone/suite/synctools/tablegen/include/llvm/Target/GenericOpcodes.td
new file mode 100644
index 000000000..79cc1e4d9
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/Target/GenericOpcodes.td
@@ -0,0 +1,672 @@
+//===-- GenericOpcodes.td - Opcodes used with GlobalISel ---*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines the generic opcodes used with GlobalISel.
+// After instruction selection, these opcodes should not appear.
+//
+//===----------------------------------------------------------------------===//
+
+//------------------------------------------------------------------------------
+// Unary ops.
+//------------------------------------------------------------------------------
+
+class GenericInstruction : StandardPseudoInstruction;
+
+// Extend the underlying scalar type of an operation, leaving the high bits
+// unspecified.
+def G_ANYEXT : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src);
+ let hasSideEffects = 0;
+}
+
+// Sign extend the underlying scalar type of an operation, copying the sign bit
+// into the newly-created space.
+def G_SEXT : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src);
+ let hasSideEffects = 0;
+}
+
+// Zero extend the underlying scalar type of an operation, putting zero bits
+// into the newly-created space.
+def G_ZEXT : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src);
+ let hasSideEffects = 0;
+}
+
+
+// Truncate the underlying scalar type of an operation. This is equivalent to
+// G_EXTRACT for scalar types, but acts elementwise on vectors.
+def G_TRUNC : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src);
+ let hasSideEffects = 0;
+}
+
+def G_IMPLICIT_DEF : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins);
+ let hasSideEffects = 0;
+}
+
+def G_PHI : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins variable_ops);
+ let hasSideEffects = 0;
+}
+
+def G_FRAME_INDEX : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins unknown:$src2);
+ let hasSideEffects = 0;
+}
+
+def G_GLOBAL_VALUE : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins unknown:$src);
+ let hasSideEffects = 0;
+}
+
+def G_INTTOPTR : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src);
+ let hasSideEffects = 0;
+}
+
+def G_PTRTOINT : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src);
+ let hasSideEffects = 0;
+}
+
+def G_BITCAST : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src);
+ let hasSideEffects = 0;
+}
+
+def G_CONSTANT : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins unknown:$imm);
+ let hasSideEffects = 0;
+}
+
+def G_FCONSTANT : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins unknown:$imm);
+ let hasSideEffects = 0;
+}
+
+def G_VASTART : GenericInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins type0:$list);
+ let hasSideEffects = 0;
+ let mayStore = 1;
+}
+
+def G_VAARG : GenericInstruction {
+ let OutOperandList = (outs type0:$val);
+ let InOperandList = (ins type1:$list, unknown:$align);
+ let hasSideEffects = 0;
+ let mayLoad = 1;
+ let mayStore = 1;
+}
+
+def G_BSWAP : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src);
+ let hasSideEffects = 0;
+}
+
+def G_ADDRSPACE_CAST : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src);
+ let hasSideEffects = 0;
+}
+
+def G_BLOCK_ADDR : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins unknown:$ba);
+ let hasSideEffects = 0;
+}
+
+//------------------------------------------------------------------------------
+// Binary ops.
+//------------------------------------------------------------------------------
+
+// Generic addition.
+def G_ADD : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 1;
+}
+
+// Generic subtraction.
+def G_SUB : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 0;
+}
+
+// Generic multiplication.
+def G_MUL : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 1;
+}
+
+// Generic signed division.
+def G_SDIV : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 0;
+}
+
+// Generic unsigned division.
+def G_UDIV : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 0;
+}
+
+// Generic signed remainder.
+def G_SREM : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 0;
+}
+
+// Generic unsigned remainder.
+def G_UREM : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 0;
+}
+
+// Generic bitwise and.
+def G_AND : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 1;
+}
+
+// Generic bitwise or.
+def G_OR : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 1;
+}
+
+// Generic bitwise xor.
+def G_XOR : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 1;
+}
+
+// Generic left-shift.
+def G_SHL : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+}
+
+// Generic logical right-shift.
+def G_LSHR : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+}
+
+// Generic arithmetic right-shift.
+def G_ASHR : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+}
+
+// Generic integer comparison.
+def G_ICMP : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins unknown:$tst, type1:$src1, type1:$src2);
+ let hasSideEffects = 0;
+}
+
+// Generic floating-point comparison.
+def G_FCMP : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins unknown:$tst, type1:$src1, type1:$src2);
+ let hasSideEffects = 0;
+}
+
+// Generic select
+def G_SELECT : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$tst, type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+}
+
+// Generic pointer offset.
+def G_GEP : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type1:$src2);
+ let hasSideEffects = 0;
+}
+
+def G_PTR_MASK : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src, unknown:$bits);
+ let hasSideEffects = 0;
+}
+
+//------------------------------------------------------------------------------
+// Overflow ops
+//------------------------------------------------------------------------------
+
+// Generic unsigned addition consuming and producing a carry flag.
+def G_UADDE : GenericInstruction {
+ let OutOperandList = (outs type0:$dst, type1:$carry_out);
+ let InOperandList = (ins type0:$src1, type0:$src2, type1:$carry_in);
+ let hasSideEffects = 0;
+}
+
+// Generic signed addition producing a carry flag.
+def G_SADDO : GenericInstruction {
+ let OutOperandList = (outs type0:$dst, type1:$carry_out);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 1;
+}
+
+// Generic unsigned subtraction consuming and producing a carry flag.
+def G_USUBE : GenericInstruction {
+ let OutOperandList = (outs type0:$dst, type1:$carry_out);
+ let InOperandList = (ins type0:$src1, type0:$src2, type1:$carry_in);
+ let hasSideEffects = 0;
+}
+
+// Generic signed subtraction producing a carry flag.
+def G_SSUBO : GenericInstruction {
+ let OutOperandList = (outs type0:$dst, type1:$carry_out);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+}
+
+// Generic unsigned multiplication producing a carry flag.
+def G_UMULO : GenericInstruction {
+ let OutOperandList = (outs type0:$dst, type1:$carry_out);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 1;
+}
+
+// Generic signed multiplication producing a carry flag.
+def G_SMULO : GenericInstruction {
+ let OutOperandList = (outs type0:$dst, type1:$carry_out);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 1;
+}
+
+// Multiply two numbers at twice the incoming bit width (unsigned) and return
+// the high half of the result.
+def G_UMULH : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 1;
+}
+
+// Multiply two numbers at twice the incoming bit width (signed) and return
+// the high half of the result.
+def G_SMULH : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 1;
+}
+
+//------------------------------------------------------------------------------
+// Floating Point Unary Ops.
+//------------------------------------------------------------------------------
+
+def G_FNEG : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src);
+ let hasSideEffects = 0;
+}
+
+def G_FPEXT : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src);
+ let hasSideEffects = 0;
+}
+
+def G_FPTRUNC : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src);
+ let hasSideEffects = 0;
+}
+
+def G_FPTOSI : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src);
+ let hasSideEffects = 0;
+}
+
+def G_FPTOUI : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src);
+ let hasSideEffects = 0;
+}
+
+def G_SITOFP : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src);
+ let hasSideEffects = 0;
+}
+
+def G_UITOFP : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src);
+ let hasSideEffects = 0;
+}
+
+def G_FABS : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src);
+ let hasSideEffects = 0;
+}
+
+//------------------------------------------------------------------------------
+// Floating Point Binary ops.
+//------------------------------------------------------------------------------
+
+// Generic FP addition.
+def G_FADD : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 1;
+}
+
+// Generic FP subtraction.
+def G_FSUB : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 0;
+}
+
+// Generic FP multiplication.
+def G_FMUL : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+ let isCommutable = 1;
+}
+
+// Generic fused multiply-add instruction.
+// Behaves like the llvm.fma intrinsic, i.e. src1 * src2 + src3.
+def G_FMA : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2, type0:$src3);
+ let hasSideEffects = 0;
+ let isCommutable = 0;
+}
+
+// Generic FP division.
+def G_FDIV : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+}
+
+// Generic FP remainder.
+def G_FREM : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+}
+
+// Floating point exponentiation.
+def G_FPOW : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1, type0:$src2);
+ let hasSideEffects = 0;
+}
+
+// Floating point base-e exponential of a value.
+def G_FEXP : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1);
+ let hasSideEffects = 0;
+}
+
+// Floating point base-2 exponential of a value.
+def G_FEXP2 : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1);
+ let hasSideEffects = 0;
+}
+
+// Floating point natural (base-e) logarithm of a value.
+def G_FLOG : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1);
+ let hasSideEffects = 0;
+}
+
+// Floating point base-2 logarithm of a value.
+def G_FLOG2 : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src1);
+ let hasSideEffects = 0;
+}
+
+//------------------------------------------------------------------------------
+// Memory ops
+//------------------------------------------------------------------------------
+
+// Generic load. Expects a MachineMemOperand in addition to explicit operands.
+def G_LOAD : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins ptype1:$addr);
+ let hasSideEffects = 0;
+ let mayLoad = 1;
+}
+
+// Generic sign-extended load. Expects a MachineMemOperand in addition to explicit operands.
+def G_SEXTLOAD : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins ptype1:$addr);
+ let hasSideEffects = 0;
+ let mayLoad = 1;
+}
+
+// Generic zero-extended load. Expects a MachineMemOperand in addition to explicit operands.
+def G_ZEXTLOAD : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins ptype1:$addr);
+ let hasSideEffects = 0;
+ let mayLoad = 1;
+}
+
+// Generic store. Expects a MachineMemOperand in addition to explicit operands.
+def G_STORE : GenericInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins type0:$src, ptype1:$addr);
+ let hasSideEffects = 0;
+ let mayStore = 1;
+}
+
+// Generic atomic cmpxchg with internal success check. Expects a
+// MachineMemOperand in addition to explicit operands.
+def G_ATOMIC_CMPXCHG_WITH_SUCCESS : GenericInstruction {
+ let OutOperandList = (outs type0:$oldval, type1:$success);
+ let InOperandList = (ins type2:$addr, type0:$cmpval, type0:$newval);
+ let hasSideEffects = 0;
+ let mayLoad = 1;
+ let mayStore = 1;
+}
+
+// Generic atomic cmpxchg. Expects a MachineMemOperand in addition to explicit
+// operands.
+def G_ATOMIC_CMPXCHG : GenericInstruction {
+ let OutOperandList = (outs type0:$oldval);
+ let InOperandList = (ins ptype1:$addr, type0:$cmpval, type0:$newval);
+ let hasSideEffects = 0;
+ let mayLoad = 1;
+ let mayStore = 1;
+}
+
+// Generic atomicrmw. Expects a MachineMemOperand in addition to explicit
+// operands.
+class G_ATOMICRMW_OP : GenericInstruction {
+ let OutOperandList = (outs type0:$oldval);
+ let InOperandList = (ins ptype1:$addr, type0:$val);
+ let hasSideEffects = 0;
+ let mayLoad = 1;
+ let mayStore = 1;
+}
+
+def G_ATOMICRMW_XCHG : G_ATOMICRMW_OP;
+def G_ATOMICRMW_ADD : G_ATOMICRMW_OP;
+def G_ATOMICRMW_SUB : G_ATOMICRMW_OP;
+def G_ATOMICRMW_AND : G_ATOMICRMW_OP;
+def G_ATOMICRMW_NAND : G_ATOMICRMW_OP;
+def G_ATOMICRMW_OR : G_ATOMICRMW_OP;
+def G_ATOMICRMW_XOR : G_ATOMICRMW_OP;
+def G_ATOMICRMW_MAX : G_ATOMICRMW_OP;
+def G_ATOMICRMW_MIN : G_ATOMICRMW_OP;
+def G_ATOMICRMW_UMAX : G_ATOMICRMW_OP;
+def G_ATOMICRMW_UMIN : G_ATOMICRMW_OP;
+
+//------------------------------------------------------------------------------
+// Variadic ops
+//------------------------------------------------------------------------------
+
+// Extract a register of the specified size, starting from the block given by
+// index. This will almost certainly be mapped to sub-register COPYs after
+// register banks have been selected.
+def G_EXTRACT : GenericInstruction {
+ let OutOperandList = (outs type0:$res);
+ let InOperandList = (ins type1:$src, unknown:$offset);
+ let hasSideEffects = 0;
+}
+
+// Extract multiple registers of the specified size, starting from the blocks
+// given by the indices. This will almost certainly be mapped to sub-register
+// COPYs after register banks have been selected.
+def G_UNMERGE_VALUES : GenericInstruction {
+ let OutOperandList = (outs type0:$dst0, variable_ops);
+ let InOperandList = (ins type1:$src);
+ let hasSideEffects = 0;
+}
+
+// Insert a smaller register into a larger one at the specified bit-index.
+def G_INSERT : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src, type1:$op, unknown:$offset);
+ let hasSideEffects = 0;
+}
+
+/// Concatenate multiple registers of the same size into a wider register.
+def G_MERGE_VALUES : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src0, variable_ops);
+ let hasSideEffects = 0;
+}
+
+// Intrinsic without side effects.
+def G_INTRINSIC : GenericInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins unknown:$intrin, variable_ops);
+ let hasSideEffects = 0;
+}
+
+// Intrinsic with side effects.
+def G_INTRINSIC_W_SIDE_EFFECTS : GenericInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins unknown:$intrin, variable_ops);
+ let hasSideEffects = 1;
+ let mayLoad = 1;
+ let mayStore = 1;
+}
+
+//------------------------------------------------------------------------------
+// Branches.
+//------------------------------------------------------------------------------
+
+// Generic unconditional branch.
+def G_BR : GenericInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins unknown:$src1);
+ let hasSideEffects = 0;
+ let isBranch = 1;
+ let isTerminator = 1;
+ let isBarrier = 1;
+}
+
+// Generic conditional branch.
+def G_BRCOND : GenericInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins type0:$tst, unknown:$truebb);
+ let hasSideEffects = 0;
+ let isBranch = 1;
+ let isTerminator = 1;
+}
+
+// Generic indirect branch.
+def G_BRINDIRECT : GenericInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins type0:$src1);
+ let hasSideEffects = 0;
+ let isBranch = 1;
+ let isTerminator = 1;
+}
+
+//------------------------------------------------------------------------------
+// Vector ops
+//------------------------------------------------------------------------------
+
+// Generic insertelement.
+def G_INSERT_VECTOR_ELT : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type0:$src, type1:$elt, type2:$idx);
+ let hasSideEffects = 0;
+}
+
+// Generic extractelement.
+def G_EXTRACT_VECTOR_ELT : GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$src, type2:$idx);
+ let hasSideEffects = 0;
+}
+
+// Generic shufflevector.
+def G_SHUFFLE_VECTOR: GenericInstruction {
+ let OutOperandList = (outs type0:$dst);
+ let InOperandList = (ins type1:$v1, type1:$v2, type2:$mask);
+ let hasSideEffects = 0;
+}
+
+// TODO: Add the other generic opcodes.
diff --git a/capstone/suite/synctools/tablegen/include/llvm/Target/GlobalISel/RegisterBank.td b/capstone/suite/synctools/tablegen/include/llvm/Target/GlobalISel/RegisterBank.td
new file mode 100644
index 000000000..4dfd139e9
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/Target/GlobalISel/RegisterBank.td
@@ -0,0 +1,16 @@
+//===- RegisterBank.td - Register bank definitions ---------*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+//
+//===----------------------------------------------------------------------===//
+
+class RegisterBank<string name, list<RegisterClass> classes> {
+ string Name = name;
+ list<RegisterClass> RegisterClasses = classes;
+}
diff --git a/capstone/suite/synctools/tablegen/include/llvm/Target/GlobalISel/SelectionDAGCompat.td b/capstone/suite/synctools/tablegen/include/llvm/Target/GlobalISel/SelectionDAGCompat.td
new file mode 100644
index 000000000..d487759a4
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/Target/GlobalISel/SelectionDAGCompat.td
@@ -0,0 +1,131 @@
+//===- TargetGlobalISel.td - Common code for GlobalISel ----*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines the target-independent interfaces used to support
+// SelectionDAG instruction selection patterns (specified in
+// TargetSelectionDAG.td) when generating GlobalISel instruction selectors.
+//
+// This is intended as a compatibility layer, to enable reuse of target
+// descriptions written for SelectionDAG without requiring explicit GlobalISel
+// support. It will eventually supersede SelectionDAG patterns.
+//
+//===----------------------------------------------------------------------===//
+
+// Declare that a generic Instruction is 'equivalent' to an SDNode, that is,
+// SelectionDAG patterns involving the SDNode can be transformed to match the
+// Instruction instead.
+class GINodeEquiv<Instruction i, SDNode node> {
+ Instruction I = i;
+ SDNode Node = node;
+
+ // SelectionDAG has separate nodes for atomic and non-atomic memory operations
+ // (ISD::LOAD, ISD::ATOMIC_LOAD, ISD::STORE, ISD::ATOMIC_STORE) but GlobalISel
+ // stores this information in the MachineMemOperand.
+ bit CheckMMOIsNonAtomic = 0;
+
+ // SelectionDAG has one node for all loads and uses predicates to
+ // differentiate them. GlobalISel on the other hand uses separate opcodes.
+ // When these are set, the resulting opcode is G_LOAD/G_SEXTLOAD/G_ZEXTLOAD,
+ // depending on the predicates on the node.
+ Instruction IfSignExtend = ?;
+ Instruction IfZeroExtend = ?;
+}
+
+// These are defined in the same order as the G_* instructions.
+def : GINodeEquiv<G_ANYEXT, anyext>;
+def : GINodeEquiv<G_SEXT, sext>;
+def : GINodeEquiv<G_ZEXT, zext>;
+def : GINodeEquiv<G_TRUNC, trunc>;
+def : GINodeEquiv<G_BITCAST, bitconvert>;
+// G_INTTOPTR - SelectionDAG has no equivalent.
+// G_PTRTOINT - SelectionDAG has no equivalent.
+def : GINodeEquiv<G_CONSTANT, imm>;
+def : GINodeEquiv<G_FCONSTANT, fpimm>;
+def : GINodeEquiv<G_ADD, add>;
+def : GINodeEquiv<G_SUB, sub>;
+def : GINodeEquiv<G_MUL, mul>;
+def : GINodeEquiv<G_SDIV, sdiv>;
+def : GINodeEquiv<G_UDIV, udiv>;
+def : GINodeEquiv<G_SREM, srem>;
+def : GINodeEquiv<G_UREM, urem>;
+def : GINodeEquiv<G_AND, and>;
+def : GINodeEquiv<G_OR, or>;
+def : GINodeEquiv<G_XOR, xor>;
+def : GINodeEquiv<G_SHL, shl>;
+def : GINodeEquiv<G_LSHR, srl>;
+def : GINodeEquiv<G_ASHR, sra>;
+def : GINodeEquiv<G_SELECT, select>;
+def : GINodeEquiv<G_FNEG, fneg>;
+def : GINodeEquiv<G_FPEXT, fpextend>;
+def : GINodeEquiv<G_FPTRUNC, fpround>;
+def : GINodeEquiv<G_FPTOSI, fp_to_sint>;
+def : GINodeEquiv<G_FPTOUI, fp_to_uint>;
+def : GINodeEquiv<G_SITOFP, sint_to_fp>;
+def : GINodeEquiv<G_UITOFP, uint_to_fp>;
+def : GINodeEquiv<G_FADD, fadd>;
+def : GINodeEquiv<G_FSUB, fsub>;
+def : GINodeEquiv<G_FMA, fma>;
+def : GINodeEquiv<G_FMUL, fmul>;
+def : GINodeEquiv<G_FDIV, fdiv>;
+def : GINodeEquiv<G_FREM, frem>;
+def : GINodeEquiv<G_FPOW, fpow>;
+def : GINodeEquiv<G_FEXP2, fexp2>;
+def : GINodeEquiv<G_FLOG2, flog2>;
+def : GINodeEquiv<G_INTRINSIC, intrinsic_wo_chain>;
+// ISD::INTRINSIC_VOID can also be handled with G_INTRINSIC_W_SIDE_EFFECTS.
+def : GINodeEquiv<G_INTRINSIC_W_SIDE_EFFECTS, intrinsic_void>;
+def : GINodeEquiv<G_INTRINSIC_W_SIDE_EFFECTS, intrinsic_w_chain>;
+def : GINodeEquiv<G_BR, br>;
+def : GINodeEquiv<G_BSWAP, bswap>;
+
+// Broadly speaking G_LOAD is equivalent to ISD::LOAD but there are some
+// complications that tablegen must take care of. For example, Predicates such
+// as isSignExtLoad require that this is not a perfect 1:1 mapping since a
+// sign-extending load is (G_SEXTLOAD x) in GlobalISel. Additionally,
+// G_LOAD handles both atomic and non-atomic loads, whereas SelectionDAG has
+// separate nodes for them. This GINodeEquiv maps the non-atomic loads to
+// G_LOAD with a non-atomic MachineMemOperand.
+def : GINodeEquiv<G_LOAD, ld> {
+ let CheckMMOIsNonAtomic = 1;
+ let IfSignExtend = G_SEXTLOAD;
+ let IfZeroExtend = G_ZEXTLOAD;
+}
+// Broadly speaking G_STORE is equivalent to ISD::STORE but there are some
+// complications that tablegen must take care of. For example, predicates such
+// as isTruncStore require that this is not a perfect 1:1 mapping since a
+// truncating store is (G_STORE (G_TRUNC x)) in GlobalISel. Additionally,
+// G_STORE handles both atomic and non-atomic stores, whereas SelectionDAG has
+// separate nodes for them. This GINodeEquiv maps the non-atomic stores to
+// G_STORE with a non-atomic MachineMemOperand.
+def : GINodeEquiv<G_STORE, st> { let CheckMMOIsNonAtomic = 1; }
+
+def : GINodeEquiv<G_ATOMIC_CMPXCHG, atomic_cmp_swap>;
+def : GINodeEquiv<G_ATOMICRMW_XCHG, atomic_swap>;
+def : GINodeEquiv<G_ATOMICRMW_ADD, atomic_load_add>;
+def : GINodeEquiv<G_ATOMICRMW_SUB, atomic_load_sub>;
+def : GINodeEquiv<G_ATOMICRMW_AND, atomic_load_and>;
+def : GINodeEquiv<G_ATOMICRMW_NAND, atomic_load_nand>;
+def : GINodeEquiv<G_ATOMICRMW_OR, atomic_load_or>;
+def : GINodeEquiv<G_ATOMICRMW_XOR, atomic_load_xor>;
+def : GINodeEquiv<G_ATOMICRMW_MIN, atomic_load_min>;
+def : GINodeEquiv<G_ATOMICRMW_MAX, atomic_load_max>;
+def : GINodeEquiv<G_ATOMICRMW_UMIN, atomic_load_umin>;
+def : GINodeEquiv<G_ATOMICRMW_UMAX, atomic_load_umax>;
+
+// Specifies the GlobalISel equivalents for SelectionDAG's ComplexPattern.
+// Should be used on defs that subclass GIComplexOperandMatcher<>.
+class GIComplexPatternEquiv<ComplexPattern seldag> {
+ ComplexPattern SelDAGEquivalent = seldag;
+}
+
+// Specifies the GlobalISel equivalents for SelectionDAG's SDNodeXForm.
+// Should be used on defs that subclass GICustomOperandRenderer<>.
+class GISDNodeXFormEquiv<SDNodeXForm seldag> {
+ SDNodeXForm SelDAGEquivalent = seldag;
+}
diff --git a/capstone/suite/synctools/tablegen/include/llvm/Target/GlobalISel/Target.td b/capstone/suite/synctools/tablegen/include/llvm/Target/GlobalISel/Target.td
new file mode 100644
index 000000000..6740f404a
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/Target/GlobalISel/Target.td
@@ -0,0 +1,61 @@
+//===- Target.td - Define GlobalISel rules -----------------*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines the target-independent interfaces used to support
+// SelectionDAG instruction selection patterns (specified in
+// TargetSelectionDAG.td) when generating GlobalISel instruction selectors.
+//
+// This is intended as a compatibility layer, to enable reuse of target
+// descriptions written for SelectionDAG without requiring explicit GlobalISel
+// support. It will eventually supersede SelectionDAG patterns.
+//
+//===----------------------------------------------------------------------===//
+
+// Definitions that inherit from LLT define types that will be used in the
+// GlobalISel matcher.
+class LLT;
+
+def s32 : LLT;
+def s64 : LLT;
+
+// Defines a matcher for complex operands. This is analogous to ComplexPattern
+// from SelectionDAG.
+//
+// Definitions that inherit from this may also inherit from
+// GIComplexPatternEquiv to enable the import of SelectionDAG patterns involving
+// those ComplexPatterns.
+class GIComplexOperandMatcher<LLT type, string matcherfn> {
+ // The expected type of the root of the match.
+ //
+ // TODO: We should probably support any-type, any-scalar, and multiple types
+ // in the future.
+ LLT Type = type;
+
+ // The function that determines whether the operand matches. It should be of
+ // the form:
+ // bool select(const MatchOperand &Root, MatchOperand &Result1)
+ // and should have the same number of ResultX arguments as the number of
+ // result operands. It must return true on successful match and false
+ // otherwise. If it returns true, then all the ResultX arguments must be
+ // overwritten.
+ string MatcherFn = matcherfn;
+}
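+
+// For illustration only: a hypothetical addressing-mode matcher might be
+// declared roughly as follows (the names selectAddrModeXro and addr_mode_xro
+// are assumptions, not part of this file):
+//
+//   def gi_addr_mode_xro :
+//       GIComplexOperandMatcher<s64, "selectAddrModeXro">,
+//       GIComplexPatternEquiv<addr_mode_xro>;
+//
+// where addr_mode_xro is an existing SelectionDAG ComplexPattern.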
+
+// Defines a custom renderer. This is analogous to SDNodeXForm from
+// SelectionDAG. Unlike SDNodeXForm, this matches a MachineInstr and
+// renders directly to the result instruction without an intermediate node.
+//
+// Definitions that inherit from this may also inherit from GISDNodeXFormEquiv
+// to enable the import of SelectionDAG patterns involving those SDNodeXForms.
+class GICustomOperandRenderer<string rendererfn> {
+ // The function renders the operand(s) of the matched instruction to
+ // the specified instruction. It should be of the form:
+ // void render(MachineInstrBuilder &MIB, const MachineInstr &MI)
+ string RendererFn = rendererfn;
+}
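+
+// For illustration only (hypothetical renderer and SDNodeXForm names):
+//
+//   def gi_neg_imm :
+//       GICustomOperandRenderer<"renderNegatedImm">,
+//       GISDNodeXFormEquiv<neg_imm_XFORM>;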
diff --git a/capstone/suite/synctools/tablegen/include/llvm/Target/Target.td b/capstone/suite/synctools/tablegen/include/llvm/Target/Target.td
new file mode 100644
index 000000000..b746505d2
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/Target/Target.td
@@ -0,0 +1,1556 @@
+//===- Target.td - Target Independent TableGen interface ---*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines the target-independent interfaces which should be
+// implemented by each target which is using a TableGen based code generator.
+//
+//===----------------------------------------------------------------------===//
+
+// Include all information about LLVM intrinsics.
+include "llvm/IR/Intrinsics.td"
+
+//===----------------------------------------------------------------------===//
+// Register file description - These classes are used to fill in the target
+// description classes.
+
+class RegisterClass; // Forward def
+
+class HwMode<string FS> {
+ // A string representing subtarget features that turn on this HW mode.
+ // For example, "+feat1,-feat2" will indicate that the mode is active
+ // when "feat1" is enabled and "feat2" is disabled at the same time.
+ // Any other features are not checked.
+ // When multiple modes are used, they should be mutually exclusive,
+ // otherwise the results are unpredictable.
+ string Features = FS;
+}
+
+// A special mode recognized by tablegen. This mode is considered active
+// when no other mode is active. For targets that do not use specific hw
+// modes, this is the only mode.
+def DefaultMode : HwMode<"">;
+
+// A class used to associate objects with HW modes. It is only intended to
+// be used as a base class, where the derived class should contain a member
+// "Objects", which is a list of the same length as the list of modes.
+// The n-th element on the Objects list will be associated with the n-th
+// element on the Modes list.
+class HwModeSelect<list<HwMode> Ms> {
+ list<HwMode> Modes = Ms;
+}
+
+// A common class that implements a counterpart of ValueType, which is
+// dependent on a HW mode. This class inherits from ValueType itself,
+// which makes it possible to use objects of this class where ValueType
+// objects could be used. This is specifically applicable to selection
+// patterns.
+class ValueTypeByHwMode<list<HwMode> Ms, list<ValueType> Ts>
+ : HwModeSelect<Ms>, ValueType<0, 0> {
+ // The length of this list must be the same as the length of Ms.
+ list<ValueType> Objects = Ts;
+}
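+
+// For illustration only (hypothetical subtarget feature): a target whose
+// native integer width depends on a feature could write
+//
+//   def Mode64Bit   : HwMode<"+64bit">;
+//   def NativeIntVT : ValueTypeByHwMode<[DefaultMode, Mode64Bit], [i32, i64]>;
+//
+// and then use NativeIntVT wherever a ValueType is expected.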
+
+// A class representing the register size, spill size and spill alignment
+// in bits of a register.
+class RegInfo<int RS, int SS, int SA> {
+ int RegSize = RS; // Register size in bits.
+ int SpillSize = SS; // Spill slot size in bits.
+ int SpillAlignment = SA; // Spill slot alignment in bits.
+}
+
+// The register size/alignment information, parameterized by a HW mode.
+class RegInfoByHwMode<list<HwMode> Ms = [], list<RegInfo> Ts = []>
+ : HwModeSelect<Ms> {
+ // The length of this list must be the same as the length of Ms.
+ list<RegInfo> Objects = Ts;
+}
+
+// SubRegIndex - Use instances of SubRegIndex to identify subregisters.
+class SubRegIndex<int size, int offset = 0> {
+ string Namespace = "";
+
+ // Size - Size (in bits) of the sub-registers represented by this index.
+ int Size = size;
+
+ // Offset - Offset of the first bit that is part of this sub-register index.
+ // Set it to -1 if the same index is used to represent sub-registers that can
+ // be at different offsets (for example when using an index to access an
+ // element in a register tuple).
+ int Offset = offset;
+
+ // ComposedOf - A list of two SubRegIndex instances, [A, B].
+ // This indicates that this SubRegIndex is the result of composing A and B.
+ // See ComposedSubRegIndex.
+ list<SubRegIndex> ComposedOf = [];
+
+ // CoveringSubRegIndices - A list of two or more sub-register indexes that
+ // cover this sub-register.
+ //
+ // This field should normally be left blank as TableGen can infer it.
+ //
+ // TableGen automatically detects sub-registers that straddle the registers
+ // in the SubRegs field of a Register definition. For example:
+ //
+ // Q0 = dsub_0 -> D0, dsub_1 -> D1
+ // Q1 = dsub_0 -> D2, dsub_1 -> D3
+ // D1_D2 = dsub_0 -> D1, dsub_1 -> D2
+ // QQ0 = qsub_0 -> Q0, qsub_1 -> Q1
+ //
+ // TableGen will infer that D1_D2 is a sub-register of QQ0. It will be given
+ // the synthetic index dsub_1_dsub_2 unless some SubRegIndex is defined with
+ // CoveringSubRegIndices = [dsub_1, dsub_2].
+ list<SubRegIndex> CoveringSubRegIndices = [];
+}
+
+// ComposedSubRegIndex - A sub-register that is the result of composing A and B.
+// Offset is set to the sum of A and B's Offsets. Size is set to B's Size.
+class ComposedSubRegIndex<SubRegIndex A, SubRegIndex B>
+ : SubRegIndex<B.Size, !if(!eq(A.Offset, -1), -1,
+ !if(!eq(B.Offset, -1), -1,
+ !add(A.Offset, B.Offset)))> {
+ // See SubRegIndex.
+ let ComposedOf = [A, B];
+}
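+
+// For illustration only (hypothetical index names): composing a 16-bit index
+// at offset 16 with an 8-bit index at offset 0 yields an 8-bit index at
+// offset 16:
+//
+//   def bsub   : SubRegIndex<8>;
+//   def hsub_1 : SubRegIndex<16, 16>;
+//   def bsub_of_hsub_1 : ComposedSubRegIndex<hsub_1, bsub>;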
+
+// RegAltNameIndex - The alternate name set to use for register operands of
+// this register class when printing.
+class RegAltNameIndex {
+ string Namespace = "";
+}
+def NoRegAltName : RegAltNameIndex;
+
+// Register - You should define one instance of this class for each register
+// in the target machine. String n will become the "name" of the register.
+class Register<string n, list<string> altNames = []> {
+ string Namespace = "";
+ string AsmName = n;
+ list<string> AltNames = altNames;
+
+ // Aliases - A list of registers that this register overlaps with. A read or
+ // modification of this register can potentially read or modify the aliased
+ // registers.
+ list<Register> Aliases = [];
+
+ // SubRegs - A list of registers that are parts of this register. Note these
+ // are "immediate" sub-registers and the registers within the list do not
+ // themselves overlap. e.g. For X86, EAX's SubRegs list contains only [AX],
+ // not [AX, AH, AL].
+ list<Register> SubRegs = [];
+
+ // SubRegIndices - For each register in SubRegs, specify the SubRegIndex used
+ // to address it. Sub-sub-register indices are automatically inherited from
+ // SubRegs.
+ list<SubRegIndex> SubRegIndices = [];
+
+ // RegAltNameIndices - The alternate name indices which are valid for this
+ // register.
+ list<RegAltNameIndex> RegAltNameIndices = [];
+
+ // DwarfNumbers - Numbers used internally by gcc/gdb to identify the register.
+ // These values can be determined by locating the <target>.h file in the
+ // directory llvmgcc/gcc/config/<target>/ and looking for REGISTER_NAMES. The
+ // order of these names corresponds to the enumeration used by gcc. A value of
+ // -1 indicates that the gcc number is undefined, and -2 indicates that the
+ // register number is invalid for this mode/flavour.
+ list<int> DwarfNumbers = [];
+
+ // CostPerUse - Additional cost of instructions using this register compared
+ // to other registers in its class. The register allocator will try to
+ // minimize the number of instructions using a register with a CostPerUse.
+ // This is used by the x86-64 and ARM Thumb targets where some registers
+ // require larger instruction encodings.
+ int CostPerUse = 0;
+
+ // CoveredBySubRegs - When this bit is set, the value of this register is
+ // completely determined by the value of its sub-registers. For example, the
+ // x86 register AX is covered by its sub-registers AL and AH, but EAX is not
+ // covered by its sub-register AX.
+ bit CoveredBySubRegs = 0;
+
+ // HWEncoding - The target specific hardware encoding for this register.
+ bits<16> HWEncoding = 0;
+
+ bit isArtificial = 0;
+}
+
+// RegisterWithSubRegs - This can be used to define instances of Register which
+// need to specify sub-registers.
+// List "subregs" specifies which registers are sub-registers to this one. This
+// is used to populate the SubRegs and AliasSet fields of TargetRegisterDesc.
+// This allows the code generator to be careful not to put two values with
+// overlapping live ranges into registers which alias.
+class RegisterWithSubRegs<string n, list<Register> subregs> : Register<n> {
+ let SubRegs = subregs;
+}
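+
+// For illustration only, in the style of the x86 example above (sub-register
+// index definitions omitted):
+//
+//   def AL : Register<"al">;
+//   def AH : Register<"ah">;
+//   let SubRegIndices = [sub_8bit, sub_8bit_hi] in
+//   def AX : RegisterWithSubRegs<"ax", [AL, AH]>;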
+
+// DAGOperand - An empty base class that unifies RegisterClasses and other forms
+// of Operands that are legal as type qualifiers in DAG patterns. This should
+// only ever be used for defining multiclasses that are polymorphic over both
+// RegisterClasses and other Operands.
+class DAGOperand {
+ string OperandNamespace = "MCOI";
+ string DecoderMethod = "";
+}
+
+// RegisterClass - Now that all of the registers are defined, and aliases
+// between registers are defined, specify which registers belong to which
+// register classes. This also defines the default allocation order of
+// registers by register allocators.
+//
+class RegisterClass<string namespace, list<ValueType> regTypes, int alignment,
+ dag regList, RegAltNameIndex idx = NoRegAltName>
+ : DAGOperand {
+ string Namespace = namespace;
+
+ // The register size/alignment information, parameterized by a HW mode.
+ RegInfoByHwMode RegInfos;
+
+ // RegTypes - Specify the list of ValueTypes of the registers in this register
+ // class. Note that all registers in a register class must have the same
+ // ValueTypes. This is a list because some targets permit storing different
+ // types in the same register, for example vector values with 128-bit total
+ // size, but a different count/size of items, like SSE on x86.
+ //
+ list<ValueType> RegTypes = regTypes;
+
+ // Size - Specify the spill size in bits of the registers. A default value of
+ // zero lets tablegen pick an appropriate size.
+ int Size = 0;
+
+ // Alignment - Specify the alignment required of the registers when they are
+ // stored or loaded to memory.
+ //
+ int Alignment = alignment;
+
+ // CopyCost - This value is used to specify the cost of copying a value
+ // between two registers in this register class. The default value is one
+ // meaning it takes a single instruction to perform the copying. A negative
+ // value means copying is extremely expensive or impossible.
+ int CopyCost = 1;
+
+ // MemberList - Specify which registers are in this class. If the
+ // allocation_order_* methods are not specified, this also defines the order of
+ // allocation used by the register allocator.
+ //
+ dag MemberList = regList;
+
+ // AltNameIndex - The alternate register name to use when printing operands
+ // of this register class. Every register in the register class must have
+ // a valid alternate name for the given index.
+ RegAltNameIndex altNameIndex = idx;
+
+ // isAllocatable - Specify that the register class can be used for virtual
+ // registers and register allocation. Some register classes are only used to
+ // model instruction operand constraints, and should have isAllocatable = 0.
+ bit isAllocatable = 1;
+
+ // AltOrders - List of alternative allocation orders. The default order is
+ // MemberList itself, and that is good enough for most targets since the
+ // register allocators automatically remove reserved registers and move
+ // callee-saved registers to the end.
+ list<dag> AltOrders = [];
+
+ // AltOrderSelect - The body of a function that selects the allocation order
+ // to use in a given machine function. The code will be inserted in a
+ // function like this:
+ //
+ // static inline unsigned f(const MachineFunction &MF) { ... }
+ //
+ // The function should return 0 to select the default order defined by
+ // MemberList, 1 to select the first AltOrders entry and so on.
+ code AltOrderSelect = [{}];
+
+ // Specify allocation priority for register allocators using a greedy
+ // heuristic. Classes with higher priority values are assigned first. This is
+ // useful as it is sometimes beneficial to assign registers to highly
+ // constrained classes first. The value has to be in the range [0,63].
+ int AllocationPriority = 0;
+
+ // The diagnostic type to present when referencing this operand in a match
+ // failure error message. If this is empty, the default Match_InvalidOperand
+ // diagnostic type will be used. If this is "<name>", a Match_<name> enum
+ // value will be generated and used for this operand type. The target
+ // assembly parser is responsible for converting this into a user-facing
+ // diagnostic message.
+ string DiagnosticType = "";
+
+ // A diagnostic message to emit when an invalid value is provided for this
+ // register class when it is being used as an assembly operand. If this is
+ // non-empty, an anonymous diagnostic type enum value will be generated, and
+ // the assembly matcher will provide a function to map from diagnostic types
+ // to message strings.
+ string DiagnosticString = "";
+}
+
+// The memberList in a RegisterClass is a dag of set operations. TableGen
+// evaluates these set operations and expands them into register lists. The
+// following are the most common operations; see test/TableGen/SetTheory.td for
+// more examples of what is possible:
+//
+// (add R0, R1, R2) - Set Union. Each argument can be an individual register, a
+// register class, or a sub-expression. This is also the way to simply list
+// registers.
+//
+// (sub GPR, SP) - Set difference. Subtract the last arguments from the first.
+//
+// (and GPR, CSR) - Set intersection. All registers from the first set that are
+// also in the second set.
+//
+// (sequence "R%u", 0, 15) -> [R0, R1, ..., R15]. Generate a sequence of
+// numbered registers. Takes an optional 4th operand which is a stride to use
+// when generating the sequence.
+//
+// (shl GPR, 4) - Remove the first N elements.
+//
+// (trunc GPR, 4) - Truncate after the first N elements.
+//
+// (rotl GPR, 1) - Rotate N places to the left.
+//
+// (rotr GPR, 1) - Rotate N places to the right.
+//
+// (decimate GPR, 2) - Pick every N'th element, starting with the first.
+//
+// (interleave A, B, ...) - Interleave the elements from each argument list.
+//
+// All of these operators work on ordered sets, not lists. That means
+// duplicates are removed from sub-expressions.
+
+// Set operators. The rest is defined in TargetSelectionDAG.td.
+def sequence;
+def decimate;
+def interleave;
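+
+// For illustration only (hypothetical target, register, and class names):
+//
+//   def GPR  : RegisterClass<"Hypo", [i32], 32, (add (sequence "R%u", 0, 15))>;
+//   def tGPR : RegisterClass<"Hypo", [i32], 32, (trunc GPR, 8)>;
+//   def nonSP : RegisterClass<"Hypo", [i32], 32, (sub GPR, R15)>;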
+
+// RegisterTuples - Automatically generate super-registers by forming tuples of
+// sub-registers. This is useful for modeling register sequence constraints
+// with pseudo-registers that are larger than the architectural registers.
+//
+// The sub-register lists are zipped together:
+//
+// def EvenOdd : RegisterTuples<[sube, subo], [(add R0, R2), (add R1, R3)]>;
+//
+// Generates the same registers as:
+//
+// let SubRegIndices = [sube, subo] in {
+// def R0_R1 : RegisterWithSubRegs<"", [R0, R1]>;
+// def R2_R3 : RegisterWithSubRegs<"", [R2, R3]>;
+// }
+//
+// The generated pseudo-registers inherit super-classes and fields from their
+// first sub-register. Most fields from the Register class are inferred, and
+// the AsmName and Dwarf numbers are cleared.
+//
+// RegisterTuples instances can be used in other set operations to form
+// register classes and so on. This is the only way of using the generated
+// registers.
+class RegisterTuples<list<SubRegIndex> Indices, list<dag> Regs> {
+ // SubRegs - N lists of registers to be zipped up. Super-registers are
+ // synthesized from the first element of each SubRegs list, the second
+ // element and so on.
+ list<dag> SubRegs = Regs;
+
+ // SubRegIndices - N SubRegIndex instances. This provides the names of the
+ // sub-registers in the synthesized super-registers.
+ list<SubRegIndex> SubRegIndices = Indices;
+}
+
+
+//===----------------------------------------------------------------------===//
+// DwarfRegNum - This class provides a mapping of the llvm register enumeration
+// to the register numbering used by gcc and gdb. These values are used by a
+// debug information writer to describe where values may be located during
+// execution.
+class DwarfRegNum<list<int> Numbers> {
+ // DwarfNumbers - Numbers used internally by gcc/gdb to identify the register.
+ // These values can be determined by locating the <target>.h file in the
+ // directory llvmgcc/gcc/config/<target>/ and looking for REGISTER_NAMES. The
+ // order of these names corresponds to the enumeration used by gcc. A value of
+ // -1 indicates that the gcc number is undefined, and -2 indicates that the
+ // register number is invalid for this mode/flavour.
+ list<int> DwarfNumbers = Numbers;
+}
+
+// DwarfRegAlias - This class declares that a given register uses the same dwarf
+// numbers as another one. This is useful for making it clear that the two
+// registers do have the same number. It also lets us build a mapping
+// from dwarf register number to llvm register.
+class DwarfRegAlias<Register reg> {
+ Register DwarfAlias = reg;
+}
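+
+// For illustration only (hypothetical register names and Dwarf numbers):
+//
+//   def R0 : Register<"r0">, DwarfRegNum<[0]>;
+//   def W0 : Register<"w0">, DwarfRegAlias<R0>;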
+
+//===----------------------------------------------------------------------===//
+// Pull in the common support for MCPredicate (portable scheduling predicates).
+//
+include "llvm/Target/TargetInstrPredicate.td"
+
+//===----------------------------------------------------------------------===//
+// Pull in the common support for scheduling
+//
+include "llvm/Target/TargetSchedule.td"
+
+class Predicate; // Forward def
+
+//===----------------------------------------------------------------------===//
+// Instruction set description - These classes correspond to the C++ classes in
+// the Target/TargetInstrInfo.h file.
+//
+class Instruction {
+ string Namespace = "";
+
+ dag OutOperandList; // A dag containing the MI def operand list.
+ dag InOperandList; // A dag containing the MI use operand list.
+ string AsmString = ""; // The .s format to print the instruction with.
+
+ // Pattern - Set to the DAG pattern for this instruction, if we know of one,
+ // otherwise, uninitialized.
+ list<dag> Pattern;
+
+ // The following state will eventually be inferred automatically from the
+ // instruction pattern.
+
+ list<Register> Uses = []; // Default to using no non-operand registers
+ list<Register> Defs = []; // Default to modifying no non-operand registers
+
+ // Predicates - List of predicates which will be turned into isel matching
+ // code.
+ list<Predicate> Predicates = [];
+
+ // Size - Size of encoded instruction, or zero if the size cannot be determined
+ // from the opcode.
+ int Size = 0;
+
+ // DecoderNamespace - The "namespace" in which this instruction exists, on
+ // targets like ARM where multiple ISA namespaces exist.
+ string DecoderNamespace = "";
+
+ // Code size, for instruction selection.
+ // FIXME: What does this actually mean?
+ int CodeSize = 0;
+
+ // Added complexity passed onto matching pattern.
+ int AddedComplexity = 0;
+
+ // These bits capture information about the high-level semantics of the
+ // instruction.
+ bit isReturn = 0; // Is this instruction a return instruction?
+ bit isBranch = 0; // Is this instruction a branch instruction?
+ bit isIndirectBranch = 0; // Is this instruction an indirect branch?
+ bit isCompare = 0; // Is this instruction a comparison instruction?
+ bit isMoveImm = 0; // Is this instruction a move immediate instruction?
+ bit isMoveReg = 0; // Is this instruction a move register instruction?
+ bit isBitcast = 0; // Is this instruction a bitcast instruction?
+ bit isSelect = 0; // Is this instruction a select instruction?
+ bit isBarrier = 0; // Can control flow fall through this instruction?
+ bit isCall = 0; // Is this instruction a call instruction?
+ bit isAdd = 0; // Is this instruction an add instruction?
+ bit isTrap = 0; // Is this instruction a trap instruction?
+ bit canFoldAsLoad = 0; // Can this be folded as a simple memory operand?
+ bit mayLoad = ?; // Is it possible for this inst to read memory?
+ bit mayStore = ?; // Is it possible for this inst to write memory?
+ bit isConvertibleToThreeAddress = 0; // Can this 2-addr instruction promote?
+ bit isCommutable = 0; // Is this 3 operand instruction commutable?
+ bit isTerminator = 0; // Is this part of the terminator for a basic block?
+ bit isReMaterializable = 0; // Is this instruction re-materializable?
+ bit isPredicable = 0; // Is this instruction predicable?
+ bit hasDelaySlot = 0; // Does this instruction have a delay slot?
+ bit usesCustomInserter = 0; // Pseudo instr needing special help.
+ bit hasPostISelHook = 0; // To be *adjusted* after isel by target hook.
+ bit hasCtrlDep = 0; // Does this instruction r/w ctrl-flow chains?
+ bit isNotDuplicable = 0; // Is it unsafe to duplicate this instruction?
+ bit isConvergent = 0; // Is this instruction convergent?
+ bit isAsCheapAsAMove = 0; // As cheap (or cheaper) than a move instruction.
+ bit hasExtraSrcRegAllocReq = 0; // Sources have special regalloc requirement?
+ bit hasExtraDefRegAllocReq = 0; // Defs have special regalloc requirement?
+ bit isRegSequence = 0; // Is this instruction a kind of reg sequence?
+ // If so, make sure to override
+ // TargetInstrInfo::getRegSequenceLikeInputs.
+ bit isPseudo = 0; // Is this instruction a pseudo-instruction?
+ // If so, won't have encoding information for
+ // the [MC]CodeEmitter stuff.
+ bit isExtractSubreg = 0; // Is this instruction a kind of extract subreg?
+ // If so, make sure to override
+ // TargetInstrInfo::getExtractSubregLikeInputs.
+ bit isInsertSubreg = 0; // Is this instruction a kind of insert subreg?
+ // If so, make sure to override
+ // TargetInstrInfo::getInsertSubregLikeInputs.
+
+ // Does the instruction have side effects that are not captured by any
+ // operands of the instruction or other flags?
+ bit hasSideEffects = ?;
+
+ // Is this instruction a "real" instruction (with a distinct machine
+ // encoding), or is it a pseudo instruction used for codegen modeling
+ // purposes.
+ // FIXME: For now this is distinct from isPseudo, above, as code-gen-only
+ // instructions can (and often do) still have encoding information
+ // associated with them. Once we've migrated all of them over to true
+ // pseudo-instructions that are lowered to real instructions prior to
+ // the printer/emitter, we can remove this attribute and just use isPseudo.
+ //
+ // The intended use is:
+ // isPseudo: Does not have encoding information and should be expanded,
+ // at the latest, during lowering to MCInst.
+ //
+ // isCodeGenOnly: Does have encoding information and can go through to the
+ // CodeEmitter unchanged, but duplicates a canonical instruction
+ // definition's encoding and should be ignored when constructing the
+ // assembler match tables.
+ bit isCodeGenOnly = 0;
+
+ // Is this instruction a pseudo instruction for use by the assembler parser.
+ bit isAsmParserOnly = 0;
+
+ // This instruction is not expected to be queried for scheduling latencies
+ // and therefore needs no scheduling information even for a complete
+ // scheduling model.
+ bit hasNoSchedulingInfo = 0;
+
+ InstrItinClass Itinerary = NoItinerary;// Execution steps used for scheduling.
+
+ // Scheduling information from TargetSchedule.td.
+ list<SchedReadWrite> SchedRW;
+
+ string Constraints = ""; // OperandConstraint, e.g. $src = $dst.
+
+ /// DisableEncoding - List of operand names (e.g. "$op1,$op2") that should not
+ /// be encoded into the output machineinstr.
+ string DisableEncoding = "";
+
+ string PostEncoderMethod = "";
+ string DecoderMethod = "";
+
+ // Is the instruction decoder method able to completely determine if the
+ // given instruction is valid or not. If the TableGen definition of the
+ // instruction specifies bitpattern A??B where A and B are static bits, the
+ // hasCompleteDecoder flag says whether the decoder method fully handles the
+ // ?? space, i.e. if it is a final arbiter for the instruction validity.
+ // If not then the decoder attempts to continue decoding when the decoder
+ // method fails.
+ //
+ // This allows the decoder to handle situations where the encoding is not fully
+ // orthogonal. Example:
+ // * InstA with bitpattern 0b0000????,
+ // * InstB with bitpattern 0b000000?? but the associated decoder method
+ // DecodeInstB() returns Fail when ?? is 0b00 or 0b11.
+ //
+ // The decoder tries to decode a bitpattern that matches both InstA and
+ // InstB bitpatterns first as InstB (because it is the most specific
+ // encoding). In the default case (hasCompleteDecoder = 1), when
+ // DecodeInstB() returns Fail the bitpattern gets rejected. By setting
+ // hasCompleteDecoder = 0 in InstB, the decoder is informed that
+ // DecodeInstB() is not able to determine if all possible values of ?? are
+ // valid or not. If DecodeInstB() returns Fail the decoder will attempt to
+ // decode the bitpattern as InstA too.
+ bit hasCompleteDecoder = 1;
+
+ /// Target-specific flags. This becomes the TSFlags field in TargetInstrDesc.
+ bits<64> TSFlags = 0;
+
+ ///@name Assembler Parser Support
+ ///@{
+
+ string AsmMatchConverter = "";
+
+ /// TwoOperandAliasConstraint - Enable TableGen to auto-generate a
+ /// two-operand matcher inst-alias for a three operand instruction.
+ /// For example, the arm instruction "add r3, r3, r5" can be written
+ /// as "add r3, r5". The constraint is of the same form as a tied-operand
+ /// constraint. For example, "$Rn = $Rd".
+ string TwoOperandAliasConstraint = "";
+
+ /// Assembler variant name to use for this instruction. If specified, the
+ /// instruction will be present only in the MatchTable for this variant. If
+ /// not specified, the assembler variants will be determined based on the
+ /// AsmString.
+ string AsmVariantName = "";
+
+ ///@}
+
+ /// UseNamedOperandTable - If set, the operand indices of this instruction
+ /// can be queried via the getNamedOperandIdx() function which is generated
+ /// by TableGen.
+ bit UseNamedOperandTable = 0;
+
+ /// Should FastISel ignore this instruction. For certain ISAs, they have
+ /// instructions which map to the same ISD Opcode, value type operands and
+ /// instruction selection predicates. FastISel cannot handle such cases, but
+ /// SelectionDAG can.
+ bit FastISelShouldIgnore = 0;
+}
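+
+// For illustration only (hypothetical register class, no encoding shown), a
+// typical target instruction definition built on this class looks like:
+//
+//   def ADDrr : Instruction {
+//     let OutOperandList = (outs GPR:$dst);
+//     let InOperandList  = (ins GPR:$src1, GPR:$src2);
+//     let AsmString      = "add $dst, $src1, $src2";
+//     let Pattern        = [(set GPR:$dst, (add GPR:$src1, GPR:$src2))];
+//     let isCommutable   = 1;
+//   }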
+
+/// PseudoInstExpansion - Expansion information for a pseudo-instruction.
+/// Which instruction it expands to and how the operands map from the
+/// pseudo.
+class PseudoInstExpansion<dag Result> {
+ dag ResultInst = Result; // The instruction to generate.
+ bit isPseudo = 1;
+}
+
+/// Predicates - These are extra conditionals which are turned into instruction
+/// selector matching code. Currently each predicate is just a string.
+class Predicate<string cond> {
+ string CondString = cond;
+
+ /// AssemblerMatcherPredicate - If this feature can be used by the assembler
+ /// matcher, this is true. Targets should set this by inheriting their
+ /// feature from the AssemblerPredicate class in addition to Predicate.
+ bit AssemblerMatcherPredicate = 0;
+
+ /// AssemblerCondString - Name of the subtarget feature being tested used
+ /// as alternative condition string used for assembler matcher.
+ /// e.g. "ModeThumb" is translated to "(Bits & ModeThumb) != 0".
+ /// "!ModeThumb" is translated to "(Bits & ModeThumb) == 0".
+ /// It can also list multiple features separated by ",".
+ /// e.g. "ModeThumb,FeatureThumb2" is translated to
+ /// "(Bits & ModeThumb) != 0 && (Bits & FeatureThumb2) != 0".
+ string AssemblerCondString = "";
+
+ /// PredicateName - User-level name to use for the predicate. Mainly for use
+ /// in diagnostics such as missing feature errors in the asm matcher.
+ string PredicateName = "";
+
+ /// Setting this to '1' indicates that the predicate must be recomputed on
+ /// every function change. Most predicates can leave this at '0'.
+ ///
+ /// Ignored by SelectionDAG, it always recomputes the predicate on every use.
+ bit RecomputePerFunction = 0;
+}
+
+/// NoHonorSignDependentRounding - This predicate is true if support for
+/// sign-dependent-rounding is not enabled.
+def NoHonorSignDependentRounding
+ : Predicate<"!TM.Options.HonorSignDependentRoundingFPMath()">;
+
+class Requires<list<Predicate> preds> {
+ list<Predicate> Predicates = preds;
+}
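+
+// For illustration only (hypothetical predicate and instruction names):
+//
+//   def HasWideMul : Predicate<"Subtarget->hasWideMul()">;
+//   def MULW : Instruction, Requires<[HasWideMul]> { ... }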
+
+/// ops definition - This is just a simple marker used to identify the operand
+/// list for an instruction. outs and ins are identical both syntactically and
+/// semantically; they are used to define def operands and use operands to
+/// improve readability. This should be used like this:
+/// (outs R32:$dst), (ins R32:$src1, R32:$src2) or something similar.
+def ops;
+def outs;
+def ins;
+
+/// variable_ops definition - Mark this instruction as taking a variable number
+/// of operands.
+def variable_ops;
+
+
+/// PointerLikeRegClass - Values that are designed to have pointer width are
+/// derived from this. TableGen treats the register class as having a symbolic
+/// type that it doesn't know, and resolves the actual regclass to use by using
+/// the TargetRegisterInfo::getPointerRegClass() hook at codegen time.
+class PointerLikeRegClass<int Kind> {
+ int RegClassKind = Kind;
+}
+
+
+/// ptr_rc definition - Mark this operand as being a pointer value whose
+/// register class is resolved dynamically via a callback to TargetInstrInfo.
+/// FIXME: We should probably change this to a class which contain a list of
+/// flags. But currently we have but one flag.
+def ptr_rc : PointerLikeRegClass<0>;
+
+/// unknown definition - Mark this operand as being of unknown type, causing
+/// it to be resolved by inference in the context it is used.
+class unknown_class;
+def unknown : unknown_class;
+
+/// AsmOperandClass - Representation for the kinds of operands which the target
+/// specific parser can create and the assembly matcher may need to distinguish.
+///
+/// Operand classes are used to define the order in which instructions are
+/// matched, to ensure that the instruction which gets matched for any
+/// particular list of operands is deterministic.
+///
+/// The target specific parser must be able to classify a parsed operand into a
+/// unique class which does not partially overlap with any other classes. It can
+/// match a subset of some other class, in which case the super class field
+/// should be defined.
+class AsmOperandClass {
+ /// The name to use for this class, which should be usable as an enum value.
+ string Name = ?;
+
+ /// The super classes of this operand.
+ list<AsmOperandClass> SuperClasses = [];
+
+ /// The name of the method on the target specific operand to call to test
+ /// whether the operand is an instance of this class. If not set, this will
+ /// default to "isFoo", where Foo is the AsmOperandClass name. The method
+ /// signature should be:
+ /// bool isFoo() const;
+ string PredicateMethod = ?;
+
+ /// The name of the method on the target specific operand to call to add the
+ /// target specific operand to an MCInst. If not set, this will default to
+ /// "addFooOperands", where Foo is the AsmOperandClass name. The method
+ /// signature should be:
+ /// void addFooOperands(MCInst &Inst, unsigned N) const;
+ string RenderMethod = ?;
+
+ /// The name of the method on the target specific operand to call to custom
+ /// handle the operand parsing. This is useful when the operands do not relate
+ /// to immediates or registers and are very instruction specific (as flags to
+ /// set in a processor register, coprocessor number, ...).
+ string ParserMethod = ?;
+
+ // The diagnostic type to present when referencing this operand in a
+ // match failure error message. By default, use a generic "invalid operand"
+ // diagnostic. The target AsmParser maps these codes to text.
+ string DiagnosticType = "";
+
+ /// A diagnostic message to emit when an invalid value is provided for this
+ /// operand.
+ string DiagnosticString = "";
+
+ /// Set to 1 if this operand is optional and not always required. Typically,
+ /// the AsmParser will emit an error when it finishes parsing an
+ /// instruction if it hasn't matched all the operands yet. However, this
+ /// error will be suppressed if all of the remaining unmatched operands are
+ /// marked as IsOptional.
+ ///
+ /// Optional arguments must be at the end of the operand list.
+ bit IsOptional = 0;
+
+ /// The name of the method on the target specific asm parser that returns the
+ /// default operand for this optional operand. This method is only used if
+ /// IsOptional == 1. If not set, this will default to "defaultFooOperands",
+ /// where Foo is the AsmOperandClass name. The method signature should be:
+ /// std::unique_ptr<MCParsedAsmOperand> defaultFooOperands() const;
+ string DefaultMethod = ?;
+}
+
+def ImmAsmOperand : AsmOperandClass {
+ let Name = "Imm";
+}
+
+/// Operand Types - These provide the built-in operand types that may be used
+/// by a target. Targets can optionally provide their own operand types as
+/// needed, though this should not be needed for RISC targets.
+class Operand<ValueType ty> : DAGOperand {
+ ValueType Type = ty;
+ string PrintMethod = "printOperand";
+ string EncoderMethod = "";
+ bit hasCompleteDecoder = 1;
+ string OperandType = "OPERAND_UNKNOWN";
+ dag MIOperandInfo = (ops);
+
+ // MCOperandPredicate - Optionally, a code fragment operating on
+ // const MCOperand &MCOp, and returning a bool, to indicate if
+ // the value of MCOp is valid for the specific subclass of Operand
+ code MCOperandPredicate;
+
+ // ParserMatchClass - The "match class" that operands of this type fit
+ // in. Match classes are used to define the order in which instructions are
+ // matched, to ensure that which instruction gets matched is deterministic.
+ //
+ // The target specific parser must be able to classify a parsed operand into
+ // a unique class, which does not partially overlap with any other classes. It
+ // can match a subset of some other class, in which case the AsmOperandClass
+ // should declare the other operand as one of its super classes.
+ AsmOperandClass ParserMatchClass = ImmAsmOperand;
+}
+
+class RegisterOperand<RegisterClass regclass, string pm = "printOperand">
+ : DAGOperand {
+ // RegClass - The register class of the operand.
+ RegisterClass RegClass = regclass;
+ // PrintMethod - The target method to call to print register operands of
+ // this type. The method normally will just use an alt-name index to look
+ // up the name to print. Defaults to the generic printOperand().
+ string PrintMethod = pm;
+
+ // EncoderMethod - The target method name to call to encode this register
+ // operand.
+ string EncoderMethod = "";
+
+ // ParserMatchClass - The "match class" that operands of this type fit
+ // in. Match classes are used to define the order in which instructions are
+ // matched, to ensure that which instruction gets matched is deterministic.
+ //
+ // The target specific parser must be able to classify a parsed operand into
+ // a unique class, which does not partially overlap with any other classes. It
+ // can match a subset of some other class, in which case the AsmOperandClass
+ // should declare the other operand as one of its super classes.
+ AsmOperandClass ParserMatchClass;
+
+ string OperandType = "OPERAND_REGISTER";
+
+ // When referenced in the result of a CodeGen pattern, GlobalISel will
+ // normally copy the matched operand to the result. When this is set, it will
+ // emit a special copy that will replace zero-immediates with the specified
+ // zero-register.
+ Register GIZeroRegister = ?;
+}
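+
+// Illustrative sketch only (not from the original header): a hypothetical
+// target with a register class GPR32 and an alt-name printer could wrap it in
+// a RegisterOperand roughly as follows; GPR32, FooGPR32AsmOperand and
+// printGPR32AltName are invented names.
+//
+//   def FooGPR32AsmOperand : AsmOperandClass { let Name = "GPR32"; }
+//   def gpr32op : RegisterOperand<GPR32, "printGPR32AltName"> {
+//     let ParserMatchClass = FooGPR32AsmOperand;
+//   }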
+
+let OperandType = "OPERAND_IMMEDIATE" in {
+def i1imm : Operand<i1>;
+def i8imm : Operand<i8>;
+def i16imm : Operand<i16>;
+def i32imm : Operand<i32>;
+def i64imm : Operand<i64>;
+
+def f32imm : Operand<f32>;
+def f64imm : Operand<f64>;
+}
+
+// Register operands for generic instructions don't have an MVT, but do have
+// constraints linking the operands (e.g. all operands of a G_ADD must
+// have the same LLT).
+class TypedOperand<string Ty> : Operand<untyped> {
+ let OperandType = Ty;
+ bit IsPointer = 0;
+}
+
+def type0 : TypedOperand<"OPERAND_GENERIC_0">;
+def type1 : TypedOperand<"OPERAND_GENERIC_1">;
+def type2 : TypedOperand<"OPERAND_GENERIC_2">;
+def type3 : TypedOperand<"OPERAND_GENERIC_3">;
+def type4 : TypedOperand<"OPERAND_GENERIC_4">;
+def type5 : TypedOperand<"OPERAND_GENERIC_5">;
+
+let IsPointer = 1 in {
+ def ptype0 : TypedOperand<"OPERAND_GENERIC_0">;
+ def ptype1 : TypedOperand<"OPERAND_GENERIC_1">;
+ def ptype2 : TypedOperand<"OPERAND_GENERIC_2">;
+ def ptype3 : TypedOperand<"OPERAND_GENERIC_3">;
+ def ptype4 : TypedOperand<"OPERAND_GENERIC_4">;
+ def ptype5 : TypedOperand<"OPERAND_GENERIC_5">;
+}
+
+/// zero_reg definition - Special node to stand for the zero register.
+///
+def zero_reg;
+
+/// All operands which the MC layer classifies as predicates should inherit from
+/// this class in some manner. This is already handled for the most commonly
+/// used PredicateOperand, but may be useful in other circumstances.
+class PredicateOp;
+
+/// OperandWithDefaultOps - This Operand class can be used as the parent class
+/// for an Operand that needs to be initialized with a default value if
+/// no value is supplied in a pattern. This class can be used to simplify the
+/// pattern definitions for instructions that have target specific flags
+/// encoded as immediate operands.
+class OperandWithDefaultOps<ValueType ty, dag defaultops>
+ : Operand<ty> {
+ dag DefaultOps = defaultops;
+}
+
+/// PredicateOperand - This can be used to define a predicate operand for an
+/// instruction. OpTypes specifies the MIOperandInfo for the operand, and
+/// AlwaysVal specifies the value of this predicate when set to "always
+/// execute".
+class PredicateOperand<ValueType ty, dag OpTypes, dag AlwaysVal>
+ : OperandWithDefaultOps<ty, AlwaysVal>, PredicateOp {
+ let MIOperandInfo = OpTypes;
+}
+
+/// OptionalDefOperand - This is used to define an optional definition operand
+/// for an instruction. DefaultOps is the register the operand represents if
+/// none is supplied, e.g. zero_reg.
+class OptionalDefOperand<ValueType ty, dag OpTypes, dag defaultops>
+ : OperandWithDefaultOps<ty, defaultops> {
+ let MIOperandInfo = OpTypes;
+}
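+
+// Illustrative sketch only (not from the original header), loosely modeled on
+// how ARM-style targets declare predicate and optional-def operands; OtherVT
+// comes from ValueTypes.td, while FooPredOp, FooCCOut and FooCCReg are
+// invented names (FooCCReg stands in for a target register class).
+//
+//   def FooPredOp : PredicateOperand<OtherVT, (ops i32imm, FooCCReg),
+//                                    (ops (i32 14), (i32 zero_reg))>;
+//   def FooCCOut  : OptionalDefOperand<OtherVT, (ops FooCCReg),
+//                                      (ops (i32 zero_reg))>;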
+
+
+// InstrInfo - This class should only be instantiated once to provide parameters
+// which are global to the target machine.
+//
+class InstrInfo {
+ // Targets can specify their instructions in either big or little-endian formats.
+ // For instance, while both Sparc and PowerPC are big-endian platforms, the
+ // Sparc manual specifies its instructions in the format [31..0] (big), while
+ // PowerPC specifies them using the format [0..31] (little).
+ bit isLittleEndianEncoding = 0;
+
+ // The instruction properties mayLoad, mayStore, and hasSideEffects are unset
+ // by default, and TableGen will infer their value from the instruction
+ // pattern when possible.
+ //
+ // Normally, TableGen will issue an error if it can't infer the value of a
+ // property that hasn't been set explicitly. When guessInstructionProperties
+ // is set, it will guess a safe value instead.
+ //
+ // This option is a temporary migration help. It will go away.
+ bit guessInstructionProperties = 1;
+
+ // TableGen's instruction encoder generator has support for matching operands
+ // to bit-field variables both by name and by position. While matching by
+ // name is preferred, this is currently not possible for complex operands,
+ // and some targets still rely on the positional encoding rules. When
+ // generating a decoder for such targets, the positional encoding rules must
+ // be used by the decoder generator as well.
+ //
+ // This option is temporary; it will go away once the TableGen decoder
+ // generator has better support for complex operands and targets have
+ // migrated away from using positionally encoded operands.
+ bit decodePositionallyEncodedOperands = 0;
+
+ // When set, this indicates that there will be no overlap between those
+ // operands that are matched by ordering (positional operands) and those
+ // matched by name.
+ //
+ // This option is temporary; it will go away once the TableGen decoder
+ // generator has better support for complex operands and targets have
+ // migrated away from using positionally encoded operands.
+ bit noNamedPositionallyEncodedOperands = 0;
+}
+
+// Standard Pseudo Instructions.
+// This list must match TargetOpcodes.h and CodeGenTarget.cpp.
+// Only these instructions are allowed in the TargetOpcode namespace.
+// Ensure mayLoad and mayStore have a default value, so as not to break
+// targets that set guessInstructionProperties=0. Any local definition of
+// mayLoad/mayStore takes precedence over these default values.
+class StandardPseudoInstruction : Instruction {
+ let mayLoad = 0;
+ let mayStore = 0;
+ let isCodeGenOnly = 1;
+ let isPseudo = 1;
+ let hasNoSchedulingInfo = 1;
+ let Namespace = "TargetOpcode";
+}
+def PHI : StandardPseudoInstruction {
+ let OutOperandList = (outs unknown:$dst);
+ let InOperandList = (ins variable_ops);
+ let AsmString = "PHINODE";
+ let hasSideEffects = 0;
+}
+def INLINEASM : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins variable_ops);
+ let AsmString = "";
+ let hasSideEffects = 0; // Note side effect is encoded in an operand.
+}
+def CFI_INSTRUCTION : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins i32imm:$id);
+ let AsmString = "";
+ let hasCtrlDep = 1;
+ let hasSideEffects = 0;
+ let isNotDuplicable = 1;
+}
+def EH_LABEL : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins i32imm:$id);
+ let AsmString = "";
+ let hasCtrlDep = 1;
+ let hasSideEffects = 0;
+ let isNotDuplicable = 1;
+}
+def GC_LABEL : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins i32imm:$id);
+ let AsmString = "";
+ let hasCtrlDep = 1;
+ let hasSideEffects = 0;
+ let isNotDuplicable = 1;
+}
+def ANNOTATION_LABEL : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins i32imm:$id);
+ let AsmString = "";
+ let hasCtrlDep = 1;
+ let hasSideEffects = 0;
+ let isNotDuplicable = 1;
+}
+def KILL : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins variable_ops);
+ let AsmString = "";
+ let hasSideEffects = 0;
+}
+def EXTRACT_SUBREG : StandardPseudoInstruction {
+ let OutOperandList = (outs unknown:$dst);
+ let InOperandList = (ins unknown:$supersrc, i32imm:$subidx);
+ let AsmString = "";
+ let hasSideEffects = 0;
+}
+def INSERT_SUBREG : StandardPseudoInstruction {
+ let OutOperandList = (outs unknown:$dst);
+ let InOperandList = (ins unknown:$supersrc, unknown:$subsrc, i32imm:$subidx);
+ let AsmString = "";
+ let hasSideEffects = 0;
+ let Constraints = "$supersrc = $dst";
+}
+def IMPLICIT_DEF : StandardPseudoInstruction {
+ let OutOperandList = (outs unknown:$dst);
+ let InOperandList = (ins);
+ let AsmString = "";
+ let hasSideEffects = 0;
+ let isReMaterializable = 1;
+ let isAsCheapAsAMove = 1;
+}
+def SUBREG_TO_REG : StandardPseudoInstruction {
+ let OutOperandList = (outs unknown:$dst);
+ let InOperandList = (ins unknown:$implsrc, unknown:$subsrc, i32imm:$subidx);
+ let AsmString = "";
+ let hasSideEffects = 0;
+}
+def COPY_TO_REGCLASS : StandardPseudoInstruction {
+ let OutOperandList = (outs unknown:$dst);
+ let InOperandList = (ins unknown:$src, i32imm:$regclass);
+ let AsmString = "";
+ let hasSideEffects = 0;
+ let isAsCheapAsAMove = 1;
+}
+def DBG_VALUE : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins variable_ops);
+ let AsmString = "DBG_VALUE";
+ let hasSideEffects = 0;
+}
+def DBG_LABEL : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins unknown:$label);
+ let AsmString = "DBG_LABEL";
+ let hasSideEffects = 0;
+}
+def REG_SEQUENCE : StandardPseudoInstruction {
+ let OutOperandList = (outs unknown:$dst);
+ let InOperandList = (ins unknown:$supersrc, variable_ops);
+ let AsmString = "";
+ let hasSideEffects = 0;
+ let isAsCheapAsAMove = 1;
+}
+def COPY : StandardPseudoInstruction {
+ let OutOperandList = (outs unknown:$dst);
+ let InOperandList = (ins unknown:$src);
+ let AsmString = "";
+ let hasSideEffects = 0;
+ let isAsCheapAsAMove = 1;
+ let hasNoSchedulingInfo = 0;
+}
+def BUNDLE : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins variable_ops);
+ let AsmString = "BUNDLE";
+ let hasSideEffects = 1;
+}
+def LIFETIME_START : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins i32imm:$id);
+ let AsmString = "LIFETIME_START";
+ let hasSideEffects = 0;
+}
+def LIFETIME_END : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins i32imm:$id);
+ let AsmString = "LIFETIME_END";
+ let hasSideEffects = 0;
+}
+def STACKMAP : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins i64imm:$id, i32imm:$nbytes, variable_ops);
+ let hasSideEffects = 1;
+ let isCall = 1;
+ let mayLoad = 1;
+ let usesCustomInserter = 1;
+}
+def PATCHPOINT : StandardPseudoInstruction {
+ let OutOperandList = (outs unknown:$dst);
+ let InOperandList = (ins i64imm:$id, i32imm:$nbytes, unknown:$callee,
+ i32imm:$nargs, i32imm:$cc, variable_ops);
+ let hasSideEffects = 1;
+ let isCall = 1;
+ let mayLoad = 1;
+ let usesCustomInserter = 1;
+}
+def STATEPOINT : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins variable_ops);
+ let usesCustomInserter = 1;
+ let mayLoad = 1;
+ let mayStore = 1;
+ let hasSideEffects = 1;
+ let isCall = 1;
+}
+def LOAD_STACK_GUARD : StandardPseudoInstruction {
+ let OutOperandList = (outs ptr_rc:$dst);
+ let InOperandList = (ins);
+ let mayLoad = 1;
+ bit isReMaterializable = 1;
+ let hasSideEffects = 0;
+ bit isPseudo = 1;
+}
+def LOCAL_ESCAPE : StandardPseudoInstruction {
+ // This instruction is really just a label. It has to be part of the chain so
+ // that it doesn't get dropped from the DAG, but it produces nothing and has
+ // no side effects.
+ let OutOperandList = (outs);
+ let InOperandList = (ins ptr_rc:$symbol, i32imm:$id);
+ let hasSideEffects = 0;
+ let hasCtrlDep = 1;
+}
+def FAULTING_OP : StandardPseudoInstruction {
+ let OutOperandList = (outs unknown:$dst);
+ let InOperandList = (ins variable_ops);
+ let usesCustomInserter = 1;
+ let hasSideEffects = 1;
+ let mayLoad = 1;
+ let mayStore = 1;
+ let isTerminator = 1;
+ let isBranch = 1;
+}
+def PATCHABLE_OP : StandardPseudoInstruction {
+ let OutOperandList = (outs unknown:$dst);
+ let InOperandList = (ins variable_ops);
+ let usesCustomInserter = 1;
+ let mayLoad = 1;
+ let mayStore = 1;
+ let hasSideEffects = 1;
+}
+def PATCHABLE_FUNCTION_ENTER : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins);
+ let AsmString = "# XRay Function Enter.";
+ let usesCustomInserter = 1;
+ let hasSideEffects = 0;
+}
+def PATCHABLE_RET : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins variable_ops);
+ let AsmString = "# XRay Function Patchable RET.";
+ let usesCustomInserter = 1;
+ let hasSideEffects = 1;
+ let isTerminator = 1;
+ let isReturn = 1;
+}
+def PATCHABLE_FUNCTION_EXIT : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins);
+ let AsmString = "# XRay Function Exit.";
+ let usesCustomInserter = 1;
+ let hasSideEffects = 0; // FIXME: is this correct?
+ let isReturn = 0; // Original return instruction will follow
+}
+def PATCHABLE_TAIL_CALL : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins variable_ops);
+ let AsmString = "# XRay Tail Call Exit.";
+ let usesCustomInserter = 1;
+ let hasSideEffects = 1;
+ let isReturn = 1;
+}
+def PATCHABLE_EVENT_CALL : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins ptr_rc:$event, i8imm:$size);
+ let AsmString = "# XRay Custom Event Log.";
+ let usesCustomInserter = 1;
+ let isCall = 1;
+ let mayLoad = 1;
+ let mayStore = 1;
+ let hasSideEffects = 1;
+}
+def PATCHABLE_TYPED_EVENT_CALL : StandardPseudoInstruction {
+ let OutOperandList = (outs);
+ let InOperandList = (ins i16imm:$type, ptr_rc:$event, i32imm:$size);
+ let AsmString = "# XRay Typed Event Log.";
+ let usesCustomInserter = 1;
+ let isCall = 1;
+ let mayLoad = 1;
+ let mayStore = 1;
+ let hasSideEffects = 1;
+}
+def FENTRY_CALL : StandardPseudoInstruction {
+ let OutOperandList = (outs unknown:$dst);
+ let InOperandList = (ins variable_ops);
+ let AsmString = "# FEntry call";
+ let usesCustomInserter = 1;
+ let mayLoad = 1;
+ let mayStore = 1;
+ let hasSideEffects = 1;
+}
+def ICALL_BRANCH_FUNNEL : StandardPseudoInstruction {
+ let OutOperandList = (outs unknown:$dst);
+ let InOperandList = (ins variable_ops);
+ let AsmString = "";
+ let hasSideEffects = 1;
+}
+
+// Generic opcodes used in GlobalISel.
+include "llvm/Target/GenericOpcodes.td"
+
+//===----------------------------------------------------------------------===//
+// AsmParser - This class can be implemented by targets that wish to implement
+// .s file parsing.
+//
+// Subtargets can have multiple different assembly parsers (e.g. AT&T vs Intel
+// syntax on X86 for example).
+//
+class AsmParser {
+ // AsmParserClassName - This specifies the suffix to use for the asmparser
+ // class. Generated AsmParser classes are always prefixed with the target
+ // name.
+ string AsmParserClassName = "AsmParser";
+
+ // AsmParserInstCleanup - If non-empty, this is the name of a custom member
+ // function of the AsmParser class to call on every matched instruction.
+ // This can be used to perform target specific instruction post-processing.
+ string AsmParserInstCleanup = "";
+
+ // ShouldEmitMatchRegisterName - Set to false if the target needs a
+ // hand-written register name matcher.
+ bit ShouldEmitMatchRegisterName = 1;
+
+ // Set to true if the target needs a generated 'alternative register name'
+ // matcher.
+ //
+ // This generates a function which can be used to lookup registers from
+ // their aliases. This function will fail when called on targets where
+ // several registers share the same alias (i.e. not a 1:1 mapping).
+ bit ShouldEmitMatchRegisterAltName = 0;
+
+ // Set to true if MatchRegisterName and MatchRegisterAltName functions
+ // should be generated even if there are duplicate register names. The
+ // target is responsible for coercing aliased registers as necessary
+ // (e.g. in validateTargetOperandClass), and there are no guarantees about
+ // which numeric register identifier will be returned in the case of
+ // multiple matches.
+ bit AllowDuplicateRegisterNames = 0;
+
+ // HasMnemonicFirst - Set to false if target instructions don't always
+ // start with a mnemonic as the first token.
+ bit HasMnemonicFirst = 1;
+
+ // ReportMultipleNearMisses -
+ // When 0, the assembly matcher reports an error for one encoding or operand
+ // that did not match the parsed instruction.
+ // When 1, the assembly matcher returns a list of encodings that were close
+ // to matching the parsed instruction, so as to allow more detailed error
+ // messages.
+ bit ReportMultipleNearMisses = 0;
+}
+def DefaultAsmParser : AsmParser;
+
+//===----------------------------------------------------------------------===//
+// AsmParserVariant - Subtargets can have multiple different assembly parsers
+// (e.g. AT&T vs Intel syntax on X86 for example). This class can be
+// implemented by targets to describe such variants.
+//
+class AsmParserVariant {
+ // Variant - AsmParsers can be of multiple different variants. Variants are
+ // used to support targets that need to parse multiple formats for the
+ // assembly language.
+ int Variant = 0;
+
+ // Name - The AsmParser variant name (e.g., AT&T vs Intel).
+ string Name = "";
+
+ // CommentDelimiter - If given, the delimiter string used to recognize
+ // comments which are hard coded in the .td assembler strings for individual
+ // instructions.
+ string CommentDelimiter = "";
+
+ // RegisterPrefix - If given, the token prefix which indicates a register
+ // token. This is used by the matcher to automatically recognize hard coded
+ // register tokens as constrained registers, instead of tokens, for the
+ // purposes of matching.
+ string RegisterPrefix = "";
+
+ // TokenizingCharacters - Characters that are standalone tokens
+ string TokenizingCharacters = "[]*!";
+
+ // SeparatorCharacters - Characters that are not tokens
+ string SeparatorCharacters = " \t,";
+
+ // BreakCharacters - Characters that start new identifiers
+ string BreakCharacters = "";
+}
+def DefaultAsmParserVariant : AsmParserVariant;
+
+/// AssemblerPredicate - This is a Predicate that can be used when the assembler
+/// matches instructions and aliases.
+class AssemblerPredicate<string cond, string name = ""> {
+ bit AssemblerMatcherPredicate = 1;
+ string AssemblerCondString = cond;
+ string PredicateName = name;
+}
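+
+// Illustrative sketch only (not from the original header): targets typically
+// pair a SubtargetFeature with an AssemblerPredicate, using the Predicate
+// class defined earlier in this file. FeatureFoo, hasFoo and HasFoo are
+// invented names for the example.
+//
+//   def HasFoo : Predicate<"Subtarget->hasFoo()">,
+//                AssemblerPredicate<"FeatureFoo", "foo-extension">;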
+
+/// TokenAlias - This class allows targets to define assembler token
+/// operand aliases. That is, a token literal operand which is equivalent
+/// to another, canonical, token literal. For example, ARM allows:
+/// vmov.u32 s4, #0 -> vmov.i32 s4, #0
+/// 'u32' is a more specific designator for the 32-bit integer type specifier
+/// and is legal for any instruction which accepts 'i32' as a datatype suffix.
+/// def : TokenAlias<".u32", ".i32">;
+///
+/// This works by marking the match class of 'From' as a subclass of the
+/// match class of 'To'.
+class TokenAlias<string From, string To> {
+ string FromToken = From;
+ string ToToken = To;
+}
+
+/// MnemonicAlias - This class allows targets to define assembler mnemonic
+/// aliases. This should be used when all forms of one mnemonic are accepted
+/// with a different mnemonic. For example, X86 allows:
+/// sal %al, 1 -> shl %al, 1
+/// sal %ax, %cl -> shl %ax, %cl
+/// sal %eax, %cl -> shl %eax, %cl
+/// etc. Though "sal" is accepted with many forms, all of them are directly
+/// translated to a shl, so it can be handled with (in the case of X86, it
+/// actually has one for each suffix as well):
+/// def : MnemonicAlias<"sal", "shl">;
+///
+/// Mnemonic aliases are mapped before any other translation in the match phase,
+/// and do allow Requires predicates, e.g.:
+///
+/// def : MnemonicAlias<"pushf", "pushfq">, Requires<[In64BitMode]>;
+/// def : MnemonicAlias<"pushf", "pushfl">, Requires<[In32BitMode]>;
+///
+/// Mnemonic aliases can also be constrained to specific variants, e.g.:
+///
+/// def : MnemonicAlias<"pushf", "pushfq", "att">, Requires<[In64BitMode]>;
+///
+/// If no variant (e.g., "att" or "intel") is specified then the alias is
+/// applied unconditionally.
+class MnemonicAlias<string From, string To, string VariantName = ""> {
+ string FromMnemonic = From;
+ string ToMnemonic = To;
+ string AsmVariantName = VariantName;
+
+ // Predicates - Predicates that must be true for this remapping to happen.
+ list<Predicate> Predicates = [];
+}
+
+/// InstAlias - This defines an alternate assembly syntax that is allowed to
+/// match an instruction that has a different (more canonical) assembly
+/// representation.
+class InstAlias<string Asm, dag Result, int Emit = 1, string VariantName = ""> {
+ string AsmString = Asm; // The .s format to match the instruction with.
+ dag ResultInst = Result; // The MCInst to generate.
+
+ // This determines which order the InstPrinter detects aliases for
+ // printing. A larger value makes the alias more likely to be
+ // emitted. The Instruction's own definition is notionally 0.5, so 0
+ // disables printing and 1 enables it if there are no conflicting aliases.
+ int EmitPriority = Emit;
+
+ // Predicates - Predicates that must be true for this to match.
+ list<Predicate> Predicates = [];
+
+ // If the instruction specified in Result has defined an AsmMatchConverter
+ // then setting this to 1 will cause the alias to use the AsmMatchConverter
+ // function when converting the OperandVector into an MCInst instead of the
+ // function that is generated by the dag Result.
+ // Setting this to 0 will cause the alias to ignore the Result instruction's
+ // defined AsmMatchConverter and instead use the function generated by the
+ // dag Result.
+ bit UseInstAsmMatchConverter = 1;
+
+ // Assembler variant name to use for this alias. If not specified then
+ // assembler variants will be determined based on AsmString
+ string AsmVariantName = VariantName;
+}
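+
+// Illustrative sketch only (not from the original header): an alias that lets
+// the assembler accept "mov $dst, $src" for a hypothetical FooADDri
+// instruction with a zero immediate might look like this (FooADDri and GPR32
+// are invented names):
+//
+//   def : InstAlias<"mov $dst, $src", (FooADDri GPR32:$dst, GPR32:$src, 0)>;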
+
+//===----------------------------------------------------------------------===//
+// AsmWriter - This class can be implemented by targets that need to customize
+// the format of the .s file writer.
+//
+// Subtargets can have multiple different asmwriters (e.g. AT&T vs Intel syntax
+// on X86 for example).
+//
+class AsmWriter {
+ // AsmWriterClassName - This specifies the suffix to use for the asmwriter
+ // class. Generated AsmWriter classes are always prefixed with the target
+ // name.
+ string AsmWriterClassName = "InstPrinter";
+
+ // PassSubtarget - Determines whether MCSubtargetInfo should be passed to
+ // the various print methods.
+ // FIXME: Remove after all ports are updated.
+ int PassSubtarget = 0;
+
+ // Variant - AsmWriters can be of multiple different variants. Variants are
+ // used to support targets that need to emit assembly code in ways that are
+ // mostly the same for different targets, but have minor differences in
+ // syntax. If the asm string contains {|} characters, this integer
+ // will specify which alternative to use. For example, "{x|y|z}" with Variant
+ // == 1 will expand to "y".
+ int Variant = 0;
+}
+def DefaultAsmWriter : AsmWriter;
+
+
+//===----------------------------------------------------------------------===//
+// Target - This class contains the "global" target information
+//
+class Target {
+ // InstructionSet - Instruction set description for this target.
+ InstrInfo InstructionSet;
+
+ // AssemblyParsers - The AsmParser instances available for this target.
+ list<AsmParser> AssemblyParsers = [DefaultAsmParser];
+
+ /// AssemblyParserVariants - The AsmParserVariant instances available for
+ /// this target.
+ list<AsmParserVariant> AssemblyParserVariants = [DefaultAsmParserVariant];
+
+ // AssemblyWriters - The AsmWriter instances available for this target.
+ list<AsmWriter> AssemblyWriters = [DefaultAsmWriter];
+
+ // AllowRegisterRenaming - Controls whether this target allows
+ // post-register-allocation renaming of registers. This is done by
+ // setting hasExtraDefRegAllocReq and hasExtraSrcRegAllocReq to 1
+ // for all opcodes if this flag is set to 0.
+ int AllowRegisterRenaming = 0;
+}
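+
+// Illustrative sketch only (not from the original header): a backend's
+// top-level .td typically instantiates an InstrInfo and a Target def along
+// these lines (the Foo* names are hypothetical):
+//
+//   def FooInstrInfo : InstrInfo;
+//   def FooAsmWriter : AsmWriter { let Variant = 0; }
+//   def Foo : Target {
+//     let InstructionSet = FooInstrInfo;
+//     let AssemblyWriters = [FooAsmWriter];
+//   }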
+
+//===----------------------------------------------------------------------===//
+// SubtargetFeature - A characteristic of the chip set.
+//
+class SubtargetFeature<string n, string a, string v, string d,
+ list<SubtargetFeature> i = []> {
+ // Name - Feature name. Used by command line (-mattr=) to determine the
+ // appropriate target chip.
+ //
+ string Name = n;
+
+ // Attribute - Attribute to be set by feature.
+ //
+ string Attribute = a;
+
+ // Value - The value the attribute is to be set to by the feature.
+ //
+ string Value = v;
+
+ // Desc - Feature description. Used by command line (-mattr=) to display help
+ // information.
+ //
+ string Desc = d;
+
+ // Implies - Features that this feature implies are present. If one of those
+ // features isn't set, then this one shouldn't be set either.
+ //
+ list<SubtargetFeature> Implies = i;
+}
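+
+// Illustrative sketch only (not from the original header): features are
+// usually declared with a boolean subtarget attribute and optional implied
+// features, e.g. (hypothetical names):
+//
+//   def FeatureFoo  : SubtargetFeature<"foo", "HasFoo", "true",
+//                                      "Enable the Foo extension">;
+//   def FeatureFoo2 : SubtargetFeature<"foo2", "HasFoo2", "true",
+//                                      "Enable Foo2", [FeatureFoo]>;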
+
+/// Specifies a Subtarget feature that this instruction is deprecated on.
+class Deprecated<SubtargetFeature dep> {
+ SubtargetFeature DeprecatedFeatureMask = dep;
+}
+
+/// A custom predicate used to determine if an instruction is
+/// deprecated or not.
+class ComplexDeprecationPredicate<string dep> {
+ string ComplexDeprecationPredicate = dep;
+}
+
+//===----------------------------------------------------------------------===//
+// Processor chip sets - These values represent each of the chip sets supported
+// by the scheduler. Each Processor definition requires corresponding
+// instruction itineraries.
+//
+class Processor<string n, ProcessorItineraries pi, list<SubtargetFeature> f> {
+ // Name - Chip set name. Used by command line (-mcpu=) to determine the
+ // appropriate target chip.
+ //
+ string Name = n;
+
+ // SchedModel - The machine model for scheduling and instruction cost.
+ //
+ SchedMachineModel SchedModel = NoSchedModel;
+
+ // ProcItin - The scheduling information for the target processor.
+ //
+ ProcessorItineraries ProcItin = pi;
+
+ // Features - List of subtarget features for this processor.
+ list<SubtargetFeature> Features = f;
+}
+
+// ProcessorModel allows subtargets to specify the more general
+// SchedMachineModel instead of a ProcessorItinerary. Subtargets will
+// gradually move to this newer form.
+//
+// Although this class always passes NoItineraries to the Processor
+// class, the SchedMachineModel may still define valid Itineraries.
+class ProcessorModel<string n, SchedMachineModel m, list<SubtargetFeature> f>
+ : Processor<n, NoItineraries, f> {
+ let SchedModel = m;
+}
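+
+// Illustrative sketch only (not from the original header): chip sets are then
+// declared either with itineraries or with a SchedMachineModel, e.g.
+// (hypothetical names):
+//
+//   def : Processor<"foo-cpu", FooItineraries, [FeatureFoo]>;
+//   def : ProcessorModel<"foo-cpu2", FooSchedModel, [FeatureFoo, FeatureFoo2]>;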
+
+//===----------------------------------------------------------------------===//
+// InstrMapping - This class is used to create mapping tables to relate
+// instructions with each other based on the values specified in RowFields,
+// ColFields, KeyCol and ValueCols.
+//
+class InstrMapping {
+ // FilterClass - Used to limit search space only to the instructions that
+ // define the relationship modeled by this InstrMapping record.
+ string FilterClass;
+
+ // RowFields - List of fields/attributes that should be the same for all the
+ // instructions in a row of the relation table. Think of this as a set of
+ // properties shared by all the instructions related by this relationship
+ // model and is used to categorize instructions into subgroups. For instance,
+ // if we want to define a relation that maps 'Add' instruction to its
+ // predicated forms, we can define RowFields like this:
+ //
+ // let RowFields = BaseOp
+ // All add instructions, predicated or non-predicated, will have to set their BaseOp
+ // to the same value.
+ //
+ // def Add: { let BaseOp = 'ADD'; let predSense = 'nopred' }
+ // def Add_predtrue: { let BaseOp = 'ADD'; let predSense = 'true' }
+ // def Add_predfalse: { let BaseOp = 'ADD'; let predSense = 'false' }
+ list<string> RowFields = [];
+
+ // List of fields/attributes that are the same for all the instructions
+ // in a column of the relation table.
+ // Ex: let ColFields = 'predSense' -- It means that the columns are arranged
+ // based on the 'predSense' values. All the instructions in a specific
+ // column have the same value and it is fixed for the column according
+ // to the values set in 'ValueCols'.
+ list<string> ColFields = [];
+
+ // Values for the fields/attributes listed in 'ColFields'.
+ // Ex: let KeyCol = 'nopred' -- It means that the key instruction (instruction
+ // that models this relation) should be non-predicated.
+ // In the example above, 'Add' is the key instruction.
+ list<string> KeyCol = [];
+
+ // List of values for the fields/attributes listed in 'ColFields', one for
+ // each column in the relation table.
+ //
+ // Ex: let ValueCols = [['true'],['false']] -- It adds two columns in the
+ // table. The first column requires all the instructions to have predSense
+ // set to 'true' and the second column requires it to be 'false'.
+ list<list<string> > ValueCols = [];
+}
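+
+// Illustrative sketch only (not from the original header): a complete mapping
+// from non-predicated to predicated forms, following the example above, could
+// be declared roughly as below (getPredOpcode and the "PredRel" filter class
+// are invented names):
+//
+//   def getPredOpcode : InstrMapping {
+//     let FilterClass = "PredRel";
+//     let RowFields = ["BaseOp"];
+//     let ColFields = ["predSense"];
+//     let KeyCol = ["nopred"];
+//     let ValueCols = [["true"], ["false"]];
+//   }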
+
+//===----------------------------------------------------------------------===//
+// Pull in the common support for calling conventions.
+//
+include "llvm/Target/TargetCallingConv.td"
+
+//===----------------------------------------------------------------------===//
+// Pull in the common support for DAG isel generation.
+//
+include "llvm/Target/TargetSelectionDAG.td"
+
+//===----------------------------------------------------------------------===//
+// Pull in the common support for Global ISel register bank info generation.
+//
+include "llvm/Target/GlobalISel/RegisterBank.td"
+
+//===----------------------------------------------------------------------===//
+// Pull in the common support for DAG isel generation.
+//
+include "llvm/Target/GlobalISel/Target.td"
+
+//===----------------------------------------------------------------------===//
+// Pull in the common support for the Global ISel DAG-based selector generation.
+//
+include "llvm/Target/GlobalISel/SelectionDAGCompat.td"
diff --git a/capstone/suite/synctools/tablegen/include/llvm/Target/TargetCallingConv.td b/capstone/suite/synctools/tablegen/include/llvm/Target/TargetCallingConv.td
new file mode 100644
index 000000000..95d2b4226
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/Target/TargetCallingConv.td
@@ -0,0 +1,187 @@
+//===- TargetCallingConv.td - Target Calling Conventions ---*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines the target-independent interfaces with which targets
+// describe their calling conventions.
+//
+//===----------------------------------------------------------------------===//
+
+class CCAction;
+class CallingConv;
+
+/// CCCustom - Calls a custom arg handling function.
+class CCCustom<string fn> : CCAction {
+ string FuncName = fn;
+}
+
+/// CCPredicateAction - Instances of this class check some predicate, then
+/// delegate to another action if the predicate is true.
+class CCPredicateAction<CCAction A> : CCAction {
+ CCAction SubAction = A;
+}
+
+/// CCIfType - If the current argument is one of the specified types, apply
+/// Action A.
+class CCIfType<list<ValueType> vts, CCAction A> : CCPredicateAction<A> {
+ list<ValueType> VTs = vts;
+}
+
+/// CCIf - If the predicate matches, apply A.
+class CCIf<string predicate, CCAction A> : CCPredicateAction<A> {
+ string Predicate = predicate;
+}
+
+/// CCIfByVal - If the current argument has ByVal parameter attribute, apply
+/// Action A.
+class CCIfByVal<CCAction A> : CCIf<"ArgFlags.isByVal()", A> {
+}
+
+/// CCIfSwiftSelf - If the current argument has swiftself parameter attribute,
+/// apply Action A.
+class CCIfSwiftSelf<CCAction A> : CCIf<"ArgFlags.isSwiftSelf()", A> {
+}
+
+/// CCIfSwiftError - If the current argument has swifterror parameter attribute,
+/// apply Action A.
+class CCIfSwiftError<CCAction A> : CCIf<"ArgFlags.isSwiftError()", A> {
+}
+
+/// CCIfConsecutiveRegs - If the current argument has InConsecutiveRegs
+/// parameter attribute, apply Action A.
+class CCIfConsecutiveRegs<CCAction A> : CCIf<"ArgFlags.isInConsecutiveRegs()", A> {
+}
+
+/// CCIfCC - Match if the current calling convention is 'CC'.
+class CCIfCC<string CC, CCAction A>
+ : CCIf<!strconcat("State.getCallingConv() == ", CC), A> {}
+
+/// CCIfInReg - If this argument is marked with the 'inreg' attribute, apply
+/// the specified action.
+class CCIfInReg<CCAction A> : CCIf<"ArgFlags.isInReg()", A> {}
+
+/// CCIfNest - If this argument is marked with the 'nest' attribute, apply
+/// the specified action.
+class CCIfNest<CCAction A> : CCIf<"ArgFlags.isNest()", A> {}
+
+/// CCIfSplit - If this argument is marked with the 'split' attribute, apply
+/// the specified action.
+class CCIfSplit<CCAction A> : CCIf<"ArgFlags.isSplit()", A> {}
+
+/// CCIfSRet - If this argument is marked with the 'sret' attribute, apply
+/// the specified action.
+class CCIfSRet<CCAction A> : CCIf<"ArgFlags.isSRet()", A> {}
+
+/// CCIfVarArg - If the current function is vararg - apply the action
+class CCIfVarArg<CCAction A> : CCIf<"State.isVarArg()", A> {}
+
+/// CCIfNotVarArg - If the current function is not vararg - apply the action
+class CCIfNotVarArg<CCAction A> : CCIf<"!State.isVarArg()", A> {}
+
+/// CCAssignToReg - This action matches if there is a register in the specified
+/// list that is still available. If so, it assigns the value to the first
+/// available register and succeeds.
+class CCAssignToReg<list<Register> regList> : CCAction {
+ list<Register> RegList = regList;
+}
+
+/// CCAssignToRegWithShadow - Same as CCAssignToReg, but with a list of
+/// registers which become shadowed when the corresponding register is used.
+class CCAssignToRegWithShadow<list<Register> regList,
+ list<Register> shadowList> : CCAction {
+ list<Register> RegList = regList;
+ list<Register> ShadowRegList = shadowList;
+}
+
+/// CCAssignToStack - This action always matches: it assigns the value to a
+/// stack slot of the specified size and alignment on the stack. If size is
+/// zero then the ABI size is used; if align is zero then the ABI alignment
+/// is used - these may depend on the target or subtarget.
+class CCAssignToStack<int size, int align> : CCAction {
+ int Size = size;
+ int Align = align;
+}
+
+/// CCAssignToStackWithShadow - Same as CCAssignToStack, but with a list of
+/// registers to be shadowed. Note that, unlike CCAssignToRegWithShadow, this
+/// shadows ALL of the registers in shadowList.
+class CCAssignToStackWithShadow<int size,
+ int align,
+ list<Register> shadowList> : CCAction {
+ int Size = size;
+ int Align = align;
+ list<Register> ShadowRegList = shadowList;
+}
+
+/// CCPassByVal - This action always matches: it assigns the value to a stack
+/// slot to implement ByVal aggregate parameter passing. Size and alignment
+/// specify the minimum size and alignment for the stack slot.
+class CCPassByVal<int size, int align> : CCAction {
+ int Size = size;
+ int Align = align;
+}
+
+/// CCPromoteToType - If applied, this promotes the specified current value to
+/// the specified type.
+class CCPromoteToType<ValueType destTy> : CCAction {
+ ValueType DestTy = destTy;
+}
+
+/// CCPromoteToUpperBitsInType - If applied, this promotes the specified current
+/// value to the specified type and shifts the value into the upper bits.
+class CCPromoteToUpperBitsInType<ValueType destTy> : CCAction {
+ ValueType DestTy = destTy;
+}
+
+/// CCBitConvertToType - If applied, this bitconverts the specified current
+/// value to the specified type.
+class CCBitConvertToType<ValueType destTy> : CCAction {
+ ValueType DestTy = destTy;
+}
+
+/// CCPassIndirect - If applied, this stores the value to the stack and passes
+/// the pointer as a normal argument.
+class CCPassIndirect<ValueType destTy> : CCAction {
+ ValueType DestTy = destTy;
+}
+
+/// CCDelegateTo - This action invokes the specified sub-calling-convention. It
+/// is successful if the specified CC matches.
+class CCDelegateTo<CallingConv cc> : CCAction {
+ CallingConv CC = cc;
+}
+
+/// CallingConv - An instance of this is used to define each calling convention
+/// that the target supports.
+class CallingConv<list<CCAction> actions> {
+ list<CCAction> Actions = actions;
+ bit Custom = 0;
+}
+
+/// CustomCallingConv - An instance of this is used to declare calling
+/// conventions that are implemented using a custom function of the same name.
+class CustomCallingConv : CallingConv<[]> {
+ let Custom = 1;
+}
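+
+// Illustrative sketch only (not from the original header): a small calling
+// convention combining the actions above might look like this (CC_Foo and the
+// register names R0-R3 are hypothetical):
+//
+//   def CC_Foo : CallingConv<[
+//     CCIfType<[i8, i16], CCPromoteToType<i32>>,
+//     CCIfType<[i32], CCAssignToReg<[R0, R1, R2, R3]>>,
+//     CCAssignToStack<4, 4>
+//   ]>;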
+
+/// CalleeSavedRegs - A list of callee saved registers for a given calling
+/// convention. The order of registers is used by PrologEpilogInsertion when
+/// allocating stack slots for saved registers.
+///
+/// For each CalleeSavedRegs def, TableGen will emit a FOO_SaveList array for
+/// returning from getCalleeSavedRegs(), and a FOO_RegMask bit mask suitable for
+/// returning from getCallPreservedMask().
+class CalleeSavedRegs<dag saves> {
+ dag SaveList = saves;
+
+ // Registers that are also preserved across function calls, but should not be
+ // included in the generated FOO_SaveList array. These registers will be
+ // included in the FOO_RegMask bit mask. This can be used for registers that
+ // are saved automatically, like the SPARC register windows.
+ dag OtherPreserved;
+}
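+
+// Illustrative sketch only (not from the original header): a callee-saved
+// register list is typically just a dag of registers, e.g. (hypothetical
+// register names):
+//
+//   def CSR_Foo : CalleeSavedRegs<(add R4, R5, R6, R7, LR)>;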
diff --git a/capstone/suite/synctools/tablegen/include/llvm/Target/TargetInstrPredicate.td b/capstone/suite/synctools/tablegen/include/llvm/Target/TargetInstrPredicate.td
new file mode 100644
index 000000000..8d57cae02
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/Target/TargetInstrPredicate.td
@@ -0,0 +1,197 @@
+//===- TargetInstrPredicate.td - ---------------------------*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines MCInstPredicate classes and its subclasses.
+//
+// MCInstPredicate is used to describe constraints on the opcode/operand(s) of
+// an instruction. Each MCInstPredicate class has a well-known semantic, and it
+// is used by a PredicateExpander to generate code for MachineInstr and/or
+// MCInst.
+//
+// MCInstPredicate definitions can be used to construct MCSchedPredicate
+// definitions. An MCSchedPredicate can be used in place of a SchedPredicate
+// when defining SchedReadVariant and SchedWriteVariant used by a processor
+// scheduling model.
+//
+// Here is an example of MCInstPredicate definition:
+//
+// def MCInstPredicateExample : CheckAll<[
+// CheckOpcode<[BLR]>,
+// CheckIsRegOperand<0>,
+// CheckNot<CheckRegOperand<0, LR>>]>;
+//
+// Predicate `MCInstPredicateExample` checks that the input machine instruction
+// is a BLR, and that the operand at index 0 is not register `LR`.
+//
+// That predicate could be used to rewrite the following definition (from
+// AArch64SchedExynosM3.td):
+//
+// def M3BranchLinkFastPred : SchedPredicate<[{
+// MI->getOpcode() == AArch64::BLR &&
+// MI->getOperand(0).isReg() &&
+// MI->getOperand(0).getReg() != AArch64::LR}]>;
+//
+// MCInstPredicate definitions are used to construct MCSchedPredicate (see the
+// definition of class MCSchedPredicate in llvm/Target/TargetSchedule.td). An
+// MCSchedPredicate can be used by a `SchedVar` to associate a predicate with a
+// list of SchedReadWrites. Note that `SchedVar`s are used to create SchedVariant
+// definitions.
+//
+// Each MCInstPredicate class has a well known semantic. For example,
+// `CheckOpcode` is only used to check the instruction opcode value.
+//
+// MCInstPredicate classes allow the definition of predicates in a declarative
+// way. These predicates don't require a custom block of C++, and can be used
+// to define conditions on instructions without being bound to a particular
+// representation (i.e. MachineInstr vs MCInst).
+//
+// It also means that tablegen backends must know how to parse and expand them
+// into code that works on MCInst (or MachineInstr).
+//
+// Instances of class PredicateExpander (see utils/Tablegen/PredicateExpander.h)
+// know how to expand a predicate. For each MCInstPredicate class, there must be
+// an "expand" method available in the PredicateExpander interface.
+//
+// For example, a `CheckOpcode` predicate is expanded using method
+// `PredicateExpander::expandCheckOpcode()`.
+//
+// New MCInstPredicate classes must be added to this file. For each new class
+// XYZ, an "expandXYZ" method must be added to the PredicateExpander.
+//
+//===----------------------------------------------------------------------===//
+
+// Forward declarations.
+class Instruction;
+
+// A generic machine instruction predicate.
+class MCInstPredicate;
+
+class MCTrue : MCInstPredicate; // A predicate that always evaluates to True.
+class MCFalse : MCInstPredicate; // A predicate that always evaluates to False.
+def TruePred : MCTrue;
+def FalsePred : MCFalse;
+
+// A predicate used to negate the outcome of another predicate.
+// It allows one to easily express "set difference" operations. For example, it
+// makes it easy to describe a check that tests if an opcode is not part of a
+// set of opcodes.
+class CheckNot<MCInstPredicate P> : MCInstPredicate {
+ MCInstPredicate Pred = P;
+}
+
+// This class is used as a building block to define predicates on instruction
+// operands. It is used to reference a specific machine operand.
+class MCOperandPredicate<int Index> : MCInstPredicate {
+ int OpIndex = Index;
+}
+
+// Return true if machine operand at position `Index` is a register operand.
+class CheckIsRegOperand<int Index> : MCOperandPredicate<Index>;
+
+// Return true if machine operand at position `Index` is an immediate operand.
+class CheckIsImmOperand<int Index> : MCOperandPredicate<Index>;
+
+// Check if machine operands at index `First` and index `Second` both reference
+// the same register.
+class CheckSameRegOperand<int First, int Second> : MCInstPredicate {
+ int FirstIndex = First;
+ int SecondIndex = Second;
+}
+
+// Check that the machine register operand at position `Index` references
+// register R. This predicate assumes that we already checked that the machine
+// operand at position `Index` is a register operand.
+class CheckRegOperand<int Index, Register R> : MCOperandPredicate<Index> {
+ Register Reg = R;
+}
+
+// Check if register operand at index `Index` is the invalid register.
+class CheckInvalidRegOperand<int Index> : MCOperandPredicate<Index>;
+
+// Check that the operand at position `Index` is immediate `Imm`.
+class CheckImmOperand<int Index, int Imm> : MCOperandPredicate<Index> {
+ int ImmVal = Imm;
+}
+
+// Similar to CheckImmOperand, however the immediate is not a literal number.
+// This is useful when we want to compare the value of an operand against an
+// enum value, and we know the actual integer value of that enum.
+class CheckImmOperand_s<int Index, string Value> : MCOperandPredicate<Index> {
+ string ImmVal = Value;
+}
+
+// Check that the operand at position `Index` is immediate value zero.
+class CheckZeroOperand<int Index> : CheckImmOperand<Index, 0>;
+
+// Check that the instruction has exactly `Num` operands.
+class CheckNumOperands<int Num> : MCInstPredicate {
+ int NumOps = Num;
+}
+
+// Check that the instruction opcode is one of the opcodes in set `Opcodes`.
+// This is a simple set membership query. The easier way to check if an opcode
+// is not a member of the set is by using a `CheckNot<CheckOpcode<[...]>>`
+// sequence.
+class CheckOpcode<list<Instruction> Opcodes> : MCInstPredicate {
+ list<Instruction> ValidOpcodes = Opcodes;
+}
+
+// Check that the instruction opcode is a pseudo opcode member of the set
+// `Opcodes`. This check is always expanded to "false" if we are generating
+// code for MCInst.
+class CheckPseudo<list<Instruction> Opcodes> : CheckOpcode<Opcodes>;
+
+// A non-portable predicate. Only to be used as a last resort when a block of code
+// cannot possibly be converted in a declarative way using other MCInstPredicate
+// classes. This check is always expanded to "false" when generating code for
+// MCInst.
+class CheckNonPortable<string Code> : MCInstPredicate {
+ string CodeBlock = Code;
+}
+
+// A sequence of predicates. It is used as the base class for CheckAll and
+// CheckAny. It allows describing compositions of predicates.
+class CheckPredicateSequence<list<MCInstPredicate> Preds> : MCInstPredicate {
+ list<MCInstPredicate> Predicates = Preds;
+}
+
+// Check that all of the predicates in `Preds` evaluate to true.
+class CheckAll<list<MCInstPredicate> Sequence>
+ : CheckPredicateSequence<Sequence>;
+
+// Check that at least one of the predicates in `Preds` evaluates to true.
+class CheckAny<list<MCInstPredicate> Sequence>
+ : CheckPredicateSequence<Sequence>;
+
+// Check that a call to method `Name` in class "XXXGenInstrInfo" (where XXX is
+// the `Target` name) returns true.
+//
+// TIIPredicate definitions are used to model calls to the target-specific
+// InstrInfo. A TIIPredicate is treated specially by the InstrInfoEmitter
+// tablegen backend, which will use it to automatically generate a definition in
+// the target specific `GenInstrInfo` class.
+class TIIPredicate<string Target, string Name, MCInstPredicate P> : MCInstPredicate {
+ string TargetName = Target;
+ string FunctionName = Name;
+ MCInstPredicate Pred = P;
+}
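+
+// Illustrative sketch only (not part of the original header): a target could
+// combine the predicate classes above and expose the result through its
+// GenInstrInfo class roughly like this (the Foo* names and the "zero idiom"
+// check are invented for the example):
+//
+//   def FooZeroIdiomPred : CheckAll<[CheckIsRegOperand<1>,
+//                                    CheckSameRegOperand<1, 2>]>;
+//   def FooIsZeroIdiom : TIIPredicate<"Foo", "isZeroIdiom", FooZeroIdiomPred>;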
+
+// A function predicate that takes as input a machine instruction, and returns
+// a boolean value.
+//
+// This predicate is expanded into a function call by the PredicateExpander.
+// In particular, the PredicateExpander would either expand this predicate into
+// a call to `MCInstFn`, or into a call to `MachineInstrFn`, depending on whether
+// it is lowering predicates for MCInst or MachineInstr.
+//
+// In this context, `MCInstFn` and `MachineInstrFn` are both function names.
+class CheckFunctionPredicate<string MCInstFn, string MachineInstrFn> : MCInstPredicate {
+ string MCInstFnName = MCInstFn;
+ string MachineInstrFnName = MachineInstrFn;
+}
diff --git a/capstone/suite/synctools/tablegen/include/llvm/Target/TargetItinerary.td b/capstone/suite/synctools/tablegen/include/llvm/Target/TargetItinerary.td
new file mode 100644
index 000000000..182054d84
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/Target/TargetItinerary.td
@@ -0,0 +1,152 @@
+//===- TargetItinerary.td - Target Itinerary Description ---*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines the target-independent scheduling interfaces
+// which should be implemented by each target that uses instruction
+// itineraries for scheduling. Itineraries are detailed reservation
+// tables for each instruction class. They are most appropriate for
+// in-order machines with complicated scheduling or bundling constraints.
+//
+//===----------------------------------------------------------------------===//
+
+//===----------------------------------------------------------------------===//
+// Processor functional unit - These values represent the functional units
+// available across all chip sets for the target. E.g., IntUnit, FPUnit, ...
+// These may be independent values for each chip set or may be shared across
+// all chip sets of the target. Each functional unit is treated as a resource
+// during scheduling and has an effect on instruction order based on
+// availability during a time interval.
+//
+class FuncUnit;
+
+//===----------------------------------------------------------------------===//
+// Pipeline bypass / forwarding - These values specify the symbolic names of
+// pipeline bypasses which can be used to forward the results of instructions
+// to their uses.
+class Bypass;
+def NoBypass : Bypass;
+
+class ReservationKind<bits<1> val> {
+ int Value = val;
+}
+
+def Required : ReservationKind<0>;
+def Reserved : ReservationKind<1>;
+
+//===----------------------------------------------------------------------===//
+// Instruction stage - These values represent a non-pipelined step in
+// the execution of an instruction. Cycles represents the number of
+// discrete time slots needed to complete the stage. Units represent
+// the choice of functional units that can be used to complete the
+// stage. E.g., IntUnit1, IntUnit2. TimeInc indicates how many cycles
+// should elapse from the start of this stage to the start of the next
+// stage in the itinerary. For example:
+//
+// A stage is specified in one of two ways:
+//
+// InstrStage<1, [FU_x, FU_y]> - TimeInc defaults to Cycles
+// InstrStage<1, [FU_x, FU_y], 0> - TimeInc explicit
+//
+
+class InstrStage<int cycles, list<FuncUnit> units,
+ int timeinc = -1,
+ ReservationKind kind = Required> {
+ int Cycles = cycles; // length of stage in machine cycles
+ list<FuncUnit> Units = units; // choice of functional units
+ int TimeInc = timeinc; // cycles till start of next stage
+ int Kind = kind.Value; // kind of FU reservation
+}
+
+//===----------------------------------------------------------------------===//
+// Instruction itinerary - An itinerary represents a sequential series of steps
+// required to complete an instruction. Itineraries are represented as lists of
+// instruction stages.
+//
+
+//===----------------------------------------------------------------------===//
+// Instruction itinerary classes - These values represent 'named' instruction
+// itinerary. Using named itineraries simplifies managing groups of
+// instructions across chip sets. An instruction uses the same itinerary class
+// across all chip sets. Thus a new chip set can be added without modifying
+// instruction information.
+//
+class InstrItinClass;
+def NoItinerary : InstrItinClass;
+
+//===----------------------------------------------------------------------===//
+// Instruction itinerary data - These values provide a runtime map of an
+// instruction itinerary class (name) to its itinerary data.
+//
+// NumMicroOps represents the number of micro-operations that each instruction
+// in the class is decoded to. If the number is zero, then it means the
+// instruction can decode into a variable number of micro-ops and it must be
+// determined dynamically. This directly relates to the itineraries'
+// global IssueWidth property, which constrains the number of micro-ops
+// that can issue per cycle.
+//
+// OperandCycles are optional "cycle counts". They specify the cycle after
+// instruction issue at which the values corresponding to specific operand
+// indices are defined or read. Bypasses are optional "pipeline forwarding
+// paths": if a def by an instruction is available on a specific bypass and
+// the use can read from the same bypass, then the operand use latency is
+// reduced by one.
+//
+// InstrItinData<IIC_iLoad_i , [InstrStage<1, [A9_Pipe1]>,
+// InstrStage<1, [A9_AGU]>],
+// [3, 1], [A9_LdBypass]>,
+// InstrItinData<IIC_iMVNr , [InstrStage<1, [A9_Pipe0, A9_Pipe1]>],
+// [1, 1], [NoBypass, A9_LdBypass]>,
+//
+// In this example, an instruction of class IIC_iLoad_i reads its input on cycle 1
+// (after issue) and the result of the load is available on cycle 3. The result
+// is available via forwarding path A9_LdBypass. If it's used by the first
+// source operand of instructions of IIC_iMVNr class, then the operand latency
+// is reduced by 1.
+class InstrItinData<InstrItinClass Class, list<InstrStage> stages,
+ list<int> operandcycles = [],
+ list<Bypass> bypasses = [], int uops = 1> {
+ InstrItinClass TheClass = Class;
+ int NumMicroOps = uops;
+ list<InstrStage> Stages = stages;
+ list<int> OperandCycles = operandcycles;
+ list<Bypass> Bypasses = bypasses;
+}
+
+//===----------------------------------------------------------------------===//
+// Processor itineraries - These values represent the set of all itinerary
+// classes for a given chip set.
+//
+// Set property values to -1 to use the default.
+// See InstrItineraryProps for comments and defaults.
+class ProcessorItineraries<list<FuncUnit> fu, list<Bypass> bp,
+ list<InstrItinData> iid> {
+ list<FuncUnit> FU = fu;
+ list<Bypass> BP = bp;
+ list<InstrItinData> IID = iid;
+}
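+
+// Illustrative sketch only (not from the original header): a subtarget
+// typically groups its functional units, bypasses and InstrItinData entries
+// into one ProcessorItineraries def, e.g. (all names below are hypothetical):
+//
+//   def FooItineraries : ProcessorItineraries<
+//     [FooALU, FooLSU], [FooLdBypass],
+//     [InstrItinData<IIC_iALU, [InstrStage<1, [FooALU]>], [1, 1]>,
+//      InstrItinData<IIC_iLoad, [InstrStage<2, [FooLSU]>], [3, 1], [FooLdBypass]>]>;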
+
+// NoItineraries - A marker that can be used by processors without schedule
+// info. Subtargets using NoItineraries can bypass the scheduler's
+// expensive HazardRecognizer because no reservation table is needed.
+def NoItineraries : ProcessorItineraries<[], [], []>;
+
+//===----------------------------------------------------------------------===//
+// Combo Function Unit data - This is a map of combo function unit names to
+// the list of functional units that are included in the combination.
+//
+class ComboFuncData<FuncUnit ComboFunc, list<FuncUnit> funclist> {
+ FuncUnit TheComboFunc = ComboFunc;
+ list<FuncUnit> FuncList = funclist;
+}
+
+//===----------------------------------------------------------------------===//
+// Combo Function Units - This is a list of all combo function unit data.
+class ComboFuncUnits<list<ComboFuncData> cfd> {
+ list<ComboFuncData> CFD = cfd;
+}
+
diff --git a/capstone/suite/synctools/tablegen/include/llvm/Target/TargetSchedule.td b/capstone/suite/synctools/tablegen/include/llvm/Target/TargetSchedule.td
new file mode 100644
index 000000000..6fd2d5b78
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/Target/TargetSchedule.td
@@ -0,0 +1,553 @@
+//===- TargetSchedule.td - Target Independent Scheduling ---*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines the target-independent scheduling interfaces which should
+// be implemented by each target that uses TableGen-based scheduling.
+//
+// The SchedMachineModel is defined by subtargets for three categories of data:
+// 1. Basic properties for coarse grained instruction cost model.
+// 2. Scheduler Read/Write resources for simple per-opcode cost model.
+// 3. Instruction itineraries for detailed reservation tables.
+//
+// (1) Basic properties are defined by the SchedMachineModel
+// class. Target hooks allow subtargets to associate opcodes with
+// those properties.
+//
+// (2) A per-operand machine model can be implemented in any
+// combination of the following ways:
+//
+// A. Associate per-operand SchedReadWrite types with Instructions by
+// modifying the Instruction definition to inherit from Sched. For
+// each subtarget, define WriteRes and ReadAdvance to associate
+// processor resources and latency with each SchedReadWrite type.
+//
+// B. In each instruction definition, name an ItineraryClass. For each
+// subtarget, define ItinRW entries to map ItineraryClass to
+// per-operand SchedReadWrite types. Unlike method A, these types may
+// be subtarget specific and can be directly associated with resources
+// by defining SchedWriteRes and SchedReadAdvance.
+//
+// C. In the subtarget, map SchedReadWrite types to specific
+// opcodes. This overrides any SchedReadWrite types or
+// ItineraryClasses defined by the Instruction. As in method B, the
+// subtarget can directly associate resources with SchedReadWrite
+// types by defining SchedWriteRes and SchedReadAdvance.
+//
+// D. In either the target or subtarget, define SchedWriteVariant or
+// SchedReadVariant to map one SchedReadWrite type onto another
+// sequence of SchedReadWrite types. This allows dynamic selection of
+// an instruction's machine model via custom C++ code. It also allows
+// a machine-independent SchedReadWrite type to map to a sequence of
+// machine-dependent types.
+//
+// (3) A per-pipeline-stage machine model can be implemented by providing
+// Itineraries in addition to mapping instructions to ItineraryClasses.
+//===----------------------------------------------------------------------===//
+
+// Include legacy support for instruction itineraries.
+include "llvm/Target/TargetItinerary.td"
+
+class Instruction; // Forward def
+
+class Predicate; // Forward def
+
+// DAG operator that interprets the DAG args as Instruction defs.
+def instrs;
+
+// DAG operator that interprets each DAG arg as a regex pattern for
+// matching Instruction opcode names.
+// The regex must match the beginning of the opcode (as in Python re.match).
+// To avoid matching prefixes, append '$' to the pattern.
+def instregex;
+
+// Define the SchedMachineModel and provide basic properties for
+// coarse grained instruction cost model. Default values for the
+// properties are defined in MCSchedModel. A value of "-1" in the
+// target description's SchedMachineModel indicates that the property
+// is not overridden by the target.
+//
+// Target hooks allow subtargets to associate LoadLatency and
+// HighLatency with groups of opcodes.
+//
+// See MCSchedule.h for detailed comments.
+class SchedMachineModel {
+ int IssueWidth = -1; // Max micro-ops that may be scheduled per cycle.
+ int MicroOpBufferSize = -1; // Max micro-ops that can be buffered.
+ int LoopMicroOpBufferSize = -1; // Max micro-ops that can be buffered for
+ // optimized loop dispatch/execution.
+ int LoadLatency = -1; // Cycles for loads to access the cache.
+ int HighLatency = -1; // Approximation of cycles for "high latency" ops.
+ int MispredictPenalty = -1; // Extra cycles for a mispredicted branch.
+
+ // Per-cycle resources tables.
+ ProcessorItineraries Itineraries = NoItineraries;
+
+ bit PostRAScheduler = 0; // Enable Post RegAlloc Scheduler pass.
+
+ // Subtargets that define a model for only a subset of instructions
+ // that have a scheduling class (itinerary class or SchedRW list)
+ // and may actually be generated for that subtarget must clear this
+ // bit. Otherwise, the scheduler considers an unmodelled opcode to
+ // be an error. This should only be set during initial bringup,
+ // or there will be no way to catch simple errors in the model
+ // resulting from changes to the instruction definitions.
+ bit CompleteModel = 1;
+
+  // Indicates that we should do full overlap checking for multiple InstRW
+  // definitions defining the same instructions within the same SchedMachineModel.
+ // FIXME: Remove when all in tree targets are clean with the full check
+ // enabled.
+ bit FullInstRWOverlapCheck = 1;
+
+  // A processor may implement only part of the published ISA, due either to
+  // new ISA extensions (e.g. Pentium 4 doesn't have AVX) or to implementation
+  // choices (e.g. ARM/MIPS/PowerPC/SPARC soft-float cores).
+ //
+ // For a processor which doesn't support some feature(s), the schedule model
+ // can use:
+ //
+ // let<Predicate> UnsupportedFeatures = [HaveA,..,HaveY];
+ //
+ // to skip the checks for scheduling information when building LLVM for
+ // instructions which have any of the listed predicates in their Predicates
+ // field.
+ list<Predicate> UnsupportedFeatures = [];
+
+ bit NoModel = 0; // Special tag to indicate missing machine model.
+}
+
+def NoSchedModel : SchedMachineModel {
+ let NoModel = 1;
+ let CompleteModel = 0;
+}
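+
+// For example, a subtarget would typically define its model along these lines
+// and attach it to a processor definition (illustrative sketch only; the name
+// and numbers are invented, not taken from any real core):
+//
+//   def HypoCoreModel : SchedMachineModel {
+//     let IssueWidth = 2;
+//     let LoadLatency = 4;
+//     let MispredictPenalty = 10;
+//     let CompleteModel = 0; // still in bringup
+//   }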
+
+// Define a kind of processor resource that may be common across
+// similar subtargets.
+class ProcResourceKind;
+
+// Define a number of interchangeable processor resources. NumUnits
+// determines the throughput of instructions that require the resource.
+//
+// An optional Super resource may be given to model these resources as
+// a subset of the more general super resources. Using one of these
+// resources implies using one of the super resources.
+//
+// ProcResourceUnits normally model a few buffered resources within an
+// out-of-order engine. Buffered resources may be held for multiple
+// clock cycles, but the scheduler does not pin them to a particular
+// clock cycle relative to instruction dispatch. Setting BufferSize=0
+// changes this to an in-order issue/dispatch resource. In this case,
+// the scheduler counts down from the cycle that the instruction
+// issues in-order, forcing a stall whenever a subsequent instruction
+// requires the same resource until the number of ResourceCycles
+// specified in WriteRes expire. Setting BufferSize=1 changes this to
+// an in-order latency resource. In this case, the scheduler models
+// producer/consumer stalls between instructions that use the
+// resource.
+//
+// Examples (all assume an out-of-order engine):
+//
+// Use BufferSize = -1 for "issue ports" fed by a unified reservation
+// station. Here the size of the reservation station is modeled by
+// MicroOpBufferSize, which should be the minimum size of either the
+// register rename pool, unified reservation station, or reorder
+// buffer.
+//
+// Use BufferSize = 0 for resources that force "dispatch/issue
+// groups". (Different processors define dispath/issue
+// differently. Here we refer to stage between decoding into micro-ops
+// and moving them into a reservation station.) Normally NumMicroOps
+// is sufficient to limit dispatch/issue groups. However, some
+// processors can form groups of with only certain combinitions of
+// instruction types. e.g. POWER7.
+//
+// Use BufferSize = 1 for in-order execution units. This is used for
+// an in-order pipeline within an out-of-order core where scheduling
+// dependent operations back-to-back is guaranteed to cause a
+// bubble, e.g. Cortex-A9 floating-point.
+//
+// Use BufferSize > 1 for out-of-order execution units with a
+// separate reservation station. This simply models the size of the
+// reservation station.
+//
+// To model both dispatch/issue groups and in-order execution units,
+// create two types of units, one with BufferSize=0 and one with
+// BufferSize=1.
+//
+// SchedModel ties these units to a processor for any stand-alone defs
+// of this class.
+class ProcResourceUnits<ProcResourceKind kind, int num,
+ list<string> pfmCounters> {
+ ProcResourceKind Kind = kind;
+ int NumUnits = num;
+ ProcResourceKind Super = ?;
+ int BufferSize = -1;
+ SchedMachineModel SchedModel = ?;
+}
+
+// EponymousProcResourceKind helps implement ProcResourceUnits by
+// allowing a ProcResourceUnits definition to reference itself. It
+// should not be referenced anywhere else.
+def EponymousProcResourceKind : ProcResourceKind;
+
+// Subtargets typically define processor resource kind and number of
+// units in one place.
+class ProcResource<int num, list<string> pfmCounters = []> : ProcResourceKind,
+ ProcResourceUnits<EponymousProcResourceKind, num, pfmCounters>;
+
+class ProcResGroup<list<ProcResource> resources> : ProcResourceKind {
+ list<ProcResource> Resources = resources;
+ SchedMachineModel SchedModel = ?;
+ int BufferSize = -1;
+}
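+
+// For example (illustrative sketch only; names invented): two interchangeable
+// integer pipes, grouped so a write can issue to either one.
+//
+//   def HypoALU0 : ProcResource<1>;
+//   def HypoALU1 : ProcResource<1>;
+//   def HypoALU  : ProcResGroup<[HypoALU0, HypoALU1]>;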
+
+// A target architecture may define SchedReadWrite types and associate
+// them with instruction operands.
+class SchedReadWrite;
+
+// List the per-operand types that map to the machine model of an
+// instruction. One SchedWrite type must be listed for each explicit
+// def operand in order. Additional SchedWrite types may optionally be
+// listed for implicit def operands. SchedRead types may optionally
+// be listed for use operands in order. The order of defs relative to
+// uses is insignificant. This way, the same SchedReadWrite list may
+// be used for multiple forms of an operation. For example, a
+// two-address instruction could have two tied operands or single
+// operand that both reads and writes a reg. In both cases we have a
+// single SchedWrite and single SchedRead in any order.
+class Sched<list<SchedReadWrite> schedrw> {
+ list<SchedReadWrite> SchedRW = schedrw;
+}
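+
+// For example (illustrative sketch only; the SchedReadWrite names and the
+// instruction are hypothetical), a target-level multiply might be tagged:
+//
+//   def WriteIMul : SchedWrite;
+//   def ReadIMul  : SchedRead;
+//   ...
+//   def MULrr : SomeTargetInstrClass<...>, Sched<[WriteIMul, ReadIMul, ReadIMul]>;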
+
+// Define a scheduler resource associated with a def operand.
+class SchedWrite : SchedReadWrite;
+def NoWrite : SchedWrite;
+
+// Define a scheduler resource associated with a use operand.
+class SchedRead : SchedReadWrite;
+
+// Define a SchedWrite that is modeled as a sequence of other
+// SchedWrites with additive latency. This allows a single operand to
+// be mapped to the resources composed from a set of previously defined
+// SchedWrites.
+//
+// If the final write in this sequence is a SchedWriteVariant marked
+// Variadic, then the list of prior writes is distributed across all
+// operands after resolving the predicate for the final write.
+//
+// SchedModel silences warnings but is ignored.
+class WriteSequence<list<SchedWrite> writes, int rep = 1> : SchedWrite {
+ list<SchedWrite> Writes = writes;
+ int Repeat = rep;
+ SchedMachineModel SchedModel = ?;
+}
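+
+// For example (illustrative sketch only; WriteIMul is from the sketch above
+// and WriteHypoLd is another invented write type), a load-then-multiply pair
+// can be modeled as a single per-operand write with additive latency:
+//
+//   def WriteHypoLdMul : WriteSequence<[WriteHypoLd, WriteIMul]>;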
+
+// Define values common to WriteRes and SchedWriteRes.
+//
+// SchedModel ties these resources to a processor.
+class ProcWriteResources<list<ProcResourceKind> resources> {
+ list<ProcResourceKind> ProcResources = resources;
+ list<int> ResourceCycles = [];
+ int Latency = 1;
+ int NumMicroOps = 1;
+ bit BeginGroup = 0;
+ bit EndGroup = 0;
+ // Allow a processor to mark some scheduling classes as unsupported
+ // for stronger verification.
+ bit Unsupported = 0;
+ // Allow a processor to mark some scheduling classes as single-issue.
+ // SingleIssue is an alias for Begin/End Group.
+ bit SingleIssue = 0;
+ SchedMachineModel SchedModel = ?;
+}
+
+// Define the resources and latency of a SchedWrite. This will be used
+// directly by targets that have no itinerary classes. In this case,
+// SchedWrite is defined by the target, while WriteResources is
+// defined by the subtarget, and maps the SchedWrite to processor
+// resources.
+//
+// If a target already has itinerary classes, SchedWriteResources can
+// be used instead to define subtarget specific SchedWrites and map
+// them to processor resources in one place. Then ItinRW can map
+// itinerary classes to the subtarget's SchedWrites.
+//
+// ProcResources indicates the set of resources consumed by the write.
+// Optionally, ResourceCycles indicates the number of cycles the
+// resource is consumed. Each ResourceCycles item is paired with the
+// ProcResource item at the same position in its list. ResourceCycles
+// can be `[]`: in that case, all resources are consumed for a single
+// cycle, regardless of latency, which models a fully pipelined processing
+// unit. A value of 0 for ResourceCycles means that the resource must
+// be available but is not consumed, which is only relevant for
+// unbuffered resources.
+//
+// By default, each SchedWrite takes one micro-op, which is counted
+// against the processor's IssueWidth limit. If an instruction can
+// write multiple registers with a single micro-op, the subtarget
+// should define one of the writes to be zero micro-ops. If a
+// subtarget requires multiple micro-ops to write a single result, it
+// should either override the write's NumMicroOps to be greater than 1
+// or require additional writes. Extra writes can be required either
+// by defining a WriteSequence, or simply listing extra writes in the
+// instruction's list of writers beyond the number of "def"
+// operands. The scheduler assumes that all micro-ops must be
+// dispatched in the same cycle. These micro-ops may be required to
+// begin or end the current dispatch group.
+class WriteRes<SchedWrite write, list<ProcResourceKind> resources>
+ : ProcWriteResources<resources> {
+ SchedWrite WriteType = write;
+}
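+
+// For example, inside a "let SchedModel = HypoCoreModel in { ... }" block a
+// subtarget could bind the hypothetical WriteIMul type from above to the
+// hypothetical HypoALU group (illustrative sketch only):
+//
+//   def : WriteRes<WriteIMul, [HypoALU]> {
+//     let Latency = 3;
+//     let ResourceCycles = [2]; // occupies the ALU group for two cycles
+//   }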
+
+// Directly name a set of WriteResources defining a new SchedWrite
+// type at the same time. This class is unaware of its SchedModel so
+// must be referenced by InstRW or ItinRW.
+class SchedWriteRes<list<ProcResourceKind> resources> : SchedWrite,
+ ProcWriteResources<resources>;
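+
+// For example (illustrative sketch only; names invented), a subtarget-local
+// write type for a slow division, to be referenced later from an InstRW:
+//
+//   def HypoWriteDiv : SchedWriteRes<[HypoALU0]> {
+//     let Latency = 20;
+//     let NumMicroOps = 4;
+//   }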
+
+// Define values common to ReadAdvance and SchedReadAdvance.
+//
+// SchedModel ties these resources to a processor.
+class ProcReadAdvance<int cycles, list<SchedWrite> writes = []> {
+ int Cycles = cycles;
+ list<SchedWrite> ValidWrites = writes;
+ // Allow a processor to mark some scheduling classes as unsupported
+ // for stronger verification.
+ bit Unsupported = 0;
+ SchedMachineModel SchedModel = ?;
+}
+
+// A processor may define a ReadAdvance associated with a SchedRead
+// to reduce latency of a prior write by N cycles. A negative advance
+// effectively increases latency, which may be used for cross-domain
+// stalls.
+//
+// A ReadAdvance may be associated with a list of SchedWrites
+// to implement pipeline bypass. The Writes list may be empty to
+// indicate operands that are always read this number of Cycles later
+// than a normal register read, allowing the read's parent instruction
+// to issue earlier relative to the writer.
+class ReadAdvance<SchedRead read, int cycles, list<SchedWrite> writes = []>
+ : ProcReadAdvance<cycles, writes> {
+ SchedRead ReadType = read;
+}
+
+// Directly associate a new SchedRead type with a delay and optional
+// pipeline bypass. For use with InstRW or ItinRW.
+class SchedReadAdvance<int cycles, list<SchedWrite> writes = []> : SchedRead,
+ ProcReadAdvance<cycles, writes>;
+
+// Define SchedRead defaults. Reads seldom need special treatment.
+def ReadDefault : SchedRead;
+def NoReadAdvance : SchedReadAdvance<0>;
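+
+// For example (illustrative sketch only; reusing the hypothetical read/write
+// types from above), forwarding a multiply result into a dependent multiply
+// two cycles early:
+//
+//   def : ReadAdvance<ReadIMul, 2, [WriteIMul]>;
+//   // or, defining a subtarget-local read type directly:
+//   def HypoReadAcc : SchedReadAdvance<2, [WriteIMul]>;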
+
+// Define shared code that will be in the same scope as all
+// SchedPredicates. Available variables are:
+// (const MachineInstr *MI, const TargetSchedModel *SchedModel)
+class PredicateProlog<code c> {
+ code Code = c;
+}
+
+// Base class for scheduling predicates.
+class SchedPredicateBase;
+
+// A scheduling predicate whose logic is defined by a MCInstPredicate.
+// This can directly be used by SchedWriteVariant definitions.
+class MCSchedPredicate<MCInstPredicate P> : SchedPredicateBase {
+ MCInstPredicate Pred = P;
+ SchedMachineModel SchedModel = ?;
+}
+
+// Define a predicate to determine which SchedVariant applies to a
+// particular MachineInstr. The code snippet is used as an
+// if-statement's expression. Available variables are MI, SchedModel,
+// and anything defined in a PredicateProlog.
+//
+// SchedModel silences warnings but is ignored.
+class SchedPredicate<code pred> : SchedPredicateBase {
+ SchedMachineModel SchedModel = ?;
+ code Predicate = pred;
+}
+def NoSchedPred : SchedPredicate<[{true}]>;
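+
+// For example (illustrative sketch only; the operand index is hypothetical),
+// a predicate selecting a cheaper variant when the second source operand is
+// an immediate:
+//
+//   def HypoImmPred : SchedPredicate<[{MI->getOperand(2).isImm()}]>;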
+
+// Associate a predicate with a list of SchedReadWrites. By default,
+// the selected SchedReadWrites are still associated with a single
+// operand and assumed to execute sequentially with additive
+// latency. However, if the parent SchedWriteVariant or
+// SchedReadVariant is marked "Variadic", then each Selected
+// SchedReadWrite is mapped in place to the instruction's variadic
+// operands. In this case, latency is not additive. If the current Variant
+// is already part of a Sequence, then that entire chain leading up to
+// the Variant is distributed over the variadic operands.
+class SchedVar<SchedPredicateBase pred, list<SchedReadWrite> selected> {
+ SchedPredicateBase Predicate = pred;
+ list<SchedReadWrite> Selected = selected;
+}
+
+// SchedModel silences warnings but is ignored.
+class SchedVariant<list<SchedVar> variants> {
+ list<SchedVar> Variants = variants;
+ bit Variadic = 0;
+ SchedMachineModel SchedModel = ?;
+}
+
+// A SchedWriteVariant is a single SchedWrite type that maps to a list
+// of SchedWrite types under the conditions defined by its predicates.
+//
+// A Variadic write is expanded to cover multiple "def" operands. The
+// SchedVariant's Expansion list is then interpreted as one write
+// per-operand instead of the usual sequential writes feeding a single
+// operand.
+class SchedWriteVariant<list<SchedVar> variants> : SchedWrite,
+ SchedVariant<variants> {
+}
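+
+// For example (illustrative sketch only; it reuses the hypothetical predicate
+// and write types from the sketches above):
+//
+//   def HypoWriteMulOrDiv : SchedWriteVariant<[
+//     SchedVar<HypoImmPred, [WriteIMul]>,
+//     SchedVar<NoSchedPred, [HypoWriteDiv]>
+//   ]>;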
+
+// A SchedReadVariant is a single SchedRead type that maps to a list
+// of SchedRead types under the conditions defined by its predicates.
+//
+// A Variadic read is expanded to cover multiple "readsReg" operands as
+// explained above.
+class SchedReadVariant<list<SchedVar> variants> : SchedRead,
+ SchedVariant<variants> {
+}
+
+// Map a set of opcodes to a list of SchedReadWrite types. This allows
+// the subtarget to easily override specific operations.
+//
+// SchedModel ties this opcode mapping to a processor.
+class InstRW<list<SchedReadWrite> rw, dag instrlist> {
+ list<SchedReadWrite> OperandReadWrites = rw;
+ dag Instrs = instrlist;
+ SchedMachineModel SchedModel = ?;
+ // Allow a subtarget to mark some instructions as unsupported.
+ bit Unsupported = 0;
+}
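+
+// For example (illustrative sketch only; the opcode regex and the write type
+// are hypothetical):
+//
+//   def : InstRW<[HypoWriteDiv], (instregex "^HYPO_S?DIV")>;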
+
+// Map a set of itinerary classes to SchedReadWrite resources. This is
+// used to bootstrap a target (e.g. ARM) when itineraries already
+// exist and changing InstrInfo is undesirable.
+//
+// SchedModel ties this ItineraryClass mapping to a processor.
+class ItinRW<list<SchedReadWrite> rw, list<InstrItinClass> iic> {
+ list<InstrItinClass> MatchedItinClasses = iic;
+ list<SchedReadWrite> OperandReadWrites = rw;
+ SchedMachineModel SchedModel = ?;
+}
+
+// Alias a target-defined SchedReadWrite to a processor specific
+// SchedReadWrite. This allows a subtarget to easily map a
+// SchedReadWrite type onto a WriteSequence, SchedWriteVariant, or
+// SchedReadVariant.
+//
+// SchedModel will usually be provided by a surrounding let statement
+// and ties this SchedAlias mapping to a processor.
+class SchedAlias<SchedReadWrite match, SchedReadWrite alias> {
+ SchedReadWrite MatchRW = match;
+ SchedReadWrite AliasRW = alias;
+ SchedMachineModel SchedModel = ?;
+}
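+
+// For example (illustrative sketch only; reusing hypothetical names from
+// above), a subtarget could redirect the generic multiply write to its
+// predicate-resolved variant:
+//
+//   def : SchedAlias<WriteIMul, HypoWriteMulOrDiv>;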
+
+// Allow the definition of processor register files for register renaming
+// purposes.
+//
+// Each processor register file declares:
+// - The set of registers that can be renamed.
+// - The number of physical registers which can be used for register renaming
+// purpose.
+// - The cost of a register rename.
+//
+// The cost of a rename is the number of physical registers allocated by the
+// register alias table to map the new definition. By default, a register can be
+// renamed at the cost of a single physical register. Note that register costs
+// are defined at register class granularity (see field `Costs`).
+//
+// The set of registers that are subject to register renaming is declared using
+// a list of register classes (see field `RegClasses`). An empty list of
+// register classes means: all the logical registers defined by the target can
+// be fully renamed.
+//
+// A register R can be renamed if its register class appears in the `RegClasses`
+// set. When R is written, a new alias is allocated at the cost of one or more
+// physical registers; as a result, false dependencies on R are removed.
+//
+// A sub-register V of register R is implicitly part of the same register file.
+// However, V is only renamed if its register class is part of `RegClasses`.
+// Otherwise, the processor keeps it (as well as any other different part
+// of R) together with R, and a write of V always causes a compulsory read of R.
+//
+// This is what happens for example on AMD processors (at least from Bulldozer
+// onwards), where AL and AH are not treated as independent from AX, and AX is
+// not treated as independent from EAX. A write to AL has an implicit false
+// dependency on the last write to EAX (or a portion of EAX). As a consequence,
+// a write to AL cannot go in parallel with a write to AH.
+//
+// There is no false dependency if the partial register write belongs to a
+// register class that is in `RegClasses`.
+// There is also no penalty for writes that "clear the content of a super-register"
+// (see MC/MCInstrAnalysis.h - method MCInstrAnalysis::clearsSuperRegisters()).
+// On x86-64, 32-bit GPR writes implicitly zero the upper half of the underlying
+// physical register, effectively removing any false dependencies with the
+// previous register definition.
+//
+// TODO: This implementation assumes that there is no limit in the number of
+// renames per cycle, which might not be true for all hardware or register
+// classes. Also, there is no limit to how many times the same logical register
+// can be renamed during the same cycle.
+//
+// TODO: we don't currently model merge penalties for the case where a write to
+// a part of a register is followed by a read from a larger part of the same
+// register. On some Intel chips, different parts of a GPR can be stored in
+// different physical registers. However, there is a cost to pay for when the
+// partial write is combined with the previous super-register definition. We
+// should add support for these cases, and correctly model merge problems with
+// partial register accesses.
+class RegisterFile<int numPhysRegs, list<RegisterClass> Classes = [],
+ list<int> Costs = []> {
+ list<RegisterClass> RegClasses = Classes;
+ list<int> RegCosts = Costs;
+ int NumPhysRegs = numPhysRegs;
+ SchedMachineModel SchedModel = ?;
+}
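+
+// For example (illustrative sketch only; the register class names are
+// hypothetical): 60 physical registers renaming two GPR classes, with the
+// wider class costing two physical registers per rename.
+//
+//   def HypoIntRF : RegisterFile<60, [HypoGPR32, HypoGPR64], [1, 2]>;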
+
+// Describe the retire control unit.
+// A retire control unit specifies the size of the reorder buffer, as well as
+// the maximum number of opcodes that can be retired every cycle.
+// A value less-than-or-equal-to zero for field 'ReorderBufferSize' means: "the
+// size is unknown". The idea is that external tools can fall-back to using
+// field MicroOpBufferSize in SchedModel if the reorder buffer size is unknown.
+// A zero or negative value for field 'MaxRetirePerCycle' means "no
+// restrictions on the number of instructions retired per cycle".
+// Models can optionally specify up to one instance of RetireControlUnit per
+// scheduling model.
+class RetireControlUnit<int bufferSize, int retirePerCycle> {
+ int ReorderBufferSize = bufferSize;
+ int MaxRetirePerCycle = retirePerCycle;
+ SchedMachineModel SchedModel = ?;
+}
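+
+// For example (illustrative sketch only; numbers invented): a 192-entry
+// reorder buffer retiring at most 4 micro-ops per cycle.
+//
+//   def HypoRCU : RetireControlUnit<192, 4>;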
+
+// Allow the definition of hardware counters.
+class PfmCounter {
+ SchedMachineModel SchedModel = ?;
+}
+
+// Each processor can define how to measure cycles by defining a
+// PfmCycleCounter.
+class PfmCycleCounter<string counter> : PfmCounter {
+ string Counter = counter;
+}
+
+// Each ProcResourceUnits can define how to measure issued uops by defining
+// a PfmIssueCounter.
+class PfmIssueCounter<ProcResourceUnits resource, list<string> counters>
+  : PfmCounter {
+ // The resource units on which uops are issued.
+ ProcResourceUnits Resource = resource;
+ // The list of counters that measure issue events.
+ list<string> Counters = counters;
+}
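+
+// For example (illustrative sketch only; the perf event names and the
+// resource are hypothetical and depend on the host PMU):
+//
+//   def HypoCycles : PfmCycleCounter<"cpu_clk_unhalted">;
+//   def : PfmIssueCounter<HypoALU0, ["uops_dispatched_port0"]>;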
diff --git a/capstone/suite/synctools/tablegen/include/llvm/Target/TargetSelectionDAG.td b/capstone/suite/synctools/tablegen/include/llvm/Target/TargetSelectionDAG.td
new file mode 100644
index 000000000..4ba4d8212
--- /dev/null
+++ b/capstone/suite/synctools/tablegen/include/llvm/Target/TargetSelectionDAG.td
@@ -0,0 +1,1335 @@
+//===- TargetSelectionDAG.td - Common code for DAG isels ---*- tablegen -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines the target-independent interfaces used by SelectionDAG
+// instruction selection generators.
+//
+//===----------------------------------------------------------------------===//
+
+//===----------------------------------------------------------------------===//
+// Selection DAG Type Constraint definitions.
+//
+// Note that the semantics of these constraints are hard coded into tblgen. To
+// modify or add constraints, you have to hack tblgen.
+//
+
+class SDTypeConstraint<int opnum> {
+ int OperandNum = opnum;
+}
+
+// SDTCisVT - The specified operand has exactly this VT.
+class SDTCisVT<int OpNum, ValueType vt> : SDTypeConstraint<OpNum> {
+ ValueType VT = vt;
+}
+
+// SDTCisPtrTy - The specified operand has pointer type.
+class SDTCisPtrTy<int OpNum> : SDTypeConstraint<OpNum>;
+
+// SDTCisInt - The specified operand has integer type.
+class SDTCisInt<int OpNum> : SDTypeConstraint<OpNum>;
+
+// SDTCisFP - The specified operand has floating-point type.
+class SDTCisFP<int OpNum> : SDTypeConstraint<OpNum>;
+
+// SDTCisVec - The specified operand has a vector type.
+class SDTCisVec<int OpNum> : SDTypeConstraint<OpNum>;
+
+// SDTCisSameAs - The two specified operands have identical types.
+class SDTCisSameAs<int OpNum, int OtherOp> : SDTypeConstraint<OpNum> {
+ int OtherOperandNum = OtherOp;
+}
+
+// SDTCisVTSmallerThanOp - The specified operand is a VT SDNode, and its type is
+// smaller than the 'Other' operand.
+class SDTCisVTSmallerThanOp<int OpNum, int OtherOp> : SDTypeConstraint<OpNum> {
+ int OtherOperandNum = OtherOp;
+}
+
+class SDTCisOpSmallerThanOp<int SmallOp, int BigOp> : SDTypeConstraint<SmallOp>{
+ int BigOperandNum = BigOp;
+}
+
+/// SDTCisEltOfVec - This indicates that ThisOp is a scalar type of the same
+/// type as the element type of OtherOp, which is a vector type.
+class SDTCisEltOfVec<int ThisOp, int OtherOp>
+ : SDTypeConstraint<ThisOp> {
+ int OtherOpNum = OtherOp;
+}
+
+/// SDTCisSubVecOfVec - This indicates that ThisOp is a vector type
+/// with length less than that of OtherOp, which is a vector type.
+class SDTCisSubVecOfVec<int ThisOp, int OtherOp>
+ : SDTypeConstraint<ThisOp> {
+ int OtherOpNum = OtherOp;
+}
+
+// SDTCVecEltisVT - The specified operand is a vector type with element type
+// VT.
+class SDTCVecEltisVT<int OpNum, ValueType vt> : SDTypeConstraint<OpNum> {
+ ValueType VT = vt;
+}
+
+// SDTCisSameNumEltsAs - The two specified operands have identical number
+// of elements.
+class SDTCisSameNumEltsAs<int OpNum, int OtherOp> : SDTypeConstraint<OpNum> {
+ int OtherOperandNum = OtherOp;
+}
+
+// SDTCisSameSizeAs - The two specified operands have identical size.
+class SDTCisSameSizeAs<int OpNum, int OtherOp> : SDTypeConstraint<OpNum> {
+ int OtherOperandNum = OtherOp;
+}
+
+//===----------------------------------------------------------------------===//
+// Selection DAG Type Profile definitions.
+//
+// These use the constraints defined above to describe the type requirements of
+// the various nodes. These are not hard coded into tblgen, allowing targets to
+// add their own if needed.
+//
+
+// SDTypeProfile - This profile describes the type requirements of a Selection
+// DAG node.
+class SDTypeProfile<int numresults, int numoperands,
+ list<SDTypeConstraint> constraints> {
+ int NumResults = numresults;
+ int NumOperands = numoperands;
+ list<SDTypeConstraint> Constraints = constraints;
+}
+
+// Builtin profiles.
+def SDTIntLeaf: SDTypeProfile<1, 0, [SDTCisInt<0>]>; // for 'imm'.
+def SDTFPLeaf : SDTypeProfile<1, 0, [SDTCisFP<0>]>; // for 'fpimm'.
+def SDTPtrLeaf: SDTypeProfile<1, 0, [SDTCisPtrTy<0>]>; // for '&g'.
+def SDTOther : SDTypeProfile<1, 0, [SDTCisVT<0, OtherVT>]>; // for 'vt'.
+def SDTUNDEF : SDTypeProfile<1, 0, []>; // for 'undef'.
+def SDTUnaryOp : SDTypeProfile<1, 1, []>; // for bitconvert.
+
+def SDTIntBinOp : SDTypeProfile<1, 2, [ // add, and, or, xor, udiv, etc.
+ SDTCisSameAs<0, 1>, SDTCisSameAs<0, 2>, SDTCisInt<0>
+]>;
+def SDTIntShiftOp : SDTypeProfile<1, 2, [ // shl, sra, srl
+ SDTCisSameAs<0, 1>, SDTCisInt<0>, SDTCisInt<2>
+]>;
+def SDTIntSatNoShOp : SDTypeProfile<1, 2, [ // ssat with no shift
+ SDTCisSameAs<0, 1>, SDTCisInt<2>
+]>;
+def SDTIntBinHiLoOp : SDTypeProfile<2, 2, [ // mulhi, mullo, sdivrem, udivrem
+ SDTCisSameAs<0, 1>, SDTCisSameAs<0, 2>, SDTCisSameAs<0, 3>,SDTCisInt<0>
+]>;
+
+def SDTFPBinOp : SDTypeProfile<1, 2, [ // fadd, fmul, etc.
+ SDTCisSameAs<0, 1>, SDTCisSameAs<0, 2>, SDTCisFP<0>
+]>;
+def SDTFPSignOp : SDTypeProfile<1, 2, [ // fcopysign.
+ SDTCisSameAs<0, 1>, SDTCisFP<0>, SDTCisFP<2>
+]>;
+def SDTFPTernaryOp : SDTypeProfile<1, 3, [ // fmadd, fnmsub, etc.
+ SDTCisSameAs<0, 1>, SDTCisSameAs<0, 2>, SDTCisSameAs<0, 3>, SDTCisFP<0>
+]>;
+def SDTIntUnaryOp : SDTypeProfile<1, 1, [ // ctlz, cttz
+ SDTCisSameAs<0, 1>, SDTCisInt<0>
+]>;
+def SDTIntExtendOp : SDTypeProfile<1, 1, [ // sext, zext, anyext
+ SDTCisInt<0>, SDTCisInt<1>, SDTCisOpSmallerThanOp<1, 0>, SDTCisSameNumEltsAs<0, 1>
+]>;
+def SDTIntTruncOp : SDTypeProfile<1, 1, [ // trunc
+ SDTCisInt<0>, SDTCisInt<1>, SDTCisOpSmallerThanOp<0, 1>, SDTCisSameNumEltsAs<0, 1>
+]>;
+def SDTFPUnaryOp : SDTypeProfile<1, 1, [ // fneg, fsqrt, etc
+ SDTCisSameAs<0, 1>, SDTCisFP<0>
+]>;
+def SDTFPRoundOp : SDTypeProfile<1, 1, [ // fround
+ SDTCisFP<0>, SDTCisFP<1>, SDTCisOpSmallerThanOp<0, 1>, SDTCisSameNumEltsAs<0, 1>
+]>;
+def SDTFPExtendOp : SDTypeProfile<1, 1, [ // fextend
+ SDTCisFP<0>, SDTCisFP<1>, SDTCisOpSmallerThanOp<1, 0>, SDTCisSameNumEltsAs<0, 1>
+]>;
+def SDTIntToFPOp : SDTypeProfile<1, 1, [ // [su]int_to_fp
+ SDTCisFP<0>, SDTCisInt<1>, SDTCisSameNumEltsAs<0, 1>
+]>;
+def SDTFPToIntOp : SDTypeProfile<1, 1, [ // fp_to_[su]int
+ SDTCisInt<0>, SDTCisFP<1>, SDTCisSameNumEltsAs<0, 1>
+]>;
+def SDTExtInreg : SDTypeProfile<1, 2, [ // sext_inreg
+ SDTCisSameAs<0, 1>, SDTCisInt<0>, SDTCisVT<2, OtherVT>,
+ SDTCisVTSmallerThanOp<2, 1>
+]>;
+def SDTExtInvec : SDTypeProfile<1, 1, [ // sext_invec
+ SDTCisInt<0>, SDTCisVec<0>, SDTCisInt<1>, SDTCisVec<1>,
+ SDTCisOpSmallerThanOp<1, 0>, SDTCisSameSizeAs<0,1>
+]>;
+
+def SDTSetCC : SDTypeProfile<1, 3, [ // setcc
+ SDTCisInt<0>, SDTCisSameAs<1, 2>, SDTCisVT<3, OtherVT>
+]>;
+
+def SDTSelect : SDTypeProfile<1, 3, [ // select
+ SDTCisInt<1>, SDTCisSameAs<0, 2>, SDTCisSameAs<2, 3>
+]>;
+
+def SDTVSelect : SDTypeProfile<1, 3, [ // vselect
+ SDTCisVec<0>, SDTCisInt<1>, SDTCisSameAs<0, 2>, SDTCisSameAs<2, 3>, SDTCisSameNumEltsAs<0, 1>
+]>;
+
+def SDTSelectCC : SDTypeProfile<1, 5, [ // select_cc
+ SDTCisSameAs<1, 2>, SDTCisSameAs<3, 4>, SDTCisSameAs<0, 3>,
+ SDTCisVT<5, OtherVT>
+]>;
+
+def SDTBr : SDTypeProfile<0, 1, [ // br
+ SDTCisVT<0, OtherVT>
+]>;
+
+def SDTBrCC : SDTypeProfile<0, 4, [ // brcc
+ SDTCisVT<0, OtherVT>, SDTCisSameAs<1, 2>, SDTCisVT<3, OtherVT>
+]>;
+
+def SDTBrcond : SDTypeProfile<0, 2, [ // brcond
+ SDTCisInt<0>, SDTCisVT<1, OtherVT>
+]>;
+
+def SDTBrind : SDTypeProfile<0, 1, [ // brind
+ SDTCisPtrTy<0>
+]>;
+
+def SDTCatchret : SDTypeProfile<0, 2, [ // catchret
+ SDTCisVT<0, OtherVT>, SDTCisVT<1, OtherVT>
+]>;
+
+def SDTNone : SDTypeProfile<0, 0, []>; // ret, trap
+
+def SDTLoad : SDTypeProfile<1, 1, [ // load
+ SDTCisPtrTy<1>
+]>;
+
+def SDTStore : SDTypeProfile<0, 2, [ // store
+ SDTCisPtrTy<1>
+]>;
+
+def SDTIStore : SDTypeProfile<1, 3, [ // indexed store
+ SDTCisSameAs<0, 2>, SDTCisPtrTy<0>, SDTCisPtrTy<3>
+]>;
+
+def SDTMaskedStore: SDTypeProfile<0, 3, [ // masked store
+ SDTCisPtrTy<0>, SDTCisVec<1>, SDTCisVec<2>, SDTCisSameNumEltsAs<1, 2>
+]>;
+
+def SDTMaskedLoad: SDTypeProfile<1, 3, [ // masked load
+ SDTCisVec<0>, SDTCisPtrTy<1>, SDTCisVec<2>, SDTCisSameAs<0, 3>,
+ SDTCisSameNumEltsAs<0, 2>
+]>;
+
+def SDTMaskedGather: SDTypeProfile<2, 3, [ // masked gather
+ SDTCisVec<0>, SDTCisVec<1>, SDTCisSameAs<0, 2>, SDTCisSameAs<1, 3>,
+ SDTCisPtrTy<4>, SDTCVecEltisVT<1, i1>, SDTCisSameNumEltsAs<0, 1>
+]>;
+
+def SDTMaskedScatter: SDTypeProfile<1, 3, [ // masked scatter
+ SDTCisVec<0>, SDTCisVec<1>, SDTCisSameAs<0, 2>, SDTCisSameNumEltsAs<0, 1>,
+ SDTCVecEltisVT<0, i1>, SDTCisPtrTy<3>
+]>;
+
+def SDTVecShuffle : SDTypeProfile<1, 2, [
+ SDTCisSameAs<0, 1>, SDTCisSameAs<1, 2>
+]>;
+def SDTVecExtract : SDTypeProfile<1, 2, [ // vector extract
+ SDTCisEltOfVec<0, 1>, SDTCisPtrTy<2>
+]>;
+def SDTVecInsert : SDTypeProfile<1, 3, [ // vector insert
+ SDTCisEltOfVec<2, 1>, SDTCisSameAs<0, 1>, SDTCisPtrTy<3>
+]>;
+
+def SDTSubVecExtract : SDTypeProfile<1, 2, [// subvector extract
+ SDTCisSubVecOfVec<0,1>, SDTCisInt<2>
+]>;
+def SDTSubVecInsert : SDTypeProfile<1, 3, [ // subvector insert
+ SDTCisSubVecOfVec<2, 1>, SDTCisSameAs<0,1>, SDTCisInt<3>
+]>;
+
+def SDTPrefetch : SDTypeProfile<0, 4, [ // prefetch
+ SDTCisPtrTy<0>, SDTCisSameAs<1, 2>, SDTCisSameAs<1, 3>, SDTCisInt<1>
+]>;
+
+def SDTMemBarrier : SDTypeProfile<0, 5, [ // memory barrier
+ SDTCisSameAs<0,1>, SDTCisSameAs<0,2>, SDTCisSameAs<0,3>, SDTCisSameAs<0,4>,
+ SDTCisInt<0>
+]>;
+def SDTAtomicFence : SDTypeProfile<0, 2, [
+ SDTCisSameAs<0,1>, SDTCisPtrTy<0>
+]>;
+def SDTAtomic3 : SDTypeProfile<1, 3, [
+ SDTCisSameAs<0,2>, SDTCisSameAs<0,3>, SDTCisInt<0>, SDTCisPtrTy<1>
+]>;
+def SDTAtomic2 : SDTypeProfile<1, 2, [
+ SDTCisSameAs<0,2>, SDTCisInt<0>, SDTCisPtrTy<1>
+]>;
+def SDTAtomicStore : SDTypeProfile<0, 2, [
+ SDTCisPtrTy<0>, SDTCisInt<1>
+]>;
+def SDTAtomicLoad : SDTypeProfile<1, 1, [
+ SDTCisInt<0>, SDTCisPtrTy<1>
+]>;
+
+def SDTConvertOp : SDTypeProfile<1, 5, [ //cvtss, su, us, uu, ff, fs, fu, sf, su
+ SDTCisVT<2, OtherVT>, SDTCisVT<3, OtherVT>, SDTCisPtrTy<4>, SDTCisPtrTy<5>
+]>;
+
+class SDCallSeqStart<list<SDTypeConstraint> constraints> :
+ SDTypeProfile<0, 2, constraints>;
+class SDCallSeqEnd<list<SDTypeConstraint> constraints> :
+ SDTypeProfile<0, 2, constraints>;
+
+//===----------------------------------------------------------------------===//
+// Selection DAG Node definitions.
+//
+class SDNode<string opcode, SDTypeProfile typeprof,
+ list<SDNodeProperty> props = [], string sdclass = "SDNode">
+ : SDPatternOperator {
+ string Opcode = opcode;
+ string SDClass = sdclass;
+ let Properties = props;
+ SDTypeProfile TypeProfile = typeprof;
+}
+
+// Special TableGen-recognized dag nodes
+def set;
+def implicit;
+def node;
+def srcvalue;
+
+def imm : SDNode<"ISD::Constant" , SDTIntLeaf , [], "ConstantSDNode">;
+def timm : SDNode<"ISD::TargetConstant",SDTIntLeaf, [], "ConstantSDNode">;
+def fpimm : SDNode<"ISD::ConstantFP", SDTFPLeaf , [], "ConstantFPSDNode">;
+def vt : SDNode<"ISD::VALUETYPE" , SDTOther , [], "VTSDNode">;
+def bb : SDNode<"ISD::BasicBlock", SDTOther , [], "BasicBlockSDNode">;
+def cond : SDNode<"ISD::CONDCODE" , SDTOther , [], "CondCodeSDNode">;
+def undef : SDNode<"ISD::UNDEF" , SDTUNDEF , []>;
+def globaladdr : SDNode<"ISD::GlobalAddress", SDTPtrLeaf, [],
+ "GlobalAddressSDNode">;
+def tglobaladdr : SDNode<"ISD::TargetGlobalAddress", SDTPtrLeaf, [],
+ "GlobalAddressSDNode">;
+def globaltlsaddr : SDNode<"ISD::GlobalTLSAddress", SDTPtrLeaf, [],
+ "GlobalAddressSDNode">;
+def tglobaltlsaddr : SDNode<"ISD::TargetGlobalTLSAddress", SDTPtrLeaf, [],
+ "GlobalAddressSDNode">;
+def constpool : SDNode<"ISD::ConstantPool", SDTPtrLeaf, [],
+ "ConstantPoolSDNode">;
+def tconstpool : SDNode<"ISD::TargetConstantPool", SDTPtrLeaf, [],
+ "ConstantPoolSDNode">;
+def jumptable : SDNode<"ISD::JumpTable", SDTPtrLeaf, [],
+ "JumpTableSDNode">;
+def tjumptable : SDNode<"ISD::TargetJumpTable", SDTPtrLeaf, [],
+ "JumpTableSDNode">;
+def frameindex : SDNode<"ISD::FrameIndex", SDTPtrLeaf, [],
+ "FrameIndexSDNode">;
+def tframeindex : SDNode<"ISD::TargetFrameIndex", SDTPtrLeaf, [],
+ "FrameIndexSDNode">;
+def externalsym : SDNode<"ISD::ExternalSymbol", SDTPtrLeaf, [],
+ "ExternalSymbolSDNode">;
+def texternalsym: SDNode<"ISD::TargetExternalSymbol", SDTPtrLeaf, [],
+ "ExternalSymbolSDNode">;
+def mcsym: SDNode<"ISD::MCSymbol", SDTPtrLeaf, [], "MCSymbolSDNode">;
+def blockaddress : SDNode<"ISD::BlockAddress", SDTPtrLeaf, [],
+ "BlockAddressSDNode">;
+def tblockaddress: SDNode<"ISD::TargetBlockAddress", SDTPtrLeaf, [],
+ "BlockAddressSDNode">;
+
+def add : SDNode<"ISD::ADD" , SDTIntBinOp ,
+ [SDNPCommutative, SDNPAssociative]>;
+def sub : SDNode<"ISD::SUB" , SDTIntBinOp>;
+def mul : SDNode<"ISD::MUL" , SDTIntBinOp,
+ [SDNPCommutative, SDNPAssociative]>;
+def mulhs : SDNode<"ISD::MULHS" , SDTIntBinOp, [SDNPCommutative]>;
+def mulhu : SDNode<"ISD::MULHU" , SDTIntBinOp, [SDNPCommutative]>;
+def smullohi : SDNode<"ISD::SMUL_LOHI" , SDTIntBinHiLoOp, [SDNPCommutative]>;
+def umullohi : SDNode<"ISD::UMUL_LOHI" , SDTIntBinHiLoOp, [SDNPCommutative]>;
+def sdiv : SDNode<"ISD::SDIV" , SDTIntBinOp>;
+def udiv : SDNode<"ISD::UDIV" , SDTIntBinOp>;
+def srem : SDNode<"ISD::SREM" , SDTIntBinOp>;
+def urem : SDNode<"ISD::UREM" , SDTIntBinOp>;
+def sdivrem : SDNode<"ISD::SDIVREM" , SDTIntBinHiLoOp>;
+def udivrem : SDNode<"ISD::UDIVREM" , SDTIntBinHiLoOp>;
+def srl : SDNode<"ISD::SRL" , SDTIntShiftOp>;
+def sra : SDNode<"ISD::SRA" , SDTIntShiftOp>;
+def shl : SDNode<"ISD::SHL" , SDTIntShiftOp>;
+def rotl : SDNode<"ISD::ROTL" , SDTIntShiftOp>;
+def rotr : SDNode<"ISD::ROTR" , SDTIntShiftOp>;
+def and : SDNode<"ISD::AND" , SDTIntBinOp,
+ [SDNPCommutative, SDNPAssociative]>;
+def or : SDNode<"ISD::OR" , SDTIntBinOp,
+ [SDNPCommutative, SDNPAssociative]>;
+def xor : SDNode<"ISD::XOR" , SDTIntBinOp,
+ [SDNPCommutative, SDNPAssociative]>;
+def addc : SDNode<"ISD::ADDC" , SDTIntBinOp,
+ [SDNPCommutative, SDNPOutGlue]>;
+def adde : SDNode<"ISD::ADDE" , SDTIntBinOp,
+ [SDNPCommutative, SDNPOutGlue, SDNPInGlue]>;
+def subc : SDNode<"ISD::SUBC" , SDTIntBinOp,
+ [SDNPOutGlue]>;
+def sube : SDNode<"ISD::SUBE" , SDTIntBinOp,
+ [SDNPOutGlue, SDNPInGlue]>;
+def smin : SDNode<"ISD::SMIN" , SDTIntBinOp,
+ [SDNPCommutative, SDNPAssociative]>;
+def smax : SDNode<"ISD::SMAX" , SDTIntBinOp,
+ [SDNPCommutative, SDNPAssociative]>;
+def umin : SDNode<"ISD::UMIN" , SDTIntBinOp,
+ [SDNPCommutative, SDNPAssociative]>;
+def umax : SDNode<"ISD::UMAX" , SDTIntBinOp,
+ [SDNPCommutative, SDNPAssociative]>;
+
+def sext_inreg : SDNode<"ISD::SIGN_EXTEND_INREG", SDTExtInreg>;
+def sext_invec : SDNode<"ISD::SIGN_EXTEND_VECTOR_INREG", SDTExtInvec>;
+def zext_invec : SDNode<"ISD::ZERO_EXTEND_VECTOR_INREG", SDTExtInvec>;
+
+def abs : SDNode<"ISD::ABS" , SDTIntUnaryOp>;
+def bitreverse : SDNode<"ISD::BITREVERSE" , SDTIntUnaryOp>;
+def bswap : SDNode<"ISD::BSWAP" , SDTIntUnaryOp>;
+def ctlz : SDNode<"ISD::CTLZ" , SDTIntUnaryOp>;
+def cttz : SDNode<"ISD::CTTZ" , SDTIntUnaryOp>;
+def ctpop : SDNode<"ISD::CTPOP" , SDTIntUnaryOp>;
+def ctlz_zero_undef : SDNode<"ISD::CTLZ_ZERO_UNDEF", SDTIntUnaryOp>;
+def cttz_zero_undef : SDNode<"ISD::CTTZ_ZERO_UNDEF", SDTIntUnaryOp>;
+def sext : SDNode<"ISD::SIGN_EXTEND", SDTIntExtendOp>;
+def zext : SDNode<"ISD::ZERO_EXTEND", SDTIntExtendOp>;
+def anyext : SDNode<"ISD::ANY_EXTEND" , SDTIntExtendOp>;
+def trunc : SDNode<"ISD::TRUNCATE" , SDTIntTruncOp>;
+def bitconvert : SDNode<"ISD::BITCAST" , SDTUnaryOp>;
+def addrspacecast : SDNode<"ISD::ADDRSPACECAST", SDTUnaryOp>;
+def extractelt : SDNode<"ISD::EXTRACT_VECTOR_ELT", SDTVecExtract>;
+def insertelt : SDNode<"ISD::INSERT_VECTOR_ELT", SDTVecInsert>;
+
+def fadd : SDNode<"ISD::FADD" , SDTFPBinOp, [SDNPCommutative]>;
+def fsub : SDNode<"ISD::FSUB" , SDTFPBinOp>;
+def fmul : SDNode<"ISD::FMUL" , SDTFPBinOp, [SDNPCommutative]>;
+def fdiv : SDNode<"ISD::FDIV" , SDTFPBinOp>;
+def frem : SDNode<"ISD::FREM" , SDTFPBinOp>;
+def fma : SDNode<"ISD::FMA" , SDTFPTernaryOp>;
+def fmad : SDNode<"ISD::FMAD" , SDTFPTernaryOp>;
+def fabs : SDNode<"ISD::FABS" , SDTFPUnaryOp>;
+def fminnum : SDNode<"ISD::FMINNUM" , SDTFPBinOp,
+ [SDNPCommutative, SDNPAssociative]>;
+def fmaxnum : SDNode<"ISD::FMAXNUM" , SDTFPBinOp,
+ [SDNPCommutative, SDNPAssociative]>;
+def fminnan : SDNode<"ISD::FMINNAN" , SDTFPBinOp>;
+def fmaxnan : SDNode<"ISD::FMAXNAN" , SDTFPBinOp>;
+def fgetsign : SDNode<"ISD::FGETSIGN" , SDTFPToIntOp>;
+def fcanonicalize : SDNode<"ISD::FCANONICALIZE", SDTFPUnaryOp>;
+def fneg : SDNode<"ISD::FNEG" , SDTFPUnaryOp>;
+def fsqrt : SDNode<"ISD::FSQRT" , SDTFPUnaryOp>;
+def fsin : SDNode<"ISD::FSIN" , SDTFPUnaryOp>;
+def fcos : SDNode<"ISD::FCOS" , SDTFPUnaryOp>;
+def fexp2 : SDNode<"ISD::FEXP2" , SDTFPUnaryOp>;
+def fpow : SDNode<"ISD::FPOW" , SDTFPBinOp>;
+def flog2 : SDNode<"ISD::FLOG2" , SDTFPUnaryOp>;
+def frint : SDNode<"ISD::FRINT" , SDTFPUnaryOp>;
+def ftrunc : SDNode<"ISD::FTRUNC" , SDTFPUnaryOp>;
+def fceil : SDNode<"ISD::FCEIL" , SDTFPUnaryOp>;
+def ffloor : SDNode<"ISD::FFLOOR" , SDTFPUnaryOp>;
+def fnearbyint : SDNode<"ISD::FNEARBYINT" , SDTFPUnaryOp>;
+def fround : SDNode<"ISD::FROUND" , SDTFPUnaryOp>;
+
+def fpround : SDNode<"ISD::FP_ROUND" , SDTFPRoundOp>;
+def fpextend : SDNode<"ISD::FP_EXTEND" , SDTFPExtendOp>;
+def fcopysign : SDNode<"ISD::FCOPYSIGN" , SDTFPSignOp>;
+
+def sint_to_fp : SDNode<"ISD::SINT_TO_FP" , SDTIntToFPOp>;
+def uint_to_fp : SDNode<"ISD::UINT_TO_FP" , SDTIntToFPOp>;
+def fp_to_sint : SDNode<"ISD::FP_TO_SINT" , SDTFPToIntOp>;
+def fp_to_uint : SDNode<"ISD::FP_TO_UINT" , SDTFPToIntOp>;
+def f16_to_fp : SDNode<"ISD::FP16_TO_FP" , SDTIntToFPOp>;
+def fp_to_f16 : SDNode<"ISD::FP_TO_FP16" , SDTFPToIntOp>;
+
+def setcc : SDNode<"ISD::SETCC" , SDTSetCC>;
+def select : SDNode<"ISD::SELECT" , SDTSelect>;
+def vselect : SDNode<"ISD::VSELECT" , SDTVSelect>;
+def selectcc : SDNode<"ISD::SELECT_CC" , SDTSelectCC>;
+
+def brcc : SDNode<"ISD::BR_CC" , SDTBrCC, [SDNPHasChain]>;
+def brcond : SDNode<"ISD::BRCOND" , SDTBrcond, [SDNPHasChain]>;
+def brind : SDNode<"ISD::BRIND" , SDTBrind, [SDNPHasChain]>;
+def br : SDNode<"ISD::BR" , SDTBr, [SDNPHasChain]>;
+def catchret : SDNode<"ISD::CATCHRET" , SDTCatchret,
+ [SDNPHasChain, SDNPSideEffect]>;
+def cleanupret : SDNode<"ISD::CLEANUPRET" , SDTNone, [SDNPHasChain]>;
+def catchpad : SDNode<"ISD::CATCHPAD" , SDTNone,
+ [SDNPHasChain, SDNPSideEffect]>;
+
+def trap : SDNode<"ISD::TRAP" , SDTNone,
+ [SDNPHasChain, SDNPSideEffect]>;
+def debugtrap : SDNode<"ISD::DEBUGTRAP" , SDTNone,
+ [SDNPHasChain, SDNPSideEffect]>;
+
+def prefetch : SDNode<"ISD::PREFETCH" , SDTPrefetch,
+ [SDNPHasChain, SDNPMayLoad, SDNPMayStore,
+ SDNPMemOperand]>;
+
+def readcyclecounter : SDNode<"ISD::READCYCLECOUNTER", SDTIntLeaf,
+ [SDNPHasChain, SDNPSideEffect]>;
+
+def atomic_fence : SDNode<"ISD::ATOMIC_FENCE" , SDTAtomicFence,
+ [SDNPHasChain, SDNPSideEffect]>;
+
+def atomic_cmp_swap : SDNode<"ISD::ATOMIC_CMP_SWAP" , SDTAtomic3,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPMemOperand]>;
+def atomic_load_add : SDNode<"ISD::ATOMIC_LOAD_ADD" , SDTAtomic2,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPMemOperand]>;
+def atomic_swap : SDNode<"ISD::ATOMIC_SWAP", SDTAtomic2,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPMemOperand]>;
+def atomic_load_sub : SDNode<"ISD::ATOMIC_LOAD_SUB" , SDTAtomic2,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPMemOperand]>;
+def atomic_load_and : SDNode<"ISD::ATOMIC_LOAD_AND" , SDTAtomic2,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPMemOperand]>;
+def atomic_load_clr : SDNode<"ISD::ATOMIC_LOAD_CLR" , SDTAtomic2,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPMemOperand]>;
+def atomic_load_or : SDNode<"ISD::ATOMIC_LOAD_OR" , SDTAtomic2,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPMemOperand]>;
+def atomic_load_xor : SDNode<"ISD::ATOMIC_LOAD_XOR" , SDTAtomic2,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPMemOperand]>;
+def atomic_load_nand: SDNode<"ISD::ATOMIC_LOAD_NAND", SDTAtomic2,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPMemOperand]>;
+def atomic_load_min : SDNode<"ISD::ATOMIC_LOAD_MIN", SDTAtomic2,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPMemOperand]>;
+def atomic_load_max : SDNode<"ISD::ATOMIC_LOAD_MAX", SDTAtomic2,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPMemOperand]>;
+def atomic_load_umin : SDNode<"ISD::ATOMIC_LOAD_UMIN", SDTAtomic2,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPMemOperand]>;
+def atomic_load_umax : SDNode<"ISD::ATOMIC_LOAD_UMAX", SDTAtomic2,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPMemOperand]>;
+def atomic_load : SDNode<"ISD::ATOMIC_LOAD", SDTAtomicLoad,
+ [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;
+def atomic_store : SDNode<"ISD::ATOMIC_STORE", SDTAtomicStore,
+ [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
+
+def masked_store : SDNode<"ISD::MSTORE", SDTMaskedStore,
+ [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
+def masked_load : SDNode<"ISD::MLOAD", SDTMaskedLoad,
+ [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;
+def masked_scatter : SDNode<"ISD::MSCATTER", SDTMaskedScatter,
+ [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
+def masked_gather : SDNode<"ISD::MGATHER", SDTMaskedGather,
+ [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;
+
+// Do not use ld, st directly. Use load, extload, sextload, zextload, store,
+// and truncstore (see below).
+def ld : SDNode<"ISD::LOAD" , SDTLoad,
+ [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;
+def st : SDNode<"ISD::STORE" , SDTStore,
+ [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
+def ist : SDNode<"ISD::STORE" , SDTIStore,
+ [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
+
+def vector_shuffle : SDNode<"ISD::VECTOR_SHUFFLE", SDTVecShuffle, []>;
+def build_vector : SDNode<"ISD::BUILD_VECTOR", SDTypeProfile<1, -1, []>, []>;
+def scalar_to_vector : SDNode<"ISD::SCALAR_TO_VECTOR", SDTypeProfile<1, 1, []>,
+ []>;
+
+// vector_extract/vector_insert are deprecated. extractelt/insertelt
+// are preferred.
+def vector_extract : SDNode<"ISD::EXTRACT_VECTOR_ELT",
+ SDTypeProfile<1, 2, [SDTCisPtrTy<2>]>, []>;
+def vector_insert : SDNode<"ISD::INSERT_VECTOR_ELT",
+ SDTypeProfile<1, 3, [SDTCisSameAs<0, 1>, SDTCisPtrTy<3>]>, []>;
+def concat_vectors : SDNode<"ISD::CONCAT_VECTORS",
+ SDTypeProfile<1, 2, [SDTCisSubVecOfVec<1, 0>, SDTCisSameAs<1, 2>]>,[]>;
+
+// This operator does not do subvector type checking. The ARM
+// backend, at least, needs it.
+def vector_extract_subvec : SDNode<"ISD::EXTRACT_SUBVECTOR",
+ SDTypeProfile<1, 2, [SDTCisInt<2>, SDTCisVec<1>, SDTCisVec<0>]>,
+ []>;
+
+// This operator does subvector type checking.
+def extract_subvector : SDNode<"ISD::EXTRACT_SUBVECTOR", SDTSubVecExtract, []>;
+def insert_subvector : SDNode<"ISD::INSERT_SUBVECTOR", SDTSubVecInsert, []>;
+
+// Nodes for intrinsics. You should use the intrinsic itself and let tblgen use
+// these internally; don't reference these directly.
+def intrinsic_void : SDNode<"ISD::INTRINSIC_VOID",
+ SDTypeProfile<0, -1, [SDTCisPtrTy<0>]>,
+ [SDNPHasChain]>;
+def intrinsic_w_chain : SDNode<"ISD::INTRINSIC_W_CHAIN",
+ SDTypeProfile<1, -1, [SDTCisPtrTy<1>]>,
+ [SDNPHasChain]>;
+def intrinsic_wo_chain : SDNode<"ISD::INTRINSIC_WO_CHAIN",
+ SDTypeProfile<1, -1, [SDTCisPtrTy<1>]>, []>;
+
+def SDT_assertext : SDTypeProfile<1, 1,
+ [SDTCisInt<0>, SDTCisInt<1>, SDTCisSameAs<1, 0>]>;
+def assertsext : SDNode<"ISD::AssertSext", SDT_assertext>;
+def assertzext : SDNode<"ISD::AssertZext", SDT_assertext>;
+
+
+//===----------------------------------------------------------------------===//
+// Selection DAG Condition Codes
+
+class CondCode; // ISD::CondCode enums
+def SETOEQ : CondCode; def SETOGT : CondCode;
+def SETOGE : CondCode; def SETOLT : CondCode; def SETOLE : CondCode;
+def SETONE : CondCode; def SETO : CondCode; def SETUO : CondCode;
+def SETUEQ : CondCode; def SETUGT : CondCode; def SETUGE : CondCode;
+def SETULT : CondCode; def SETULE : CondCode; def SETUNE : CondCode;
+
+def SETEQ : CondCode; def SETGT : CondCode; def SETGE : CondCode;
+def SETLT : CondCode; def SETLE : CondCode; def SETNE : CondCode;
+
+
+//===----------------------------------------------------------------------===//
+// Selection DAG Node Transformation Functions.
+//
+// This mechanism allows targets to manipulate nodes in the output DAG once a
+// match has been formed. This is typically used to manipulate immediate
+// values.
+//
+class SDNodeXForm<SDNode opc, code xformFunction> {
+ SDNode Opcode = opc;
+ code XFormFunction = xformFunction;
+}
+
+def NOOP_SDNodeXForm : SDNodeXForm<imm, [{}]>;
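+
+// For example (illustrative sketch only; not a definition used elsewhere in
+// this file), a transform that rewrites a matched immediate into its negation
+// for use in the output pattern:
+//
+//   def HypoNegImm : SDNodeXForm<imm, [{
+//     return CurDAG->getTargetConstant(-N->getSExtValue(), SDLoc(N),
+//                                      N->getValueType(0));
+//   }]>;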
+
+//===----------------------------------------------------------------------===//
+// PatPred Subclasses.
+//
+// These allow specifying different sorts of predicates that control whether a
+// node is matched.
+//
+class PatPred;
+
+class CodePatPred<code predicate> : PatPred {
+ code PredicateCode = predicate;
+}
+
+
+//===----------------------------------------------------------------------===//
+// Selection DAG Pattern Fragments.
+//
+// Pattern fragments are reusable chunks of dags that match specific things.
+// They can take arguments and have C++ predicates that control whether they
+// match. They are intended to make the patterns for common instructions more
+// compact and readable.
+//
+
+/// PatFrags - Represents a set of pattern fragments. Each single fragment
+/// can match something on the DAG, from a single node to multiple nested other
+/// fragments. The whole set of fragments matches if any of the single
+/// fragments match. This allows e.g. matching an "add with overflow" and
+/// a regular "add" with the same fragment set.
+///
+class PatFrags<dag ops, list<dag> frags, code pred = [{}],
+ SDNodeXForm xform = NOOP_SDNodeXForm> : SDPatternOperator {
+ dag Operands = ops;
+ list<dag> Fragments = frags;
+ code PredicateCode = pred;
+ code GISelPredicateCode = [{}];
+ code ImmediateCode = [{}];
+ SDNodeXForm OperandTransform = xform;
+
+ // Define a few pre-packaged predicates. This helps GlobalISel import
+ // existing rules from SelectionDAG for many common cases.
+ // They will be tested prior to the code in pred and must not be used in
+ // ImmLeaf and its subclasses.
+
+ // Is the desired pre-packaged predicate for a load?
+ bit IsLoad = ?;
+ // Is the desired pre-packaged predicate for a store?
+ bit IsStore = ?;
+ // Is the desired pre-packaged predicate for an atomic?
+ bit IsAtomic = ?;
+
+ // cast<LoadSDNode>(N)->getAddressingMode() == ISD::UNINDEXED;
+ // cast<StoreSDNode>(N)->getAddressingMode() == ISD::UNINDEXED;
+ bit IsUnindexed = ?;
+
+ // cast<LoadSDNode>(N)->getExtensionType() != ISD::NON_EXTLOAD
+ bit IsNonExtLoad = ?;
+ // cast<LoadSDNode>(N)->getExtensionType() == ISD::EXTLOAD;
+ bit IsAnyExtLoad = ?;
+ // cast<LoadSDNode>(N)->getExtensionType() == ISD::SEXTLOAD;
+ bit IsSignExtLoad = ?;
+ // cast<LoadSDNode>(N)->getExtensionType() == ISD::ZEXTLOAD;
+ bit IsZeroExtLoad = ?;
+ // !cast<StoreSDNode>(N)->isTruncatingStore();
+ // cast<StoreSDNode>(N)->isTruncatingStore();
+ bit IsTruncStore = ?;
+
+ // cast<AtomicSDNode>(N)->getOrdering() == AtomicOrdering::Monotonic
+ bit IsAtomicOrderingMonotonic = ?;
+ // cast<AtomicSDNode>(N)->getOrdering() == AtomicOrdering::Acquire
+ bit IsAtomicOrderingAcquire = ?;
+ // cast<AtomicSDNode>(N)->getOrdering() == AtomicOrdering::Release
+ bit IsAtomicOrderingRelease = ?;
+ // cast<AtomicSDNode>(N)->getOrdering() == AtomicOrdering::AcquireRelease
+ bit IsAtomicOrderingAcquireRelease = ?;
+ // cast<AtomicSDNode>(N)->getOrdering() == AtomicOrdering::SequentiallyConsistent
+ bit IsAtomicOrderingSequentiallyConsistent = ?;
+
+ // isAcquireOrStronger(cast<AtomicSDNode>(N)->getOrdering())
+ // !isAcquireOrStronger(cast<AtomicSDNode>(N)->getOrdering())
+ bit IsAtomicOrderingAcquireOrStronger = ?;
+
+ // isReleaseOrStronger(cast<AtomicSDNode>(N)->getOrdering())
+ // !isReleaseOrStronger(cast<AtomicSDNode>(N)->getOrdering())
+ bit IsAtomicOrderingReleaseOrStronger = ?;
+
+ // cast<LoadSDNode>(N)->getMemoryVT() == MVT::<VT>;
+ // cast<StoreSDNode>(N)->getMemoryVT() == MVT::<VT>;
+ ValueType MemoryVT = ?;
+ // cast<LoadSDNode>(N)->getMemoryVT().getScalarType() == MVT::<VT>;
+ // cast<StoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::<VT>;
+ ValueType ScalarMemoryVT = ?;
+}
+
+// PatFrag - A version of PatFrags matching only a single fragment.
+class PatFrag<dag ops, dag frag, code pred = [{}],
+ SDNodeXForm xform = NOOP_SDNodeXForm>
+ : PatFrags<ops, [frag], pred, xform>;
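+
+// For example (illustrative sketch only; not a definition used elsewhere in
+// this file), a fragment matching an add of a small signed constant:
+//
+//   def hypo_add_imm8 : PatFrag<(ops node:$lhs, node:$rhs),
+//                               (add node:$lhs, node:$rhs), [{
+//     auto *C = dyn_cast<ConstantSDNode>(N->getOperand(1).getNode());
+//     return C && isInt<8>(C->getSExtValue());
+//   }]>;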
+
+// OutPatFrag is a pattern fragment that is used as part of an output pattern
+// (not an input pattern). These do not have predicates or transforms, but are
+// used to avoid repeated subexpressions in output patterns.
+class OutPatFrag<dag ops, dag frag>
+ : PatFrag<ops, frag, [{}], NOOP_SDNodeXForm>;
+
+// PatLeaf's are pattern fragments that have no operands. This is just a helper
+// to define immediates and other common things concisely.
+class PatLeaf<dag frag, code pred = [{}], SDNodeXForm xform = NOOP_SDNodeXForm>
+ : PatFrag<(ops), frag, pred, xform>;
+
+
+// ImmLeaf is a pattern fragment with a constraint on the immediate. The
+// constraint is a function that is run on the immediate (always with the value
+// sign extended out to an int64_t) as Imm. For example:
+//
+// def immSExt8 : ImmLeaf<i16, [{ return (char)Imm == Imm; }]>;
+//
+// this is a more convenient form to match 'imm' nodes in than PatLeaf and also
+// is preferred over using PatLeaf because it allows the code generator to
+// reason more about the constraint.
+//
+// If FastIsel should ignore all instructions that have an operand of this type,
+// the FastIselShouldIgnore flag can be set. This is an optimization to reduce
+// the code size of the generated fast instruction selector.
+class ImmLeaf<ValueType vt, code pred, SDNodeXForm xform = NOOP_SDNodeXForm,
+ SDNode ImmNode = imm>
+ : PatFrag<(ops), (vt ImmNode), [{}], xform> {
+ let ImmediateCode = pred;
+ bit FastIselShouldIgnore = 0;
+
+ // Is the data type of the immediate an APInt?
+ bit IsAPInt = 0;
+
+ // Is the data type of the immediate an APFloat?
+ bit IsAPFloat = 0;
+}
+
+// An ImmLeaf except that Imm is an APInt. This is useful when you need to
+// zero-extend the immediate instead of sign-extend it.
+//
+// Note that FastISel does not currently understand IntImmLeaf and will not
+// generate code for rules that make use of it. As such, it does not make sense
+// to replace ImmLeaf with IntImmLeaf. However, replacing PatLeaf with an
+// IntImmLeaf will allow GlobalISel to import the rule.
+class IntImmLeaf<ValueType vt, code pred, SDNodeXForm xform = NOOP_SDNodeXForm>
+ : ImmLeaf<vt, pred, xform> {
+ let IsAPInt = 1;
+ let FastIselShouldIgnore = 1;
+}
+
+// An ImmLeaf except that Imm is an APFloat.
+//
+// Note that FastISel does not currently understand FPImmLeaf and will not
+// generate code for rules that make use of it.
+class FPImmLeaf<ValueType vt, code pred, SDNodeXForm xform = NOOP_SDNodeXForm>
+ : ImmLeaf<vt, pred, xform, fpimm> {
+ let IsAPFloat = 1;
+ let FastIselShouldIgnore = 1;
+}
+
+// Leaf fragments.
+
+def vtInt : PatLeaf<(vt), [{ return N->getVT().isInteger(); }]>;
+def vtFP : PatLeaf<(vt), [{ return N->getVT().isFloatingPoint(); }]>;
+
+def immAllOnesV: PatLeaf<(build_vector), [{
+ return ISD::isBuildVectorAllOnes(N);
+}]>;
+def immAllZerosV: PatLeaf<(build_vector), [{
+ return ISD::isBuildVectorAllZeros(N);
+}]>;
+
+
+
+// Other helper fragments.
+def not : PatFrag<(ops node:$in), (xor node:$in, -1)>;
+def vnot : PatFrag<(ops node:$in), (xor node:$in, immAllOnesV)>;
+def ineg : PatFrag<(ops node:$in), (sub 0, node:$in)>;
+
+// null_frag - The null pattern operator is used in multiclass instantiations
+// which accept an SDPatternOperator for use in matching patterns for internal
+// definitions. When expanding a pattern, if the null fragment is referenced
+// in the expansion, the pattern is discarded and it is as if '[]' had been
+// specified. This allows multiclasses to have the isel patterns be optional.
+def null_frag : SDPatternOperator;
+
+// load fragments.
+def unindexedload : PatFrag<(ops node:$ptr), (ld node:$ptr)> {
+ let IsLoad = 1;
+ let IsUnindexed = 1;
+}
+def load : PatFrag<(ops node:$ptr), (unindexedload node:$ptr)> {
+ let IsLoad = 1;
+ let IsNonExtLoad = 1;
+}
+
+// extending load fragments.
+def extload : PatFrag<(ops node:$ptr), (unindexedload node:$ptr)> {
+ let IsLoad = 1;
+ let IsAnyExtLoad = 1;
+}
+def sextload : PatFrag<(ops node:$ptr), (unindexedload node:$ptr)> {
+ let IsLoad = 1;
+ let IsSignExtLoad = 1;
+}
+def zextload : PatFrag<(ops node:$ptr), (unindexedload node:$ptr)> {
+ let IsLoad = 1;
+ let IsZeroExtLoad = 1;
+}
+
+def extloadi1 : PatFrag<(ops node:$ptr), (extload node:$ptr)> {
+ let IsLoad = 1;
+ let MemoryVT = i1;
+}
+def extloadi8 : PatFrag<(ops node:$ptr), (extload node:$ptr)> {
+ let IsLoad = 1;
+ let MemoryVT = i8;
+}
+def extloadi16 : PatFrag<(ops node:$ptr), (extload node:$ptr)> {
+ let IsLoad = 1;
+ let MemoryVT = i16;
+}
+def extloadi32 : PatFrag<(ops node:$ptr), (extload node:$ptr)> {
+ let IsLoad = 1;
+ let MemoryVT = i32;
+}
+def extloadf32 : PatFrag<(ops node:$ptr), (extload node:$ptr)> {
+ let IsLoad = 1;
+ let MemoryVT = f32;
+}
+def extloadf64 : PatFrag<(ops node:$ptr), (extload node:$ptr)> {
+ let IsLoad = 1;
+ let MemoryVT = f64;
+}
+
+def sextloadi1 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> {
+ let IsLoad = 1;
+ let MemoryVT = i1;
+}
+def sextloadi8 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> {
+ let IsLoad = 1;
+ let MemoryVT = i8;
+}
+def sextloadi16 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> {
+ let IsLoad = 1;
+ let MemoryVT = i16;
+}
+def sextloadi32 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> {
+ let IsLoad = 1;
+ let MemoryVT = i32;
+}
+
+def zextloadi1 : PatFrag<(ops node:$ptr), (zextload node:$ptr)> {
+ let IsLoad = 1;
+ let MemoryVT = i1;
+}
+def zextloadi8 : PatFrag<(ops node:$ptr), (zextload node:$ptr)> {
+ let IsLoad = 1;
+ let MemoryVT = i8;
+}
+def zextloadi16 : PatFrag<(ops node:$ptr), (zextload node:$ptr)> {
+ let IsLoad = 1;
+ let MemoryVT = i16;
+}
+def zextloadi32 : PatFrag<(ops node:$ptr), (zextload node:$ptr)> {
+ let IsLoad = 1;
+ let MemoryVT = i32;
+}
+
+def extloadvi1 : PatFrag<(ops node:$ptr), (extload node:$ptr)> {
+ let IsLoad = 1;
+ let ScalarMemoryVT = i1;
+}
+def extloadvi8 : PatFrag<(ops node:$ptr), (extload node:$ptr)> {
+ let IsLoad = 1;
+ let ScalarMemoryVT = i8;
+}
+def extloadvi16 : PatFrag<(ops node:$ptr), (extload node:$ptr)> {
+ let IsLoad = 1;
+ let ScalarMemoryVT = i16;
+}
+def extloadvi32 : PatFrag<(ops node:$ptr), (extload node:$ptr)> {
+ let IsLoad = 1;
+ let ScalarMemoryVT = i32;
+}
+def extloadvf32 : PatFrag<(ops node:$ptr), (extload node:$ptr)> {
+ let IsLoad = 1;
+ let ScalarMemoryVT = f32;
+}
+def extloadvf64 : PatFrag<(ops node:$ptr), (extload node:$ptr)> {
+ let IsLoad = 1;
+ let ScalarMemoryVT = f64;
+}
+
+def sextloadvi1 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> {
+ let IsLoad = 1;
+ let ScalarMemoryVT = i1;
+}
+def sextloadvi8 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> {
+ let IsLoad = 1;
+ let ScalarMemoryVT = i8;
+}
+def sextloadvi16 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> {
+ let IsLoad = 1;
+ let ScalarMemoryVT = i16;
+}
+def sextloadvi32 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> {
+ let IsLoad = 1;
+ let ScalarMemoryVT = i32;
+}
+
+def zextloadvi1 : PatFrag<(ops node:$ptr), (zextload node:$ptr)> {
+ let IsLoad = 1;
+ let ScalarMemoryVT = i1;
+}
+def zextloadvi8 : PatFrag<(ops node:$ptr), (zextload node:$ptr)> {
+ let IsLoad = 1;
+ let ScalarMemoryVT = i8;
+}
+def zextloadvi16 : PatFrag<(ops node:$ptr), (zextload node:$ptr)> {
+ let IsLoad = 1;
+ let ScalarMemoryVT = i16;
+}
+def zextloadvi32 : PatFrag<(ops node:$ptr), (zextload node:$ptr)> {
+ let IsLoad = 1;
+ let ScalarMemoryVT = i32;
+}
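+
+// For illustration (the addressing-mode, register class, and instruction
+// names below are hypothetical), targets use these fragments in isel patterns
+// to select a memory form whose extension kind and memory type match, e.g.:
+//
+//   def : Pat<(i32 (zextloadi8 addrmode:$addr)), (LDRBzx addrmode:$addr)>;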
+
+// store fragments.
+def unindexedstore : PatFrag<(ops node:$val, node:$ptr),
+ (st node:$val, node:$ptr)> {
+ let IsStore = 1;
+ let IsUnindexed = 1;
+}
+def store : PatFrag<(ops node:$val, node:$ptr),
+ (unindexedstore node:$val, node:$ptr)> {
+ let IsStore = 1;
+ let IsTruncStore = 0;
+}
+
+// truncstore fragments.
+def truncstore : PatFrag<(ops node:$val, node:$ptr),
+ (unindexedstore node:$val, node:$ptr)> {
+ let IsStore = 1;
+ let IsTruncStore = 1;
+}
+def truncstorei8 : PatFrag<(ops node:$val, node:$ptr),
+ (truncstore node:$val, node:$ptr)> {
+ let IsStore = 1;
+ let MemoryVT = i8;
+}
+def truncstorei16 : PatFrag<(ops node:$val, node:$ptr),
+ (truncstore node:$val, node:$ptr)> {
+ let IsStore = 1;
+ let MemoryVT = i16;
+}
+def truncstorei32 : PatFrag<(ops node:$val, node:$ptr),
+ (truncstore node:$val, node:$ptr)> {
+ let IsStore = 1;
+ let MemoryVT = i32;
+}
+def truncstoref32 : PatFrag<(ops node:$val, node:$ptr),
+ (truncstore node:$val, node:$ptr)> {
+ let IsStore = 1;
+ let MemoryVT = f32;
+}
+def truncstoref64 : PatFrag<(ops node:$val, node:$ptr),
+ (truncstore node:$val, node:$ptr)> {
+ let IsStore = 1;
+ let MemoryVT = f64;
+}
+
+def truncstorevi8 : PatFrag<(ops node:$val, node:$ptr),
+ (truncstore node:$val, node:$ptr)> {
+ let IsStore = 1;
+ let ScalarMemoryVT = i8;
+}
+
+def truncstorevi16 : PatFrag<(ops node:$val, node:$ptr),
+ (truncstore node:$val, node:$ptr)> {
+ let IsStore = 1;
+ let ScalarMemoryVT = i16;
+}
+
+def truncstorevi32 : PatFrag<(ops node:$val, node:$ptr),
+ (truncstore node:$val, node:$ptr)> {
+ let IsStore = 1;
+ let ScalarMemoryVT = i32;
+}
+
+// indexed store fragments.
+def istore : PatFrag<(ops node:$val, node:$base, node:$offset),
+ (ist node:$val, node:$base, node:$offset)> {
+ let IsStore = 1;
+ let IsTruncStore = 0;
+}
+
+def pre_store : PatFrag<(ops node:$val, node:$base, node:$offset),
+ (istore node:$val, node:$base, node:$offset), [{
+ ISD::MemIndexedMode AM = cast<StoreSDNode>(N)->getAddressingMode();
+ return AM == ISD::PRE_INC || AM == ISD::PRE_DEC;
+}]>;
+
+def itruncstore : PatFrag<(ops node:$val, node:$base, node:$offset),
+ (ist node:$val, node:$base, node:$offset)> {
+ let IsStore = 1;
+ let IsTruncStore = 1;
+}
+def pre_truncst : PatFrag<(ops node:$val, node:$base, node:$offset),
+ (itruncstore node:$val, node:$base, node:$offset), [{
+ ISD::MemIndexedMode AM = cast<StoreSDNode>(N)->getAddressingMode();
+ return AM == ISD::PRE_INC || AM == ISD::PRE_DEC;
+}]>;
+def pre_truncsti1 : PatFrag<(ops node:$val, node:$base, node:$offset),
+ (pre_truncst node:$val, node:$base, node:$offset)> {
+ let IsStore = 1;
+ let MemoryVT = i1;
+}
+def pre_truncsti8 : PatFrag<(ops node:$val, node:$base, node:$offset),
+ (pre_truncst node:$val, node:$base, node:$offset)> {
+ let IsStore = 1;
+ let MemoryVT = i8;
+}
+def pre_truncsti16 : PatFrag<(ops node:$val, node:$base, node:$offset),
+ (pre_truncst node:$val, node:$base, node:$offset)> {
+ let IsStore = 1;
+ let MemoryVT = i16;
+}
+def pre_truncsti32 : PatFrag<(ops node:$val, node:$base, node:$offset),
+ (pre_truncst node:$val, node:$base, node:$offset)> {
+ let IsStore = 1;
+ let MemoryVT = i32;
+}
+def pre_truncstf32 : PatFrag<(ops node:$val, node:$base, node:$offset),
+ (pre_truncst node:$val, node:$base, node:$offset)> {
+ let IsStore = 1;
+ let MemoryVT = f32;
+}
+
+def post_store : PatFrag<(ops node:$val, node:$ptr, node:$offset),
+ (istore node:$val, node:$ptr, node:$offset), [{
+ ISD::MemIndexedMode AM = cast<StoreSDNode>(N)->getAddressingMode();
+ return AM == ISD::POST_INC || AM == ISD::POST_DEC;
+}]>;
+
+def post_truncst : PatFrag<(ops node:$val, node:$base, node:$offset),
+ (itruncstore node:$val, node:$base, node:$offset), [{
+ ISD::MemIndexedMode AM = cast<StoreSDNode>(N)->getAddressingMode();
+ return AM == ISD::POST_INC || AM == ISD::POST_DEC;
+}]>;
+def post_truncsti1 : PatFrag<(ops node:$val, node:$base, node:$offset),
+ (post_truncst node:$val, node:$base, node:$offset)> {
+ let IsStore = 1;
+ let MemoryVT = i1;
+}
+def post_truncsti8 : PatFrag<(ops node:$val, node:$base, node:$offset),
+ (post_truncst node:$val, node:$base, node:$offset)> {
+ let IsStore = 1;
+ let MemoryVT = i8;
+}
+def post_truncsti16 : PatFrag<(ops node:$val, node:$base, node:$offset),
+ (post_truncst node:$val, node:$base, node:$offset)> {
+ let IsStore = 1;
+ let MemoryVT = i16;
+}
+def post_truncsti32 : PatFrag<(ops node:$val, node:$base, node:$offset),
+ (post_truncst node:$val, node:$base, node:$offset)> {
+ let IsStore = 1;
+ let MemoryVT = i32;
+}
+def post_truncstf32 : PatFrag<(ops node:$val, node:$base, node:$offset),
+ (post_truncst node:$val, node:$base, node:$offset)> {
+ let IsStore = 1;
+ let MemoryVT = f32;
+}
+
+// nontemporal store fragments.
+def nontemporalstore : PatFrag<(ops node:$val, node:$ptr),
+ (store node:$val, node:$ptr), [{
+ return cast<StoreSDNode>(N)->isNonTemporal();
+}]>;
+
+def alignednontemporalstore : PatFrag<(ops node:$val, node:$ptr),
+ (nontemporalstore node:$val, node:$ptr), [{
+ StoreSDNode *St = cast<StoreSDNode>(N);
+ return St->getAlignment() >= St->getMemoryVT().getStoreSize();
+}]>;
+
+def unalignednontemporalstore : PatFrag<(ops node:$val, node:$ptr),
+ (nontemporalstore node:$val, node:$ptr), [{
+ StoreSDNode *St = cast<StoreSDNode>(N);
+ return St->getAlignment() < St->getMemoryVT().getStoreSize();
+}]>;
+
+// nontemporal load fragments.
+def nontemporalload : PatFrag<(ops node:$ptr),
+ (load node:$ptr), [{
+ return cast<LoadSDNode>(N)->isNonTemporal();
+}]>;
+
+def alignednontemporalload : PatFrag<(ops node:$ptr),
+ (nontemporalload node:$ptr), [{
+ LoadSDNode *Ld = cast<LoadSDNode>(N);
+ return Ld->getAlignment() >= Ld->getMemoryVT().getStoreSize();
+}]>;
+
+// setcc convenience fragments.
+def setoeq : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETOEQ)>;
+def setogt : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETOGT)>;
+def setoge : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETOGE)>;
+def setolt : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETOLT)>;
+def setole : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETOLE)>;
+def setone : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETONE)>;
+def seto : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETO)>;
+def setuo : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETUO)>;
+def setueq : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETUEQ)>;
+def setugt : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETUGT)>;
+def setuge : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETUGE)>;
+def setult : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETULT)>;
+def setule : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETULE)>;
+def setune : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETUNE)>;
+def seteq : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETEQ)>;
+def setgt : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETGT)>;
+def setge : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETGE)>;
+def setlt : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETLT)>;
+def setle : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETLE)>;
+def setne : PatFrag<(ops node:$lhs, node:$rhs),
+ (setcc node:$lhs, node:$rhs, SETNE)>;
+
+multiclass binary_atomic_op_ord<SDNode atomic_op> {
+ def #NAME#_monotonic : PatFrag<(ops node:$ptr, node:$val),
+ (!cast<SDPatternOperator>(#NAME) node:$ptr, node:$val)> {
+ let IsAtomic = 1;
+ let IsAtomicOrderingMonotonic = 1;
+ }
+ def #NAME#_acquire : PatFrag<(ops node:$ptr, node:$val),
+ (!cast<SDPatternOperator>(#NAME) node:$ptr, node:$val)> {
+ let IsAtomic = 1;
+ let IsAtomicOrderingAcquire = 1;
+ }
+ def #NAME#_release : PatFrag<(ops node:$ptr, node:$val),
+ (!cast<SDPatternOperator>(#NAME) node:$ptr, node:$val)> {
+ let IsAtomic = 1;
+ let IsAtomicOrderingRelease = 1;
+ }
+ def #NAME#_acq_rel : PatFrag<(ops node:$ptr, node:$val),
+ (!cast<SDPatternOperator>(#NAME) node:$ptr, node:$val)> {
+ let IsAtomic = 1;
+ let IsAtomicOrderingAcquireRelease = 1;
+ }
+ def #NAME#_seq_cst : PatFrag<(ops node:$ptr, node:$val),
+ (!cast<SDPatternOperator>(#NAME) node:$ptr, node:$val)> {
+ let IsAtomic = 1;
+ let IsAtomicOrderingSequentiallyConsistent = 1;
+ }
+}
+
+multiclass ternary_atomic_op_ord<SDNode atomic_op> {
+ def #NAME#_monotonic : PatFrag<(ops node:$ptr, node:$cmp, node:$val),
+ (!cast<SDPatternOperator>(#NAME) node:$ptr, node:$cmp, node:$val)> {
+ let IsAtomic = 1;
+ let IsAtomicOrderingMonotonic = 1;
+ }
+ def #NAME#_acquire : PatFrag<(ops node:$ptr, node:$cmp, node:$val),
+ (!cast<SDPatternOperator>(#NAME) node:$ptr, node:$cmp, node:$val)> {
+ let IsAtomic = 1;
+ let IsAtomicOrderingAcquire = 1;
+ }
+ def #NAME#_release : PatFrag<(ops node:$ptr, node:$cmp, node:$val),
+ (!cast<SDPatternOperator>(#NAME) node:$ptr, node:$cmp, node:$val)> {
+ let IsAtomic = 1;
+ let IsAtomicOrderingRelease = 1;
+ }
+ def #NAME#_acq_rel : PatFrag<(ops node:$ptr, node:$cmp, node:$val),
+ (!cast<SDPatternOperator>(#NAME) node:$ptr, node:$cmp, node:$val)> {
+ let IsAtomic = 1;
+ let IsAtomicOrderingAcquireRelease = 1;
+ }
+ def #NAME#_seq_cst : PatFrag<(ops node:$ptr, node:$cmp, node:$val),
+ (!cast<SDPatternOperator>(#NAME) node:$ptr, node:$cmp, node:$val)> {
+ let IsAtomic = 1;
+ let IsAtomicOrderingSequentiallyConsistent = 1;
+ }
+}
+
+multiclass binary_atomic_op<SDNode atomic_op> {
+ def _8 : PatFrag<(ops node:$ptr, node:$val),
+ (atomic_op node:$ptr, node:$val)> {
+ let IsAtomic = 1;
+ let MemoryVT = i8;
+ }
+ def _16 : PatFrag<(ops node:$ptr, node:$val),
+ (atomic_op node:$ptr, node:$val)> {
+ let IsAtomic = 1;
+ let MemoryVT = i16;
+ }
+ def _32 : PatFrag<(ops node:$ptr, node:$val),
+ (atomic_op node:$ptr, node:$val)> {
+ let IsAtomic = 1;
+ let MemoryVT = i32;
+ }
+ def _64 : PatFrag<(ops node:$ptr, node:$val),
+ (atomic_op node:$ptr, node:$val)> {
+ let IsAtomic = 1;
+ let MemoryVT = i64;
+ }
+
+ defm NAME#_8 : binary_atomic_op_ord<atomic_op>;
+ defm NAME#_16 : binary_atomic_op_ord<atomic_op>;
+ defm NAME#_32 : binary_atomic_op_ord<atomic_op>;
+ defm NAME#_64 : binary_atomic_op_ord<atomic_op>;
+}
+
+multiclass ternary_atomic_op<SDNode atomic_op> {
+ def _8 : PatFrag<(ops node:$ptr, node:$cmp, node:$val),
+ (atomic_op node:$ptr, node:$cmp, node:$val)> {
+ let IsAtomic = 1;
+ let MemoryVT = i8;
+ }
+ def _16 : PatFrag<(ops node:$ptr, node:$cmp, node:$val),
+ (atomic_op node:$ptr, node:$cmp, node:$val)> {
+ let IsAtomic = 1;
+ let MemoryVT = i16;
+ }
+ def _32 : PatFrag<(ops node:$ptr, node:$cmp, node:$val),
+ (atomic_op node:$ptr, node:$cmp, node:$val)> {
+ let IsAtomic = 1;
+ let MemoryVT = i32;
+ }
+ def _64 : PatFrag<(ops node:$ptr, node:$cmp, node:$val),
+ (atomic_op node:$ptr, node:$cmp, node:$val)> {
+ let IsAtomic = 1;
+ let MemoryVT = i64;
+ }
+
+ defm NAME#_8 : ternary_atomic_op_ord<atomic_op>;
+ defm NAME#_16 : ternary_atomic_op_ord<atomic_op>;
+ defm NAME#_32 : ternary_atomic_op_ord<atomic_op>;
+ defm NAME#_64 : ternary_atomic_op_ord<atomic_op>;
+}
+
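+// Each defm below expands, via the multiclasses above, into size-suffixed
+// fragments (e.g. atomic_load_add_8 through atomic_load_add_64) and, for each
+// size, ordering-specific variants such as atomic_load_add_32_acquire and
+// atomic_load_add_32_seq_cst.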
+defm atomic_load_add : binary_atomic_op<atomic_load_add>;
+defm atomic_swap : binary_atomic_op<atomic_swap>;
+defm atomic_load_sub : binary_atomic_op<atomic_load_sub>;
+defm atomic_load_and : binary_atomic_op<atomic_load_and>;
+defm atomic_load_clr : binary_atomic_op<atomic_load_clr>;
+defm atomic_load_or : binary_atomic_op<atomic_load_or>;
+defm atomic_load_xor : binary_atomic_op<atomic_load_xor>;
+defm atomic_load_nand : binary_atomic_op<atomic_load_nand>;
+defm atomic_load_min : binary_atomic_op<atomic_load_min>;
+defm atomic_load_max : binary_atomic_op<atomic_load_max>;
+defm atomic_load_umin : binary_atomic_op<atomic_load_umin>;
+defm atomic_load_umax : binary_atomic_op<atomic_load_umax>;
+defm atomic_store : binary_atomic_op<atomic_store>;
+defm atomic_cmp_swap : ternary_atomic_op<atomic_cmp_swap>;
+
+def atomic_load_8 :
+ PatFrag<(ops node:$ptr),
+ (atomic_load node:$ptr)> {
+ let IsAtomic = 1;
+ let MemoryVT = i8;
+}
+def atomic_load_16 :
+ PatFrag<(ops node:$ptr),
+ (atomic_load node:$ptr)> {
+ let IsAtomic = 1;
+ let MemoryVT = i16;
+}
+def atomic_load_32 :
+ PatFrag<(ops node:$ptr),
+ (atomic_load node:$ptr)> {
+ let IsAtomic = 1;
+ let MemoryVT = i32;
+}
+def atomic_load_64 :
+ PatFrag<(ops node:$ptr),
+ (atomic_load node:$ptr)> {
+ let IsAtomic = 1;
+ let MemoryVT = i64;
+}
+
+//===----------------------------------------------------------------------===//
+// Selection DAG Pattern Support.
+//
+// Patterns are what is actually matched against the target-flavored
+// instruction selection DAG. Instructions defined by the target implicitly
+// define patterns in most cases, but patterns can also be added explicitly
+// when an operation is implemented by a sequence of instructions (e.g. loading
+// a large immediate value on RISC targets that do not support immediates as
+// large as their GPRs).
+//
+
+class Pattern<dag patternToMatch, list<dag> resultInstrs> {
+ dag PatternToMatch = patternToMatch;
+ list<dag> ResultInstrs = resultInstrs;
+ list<Predicate> Predicates = []; // See class Instruction in Target.td.
+ int AddedComplexity = 0; // See class Instruction in Target.td.
+}
+
+// Pat - A simple (but common) form of a pattern, which produces a simple result
+// not needing a full list.
+class Pat<dag pattern, dag result> : Pattern<pattern, [result]>;
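+//
+// For example (hypothetical register class and instruction name):
+//
+//   def : Pat<(ineg GPR:$src), (NEGr GPR:$src)>;
+//
+// matches the 'ineg' fragment defined earlier and selects it to a single
+// target instruction.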
+
+//===----------------------------------------------------------------------===//
+// Complex pattern definitions.
+//
+
+// Complex patterns, e.g. the X86 addressing mode, require pattern matching
+// code in C++. NumOperands is the number of operands returned by the select
+// function; SelectFunc is the name of the C++ function used to match the
+// maximal pattern; RootNodes are the list of possible root nodes of the
+// sub-dags to match, e.g. X86 addressing mode:
+//   def addr : ComplexPattern<iPTR, 4, "SelectAddr", [add]>;
+//
+class ComplexPattern<ValueType ty, int numops, string fn,
+ list<SDNode> roots = [], list<SDNodeProperty> props = [],
+ int complexity = -1> {
+ ValueType Ty = ty;
+ int NumOperands = numops;
+ string SelectFunc = fn;
+ list<SDNode> RootNodes = roots;
+ list<SDNodeProperty> Properties = props;
+ int Complexity = complexity;
+}
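+
+// For illustration (hypothetical select function, instruction, and operand
+// names), a complex pattern is used as an operand in an isel pattern; the
+// named C++ function decomposes the matched address sub-dag into the
+// NumOperands results:
+//
+//   def regimm : ComplexPattern<iPTR, 2, "SelectAddrRegImm", [add]>;
+//   def : Pat<(i32 (load regimm:$addr)), (LDRWri regimm:$addr)>;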