// contrib/llvm-project/llvm/include/llvm/IR/IntrinsicsAArch64.td
1 //===- IntrinsicsAARCH64.td - Defines AARCH64 intrinsics ---*- tablegen -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file defines all of the AARCH64-specific intrinsics.
10 //
11 //===----------------------------------------------------------------------===//
12
let TargetPrefix = "aarch64" in {

// Exclusive load/store intrinsics (LDXR/LDAXR/STXR/STLXR family). The data
// operand/result is modelled as i64 regardless of the actual access width;
// the pointer operand may be in any address space (llvm_anyptr_ty).
def int_aarch64_ldxr : Intrinsic<[llvm_i64_ty], [llvm_anyptr_ty],
                                 [IntrNoFree, IntrWillReturn]>;
def int_aarch64_ldaxr : Intrinsic<[llvm_i64_ty], [llvm_anyptr_ty],
                                  [IntrNoFree, IntrWillReturn]>;
// The i32 result is the store-exclusive status flag (per the A64 STXR/STLXR
// instruction definitions).
def int_aarch64_stxr : Intrinsic<[llvm_i32_ty], [llvm_i64_ty, llvm_anyptr_ty],
                                 [IntrNoFree, IntrWillReturn]>;
def int_aarch64_stlxr : Intrinsic<[llvm_i32_ty], [llvm_i64_ty, llvm_anyptr_ty],
                                  [IntrNoFree, IntrWillReturn]>;

// Paired exclusive load/store (LDXP/LDAXP/STXP/STLXP): two i64 halves; note
// these take a plain llvm_ptr_ty rather than an any-address-space pointer.
def int_aarch64_ldxp : Intrinsic<[llvm_i64_ty, llvm_i64_ty], [llvm_ptr_ty],
                                 [IntrNoFree, IntrWillReturn]>;
def int_aarch64_ldaxp : Intrinsic<[llvm_i64_ty, llvm_i64_ty], [llvm_ptr_ty],
                                  [IntrNoFree, IntrWillReturn]>;
def int_aarch64_stxp : Intrinsic<[llvm_i32_ty],
                               [llvm_i64_ty, llvm_i64_ty, llvm_ptr_ty],
                               [IntrNoFree, IntrWillReturn]>;
def int_aarch64_stlxp : Intrinsic<[llvm_i32_ty],
                                  [llvm_i64_ty, llvm_i64_ty, llvm_ptr_ty],
                                  [IntrNoFree, IntrWillReturn]>;

// Clear the local exclusive monitor; takes no operands and returns nothing.
def int_aarch64_clrex : Intrinsic<[]>;

// Scalar signed/unsigned divide on any integer type; both operands must match
// the result type.
def int_aarch64_sdiv : DefaultAttrsIntrinsic<[llvm_anyint_ty], [LLVMMatchType<0>,
                                LLVMMatchType<0>], [IntrNoMem]>;
def int_aarch64_udiv : DefaultAttrsIntrinsic<[llvm_anyint_ty], [LLVMMatchType<0>,
                                LLVMMatchType<0>], [IntrNoMem]>;

// double -> i32 conversion (FJCVTZS, presumably the v8.3-A JavaScript
// conversion instruction — name-derived, confirm against the ARM ARM).
def int_aarch64_fjcvtzs : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_double_ty], [IntrNoMem]>;

// Count leading sign bits; the 64-bit variant still produces an i32 count.
def int_aarch64_cls: DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrNoMem]>;
def int_aarch64_cls64: DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem]>;

// v8.5-A scalar FP rounding to 32/64-bit integral values ("z" = toward zero,
// "x" = using the current rounding mode, per instruction naming).
def int_aarch64_frint32z
    : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
                            [ IntrNoMem ]>;
def int_aarch64_frint64z
    : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
                            [ IntrNoMem ]>;
def int_aarch64_frint32x
    : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
                            [ IntrNoMem ]>;
def int_aarch64_frint64x
    : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
                            [ IntrNoMem ]>;

//===----------------------------------------------------------------------===//
// HINT

// Generic HINT instruction; the i32 operand selects the hint encoding.
def int_aarch64_hint : DefaultAttrsIntrinsic<[], [llvm_i32_ty]>;

//===----------------------------------------------------------------------===//
// Data Barrier Instructions

// The i32 operand is the barrier option/domain immediate. Mapped to both the
// GCC-style and MSVC-style compiler builtins.
def int_aarch64_dmb : GCCBuiltin<"__builtin_arm_dmb">, MSBuiltin<"__dmb">,
                      Intrinsic<[], [llvm_i32_ty], [IntrNoFree, IntrWillReturn]>;
def int_aarch64_dsb : GCCBuiltin<"__builtin_arm_dsb">, MSBuiltin<"__dsb">,
                      Intrinsic<[], [llvm_i32_ty], [IntrNoFree, IntrWillReturn]>;
def int_aarch64_isb : GCCBuiltin<"__builtin_arm_isb">, MSBuiltin<"__isb">,
                      Intrinsic<[], [llvm_i32_ty], [IntrNoFree, IntrWillReturn]>;

// A space-consuming intrinsic primarily for testing block and jump table
// placements. The first argument is the number of bytes this "instruction"
// takes up, the second and return value are essentially chains, used to force
// ordering during ISel.
def int_aarch64_space : DefaultAttrsIntrinsic<[llvm_i64_ty], [llvm_i32_ty, llvm_i64_ty], []>;

}
82
83 //===----------------------------------------------------------------------===//
84 // Advanced SIMD (NEON)
85
let TargetPrefix = "aarch64" in {  // All intrinsics start with "llvm.aarch64.".
  // Helper classes describing common NEON intrinsic signatures. Naming
  // convention visible below: "Long" widens the element type (result wider
  // than operands), "Narrow" truncates, "Wide" mixes one wide and one
  // truncated operand, "Across" reduces a vector to a scalar, "Lane"/"Scalar"
  // take an extra i32 immediate operand.
  class AdvSIMD_2Scalar_Float_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
                [IntrNoMem]>;

  class AdvSIMD_FPToIntRounding_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [llvm_anyfloat_ty], [IntrNoMem]>;

  class AdvSIMD_1IntArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [LLVMMatchType<0>], [IntrNoMem]>;
  class AdvSIMD_1FloatArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>], [IntrNoMem]>;
  class AdvSIMD_1VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [LLVMMatchType<0>], [IntrNoMem]>;
  class AdvSIMD_1VectorArg_Expand_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
  class AdvSIMD_1VectorArg_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [LLVMTruncatedType<0>], [IntrNoMem]>;
  class AdvSIMD_1IntArg_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [llvm_anyint_ty], [IntrNoMem]>;
  class AdvSIMD_1VectorArg_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [LLVMExtendedType<0>], [IntrNoMem]>;
  class AdvSIMD_1VectorArg_Int_Across_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [llvm_anyvector_ty], [IntrNoMem]>;
  class AdvSIMD_1VectorArg_Float_Across_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [llvm_anyvector_ty], [IntrNoMem]>;

  class AdvSIMD_2IntArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_2FloatArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Compare_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty, LLVMMatchType<1>],
                [IntrNoMem]>;
  class AdvSIMD_2Arg_FloatCompare_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [llvm_anyfloat_ty, LLVMMatchType<1>],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMTruncatedType<0>, LLVMTruncatedType<0>],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Wide_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, LLVMTruncatedType<0>],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMExtendedType<0>, LLVMExtendedType<0>],
                [IntrNoMem]>;
  class AdvSIMD_2Arg_Scalar_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty],
                [LLVMExtendedType<0>, llvm_i32_ty],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Scalar_Expand_BySize_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_anyvector_ty],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Scalar_Wide_BySize_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMTruncatedType<0>],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Scalar_Wide_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMTruncatedType<0>, llvm_i32_ty],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Tied_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMHalfElementsVectorType<0>, llvm_anyvector_ty],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty],
                [LLVMMatchType<0>, llvm_anyint_ty, llvm_i32_ty],
                [IntrNoMem]>;

  class AdvSIMD_3IntArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty],
                [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_3VectorArg_Intrinsic
      : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
               [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
               [IntrNoMem]>;
  class AdvSIMD_3VectorArg_Scalar_Intrinsic
      : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
               [LLVMMatchType<0>, LLVMMatchType<0>, llvm_i32_ty],
               [IntrNoMem]>;
  class AdvSIMD_3VectorArg_Tied_Narrow_Intrinsic
      : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
               [LLVMHalfElementsVectorType<0>, llvm_anyvector_ty,
                LLVMMatchType<1>], [IntrNoMem]>;
  class AdvSIMD_3VectorArg_Scalar_Tied_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMHalfElementsVectorType<0>, llvm_anyvector_ty, llvm_i32_ty],
                [IntrNoMem]>;
  // Fixed-point <-> floating-point conversions; the trailing i32 is the
  // number of fractional bits.
  class AdvSIMD_CvtFxToFP_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [llvm_anyint_ty, llvm_i32_ty],
                [IntrNoMem]>;
  class AdvSIMD_CvtFPToFx_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [llvm_anyfloat_ty, llvm_i32_ty],
                [IntrNoMem]>;

  class AdvSIMD_1Arg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_any_ty], [LLVMMatchType<0>], [IntrNoMem]>;

  // NOTE: Dot, FP16FML, MatMul and FML below share the same
  // (acc, vec, vec) signature; they are kept as distinct classes so the
  // intrinsic families remain independently adjustable.
  class AdvSIMD_Dot_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>],
                [IntrNoMem]>;

  class AdvSIMD_FP16FML_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>],
                [IntrNoMem]>;

  class AdvSIMD_MatMul_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>],
                [IntrNoMem]>;

  class AdvSIMD_FML_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>],
                [IntrNoMem]>;

  // BFloat16 fused multiply-add long: f32 accumulator with bf16 operands.
  class AdvSIMD_BF16FML_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4f32_ty],
                [llvm_v4f32_ty, llvm_v8bf16_ty, llvm_v8bf16_ty],
                [IntrNoMem]>;
}
220
// Arithmetic ops

// All defs in this group default to IntrProperties = [IntrNoMem] via the
// surrounding `let`; individual defs may override (see fmulx below).
let TargetPrefix = "aarch64", IntrProperties = [IntrNoMem] in {
  // Vector Add Across Lanes
  def int_aarch64_neon_saddv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_uaddv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_faddv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;

  // Vector Long Add Across Lanes
  def int_aarch64_neon_saddlv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_uaddlv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;

  // Vector Halving Add
  def int_aarch64_neon_shadd : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_uhadd : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Rounding Halving Add
  def int_aarch64_neon_srhadd : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_urhadd : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Saturating Add
  def int_aarch64_neon_sqadd : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_suqadd : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_usqadd : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_uqadd : AdvSIMD_2IntArg_Intrinsic;

  // Vector Add High-Half
  // FIXME: this is a legacy intrinsic for aarch64_simd.h. Remove it when that
  // header is no longer supported.
  def int_aarch64_neon_addhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;

  // Vector Rounding Add High-Half
  def int_aarch64_neon_raddhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;

  // Vector Saturating Doubling Multiply High
  def int_aarch64_neon_sqdmulh : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_sqdmulh_lane : AdvSIMD_2VectorArg_Lane_Intrinsic;
  def int_aarch64_neon_sqdmulh_laneq : AdvSIMD_2VectorArg_Lane_Intrinsic;

  // Vector Saturating Rounding Doubling Multiply High
  def int_aarch64_neon_sqrdmulh : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_sqrdmulh_lane : AdvSIMD_2VectorArg_Lane_Intrinsic;
  def int_aarch64_neon_sqrdmulh_laneq : AdvSIMD_2VectorArg_Lane_Intrinsic;

  // Saturating rounding doubling multiply accumulate/subtract (v8.1-A RDM —
  // name-derived, confirm against the ARM ARM).
  def int_aarch64_neon_sqrdmlah : AdvSIMD_3IntArg_Intrinsic;
  def int_aarch64_neon_sqrdmlsh : AdvSIMD_3IntArg_Intrinsic;

  // Vector Polynominal Multiply
  def int_aarch64_neon_pmul : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Long Multiply
  def int_aarch64_neon_smull : AdvSIMD_2VectorArg_Long_Intrinsic;
  def int_aarch64_neon_umull : AdvSIMD_2VectorArg_Long_Intrinsic;
  def int_aarch64_neon_pmull : AdvSIMD_2VectorArg_Long_Intrinsic;

  // 64-bit polynomial multiply really returns an i128, which is not legal. Fake
  // it with a v16i8.
  def int_aarch64_neon_pmull64 :
        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;

  // Vector Extending Multiply
  def int_aarch64_neon_fmulx : AdvSIMD_2FloatArg_Intrinsic {
    // Overrides the group-level IntrProperties to additionally mark the
    // intrinsic commutative.
    let IntrProperties = [IntrNoMem, Commutative];
  }

  // Vector Saturating Doubling Long Multiply
  def int_aarch64_neon_sqdmull : AdvSIMD_2VectorArg_Long_Intrinsic;
  def int_aarch64_neon_sqdmulls_scalar
    : DefaultAttrsIntrinsic<[llvm_i64_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;

  // Vector Halving Subtract
  def int_aarch64_neon_shsub : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_uhsub : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Saturating Subtract
  def int_aarch64_neon_sqsub : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_uqsub : AdvSIMD_2IntArg_Intrinsic;

  // Vector Subtract High-Half
  // FIXME: this is a legacy intrinsic for aarch64_simd.h. Remove it when that
  // header is no longer supported.
  def int_aarch64_neon_subhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;

  // Vector Rounding Subtract High-Half
  def int_aarch64_neon_rsubhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;

  // Vector Compare Absolute Greater-than-or-equal
  def int_aarch64_neon_facge : AdvSIMD_2Arg_FloatCompare_Intrinsic;

  // Vector Compare Absolute Greater-than
  def int_aarch64_neon_facgt : AdvSIMD_2Arg_FloatCompare_Intrinsic;

  // Vector Absolute Difference
  def int_aarch64_neon_sabd : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_uabd : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_fabd : AdvSIMD_2VectorArg_Intrinsic;

  // Scalar Absolute Difference
  def int_aarch64_sisd_fabd : AdvSIMD_2Scalar_Float_Intrinsic;

  // Vector Max
  def int_aarch64_neon_smax : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_umax : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_fmax : AdvSIMD_2FloatArg_Intrinsic;
  def int_aarch64_neon_fmaxnmp : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Max Across Lanes
  def int_aarch64_neon_smaxv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_umaxv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_fmaxv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;
  def int_aarch64_neon_fmaxnmv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;

  // Vector Min
  def int_aarch64_neon_smin : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_umin : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_fmin : AdvSIMD_2FloatArg_Intrinsic;
  def int_aarch64_neon_fminnmp : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Min/Max Number
  def int_aarch64_neon_fminnm : AdvSIMD_2FloatArg_Intrinsic;
  def int_aarch64_neon_fmaxnm : AdvSIMD_2FloatArg_Intrinsic;

  // Vector Min Across Lanes
  def int_aarch64_neon_sminv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_uminv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_fminv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;
  def int_aarch64_neon_fminnmv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;

  // Pairwise Add
  def int_aarch64_neon_addp : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_faddp : AdvSIMD_2VectorArg_Intrinsic;

  // Long Pairwise Add
  // FIXME: In theory, we shouldn't need intrinsics for saddlp or
  // uaddlp, but tblgen's type inference currently can't handle the
  // pattern fragments this ends up generating.
  def int_aarch64_neon_saddlp : AdvSIMD_1VectorArg_Expand_Intrinsic;
  def int_aarch64_neon_uaddlp : AdvSIMD_1VectorArg_Expand_Intrinsic;

  // Folding Maximum
  def int_aarch64_neon_smaxp : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_umaxp : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_fmaxp : AdvSIMD_2VectorArg_Intrinsic;

  // Folding Minimum
  def int_aarch64_neon_sminp : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_uminp : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_fminp : AdvSIMD_2VectorArg_Intrinsic;

  // Reciprocal Estimate/Step
  def int_aarch64_neon_frecps : AdvSIMD_2FloatArg_Intrinsic;
  def int_aarch64_neon_frsqrts : AdvSIMD_2FloatArg_Intrinsic;

  // Reciprocal Exponent
  def int_aarch64_neon_frecpx : AdvSIMD_1FloatArg_Intrinsic;

  // Vector Saturating Shift Left
  def int_aarch64_neon_sqshl : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_uqshl : AdvSIMD_2IntArg_Intrinsic;

  // Vector Rounding Shift Left
  def int_aarch64_neon_srshl : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_urshl : AdvSIMD_2IntArg_Intrinsic;

  // Vector Saturating Rounding Shift Left
  def int_aarch64_neon_sqrshl : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_uqrshl : AdvSIMD_2IntArg_Intrinsic;

  // Vector Signed->Unsigned Shift Left by Constant
  def int_aarch64_neon_sqshlu : AdvSIMD_2IntArg_Intrinsic;

  // Vector Signed->Unsigned Narrowing Saturating Shift Right by Constant
  def int_aarch64_neon_sqshrun : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;

  // Vector Signed->Unsigned Rounding Narrowing Saturating Shift Right by Const
  def int_aarch64_neon_sqrshrun : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;

  // Vector Narrowing Shift Right by Constant
  def int_aarch64_neon_sqshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;
  def int_aarch64_neon_uqshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;

  // Vector Rounding Narrowing Shift Right by Constant
  def int_aarch64_neon_rshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;

  // Vector Rounding Narrowing Saturating Shift Right by Constant
  def int_aarch64_neon_sqrshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;
  def int_aarch64_neon_uqrshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;

  // Vector Shift Left
  def int_aarch64_neon_sshl : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_ushl : AdvSIMD_2IntArg_Intrinsic;

  // Vector Widening Shift Left by Constant
  def int_aarch64_neon_shll : AdvSIMD_2VectorArg_Scalar_Wide_BySize_Intrinsic;
  def int_aarch64_neon_sshll : AdvSIMD_2VectorArg_Scalar_Wide_Intrinsic;
  def int_aarch64_neon_ushll : AdvSIMD_2VectorArg_Scalar_Wide_Intrinsic;

  // Vector Shift Right by Constant and Insert
  def int_aarch64_neon_vsri : AdvSIMD_3VectorArg_Scalar_Intrinsic;

  // Vector Shift Left by Constant and Insert
  def int_aarch64_neon_vsli : AdvSIMD_3VectorArg_Scalar_Intrinsic;

  // Vector Saturating Narrow
  def int_aarch64_neon_scalar_sqxtn: AdvSIMD_1IntArg_Narrow_Intrinsic;
  def int_aarch64_neon_scalar_uqxtn : AdvSIMD_1IntArg_Narrow_Intrinsic;
  def int_aarch64_neon_sqxtn : AdvSIMD_1VectorArg_Narrow_Intrinsic;
  def int_aarch64_neon_uqxtn : AdvSIMD_1VectorArg_Narrow_Intrinsic;

  // Vector Saturating Extract and Unsigned Narrow
  def int_aarch64_neon_scalar_sqxtun : AdvSIMD_1IntArg_Narrow_Intrinsic;
  def int_aarch64_neon_sqxtun : AdvSIMD_1VectorArg_Narrow_Intrinsic;

  // Vector Absolute Value
  def int_aarch64_neon_abs : AdvSIMD_1Arg_Intrinsic;

  // Vector Saturating Absolute Value
  def int_aarch64_neon_sqabs : AdvSIMD_1IntArg_Intrinsic;

  // Vector Saturating Negation
  def int_aarch64_neon_sqneg : AdvSIMD_1IntArg_Intrinsic;

  // Vector Count Leading Sign Bits
  def int_aarch64_neon_cls : AdvSIMD_1VectorArg_Intrinsic;

  // Vector Reciprocal Estimate
  def int_aarch64_neon_urecpe : AdvSIMD_1VectorArg_Intrinsic;
  def int_aarch64_neon_frecpe : AdvSIMD_1FloatArg_Intrinsic;

  // Vector Square Root Estimate
  def int_aarch64_neon_ursqrte : AdvSIMD_1VectorArg_Intrinsic;
  def int_aarch64_neon_frsqrte : AdvSIMD_1FloatArg_Intrinsic;

  // Vector Conversions Between Half-Precision and Single-Precision.
  // The half-precision vector is modelled as v4i16 rather than v4f16.
  def int_aarch64_neon_vcvtfp2hf
    : DefaultAttrsIntrinsic<[llvm_v4i16_ty], [llvm_v4f32_ty], [IntrNoMem]>;
  def int_aarch64_neon_vcvthf2fp
    : DefaultAttrsIntrinsic<[llvm_v4f32_ty], [llvm_v4i16_ty], [IntrNoMem]>;

  // Vector Conversions Between Floating-point and Fixed-point.
  def int_aarch64_neon_vcvtfp2fxs : AdvSIMD_CvtFPToFx_Intrinsic;
  def int_aarch64_neon_vcvtfp2fxu : AdvSIMD_CvtFPToFx_Intrinsic;
  def int_aarch64_neon_vcvtfxs2fp : AdvSIMD_CvtFxToFP_Intrinsic;
  def int_aarch64_neon_vcvtfxu2fp : AdvSIMD_CvtFxToFP_Intrinsic;

  // Vector FP->Int Conversions
  def int_aarch64_neon_fcvtas : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtau : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtms : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtmu : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtns : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtnu : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtps : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtpu : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtzs : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtzu : AdvSIMD_FPToIntRounding_Intrinsic;

  // v8.5-A Vector FP Rounding
  def int_aarch64_neon_frint32x : AdvSIMD_1FloatArg_Intrinsic;
  def int_aarch64_neon_frint32z : AdvSIMD_1FloatArg_Intrinsic;
  def int_aarch64_neon_frint64x : AdvSIMD_1FloatArg_Intrinsic;
  def int_aarch64_neon_frint64z : AdvSIMD_1FloatArg_Intrinsic;

  // Scalar FP->Int conversions

  // Vector FP Inexact Narrowing
  def int_aarch64_neon_fcvtxn : AdvSIMD_1VectorArg_Expand_Intrinsic;

  // Scalar FP Inexact Narrowing
  def int_aarch64_sisd_fcvtxn : DefaultAttrsIntrinsic<[llvm_float_ty], [llvm_double_ty],
                                        [IntrNoMem]>;

  // v8.2-A Dot Product
  def int_aarch64_neon_udot : AdvSIMD_Dot_Intrinsic;
  def int_aarch64_neon_sdot : AdvSIMD_Dot_Intrinsic;

  // v8.6-A Matrix Multiply Intrinsics
  def int_aarch64_neon_ummla : AdvSIMD_MatMul_Intrinsic;
  def int_aarch64_neon_smmla : AdvSIMD_MatMul_Intrinsic;
  def int_aarch64_neon_usmmla : AdvSIMD_MatMul_Intrinsic;
  def int_aarch64_neon_usdot : AdvSIMD_Dot_Intrinsic;
  def int_aarch64_neon_bfdot : AdvSIMD_Dot_Intrinsic;
  def int_aarch64_neon_bfmmla
    : DefaultAttrsIntrinsic<[llvm_v4f32_ty],
                [llvm_v4f32_ty, llvm_v8bf16_ty, llvm_v8bf16_ty],
                [IntrNoMem]>;
  def int_aarch64_neon_bfmlalb : AdvSIMD_BF16FML_Intrinsic;
  def int_aarch64_neon_bfmlalt : AdvSIMD_BF16FML_Intrinsic;


  // v8.6-A Bfloat Intrinsics
  def int_aarch64_neon_bfcvt
    : DefaultAttrsIntrinsic<[llvm_bfloat_ty], [llvm_float_ty], [IntrNoMem]>;
  def int_aarch64_neon_bfcvtn
    : DefaultAttrsIntrinsic<[llvm_v8bf16_ty], [llvm_v4f32_ty], [IntrNoMem]>;
  def int_aarch64_neon_bfcvtn2
    : DefaultAttrsIntrinsic<[llvm_v8bf16_ty],
                [llvm_v8bf16_ty, llvm_v4f32_ty],
                [IntrNoMem]>;

  // v8.2-A FP16 Fused Multiply-Add Long
  def int_aarch64_neon_fmlal : AdvSIMD_FP16FML_Intrinsic;
  def int_aarch64_neon_fmlsl : AdvSIMD_FP16FML_Intrinsic;
  def int_aarch64_neon_fmlal2 : AdvSIMD_FP16FML_Intrinsic;
  def int_aarch64_neon_fmlsl2 : AdvSIMD_FP16FML_Intrinsic;

  // v8.3-A Floating-point complex add
  def int_aarch64_neon_vcadd_rot90  : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_vcadd_rot270 : AdvSIMD_2VectorArg_Intrinsic;

  // v8.3-A Floating-point complex multiply-accumulate, one def per rotation.
  def int_aarch64_neon_vcmla_rot0   : AdvSIMD_3VectorArg_Intrinsic;
  def int_aarch64_neon_vcmla_rot90  : AdvSIMD_3VectorArg_Intrinsic;
  def int_aarch64_neon_vcmla_rot180 : AdvSIMD_3VectorArg_Intrinsic;
  def int_aarch64_neon_vcmla_rot270 : AdvSIMD_3VectorArg_Intrinsic;
}
536
let TargetPrefix = "aarch64" in {  // All intrinsics start with "llvm.aarch64.".
  // (dst vector, dst index, src vector, src index) — the two i64 operands are
  // the element indices. TargetPrefix is captured by the class here, so defs
  // derived from it outside this `let` still get the "aarch64" prefix.
  class AdvSIMD_2Vector2Index_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_anyvector_ty, llvm_i64_ty, LLVMMatchType<0>, llvm_i64_ty],
                [IntrNoMem]>;
}
543
// Vector element to element moves
def int_aarch64_neon_vcopy_lane: AdvSIMD_2Vector2Index_Intrinsic;
546
let TargetPrefix = "aarch64" in {  // All intrinsics start with "llvm.aarch64.".
  // Structured load/store helper classes for ld1xN/ldN/ldNr/ldNlane and the
  // corresponding stores. Loads return N vectors of the same type and are
  // IntrReadMem + IntrArgMemOnly; stores take N vectors plus the pointer,
  // which is marked NoCapture (ArgIndex points at the pointer operand).
  // Lane variants additionally carry an i64 lane index before the pointer.
  class AdvSIMD_1Vec_Load_Intrinsic
      : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [LLVMAnyPointerType<LLVMMatchType<0>>],
                  [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_1Vec_Store_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, llvm_i64_ty, llvm_anyptr_ty],
                [IntrArgMemOnly, NoCapture<ArgIndex<2>>]>;

  class AdvSIMD_2Vec_Load_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, llvm_anyvector_ty],
                [LLVMAnyPointerType<LLVMMatchType<0>>],
                [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_2Vec_Load_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, LLVMMatchType<0>],
                [LLVMMatchType<0>, llvm_anyvector_ty,
                 llvm_i64_ty, llvm_anyptr_ty],
                [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_2Vec_Store_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
                     LLVMAnyPointerType<LLVMMatchType<0>>],
                [IntrArgMemOnly, NoCapture<ArgIndex<2>>]>;
  class AdvSIMD_2Vec_Store_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
                 llvm_i64_ty, llvm_anyptr_ty],
                [IntrArgMemOnly, NoCapture<ArgIndex<3>>]>;

  class AdvSIMD_3Vec_Load_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, LLVMMatchType<0>, llvm_anyvector_ty],
                [LLVMAnyPointerType<LLVMMatchType<0>>],
                [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_3Vec_Load_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
                [LLVMMatchType<0>, LLVMMatchType<0>, llvm_anyvector_ty,
                 llvm_i64_ty, llvm_anyptr_ty],
                [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_3Vec_Store_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
                     LLVMMatchType<0>, LLVMAnyPointerType<LLVMMatchType<0>>],
                [IntrArgMemOnly, NoCapture<ArgIndex<3>>]>;
  class AdvSIMD_3Vec_Store_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty,
                 LLVMMatchType<0>, LLVMMatchType<0>,
                 llvm_i64_ty, llvm_anyptr_ty],
                [IntrArgMemOnly, NoCapture<ArgIndex<4>>]>;

  class AdvSIMD_4Vec_Load_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, LLVMMatchType<0>,
                 LLVMMatchType<0>, llvm_anyvector_ty],
                [LLVMAnyPointerType<LLVMMatchType<0>>],
                [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_4Vec_Load_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, LLVMMatchType<0>,
                 LLVMMatchType<0>, LLVMMatchType<0>],
                [LLVMMatchType<0>, LLVMMatchType<0>,
                 LLVMMatchType<0>, llvm_anyvector_ty,
                 llvm_i64_ty, llvm_anyptr_ty],
                [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_4Vec_Store_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
                 LLVMMatchType<0>, LLVMMatchType<0>,
                 LLVMAnyPointerType<LLVMMatchType<0>>],
                [IntrArgMemOnly, NoCapture<ArgIndex<4>>]>;
  class AdvSIMD_4Vec_Store_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
                 LLVMMatchType<0>, LLVMMatchType<0>,
                 llvm_i64_ty, llvm_anyptr_ty],
                [IntrArgMemOnly, NoCapture<ArgIndex<5>>]>;
}
615
// Memory ops

// Structured loads/stores using the helper classes above:
//   ld1xN/st1xN — N consecutive one-register loads/stores,
//   ldN/stN     — de-interleaving/interleaving N-element structure access,
//   ldNlane/stNlane — single-lane structure access,
//   ldNr        — load one element replicated to all lanes.
def int_aarch64_neon_ld1x2 : AdvSIMD_2Vec_Load_Intrinsic;
def int_aarch64_neon_ld1x3 : AdvSIMD_3Vec_Load_Intrinsic;
def int_aarch64_neon_ld1x4 : AdvSIMD_4Vec_Load_Intrinsic;

def int_aarch64_neon_st1x2 : AdvSIMD_2Vec_Store_Intrinsic;
def int_aarch64_neon_st1x3 : AdvSIMD_3Vec_Store_Intrinsic;
def int_aarch64_neon_st1x4 : AdvSIMD_4Vec_Store_Intrinsic;

def int_aarch64_neon_ld2 : AdvSIMD_2Vec_Load_Intrinsic;
def int_aarch64_neon_ld3 : AdvSIMD_3Vec_Load_Intrinsic;
def int_aarch64_neon_ld4 : AdvSIMD_4Vec_Load_Intrinsic;

def int_aarch64_neon_ld2lane : AdvSIMD_2Vec_Load_Lane_Intrinsic;
def int_aarch64_neon_ld3lane : AdvSIMD_3Vec_Load_Lane_Intrinsic;
def int_aarch64_neon_ld4lane : AdvSIMD_4Vec_Load_Lane_Intrinsic;

def int_aarch64_neon_ld2r : AdvSIMD_2Vec_Load_Intrinsic;
def int_aarch64_neon_ld3r : AdvSIMD_3Vec_Load_Intrinsic;
def int_aarch64_neon_ld4r : AdvSIMD_4Vec_Load_Intrinsic;

def int_aarch64_neon_st2  : AdvSIMD_2Vec_Store_Intrinsic;
def int_aarch64_neon_st3  : AdvSIMD_3Vec_Store_Intrinsic;
def int_aarch64_neon_st4  : AdvSIMD_4Vec_Store_Intrinsic;

def int_aarch64_neon_st2lane  : AdvSIMD_2Vec_Store_Lane_Intrinsic;
def int_aarch64_neon_st3lane  : AdvSIMD_3Vec_Store_Lane_Intrinsic;
def int_aarch64_neon_st4lane  : AdvSIMD_4Vec_Store_Lane_Intrinsic;
645
let TargetPrefix = "aarch64" in {  // All intrinsics start with "llvm.aarch64.".
  // Table lookup (TBL) classes: 1-4 v16i8 table registers followed by the
  // index vector, which also fixes the result type.
  class AdvSIMD_Tbl1_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [llvm_v16i8_ty, LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_Tbl2_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_v16i8_ty, llvm_v16i8_ty, LLVMMatchType<0>], [IntrNoMem]>;
  class AdvSIMD_Tbl3_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty,
                 LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_Tbl4_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty,
                 LLVMMatchType<0>],
                [IntrNoMem]>;

  // Table extension (TBX) classes: like TBL but with a leading fallback
  // vector of the result type.
  class AdvSIMD_Tbx1_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_v16i8_ty, LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_Tbx2_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_v16i8_ty, llvm_v16i8_ty,
                 LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_Tbx3_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_v16i8_ty, llvm_v16i8_ty,
                 llvm_v16i8_ty, LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_Tbx4_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_v16i8_ty, llvm_v16i8_ty,
                 llvm_v16i8_ty, llvm_v16i8_ty, LLVMMatchType<0>],
                [IntrNoMem]>;
}
// Table lookup (TBL) with 1..4 table registers.
def int_aarch64_neon_tbl1 : AdvSIMD_Tbl1_Intrinsic;
def int_aarch64_neon_tbl2 : AdvSIMD_Tbl2_Intrinsic;
def int_aarch64_neon_tbl3 : AdvSIMD_Tbl3_Intrinsic;
def int_aarch64_neon_tbl4 : AdvSIMD_Tbl4_Intrinsic;

// Table lookup extension (TBX) with 1..4 table registers.
def int_aarch64_neon_tbx1 : AdvSIMD_Tbx1_Intrinsic;
def int_aarch64_neon_tbx2 : AdvSIMD_Tbx2_Intrinsic;
def int_aarch64_neon_tbx3 : AdvSIMD_Tbx3_Intrinsic;
def int_aarch64_neon_tbx4 : AdvSIMD_Tbx4_Intrinsic;
693
let TargetPrefix = "aarch64" in {
  // Read/write a system register (modelled as IntrNoMem but with side
  // effects so calls are not CSE'd or dead-code eliminated).
  class FPCR_Get_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i64_ty], [], [IntrNoMem, IntrHasSideEffects]>;
  class FPCR_Set_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_i64_ty], [IntrNoMem, IntrHasSideEffects]>;
  // Returns {value, flag}; the i1 presumably reports whether a valid random
  // number was produced (RNDR/RNDRRS set NZCV) — confirm against the ISA.
  class RNDR_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i64_ty, llvm_i1_ty], [], [IntrNoMem, IntrHasSideEffects]>;
}
702
// FPCR: read/write the Floating-point Control Register.
def int_aarch64_get_fpcr : FPCR_Get_Intrinsic;
def int_aarch64_set_fpcr : FPCR_Set_Intrinsic;

// Armv8.5-A Random number generation intrinsics
def int_aarch64_rndr : RNDR_Intrinsic;
def int_aarch64_rndrrs : RNDR_Intrinsic;
710
let TargetPrefix = "aarch64" in {
  // AES op taking (data, key), both v16i8.
  class Crypto_AES_DataKey_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;

  // AES op taking a single v16i8 data operand.
  class Crypto_AES_Data_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty], [IntrNoMem]>;

  // SHA intrinsic taking 5 words of the hash (v4i32, i32) and 4 of the schedule
  // (v4i32).
  class Crypto_SHA_5Hash4Schedule_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty, llvm_v4i32_ty],
                [IntrNoMem]>;

  // SHA intrinsic taking a single word of the hash (i32) and producing a
  // single word.  (The previous comment here was a copy-paste of the
  // 5Hash4Schedule one and did not match this signature.)
  class Crypto_SHA_1Hash_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrNoMem]>;

  // SHA intrinsic taking 8 words of the schedule
  class Crypto_SHA_8Schedule_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;

  // SHA intrinsic taking 12 words of the schedule
  class Crypto_SHA_12Schedule_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
                [IntrNoMem]>;

  // SHA intrinsic taking 8 words of the hash and 4 of the schedule.
  class Crypto_SHA_8Hash4Schedule_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
                [IntrNoMem]>;

  // SHA512 intrinsic taking 2 arguments
  class Crypto_SHA512_2Arg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;

  // SHA512 intrinsic taking 3 arguments
  class Crypto_SHA512_3Arg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty],
                [IntrNoMem]>;

  // SHA3 intrinsics taking 3 vector arguments
  class Crypto_SHA3_3Arg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
               [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
               [IntrNoMem]>;

  // SHA3 intrinsic taking 2 vector arguments
  class Crypto_SHA3_2Arg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
               [IntrNoMem]>;

  // SHA3 intrinsic taking 2 vector arguments and 1 immediate (arg 2 must be
  // a compile-time constant).
  class Crypto_SHA3_2ArgImm_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_i64_ty],
               [IntrNoMem, ImmArg<ArgIndex<2>>]>;

  // SM3 intrinsic taking 3 vector arguments.  Uses DefaultAttrsIntrinsic
  // for consistency with every other crypto class in this block; these are
  // pure IntrNoMem operations, so the default attributes are sound.
  class Crypto_SM3_3Vector_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
                [IntrNoMem]>;

  // SM3 intrinsic taking 3 vector arguments and 1 immediate (arg 3 must be
  // a compile-time constant).
  class Crypto_SM3_3VectorIndexed_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty, llvm_i64_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  // SM4 intrinsic taking 2 vector arguments.
  class Crypto_SM4_2Vector_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
}
779
// AES
def int_aarch64_crypto_aese   : Crypto_AES_DataKey_Intrinsic;
def int_aarch64_crypto_aesd   : Crypto_AES_DataKey_Intrinsic;
def int_aarch64_crypto_aesmc  : Crypto_AES_Data_Intrinsic;
def int_aarch64_crypto_aesimc : Crypto_AES_Data_Intrinsic;

// SHA1
def int_aarch64_crypto_sha1c  : Crypto_SHA_5Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha1p  : Crypto_SHA_5Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha1m  : Crypto_SHA_5Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha1h  : Crypto_SHA_1Hash_Intrinsic;

// SHA1 schedule update
def int_aarch64_crypto_sha1su0 : Crypto_SHA_12Schedule_Intrinsic;
def int_aarch64_crypto_sha1su1 : Crypto_SHA_8Schedule_Intrinsic;

// SHA256
def int_aarch64_crypto_sha256h   : Crypto_SHA_8Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha256h2  : Crypto_SHA_8Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha256su0 : Crypto_SHA_8Schedule_Intrinsic;
def int_aarch64_crypto_sha256su1 : Crypto_SHA_12Schedule_Intrinsic;

// SHA3
def int_aarch64_crypto_eor3s : Crypto_SHA3_3Arg_Intrinsic;
def int_aarch64_crypto_eor3u : Crypto_SHA3_3Arg_Intrinsic;
def int_aarch64_crypto_bcaxs : Crypto_SHA3_3Arg_Intrinsic;
def int_aarch64_crypto_bcaxu : Crypto_SHA3_3Arg_Intrinsic;
def int_aarch64_crypto_rax1 : Crypto_SHA3_2Arg_Intrinsic;
def int_aarch64_crypto_xar : Crypto_SHA3_2ArgImm_Intrinsic;

// SHA512
def int_aarch64_crypto_sha512h : Crypto_SHA512_3Arg_Intrinsic;
def int_aarch64_crypto_sha512h2 : Crypto_SHA512_3Arg_Intrinsic;
def int_aarch64_crypto_sha512su0 : Crypto_SHA512_2Arg_Intrinsic;
def int_aarch64_crypto_sha512su1 : Crypto_SHA512_3Arg_Intrinsic;

// SM3 & SM4
def int_aarch64_crypto_sm3partw1 : Crypto_SM3_3Vector_Intrinsic;
def int_aarch64_crypto_sm3partw2 : Crypto_SM3_3Vector_Intrinsic;
def int_aarch64_crypto_sm3ss1    : Crypto_SM3_3Vector_Intrinsic;
def int_aarch64_crypto_sm3tt1a   : Crypto_SM3_3VectorIndexed_Intrinsic;
def int_aarch64_crypto_sm3tt1b   : Crypto_SM3_3VectorIndexed_Intrinsic;
def int_aarch64_crypto_sm3tt2a   : Crypto_SM3_3VectorIndexed_Intrinsic;
def int_aarch64_crypto_sm3tt2b   : Crypto_SM3_3VectorIndexed_Intrinsic;
def int_aarch64_crypto_sm4e      : Crypto_SM4_2Vector_Intrinsic;
def int_aarch64_crypto_sm4ekey   : Crypto_SM4_2Vector_Intrinsic;
825
826 //===----------------------------------------------------------------------===//
827 // CRC32
828
let TargetPrefix = "aarch64" in {

// CRC32 accumulation step: takes the running 32-bit CRC plus a data chunk
// (i32 for the byte/halfword/word forms, i64 for the doubleword forms) and
// returns the updated 32-bit CRC.
class CRC32_Intrinsic<LLVMType DataTy>
  : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, DataTy], [IntrNoMem]>;

// Byte, halfword, word and doubleword variants; the "c" forms use the
// CRC32C (Castagnoli) polynomial.
def int_aarch64_crc32b  : CRC32_Intrinsic<llvm_i32_ty>;
def int_aarch64_crc32cb : CRC32_Intrinsic<llvm_i32_ty>;
def int_aarch64_crc32h  : CRC32_Intrinsic<llvm_i32_ty>;
def int_aarch64_crc32ch : CRC32_Intrinsic<llvm_i32_ty>;
def int_aarch64_crc32w  : CRC32_Intrinsic<llvm_i32_ty>;
def int_aarch64_crc32cw : CRC32_Intrinsic<llvm_i32_ty>;
def int_aarch64_crc32x  : CRC32_Intrinsic<llvm_i64_ty>;
def int_aarch64_crc32cx : CRC32_Intrinsic<llvm_i64_ty>;
}
848
849 //===----------------------------------------------------------------------===//
850 // Memory Tagging Extensions (MTE) Intrinsics
let TargetPrefix = "aarch64" in {
// irg: insert a tag into a pointer; marked IntrHasSideEffects (the tag is
// presumably random, mirroring irg_sp below — confirm against the ISA).
def int_aarch64_irg   : DefaultAttrsIntrinsic<[llvm_ptr_ty], [llvm_ptr_ty, llvm_i64_ty],
    [IntrNoMem, IntrHasSideEffects]>;
// addg: pointer arithmetic that also adjusts the tag; pure.
def int_aarch64_addg  : DefaultAttrsIntrinsic<[llvm_ptr_ty], [llvm_ptr_ty, llvm_i64_ty],
    [IntrNoMem]>;
// gmi: compute a tag exclusion mask from a pointer and a prior mask; pure.
def int_aarch64_gmi   : DefaultAttrsIntrinsic<[llvm_i64_ty], [llvm_ptr_ty, llvm_i64_ty],
    [IntrNoMem]>;
// ldg/stg: load/store an allocation tag for the given address.
def int_aarch64_ldg   : DefaultAttrsIntrinsic<[llvm_ptr_ty], [llvm_ptr_ty, llvm_ptr_ty],
    [IntrReadMem]>;
def int_aarch64_stg   : DefaultAttrsIntrinsic<[], [llvm_ptr_ty, llvm_ptr_ty],
    [IntrWriteMem]>;
// subp: pointer difference ignoring tags; pure.
def int_aarch64_subp :  DefaultAttrsIntrinsic<[llvm_i64_ty], [llvm_ptr_ty, llvm_ptr_ty],
    [IntrNoMem]>;

// The following are codegen-only intrinsics for stack instrumentation.

// Generate a randomly tagged stack base pointer.
def int_aarch64_irg_sp   : DefaultAttrsIntrinsic<[llvm_ptr_ty], [llvm_i64_ty],
    [IntrNoMem, IntrHasSideEffects]>;

// Transfer pointer tag with offset.
// ptr1 = tagp(ptr0, baseptr, tag_offset) returns a pointer where
// * address is the address in ptr0
// * tag is a function of (tag in baseptr, tag_offset).
// ** Beware, this is not the same function as implemented by the ADDG instruction!
//    Backend optimizations may change tag_offset; the only guarantee is that calls
//    to tagp with the same pair of (baseptr, tag_offset) will produce pointers
//    with the same tag value, assuming the set of excluded tags has not changed.
// Address bits in baseptr and tag bits in ptr0 are ignored.
// When offset between ptr0 and baseptr is a compile time constant, this can be emitted as
//   ADDG ptr1, baseptr, (ptr0 - baseptr), tag_offset
// It is intended that ptr0 is an alloca address, and baseptr is the direct output of llvm.aarch64.irg.sp.
def int_aarch64_tagp : DefaultAttrsIntrinsic<[llvm_anyptr_ty], [LLVMMatchType<0>, llvm_ptr_ty, llvm_i64_ty],
    [IntrNoMem, ImmArg<ArgIndex<2>>]>;

// Update allocation tags for the memory range to match the tag in the pointer argument.
def int_aarch64_settag  : DefaultAttrsIntrinsic<[], [llvm_ptr_ty, llvm_i64_ty],
    [IntrWriteMem, IntrArgMemOnly, NoCapture<ArgIndex<0>>, WriteOnly<ArgIndex<0>>]>;

// Update allocation tags for the memory range to match the tag in the pointer argument,
// and set memory contents to zero.
def int_aarch64_settag_zero  : DefaultAttrsIntrinsic<[], [llvm_ptr_ty, llvm_i64_ty],
    [IntrWriteMem, IntrArgMemOnly, NoCapture<ArgIndex<0>>, WriteOnly<ArgIndex<0>>]>;

// Update allocation tags for a 16-aligned, 16-sized memory region, and store
// a pair of 8-byte values.
def int_aarch64_stgp  : DefaultAttrsIntrinsic<[], [llvm_ptr_ty, llvm_i64_ty, llvm_i64_ty],
    [IntrWriteMem, IntrArgMemOnly, NoCapture<ArgIndex<0>>, WriteOnly<ArgIndex<0>>]>;
}
899
900 //===----------------------------------------------------------------------===//
901 // Memory Operations (MOPS) Intrinsics
let TargetPrefix = "aarch64" in {
  // Sizes are chosen to correspond to the llvm.memset intrinsic: ptr, i8, i64.
  // Returns a pointer (the updated destination — confirm against the MOPS
  // lowering) rather than void, unlike llvm.memset.
  def int_aarch64_mops_memset_tag : DefaultAttrsIntrinsic<[llvm_ptr_ty], [llvm_ptr_ty, llvm_i8_ty, llvm_i64_ty],
      [IntrWriteMem, IntrArgMemOnly, NoCapture<ArgIndex<0>>, WriteOnly<ArgIndex<0>>]>;
}
907
// Transactional Memory Extension (TME) Intrinsics
let TargetPrefix = "aarch64" in {
// Start a transaction; returns 0 on success, else a failure reason
// (presumably — the result encoding is defined by the TSTART instruction;
// confirm against the ISA).
def int_aarch64_tstart  : GCCBuiltin<"__builtin_arm_tstart">,
                         Intrinsic<[llvm_i64_ty], [], [IntrWillReturn]>;

// Commit the current transaction.
def int_aarch64_tcommit : GCCBuiltin<"__builtin_arm_tcommit">, Intrinsic<[], [], [IntrWillReturn]>;

// Cancel the current transaction; the reason operand must be an immediate.
def int_aarch64_tcancel : GCCBuiltin<"__builtin_arm_tcancel">,
                          Intrinsic<[], [llvm_i64_ty], [IntrWillReturn, ImmArg<ArgIndex<0>>]>;

// Test transactional state; has side effects so it is not CSE'd away.
def int_aarch64_ttest   : GCCBuiltin<"__builtin_arm_ttest">,
                          Intrinsic<[llvm_i64_ty], [],
                                    [IntrNoMem, IntrHasSideEffects, IntrWillReturn]>;

// Armv8.7-A load/store 64-byte intrinsics

// 512 bits of data carried as eight i64 values.
defvar data512 = !listsplat(llvm_i64_ty, 8);
def int_aarch64_ld64b: Intrinsic<data512, [llvm_ptr_ty]>;
def int_aarch64_st64b: Intrinsic<[], !listconcat([llvm_ptr_ty], data512)>;
// st64bv/st64bv0 additionally return a status result (i64).
def int_aarch64_st64bv: Intrinsic<[llvm_i64_ty], !listconcat([llvm_ptr_ty], data512)>;
def int_aarch64_st64bv0: Intrinsic<[llvm_i64_ty], !listconcat([llvm_ptr_ty], data512)>;

}
930
// Scalable ("nxv") vector type handles used by the SVE intrinsics below.
def llvm_nxv2i1_ty  : LLVMType<nxv2i1>;
def llvm_nxv4i1_ty  : LLVMType<nxv4i1>;
def llvm_nxv8i1_ty  : LLVMType<nxv8i1>;
def llvm_nxv16i1_ty : LLVMType<nxv16i1>;
def llvm_nxv16i8_ty : LLVMType<nxv16i8>;
def llvm_nxv4i32_ty : LLVMType<nxv4i32>;
def llvm_nxv2i64_ty : LLVMType<nxv2i64>;
def llvm_nxv8f16_ty : LLVMType<nxv8f16>;
def llvm_nxv8bf16_ty : LLVMType<nxv8bf16>;
def llvm_nxv4f32_ty : LLVMType<nxv4f32>;
def llvm_nxv2f64_ty : LLVMType<nxv2f64>;
942
943 let TargetPrefix = "aarch64" in {  // All intrinsics start with "llvm.aarch64.".
944
  // Build an SVE vector tuple (result, type 0) out of 2/3/4 equally-typed
  // parts (type 1).  NOTE(review): these are marked IntrReadMem despite
  // taking no pointer operand — presumably just conservative; confirm.
  class AdvSIMD_SVE_Create_2Vector_Tuple
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_anyvector_ty, LLVMMatchType<1>],
                [IntrReadMem]>;

  class AdvSIMD_SVE_Create_3Vector_Tuple
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_anyvector_ty, LLVMMatchType<1>, LLVMMatchType<1>],
                [IntrReadMem]>;

  class AdvSIMD_SVE_Create_4Vector_Tuple
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_anyvector_ty, LLVMMatchType<1>, LLVMMatchType<1>,
                 LLVMMatchType<1>],
                [IntrReadMem]>;

  // Replace the sub-vector at an immediate index (arg 1) within a tuple.
  class AdvSIMD_SVE_Set_Vector_Tuple
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_i32_ty, llvm_anyvector_ty],
                [IntrReadMem, ImmArg<ArgIndex<1>>]>;

  // Extract the sub-vector at an immediate index (arg 1) from a tuple.
  class AdvSIMD_SVE_Get_Vector_Tuple
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty, llvm_i32_ty],
                [IntrReadMem, IntrArgMemOnly, ImmArg<ArgIndex<1>>]>;
969
  // Predicated loads: a governing predicate (i1 vector matching the data
  // width) plus a pointer to the element type; N-vector variants return
  // N same-typed results.
  class AdvSIMD_ManyVec_PredLoad_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty, LLVMPointerToElt<0>],
                [IntrReadMem, IntrArgMemOnly]>;

  class AdvSIMD_1Vec_PredLoad_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMPointerToElt<0>],
                [IntrReadMem, IntrArgMemOnly]>;

  class AdvSIMD_2Vec_PredLoad_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty, LLVMMatchType<0>],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMPointerToElt<0>],
                [IntrReadMem, IntrArgMemOnly]>;

  class AdvSIMD_3Vec_PredLoad_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty, LLVMMatchType<0>, LLVMMatchType<0>],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMPointerToElt<0>],
                [IntrReadMem, IntrArgMemOnly]>;

  class AdvSIMD_4Vec_PredLoad_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty, LLVMMatchType<0>, LLVMMatchType<0>,
                 LLVMMatchType<0>],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMPointerToElt<0>],
                [IntrReadMem, IntrArgMemOnly]>;

  // First-faulting/non-faulting load: also updates the FFR, hence the wider
  // IntrInaccessibleMemOrArgMemOnly attribute instead of IntrReadMem.
  class AdvSIMD_1Vec_PredLoad_WriteFFR_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMPointerToElt<0>],
                [IntrInaccessibleMemOrArgMemOnly]>;

  // Predicated stores: N data vectors, governing predicate, destination
  // pointer (never captured).
  class AdvSIMD_1Vec_PredStore_Intrinsic
    : DefaultAttrsIntrinsic<[],
                [llvm_anyvector_ty,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMPointerToElt<0>],
                [IntrArgMemOnly, NoCapture<ArgIndex<2>>]>;

  class AdvSIMD_2Vec_PredStore_Intrinsic
      : DefaultAttrsIntrinsic<[],
                  [llvm_anyvector_ty, LLVMMatchType<0>,
                   LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, LLVMPointerToElt<0>],
                  [IntrArgMemOnly, NoCapture<ArgIndex<3>>]>;

  class AdvSIMD_3Vec_PredStore_Intrinsic
      : DefaultAttrsIntrinsic<[],
                  [llvm_anyvector_ty, LLVMMatchType<0>, LLVMMatchType<0>,
                   LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, LLVMPointerToElt<0>],
                  [IntrArgMemOnly, NoCapture<ArgIndex<4>>]>;

  class AdvSIMD_4Vec_PredStore_Intrinsic
      : DefaultAttrsIntrinsic<[],
                  [llvm_anyvector_ty, LLVMMatchType<0>, LLVMMatchType<0>,
                   LLVMMatchType<0>,
                   LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, LLVMPointerToElt<0>],
                  [IntrArgMemOnly, NoCapture<ArgIndex<5>>]>;
1030
  // Two scalar operands of the result's element type (for INDEX this is
  // presumably base and step — confirm against the instruction).
  class AdvSIMD_SVE_Index_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMVectorElementType<0>,
                 LLVMVectorElementType<0>],
                [IntrNoMem]>;

  // Merging unary op: (passthru, governing predicate, operand).
  class AdvSIMD_Merged1VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>],
                [IntrNoMem]>;

  // Two vectors plus an immediate (e.g. a lane index or rotation).
  class AdvSIMD_2VectorArgIndexed_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<2>>]>;

  // Three vectors plus an immediate.
  class AdvSIMD_3VectorArgIndexed_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  // Predicated ops taking 1/2/3 vector operands after the predicate.
  class AdvSIMD_Pred1VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>],
                [IntrNoMem]>;

  class AdvSIMD_Pred2VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>],
                [IntrNoMem]>;

  class AdvSIMD_Pred3VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>],
                [IntrNoMem]>;
1079
  // Predicated compare: yields an i1 predicate vector from two same-typed
  // data vectors.
  class AdvSIMD_SVE_Compare_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 llvm_anyvector_ty,
                 LLVMMatchType<0>],
                [IntrNoMem]>;

  // Wide-element compare: the second source is always nxv2i64.
  class AdvSIMD_SVE_CompareWide_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 llvm_anyvector_ty,
                 llvm_nxv2i64_ty],
                [IntrNoMem]>;

  // Saturating op driven by a predicate (note: predicate is the SECOND
  // operand here, unlike the predicated classes above).
  class AdvSIMD_SVE_Saturating_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>],
                [IntrNoMem]>;

  // Saturating op taking two immediates (pattern and multiplier,
  // presumably — confirm against the sqinc/sqdec definitions).
  class AdvSIMD_SVE_SaturatingWithPattern_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 llvm_i32_ty,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>]>;

  // Scalar (type T) counterparts of the two saturating classes above.
  class AdvSIMD_SVE_Saturating_N_Intrinsic<LLVMType T>
    : DefaultAttrsIntrinsic<[T],
                [T, llvm_anyvector_ty],
                [IntrNoMem]>;

  class AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<LLVMType T>
    : DefaultAttrsIntrinsic<[T],
                [T, llvm_i32_ty, llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>]>;
1116
  // Merging op whose result/passthru are the integer-bitcast form of the
  // (possibly floating-point) source vector.
  class AdvSIMD_SVE_CNT_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMVectorOfBitcastsToInt<0>],
                [LLVMVectorOfBitcastsToInt<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 llvm_anyvector_ty],
                [IntrNoMem]>;

  // Predicated reduction with an initial scalar value; returns a scalar of
  // the vector's element type.
  class AdvSIMD_SVE_ReduceWithInit_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMVectorElementType<0>],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMVectorElementType<0>,
                 llvm_anyvector_ty],
                [IntrNoMem]>;

  // Predicated shift by an immediate amount.
  class AdvSIMD_SVE_ShiftByImm_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<2>>]>;

  // Predicated shift by a wide (nxv2i64) per-element amount.
  class AdvSIMD_SVE_ShiftWide_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 llvm_nxv2i64_ty],
                [IntrNoMem]>;

  // Widening unpack: input has twice the element count at half the width.
  class AdvSIMD_SVE_Unpack_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
               [LLVMSubdivide2VectorType<0>],
               [IntrNoMem]>;

  // Complex add: (pred, op1, op2, rotation immediate).
  class AdvSIMD_SVE_CADD_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>]>;
1157
  // Complex multiply-add: (pred, acc, op1, op2, rotation immediate).
  class AdvSIMD_SVE_CMLA_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<4>>]>;

  // Indexed complex multiply-add: unpredicated, with immediate lane index
  // (arg 3) and rotation (arg 4).
  class AdvSIMD_SVE_CMLA_LANE_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 llvm_i32_ty,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>]>;

  // Predicated broadcast of a scalar into active lanes, merging with the
  // passthru vector (arg 0).
  class AdvSIMD_SVE_DUP_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMVectorElementType<0>],
                [IntrNoMem]>;

  // Unpredicated broadcast of a scalar to all lanes.
  class AdvSIMD_SVE_DUP_Unpred_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [LLVMVectorElementType<0>],
                [IntrNoMem]>;

  // Broadcast a 128-bit quadword selected by the i64 index.
  class AdvSIMD_SVE_DUPQ_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 llvm_i64_ty],
                [IntrNoMem]>;

  // Integer-bitcast input producing a same-width vector result.
  class AdvSIMD_SVE_EXPA_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMVectorOfBitcastsToInt<0>],
                [IntrNoMem]>;

  // Predicated conversion: (passthru of result type, pred, source of a
  // possibly different vector type).
  class AdvSIMD_SVE_FCVT_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 llvm_anyvector_ty],
                [IntrNoMem]>;

  // Like FCVT, but the passthru/result are the integer-bitcast type.
  class AdvSIMD_SVE_FCVTZS_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMVectorOfBitcastsToInt<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 llvm_anyvector_ty],
                [IntrNoMem]>;

  // Insert a scalar into a vector (INSR-style: vector + element operand).
  class AdvSIMD_SVE_INSR_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMVectorElementType<0>],
                [IntrNoMem]>;

  // Materialize a predicate from an immediate pattern operand.
  class AdvSIMD_SVE_PTRUE_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<0>>]>;
1222
  // Predicate unpack: result has half the element count of the source.
  class AdvSIMD_SVE_PUNPKHI_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMHalfElementsVectorType<0>],
                [llvm_anyvector_ty],
                [IntrNoMem]>;

  // Predicated op whose second source is the integer-bitcast vector type
  // (e.g. an exponent vector for FSCALE).
  class AdvSIMD_SVE_SCALE_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 LLVMVectorOfBitcastsToInt<0>],
                [IntrNoMem]>;

  // Predicated int-to-float conversion: (passthru, pred, integer source).
  class AdvSIMD_SVE_SCVTF_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 llvm_anyvector_ty],
                [IntrNoMem]>;

  // Unpredicated op with an integer-bitcast second source.
  class AdvSIMD_SVE_TSMUL_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMVectorOfBitcastsToInt<0>],
                [IntrNoMem]>;

  // Element-count query: immediate pattern in, i64 count out.
  class AdvSIMD_SVE_CNTB_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i64_ty],
                [llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<0>>]>;

  // Count active predicate elements: (governing pred, pred) -> i64.
  class AdvSIMD_SVE_CNTP_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i64_ty],
                [llvm_anyvector_ty, LLVMMatchType<0>],
                [IntrNoMem]>;

  // Dot product accumulating into 4x-wider elements.
  class AdvSIMD_SVE_DOT_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMSubdivide4VectorType<0>,
                 LLVMSubdivide4VectorType<0>],
                [IntrNoMem]>;

  // Dot product with an immediate lane index (arg 3).
  class AdvSIMD_SVE_DOT_Indexed_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMSubdivide4VectorType<0>,
                 LLVMSubdivide4VectorType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  // Predicate test: (governing pred, pred) -> single i1.
  class AdvSIMD_SVE_PTEST_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i1_ty],
                [llvm_anyvector_ty,
                 LLVMMatchType<0>],
                [IntrNoMem]>;

  // Table lookup: data vector + integer-bitcast index vector.
  class AdvSIMD_SVE_TBL_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMVectorOfBitcastsToInt<0>],
                [IntrNoMem]>;

  // Table lookup extension: adds a fallback vector as first operand.
  class AdvSIMD_SVE2_TBX_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 LLVMVectorOfBitcastsToInt<0>],
                [IntrNoMem]>;

  // Widening op from the half-width/double-count type, with an immediate.
  class SVE2_1VectorArg_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMSubdivide2VectorType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<1>>]>;
1297
  // "Long" = result elements are twice the width of the (Subdivide2)
  // sources; "Wide" = only the second source is narrow.
  class SVE2_2VectorArg_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMSubdivide2VectorType<0>,
                 LLVMSubdivide2VectorType<0>],
                [IntrNoMem]>;

  class SVE2_2VectorArgIndexed_Long_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
              [LLVMSubdivide2VectorType<0>,
               LLVMSubdivide2VectorType<0>,
               llvm_i32_ty],
              [IntrNoMem, ImmArg<ArgIndex<2>>]>;

  class SVE2_2VectorArg_Wide_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMSubdivide2VectorType<0>],
                [IntrNoMem]>;

  class SVE2_2VectorArg_Pred_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 LLVMSubdivide2VectorType<0>],
                [IntrNoMem]>;

  // Widening multiply-accumulate style: accumulator of the wide type plus
  // two narrow sources (optionally with an immediate lane index).
  class SVE2_3VectorArg_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMSubdivide2VectorType<0>,
                 LLVMSubdivide2VectorType<0>],
                [IntrNoMem]>;

  class SVE2_3VectorArgIndexed_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMSubdivide2VectorType<0>,
                 LLVMSubdivide2VectorType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  // "Narrowing" = result elements are half the width of the source;
  // "Merged" variants take the low-half result to merge into.
  class SVE2_1VectorArg_Narrowing_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMSubdivide2VectorType<0>],
                [llvm_anyvector_ty],
                [IntrNoMem]>;

  class SVE2_Merged1VectorArg_Narrowing_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMSubdivide2VectorType<0>],
                [LLVMSubdivide2VectorType<0>,
                 llvm_anyvector_ty],
                [IntrNoMem]>;
  class SVE2_2VectorArg_Narrowing_Intrinsic
      : DefaultAttrsIntrinsic<
            [LLVMSubdivide2VectorType<0>],
            [llvm_anyvector_ty, LLVMMatchType<0>],
            [IntrNoMem]>;

  class SVE2_Merged2VectorArg_Narrowing_Intrinsic
      : DefaultAttrsIntrinsic<
            [LLVMSubdivide2VectorType<0>],
            [LLVMSubdivide2VectorType<0>, llvm_anyvector_ty, LLVMMatchType<0>],
            [IntrNoMem]>;

  // Narrowing shifts by an immediate amount.
  class SVE2_1VectorArg_Imm_Narrowing_Intrinsic
      : DefaultAttrsIntrinsic<[LLVMSubdivide2VectorType<0>],
                  [llvm_anyvector_ty, llvm_i32_ty],
                  [IntrNoMem, ImmArg<ArgIndex<1>>]>;

  class SVE2_2VectorArg_Imm_Narrowing_Intrinsic
      : DefaultAttrsIntrinsic<[LLVMSubdivide2VectorType<0>],
                  [LLVMSubdivide2VectorType<0>, llvm_anyvector_ty,
                   llvm_i32_ty],
                  [IntrNoMem, ImmArg<ArgIndex<2>>]>;

  // Pointer-conflict detection: two pointers in, predicate-style vector
  // out.  NOTE(review): no attribute list is supplied here, unlike the
  // surrounding classes — presumably intentional; confirm.
  class SVE2_CONFLICT_DETECT_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMAnyPointerType<llvm_any_ty>,
                 LLVMMatchType<1>]>;

  class SVE2_3VectorArg_Indexed_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMSubdivide2VectorType<0>,
                 LLVMSubdivide2VectorType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  // Complex dot product with immediate lane index (arg 3) and rotation
  // (arg 4).
  class AdvSIMD_SVE_CDOT_LANE_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMSubdivide4VectorType<0>,
                 LLVMSubdivide4VectorType<0>,
                 llvm_i32_ty,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>]>;
1393
1394   // NOTE: There is no relationship between these intrinsics beyond an attempt
1395   // to reuse currently identical class definitions.
1396   class AdvSIMD_SVE_LOGB_Intrinsic  : AdvSIMD_SVE_CNT_Intrinsic;
1397   class AdvSIMD_SVE2_CADD_Intrinsic : AdvSIMD_2VectorArgIndexed_Intrinsic;
1398   class AdvSIMD_SVE2_CMLA_Intrinsic : AdvSIMD_3VectorArgIndexed_Intrinsic;
1399
  // This class of intrinsics is not intended to be useful within LLVM IR but
  // is instead here to support some of the more rigid parts of the ACLE.
  // Signature: (merge-input of result type OUT, governing predicate PRED,
  // source vector IN) -> OUT.  Pure computation (IntrNoMem).
  class Builtin_SVCVT<LLVMType OUT, LLVMType PRED, LLVMType IN>
      : DefaultAttrsIntrinsic<[OUT], [OUT, PRED, IN], [IntrNoMem]>;
1404 }
1405
1406 //===----------------------------------------------------------------------===//
1407 // SVE
1408
1409 let TargetPrefix = "aarch64" in {  // All intrinsics start with "llvm.aarch64.".
1410
// Predicated reduction: (predicate, vector) -> scalar of the vector's
// element type.
class AdvSIMD_SVE_Reduce_Intrinsic
  : DefaultAttrsIntrinsic<[LLVMVectorElementType<0>],
              [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
               llvm_anyvector_ty],
              [IntrNoMem]>;

// Predicated add reduction whose scalar result is always i64, regardless of
// the source element type.
class AdvSIMD_SVE_SADDV_Reduce_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_i64_ty],
              [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
               llvm_anyvector_ty],
              [IntrNoMem]>;

// "While" comparison: two scalar integers of the same (overloaded) type
// produce a vector result (used below for the whilele/whilelo/... defs).
class AdvSIMD_SVE_WHILE_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_anyint_ty, LLVMMatchType<1>],
                [IntrNoMem]>;
1427
// Gather load, scalar base + vector of 64-bit offsets:
// (predicate, pointer-to-element, i64 offset vector) -> vector.
class AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [
                  LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                  LLVMPointerToElt<0>,
                  LLVMScalarOrSameVectorWidth<0, llvm_i64_ty>
                ],
                [IntrReadMem, IntrArgMemOnly]>;

// First-faulting variant: same operands, but modeled with
// IntrInaccessibleMemOrArgMemOnly because it also updates the FFR
// (which LLVM treats as inaccessible memory; see the FFR defs below).
class AdvSIMD_GatherLoad_SV_64b_Offsets_WriteFFR_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [
                  LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                  LLVMPointerToElt<0>,
                  LLVMScalarOrSameVectorWidth<0, llvm_i64_ty>
                ],
                [IntrInaccessibleMemOrArgMemOnly]>;

// Gather load, scalar base + vector of 32-bit offsets.
class AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [
                  LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                  LLVMPointerToElt<0>,
                  LLVMScalarOrSameVectorWidth<0, llvm_i32_ty>
                ],
                [IntrReadMem, IntrArgMemOnly]>;

// FFR-updating variant of the 32-bit-offset gather load.
class AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [
                  LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                  LLVMPointerToElt<0>,
                  LLVMScalarOrSameVectorWidth<0, llvm_i32_ty>
                ],
                [IntrInaccessibleMemOrArgMemOnly]>;

// Gather load, vector of bases + scalar offset:
// (predicate, base vector, i64 offset) -> vector.
class AdvSIMD_GatherLoad_VS_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [
                  LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                  llvm_anyvector_ty,
                  llvm_i64_ty
                ],
                [IntrReadMem]>;

// FFR-updating variant of the vector-plus-scalar gather load.
class AdvSIMD_GatherLoad_VS_WriteFFR_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [
                  LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                  llvm_anyvector_ty,
                  llvm_i64_ty
                ],
                [IntrInaccessibleMemOrArgMemOnly]>;
1481
// Scatter store, scalar base + vector of 64-bit offsets:
// (data vector, predicate, pointer-to-element, i64 offset vector).
class AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic
    : DefaultAttrsIntrinsic<[],
               [
                 llvm_anyvector_ty,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMPointerToElt<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i64_ty>
               ],
               [IntrWriteMem, IntrArgMemOnly]>;

// Scatter store, scalar base + vector of 32-bit offsets.
class AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic
    : DefaultAttrsIntrinsic<[],
               [
                 llvm_anyvector_ty,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMPointerToElt<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i32_ty>
               ],
               [IntrWriteMem, IntrArgMemOnly]>;

// Scatter store, vector of bases + scalar offset:
// (data vector, predicate, base vector, i64 offset).
class AdvSIMD_ScatterStore_VS_Intrinsic
    : DefaultAttrsIntrinsic<[],
               [
                 llvm_anyvector_ty,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 llvm_anyvector_ty, llvm_i64_ty
               ],
               [IntrWriteMem]>;
1510
1511
// Gather prefetch, scalar base + vector of offsets.  The prefetch operation
// (Prfop) must be an immediate, and the base pointer is not captured.
// NOTE(review): modeled as IntrInaccessibleMemOrArgMemOnly rather than a
// plain read/write since a prefetch has no program-visible data effect.
class SVE_gather_prf_SV
    : DefaultAttrsIntrinsic<[],
                [
                  LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, // Predicate
                  llvm_ptr_ty, // Base address
                  llvm_anyvector_ty, // Offsets
                  llvm_i32_ty // Prfop
                ],
                [IntrInaccessibleMemOrArgMemOnly, NoCapture<ArgIndex<1>>, ImmArg<ArgIndex<3>>]>;

// Gather prefetch, vector of bases + scalar offset; Prfop is an immediate.
class SVE_gather_prf_VS
    : DefaultAttrsIntrinsic<[],
                [
                  LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, // Predicate
                  llvm_anyvector_ty, // Base addresses
                  llvm_i64_ty, // Scalar offset
                  llvm_i32_ty // Prfop
                ],
                [IntrInaccessibleMemOrArgMemOnly, ImmArg<ArgIndex<3>>]>;
1531
// Matrix multiply-accumulate: full-width accumulator plus two sources with
// quarter-width elements.
class SVE_MatMul_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, LLVMSubdivide4VectorType<0>, LLVMSubdivide4VectorType<0>],
                [IntrNoMem]>;

// BF16 op: f32 accumulator plus two bf16 sources, f32 result.
class SVE_4Vec_BF16
    : DefaultAttrsIntrinsic<[llvm_nxv4f32_ty],
                [llvm_nxv4f32_ty, llvm_nxv8bf16_ty, llvm_nxv8bf16_ty],
                [IntrNoMem]>;

// Indexed BF16 op: as SVE_4Vec_BF16 plus an immediate lane index.
class SVE_4Vec_BF16_Indexed
    : DefaultAttrsIntrinsic<[llvm_nxv4f32_ty],
                [llvm_nxv4f32_ty, llvm_nxv8bf16_ty, llvm_nxv8bf16_ty, llvm_i64_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>]>;
1546
1547 //
1548 // Vector tuple creation intrinsics (ACLE)
1549 //
1550
1551 def int_aarch64_sve_tuple_create2 : AdvSIMD_SVE_Create_2Vector_Tuple;
1552 def int_aarch64_sve_tuple_create3 : AdvSIMD_SVE_Create_3Vector_Tuple;
1553 def int_aarch64_sve_tuple_create4 : AdvSIMD_SVE_Create_4Vector_Tuple;
1554
1555 //
1556 // Vector tuple insertion/extraction intrinsics (ACLE)
1557 //
1558
1559 def int_aarch64_sve_tuple_get : AdvSIMD_SVE_Get_Vector_Tuple;
1560 def int_aarch64_sve_tuple_set : AdvSIMD_SVE_Set_Vector_Tuple;
1561
1562 //
1563 // Loads
1564 //
1565
1566 def int_aarch64_sve_ld1   : AdvSIMD_1Vec_PredLoad_Intrinsic;
1567
1568 def int_aarch64_sve_ld2 : AdvSIMD_ManyVec_PredLoad_Intrinsic;
1569 def int_aarch64_sve_ld3 : AdvSIMD_ManyVec_PredLoad_Intrinsic;
1570 def int_aarch64_sve_ld4 : AdvSIMD_ManyVec_PredLoad_Intrinsic;
1571
1572 def int_aarch64_sve_ld2_sret : AdvSIMD_2Vec_PredLoad_Intrinsic;
1573 def int_aarch64_sve_ld3_sret : AdvSIMD_3Vec_PredLoad_Intrinsic;
1574 def int_aarch64_sve_ld4_sret : AdvSIMD_4Vec_PredLoad_Intrinsic;
1575
1576 def int_aarch64_sve_ldnt1 : AdvSIMD_1Vec_PredLoad_Intrinsic;
1577 def int_aarch64_sve_ldnf1 : AdvSIMD_1Vec_PredLoad_WriteFFR_Intrinsic;
1578 def int_aarch64_sve_ldff1 : AdvSIMD_1Vec_PredLoad_WriteFFR_Intrinsic;
1579
1580 def int_aarch64_sve_ld1rq : AdvSIMD_1Vec_PredLoad_Intrinsic;
1581 def int_aarch64_sve_ld1ro : AdvSIMD_1Vec_PredLoad_Intrinsic;
1582
1583 //
1584 // Stores
1585 //
1586
1587 def int_aarch64_sve_st1  : AdvSIMD_1Vec_PredStore_Intrinsic;
1588 def int_aarch64_sve_st2  : AdvSIMD_2Vec_PredStore_Intrinsic;
1589 def int_aarch64_sve_st3  : AdvSIMD_3Vec_PredStore_Intrinsic;
1590 def int_aarch64_sve_st4  : AdvSIMD_4Vec_PredStore_Intrinsic;
1591
1592 def int_aarch64_sve_stnt1 : AdvSIMD_1Vec_PredStore_Intrinsic;
1593
1594 //
1595 // Prefetches
1596 //
1597
1598 def int_aarch64_sve_prf
1599   : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, llvm_ptr_ty, llvm_i32_ty],
1600                   [IntrArgMemOnly, ImmArg<ArgIndex<2>>]>;
1601
1602 // Scalar + 32-bit scaled offset vector, zero extend, packed and
1603 // unpacked.
1604 def int_aarch64_sve_prfb_gather_uxtw_index : SVE_gather_prf_SV;
1605 def int_aarch64_sve_prfh_gather_uxtw_index : SVE_gather_prf_SV;
1606 def int_aarch64_sve_prfw_gather_uxtw_index : SVE_gather_prf_SV;
1607 def int_aarch64_sve_prfd_gather_uxtw_index : SVE_gather_prf_SV;
1608
1609 // Scalar + 32-bit scaled offset vector, sign extend, packed and
1610 // unpacked.
1611 def int_aarch64_sve_prfb_gather_sxtw_index : SVE_gather_prf_SV;
1612 def int_aarch64_sve_prfw_gather_sxtw_index : SVE_gather_prf_SV;
1613 def int_aarch64_sve_prfh_gather_sxtw_index : SVE_gather_prf_SV;
1614 def int_aarch64_sve_prfd_gather_sxtw_index : SVE_gather_prf_SV;
1615
1616 // Scalar + 64-bit scaled offset vector.
1617 def int_aarch64_sve_prfb_gather_index : SVE_gather_prf_SV;
1618 def int_aarch64_sve_prfh_gather_index : SVE_gather_prf_SV;
1619 def int_aarch64_sve_prfw_gather_index : SVE_gather_prf_SV;
1620 def int_aarch64_sve_prfd_gather_index : SVE_gather_prf_SV;
1621
1622 // Vector + scalar.
1623 def int_aarch64_sve_prfb_gather_scalar_offset : SVE_gather_prf_VS;
1624 def int_aarch64_sve_prfh_gather_scalar_offset : SVE_gather_prf_VS;
1625 def int_aarch64_sve_prfw_gather_scalar_offset : SVE_gather_prf_VS;
1626 def int_aarch64_sve_prfd_gather_scalar_offset : SVE_gather_prf_VS;
1627
1628 //
1629 // Scalar to vector operations
1630 //
1631
1632 def int_aarch64_sve_dup : AdvSIMD_SVE_DUP_Intrinsic;
1633 def int_aarch64_sve_dup_x : AdvSIMD_SVE_DUP_Unpred_Intrinsic;
1634
1635 def int_aarch64_sve_index : AdvSIMD_SVE_Index_Intrinsic;
1636
1637 //
1638 // Address calculation
1639 //
1640
1641 def int_aarch64_sve_adrb : AdvSIMD_2VectorArg_Intrinsic;
1642 def int_aarch64_sve_adrh : AdvSIMD_2VectorArg_Intrinsic;
1643 def int_aarch64_sve_adrw : AdvSIMD_2VectorArg_Intrinsic;
1644 def int_aarch64_sve_adrd : AdvSIMD_2VectorArg_Intrinsic;
1645
1646 //
1647 // Integer arithmetic
1648 //
1649
1650 def int_aarch64_sve_add   : AdvSIMD_Pred2VectorArg_Intrinsic;
1651 def int_aarch64_sve_sub   : AdvSIMD_Pred2VectorArg_Intrinsic;
1652 def int_aarch64_sve_subr  : AdvSIMD_Pred2VectorArg_Intrinsic;
1653
1654 def int_aarch64_sve_pmul       : AdvSIMD_2VectorArg_Intrinsic;
1655
1656 def int_aarch64_sve_mul        : AdvSIMD_Pred2VectorArg_Intrinsic;
1657 def int_aarch64_sve_mul_lane   : AdvSIMD_2VectorArgIndexed_Intrinsic;
1658 def int_aarch64_sve_smulh      : AdvSIMD_Pred2VectorArg_Intrinsic;
1659 def int_aarch64_sve_umulh      : AdvSIMD_Pred2VectorArg_Intrinsic;
1660
1661 def int_aarch64_sve_sdiv       : AdvSIMD_Pred2VectorArg_Intrinsic;
1662 def int_aarch64_sve_udiv       : AdvSIMD_Pred2VectorArg_Intrinsic;
1663 def int_aarch64_sve_sdivr      : AdvSIMD_Pred2VectorArg_Intrinsic;
1664 def int_aarch64_sve_udivr      : AdvSIMD_Pred2VectorArg_Intrinsic;
1665
1666 def int_aarch64_sve_smax       : AdvSIMD_Pred2VectorArg_Intrinsic;
1667 def int_aarch64_sve_umax       : AdvSIMD_Pred2VectorArg_Intrinsic;
1668 def int_aarch64_sve_smin       : AdvSIMD_Pred2VectorArg_Intrinsic;
1669 def int_aarch64_sve_umin       : AdvSIMD_Pred2VectorArg_Intrinsic;
1670 def int_aarch64_sve_sabd       : AdvSIMD_Pred2VectorArg_Intrinsic;
1671 def int_aarch64_sve_uabd       : AdvSIMD_Pred2VectorArg_Intrinsic;
1672
1673 def int_aarch64_sve_mad        : AdvSIMD_Pred3VectorArg_Intrinsic;
1674 def int_aarch64_sve_msb        : AdvSIMD_Pred3VectorArg_Intrinsic;
1675 def int_aarch64_sve_mla        : AdvSIMD_Pred3VectorArg_Intrinsic;
1676 def int_aarch64_sve_mla_lane   : AdvSIMD_3VectorArgIndexed_Intrinsic;
1677 def int_aarch64_sve_mls        : AdvSIMD_Pred3VectorArg_Intrinsic;
1678 def int_aarch64_sve_mls_lane   : AdvSIMD_3VectorArgIndexed_Intrinsic;
1679
1680 def int_aarch64_sve_saddv      : AdvSIMD_SVE_SADDV_Reduce_Intrinsic;
1681 def int_aarch64_sve_uaddv      : AdvSIMD_SVE_SADDV_Reduce_Intrinsic;
1682
1683 def int_aarch64_sve_smaxv      : AdvSIMD_SVE_Reduce_Intrinsic;
1684 def int_aarch64_sve_umaxv      : AdvSIMD_SVE_Reduce_Intrinsic;
1685 def int_aarch64_sve_sminv      : AdvSIMD_SVE_Reduce_Intrinsic;
1686 def int_aarch64_sve_uminv      : AdvSIMD_SVE_Reduce_Intrinsic;
1687
1688 def int_aarch64_sve_orv        : AdvSIMD_SVE_Reduce_Intrinsic;
1689 def int_aarch64_sve_eorv       : AdvSIMD_SVE_Reduce_Intrinsic;
1690 def int_aarch64_sve_andv       : AdvSIMD_SVE_Reduce_Intrinsic;
1691
1692 def int_aarch64_sve_abs : AdvSIMD_Merged1VectorArg_Intrinsic;
1693 def int_aarch64_sve_neg : AdvSIMD_Merged1VectorArg_Intrinsic;
1694
1695 def int_aarch64_sve_sdot      : AdvSIMD_SVE_DOT_Intrinsic;
1696 def int_aarch64_sve_sdot_lane : AdvSIMD_SVE_DOT_Indexed_Intrinsic;
1697
1698 def int_aarch64_sve_udot      : AdvSIMD_SVE_DOT_Intrinsic;
1699 def int_aarch64_sve_udot_lane : AdvSIMD_SVE_DOT_Indexed_Intrinsic;
1700
1701 def int_aarch64_sve_sqadd_x   : AdvSIMD_2VectorArg_Intrinsic;
1702 def int_aarch64_sve_sqsub_x   : AdvSIMD_2VectorArg_Intrinsic;
1703 def int_aarch64_sve_uqadd_x   : AdvSIMD_2VectorArg_Intrinsic;
1704 def int_aarch64_sve_uqsub_x   : AdvSIMD_2VectorArg_Intrinsic;
1705
1706 // Shifts
1707
1708 def int_aarch64_sve_asr      : AdvSIMD_Pred2VectorArg_Intrinsic;
1709 def int_aarch64_sve_asr_wide : AdvSIMD_SVE_ShiftWide_Intrinsic;
1710 def int_aarch64_sve_asrd     : AdvSIMD_SVE_ShiftByImm_Intrinsic;
1711 def int_aarch64_sve_insr     : AdvSIMD_SVE_INSR_Intrinsic;
1712 def int_aarch64_sve_lsl      : AdvSIMD_Pred2VectorArg_Intrinsic;
1713 def int_aarch64_sve_lsl_wide : AdvSIMD_SVE_ShiftWide_Intrinsic;
1714 def int_aarch64_sve_lsr      : AdvSIMD_Pred2VectorArg_Intrinsic;
1715 def int_aarch64_sve_lsr_wide : AdvSIMD_SVE_ShiftWide_Intrinsic;
1716
1717 //
1718 // Integer comparisons
1719 //
1720
1721 def int_aarch64_sve_cmpeq : AdvSIMD_SVE_Compare_Intrinsic;
1722 def int_aarch64_sve_cmpge : AdvSIMD_SVE_Compare_Intrinsic;
1723 def int_aarch64_sve_cmpgt : AdvSIMD_SVE_Compare_Intrinsic;
1724 def int_aarch64_sve_cmphi : AdvSIMD_SVE_Compare_Intrinsic;
1725 def int_aarch64_sve_cmphs : AdvSIMD_SVE_Compare_Intrinsic;
1726 def int_aarch64_sve_cmpne : AdvSIMD_SVE_Compare_Intrinsic;
1727
1728 def int_aarch64_sve_cmpeq_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
1729 def int_aarch64_sve_cmpge_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
1730 def int_aarch64_sve_cmpgt_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
1731 def int_aarch64_sve_cmphi_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
1732 def int_aarch64_sve_cmphs_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
1733 def int_aarch64_sve_cmple_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
1734 def int_aarch64_sve_cmplo_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
1735 def int_aarch64_sve_cmpls_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
1736 def int_aarch64_sve_cmplt_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
1737 def int_aarch64_sve_cmpne_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
1738
1739 //
1740 // Counting bits
1741 //
1742
1743 def int_aarch64_sve_cls : AdvSIMD_Merged1VectorArg_Intrinsic;
1744 def int_aarch64_sve_clz : AdvSIMD_Merged1VectorArg_Intrinsic;
1745 def int_aarch64_sve_cnt : AdvSIMD_SVE_CNT_Intrinsic;
1746
1747 //
1748 // Counting elements
1749 //
1750
1751 def int_aarch64_sve_cntb : AdvSIMD_SVE_CNTB_Intrinsic;
1752 def int_aarch64_sve_cnth : AdvSIMD_SVE_CNTB_Intrinsic;
1753 def int_aarch64_sve_cntw : AdvSIMD_SVE_CNTB_Intrinsic;
1754 def int_aarch64_sve_cntd : AdvSIMD_SVE_CNTB_Intrinsic;
1755
1756 def int_aarch64_sve_cntp : AdvSIMD_SVE_CNTP_Intrinsic;
1757
1758 //
1759 // FFR manipulation
1760 //
1761
1762 def int_aarch64_sve_rdffr   : GCCBuiltin<"__builtin_sve_svrdffr">,   DefaultAttrsIntrinsic<[llvm_nxv16i1_ty], [], [IntrReadMem, IntrInaccessibleMemOnly]>;
1763 def int_aarch64_sve_rdffr_z : GCCBuiltin<"__builtin_sve_svrdffr_z">, DefaultAttrsIntrinsic<[llvm_nxv16i1_ty], [llvm_nxv16i1_ty], [IntrReadMem, IntrInaccessibleMemOnly]>;
1764 def int_aarch64_sve_setffr  : GCCBuiltin<"__builtin_sve_svsetffr">,  DefaultAttrsIntrinsic<[], [], [IntrWriteMem, IntrInaccessibleMemOnly]>;
1765 def int_aarch64_sve_wrffr   : GCCBuiltin<"__builtin_sve_svwrffr">,   DefaultAttrsIntrinsic<[], [llvm_nxv16i1_ty], [IntrWriteMem, IntrInaccessibleMemOnly]>;
1766
1767 //
1768 // Saturating scalar arithmetic
1769 //
1770
1771 def int_aarch64_sve_sqdech : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
1772 def int_aarch64_sve_sqdecw : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
1773 def int_aarch64_sve_sqdecd : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
1774 def int_aarch64_sve_sqdecp : AdvSIMD_SVE_Saturating_Intrinsic;
1775
1776 def int_aarch64_sve_sqdecb_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1777 def int_aarch64_sve_sqdecb_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1778 def int_aarch64_sve_sqdech_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1779 def int_aarch64_sve_sqdech_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1780 def int_aarch64_sve_sqdecw_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1781 def int_aarch64_sve_sqdecw_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1782 def int_aarch64_sve_sqdecd_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1783 def int_aarch64_sve_sqdecd_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1784 def int_aarch64_sve_sqdecp_n32 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i32_ty>;
1785 def int_aarch64_sve_sqdecp_n64 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i64_ty>;
1786
1787 def int_aarch64_sve_sqinch : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
1788 def int_aarch64_sve_sqincw : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
1789 def int_aarch64_sve_sqincd : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
1790 def int_aarch64_sve_sqincp : AdvSIMD_SVE_Saturating_Intrinsic;
1791
1792 def int_aarch64_sve_sqincb_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1793 def int_aarch64_sve_sqincb_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1794 def int_aarch64_sve_sqinch_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1795 def int_aarch64_sve_sqinch_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1796 def int_aarch64_sve_sqincw_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1797 def int_aarch64_sve_sqincw_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1798 def int_aarch64_sve_sqincd_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1799 def int_aarch64_sve_sqincd_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1800 def int_aarch64_sve_sqincp_n32 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i32_ty>;
1801 def int_aarch64_sve_sqincp_n64 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i64_ty>;
1802
1803 def int_aarch64_sve_uqdech : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
1804 def int_aarch64_sve_uqdecw : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
1805 def int_aarch64_sve_uqdecd : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
1806 def int_aarch64_sve_uqdecp : AdvSIMD_SVE_Saturating_Intrinsic;
1807
1808 def int_aarch64_sve_uqdecb_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1809 def int_aarch64_sve_uqdecb_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1810 def int_aarch64_sve_uqdech_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1811 def int_aarch64_sve_uqdech_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1812 def int_aarch64_sve_uqdecw_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1813 def int_aarch64_sve_uqdecw_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1814 def int_aarch64_sve_uqdecd_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1815 def int_aarch64_sve_uqdecd_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1816 def int_aarch64_sve_uqdecp_n32 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i32_ty>;
1817 def int_aarch64_sve_uqdecp_n64 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i64_ty>;
1818
1819 def int_aarch64_sve_uqinch : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
1820 def int_aarch64_sve_uqincw : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
1821 def int_aarch64_sve_uqincd : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
1822 def int_aarch64_sve_uqincp : AdvSIMD_SVE_Saturating_Intrinsic;
1823
1824 def int_aarch64_sve_uqincb_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1825 def int_aarch64_sve_uqincb_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1826 def int_aarch64_sve_uqinch_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1827 def int_aarch64_sve_uqinch_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1828 def int_aarch64_sve_uqincw_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1829 def int_aarch64_sve_uqincw_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1830 def int_aarch64_sve_uqincd_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
1831 def int_aarch64_sve_uqincd_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
1832 def int_aarch64_sve_uqincp_n32 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i32_ty>;
1833 def int_aarch64_sve_uqincp_n64 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i64_ty>;
1834
1835 //
1836 // Reversal
1837 //
1838
1839 def int_aarch64_sve_rbit : AdvSIMD_Merged1VectorArg_Intrinsic;
1840 def int_aarch64_sve_revb : AdvSIMD_Merged1VectorArg_Intrinsic;
1841 def int_aarch64_sve_revh : AdvSIMD_Merged1VectorArg_Intrinsic;
1842 def int_aarch64_sve_revw : AdvSIMD_Merged1VectorArg_Intrinsic;
1843
1844 //
1845 // Permutations and selection
1846 //
1847
1848 def int_aarch64_sve_clasta    : AdvSIMD_Pred2VectorArg_Intrinsic;
1849 def int_aarch64_sve_clasta_n  : AdvSIMD_SVE_ReduceWithInit_Intrinsic;
1850 def int_aarch64_sve_clastb    : AdvSIMD_Pred2VectorArg_Intrinsic;
1851 def int_aarch64_sve_clastb_n  : AdvSIMD_SVE_ReduceWithInit_Intrinsic;
1852 def int_aarch64_sve_compact   : AdvSIMD_Pred1VectorArg_Intrinsic;
1853 def int_aarch64_sve_dupq_lane : AdvSIMD_SVE_DUPQ_Intrinsic;
1854 def int_aarch64_sve_ext       : AdvSIMD_2VectorArgIndexed_Intrinsic;
1855 def int_aarch64_sve_sel       : AdvSIMD_Pred2VectorArg_Intrinsic;
1856 def int_aarch64_sve_lasta     : AdvSIMD_SVE_Reduce_Intrinsic;
1857 def int_aarch64_sve_lastb     : AdvSIMD_SVE_Reduce_Intrinsic;
1858 def int_aarch64_sve_rev       : AdvSIMD_1VectorArg_Intrinsic;
1859 def int_aarch64_sve_splice    : AdvSIMD_Pred2VectorArg_Intrinsic;
1860 def int_aarch64_sve_sunpkhi   : AdvSIMD_SVE_Unpack_Intrinsic;
1861 def int_aarch64_sve_sunpklo   : AdvSIMD_SVE_Unpack_Intrinsic;
1862 def int_aarch64_sve_tbl       : AdvSIMD_SVE_TBL_Intrinsic;
1863 def int_aarch64_sve_trn1      : AdvSIMD_2VectorArg_Intrinsic;
1864 def int_aarch64_sve_trn2      : AdvSIMD_2VectorArg_Intrinsic;
1865 def int_aarch64_sve_trn1q     : AdvSIMD_2VectorArg_Intrinsic;
1866 def int_aarch64_sve_trn2q     : AdvSIMD_2VectorArg_Intrinsic;
1867 def int_aarch64_sve_uunpkhi   : AdvSIMD_SVE_Unpack_Intrinsic;
1868 def int_aarch64_sve_uunpklo   : AdvSIMD_SVE_Unpack_Intrinsic;
1869 def int_aarch64_sve_uzp1      : AdvSIMD_2VectorArg_Intrinsic;
1870 def int_aarch64_sve_uzp2      : AdvSIMD_2VectorArg_Intrinsic;
1871 def int_aarch64_sve_uzp1q     : AdvSIMD_2VectorArg_Intrinsic;
1872 def int_aarch64_sve_uzp2q     : AdvSIMD_2VectorArg_Intrinsic;
1873 def int_aarch64_sve_zip1      : AdvSIMD_2VectorArg_Intrinsic;
1874 def int_aarch64_sve_zip2      : AdvSIMD_2VectorArg_Intrinsic;
1875 def int_aarch64_sve_zip1q     : AdvSIMD_2VectorArg_Intrinsic;
1876 def int_aarch64_sve_zip2q     : AdvSIMD_2VectorArg_Intrinsic;
1877
1878 //
1879 // Logical operations
1880 //
1881
1882 def int_aarch64_sve_and  : AdvSIMD_Pred2VectorArg_Intrinsic;
1883 def int_aarch64_sve_bic  : AdvSIMD_Pred2VectorArg_Intrinsic;
1884 def int_aarch64_sve_cnot : AdvSIMD_Merged1VectorArg_Intrinsic;
1885 def int_aarch64_sve_eor  : AdvSIMD_Pred2VectorArg_Intrinsic;
1886 def int_aarch64_sve_not  : AdvSIMD_Merged1VectorArg_Intrinsic;
1887 def int_aarch64_sve_orr  : AdvSIMD_Pred2VectorArg_Intrinsic;
1888
1889 //
1890 // Conversion
1891 //
1892
1893 def int_aarch64_sve_sxtb : AdvSIMD_Merged1VectorArg_Intrinsic;
1894 def int_aarch64_sve_sxth : AdvSIMD_Merged1VectorArg_Intrinsic;
1895 def int_aarch64_sve_sxtw : AdvSIMD_Merged1VectorArg_Intrinsic;
1896 def int_aarch64_sve_uxtb : AdvSIMD_Merged1VectorArg_Intrinsic;
1897 def int_aarch64_sve_uxth : AdvSIMD_Merged1VectorArg_Intrinsic;
1898 def int_aarch64_sve_uxtw : AdvSIMD_Merged1VectorArg_Intrinsic;
1899
1900 //
1901 // While comparisons
1902 //
1903
1904 def int_aarch64_sve_whilele : AdvSIMD_SVE_WHILE_Intrinsic;
1905 def int_aarch64_sve_whilelo : AdvSIMD_SVE_WHILE_Intrinsic;
1906 def int_aarch64_sve_whilels : AdvSIMD_SVE_WHILE_Intrinsic;
1907 def int_aarch64_sve_whilelt : AdvSIMD_SVE_WHILE_Intrinsic;
1908 def int_aarch64_sve_whilege : AdvSIMD_SVE_WHILE_Intrinsic;
1909 def int_aarch64_sve_whilegt : AdvSIMD_SVE_WHILE_Intrinsic;
1910 def int_aarch64_sve_whilehs : AdvSIMD_SVE_WHILE_Intrinsic;
1911 def int_aarch64_sve_whilehi : AdvSIMD_SVE_WHILE_Intrinsic;
1912
1913 //
1914 // Floating-point arithmetic
1915 //
1916
1917 def int_aarch64_sve_fabd       : AdvSIMD_Pred2VectorArg_Intrinsic;
1918 def int_aarch64_sve_fabs       : AdvSIMD_Merged1VectorArg_Intrinsic;
1919 def int_aarch64_sve_fadd       : AdvSIMD_Pred2VectorArg_Intrinsic;
1920 def int_aarch64_sve_fcadd      : AdvSIMD_SVE_CADD_Intrinsic;
1921 def int_aarch64_sve_fcmla      : AdvSIMD_SVE_CMLA_Intrinsic;
1922 def int_aarch64_sve_fcmla_lane : AdvSIMD_SVE_CMLA_LANE_Intrinsic;
1923 def int_aarch64_sve_fdiv       : AdvSIMD_Pred2VectorArg_Intrinsic;
1924 def int_aarch64_sve_fdivr      : AdvSIMD_Pred2VectorArg_Intrinsic;
1925 def int_aarch64_sve_fexpa_x    : AdvSIMD_SVE_EXPA_Intrinsic;
1926 def int_aarch64_sve_fmad       : AdvSIMD_Pred3VectorArg_Intrinsic;
1927 def int_aarch64_sve_fmax       : AdvSIMD_Pred2VectorArg_Intrinsic;
1928 def int_aarch64_sve_fmaxnm     : AdvSIMD_Pred2VectorArg_Intrinsic;
1929 def int_aarch64_sve_fmin       : AdvSIMD_Pred2VectorArg_Intrinsic;
1930 def int_aarch64_sve_fminnm     : AdvSIMD_Pred2VectorArg_Intrinsic;
1931 def int_aarch64_sve_fmla       : AdvSIMD_Pred3VectorArg_Intrinsic;
1932 def int_aarch64_sve_fmla_lane  : AdvSIMD_3VectorArgIndexed_Intrinsic;
1933 def int_aarch64_sve_fmls       : AdvSIMD_Pred3VectorArg_Intrinsic;
1934 def int_aarch64_sve_fmls_lane  : AdvSIMD_3VectorArgIndexed_Intrinsic;
1935 def int_aarch64_sve_fmsb       : AdvSIMD_Pred3VectorArg_Intrinsic;
1936 def int_aarch64_sve_fmul       : AdvSIMD_Pred2VectorArg_Intrinsic;
1937 def int_aarch64_sve_fmulx      : AdvSIMD_Pred2VectorArg_Intrinsic;
1938 def int_aarch64_sve_fneg       : AdvSIMD_Merged1VectorArg_Intrinsic;
1939 def int_aarch64_sve_fmul_lane  : AdvSIMD_2VectorArgIndexed_Intrinsic;
1940 def int_aarch64_sve_fnmad      : AdvSIMD_Pred3VectorArg_Intrinsic;
1941 def int_aarch64_sve_fnmla      : AdvSIMD_Pred3VectorArg_Intrinsic;
1942 def int_aarch64_sve_fnmls      : AdvSIMD_Pred3VectorArg_Intrinsic;
1943 def int_aarch64_sve_fnmsb      : AdvSIMD_Pred3VectorArg_Intrinsic;
1944 def int_aarch64_sve_frecpe_x   : AdvSIMD_1VectorArg_Intrinsic;
1945 def int_aarch64_sve_frecps_x   : AdvSIMD_2VectorArg_Intrinsic;
1946 def int_aarch64_sve_frecpx     : AdvSIMD_Merged1VectorArg_Intrinsic;
1947 def int_aarch64_sve_frinta     : AdvSIMD_Merged1VectorArg_Intrinsic;
1948 def int_aarch64_sve_frinti     : AdvSIMD_Merged1VectorArg_Intrinsic;
1949 def int_aarch64_sve_frintm     : AdvSIMD_Merged1VectorArg_Intrinsic;
1950 def int_aarch64_sve_frintn     : AdvSIMD_Merged1VectorArg_Intrinsic;
1951 def int_aarch64_sve_frintp     : AdvSIMD_Merged1VectorArg_Intrinsic;
1952 def int_aarch64_sve_frintx     : AdvSIMD_Merged1VectorArg_Intrinsic;
1953 def int_aarch64_sve_frintz     : AdvSIMD_Merged1VectorArg_Intrinsic;
1954 def int_aarch64_sve_frsqrte_x  : AdvSIMD_1VectorArg_Intrinsic;
1955 def int_aarch64_sve_frsqrts_x  : AdvSIMD_2VectorArg_Intrinsic;
1956 def int_aarch64_sve_fscale     : AdvSIMD_SVE_SCALE_Intrinsic;
1957 def int_aarch64_sve_fsqrt      : AdvSIMD_Merged1VectorArg_Intrinsic;
1958 def int_aarch64_sve_fsub       : AdvSIMD_Pred2VectorArg_Intrinsic;
1959 def int_aarch64_sve_fsubr      : AdvSIMD_Pred2VectorArg_Intrinsic;
1960 def int_aarch64_sve_ftmad_x    : AdvSIMD_2VectorArgIndexed_Intrinsic;
1961 def int_aarch64_sve_ftsmul_x   : AdvSIMD_SVE_TSMUL_Intrinsic;
1962 def int_aarch64_sve_ftssel_x   : AdvSIMD_SVE_TSMUL_Intrinsic;
1963
1964 //
1965 // Floating-point reductions
1966 //
1967
1968 def int_aarch64_sve_fadda   : AdvSIMD_SVE_ReduceWithInit_Intrinsic;
1969 def int_aarch64_sve_faddv   : AdvSIMD_SVE_Reduce_Intrinsic;
1970 def int_aarch64_sve_fmaxv   : AdvSIMD_SVE_Reduce_Intrinsic;
1971 def int_aarch64_sve_fmaxnmv : AdvSIMD_SVE_Reduce_Intrinsic;
1972 def int_aarch64_sve_fminv   : AdvSIMD_SVE_Reduce_Intrinsic;
1973 def int_aarch64_sve_fminnmv : AdvSIMD_SVE_Reduce_Intrinsic;
1974
1975 //
1976 // Floating-point conversions
1977 //
1978
1979 def int_aarch64_sve_fcvt   : AdvSIMD_SVE_FCVT_Intrinsic;
1980 def int_aarch64_sve_fcvtzs : AdvSIMD_SVE_FCVTZS_Intrinsic;
1981 def int_aarch64_sve_fcvtzu : AdvSIMD_SVE_FCVTZS_Intrinsic;
1982 def int_aarch64_sve_scvtf  : AdvSIMD_SVE_SCVTF_Intrinsic;
1983 def int_aarch64_sve_ucvtf  : AdvSIMD_SVE_SCVTF_Intrinsic;
1984
1985 //
1986 // Floating-point comparisons
1987 //
1988
1989 def int_aarch64_sve_facge : AdvSIMD_SVE_Compare_Intrinsic;
1990 def int_aarch64_sve_facgt : AdvSIMD_SVE_Compare_Intrinsic;
1991
1992 def int_aarch64_sve_fcmpeq : AdvSIMD_SVE_Compare_Intrinsic;
1993 def int_aarch64_sve_fcmpge : AdvSIMD_SVE_Compare_Intrinsic;
1994 def int_aarch64_sve_fcmpgt : AdvSIMD_SVE_Compare_Intrinsic;
1995 def int_aarch64_sve_fcmpne : AdvSIMD_SVE_Compare_Intrinsic;
1996 def int_aarch64_sve_fcmpuo : AdvSIMD_SVE_Compare_Intrinsic;
1997
// Conversions with fully-specified element types, exposed directly as Clang
// builtins via Builtin_SVCVT<result, predicate, source>.  The name suffix
// encodes <result><source>: e.g. _i32f16 converts f16 elements to i32.
def int_aarch64_sve_fcvtzs_i32f16   : Builtin_SVCVT<llvm_nxv4i32_ty, llvm_nxv4i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvtzs_i32f64   : Builtin_SVCVT<llvm_nxv4i32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;
def int_aarch64_sve_fcvtzs_i64f16   : Builtin_SVCVT<llvm_nxv2i64_ty, llvm_nxv2i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvtzs_i64f32   : Builtin_SVCVT<llvm_nxv2i64_ty, llvm_nxv2i1_ty, llvm_nxv4f32_ty>;

def int_aarch64_sve_fcvt_bf16f32    : Builtin_SVCVT<llvm_nxv8bf16_ty, llvm_nxv8i1_ty, llvm_nxv4f32_ty>;
def int_aarch64_sve_fcvtnt_bf16f32  : Builtin_SVCVT<llvm_nxv8bf16_ty, llvm_nxv8i1_ty, llvm_nxv4f32_ty>;

def int_aarch64_sve_fcvtzu_i32f16   : Builtin_SVCVT<llvm_nxv4i32_ty, llvm_nxv4i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvtzu_i32f64   : Builtin_SVCVT<llvm_nxv4i32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;
def int_aarch64_sve_fcvtzu_i64f16   : Builtin_SVCVT<llvm_nxv2i64_ty, llvm_nxv2i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvtzu_i64f32   : Builtin_SVCVT<llvm_nxv2i64_ty, llvm_nxv2i1_ty, llvm_nxv4f32_ty>;

// Narrowing float-to-float conversions.
def int_aarch64_sve_fcvt_f16f32     : Builtin_SVCVT<llvm_nxv8f16_ty, llvm_nxv4i1_ty, llvm_nxv4f32_ty>;
def int_aarch64_sve_fcvt_f16f64    : Builtin_SVCVT<llvm_nxv8f16_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;
def int_aarch64_sve_fcvt_f32f64     : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;

// Widening float-to-float conversions.
def int_aarch64_sve_fcvt_f32f16     : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv4i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvt_f64f16     : Builtin_SVCVT<llvm_nxv2f64_ty, llvm_nxv2i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvt_f64f32     : Builtin_SVCVT<llvm_nxv2f64_ty, llvm_nxv2i1_ty, llvm_nxv4f32_ty>;

def int_aarch64_sve_fcvtlt_f32f16   : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv4i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvtlt_f64f32   : Builtin_SVCVT<llvm_nxv2f64_ty, llvm_nxv2i1_ty, llvm_nxv4f32_ty>;
def int_aarch64_sve_fcvtnt_f16f32   : Builtin_SVCVT<llvm_nxv8f16_ty, llvm_nxv4i1_ty, llvm_nxv4f32_ty>;
def int_aarch64_sve_fcvtnt_f32f64   : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;

def int_aarch64_sve_fcvtx_f32f64    : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;
def int_aarch64_sve_fcvtxnt_f32f64  : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;

def int_aarch64_sve_scvtf_f16i32    : Builtin_SVCVT<llvm_nxv8f16_ty, llvm_nxv4i1_ty, llvm_nxv4i32_ty>;
def int_aarch64_sve_scvtf_f16i64    : Builtin_SVCVT<llvm_nxv8f16_ty, llvm_nxv2i1_ty, llvm_nxv2i64_ty>;
def int_aarch64_sve_scvtf_f32i64    : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2i64_ty>;
def int_aarch64_sve_scvtf_f64i32    : Builtin_SVCVT<llvm_nxv2f64_ty, llvm_nxv2i1_ty, llvm_nxv4i32_ty>;

def int_aarch64_sve_ucvtf_f16i32    : Builtin_SVCVT<llvm_nxv8f16_ty, llvm_nxv4i1_ty, llvm_nxv4i32_ty>;
def int_aarch64_sve_ucvtf_f16i64    : Builtin_SVCVT<llvm_nxv8f16_ty, llvm_nxv2i1_ty, llvm_nxv2i64_ty>;
def int_aarch64_sve_ucvtf_f32i64    : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2i64_ty>;
def int_aarch64_sve_ucvtf_f64i32    : Builtin_SVCVT<llvm_nxv2f64_ty, llvm_nxv2i1_ty, llvm_nxv4i32_ty>;

//
// Predicate creation
//

def int_aarch64_sve_ptrue : AdvSIMD_SVE_PTRUE_Intrinsic;

//
// Predicate operations
//

// brka/brkb exist in both merging (Merged1VectorArg) and zeroing ("_z",
// Pred1VectorArg) forms; the remaining "_z" logical ops are zeroing only.
def int_aarch64_sve_and_z   : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_bic_z   : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_brka    : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_brka_z  : AdvSIMD_Pred1VectorArg_Intrinsic;
def int_aarch64_sve_brkb    : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_brkb_z  : AdvSIMD_Pred1VectorArg_Intrinsic;
def int_aarch64_sve_brkn_z  : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_brkpa_z : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_brkpb_z : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_eor_z   : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_nand_z  : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_nor_z   : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_orn_z   : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_orr_z   : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_pfirst  : AdvSIMD_Pred1VectorArg_Intrinsic;
def int_aarch64_sve_pnext   : AdvSIMD_Pred1VectorArg_Intrinsic;
def int_aarch64_sve_punpkhi : AdvSIMD_SVE_PUNPKHI_Intrinsic;
def int_aarch64_sve_punpklo : AdvSIMD_SVE_PUNPKHI_Intrinsic;

//
// Testing predicates
//

def int_aarch64_sve_ptest_any   : AdvSIMD_SVE_PTEST_Intrinsic;
def int_aarch64_sve_ptest_first : AdvSIMD_SVE_PTEST_Intrinsic;
def int_aarch64_sve_ptest_last  : AdvSIMD_SVE_PTEST_Intrinsic;

//
// Reinterpreting data
//

// Casts between the generic svbool type (nxv16i1) and the narrower
// element-sized predicate types (the overloaded llvm_anyvector_ty).
def int_aarch64_sve_convert_from_svbool : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                                                    [llvm_nxv16i1_ty],
                                                    [IntrNoMem]>;

def int_aarch64_sve_convert_to_svbool : DefaultAttrsIntrinsic<[llvm_nxv16i1_ty],
                                                  [llvm_anyvector_ty],
                                                  [IntrNoMem]>;

//
// Gather loads: scalar base + vector offsets
//

// 64 bit unscaled offsets
def int_aarch64_sve_ld1_gather : AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic;

// 64 bit scaled offsets
def int_aarch64_sve_ld1_gather_index : AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic;

// 32 bit unscaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
def int_aarch64_sve_ld1_gather_sxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic;
def int_aarch64_sve_ld1_gather_uxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic;

// 32 bit scaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
def int_aarch64_sve_ld1_gather_sxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic;
def int_aarch64_sve_ld1_gather_uxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic;

//
// Gather loads: vector base + scalar offset
//

def int_aarch64_sve_ld1_gather_scalar_offset : AdvSIMD_GatherLoad_VS_Intrinsic;


//
// First-faulting gather loads: scalar base + vector offsets
//

// The _WriteFFR classes model the side effect on the first-fault register.

// 64 bit unscaled offsets
def int_aarch64_sve_ldff1_gather : AdvSIMD_GatherLoad_SV_64b_Offsets_WriteFFR_Intrinsic;

// 64 bit scaled offsets
def int_aarch64_sve_ldff1_gather_index : AdvSIMD_GatherLoad_SV_64b_Offsets_WriteFFR_Intrinsic;

// 32 bit unscaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
def int_aarch64_sve_ldff1_gather_sxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic;
def int_aarch64_sve_ldff1_gather_uxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic;

// 32 bit scaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
def int_aarch64_sve_ldff1_gather_sxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic;
def int_aarch64_sve_ldff1_gather_uxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic;

//
// First-faulting gather loads: vector base + scalar offset
//

def int_aarch64_sve_ldff1_gather_scalar_offset : AdvSIMD_GatherLoad_VS_WriteFFR_Intrinsic;


//
// Non-temporal gather loads: scalar base + vector offsets
//

// 64 bit unscaled offsets
def int_aarch64_sve_ldnt1_gather : AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic;

// 64 bit indices
def int_aarch64_sve_ldnt1_gather_index : AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic;

// 32 bit unscaled offsets, zero (uxtw) extended to 64 bits
def int_aarch64_sve_ldnt1_gather_uxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic;

//
// Non-temporal gather loads: vector base + scalar offset
//

def int_aarch64_sve_ldnt1_gather_scalar_offset  : AdvSIMD_GatherLoad_VS_Intrinsic;

//
// Scatter stores: scalar base + vector offsets
//

// 64 bit unscaled offsets
def int_aarch64_sve_st1_scatter : AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic;

// 64 bit scaled offsets
def int_aarch64_sve_st1_scatter_index
    : AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic;

// 32 bit unscaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
def int_aarch64_sve_st1_scatter_sxtw
    : AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic;

def int_aarch64_sve_st1_scatter_uxtw
    : AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic;

// 32 bit scaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
def int_aarch64_sve_st1_scatter_sxtw_index
    : AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic;

def int_aarch64_sve_st1_scatter_uxtw_index
    : AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic;

//
// Scatter stores: vector base + scalar offset
//

def int_aarch64_sve_st1_scatter_scalar_offset : AdvSIMD_ScatterStore_VS_Intrinsic;

//
// Non-temporal scatter stores: scalar base + vector offsets
//

// 64 bit unscaled offsets
def int_aarch64_sve_stnt1_scatter : AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic;

// 64 bit indices
def int_aarch64_sve_stnt1_scatter_index
    : AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic;

// 32 bit unscaled offsets, zero (uxtw) extended to 64 bits
def int_aarch64_sve_stnt1_scatter_uxtw : AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic;

//
// Non-temporal scatter stores: vector base + scalar offset
//

def int_aarch64_sve_stnt1_scatter_scalar_offset  : AdvSIMD_ScatterStore_VS_Intrinsic;

//
// SVE2 - Uniform DSP operations
//

// Same-width saturating/halving/rounding integer ops; *_lane and the
// Indexed classes take an immediate index as the final operand.
def int_aarch64_sve_saba          : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_shadd         : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_shsub         : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_shsubr        : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sli           : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_sqabs         : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_sqadd         : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sqdmulh       : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_sqdmulh_lane  : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_sqneg         : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_sqrdmlah      : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_sqrdmlah_lane : AdvSIMD_3VectorArgIndexed_Intrinsic;
def int_aarch64_sve_sqrdmlsh      : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_sqrdmlsh_lane : AdvSIMD_3VectorArgIndexed_Intrinsic;
def int_aarch64_sve_sqrdmulh      : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_sqrdmulh_lane : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_sqrshl        : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sqshl         : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sqshlu        : AdvSIMD_SVE_ShiftByImm_Intrinsic;
def int_aarch64_sve_sqsub         : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sqsubr        : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_srhadd        : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sri           : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_srshl         : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_srshr         : AdvSIMD_SVE_ShiftByImm_Intrinsic;
def int_aarch64_sve_srsra         : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_ssra          : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_suqadd        : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uaba          : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_uhadd         : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uhsub         : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uhsubr        : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uqadd         : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uqrshl        : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uqshl         : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uqsub         : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uqsubr        : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_urecpe        : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_urhadd        : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_urshl         : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_urshr         : AdvSIMD_SVE_ShiftByImm_Intrinsic;
def int_aarch64_sve_ursqrte       : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_ursra         : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_usqadd        : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_usra          : AdvSIMD_2VectorArgIndexed_Intrinsic;

//
// SVE2 - Widening DSP operations
//

// b/t suffixes select the bottom/top (even/odd) halves of the narrower
// operand; Long classes widen the element type, Wide classes keep one
// wide operand.
def int_aarch64_sve_sabalb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sabalt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sabdlb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_sabdlt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_saddlb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_saddlt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_saddwb : SVE2_2VectorArg_Wide_Intrinsic;
def int_aarch64_sve_saddwt : SVE2_2VectorArg_Wide_Intrinsic;
def int_aarch64_sve_sshllb : SVE2_1VectorArg_Long_Intrinsic;
def int_aarch64_sve_sshllt : SVE2_1VectorArg_Long_Intrinsic;
def int_aarch64_sve_ssublb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_ssublt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_ssubwb : SVE2_2VectorArg_Wide_Intrinsic;
def int_aarch64_sve_ssubwt : SVE2_2VectorArg_Wide_Intrinsic;
def int_aarch64_sve_uabalb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_uabalt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_uabdlb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_uabdlt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_uaddlb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_uaddlt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_uaddwb : SVE2_2VectorArg_Wide_Intrinsic;
def int_aarch64_sve_uaddwt : SVE2_2VectorArg_Wide_Intrinsic;
def int_aarch64_sve_ushllb : SVE2_1VectorArg_Long_Intrinsic;
def int_aarch64_sve_ushllt : SVE2_1VectorArg_Long_Intrinsic;
def int_aarch64_sve_usublb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_usublt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_usubwb : SVE2_2VectorArg_Wide_Intrinsic;
def int_aarch64_sve_usubwt : SVE2_2VectorArg_Wide_Intrinsic;

//
// SVE2 - Non-widening pairwise arithmetic
//

def int_aarch64_sve_addp    : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_faddp   : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fmaxp   : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fmaxnmp : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fminp   : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fminnmp : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_smaxp   : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sminp   : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_umaxp   : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uminp   : AdvSIMD_Pred2VectorArg_Intrinsic;

//
// SVE2 - Widening pairwise arithmetic
//

def int_aarch64_sve_sadalp : SVE2_2VectorArg_Pred_Long_Intrinsic;
def int_aarch64_sve_uadalp : SVE2_2VectorArg_Pred_Long_Intrinsic;

//
// SVE2 - Uniform complex integer arithmetic
//

// CADD/CMLA forms carry an immediate rotation operand; the _lane variants
// additionally take a lane index.
def int_aarch64_sve_cadd_x           : AdvSIMD_SVE2_CADD_Intrinsic;
def int_aarch64_sve_sqcadd_x         : AdvSIMD_SVE2_CADD_Intrinsic;
def int_aarch64_sve_cmla_x           : AdvSIMD_SVE2_CMLA_Intrinsic;
def int_aarch64_sve_cmla_lane_x      : AdvSIMD_SVE_CMLA_LANE_Intrinsic;
def int_aarch64_sve_sqrdcmlah_x      : AdvSIMD_SVE2_CMLA_Intrinsic;
def int_aarch64_sve_sqrdcmlah_lane_x : AdvSIMD_SVE_CMLA_LANE_Intrinsic;

//
// SVE2 - Widening complex integer arithmetic
//

def int_aarch64_sve_saddlbt   : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_ssublbt   : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_ssubltb   : SVE2_2VectorArg_Long_Intrinsic;

//
// SVE2 - Widening complex integer dot product
//

def int_aarch64_sve_cdot      : AdvSIMD_SVE_DOT_Indexed_Intrinsic;
def int_aarch64_sve_cdot_lane : AdvSIMD_SVE_CDOT_LANE_Intrinsic;

//
// SVE2 - Floating-point widening multiply-accumulate
//

def int_aarch64_sve_fmlalb        : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_fmlalb_lane   : SVE2_3VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_fmlalt        : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_fmlalt_lane   : SVE2_3VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_fmlslb        : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_fmlslb_lane   : SVE2_3VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_fmlslt        : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_fmlslt_lane   : SVE2_3VectorArgIndexed_Long_Intrinsic;

//
// SVE2 - Floating-point integer binary logarithm
//

def int_aarch64_sve_flogb : AdvSIMD_SVE_LOGB_Intrinsic;

//
// SVE2 - Vector histogram count
//

def int_aarch64_sve_histcnt : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_histseg : AdvSIMD_2VectorArg_Intrinsic;

//
// SVE2 - Character match
//

def int_aarch64_sve_match   : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_nmatch  : AdvSIMD_SVE_Compare_Intrinsic;

//
// SVE2 - Unary narrowing operations
//

// b variants produce the bottom half; t variants merge into the top half
// of an existing vector (Merged classes take it as an extra operand).
def int_aarch64_sve_sqxtnb  : SVE2_1VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_sqxtnt  : SVE2_Merged1VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_sqxtunb : SVE2_1VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_sqxtunt : SVE2_Merged1VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_uqxtnb  : SVE2_1VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_uqxtnt  : SVE2_Merged1VectorArg_Narrowing_Intrinsic;

//
// SVE2 - Binary narrowing DSP operations
//
def int_aarch64_sve_addhnb    : SVE2_2VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_addhnt    : SVE2_Merged2VectorArg_Narrowing_Intrinsic;

def int_aarch64_sve_raddhnb   : SVE2_2VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_raddhnt   : SVE2_Merged2VectorArg_Narrowing_Intrinsic;

def int_aarch64_sve_subhnb    : SVE2_2VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_subhnt    : SVE2_Merged2VectorArg_Narrowing_Intrinsic;

def int_aarch64_sve_rsubhnb   : SVE2_2VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_rsubhnt   : SVE2_Merged2VectorArg_Narrowing_Intrinsic;

// Narrowing shift right
def int_aarch64_sve_shrnb     : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_shrnt     : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

def int_aarch64_sve_rshrnb    : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_rshrnt    : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

// Saturating shift right - signed input/output
def int_aarch64_sve_sqshrnb   : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_sqshrnt   : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

def int_aarch64_sve_sqrshrnb  : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_sqrshrnt  : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

// Saturating shift right - unsigned input/output
def int_aarch64_sve_uqshrnb   : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_uqshrnt   : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

def int_aarch64_sve_uqrshrnb  : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_uqrshrnt  : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

// Saturating shift right - signed input, unsigned output
def int_aarch64_sve_sqshrunb  : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_sqshrunt  : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

def int_aarch64_sve_sqrshrunb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_sqrshrunt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

// SVE2 MLA LANE.
def int_aarch64_sve_smlalb_lane   : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_smlalt_lane   : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_umlalb_lane   : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_umlalt_lane   : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_smlslb_lane   : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_smlslt_lane   : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_umlslb_lane   : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_umlslt_lane   : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_smullb_lane   : SVE2_2VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_smullt_lane   : SVE2_2VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_umullb_lane   : SVE2_2VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_umullt_lane   : SVE2_2VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_sqdmlalb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_sqdmlalt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_sqdmlslb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_sqdmlslt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_sqdmullb_lane : SVE2_2VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_sqdmullt_lane : SVE2_2VectorArgIndexed_Long_Intrinsic;

// SVE2 MLA Unpredicated.
def int_aarch64_sve_smlalb      : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_smlalt      : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_umlalb      : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_umlalt      : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_smlslb      : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_smlslt      : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_umlslb      : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_umlslt      : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_smullb      : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_smullt      : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_umullb      : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_umullt      : SVE2_2VectorArg_Long_Intrinsic;

def int_aarch64_sve_sqdmlalb    : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmlalt    : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmlslb    : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmlslt    : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmullb    : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmullt    : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmlalbt   : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmlslbt   : SVE2_3VectorArg_Long_Intrinsic;

// SVE2 ADDSUB Long Unpredicated.
def int_aarch64_sve_adclb       : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_adclt       : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_sbclb       : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_sbclt       : AdvSIMD_3VectorArg_Intrinsic;

//
// SVE2 - Polynomial arithmetic
//
def int_aarch64_sve_eorbt       : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_eortb       : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_pmullb_pair : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_pmullt_pair : AdvSIMD_2VectorArg_Intrinsic;

//
// SVE2 bitwise ternary operations.
//
def int_aarch64_sve_eor3   : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_bcax   : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_bsl    : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_bsl1n  : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_bsl2n  : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_nbsl   : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_xar    : AdvSIMD_2VectorArgIndexed_Intrinsic;

//
// SVE2 - Optional AES, SHA-3 and SM4
//

// Fixed-type crypto intrinsics; each is tied to its ACLE builtin through
// the GCCBuiltin name so Clang can emit it directly.
def int_aarch64_sve_aesd    : GCCBuiltin<"__builtin_sve_svaesd_u8">,
                              DefaultAttrsIntrinsic<[llvm_nxv16i8_ty],
                                        [llvm_nxv16i8_ty, llvm_nxv16i8_ty],
                                        [IntrNoMem]>;
def int_aarch64_sve_aesimc  : GCCBuiltin<"__builtin_sve_svaesimc_u8">,
                              DefaultAttrsIntrinsic<[llvm_nxv16i8_ty],
                                        [llvm_nxv16i8_ty],
                                        [IntrNoMem]>;
def int_aarch64_sve_aese    : GCCBuiltin<"__builtin_sve_svaese_u8">,
                              DefaultAttrsIntrinsic<[llvm_nxv16i8_ty],
                                        [llvm_nxv16i8_ty, llvm_nxv16i8_ty],
                                        [IntrNoMem]>;
def int_aarch64_sve_aesmc   : GCCBuiltin<"__builtin_sve_svaesmc_u8">,
                              DefaultAttrsIntrinsic<[llvm_nxv16i8_ty],
                                        [llvm_nxv16i8_ty],
                                        [IntrNoMem]>;
def int_aarch64_sve_rax1    : GCCBuiltin<"__builtin_sve_svrax1_u64">,
                              DefaultAttrsIntrinsic<[llvm_nxv2i64_ty],
                                        [llvm_nxv2i64_ty, llvm_nxv2i64_ty],
                                        [IntrNoMem]>;
def int_aarch64_sve_sm4e    : GCCBuiltin<"__builtin_sve_svsm4e_u32">,
                              DefaultAttrsIntrinsic<[llvm_nxv4i32_ty],
                                        [llvm_nxv4i32_ty, llvm_nxv4i32_ty],
                                        [IntrNoMem]>;
def int_aarch64_sve_sm4ekey : GCCBuiltin<"__builtin_sve_svsm4ekey_u32">,
                              DefaultAttrsIntrinsic<[llvm_nxv4i32_ty],
                                        [llvm_nxv4i32_ty, llvm_nxv4i32_ty],
                                        [IntrNoMem]>;
//
// SVE2 - Extended table lookup/permute
//

def int_aarch64_sve_tbl2 : AdvSIMD_SVE2_TBX_Intrinsic;
def int_aarch64_sve_tbx  : AdvSIMD_SVE2_TBX_Intrinsic;

//
// SVE2 - Optional bit permutation
//

def int_aarch64_sve_bdep_x : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_bext_x : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_bgrp_x : AdvSIMD_2VectorArg_Intrinsic;


2539
2540 //
2541 // SVE ACLE: 7.3. INT8 matrix multiply extensions
2542 //
2543 def int_aarch64_sve_ummla : SVE_MatMul_Intrinsic;
2544 def int_aarch64_sve_smmla : SVE_MatMul_Intrinsic;
2545 def int_aarch64_sve_usmmla : SVE_MatMul_Intrinsic;
2546
2547 def int_aarch64_sve_usdot : AdvSIMD_SVE_DOT_Intrinsic;
2548 def int_aarch64_sve_usdot_lane : AdvSIMD_SVE_DOT_Indexed_Intrinsic;
2549 def int_aarch64_sve_sudot_lane : AdvSIMD_SVE_DOT_Indexed_Intrinsic;
2550
2551 //
2552 // SVE ACLE: 7.4/5. FP64/FP32 matrix multiply extensions
2553 //
2554 def int_aarch64_sve_fmmla : AdvSIMD_3VectorArg_Intrinsic;
2555
//
// SVE ACLE: 7.2. BFloat16 extensions
//

def int_aarch64_sve_bfdot   : SVE_4Vec_BF16;
def int_aarch64_sve_bfmlalb : SVE_4Vec_BF16;
def int_aarch64_sve_bfmlalt : SVE_4Vec_BF16;

def int_aarch64_sve_bfmmla  : SVE_4Vec_BF16;

def int_aarch64_sve_bfdot_lane   : SVE_4Vec_BF16_Indexed;
def int_aarch64_sve_bfmlalb_lane : SVE_4Vec_BF16_Indexed;
def int_aarch64_sve_bfmlalt_lane : SVE_4Vec_BF16_Indexed;
2569 }
2570
//
// SVE2 - Contiguous conflict detection
//

// NOTE(review): these defs sit after the closing brace of the
// `let TargetPrefix = "aarch64"` region above; presumably the
// SVE2_CONFLICT_DETECT_Intrinsic class already bakes the target prefix
// in — confirm against the class definition earlier in this file.
def int_aarch64_sve_whilerw_b : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilerw_h : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilerw_s : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilerw_d : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilewr_b : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilewr_h : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilewr_s : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilewr_d : SVE2_CONFLICT_DETECT_Intrinsic;