/*===------------- avx512vbmi2intrin.h - VBMI2 intrinsics ------------------===
 *
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */
25 #error "Never use <avx512vbmi2intrin.h> directly; include <immintrin.h> instead."
28 #ifndef __AVX512VBMI2INTRIN_H
29 #define __AVX512VBMI2INTRIN_H
/* Define the default attributes for the functions in this file: always inline,
   require the avx512vbmi2 target feature, and use the full 512-bit vector width. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__, __target__("avx512vbmi2"), __min_vector_width__(512)))
35 static __inline__ __m512i __DEFAULT_FN_ATTRS
36 _mm512_mask_compress_epi16(__m512i __S, __mmask32 __U, __m512i __D)
38 return (__m512i) __builtin_ia32_compresshi512_mask ((__v32hi) __D,
43 static __inline__ __m512i __DEFAULT_FN_ATTRS
44 _mm512_maskz_compress_epi16(__mmask32 __U, __m512i __D)
46 return (__m512i) __builtin_ia32_compresshi512_mask ((__v32hi) __D,
47 (__v32hi) _mm512_setzero_si512(),
51 static __inline__ __m512i __DEFAULT_FN_ATTRS
52 _mm512_mask_compress_epi8(__m512i __S, __mmask64 __U, __m512i __D)
54 return (__m512i) __builtin_ia32_compressqi512_mask ((__v64qi) __D,
59 static __inline__ __m512i __DEFAULT_FN_ATTRS
60 _mm512_maskz_compress_epi8(__mmask64 __U, __m512i __D)
62 return (__m512i) __builtin_ia32_compressqi512_mask ((__v64qi) __D,
63 (__v64qi) _mm512_setzero_si512(),
67 static __inline__ void __DEFAULT_FN_ATTRS
68 _mm512_mask_compressstoreu_epi16(void *__P, __mmask32 __U, __m512i __D)
70 __builtin_ia32_compressstorehi512_mask ((__v32hi *) __P, (__v32hi) __D,
74 static __inline__ void __DEFAULT_FN_ATTRS
75 _mm512_mask_compressstoreu_epi8(void *__P, __mmask64 __U, __m512i __D)
77 __builtin_ia32_compressstoreqi512_mask ((__v64qi *) __P, (__v64qi) __D,
81 static __inline__ __m512i __DEFAULT_FN_ATTRS
82 _mm512_mask_expand_epi16(__m512i __S, __mmask32 __U, __m512i __D)
84 return (__m512i) __builtin_ia32_expandhi512_mask ((__v32hi) __D,
89 static __inline__ __m512i __DEFAULT_FN_ATTRS
90 _mm512_maskz_expand_epi16(__mmask32 __U, __m512i __D)
92 return (__m512i) __builtin_ia32_expandhi512_mask ((__v32hi) __D,
93 (__v32hi) _mm512_setzero_si512(),
97 static __inline__ __m512i __DEFAULT_FN_ATTRS
98 _mm512_mask_expand_epi8(__m512i __S, __mmask64 __U, __m512i __D)
100 return (__m512i) __builtin_ia32_expandqi512_mask ((__v64qi) __D,
105 static __inline__ __m512i __DEFAULT_FN_ATTRS
106 _mm512_maskz_expand_epi8(__mmask64 __U, __m512i __D)
108 return (__m512i) __builtin_ia32_expandqi512_mask ((__v64qi) __D,
109 (__v64qi) _mm512_setzero_si512(),
113 static __inline__ __m512i __DEFAULT_FN_ATTRS
114 _mm512_mask_expandloadu_epi16(__m512i __S, __mmask32 __U, void const *__P)
116 return (__m512i) __builtin_ia32_expandloadhi512_mask ((const __v32hi *)__P,
121 static __inline__ __m512i __DEFAULT_FN_ATTRS
122 _mm512_maskz_expandloadu_epi16(__mmask32 __U, void const *__P)
124 return (__m512i) __builtin_ia32_expandloadhi512_mask ((const __v32hi *)__P,
125 (__v32hi) _mm512_setzero_si512(),
129 static __inline__ __m512i __DEFAULT_FN_ATTRS
130 _mm512_mask_expandloadu_epi8(__m512i __S, __mmask64 __U, void const *__P)
132 return (__m512i) __builtin_ia32_expandloadqi512_mask ((const __v64qi *)__P,
137 static __inline__ __m512i __DEFAULT_FN_ATTRS
138 _mm512_maskz_expandloadu_epi8(__mmask64 __U, void const *__P)
140 return (__m512i) __builtin_ia32_expandloadqi512_mask ((const __v64qi *)__P,
141 (__v64qi) _mm512_setzero_si512(),
/* Concatenate-and-shift with an immediate count I:
   shldi shifts the concatenation A:B left and returns the upper half;
   shrdi shifts the concatenation B:A right and returns the lower half.
   These must be macros because I is an immediate operand. */
#define _mm512_shldi_epi64(A, B, I) \
  (__m512i)__builtin_ia32_vpshldq512((__v8di)(__m512i)(A), \
                                     (__v8di)(__m512i)(B), (int)(I))

#define _mm512_mask_shldi_epi64(S, U, A, B, I) \
  (__m512i)__builtin_ia32_selectq_512((__mmask8)(U), \
                                    (__v8di)_mm512_shldi_epi64((A), (B), (I)), \
                                    (__v8di)(__m512i)(S))

#define _mm512_maskz_shldi_epi64(U, A, B, I) \
  (__m512i)__builtin_ia32_selectq_512((__mmask8)(U), \
                                    (__v8di)_mm512_shldi_epi64((A), (B), (I)), \
                                    (__v8di)_mm512_setzero_si512())

#define _mm512_shldi_epi32(A, B, I) \
  (__m512i)__builtin_ia32_vpshldd512((__v16si)(__m512i)(A), \
                                     (__v16si)(__m512i)(B), (int)(I))

#define _mm512_mask_shldi_epi32(S, U, A, B, I) \
  (__m512i)__builtin_ia32_selectd_512((__mmask16)(U), \
                                   (__v16si)_mm512_shldi_epi32((A), (B), (I)), \
                                   (__v16si)(__m512i)(S))

#define _mm512_maskz_shldi_epi32(U, A, B, I) \
  (__m512i)__builtin_ia32_selectd_512((__mmask16)(U), \
                                   (__v16si)_mm512_shldi_epi32((A), (B), (I)), \
                                   (__v16si)_mm512_setzero_si512())

#define _mm512_shldi_epi16(A, B, I) \
  (__m512i)__builtin_ia32_vpshldw512((__v32hi)(__m512i)(A), \
                                     (__v32hi)(__m512i)(B), (int)(I))

#define _mm512_mask_shldi_epi16(S, U, A, B, I) \
  (__m512i)__builtin_ia32_selectw_512((__mmask32)(U), \
                                   (__v32hi)_mm512_shldi_epi16((A), (B), (I)), \
                                   (__v32hi)(__m512i)(S))

#define _mm512_maskz_shldi_epi16(U, A, B, I) \
  (__m512i)__builtin_ia32_selectw_512((__mmask32)(U), \
                                   (__v32hi)_mm512_shldi_epi16((A), (B), (I)), \
                                   (__v32hi)_mm512_setzero_si512())

#define _mm512_shrdi_epi64(A, B, I) \
  (__m512i)__builtin_ia32_vpshrdq512((__v8di)(__m512i)(A), \
                                     (__v8di)(__m512i)(B), (int)(I))

#define _mm512_mask_shrdi_epi64(S, U, A, B, I) \
  (__m512i)__builtin_ia32_selectq_512((__mmask8)(U), \
                                    (__v8di)_mm512_shrdi_epi64((A), (B), (I)), \
                                    (__v8di)(__m512i)(S))

#define _mm512_maskz_shrdi_epi64(U, A, B, I) \
  (__m512i)__builtin_ia32_selectq_512((__mmask8)(U), \
                                    (__v8di)_mm512_shrdi_epi64((A), (B), (I)), \
                                    (__v8di)_mm512_setzero_si512())

#define _mm512_shrdi_epi32(A, B, I) \
  (__m512i)__builtin_ia32_vpshrdd512((__v16si)(__m512i)(A), \
                                     (__v16si)(__m512i)(B), (int)(I))

#define _mm512_mask_shrdi_epi32(S, U, A, B, I) \
  (__m512i)__builtin_ia32_selectd_512((__mmask16)(U), \
                                   (__v16si)_mm512_shrdi_epi32((A), (B), (I)), \
                                   (__v16si)(__m512i)(S))

#define _mm512_maskz_shrdi_epi32(U, A, B, I) \
  (__m512i)__builtin_ia32_selectd_512((__mmask16)(U), \
                                   (__v16si)_mm512_shrdi_epi32((A), (B), (I)), \
                                   (__v16si)_mm512_setzero_si512())

#define _mm512_shrdi_epi16(A, B, I) \
  (__m512i)__builtin_ia32_vpshrdw512((__v32hi)(__m512i)(A), \
                                     (__v32hi)(__m512i)(B), (int)(I))

#define _mm512_mask_shrdi_epi16(S, U, A, B, I) \
  (__m512i)__builtin_ia32_selectw_512((__mmask32)(U), \
                                   (__v32hi)_mm512_shrdi_epi16((A), (B), (I)), \
                                   (__v32hi)(__m512i)(S))

#define _mm512_maskz_shrdi_epi16(U, A, B, I) \
  (__m512i)__builtin_ia32_selectw_512((__mmask32)(U), \
                                   (__v32hi)_mm512_shrdi_epi16((A), (B), (I)), \
                                   (__v32hi)_mm512_setzero_si512())
229 static __inline__ __m512i __DEFAULT_FN_ATTRS
230 _mm512_shldv_epi64(__m512i __A, __m512i __B, __m512i __C)
232 return (__m512i)__builtin_ia32_vpshldvq512((__v8di)__A, (__v8di)__B,
236 static __inline__ __m512i __DEFAULT_FN_ATTRS
237 _mm512_mask_shldv_epi64(__m512i __A, __mmask8 __U, __m512i __B, __m512i __C)
239 return (__m512i)__builtin_ia32_selectq_512(__U,
240 (__v8di)_mm512_shldv_epi64(__A, __B, __C),
244 static __inline__ __m512i __DEFAULT_FN_ATTRS
245 _mm512_maskz_shldv_epi64(__mmask8 __U, __m512i __A, __m512i __B, __m512i __C)
247 return (__m512i)__builtin_ia32_selectq_512(__U,
248 (__v8di)_mm512_shldv_epi64(__A, __B, __C),
249 (__v8di)_mm512_setzero_si512());
252 static __inline__ __m512i __DEFAULT_FN_ATTRS
253 _mm512_shldv_epi32(__m512i __A, __m512i __B, __m512i __C)
255 return (__m512i)__builtin_ia32_vpshldvd512((__v16si)__A, (__v16si)__B,
259 static __inline__ __m512i __DEFAULT_FN_ATTRS
260 _mm512_mask_shldv_epi32(__m512i __A, __mmask16 __U, __m512i __B, __m512i __C)
262 return (__m512i)__builtin_ia32_selectd_512(__U,
263 (__v16si)_mm512_shldv_epi32(__A, __B, __C),
267 static __inline__ __m512i __DEFAULT_FN_ATTRS
268 _mm512_maskz_shldv_epi32(__mmask16 __U, __m512i __A, __m512i __B, __m512i __C)
270 return (__m512i)__builtin_ia32_selectd_512(__U,
271 (__v16si)_mm512_shldv_epi32(__A, __B, __C),
272 (__v16si)_mm512_setzero_si512());
275 static __inline__ __m512i __DEFAULT_FN_ATTRS
276 _mm512_shldv_epi16(__m512i __A, __m512i __B, __m512i __C)
278 return (__m512i)__builtin_ia32_vpshldvw512((__v32hi)__A, (__v32hi)__B,
282 static __inline__ __m512i __DEFAULT_FN_ATTRS
283 _mm512_mask_shldv_epi16(__m512i __A, __mmask32 __U, __m512i __B, __m512i __C)
285 return (__m512i)__builtin_ia32_selectw_512(__U,
286 (__v32hi)_mm512_shldv_epi16(__A, __B, __C),
290 static __inline__ __m512i __DEFAULT_FN_ATTRS
291 _mm512_maskz_shldv_epi16(__mmask32 __U, __m512i __A, __m512i __B, __m512i __C)
293 return (__m512i)__builtin_ia32_selectw_512(__U,
294 (__v32hi)_mm512_shldv_epi16(__A, __B, __C),
295 (__v32hi)_mm512_setzero_si512());
298 static __inline__ __m512i __DEFAULT_FN_ATTRS
299 _mm512_shrdv_epi64(__m512i __A, __m512i __B, __m512i __C)
301 return (__m512i)__builtin_ia32_vpshrdvq512((__v8di)__A, (__v8di)__B,
305 static __inline__ __m512i __DEFAULT_FN_ATTRS
306 _mm512_mask_shrdv_epi64(__m512i __A, __mmask8 __U, __m512i __B, __m512i __C)
308 return (__m512i)__builtin_ia32_selectq_512(__U,
309 (__v8di)_mm512_shrdv_epi64(__A, __B, __C),
313 static __inline__ __m512i __DEFAULT_FN_ATTRS
314 _mm512_maskz_shrdv_epi64(__mmask8 __U, __m512i __A, __m512i __B, __m512i __C)
316 return (__m512i)__builtin_ia32_selectq_512(__U,
317 (__v8di)_mm512_shrdv_epi64(__A, __B, __C),
318 (__v8di)_mm512_setzero_si512());
321 static __inline__ __m512i __DEFAULT_FN_ATTRS
322 _mm512_shrdv_epi32(__m512i __A, __m512i __B, __m512i __C)
324 return (__m512i)__builtin_ia32_vpshrdvd512((__v16si)__A, (__v16si)__B,
328 static __inline__ __m512i __DEFAULT_FN_ATTRS
329 _mm512_mask_shrdv_epi32(__m512i __A, __mmask16 __U, __m512i __B, __m512i __C)
331 return (__m512i) __builtin_ia32_selectd_512(__U,
332 (__v16si)_mm512_shrdv_epi32(__A, __B, __C),
336 static __inline__ __m512i __DEFAULT_FN_ATTRS
337 _mm512_maskz_shrdv_epi32(__mmask16 __U, __m512i __A, __m512i __B, __m512i __C)
339 return (__m512i) __builtin_ia32_selectd_512(__U,
340 (__v16si)_mm512_shrdv_epi32(__A, __B, __C),
341 (__v16si)_mm512_setzero_si512());
344 static __inline__ __m512i __DEFAULT_FN_ATTRS
345 _mm512_shrdv_epi16(__m512i __A, __m512i __B, __m512i __C)
347 return (__m512i)__builtin_ia32_vpshrdvw512((__v32hi)__A, (__v32hi)__B,
351 static __inline__ __m512i __DEFAULT_FN_ATTRS
352 _mm512_mask_shrdv_epi16(__m512i __A, __mmask32 __U, __m512i __B, __m512i __C)
354 return (__m512i)__builtin_ia32_selectw_512(__U,
355 (__v32hi)_mm512_shrdv_epi16(__A, __B, __C),
359 static __inline__ __m512i __DEFAULT_FN_ATTRS
360 _mm512_maskz_shrdv_epi16(__mmask32 __U, __m512i __A, __m512i __B, __m512i __C)
362 return (__m512i)__builtin_ia32_selectw_512(__U,
363 (__v32hi)_mm512_shrdv_epi16(__A, __B, __C),
364 (__v32hi)_mm512_setzero_si512());
368 #undef __DEFAULT_FN_ATTRS