1 /*===---- avx512dqintrin.h - AVX512DQ intrinsics ---------------------------===
3 * Permission is hereby granted, free of charge, to any person obtaining a copy
4 * of this software and associated documentation files (the "Software"), to deal
5 * in the Software without restriction, including without limitation the rights
6 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 * copies of the Software, and to permit persons to whom the Software is
8 * furnished to do so, subject to the following conditions:
10 * The above copyright notice and this permission notice shall be included in
11 * all copies or substantial portions of the Software.
13 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */
25 #error "Never use <avx512dqintrin.h> directly; include <immintrin.h> instead."
28 #ifndef __AVX512DQINTRIN_H
29 #define __AVX512DQINTRIN_H
31 static __inline__ __m512i __attribute__ ((__always_inline__, __nodebug__))
32 _mm512_mullo_epi64 (__m512i __A, __m512i __B) {
33 return (__m512i) ((__v8di) __A * (__v8di) __B);
36 static __inline__ __m512i __attribute__ ((__always_inline__, __nodebug__))
37 _mm512_mask_mullo_epi64 (__m512i __W, __mmask8 __U, __m512i __A, __m512i __B) {
38 return (__m512i) __builtin_ia32_pmullq512_mask ((__v8di) __A,
44 static __inline__ __m512i __attribute__ ((__always_inline__, __nodebug__))
45 _mm512_maskz_mullo_epi64 (__mmask8 __U, __m512i __A, __m512i __B) {
46 return (__m512i) __builtin_ia32_pmullq512_mask ((__v8di) __A,
49 _mm512_setzero_si512 (),
53 static __inline__ __m512d __attribute__ ((__always_inline__, __nodebug__))
54 _mm512_xor_pd (__m512d __A, __m512d __B) {
55 return (__m512d) ((__v8di) __A ^ (__v8di) __B);
58 static __inline__ __m512d __attribute__ ((__always_inline__, __nodebug__))
59 _mm512_mask_xor_pd (__m512d __W, __mmask8 __U, __m512d __A, __m512d __B) {
60 return (__m512d) __builtin_ia32_xorpd512_mask ((__v8df) __A,
66 static __inline__ __m512d __attribute__ ((__always_inline__, __nodebug__))
67 _mm512_maskz_xor_pd (__mmask8 __U, __m512d __A, __m512d __B) {
68 return (__m512d) __builtin_ia32_xorpd512_mask ((__v8df) __A,
75 static __inline__ __m512 __attribute__ ((__always_inline__, __nodebug__))
76 _mm512_xor_ps (__m512 __A, __m512 __B) {
77 return (__m512) ((__v16si) __A ^ (__v16si) __B);
80 static __inline__ __m512 __attribute__ ((__always_inline__, __nodebug__))
81 _mm512_mask_xor_ps (__m512 __W, __mmask16 __U, __m512 __A, __m512 __B) {
82 return (__m512) __builtin_ia32_xorps512_mask ((__v16sf) __A,
88 static __inline__ __m512 __attribute__ ((__always_inline__, __nodebug__))
89 _mm512_maskz_xor_ps (__mmask16 __U, __m512 __A, __m512 __B) {
90 return (__m512) __builtin_ia32_xorps512_mask ((__v16sf) __A,
97 static __inline__ __m512d __attribute__ ((__always_inline__, __nodebug__))
98 _mm512_or_pd (__m512d __A, __m512d __B) {
99 return (__m512d) ((__v8di) __A | (__v8di) __B);
102 static __inline__ __m512d __attribute__ ((__always_inline__, __nodebug__))
103 _mm512_mask_or_pd (__m512d __W, __mmask8 __U, __m512d __A, __m512d __B) {
104 return (__m512d) __builtin_ia32_orpd512_mask ((__v8df) __A,
110 static __inline__ __m512d __attribute__ ((__always_inline__, __nodebug__))
111 _mm512_maskz_or_pd (__mmask8 __U, __m512d __A, __m512d __B) {
112 return (__m512d) __builtin_ia32_orpd512_mask ((__v8df) __A,
115 _mm512_setzero_pd (),
119 static __inline__ __m512 __attribute__ ((__always_inline__, __nodebug__))
120 _mm512_or_ps (__m512 __A, __m512 __B) {
121 return (__m512) ((__v16si) __A | (__v16si) __B);
124 static __inline__ __m512 __attribute__ ((__always_inline__, __nodebug__))
125 _mm512_mask_or_ps (__m512 __W, __mmask16 __U, __m512 __A, __m512 __B) {
126 return (__m512) __builtin_ia32_orps512_mask ((__v16sf) __A,
132 static __inline__ __m512 __attribute__ ((__always_inline__, __nodebug__))
133 _mm512_maskz_or_ps (__mmask16 __U, __m512 __A, __m512 __B) {
134 return (__m512) __builtin_ia32_orps512_mask ((__v16sf) __A,
137 _mm512_setzero_ps (),
141 static __inline__ __m512d __attribute__ ((__always_inline__, __nodebug__))
142 _mm512_and_pd (__m512d __A, __m512d __B) {
143 return (__m512d) ((__v8di) __A & (__v8di) __B);
146 static __inline__ __m512d __attribute__ ((__always_inline__, __nodebug__))
147 _mm512_mask_and_pd (__m512d __W, __mmask8 __U, __m512d __A, __m512d __B) {
148 return (__m512d) __builtin_ia32_andpd512_mask ((__v8df) __A,
154 static __inline__ __m512d __attribute__ ((__always_inline__, __nodebug__))
155 _mm512_maskz_and_pd (__mmask8 __U, __m512d __A, __m512d __B) {
156 return (__m512d) __builtin_ia32_andpd512_mask ((__v8df) __A,
159 _mm512_setzero_pd (),
163 static __inline__ __m512 __attribute__ ((__always_inline__, __nodebug__))
164 _mm512_and_ps (__m512 __A, __m512 __B) {
165 return (__m512) ((__v16si) __A & (__v16si) __B);
168 static __inline__ __m512 __attribute__ ((__always_inline__, __nodebug__))
169 _mm512_mask_and_ps (__m512 __W, __mmask16 __U, __m512 __A, __m512 __B) {
170 return (__m512) __builtin_ia32_andps512_mask ((__v16sf) __A,
176 static __inline__ __m512 __attribute__ ((__always_inline__, __nodebug__))
177 _mm512_maskz_and_ps (__mmask16 __U, __m512 __A, __m512 __B) {
178 return (__m512) __builtin_ia32_andps512_mask ((__v16sf) __A,
181 _mm512_setzero_ps (),
185 static __inline__ __m512d __attribute__ ((__always_inline__, __nodebug__))
186 _mm512_andnot_pd (__m512d __A, __m512d __B) {
187 return (__m512d) __builtin_ia32_andnpd512_mask ((__v8df) __A,
190 _mm512_setzero_pd (),
194 static __inline__ __m512d __attribute__ ((__always_inline__, __nodebug__))
195 _mm512_mask_andnot_pd (__m512d __W, __mmask8 __U, __m512d __A, __m512d __B) {
196 return (__m512d) __builtin_ia32_andnpd512_mask ((__v8df) __A,
202 static __inline__ __m512d __attribute__ ((__always_inline__, __nodebug__))
203 _mm512_maskz_andnot_pd (__mmask8 __U, __m512d __A, __m512d __B) {
204 return (__m512d) __builtin_ia32_andnpd512_mask ((__v8df) __A,
207 _mm512_setzero_pd (),
211 static __inline__ __m512 __attribute__ ((__always_inline__, __nodebug__))
212 _mm512_andnot_ps (__m512 __A, __m512 __B) {
213 return (__m512) __builtin_ia32_andnps512_mask ((__v16sf) __A,
216 _mm512_setzero_ps (),
220 static __inline__ __m512 __attribute__ ((__always_inline__, __nodebug__))
221 _mm512_mask_andnot_ps (__m512 __W, __mmask16 __U, __m512 __A, __m512 __B) {
222 return (__m512) __builtin_ia32_andnps512_mask ((__v16sf) __A,
228 static __inline__ __m512 __attribute__ ((__always_inline__, __nodebug__))
229 _mm512_maskz_andnot_ps (__mmask16 __U, __m512 __A, __m512 __B) {
230 return (__m512) __builtin_ia32_andnps512_mask ((__v16sf) __A,
233 _mm512_setzero_ps (),