; RUN: opt < %s -tsan -S | FileCheck %s
; Check that atomic memory operations are converted to calls into ThreadSanitizer runtime.
target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
; ----- 8-bit atomic operations (memory-order constants: 0=relaxed, 2=acquire, 3=release, 4=acq_rel, 5=seq_cst) -----

define i8 @atomic8_load_unordered(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a unordered, align 1
  ret i8 %0
}
; CHECK: atomic8_load_unordered
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 0)

define i8 @atomic8_load_monotonic(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a monotonic, align 1
  ret i8 %0
}
; CHECK: atomic8_load_monotonic
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 0)

define i8 @atomic8_load_acquire(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a acquire, align 1
  ret i8 %0
}
; CHECK: atomic8_load_acquire
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 2)

define i8 @atomic8_load_seq_cst(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a seq_cst, align 1
  ret i8 %0
}
; CHECK: atomic8_load_seq_cst
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 5)

define void @atomic8_store_unordered(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a unordered, align 1
  ret void
}
; CHECK: atomic8_store_unordered
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 0)

define void @atomic8_store_monotonic(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a monotonic, align 1
  ret void
}
; CHECK: atomic8_store_monotonic
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 0)

define void @atomic8_store_release(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a release, align 1
  ret void
}
; CHECK: atomic8_store_release
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 3)

define void @atomic8_store_seq_cst(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a seq_cst, align 1
  ret void
}
; CHECK: atomic8_store_seq_cst
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 5)

define void @atomic8_xchg_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_xchg_monotonic
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 0)

define void @atomic8_add_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_add_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 0)

define void @atomic8_sub_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_sub_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 0)

define void @atomic8_and_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_and_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 0)

define void @atomic8_or_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_or_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 0)

define void @atomic8_xor_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_xor_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 0)

define void @atomic8_xchg_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_xchg_acquire
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 2)

define void @atomic8_add_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_add_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 2)

define void @atomic8_sub_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_sub_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 2)

define void @atomic8_and_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_and_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 2)

define void @atomic8_or_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_or_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 2)

define void @atomic8_xor_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_xor_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 2)

define void @atomic8_xchg_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_xchg_release
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 3)

define void @atomic8_add_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_add_release
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 3)

define void @atomic8_sub_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_sub_release
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 3)

define void @atomic8_and_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_and_release
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 3)

define void @atomic8_or_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_or_release
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 3)

define void @atomic8_xor_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_xor_release
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 3)

define void @atomic8_xchg_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_xchg_acq_rel
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 4)

define void @atomic8_add_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_add_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 4)

define void @atomic8_sub_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_sub_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 4)

define void @atomic8_and_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_and_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 4)

define void @atomic8_or_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_or_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 4)

define void @atomic8_xor_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_xor_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 4)

define void @atomic8_xchg_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_xchg_seq_cst
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 5)

define void @atomic8_add_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_add_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 5)

define void @atomic8_sub_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_sub_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 5)

define void @atomic8_and_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_and_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 5)

define void @atomic8_or_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_or_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 5)

define void @atomic8_xor_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_xor_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 5)

define void @atomic8_cas_monotonic(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 monotonic
  ret void
}
; CHECK: atomic8_cas_monotonic
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 0)

define void @atomic8_cas_acquire(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 acquire
  ret void
}
; CHECK: atomic8_cas_acquire
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 2)

define void @atomic8_cas_release(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 release
  ret void
}
; CHECK: atomic8_cas_release
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 3)

define void @atomic8_cas_acq_rel(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 acq_rel
  ret void
}
; CHECK: atomic8_cas_acq_rel
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 4)

define void @atomic8_cas_seq_cst(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 seq_cst
  ret void
}
; CHECK: atomic8_cas_seq_cst
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 5)
; ----- 16-bit atomic operations -----

define i16 @atomic16_load_unordered(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a unordered, align 2
  ret i16 %0
}
; CHECK: atomic16_load_unordered
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 0)

define i16 @atomic16_load_monotonic(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a monotonic, align 2
  ret i16 %0
}
; CHECK: atomic16_load_monotonic
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 0)

define i16 @atomic16_load_acquire(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a acquire, align 2
  ret i16 %0
}
; CHECK: atomic16_load_acquire
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 2)

define i16 @atomic16_load_seq_cst(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a seq_cst, align 2
  ret i16 %0
}
; CHECK: atomic16_load_seq_cst
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 5)

define void @atomic16_store_unordered(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a unordered, align 2
  ret void
}
; CHECK: atomic16_store_unordered
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 0)

define void @atomic16_store_monotonic(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a monotonic, align 2
  ret void
}
; CHECK: atomic16_store_monotonic
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 0)

define void @atomic16_store_release(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a release, align 2
  ret void
}
; CHECK: atomic16_store_release
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 3)

define void @atomic16_store_seq_cst(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a seq_cst, align 2
  ret void
}
; CHECK: atomic16_store_seq_cst
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 5)

define void @atomic16_xchg_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_xchg_monotonic
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 0)

define void @atomic16_add_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_add_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 0)

define void @atomic16_sub_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_sub_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 0)

define void @atomic16_and_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_and_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 0)

define void @atomic16_or_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_or_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 0)

define void @atomic16_xor_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_xor_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 0)

define void @atomic16_xchg_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_xchg_acquire
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 2)

define void @atomic16_add_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_add_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 2)

define void @atomic16_sub_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_sub_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 2)

define void @atomic16_and_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_and_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 2)

define void @atomic16_or_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_or_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 2)

define void @atomic16_xor_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_xor_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 2)

define void @atomic16_xchg_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_xchg_release
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 3)

define void @atomic16_add_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_add_release
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 3)

define void @atomic16_sub_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_sub_release
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 3)

define void @atomic16_and_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_and_release
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 3)

define void @atomic16_or_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_or_release
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 3)

define void @atomic16_xor_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_xor_release
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 3)

define void @atomic16_xchg_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_xchg_acq_rel
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 4)

define void @atomic16_add_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_add_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 4)

define void @atomic16_sub_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_sub_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 4)

define void @atomic16_and_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_and_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 4)

define void @atomic16_or_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_or_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 4)

define void @atomic16_xor_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_xor_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 4)

define void @atomic16_xchg_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_xchg_seq_cst
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 5)

define void @atomic16_add_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_add_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 5)

define void @atomic16_sub_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_sub_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 5)

define void @atomic16_and_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_and_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 5)

define void @atomic16_or_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_or_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 5)

define void @atomic16_xor_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_xor_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 5)

define void @atomic16_cas_monotonic(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 monotonic
  ret void
}
; CHECK: atomic16_cas_monotonic
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 0)

define void @atomic16_cas_acquire(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 acquire
  ret void
}
; CHECK: atomic16_cas_acquire
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 2)

define void @atomic16_cas_release(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 release
  ret void
}
; CHECK: atomic16_cas_release
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 3)

define void @atomic16_cas_acq_rel(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 acq_rel
  ret void
}
; CHECK: atomic16_cas_acq_rel
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 4)

define void @atomic16_cas_seq_cst(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 seq_cst
  ret void
}
; CHECK: atomic16_cas_seq_cst
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 5)
; ----- 32-bit atomic operations -----

define i32 @atomic32_load_unordered(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a unordered, align 4
  ret i32 %0
}
; CHECK: atomic32_load_unordered
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 0)

define i32 @atomic32_load_monotonic(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a monotonic, align 4
  ret i32 %0
}
; CHECK: atomic32_load_monotonic
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 0)

define i32 @atomic32_load_acquire(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a acquire, align 4
  ret i32 %0
}
; CHECK: atomic32_load_acquire
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 2)

define i32 @atomic32_load_seq_cst(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a seq_cst, align 4
  ret i32 %0
}
; CHECK: atomic32_load_seq_cst
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 5)

define void @atomic32_store_unordered(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a unordered, align 4
  ret void
}
; CHECK: atomic32_store_unordered
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 0)

define void @atomic32_store_monotonic(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a monotonic, align 4
  ret void
}
; CHECK: atomic32_store_monotonic
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 0)

define void @atomic32_store_release(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a release, align 4
  ret void
}
; CHECK: atomic32_store_release
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 3)

define void @atomic32_store_seq_cst(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a seq_cst, align 4
  ret void
}
; CHECK: atomic32_store_seq_cst
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 5)

define void @atomic32_xchg_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_xchg_monotonic
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 0)

define void @atomic32_add_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_add_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 0)

define void @atomic32_sub_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_sub_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 0)

define void @atomic32_and_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_and_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 0)

define void @atomic32_or_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_or_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 0)

define void @atomic32_xor_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_xor_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 0)

define void @atomic32_xchg_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_xchg_acquire
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 2)

define void @atomic32_add_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_add_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 2)

define void @atomic32_sub_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_sub_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 2)

define void @atomic32_and_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_and_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 2)

define void @atomic32_or_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_or_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 2)

define void @atomic32_xor_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_xor_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 2)

define void @atomic32_xchg_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_xchg_release
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 3)

define void @atomic32_add_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_add_release
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 3)

define void @atomic32_sub_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_sub_release
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 3)

define void @atomic32_and_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_and_release
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 3)

define void @atomic32_or_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_or_release
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 3)

define void @atomic32_xor_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_xor_release
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 3)

define void @atomic32_xchg_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_xchg_acq_rel
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 4)

define void @atomic32_add_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_add_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 4)

define void @atomic32_sub_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_sub_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 4)

define void @atomic32_and_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_and_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 4)

define void @atomic32_or_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_or_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 4)

define void @atomic32_xor_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_xor_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 4)

define void @atomic32_xchg_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_xchg_seq_cst
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 5)

define void @atomic32_add_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_add_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 5)

define void @atomic32_sub_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_sub_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 5)

define void @atomic32_and_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_and_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 5)

define void @atomic32_or_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_or_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 5)

define void @atomic32_xor_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_xor_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 5)

define void @atomic32_cas_monotonic(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 monotonic
  ret void
}
; CHECK: atomic32_cas_monotonic
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 0)

define void @atomic32_cas_acquire(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 acquire
  ret void
}
; CHECK: atomic32_cas_acquire
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 2)

define void @atomic32_cas_release(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 release
  ret void
}
; CHECK: atomic32_cas_release
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 3)

define void @atomic32_cas_acq_rel(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 acq_rel
  ret void
}
; CHECK: atomic32_cas_acq_rel
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 4)

define void @atomic32_cas_seq_cst(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 seq_cst
  ret void
}
; CHECK: atomic32_cas_seq_cst
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 5)
; ----- 64-bit atomic operations (truncated in this chunk after atomic64_add_acq_rel) -----

define i64 @atomic64_load_unordered(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a unordered, align 8
  ret i64 %0
}
; CHECK: atomic64_load_unordered
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 0)

define i64 @atomic64_load_monotonic(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a monotonic, align 8
  ret i64 %0
}
; CHECK: atomic64_load_monotonic
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 0)

define i64 @atomic64_load_acquire(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a acquire, align 8
  ret i64 %0
}
; CHECK: atomic64_load_acquire
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 2)

define i64 @atomic64_load_seq_cst(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a seq_cst, align 8
  ret i64 %0
}
; CHECK: atomic64_load_seq_cst
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 5)

define void @atomic64_store_unordered(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a unordered, align 8
  ret void
}
; CHECK: atomic64_store_unordered
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 0)

define void @atomic64_store_monotonic(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a monotonic, align 8
  ret void
}
; CHECK: atomic64_store_monotonic
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 0)

define void @atomic64_store_release(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a release, align 8
  ret void
}
; CHECK: atomic64_store_release
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 3)

define void @atomic64_store_seq_cst(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a seq_cst, align 8
  ret void
}
; CHECK: atomic64_store_seq_cst
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 5)

define void @atomic64_xchg_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_xchg_monotonic
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 0)

define void @atomic64_add_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_add_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 0)

define void @atomic64_sub_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_sub_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 0)

define void @atomic64_and_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_and_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 0)

define void @atomic64_or_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_or_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 0)

define void @atomic64_xor_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_xor_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 0)

define void @atomic64_xchg_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_xchg_acquire
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 2)

define void @atomic64_add_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_add_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 2)

define void @atomic64_sub_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_sub_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 2)

define void @atomic64_and_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_and_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 2)

define void @atomic64_or_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_or_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 2)

define void @atomic64_xor_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_xor_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 2)

define void @atomic64_xchg_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_xchg_release
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 3)

define void @atomic64_add_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_add_release
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 3)

define void @atomic64_sub_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_sub_release
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 3)

define void @atomic64_and_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_and_release
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 3)

define void @atomic64_or_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_or_release
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 3)

define void @atomic64_xor_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_xor_release
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 3)

define void @atomic64_xchg_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_xchg_acq_rel
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 4)

define void @atomic64_add_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_add_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 4)
1261 define void @atomic64_sub_acq_rel(i64* %a) nounwind uwtable {
1263 atomicrmw sub i64* %a, i64 0 acq_rel
1266 ; CHECK: atomic64_sub_acq_rel
1267 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 4)
1269 define void @atomic64_and_acq_rel(i64* %a) nounwind uwtable {
1271 atomicrmw and i64* %a, i64 0 acq_rel
1274 ; CHECK: atomic64_and_acq_rel
1275 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 4)
1277 define void @atomic64_or_acq_rel(i64* %a) nounwind uwtable {
1279 atomicrmw or i64* %a, i64 0 acq_rel
1282 ; CHECK: atomic64_or_acq_rel
1283 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 4)
1285 define void @atomic64_xor_acq_rel(i64* %a) nounwind uwtable {
1287 atomicrmw xor i64* %a, i64 0 acq_rel
1290 ; CHECK: atomic64_xor_acq_rel
1291 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 4)
1293 define void @atomic64_xchg_seq_cst(i64* %a) nounwind uwtable {
1295 atomicrmw xchg i64* %a, i64 0 seq_cst
1298 ; CHECK: atomic64_xchg_seq_cst
1299 ; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 5)
1301 define void @atomic64_add_seq_cst(i64* %a) nounwind uwtable {
1303 atomicrmw add i64* %a, i64 0 seq_cst
1306 ; CHECK: atomic64_add_seq_cst
1307 ; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 5)
1309 define void @atomic64_sub_seq_cst(i64* %a) nounwind uwtable {
1311 atomicrmw sub i64* %a, i64 0 seq_cst
1314 ; CHECK: atomic64_sub_seq_cst
1315 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 5)
1317 define void @atomic64_and_seq_cst(i64* %a) nounwind uwtable {
1319 atomicrmw and i64* %a, i64 0 seq_cst
1322 ; CHECK: atomic64_and_seq_cst
1323 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 5)
1325 define void @atomic64_or_seq_cst(i64* %a) nounwind uwtable {
1327 atomicrmw or i64* %a, i64 0 seq_cst
1330 ; CHECK: atomic64_or_seq_cst
1331 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 5)
1333 define void @atomic64_xor_seq_cst(i64* %a) nounwind uwtable {
1335 atomicrmw xor i64* %a, i64 0 seq_cst
1338 ; CHECK: atomic64_xor_seq_cst
1339 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 5)
1341 define void @atomic64_cas_monotonic(i64* %a) nounwind uwtable {
1343 cmpxchg i64* %a, i64 0, i64 1 monotonic
1346 ; CHECK: atomic64_cas_monotonic
1347 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 0)
1349 define void @atomic64_cas_acquire(i64* %a) nounwind uwtable {
1351 cmpxchg i64* %a, i64 0, i64 1 acquire
1354 ; CHECK: atomic64_cas_acquire
1355 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 2)
1357 define void @atomic64_cas_release(i64* %a) nounwind uwtable {
1359 cmpxchg i64* %a, i64 0, i64 1 release
1362 ; CHECK: atomic64_cas_release
1363 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 3)
1365 define void @atomic64_cas_acq_rel(i64* %a) nounwind uwtable {
1367 cmpxchg i64* %a, i64 0, i64 1 acq_rel
1370 ; CHECK: atomic64_cas_acq_rel
1371 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 4)
1373 define void @atomic64_cas_seq_cst(i64* %a) nounwind uwtable {
1375 cmpxchg i64* %a, i64 0, i64 1 seq_cst
1378 ; CHECK: atomic64_cas_seq_cst
1379 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 5)
1381 define i128 @atomic128_load_unordered(i128* %a) nounwind uwtable {
1383 %0 = load atomic i128* %a unordered, align 16
1386 ; CHECK: atomic128_load_unordered
1387 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 0)
1389 define i128 @atomic128_load_monotonic(i128* %a) nounwind uwtable {
1391 %0 = load atomic i128* %a monotonic, align 16
1394 ; CHECK: atomic128_load_monotonic
1395 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 0)
1397 define i128 @atomic128_load_acquire(i128* %a) nounwind uwtable {
1399 %0 = load atomic i128* %a acquire, align 16
1402 ; CHECK: atomic128_load_acquire
1403 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 2)
1405 define i128 @atomic128_load_seq_cst(i128* %a) nounwind uwtable {
1407 %0 = load atomic i128* %a seq_cst, align 16
1410 ; CHECK: atomic128_load_seq_cst
1411 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 5)
1413 define void @atomic128_store_unordered(i128* %a) nounwind uwtable {
1415 store atomic i128 0, i128* %a unordered, align 16
1418 ; CHECK: atomic128_store_unordered
1419 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 0)
1421 define void @atomic128_store_monotonic(i128* %a) nounwind uwtable {
1423 store atomic i128 0, i128* %a monotonic, align 16
1426 ; CHECK: atomic128_store_monotonic
1427 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 0)
1429 define void @atomic128_store_release(i128* %a) nounwind uwtable {
1431 store atomic i128 0, i128* %a release, align 16
1434 ; CHECK: atomic128_store_release
1435 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 3)
1437 define void @atomic128_store_seq_cst(i128* %a) nounwind uwtable {
1439 store atomic i128 0, i128* %a seq_cst, align 16
1442 ; CHECK: atomic128_store_seq_cst
1443 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 5)
1445 define void @atomic128_xchg_monotonic(i128* %a) nounwind uwtable {
1447 atomicrmw xchg i128* %a, i128 0 monotonic
1450 ; CHECK: atomic128_xchg_monotonic
1451 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 0)
1453 define void @atomic128_add_monotonic(i128* %a) nounwind uwtable {
1455 atomicrmw add i128* %a, i128 0 monotonic
1458 ; CHECK: atomic128_add_monotonic
1459 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 0)
1461 define void @atomic128_sub_monotonic(i128* %a) nounwind uwtable {
1463 atomicrmw sub i128* %a, i128 0 monotonic
1466 ; CHECK: atomic128_sub_monotonic
1467 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 0)
1469 define void @atomic128_and_monotonic(i128* %a) nounwind uwtable {
1471 atomicrmw and i128* %a, i128 0 monotonic
1474 ; CHECK: atomic128_and_monotonic
1475 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 0)
1477 define void @atomic128_or_monotonic(i128* %a) nounwind uwtable {
1479 atomicrmw or i128* %a, i128 0 monotonic
1482 ; CHECK: atomic128_or_monotonic
1483 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 0)
1485 define void @atomic128_xor_monotonic(i128* %a) nounwind uwtable {
1487 atomicrmw xor i128* %a, i128 0 monotonic
1490 ; CHECK: atomic128_xor_monotonic
1491 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 0)
1493 define void @atomic128_xchg_acquire(i128* %a) nounwind uwtable {
1495 atomicrmw xchg i128* %a, i128 0 acquire
1498 ; CHECK: atomic128_xchg_acquire
1499 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 2)
1501 define void @atomic128_add_acquire(i128* %a) nounwind uwtable {
1503 atomicrmw add i128* %a, i128 0 acquire
1506 ; CHECK: atomic128_add_acquire
1507 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 2)
1509 define void @atomic128_sub_acquire(i128* %a) nounwind uwtable {
1511 atomicrmw sub i128* %a, i128 0 acquire
1514 ; CHECK: atomic128_sub_acquire
1515 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 2)
1517 define void @atomic128_and_acquire(i128* %a) nounwind uwtable {
1519 atomicrmw and i128* %a, i128 0 acquire
1522 ; CHECK: atomic128_and_acquire
1523 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 2)
1525 define void @atomic128_or_acquire(i128* %a) nounwind uwtable {
1527 atomicrmw or i128* %a, i128 0 acquire
1530 ; CHECK: atomic128_or_acquire
1531 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 2)
1533 define void @atomic128_xor_acquire(i128* %a) nounwind uwtable {
1535 atomicrmw xor i128* %a, i128 0 acquire
1538 ; CHECK: atomic128_xor_acquire
1539 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 2)
1541 define void @atomic128_xchg_release(i128* %a) nounwind uwtable {
1543 atomicrmw xchg i128* %a, i128 0 release
1546 ; CHECK: atomic128_xchg_release
1547 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 3)
1549 define void @atomic128_add_release(i128* %a) nounwind uwtable {
1551 atomicrmw add i128* %a, i128 0 release
1554 ; CHECK: atomic128_add_release
1555 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 3)
1557 define void @atomic128_sub_release(i128* %a) nounwind uwtable {
1559 atomicrmw sub i128* %a, i128 0 release
1562 ; CHECK: atomic128_sub_release
1563 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 3)
1565 define void @atomic128_and_release(i128* %a) nounwind uwtable {
1567 atomicrmw and i128* %a, i128 0 release
1570 ; CHECK: atomic128_and_release
1571 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 3)
1573 define void @atomic128_or_release(i128* %a) nounwind uwtable {
1575 atomicrmw or i128* %a, i128 0 release
1578 ; CHECK: atomic128_or_release
1579 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 3)
1581 define void @atomic128_xor_release(i128* %a) nounwind uwtable {
1583 atomicrmw xor i128* %a, i128 0 release
1586 ; CHECK: atomic128_xor_release
1587 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 3)
1589 define void @atomic128_xchg_acq_rel(i128* %a) nounwind uwtable {
1591 atomicrmw xchg i128* %a, i128 0 acq_rel
1594 ; CHECK: atomic128_xchg_acq_rel
1595 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 4)
1597 define void @atomic128_add_acq_rel(i128* %a) nounwind uwtable {
1599 atomicrmw add i128* %a, i128 0 acq_rel
1602 ; CHECK: atomic128_add_acq_rel
1603 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 4)
1605 define void @atomic128_sub_acq_rel(i128* %a) nounwind uwtable {
1607 atomicrmw sub i128* %a, i128 0 acq_rel
1610 ; CHECK: atomic128_sub_acq_rel
1611 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 4)
1613 define void @atomic128_and_acq_rel(i128* %a) nounwind uwtable {
1615 atomicrmw and i128* %a, i128 0 acq_rel
1618 ; CHECK: atomic128_and_acq_rel
1619 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 4)
1621 define void @atomic128_or_acq_rel(i128* %a) nounwind uwtable {
1623 atomicrmw or i128* %a, i128 0 acq_rel
1626 ; CHECK: atomic128_or_acq_rel
1627 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 4)
1629 define void @atomic128_xor_acq_rel(i128* %a) nounwind uwtable {
1631 atomicrmw xor i128* %a, i128 0 acq_rel
1634 ; CHECK: atomic128_xor_acq_rel
1635 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 4)
1637 define void @atomic128_xchg_seq_cst(i128* %a) nounwind uwtable {
1639 atomicrmw xchg i128* %a, i128 0 seq_cst
1642 ; CHECK: atomic128_xchg_seq_cst
1643 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 5)
1645 define void @atomic128_add_seq_cst(i128* %a) nounwind uwtable {
1647 atomicrmw add i128* %a, i128 0 seq_cst
1650 ; CHECK: atomic128_add_seq_cst
1651 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 5)
1653 define void @atomic128_sub_seq_cst(i128* %a) nounwind uwtable {
1655 atomicrmw sub i128* %a, i128 0 seq_cst
1658 ; CHECK: atomic128_sub_seq_cst
1659 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 5)
1661 define void @atomic128_and_seq_cst(i128* %a) nounwind uwtable {
1663 atomicrmw and i128* %a, i128 0 seq_cst
1666 ; CHECK: atomic128_and_seq_cst
1667 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 5)
1669 define void @atomic128_or_seq_cst(i128* %a) nounwind uwtable {
1671 atomicrmw or i128* %a, i128 0 seq_cst
1674 ; CHECK: atomic128_or_seq_cst
1675 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 5)
1677 define void @atomic128_xor_seq_cst(i128* %a) nounwind uwtable {
1679 atomicrmw xor i128* %a, i128 0 seq_cst
1682 ; CHECK: atomic128_xor_seq_cst
1683 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 5)
1685 define void @atomic128_cas_monotonic(i128* %a) nounwind uwtable {
1687 cmpxchg i128* %a, i128 0, i128 1 monotonic
1690 ; CHECK: atomic128_cas_monotonic
1691 ; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 0)
1693 define void @atomic128_cas_acquire(i128* %a) nounwind uwtable {
1695 cmpxchg i128* %a, i128 0, i128 1 acquire
1698 ; CHECK: atomic128_cas_acquire
1699 ; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 2)
1701 define void @atomic128_cas_release(i128* %a) nounwind uwtable {
1703 cmpxchg i128* %a, i128 0, i128 1 release
1706 ; CHECK: atomic128_cas_release
1707 ; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 3)
1709 define void @atomic128_cas_acq_rel(i128* %a) nounwind uwtable {
1711 cmpxchg i128* %a, i128 0, i128 1 acq_rel
1714 ; CHECK: atomic128_cas_acq_rel
1715 ; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 4)
1717 define void @atomic128_cas_seq_cst(i128* %a) nounwind uwtable {
1719 cmpxchg i128* %a, i128 0, i128 1 seq_cst
1722 ; CHECK: atomic128_cas_seq_cst
1723 ; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 5)
1725 define void @atomic_signal_fence_acquire() nounwind uwtable {
1727 fence singlethread acquire
1730 ; CHECK: atomic_signal_fence_acquire
1731 ; CHECK: call void @__tsan_atomic_signal_fence(i32 2)
1733 define void @atomic_thread_fence_acquire() nounwind uwtable {
1738 ; CHECK: atomic_thread_fence_acquire
1739 ; CHECK: call void @__tsan_atomic_thread_fence(i32 2)
1741 define void @atomic_signal_fence_release() nounwind uwtable {
1743 fence singlethread release
1746 ; CHECK: atomic_signal_fence_release
1747 ; CHECK: call void @__tsan_atomic_signal_fence(i32 3)
1749 define void @atomic_thread_fence_release() nounwind uwtable {
1754 ; CHECK: atomic_thread_fence_release
1755 ; CHECK: call void @__tsan_atomic_thread_fence(i32 3)
1757 define void @atomic_signal_fence_acq_rel() nounwind uwtable {
1759 fence singlethread acq_rel
1762 ; CHECK: atomic_signal_fence_acq_rel
1763 ; CHECK: call void @__tsan_atomic_signal_fence(i32 4)
1765 define void @atomic_thread_fence_acq_rel() nounwind uwtable {
1770 ; CHECK: atomic_thread_fence_acq_rel
1771 ; CHECK: call void @__tsan_atomic_thread_fence(i32 4)
1773 define void @atomic_signal_fence_seq_cst() nounwind uwtable {
1775 fence singlethread seq_cst
1778 ; CHECK: atomic_signal_fence_seq_cst
1779 ; CHECK: call void @__tsan_atomic_signal_fence(i32 5)
1781 define void @atomic_thread_fence_seq_cst() nounwind uwtable {
1786 ; CHECK: atomic_thread_fence_seq_cst
1787 ; CHECK: call void @__tsan_atomic_thread_fence(i32 5)