; RUN: opt < %s -msan -msan-check-access-address=0 -S | FileCheck %s

target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
target triple = "x86_64-unknown-linux-gnu"
; atomicrmw xchg: store clean shadow, return clean shadow

define i32 @AtomicRmwXchg(i32* %p, i32 %x) sanitize_memory {
entry:
  %0 = atomicrmw xchg i32* %p, i32 %x seq_cst
  ret i32 %0
}

; CHECK: @AtomicRmwXchg
; CHECK: store i32 0,
; CHECK: atomicrmw xchg {{.*}} seq_cst
; CHECK: store i32 0, {{.*}} @__msan_retval_tls
; CHECK: ret i32
; atomicrmw max: exactly the same as above

define i32 @AtomicRmwMax(i32* %p, i32 %x) sanitize_memory {
entry:
  %0 = atomicrmw max i32* %p, i32 %x seq_cst
  ret i32 %0
}

; CHECK: @AtomicRmwMax
; CHECK: store i32 0,
; CHECK: atomicrmw max {{.*}} seq_cst
; CHECK: store i32 0, {{.*}} @__msan_retval_tls
; CHECK: ret i32
; cmpxchg: the same as above, but also check %a shadow

define i32 @Cmpxchg(i32* %p, i32 %a, i32 %b) sanitize_memory {
entry:
  %0 = cmpxchg i32* %p, i32 %a, i32 %b seq_cst
  ret i32 %0
}

; CHECK: @Cmpxchg
; CHECK: @__msan_warning
; CHECK: cmpxchg {{.*}} seq_cst
; CHECK: store i32 0, {{.*}} @__msan_retval_tls
; CHECK: ret i32
; relaxed cmpxchg: bump up to "release"

define i32 @CmpxchgMonotonic(i32* %p, i32 %a, i32 %b) sanitize_memory {
entry:
  %0 = cmpxchg i32* %p, i32 %a, i32 %b monotonic
  ret i32 %0
}

; CHECK: @CmpxchgMonotonic
; CHECK: @__msan_warning
; CHECK: cmpxchg {{.*}} release
; CHECK: store i32 0, {{.*}} @__msan_retval_tls
; CHECK: ret i32
; atomic load: preserve alignment, load shadow value after app value

define i32 @AtomicLoad(i32* %p) sanitize_memory {
entry:
  %0 = load atomic i32* %p seq_cst, align 16
  ret i32 %0
}

; CHECK: @AtomicLoad
; CHECK: load atomic i32* {{.*}} seq_cst, align 16
; CHECK: [[SHADOW:%[01-9a-z_]+]] = load i32* {{.*}}, align 16
; CHECK: store i32 {{.*}}[[SHADOW]], {{.*}} @__msan_retval_tls
; CHECK: ret i32
; atomic load: preserve alignment, load shadow value after app value

define i32 @AtomicLoadAcquire(i32* %p) sanitize_memory {
entry:
  %0 = load atomic i32* %p acquire, align 16
  ret i32 %0
}

; CHECK: @AtomicLoadAcquire
; CHECK: load atomic i32* {{.*}} acquire, align 16
; CHECK: [[SHADOW:%[01-9a-z_]+]] = load i32* {{.*}}, align 16
; CHECK: store i32 {{.*}}[[SHADOW]], {{.*}} @__msan_retval_tls
; CHECK: ret i32
; atomic load monotonic: bump up to load acquire

define i32 @AtomicLoadMonotonic(i32* %p) sanitize_memory {
entry:
  %0 = load atomic i32* %p monotonic, align 16
  ret i32 %0
}

; CHECK: @AtomicLoadMonotonic
; CHECK: load atomic i32* {{.*}} acquire, align 16
; CHECK: [[SHADOW:%[01-9a-z_]+]] = load i32* {{.*}}, align 16
; CHECK: store i32 {{.*}}[[SHADOW]], {{.*}} @__msan_retval_tls
; CHECK: ret i32
; atomic load unordered: bump up to load acquire

define i32 @AtomicLoadUnordered(i32* %p) sanitize_memory {
entry:
  %0 = load atomic i32* %p unordered, align 16
  ret i32 %0
}

; CHECK: @AtomicLoadUnordered
; CHECK: load atomic i32* {{.*}} acquire, align 16
; CHECK: [[SHADOW:%[01-9a-z_]+]] = load i32* {{.*}}, align 16
; CHECK: store i32 {{.*}}[[SHADOW]], {{.*}} @__msan_retval_tls
; CHECK: ret i32
; atomic store: preserve alignment, store clean shadow value before app value

define void @AtomicStore(i32* %p, i32 %x) sanitize_memory {
entry:
  store atomic i32 %x, i32* %p seq_cst, align 16
  ret void
}

; CHECK: @AtomicStore
; CHECK-NOT: @__msan_param_tls
; CHECK: store i32 0, i32* {{.*}}, align 16
; CHECK: store atomic i32 %x, i32* %p seq_cst, align 16
; CHECK: ret void
; atomic store: preserve alignment, store clean shadow value before app value

define void @AtomicStoreRelease(i32* %p, i32 %x) sanitize_memory {
entry:
  store atomic i32 %x, i32* %p release, align 16
  ret void
}

; CHECK: @AtomicStoreRelease
; CHECK-NOT: @__msan_param_tls
; CHECK: store i32 0, i32* {{.*}}, align 16
; CHECK: store atomic i32 %x, i32* %p release, align 16
; CHECK: ret void
; atomic store monotonic: bumped up to store release

define void @AtomicStoreMonotonic(i32* %p, i32 %x) sanitize_memory {
entry:
  store atomic i32 %x, i32* %p monotonic, align 16
  ret void
}

; CHECK: @AtomicStoreMonotonic
; CHECK-NOT: @__msan_param_tls
; CHECK: store i32 0, i32* {{.*}}, align 16
; CHECK: store atomic i32 %x, i32* %p release, align 16
; CHECK: ret void
; atomic store unordered: bumped up to store release

define void @AtomicStoreUnordered(i32* %p, i32 %x) sanitize_memory {
entry:
  store atomic i32 %x, i32* %p unordered, align 16
  ret void
}

; CHECK: @AtomicStoreUnordered
; CHECK-NOT: @__msan_param_tls
; CHECK: store i32 0, i32* {{.*}}, align 16
; CHECK: store atomic i32 %x, i32* %p release, align 16
; CHECK: ret void