//===-- msan.h --------------------------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of MemorySanitizer.
//
// Private MSan header.
//===----------------------------------------------------------------------===//

#ifndef MSAN_H
#define MSAN_H

#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_internal_defs.h"
#include "sanitizer_common/sanitizer_stacktrace.h"
#include "msan_interface_internal.h"
#include "msan_flags.h"
#include "ubsan/ubsan_platform.h"

#ifndef MSAN_REPLACE_OPERATORS_NEW_AND_DELETE
# define MSAN_REPLACE_OPERATORS_NEW_AND_DELETE 1
#endif

#ifndef MSAN_CONTAINS_UBSAN
# define MSAN_CONTAINS_UBSAN CAN_SANITIZE_UB
#endif

struct MappingDesc {
  uptr start;
  uptr end;
  enum Type {
    INVALID, APP, SHADOW, ORIGIN
  } type;
  const char *name;
};


#if SANITIZER_LINUX && defined(__mips64)

// MIPS64 maps:
// - 0x0000000000-0x0200000000: the program's own segments
// - 0xa200000000-0xc000000000: PIE program segments
// - 0xe200000000-0xffffffffff: library segments.
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x000200000000ULL, MappingDesc::APP, "app-1"},
    {0x000200000000ULL, 0x002200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x002200000000ULL, 0x004000000000ULL, MappingDesc::SHADOW, "shadow-2"},
    {0x004000000000ULL, 0x004200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x004200000000ULL, 0x006000000000ULL, MappingDesc::ORIGIN, "origin-2"},
    {0x006000000000ULL, 0x006200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x006200000000ULL, 0x008000000000ULL, MappingDesc::SHADOW, "shadow-3"},
    {0x008000000000ULL, 0x008200000000ULL, MappingDesc::SHADOW, "shadow-1"},
    {0x008200000000ULL, 0x00a000000000ULL, MappingDesc::ORIGIN, "origin-3"},
    {0x00a000000000ULL, 0x00a200000000ULL, MappingDesc::ORIGIN, "origin-1"},
    {0x00a200000000ULL, 0x00c000000000ULL, MappingDesc::APP, "app-2"},
    {0x00c000000000ULL, 0x00e200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x00e200000000ULL, 0x00ffffffffffULL, MappingDesc::APP, "app-3"}};

#define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0x8000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x2000000000ULL)
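// Worked example (not part of the original header; the address is an arbitrary
// illustrative pick from the "app-2" range above):
//   MEM_TO_SHADOW(0x00a200001000)    == 0x00a200001000 ^ 0x8000000000 == 0x002200001000 ("shadow-2")
//   SHADOW_TO_ORIGIN(0x002200001000) == 0x002200001000 + 0x2000000000 == 0x004200001000 ("origin-2")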

#elif SANITIZER_LINUX && defined(__aarch64__)

// The mapping covers the 39-bit, 42-bit, and 48-bit VMA layouts.  AArch64
// maps:
// - 0x0000000000000-0x0000010000000: the program's own segments (39/42/48-bit)
// - 0x0005500000000-0x0005600000000: 39-bit PIE program segments
// - 0x0007f80000000-0x0007fffffffff: 39-bit library segments
// - 0x002aa00000000-0x002ab00000000: 42-bit PIE program segments
// - 0x003ff00000000-0x003ffffffffff: 42-bit library segments
// - 0x0aaaaa0000000-0x0aaab00000000: 48-bit PIE program segments
// - 0xffff000000000-0x1000000000000: 48-bit library segments
// The layout is fragmented into multiple segments to increase the memory
// available on 42-bit VMAs (12.21% of the total VMA is available on 42-bit,
// and 13.28% on 39-bit). The 48-bit segments cover only the usual PIE/default
// segments plus a few more (262144 GB total, 0.39% of the total VMA).
const MappingDesc kMemoryLayout[] = {
    {0x00000000000ULL, 0x01000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x01000000000ULL, 0x02000000000ULL, MappingDesc::SHADOW, "shadow-2"},
    {0x02000000000ULL, 0x03000000000ULL, MappingDesc::ORIGIN, "origin-2"},
    {0x03000000000ULL, 0x04000000000ULL, MappingDesc::SHADOW, "shadow-1"},
    {0x04000000000ULL, 0x05000000000ULL, MappingDesc::ORIGIN, "origin-1"},
    {0x05000000000ULL, 0x06000000000ULL, MappingDesc::APP, "app-1"},
    {0x06000000000ULL, 0x07000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x07000000000ULL, 0x08000000000ULL, MappingDesc::APP, "app-2"},
    {0x08000000000ULL, 0x09000000000ULL, MappingDesc::INVALID, "invalid"},
    // The mappings below are used only for the 42-bit VMA.
    {0x09000000000ULL, 0x0A000000000ULL, MappingDesc::SHADOW, "shadow-3"},
    {0x0A000000000ULL, 0x0B000000000ULL, MappingDesc::ORIGIN, "origin-3"},
    {0x0B000000000ULL, 0x0F000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0F000000000ULL, 0x10000000000ULL, MappingDesc::APP, "app-3"},
    {0x10000000000ULL, 0x11000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x11000000000ULL, 0x12000000000ULL, MappingDesc::APP, "app-4"},
    {0x12000000000ULL, 0x17000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x17000000000ULL, 0x18000000000ULL, MappingDesc::SHADOW, "shadow-4"},
    {0x18000000000ULL, 0x19000000000ULL, MappingDesc::ORIGIN, "origin-4"},
    {0x19000000000ULL, 0x20000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x20000000000ULL, 0x21000000000ULL, MappingDesc::APP, "app-5"},
    {0x21000000000ULL, 0x26000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x26000000000ULL, 0x27000000000ULL, MappingDesc::SHADOW, "shadow-5"},
    {0x27000000000ULL, 0x28000000000ULL, MappingDesc::ORIGIN, "origin-5"},
    {0x28000000000ULL, 0x29000000000ULL, MappingDesc::SHADOW, "shadow-7"},
    {0x29000000000ULL, 0x2A000000000ULL, MappingDesc::ORIGIN, "origin-7"},
    {0x2A000000000ULL, 0x2B000000000ULL, MappingDesc::APP, "app-6"},
    {0x2B000000000ULL, 0x2C000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x2C000000000ULL, 0x2D000000000ULL, MappingDesc::SHADOW, "shadow-6"},
    {0x2D000000000ULL, 0x2E000000000ULL, MappingDesc::ORIGIN, "origin-6"},
    {0x2E000000000ULL, 0x2F000000000ULL, MappingDesc::APP, "app-7"},
    {0x2F000000000ULL, 0x39000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x39000000000ULL, 0x3A000000000ULL, MappingDesc::SHADOW, "shadow-9"},
    {0x3A000000000ULL, 0x3B000000000ULL, MappingDesc::ORIGIN, "origin-9"},
    {0x3B000000000ULL, 0x3C000000000ULL, MappingDesc::APP, "app-8"},
    {0x3C000000000ULL, 0x3D000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x3D000000000ULL, 0x3E000000000ULL, MappingDesc::SHADOW, "shadow-8"},
    {0x3E000000000ULL, 0x3F000000000ULL, MappingDesc::ORIGIN, "origin-8"},
    {0x3F000000000ULL, 0x40000000000ULL, MappingDesc::APP, "app-9"},
    // The mappings below are used only for the 48-bit VMA.
    // TODO(unknown): the 48-bit mapping only covers the usual PIE and non-PIE
    // segments plus some more segments totaling 262144 GB of VMA (which covers
    // only 0.32% of the whole 48-bit VMA). Memory availability can be increased
    // by adding multiple application segments, as in the 39- and 42-bit mappings.
    {0x0040000000000ULL, 0x0041000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0041000000000ULL, 0x0042000000000ULL, MappingDesc::APP, "app-10"},
    {0x0042000000000ULL, 0x0047000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0047000000000ULL, 0x0048000000000ULL, MappingDesc::SHADOW, "shadow-10"},
    {0x0048000000000ULL, 0x0049000000000ULL, MappingDesc::ORIGIN, "origin-10"},
    {0x0049000000000ULL, 0x0050000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0050000000000ULL, 0x0051000000000ULL, MappingDesc::APP, "app-11"},
    {0x0051000000000ULL, 0x0056000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0056000000000ULL, 0x0057000000000ULL, MappingDesc::SHADOW, "shadow-11"},
    {0x0057000000000ULL, 0x0058000000000ULL, MappingDesc::ORIGIN, "origin-11"},
    {0x0058000000000ULL, 0x0059000000000ULL, MappingDesc::APP, "app-12"},
    {0x0059000000000ULL, 0x005E000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x005E000000000ULL, 0x005F000000000ULL, MappingDesc::SHADOW, "shadow-12"},
    {0x005F000000000ULL, 0x0060000000000ULL, MappingDesc::ORIGIN, "origin-12"},
    {0x0060000000000ULL, 0x0061000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0061000000000ULL, 0x0062000000000ULL, MappingDesc::APP, "app-13"},
    {0x0062000000000ULL, 0x0067000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0067000000000ULL, 0x0068000000000ULL, MappingDesc::SHADOW, "shadow-13"},
    {0x0068000000000ULL, 0x0069000000000ULL, MappingDesc::ORIGIN, "origin-13"},
    {0x0069000000000ULL, 0x0AAAAA0000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0AAAAA0000000ULL, 0x0AAAB00000000ULL, MappingDesc::APP, "app-14"},
    {0x0AAAB00000000ULL, 0x0AACAA0000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0AACAA0000000ULL, 0x0AACB00000000ULL, MappingDesc::SHADOW, "shadow-14"},
    {0x0AACB00000000ULL, 0x0AADAA0000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0AADAA0000000ULL, 0x0AADB00000000ULL, MappingDesc::ORIGIN, "origin-14"},
    {0x0AADB00000000ULL, 0x0FF9F00000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0FF9F00000000ULL, 0x0FFA000000000ULL, MappingDesc::SHADOW, "shadow-15"},
    {0x0FFA000000000ULL, 0x0FFAF00000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0FFAF00000000ULL, 0x0FFB000000000ULL, MappingDesc::ORIGIN, "origin-15"},
    {0x0FFB000000000ULL, 0x0FFFF00000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0FFFF00000000ULL, 0x1000000000000ULL, MappingDesc::APP, "app-15"},
};
# define MEM_TO_SHADOW(mem) ((uptr)mem ^ 0x6000000000ULL)
# define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x1000000000ULL)
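// Worked example (not part of the original header; the address is an arbitrary
// illustrative pick from the "app-1" range above):
//   MEM_TO_SHADOW(0x05000001000)    == 0x05000001000 ^ 0x6000000000 == 0x03000001000 ("shadow-1")
//   SHADOW_TO_ORIGIN(0x03000001000) == 0x03000001000 + 0x1000000000 == 0x04000001000 ("origin-1")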

#elif SANITIZER_LINUX && SANITIZER_PPC64
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x000200000000ULL, MappingDesc::APP, "low memory"},
    {0x000200000000ULL, 0x080000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x080000000000ULL, 0x180200000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x180200000000ULL, 0x1C0000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x1C0000000000ULL, 0x2C0200000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x2C0200000000ULL, 0x300000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x300000000000ULL, 0x800000000000ULL, MappingDesc::APP, "high memory"}};

// Various kernels use different low end ranges but we can combine them into one
// big range. They also use different high end ranges but we can map them all to
// one range.
// Maps low and high app ranges to contiguous space with zero base:
//   Low:  0000 0000 0000 - 0001 ffff ffff  ->  1000 0000 0000 - 1001 ffff ffff
//   High: 3000 0000 0000 - 3fff ffff ffff  ->  0000 0000 0000 - 0fff ffff ffff
//   High: 4000 0000 0000 - 4fff ffff ffff  ->  0000 0000 0000 - 0fff ffff ffff
//   High: 7000 0000 0000 - 7fff ffff ffff  ->  0000 0000 0000 - 0fff ffff ffff
#define LINEARIZE_MEM(mem) \
  (((uptr)(mem) & ~0xE00000000000ULL) ^ 0x100000000000ULL)
#define MEM_TO_SHADOW(mem) (LINEARIZE_MEM((mem)) + 0x080000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x140000000000ULL)
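// Worked examples (not part of the original header; addresses are arbitrary
// illustrative picks from the "low memory" and "high memory" ranges above):
//   LINEARIZE_MEM(0x000000001000) == 0x100000001000; MEM_TO_SHADOW == 0x180000001000 (shadow)
//   LINEARIZE_MEM(0x300000001000) == 0x000000001000; MEM_TO_SHADOW == 0x080000001000 (shadow)
//   SHADOW_TO_ORIGIN(0x080000001000) == 0x080000001000 + 0x140000000000 == 0x1C0000001000 (origin)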

#elif SANITIZER_FREEBSD && SANITIZER_WORDSIZE == 64

// Low memory: main binary, MAP_32BIT mappings and modules
// High memory: heap, modules and main thread stack
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x010000000000ULL, MappingDesc::APP, "low memory"},
    {0x010000000000ULL, 0x100000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x100000000000ULL, 0x310000000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x310000000000ULL, 0x380000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x380000000000ULL, 0x590000000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x590000000000ULL, 0x600000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x600000000000ULL, 0x800000000000ULL, MappingDesc::APP, "high memory"}};

// Maps low and high app ranges to contiguous space with zero base:
//   Low:  0000 0000 0000 - 00ff ffff ffff  ->  2000 0000 0000 - 20ff ffff ffff
//   High: 6000 0000 0000 - 7fff ffff ffff  ->  0000 0000 0000 - 1fff ffff ffff
#define LINEARIZE_MEM(mem) \
  (((uptr)(mem) & ~0xc00000000000ULL) ^ 0x200000000000ULL)
#define MEM_TO_SHADOW(mem) (LINEARIZE_MEM((mem)) + 0x100000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x280000000000)
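// Worked example (not part of the original header; the address is an arbitrary
// illustrative pick from the "high memory" range above):
//   LINEARIZE_MEM(0x600000001000) == 0x000000001000; MEM_TO_SHADOW == 0x100000001000 (shadow)
//   SHADOW_TO_ORIGIN(0x100000001000) == 0x100000001000 + 0x280000000000 == 0x380000001000 (origin)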

#elif SANITIZER_NETBSD || (SANITIZER_LINUX && SANITIZER_WORDSIZE == 64)

#ifdef MSAN_LINUX_X86_64_OLD_MAPPING
// Requires PIE binary and ASLR enabled.
// Main thread stack and DSOs at 0x7f0000000000 (sometimes 0x7e0000000000).
// Heap at 0x600000000000.
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x200000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x200000000000ULL, 0x400000000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x400000000000ULL, 0x600000000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x600000000000ULL, 0x800000000000ULL, MappingDesc::APP, "app"}};

#define MEM_TO_SHADOW(mem) (((uptr)(mem)) & ~0x400000000000ULL)
#define SHADOW_TO_ORIGIN(mem) (((uptr)(mem)) + 0x200000000000ULL)
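// Worked example (not part of the original header; the address is an arbitrary
// illustrative pick from the heap range noted above):
//   MEM_TO_SHADOW(0x600000001000)    == 0x600000001000 & ~0x400000000000 == 0x200000001000 (shadow)
//   SHADOW_TO_ORIGIN(0x200000001000) == 0x200000001000 + 0x200000000000 == 0x400000001000 (origin)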
#else  // MSAN_LINUX_X86_64_OLD_MAPPING
// All of the following configurations are supported.
// ASLR disabled: main executable and DSOs at 0x555550000000
// PIE and ASLR: main executable and DSOs at 0x7f0000000000
// non-PIE: main executable below 0x100000000, DSOs at 0x7f0000000000
// Heap at 0x700000000000.
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x010000000000ULL, MappingDesc::APP, "app-1"},
    {0x010000000000ULL, 0x100000000000ULL, MappingDesc::SHADOW, "shadow-2"},
    {0x100000000000ULL, 0x110000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x110000000000ULL, 0x200000000000ULL, MappingDesc::ORIGIN, "origin-2"},
    {0x200000000000ULL, 0x300000000000ULL, MappingDesc::SHADOW, "shadow-3"},
    {0x300000000000ULL, 0x400000000000ULL, MappingDesc::ORIGIN, "origin-3"},
    {0x400000000000ULL, 0x500000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x500000000000ULL, 0x510000000000ULL, MappingDesc::SHADOW, "shadow-1"},
    {0x510000000000ULL, 0x600000000000ULL, MappingDesc::APP, "app-2"},
    {0x600000000000ULL, 0x610000000000ULL, MappingDesc::ORIGIN, "origin-1"},
    {0x610000000000ULL, 0x700000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x700000000000ULL, 0x800000000000ULL, MappingDesc::APP, "app-3"}};
#define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0x500000000000ULL)
#define SHADOW_TO_ORIGIN(mem) (((uptr)(mem)) + 0x100000000000ULL)
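// Worked example (not part of the original header; the address is an arbitrary
// illustrative pick from the "app-3" range above, where the heap lives):
//   MEM_TO_SHADOW(0x700000001000)    == 0x700000001000 ^ 0x500000000000 == 0x200000001000 ("shadow-3")
//   SHADOW_TO_ORIGIN(0x200000001000) == 0x200000001000 + 0x100000000000 == 0x300000001000 ("origin-3")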
#endif  // MSAN_LINUX_X86_64_OLD_MAPPING

#else
#error "Unsupported platform"
#endif

const uptr kMemoryLayoutSize = sizeof(kMemoryLayout) / sizeof(kMemoryLayout[0]);

#define MEM_TO_ORIGIN(mem) (SHADOW_TO_ORIGIN(MEM_TO_SHADOW((mem))))

#ifndef __clang__
__attribute__((optimize("unroll-loops")))
#endif
inline bool addr_is_type(uptr addr, MappingDesc::Type mapping_type) {
// It is critical for performance that this loop is unrolled (because then it is
// simplified into just a few constant comparisons).
#ifdef __clang__
#pragma unroll
#endif
  for (unsigned i = 0; i < kMemoryLayoutSize; ++i)
    if (kMemoryLayout[i].type == mapping_type &&
        addr >= kMemoryLayout[i].start && addr < kMemoryLayout[i].end)
      return true;
  return false;
}

#define MEM_IS_APP(mem) addr_is_type((uptr)(mem), MappingDesc::APP)
#define MEM_IS_SHADOW(mem) addr_is_type((uptr)(mem), MappingDesc::SHADOW)
#define MEM_IS_ORIGIN(mem) addr_is_type((uptr)(mem), MappingDesc::ORIGIN)
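// Note (not part of the original header): the layouts above are arranged so
// that, for an address p with MEM_IS_APP(p), MEM_IS_SHADOW(MEM_TO_SHADOW(p))
// and MEM_IS_ORIGIN(MEM_TO_ORIGIN(p)) are expected to hold as well; e.g. under
// the default Linux/x86_64 layout, MEM_IS_APP(0x700000001000) and
// MEM_IS_SHADOW(0x200000001000) are both true.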

// These constants must be kept in sync with the ones in MemorySanitizer.cc.
const int kMsanParamTlsSize = 800;
const int kMsanRetvalTlsSize = 800;

namespace __msan {
extern int msan_inited;
extern bool msan_init_is_running;
extern int msan_report_count;

bool ProtectRange(uptr beg, uptr end);
bool InitShadow(bool init_origins);
char *GetProcSelfMaps();
void InitializeInterceptors();

void MsanAllocatorInit();
void MsanAllocatorThreadFinish();
void MsanDeallocate(StackTrace *stack, void *ptr);

void *msan_malloc(uptr size, StackTrace *stack);
void *msan_calloc(uptr nmemb, uptr size, StackTrace *stack);
void *msan_realloc(void *ptr, uptr size, StackTrace *stack);
void *msan_valloc(uptr size, StackTrace *stack);
void *msan_pvalloc(uptr size, StackTrace *stack);
void *msan_aligned_alloc(uptr alignment, uptr size, StackTrace *stack);
void *msan_memalign(uptr alignment, uptr size, StackTrace *stack);
int msan_posix_memalign(void **memptr, uptr alignment, uptr size,
                        StackTrace *stack);

void InstallTrapHandler();
void InstallAtExitHandler();

const char *GetStackOriginDescr(u32 id, uptr *pc);

void EnterSymbolizer();
void ExitSymbolizer();
bool IsInSymbolizer();

struct SymbolizerScope {
  SymbolizerScope() { EnterSymbolizer(); }
  ~SymbolizerScope() { ExitSymbolizer(); }
};

void PrintWarning(uptr pc, uptr bp);
void PrintWarningWithOrigin(uptr pc, uptr bp, u32 origin);

void GetStackTrace(BufferedStackTrace *stack, uptr max_s, uptr pc, uptr bp,
                   void *context, bool request_fast_unwind);

// Unpoison first n function arguments.
void UnpoisonParam(uptr n);
void UnpoisonThreadLocalState();

// Returns a "chained" origin id, pointing to the given stack trace followed by
// the previous origin id.
u32 ChainOrigin(u32 id, StackTrace *stack);

const int STACK_TRACE_TAG_POISON = StackTrace::TAG_CUSTOM + 1;

#define GET_MALLOC_STACK_TRACE                                            \
  BufferedStackTrace stack;                                               \
  if (__msan_get_track_origins() && msan_inited)                          \
  GetStackTrace(&stack, common_flags()->malloc_context_size,              \
                StackTrace::GetCurrentPc(), GET_CURRENT_FRAME(), nullptr, \
                common_flags()->fast_unwind_on_malloc)

// For platforms that support only the slow unwinder, we restrict the store
// context size to 1, i.e. we store essentially just the current pc. We do this
// because the slow unwinder, which is based on libunwind, is not
// async-signal-safe and causes random freezes in forking applications as well
// as in signal handlers.
#define GET_STORE_STACK_TRACE_PC_BP(pc, bp)                               \
  BufferedStackTrace stack;                                               \
  if (__msan_get_track_origins() > 1 && msan_inited) {                    \
    if (!SANITIZER_CAN_FAST_UNWIND)                                       \
      GetStackTrace(&stack, Min(1, flags()->store_context_size), pc, bp,  \
                    nullptr, false);                                      \
    else                                                                  \
      GetStackTrace(&stack, flags()->store_context_size, pc, bp, nullptr, \
                    common_flags()->fast_unwind_on_malloc);               \
  }

#define GET_STORE_STACK_TRACE \
  GET_STORE_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME())

#define GET_FATAL_STACK_TRACE_PC_BP(pc, bp)              \
  BufferedStackTrace stack;                              \
  if (msan_inited)                                       \
  GetStackTrace(&stack, kStackTraceMax, pc, bp, nullptr, \
                common_flags()->fast_unwind_on_fatal)

#define GET_FATAL_STACK_TRACE_HERE \
  GET_FATAL_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME())

#define PRINT_CURRENT_STACK_CHECK() \
  {                                 \
    GET_FATAL_STACK_TRACE_HERE;     \
    stack.Print();                  \
  }

class ScopedThreadLocalStateBackup {
 public:
  ScopedThreadLocalStateBackup() { Backup(); }
  ~ScopedThreadLocalStateBackup() { Restore(); }
  void Backup();
  void Restore();
 private:
  u64 va_arg_overflow_size_tls;
};

void MsanTSDInit(void (*destructor)(void *tsd));
void *MsanTSDGet();
void MsanTSDSet(void *tsd);
void MsanTSDDtor(void *tsd);

}  // namespace __msan

#define MSAN_MALLOC_HOOK(ptr, size)       \
  do {                                    \
    if (&__sanitizer_malloc_hook) {       \
      UnpoisonParam(2);                   \
      __sanitizer_malloc_hook(ptr, size); \
    }                                     \
    RunMallocHooks(ptr, size);            \
  } while (false)
#define MSAN_FREE_HOOK(ptr)       \
  do {                            \
    if (&__sanitizer_free_hook) { \
      UnpoisonParam(1);           \
      __sanitizer_free_hook(ptr); \
    }                             \
    RunFreeHooks(ptr);            \
  } while (false)

#endif  // MSAN_H