//===-- msan.h --------------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of MemorySanitizer.
//
// Private MSan header.
//===----------------------------------------------------------------------===//

#ifndef MSAN_H
#define MSAN_H

#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_internal_defs.h"
#include "sanitizer_common/sanitizer_stacktrace.h"
#include "msan_interface_internal.h"
#include "msan_flags.h"
#include "ubsan/ubsan_platform.h"

#ifndef MSAN_REPLACE_OPERATORS_NEW_AND_DELETE
# define MSAN_REPLACE_OPERATORS_NEW_AND_DELETE 1
#endif

#ifndef MSAN_CONTAINS_UBSAN
# define MSAN_CONTAINS_UBSAN CAN_SANITIZE_UB
#endif

struct MappingDesc {
  uptr start;
  uptr end;
  enum Type {
    INVALID = 1,
    ALLOCATOR = 2,
    APP = 4,
    SHADOW = 8,
    ORIGIN = 16,
  } type;
  const char *name;
};
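
// Note: the Type values are distinct bit flags so that addr_is_type() below
// can test an address against several kinds at once, e.g.
// MappingDesc::APP | MappingDesc::ALLOCATOR in MEM_IS_APP.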

// Note: MappingDesc::ALLOCATOR entries are only used to check for memory
// layout compatibility. The actual allocation settings are in
// msan_allocator.cpp, which needs to be kept in sync.
#if SANITIZER_LINUX && defined(__mips64)

// MIPS64 maps:
// - 0x0000000000-0x0200000000: the program's own segments
// - 0xa200000000-0xc000000000: PIE program segments
// - 0xe200000000-0xffffffffff: library segments.
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x000200000000ULL, MappingDesc::APP, "app-1"},
    {0x000200000000ULL, 0x002200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x002200000000ULL, 0x004000000000ULL, MappingDesc::SHADOW, "shadow-2"},
    {0x004000000000ULL, 0x004200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x004200000000ULL, 0x006000000000ULL, MappingDesc::ORIGIN, "origin-2"},
    {0x006000000000ULL, 0x006200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x006200000000ULL, 0x008000000000ULL, MappingDesc::SHADOW, "shadow-3"},
    {0x008000000000ULL, 0x008200000000ULL, MappingDesc::SHADOW, "shadow-1"},
    {0x008200000000ULL, 0x00a000000000ULL, MappingDesc::ORIGIN, "origin-3"},
    {0x00a000000000ULL, 0x00a200000000ULL, MappingDesc::ORIGIN, "origin-1"},
    {0x00a200000000ULL, 0x00c000000000ULL, MappingDesc::APP, "app-2"},
    {0x00c000000000ULL, 0x00e200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x00e200000000ULL, 0x00ffffffffffULL, MappingDesc::APP, "app-3"}};

#define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0x8000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x2000000000ULL)
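
// For example (illustrative arithmetic only): an app-2 address such as
// 0x00a200001000 maps to shadow 0x00a200001000 ^ 0x8000000000 =
// 0x002200001000 (inside "shadow-2"), and its origin is that value plus
// 0x2000000000, i.e. 0x004200001000 (inside "origin-2").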

#elif SANITIZER_LINUX && defined(__aarch64__)

// The mapping assumes 48-bit VMA. AArch64 maps:
// - 0x0000000000000-0x0100000000000: 39/42/48-bit programs' own segments
// - 0x0a00000000000-0x0b00000000000: 48-bit PIE program segments
//   Ideally, this would extend to 0x0c00000000000 (2^45 bytes - the
//   maximum ASLR region for 48-bit VMA) but it is too hard to fit in
//   the larger app/shadow/origin regions.
// - 0x0e00000000000-0x1000000000000: 48-bit library segments
const MappingDesc kMemoryLayout[] = {
    {0x0000000000000, 0x0100000000000, MappingDesc::APP, "app-10-13"},
    {0x0100000000000, 0x0200000000000, MappingDesc::SHADOW, "shadow-14"},
    {0x0200000000000, 0x0300000000000, MappingDesc::INVALID, "invalid"},
    {0x0300000000000, 0x0400000000000, MappingDesc::ORIGIN, "origin-14"},
    {0x0400000000000, 0x0600000000000, MappingDesc::SHADOW, "shadow-15"},
    {0x0600000000000, 0x0800000000000, MappingDesc::ORIGIN, "origin-15"},
    {0x0800000000000, 0x0A00000000000, MappingDesc::INVALID, "invalid"},
    {0x0A00000000000, 0x0B00000000000, MappingDesc::APP, "app-14"},
    {0x0B00000000000, 0x0C00000000000, MappingDesc::SHADOW, "shadow-10-13"},
    {0x0C00000000000, 0x0D00000000000, MappingDesc::INVALID, "invalid"},
    {0x0D00000000000, 0x0E00000000000, MappingDesc::ORIGIN, "origin-10-13"},
    {0x0E00000000000, 0x0E40000000000, MappingDesc::ALLOCATOR, "allocator"},
    {0x0E40000000000, 0x1000000000000, MappingDesc::APP, "app-15"},
};
# define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0xB00000000000ULL)
# define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x200000000000ULL)

#elif SANITIZER_LINUX && SANITIZER_LOONGARCH64
// LoongArch64 maps:
// - 0x000000000000-0x010000000000: the program's own segments
// - 0x555500000000-0x555600000000: PIE program segments
// - 0x7fff00000000-0x7fffffffffff: library segments.
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x010000000000ULL, MappingDesc::APP, "app-1"},
    {0x010000000000ULL, 0x100000000000ULL, MappingDesc::SHADOW, "shadow-2"},
    {0x100000000000ULL, 0x110000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x110000000000ULL, 0x200000000000ULL, MappingDesc::ORIGIN, "origin-2"},
    {0x200000000000ULL, 0x300000000000ULL, MappingDesc::SHADOW, "shadow-3"},
    {0x300000000000ULL, 0x400000000000ULL, MappingDesc::ORIGIN, "origin-3"},
    {0x400000000000ULL, 0x500000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x500000000000ULL, 0x510000000000ULL, MappingDesc::SHADOW, "shadow-1"},
    {0x510000000000ULL, 0x600000000000ULL, MappingDesc::APP, "app-2"},
    {0x600000000000ULL, 0x610000000000ULL, MappingDesc::ORIGIN, "origin-1"},
    {0x610000000000ULL, 0x700000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x700000000000ULL, 0x740000000000ULL, MappingDesc::ALLOCATOR, "allocator"},
    {0x740000000000ULL, 0x800000000000ULL, MappingDesc::APP, "app-3"}};
# define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0x500000000000ULL)
# define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x100000000000ULL)

#elif SANITIZER_LINUX && SANITIZER_PPC64
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x000200000000ULL, MappingDesc::APP, "low memory"},
    {0x000200000000ULL, 0x080000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x080000000000ULL, 0x180200000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x180200000000ULL, 0x1C0000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x1C0000000000ULL, 0x2C0200000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x2C0200000000ULL, 0x300000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x300000000000ULL, 0x320000000000ULL, MappingDesc::ALLOCATOR, "allocator"},
    {0x320000000000ULL, 0x800000000000ULL, MappingDesc::APP, "high memory"}};

// Various kernels use different low end ranges but we can combine them into one
// big range. They also use different high end ranges but we can map them all to
// one range.
// Maps low and high app ranges to contiguous space with zero base:
// Low:  0000 0000 0000 - 0001 ffff ffff -> 1000 0000 0000 - 1001 ffff ffff
// High: 3000 0000 0000 - 3fff ffff ffff -> 0000 0000 0000 - 0fff ffff ffff
// High: 4000 0000 0000 - 4fff ffff ffff -> 0000 0000 0000 - 0fff ffff ffff
// High: 7000 0000 0000 - 7fff ffff ffff -> 0000 0000 0000 - 0fff ffff ffff
#define LINEARIZE_MEM(mem) \
  (((uptr)(mem) & ~0xE00000000000ULL) ^ 0x100000000000ULL)
#define MEM_TO_SHADOW(mem) (LINEARIZE_MEM((mem)) + 0x080000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x140000000000ULL)
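
// For example (illustrative arithmetic only): a high-memory address such as
// 0x7fff00001000 linearizes to 0x0fff00001000, so its shadow is
// 0x0fff00001000 + 0x080000000000 = 0x17ff00001000 (inside "shadow") and its
// origin is 0x17ff00001000 + 0x140000000000 = 0x2bff00001000 (inside "origin").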

#elif SANITIZER_LINUX && SANITIZER_S390_64
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x040000000000ULL, MappingDesc::APP, "low memory"},
    {0x040000000000ULL, 0x080000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x080000000000ULL, 0x180000000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x180000000000ULL, 0x1C0000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x1C0000000000ULL, 0x2C0000000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x2C0000000000ULL, 0x440000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x440000000000ULL, 0x460000000000ULL, MappingDesc::ALLOCATOR, "allocator"},
    {0x460000000000ULL, 0x500000000000ULL, MappingDesc::APP, "high memory"}};

#define MEM_TO_SHADOW(mem) \
  ((((uptr)(mem)) & ~0xC00000000000ULL) + 0x080000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x140000000000ULL)

#elif SANITIZER_FREEBSD && defined(__aarch64__)

// Low memory: main binary, MAP_32BIT mappings and modules
// High memory: heap, modules and main thread stack
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x020000000000ULL, MappingDesc::APP, "low memory"},
    {0x020000000000ULL, 0x200000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x200000000000ULL, 0x620000000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x620000000000ULL, 0x700000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x700000000000ULL, 0xb20000000000ULL, MappingDesc::ORIGIN, "origin"},
    {0xb20000000000ULL, 0xc00000000000ULL, MappingDesc::INVALID, "invalid"},
    {0xc00000000000ULL, 0x1000000000000ULL, MappingDesc::APP, "high memory"}};

// Maps low and high app ranges to contiguous space with zero base:
// Low:  0000 0000 0000 - 01ff ffff ffff -> 4000 0000 0000 - 41ff ffff ffff
// High: c000 0000 0000 - ffff ffff ffff -> 0000 0000 0000 - 3fff ffff ffff
#define LINEARIZE_MEM(mem) \
  (((uptr)(mem) & ~0x1800000000000ULL) ^ 0x400000000000ULL)
#define MEM_TO_SHADOW(mem) (LINEARIZE_MEM((mem)) + 0x200000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x500000000000ULL)

#elif SANITIZER_FREEBSD && SANITIZER_WORDSIZE == 64

// Low memory: main binary, MAP_32BIT mappings and modules
// High memory: heap, modules and main thread stack
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x010000000000ULL, MappingDesc::APP, "low memory"},
    {0x010000000000ULL, 0x100000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x100000000000ULL, 0x310000000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x310000000000ULL, 0x380000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x380000000000ULL, 0x590000000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x590000000000ULL, 0x600000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x600000000000ULL, 0x800000000000ULL, MappingDesc::APP, "high memory"}};

// Maps low and high app ranges to contiguous space with zero base:
// Low:  0000 0000 0000 - 00ff ffff ffff -> 2000 0000 0000 - 20ff ffff ffff
// High: 6000 0000 0000 - 7fff ffff ffff -> 0000 0000 0000 - 1fff ffff ffff
#define LINEARIZE_MEM(mem) \
  (((uptr)(mem) & ~0xc00000000000ULL) ^ 0x200000000000ULL)
#define MEM_TO_SHADOW(mem) (LINEARIZE_MEM((mem)) + 0x100000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x280000000000ULL)

#elif SANITIZER_NETBSD || (SANITIZER_LINUX && SANITIZER_WORDSIZE == 64)

// All of the following configurations are supported.
// ASLR disabled: main executable and DSOs at 0x555550000000
// PIE and ASLR: main executable and DSOs at 0x7f0000000000
// non-PIE: main executable below 0x100000000, DSOs at 0x7f0000000000
// Heap at 0x700000000000.
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x010000000000ULL, MappingDesc::APP, "app-1"},
    {0x010000000000ULL, 0x100000000000ULL, MappingDesc::SHADOW, "shadow-2"},
    {0x100000000000ULL, 0x110000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x110000000000ULL, 0x200000000000ULL, MappingDesc::ORIGIN, "origin-2"},
    {0x200000000000ULL, 0x300000000000ULL, MappingDesc::SHADOW, "shadow-3"},
    {0x300000000000ULL, 0x400000000000ULL, MappingDesc::ORIGIN, "origin-3"},
    {0x400000000000ULL, 0x500000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x500000000000ULL, 0x510000000000ULL, MappingDesc::SHADOW, "shadow-1"},
    {0x510000000000ULL, 0x600000000000ULL, MappingDesc::APP, "app-2"},
    {0x600000000000ULL, 0x610000000000ULL, MappingDesc::ORIGIN, "origin-1"},
    {0x610000000000ULL, 0x700000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x700000000000ULL, 0x740000000000ULL, MappingDesc::ALLOCATOR, "allocator"},
    {0x740000000000ULL, 0x800000000000ULL, MappingDesc::APP, "app-3"}};
#define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0x500000000000ULL)
#define SHADOW_TO_ORIGIN(mem) (((uptr)(mem)) + 0x100000000000ULL)
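
// For example (illustrative arithmetic only): a heap (allocator) address such
// as 0x700000001000 maps to shadow 0x700000001000 ^ 0x500000000000 =
// 0x200000001000 (inside "shadow-3"), and its origin is that value plus
// 0x100000000000, i.e. 0x300000001000 (inside "origin-3").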

#else
#error "Unsupported platform"
#endif

const uptr kMemoryLayoutSize = sizeof(kMemoryLayout) / sizeof(kMemoryLayout[0]);

#define MEM_TO_ORIGIN(mem) (SHADOW_TO_ORIGIN(MEM_TO_SHADOW((mem))))

#ifndef __clang__
__attribute__((optimize("unroll-loops")))
#endif
inline bool
addr_is_type(uptr addr, int mapping_types) {
// It is critical for performance that this loop is unrolled (because then it is
// simplified into just a few constant comparisons).
#ifdef __clang__
#pragma unroll
#endif
  for (unsigned i = 0; i < kMemoryLayoutSize; ++i)
    if ((kMemoryLayout[i].type & mapping_types) &&
        addr >= kMemoryLayout[i].start && addr < kMemoryLayout[i].end)
      return true;
  return false;
}

#define MEM_IS_APP(mem) \
  (addr_is_type((uptr)(mem), MappingDesc::APP | MappingDesc::ALLOCATOR))
#define MEM_IS_SHADOW(mem) addr_is_type((uptr)(mem), MappingDesc::SHADOW)
#define MEM_IS_ORIGIN(mem) addr_is_type((uptr)(mem), MappingDesc::ORIGIN)

// These constants must be kept in sync with the ones in MemorySanitizer.cpp.
const int kMsanParamTlsSize = 800;
const int kMsanRetvalTlsSize = 800;

namespace __msan {
extern int msan_inited;
extern bool msan_init_is_running;
extern int msan_report_count;

bool ProtectRange(uptr beg, uptr end);
bool InitShadowWithReExec(bool init_origins);
char *GetProcSelfMaps();
void InitializeInterceptors();

void MsanAllocatorInit();
void MsanDeallocate(BufferedStackTrace *stack, void *ptr);

void *msan_malloc(uptr size, BufferedStackTrace *stack);
void *msan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack);
void *msan_realloc(void *ptr, uptr size, BufferedStackTrace *stack);
void *msan_reallocarray(void *ptr, uptr nmemb, uptr size,
                        BufferedStackTrace *stack);
void *msan_valloc(uptr size, BufferedStackTrace *stack);
void *msan_pvalloc(uptr size, BufferedStackTrace *stack);
void *msan_aligned_alloc(uptr alignment, uptr size, BufferedStackTrace *stack);
void *msan_memalign(uptr alignment, uptr size, BufferedStackTrace *stack);
int msan_posix_memalign(void **memptr, uptr alignment, uptr size,
                        BufferedStackTrace *stack);

void InstallTrapHandler();
void InstallAtExitHandler();

const char *GetStackOriginDescr(u32 id, uptr *pc);

bool IsInSymbolizerOrUnwider();

void PrintWarning(uptr pc, uptr bp);
void PrintWarningWithOrigin(uptr pc, uptr bp, u32 origin);

// Unpoison first n function arguments.
void UnpoisonParam(uptr n);
void UnpoisonThreadLocalState();

// Returns a "chained" origin id, pointing to the given stack trace followed by
// the previous origin id.
u32 ChainOrigin(u32 id, StackTrace *stack);

const int STACK_TRACE_TAG_POISON = StackTrace::TAG_CUSTOM + 1;
const int STACK_TRACE_TAG_FIELDS = STACK_TRACE_TAG_POISON + 1;
const int STACK_TRACE_TAG_VPTR = STACK_TRACE_TAG_FIELDS + 1;

#define GET_MALLOC_STACK_TRACE \
  UNINITIALIZED BufferedStackTrace stack; \
  if (__msan_get_track_origins() && msan_inited) { \
    stack.Unwind(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME(), nullptr, \
                 common_flags()->fast_unwind_on_malloc, \
                 common_flags()->malloc_context_size); \
  }
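
// Illustrative usage (a minimal sketch, not a copy of the real interceptors;
// `my_malloc_wrapper` is a hypothetical caller):
//
//   void *my_malloc_wrapper(uptr size) {
//     GET_MALLOC_STACK_TRACE;  // declares `stack`; unwinds only when origin
//                              // tracking is on and MSan is initialized
//     return msan_malloc(size, &stack);
//   }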

// For platforms that only support the slow unwinder, we restrict the store
// context size to 1, i.e. we only store the current pc. We do this because the
// slow unwinder, which is based on libunwind, is not async-signal-safe and
// causes random freezes in forking applications as well as in signal handlers.
#define GET_STORE_STACK_TRACE_PC_BP(pc, bp) \
  UNINITIALIZED BufferedStackTrace stack; \
  if (__msan_get_track_origins() > 1 && msan_inited) { \
    int size = flags()->store_context_size; \
    if (!SANITIZER_CAN_FAST_UNWIND) \
      size = Min(size, 1); \
    stack.Unwind(pc, bp, nullptr, common_flags()->fast_unwind_on_malloc, \
                 size); \
  }

#define GET_STORE_STACK_TRACE \
  GET_STORE_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME())

#define GET_FATAL_STACK_TRACE_PC_BP(pc, bp) \
  UNINITIALIZED BufferedStackTrace stack; \
  if (msan_inited) { \
    stack.Unwind(pc, bp, nullptr, common_flags()->fast_unwind_on_fatal); \
  }

#define GET_FATAL_STACK_TRACE \
  GET_FATAL_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME())
// Unwind the stack for a fatal error if the passed-in `stack` is empty; it is
// left empty when origin tracking is disabled.
#define GET_FATAL_STACK_TRACE_IF_EMPTY(STACK) \
  if (msan_inited && (STACK)->size == 0) { \
    (STACK)->Unwind(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME(), nullptr, \
                    common_flags()->fast_unwind_on_fatal); \
  }

class ScopedThreadLocalStateBackup {
 public:
  ScopedThreadLocalStateBackup() { Backup(); }
  ~ScopedThreadLocalStateBackup() { Restore(); }
  void Backup();
  void Restore();

 private:
  u64 va_arg_overflow_size_tls;
};
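
// Illustrative usage (a sketch only; `RunUserCallback` is a hypothetical
// function, and the real call sites live in the MSan runtime/interceptors):
//
//   void RunUserCallback(void (*cb)(void)) {
//     ScopedThreadLocalStateBackup tls_backup;  // Backup() in the ctor
//     cb();  // the callback may clobber the va_arg TLS state
//   }                                           // Restore() in the dtor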

void MsanTSDInit(void (*destructor)(void *tsd));
void *MsanTSDGet();
void MsanTSDSet(void *tsd);
void MsanTSDDtor(void *tsd);

void InstallAtForkHandler();

}  // namespace __msan

#endif  // MSAN_H
