msan.h

//===-- msan.h --------------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of MemorySanitizer.
//
// Private MSan header.
//===----------------------------------------------------------------------===//

#ifndef MSAN_H
#define MSAN_H

#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_internal_defs.h"
#include "sanitizer_common/sanitizer_stacktrace.h"
#include "msan_interface_internal.h"
#include "msan_flags.h"
#include "ubsan/ubsan_platform.h"

#ifndef MSAN_REPLACE_OPERATORS_NEW_AND_DELETE
# define MSAN_REPLACE_OPERATORS_NEW_AND_DELETE 1
#endif

#ifndef MSAN_CONTAINS_UBSAN
# define MSAN_CONTAINS_UBSAN CAN_SANITIZE_UB
#endif

struct MappingDesc {
  uptr start;
  uptr end;
  enum Type {
    INVALID = 1,
    ALLOCATOR = 2,
    APP = 4,
    SHADOW = 8,
    ORIGIN = 16,
  } type;
  const char *name;
};
// Note: MappingDesc::ALLOCATOR entries are only used to check for memory
// layout compatibility. The actual allocation settings live in
// msan_allocator.cpp and must be kept in sync with this table.

#if SANITIZER_LINUX && defined(__mips64)
// MIPS64 maps:
// - 0x0000000000-0x0200000000: the program's own segments
// - 0xa200000000-0xc000000000: PIE program segments
// - 0xe200000000-0xffffffffff: library segments.
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x000200000000ULL, MappingDesc::APP, "app-1"},
    {0x000200000000ULL, 0x002200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x002200000000ULL, 0x004000000000ULL, MappingDesc::SHADOW, "shadow-2"},
    {0x004000000000ULL, 0x004200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x004200000000ULL, 0x006000000000ULL, MappingDesc::ORIGIN, "origin-2"},
    {0x006000000000ULL, 0x006200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x006200000000ULL, 0x008000000000ULL, MappingDesc::SHADOW, "shadow-3"},
    {0x008000000000ULL, 0x008200000000ULL, MappingDesc::SHADOW, "shadow-1"},
    {0x008200000000ULL, 0x00a000000000ULL, MappingDesc::ORIGIN, "origin-3"},
    {0x00a000000000ULL, 0x00a200000000ULL, MappingDesc::ORIGIN, "origin-1"},
    {0x00a200000000ULL, 0x00c000000000ULL, MappingDesc::APP, "app-2"},
    {0x00c000000000ULL, 0x00e200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x00e200000000ULL, 0x00ffffffffffULL, MappingDesc::APP, "app-3"}};

#define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0x8000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x2000000000ULL)
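// For example, a PIE address in app-2 such as 0xa200001000 has its shadow at
// 0xa200001000 ^ 0x8000000000 = 0x2200001000 (inside shadow-2) and its origin
// at 0x2200001000 + 0x2000000000 = 0x4200001000 (inside origin-2). The address
// is purely illustrative; the arithmetic follows from the macros above.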

#elif SANITIZER_LINUX && defined(__aarch64__)
// The mapping assumes 48-bit VMA. AArch64 maps:
// - 0x0000000000000-0x0100000000000: 39/42/48-bit program's own segments
// - 0x0a00000000000-0x0b00000000000: 48-bit PIE program segments
//   Ideally, this would extend to 0x0c00000000000 (2^45 bytes - the
//   maximum ASLR region for 48-bit VMA) but it is too hard to fit in
//   the larger app/shadow/origin regions.
// - 0x0e00000000000-0x1000000000000: 48-bit library segments
const MappingDesc kMemoryLayout[] = {
    {0x0000000000000, 0x0100000000000, MappingDesc::APP, "app-10-13"},
    {0x0100000000000, 0x0200000000000, MappingDesc::SHADOW, "shadow-14"},
    {0x0200000000000, 0x0300000000000, MappingDesc::INVALID, "invalid"},
    {0x0300000000000, 0x0400000000000, MappingDesc::ORIGIN, "origin-14"},
    {0x0400000000000, 0x0600000000000, MappingDesc::SHADOW, "shadow-15"},
    {0x0600000000000, 0x0800000000000, MappingDesc::ORIGIN, "origin-15"},
    {0x0800000000000, 0x0A00000000000, MappingDesc::INVALID, "invalid"},
    {0x0A00000000000, 0x0B00000000000, MappingDesc::APP, "app-14"},
    {0x0B00000000000, 0x0C00000000000, MappingDesc::SHADOW, "shadow-10-13"},
    {0x0C00000000000, 0x0D00000000000, MappingDesc::INVALID, "invalid"},
    {0x0D00000000000, 0x0E00000000000, MappingDesc::ORIGIN, "origin-10-13"},
    {0x0E00000000000, 0x0E40000000000, MappingDesc::ALLOCATOR, "allocator"},
    {0x0E40000000000, 0x1000000000000, MappingDesc::APP, "app-15"},
};

# define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0xB00000000000ULL)
# define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x200000000000ULL)

#elif SANITIZER_LINUX && SANITIZER_LOONGARCH64
// LoongArch64 maps:
// - 0x000000000000-0x010000000000: the program's own segments
// - 0x555500000000-0x555600000000: PIE program segments
// - 0x7fff00000000-0x7fffffffffff: library segments.
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x010000000000ULL, MappingDesc::APP, "app-1"},
    {0x010000000000ULL, 0x100000000000ULL, MappingDesc::SHADOW, "shadow-2"},
    {0x100000000000ULL, 0x110000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x110000000000ULL, 0x200000000000ULL, MappingDesc::ORIGIN, "origin-2"},
    {0x200000000000ULL, 0x300000000000ULL, MappingDesc::SHADOW, "shadow-3"},
    {0x300000000000ULL, 0x400000000000ULL, MappingDesc::ORIGIN, "origin-3"},
    {0x400000000000ULL, 0x500000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x500000000000ULL, 0x510000000000ULL, MappingDesc::SHADOW, "shadow-1"},
    {0x510000000000ULL, 0x600000000000ULL, MappingDesc::APP, "app-2"},
    {0x600000000000ULL, 0x610000000000ULL, MappingDesc::ORIGIN, "origin-1"},
    {0x610000000000ULL, 0x700000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x700000000000ULL, 0x740000000000ULL, MappingDesc::ALLOCATOR, "allocator"},
    {0x740000000000ULL, 0x800000000000ULL, MappingDesc::APP, "app-3"}};

# define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0x500000000000ULL)
# define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x100000000000ULL)

#elif SANITIZER_LINUX && SANITIZER_PPC64
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x000200000000ULL, MappingDesc::APP, "low memory"},
    {0x000200000000ULL, 0x080000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x080000000000ULL, 0x180200000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x180200000000ULL, 0x1C0000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x1C0000000000ULL, 0x2C0200000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x2C0200000000ULL, 0x300000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x300000000000ULL, 0x320000000000ULL, MappingDesc::ALLOCATOR, "allocator"},
    {0x320000000000ULL, 0x800000000000ULL, MappingDesc::APP, "high memory"}};

// Various kernels use different low end ranges but we can combine them into one
// big range. They also use different high end ranges but we can map them all to
// one range.
// Maps low and high app ranges to contiguous space with zero base:
//   Low:  0000 0000 0000 - 0001 ffff ffff -> 1000 0000 0000 - 1001 ffff ffff
//   High: 3000 0000 0000 - 3fff ffff ffff -> 0000 0000 0000 - 0fff ffff ffff
//   High: 4000 0000 0000 - 4fff ffff ffff -> 0000 0000 0000 - 0fff ffff ffff
//   High: 7000 0000 0000 - 7fff ffff ffff -> 0000 0000 0000 - 0fff ffff ffff
#define LINEARIZE_MEM(mem) \
  (((uptr)(mem) & ~0xE00000000000ULL) ^ 0x100000000000ULL)
#define MEM_TO_SHADOW(mem) (LINEARIZE_MEM((mem)) + 0x080000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x140000000000ULL)
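// Worked example: a high-memory address such as 0x700000001000 linearizes to
// (0x700000001000 & ~0xE00000000000) ^ 0x100000000000 = 0x000000001000, so its
// shadow is 0x080000001000 and its origin is 0x1C0000001000, both inside the
// ranges declared above. (The address is illustrative only.)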

#elif SANITIZER_LINUX && SANITIZER_S390_64
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x040000000000ULL, MappingDesc::APP, "low memory"},
    {0x040000000000ULL, 0x080000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x080000000000ULL, 0x180000000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x180000000000ULL, 0x1C0000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x1C0000000000ULL, 0x2C0000000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x2C0000000000ULL, 0x440000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x440000000000ULL, 0x460000000000ULL, MappingDesc::ALLOCATOR, "allocator"},
    {0x460000000000ULL, 0x500000000000ULL, MappingDesc::APP, "high memory"}};

#define MEM_TO_SHADOW(mem) \
  ((((uptr)(mem)) & ~0xC00000000000ULL) + 0x080000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x140000000000ULL)
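// Worked example: a high-memory address such as 0x460000001000 maps to shadow
// (0x460000001000 & ~0xC00000000000) + 0x080000000000 = 0x0E0000001000 and to
// origin 0x220000001000, both within the ranges above. (Illustrative address;
// the arithmetic follows directly from the macros.)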

#elif SANITIZER_FREEBSD && defined(__aarch64__)
// Low memory: main binary, MAP_32BIT mappings and modules
// High memory: heap, modules and main thread stack
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x020000000000ULL, MappingDesc::APP, "low memory"},
    {0x020000000000ULL, 0x200000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x200000000000ULL, 0x620000000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x620000000000ULL, 0x700000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x700000000000ULL, 0xb20000000000ULL, MappingDesc::ORIGIN, "origin"},
    {0xb20000000000ULL, 0xc00000000000ULL, MappingDesc::INVALID, "invalid"},
    {0xc00000000000ULL, 0x1000000000000ULL, MappingDesc::APP, "high memory"}};

// Maps low and high app ranges to contiguous space with zero base:
//   Low:  0000 0000 0000 - 01ff ffff ffff -> 4000 0000 0000 - 41ff ffff ffff
//   High: c000 0000 0000 - ffff ffff ffff -> 0000 0000 0000 - 3fff ffff ffff
#define LINEARIZE_MEM(mem) \
  (((uptr)(mem) & ~0x1800000000000ULL) ^ 0x400000000000ULL)
#define MEM_TO_SHADOW(mem) (LINEARIZE_MEM((mem)) + 0x200000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x500000000000ULL)

#elif SANITIZER_FREEBSD && SANITIZER_WORDSIZE == 64
// Low memory: main binary, MAP_32BIT mappings and modules
// High memory: heap, modules and main thread stack
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x010000000000ULL, MappingDesc::APP, "low memory"},
    {0x010000000000ULL, 0x100000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x100000000000ULL, 0x310000000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x310000000000ULL, 0x380000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x380000000000ULL, 0x590000000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x590000000000ULL, 0x600000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x600000000000ULL, 0x800000000000ULL, MappingDesc::APP, "high memory"}};

// Maps low and high app ranges to contiguous space with zero base:
//   Low:  0000 0000 0000 - 00ff ffff ffff -> 2000 0000 0000 - 20ff ffff ffff
//   High: 6000 0000 0000 - 7fff ffff ffff -> 0000 0000 0000 - 1fff ffff ffff
#define LINEARIZE_MEM(mem) \
  (((uptr)(mem) & ~0xc00000000000ULL) ^ 0x200000000000ULL)
#define MEM_TO_SHADOW(mem) (LINEARIZE_MEM((mem)) + 0x100000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x280000000000ULL)

#elif SANITIZER_NETBSD || (SANITIZER_LINUX && SANITIZER_WORDSIZE == 64)
// All of the following configurations are supported.
// ASLR disabled: main executable and DSOs at 0x555550000000
// PIE and ASLR: main executable and DSOs at 0x7f0000000000
// non-PIE: main executable below 0x100000000, DSOs at 0x7f0000000000
// Heap at 0x700000000000.
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x010000000000ULL, MappingDesc::APP, "app-1"},
    {0x010000000000ULL, 0x100000000000ULL, MappingDesc::SHADOW, "shadow-2"},
    {0x100000000000ULL, 0x110000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x110000000000ULL, 0x200000000000ULL, MappingDesc::ORIGIN, "origin-2"},
    {0x200000000000ULL, 0x300000000000ULL, MappingDesc::SHADOW, "shadow-3"},
    {0x300000000000ULL, 0x400000000000ULL, MappingDesc::ORIGIN, "origin-3"},
    {0x400000000000ULL, 0x500000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x500000000000ULL, 0x510000000000ULL, MappingDesc::SHADOW, "shadow-1"},
    {0x510000000000ULL, 0x600000000000ULL, MappingDesc::APP, "app-2"},
    {0x600000000000ULL, 0x610000000000ULL, MappingDesc::ORIGIN, "origin-1"},
    {0x610000000000ULL, 0x700000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x700000000000ULL, 0x740000000000ULL, MappingDesc::ALLOCATOR, "allocator"},
    {0x740000000000ULL, 0x800000000000ULL, MappingDesc::APP, "app-3"}};

#define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0x500000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x100000000000ULL)
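// For instance, under this layout the heap base 0x700000000000 has its shadow
// at 0x700000000000 ^ 0x500000000000 = 0x200000000000 (shadow-3) and its
// origin at 0x300000000000 (origin-3); likewise, a no-ASLR executable address
// near 0x555550000000 maps into shadow-2 and origin-2 the same way.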

#else
#error "Unsupported platform"
#endif

const uptr kMemoryLayoutSize = sizeof(kMemoryLayout) / sizeof(kMemoryLayout[0]);

#define MEM_TO_ORIGIN(mem) (SHADOW_TO_ORIGIN(MEM_TO_SHADOW((mem))))

#ifndef __clang__
__attribute__((optimize("unroll-loops")))
#endif
inline bool
addr_is_type(uptr addr, int mapping_types) {
// It is critical for performance that this loop is unrolled (because then it is
// simplified into just a few constant comparisons).
#ifdef __clang__
#pragma unroll
#endif
  for (unsigned i = 0; i < kMemoryLayoutSize; ++i)
    if ((kMemoryLayout[i].type & mapping_types) &&
        addr >= kMemoryLayout[i].start && addr < kMemoryLayout[i].end)
      return true;
  return false;
}

#define MEM_IS_APP(mem) \
  (addr_is_type((uptr)(mem), MappingDesc::APP | MappingDesc::ALLOCATOR))
#define MEM_IS_SHADOW(mem) addr_is_type((uptr)(mem), MappingDesc::SHADOW)
#define MEM_IS_ORIGIN(mem) addr_is_type((uptr)(mem), MappingDesc::ORIGIN)
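// Example: with the x86_64 Linux layout above, MEM_IS_APP(0x700000000000)
// holds because the address falls in the "allocator" range, which MEM_IS_APP
// also accepts, and MEM_IS_SHADOW(MEM_TO_SHADOW(0x700000000000)) holds as
// well, since every APP/ALLOCATOR range shadows into a SHADOW range.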

// These constants must be kept in sync with the ones in MemorySanitizer.cpp.
const int kMsanParamTlsSize = 800;
const int kMsanRetvalTlsSize = 800;

namespace __msan {

extern int msan_inited;
extern bool msan_init_is_running;
extern int msan_report_count;

bool ProtectRange(uptr beg, uptr end);
bool InitShadowWithReExec(bool init_origins);
char *GetProcSelfMaps();
void InitializeInterceptors();

void MsanAllocatorInit();
void MsanDeallocate(BufferedStackTrace *stack, void *ptr);

void *msan_malloc(uptr size, BufferedStackTrace *stack);
void *msan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack);
void *msan_realloc(void *ptr, uptr size, BufferedStackTrace *stack);
void *msan_reallocarray(void *ptr, uptr nmemb, uptr size,
                        BufferedStackTrace *stack);
void *msan_valloc(uptr size, BufferedStackTrace *stack);
void *msan_pvalloc(uptr size, BufferedStackTrace *stack);
void *msan_aligned_alloc(uptr alignment, uptr size, BufferedStackTrace *stack);
void *msan_memalign(uptr alignment, uptr size, BufferedStackTrace *stack);
int msan_posix_memalign(void **memptr, uptr alignment, uptr size,
                        BufferedStackTrace *stack);

void InstallTrapHandler();
void InstallAtExitHandler();

const char *GetStackOriginDescr(u32 id, uptr *pc);

bool IsInSymbolizerOrUnwider();

void PrintWarning(uptr pc, uptr bp);
void PrintWarningWithOrigin(uptr pc, uptr bp, u32 origin);

// Unpoison first n function arguments.
void UnpoisonParam(uptr n);
void UnpoisonThreadLocalState();

// Returns a "chained" origin id, pointing to the given stack trace followed by
// the previous origin id.
u32 ChainOrigin(u32 id, StackTrace *stack);

const int STACK_TRACE_TAG_POISON = StackTrace::TAG_CUSTOM + 1;
const int STACK_TRACE_TAG_FIELDS = STACK_TRACE_TAG_POISON + 1;
const int STACK_TRACE_TAG_VPTR = STACK_TRACE_TAG_FIELDS + 1;

#define GET_MALLOC_STACK_TRACE                                                \
  UNINITIALIZED BufferedStackTrace stack;                                     \
  if (__msan_get_track_origins() && msan_inited) {                            \
    stack.Unwind(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME(), nullptr,    \
                 common_flags()->fast_unwind_on_malloc,                       \
                 common_flags()->malloc_context_size);                        \
  }
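// Rough usage sketch (as in the allocator paths): the macro declares a local
// `stack`, which is then passed down, e.g.
//   GET_MALLOC_STACK_TRACE;
//   void *p = msan_malloc(size, &stack);
// The unwind only happens when origin tracking is enabled and MSan has
// finished initializing.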

// For platforms that support only the slow unwinder, we restrict the store
// context size to 1, i.e. we store only the current pc. We do this because the
// slow unwinder, which is based on libunwind, is not async-signal-safe and
// causes random freezes in forking applications as well as in signal handlers.
#define GET_STORE_STACK_TRACE_PC_BP(pc, bp)                                   \
  UNINITIALIZED BufferedStackTrace stack;                                     \
  if (__msan_get_track_origins() > 1 && msan_inited) {                        \
    int size = flags()->store_context_size;                                   \
    if (!SANITIZER_CAN_FAST_UNWIND)                                           \
      size = Min(size, 1);                                                    \
    stack.Unwind(pc, bp, nullptr, common_flags()->fast_unwind_on_malloc,      \
                 size);                                                       \
  }

#define GET_STORE_STACK_TRACE \
  GET_STORE_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME())

#define GET_FATAL_STACK_TRACE_PC_BP(pc, bp)                                   \
  UNINITIALIZED BufferedStackTrace stack;                                     \
  if (msan_inited) {                                                          \
    stack.Unwind(pc, bp, nullptr, common_flags()->fast_unwind_on_fatal);      \
  }

#define GET_FATAL_STACK_TRACE \
  GET_FATAL_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME())

// Unwind the stack for a fatal error, as the passed-in `stack` is empty when
// origins are not being tracked.
#define GET_FATAL_STACK_TRACE_IF_EMPTY(STACK)                                 \
  if (msan_inited && (STACK)->size == 0) {                                    \
    (STACK)->Unwind(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME(), nullptr, \
                    common_flags()->fast_unwind_on_fatal);                    \
  }

class ScopedThreadLocalStateBackup {
 public:
  ScopedThreadLocalStateBackup() { Backup(); }
  ~ScopedThreadLocalStateBackup() { Restore(); }
  void Backup();
  void Restore();

 private:
  u64 va_arg_overflow_size_tls;
};

void MsanTSDInit(void (*destructor)(void *tsd));
void *MsanTSDGet();
void MsanTSDSet(void *tsd);
void MsanTSDDtor(void *tsd);

void InstallAtForkHandler();

}  // namespace __msan

#endif  // MSAN_H