//===-- msan_poisoning.cpp --------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of MemorySanitizer.
//
//===----------------------------------------------------------------------===//
  12. #include "msan_poisoning.h"
  13. #include "interception/interception.h"
  14. #include "msan_origin.h"
  15. #include "msan_thread.h"
  16. #include "sanitizer_common/sanitizer_common.h"
  17. DECLARE_REAL(void *, memset, void *dest, int c, uptr n)
  18. DECLARE_REAL(void *, memcpy, void *dest, const void *src, uptr n)
  19. DECLARE_REAL(void *, memmove, void *dest, const void *src, uptr n)
  20. namespace __msan {
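
// Returns the origin of the first poisoned byte in [addr, addr + size),
// or 0 if the entire range is initialized.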
u32 GetOriginIfPoisoned(uptr addr, uptr size) {
  unsigned char *s = (unsigned char *)MEM_TO_SHADOW(addr);
  for (uptr i = 0; i < size; ++i)
    if (s[i]) return *(u32 *)SHADOW_TO_ORIGIN(((uptr)s + i) & ~3UL);
  return 0;
}
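
// For each poisoned byte in the source shadow range [src_shadow,
// src_shadow + size), stores src_origin as the origin of the corresponding
// destination byte at addr.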
void SetOriginIfPoisoned(uptr addr, uptr src_shadow, uptr size,
                         u32 src_origin) {
  uptr dst_s = MEM_TO_SHADOW(addr);
  uptr src_s = src_shadow;
  uptr src_s_end = src_s + size;
  for (; src_s < src_s_end; ++dst_s, ++src_s)
    if (*(u8 *)src_s) *(u32 *)SHADOW_TO_ORIGIN(dst_s & ~3UL) = src_origin;
}
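
// Copies the origins of size bytes from src to dst. Origin slots are 4 bytes
// per 4 bytes of application memory, so the unaligned head and tail slots of
// the destination are overwritten only if the corresponding source bytes are
// actually poisoned.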
void CopyOrigin(const void *dst, const void *src, uptr size,
                StackTrace *stack) {
  if (!MEM_IS_APP(dst) || !MEM_IS_APP(src)) return;

  uptr d = (uptr)dst;
  uptr beg = d & ~3UL;
  // Copy left unaligned origin if that memory is poisoned.
  if (beg < d) {
    u32 o = GetOriginIfPoisoned((uptr)src, beg + 4 - d);
    if (o) {
      if (__msan_get_track_origins() > 1) o = ChainOrigin(o, stack);
      *(u32 *)MEM_TO_ORIGIN(beg) = o;
    }
    beg += 4;
  }

  uptr end = (d + size) & ~3UL;
  // If both ends fall into the same 4-byte slot, we are done.
  if (end < beg) return;

  // Copy right unaligned origin if that memory is poisoned.
  if (end < d + size) {
    u32 o = GetOriginIfPoisoned((uptr)src + (end - d), (d + size) - end);
    if (o) {
      if (__msan_get_track_origins() > 1) o = ChainOrigin(o, stack);
      *(u32 *)MEM_TO_ORIGIN(end) = o;
    }
  }

  if (beg < end) {
    // Align src up.
    uptr s = ((uptr)src + 3) & ~3UL;
    // FIXME: factor out to msan_copy_origin_aligned
    if (__msan_get_track_origins() > 1) {
      u32 *src = (u32 *)MEM_TO_ORIGIN(s);
      u32 *src_s = (u32 *)MEM_TO_SHADOW(s);
      u32 *src_end = (u32 *)MEM_TO_ORIGIN(s + (end - beg));
      u32 *dst = (u32 *)MEM_TO_ORIGIN(beg);
      u32 src_o = 0;
      u32 dst_o = 0;
      for (; src < src_end; ++src, ++src_s, ++dst) {
        if (!*src_s) continue;
        if (*src != src_o) {
          src_o = *src;
          dst_o = ChainOrigin(src_o, stack);
        }
        *dst = dst_o;
      }
    } else {
      REAL(memcpy)((void *)MEM_TO_ORIGIN(beg), (void *)MEM_TO_ORIGIN(s),
                   end - beg);
    }
  }
}
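
// Same as CopyOrigin, but processes the range from the high end to the low
// end, so that overlapping moves with dst above src are handled correctly
// (memmove semantics).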
void ReverseCopyOrigin(const void *dst, const void *src, uptr size,
                       StackTrace *stack) {
  if (!MEM_IS_APP(dst) || !MEM_IS_APP(src)) return;

  uptr d = (uptr)dst;
  uptr end = (d + size) & ~3UL;

  // Copy right unaligned origin if that memory is poisoned.
  if (end < d + size) {
    u32 o = GetOriginIfPoisoned((uptr)src + (end - d), (d + size) - end);
    if (o) {
      if (__msan_get_track_origins() > 1) o = ChainOrigin(o, stack);
      *(u32 *)MEM_TO_ORIGIN(end) = o;
    }
  }

  uptr beg = d & ~3UL;
  if (beg + 4 < end) {
    // Align src up.
    uptr s = ((uptr)src + 3) & ~3UL;
    if (__msan_get_track_origins() > 1) {
      u32 *src = (u32 *)MEM_TO_ORIGIN(s + end - beg - 4);
      u32 *src_s = (u32 *)MEM_TO_SHADOW(s + end - beg - 4);
      u32 *src_begin = (u32 *)MEM_TO_ORIGIN(s);
      u32 *dst = (u32 *)MEM_TO_ORIGIN(end - 4);
      u32 src_o = 0;
      u32 dst_o = 0;
      for (; src >= src_begin; --src, --src_s, --dst) {
        if (!*src_s) continue;
        if (*src != src_o) {
          src_o = *src;
          dst_o = ChainOrigin(src_o, stack);
        }
        *dst = dst_o;
      }
    } else {
      REAL(memmove)((void *)MEM_TO_ORIGIN(beg), (void *)MEM_TO_ORIGIN(s),
                    end - beg - 4);
    }
  }

  // Copy left unaligned origin if that memory is poisoned.
  if (beg < d) {
    u32 o = GetOriginIfPoisoned((uptr)src, beg + 4 - d);
    if (o) {
      if (__msan_get_track_origins() > 1) o = ChainOrigin(o, stack);
      *(u32 *)MEM_TO_ORIGIN(beg) = o;
    }
  }
}
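
// Copies the origins of size bytes from src to dst with memmove semantics:
// the copy direction is chosen so that overlapping origin ranges come out
// correct.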
void MoveOrigin(const void *dst, const void *src, uptr size,
                StackTrace *stack) {
  // If the destination origin range overlaps with the source origin range,
  // move origins by copying them in reverse order; otherwise, copy origins
  // in the normal order.
  uptr src_aligned_beg = reinterpret_cast<uptr>(src) & ~3UL;
  uptr src_aligned_end = (reinterpret_cast<uptr>(src) + size) & ~3UL;
  uptr dst_aligned_beg = reinterpret_cast<uptr>(dst) & ~3UL;
  if (dst_aligned_beg < src_aligned_end && dst_aligned_beg >= src_aligned_beg)
    return ReverseCopyOrigin(dst, src, size, stack);
  return CopyOrigin(dst, src, size, stack);
}
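
// memmove-style transfer of the shadow and origin metadata for size bytes
// from src to dst.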
void MoveShadowAndOrigin(const void *dst, const void *src, uptr size,
                         StackTrace *stack) {
  if (!MEM_IS_APP(dst)) return;
  if (!MEM_IS_APP(src)) return;
  if (src == dst) return;
  // MoveOrigin transfers origins by referring to their shadows, so the
  // origins must be moved before the shadows are.
  if (__msan_get_track_origins())
    MoveOrigin(dst, src, size, stack);
  REAL(memmove)((void *)MEM_TO_SHADOW((uptr)dst),
                (void *)MEM_TO_SHADOW((uptr)src), size);
}
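
// memcpy-style counterpart of MoveShadowAndOrigin: copies the shadow and
// origin metadata for size bytes from src to dst.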
void CopyShadowAndOrigin(const void *dst, const void *src, uptr size,
                         StackTrace *stack) {
  if (!MEM_IS_APP(dst)) return;
  if (!MEM_IS_APP(src)) return;
  // Because the 4-byte-aligned origin range is slightly larger than the app
  // range, even a memcpy of non-overlapping app memory may produce
  // overlapping origin ranges, so MoveOrigin is used rather than CopyOrigin.
  REAL(memcpy)((void *)MEM_TO_SHADOW((uptr)dst),
               (void *)MEM_TO_SHADOW((uptr)src), size);
  if (__msan_get_track_origins())
    MoveOrigin(dst, src, size, stack);
}
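
// Copies size bytes of application memory from src to dst together with the
// corresponding shadow and origin metadata.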
void CopyMemory(void *dst, const void *src, uptr size, StackTrace *stack) {
  REAL(memcpy)(dst, src, size);
  CopyShadowAndOrigin(dst, src, size, stack);
}
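
// Fills the shadow of [ptr, ptr + size) with value. Large zero fills are
// done by remapping whole shadow pages with MmapFixedSuperNoReserve instead
// of memset, so those pages need not be touched.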
void SetShadow(const void *ptr, uptr size, u8 value) {
  uptr PageSize = GetPageSizeCached();
  uptr shadow_beg = MEM_TO_SHADOW(ptr);
  uptr shadow_end = shadow_beg + size;
  if (value ||
      shadow_end - shadow_beg < common_flags()->clear_shadow_mmap_threshold) {
    REAL(memset)((void *)shadow_beg, value, shadow_end - shadow_beg);
  } else {
    uptr page_beg = RoundUpTo(shadow_beg, PageSize);
    uptr page_end = RoundDownTo(shadow_end, PageSize);

    if (page_beg >= page_end) {
      REAL(memset)((void *)shadow_beg, 0, shadow_end - shadow_beg);
    } else {
      if (page_beg != shadow_beg) {
        REAL(memset)((void *)shadow_beg, 0, page_beg - shadow_beg);
      }
      if (page_end != shadow_end) {
        REAL(memset)((void *)page_end, 0, shadow_end - page_end);
      }
      if (!MmapFixedSuperNoReserve(page_beg, page_end - page_beg))
        Die();

      if (__msan_get_track_origins()) {
        // No need to set origin for zero shadow, but we can release pages.
        uptr origin_beg = RoundUpTo(MEM_TO_ORIGIN(ptr), PageSize);
        if (!MmapFixedSuperNoReserve(origin_beg, page_end - page_beg))
          Die();
      }
    }
  }
}
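
// Fills the origin slots covering [dst, dst + size) with the 32-bit origin
// id.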
void SetOrigin(const void *dst, uptr size, u32 origin) {
  // Origin mapping is 4 bytes per 4 bytes of application memory.
  // Here we extend the range such that its left and right bounds are both
  // 4-byte aligned.
  uptr x = MEM_TO_ORIGIN((uptr)dst);
  uptr beg = x & ~3UL;               // align down.
  uptr end = (x + size + 3) & ~3UL;  // align up.
  u64 origin64 = ((u64)origin << 32) | origin;
  // This is like memset, but the value is 32-bit. We unroll by 2 to write
  // 64 bits at once. May want to unroll further to get 128-bit stores.
  if (beg & 7ULL) {
    *(u32 *)beg = origin;
    beg += 4;
  }
  for (uptr addr = beg; addr < (end & ~7UL); addr += 8)
    *(u64 *)addr = origin64;
  if (end & 7ULL) *(u32 *)(end - 4) = origin;
}
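
// Marks [dst, dst + size) as uninitialized and, when origin tracking is on,
// records a fresh heap origin for the range. Origins are not recorded when
// called from a signal handler.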
void PoisonMemory(const void *dst, uptr size, StackTrace *stack) {
  SetShadow(dst, size, (u8)-1);

  if (__msan_get_track_origins()) {
    MsanThread *t = GetCurrentThread();
    if (t && t->InSignalHandler())
      return;
    Origin o = Origin::CreateHeapOrigin(stack);
    SetOrigin(dst, size, o.raw_id());
  }
}

}  // namespace __msan