asan_fake_stack.cpp 13 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328
  1. //===-- asan_fake_stack.cpp -----------------------------------------------===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // This file is a part of AddressSanitizer, an address sanity checker.
  10. //
  11. // FakeStack is used to detect use-after-return bugs.
  12. //===----------------------------------------------------------------------===//
  13. #include "asan_allocator.h"
  14. #include "asan_poisoning.h"
  15. #include "asan_thread.h"
  16. namespace __asan {
// kMagicN replicates the stack-after-return shadow magic byte into 1-, 2-,
// 4- and 8-byte-wide patterns, so that SetShadow can store a whole u64 of
// shadow per iteration instead of poisoning byte-by-byte.
static const u64 kMagic1 = kAsanStackAfterReturnMagic;
static const u64 kMagic2 = (kMagic1 << 8) | kMagic1;
static const u64 kMagic4 = (kMagic2 << 16) | kMagic2;
static const u64 kMagic8 = (kMagic4 << 32) | kMagic4;

// Dynamic allocas get 32-byte redzones; the mask is used to round addresses
// up to the next kAllocaRedzoneSize boundary in __asan_alloca_poison.
static const u64 kAllocaRedzoneSize = 32UL;
static const u64 kAllocaRedzoneMask = 31UL;
  23. // For small size classes inline PoisonShadow for better performance.
  24. ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
  25. u64 *shadow = reinterpret_cast<u64*>(MemToShadow(ptr));
  26. if (ASAN_SHADOW_SCALE == 3 && class_id <= 6) {
  27. // This code expects ASAN_SHADOW_SCALE=3.
  28. for (uptr i = 0; i < (((uptr)1) << class_id); i++) {
  29. shadow[i] = magic;
  30. // Make sure this does not become memset.
  31. SanitizerBreakOptimization(nullptr);
  32. }
  33. } else {
  34. // The size class is too big, it's cheaper to poison only size bytes.
  35. PoisonShadow(ptr, size, static_cast<u8>(magic));
  36. }
  37. }
  38. FakeStack *FakeStack::Create(uptr stack_size_log) {
  39. static uptr kMinStackSizeLog = 16;
  40. static uptr kMaxStackSizeLog = FIRST_32_SECOND_64(24, 28);
  41. if (stack_size_log < kMinStackSizeLog)
  42. stack_size_log = kMinStackSizeLog;
  43. if (stack_size_log > kMaxStackSizeLog)
  44. stack_size_log = kMaxStackSizeLog;
  45. uptr size = RequiredSize(stack_size_log);
  46. FakeStack *res = reinterpret_cast<FakeStack *>(
  47. flags()->uar_noreserve ? MmapNoReserveOrDie(size, "FakeStack")
  48. : MmapOrDie(size, "FakeStack"));
  49. res->stack_size_log_ = stack_size_log;
  50. u8 *p = reinterpret_cast<u8 *>(res);
  51. VReport(1,
  52. "T%d: FakeStack created: %p -- %p stack_size_log: %zd; "
  53. "mmapped %zdK, noreserve=%d \n",
  54. GetCurrentTidOrInvalid(), (void *)p,
  55. (void *)(p + FakeStack::RequiredSize(stack_size_log)), stack_size_log,
  56. size >> 10, flags()->uar_noreserve);
  57. return res;
  58. }
  59. void FakeStack::Destroy(int tid) {
  60. PoisonAll(0);
  61. if (Verbosity() >= 2) {
  62. InternalScopedString str;
  63. for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++)
  64. str.AppendF("%zd: %zd/%zd; ", class_id, hint_position_[class_id],
  65. NumberOfFrames(stack_size_log(), class_id));
  66. Report("T%d: FakeStack destroyed: %s\n", tid, str.data());
  67. }
  68. uptr size = RequiredSize(stack_size_log_);
  69. FlushUnneededASanShadowMemory(reinterpret_cast<uptr>(this), size);
  70. UnmapOrDie(this, size);
  71. }
  72. void FakeStack::PoisonAll(u8 magic) {
  73. PoisonShadow(reinterpret_cast<uptr>(this), RequiredSize(stack_size_log()),
  74. magic);
  75. }
#if !defined(_MSC_VER) || defined(__clang__)
ALWAYS_INLINE USED
#endif
// Grabs a free frame of the given size class, marks it as taken in the flags
// array, records the caller's real stack pointer in it, and returns it.
// Returns nullptr when every frame of this class is currently in use.
FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
                               uptr real_stack) {
  CHECK_LT(class_id, kNumberOfSizeClasses);
  // A recent no-return event (longjmp/throw) may have leaked frames;
  // reclaim them before searching for a free slot.
  if (needs_gc_)
    GC(real_stack);
  // hint_position_ rotates through the frames so consecutive allocations
  // probe different slots.
  uptr &hint_position = hint_position_[class_id];
  const int num_iter = NumberOfFrames(stack_size_log, class_id);
  u8 *flags = GetFlags(stack_size_log, class_id);
  for (int i = 0; i < num_iter; i++) {
    uptr pos = ModuloNumberOfFrames(stack_size_log, class_id, hint_position++);
    // This part is tricky. On one hand, checking and setting flags[pos]
    // should be atomic to ensure async-signal safety. But on the other hand,
    // if the signal arrives between checking and setting flags[pos], the
    // signal handler's fake stack will start from a different hint_position
    // and so will not touch this particular byte. So, it is safe to do this
    // with regular non-atomic load and store (at least I was not able to make
    // this code crash).
    if (flags[pos]) continue;
    flags[pos] = 1;
    FakeFrame *res = reinterpret_cast<FakeFrame *>(
        GetFrame(stack_size_log, class_id, pos));
    res->real_stack = real_stack;
    // Stash the flag's address inside the frame so Deallocate can clear it
    // without recomputing the position.
    *SavedFlagPtr(reinterpret_cast<uptr>(res), class_id) = &flags[pos];
    return res;
  }
  return nullptr; // We are out of fake stack.
}
  106. uptr FakeStack::AddrIsInFakeStack(uptr ptr, uptr *frame_beg, uptr *frame_end) {
  107. uptr stack_size_log = this->stack_size_log();
  108. uptr beg = reinterpret_cast<uptr>(GetFrame(stack_size_log, 0, 0));
  109. uptr end = reinterpret_cast<uptr>(this) + RequiredSize(stack_size_log);
  110. if (ptr < beg || ptr >= end) return 0;
  111. uptr class_id = (ptr - beg) >> stack_size_log;
  112. uptr base = beg + (class_id << stack_size_log);
  113. CHECK_LE(base, ptr);
  114. CHECK_LT(ptr, base + (((uptr)1) << stack_size_log));
  115. uptr pos = (ptr - base) >> (kMinStackFrameSizeLog + class_id);
  116. uptr res = base + pos * BytesInSizeClass(class_id);
  117. *frame_end = res + BytesInSizeClass(class_id);
  118. *frame_beg = res + sizeof(FakeFrame);
  119. return res;
  120. }
// Called on no-return events (longjmp, throw, etc.): frames may have been
// abandoned without OnFree(), so request a GC pass on the next Allocate().
void FakeStack::HandleNoReturn() {
  needs_gc_ = true;
}
// Hack: The statement below is not true if we take into account sigaltstack or
// makecontext. It should be possible to make GC to discard wrong stack frame if
// we use these tools. For now, let's support the simplest case and allow GC to
// discard only frames from the default stack, assuming there is no buffer on
// the stack which is used for makecontext or sigaltstack.
//
// When throw, longjmp or some such happens we don't call OnFree() and
// as the result may leak one or more fake frames, but the good news is that
// we are notified about all such events by HandleNoReturn().
// If we recently had such no-return event we need to collect garbage frames.
// We do it based on their 'real_stack' values -- everything that is lower
// than the current real_stack is garbage.
NOINLINE void FakeStack::GC(uptr real_stack) {
  AsanThread *curr_thread = GetCurrentThread();
  if (!curr_thread)
    return;  // Try again when we have a thread.
  auto top = curr_thread->stack_top();
  auto bottom = curr_thread->stack_bottom();
  // Only collect when called from within the thread's default stack bounds.
  if (real_stack < bottom || real_stack > top)
    return;  // Not the default stack.
  // Walk every allocated frame of every size class and free those whose
  // owner stack frame has already returned (real_stack below ours).
  for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
    u8 *flags = GetFlags(stack_size_log(), class_id);
    for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
         i++) {
      if (flags[i] == 0) continue;  // not allocated.
      FakeFrame *ff = reinterpret_cast<FakeFrame *>(
          GetFrame(stack_size_log(), class_id, i));
      // GC only on the default stack.
      if (bottom < ff->real_stack && ff->real_stack < real_stack) {
        flags[i] = 0;
        // Poison the frame, so that any access will be reported as UAR.
        SetShadow(reinterpret_cast<uptr>(ff), BytesInSizeClass(class_id),
                  class_id, kMagic8);
      }
    }
  }
  needs_gc_ = false;
}
  162. void FakeStack::ForEachFakeFrame(RangeIteratorCallback callback, void *arg) {
  163. for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
  164. u8 *flags = GetFlags(stack_size_log(), class_id);
  165. for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
  166. i++) {
  167. if (flags[i] == 0) continue; // not allocated.
  168. FakeFrame *ff = reinterpret_cast<FakeFrame *>(
  169. GetFrame(stack_size_log(), class_id, i));
  170. uptr begin = reinterpret_cast<uptr>(ff);
  171. callback(begin, begin + FakeStack::BytesInSizeClass(class_id), arg);
  172. }
  173. }
  174. }
  175. #if (SANITIZER_LINUX && !SANITIZER_ANDROID) || SANITIZER_FUCHSIA
  176. static THREADLOCAL FakeStack *fake_stack_tls;
  177. FakeStack *GetTLSFakeStack() {
  178. return fake_stack_tls;
  179. }
  180. void SetTLSFakeStack(FakeStack *fs) {
  181. fake_stack_tls = fs;
  182. }
  183. #else
  184. FakeStack *GetTLSFakeStack() { return 0; }
  185. void SetTLSFakeStack(FakeStack *fs) { }
  186. #endif // (SANITIZER_LINUX && !SANITIZER_ANDROID) || SANITIZER_FUCHSIA
  187. static FakeStack *GetFakeStack() {
  188. AsanThread *t = GetCurrentThread();
  189. if (!t) return nullptr;
  190. return t->get_or_create_fake_stack();
  191. }
  192. static FakeStack *GetFakeStackFast() {
  193. if (FakeStack *fs = GetTLSFakeStack())
  194. return fs;
  195. if (!__asan_option_detect_stack_use_after_return)
  196. return nullptr;
  197. return GetFakeStack();
  198. }
  199. static FakeStack *GetFakeStackFastAlways() {
  200. if (FakeStack *fs = GetTLSFakeStack())
  201. return fs;
  202. return GetFakeStack();
  203. }
  204. static ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size) {
  205. FakeStack *fs = GetFakeStackFast();
  206. if (!fs)
  207. return 0;
  208. FakeFrame *ff =
  209. fs->Allocate(fs->stack_size_log(), class_id, GET_CURRENT_FRAME());
  210. if (!ff)
  211. return 0; // Out of fake stack.
  212. uptr ptr = reinterpret_cast<uptr>(ff);
  213. SetShadow(ptr, size, class_id, 0);
  214. return ptr;
  215. }
  216. static ALWAYS_INLINE uptr OnMallocAlways(uptr class_id, uptr size) {
  217. FakeStack *fs = GetFakeStackFastAlways();
  218. if (!fs)
  219. return 0;
  220. FakeFrame *ff =
  221. fs->Allocate(fs->stack_size_log(), class_id, GET_CURRENT_FRAME());
  222. if (!ff)
  223. return 0; // Out of fake stack.
  224. uptr ptr = reinterpret_cast<uptr>(ff);
  225. SetShadow(ptr, size, class_id, 0);
  226. return ptr;
  227. }
// Releases the fake frame and repoisons its shadow with the after-return
// magic so later accesses are reported as stack-use-after-return.
static ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size) {
  FakeStack::Deallocate(ptr, class_id);
  SetShadow(ptr, size, class_id, kMagic8);
}
  232. } // namespace __asan
  233. // ---------------------- Interface ---------------- {{{1
  234. using namespace __asan;
// For each frame size class, defines the three entry points the compiler
// instrumentation emits calls to: __asan_stack_malloc_<id> (honors the
// runtime UAR flag via GetFakeStackFast), __asan_stack_malloc_always_<id>
// (bypasses that flag via GetFakeStackFastAlways), and
// __asan_stack_free_<id>.
#define DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(class_id)                      \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                               \
      __asan_stack_malloc_##class_id(uptr size) {                             \
    return OnMalloc(class_id, size);                                          \
  }                                                                           \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                               \
      __asan_stack_malloc_always_##class_id(uptr size) {                      \
    return OnMallocAlways(class_id, size);                                    \
  }                                                                           \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE void __asan_stack_free_##class_id( \
      uptr ptr, uptr size) {                                                  \
    OnFree(ptr, class_id, size);                                              \
  }

// One set of entry points per size class 0..10.
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(0)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(1)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(2)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(3)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(4)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(5)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(6)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(7)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(8)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(9)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(10)
  259. extern "C" {
// TODO: remove this method and fix tests that use it by setting
// -asan-use-after-return=never, after modal UAR flag lands
// (https://github.com/google/sanitizers/issues/1394)
SANITIZER_INTERFACE_ATTRIBUTE
// Returns the current thread's fake stack (creating it if UAR detection is
// enabled), or nullptr when none is available.
void *__asan_get_current_fake_stack() { return GetFakeStackFast(); }
  265. SANITIZER_INTERFACE_ATTRIBUTE
  266. void *__asan_addr_is_in_fake_stack(void *fake_stack, void *addr, void **beg,
  267. void **end) {
  268. FakeStack *fs = reinterpret_cast<FakeStack*>(fake_stack);
  269. if (!fs) return nullptr;
  270. uptr frame_beg, frame_end;
  271. FakeFrame *frame = reinterpret_cast<FakeFrame *>(fs->AddrIsInFakeStack(
  272. reinterpret_cast<uptr>(addr), &frame_beg, &frame_end));
  273. if (!frame) return nullptr;
  274. if (frame->magic != kCurrentStackFrameMagic)
  275. return nullptr;
  276. if (beg) *beg = reinterpret_cast<void*>(frame_beg);
  277. if (end) *end = reinterpret_cast<void*>(frame_end);
  278. return reinterpret_cast<void*>(frame->real_stack);
  279. }
SANITIZER_INTERFACE_ATTRIBUTE
// Poisons the redzones around a dynamic alloca of `size` bytes at `addr`:
// a full kAllocaRedzoneSize left redzone below addr, and a right redzone
// from addr + size up through the next kAllocaRedzoneSize boundary plus one
// more full redzone block.
void __asan_alloca_poison(uptr addr, uptr size) {
  uptr LeftRedzoneAddr = addr - kAllocaRedzoneSize;
  // End of the user-addressable region.
  uptr PartialRzAddr = addr + size;
  // First kAllocaRedzoneSize-aligned address at or above the region's end.
  uptr RightRzAddr = (PartialRzAddr + kAllocaRedzoneMask) & ~kAllocaRedzoneMask;
  // Round down to shadow granularity for the partially-addressable tail.
  uptr PartialRzAligned = PartialRzAddr & ~(ASAN_SHADOW_GRANULARITY - 1);
  FastPoisonShadow(LeftRedzoneAddr, kAllocaRedzoneSize, kAsanAllocaLeftMagic);
  FastPoisonShadowPartialRightRedzone(
      PartialRzAligned, PartialRzAddr % ASAN_SHADOW_GRANULARITY,
      RightRzAddr - PartialRzAligned, kAsanAllocaRightMagic);
  FastPoisonShadow(RightRzAddr, kAllocaRedzoneSize, kAsanAllocaRightMagic);
}
SANITIZER_INTERFACE_ATTRIBUTE
// Unpoisons the shadow for the alloca range [top, bottom); `top` is the
// lower address (the arithmetic below requires bottom >= top). No-op when
// top is null or the range is inverted. Uses the intercepted real memset so
// the write itself is not instrumented.
void __asan_allocas_unpoison(uptr top, uptr bottom) {
  if ((!top) || (top > bottom)) return;
  REAL(memset)
  (reinterpret_cast<void *>(MemToShadow(top)), 0,
   (bottom - top) / ASAN_SHADOW_GRANULARITY);
}
  299. } // extern "C"