userdata.patch

--- contrib/libs/tcmalloc/tcmalloc/internal/logging.h (index)
+++ contrib/libs/tcmalloc/tcmalloc/internal/logging.h (working tree)
@@ -67,6 +67,8 @@ struct StackTrace {
   // between the previous sample and this one
   size_t weight;
 
+  void* user_data;
+
   template <typename H>
   friend H AbslHashValue(H h, const StackTrace& t) {
     // As we use StackTrace as a key-value node in StackTraceTable, we only
--- contrib/libs/tcmalloc/tcmalloc/internal_malloc_extension.h (index)
+++ contrib/libs/tcmalloc/tcmalloc/internal_malloc_extension.h (working tree)
@@ -120,6 +120,12 @@ ABSL_ATTRIBUTE_WEAK void MallocExtension_Internal_SetMaxTotalThreadCacheBytes(
 ABSL_ATTRIBUTE_WEAK void
 MallocExtension_EnableForkSupport();
 
+ABSL_ATTRIBUTE_WEAK void
+MallocExtension_SetSampleUserDataCallbacks(
+    tcmalloc::MallocExtension::CreateSampleUserDataCallback create,
+    tcmalloc::MallocExtension::CopySampleUserDataCallback copy,
+    tcmalloc::MallocExtension::DestroySampleUserDataCallback destroy);
+
 }
 
 #endif
--- contrib/libs/tcmalloc/tcmalloc/malloc_extension.cc (index)
+++ contrib/libs/tcmalloc/tcmalloc/malloc_extension.cc (working tree)
@@ -468,6 +468,21 @@ void MallocExtension::EnableForkSupport() {
 #endif
 }
 
+void MallocExtension::SetSampleUserDataCallbacks(
+    CreateSampleUserDataCallback create,
+    CopySampleUserDataCallback copy,
+    DestroySampleUserDataCallback destroy) {
+#if ABSL_INTERNAL_HAVE_WEAK_MALLOCEXTENSION_STUBS
+  if (&MallocExtension_SetSampleUserDataCallbacks != nullptr) {
+    MallocExtension_SetSampleUserDataCallbacks(create, copy, destroy);
+  }
+#else
+  (void)create;
+  (void)copy;
+  (void)destroy;
+#endif
+}
+
 }  // namespace tcmalloc
 
 // Default implementation just returns size. The expectation is that
--- contrib/libs/tcmalloc/tcmalloc/malloc_extension.h (index)
+++ contrib/libs/tcmalloc/tcmalloc/malloc_extension.h (working tree)
@@ -94,6 +94,8 @@ class Profile final {
 
     int depth;
     void* stack[kMaxStackDepth];
+
+    void* user_data;
   };
 
   void Iterate(absl::FunctionRef<void(const Sample&)> f) const;
@@ -472,6 +474,16 @@ class MallocExtension final {
   // Enables fork support.
   // Allocator will continue to function correctly in the child, after calling fork().
   static void EnableForkSupport();
+
+  using CreateSampleUserDataCallback = void*();
+  using CopySampleUserDataCallback = void*(void*);
+  using DestroySampleUserDataCallback = void(void*);
+
+  // Sets callbacks for lifetime control of custom user data attached to allocation samples
+  static void SetSampleUserDataCallbacks(
+      CreateSampleUserDataCallback create,
+      CopySampleUserDataCallback copy,
+      DestroySampleUserDataCallback destroy);
 };
 
 }  // namespace tcmalloc
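
Illustration (not part of the patch): a minimal sketch of how an embedder might use the callback API added to malloc_extension.h above. RequestContext and current_request_id are hypothetical stand-ins for whatever per-allocation tag the embedder wants to attach; tcmalloc itself treats the pointer as opaque.

    #include <string>

    #include "tcmalloc/malloc_extension.h"

    // Hypothetical tag attached to every sampled allocation.
    struct RequestContext {
      std::string request_id;
    };

    // Hypothetical thread-local set by the application at request entry.
    thread_local std::string current_request_id;

    // Runs when an allocation is sampled; the result is stored in the
    // sample's user_data field.
    void* CreateRequestContext() {
      return new RequestContext{current_request_id};
    }

    // Runs when tcmalloc copies a sample (e.g. into a StackTraceTable bucket).
    void* CopyRequestContext(void* p) {
      if (p == nullptr) return nullptr;
      return new RequestContext(*static_cast<RequestContext*>(p));
    }

    // Runs when a sample or one of its copies is destroyed.
    void DestroyRequestContext(void* p) {
      delete static_cast<RequestContext*>(p);
    }

    void InstallSampleTagging() {
      tcmalloc::MallocExtension::SetSampleUserDataCallbacks(
          CreateRequestContext, CopyRequestContext, DestroyRequestContext);
    }

Note that the create callback runs on the allocation path of every sampled allocation (see the SampleifyAllocation hunk below), so it should be cheap and safe to call from inside the allocator.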
--- contrib/libs/tcmalloc/tcmalloc/peak_heap_tracker.cc (index)
+++ contrib/libs/tcmalloc/tcmalloc/peak_heap_tracker.cc (working tree)
@@ -55,6 +55,7 @@ void PeakHeapTracker::MaybeSaveSample() {
   StackTrace *t = peak_sampled_span_stacks_, *next = nullptr;
   while (t != nullptr) {
     next = reinterpret_cast<StackTrace*>(t->stack[kMaxStackDepth - 1]);
+    Static::DestroySampleUserData(t->user_data);
     Static::stacktrace_allocator().Delete(t);
     t = next;
   }
@@ -63,7 +64,9 @@
 
   for (Span* s : Static::sampled_objects_) {
     t = Static::stacktrace_allocator().New();
-    *t = *s->sampled_stack();
+    StackTrace* sampled_stack = s->sampled_stack();
+    *t = *sampled_stack;
+    t->user_data = Static::CopySampleUserData(sampled_stack->user_data);
     if (t->depth == kMaxStackDepth) {
       t->depth = kMaxStackDepth - 1;
     }
--- contrib/libs/tcmalloc/tcmalloc/stack_trace_table.cc (index)
+++ contrib/libs/tcmalloc/tcmalloc/stack_trace_table.cc (working tree)
@@ -73,6 +73,7 @@ StackTraceTable::~StackTraceTable() {
     Bucket* b = table_[i];
     while (b != nullptr) {
       Bucket* next = b->next;
+      Static::DestroySampleUserData(b->trace.user_data);
       Static::bucket_allocator().Delete(b);
       b = next;
     }
@@ -104,6 +105,7 @@ void StackTraceTable::AddTrace(double count, const StackTrace& t) {
     b = Static::bucket_allocator().New();
     b->hash = h;
     b->trace = t;
+    b->trace.user_data = Static::CopySampleUserData(t.user_data);
     b->count = count;
     b->total_weight = t.weight * count;
     b->next = table_[idx];
@@ -135,6 +137,8 @@ void StackTraceTable::Iterate(
       e.requested_alignment = b->trace.requested_alignment;
       e.allocated_size = allocated_size;
 
+      e.user_data = b->trace.user_data;
+
       e.depth = b->trace.depth;
       static_assert(kMaxStackDepth <= Profile::Sample::kMaxStackDepth,
                     "Profile stack size smaller than internal stack sizes");
--- contrib/libs/tcmalloc/tcmalloc/static_vars.cc (index)
+++ contrib/libs/tcmalloc/tcmalloc/static_vars.cc (working tree)
@@ -60,6 +60,12 @@ ABSL_CONST_INIT PageHeapAllocator<StackTraceTable::Bucket>
 ABSL_CONST_INIT std::atomic<bool> Static::inited_{false};
 ABSL_CONST_INIT bool Static::cpu_cache_active_ = false;
 ABSL_CONST_INIT bool Static::fork_support_enabled_ = false;
+ABSL_CONST_INIT Static::CreateSampleUserDataCallback*
+    Static::create_sample_user_data_callback_ = nullptr;
+ABSL_CONST_INIT Static::CopySampleUserDataCallback*
+    Static::copy_sample_user_data_callback_ = nullptr;
+ABSL_CONST_INIT Static::DestroySampleUserDataCallback*
+    Static::destroy_sample_user_data_callback_ = nullptr;
 ABSL_CONST_INIT Static::PageAllocatorStorage Static::page_allocator_;
 ABSL_CONST_INIT PageMap Static::pagemap_;
 ABSL_CONST_INIT absl::base_internal::SpinLock guarded_page_lock(
--- contrib/libs/tcmalloc/tcmalloc/static_vars.h (index)
+++ contrib/libs/tcmalloc/tcmalloc/static_vars.h (working tree)
@@ -130,6 +130,34 @@ class Static {
   static bool ForkSupportEnabled() { return fork_support_enabled_; }
   static void EnableForkSupport() { fork_support_enabled_ = true; }
 
+  using CreateSampleUserDataCallback = void*();
+  using CopySampleUserDataCallback = void*(void*);
+  using DestroySampleUserDataCallback = void(void*);
+
+  static void SetSampleUserDataCallbacks(
+      CreateSampleUserDataCallback create,
+      CopySampleUserDataCallback copy,
+      DestroySampleUserDataCallback destroy) {
+    create_sample_user_data_callback_ = create;
+    copy_sample_user_data_callback_ = copy;
+    destroy_sample_user_data_callback_ = destroy;
+  }
+
+  static void* CreateSampleUserData() {
+    if (create_sample_user_data_callback_)
+      return create_sample_user_data_callback_();
+    return nullptr;
+  }
+  static void* CopySampleUserData(void* user_data) {
+    if (copy_sample_user_data_callback_)
+      return copy_sample_user_data_callback_(user_data);
+    return nullptr;
+  }
+  static void DestroySampleUserData(void* user_data) {
+    if (destroy_sample_user_data_callback_)
+      destroy_sample_user_data_callback_(user_data);
+  }
+
   static bool ABSL_ATTRIBUTE_ALWAYS_INLINE IsOnFastPath() {
     return
 #ifndef TCMALLOC_DEPRECATED_PERTHREAD
@@ -176,6 +204,9 @@ class Static {
   ABSL_CONST_INIT static std::atomic<bool> inited_;
   static bool cpu_cache_active_;
   static bool fork_support_enabled_;
+  static CreateSampleUserDataCallback* create_sample_user_data_callback_;
+  static CopySampleUserDataCallback* copy_sample_user_data_callback_;
+  static DestroySampleUserDataCallback* destroy_sample_user_data_callback_;
   ABSL_CONST_INIT static PeakHeapTracker peak_heap_tracker_;
   ABSL_CONST_INIT static NumaTopology<kNumaPartitions, kNumBaseClasses>
       numa_topology_;
--- contrib/libs/tcmalloc/tcmalloc/tcmalloc.cc (index)
+++ contrib/libs/tcmalloc/tcmalloc/tcmalloc.cc (working tree)
@@ -1151,6 +1151,13 @@ void TCMallocPostFork() {
   }
 }
 
+extern "C" void MallocExtension_SetSampleUserDataCallbacks(
+    MallocExtension::CreateSampleUserDataCallback create,
+    MallocExtension::CopySampleUserDataCallback copy,
+    MallocExtension::DestroySampleUserDataCallback destroy) {
+  Static::SetSampleUserDataCallbacks(create, copy, destroy);
+}
+
 // nallocx slow path.
 // Moved to a separate function because size_class_with_alignment is not inlined
 // which would cause nallocx to become non-leaf function with stack frame and
@@ -1500,6 +1507,7 @@ static void* SampleifyAllocation(size_t requested_size, size_t weight,
   tmp.requested_alignment = requested_alignment;
   tmp.allocated_size = allocated_size;
   tmp.weight = weight;
+  tmp.user_data = Static::CreateSampleUserData();
 
   {
     absl::base_internal::SpinLockHolder h(&pageheap_lock);
@@ -1629,6 +1637,7 @@ static void do_free_pages(void* ptr, const PageId p) {
                        1);
     }
     notify_sampled_alloc = true;
+    Static::DestroySampleUserData(st->user_data);
     Static::stacktrace_allocator().Delete(st);
   }
   if (IsSampledMemory(ptr)) {
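
Illustration (not part of the patch): reading the tag back out of a heap profile through the user_data field added to Profile::Sample above. RequestContext is the same hypothetical type as in the earlier sketch; SnapshotCurrent and Iterate are existing MallocExtension/Profile APIs.

    #include <cstdio>
    #include <string>

    #include "tcmalloc/malloc_extension.h"

    // Same hypothetical tag type as in the registration sketch.
    struct RequestContext {
      std::string request_id;
    };

    void DumpTaggedSamples() {
      const tcmalloc::Profile profile = tcmalloc::MallocExtension::SnapshotCurrent(
          tcmalloc::ProfileType::kHeap);
      profile.Iterate([](const tcmalloc::Profile::Sample& sample) {
        // user_data holds whatever the create callback returned when this
        // allocation was sampled (a copy made by the copy callback when the
        // profile table was built), or nullptr if no callbacks were registered.
        const auto* ctx = static_cast<const RequestContext*>(sample.user_data);
        if (ctx != nullptr) {
          std::printf("%lld bytes attributed to request %s\n",
                      static_cast<long long>(sample.sum),
                      ctx->request_id.c_str());
        }
      });
    }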