ref_counted-inl.h 9.5 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325
  1. #ifndef REF_COUNTED_INL_H_
  2. #error "Direct inclusion of this file is not allowed, include ref_counted.h"
  3. // For the sake of sane code completion.
  4. #include "ref_counted.h"
  5. #endif
  6. #include "tagged_ptr.h"
  7. #include <util/system/sanitizers.h>
  8. #include <stdlib.h>
  9. namespace NYT {
  10. ////////////////////////////////////////////////////////////////////////////////
  11. // TODO(babenko): move to hazard pointers
  12. void RetireHazardPointer(TPackedPtr packedPtr, void (*reclaimer)(TPackedPtr));
  13. ////////////////////////////////////////////////////////////////////////////////
  14. namespace NDetail {
  15. ////////////////////////////////////////////////////////////////////////////////
  16. template <class T, class = void>
  17. struct TFreeMemory
  18. {
  19. static void Do(void* ptr)
  20. {
  21. #ifdef _win_
  22. ::_aligned_free(ptr);
  23. #else
  24. ::free(ptr);
  25. #endif
  26. }
  27. };
  28. template <class T>
  29. struct TFreeMemory<T, std::void_t<typename T::TAllocator>>
  30. {
  31. static void Do(void* ptr)
  32. {
  33. using TAllocator = typename T::TAllocator;
  34. TAllocator::Free(ptr);
  35. }
  36. };
  37. ////////////////////////////////////////////////////////////////////////////////
// Releases the memory of a destroyed ref-counted object.
// The primary template frees the block immediately.
template <class T, class = void>
struct TMemoryReleaser
{
    static void Do(void* ptr, ui16 /*offset*/)
    {
        TFreeMemory<T>::Do(ptr);
    }
};

// Specialization for types that opt into hazard pointers (T::EnableHazard).
// Instead of freeing eagerly, the block is handed to RetireHazardPointer;
// presumably reclamation is deferred until no hazard pointer references
// the object — confirm against the hazard-pointer implementation.
template <class T>
struct TMemoryReleaser<T, std::enable_if_t<T::EnableHazard>>
{
    static void Do(void* ptr, ui16 offset)
    {
        // Base pointer is used in HazardPtr as the identity of object,
        // so the retired pointer is the base ptr tagged with the offset
        // back to the start of the allocation.
        auto packedPtr = TTaggedPtr<char>{static_cast<char*>(ptr) + offset, offset}.Pack();
        RetireHazardPointer(packedPtr, [] (TPackedPtr packedPtr) {
            // Base ptr and the beginning of allocated memory region may differ.
            auto [ptr, offset] = TTaggedPtr<char>::Unpack(packedPtr);
            TFreeMemory<T>::Do(ptr - offset);
        });
    }
};
  60. ////////////////////////////////////////////////////////////////////////////////
// Destroys a ref-counted object derived from TRefCountedBase and releases its
// memory once no weak references remain. T is the most derived type.
template <class T>
Y_FORCE_INLINE void DestroyRefCountedImpl(T* obj)
{
    // No standard way to statically calculate the base offset even if T is final.
    // static_cast<TFinalDerived*>(virtualBasePtr) does not work.
    auto* basePtr = static_cast<TRefCountedBase*>(obj);
    auto offset = reinterpret_cast<uintptr_t>(basePtr) - reinterpret_cast<uintptr_t>(obj);
    auto* refCounter = GetRefCounter(obj);

    // No virtual call when T is final.
    obj->~T();

    // Fast path. Weak refs cannot appear if there are neither strong nor weak refs.
    if (refCounter->GetWeakRefCount() == 1) {
        NYT::NDetail::TMemoryReleaser<T>::Do(obj, offset);
        return;
    }

    // Slow path: weak refs are still alive, so the memory must outlive the last
    // of them. The object has been destroyed, hence its vtable pointer slot is
    // free to reuse: stash a packed {deleter, base offset} there so Deallocate()
    // can free the memory later without knowing the most derived type.
    YT_ASSERT(offset < (1ULL << PackedPtrTagBits));
    auto* vTablePtr = reinterpret_cast<TPackedPtr*>(basePtr);
    *vTablePtr = TTaggedPtr<void(void*, ui16)>(&NYT::NDetail::TMemoryReleaser<T>::Do, offset).Pack();

    // Drop the weak ref held on behalf of the strong count; if it turns out to
    // be the last one, free the memory right away.
    if (refCounter->WeakUnref()) {
        NYT::NDetail::TMemoryReleaser<T>::Do(obj, offset);
    }
}
  83. ////////////////////////////////////////////////////////////////////////////////
// Specialization for final classes.
// A final T that is not derived from TRefCountedBase keeps its TRefCounter in
// a header placed immediately before the object within the same allocation.
template <class T, bool = std::derived_from<T, TRefCountedBase>>
struct TRefCountedTraits
{
    static_assert(
        std::is_final_v<T>,
        "Ref-counted objects must be derived from TRefCountedBase or to be final");

    // Header size: sizeof(TRefCounter) rounded up to T's alignment, so that
    // the object following the header stays properly aligned.
    static constexpr size_t RefCounterSpace = (sizeof(TRefCounter) + alignof(T) - 1) & ~(alignof(T) - 1);
    // Padding between the start of the allocation and the counter itself.
    static constexpr size_t RefCounterOffset = RefCounterSpace - sizeof(TRefCounter);

    // The counter sits directly before the object.
    Y_FORCE_INLINE static const TRefCounter* GetRefCounter(const T* obj)
    {
        return reinterpret_cast<const TRefCounter*>(obj) - 1;
    }

    // Runs the destructor and frees the memory once no weak refs remain.
    Y_FORCE_INLINE static void Destroy(const T* obj)
    {
        auto* refCounter = GetRefCounter(obj);

        // No virtual call when T is final.
        obj->~T();

        char* ptr = reinterpret_cast<char*>(const_cast<TRefCounter*>(refCounter));

        // Fast path. Weak refs cannot appear if there are neither strong nor weak refs.
        if (refCounter->GetWeakRefCount() == 1) {
            NYT::NDetail::TMemoryReleaser<T>::Do(ptr - RefCounterOffset, RefCounterSpace);
            return;
        }

        // Slow path: drop the weak ref held on behalf of the strong count and
        // free the memory only if it was the last one.
        if (refCounter->WeakUnref()) {
            NYT::NDetail::TMemoryReleaser<T>::Do(ptr - RefCounterOffset, RefCounterSpace);
        }
    }

    // Frees the allocation (counter header included) without running the
    // destructor; used when the last weak ref dies after destruction.
    Y_FORCE_INLINE static void Deallocate(const T* obj)
    {
        char* ptr = reinterpret_cast<char*>(const_cast<TRefCounter*>(GetRefCounter(obj)));
        NYT::NDetail::TMemoryReleaser<T>::Do(ptr - RefCounterOffset, RefCounterSpace);
    }
};
  118. // Specialization for classes derived from TRefCountedBase.
  119. template <class T>
  120. struct TRefCountedTraits<T, true>
  121. {
  122. static_assert(
  123. sizeof(T) < (1ULL << PackedPtrTagBits),
  124. "Ref counted object derived from TRefCountedBase exceedes max size");
  125. Y_FORCE_INLINE static const TRefCounter* GetRefCounter(const T* obj)
  126. {
  127. return obj;
  128. }
  129. Y_FORCE_INLINE static void Destroy(const TRefCountedBase* obj)
  130. {
  131. const_cast<TRefCountedBase*>(obj)->DestroyRefCounted();
  132. }
  133. Y_FORCE_INLINE static void Deallocate(const TRefCountedBase* obj)
  134. {
  135. auto* ptr = reinterpret_cast<TPackedPtr*>(const_cast<TRefCountedBase*>(obj));
  136. auto [ptrToDeleter, offset] = TTaggedPtr<void(void*, ui16)>::Unpack(*ptr);
  137. // The most derived type is erased here. So we cannot call TMemoryReleaser with derived type.
  138. ptrToDeleter(reinterpret_cast<char*>(ptr) - offset, offset);
  139. }
  140. };
  141. ////////////////////////////////////////////////////////////////////////////////
  142. } // namespace NDetail
  143. ////////////////////////////////////////////////////////////////////////////////
// Returns the current strong reference count.
// Acquire ordering makes writes published by prior ref operations visible.
Y_FORCE_INLINE int TRefCounter::GetRefCount() const noexcept
{
    return StrongCount_.load(std::memory_order::acquire);
}
// Adds n strong references. Requires that the caller already holds a live
// strong reference (asserted: the count must be positive before the add).
Y_FORCE_INLINE void TRefCounter::Ref(int n) const noexcept
{
    YT_ASSERT(n >= 0);
    // It is safe to use relaxed here, since new reference is always created from another live reference.
    auto value = StrongCount_.fetch_add(n, std::memory_order::relaxed);
    YT_ASSERT(value > 0);
    // Guard against overflowing the counter.
    YT_ASSERT(value <= std::numeric_limits<TRefCount>::max() - n);
    YT_ASSERT(WeakCount_.load(std::memory_order::relaxed) > 0);
}
// Like Ref, but additionally tolerates a zero strong count (value >= 0 rather
// than > 0): the caller — not an existing strong ref — guarantees liveness.
Y_FORCE_INLINE void TRefCounter::DangerousRef(int n) const noexcept
{
    YT_ASSERT(n >= 0);
    // Relaxed is fine as per lukyan@, the caller guarantees object liveness.
    auto value = StrongCount_.fetch_add(n, std::memory_order::relaxed);
    YT_ASSERT(value >= 0);
    // Guard against overflowing the counter.
    YT_ASSERT(value <= std::numeric_limits<TRefCount>::max() - n);
    YT_ASSERT(WeakCount_.load(std::memory_order::relaxed) > 0);
}
// Attempts to acquire a strong reference; returns false if the strong count
// has already dropped to zero (presumably used to upgrade a weak reference —
// note the weak-count assertion below).
Y_FORCE_INLINE bool TRefCounter::TryRef() const noexcept
{
    auto value = StrongCount_.load(std::memory_order::relaxed);
    YT_ASSERT(value >= 0 && value < std::numeric_limits<TRefCount>::max());
    YT_ASSERT(WeakCount_.load(std::memory_order::relaxed) > 0);
    // CAS loop: compare_exchange_weak may fail spuriously and reloads the
    // current count into |value| on failure; bail out once zero is observed.
    while (value != 0 && !StrongCount_.compare_exchange_weak(value, value + 1));
    return value != 0;
}
  174. Y_FORCE_INLINE bool TRefCounter::Unref(int n) const
  175. {
  176. YT_ASSERT(n >= 0);
  177. // We must properly synchronize last access to object with it destruction.
  178. // Otherwise compiler might reorder access to object past this decrement.
  179. //
  180. // See http://www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html#boost_atomic.usage_examples.example_reference_counters
  181. //
  182. auto oldStrongCount = StrongCount_.fetch_sub(n, std::memory_order::release);
  183. YT_ASSERT(oldStrongCount >= n);
  184. if (oldStrongCount == n) {
  185. std::atomic_thread_fence(std::memory_order::acquire);
  186. NSan::Acquire(&StrongCount_);
  187. return true;
  188. } else {
  189. return false;
  190. }
  191. }
// Returns the current weak reference count.
// Acquire ordering makes writes published by prior ref operations visible.
Y_FORCE_INLINE int TRefCounter::GetWeakRefCount() const noexcept
{
    return WeakCount_.load(std::memory_order::acquire);
}
// Adds one weak reference. Relaxed is sufficient: the new weak ref is created
// from an already-live reference (asserted positive).
Y_FORCE_INLINE void TRefCounter::WeakRef() const noexcept
{
    auto oldWeakCount = WeakCount_.fetch_add(1, std::memory_order::relaxed);
    YT_ASSERT(oldWeakCount > 0);
}
  201. Y_FORCE_INLINE bool TRefCounter::WeakUnref() const
  202. {
  203. auto oldWeakCount = WeakCount_.fetch_sub(1, std::memory_order::release);
  204. YT_ASSERT(oldWeakCount > 0);
  205. if (oldWeakCount == 1) {
  206. std::atomic_thread_fence(std::memory_order::acquire);
  207. NSan::Acquire(&WeakCount_);
  208. return true;
  209. } else {
  210. return false;
  211. }
  212. }
  213. ////////////////////////////////////////////////////////////////////////////////
  214. template <class T>
  215. Y_FORCE_INLINE const TRefCounter* GetRefCounter(const T* obj)
  216. {
  217. return NYT::NDetail::TRefCountedTraits<T>::GetRefCounter(obj);
  218. }
  219. template <class T>
  220. Y_FORCE_INLINE void DestroyRefCounted(const T* obj)
  221. {
  222. NYT::NDetail::TRefCountedTraits<T>::Destroy(obj);
  223. }
  224. template <class T>
  225. Y_FORCE_INLINE void DeallocateRefCounted(const T* obj)
  226. {
  227. NYT::NDetail::TRefCountedTraits<T>::Deallocate(obj);
  228. }
  229. ////////////////////////////////////////////////////////////////////////////////
  230. template <class T>
  231. Y_FORCE_INLINE void Ref(T* obj, int n)
  232. {
  233. GetRefCounter(obj)->Ref(n);
  234. }
  235. template <class T>
  236. Y_FORCE_INLINE void Unref(T* obj, int n)
  237. {
  238. if (GetRefCounter(obj)->Unref(n)) {
  239. DestroyRefCounted(obj);
  240. }
  241. }
  242. ////////////////////////////////////////////////////////////////////////////////
// Drops one strong reference to this object; forwards to the free function,
// which destroys the object if this was the last strong ref.
Y_FORCE_INLINE void TRefCounted::Unref() const
{
    ::NYT::Unref(this);
}
// Drops one weak reference; frees the object's memory when it was the last one.
Y_FORCE_INLINE void TRefCounted::WeakUnref() const
{
    // Explicit base qualification: invoke the counter's WeakUnref,
    // not this member recursively.
    if (TRefCounter::WeakUnref()) {
        DeallocateRefCounted(this);
    }
}
// Forwards destruction to the NDetail implementation instantiated for the most
// derived type T (presumably invoked from DestroyRefCounted — confirm in
// ref_counted.h).
template <class T>
void TRefCounted::DestroyRefCountedImpl(T* obj)
{
    NYT::NDetail::DestroyRefCountedImpl<T>(obj);
}
  258. ////////////////////////////////////////////////////////////////////////////////
  259. } // namespace NYT