// Copyright 2018 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef ABSL_CONTAINER_INTERNAL_CONTAINER_MEMORY_H_
#define ABSL_CONTAINER_INTERNAL_CONTAINER_MEMORY_H_

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <memory>
#include <new>
#include <tuple>
#include <type_traits>
#include <utility>

#include "absl/base/config.h"
#include "absl/memory/memory.h"
#include "absl/meta/type_traits.h"
#include "absl/utility/utility.h"

#ifdef ABSL_HAVE_ADDRESS_SANITIZER
#include <sanitizer/asan_interface.h>
#endif

#ifdef ABSL_HAVE_MEMORY_SANITIZER
#include <sanitizer/msan_interface.h>
#endif

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace container_internal {

template <size_t Alignment>
struct alignas(Alignment) AlignedType {};

// Allocates at least n bytes aligned to the specified alignment.
// Alignment must be a power of 2. It must be positive.
//
// Note that many allocators don't honor alignment requirements above a certain
// threshold (usually either alignof(std::max_align_t) or alignof(void*)).
// Allocate() doesn't apply alignment corrections. If the underlying allocator
// returns an insufficiently aligned pointer, that's what you are going to get.
template <size_t Alignment, class Alloc>
void* Allocate(Alloc* alloc, size_t n) {
  static_assert(Alignment > 0, "");
  assert(n && "n must be positive");
  using M = AlignedType<Alignment>;
  using A = typename absl::allocator_traits<Alloc>::template rebind_alloc<M>;
  using AT = typename absl::allocator_traits<Alloc>::template rebind_traits<M>;
  // On macOS, "mem_alloc" is a #define with one argument defined in
  // rpc/types.h, so we can't name the variable "mem_alloc" and initialize it
  // with the "foo(bar)" syntax.
  A my_mem_alloc(*alloc);
  void* p = AT::allocate(my_mem_alloc, (n + sizeof(M) - 1) / sizeof(M));
  assert(reinterpret_cast<uintptr_t>(p) % Alignment == 0 &&
         "allocator does not respect alignment");
  return p;
}

// Returns true if the destruction of the value with given Allocator will be
// trivial.
template <class Allocator, class ValueType>
constexpr auto IsDestructionTrivial() {
  constexpr bool result =
      std::is_trivially_destructible<ValueType>::value &&
      std::is_same<typename absl::allocator_traits<
                       Allocator>::template rebind_alloc<char>,
                   std::allocator<char>>::value;
  return std::integral_constant<bool, result>();
}
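
// Illustrative sketch (not part of this header): the helper returns a
// std::integral_constant, so the result can be consumed at compile time.
//
//   static_assert(
//       decltype(IsDestructionTrivial<std::allocator<int>, int>())::value,
//       "int destructs trivially under std::allocator");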

// The pointer must have been previously obtained by calling
// Allocate<Alignment>(alloc, n).
template <size_t Alignment, class Alloc>
void Deallocate(Alloc* alloc, void* p, size_t n) {
  static_assert(Alignment > 0, "");
  assert(n && "n must be positive");
  using M = AlignedType<Alignment>;
  using A = typename absl::allocator_traits<Alloc>::template rebind_alloc<M>;
  using AT = typename absl::allocator_traits<Alloc>::template rebind_traits<M>;
  // On macOS, "mem_alloc" is a #define with one argument defined in
  // rpc/types.h, so we can't name the variable "mem_alloc" and initialize it
  // with the "foo(bar)" syntax.
  A my_mem_alloc(*alloc);
  AT::deallocate(my_mem_alloc, static_cast<M*>(p),
                 (n + sizeof(M) - 1) / sizeof(M));
}
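
// Illustrative sketch (hypothetical variable names, not part of this header):
// a round trip through Allocate()/Deallocate() with std::allocator.
//
//   std::allocator<char> alloc;
//   void* p = Allocate<alignof(std::max_align_t)>(&alloc, 64);
//   // ... use the 64 bytes at p ...
//   Deallocate<alignof(std::max_align_t)>(&alloc, p, 64);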

namespace memory_internal {

// Constructs T into uninitialized storage pointed by `ptr` using the args
// specified in the tuple.
template <class Alloc, class T, class Tuple, size_t... I>
void ConstructFromTupleImpl(Alloc* alloc, T* ptr, Tuple&& t,
                            absl::index_sequence<I...>) {
  absl::allocator_traits<Alloc>::construct(
      *alloc, ptr, std::get<I>(std::forward<Tuple>(t))...);
}

template <class T, class F>
struct WithConstructedImplF {
  template <class... Args>
  decltype(std::declval<F>()(std::declval<T>())) operator()(
      Args&&... args) const {
    return std::forward<F>(f)(T(std::forward<Args>(args)...));
  }
  F&& f;
};

template <class T, class Tuple, size_t... Is, class F>
decltype(std::declval<F>()(std::declval<T>())) WithConstructedImpl(
    Tuple&& t, absl::index_sequence<Is...>, F&& f) {
  return WithConstructedImplF<T, F>{std::forward<F>(f)}(
      std::get<Is>(std::forward<Tuple>(t))...);
}

template <class T, size_t... Is>
auto TupleRefImpl(T&& t, absl::index_sequence<Is...>)
    -> decltype(std::forward_as_tuple(std::get<Is>(std::forward<T>(t))...)) {
  return std::forward_as_tuple(std::get<Is>(std::forward<T>(t))...);
}

// Returns a tuple of references to the elements of the input tuple. T must be
// a tuple.
template <class T>
auto TupleRef(T&& t) -> decltype(TupleRefImpl(
    std::forward<T>(t),
    absl::make_index_sequence<
        std::tuple_size<typename std::decay<T>::type>::value>())) {
  return TupleRefImpl(
      std::forward<T>(t),
      absl::make_index_sequence<
          std::tuple_size<typename std::decay<T>::type>::value>());
}

template <class F, class K, class V>
decltype(std::declval<F>()(std::declval<const K&>(), std::piecewise_construct,
                           std::declval<std::tuple<K>>(), std::declval<V>()))
DecomposePairImpl(F&& f, std::pair<std::tuple<K>, V> p) {
  const auto& key = std::get<0>(p.first);
  return std::forward<F>(f)(key, std::piecewise_construct, std::move(p.first),
                            std::move(p.second));
}

}  // namespace memory_internal

// Constructs T into uninitialized storage pointed by `ptr` using the args
// specified in the tuple.
template <class Alloc, class T, class Tuple>
void ConstructFromTuple(Alloc* alloc, T* ptr, Tuple&& t) {
  memory_internal::ConstructFromTupleImpl(
      alloc, ptr, std::forward<Tuple>(t),
      absl::make_index_sequence<
          std::tuple_size<typename std::decay<Tuple>::type>::value>());
}

// Constructs T using the args specified in the tuple and calls F with the
// constructed value.
template <class T, class Tuple, class F>
decltype(std::declval<F>()(std::declval<T>())) WithConstructed(Tuple&& t,
                                                               F&& f) {
  return memory_internal::WithConstructedImpl<T>(
      std::forward<Tuple>(t),
      absl::make_index_sequence<
          std::tuple_size<typename std::decay<Tuple>::type>::value>(),
      std::forward<F>(f));
}
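
// Illustrative sketch (hypothetical names, not part of this header):
// constructing a std::string from a tuple of constructor arguments and
// handing the temporary to a callback.
//
//   auto args = std::make_tuple(3, 'x');
//   std::string s = WithConstructed<std::string>(
//       std::move(args), [](std::string v) { return v; });  // s == "xxx"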

// Given arguments of an std::pair's constructor, PairArgs() returns a pair of
// tuples with references to the passed arguments. The tuples contain
// constructor arguments for the first and the second elements of the pair.
//
// The following two snippets are equivalent.
//
// 1. std::pair<F, S> p(args...);
//
// 2. auto a = PairArgs(args...);
//    std::pair<F, S> p(std::piecewise_construct,
//                      std::move(a.first), std::move(a.second));
inline std::pair<std::tuple<>, std::tuple<>> PairArgs() { return {}; }

template <class F, class S>
std::pair<std::tuple<F&&>, std::tuple<S&&>> PairArgs(F&& f, S&& s) {
  return {std::piecewise_construct, std::forward_as_tuple(std::forward<F>(f)),
          std::forward_as_tuple(std::forward<S>(s))};
}

template <class F, class S>
std::pair<std::tuple<const F&>, std::tuple<const S&>> PairArgs(
    const std::pair<F, S>& p) {
  return PairArgs(p.first, p.second);
}

template <class F, class S>
std::pair<std::tuple<F&&>, std::tuple<S&&>> PairArgs(std::pair<F, S>&& p) {
  return PairArgs(std::forward<F>(p.first), std::forward<S>(p.second));
}

template <class F, class S>
auto PairArgs(std::piecewise_construct_t, F&& f, S&& s)
    -> decltype(std::make_pair(memory_internal::TupleRef(std::forward<F>(f)),
                               memory_internal::TupleRef(std::forward<S>(s)))) {
  return std::make_pair(memory_internal::TupleRef(std::forward<F>(f)),
                        memory_internal::TupleRef(std::forward<S>(s)));
}

// A helper function for implementing apply() in map policies.
template <class F, class... Args>
auto DecomposePair(F&& f, Args&&... args)
    -> decltype(memory_internal::DecomposePairImpl(
        std::forward<F>(f), PairArgs(std::forward<Args>(args)...))) {
  return memory_internal::DecomposePairImpl(
      std::forward<F>(f), PairArgs(std::forward<Args>(args)...));
}

// A helper function for implementing apply() in set policies.
template <class F, class Arg>
decltype(std::declval<F>()(std::declval<const Arg&>(), std::declval<Arg>()))
DecomposeValue(F&& f, Arg&& arg) {
  const auto& key = arg;
  return std::forward<F>(f)(key, std::forward<Arg>(arg));
}
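
// Illustrative sketch (hypothetical callback, not part of this header): both
// helpers hand the callback a reference to the key first, followed by the
// arguments needed to construct the stored element.
//
//   auto f = [](const int& key, auto&&...) { return key; };
//   int k1 = DecomposeValue(f, 42);                   // k1 == 42
//   int k2 = DecomposePair(f, 42, std::string("v"));  // k2 == 42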

// Helper functions for asan and msan.
inline void SanitizerPoisonMemoryRegion(const void* m, size_t s) {
#ifdef ABSL_HAVE_ADDRESS_SANITIZER
  ASAN_POISON_MEMORY_REGION(m, s);
#endif
#ifdef ABSL_HAVE_MEMORY_SANITIZER
  __msan_poison(m, s);
#endif
  (void)m;
  (void)s;
}

inline void SanitizerUnpoisonMemoryRegion(const void* m, size_t s) {
#ifdef ABSL_HAVE_ADDRESS_SANITIZER
  ASAN_UNPOISON_MEMORY_REGION(m, s);
#endif
#ifdef ABSL_HAVE_MEMORY_SANITIZER
  __msan_unpoison(m, s);
#endif
  (void)m;
  (void)s;
}

template <typename T>
inline void SanitizerPoisonObject(const T* object) {
  SanitizerPoisonMemoryRegion(object, sizeof(T));
}

template <typename T>
inline void SanitizerUnpoisonObject(const T* object) {
  SanitizerUnpoisonMemoryRegion(object, sizeof(T));
}
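
// Illustrative sketch (hypothetical buffer, not part of this header): marking
// a currently-unused byte range so that ASan/MSan flag stray accesses. The
// calls compile to no-ops when neither sanitizer is enabled.
//
//   alignas(16) unsigned char buf[64];
//   SanitizerPoisonMemoryRegion(buf, sizeof(buf));    // now off-limits
//   SanitizerUnpoisonMemoryRegion(buf, sizeof(buf));  // usable again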

namespace memory_internal {

// If Pair is a standard-layout type, OffsetOf<Pair>::kFirst and
// OffsetOf<Pair>::kSecond are equivalent to offsetof(Pair, first) and
// offsetof(Pair, second) respectively. Otherwise they are -1.
//
// The purpose of OffsetOf is to avoid calling offsetof() on non-standard-layout
// types, which is non-portable.
template <class Pair, class = std::true_type>
struct OffsetOf {
  static constexpr size_t kFirst = static_cast<size_t>(-1);
  static constexpr size_t kSecond = static_cast<size_t>(-1);
};

template <class Pair>
struct OffsetOf<Pair, typename std::is_standard_layout<Pair>::type> {
  static constexpr size_t kFirst = offsetof(Pair, first);
  static constexpr size_t kSecond = offsetof(Pair, second);
};

template <class K, class V>
struct IsLayoutCompatible {
 private:
  struct Pair {
    K first;
    V second;
  };

  // Is P layout-compatible with Pair?
  template <class P>
  static constexpr bool LayoutCompatible() {
    return std::is_standard_layout<P>() && sizeof(P) == sizeof(Pair) &&
           alignof(P) == alignof(Pair) &&
           memory_internal::OffsetOf<P>::kFirst ==
               memory_internal::OffsetOf<Pair>::kFirst &&
           memory_internal::OffsetOf<P>::kSecond ==
               memory_internal::OffsetOf<Pair>::kSecond;
  }

 public:
  // Whether pair<const K, V> and pair<K, V> are layout-compatible. If they
  // are, then it is safe to store them in a union and read from either.
  static constexpr bool value = std::is_standard_layout<K>() &&
                                std::is_standard_layout<Pair>() &&
                                memory_internal::OffsetOf<Pair>::kFirst == 0 &&
                                LayoutCompatible<std::pair<K, V>>() &&
                                LayoutCompatible<std::pair<const K, V>>();
};

}  // namespace memory_internal
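
// Illustrative sketch (not part of this header): querying whether
// pair<const K, V> may alias pair<K, V> for a given key/value combination.
// Whether this holds depends on the standard library's std::pair layout, so
// it is queried rather than asserted.
//
//   constexpr bool kCanAlias =
//       memory_internal::IsLayoutCompatible<int, std::string>::value;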

// The internal storage type for key-value containers like flat_hash_map.
//
// It is convenient for the value_type of a flat_hash_map<K, V> to be
// pair<const K, V>; the "const K" prevents accidental modification of the key
// when dealing with the reference returned from find() and similar methods.
// However, this creates other problems; we want to be able to emplace(K, V)
// efficiently with move operations, and similarly be able to move a
// pair<K, V> in insert().
//
// The solution is this union, which aliases the const and non-const versions
// of the pair. This also allows flat_hash_map<const K, V> to work, even though
// that has the same efficiency issues with move in emplace() and insert() -
// but people do it anyway.
//
// If kMutableKeys is false, only the value member can be accessed.
//
// If kMutableKeys is true, key can be accessed through all slots while value
// and mutable_value must be accessed only via INITIALIZED slots. Slots are
// created and destroyed via mutable_value so that the key can be moved later.
//
// Accessing one of the union fields while the other is active is safe as
// long as they are layout-compatible, which is guaranteed by the definition of
// kMutableKeys. For C++11, the relevant section of the standard is
// https://timsong-cpp.github.io/cppwp/n3337/class.mem#19 (9.2.19)
template <class K, class V>
union map_slot_type {
  map_slot_type() {}
  ~map_slot_type() = delete;
  using value_type = std::pair<const K, V>;
  using mutable_value_type =
      std::pair<absl::remove_const_t<K>, absl::remove_const_t<V>>;

  value_type value;
  mutable_value_type mutable_value;
  absl::remove_const_t<K> key;
};

template <class K, class V>
struct map_slot_policy {
  using slot_type = map_slot_type<K, V>;
  using value_type = std::pair<const K, V>;
  using mutable_value_type =
      std::pair<absl::remove_const_t<K>, absl::remove_const_t<V>>;

 private:
  static void emplace(slot_type* slot) {
    // The construction of union doesn't do anything at runtime but it allows
    // us to access its members without violating aliasing rules.
    new (slot) slot_type;
  }
  // If pair<const K, V> and pair<K, V> are layout-compatible, we can accept
  // one or the other via slot_type. We are also free to access the key via
  // slot_type::key in this case.
  using kMutableKeys = memory_internal::IsLayoutCompatible<K, V>;

 public:
  static value_type& element(slot_type* slot) { return slot->value; }
  static const value_type& element(const slot_type* slot) {
    return slot->value;
  }

  // When C++17 is available, we can use std::launder to provide mutable
  // access to the key for use in node handle.
#if defined(__cpp_lib_launder) && __cpp_lib_launder >= 201606
  static K& mutable_key(slot_type* slot) {
    // Still check for kMutableKeys so that we can avoid calling std::launder
    // unless necessary because it can interfere with optimizations.
    return kMutableKeys::value ? slot->key
                               : *std::launder(const_cast<K*>(
                                     std::addressof(slot->value.first)));
  }
#else   // !(defined(__cpp_lib_launder) && __cpp_lib_launder >= 201606)
  static const K& mutable_key(slot_type* slot) { return key(slot); }
#endif  // defined(__cpp_lib_launder) && __cpp_lib_launder >= 201606

  static const K& key(const slot_type* slot) {
    return kMutableKeys::value ? slot->key : slot->value.first;
  }

  template <class Allocator, class... Args>
  static void construct(Allocator* alloc, slot_type* slot, Args&&... args) {
    emplace(slot);
    if (kMutableKeys::value) {
      absl::allocator_traits<Allocator>::construct(*alloc, &slot->mutable_value,
                                                   std::forward<Args>(args)...);
    } else {
      absl::allocator_traits<Allocator>::construct(*alloc, &slot->value,
                                                   std::forward<Args>(args)...);
    }
  }

  // Construct this slot by moving from another slot.
  template <class Allocator>
  static void construct(Allocator* alloc, slot_type* slot, slot_type* other) {
    emplace(slot);
    if (kMutableKeys::value) {
      absl::allocator_traits<Allocator>::construct(
          *alloc, &slot->mutable_value, std::move(other->mutable_value));
    } else {
      absl::allocator_traits<Allocator>::construct(*alloc, &slot->value,
                                                   std::move(other->value));
    }
  }

  // Construct this slot by copying from another slot.
  template <class Allocator>
  static void construct(Allocator* alloc, slot_type* slot,
                        const slot_type* other) {
    emplace(slot);
    absl::allocator_traits<Allocator>::construct(*alloc, &slot->value,
                                                 other->value);
  }

  template <class Allocator>
  static auto destroy(Allocator* alloc, slot_type* slot) {
    if (kMutableKeys::value) {
      absl::allocator_traits<Allocator>::destroy(*alloc, &slot->mutable_value);
    } else {
      absl::allocator_traits<Allocator>::destroy(*alloc, &slot->value);
    }
    return IsDestructionTrivial<Allocator, value_type>();
  }

  template <class Allocator>
  static auto transfer(Allocator* alloc, slot_type* new_slot,
                       slot_type* old_slot) {
    auto is_relocatable =
        typename absl::is_trivially_relocatable<value_type>::type();

    emplace(new_slot);
#if defined(__cpp_lib_launder) && __cpp_lib_launder >= 201606
    if (is_relocatable) {
      // TODO(b/247130232,b/251814870): remove casts after fixing warnings.
      std::memcpy(static_cast<void*>(std::launder(&new_slot->value)),
                  static_cast<const void*>(&old_slot->value),
                  sizeof(value_type));
      return is_relocatable;
    }
#endif

    if (kMutableKeys::value) {
      absl::allocator_traits<Allocator>::construct(
          *alloc, &new_slot->mutable_value, std::move(old_slot->mutable_value));
    } else {
      absl::allocator_traits<Allocator>::construct(*alloc, &new_slot->value,
                                                   std::move(old_slot->value));
    }
    destroy(alloc, old_slot);
    return is_relocatable;
  }
};
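
// Illustrative sketch (hypothetical names, not part of this header): driving
// a slot through the policy with a plain std::allocator.
//
//   using Policy = map_slot_policy<int, std::string>;
//   Policy::slot_type slot;
//   std::allocator<char> alloc;
//   Policy::construct(&alloc, &slot, 1, "one");  // slot holds {1, "one"}
//   const int& k = Policy::key(&slot);           // k == 1
//   Policy::element(&slot).second = "uno";       // mapped value is mutable
//   Policy::destroy(&alloc, &slot);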

// Type erased function for computing hash of the slot.
using HashSlotFn = size_t (*)(const void* hash_fn, void* slot);

// Type erased function to apply `Fn` to data inside of the `slot`.
// The data is expected to have type `T`.
template <class Fn, class T>
size_t TypeErasedApplyToSlotFn(const void* fn, void* slot) {
  const auto* f = static_cast<const Fn*>(fn);
  return (*f)(*static_cast<const T*>(slot));
}

// Type erased function to apply `Fn` to data inside of the `*slot_ptr`.
// The data is expected to have type `T`.
template <class Fn, class T>
size_t TypeErasedDerefAndApplyToSlotFn(const void* fn, void* slot_ptr) {
  const auto* f = static_cast<const Fn*>(fn);
  const T* slot = *static_cast<const T**>(slot_ptr);
  return (*f)(*slot);
}
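
// Illustrative sketch (hypothetical names; absl::Hash comes from
// "absl/hash/hash.h", which this header does not include): erasing a concrete
// hasher into a HashSlotFn.
//
//   absl::Hash<int> hasher;
//   int slot = 7;
//   HashSlotFn fn = &TypeErasedApplyToSlotFn<absl::Hash<int>, int>;
//   size_t h = fn(&hasher, &slot);  // equivalent to hasher(7)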

}  // namespace container_internal
ABSL_NAMESPACE_END
}  // namespace absl

#endif  // ABSL_CONTAINER_INTERNAL_CONTAINER_MEMORY_H_