// Copyright 2019 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef Y_ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_
#define Y_ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_

#include <algorithm>
#include <cstddef>
#include <cstring>
#include <iterator>
#include <limits>
#include <memory>
#include <new>
#include <type_traits>
#include <utility>

#include "y_absl/base/attributes.h"
#include "y_absl/base/config.h"
#include "y_absl/base/internal/identity.h"
#include "y_absl/base/macros.h"
#include "y_absl/container/internal/compressed_tuple.h"
#include "y_absl/memory/memory.h"
#include "y_absl/meta/type_traits.h"
#include "y_absl/types/span.h"

namespace y_absl {
Y_ABSL_NAMESPACE_BEGIN
namespace inlined_vector_internal {

// GCC does not deal very well with the below code
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Warray-bounds"
#endif
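
// Type alias shorthands over `std::allocator_traits<A>` used throughout this
// file. Iterators over the backing storage are raw pointers.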
template <typename A>
using AllocatorTraits = std::allocator_traits<A>;
template <typename A>
using ValueType = typename AllocatorTraits<A>::value_type;
template <typename A>
using SizeType = typename AllocatorTraits<A>::size_type;
template <typename A>
using Pointer = typename AllocatorTraits<A>::pointer;
template <typename A>
using ConstPointer = typename AllocatorTraits<A>::const_pointer;
template <typename A>
using DifferenceType = typename AllocatorTraits<A>::difference_type;
template <typename A>
using Reference = ValueType<A>&;
template <typename A>
using ConstReference = const ValueType<A>&;
template <typename A>
using Iterator = Pointer<A>;
template <typename A>
using ConstIterator = ConstPointer<A>;
template <typename A>
using ReverseIterator = typename std::reverse_iterator<Iterator<A>>;
template <typename A>
using ConstReverseIterator = typename std::reverse_iterator<ConstIterator<A>>;
template <typename A>
using MoveIterator = typename std::move_iterator<Iterator<A>>;

template <typename Iterator>
using IsAtLeastForwardIterator = std::is_convertible<
    typename std::iterator_traits<Iterator>::iterator_category,
    std::forward_iterator_tag>;

template <typename A>
using IsMoveAssignOk = std::is_move_assignable<ValueType<A>>;
template <typename A>
using IsSwapOk = y_absl::type_traits_internal::IsSwappable<ValueType<A>>;
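
// `DestroyAdapter` destroys a range of elements. The primary specialization
// runs each destructor in reverse construction order; the specialization for
// trivially destructible types compiles down to a no-op.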
template <typename A, bool IsTriviallyDestructible =
                          y_absl::is_trivially_destructible<ValueType<A>>::value>
struct DestroyAdapter;

template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ false> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    for (SizeType<A> i = destroy_size; i != 0;) {
      --i;
      AllocatorTraits<A>::destroy(allocator, destroy_first + i);
    }
  }
};

template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ true> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    static_cast<void>(allocator);
    static_cast<void>(destroy_first);
    static_cast<void>(destroy_size);
  }
};
template <typename A>
struct Allocation {
  Pointer<A> data = nullptr;
  SizeType<A> capacity = 0;
};
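
// `MallocAdapter` funnels allocation and deallocation through
// `std::allocator_traits`, returning the capacity actually obtained alongside
// the pointer.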
template <typename A,
          bool IsOverAligned =
              (alignof(ValueType<A>) > Y_ABSL_INTERNAL_DEFAULT_NEW_ALIGNMENT)>
struct MallocAdapter {
  static Allocation<A> Allocate(A& allocator, SizeType<A> requested_capacity) {
    return {AllocatorTraits<A>::allocate(allocator, requested_capacity),
            requested_capacity};
  }

  static void Deallocate(A& allocator, Pointer<A> pointer,
                         SizeType<A> capacity) {
    AllocatorTraits<A>::deallocate(allocator, pointer, capacity);
  }
};
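
// Constructs `construct_size` elements at `construct_first`, drawing values
// from `values`. If a constructor throws, every element constructed so far is
// destroyed before the exception is rethrown.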
template <typename A, typename ValueAdapter>
void ConstructElements(y_absl::internal::type_identity_t<A>& allocator,
                       Pointer<A> construct_first, ValueAdapter& values,
                       SizeType<A> construct_size) {
  for (SizeType<A> i = 0; i < construct_size; ++i) {
    Y_ABSL_INTERNAL_TRY { values.ConstructNext(allocator, construct_first + i); }
    Y_ABSL_INTERNAL_CATCH_ANY {
      DestroyAdapter<A>::DestroyElements(allocator, construct_first, i);
      Y_ABSL_INTERNAL_RETHROW;
    }
  }
}
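
// Assigns `assign_size` values from `values` onto elements that have already
// been constructed, starting at `assign_first`.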
template <typename A, typename ValueAdapter>
void AssignElements(Pointer<A> assign_first, ValueAdapter& values,
                    SizeType<A> assign_size) {
  for (SizeType<A> i = 0; i < assign_size; ++i) {
    values.AssignNext(assign_first + i);
  }
}
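
// A non-owning snapshot of a `Storage`'s data pointer, size, and capacity.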
template <typename A>
struct StorageView {
  Pointer<A> data;
  SizeType<A> size;
  SizeType<A> capacity;
};
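
// The value adapter classes below provide a uniform source of values via
// `ConstructNext()`/`AssignNext()`: `IteratorValueAdapter` reads successive
// values from an iterator, `CopyValueAdapter` repeats a single value, and
// `DefaultValueAdapter` value-initializes.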
template <typename A, typename Iterator>
class IteratorValueAdapter {
 public:
  explicit IteratorValueAdapter(const Iterator& it) : it_(it) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *it_);
    ++it_;
  }

  void AssignNext(Pointer<A> assign_at) {
    *assign_at = *it_;
    ++it_;
  }

 private:
  Iterator it_;
};

template <typename A>
class CopyValueAdapter {
 public:
  explicit CopyValueAdapter(ConstPointer<A> p) : ptr_(p) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *ptr_);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = *ptr_; }

 private:
  ConstPointer<A> ptr_;
};

template <typename A>
class DefaultValueAdapter {
 public:
  explicit DefaultValueAdapter() {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = ValueType<A>(); }
};
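
// RAII wrapper for a speculative allocation: unless ownership is taken via
// `Release()`, the destructor returns the memory to the allocator. This keeps
// multi-step reallocations exception-safe.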
template <typename A>
class AllocationTransaction {
 public:
  explicit AllocationTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), capacity_(0) {}

  ~AllocationTransaction() {
    if (DidAllocate()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetData(), GetCapacity());
    }
  }

  AllocationTransaction(const AllocationTransaction&) = delete;
  void operator=(const AllocationTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetCapacity() { return capacity_; }

  bool DidAllocate() { return GetData() != nullptr; }

  Pointer<A> Allocate(SizeType<A> requested_capacity) {
    Allocation<A> result =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    GetData() = result.data;
    GetCapacity() = result.capacity;
    return result.data;
  }

  Y_ABSL_MUST_USE_RESULT Allocation<A> Release() && {
    Allocation<A> result = {GetData(), GetCapacity()};
    Reset();
    return result;
  }

 private:
  void Reset() {
    GetData() = nullptr;
    GetCapacity() = 0;
  }

  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> capacity_;
};
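
// RAII wrapper for a batch of speculatively constructed elements: unless
// `Commit()` is called, the destructor destroys every element the transaction
// constructed.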
template <typename A>
class ConstructionTransaction {
 public:
  explicit ConstructionTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), size_(0) {}

  ~ConstructionTransaction() {
    if (DidConstruct()) {
      DestroyAdapter<A>::DestroyElements(GetAllocator(), GetData(), GetSize());
    }
  }

  ConstructionTransaction(const ConstructionTransaction&) = delete;
  void operator=(const ConstructionTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetSize() { return size_; }

  bool DidConstruct() { return GetData() != nullptr; }

  template <typename ValueAdapter>
  void Construct(Pointer<A> data, ValueAdapter& values, SizeType<A> size) {
    ConstructElements<A>(GetAllocator(), data, values, size);
    GetData() = data;
    GetSize() = size;
  }

  void Commit() && {
    GetData() = nullptr;
    GetSize() = 0;
  }

 private:
  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> size_;
};
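
// `Storage` implements the backing store for `InlinedVector`: a union of an
// inline element buffer and a heap allocation, plus metadata that packs the
// size together with an "is allocated" flag (see the member accessors below).
//
// Rough usage sketch, assuming the public `y_absl::InlinedVector` wrapper
// (defined elsewhere) that owns a `Storage<T, N, A>`:
//
//   y_absl::InlinedVector<int, 4> v = {1, 2, 3};  // fits inline, no heap
//   v.resize(100);  // exceeds the inline capacity; spills to the heap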
template <typename T, size_t N, typename A>
class Storage {
 public:
  struct MemcpyPolicy {};
  struct ElementwiseAssignPolicy {};
  struct ElementwiseSwapPolicy {};
  struct ElementwiseConstructPolicy {};

  using MoveAssignmentPolicy = y_absl::conditional_t<
      // Fast path: if the value type can be trivially move assigned and
      // destroyed, and we know the allocator doesn't do anything fancy, then
      // it's safe for us to simply adopt the contents of the storage for
      // `other` and remove its own reference to them. It's as if we had
      // individually move-assigned each value and then destroyed the original.
      y_absl::conjunction<y_absl::is_trivially_move_assignable<ValueType<A>>,
                          y_absl::is_trivially_destructible<ValueType<A>>,
                          std::is_same<A, std::allocator<ValueType<A>>>>::value,
      MemcpyPolicy,
      // Otherwise we use move assignment if possible. If not, we simulate
      // move assignment using move construction.
      //
      // Note that this is in contrast to e.g. std::vector and std::optional,
      // which are themselves not move-assignable when their contained type is
      // not.
      y_absl::conditional_t<IsMoveAssignOk<A>::value, ElementwiseAssignPolicy,
                            ElementwiseConstructPolicy>>;

  // The policy to be used specifically when swapping inlined elements.
  using SwapInlinedElementsPolicy = y_absl::conditional_t<
      // Fast path: if the value type can be trivially relocated, and we
      // know the allocator doesn't do anything fancy, then it's safe for us
      // to simply swap the bytes in the inline storage. It's as if we had
      // relocated the first vector's elements into temporary storage,
      // relocated the second's elements into the (now-empty) first's,
      // and then relocated from temporary storage into the second.
      y_absl::conjunction<y_absl::is_trivially_relocatable<ValueType<A>>,
                          std::is_same<A, std::allocator<ValueType<A>>>>::value,
      MemcpyPolicy,
      y_absl::conditional_t<IsSwapOk<A>::value, ElementwiseSwapPolicy,
                            ElementwiseConstructPolicy>>;

  static SizeType<A> NextCapacity(SizeType<A> current_capacity) {
    return current_capacity * 2;
  }

  static SizeType<A> ComputeCapacity(SizeType<A> current_capacity,
                                     SizeType<A> requested_capacity) {
    return (std::max)(NextCapacity(current_capacity), requested_capacity);
  }

  // ---------------------------------------------------------------------------
  // Storage Constructors and Destructor
  // ---------------------------------------------------------------------------

  Storage() : metadata_(A(), /* size and is_allocated */ 0u) {}

  explicit Storage(const A& allocator)
      : metadata_(allocator, /* size and is_allocated */ 0u) {}

  ~Storage() {
    // Fast path: if we are empty and not allocated, there's nothing to do.
    if (GetSizeAndIsAllocated() == 0) {
      return;
    }

    // Fast path: if no destructors need to be run and we know the allocator
    // doesn't do anything fancy, then all we need to do is deallocate (and
    // maybe not even that).
    if (y_absl::is_trivially_destructible<ValueType<A>>::value &&
        std::is_same<A, std::allocator<ValueType<A>>>::value) {
      DeallocateIfAllocated();
      return;
    }

    DestroyContents();
  }

  // ---------------------------------------------------------------------------
  // Storage Member Accessors
  // ---------------------------------------------------------------------------
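
  // The size-and-allocated word packs the logical size into its high bits and
  // an "is allocated" flag into its lowest bit: for a word `w`, the size is
  // `w >> 1` and the storage is heap-allocated iff `w & 1`. For example,
  // `w == 0b1011` means size `0b101` (5) with heap-allocated storage.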
  SizeType<A>& GetSizeAndIsAllocated() { return metadata_.template get<1>(); }

  const SizeType<A>& GetSizeAndIsAllocated() const {
    return metadata_.template get<1>();
  }

  SizeType<A> GetSize() const { return GetSizeAndIsAllocated() >> 1; }

  bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; }

  Pointer<A> GetAllocatedData() {
    // GCC 12 has a false-positive -Wmaybe-uninitialized warning here.
#if Y_ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
#endif
    return data_.allocated.allocated_data;
#if Y_ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
#pragma GCC diagnostic pop
#endif
  }

  ConstPointer<A> GetAllocatedData() const {
    return data_.allocated.allocated_data;
  }

  // Y_ABSL_ATTRIBUTE_NO_SANITIZE_CFI is used because the memory pointed to may
  // be uninitialized, a common pattern in allocate()+construct() APIs.
  // https://clang.llvm.org/docs/ControlFlowIntegrity.html#bad-cast-checking
  // NOTE: When this was written, LLVM documentation did not explicitly
  // mention that casting `char*` and using `reinterpret_cast` qualifies
  // as a bad cast.
  Y_ABSL_ATTRIBUTE_NO_SANITIZE_CFI Pointer<A> GetInlinedData() {
    return reinterpret_cast<Pointer<A>>(data_.inlined.inlined_data);
  }

  Y_ABSL_ATTRIBUTE_NO_SANITIZE_CFI ConstPointer<A> GetInlinedData() const {
    return reinterpret_cast<ConstPointer<A>>(data_.inlined.inlined_data);
  }

  SizeType<A> GetAllocatedCapacity() const {
    return data_.allocated.allocated_capacity;
  }

  SizeType<A> GetInlinedCapacity() const {
    return static_cast<SizeType<A>>(kOptimalInlinedSize);
  }

  StorageView<A> MakeStorageView() {
    return GetIsAllocated() ? StorageView<A>{GetAllocatedData(), GetSize(),
                                             GetAllocatedCapacity()}
                            : StorageView<A>{GetInlinedData(), GetSize(),
                                             GetInlinedCapacity()};
  }

  A& GetAllocator() { return metadata_.template get<0>(); }

  const A& GetAllocator() const { return metadata_.template get<0>(); }

  // ---------------------------------------------------------------------------
  // Storage Member Mutators
  // ---------------------------------------------------------------------------

  Y_ABSL_ATTRIBUTE_NOINLINE void InitFrom(const Storage& other);

  template <typename ValueAdapter>
  void Initialize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Assign(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Resize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  Iterator<A> Insert(ConstIterator<A> pos, ValueAdapter values,
                     SizeType<A> insert_count);

  template <typename... Args>
  Reference<A> EmplaceBack(Args&&... args);

  Iterator<A> Erase(ConstIterator<A> from, ConstIterator<A> to);

  void Reserve(SizeType<A> requested_capacity);

  void ShrinkToFit();

  void Swap(Storage* other_storage_ptr);

  void SetIsAllocated() {
    GetSizeAndIsAllocated() |= static_cast<SizeType<A>>(1);
  }

  void UnsetIsAllocated() {
    GetSizeAndIsAllocated() &= ((std::numeric_limits<SizeType<A>>::max)() - 1);
  }

  void SetSize(SizeType<A> size) {
    GetSizeAndIsAllocated() =
        (size << 1) | static_cast<SizeType<A>>(GetIsAllocated());
  }

  void SetAllocatedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = (size << 1) | static_cast<SizeType<A>>(1);
  }

  void SetInlinedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = size << static_cast<SizeType<A>>(1);
  }

  void AddSize(SizeType<A> count) {
    GetSizeAndIsAllocated() += count << static_cast<SizeType<A>>(1);
  }

  void SubtractSize(SizeType<A> count) {
    Y_ABSL_HARDENING_ASSERT(count <= GetSize());
    GetSizeAndIsAllocated() -= count << static_cast<SizeType<A>>(1);
  }

  void SetAllocation(Allocation<A> allocation) {
    data_.allocated.allocated_data = allocation.data;
    data_.allocated.allocated_capacity = allocation.capacity;
  }

  void MemcpyFrom(const Storage& other_storage) {
    // Assumption check: it doesn't make sense to memcpy inlined elements
    // unless we know the allocator doesn't do anything fancy, and one of the
    // following holds:
    //
    //  *  The elements are trivially relocatable.
    //
    //  *  It's possible to trivially assign the elements and then destroy the
    //     source.
    //
    //  *  It's possible to trivially copy construct/assign the elements.
    //
    {
      using V = ValueType<A>;
      Y_ABSL_HARDENING_ASSERT(
          other_storage.GetIsAllocated() ||
          (std::is_same<A, std::allocator<V>>::value &&
           (
               // First case above
               y_absl::is_trivially_relocatable<V>::value ||
               // Second case above
               (y_absl::is_trivially_move_assignable<V>::value &&
                y_absl::is_trivially_destructible<V>::value) ||
               // Third case above
               (y_absl::is_trivially_copy_constructible<V>::value ||
                y_absl::is_trivially_copy_assignable<V>::value))));
    }

    GetSizeAndIsAllocated() = other_storage.GetSizeAndIsAllocated();
    data_ = other_storage.data_;
  }

  void DeallocateIfAllocated() {
    if (GetIsAllocated()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetAllocatedData(),
                                   GetAllocatedCapacity());
    }
  }

 private:
  Y_ABSL_ATTRIBUTE_NOINLINE void DestroyContents();

  using Metadata = container_internal::CompressedTuple<A, SizeType<A>>;

  struct Allocated {
    Pointer<A> allocated_data;
    SizeType<A> allocated_capacity;
  };

  // `kOptimalInlinedSize` is an automatically adjusted inlined capacity of the
  // `InlinedVector`. Sometimes, it is possible to increase the capacity (from
  // the user requested `N`) without increasing the size of the
  // `InlinedVector`.
  static constexpr size_t kOptimalInlinedSize =
      (std::max)(N, sizeof(Allocated) / sizeof(ValueType<A>));

  struct Inlined {
    alignas(ValueType<A>) char inlined_data[sizeof(
        ValueType<A>[kOptimalInlinedSize])];
  };

  union Data {
    Allocated allocated;
    Inlined inlined;
  };

  void SwapN(ElementwiseSwapPolicy, Storage* other, SizeType<A> n);
  void SwapN(ElementwiseConstructPolicy, Storage* other, SizeType<A> n);

  void SwapInlinedElements(MemcpyPolicy, Storage* other);
  template <typename NotMemcpyPolicy>
  void SwapInlinedElements(NotMemcpyPolicy, Storage* other);

  template <typename... Args>
  Y_ABSL_ATTRIBUTE_NOINLINE Reference<A> EmplaceBackSlow(Args&&... args);

  Metadata metadata_;
  Data data_;
};

template <typename T, size_t N, typename A>
void Storage<T, N, A>::DestroyContents() {
  Pointer<A> data = GetIsAllocated() ? GetAllocatedData() : GetInlinedData();
  DestroyAdapter<A>::DestroyElements(GetAllocator(), data, GetSize());
  DeallocateIfAllocated();
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::InitFrom(const Storage& other) {
  const SizeType<A> n = other.GetSize();
  Y_ABSL_HARDENING_ASSERT(n > 0);  // Empty sources handled in caller.
  ConstPointer<A> src;
  Pointer<A> dst;
  if (!other.GetIsAllocated()) {
    dst = GetInlinedData();
    src = other.GetInlinedData();
  } else {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity = ComputeCapacity(GetInlinedCapacity(), n);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    SetAllocation(allocation);
    dst = allocation.data;
    src = other.GetAllocatedData();
  }

  // Fast path: if the value type is trivially copy constructible and we know
  // the allocator doesn't do anything fancy, then we know it is legal for us
  // to simply memcpy the other vector's elements.
  if (y_absl::is_trivially_copy_constructible<ValueType<A>>::value &&
      std::is_same<A, std::allocator<ValueType<A>>>::value) {
    std::memcpy(reinterpret_cast<char*>(dst),
                reinterpret_cast<const char*>(src), n * sizeof(ValueType<A>));
  } else {
    auto values = IteratorValueAdapter<A, ConstPointer<A>>(src);
    ConstructElements<A>(GetAllocator(), dst, values, n);
  }

  GetSizeAndIsAllocated() = other.GetSizeAndIsAllocated();
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Initialize(ValueAdapter values,
                                  SizeType<A> new_size) -> void {
  // Only callable from constructors!
  Y_ABSL_HARDENING_ASSERT(!GetIsAllocated());
  Y_ABSL_HARDENING_ASSERT(GetSize() == 0);

  Pointer<A> construct_data;
  if (new_size > GetInlinedCapacity()) {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity =
        ComputeCapacity(GetInlinedCapacity(), new_size);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    construct_data = allocation.data;
    SetAllocation(allocation);
    SetIsAllocated();
  } else {
    construct_data = GetInlinedData();
  }

  ConstructElements<A>(GetAllocator(), construct_data, values, new_size);

  // Since the initial size was guaranteed to be `0` and the allocated bit is
  // already correct for either case, *adding* `new_size` gives us the correct
  // result faster than setting it directly.
  AddSize(new_size);
}
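
// Replaces the contents with `new_size` values taken from `values`. Existing
// elements are assigned over where possible; the remainder are either newly
// constructed (growing) or destroyed (shrinking). If a reallocation is
// needed, everything is constructed fresh into the new backing store.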
template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Assign(ValueAdapter values,
                              SizeType<A> new_size) -> void {
  StorageView<A> storage_view = MakeStorageView();

  AllocationTransaction<A> allocation_tx(GetAllocator());

  y_absl::Span<ValueType<A>> assign_loop;
  y_absl::Span<ValueType<A>> construct_loop;
  y_absl::Span<ValueType<A>> destroy_loop;

  if (new_size > storage_view.capacity) {
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    construct_loop = {allocation_tx.Allocate(requested_capacity), new_size};
    destroy_loop = {storage_view.data, storage_view.size};
  } else if (new_size > storage_view.size) {
    assign_loop = {storage_view.data, storage_view.size};
    construct_loop = {storage_view.data + storage_view.size,
                      new_size - storage_view.size};
  } else {
    assign_loop = {storage_view.data, new_size};
    destroy_loop = {storage_view.data + new_size, storage_view.size - new_size};
  }

  AssignElements<A>(assign_loop.data(), values, assign_loop.size());

  ConstructElements<A>(GetAllocator(), construct_loop.data(), values,
                       construct_loop.size());

  DestroyAdapter<A>::DestroyElements(GetAllocator(), destroy_loop.data(),
                                     destroy_loop.size());

  if (allocation_tx.DidAllocate()) {
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }

  SetSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Resize(ValueAdapter values,
                              SizeType<A> new_size) -> void {
  StorageView<A> storage_view = MakeStorageView();
  Pointer<A> const base = storage_view.data;
  const SizeType<A> size = storage_view.size;
  A& alloc = GetAllocator();
  if (new_size <= size) {
    // Destroy extra old elements.
    DestroyAdapter<A>::DestroyElements(alloc, base + new_size, size - new_size);
  } else if (new_size <= storage_view.capacity) {
    // Construct new elements in place.
    ConstructElements<A>(alloc, base + size, values, new_size - size);
  } else {
    // Steps:
    //  a. Allocate new backing store.
    //  b. Construct new elements in new backing store.
    //  c. Move existing elements from old backing store to new backing store.
    //  d. Destroy all elements in old backing store.
    // Use transactional wrappers for the first two steps so we can roll
    // back if necessary due to exceptions.
    AllocationTransaction<A> allocation_tx(alloc);
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    ConstructionTransaction<A> construction_tx(alloc);
    construction_tx.Construct(new_data + size, values, new_size - size);

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        (MoveIterator<A>(base)));
    ConstructElements<A>(alloc, new_data, move_values, size);

    DestroyAdapter<A>::DestroyElements(alloc, base, size);
    std::move(construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }
  SetSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Insert(ConstIterator<A> pos, ValueAdapter values,
                              SizeType<A> insert_count) -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();

  auto insert_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), pos));
  SizeType<A> insert_end_index = insert_index + insert_count;
  SizeType<A> new_size = storage_view.size + insert_count;

  if (new_size > storage_view.capacity) {
    AllocationTransaction<A> allocation_tx(GetAllocator());
    ConstructionTransaction<A> construction_tx(GetAllocator());
    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(storage_view.data));

    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    construction_tx.Construct(new_data + insert_index, values, insert_count);

    move_construction_tx.Construct(new_data, move_values, insert_index);

    ConstructElements<A>(GetAllocator(), new_data + insert_end_index,
                         move_values, storage_view.size - insert_index);

    DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                       storage_view.size);

    std::move(construction_tx).Commit();
    std::move(move_construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());

    SetAllocatedSize(new_size);
    return Iterator<A>(new_data + insert_index);
  } else {
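    // In-place branch: elements that move past the current end are
    // move-constructed into raw storage, the rest are shifted toward the back
    // with reverse-order move assignment, and the inserted values are then
    // assigned or constructed into the opened window.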
    SizeType<A> move_construction_destination_index =
        (std::max)(insert_end_index, storage_view.size);

    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_construction_values(
        MoveIterator<A>(storage_view.data +
                        (move_construction_destination_index - insert_count)));
    y_absl::Span<ValueType<A>> move_construction = {
        storage_view.data + move_construction_destination_index,
        new_size - move_construction_destination_index};

    Pointer<A> move_assignment_values = storage_view.data + insert_index;
    y_absl::Span<ValueType<A>> move_assignment = {
        storage_view.data + insert_end_index,
        move_construction_destination_index - insert_end_index};

    y_absl::Span<ValueType<A>> insert_assignment = {move_assignment_values,
                                                    move_construction.size()};

    y_absl::Span<ValueType<A>> insert_construction = {
        insert_assignment.data() + insert_assignment.size(),
        insert_count - insert_assignment.size()};

    move_construction_tx.Construct(move_construction.data(),
                                   move_construction_values,
                                   move_construction.size());
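
    // Move-assign in reverse order so that the overlapping source and
    // destination ranges don't clobber values before they have been read.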
    for (Pointer<A>
             destination = move_assignment.data() + move_assignment.size(),
             last_destination = move_assignment.data(),
             source = move_assignment_values + move_assignment.size();
         ;) {
      --destination;
      --source;
      if (destination < last_destination) break;
      *destination = std::move(*source);
    }

    AssignElements<A>(insert_assignment.data(), values,
                      insert_assignment.size());

    ConstructElements<A>(GetAllocator(), insert_construction.data(), values,
                         insert_construction.size());

    std::move(move_construction_tx).Commit();

    AddSize(insert_count);
    return Iterator<A>(storage_view.data + insert_index);
  }
}

template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  const SizeType<A> n = storage_view.size;
  if (Y_ABSL_PREDICT_TRUE(n != storage_view.capacity)) {
    // Fast path; new element fits.
    Pointer<A> last_ptr = storage_view.data + n;
    AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                  std::forward<Args>(args)...);
    AddSize(1);
    return *last_ptr;
  }
  // TODO(b/173712035): Annotate with musttail attribute to prevent regression.
  return EmplaceBackSlow(std::forward<Args>(args)...);
}

template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBackSlow(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  AllocationTransaction<A> allocation_tx(GetAllocator());
  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));
  SizeType<A> requested_capacity = NextCapacity(storage_view.capacity);
  Pointer<A> construct_data = allocation_tx.Allocate(requested_capacity);
  Pointer<A> last_ptr = construct_data + storage_view.size;

  // Construct new element.
  AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                std::forward<Args>(args)...);
  // Move elements from old backing store to new backing store.
  Y_ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), allocation_tx.GetData(), move_values,
                         storage_view.size);
  }
  Y_ABSL_INTERNAL_CATCH_ANY {
    AllocatorTraits<A>::destroy(GetAllocator(), last_ptr);
    Y_ABSL_INTERNAL_RETHROW;
  }
  // Destroy elements in old backing store.
  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
  AddSize(1);
  return *last_ptr;
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Erase(ConstIterator<A> from,
                             ConstIterator<A> to) -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();

  auto erase_size = static_cast<SizeType<A>>(std::distance(from, to));
  auto erase_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), from));
  SizeType<A> erase_end_index = erase_index + erase_size;

  // Fast path: if the value type is trivially relocatable and we know the
  // allocator doesn't do anything fancy, then we know it is legal for us to
  // simply destroy the elements in the "erasure window" (which cannot throw)
  // and then memmove downward to close the window.
  if (y_absl::is_trivially_relocatable<ValueType<A>>::value &&
      std::is_nothrow_destructible<ValueType<A>>::value &&
      std::is_same<A, std::allocator<ValueType<A>>>::value) {
    DestroyAdapter<A>::DestroyElements(
        GetAllocator(), storage_view.data + erase_index, erase_size);
    std::memmove(
        reinterpret_cast<char*>(storage_view.data + erase_index),
        reinterpret_cast<const char*>(storage_view.data + erase_end_index),
        (storage_view.size - erase_end_index) * sizeof(ValueType<A>));
  } else {
    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(storage_view.data + erase_end_index));

    AssignElements<A>(storage_view.data + erase_index, move_values,
                      storage_view.size - erase_end_index);

    DestroyAdapter<A>::DestroyElements(
        GetAllocator(), storage_view.data + (storage_view.size - erase_size),
        erase_size);
  }
  SubtractSize(erase_size);
  return Iterator<A>(storage_view.data + erase_index);
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Reserve(SizeType<A> requested_capacity) -> void {
  StorageView<A> storage_view = MakeStorageView();

  if (Y_ABSL_PREDICT_FALSE(requested_capacity <= storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  SizeType<A> new_requested_capacity =
      ComputeCapacity(storage_view.capacity, requested_capacity);
  Pointer<A> new_data = allocation_tx.Allocate(new_requested_capacity);

  ConstructElements<A>(GetAllocator(), new_data, move_values,
                       storage_view.size);

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::ShrinkToFit() -> void {
  // May only be called on allocated instances!
  Y_ABSL_HARDENING_ASSERT(GetIsAllocated());

  StorageView<A> storage_view{GetAllocatedData(), GetSize(),
                              GetAllocatedCapacity()};

  if (Y_ABSL_PREDICT_FALSE(storage_view.size == storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  Pointer<A> construct_data;
  if (storage_view.size > GetInlinedCapacity()) {
    SizeType<A> requested_capacity = storage_view.size;
    construct_data = allocation_tx.Allocate(requested_capacity);
    if (allocation_tx.GetCapacity() >= storage_view.capacity) {
      // Already using the smallest available heap allocation.
      return;
    }
  } else {
    construct_data = GetInlinedData();
  }

  Y_ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), construct_data, move_values,
                         storage_view.size);
  }
  Y_ABSL_INTERNAL_CATCH_ANY {
    SetAllocation({storage_view.data, storage_view.capacity});
    Y_ABSL_INTERNAL_RETHROW;
  }

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  MallocAdapter<A>::Deallocate(GetAllocator(), storage_view.data,
                               storage_view.capacity);

  if (allocation_tx.DidAllocate()) {
    SetAllocation(std::move(allocation_tx).Release());
  } else {
    UnsetIsAllocated();
  }
}
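
// Swaps two storages. Three cases: both heap-allocated (swap the allocation
// handles), both inlined (swap element-wise per `SwapInlinedElementsPolicy`),
// or mixed (move the inlined elements into the allocated storage's inline
// buffer, then hand the allocation across).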
template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
  using std::swap;
  Y_ABSL_HARDENING_ASSERT(this != other_storage_ptr);

  if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) {
    swap(data_.allocated, other_storage_ptr->data_.allocated);
  } else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) {
    SwapInlinedElements(SwapInlinedElementsPolicy{}, other_storage_ptr);
  } else {
    Storage* allocated_ptr = this;
    Storage* inlined_ptr = other_storage_ptr;
    if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr);

    StorageView<A> allocated_storage_view{
        allocated_ptr->GetAllocatedData(), allocated_ptr->GetSize(),
        allocated_ptr->GetAllocatedCapacity()};

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(inlined_ptr->GetInlinedData()));

    Y_ABSL_INTERNAL_TRY {
      ConstructElements<A>(inlined_ptr->GetAllocator(),
                           allocated_ptr->GetInlinedData(), move_values,
                           inlined_ptr->GetSize());
    }
    Y_ABSL_INTERNAL_CATCH_ANY {
      allocated_ptr->SetAllocation(Allocation<A>{
          allocated_storage_view.data, allocated_storage_view.capacity});
      Y_ABSL_INTERNAL_RETHROW;
    }

    DestroyAdapter<A>::DestroyElements(inlined_ptr->GetAllocator(),
                                       inlined_ptr->GetInlinedData(),
                                       inlined_ptr->GetSize());

    inlined_ptr->SetAllocation(Allocation<A>{allocated_storage_view.data,
                                             allocated_storage_view.capacity});
  }

  swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated());
  swap(GetAllocator(), other_storage_ptr->GetAllocator());
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseSwapPolicy, Storage* other,
                             SizeType<A> n) {
  std::swap_ranges(GetInlinedData(), GetInlinedData() + n,
                   other->GetInlinedData());
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseConstructPolicy, Storage* other,
                             SizeType<A> n) {
  Pointer<A> a = GetInlinedData();
  Pointer<A> b = other->GetInlinedData();
  // See note on allocators in `SwapInlinedElements`.
  A& allocator_a = GetAllocator();
  A& allocator_b = other->GetAllocator();
  for (SizeType<A> i = 0; i < n; ++i, ++a, ++b) {
    ValueType<A> tmp(std::move(*a));

    AllocatorTraits<A>::destroy(allocator_a, a);
    AllocatorTraits<A>::construct(allocator_b, a, std::move(*b));

    AllocatorTraits<A>::destroy(allocator_b, b);
    AllocatorTraits<A>::construct(allocator_a, b, std::move(tmp));
  }
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapInlinedElements(MemcpyPolicy, Storage* other) {
  Data tmp = data_;
  data_ = other->data_;
  other->data_ = tmp;
}

template <typename T, size_t N, typename A>
template <typename NotMemcpyPolicy>
void Storage<T, N, A>::SwapInlinedElements(NotMemcpyPolicy policy,
                                           Storage* other) {
  // Note: `destroy` needs to use the pre-swap allocator while `construct`
  // needs the post-swap allocator. Allocators are swapped later on, outside
  // of `SwapInlinedElements`.
  Storage* small_ptr = this;
  Storage* large_ptr = other;
  if (small_ptr->GetSize() > large_ptr->GetSize()) {
    std::swap(small_ptr, large_ptr);
  }

  auto small_size = small_ptr->GetSize();
  auto diff = large_ptr->GetSize() - small_size;
  SwapN(policy, other, small_size);

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(large_ptr->GetInlinedData() + small_size));

  ConstructElements<A>(large_ptr->GetAllocator(),
                       small_ptr->GetInlinedData() + small_size, move_values,
                       diff);

  DestroyAdapter<A>::DestroyElements(large_ptr->GetAllocator(),
                                     large_ptr->GetInlinedData() + small_size,
                                     diff);
}

// End ignore "array-bounds"
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic pop
#endif

}  // namespace inlined_vector_internal
Y_ABSL_NAMESPACE_END
}  // namespace y_absl

#endif  // Y_ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_