
// Copyright 2019 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef Y_ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_
#define Y_ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_

#include <algorithm>
#include <cstddef>
#include <cstring>
#include <iterator>
#include <limits>
#include <memory>
#include <new>
#include <type_traits>
#include <utility>

#include "y_absl/base/attributes.h"
#include "y_absl/base/config.h"
#include "y_absl/base/macros.h"
#include "y_absl/container/internal/compressed_tuple.h"
#include "y_absl/memory/memory.h"
#include "y_absl/meta/type_traits.h"
#include "y_absl/types/span.h"

namespace y_absl {
Y_ABSL_NAMESPACE_BEGIN
namespace inlined_vector_internal {

// GCC does not deal very well with the below code.
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Warray-bounds"
#endif

template <typename A>
using AllocatorTraits = std::allocator_traits<A>;
template <typename A>
using ValueType = typename AllocatorTraits<A>::value_type;
template <typename A>
using SizeType = typename AllocatorTraits<A>::size_type;
template <typename A>
using Pointer = typename AllocatorTraits<A>::pointer;
template <typename A>
using ConstPointer = typename AllocatorTraits<A>::const_pointer;
template <typename A>
using DifferenceType = typename AllocatorTraits<A>::difference_type;
template <typename A>
using Reference = ValueType<A>&;
template <typename A>
using ConstReference = const ValueType<A>&;
template <typename A>
using Iterator = Pointer<A>;
template <typename A>
using ConstIterator = ConstPointer<A>;
template <typename A>
using ReverseIterator = typename std::reverse_iterator<Iterator<A>>;
template <typename A>
using ConstReverseIterator = typename std::reverse_iterator<ConstIterator<A>>;
template <typename A>
using MoveIterator = typename std::move_iterator<Iterator<A>>;

template <typename Iterator>
using IsAtLeastForwardIterator = std::is_convertible<
    typename std::iterator_traits<Iterator>::iterator_category,
    std::forward_iterator_tag>;

template <typename A>
using IsMoveAssignOk = std::is_move_assignable<ValueType<A>>;
template <typename A>
using IsSwapOk = y_absl::type_traits_internal::IsSwappable<ValueType<A>>;

template <typename T>
struct TypeIdentity {
  using type = T;
};

// Used for function arguments in template functions to prevent ADL by forcing
// callers to explicitly specify the template parameter.
template <typename T>
using NoTypeDeduction = typename TypeIdentity<T>::type;
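
// Illustrative sketch (editorial, not part of the original header): `T` in a
// `NoTypeDeduction<T>` parameter sits in a non-deduced context, so callers
// must spell out the template argument explicitly:
//
//   template <typename T>
//   void Reset(NoTypeDeduction<T>& v);  // hypothetical helper
//
//   int x = 0;
//   Reset<int>(x);  // OK: `T` named explicitly
//   // Reset(x);    // ill-formed: `T` cannot be deduced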

template <typename A, bool IsTriviallyDestructible =
                          y_absl::is_trivially_destructible<ValueType<A>>::value>
struct DestroyAdapter;

template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ false> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    for (SizeType<A> i = destroy_size; i != 0;) {
      --i;
      AllocatorTraits<A>::destroy(allocator, destroy_first + i);
    }
  }
};

template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ true> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    static_cast<void>(allocator);
    static_cast<void>(destroy_first);
    static_cast<void>(destroy_size);
  }
};
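
// Illustrative sketch (editorial, not part of the original header): which
// specialization is chosen depends only on whether `~ValueType<A>()` is
// trivial, so destroying a range of e.g. `int` compiles down to nothing:
//
//   std::allocator<int> a;
//   int buf[4] = {1, 2, 3, 4};
//   DestroyAdapter<std::allocator<int>>::DestroyElements(a, buf, 4);  // no-op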

template <typename A>
struct Allocation {
  Pointer<A> data = nullptr;
  SizeType<A> capacity = 0;
};

template <typename A,
          bool IsOverAligned =
              (alignof(ValueType<A>) > Y_ABSL_INTERNAL_DEFAULT_NEW_ALIGNMENT)>
struct MallocAdapter {
  static Allocation<A> Allocate(A& allocator, SizeType<A> requested_capacity) {
    return {AllocatorTraits<A>::allocate(allocator, requested_capacity),
            requested_capacity};
  }

  static void Deallocate(A& allocator, Pointer<A> pointer,
                         SizeType<A> capacity) {
    AllocatorTraits<A>::deallocate(allocator, pointer, capacity);
  }
};

template <typename A, typename ValueAdapter>
void ConstructElements(NoTypeDeduction<A>& allocator,
                       Pointer<A> construct_first, ValueAdapter& values,
                       SizeType<A> construct_size) {
  for (SizeType<A> i = 0; i < construct_size; ++i) {
    Y_ABSL_INTERNAL_TRY { values.ConstructNext(allocator, construct_first + i); }
    Y_ABSL_INTERNAL_CATCH_ANY {
      DestroyAdapter<A>::DestroyElements(allocator, construct_first, i);
      Y_ABSL_INTERNAL_RETHROW;
    }
  }
}
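
// Editorial note (not in the original header): if constructing element `i`
// throws, the `i` elements already constructed are destroyed before the
// exception is rethrown. For example, if element 3 of 5 throws, exactly
// elements 0, 1, and 2 are destroyed, so the caller never observes a
// half-built destination range.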

template <typename A, typename ValueAdapter>
void AssignElements(Pointer<A> assign_first, ValueAdapter& values,
                    SizeType<A> assign_size) {
  for (SizeType<A> i = 0; i < assign_size; ++i) {
    values.AssignNext(assign_first + i);
  }
}

template <typename A>
struct StorageView {
  Pointer<A> data;
  SizeType<A> size;
  SizeType<A> capacity;
};

template <typename A, typename Iterator>
class IteratorValueAdapter {
 public:
  explicit IteratorValueAdapter(const Iterator& it) : it_(it) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *it_);
    ++it_;
  }

  void AssignNext(Pointer<A> assign_at) {
    *assign_at = *it_;
    ++it_;
  }

 private:
  Iterator it_;
};

template <typename A>
class CopyValueAdapter {
 public:
  explicit CopyValueAdapter(ConstPointer<A> p) : ptr_(p) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *ptr_);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = *ptr_; }

 private:
  ConstPointer<A> ptr_;
};

template <typename A>
class DefaultValueAdapter {
 public:
  explicit DefaultValueAdapter() {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = ValueType<A>(); }
};
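
// Illustrative sketch (editorial, not part of the original header): the three
// adapters above share one protocol -- `ConstructNext`/`AssignNext` -- so the
// same `ConstructElements`/`AssignElements` loop serves copies from a range,
// repeated copies of one value, and value-initialization:
//
//   using Alloc = std::allocator<int>;
//   Alloc a;
//   int src[3] = {1, 2, 3};
//   IteratorValueAdapter<Alloc, const int*> from_range(src);
//   CopyValueAdapter<Alloc> n_copies(&src[0]);
//   DefaultValueAdapter<Alloc> zeros;
//   // Each works as: ConstructElements<Alloc>(a, dst, adapter, 3);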

// RAII helper: owns a heap allocation until `Release()` transfers ownership to
// the caller; if never released, the destructor deallocates.
template <typename A>
class AllocationTransaction {
 public:
  explicit AllocationTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), capacity_(0) {}

  ~AllocationTransaction() {
    if (DidAllocate()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetData(), GetCapacity());
    }
  }

  AllocationTransaction(const AllocationTransaction&) = delete;
  void operator=(const AllocationTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetCapacity() { return capacity_; }

  bool DidAllocate() { return GetData() != nullptr; }

  Pointer<A> Allocate(SizeType<A> requested_capacity) {
    Allocation<A> result =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    GetData() = result.data;
    GetCapacity() = result.capacity;
    return result.data;
  }

  Y_ABSL_MUST_USE_RESULT Allocation<A> Release() && {
    Allocation<A> result = {GetData(), GetCapacity()};
    Reset();
    return result;
  }

 private:
  void Reset() {
    GetData() = nullptr;
    GetCapacity() = 0;
  }

  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> capacity_;
};
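
// Illustrative usage sketch (editorial, not part of the original header):
// allocate through the transaction, do potentially throwing work, and only
// then commit via `Release()`; if the work throws first, the destructor frees
// the memory. `alloc` below is a hypothetical allocator variable.
//
//   AllocationTransaction<std::allocator<int>> tx(alloc);
//   int* p = tx.Allocate(16);
//   // ... construct elements into `p` (may throw) ...
//   Allocation<std::allocator<int>> owned = std::move(tx).Release();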

// RAII helper: tracks a range of constructed elements and destroys them in
// its destructor unless ownership is committed via `Commit()`.
template <typename A>
class ConstructionTransaction {
 public:
  explicit ConstructionTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), size_(0) {}

  ~ConstructionTransaction() {
    if (DidConstruct()) {
      DestroyAdapter<A>::DestroyElements(GetAllocator(), GetData(), GetSize());
    }
  }

  ConstructionTransaction(const ConstructionTransaction&) = delete;
  void operator=(const ConstructionTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetSize() { return size_; }

  bool DidConstruct() { return GetData() != nullptr; }

  template <typename ValueAdapter>
  void Construct(Pointer<A> data, ValueAdapter& values, SizeType<A> size) {
    ConstructElements<A>(GetAllocator(), data, values, size);
    GetData() = data;
    GetSize() = size;
  }

  void Commit() && {
    GetData() = nullptr;
    GetSize() = 0;
  }

 private:
  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> size_;
};

template <typename T, size_t N, typename A>
class Storage {
 public:
  struct MemcpyPolicy {};
  struct ElementwiseAssignPolicy {};
  struct ElementwiseSwapPolicy {};
  struct ElementwiseConstructPolicy {};

  using MoveAssignmentPolicy = y_absl::conditional_t<
      // Fast path: if the value type can be trivially move assigned and
      // destroyed, and we know the allocator doesn't do anything fancy, then
      // it's safe for us to simply adopt the contents of the storage for
      // `other` and remove its own reference to them. It's as if we had
      // individually move-assigned each value and then destroyed the original.
      y_absl::conjunction<y_absl::is_trivially_move_assignable<ValueType<A>>,
                          y_absl::is_trivially_destructible<ValueType<A>>,
                          std::is_same<A, std::allocator<ValueType<A>>>>::value,
      MemcpyPolicy,
      // Otherwise we use move assignment if possible. If not, we simulate
      // move assignment using move construction.
      //
      // Note that this is in contrast to e.g. std::vector and std::optional,
      // which are themselves not move-assignable when their contained type is
      // not.
      y_absl::conditional_t<IsMoveAssignOk<A>::value, ElementwiseAssignPolicy,
                            ElementwiseConstructPolicy>>;

  // The policy to be used specifically when swapping inlined elements.
  using SwapInlinedElementsPolicy = y_absl::conditional_t<
      // Fast path: if the value type can be trivially move
      // constructed/assigned and destroyed, and we know the allocator doesn't
      // do anything fancy, then it's safe for us to simply swap the bytes in
      // the inline storage. It's as if we had move-constructed a temporary
      // vector, move-assigned one to the other, then move-assigned the first
      // from the temporary.
      y_absl::conjunction<y_absl::is_trivially_move_constructible<ValueType<A>>,
                          y_absl::is_trivially_move_assignable<ValueType<A>>,
                          y_absl::is_trivially_destructible<ValueType<A>>,
                          std::is_same<A, std::allocator<ValueType<A>>>>::value,
      MemcpyPolicy,
      y_absl::conditional_t<IsSwapOk<A>::value, ElementwiseSwapPolicy,
                            ElementwiseConstructPolicy>>;

  static SizeType<A> NextCapacity(SizeType<A> current_capacity) {
    return current_capacity * 2;
  }

  static SizeType<A> ComputeCapacity(SizeType<A> current_capacity,
                                     SizeType<A> requested_capacity) {
    return (std::max)(NextCapacity(current_capacity), requested_capacity);
  }
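
  // Worked example (editorial, not in the original header): growth at least
  // doubles the current capacity, but a larger explicit request wins. With a
  // current capacity of 4:
  //
  //   ComputeCapacity(4, 5);   // max(8, 5)  == 8  (normal doubling)
  //   ComputeCapacity(4, 10);  // max(8, 10) == 10 (request exceeds doubling)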

  // ---------------------------------------------------------------------------
  // Storage Constructors and Destructor
  // ---------------------------------------------------------------------------

  Storage() : metadata_(A(), /* size and is_allocated */ 0u) {}

  explicit Storage(const A& allocator)
      : metadata_(allocator, /* size and is_allocated */ 0u) {}

  ~Storage() {
    // Fast path: if we are empty and not allocated, there's nothing to do.
    if (GetSizeAndIsAllocated() == 0) {
      return;
    }

    // Fast path: if no destructors need to be run and we know the allocator
    // doesn't do anything fancy, then all we need to do is deallocate (and
    // maybe not even that).
    if (y_absl::is_trivially_destructible<ValueType<A>>::value &&
        std::is_same<A, std::allocator<ValueType<A>>>::value) {
      DeallocateIfAllocated();
      return;
    }

    DestroyContents();
  }

  // ---------------------------------------------------------------------------
  // Storage Member Accessors
  // ---------------------------------------------------------------------------

  SizeType<A>& GetSizeAndIsAllocated() { return metadata_.template get<1>(); }

  const SizeType<A>& GetSizeAndIsAllocated() const {
    return metadata_.template get<1>();
  }

  SizeType<A> GetSize() const { return GetSizeAndIsAllocated() >> 1; }

  bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; }

  Pointer<A> GetAllocatedData() {
    // GCC 12 has a false-positive -Wmaybe-uninitialized warning here.
#if Y_ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
#endif
    return data_.allocated.allocated_data;
#if Y_ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
#pragma GCC diagnostic pop
#endif
  }

  ConstPointer<A> GetAllocatedData() const {
    return data_.allocated.allocated_data;
  }

  // Y_ABSL_ATTRIBUTE_NO_SANITIZE_CFI is used because the memory pointed to
  // may be uninitialized, a common pattern in allocate()+construct() APIs.
  // https://clang.llvm.org/docs/ControlFlowIntegrity.html#bad-cast-checking
  //
  // NOTE: When this was written, LLVM documentation did not explicitly
  // mention that casting `char*` and using `reinterpret_cast` qualifies
  // as a bad cast.
  Y_ABSL_ATTRIBUTE_NO_SANITIZE_CFI Pointer<A> GetInlinedData() {
    return reinterpret_cast<Pointer<A>>(data_.inlined.inlined_data);
  }

  Y_ABSL_ATTRIBUTE_NO_SANITIZE_CFI ConstPointer<A> GetInlinedData() const {
    return reinterpret_cast<ConstPointer<A>>(data_.inlined.inlined_data);
  }

  SizeType<A> GetAllocatedCapacity() const {
    return data_.allocated.allocated_capacity;
  }

  SizeType<A> GetInlinedCapacity() const {
    return static_cast<SizeType<A>>(kOptimalInlinedSize);
  }

  StorageView<A> MakeStorageView() {
    return GetIsAllocated() ? StorageView<A>{GetAllocatedData(), GetSize(),
                                             GetAllocatedCapacity()}
                            : StorageView<A>{GetInlinedData(), GetSize(),
                                             GetInlinedCapacity()};
  }

  A& GetAllocator() { return metadata_.template get<0>(); }

  const A& GetAllocator() const { return metadata_.template get<0>(); }

  // ---------------------------------------------------------------------------
  // Storage Member Mutators
  // ---------------------------------------------------------------------------

  Y_ABSL_ATTRIBUTE_NOINLINE void InitFrom(const Storage& other);

  template <typename ValueAdapter>
  void Initialize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Assign(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Resize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  Iterator<A> Insert(ConstIterator<A> pos, ValueAdapter values,
                     SizeType<A> insert_count);

  template <typename... Args>
  Reference<A> EmplaceBack(Args&&... args);

  Iterator<A> Erase(ConstIterator<A> from, ConstIterator<A> to);

  void Reserve(SizeType<A> requested_capacity);

  void ShrinkToFit();

  void Swap(Storage* other_storage_ptr);

  void SetIsAllocated() {
    GetSizeAndIsAllocated() |= static_cast<SizeType<A>>(1);
  }

  void UnsetIsAllocated() {
    GetSizeAndIsAllocated() &= ((std::numeric_limits<SizeType<A>>::max)() - 1);
  }

  void SetSize(SizeType<A> size) {
    GetSizeAndIsAllocated() =
        (size << 1) | static_cast<SizeType<A>>(GetIsAllocated());
  }

  void SetAllocatedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = (size << 1) | static_cast<SizeType<A>>(1);
  }

  void SetInlinedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = size << static_cast<SizeType<A>>(1);
  }

  void AddSize(SizeType<A> count) {
    GetSizeAndIsAllocated() += count << static_cast<SizeType<A>>(1);
  }

  void SubtractSize(SizeType<A> count) {
    Y_ABSL_HARDENING_ASSERT(count <= GetSize());
    GetSizeAndIsAllocated() -= count << static_cast<SizeType<A>>(1);
  }
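
  // Worked example (editorial, not in the original header): the size and the
  // is-allocated flag share one word -- the size in the high bits, the flag
  // in bit 0. For a heap-allocated vector of size 5:
  //
  //   GetSizeAndIsAllocated() == (5 << 1) | 1 == 11  (binary 1011)
  //   GetSize()        == 11 >> 1 == 5
  //   GetIsAllocated() == 11 & 1  == 1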

  void SetAllocation(Allocation<A> allocation) {
    data_.allocated.allocated_data = allocation.data;
    data_.allocated.allocated_capacity = allocation.capacity;
  }

  void MemcpyFrom(const Storage& other_storage) {
    // Assumption check: it doesn't make sense to memcpy inlined elements
    // unless we know the allocator doesn't do anything fancy, and one of the
    // following holds:
    //
    //  *  The elements are trivially relocatable.
    //
    //  *  It's possible to trivially assign the elements and then destroy the
    //     source.
    //
    //  *  It's possible to trivially copy construct/assign the elements.
    //
    {
      using V = ValueType<A>;
      Y_ABSL_HARDENING_ASSERT(
          other_storage.GetIsAllocated() ||
          (std::is_same<A, std::allocator<V>>::value &&
           (
               // First case above
               y_absl::is_trivially_relocatable<V>::value ||
               // Second case above
               (y_absl::is_trivially_move_assignable<V>::value &&
                y_absl::is_trivially_destructible<V>::value) ||
               // Third case above
               (y_absl::is_trivially_copy_constructible<V>::value ||
                y_absl::is_trivially_copy_assignable<V>::value))));
    }

    GetSizeAndIsAllocated() = other_storage.GetSizeAndIsAllocated();
    data_ = other_storage.data_;
  }

  void DeallocateIfAllocated() {
    if (GetIsAllocated()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetAllocatedData(),
                                   GetAllocatedCapacity());
    }
  }

 private:
  Y_ABSL_ATTRIBUTE_NOINLINE void DestroyContents();

  using Metadata = container_internal::CompressedTuple<A, SizeType<A>>;

  struct Allocated {
    Pointer<A> allocated_data;
    SizeType<A> allocated_capacity;
  };

  // `kOptimalInlinedSize` is an automatically adjusted inlined capacity of
  // the `InlinedVector`. Sometimes, it is possible to increase the capacity
  // (from the user requested `N`) without increasing the size of the
  // `InlinedVector`.
  static constexpr size_t kOptimalInlinedSize =
      (std::max)(N, sizeof(Allocated) / sizeof(ValueType<A>));

  struct Inlined {
    alignas(ValueType<A>) char inlined_data[sizeof(
        ValueType<A>[kOptimalInlinedSize])];
  };

  union Data {
    Allocated allocated;
    Inlined inlined;
  };

  void SwapN(ElementwiseSwapPolicy, Storage* other, SizeType<A> n);
  void SwapN(ElementwiseConstructPolicy, Storage* other, SizeType<A> n);

  void SwapInlinedElements(MemcpyPolicy, Storage* other);
  template <typename NotMemcpyPolicy>
  void SwapInlinedElements(NotMemcpyPolicy, Storage* other);

  template <typename... Args>
  Y_ABSL_ATTRIBUTE_NOINLINE Reference<A> EmplaceBackSlow(Args&&... args);

  Metadata metadata_;
  Data data_;
};
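
// Worked example (editorial, not in the original header): the inline buffer
// is never smaller than the heap bookkeeping it is unioned with. On a typical
// 64-bit target, `sizeof(Allocated)` is 16 bytes (pointer + size), so
// `InlinedVector<char, 4>` gets an inline capacity of max(4, 16 / 1) == 16
// elements at no extra object size.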

template <typename T, size_t N, typename A>
void Storage<T, N, A>::DestroyContents() {
  Pointer<A> data = GetIsAllocated() ? GetAllocatedData() : GetInlinedData();
  DestroyAdapter<A>::DestroyElements(GetAllocator(), data, GetSize());
  DeallocateIfAllocated();
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::InitFrom(const Storage& other) {
  const SizeType<A> n = other.GetSize();
  Y_ABSL_HARDENING_ASSERT(n > 0);  // Empty sources are handled in the caller.
  ConstPointer<A> src;
  Pointer<A> dst;
  if (!other.GetIsAllocated()) {
    dst = GetInlinedData();
    src = other.GetInlinedData();
  } else {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity = ComputeCapacity(GetInlinedCapacity(), n);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    SetAllocation(allocation);
    dst = allocation.data;
    src = other.GetAllocatedData();
  }

  // Fast path: if the value type is trivially copy constructible and we know
  // the allocator doesn't do anything fancy, then we know it is legal for us
  // to simply memcpy the other vector's elements.
  if (y_absl::is_trivially_copy_constructible<ValueType<A>>::value &&
      std::is_same<A, std::allocator<ValueType<A>>>::value) {
    std::memcpy(reinterpret_cast<char*>(dst),
                reinterpret_cast<const char*>(src), n * sizeof(ValueType<A>));
  } else {
    auto values = IteratorValueAdapter<A, ConstPointer<A>>(src);
    ConstructElements<A>(GetAllocator(), dst, values, n);
  }

  GetSizeAndIsAllocated() = other.GetSizeAndIsAllocated();
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Initialize(ValueAdapter values, SizeType<A> new_size)
    -> void {
  // Only callable from constructors!
  Y_ABSL_HARDENING_ASSERT(!GetIsAllocated());
  Y_ABSL_HARDENING_ASSERT(GetSize() == 0);

  Pointer<A> construct_data;
  if (new_size > GetInlinedCapacity()) {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity =
        ComputeCapacity(GetInlinedCapacity(), new_size);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    construct_data = allocation.data;
    SetAllocation(allocation);
    SetIsAllocated();
  } else {
    construct_data = GetInlinedData();
  }

  ConstructElements<A>(GetAllocator(), construct_data, values, new_size);

  // Since the initial size was guaranteed to be `0` and the allocated bit is
  // already correct for either case, *adding* `new_size` gives us the correct
  // result faster than setting it directly.
  AddSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Assign(ValueAdapter values, SizeType<A> new_size)
    -> void {
  StorageView<A> storage_view = MakeStorageView();

  AllocationTransaction<A> allocation_tx(GetAllocator());

  y_absl::Span<ValueType<A>> assign_loop;
  y_absl::Span<ValueType<A>> construct_loop;
  y_absl::Span<ValueType<A>> destroy_loop;

  if (new_size > storage_view.capacity) {
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    construct_loop = {allocation_tx.Allocate(requested_capacity), new_size};
    destroy_loop = {storage_view.data, storage_view.size};
  } else if (new_size > storage_view.size) {
    assign_loop = {storage_view.data, storage_view.size};
    construct_loop = {storage_view.data + storage_view.size,
                      new_size - storage_view.size};
  } else {
    assign_loop = {storage_view.data, new_size};
    destroy_loop = {storage_view.data + new_size, storage_view.size - new_size};
  }

  AssignElements<A>(assign_loop.data(), values, assign_loop.size());

  ConstructElements<A>(GetAllocator(), construct_loop.data(), values,
                       construct_loop.size());

  DestroyAdapter<A>::DestroyElements(GetAllocator(), destroy_loop.data(),
                                     destroy_loop.size());

  if (allocation_tx.DidAllocate()) {
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }

  SetSize(new_size);
}
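
// Worked example (editorial, not in the original header): assigning 5 values
// into a vector with size 3 and capacity 8 takes the middle branch above: the
// 3 existing elements are assigned over, the remaining 2 values are
// constructed in place at `data + 3`, and nothing is destroyed or
// reallocated. The final size is 5.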

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Resize(ValueAdapter values, SizeType<A> new_size)
    -> void {
  StorageView<A> storage_view = MakeStorageView();
  Pointer<A> const base = storage_view.data;
  const SizeType<A> size = storage_view.size;
  A& alloc = GetAllocator();

  if (new_size <= size) {
    // Destroy extra old elements.
    DestroyAdapter<A>::DestroyElements(alloc, base + new_size, size - new_size);
  } else if (new_size <= storage_view.capacity) {
    // Construct new elements in place.
    ConstructElements<A>(alloc, base + size, values, new_size - size);
  } else {
    // Steps:
    //  a. Allocate new backing store.
    //  b. Construct new elements in new backing store.
    //  c. Move existing elements from old backing store to new backing store.
    //  d. Destroy all elements in old backing store.
    // Use transactional wrappers for the first two steps so we can roll
    // back if necessary due to exceptions.
    AllocationTransaction<A> allocation_tx(alloc);
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    ConstructionTransaction<A> construction_tx(alloc);
    construction_tx.Construct(new_data + size, values, new_size - size);

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        (MoveIterator<A>(base)));

    ConstructElements<A>(alloc, new_data, move_values, size);

    DestroyAdapter<A>::DestroyElements(alloc, base, size);
    std::move(construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }

  SetSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Insert(ConstIterator<A> pos, ValueAdapter values,
                              SizeType<A> insert_count) -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();

  auto insert_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), pos));
  SizeType<A> insert_end_index = insert_index + insert_count;
  SizeType<A> new_size = storage_view.size + insert_count;

  if (new_size > storage_view.capacity) {
    AllocationTransaction<A> allocation_tx(GetAllocator());
    ConstructionTransaction<A> construction_tx(GetAllocator());
    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(storage_view.data));

    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    construction_tx.Construct(new_data + insert_index, values, insert_count);

    move_construction_tx.Construct(new_data, move_values, insert_index);

    ConstructElements<A>(GetAllocator(), new_data + insert_end_index,
                         move_values, storage_view.size - insert_index);

    DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                       storage_view.size);

    std::move(construction_tx).Commit();
    std::move(move_construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());

    SetAllocatedSize(new_size);
    return Iterator<A>(new_data + insert_index);
  } else {
    SizeType<A> move_construction_destination_index =
        (std::max)(insert_end_index, storage_view.size);

    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_construction_values(
        MoveIterator<A>(storage_view.data +
                        (move_construction_destination_index - insert_count)));
    y_absl::Span<ValueType<A>> move_construction = {
        storage_view.data + move_construction_destination_index,
        new_size - move_construction_destination_index};

    Pointer<A> move_assignment_values = storage_view.data + insert_index;
    y_absl::Span<ValueType<A>> move_assignment = {
        storage_view.data + insert_end_index,
        move_construction_destination_index - insert_end_index};

    y_absl::Span<ValueType<A>> insert_assignment = {move_assignment_values,
                                                    move_construction.size()};

    y_absl::Span<ValueType<A>> insert_construction = {
        insert_assignment.data() + insert_assignment.size(),
        insert_count - insert_assignment.size()};

    move_construction_tx.Construct(move_construction.data(),
                                   move_construction_values,
                                   move_construction.size());

    for (Pointer<A>
             destination = move_assignment.data() + move_assignment.size(),
             last_destination = move_assignment.data(),
             source = move_assignment_values + move_assignment.size();
         ;) {
      --destination;
      --source;
      if (destination < last_destination) break;
      *destination = std::move(*source);
    }

    AssignElements<A>(insert_assignment.data(), values,
                      insert_assignment.size());

    ConstructElements<A>(GetAllocator(), insert_construction.data(), values,
                         insert_construction.size());

    std::move(move_construction_tx).Commit();

    AddSize(insert_count);
    return Iterator<A>(storage_view.data + insert_index);
  }
}
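
// Worked example (editorial, not in the original header): inserting 2 values
// at index 1 of a size-4, capacity-8 vector takes the in-place branch above:
// the elements at indices 2 and 3 are move-constructed two slots right into
// the uninitialized slots 4 and 5, the element at index 1 is move-assigned
// into slot 3, and the 2 new values are assigned into slots 1 and 2. The size
// becomes 6 and the returned iterator points at slot 1.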

template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  const SizeType<A> n = storage_view.size;
  if (Y_ABSL_PREDICT_TRUE(n != storage_view.capacity)) {
    // Fast path; new element fits.
    Pointer<A> last_ptr = storage_view.data + n;
    AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                  std::forward<Args>(args)...);
    AddSize(1);
    return *last_ptr;
  }
  // TODO(b/173712035): Annotate with musttail attribute to prevent regression.
  return EmplaceBackSlow(std::forward<Args>(args)...);
}

template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBackSlow(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  AllocationTransaction<A> allocation_tx(GetAllocator());
  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));
  SizeType<A> requested_capacity = NextCapacity(storage_view.capacity);
  Pointer<A> construct_data = allocation_tx.Allocate(requested_capacity);
  Pointer<A> last_ptr = construct_data + storage_view.size;

  // Construct new element.
  AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                std::forward<Args>(args)...);
  // Move elements from old backing store to new backing store.
  Y_ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), allocation_tx.GetData(), move_values,
                         storage_view.size);
  }
  Y_ABSL_INTERNAL_CATCH_ANY {
    AllocatorTraits<A>::destroy(GetAllocator(), last_ptr);
    Y_ABSL_INTERNAL_RETHROW;
  }
  // Destroy elements in old backing store.
  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
  AddSize(1);
  return *last_ptr;
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Erase(ConstIterator<A> from, ConstIterator<A> to)
    -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();
  auto erase_size = static_cast<SizeType<A>>(std::distance(from, to));
  auto erase_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), from));
  SizeType<A> erase_end_index = erase_index + erase_size;

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data + erase_end_index));

  AssignElements<A>(storage_view.data + erase_index, move_values,
                    storage_view.size - erase_end_index);

  DestroyAdapter<A>::DestroyElements(
      GetAllocator(), storage_view.data + (storage_view.size - erase_size),
      erase_size);

  SubtractSize(erase_size);
  return Iterator<A>(storage_view.data + erase_index);
}
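
// Worked example (editorial, not in the original header): erasing the range
// [1, 3) from {a, b, c, d, e} move-assigns d and e onto slots 1 and 2,
// destroys the two now-moved-from tail elements in slots 3 and 4, and yields
// {a, d, e} with the returned iterator at slot 1.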

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Reserve(SizeType<A> requested_capacity) -> void {
  StorageView<A> storage_view = MakeStorageView();

  if (Y_ABSL_PREDICT_FALSE(requested_capacity <= storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  SizeType<A> new_requested_capacity =
      ComputeCapacity(storage_view.capacity, requested_capacity);
  Pointer<A> new_data = allocation_tx.Allocate(new_requested_capacity);

  ConstructElements<A>(GetAllocator(), new_data, move_values,
                       storage_view.size);

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::ShrinkToFit() -> void {
  // May only be called on allocated instances!
  Y_ABSL_HARDENING_ASSERT(GetIsAllocated());

  StorageView<A> storage_view{GetAllocatedData(), GetSize(),
                              GetAllocatedCapacity()};

  if (Y_ABSL_PREDICT_FALSE(storage_view.size == storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  Pointer<A> construct_data;
  if (storage_view.size > GetInlinedCapacity()) {
    SizeType<A> requested_capacity = storage_view.size;
    construct_data = allocation_tx.Allocate(requested_capacity);
    if (allocation_tx.GetCapacity() >= storage_view.capacity) {
      // Already using the smallest available heap allocation.
      return;
    }
  } else {
    construct_data = GetInlinedData();
  }

  Y_ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), construct_data, move_values,
                         storage_view.size);
  }
  Y_ABSL_INTERNAL_CATCH_ANY {
    SetAllocation({storage_view.data, storage_view.capacity});
    Y_ABSL_INTERNAL_RETHROW;
  }

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  MallocAdapter<A>::Deallocate(GetAllocator(), storage_view.data,
                               storage_view.capacity);

  if (allocation_tx.DidAllocate()) {
    SetAllocation(std::move(allocation_tx).Release());
  } else {
    UnsetIsAllocated();
  }
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
  using std::swap;
  Y_ABSL_HARDENING_ASSERT(this != other_storage_ptr);

  if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) {
    swap(data_.allocated, other_storage_ptr->data_.allocated);
  } else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) {
    SwapInlinedElements(SwapInlinedElementsPolicy{}, other_storage_ptr);
  } else {
    Storage* allocated_ptr = this;
    Storage* inlined_ptr = other_storage_ptr;
    if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr);

    StorageView<A> allocated_storage_view{
        allocated_ptr->GetAllocatedData(), allocated_ptr->GetSize(),
        allocated_ptr->GetAllocatedCapacity()};

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(inlined_ptr->GetInlinedData()));

    Y_ABSL_INTERNAL_TRY {
      ConstructElements<A>(inlined_ptr->GetAllocator(),
                           allocated_ptr->GetInlinedData(), move_values,
                           inlined_ptr->GetSize());
    }
    Y_ABSL_INTERNAL_CATCH_ANY {
      allocated_ptr->SetAllocation(Allocation<A>{
          allocated_storage_view.data, allocated_storage_view.capacity});
      Y_ABSL_INTERNAL_RETHROW;
    }

    DestroyAdapter<A>::DestroyElements(inlined_ptr->GetAllocator(),
                                       inlined_ptr->GetInlinedData(),
                                       inlined_ptr->GetSize());

    inlined_ptr->SetAllocation(Allocation<A>{allocated_storage_view.data,
                                             allocated_storage_view.capacity});
  }

  swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated());
  swap(GetAllocator(), other_storage_ptr->GetAllocator());
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseSwapPolicy, Storage* other,
                             SizeType<A> n) {
  std::swap_ranges(GetInlinedData(), GetInlinedData() + n,
                   other->GetInlinedData());
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseConstructPolicy, Storage* other,
                             SizeType<A> n) {
  Pointer<A> a = GetInlinedData();
  Pointer<A> b = other->GetInlinedData();
  // See the note on allocators in `SwapInlinedElements`.
  A& allocator_a = GetAllocator();
  A& allocator_b = other->GetAllocator();
  for (SizeType<A> i = 0; i < n; ++i, ++a, ++b) {
    ValueType<A> tmp(std::move(*a));

    AllocatorTraits<A>::destroy(allocator_a, a);
    AllocatorTraits<A>::construct(allocator_b, a, std::move(*b));

    AllocatorTraits<A>::destroy(allocator_b, b);
    AllocatorTraits<A>::construct(allocator_a, b, std::move(tmp));
  }
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapInlinedElements(MemcpyPolicy, Storage* other) {
  Data tmp = data_;
  data_ = other->data_;
  other->data_ = tmp;
}

template <typename T, size_t N, typename A>
template <typename NotMemcpyPolicy>
void Storage<T, N, A>::SwapInlinedElements(NotMemcpyPolicy policy,
                                           Storage* other) {
  // Note: `destroy` must use the pre-swap allocator while `construct` must
  // use the post-swap allocator. The allocators themselves are swapped later
  // on, outside of `SwapInlinedElements`.
  Storage* small_ptr = this;
  Storage* large_ptr = other;
  if (small_ptr->GetSize() > large_ptr->GetSize()) {
    std::swap(small_ptr, large_ptr);
  }

  auto small_size = small_ptr->GetSize();
  auto diff = large_ptr->GetSize() - small_size;
  SwapN(policy, other, small_size);

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(large_ptr->GetInlinedData() + small_size));

  ConstructElements<A>(large_ptr->GetAllocator(),
                       small_ptr->GetInlinedData() + small_size, move_values,
                       diff);

  DestroyAdapter<A>::DestroyElements(large_ptr->GetAllocator(),
                                     large_ptr->GetInlinedData() + small_size,
                                     diff);
}

// End ignore "array-bounds"
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic pop
#endif

}  // namespace inlined_vector_internal
Y_ABSL_NAMESPACE_END
}  // namespace y_absl

#endif  // Y_ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_