// Copyright 2019 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef Y_ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_
#define Y_ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_

#include <algorithm>
#include <cstddef>
#include <cstring>
#include <iterator>
#include <limits>
#include <memory>
#include <new>
#include <type_traits>
#include <utility>

#include "y_absl/base/attributes.h"
#include "y_absl/base/macros.h"
#include "y_absl/container/internal/compressed_tuple.h"
#include "y_absl/memory/memory.h"
#include "y_absl/meta/type_traits.h"
#include "y_absl/types/span.h"

namespace y_absl {
Y_ABSL_NAMESPACE_BEGIN
namespace inlined_vector_internal {

// GCC does not deal very well with the below code.
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Warray-bounds"
#endif

template <typename A>
using AllocatorTraits = std::allocator_traits<A>;
template <typename A>
using ValueType = typename AllocatorTraits<A>::value_type;
template <typename A>
using SizeType = typename AllocatorTraits<A>::size_type;
template <typename A>
using Pointer = typename AllocatorTraits<A>::pointer;
template <typename A>
using ConstPointer = typename AllocatorTraits<A>::const_pointer;
template <typename A>
using DifferenceType = typename AllocatorTraits<A>::difference_type;
template <typename A>
using Reference = ValueType<A>&;
template <typename A>
using ConstReference = const ValueType<A>&;
template <typename A>
using Iterator = Pointer<A>;
template <typename A>
using ConstIterator = ConstPointer<A>;
template <typename A>
using ReverseIterator = typename std::reverse_iterator<Iterator<A>>;
template <typename A>
using ConstReverseIterator = typename std::reverse_iterator<ConstIterator<A>>;
template <typename A>
using MoveIterator = typename std::move_iterator<Iterator<A>>;

template <typename Iterator>
using IsAtLeastForwardIterator = std::is_convertible<
    typename std::iterator_traits<Iterator>::iterator_category,
    std::forward_iterator_tag>;

template <typename A>
using IsMoveAssignOk = std::is_move_assignable<ValueType<A>>;
template <typename A>
using IsSwapOk = y_absl::type_traits_internal::IsSwappable<ValueType<A>>;

template <typename T>
struct TypeIdentity {
  using type = T;
};

// Used for function arguments in template functions to prevent ADL by forcing
// callers to explicitly specify the template parameter.
template <typename T>
using NoTypeDeduction = typename TypeIdentity<T>::type;
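
// For example (illustrative): since `NoTypeDeduction<A>` is a dependent type,
// `A` is never deduced from the arguments, so call sites must spell it out:
//
//   ConstructElements<A>(alloc, first, values, n);  // OK
//   ConstructElements(alloc, first, values, n);     // ill-formed: `A` not deducible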

template <typename A, bool IsTriviallyDestructible =
                          y_absl::is_trivially_destructible<ValueType<A>>::value>
struct DestroyAdapter;

template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ false> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    for (SizeType<A> i = destroy_size; i != 0;) {
      --i;
      AllocatorTraits<A>::destroy(allocator, destroy_first + i);
    }
  }
};

template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ true> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    static_cast<void>(allocator);
    static_cast<void>(destroy_first);
    static_cast<void>(destroy_size);
  }
};
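
// Note: the non-trivial specialization destroys elements in reverse order
// (indices size-1 down to 0), mirroring reverse-of-construction unwinding,
// while the trivial specialization is a deliberate no-op that optimizes away.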

template <typename A>
struct Allocation {
  Pointer<A> data = nullptr;
  SizeType<A> capacity = 0;
};

template <typename A,
          bool IsOverAligned =
              (alignof(ValueType<A>) > Y_ABSL_INTERNAL_DEFAULT_NEW_ALIGNMENT)>
struct MallocAdapter {
  static Allocation<A> Allocate(A& allocator, SizeType<A> requested_capacity) {
    return {AllocatorTraits<A>::allocate(allocator, requested_capacity),
            requested_capacity};
  }

  static void Deallocate(A& allocator, Pointer<A> pointer,
                         SizeType<A> capacity) {
    AllocatorTraits<A>::deallocate(allocator, pointer, capacity);
  }
};

template <typename A, typename ValueAdapter>
void ConstructElements(NoTypeDeduction<A>& allocator,
                       Pointer<A> construct_first, ValueAdapter& values,
                       SizeType<A> construct_size) {
  for (SizeType<A> i = 0; i < construct_size; ++i) {
    Y_ABSL_INTERNAL_TRY { values.ConstructNext(allocator, construct_first + i); }
    Y_ABSL_INTERNAL_CATCH_ANY {
      DestroyAdapter<A>::DestroyElements(allocator, construct_first, i);
      Y_ABSL_INTERNAL_RETHROW;
    }
  }
}

template <typename A, typename ValueAdapter>
void AssignElements(Pointer<A> assign_first, ValueAdapter& values,
                    SizeType<A> assign_size) {
  for (SizeType<A> i = 0; i < assign_size; ++i) {
    values.AssignNext(assign_first + i);
  }
}

template <typename A>
struct StorageView {
  Pointer<A> data;
  SizeType<A> size;
  SizeType<A> capacity;
};

template <typename A, typename Iterator>
class IteratorValueAdapter {
 public:
  explicit IteratorValueAdapter(const Iterator& it) : it_(it) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *it_);
    ++it_;
  }

  void AssignNext(Pointer<A> assign_at) {
    *assign_at = *it_;
    ++it_;
  }

 private:
  Iterator it_;
};

template <typename A>
class CopyValueAdapter {
 public:
  explicit CopyValueAdapter(ConstPointer<A> p) : ptr_(p) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *ptr_);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = *ptr_; }

 private:
  ConstPointer<A> ptr_;
};

template <typename A>
class DefaultValueAdapter {
 public:
  explicit DefaultValueAdapter() {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = ValueType<A>(); }
};
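
// These adapters give `ConstructElements`/`AssignElements` a uniform source of
// values: `IteratorValueAdapter` pulls successive elements from an iterator
// (including `MoveIterator` for relocation), `CopyValueAdapter` repeats one
// value, and `DefaultValueAdapter` value-initializes. For example
// (illustrative), default-constructing `n` trailing slots:
//
//   DefaultValueAdapter<A> values;
//   ConstructElements<A>(alloc, data + size, values, n);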

template <typename A>
class AllocationTransaction {
 public:
  explicit AllocationTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), capacity_(0) {}

  ~AllocationTransaction() {
    if (DidAllocate()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetData(), GetCapacity());
    }
  }

  AllocationTransaction(const AllocationTransaction&) = delete;
  void operator=(const AllocationTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetCapacity() { return capacity_; }

  bool DidAllocate() { return GetData() != nullptr; }

  Pointer<A> Allocate(SizeType<A> requested_capacity) {
    Allocation<A> result =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    GetData() = result.data;
    GetCapacity() = result.capacity;
    return result.data;
  }

  Y_ABSL_MUST_USE_RESULT Allocation<A> Release() && {
    Allocation<A> result = {GetData(), GetCapacity()};
    Reset();
    return result;
  }

 private:
  void Reset() {
    GetData() = nullptr;
    GetCapacity() = 0;
  }

  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> capacity_;
};
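
// Typical use, a sketch of the commit pattern used throughout this file:
//
//   AllocationTransaction<A> tx(alloc);
//   Pointer<A> new_data = tx.Allocate(requested_capacity);
//   ConstructElements<A>(alloc, new_data, values, size);  // may throw
//   SetAllocation(std::move(tx).Release());               // commit
//
// If construction throws before `Release()`, the transaction's destructor
// deallocates `new_data` automatically; after `Release()` it owns nothing.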

template <typename A>
class ConstructionTransaction {
 public:
  explicit ConstructionTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), size_(0) {}

  ~ConstructionTransaction() {
    if (DidConstruct()) {
      DestroyAdapter<A>::DestroyElements(GetAllocator(), GetData(), GetSize());
    }
  }

  ConstructionTransaction(const ConstructionTransaction&) = delete;
  void operator=(const ConstructionTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetSize() { return size_; }

  bool DidConstruct() { return GetData() != nullptr; }

  template <typename ValueAdapter>
  void Construct(Pointer<A> data, ValueAdapter& values, SizeType<A> size) {
    ConstructElements<A>(GetAllocator(), data, values, size);
    GetData() = data;
    GetSize() = size;
  }

  void Commit() && {
    GetData() = nullptr;
    GetSize() = 0;
  }

 private:
  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> size_;
};

template <typename T, size_t N, typename A>
class Storage {
 public:
  struct MemcpyPolicy {};
  struct ElementwiseAssignPolicy {};
  struct ElementwiseSwapPolicy {};
  struct ElementwiseConstructPolicy {};

  using MoveAssignmentPolicy = y_absl::conditional_t<
      // Fast path: if the value type can be trivially move assigned and
      // destroyed, and we know the allocator doesn't do anything fancy, then
      // it's safe for us to simply adopt the contents of the storage for
      // `other` and remove its own reference to them. It's as if we had
      // individually move-assigned each value and then destroyed the original.
      y_absl::conjunction<y_absl::is_trivially_move_assignable<ValueType<A>>,
                          y_absl::is_trivially_destructible<ValueType<A>>,
                          std::is_same<A, std::allocator<ValueType<A>>>>::value,
      MemcpyPolicy,
      // Otherwise we use move assignment if possible. If not, we simulate
      // move assignment using move construction.
      //
      // Note that this is in contrast to e.g. std::vector and std::optional,
      // which are themselves not move-assignable when their contained type is
      // not.
      y_absl::conditional_t<IsMoveAssignOk<A>::value, ElementwiseAssignPolicy,
                            ElementwiseConstructPolicy>>;

  // The policy to be used specifically when swapping inlined elements.
  using SwapInlinedElementsPolicy = y_absl::conditional_t<
      // Fast path: if the value type can be trivially move constructed/assigned
      // and destroyed, and we know the allocator doesn't do anything fancy,
      // then it's safe for us to simply swap the bytes in the inline storage.
      // It's as if we had move-constructed a temporary vector, move-assigned
      // one to the other, then move-assigned the first from the temporary.
      y_absl::conjunction<y_absl::is_trivially_move_constructible<ValueType<A>>,
                          y_absl::is_trivially_move_assignable<ValueType<A>>,
                          y_absl::is_trivially_destructible<ValueType<A>>,
                          std::is_same<A, std::allocator<ValueType<A>>>>::value,
      MemcpyPolicy,
      y_absl::conditional_t<IsSwapOk<A>::value, ElementwiseSwapPolicy,
                            ElementwiseConstructPolicy>>;

  static SizeType<A> NextCapacity(SizeType<A> current_capacity) {
    return current_capacity * 2;
  }

  static SizeType<A> ComputeCapacity(SizeType<A> current_capacity,
                                     SizeType<A> requested_capacity) {
    return (std::max)(NextCapacity(current_capacity), requested_capacity);
  }
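
  // Growth is geometric with factor 2; an explicit request larger than the
  // doubled capacity wins. E.g. (illustrative) repeated appends grow
  // 4 -> 8 -> 16 -> ..., while ComputeCapacity(/*current=*/8,
  // /*requested=*/100) == (std::max)(16, 100) == 100.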

  // ---------------------------------------------------------------------------
  // Storage Constructors and Destructor
  // ---------------------------------------------------------------------------

  Storage() : metadata_(A(), /* size and is_allocated */ 0u) {}
  explicit Storage(const A& allocator)
      : metadata_(allocator, /* size and is_allocated */ 0u) {}

  ~Storage() {
    // Fast path: if we are empty and not allocated, there's nothing to do.
    if (GetSizeAndIsAllocated() == 0) {
      return;
    }

    // Fast path: if no destructors need to be run and we know the allocator
    // doesn't do anything fancy, then all we need to do is deallocate (and
    // maybe not even that).
    if (y_absl::is_trivially_destructible<ValueType<A>>::value &&
        std::is_same<A, std::allocator<ValueType<A>>>::value) {
      DeallocateIfAllocated();
      return;
    }

    DestroyContents();
  }

  // ---------------------------------------------------------------------------
  // Storage Member Accessors
  // ---------------------------------------------------------------------------

  SizeType<A>& GetSizeAndIsAllocated() { return metadata_.template get<1>(); }
  const SizeType<A>& GetSizeAndIsAllocated() const {
    return metadata_.template get<1>();
  }

  SizeType<A> GetSize() const { return GetSizeAndIsAllocated() >> 1; }
  bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; }
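
  // The low bit of the packed word is the is-allocated flag; the remaining
  // bits are the size. E.g. (illustrative) a heap-allocated vector of size 5
  // stores (5 << 1) | 1 == 11, so GetSize() == 5 and GetIsAllocated() == true.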

  Pointer<A> GetAllocatedData() { return data_.allocated.allocated_data; }
  ConstPointer<A> GetAllocatedData() const {
    return data_.allocated.allocated_data;
  }

  // Y_ABSL_ATTRIBUTE_NO_SANITIZE_CFI is used because the memory pointed to may be
  // uninitialized, a common pattern in allocate()+construct() APIs.
  // https://clang.llvm.org/docs/ControlFlowIntegrity.html#bad-cast-checking
  // NOTE: When this was written, LLVM documentation did not explicitly
  // mention that casting `char*` and using `reinterpret_cast` qualifies
  // as a bad cast.
  Y_ABSL_ATTRIBUTE_NO_SANITIZE_CFI Pointer<A> GetInlinedData() {
    return reinterpret_cast<Pointer<A>>(data_.inlined.inlined_data);
  }
  Y_ABSL_ATTRIBUTE_NO_SANITIZE_CFI ConstPointer<A> GetInlinedData() const {
    return reinterpret_cast<ConstPointer<A>>(data_.inlined.inlined_data);
  }

  SizeType<A> GetAllocatedCapacity() const {
    return data_.allocated.allocated_capacity;
  }
  SizeType<A> GetInlinedCapacity() const {
    return static_cast<SizeType<A>>(kOptimalInlinedSize);
  }

  StorageView<A> MakeStorageView() {
    return GetIsAllocated() ? StorageView<A>{GetAllocatedData(), GetSize(),
                                             GetAllocatedCapacity()}
                            : StorageView<A>{GetInlinedData(), GetSize(),
                                             GetInlinedCapacity()};
  }

  A& GetAllocator() { return metadata_.template get<0>(); }
  const A& GetAllocator() const { return metadata_.template get<0>(); }

  // ---------------------------------------------------------------------------
  // Storage Member Mutators
  // ---------------------------------------------------------------------------

  Y_ABSL_ATTRIBUTE_NOINLINE void InitFrom(const Storage& other);

  template <typename ValueAdapter>
  void Initialize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Assign(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Resize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  Iterator<A> Insert(ConstIterator<A> pos, ValueAdapter values,
                     SizeType<A> insert_count);

  template <typename... Args>
  Reference<A> EmplaceBack(Args&&... args);

  Iterator<A> Erase(ConstIterator<A> from, ConstIterator<A> to);

  void Reserve(SizeType<A> requested_capacity);

  void ShrinkToFit();

  void Swap(Storage* other_storage_ptr);

  void SetIsAllocated() {
    GetSizeAndIsAllocated() |= static_cast<SizeType<A>>(1);
  }

  void UnsetIsAllocated() {
    GetSizeAndIsAllocated() &= ((std::numeric_limits<SizeType<A>>::max)() - 1);
  }

  void SetSize(SizeType<A> size) {
    GetSizeAndIsAllocated() =
        (size << 1) | static_cast<SizeType<A>>(GetIsAllocated());
  }

  void SetAllocatedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = (size << 1) | static_cast<SizeType<A>>(1);
  }

  void SetInlinedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = size << static_cast<SizeType<A>>(1);
  }

  void AddSize(SizeType<A> count) {
    GetSizeAndIsAllocated() += count << static_cast<SizeType<A>>(1);
  }

  void SubtractSize(SizeType<A> count) {
    Y_ABSL_HARDENING_ASSERT(count <= GetSize());
    GetSizeAndIsAllocated() -= count << static_cast<SizeType<A>>(1);
  }

  void SetAllocation(Allocation<A> allocation) {
    data_.allocated.allocated_data = allocation.data;
    data_.allocated.allocated_capacity = allocation.capacity;
  }

  void MemcpyFrom(const Storage& other_storage) {
    // Assumption check: it doesn't make sense to memcpy inlined elements unless
    // we know the allocator doesn't do anything fancy, and one of the following
    // holds:
    //
    // * The elements are trivially relocatable.
    //
    // * It's possible to trivially assign the elements and then destroy the
    //   source.
    //
    // * It's possible to trivially copy construct/assign the elements.
    //
    {
      using V = ValueType<A>;
      Y_ABSL_HARDENING_ASSERT(
          other_storage.GetIsAllocated() ||
          (std::is_same<A, std::allocator<V>>::value &&
           (
               // First case above
               y_absl::is_trivially_relocatable<V>::value ||
               // Second case above
               (y_absl::is_trivially_move_assignable<V>::value &&
                y_absl::is_trivially_destructible<V>::value) ||
               // Third case above
               (y_absl::is_trivially_copy_constructible<V>::value ||
                y_absl::is_trivially_copy_assignable<V>::value))));
    }

    GetSizeAndIsAllocated() = other_storage.GetSizeAndIsAllocated();
    data_ = other_storage.data_;
  }

  void DeallocateIfAllocated() {
    if (GetIsAllocated()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetAllocatedData(),
                                   GetAllocatedCapacity());
    }
  }

 private:
  Y_ABSL_ATTRIBUTE_NOINLINE void DestroyContents();

  using Metadata = container_internal::CompressedTuple<A, SizeType<A>>;

  struct Allocated {
    Pointer<A> allocated_data;
    SizeType<A> allocated_capacity;
  };

  // `kOptimalInlinedSize` is an automatically adjusted inlined capacity of the
  // `InlinedVector`. Sometimes, it is possible to increase the capacity (from
  // the user requested `N`) without increasing the size of the `InlinedVector`.
  static constexpr size_t kOptimalInlinedSize =
      (std::max)(N, sizeof(Allocated) / sizeof(ValueType<A>));

  struct Inlined {
    alignas(ValueType<A>) char inlined_data[sizeof(
        ValueType<A>[kOptimalInlinedSize])];
  };
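
  // E.g. (illustrative, typical 64-bit platform with `std::allocator`):
  // `Allocated` is a pointer plus a size, 16 bytes, so `InlinedVector<int, 1>`
  // gets kOptimalInlinedSize == max(1, 16 / 4) == 4 inline slots at no extra
  // cost, because the union below is at least sizeof(Allocated) regardless.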

  union Data {
    Allocated allocated;
    Inlined inlined;
  };

  void SwapN(ElementwiseSwapPolicy, Storage* other, SizeType<A> n);
  void SwapN(ElementwiseConstructPolicy, Storage* other, SizeType<A> n);

  void SwapInlinedElements(MemcpyPolicy, Storage* other);
  template <typename NotMemcpyPolicy>
  void SwapInlinedElements(NotMemcpyPolicy, Storage* other);

  template <typename... Args>
  Y_ABSL_ATTRIBUTE_NOINLINE Reference<A> EmplaceBackSlow(Args&&... args);

  Metadata metadata_;
  Data data_;
};

template <typename T, size_t N, typename A>
void Storage<T, N, A>::DestroyContents() {
  Pointer<A> data = GetIsAllocated() ? GetAllocatedData() : GetInlinedData();
  DestroyAdapter<A>::DestroyElements(GetAllocator(), data, GetSize());
  DeallocateIfAllocated();
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::InitFrom(const Storage& other) {
  const SizeType<A> n = other.GetSize();
  Y_ABSL_HARDENING_ASSERT(n > 0);  // Empty sources handled in caller.
  ConstPointer<A> src;
  Pointer<A> dst;
  if (!other.GetIsAllocated()) {
    dst = GetInlinedData();
    src = other.GetInlinedData();
  } else {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity = ComputeCapacity(GetInlinedCapacity(), n);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    SetAllocation(allocation);
    dst = allocation.data;
    src = other.GetAllocatedData();
  }

  // Fast path: if the value type is trivially copy constructible and we know
  // the allocator doesn't do anything fancy, then we know it is legal for us to
  // simply memcpy the other vector's elements.
  if (y_absl::is_trivially_copy_constructible<ValueType<A>>::value &&
      std::is_same<A, std::allocator<ValueType<A>>>::value) {
    std::memcpy(reinterpret_cast<char*>(dst),
                reinterpret_cast<const char*>(src), n * sizeof(ValueType<A>));
  } else {
    auto values = IteratorValueAdapter<A, ConstPointer<A>>(src);
    ConstructElements<A>(GetAllocator(), dst, values, n);
  }

  GetSizeAndIsAllocated() = other.GetSizeAndIsAllocated();
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Initialize(ValueAdapter values, SizeType<A> new_size)
    -> void {
  // Only callable from constructors!
  Y_ABSL_HARDENING_ASSERT(!GetIsAllocated());
  Y_ABSL_HARDENING_ASSERT(GetSize() == 0);

  Pointer<A> construct_data;
  if (new_size > GetInlinedCapacity()) {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity =
        ComputeCapacity(GetInlinedCapacity(), new_size);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    construct_data = allocation.data;
    SetAllocation(allocation);
    SetIsAllocated();
  } else {
    construct_data = GetInlinedData();
  }

  ConstructElements<A>(GetAllocator(), construct_data, values, new_size);

  // Since the initial size was guaranteed to be `0` and the allocated bit is
  // already correct for either case, *adding* `new_size` gives us the correct
  // result faster than setting it directly.
  AddSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Assign(ValueAdapter values, SizeType<A> new_size)
    -> void {
  StorageView<A> storage_view = MakeStorageView();

  AllocationTransaction<A> allocation_tx(GetAllocator());

  y_absl::Span<ValueType<A>> assign_loop;
  y_absl::Span<ValueType<A>> construct_loop;
  y_absl::Span<ValueType<A>> destroy_loop;

  if (new_size > storage_view.capacity) {
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    construct_loop = {allocation_tx.Allocate(requested_capacity), new_size};
    destroy_loop = {storage_view.data, storage_view.size};
  } else if (new_size > storage_view.size) {
    assign_loop = {storage_view.data, storage_view.size};
    construct_loop = {storage_view.data + storage_view.size,
                      new_size - storage_view.size};
  } else {
    assign_loop = {storage_view.data, new_size};
    destroy_loop = {storage_view.data + new_size, storage_view.size - new_size};
  }

  AssignElements<A>(assign_loop.data(), values, assign_loop.size());

  ConstructElements<A>(GetAllocator(), construct_loop.data(), values,
                       construct_loop.size());

  DestroyAdapter<A>::DestroyElements(GetAllocator(), destroy_loop.data(),
                                     destroy_loop.size());

  if (allocation_tx.DidAllocate()) {
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }

  SetSize(new_size);
}
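
// `Assign` partitions the work into three loops (illustrative): growing past
// capacity constructs all `new_size` elements into a fresh allocation and
// destroys the old ones; growing within capacity assigns over the existing
// elements and constructs only the tail; shrinking assigns the first
// `new_size` elements and destroys the surplus, e.g. assigning 3 values into
// a size-5 vector assigns slots [0, 3) and destroys slots [3, 5).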

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Resize(ValueAdapter values, SizeType<A> new_size)
    -> void {
  StorageView<A> storage_view = MakeStorageView();
  Pointer<A> const base = storage_view.data;
  const SizeType<A> size = storage_view.size;
  A& alloc = GetAllocator();
  if (new_size <= size) {
    // Destroy extra old elements.
    DestroyAdapter<A>::DestroyElements(alloc, base + new_size, size - new_size);
  } else if (new_size <= storage_view.capacity) {
    // Construct new elements in place.
    ConstructElements<A>(alloc, base + size, values, new_size - size);
  } else {
    // Steps:
    //  a. Allocate new backing store.
    //  b. Construct new elements in new backing store.
    //  c. Move existing elements from old backing store to new backing store.
    //  d. Destroy all elements in old backing store.
    // Use transactional wrappers for the first two steps so we can roll
    // back if necessary due to exceptions.
    AllocationTransaction<A> allocation_tx(alloc);
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    ConstructionTransaction<A> construction_tx(alloc);
    construction_tx.Construct(new_data + size, values, new_size - size);

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        (MoveIterator<A>(base)));
    ConstructElements<A>(alloc, new_data, move_values, size);

    DestroyAdapter<A>::DestroyElements(alloc, base, size);
    std::move(construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }
  SetSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Insert(ConstIterator<A> pos, ValueAdapter values,
                              SizeType<A> insert_count) -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();

  auto insert_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), pos));
  SizeType<A> insert_end_index = insert_index + insert_count;
  SizeType<A> new_size = storage_view.size + insert_count;

  if (new_size > storage_view.capacity) {
    AllocationTransaction<A> allocation_tx(GetAllocator());
    ConstructionTransaction<A> construction_tx(GetAllocator());
    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(storage_view.data));

    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    construction_tx.Construct(new_data + insert_index, values, insert_count);

    move_construction_tx.Construct(new_data, move_values, insert_index);

    ConstructElements<A>(GetAllocator(), new_data + insert_end_index,
                         move_values, storage_view.size - insert_index);

    DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                       storage_view.size);

    std::move(construction_tx).Commit();
    std::move(move_construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetAllocatedSize(new_size);
    return Iterator<A>(new_data + insert_index);
  } else {
    SizeType<A> move_construction_destination_index =
        (std::max)(insert_end_index, storage_view.size);

    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_construction_values(
        MoveIterator<A>(storage_view.data +
                        (move_construction_destination_index - insert_count)));
    y_absl::Span<ValueType<A>> move_construction = {
        storage_view.data + move_construction_destination_index,
        new_size - move_construction_destination_index};

    Pointer<A> move_assignment_values = storage_view.data + insert_index;
    y_absl::Span<ValueType<A>> move_assignment = {
        storage_view.data + insert_end_index,
        move_construction_destination_index - insert_end_index};

    y_absl::Span<ValueType<A>> insert_assignment = {move_assignment_values,
                                                    move_construction.size()};

    y_absl::Span<ValueType<A>> insert_construction = {
        insert_assignment.data() + insert_assignment.size(),
        insert_count - insert_assignment.size()};

    move_construction_tx.Construct(move_construction.data(),
                                   move_construction_values,
                                   move_construction.size());

    for (Pointer<A>
             destination = move_assignment.data() + move_assignment.size(),
             last_destination = move_assignment.data(),
             source = move_assignment_values + move_assignment.size();
         ;) {
      --destination;
      --source;
      if (destination < last_destination) break;
      *destination = std::move(*source);
    }

    AssignElements<A>(insert_assignment.data(), values,
                      insert_assignment.size());

    ConstructElements<A>(GetAllocator(), insert_construction.data(), values,
                         insert_construction.size());

    std::move(move_construction_tx).Commit();

    AddSize(insert_count);
    return Iterator<A>(storage_view.data + insert_index);
  }
}

template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  const SizeType<A> n = storage_view.size;
  if (Y_ABSL_PREDICT_TRUE(n != storage_view.capacity)) {
    // Fast path; new element fits.
    Pointer<A> last_ptr = storage_view.data + n;
    AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                  std::forward<Args>(args)...);
    AddSize(1);
    return *last_ptr;
  }
  // TODO(b/173712035): Annotate with musttail attribute to prevent regression.
  return EmplaceBackSlow(std::forward<Args>(args)...);
}

template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBackSlow(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  AllocationTransaction<A> allocation_tx(GetAllocator());
  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));
  SizeType<A> requested_capacity = NextCapacity(storage_view.capacity);
  Pointer<A> construct_data = allocation_tx.Allocate(requested_capacity);
  Pointer<A> last_ptr = construct_data + storage_view.size;

  // Construct new element.
  AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                std::forward<Args>(args)...);
  // Move elements from old backing store to new backing store.
  Y_ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), allocation_tx.GetData(), move_values,
                         storage_view.size);
  }
  Y_ABSL_INTERNAL_CATCH_ANY {
    AllocatorTraits<A>::destroy(GetAllocator(), last_ptr);
    Y_ABSL_INTERNAL_RETHROW;
  }
  // Destroy elements in old backing store.
  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
  AddSize(1);
  return *last_ptr;
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Erase(ConstIterator<A> from, ConstIterator<A> to)
    -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();

  auto erase_size = static_cast<SizeType<A>>(std::distance(from, to));
  auto erase_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), from));
  SizeType<A> erase_end_index = erase_index + erase_size;

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data + erase_end_index));

  AssignElements<A>(storage_view.data + erase_index, move_values,
                    storage_view.size - erase_end_index);

  DestroyAdapter<A>::DestroyElements(
      GetAllocator(), storage_view.data + (storage_view.size - erase_size),
      erase_size);

  SubtractSize(erase_size);
  return Iterator<A>(storage_view.data + erase_index);
}
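
// E.g. (illustrative) erasing [1, 3) from {a, b, c, d, e}: the trailing
// elements d and e are move-assigned onto slots 1 and 2, the last two
// (now moved-from) slots are destroyed, and the size drops from 5 to 3,
// leaving {a, d, e}.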

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Reserve(SizeType<A> requested_capacity) -> void {
  StorageView<A> storage_view = MakeStorageView();

  if (Y_ABSL_PREDICT_FALSE(requested_capacity <= storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  SizeType<A> new_requested_capacity =
      ComputeCapacity(storage_view.capacity, requested_capacity);
  Pointer<A> new_data = allocation_tx.Allocate(new_requested_capacity);

  ConstructElements<A>(GetAllocator(), new_data, move_values,
                       storage_view.size);

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::ShrinkToFit() -> void {
  // May only be called on allocated instances!
  Y_ABSL_HARDENING_ASSERT(GetIsAllocated());

  StorageView<A> storage_view{GetAllocatedData(), GetSize(),
                              GetAllocatedCapacity()};

  if (Y_ABSL_PREDICT_FALSE(storage_view.size == storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  Pointer<A> construct_data;
  if (storage_view.size > GetInlinedCapacity()) {
    SizeType<A> requested_capacity = storage_view.size;
    construct_data = allocation_tx.Allocate(requested_capacity);
    if (allocation_tx.GetCapacity() >= storage_view.capacity) {
      // Already using the smallest available heap allocation.
      return;
    }
  } else {
    construct_data = GetInlinedData();
  }

  Y_ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), construct_data, move_values,
                         storage_view.size);
  }
  Y_ABSL_INTERNAL_CATCH_ANY {
    SetAllocation({storage_view.data, storage_view.capacity});
    Y_ABSL_INTERNAL_RETHROW;
  }

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  MallocAdapter<A>::Deallocate(GetAllocator(), storage_view.data,
                               storage_view.capacity);

  if (allocation_tx.DidAllocate()) {
    SetAllocation(std::move(allocation_tx).Release());
  } else {
    UnsetIsAllocated();
  }
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
  using std::swap;
  Y_ABSL_HARDENING_ASSERT(this != other_storage_ptr);

  if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) {
    swap(data_.allocated, other_storage_ptr->data_.allocated);
  } else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) {
    SwapInlinedElements(SwapInlinedElementsPolicy{}, other_storage_ptr);
  } else {
    Storage* allocated_ptr = this;
    Storage* inlined_ptr = other_storage_ptr;
    if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr);

    StorageView<A> allocated_storage_view{
        allocated_ptr->GetAllocatedData(), allocated_ptr->GetSize(),
        allocated_ptr->GetAllocatedCapacity()};

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(inlined_ptr->GetInlinedData()));

    Y_ABSL_INTERNAL_TRY {
      ConstructElements<A>(inlined_ptr->GetAllocator(),
                           allocated_ptr->GetInlinedData(), move_values,
                           inlined_ptr->GetSize());
    }
    Y_ABSL_INTERNAL_CATCH_ANY {
      allocated_ptr->SetAllocation(Allocation<A>{
          allocated_storage_view.data, allocated_storage_view.capacity});
      Y_ABSL_INTERNAL_RETHROW;
    }

    DestroyAdapter<A>::DestroyElements(inlined_ptr->GetAllocator(),
                                       inlined_ptr->GetInlinedData(),
                                       inlined_ptr->GetSize());

    inlined_ptr->SetAllocation(Allocation<A>{allocated_storage_view.data,
                                             allocated_storage_view.capacity});
  }

  swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated());
  swap(GetAllocator(), other_storage_ptr->GetAllocator());
}
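
// In the mixed inlined/allocated case above (illustrative): the inlined
// side's elements are move-constructed into the allocated side's *inline*
// storage, the moved-from inlined elements are destroyed, and the saved heap
// allocation is handed to the formerly inlined side; swapping the packed
// size/allocated word and the allocators completes the exchange.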

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseSwapPolicy, Storage* other,
                             SizeType<A> n) {
  std::swap_ranges(GetInlinedData(), GetInlinedData() + n,
                   other->GetInlinedData());
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseConstructPolicy, Storage* other,
                             SizeType<A> n) {
  Pointer<A> a = GetInlinedData();
  Pointer<A> b = other->GetInlinedData();
  // See note on allocators in `SwapInlinedElements`.
  A& allocator_a = GetAllocator();
  A& allocator_b = other->GetAllocator();
  for (SizeType<A> i = 0; i < n; ++i, ++a, ++b) {
    ValueType<A> tmp(std::move(*a));

    AllocatorTraits<A>::destroy(allocator_a, a);
    AllocatorTraits<A>::construct(allocator_b, a, std::move(*b));

    AllocatorTraits<A>::destroy(allocator_b, b);
    AllocatorTraits<A>::construct(allocator_a, b, std::move(tmp));
  }
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapInlinedElements(MemcpyPolicy, Storage* other) {
  Data tmp = data_;
  data_ = other->data_;
  other->data_ = tmp;
}

template <typename T, size_t N, typename A>
template <typename NotMemcpyPolicy>
void Storage<T, N, A>::SwapInlinedElements(NotMemcpyPolicy policy,
                                           Storage* other) {
  // Note: `destroy` needs to use the pre-swap allocator while `construct`
  // needs the post-swap allocator. The allocators themselves are swapped
  // later, outside of `SwapInlinedElements`.
  Storage* small_ptr = this;
  Storage* large_ptr = other;
  if (small_ptr->GetSize() > large_ptr->GetSize()) {
    std::swap(small_ptr, large_ptr);
  }

  auto small_size = small_ptr->GetSize();
  auto diff = large_ptr->GetSize() - small_size;
  SwapN(policy, other, small_size);

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(large_ptr->GetInlinedData() + small_size));

  ConstructElements<A>(large_ptr->GetAllocator(),
                       small_ptr->GetInlinedData() + small_size, move_values,
                       diff);

  DestroyAdapter<A>::DestroyElements(large_ptr->GetAllocator(),
                                     large_ptr->GetInlinedData() + small_size,
                                     diff);
}

// End ignore "array-bounds"
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic pop
#endif

}  // namespace inlined_vector_internal
Y_ABSL_NAMESPACE_END
}  // namespace y_absl

#endif  // Y_ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_