inlined_vector.h

// Copyright 2019 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_
#define ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_

#include <algorithm>
#include <cstddef>
#include <cstring>
#include <iterator>
#include <limits>
#include <memory>
#include <new>
#include <type_traits>
#include <utility>

#include "absl/base/attributes.h"
#include "absl/base/config.h"
#include "absl/base/internal/identity.h"
#include "absl/base/macros.h"
#include "absl/container/internal/compressed_tuple.h"
#include "absl/memory/memory.h"
#include "absl/meta/type_traits.h"
#include "absl/types/span.h"

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace inlined_vector_internal {

// GCC does not deal very well with the below code
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Warray-bounds"
#endif
template <typename A>
using AllocatorTraits = std::allocator_traits<A>;
template <typename A>
using ValueType = typename AllocatorTraits<A>::value_type;
template <typename A>
using SizeType = typename AllocatorTraits<A>::size_type;
template <typename A>
using Pointer = typename AllocatorTraits<A>::pointer;
template <typename A>
using ConstPointer = typename AllocatorTraits<A>::const_pointer;
template <typename A>
using DifferenceType = typename AllocatorTraits<A>::difference_type;
template <typename A>
using Reference = ValueType<A>&;
template <typename A>
using ConstReference = const ValueType<A>&;
template <typename A>
using Iterator = Pointer<A>;
template <typename A>
using ConstIterator = ConstPointer<A>;
template <typename A>
using ReverseIterator = typename std::reverse_iterator<Iterator<A>>;
template <typename A>
using ConstReverseIterator = typename std::reverse_iterator<ConstIterator<A>>;
template <typename A>
using MoveIterator = typename std::move_iterator<Iterator<A>>;

template <typename Iterator>
using IsAtLeastForwardIterator = std::is_convertible<
    typename std::iterator_traits<Iterator>::iterator_category,
    std::forward_iterator_tag>;

template <typename A>
using IsMoveAssignOk = std::is_move_assignable<ValueType<A>>;
template <typename A>
using IsSwapOk = absl::type_traits_internal::IsSwappable<ValueType<A>>;
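
// Illustrative (editor's note, not upstream API): for the common default
// `A = std::allocator<int>`, these aliases resolve to the familiar types:
//
//   ValueType<A>  -> int
//   Pointer<A>    -> int*
//   SizeType<A>   -> std::size_t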

template <typename A,
          bool IsTriviallyDestructible =
              absl::is_trivially_destructible<ValueType<A>>::value &&
              std::is_same<A, std::allocator<ValueType<A>>>::value>
struct DestroyAdapter;

template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ false> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    for (SizeType<A> i = destroy_size; i != 0;) {
      --i;
      AllocatorTraits<A>::destroy(allocator, destroy_first + i);
    }
  }
};

template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ true> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    static_cast<void>(allocator);
    static_cast<void>(destroy_first);
    static_cast<void>(destroy_size);
  }
};
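
// Illustrative: for a non-trivial `T`, destroying a 3-element range at `p`
// runs `destroy(p + 2)`, then `destroy(p + 1)`, then `destroy(p + 0)`, i.e.
// the reverse of construction order; for trivially destructible types with
// the default allocator the call compiles away entirely.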

template <typename A>
struct Allocation {
  Pointer<A> data = nullptr;
  SizeType<A> capacity = 0;
};

template <typename A,
          bool IsOverAligned =
              (alignof(ValueType<A>) > ABSL_INTERNAL_DEFAULT_NEW_ALIGNMENT)>
struct MallocAdapter {
  static Allocation<A> Allocate(A& allocator, SizeType<A> requested_capacity) {
    return {AllocatorTraits<A>::allocate(allocator, requested_capacity),
            requested_capacity};
  }

  static void Deallocate(A& allocator, Pointer<A> pointer,
                         SizeType<A> capacity) {
    AllocatorTraits<A>::deallocate(allocator, pointer, capacity);
  }
};

template <typename A, typename ValueAdapter>
void ConstructElements(absl::internal::type_identity_t<A>& allocator,
                       Pointer<A> construct_first, ValueAdapter& values,
                       SizeType<A> construct_size) {
  for (SizeType<A> i = 0; i < construct_size; ++i) {
    ABSL_INTERNAL_TRY { values.ConstructNext(allocator, construct_first + i); }
    ABSL_INTERNAL_CATCH_ANY {
      DestroyAdapter<A>::DestroyElements(allocator, construct_first, i);
      ABSL_INTERNAL_RETHROW;
    }
  }
}
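
// Illustrative: if constructing element `i` throws, the already-constructed
// prefix `[construct_first, construct_first + i)` is destroyed before the
// exception is rethrown, so callers never observe a partially constructed
// range.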

template <typename A, typename ValueAdapter>
void AssignElements(Pointer<A> assign_first, ValueAdapter& values,
                    SizeType<A> assign_size) {
  for (SizeType<A> i = 0; i < assign_size; ++i) {
    values.AssignNext(assign_first + i);
  }
}

template <typename A>
struct StorageView {
  Pointer<A> data;
  SizeType<A> size;
  SizeType<A> capacity;
};

template <typename A, typename Iterator>
class IteratorValueAdapter {
 public:
  explicit IteratorValueAdapter(const Iterator& it) : it_(it) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *it_);
    ++it_;
  }

  void AssignNext(Pointer<A> assign_at) {
    *assign_at = *it_;
    ++it_;
  }

 private:
  Iterator it_;
};

template <typename A>
class CopyValueAdapter {
 public:
  explicit CopyValueAdapter(ConstPointer<A> p) : ptr_(p) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *ptr_);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = *ptr_; }

 private:
  ConstPointer<A> ptr_;
};

template <typename A>
class DefaultValueAdapter {
 public:
  explicit DefaultValueAdapter() {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = ValueType<A>(); }
};
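
// The three adapters above model the informal "ValueAdapter" concept consumed
// by `ConstructElements(...)` and `AssignElements(...)`:
// `ConstructNext(allocator, p)` placement-constructs the next value at `p`
// and `AssignNext(p)` assigns it to `*p`, with each call advancing any
// internal state (e.g. the wrapped iterator).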

template <typename A>
class AllocationTransaction {
 public:
  explicit AllocationTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), capacity_(0) {}

  ~AllocationTransaction() {
    if (DidAllocate()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetData(), GetCapacity());
    }
  }

  AllocationTransaction(const AllocationTransaction&) = delete;
  void operator=(const AllocationTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetCapacity() { return capacity_; }

  bool DidAllocate() { return GetData() != nullptr; }

  Pointer<A> Allocate(SizeType<A> requested_capacity) {
    Allocation<A> result =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    GetData() = result.data;
    GetCapacity() = result.capacity;
    return result.data;
  }

  ABSL_MUST_USE_RESULT Allocation<A> Release() && {
    Allocation<A> result = {GetData(), GetCapacity()};
    Reset();
    return result;
  }

 private:
  void Reset() {
    GetData() = nullptr;
    GetCapacity() = 0;
  }

  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> capacity_;
};
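
// Usage sketch (illustrative, with hypothetical locals `alloc`, `values`,
// and `n`): allocate through the transaction so that an exception thrown
// before `Release()` deallocates automatically.
//
//   AllocationTransaction<A> tx(alloc);
//   Pointer<A> p = tx.Allocate(n);               // owned by `tx`
//   ConstructElements<A>(alloc, p, values, n);   // may throw; `tx` rolls back
//   Allocation<A> a = std::move(tx).Release();   // commit: `tx` owns nothing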

template <typename A>
class ConstructionTransaction {
 public:
  explicit ConstructionTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), size_(0) {}

  ~ConstructionTransaction() {
    if (DidConstruct()) {
      DestroyAdapter<A>::DestroyElements(GetAllocator(), GetData(), GetSize());
    }
  }

  ConstructionTransaction(const ConstructionTransaction&) = delete;
  void operator=(const ConstructionTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetSize() { return size_; }

  bool DidConstruct() { return GetData() != nullptr; }

  template <typename ValueAdapter>
  void Construct(Pointer<A> data, ValueAdapter& values, SizeType<A> size) {
    ConstructElements<A>(GetAllocator(), data, values, size);
    GetData() = data;
    GetSize() = size;
  }

  void Commit() && {
    GetData() = nullptr;
    GetSize() = 0;
  }

 private:
  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> size_;
};
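
// `ConstructionTransaction` is the element-wise analogue of
// `AllocationTransaction`: elements built via `Construct(...)` are destroyed
// by the destructor unless `std::move(tx).Commit()` runs first.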

template <typename T, size_t N, typename A>
class Storage {
 public:
  struct MemcpyPolicy {};
  struct ElementwiseAssignPolicy {};
  struct ElementwiseSwapPolicy {};
  struct ElementwiseConstructPolicy {};

  using MoveAssignmentPolicy = absl::conditional_t<
      // Fast path: if the value type can be trivially move assigned and
      // destroyed, and we know the allocator doesn't do anything fancy, then
      // it's safe for us to simply adopt the contents of the storage for
      // `other` and remove its own reference to them. It's as if we had
      // individually move-assigned each value and then destroyed the original.
      absl::conjunction<absl::is_trivially_move_assignable<ValueType<A>>,
                        absl::is_trivially_destructible<ValueType<A>>,
                        std::is_same<A, std::allocator<ValueType<A>>>>::value,
      MemcpyPolicy,
      // Otherwise we use move assignment if possible. If not, we simulate
      // move assignment using move construction.
      //
      // Note that this is in contrast to e.g. std::vector and std::optional,
      // which are themselves not move-assignable when their contained type is
      // not.
      absl::conditional_t<IsMoveAssignOk<A>::value, ElementwiseAssignPolicy,
                          ElementwiseConstructPolicy>>;

  // The policy to be used specifically when swapping inlined elements.
  using SwapInlinedElementsPolicy = absl::conditional_t<
      // Fast path: if the value type can be trivially relocated, and we
      // know the allocator doesn't do anything fancy, then it's safe for us
      // to simply swap the bytes in the inline storage. It's as if we had
      // relocated the first vector's elements into temporary storage,
      // relocated the second's elements into the (now-empty) first's,
      // and then relocated from temporary storage into the second.
      absl::conjunction<absl::is_trivially_relocatable<ValueType<A>>,
                        std::is_same<A, std::allocator<ValueType<A>>>>::value,
      MemcpyPolicy,
      absl::conditional_t<IsSwapOk<A>::value, ElementwiseSwapPolicy,
                          ElementwiseConstructPolicy>>;

  static SizeType<A> NextCapacity(SizeType<A> current_capacity) {
    return current_capacity * 2;
  }

  static SizeType<A> ComputeCapacity(SizeType<A> current_capacity,
                                     SizeType<A> requested_capacity) {
    return (std::max)(NextCapacity(current_capacity), requested_capacity);
  }
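
  // Worked example (illustrative): growth doubles the current capacity unless
  // the caller needs more, so `ComputeCapacity(4, 5)` yields max(8, 5) == 8
  // while `ComputeCapacity(4, 100)` yields max(8, 100) == 100.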

  // ---------------------------------------------------------------------------
  // Storage Constructors and Destructor
  // ---------------------------------------------------------------------------

  Storage() : metadata_(A(), /* size and is_allocated */ 0u) {}

  explicit Storage(const A& allocator)
      : metadata_(allocator, /* size and is_allocated */ 0u) {}

  ~Storage() {
    // Fast path: if we are empty and not allocated, there's nothing to do.
    if (GetSizeAndIsAllocated() == 0) {
      return;
    }

    // Fast path: if no destructors need to be run and we know the allocator
    // doesn't do anything fancy, then all we need to do is deallocate (and
    // maybe not even that).
    if (absl::is_trivially_destructible<ValueType<A>>::value &&
        std::is_same<A, std::allocator<ValueType<A>>>::value) {
      DeallocateIfAllocated();
      return;
    }

    DestroyContents();
  }

  // ---------------------------------------------------------------------------
  // Storage Member Accessors
  // ---------------------------------------------------------------------------

  SizeType<A>& GetSizeAndIsAllocated() { return metadata_.template get<1>(); }

  const SizeType<A>& GetSizeAndIsAllocated() const {
    return metadata_.template get<1>();
  }

  SizeType<A> GetSize() const { return GetSizeAndIsAllocated() >> 1; }

  bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; }

  Pointer<A> GetAllocatedData() {
    // GCC 12 has a false-positive -Wmaybe-uninitialized warning here.
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
#endif
    return data_.allocated.allocated_data;
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
#pragma GCC diagnostic pop
#endif
  }

  ConstPointer<A> GetAllocatedData() const {
    return data_.allocated.allocated_data;
  }

  // ABSL_ATTRIBUTE_NO_SANITIZE_CFI is used because the memory pointed to may be
  // uninitialized, a common pattern in allocate()+construct() APIs.
  // https://clang.llvm.org/docs/ControlFlowIntegrity.html#bad-cast-checking
  // NOTE: When this was written, LLVM documentation did not explicitly
  // mention that casting `char*` and using `reinterpret_cast` qualifies
  // as a bad cast.
  ABSL_ATTRIBUTE_NO_SANITIZE_CFI Pointer<A> GetInlinedData() {
    return reinterpret_cast<Pointer<A>>(data_.inlined.inlined_data);
  }

  ABSL_ATTRIBUTE_NO_SANITIZE_CFI ConstPointer<A> GetInlinedData() const {
    return reinterpret_cast<ConstPointer<A>>(data_.inlined.inlined_data);
  }

  SizeType<A> GetAllocatedCapacity() const {
    return data_.allocated.allocated_capacity;
  }

  SizeType<A> GetInlinedCapacity() const {
    return static_cast<SizeType<A>>(kOptimalInlinedSize);
  }

  StorageView<A> MakeStorageView() {
    return GetIsAllocated() ? StorageView<A>{GetAllocatedData(), GetSize(),
                                             GetAllocatedCapacity()}
                            : StorageView<A>{GetInlinedData(), GetSize(),
                                             GetInlinedCapacity()};
  }

  A& GetAllocator() { return metadata_.template get<0>(); }

  const A& GetAllocator() const { return metadata_.template get<0>(); }

  // ---------------------------------------------------------------------------
  // Storage Member Mutators
  // ---------------------------------------------------------------------------

  ABSL_ATTRIBUTE_NOINLINE void InitFrom(const Storage& other);

  template <typename ValueAdapter>
  void Initialize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Assign(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Resize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  Iterator<A> Insert(ConstIterator<A> pos, ValueAdapter values,
                     SizeType<A> insert_count);

  template <typename... Args>
  Reference<A> EmplaceBack(Args&&... args);

  Iterator<A> Erase(ConstIterator<A> from, ConstIterator<A> to);

  void Reserve(SizeType<A> requested_capacity);

  void ShrinkToFit();

  void Swap(Storage* other_storage_ptr);

  void SetIsAllocated() {
    GetSizeAndIsAllocated() |= static_cast<SizeType<A>>(1);
  }

  void UnsetIsAllocated() {
    GetSizeAndIsAllocated() &= ((std::numeric_limits<SizeType<A>>::max)() - 1);
  }

  void SetSize(SizeType<A> size) {
    GetSizeAndIsAllocated() =
        (size << 1) | static_cast<SizeType<A>>(GetIsAllocated());
  }

  void SetAllocatedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = (size << 1) | static_cast<SizeType<A>>(1);
  }

  void SetInlinedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = size << static_cast<SizeType<A>>(1);
  }

  void AddSize(SizeType<A> count) {
    GetSizeAndIsAllocated() += count << static_cast<SizeType<A>>(1);
  }

  void SubtractSize(SizeType<A> count) {
    ABSL_HARDENING_ASSERT(count <= GetSize());
    GetSizeAndIsAllocated() -= count << static_cast<SizeType<A>>(1);
  }
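
  // Worked example of the packed representation (illustrative): the low bit
  // stores `is_allocated` and the remaining bits store the size, so
  // `SetAllocatedSize(5)` stores (5 << 1) | 1 == 11, from which `GetSize()`
  // recovers 11 >> 1 == 5 and `GetIsAllocated()` recovers 11 & 1 == 1.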

  void SetAllocation(Allocation<A> allocation) {
    data_.allocated.allocated_data = allocation.data;
    data_.allocated.allocated_capacity = allocation.capacity;
  }

  void MemcpyFrom(const Storage& other_storage) {
    // Assumption check: it doesn't make sense to memcpy inlined elements unless
    // we know the allocator doesn't do anything fancy, and one of the following
    // holds:
    //
    //  *  The elements are trivially relocatable.
    //
    //  *  It's possible to trivially assign the elements and then destroy the
    //     source.
    //
    //  *  It's possible to trivially copy construct/assign the elements.
    //
    {
      using V = ValueType<A>;
      ABSL_HARDENING_ASSERT(
          other_storage.GetIsAllocated() ||
          (std::is_same<A, std::allocator<V>>::value &&
           (
               // First case above
               absl::is_trivially_relocatable<V>::value ||
               // Second case above
               (absl::is_trivially_move_assignable<V>::value &&
                absl::is_trivially_destructible<V>::value) ||
               // Third case above
               (absl::is_trivially_copy_constructible<V>::value ||
                absl::is_trivially_copy_assignable<V>::value))));
    }

    GetSizeAndIsAllocated() = other_storage.GetSizeAndIsAllocated();
    data_ = other_storage.data_;
  }

  void DeallocateIfAllocated() {
    if (GetIsAllocated()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetAllocatedData(),
                                   GetAllocatedCapacity());
    }
  }

 private:
  ABSL_ATTRIBUTE_NOINLINE void DestroyContents();

  using Metadata = container_internal::CompressedTuple<A, SizeType<A>>;

  struct Allocated {
    Pointer<A> allocated_data;
    SizeType<A> allocated_capacity;
  };

  // `kOptimalInlinedSize` is an automatically adjusted inlined capacity of the
  // `InlinedVector`. Sometimes, it is possible to increase the capacity (from
  // the user requested `N`) without increasing the size of the `InlinedVector`.
  static constexpr size_t kOptimalInlinedSize =
      (std::max)(N, sizeof(Allocated) / sizeof(ValueType<A>));
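
  // Illustrative: the inline buffer shares a union slot with `Allocated`
  // (a pointer plus a capacity), so with `T = char` and `N = 1` on a typical
  // 64-bit target where `sizeof(Allocated)` is 16, `kOptimalInlinedSize` is
  // 16 and the vector gets 16 bytes of inline capacity "for free". The exact
  // value depends on the target's pointer and `size_type` widths.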

  struct Inlined {
    alignas(ValueType<A>) char inlined_data[sizeof(
        ValueType<A>[kOptimalInlinedSize])];
  };

  union Data {
    Allocated allocated;
    Inlined inlined;
  };

  void SwapN(ElementwiseSwapPolicy, Storage* other, SizeType<A> n);
  void SwapN(ElementwiseConstructPolicy, Storage* other, SizeType<A> n);

  void SwapInlinedElements(MemcpyPolicy, Storage* other);
  template <typename NotMemcpyPolicy>
  void SwapInlinedElements(NotMemcpyPolicy, Storage* other);

  template <typename... Args>
  ABSL_ATTRIBUTE_NOINLINE Reference<A> EmplaceBackSlow(Args&&... args);

  Metadata metadata_;
  Data data_;
};

template <typename T, size_t N, typename A>
void Storage<T, N, A>::DestroyContents() {
  Pointer<A> data = GetIsAllocated() ? GetAllocatedData() : GetInlinedData();
  DestroyAdapter<A>::DestroyElements(GetAllocator(), data, GetSize());
  DeallocateIfAllocated();
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::InitFrom(const Storage& other) {
  const SizeType<A> n = other.GetSize();
  ABSL_HARDENING_ASSERT(n > 0);  // Empty sources handled in caller.
  ConstPointer<A> src;
  Pointer<A> dst;
  if (!other.GetIsAllocated()) {
    dst = GetInlinedData();
    src = other.GetInlinedData();
  } else {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity = ComputeCapacity(GetInlinedCapacity(), n);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    SetAllocation(allocation);
    dst = allocation.data;
    src = other.GetAllocatedData();
  }

  // Fast path: if the value type is trivially copy constructible and we know
  // the allocator doesn't do anything fancy, then we know it is legal for us to
  // simply memcpy the other vector's elements.
  if (absl::is_trivially_copy_constructible<ValueType<A>>::value &&
      std::is_same<A, std::allocator<ValueType<A>>>::value) {
    std::memcpy(reinterpret_cast<char*>(dst),
                reinterpret_cast<const char*>(src), n * sizeof(ValueType<A>));
  } else {
    auto values = IteratorValueAdapter<A, ConstPointer<A>>(src);
    ConstructElements<A>(GetAllocator(), dst, values, n);
  }

  GetSizeAndIsAllocated() = other.GetSizeAndIsAllocated();
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Initialize(ValueAdapter values,
                                  SizeType<A> new_size) -> void {
  // Only callable from constructors!
  ABSL_HARDENING_ASSERT(!GetIsAllocated());
  ABSL_HARDENING_ASSERT(GetSize() == 0);

  Pointer<A> construct_data;
  if (new_size > GetInlinedCapacity()) {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity =
        ComputeCapacity(GetInlinedCapacity(), new_size);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    construct_data = allocation.data;
    SetAllocation(allocation);
    SetIsAllocated();
  } else {
    construct_data = GetInlinedData();
  }

  ConstructElements<A>(GetAllocator(), construct_data, values, new_size);

  // Since the initial size was guaranteed to be `0` and the allocated bit is
  // already correct for either case, *adding* `new_size` gives us the correct
  // result faster than setting it directly.
  AddSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Assign(ValueAdapter values,
                              SizeType<A> new_size) -> void {
  StorageView<A> storage_view = MakeStorageView();

  AllocationTransaction<A> allocation_tx(GetAllocator());

  absl::Span<ValueType<A>> assign_loop;
  absl::Span<ValueType<A>> construct_loop;
  absl::Span<ValueType<A>> destroy_loop;

  if (new_size > storage_view.capacity) {
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    construct_loop = {allocation_tx.Allocate(requested_capacity), new_size};
    destroy_loop = {storage_view.data, storage_view.size};
  } else if (new_size > storage_view.size) {
    assign_loop = {storage_view.data, storage_view.size};
    construct_loop = {storage_view.data + storage_view.size,
                      new_size - storage_view.size};
  } else {
    assign_loop = {storage_view.data, new_size};
    destroy_loop = {storage_view.data + new_size, storage_view.size - new_size};
  }

  AssignElements<A>(assign_loop.data(), values, assign_loop.size());

  ConstructElements<A>(GetAllocator(), construct_loop.data(), values,
                       construct_loop.size());

  DestroyAdapter<A>::DestroyElements(GetAllocator(), destroy_loop.data(),
                                     destroy_loop.size());

  if (allocation_tx.DidAllocate()) {
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }

  SetSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Resize(ValueAdapter values,
                              SizeType<A> new_size) -> void {
  StorageView<A> storage_view = MakeStorageView();
  Pointer<A> const base = storage_view.data;
  const SizeType<A> size = storage_view.size;
  A& alloc = GetAllocator();
  if (new_size <= size) {
    // Destroy extra old elements.
    DestroyAdapter<A>::DestroyElements(alloc, base + new_size, size - new_size);
  } else if (new_size <= storage_view.capacity) {
    // Construct new elements in place.
    ConstructElements<A>(alloc, base + size, values, new_size - size);
  } else {
    // Steps:
    //  a. Allocate new backing store.
    //  b. Construct new elements in new backing store.
    //  c. Move existing elements from old backing store to new backing store.
    //  d. Destroy all elements in old backing store.
    // Use transactional wrappers for the first two steps so we can roll
    // back if necessary due to exceptions.
    AllocationTransaction<A> allocation_tx(alloc);
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    ConstructionTransaction<A> construction_tx(alloc);
    construction_tx.Construct(new_data + size, values, new_size - size);

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        (MoveIterator<A>(base)));
    ConstructElements<A>(alloc, new_data, move_values, size);

    DestroyAdapter<A>::DestroyElements(alloc, base, size);
    std::move(construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }
  SetSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Insert(ConstIterator<A> pos, ValueAdapter values,
                              SizeType<A> insert_count) -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();

  auto insert_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), pos));
  SizeType<A> insert_end_index = insert_index + insert_count;
  SizeType<A> new_size = storage_view.size + insert_count;

  if (new_size > storage_view.capacity) {
    AllocationTransaction<A> allocation_tx(GetAllocator());
    ConstructionTransaction<A> construction_tx(GetAllocator());
    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(storage_view.data));

    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    construction_tx.Construct(new_data + insert_index, values, insert_count);

    move_construction_tx.Construct(new_data, move_values, insert_index);

    ConstructElements<A>(GetAllocator(), new_data + insert_end_index,
                         move_values, storage_view.size - insert_index);

    DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                       storage_view.size);

    std::move(construction_tx).Commit();
    std::move(move_construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());

    SetAllocatedSize(new_size);
    return Iterator<A>(new_data + insert_index);
  } else {
    SizeType<A> move_construction_destination_index =
        (std::max)(insert_end_index, storage_view.size);

    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_construction_values(
        MoveIterator<A>(storage_view.data +
                        (move_construction_destination_index - insert_count)));
    absl::Span<ValueType<A>> move_construction = {
        storage_view.data + move_construction_destination_index,
        new_size - move_construction_destination_index};

    Pointer<A> move_assignment_values = storage_view.data + insert_index;
    absl::Span<ValueType<A>> move_assignment = {
        storage_view.data + insert_end_index,
        move_construction_destination_index - insert_end_index};

    absl::Span<ValueType<A>> insert_assignment = {move_assignment_values,
                                                  move_construction.size()};

    absl::Span<ValueType<A>> insert_construction = {
        insert_assignment.data() + insert_assignment.size(),
        insert_count - insert_assignment.size()};

    move_construction_tx.Construct(move_construction.data(),
                                   move_construction_values,
                                   move_construction.size());

    for (Pointer<A>
             destination = move_assignment.data() + move_assignment.size(),
             last_destination = move_assignment.data(),
             source = move_assignment_values + move_assignment.size();
         ;) {
      --destination;
      --source;
      if (destination < last_destination) break;
      *destination = std::move(*source);
    }

    AssignElements<A>(insert_assignment.data(), values,
                      insert_assignment.size());

    ConstructElements<A>(GetAllocator(), insert_construction.data(), values,
                         insert_construction.size());

    std::move(move_construction_tx).Commit();

    AddSize(insert_count);
    return Iterator<A>(storage_view.data + insert_index);
  }
}

template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  const SizeType<A> n = storage_view.size;
  if (ABSL_PREDICT_TRUE(n != storage_view.capacity)) {
    // Fast path; new element fits.
    Pointer<A> last_ptr = storage_view.data + n;
    AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                  std::forward<Args>(args)...);
    AddSize(1);
    return *last_ptr;
  }
  // TODO(b/173712035): Annotate with musttail attribute to prevent regression.
  return EmplaceBackSlow(std::forward<Args>(args)...);
}
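
// Note (illustrative): the slow path below grows the backing store via
// `NextCapacity`, i.e. by doubling, which is what keeps `EmplaceBack`
// amortized constant time.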

template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBackSlow(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  AllocationTransaction<A> allocation_tx(GetAllocator());
  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));
  SizeType<A> requested_capacity = NextCapacity(storage_view.capacity);
  Pointer<A> construct_data = allocation_tx.Allocate(requested_capacity);
  Pointer<A> last_ptr = construct_data + storage_view.size;

  // Construct new element.
  AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                std::forward<Args>(args)...);
  // Move elements from old backing store to new backing store.
  ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), allocation_tx.GetData(), move_values,
                         storage_view.size);
  }
  ABSL_INTERNAL_CATCH_ANY {
    AllocatorTraits<A>::destroy(GetAllocator(), last_ptr);
    ABSL_INTERNAL_RETHROW;
  }
  // Destroy elements in old backing store.
  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
  AddSize(1);
  return *last_ptr;
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Erase(ConstIterator<A> from,
                             ConstIterator<A> to) -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();

  auto erase_size = static_cast<SizeType<A>>(std::distance(from, to));
  auto erase_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), from));
  SizeType<A> erase_end_index = erase_index + erase_size;

  // Fast path: if the value type is trivially relocatable and we know
  // the allocator doesn't do anything fancy, then we know it is legal for us to
  // simply destroy the elements in the "erasure window" (which cannot throw)
  // and then memcpy downward to close the window.
  if (absl::is_trivially_relocatable<ValueType<A>>::value &&
      std::is_nothrow_destructible<ValueType<A>>::value &&
      std::is_same<A, std::allocator<ValueType<A>>>::value) {
    DestroyAdapter<A>::DestroyElements(
        GetAllocator(), storage_view.data + erase_index, erase_size);
    std::memmove(
        reinterpret_cast<char*>(storage_view.data + erase_index),
        reinterpret_cast<const char*>(storage_view.data + erase_end_index),
        (storage_view.size - erase_end_index) * sizeof(ValueType<A>));
  } else {
    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(storage_view.data + erase_end_index));

    AssignElements<A>(storage_view.data + erase_index, move_values,
                      storage_view.size - erase_end_index);

    DestroyAdapter<A>::DestroyElements(
        GetAllocator(), storage_view.data + (storage_view.size - erase_size),
        erase_size);
  }
  SubtractSize(erase_size);
  return Iterator<A>(storage_view.data + erase_index);
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Reserve(SizeType<A> requested_capacity) -> void {
  StorageView<A> storage_view = MakeStorageView();

  if (ABSL_PREDICT_FALSE(requested_capacity <= storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  SizeType<A> new_requested_capacity =
      ComputeCapacity(storage_view.capacity, requested_capacity);
  Pointer<A> new_data = allocation_tx.Allocate(new_requested_capacity);

  ConstructElements<A>(GetAllocator(), new_data, move_values,
                       storage_view.size);

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::ShrinkToFit() -> void {
  // May only be called on allocated instances!
  ABSL_HARDENING_ASSERT(GetIsAllocated());

  StorageView<A> storage_view{GetAllocatedData(), GetSize(),
                              GetAllocatedCapacity()};

  if (ABSL_PREDICT_FALSE(storage_view.size == storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  Pointer<A> construct_data;
  if (storage_view.size > GetInlinedCapacity()) {
    SizeType<A> requested_capacity = storage_view.size;
    construct_data = allocation_tx.Allocate(requested_capacity);
    if (allocation_tx.GetCapacity() >= storage_view.capacity) {
      // Already using the smallest available heap allocation.
      return;
    }
  } else {
    construct_data = GetInlinedData();
  }

  ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), construct_data, move_values,
                         storage_view.size);
  }
  ABSL_INTERNAL_CATCH_ANY {
    SetAllocation({storage_view.data, storage_view.capacity});
    ABSL_INTERNAL_RETHROW;
  }

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  MallocAdapter<A>::Deallocate(GetAllocator(), storage_view.data,
                               storage_view.capacity);

  if (allocation_tx.DidAllocate()) {
    SetAllocation(std::move(allocation_tx).Release());
  } else {
    UnsetIsAllocated();
  }
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
  using std::swap;
  ABSL_HARDENING_ASSERT(this != other_storage_ptr);

  if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) {
    swap(data_.allocated, other_storage_ptr->data_.allocated);
  } else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) {
    SwapInlinedElements(SwapInlinedElementsPolicy{}, other_storage_ptr);
  } else {
    Storage* allocated_ptr = this;
    Storage* inlined_ptr = other_storage_ptr;
    if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr);

    StorageView<A> allocated_storage_view{
        allocated_ptr->GetAllocatedData(), allocated_ptr->GetSize(),
        allocated_ptr->GetAllocatedCapacity()};

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(inlined_ptr->GetInlinedData()));

    ABSL_INTERNAL_TRY {
      ConstructElements<A>(inlined_ptr->GetAllocator(),
                           allocated_ptr->GetInlinedData(), move_values,
                           inlined_ptr->GetSize());
    }
    ABSL_INTERNAL_CATCH_ANY {
      allocated_ptr->SetAllocation(Allocation<A>{
          allocated_storage_view.data, allocated_storage_view.capacity});
      ABSL_INTERNAL_RETHROW;
    }

    DestroyAdapter<A>::DestroyElements(inlined_ptr->GetAllocator(),
                                       inlined_ptr->GetInlinedData(),
                                       inlined_ptr->GetSize());

    inlined_ptr->SetAllocation(Allocation<A>{allocated_storage_view.data,
                                             allocated_storage_view.capacity});
  }

  swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated());
  swap(GetAllocator(), other_storage_ptr->GetAllocator());
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseSwapPolicy, Storage* other,
                             SizeType<A> n) {
  std::swap_ranges(GetInlinedData(), GetInlinedData() + n,
                   other->GetInlinedData());
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseConstructPolicy, Storage* other,
                             SizeType<A> n) {
  Pointer<A> a = GetInlinedData();
  Pointer<A> b = other->GetInlinedData();
  // See note on allocators in `SwapInlinedElements`.
  A& allocator_a = GetAllocator();
  A& allocator_b = other->GetAllocator();
  for (SizeType<A> i = 0; i < n; ++i, ++a, ++b) {
    ValueType<A> tmp(std::move(*a));

    AllocatorTraits<A>::destroy(allocator_a, a);
    AllocatorTraits<A>::construct(allocator_b, a, std::move(*b));

    AllocatorTraits<A>::destroy(allocator_b, b);
    AllocatorTraits<A>::construct(allocator_a, b, std::move(tmp));
  }
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapInlinedElements(MemcpyPolicy, Storage* other) {
  Data tmp = data_;
  data_ = other->data_;
  other->data_ = tmp;
}

template <typename T, size_t N, typename A>
template <typename NotMemcpyPolicy>
void Storage<T, N, A>::SwapInlinedElements(NotMemcpyPolicy policy,
                                           Storage* other) {
  // Note: `destroy` needs to use the pre-swap allocator while `construct`
  // needs to use the post-swap allocator. Allocators are swapped later,
  // outside of `SwapInlinedElements`.
  Storage* small_ptr = this;
  Storage* large_ptr = other;
  if (small_ptr->GetSize() > large_ptr->GetSize()) {
    std::swap(small_ptr, large_ptr);
  }

  auto small_size = small_ptr->GetSize();
  auto diff = large_ptr->GetSize() - small_size;
  SwapN(policy, other, small_size);

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(large_ptr->GetInlinedData() + small_size));

  ConstructElements<A>(large_ptr->GetAllocator(),
                       small_ptr->GetInlinedData() + small_size, move_values,
                       diff);

  DestroyAdapter<A>::DestroyElements(large_ptr->GetAllocator(),
                                     large_ptr->GetInlinedData() + small_size,
                                     diff);
}

// End ignore "array-bounds"
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic pop
#endif

}  // namespace inlined_vector_internal
ABSL_NAMESPACE_END
}  // namespace absl

#endif  // ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_