#pragma once

#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
#endif

//===- llvm/ADT/SmallVector.h - 'Normally small' vectors --------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the SmallVector class.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_ADT_SMALLVECTOR_H
#define LLVM_ADT_SMALLVECTOR_H

#include "llvm/ADT/iterator_range.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/MemAlloc.h"
#include "llvm/Support/type_traits.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdlib>
#include <cstring>
#include <initializer_list>
#include <iterator>
#include <limits>
#include <memory>
#include <new>
#include <type_traits>
#include <utility>

namespace llvm {

/// This is all the stuff common to all SmallVectors.
///
/// The template parameter specifies the type which should be used to hold the
/// Size and Capacity of the SmallVector, so it can be adjusted.
/// Using 32 bit size is desirable to shrink the size of the SmallVector.
/// Using 64 bit size is desirable for cases like SmallVector<char>, where a
/// 32 bit size would limit the vector to ~4GB. SmallVectors are used for
/// buffering bitcode output - which can exceed 4GB.
template <class Size_T> class SmallVectorBase {
protected:
  void *BeginX;
  Size_T Size = 0, Capacity;

  /// The maximum value of the Size_T used.
  static constexpr size_t SizeTypeMax() {
    return std::numeric_limits<Size_T>::max();
  }

  SmallVectorBase() = delete;
  SmallVectorBase(void *FirstEl, size_t TotalCapacity)
      : BeginX(FirstEl), Capacity(TotalCapacity) {}

  /// This is a helper for \a grow() that's out of line to reduce code
  /// duplication. This function will report a fatal error if it can't grow at
  /// least to \p MinSize.
  void *mallocForGrow(size_t MinSize, size_t TSize, size_t &NewCapacity);

  /// This is an implementation of the grow() method which only works
  /// on POD-like data types and is out of line to reduce code duplication.
  /// This function will report a fatal error if it cannot increase capacity.
  void grow_pod(void *FirstEl, size_t MinSize, size_t TSize);

public:
  size_t size() const { return Size; }
  size_t capacity() const { return Capacity; }

  LLVM_NODISCARD bool empty() const { return !Size; }

  /// Set the array size to \p N, which the current array must have enough
  /// capacity for.
  ///
  /// This does not construct or destroy any elements in the vector.
  ///
  /// Clients can use this in conjunction with capacity() to write past the end
  /// of the buffer when they know that more elements are available, and only
  /// update the size later. This avoids the cost of value initializing elements
  /// which will only be overwritten.
  void set_size(size_t N) {
    assert(N <= capacity());
    Size = N;
  }
};
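
// Usage sketch for the capacity()/set_size() pattern documented above. This is
// illustrative only and not part of the header; `fillBuffer` is a hypothetical
// producer that writes elements directly into spare capacity.
//
//   llvm::SmallVector<char, 128> Buf;
//   Buf.reserve(128);
//   // Write into the uninitialized tail, then publish the new size.
//   size_t Written = fillBuffer(Buf.data(), Buf.capacity()); // hypothetical
//   Buf.set_size(Written); // Skips value-initializing the overwritten elements.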

template <class T>
using SmallVectorSizeType =
    typename std::conditional<sizeof(T) < 4 && sizeof(void *) >= 8, uint64_t,
                              uint32_t>::type;
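
// Illustrative consequence of the SmallVectorSizeType selection above, assuming
// a typical 64-bit host (an assumption, not something this header asserts):
// element types smaller than 4 bytes get 64-bit Size/Capacity fields, everything
// else gets 32-bit fields.
//
//   static_assert(sizeof(llvm::SmallVectorSizeType<char>) == 8, "64-bit size");
//   static_assert(sizeof(llvm::SmallVectorSizeType<int>) == 4, "32-bit size");
//   static_assert(sizeof(llvm::SmallVectorSizeType<void *>) == 4, "32-bit size");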

/// Figure out the offset of the first element.
template <class T, typename = void> struct SmallVectorAlignmentAndSize {
  alignas(SmallVectorBase<SmallVectorSizeType<T>>) char Base[sizeof(
      SmallVectorBase<SmallVectorSizeType<T>>)];
  alignas(T) char FirstEl[sizeof(T)];
};

/// This is the part of SmallVectorTemplateBase which does not depend on whether
/// the type T is a POD. The extra dummy template argument is used by ArrayRef
/// to avoid unnecessarily requiring T to be complete.
template <typename T, typename = void>
class SmallVectorTemplateCommon
    : public SmallVectorBase<SmallVectorSizeType<T>> {
  using Base = SmallVectorBase<SmallVectorSizeType<T>>;

  /// Find the address of the first element. For this pointer math to be valid
  /// with small-size of 0 for T with lots of alignment, it's important that
  /// SmallVectorStorage is properly-aligned even for small-size of 0.
  void *getFirstEl() const {
    return const_cast<void *>(reinterpret_cast<const void *>(
        reinterpret_cast<const char *>(this) +
        offsetof(SmallVectorAlignmentAndSize<T>, FirstEl)));
  }
  // Space after 'FirstEl' is clobbered, do not add any instance vars after it.

protected:
  SmallVectorTemplateCommon(size_t Size) : Base(getFirstEl(), Size) {}

  void grow_pod(size_t MinSize, size_t TSize) {
    Base::grow_pod(getFirstEl(), MinSize, TSize);
  }

  /// Return true if this is a smallvector which has not had dynamic
  /// memory allocated for it.
  bool isSmall() const { return this->BeginX == getFirstEl(); }

  /// Put this vector in a state of being small.
  void resetToSmall() {
    this->BeginX = getFirstEl();
    this->Size = this->Capacity = 0; // FIXME: Setting Capacity to 0 is suspect.
  }

  /// Return true if V is an internal reference to the given range.
  bool isReferenceToRange(const void *V, const void *First, const void *Last) const {
    // Use std::less to avoid UB.
    std::less<> LessThan;
    return !LessThan(V, First) && LessThan(V, Last);
  }

  /// Return true if V is an internal reference to this vector.
  bool isReferenceToStorage(const void *V) const {
    return isReferenceToRange(V, this->begin(), this->end());
  }

  /// Return true if First and Last form a valid (possibly empty) range in this
  /// vector's storage.
  bool isRangeInStorage(const void *First, const void *Last) const {
    // Use std::less to avoid UB.
    std::less<> LessThan;
    return !LessThan(First, this->begin()) && !LessThan(Last, First) &&
           !LessThan(this->end(), Last);
  }

  /// Return true unless Elt will be invalidated by resizing the vector to
  /// NewSize.
  bool isSafeToReferenceAfterResize(const void *Elt, size_t NewSize) {
    // Past the end.
    if (LLVM_LIKELY(!isReferenceToStorage(Elt)))
      return true;

    // Return false if Elt will be destroyed by shrinking.
    if (NewSize <= this->size())
      return Elt < this->begin() + NewSize;

    // Return false if we need to grow.
    return NewSize <= this->capacity();
  }

  /// Check whether Elt will be invalidated by resizing the vector to NewSize.
  void assertSafeToReferenceAfterResize(const void *Elt, size_t NewSize) {
    assert(isSafeToReferenceAfterResize(Elt, NewSize) &&
           "Attempting to reference an element of the vector in an operation "
           "that invalidates it");
  }

  /// Check whether Elt will be invalidated by increasing the size of the
  /// vector by N.
  void assertSafeToAdd(const void *Elt, size_t N = 1) {
    this->assertSafeToReferenceAfterResize(Elt, this->size() + N);
  }

  /// Check whether any part of the range will be invalidated by clearing.
  void assertSafeToReferenceAfterClear(const T *From, const T *To) {
    if (From == To)
      return;
    this->assertSafeToReferenceAfterResize(From, 0);
    this->assertSafeToReferenceAfterResize(To - 1, 0);
  }
  template <
      class ItTy,
      std::enable_if_t<!std::is_same<std::remove_const_t<ItTy>, T *>::value,
                       bool> = false>
  void assertSafeToReferenceAfterClear(ItTy, ItTy) {}

  /// Check whether any part of the range will be invalidated by growing.
  void assertSafeToAddRange(const T *From, const T *To) {
    if (From == To)
      return;
    this->assertSafeToAdd(From, To - From);
    this->assertSafeToAdd(To - 1, To - From);
  }
  template <
      class ItTy,
      std::enable_if_t<!std::is_same<std::remove_const_t<ItTy>, T *>::value,
                       bool> = false>
  void assertSafeToAddRange(ItTy, ItTy) {}

  /// Reserve enough space to add one element, and return the updated element
  /// pointer in case it was a reference to the storage.
  template <class U>
  static const T *reserveForParamAndGetAddressImpl(U *This, const T &Elt,
                                                   size_t N) {
    size_t NewSize = This->size() + N;
    if (LLVM_LIKELY(NewSize <= This->capacity()))
      return &Elt;

    bool ReferencesStorage = false;
    int64_t Index = -1;
    if (!U::TakesParamByValue) {
      if (LLVM_UNLIKELY(This->isReferenceToStorage(&Elt))) {
        ReferencesStorage = true;
        Index = &Elt - This->begin();
      }
    }
    This->grow(NewSize);
    return ReferencesStorage ? This->begin() + Index : &Elt;
  }
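
  // Illustrative note (not part of the header): the machinery above is what
  // keeps a self-referencing push_back well-defined even when it forces a
  // reallocation.
  //
  //   llvm::SmallVector<std::string, 2> V = {"a", "b"};
  //   V.push_back(V[0]); // Safe: the address of V[0] is re-derived after grow().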

public:
  using size_type = size_t;
  using difference_type = ptrdiff_t;
  using value_type = T;
  using iterator = T *;
  using const_iterator = const T *;

  using const_reverse_iterator = std::reverse_iterator<const_iterator>;
  using reverse_iterator = std::reverse_iterator<iterator>;

  using reference = T &;
  using const_reference = const T &;
  using pointer = T *;
  using const_pointer = const T *;

  using Base::capacity;
  using Base::empty;
  using Base::size;

  // forward iterator creation methods.
  iterator begin() { return (iterator)this->BeginX; }
  const_iterator begin() const { return (const_iterator)this->BeginX; }
  iterator end() { return begin() + size(); }
  const_iterator end() const { return begin() + size(); }

  // reverse iterator creation methods.
  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const { return const_reverse_iterator(end()); }
  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const { return const_reverse_iterator(begin()); }

  size_type size_in_bytes() const { return size() * sizeof(T); }
  size_type max_size() const {
    return std::min(this->SizeTypeMax(), size_type(-1) / sizeof(T));
  }

  size_t capacity_in_bytes() const { return capacity() * sizeof(T); }

  /// Return a pointer to the vector's buffer, even if empty().
  pointer data() { return pointer(begin()); }
  /// Return a pointer to the vector's buffer, even if empty().
  const_pointer data() const { return const_pointer(begin()); }

  reference operator[](size_type idx) {
    assert(idx < size());
    return begin()[idx];
  }
  const_reference operator[](size_type idx) const {
    assert(idx < size());
    return begin()[idx];
  }

  reference front() {
    assert(!empty());
    return begin()[0];
  }
  const_reference front() const {
    assert(!empty());
    return begin()[0];
  }

  reference back() {
    assert(!empty());
    return end()[-1];
  }
  const_reference back() const {
    assert(!empty());
    return end()[-1];
  }
};

/// SmallVectorTemplateBase<TriviallyCopyable = false> - This is where we put
/// method implementations that are designed to work with non-trivial T's.
///
/// We approximate is_trivially_copyable with trivial move/copy construction and
/// trivial destruction. While the standard doesn't specify that you're allowed
/// to copy these types with memcpy, there is no way for the type to observe
/// this. This catches the important case of std::pair<POD, POD>, which is not
/// trivially assignable.
template <typename T, bool = (is_trivially_copy_constructible<T>::value) &&
                             (is_trivially_move_constructible<T>::value) &&
                             std::is_trivially_destructible<T>::value>
class SmallVectorTemplateBase : public SmallVectorTemplateCommon<T> {
  friend class SmallVectorTemplateCommon<T>;

protected:
  static constexpr bool TakesParamByValue = false;
  using ValueParamT = const T &;

  SmallVectorTemplateBase(size_t Size) : SmallVectorTemplateCommon<T>(Size) {}

  static void destroy_range(T *S, T *E) {
    while (S != E) {
      --E;
      E->~T();
    }
  }

  /// Move the range [I, E) into the uninitialized memory starting with "Dest",
  /// constructing elements as needed.
  template <typename It1, typename It2>
  static void uninitialized_move(It1 I, It1 E, It2 Dest) {
    std::uninitialized_copy(std::make_move_iterator(I),
                            std::make_move_iterator(E), Dest);
  }

  /// Copy the range [I, E) onto the uninitialized memory starting with "Dest",
  /// constructing elements as needed.
  template <typename It1, typename It2>
  static void uninitialized_copy(It1 I, It1 E, It2 Dest) {
    std::uninitialized_copy(I, E, Dest);
  }

  /// Grow the allocated memory (without initializing new elements), doubling
  /// the size of the allocated memory. Guarantees space for at least one more
  /// element, or MinSize more elements if specified.
  void grow(size_t MinSize = 0);

  /// Create a new allocation big enough for \p MinSize and pass back its size
  /// in \p NewCapacity. This is the first section of \a grow().
  T *mallocForGrow(size_t MinSize, size_t &NewCapacity) {
    return static_cast<T *>(
        SmallVectorBase<SmallVectorSizeType<T>>::mallocForGrow(
            MinSize, sizeof(T), NewCapacity));
  }

  /// Move existing elements over to the new allocation \p NewElts, the middle
  /// section of \a grow().
  void moveElementsForGrow(T *NewElts);

  /// Transfer ownership of the allocation, finishing up \a grow().
  void takeAllocationForGrow(T *NewElts, size_t NewCapacity);

  /// Reserve enough space to add one element, and return the updated element
  /// pointer in case it was a reference to the storage.
  const T *reserveForParamAndGetAddress(const T &Elt, size_t N = 1) {
    return this->reserveForParamAndGetAddressImpl(this, Elt, N);
  }

  /// Reserve enough space to add one element, and return the updated element
  /// pointer in case it was a reference to the storage.
  T *reserveForParamAndGetAddress(T &Elt, size_t N = 1) {
    return const_cast<T *>(
        this->reserveForParamAndGetAddressImpl(this, Elt, N));
  }

  static T &&forward_value_param(T &&V) { return std::move(V); }
  static const T &forward_value_param(const T &V) { return V; }

  void growAndAssign(size_t NumElts, const T &Elt) {
    // Grow manually in case Elt is an internal reference.
    size_t NewCapacity;
    T *NewElts = mallocForGrow(NumElts, NewCapacity);
    std::uninitialized_fill_n(NewElts, NumElts, Elt);
    this->destroy_range(this->begin(), this->end());
    takeAllocationForGrow(NewElts, NewCapacity);
    this->set_size(NumElts);
  }

  template <typename... ArgTypes> T &growAndEmplaceBack(ArgTypes &&... Args) {
    // Grow manually in case one of Args is an internal reference.
    size_t NewCapacity;
    T *NewElts = mallocForGrow(0, NewCapacity);
    ::new ((void *)(NewElts + this->size())) T(std::forward<ArgTypes>(Args)...);
    moveElementsForGrow(NewElts);
    takeAllocationForGrow(NewElts, NewCapacity);
    this->set_size(this->size() + 1);
    return this->back();
  }

public:
  void push_back(const T &Elt) {
    const T *EltPtr = reserveForParamAndGetAddress(Elt);
    ::new ((void *)this->end()) T(*EltPtr);
    this->set_size(this->size() + 1);
  }

  void push_back(T &&Elt) {
    T *EltPtr = reserveForParamAndGetAddress(Elt);
    ::new ((void *)this->end()) T(::std::move(*EltPtr));
    this->set_size(this->size() + 1);
  }

  void pop_back() {
    this->set_size(this->size() - 1);
    this->end()->~T();
  }
};

// Define this out-of-line to dissuade the C++ compiler from inlining it.
template <typename T, bool TriviallyCopyable>
void SmallVectorTemplateBase<T, TriviallyCopyable>::grow(size_t MinSize) {
  size_t NewCapacity;
  T *NewElts = mallocForGrow(MinSize, NewCapacity);
  moveElementsForGrow(NewElts);
  takeAllocationForGrow(NewElts, NewCapacity);
}

// Define this out-of-line to dissuade the C++ compiler from inlining it.
template <typename T, bool TriviallyCopyable>
void SmallVectorTemplateBase<T, TriviallyCopyable>::moveElementsForGrow(
    T *NewElts) {
  // Move the elements over.
  this->uninitialized_move(this->begin(), this->end(), NewElts);

  // Destroy the original elements.
  destroy_range(this->begin(), this->end());
}

// Define this out-of-line to dissuade the C++ compiler from inlining it.
template <typename T, bool TriviallyCopyable>
void SmallVectorTemplateBase<T, TriviallyCopyable>::takeAllocationForGrow(
    T *NewElts, size_t NewCapacity) {
  // If this wasn't grown from the inline copy, deallocate the old space.
  if (!this->isSmall())
    free(this->begin());

  this->BeginX = NewElts;
  this->Capacity = NewCapacity;
}

/// SmallVectorTemplateBase<TriviallyCopyable = true> - This is where we put
/// method implementations that are designed to work with trivially copyable
/// T's. This allows using memcpy in place of copy/move construction and
/// skipping destruction.
template <typename T>
class SmallVectorTemplateBase<T, true> : public SmallVectorTemplateCommon<T> {
  friend class SmallVectorTemplateCommon<T>;

protected:
  /// True if it's cheap enough to take parameters by value. Doing so avoids
  /// overhead related to mitigations for reference invalidation.
  static constexpr bool TakesParamByValue = sizeof(T) <= 2 * sizeof(void *);

  /// Either const T& or T, depending on whether it's cheap enough to take
  /// parameters by value.
  using ValueParamT =
      typename std::conditional<TakesParamByValue, T, const T &>::type;

  SmallVectorTemplateBase(size_t Size) : SmallVectorTemplateCommon<T>(Size) {}

  // No need to do a destroy loop for POD's.
  static void destroy_range(T *, T *) {}

  /// Move the range [I, E) onto the uninitialized memory
  /// starting with "Dest", constructing elements into it as needed.
  template <typename It1, typename It2>
  static void uninitialized_move(It1 I, It1 E, It2 Dest) {
    // Just do a copy.
    uninitialized_copy(I, E, Dest);
  }

  /// Copy the range [I, E) onto the uninitialized memory
  /// starting with "Dest", constructing elements into it as needed.
  template <typename It1, typename It2>
  static void uninitialized_copy(It1 I, It1 E, It2 Dest) {
    // Arbitrary iterator types; just use the basic implementation.
    std::uninitialized_copy(I, E, Dest);
  }

  /// Copy the range [I, E) onto the uninitialized memory
  /// starting with "Dest", constructing elements into it as needed.
  template <typename T1, typename T2>
  static void uninitialized_copy(
      T1 *I, T1 *E, T2 *Dest,
      std::enable_if_t<std::is_same<typename std::remove_const<T1>::type,
                                    T2>::value> * = nullptr) {
    // Use memcpy for PODs iterated by pointers (which includes SmallVector
    // iterators): std::uninitialized_copy optimizes to memmove, but we can
    // use memcpy here. Note that I and E are iterators and thus might be
    // invalid for memcpy if they are equal.
    if (I != E)
      memcpy(reinterpret_cast<void *>(Dest), I, (E - I) * sizeof(T));
  }

  /// Double the size of the allocated memory, guaranteeing space for at
  /// least one more element or MinSize if specified.
  void grow(size_t MinSize = 0) { this->grow_pod(MinSize, sizeof(T)); }

  /// Reserve enough space to add one element, and return the updated element
  /// pointer in case it was a reference to the storage.
  const T *reserveForParamAndGetAddress(const T &Elt, size_t N = 1) {
    return this->reserveForParamAndGetAddressImpl(this, Elt, N);
  }

  /// Reserve enough space to add one element, and return the updated element
  /// pointer in case it was a reference to the storage.
  T *reserveForParamAndGetAddress(T &Elt, size_t N = 1) {
    return const_cast<T *>(
        this->reserveForParamAndGetAddressImpl(this, Elt, N));
  }

  /// Copy \p V or return a reference, depending on \a ValueParamT.
  static ValueParamT forward_value_param(ValueParamT V) { return V; }

  void growAndAssign(size_t NumElts, T Elt) {
    // Elt has been copied in case it's an internal reference, side-stepping
    // reference invalidation problems without losing the realloc optimization.
    this->set_size(0);
    this->grow(NumElts);
    std::uninitialized_fill_n(this->begin(), NumElts, Elt);
    this->set_size(NumElts);
  }

  template <typename... ArgTypes> T &growAndEmplaceBack(ArgTypes &&... Args) {
    // Use push_back with a copy in case Args has an internal reference,
    // side-stepping reference invalidation problems without losing the realloc
    // optimization.
    push_back(T(std::forward<ArgTypes>(Args)...));
    return this->back();
  }

public:
  void push_back(ValueParamT Elt) {
    const T *EltPtr = reserveForParamAndGetAddress(Elt);
    memcpy(reinterpret_cast<void *>(this->end()), EltPtr, sizeof(T));
    this->set_size(this->size() + 1);
  }

  void pop_back() { this->set_size(this->size() - 1); }
};
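
// Illustrative note (not part of the header): for trivially copyable element
// types no larger than two pointers, ValueParamT above collapses to plain `T`,
// so push_back takes its argument by value and the reference-invalidation
// bookkeeping in reserveForParamAndGetAddress can be skipped entirely.
//
//   // Assuming a 64-bit host (8-byte pointers):
//   static_assert(sizeof(std::pair<int, int>) <= 2 * sizeof(void *), "");
//   llvm::SmallVector<std::pair<int, int>, 4> V;
//   V.push_back({1, 2}); // Parameter is passed by value, then memcpy'd in.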

/// This class consists of common code factored out of the SmallVector class to
/// reduce code duplication based on the SmallVector 'N' template parameter.
template <typename T>
class SmallVectorImpl : public SmallVectorTemplateBase<T> {
  using SuperClass = SmallVectorTemplateBase<T>;

public:
  using iterator = typename SuperClass::iterator;
  using const_iterator = typename SuperClass::const_iterator;
  using reference = typename SuperClass::reference;
  using size_type = typename SuperClass::size_type;

protected:
  using SmallVectorTemplateBase<T>::TakesParamByValue;
  using ValueParamT = typename SuperClass::ValueParamT;

  // Default ctor - Initialize to empty.
  explicit SmallVectorImpl(unsigned N)
      : SmallVectorTemplateBase<T>(N) {}

public:
  SmallVectorImpl(const SmallVectorImpl &) = delete;

  ~SmallVectorImpl() {
    // Subclass has already destructed this vector's elements.
    // If this wasn't grown from the inline copy, deallocate the old space.
    if (!this->isSmall())
      free(this->begin());
  }

  void clear() {
    this->destroy_range(this->begin(), this->end());
    this->Size = 0;
  }

private:
  template <bool ForOverwrite> void resizeImpl(size_type N) {
    if (N < this->size()) {
      this->pop_back_n(this->size() - N);
    } else if (N > this->size()) {
      this->reserve(N);
      for (auto I = this->end(), E = this->begin() + N; I != E; ++I)
        if (ForOverwrite)
          new (&*I) T;
        else
          new (&*I) T();
      this->set_size(N);
    }
  }

public:
  void resize(size_type N) { resizeImpl<false>(N); }

  /// Like resize, but if \ref T is POD, the new values won't be initialized.
  void resize_for_overwrite(size_type N) { resizeImpl<true>(N); }

  void resize(size_type N, ValueParamT NV) {
    if (N == this->size())
      return;

    if (N < this->size()) {
      this->pop_back_n(this->size() - N);
      return;
    }

    // N > this->size(). Defer to append.
    this->append(N - this->size(), NV);
  }

  void reserve(size_type N) {
    if (this->capacity() < N)
      this->grow(N);
  }

  void pop_back_n(size_type NumItems) {
    assert(this->size() >= NumItems);
    this->destroy_range(this->end() - NumItems, this->end());
    this->set_size(this->size() - NumItems);
  }

  LLVM_NODISCARD T pop_back_val() {
    T Result = ::std::move(this->back());
    this->pop_back();
    return Result;
  }
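
  // Illustrative drain idiom using pop_back_val() (not part of the header);
  // `Worklist` is any SmallVector and `consume` a hypothetical callback.
  //
  //   while (!Worklist.empty())
  //     consume(Worklist.pop_back_val());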

  void swap(SmallVectorImpl &RHS);

  /// Add the specified range to the end of the SmallVector.
  template <typename in_iter,
            typename = std::enable_if_t<std::is_convertible<
                typename std::iterator_traits<in_iter>::iterator_category,
                std::input_iterator_tag>::value>>
  void append(in_iter in_start, in_iter in_end) {
    this->assertSafeToAddRange(in_start, in_end);
    size_type NumInputs = std::distance(in_start, in_end);
    this->reserve(this->size() + NumInputs);
    this->uninitialized_copy(in_start, in_end, this->end());
    this->set_size(this->size() + NumInputs);
  }

  /// Append \p NumInputs copies of \p Elt to the end.
  void append(size_type NumInputs, ValueParamT Elt) {
    const T *EltPtr = this->reserveForParamAndGetAddress(Elt, NumInputs);
    std::uninitialized_fill_n(this->end(), NumInputs, *EltPtr);
    this->set_size(this->size() + NumInputs);
  }

  void append(std::initializer_list<T> IL) {
    append(IL.begin(), IL.end());
  }

  void append(const SmallVectorImpl &RHS) { append(RHS.begin(), RHS.end()); }

  void assign(size_type NumElts, ValueParamT Elt) {
    // Note that Elt could be an internal reference.
    if (NumElts > this->capacity()) {
      this->growAndAssign(NumElts, Elt);
      return;
    }

    // Assign over existing elements.
    std::fill_n(this->begin(), std::min(NumElts, this->size()), Elt);
    if (NumElts > this->size())
      std::uninitialized_fill_n(this->end(), NumElts - this->size(), Elt);
    else if (NumElts < this->size())
      this->destroy_range(this->begin() + NumElts, this->end());
    this->set_size(NumElts);
  }

  // FIXME: Consider assigning over existing elements, rather than clearing &
  // re-initializing them - for all assign(...) variants.

  template <typename in_iter,
            typename = std::enable_if_t<std::is_convertible<
                typename std::iterator_traits<in_iter>::iterator_category,
                std::input_iterator_tag>::value>>
  void assign(in_iter in_start, in_iter in_end) {
    this->assertSafeToReferenceAfterClear(in_start, in_end);
    clear();
    append(in_start, in_end);
  }

  void assign(std::initializer_list<T> IL) {
    clear();
    append(IL);
  }

  void assign(const SmallVectorImpl &RHS) { assign(RHS.begin(), RHS.end()); }

  iterator erase(const_iterator CI) {
    // Just cast away constness because this is a non-const member function.
    iterator I = const_cast<iterator>(CI);

    assert(this->isReferenceToStorage(CI) && "Iterator to erase is out of bounds.");

    iterator N = I;
    // Shift all elts down one.
    std::move(I+1, this->end(), I);
    // Drop the last elt.
    this->pop_back();
    return(N);
  }

  iterator erase(const_iterator CS, const_iterator CE) {
    // Just cast away constness because this is a non-const member function.
    iterator S = const_cast<iterator>(CS);
    iterator E = const_cast<iterator>(CE);

    assert(this->isRangeInStorage(S, E) && "Range to erase is out of bounds.");

    iterator N = S;
    // Shift all elts down.
    iterator I = std::move(E, this->end(), S);
    // Drop the last elts.
    this->destroy_range(I, this->end());
    this->set_size(I - this->begin());
    return(N);
  }

private:
  template <class ArgType> iterator insert_one_impl(iterator I, ArgType &&Elt) {
    // Callers ensure that ArgType is derived from T.
    static_assert(
        std::is_same<std::remove_const_t<std::remove_reference_t<ArgType>>,
                     T>::value,
        "ArgType must be derived from T!");

    if (I == this->end()) { // Important special case for empty vector.
      this->push_back(::std::forward<ArgType>(Elt));
      return this->end()-1;
    }

    assert(this->isReferenceToStorage(I) && "Insertion iterator is out of bounds.");

    // Grow if necessary.
    size_t Index = I - this->begin();
    std::remove_reference_t<ArgType> *EltPtr =
        this->reserveForParamAndGetAddress(Elt);
    I = this->begin() + Index;

    ::new ((void*) this->end()) T(::std::move(this->back()));
    // Push everything else over.
    std::move_backward(I, this->end()-1, this->end());
    this->set_size(this->size() + 1);

    // If we just moved the element we're inserting, be sure to update
    // the reference (never happens if TakesParamByValue).
    static_assert(!TakesParamByValue || std::is_same<ArgType, T>::value,
                  "ArgType must be 'T' when taking by value!");
    if (!TakesParamByValue && this->isReferenceToRange(EltPtr, I, this->end()))
      ++EltPtr;

    *I = ::std::forward<ArgType>(*EltPtr);
    return I;
  }

public:
  iterator insert(iterator I, T &&Elt) {
    return insert_one_impl(I, this->forward_value_param(std::move(Elt)));
  }

  iterator insert(iterator I, const T &Elt) {
    return insert_one_impl(I, this->forward_value_param(Elt));
  }

  iterator insert(iterator I, size_type NumToInsert, ValueParamT Elt) {
    // Convert iterator to elt# to avoid invalidating iterator when we reserve()
    size_t InsertElt = I - this->begin();

    if (I == this->end()) { // Important special case for empty vector.
      append(NumToInsert, Elt);
      return this->begin()+InsertElt;
    }

    assert(this->isReferenceToStorage(I) && "Insertion iterator is out of bounds.");

    // Ensure there is enough space, and get the (maybe updated) address of
    // Elt.
    const T *EltPtr = this->reserveForParamAndGetAddress(Elt, NumToInsert);

    // Uninvalidate the iterator.
    I = this->begin()+InsertElt;

    // If there are more elements between the insertion point and the end of the
    // range than there are being inserted, we can use a simple approach to
    // insertion. Since we already reserved space, we know that this won't
    // reallocate the vector.
    if (size_t(this->end()-I) >= NumToInsert) {
      T *OldEnd = this->end();
      append(std::move_iterator<iterator>(this->end() - NumToInsert),
             std::move_iterator<iterator>(this->end()));

      // Copy the existing elements that get replaced.
      std::move_backward(I, OldEnd-NumToInsert, OldEnd);

      // If we just moved the element we're inserting, be sure to update
      // the reference (never happens if TakesParamByValue).
      if (!TakesParamByValue && I <= EltPtr && EltPtr < this->end())
        EltPtr += NumToInsert;

      std::fill_n(I, NumToInsert, *EltPtr);
      return I;
    }

    // Otherwise, we're inserting more elements than exist already, and we're
    // not inserting at the end.

    // Move over the elements that we're about to overwrite.
    T *OldEnd = this->end();
    this->set_size(this->size() + NumToInsert);
    size_t NumOverwritten = OldEnd-I;
    this->uninitialized_move(I, OldEnd, this->end()-NumOverwritten);

    // If we just moved the element we're inserting, be sure to update
    // the reference (never happens if TakesParamByValue).
    if (!TakesParamByValue && I <= EltPtr && EltPtr < this->end())
      EltPtr += NumToInsert;

    // Replace the overwritten part.
    std::fill_n(I, NumOverwritten, *EltPtr);

    // Insert the non-overwritten middle part.
    std::uninitialized_fill_n(OldEnd, NumToInsert - NumOverwritten, *EltPtr);
    return I;
  }

  template <typename ItTy,
            typename = std::enable_if_t<std::is_convertible<
                typename std::iterator_traits<ItTy>::iterator_category,
                std::input_iterator_tag>::value>>
  iterator insert(iterator I, ItTy From, ItTy To) {
    // Convert iterator to elt# to avoid invalidating iterator when we reserve()
    size_t InsertElt = I - this->begin();

    if (I == this->end()) { // Important special case for empty vector.
      append(From, To);
      return this->begin()+InsertElt;
    }

    assert(this->isReferenceToStorage(I) && "Insertion iterator is out of bounds.");

    // Check that the reserve that follows doesn't invalidate the iterators.
    this->assertSafeToAddRange(From, To);

    size_t NumToInsert = std::distance(From, To);

    // Ensure there is enough space.
    reserve(this->size() + NumToInsert);

    // Uninvalidate the iterator.
    I = this->begin()+InsertElt;

    // If there are more elements between the insertion point and the end of the
    // range than there are being inserted, we can use a simple approach to
    // insertion. Since we already reserved space, we know that this won't
    // reallocate the vector.
    if (size_t(this->end()-I) >= NumToInsert) {
      T *OldEnd = this->end();
      append(std::move_iterator<iterator>(this->end() - NumToInsert),
             std::move_iterator<iterator>(this->end()));

      // Copy the existing elements that get replaced.
      std::move_backward(I, OldEnd-NumToInsert, OldEnd);

      std::copy(From, To, I);
      return I;
    }

    // Otherwise, we're inserting more elements than exist already, and we're
    // not inserting at the end.

    // Move over the elements that we're about to overwrite.
    T *OldEnd = this->end();
    this->set_size(this->size() + NumToInsert);
    size_t NumOverwritten = OldEnd-I;
    this->uninitialized_move(I, OldEnd, this->end()-NumOverwritten);

    // Replace the overwritten part.
    for (T *J = I; NumOverwritten > 0; --NumOverwritten) {
      *J = *From;
      ++J; ++From;
    }

    // Insert the non-overwritten middle part.
    this->uninitialized_copy(From, To, OldEnd);
    return I;
  }

  void insert(iterator I, std::initializer_list<T> IL) {
    insert(I, IL.begin(), IL.end());
  }

  template <typename... ArgTypes> reference emplace_back(ArgTypes &&... Args) {
    if (LLVM_UNLIKELY(this->size() >= this->capacity()))
      return this->growAndEmplaceBack(std::forward<ArgTypes>(Args)...);

    ::new ((void *)this->end()) T(std::forward<ArgTypes>(Args)...);
    this->set_size(this->size() + 1);
    return this->back();
  }

  SmallVectorImpl &operator=(const SmallVectorImpl &RHS);

  SmallVectorImpl &operator=(SmallVectorImpl &&RHS);

  bool operator==(const SmallVectorImpl &RHS) const {
    if (this->size() != RHS.size()) return false;
    return std::equal(this->begin(), this->end(), RHS.begin());
  }
  bool operator!=(const SmallVectorImpl &RHS) const {
    return !(*this == RHS);
  }

  bool operator<(const SmallVectorImpl &RHS) const {
    return std::lexicographical_compare(this->begin(), this->end(),
                                        RHS.begin(), RHS.end());
  }
};
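
// Illustrative API-design note (not part of the header): because
// SmallVectorImpl is independent of the inline element count N, functions can
// accept SmallVectorImpl<T>& and work with any SmallVector<T, N>.
//
//   void collectUsers(llvm::SmallVectorImpl<int> &Out); // hypothetical
//
//   llvm::SmallVector<int, 8> A;
//   llvm::SmallVector<int, 32> B;
//   collectUsers(A); // Both calls bind to the same SmallVectorImpl<int> base.
//   collectUsers(B);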

template <typename T>
void SmallVectorImpl<T>::swap(SmallVectorImpl<T> &RHS) {
  if (this == &RHS) return;

  // We can only avoid copying elements if neither vector is small.
  if (!this->isSmall() && !RHS.isSmall()) {
    std::swap(this->BeginX, RHS.BeginX);
    std::swap(this->Size, RHS.Size);
    std::swap(this->Capacity, RHS.Capacity);
    return;
  }
  this->reserve(RHS.size());
  RHS.reserve(this->size());

  // Swap the shared elements.
  size_t NumShared = this->size();
  if (NumShared > RHS.size()) NumShared = RHS.size();
  for (size_type i = 0; i != NumShared; ++i)
    std::swap((*this)[i], RHS[i]);

  // Copy over the extra elts.
  if (this->size() > RHS.size()) {
    size_t EltDiff = this->size() - RHS.size();
    this->uninitialized_copy(this->begin()+NumShared, this->end(), RHS.end());
    RHS.set_size(RHS.size() + EltDiff);
    this->destroy_range(this->begin()+NumShared, this->end());
    this->set_size(NumShared);
  } else if (RHS.size() > this->size()) {
    size_t EltDiff = RHS.size() - this->size();
    this->uninitialized_copy(RHS.begin()+NumShared, RHS.end(), this->end());
    this->set_size(this->size() + EltDiff);
    this->destroy_range(RHS.begin()+NumShared, RHS.end());
    RHS.set_size(NumShared);
  }
}

template <typename T>
SmallVectorImpl<T> &SmallVectorImpl<T>::
    operator=(const SmallVectorImpl<T> &RHS) {
  // Avoid self-assignment.
  if (this == &RHS) return *this;

  // If we already have sufficient space, assign the common elements, then
  // destroy any excess.
  size_t RHSSize = RHS.size();
  size_t CurSize = this->size();
  if (CurSize >= RHSSize) {
    // Assign common elements.
    iterator NewEnd;
    if (RHSSize)
      NewEnd = std::copy(RHS.begin(), RHS.begin()+RHSSize, this->begin());
    else
      NewEnd = this->begin();

    // Destroy excess elements.
    this->destroy_range(NewEnd, this->end());

    // Trim.
    this->set_size(RHSSize);
    return *this;
  }

  // If we have to grow to have enough elements, destroy the current elements.
  // This allows us to avoid copying them during the grow.
  // FIXME: don't do this if they're efficiently moveable.
  if (this->capacity() < RHSSize) {
    // Destroy current elements.
    this->clear();
    CurSize = 0;
    this->grow(RHSSize);
  } else if (CurSize) {
    // Otherwise, use assignment for the already-constructed elements.
    std::copy(RHS.begin(), RHS.begin()+CurSize, this->begin());
  }

  // Copy construct the new elements in place.
  this->uninitialized_copy(RHS.begin()+CurSize, RHS.end(),
                           this->begin()+CurSize);

  // Set end.
  this->set_size(RHSSize);
  return *this;
}

template <typename T>
SmallVectorImpl<T> &SmallVectorImpl<T>::operator=(SmallVectorImpl<T> &&RHS) {
  // Avoid self-assignment.
  if (this == &RHS) return *this;

  // If the RHS isn't small, clear this vector and then steal its buffer.
  if (!RHS.isSmall()) {
    this->destroy_range(this->begin(), this->end());
    if (!this->isSmall()) free(this->begin());
    this->BeginX = RHS.BeginX;
    this->Size = RHS.Size;
    this->Capacity = RHS.Capacity;
    RHS.resetToSmall();
    return *this;
  }

  // If we already have sufficient space, assign the common elements, then
  // destroy any excess.
  size_t RHSSize = RHS.size();
  size_t CurSize = this->size();
  if (CurSize >= RHSSize) {
    // Assign common elements.
    iterator NewEnd = this->begin();
    if (RHSSize)
      NewEnd = std::move(RHS.begin(), RHS.end(), NewEnd);

    // Destroy excess elements and trim the bounds.
    this->destroy_range(NewEnd, this->end());
    this->set_size(RHSSize);

    // Clear the RHS.
    RHS.clear();

    return *this;
  }

  // If we have to grow to have enough elements, destroy the current elements.
  // This allows us to avoid copying them during the grow.
  // FIXME: this may not actually make any sense if we can efficiently move
  // elements.
  if (this->capacity() < RHSSize) {
    // Destroy current elements.
    this->clear();
    CurSize = 0;
    this->grow(RHSSize);
  } else if (CurSize) {
    // Otherwise, use assignment for the already-constructed elements.
    std::move(RHS.begin(), RHS.begin()+CurSize, this->begin());
  }

  // Move-construct the new elements in place.
  this->uninitialized_move(RHS.begin()+CurSize, RHS.end(),
                           this->begin()+CurSize);

  // Set end.
  this->set_size(RHSSize);
  RHS.clear();
  return *this;
}

/// Storage for the SmallVector elements. This is specialized for the N=0 case
/// to avoid allocating unnecessary storage.
template <typename T, unsigned N>
struct SmallVectorStorage {
  alignas(T) char InlineElts[N * sizeof(T)];
};

/// We need the storage to be properly aligned even for small-size of 0 so that
/// the pointer math in \a SmallVectorTemplateCommon::getFirstEl() is
/// well-defined.
template <typename T> struct alignas(T) SmallVectorStorage<T, 0> {};

/// Forward declaration of SmallVector so that
/// calculateSmallVectorDefaultInlinedElements can reference
/// `sizeof(SmallVector<T, 0>)`.
template <typename T, unsigned N> class LLVM_GSL_OWNER SmallVector;

/// Helper class for calculating the default number of inline elements for
/// `SmallVector<T>`.
///
/// This should be migrated to a constexpr function when our minimum
/// compiler support is enough for multi-statement constexpr functions.
template <typename T> struct CalculateSmallVectorDefaultInlinedElements {
  // Parameter controlling the default number of inlined elements
  // for `SmallVector<T>`.
  //
  // The default number of inlined elements ensures that
  //   1. There is at least one inlined element.
  //   2. `sizeof(SmallVector<T>) <= kPreferredSmallVectorSizeof` unless
  //      it contradicts 1.
  static constexpr size_t kPreferredSmallVectorSizeof = 64;

  // static_assert that sizeof(T) is not "too big".
  //
  // Because our policy guarantees at least one inlined element, it is possible
  // for an arbitrarily large inlined element to allocate an arbitrarily large
  // amount of inline storage. We generally consider it an antipattern for a
  // SmallVector to allocate an excessive amount of inline storage, so we want
  // to call attention to these cases and make sure that users are making an
  // intentional decision if they request a lot of inline storage.
  //
  // We want this assertion to trigger in pathological cases, but otherwise
  // not be too easy to hit. To accomplish that, the cutoff is actually somewhat
  // larger than kPreferredSmallVectorSizeof (otherwise,
  // `SmallVector<SmallVector<T>>` would be one easy way to trip it, and that
  // pattern seems useful in practice).
  //
  // One wrinkle is that this assertion is in theory non-portable, since
  // sizeof(T) is in general platform-dependent. However, we don't expect this
  // to be much of an issue, because most LLVM development happens on 64-bit
  // hosts, and therefore sizeof(T) is expected to *decrease* when compiled for
  // 32-bit hosts, dodging the issue. The reverse situation, where development
  // happens on a 32-bit host and then fails due to sizeof(T) *increasing* on a
  // 64-bit host, is expected to be very rare.
  static_assert(
      sizeof(T) <= 256,
      "You are trying to use a default number of inlined elements for "
      "`SmallVector<T>` but `sizeof(T)` is really big! Please use an "
      "explicit number of inlined elements with `SmallVector<T, N>` to make "
      "sure you really want that much inline storage.");

  // Discount the size of the header itself when calculating the maximum inline
  // bytes.
  static constexpr size_t PreferredInlineBytes =
      kPreferredSmallVectorSizeof - sizeof(SmallVector<T, 0>);
  static constexpr size_t NumElementsThatFit = PreferredInlineBytes / sizeof(T);
  static constexpr size_t value =
      NumElementsThatFit == 0 ? 1 : NumElementsThatFit;
};
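
// Worked example of the default-N calculation above, assuming a 64-bit host
// where sizeof(SmallVector<T, 0>) is 16 bytes (one pointer plus two 32-bit
// size fields). That layout is typical but not guaranteed by this header.
//
//   // PreferredInlineBytes = 64 - 16 = 48
//   // T = int (4 bytes):        NumElementsThatFit = 48 / 4 = 12
//   // T = void* (8 bytes):      NumElementsThatFit = 48 / 8 = 6
//   // sizeof(T) = 96:           NumElementsThatFit = 0, so value = 1
//   static_assert(
//       llvm::CalculateSmallVectorDefaultInlinedElements<int>::value >= 1,
//       "always at least one inline element");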

/// This is a 'vector' (really, a variable-sized array), optimized
/// for the case when the array is small. It contains some number of elements
/// in-place, which allows it to avoid heap allocation when the actual number of
/// elements is below that threshold. This allows normal "small" cases to be
/// fast without losing generality for large inputs.
///
/// \note
/// In the absence of a well-motivated choice for the number of inlined
/// elements \p N, it is recommended to use \c SmallVector<T> (that is,
/// omitting the \p N). This will choose a default number of inlined elements
/// reasonable for allocation on the stack (for example, trying to keep \c
/// sizeof(SmallVector<T>) around 64 bytes).
///
/// \warning This does not attempt to be exception safe.
///
/// \see https://llvm.org/docs/ProgrammersManual.html#llvm-adt-smallvector-h
template <typename T,
          unsigned N = CalculateSmallVectorDefaultInlinedElements<T>::value>
class LLVM_GSL_OWNER SmallVector : public SmallVectorImpl<T>,
                                   SmallVectorStorage<T, N> {
public:
  SmallVector() : SmallVectorImpl<T>(N) {}

  ~SmallVector() {
    // Destroy the constructed elements in the vector.
    this->destroy_range(this->begin(), this->end());
  }

  explicit SmallVector(size_t Size, const T &Value = T())
      : SmallVectorImpl<T>(N) {
    this->assign(Size, Value);
  }

  template <typename ItTy,
            typename = std::enable_if_t<std::is_convertible<
                typename std::iterator_traits<ItTy>::iterator_category,
                std::input_iterator_tag>::value>>
  SmallVector(ItTy S, ItTy E) : SmallVectorImpl<T>(N) {
    this->append(S, E);
  }

  template <typename RangeTy>
  explicit SmallVector(const iterator_range<RangeTy> &R)
      : SmallVectorImpl<T>(N) {
    this->append(R.begin(), R.end());
  }

  SmallVector(std::initializer_list<T> IL) : SmallVectorImpl<T>(N) {
    this->assign(IL);
  }

  SmallVector(const SmallVector &RHS) : SmallVectorImpl<T>(N) {
    if (!RHS.empty())
      SmallVectorImpl<T>::operator=(RHS);
  }

  SmallVector &operator=(const SmallVector &RHS) {
    SmallVectorImpl<T>::operator=(RHS);
    return *this;
  }

  SmallVector(SmallVector &&RHS) : SmallVectorImpl<T>(N) {
    if (!RHS.empty())
      SmallVectorImpl<T>::operator=(::std::move(RHS));
  }

  SmallVector(SmallVectorImpl<T> &&RHS) : SmallVectorImpl<T>(N) {
    if (!RHS.empty())
      SmallVectorImpl<T>::operator=(::std::move(RHS));
  }

  SmallVector &operator=(SmallVector &&RHS) {
    SmallVectorImpl<T>::operator=(::std::move(RHS));
    return *this;
  }

  SmallVector &operator=(SmallVectorImpl<T> &&RHS) {
    SmallVectorImpl<T>::operator=(::std::move(RHS));
    return *this;
  }

  SmallVector &operator=(std::initializer_list<T> IL) {
    this->assign(IL);
    return *this;
  }
};
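
// Usage sketch (illustrative, not part of the header): small element counts
// stay in the inline buffer; exceeding the inline capacity falls back to a
// heap allocation transparently.
//
//   llvm::SmallVector<unsigned, 4> Ids;  // explicit inline capacity of 4
//   llvm::SmallVector<unsigned> MoreIds; // default N chosen as documented above
//   for (unsigned I = 0; I != 3; ++I)
//     Ids.push_back(I);                  // no heap allocation yet
//   Ids.append({3, 4, 5});               // may now spill to the heap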

template <typename T, unsigned N>
inline size_t capacity_in_bytes(const SmallVector<T, N> &X) {
  return X.capacity_in_bytes();
}

/// Given a range of type R, iterate the entire range and return a
/// SmallVector with the elements of the range. This is useful, for example,
/// when you want to iterate a range and then sort the results.
template <unsigned Size, typename R>
SmallVector<typename std::remove_const<typename std::remove_reference<
                decltype(*std::begin(std::declval<R &>()))>::type>::type,
            Size>
to_vector(R &&Range) {
  return {std::begin(Range), std::end(Range)};
}
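
// Usage sketch for to_vector (illustrative, not part of the header):
// materialize a range into a SmallVector so it can be sorted or indexed.
//
//   int Buffer[] = {3, 1, 2};
//   auto V = llvm::to_vector<4>(Buffer); // SmallVector<int, 4> with {3, 1, 2}
//   std::sort(V.begin(), V.end());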

} // end namespace llvm

namespace std {

  /// Implement std::swap in terms of SmallVector swap.
  template<typename T>
  inline void
  swap(llvm::SmallVectorImpl<T> &LHS, llvm::SmallVectorImpl<T> &RHS) {
    LHS.swap(RHS);
  }

  /// Implement std::swap in terms of SmallVector swap.
  template<typename T, unsigned N>
  inline void
  swap(llvm::SmallVector<T, N> &LHS, llvm::SmallVector<T, N> &RHS) {
    LHS.swap(RHS);
  }

} // end namespace std

#endif // LLVM_ADT_SMALLVECTOR_H

#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif