// -*- C++ -*-
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP_ATOMIC
#define _LIBCPP_ATOMIC

/*
    atomic synopsis

namespace std
{

// feature test macro [version.syn]

#define __cpp_lib_atomic_is_always_lock_free
#define __cpp_lib_atomic_flag_test
#define __cpp_lib_atomic_lock_free_type_aliases
#define __cpp_lib_atomic_wait

// order and consistency

enum memory_order: unspecified // enum class in C++20
{
    relaxed,
    consume, // load-consume
    acquire, // load-acquire
    release, // store-release
    acq_rel, // store-release load-acquire
    seq_cst  // store-release load-acquire
};

inline constexpr auto memory_order_relaxed = memory_order::relaxed;
inline constexpr auto memory_order_consume = memory_order::consume;
inline constexpr auto memory_order_acquire = memory_order::acquire;
inline constexpr auto memory_order_release = memory_order::release;
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

template <class T> T kill_dependency(T y) noexcept;

// lock-free property

#define ATOMIC_BOOL_LOCK_FREE unspecified
#define ATOMIC_CHAR_LOCK_FREE unspecified
#define ATOMIC_CHAR8_T_LOCK_FREE unspecified // C++20
#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
#define ATOMIC_SHORT_LOCK_FREE unspecified
#define ATOMIC_INT_LOCK_FREE unspecified
#define ATOMIC_LONG_LOCK_FREE unspecified
#define ATOMIC_LLONG_LOCK_FREE unspecified
#define ATOMIC_POINTER_LOCK_FREE unspecified

template <class T>
struct atomic
{
    using value_type = T;

    static constexpr bool is_always_lock_free;
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;

    atomic() noexcept = default; // until C++20
    constexpr atomic() noexcept(is_nothrow_default_constructible_v<T>); // since C++20
    constexpr atomic(T desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    T load(memory_order m = memory_order_seq_cst) const noexcept;
    operator T() const volatile noexcept;
    operator T() const noexcept;
    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
    T operator=(T) volatile noexcept;
    T operator=(T) noexcept;

    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    void wait(T, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(T, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
};

template <>
struct atomic<integral>
{
    using value_type = integral;
    using difference_type = value_type;

    static constexpr bool is_always_lock_free;
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;

    atomic() noexcept = default;
    constexpr atomic(integral desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    integral load(memory_order m = memory_order_seq_cst) const noexcept;
    operator integral() const volatile noexcept;
    operator integral() const noexcept;
    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    integral operator=(integral desr) volatile noexcept;
    integral operator=(integral desr) noexcept;

    integral exchange(integral desr,
                      memory_order m = memory_order_seq_cst) volatile noexcept;
    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;

    integral operator++(int) volatile noexcept;
    integral operator++(int) noexcept;
    integral operator--(int) volatile noexcept;
    integral operator--(int) noexcept;
    integral operator++() volatile noexcept;
    integral operator++() noexcept;
    integral operator--() volatile noexcept;
    integral operator--() noexcept;
    integral operator+=(integral op) volatile noexcept;
    integral operator+=(integral op) noexcept;
    integral operator-=(integral op) volatile noexcept;
    integral operator-=(integral op) noexcept;
    integral operator&=(integral op) volatile noexcept;
    integral operator&=(integral op) noexcept;
    integral operator|=(integral op) volatile noexcept;
    integral operator|=(integral op) noexcept;
    integral operator^=(integral op) volatile noexcept;
    integral operator^=(integral op) noexcept;

    void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(integral, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
};

template <class T>
struct atomic<T*>
{
    using value_type = T*;
    using difference_type = ptrdiff_t;

    static constexpr bool is_always_lock_free;
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;

    atomic() noexcept = default; // until C++20
    constexpr atomic() noexcept; // since C++20
    constexpr atomic(T* desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    T* load(memory_order m = memory_order_seq_cst) const noexcept;
    operator T*() const volatile noexcept;
    operator T*() const noexcept;
    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    T* operator=(T*) volatile noexcept;
    T* operator=(T*) noexcept;

    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;

    T* operator++(int) volatile noexcept;
    T* operator++(int) noexcept;
    T* operator--(int) volatile noexcept;
    T* operator--(int) noexcept;
    T* operator++() volatile noexcept;
    T* operator++() noexcept;
    T* operator--() volatile noexcept;
    T* operator--() noexcept;
    T* operator+=(ptrdiff_t op) volatile noexcept;
    T* operator+=(ptrdiff_t op) noexcept;
    T* operator-=(ptrdiff_t op) volatile noexcept;
    T* operator-=(ptrdiff_t op) noexcept;

    void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(T*, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
};

// [atomics.nonmembers], non-member functions

template<class T>
bool atomic_is_lock_free(const volatile atomic<T>*) noexcept;
template<class T>
bool atomic_is_lock_free(const atomic<T>*) noexcept;
template<class T>
void atomic_store(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
void atomic_store(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
void atomic_store_explicit(volatile atomic<T>*, atomic<T>::value_type,
                           memory_order) noexcept;
template<class T>
void atomic_store_explicit(atomic<T>*, atomic<T>::value_type,
                           memory_order) noexcept;
template<class T>
T atomic_load(const volatile atomic<T>*) noexcept;
template<class T>
T atomic_load(const atomic<T>*) noexcept;
template<class T>
T atomic_load_explicit(const volatile atomic<T>*, memory_order) noexcept;
template<class T>
T atomic_load_explicit(const atomic<T>*, memory_order) noexcept;
template<class T>
T atomic_exchange(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
T atomic_exchange(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
T atomic_exchange_explicit(volatile atomic<T>*, atomic<T>::value_type,
                           memory_order) noexcept;
template<class T>
T atomic_exchange_explicit(atomic<T>*, atomic<T>::value_type,
                           memory_order) noexcept;
template<class T>
bool atomic_compare_exchange_weak(volatile atomic<T>*, atomic<T>::value_type*,
                                  atomic<T>::value_type) noexcept;
template<class T>
bool atomic_compare_exchange_weak(atomic<T>*, atomic<T>::value_type*,
                                  atomic<T>::value_type) noexcept;
template<class T>
bool atomic_compare_exchange_strong(volatile atomic<T>*, atomic<T>::value_type*,
                                    atomic<T>::value_type) noexcept;
template<class T>
bool atomic_compare_exchange_strong(atomic<T>*, atomic<T>::value_type*,
                                    atomic<T>::value_type) noexcept;
template<class T>
bool atomic_compare_exchange_weak_explicit(volatile atomic<T>*, atomic<T>::value_type*,
                                           atomic<T>::value_type,
                                           memory_order, memory_order) noexcept;
template<class T>
bool atomic_compare_exchange_weak_explicit(atomic<T>*, atomic<T>::value_type*,
                                           atomic<T>::value_type,
                                           memory_order, memory_order) noexcept;
template<class T>
bool atomic_compare_exchange_strong_explicit(volatile atomic<T>*, atomic<T>::value_type*,
                                             atomic<T>::value_type,
                                             memory_order, memory_order) noexcept;
template<class T>
bool atomic_compare_exchange_strong_explicit(atomic<T>*, atomic<T>::value_type*,
                                             atomic<T>::value_type,
                                             memory_order, memory_order) noexcept;
template<class T>
T atomic_fetch_add(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
T atomic_fetch_add(atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
T atomic_fetch_add_explicit(volatile atomic<T>*, atomic<T>::difference_type,
                            memory_order) noexcept;
template<class T>
T atomic_fetch_add_explicit(atomic<T>*, atomic<T>::difference_type,
                            memory_order) noexcept;
template<class T>
T atomic_fetch_sub(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
T atomic_fetch_sub(atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
T atomic_fetch_sub_explicit(volatile atomic<T>*, atomic<T>::difference_type,
                            memory_order) noexcept;
template<class T>
T atomic_fetch_sub_explicit(atomic<T>*, atomic<T>::difference_type,
                            memory_order) noexcept;
template<class T>
T atomic_fetch_and(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
T atomic_fetch_and(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
T atomic_fetch_and_explicit(volatile atomic<T>*, atomic<T>::value_type,
                            memory_order) noexcept;
template<class T>
T atomic_fetch_and_explicit(atomic<T>*, atomic<T>::value_type,
                            memory_order) noexcept;
template<class T>
T atomic_fetch_or(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
T atomic_fetch_or(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
T atomic_fetch_or_explicit(volatile atomic<T>*, atomic<T>::value_type,
                           memory_order) noexcept;
template<class T>
T atomic_fetch_or_explicit(atomic<T>*, atomic<T>::value_type,
                           memory_order) noexcept;
template<class T>
T atomic_fetch_xor(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
T atomic_fetch_xor(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
T atomic_fetch_xor_explicit(volatile atomic<T>*, atomic<T>::value_type,
                            memory_order) noexcept;
template<class T>
T atomic_fetch_xor_explicit(atomic<T>*, atomic<T>::value_type,
                            memory_order) noexcept;
template<class T>
void atomic_wait(const volatile atomic<T>*, atomic<T>::value_type);
template<class T>
void atomic_wait(const atomic<T>*, atomic<T>::value_type);
template<class T>
void atomic_wait_explicit(const volatile atomic<T>*, atomic<T>::value_type,
                          memory_order);
template<class T>
void atomic_wait_explicit(const atomic<T>*, atomic<T>::value_type,
                          memory_order);
template<class T>
void atomic_notify_one(volatile atomic<T>*);
template<class T>
void atomic_notify_one(atomic<T>*);
template<class T>
void atomic_notify_all(volatile atomic<T>*);
template<class T>
void atomic_notify_all(atomic<T>*);

// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char8_t>            atomic_char8_t; // C++20
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic<int8_t>   atomic_int8_t;
typedef atomic<uint8_t>  atomic_uint8_t;
typedef atomic<int16_t>  atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic<int32_t>  atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic<int64_t>  atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// flag type and operations

typedef struct atomic_flag
{
    atomic_flag() noexcept = default; // until C++20
    constexpr atomic_flag() noexcept; // since C++20
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    bool test(memory_order m = memory_order_seq_cst) volatile noexcept;
    bool test(memory_order m = memory_order_seq_cst) noexcept;
    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
    void clear(memory_order m = memory_order_seq_cst) noexcept;

    void wait(bool, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(bool, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
} atomic_flag;

bool atomic_flag_test(volatile atomic_flag* obj) noexcept;
bool atomic_flag_test(atomic_flag* obj) noexcept;
bool atomic_flag_test_explicit(volatile atomic_flag* obj,
                               memory_order m) noexcept;
bool atomic_flag_test_explicit(atomic_flag* obj, memory_order m) noexcept;
bool atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
bool atomic_flag_test_and_set(atomic_flag* obj) noexcept;
bool atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
                                       memory_order m) noexcept;
bool atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
void atomic_flag_clear(volatile atomic_flag* obj) noexcept;
void atomic_flag_clear(atomic_flag* obj) noexcept;
void atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
void atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
void atomic_flag_wait(const volatile atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait(const atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait_explicit(const volatile atomic_flag* obj, bool old, memory_order m) noexcept;
void atomic_flag_wait_explicit(const atomic_flag* obj, bool old, memory_order m) noexcept;
void atomic_flag_notify_one(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_one(atomic_flag* obj) noexcept;
void atomic_flag_notify_all(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_all(atomic_flag* obj) noexcept;
// fences

void atomic_thread_fence(memory_order m) noexcept;
void atomic_signal_fence(memory_order m) noexcept;

// deprecated

template <class T>
void atomic_init(volatile atomic<T>* obj, atomic<T>::value_type desr) noexcept;
template <class T>
void atomic_init(atomic<T>* obj, atomic<T>::value_type desr) noexcept;

#define ATOMIC_VAR_INIT(value) see below
#define ATOMIC_FLAG_INIT see below

}  // std

*/
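// Illustrative usage (a minimal sketch based on the synopsis above; the names
// `counter`, `ready`, `worker` and `consumer` are examples only, not part of
// this header):
//
//     std::atomic<int>  counter{0};
//     std::atomic<bool> ready{false};
//
//     void worker() {
//         counter.fetch_add(1, std::memory_order_relaxed); // atomic read-modify-write
//         ready.store(true, std::memory_order_release);    // publish the result
//         ready.notify_one();                              // C++20: wake a waiter
//     }
//
//     void consumer() {
//         ready.wait(false, std::memory_order_acquire);    // C++20: block while value == false
//         int n = counter.load(std::memory_order_relaxed); // the acquire/release pair makes the
//         (void)n;                                         // increment visible here
//     }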
#include <__assert> // all public C++ headers provide the assertion handler
#include <__availability>
#include <__chrono/duration.h>
#include <__config>
#include <__thread/poll_with_backoff.h>
#include <__thread/timed_backoff_policy.h>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <type_traits>
#include <version>

#ifndef _LIBCPP_HAS_NO_THREADS
# include <__threading_support>
#endif

#ifndef _LIBCPP_REMOVE_TRANSITIVE_INCLUDES
# include <chrono>
#endif

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
# pragma GCC system_header
#endif

#ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
# error <atomic> is not implemented
#endif

#ifdef kill_dependency
# error <atomic> is incompatible with <stdatomic.h> before C++23. Please compile with -std=c++23.
#endif

#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
                           __m == memory_order_acquire || \
                           __m == memory_order_acq_rel, \
                           "memory order argument to atomic operation is invalid")

#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
                           __m == memory_order_acq_rel, \
                           "memory order argument to atomic operation is invalid")

#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
  _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
                           __f == memory_order_acq_rel, \
                           "memory order argument to atomic operation is invalid")
_LIBCPP_BEGIN_NAMESPACE_STD

// Figure out what the underlying type for `memory_order` would be if it were
// declared as an unscoped enum (accounting for -fshort-enums). Use this result
// to pin the underlying type in C++20.
enum __legacy_memory_order {
    __mo_relaxed,
    __mo_consume,
    __mo_acquire,
    __mo_release,
    __mo_acq_rel,
    __mo_seq_cst
};

typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;

#if _LIBCPP_STD_VER > 17

enum class memory_order : __memory_order_underlying_t {
    relaxed = __mo_relaxed,
    consume = __mo_consume,
    acquire = __mo_acquire,
    release = __mo_release,
    acq_rel = __mo_acq_rel,
    seq_cst = __mo_seq_cst
};

inline constexpr auto memory_order_relaxed = memory_order::relaxed;
inline constexpr auto memory_order_consume = memory_order::consume;
inline constexpr auto memory_order_acquire = memory_order::acquire;
inline constexpr auto memory_order_release = memory_order::release;
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

#else

typedef enum memory_order {
    memory_order_relaxed = __mo_relaxed,
    memory_order_consume = __mo_consume,
    memory_order_acquire = __mo_acquire,
    memory_order_release = __mo_release,
    memory_order_acq_rel = __mo_acq_rel,
    memory_order_seq_cst = __mo_seq_cst,
} memory_order;

#endif // _LIBCPP_STD_VER > 17

template <typename _Tp> _LIBCPP_INLINE_VISIBILITY
bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
  return _VSTD::memcmp(&__lhs, &__rhs, sizeof(_Tp)) == 0;
}

static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
              "unexpected underlying type for std::memory_order");

#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
    defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)

// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
// the default operator= in an object is not volatile, a byte-by-byte copy
// is required.
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
  __a_value = __val;
}
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
  volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
  volatile char* __end = __to + sizeof(_Tp);
  volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
  while (__to != __end)
    *__to++ = *__from++;
}
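// For illustration: given a class type `S` with the implicitly declared copy
// assignment, `volatile S a; S b; a = b;` does not compile, because that
// operator= is not volatile-qualified; the overload above therefore falls back
// to a byte-wise copy through volatile char*, which is valid for the trivially
// copyable types this header deals with.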
#endif

#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)

template <typename _Tp>
struct __cxx_atomic_base_impl {
  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;
};

_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
         (__order == memory_order_release ? __ATOMIC_RELEASE:
         (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
         (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
           __ATOMIC_CONSUME))));
}

_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
         (__order == memory_order_release ? __ATOMIC_RELAXED:
         (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
         (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
           __ATOMIC_CONSUME))));
}
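// Note: the failure mapping above strips any release component, since a failed
// compare-exchange performs only a load; e.g. memory_order_release maps to
// __ATOMIC_RELAXED and memory_order_acq_rel maps to __ATOMIC_ACQUIRE.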
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __a->__a_value = __val;
}

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__to_gcc_order(__order));
}

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
                      memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
                          memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
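// For illustration: __skip_amt scales the delta so pointer arithmetic works in
// whole objects, e.g. __skip_amt<int*>::value == sizeof(int); for non-pointer
// types the factor is 1, so fetch_add/fetch_sub below pass the delta through
// unchanged.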
  714. template <typename _Tp, typename _Td>
  715. _LIBCPP_INLINE_VISIBILITY
  716. _Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
  717. _Td __delta, memory_order __order) {
  718. return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
  719. __to_gcc_order(__order));
  720. }
  721. template <typename _Tp, typename _Td>
  722. _LIBCPP_INLINE_VISIBILITY
  723. _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
  724. memory_order __order) {
  725. return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
  726. __to_gcc_order(__order));
  727. }
  728. template <typename _Tp, typename _Td>
  729. _LIBCPP_INLINE_VISIBILITY
  730. _Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
  731. _Td __delta, memory_order __order) {
  732. return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
  733. __to_gcc_order(__order));
  734. }
  735. template <typename _Tp, typename _Td>
  736. _LIBCPP_INLINE_VISIBILITY
  737. _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
  738. memory_order __order) {
  739. return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
  740. __to_gcc_order(__order));
  741. }
  742. template <typename _Tp>
  743. _LIBCPP_INLINE_VISIBILITY
  744. _Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
  745. _Tp __pattern, memory_order __order) {
  746. return __atomic_fetch_and(&__a->__a_value, __pattern,
  747. __to_gcc_order(__order));
  748. }
  749. template <typename _Tp>
  750. _LIBCPP_INLINE_VISIBILITY
  751. _Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
  752. _Tp __pattern, memory_order __order) {
  753. return __atomic_fetch_and(&__a->__a_value, __pattern,
  754. __to_gcc_order(__order));
  755. }
  756. template <typename _Tp>
  757. _LIBCPP_INLINE_VISIBILITY
  758. _Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
  759. _Tp __pattern, memory_order __order) {
  760. return __atomic_fetch_or(&__a->__a_value, __pattern,
  761. __to_gcc_order(__order));
  762. }
  763. template <typename _Tp>
  764. _LIBCPP_INLINE_VISIBILITY
  765. _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
  766. memory_order __order) {
  767. return __atomic_fetch_or(&__a->__a_value, __pattern,
  768. __to_gcc_order(__order));
  769. }
  770. template <typename _Tp>
  771. _LIBCPP_INLINE_VISIBILITY
  772. _Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
  773. _Tp __pattern, memory_order __order) {
  774. return __atomic_fetch_xor(&__a->__a_value, __pattern,
  775. __to_gcc_order(__order));
  776. }
  777. template <typename _Tp>
  778. _LIBCPP_INLINE_VISIBILITY
  779. _Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
  780. memory_order __order) {
  781. return __atomic_fetch_xor(&__a->__a_value, __pattern,
  782. __to_gcc_order(__order));
  783. }
  784. #define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
  785. #elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)
  786. template <typename _Tp>
  787. struct __cxx_atomic_base_impl {
  788. _LIBCPP_INLINE_VISIBILITY
  789. #ifndef _LIBCPP_CXX03_LANG
  790. __cxx_atomic_base_impl() _NOEXCEPT = default;
  791. #else
  792. __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
  793. #endif // _LIBCPP_CXX03_LANG
  794. _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp __value) _NOEXCEPT
  795. : __a_value(__value) {}
  796. _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
  797. };
  798. #define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
  799. _LIBCPP_INLINE_VISIBILITY inline
  800. void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
  801. __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
  802. }
  803. _LIBCPP_INLINE_VISIBILITY inline
  804. void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
  805. __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
  806. }
  807. template<class _Tp>
  808. _LIBCPP_INLINE_VISIBILITY
  809. void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
  810. __c11_atomic_init(&__a->__a_value, __val);
  811. }
  812. template<class _Tp>
  813. _LIBCPP_INLINE_VISIBILITY
  814. void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
  815. __c11_atomic_init(&__a->__a_value, __val);
  816. }
  817. template<class _Tp>
  818. _LIBCPP_INLINE_VISIBILITY
  819. void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
  820. __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
  821. }
  822. template<class _Tp>
  823. _LIBCPP_INLINE_VISIBILITY
  824. void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
  825. __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
  826. }
  827. template<class _Tp>
  828. _LIBCPP_INLINE_VISIBILITY
  829. _Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
  830. using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  831. return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
  832. }
  833. template<class _Tp>
  834. _LIBCPP_INLINE_VISIBILITY
  835. _Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
  836. using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  837. return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
  838. }
  839. template<class _Tp>
  840. _LIBCPP_INLINE_VISIBILITY
  841. _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
  842. return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
  843. }
  844. template<class _Tp>
  845. _LIBCPP_INLINE_VISIBILITY
  846. _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
  847. return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
  848. }
  849. _LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __order) {
  850. // Avoid switch statement to make this a constexpr.
  851. return __order == memory_order_release ? memory_order_relaxed:
  852. (__order == memory_order_acq_rel ? memory_order_acquire:
  853. __order);
  854. }
  855. template<class _Tp>
  856. _LIBCPP_INLINE_VISIBILITY
  857. bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  858. return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
  859. }
  860. template<class _Tp>
  861. _LIBCPP_INLINE_VISIBILITY
  862. bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  863. return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
  864. }
  865. template<class _Tp>
  866. _LIBCPP_INLINE_VISIBILITY
  867. bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  868. return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
  869. }
  870. template<class _Tp>
  871. _LIBCPP_INLINE_VISIBILITY
  872. bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  873. return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
  874. }
  875. template<class _Tp>
  876. _LIBCPP_INLINE_VISIBILITY
  877. _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  878. return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  879. }
  880. template<class _Tp>
  881. _LIBCPP_INLINE_VISIBILITY
  882. _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  883. return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  884. }
  885. template<class _Tp>
  886. _LIBCPP_INLINE_VISIBILITY
  887. _Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  888. return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  889. }
  890. template<class _Tp>
  891. _LIBCPP_INLINE_VISIBILITY
  892. _Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  893. return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  894. }
  895. template<class _Tp>
  896. _LIBCPP_INLINE_VISIBILITY
  897. _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  898. return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  899. }
  900. template<class _Tp>
  901. _LIBCPP_INLINE_VISIBILITY
  902. _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  903. return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  904. }
  905. template<class _Tp>
  906. _LIBCPP_INLINE_VISIBILITY
  907. _Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  908. return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  909. }
  910. template<class _Tp>
  911. _LIBCPP_INLINE_VISIBILITY
  912. _Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  913. return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  914. }
  915. template<class _Tp>
  916. _LIBCPP_INLINE_VISIBILITY
  917. _Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  918. return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
  919. }
  920. template<class _Tp>
  921. _LIBCPP_INLINE_VISIBILITY
  922. _Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  923. return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
  924. }
  925. template<class _Tp>
  926. _LIBCPP_INLINE_VISIBILITY
  927. _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  928. return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
  929. }
  930. template<class _Tp>
  931. _LIBCPP_INLINE_VISIBILITY
  932. _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  933. return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
  934. }
  935. template<class _Tp>
  936. _LIBCPP_INLINE_VISIBILITY
  937. _Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  938. return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
  939. }
  940. template<class _Tp>
  941. _LIBCPP_INLINE_VISIBILITY
  942. _Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  943. return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
  944. }
  945. #endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
  946. template <class _Tp>
  947. _LIBCPP_INLINE_VISIBILITY
  948. _Tp kill_dependency(_Tp __y) _NOEXCEPT
  949. {
  950. return __y;
  951. }
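// Illustrative note (not from the original header): kill_dependency simply returns its
// argument; its only effect is to end a memory_order_consume dependency chain. A minimal
// sketch, with `ptr` being a hypothetical std::atomic<int*>:
//
//   int* q = ptr.load(std::memory_order_consume);
//   int v = std::kill_dependency(*q);  // the value of *q, with the dependency chain cut here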
  952. #if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
  953. # define ATOMIC_BOOL_LOCK_FREE __CLANG_ATOMIC_BOOL_LOCK_FREE
  954. # define ATOMIC_CHAR_LOCK_FREE __CLANG_ATOMIC_CHAR_LOCK_FREE
  955. #ifndef _LIBCPP_HAS_NO_CHAR8_T
  956. # define ATOMIC_CHAR8_T_LOCK_FREE __CLANG_ATOMIC_CHAR8_T_LOCK_FREE
  957. #endif
  958. # define ATOMIC_CHAR16_T_LOCK_FREE __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
  959. # define ATOMIC_CHAR32_T_LOCK_FREE __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
  960. # define ATOMIC_WCHAR_T_LOCK_FREE __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
  961. # define ATOMIC_SHORT_LOCK_FREE __CLANG_ATOMIC_SHORT_LOCK_FREE
  962. # define ATOMIC_INT_LOCK_FREE __CLANG_ATOMIC_INT_LOCK_FREE
  963. # define ATOMIC_LONG_LOCK_FREE __CLANG_ATOMIC_LONG_LOCK_FREE
  964. # define ATOMIC_LLONG_LOCK_FREE __CLANG_ATOMIC_LLONG_LOCK_FREE
  965. # define ATOMIC_POINTER_LOCK_FREE __CLANG_ATOMIC_POINTER_LOCK_FREE
  966. #elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
  967. # define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
  968. # define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
  969. #ifndef _LIBCPP_HAS_NO_CHAR8_T
  970. # define ATOMIC_CHAR8_T_LOCK_FREE __GCC_ATOMIC_CHAR8_T_LOCK_FREE
  971. #endif
  972. # define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
  973. # define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
  974. # define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
  975. # define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
  976. # define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
  977. # define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
  978. # define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
  979. # define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
  980. #endif
  981. #ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
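// When the library is restricted to the compiler builtins, types that cannot be handled
// lock-free fall back to the lock-based implementation below: every operation takes a small
// spinlock (__a_lock) around a plain read/modify/write of __a_value.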
  982. template<typename _Tp>
  983. struct __cxx_atomic_lock_impl {
  984. _LIBCPP_INLINE_VISIBILITY
  985. __cxx_atomic_lock_impl() _NOEXCEPT
  986. : __a_value(), __a_lock(0) {}
  987. _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
  988. __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
  989. : __a_value(value), __a_lock(0) {}
  990. _Tp __a_value;
  991. mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;
  992. _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
  993. while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
  994. /*spin*/;
  995. }
  996. _LIBCPP_INLINE_VISIBILITY void __lock() const {
  997. while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
  998. /*spin*/;
  999. }
  1000. _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
  1001. __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  1002. }
  1003. _LIBCPP_INLINE_VISIBILITY void __unlock() const {
  1004. __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  1005. }
  1006. _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
  1007. __lock();
  1008. _Tp __old;
  1009. __cxx_atomic_assign_volatile(__old, __a_value);
  1010. __unlock();
  1011. return __old;
  1012. }
  1013. _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
  1014. __lock();
  1015. _Tp __old = __a_value;
  1016. __unlock();
  1017. return __old;
  1018. }
  1019. };
  1020. template <typename _Tp>
  1021. _LIBCPP_INLINE_VISIBILITY
  1022. void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
  1023. __cxx_atomic_assign_volatile(__a->__a_value, __val);
  1024. }
  1025. template <typename _Tp>
  1026. _LIBCPP_INLINE_VISIBILITY
  1027. void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
  1028. __a->__a_value = __val;
  1029. }
  1030. template <typename _Tp>
  1031. _LIBCPP_INLINE_VISIBILITY
  1032. void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
  1033. __a->__lock();
  1034. __cxx_atomic_assign_volatile(__a->__a_value, __val);
  1035. __a->__unlock();
  1036. }
  1037. template <typename _Tp>
  1038. _LIBCPP_INLINE_VISIBILITY
  1039. void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
  1040. __a->__lock();
  1041. __a->__a_value = __val;
  1042. __a->__unlock();
  1043. }
  1044. template <typename _Tp>
  1045. _LIBCPP_INLINE_VISIBILITY
  1046. _Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  1047. return __a->__read();
  1048. }
  1049. template <typename _Tp>
  1050. _LIBCPP_INLINE_VISIBILITY
  1051. _Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  1052. return __a->__read();
  1053. }
  1054. template <typename _Tp>
  1055. _LIBCPP_INLINE_VISIBILITY
  1056. _Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  1057. __a->__lock();
  1058. _Tp __old;
  1059. __cxx_atomic_assign_volatile(__old, __a->__a_value);
  1060. __cxx_atomic_assign_volatile(__a->__a_value, __value);
  1061. __a->__unlock();
  1062. return __old;
  1063. }
  1064. template <typename _Tp>
  1065. _LIBCPP_INLINE_VISIBILITY
  1066. _Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  1067. __a->__lock();
  1068. _Tp __old = __a->__a_value;
  1069. __a->__a_value = __value;
  1070. __a->__unlock();
  1071. return __old;
  1072. }
  1073. template <typename _Tp>
  1074. _LIBCPP_INLINE_VISIBILITY
  1075. bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1076. _Tp* __expected, _Tp __value, memory_order, memory_order) {
  1077. _Tp __temp;
  1078. __a->__lock();
  1079. __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  1080. bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  1081. if(__ret)
  1082. __cxx_atomic_assign_volatile(__a->__a_value, __value);
  1083. else
  1084. __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  1085. __a->__unlock();
  1086. return __ret;
  1087. }
  1088. template <typename _Tp>
  1089. _LIBCPP_INLINE_VISIBILITY
  1090. bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
  1091. _Tp* __expected, _Tp __value, memory_order, memory_order) {
  1092. __a->__lock();
  1093. bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  1094. if(__ret)
  1095. _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  1096. else
  1097. _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  1098. __a->__unlock();
  1099. return __ret;
  1100. }
  1101. template <typename _Tp>
  1102. _LIBCPP_INLINE_VISIBILITY
  1103. bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1104. _Tp* __expected, _Tp __value, memory_order, memory_order) {
  1105. _Tp __temp;
  1106. __a->__lock();
  1107. __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  1108. bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  1109. if(__ret)
  1110. __cxx_atomic_assign_volatile(__a->__a_value, __value);
  1111. else
  1112. __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  1113. __a->__unlock();
  1114. return __ret;
  1115. }
  1116. template <typename _Tp>
  1117. _LIBCPP_INLINE_VISIBILITY
  1118. bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
  1119. _Tp* __expected, _Tp __value, memory_order, memory_order) {
  1120. __a->__lock();
  1121. bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  1122. if(__ret)
  1123. _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  1124. else
  1125. _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  1126. __a->__unlock();
  1127. return __ret;
  1128. }
  1129. template <typename _Tp, typename _Td>
  1130. _LIBCPP_INLINE_VISIBILITY
  1131. _Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1132. _Td __delta, memory_order) {
  1133. __a->__lock();
  1134. _Tp __old;
  1135. __cxx_atomic_assign_volatile(__old, __a->__a_value);
  1136. __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
  1137. __a->__unlock();
  1138. return __old;
  1139. }
  1140. template <typename _Tp, typename _Td>
  1141. _LIBCPP_INLINE_VISIBILITY
  1142. _Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
  1143. _Td __delta, memory_order) {
  1144. __a->__lock();
  1145. _Tp __old = __a->__a_value;
  1146. __a->__a_value += __delta;
  1147. __a->__unlock();
  1148. return __old;
  1149. }
  1150. template <typename _Tp, typename _Td>
  1151. _LIBCPP_INLINE_VISIBILITY
  1152. _Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
  1153. ptrdiff_t __delta, memory_order) {
  1154. __a->__lock();
  1155. _Tp* __old;
  1156. __cxx_atomic_assign_volatile(__old, __a->__a_value);
  1157. __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
  1158. __a->__unlock();
  1159. return __old;
  1160. }
  1161. template <typename _Tp, typename _Td>
  1162. _LIBCPP_INLINE_VISIBILITY
  1163. _Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
  1164. ptrdiff_t __delta, memory_order) {
  1165. __a->__lock();
  1166. _Tp* __old = __a->__a_value;
  1167. __a->__a_value += __delta;
  1168. __a->__unlock();
  1169. return __old;
  1170. }
  1171. template <typename _Tp, typename _Td>
  1172. _LIBCPP_INLINE_VISIBILITY
  1173. _Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1174. _Td __delta, memory_order) {
  1175. __a->__lock();
  1176. _Tp __old;
  1177. __cxx_atomic_assign_volatile(__old, __a->__a_value);
  1178. __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
  1179. __a->__unlock();
  1180. return __old;
  1181. }
  1182. template <typename _Tp, typename _Td>
  1183. _LIBCPP_INLINE_VISIBILITY
  1184. _Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
  1185. _Td __delta, memory_order) {
  1186. __a->__lock();
  1187. _Tp __old = __a->__a_value;
  1188. __a->__a_value -= __delta;
  1189. __a->__unlock();
  1190. return __old;
  1191. }
  1192. template <typename _Tp>
  1193. _LIBCPP_INLINE_VISIBILITY
  1194. _Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1195. _Tp __pattern, memory_order) {
  1196. __a->__lock();
  1197. _Tp __old;
  1198. __cxx_atomic_assign_volatile(__old, __a->__a_value);
  1199. __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
  1200. __a->__unlock();
  1201. return __old;
  1202. }
  1203. template <typename _Tp>
  1204. _LIBCPP_INLINE_VISIBILITY
  1205. _Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
  1206. _Tp __pattern, memory_order) {
  1207. __a->__lock();
  1208. _Tp __old = __a->__a_value;
  1209. __a->__a_value &= __pattern;
  1210. __a->__unlock();
  1211. return __old;
  1212. }
  1213. template <typename _Tp>
  1214. _LIBCPP_INLINE_VISIBILITY
  1215. _Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1216. _Tp __pattern, memory_order) {
  1217. __a->__lock();
  1218. _Tp __old;
  1219. __cxx_atomic_assign_volatile(__old, __a->__a_value);
  1220. __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
  1221. __a->__unlock();
  1222. return __old;
  1223. }
  1224. template <typename _Tp>
  1225. _LIBCPP_INLINE_VISIBILITY
  1226. _Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
  1227. _Tp __pattern, memory_order) {
  1228. __a->__lock();
  1229. _Tp __old = __a->__a_value;
  1230. __a->__a_value |= __pattern;
  1231. __a->__unlock();
  1232. return __old;
  1233. }
  1234. template <typename _Tp>
  1235. _LIBCPP_INLINE_VISIBILITY
  1236. _Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1237. _Tp __pattern, memory_order) {
  1238. __a->__lock();
  1239. _Tp __old;
  1240. __cxx_atomic_assign_volatile(__old, __a->__a_value);
  1241. __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
  1242. __a->__unlock();
  1243. return __old;
  1244. }
  1245. template <typename _Tp>
  1246. _LIBCPP_INLINE_VISIBILITY
  1247. _Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
  1248. _Tp __pattern, memory_order) {
  1249. __a->__lock();
  1250. _Tp __old = __a->__a_value;
  1251. __a->__a_value ^= __pattern;
  1252. __a->__unlock();
  1253. return __old;
  1254. }
  1255. #ifdef __cpp_lib_atomic_is_always_lock_free
  1256. template<typename _Tp> struct __cxx_is_always_lock_free {
  1257. enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; };
  1258. #else
  1259. template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; };
  1260. // Implementations must match the C ATOMIC_*_LOCK_FREE macro values.
  1261. template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; };
  1262. template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
  1263. template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
  1264. template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
  1265. #ifndef _LIBCPP_HAS_NO_CHAR8_T
  1266. template<> struct __cxx_is_always_lock_free<char8_t> { enum { __value = 2 == ATOMIC_CHAR8_T_LOCK_FREE }; };
  1267. #endif
  1268. template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; };
  1269. template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; };
  1270. #ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
  1271. template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; };
  1272. #endif
  1273. template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
  1274. template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
  1275. template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
  1276. template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
  1277. template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
  1278. template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
  1279. template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
  1280. template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
  1281. template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
  1282. template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
  1283. #endif //__cpp_lib_atomic_is_always_lock_free
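// Choose the backing implementation: the lock-free __cxx_atomic_base_impl when the type is
// known to be always lock-free, otherwise the spinlock-guarded __cxx_atomic_lock_impl above.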
  1284. template <typename _Tp,
  1285. typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value,
  1286. __cxx_atomic_base_impl<_Tp>,
  1287. __cxx_atomic_lock_impl<_Tp> >::type>
  1288. #else
  1289. template <typename _Tp,
  1290. typename _Base = __cxx_atomic_base_impl<_Tp> >
  1291. #endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
  1292. struct __cxx_atomic_impl : public _Base {
  1293. static_assert(is_trivially_copyable<_Tp>::value,
  1294. "std::atomic<T> requires that 'T' be a trivially copyable type");
  1295. _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT = default;
  1296. _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp __value) _NOEXCEPT
  1297. : _Base(__value) {}
  1298. };
  1299. #if defined(__linux__) || (defined(_AIX) && !defined(__64BIT__))
  1300. using __cxx_contention_t = int32_t;
  1301. #else
  1302. using __cxx_contention_t = int64_t;
  1303. #endif // __linux__ || (_AIX && !__64BIT__)
  1304. using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>;
  1305. #if defined(_LIBCPP_HAS_NO_THREADS)
  1306. # define _LIBCPP_HAS_NO_PLATFORM_WAIT
  1307. #endif
1308. // TODO:
1309. // _LIBCPP_HAS_NO_PLATFORM_WAIT is currently a "dead" macro, in the sense that
1310. // it is not tied into the build system anywhere and is not documented. It is
1311. // technically never defined except when threads are disabled, so it should be
1312. // cleaned up; that cleanup should happen in its own changeset in case it
1313. // breaks "bad" users.
  1314. #ifndef _LIBCPP_HAS_NO_PLATFORM_WAIT
  1315. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*);
  1316. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*);
  1317. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*);
  1318. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t);
  1319. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*);
  1320. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*);
  1321. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*);
  1322. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t);
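// Backoff functor used by __cxx_atomic_wait below: poll for roughly the first 4 microseconds,
// yield the thread up to about 64 microseconds, and only then block in the platform wait via
// __libcpp_atomic_monitor / __libcpp_atomic_wait.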
  1323. template <class _Atp, class _Fn>
  1324. struct __libcpp_atomic_wait_backoff_impl {
  1325. _Atp* __a;
  1326. _Fn __test_fn;
  1327. _LIBCPP_AVAILABILITY_SYNC
  1328. _LIBCPP_INLINE_VISIBILITY bool operator()(chrono::nanoseconds __elapsed) const
  1329. {
  1330. if(__elapsed > chrono::microseconds(64))
  1331. {
  1332. auto const __monitor = __libcpp_atomic_monitor(__a);
  1333. if(__test_fn())
  1334. return true;
  1335. __libcpp_atomic_wait(__a, __monitor);
  1336. }
  1337. else if(__elapsed > chrono::microseconds(4))
  1338. __libcpp_thread_yield();
  1339. else
  1340. {} // poll
  1341. return false;
  1342. }
  1343. };
  1344. template <class _Atp, class _Fn>
  1345. _LIBCPP_AVAILABILITY_SYNC
  1346. _LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Fn && __test_fn)
  1347. {
  1348. __libcpp_atomic_wait_backoff_impl<_Atp, typename decay<_Fn>::type> __backoff_fn = {__a, __test_fn};
  1349. return __libcpp_thread_poll_with_backoff(__test_fn, __backoff_fn);
  1350. }
  1351. #else // _LIBCPP_HAS_NO_PLATFORM_WAIT
  1352. template <class _Tp>
  1353. _LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_all(__cxx_atomic_impl<_Tp> const volatile*) { }
  1354. template <class _Tp>
  1355. _LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_one(__cxx_atomic_impl<_Tp> const volatile*) { }
  1356. template <class _Atp, class _Fn>
  1357. _LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp*, _Fn && __test_fn)
  1358. {
  1359. #if defined(_LIBCPP_HAS_NO_THREADS)
  1360. using _Policy = __spinning_backoff_policy;
  1361. #else
  1362. using _Policy = __libcpp_timed_backoff_policy;
  1363. #endif
  1364. return __libcpp_thread_poll_with_backoff(__test_fn, _Policy());
  1365. }
  1366. #endif // _LIBCPP_HAS_NO_PLATFORM_WAIT
  1367. template <class _Atp, class _Tp>
  1368. struct __cxx_atomic_wait_test_fn_impl {
  1369. _Atp* __a;
  1370. _Tp __val;
  1371. memory_order __order;
  1372. _LIBCPP_INLINE_VISIBILITY bool operator()() const
  1373. {
  1374. return !__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val);
  1375. }
  1376. };
  1377. template <class _Atp, class _Tp>
  1378. _LIBCPP_AVAILABILITY_SYNC
  1379. _LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order)
  1380. {
  1381. __cxx_atomic_wait_test_fn_impl<_Atp, _Tp> __test_fn = {__a, __val, __order};
  1382. return __cxx_atomic_wait(__a, __test_fn);
  1383. }
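// Illustrative sketch (not from the original header): the machinery above is what backs
// std::atomic<T>::wait/notify in C++20. Typical usage looks like:
//
//   std::atomic<int> ready{0};
//   // waiter:
//   ready.wait(0);        // blocks while the stored value is still 0
//   // signaller:
//   ready.store(1);
//   ready.notify_one();   // wakes a thread blocked in wait(0)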
  1384. // general atomic<T>
  1385. template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
  1386. struct __atomic_base // false
  1387. {
  1388. mutable __cxx_atomic_impl<_Tp> __a_;
  1389. #if defined(__cpp_lib_atomic_is_always_lock_free)
  1390. static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
  1391. #endif
  1392. _LIBCPP_INLINE_VISIBILITY
  1393. bool is_lock_free() const volatile _NOEXCEPT
  1394. {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
  1395. _LIBCPP_INLINE_VISIBILITY
  1396. bool is_lock_free() const _NOEXCEPT
  1397. {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
  1398. _LIBCPP_INLINE_VISIBILITY
  1399. void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1400. _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
  1401. {__cxx_atomic_store(&__a_, __d, __m);}
  1402. _LIBCPP_INLINE_VISIBILITY
  1403. void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1404. _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
  1405. {__cxx_atomic_store(&__a_, __d, __m);}
  1406. _LIBCPP_INLINE_VISIBILITY
  1407. _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
  1408. _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
  1409. {return __cxx_atomic_load(&__a_, __m);}
  1410. _LIBCPP_INLINE_VISIBILITY
  1411. _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
  1412. _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
  1413. {return __cxx_atomic_load(&__a_, __m);}
  1414. _LIBCPP_INLINE_VISIBILITY
  1415. operator _Tp() const volatile _NOEXCEPT {return load();}
  1416. _LIBCPP_INLINE_VISIBILITY
  1417. operator _Tp() const _NOEXCEPT {return load();}
  1418. _LIBCPP_INLINE_VISIBILITY
  1419. _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1420. {return __cxx_atomic_exchange(&__a_, __d, __m);}
  1421. _LIBCPP_INLINE_VISIBILITY
  1422. _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1423. {return __cxx_atomic_exchange(&__a_, __d, __m);}
  1424. _LIBCPP_INLINE_VISIBILITY
  1425. bool compare_exchange_weak(_Tp& __e, _Tp __d,
  1426. memory_order __s, memory_order __f) volatile _NOEXCEPT
  1427. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1428. {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
  1429. _LIBCPP_INLINE_VISIBILITY
  1430. bool compare_exchange_weak(_Tp& __e, _Tp __d,
  1431. memory_order __s, memory_order __f) _NOEXCEPT
  1432. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1433. {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
  1434. _LIBCPP_INLINE_VISIBILITY
  1435. bool compare_exchange_strong(_Tp& __e, _Tp __d,
  1436. memory_order __s, memory_order __f) volatile _NOEXCEPT
  1437. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1438. {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
  1439. _LIBCPP_INLINE_VISIBILITY
  1440. bool compare_exchange_strong(_Tp& __e, _Tp __d,
  1441. memory_order __s, memory_order __f) _NOEXCEPT
  1442. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1443. {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
  1444. _LIBCPP_INLINE_VISIBILITY
  1445. bool compare_exchange_weak(_Tp& __e, _Tp __d,
  1446. memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1447. {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
  1448. _LIBCPP_INLINE_VISIBILITY
  1449. bool compare_exchange_weak(_Tp& __e, _Tp __d,
  1450. memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1451. {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
  1452. _LIBCPP_INLINE_VISIBILITY
  1453. bool compare_exchange_strong(_Tp& __e, _Tp __d,
  1454. memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1455. {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
  1456. _LIBCPP_INLINE_VISIBILITY
  1457. bool compare_exchange_strong(_Tp& __e, _Tp __d,
  1458. memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1459. {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
  1460. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
  1461. {__cxx_atomic_wait(&__a_, __v, __m);}
  1462. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
  1463. {__cxx_atomic_wait(&__a_, __v, __m);}
  1464. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() volatile _NOEXCEPT
  1465. {__cxx_atomic_notify_one(&__a_);}
  1466. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() _NOEXCEPT
  1467. {__cxx_atomic_notify_one(&__a_);}
  1468. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() volatile _NOEXCEPT
  1469. {__cxx_atomic_notify_all(&__a_);}
  1470. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() _NOEXCEPT
  1471. {__cxx_atomic_notify_all(&__a_);}
  1472. #if _LIBCPP_STD_VER > 17
  1473. _LIBCPP_INLINE_VISIBILITY constexpr
  1474. __atomic_base() noexcept(is_nothrow_default_constructible_v<_Tp>) : __a_(_Tp()) {}
  1475. #else
  1476. _LIBCPP_INLINE_VISIBILITY
  1477. __atomic_base() _NOEXCEPT = default;
  1478. #endif
  1479. _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
  1480. __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
  1481. __atomic_base(const __atomic_base&) = delete;
  1482. };
  1483. #if defined(__cpp_lib_atomic_is_always_lock_free)
  1484. template <class _Tp, bool __b>
  1485. _LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
  1486. #endif
  1487. // atomic<Integral>
  1488. template <class _Tp>
  1489. struct __atomic_base<_Tp, true>
  1490. : public __atomic_base<_Tp, false>
  1491. {
  1492. typedef __atomic_base<_Tp, false> __base;
  1493. _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR_AFTER_CXX17
  1494. __atomic_base() _NOEXCEPT = default;
  1495. _LIBCPP_INLINE_VISIBILITY
  1496. _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
  1497. _LIBCPP_INLINE_VISIBILITY
  1498. _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1499. {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
  1500. _LIBCPP_INLINE_VISIBILITY
  1501. _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1502. {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
  1503. _LIBCPP_INLINE_VISIBILITY
  1504. _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1505. {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
  1506. _LIBCPP_INLINE_VISIBILITY
  1507. _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1508. {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
  1509. _LIBCPP_INLINE_VISIBILITY
  1510. _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1511. {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
  1512. _LIBCPP_INLINE_VISIBILITY
  1513. _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1514. {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
  1515. _LIBCPP_INLINE_VISIBILITY
  1516. _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1517. {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
  1518. _LIBCPP_INLINE_VISIBILITY
  1519. _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1520. {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
  1521. _LIBCPP_INLINE_VISIBILITY
  1522. _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1523. {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
  1524. _LIBCPP_INLINE_VISIBILITY
  1525. _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1526. {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
  1527. _LIBCPP_INLINE_VISIBILITY
  1528. _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
  1529. _LIBCPP_INLINE_VISIBILITY
  1530. _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
  1531. _LIBCPP_INLINE_VISIBILITY
  1532. _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
  1533. _LIBCPP_INLINE_VISIBILITY
  1534. _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
  1535. _LIBCPP_INLINE_VISIBILITY
  1536. _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
  1537. _LIBCPP_INLINE_VISIBILITY
  1538. _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
  1539. _LIBCPP_INLINE_VISIBILITY
  1540. _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
  1541. _LIBCPP_INLINE_VISIBILITY
  1542. _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
  1543. _LIBCPP_INLINE_VISIBILITY
  1544. _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
  1545. _LIBCPP_INLINE_VISIBILITY
  1546. _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
  1547. _LIBCPP_INLINE_VISIBILITY
  1548. _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
  1549. _LIBCPP_INLINE_VISIBILITY
  1550. _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
  1551. _LIBCPP_INLINE_VISIBILITY
  1552. _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
  1553. _LIBCPP_INLINE_VISIBILITY
  1554. _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
  1555. _LIBCPP_INLINE_VISIBILITY
  1556. _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
  1557. _LIBCPP_INLINE_VISIBILITY
  1558. _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
  1559. _LIBCPP_INLINE_VISIBILITY
  1560. _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
  1561. _LIBCPP_INLINE_VISIBILITY
  1562. _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
  1563. };
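// Illustrative sketch (not from the original header): the integral specialization above is
// what gives std::atomic of an integer type its arithmetic and bitwise members, e.g.:
//
//   std::atomic<unsigned> counter{0};
//   counter.fetch_add(2, std::memory_order_relaxed);  // returns the previous value
//   ++counter;                                        // fetch_add(1) + 1
//   counter |= 0x4u;                                  // fetch_or(0x4u) | 0x4u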
  1564. // atomic<T>
  1565. template <class _Tp>
  1566. struct atomic
  1567. : public __atomic_base<_Tp>
  1568. {
  1569. typedef __atomic_base<_Tp> __base;
  1570. typedef _Tp value_type;
  1571. typedef value_type difference_type;
  1572. #if _LIBCPP_STD_VER > 17
  1573. _LIBCPP_INLINE_VISIBILITY
  1574. atomic() = default;
  1575. #else
  1576. _LIBCPP_INLINE_VISIBILITY
  1577. atomic() _NOEXCEPT = default;
  1578. #endif
  1579. _LIBCPP_INLINE_VISIBILITY
  1580. _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
  1581. _LIBCPP_INLINE_VISIBILITY
  1582. _Tp operator=(_Tp __d) volatile _NOEXCEPT
  1583. {__base::store(__d); return __d;}
  1584. _LIBCPP_INLINE_VISIBILITY
  1585. _Tp operator=(_Tp __d) _NOEXCEPT
  1586. {__base::store(__d); return __d;}
  1587. atomic& operator=(const atomic&) = delete;
  1588. atomic& operator=(const atomic&) volatile = delete;
  1589. };
  1590. // atomic<T*>
  1591. template <class _Tp>
  1592. struct atomic<_Tp*>
  1593. : public __atomic_base<_Tp*>
  1594. {
  1595. typedef __atomic_base<_Tp*> __base;
  1596. typedef _Tp* value_type;
  1597. typedef ptrdiff_t difference_type;
  1598. _LIBCPP_INLINE_VISIBILITY
  1599. atomic() _NOEXCEPT = default;
  1600. _LIBCPP_INLINE_VISIBILITY
  1601. _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
  1602. _LIBCPP_INLINE_VISIBILITY
  1603. _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
  1604. {__base::store(__d); return __d;}
  1605. _LIBCPP_INLINE_VISIBILITY
  1606. _Tp* operator=(_Tp* __d) _NOEXCEPT
  1607. {__base::store(__d); return __d;}
  1608. _LIBCPP_INLINE_VISIBILITY
  1609. _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
  1610. // __atomic_fetch_add accepts function pointers, guard against them.
  1611. static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
  1612. return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
  1613. }
  1614. _LIBCPP_INLINE_VISIBILITY
  1615. _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
  1616. // __atomic_fetch_add accepts function pointers, guard against them.
  1617. static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
  1618. return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
  1619. }
  1620. _LIBCPP_INLINE_VISIBILITY
  1621. _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
1622. // __atomic_fetch_sub accepts function pointers, guard against them.
  1623. static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
  1624. return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
  1625. }
  1626. _LIBCPP_INLINE_VISIBILITY
  1627. _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
1628. // __atomic_fetch_sub accepts function pointers, guard against them.
  1629. static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
  1630. return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
  1631. }
  1632. _LIBCPP_INLINE_VISIBILITY
  1633. _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
  1634. _LIBCPP_INLINE_VISIBILITY
  1635. _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
  1636. _LIBCPP_INLINE_VISIBILITY
  1637. _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
  1638. _LIBCPP_INLINE_VISIBILITY
  1639. _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
  1640. _LIBCPP_INLINE_VISIBILITY
  1641. _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
  1642. _LIBCPP_INLINE_VISIBILITY
  1643. _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
  1644. _LIBCPP_INLINE_VISIBILITY
  1645. _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
  1646. _LIBCPP_INLINE_VISIBILITY
  1647. _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
  1648. _LIBCPP_INLINE_VISIBILITY
  1649. _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
  1650. _LIBCPP_INLINE_VISIBILITY
  1651. _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
  1652. _LIBCPP_INLINE_VISIBILITY
  1653. _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
  1654. _LIBCPP_INLINE_VISIBILITY
  1655. _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
  1656. atomic& operator=(const atomic&) = delete;
  1657. atomic& operator=(const atomic&) volatile = delete;
  1658. };
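// Illustrative sketch (not from the original header): for atomic<T*>, fetch_add/fetch_sub
// take a ptrdiff_t and move the pointer by whole elements, like ordinary pointer arithmetic:
//
//   int buf[4] = {};
//   std::atomic<int*> p{buf};
//   int* old = p.fetch_add(2);   // old == buf, p now points at buf + 2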
  1659. // atomic_is_lock_free
  1660. template <class _Tp>
  1661. _LIBCPP_INLINE_VISIBILITY
  1662. bool
  1663. atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
  1664. {
  1665. return __o->is_lock_free();
  1666. }
  1667. template <class _Tp>
  1668. _LIBCPP_INLINE_VISIBILITY
  1669. bool
  1670. atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
  1671. {
  1672. return __o->is_lock_free();
  1673. }
  1674. // atomic_init
  1675. template <class _Tp>
  1676. _LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
  1677. void
  1678. atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1679. {
  1680. __cxx_atomic_init(&__o->__a_, __d);
  1681. }
  1682. template <class _Tp>
  1683. _LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
  1684. void
  1685. atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1686. {
  1687. __cxx_atomic_init(&__o->__a_, __d);
  1688. }
  1689. // atomic_store
  1690. template <class _Tp>
  1691. _LIBCPP_INLINE_VISIBILITY
  1692. void
  1693. atomic_store(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1694. {
  1695. __o->store(__d);
  1696. }
  1697. template <class _Tp>
  1698. _LIBCPP_INLINE_VISIBILITY
  1699. void
  1700. atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1701. {
  1702. __o->store(__d);
  1703. }
  1704. // atomic_store_explicit
  1705. template <class _Tp>
  1706. _LIBCPP_INLINE_VISIBILITY
  1707. void
  1708. atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  1709. _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
  1710. {
  1711. __o->store(__d, __m);
  1712. }
  1713. template <class _Tp>
  1714. _LIBCPP_INLINE_VISIBILITY
  1715. void
  1716. atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  1717. _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
  1718. {
  1719. __o->store(__d, __m);
  1720. }
  1721. // atomic_load
  1722. template <class _Tp>
  1723. _LIBCPP_INLINE_VISIBILITY
  1724. _Tp
  1725. atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
  1726. {
  1727. return __o->load();
  1728. }
  1729. template <class _Tp>
  1730. _LIBCPP_INLINE_VISIBILITY
  1731. _Tp
  1732. atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
  1733. {
  1734. return __o->load();
  1735. }
  1736. // atomic_load_explicit
  1737. template <class _Tp>
  1738. _LIBCPP_INLINE_VISIBILITY
  1739. _Tp
  1740. atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  1741. _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
  1742. {
  1743. return __o->load(__m);
  1744. }
  1745. template <class _Tp>
  1746. _LIBCPP_INLINE_VISIBILITY
  1747. _Tp
  1748. atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  1749. _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
  1750. {
  1751. return __o->load(__m);
  1752. }
  1753. // atomic_exchange
  1754. template <class _Tp>
  1755. _LIBCPP_INLINE_VISIBILITY
  1756. _Tp
  1757. atomic_exchange(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1758. {
  1759. return __o->exchange(__d);
  1760. }
  1761. template <class _Tp>
  1762. _LIBCPP_INLINE_VISIBILITY
  1763. _Tp
  1764. atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1765. {
  1766. return __o->exchange(__d);
  1767. }
  1768. // atomic_exchange_explicit
  1769. template <class _Tp>
  1770. _LIBCPP_INLINE_VISIBILITY
  1771. _Tp
  1772. atomic_exchange_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  1773. {
  1774. return __o->exchange(__d, __m);
  1775. }
  1776. template <class _Tp>
  1777. _LIBCPP_INLINE_VISIBILITY
  1778. _Tp
  1779. atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  1780. {
  1781. return __o->exchange(__d, __m);
  1782. }
  1783. // atomic_compare_exchange_weak
  1784. template <class _Tp>
  1785. _LIBCPP_INLINE_VISIBILITY
  1786. bool
  1787. atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1788. {
  1789. return __o->compare_exchange_weak(*__e, __d);
  1790. }
  1791. template <class _Tp>
  1792. _LIBCPP_INLINE_VISIBILITY
  1793. bool
  1794. atomic_compare_exchange_weak(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1795. {
  1796. return __o->compare_exchange_weak(*__e, __d);
  1797. }
  1798. // atomic_compare_exchange_strong
  1799. template <class _Tp>
  1800. _LIBCPP_INLINE_VISIBILITY
  1801. bool
  1802. atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1803. {
  1804. return __o->compare_exchange_strong(*__e, __d);
  1805. }
  1806. template <class _Tp>
  1807. _LIBCPP_INLINE_VISIBILITY
  1808. bool
  1809. atomic_compare_exchange_strong(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1810. {
  1811. return __o->compare_exchange_strong(*__e, __d);
  1812. }
  1813. // atomic_compare_exchange_weak_explicit
  1814. template <class _Tp>
  1815. _LIBCPP_INLINE_VISIBILITY
  1816. bool
  1817. atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
  1818. typename atomic<_Tp>::value_type __d,
  1819. memory_order __s, memory_order __f) _NOEXCEPT
  1820. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1821. {
  1822. return __o->compare_exchange_weak(*__e, __d, __s, __f);
  1823. }
  1824. template <class _Tp>
  1825. _LIBCPP_INLINE_VISIBILITY
  1826. bool
  1827. atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
  1828. memory_order __s, memory_order __f) _NOEXCEPT
  1829. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1830. {
  1831. return __o->compare_exchange_weak(*__e, __d, __s, __f);
  1832. }
  1833. // atomic_compare_exchange_strong_explicit
  1834. template <class _Tp>
  1835. _LIBCPP_INLINE_VISIBILITY
  1836. bool
  1837. atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
  1838. typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
  1839. memory_order __s, memory_order __f) _NOEXCEPT
  1840. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1841. {
  1842. return __o->compare_exchange_strong(*__e, __d, __s, __f);
  1843. }
  1844. template <class _Tp>
  1845. _LIBCPP_INLINE_VISIBILITY
  1846. bool
  1847. atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
  1848. typename atomic<_Tp>::value_type __d,
  1849. memory_order __s, memory_order __f) _NOEXCEPT
  1850. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1851. {
  1852. return __o->compare_exchange_strong(*__e, __d, __s, __f);
  1853. }
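// Illustrative sketch (not from the original header): the compare-exchange functions are
// typically driven from a retry loop, with the expected value refreshed on each failure:
//
//   std::atomic<int> x{0};
//   int expected = x.load(std::memory_order_relaxed);
//   while (!std::atomic_compare_exchange_weak(&x, &expected, expected + 1)) {
//     // on failure, expected now holds the current value; just retry
//   }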
  1854. // atomic_wait
  1855. template <class _Tp>
  1856. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1857. void atomic_wait(const volatile atomic<_Tp>* __o,
  1858. typename atomic<_Tp>::value_type __v) _NOEXCEPT
  1859. {
  1860. return __o->wait(__v);
  1861. }
  1862. template <class _Tp>
  1863. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1864. void atomic_wait(const atomic<_Tp>* __o,
  1865. typename atomic<_Tp>::value_type __v) _NOEXCEPT
  1866. {
  1867. return __o->wait(__v);
  1868. }
  1869. // atomic_wait_explicit
  1870. template <class _Tp>
  1871. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1872. void atomic_wait_explicit(const volatile atomic<_Tp>* __o,
  1873. typename atomic<_Tp>::value_type __v,
  1874. memory_order __m) _NOEXCEPT
  1875. _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
  1876. {
  1877. return __o->wait(__v, __m);
  1878. }
  1879. template <class _Tp>
  1880. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1881. void atomic_wait_explicit(const atomic<_Tp>* __o,
  1882. typename atomic<_Tp>::value_type __v,
  1883. memory_order __m) _NOEXCEPT
  1884. _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
  1885. {
  1886. return __o->wait(__v, __m);
  1887. }
  1888. // atomic_notify_one
  1889. template <class _Tp>
  1890. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1891. void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT
  1892. {
  1893. __o->notify_one();
  1894. }
  1895. template <class _Tp>
  1896. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1897. void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT
  1898. {
  1899. __o->notify_one();
  1900. }
1901. // atomic_notify_all
  1902. template <class _Tp>
  1903. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1904. void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT
  1905. {
  1906. __o->notify_all();
  1907. }
  1908. template <class _Tp>
  1909. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1910. void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT
  1911. {
  1912. __o->notify_all();
  1913. }
  1914. // atomic_fetch_add
  1915. template <class _Tp>
  1916. _LIBCPP_INLINE_VISIBILITY
  1917. _Tp
  1918. atomic_fetch_add(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
  1919. {
  1920. return __o->fetch_add(__op);
  1921. }
  1922. template <class _Tp>
  1923. _LIBCPP_INLINE_VISIBILITY
  1924. _Tp
  1925. atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
  1926. {
  1927. return __o->fetch_add(__op);
  1928. }
  1929. // atomic_fetch_add_explicit
  1930. template <class _Tp>
  1931. _LIBCPP_INLINE_VISIBILITY
  1932. _Tp atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
  1933. {
  1934. return __o->fetch_add(__op, __m);
  1935. }
  1936. template <class _Tp>
  1937. _LIBCPP_INLINE_VISIBILITY
  1938. _Tp atomic_fetch_add_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
  1939. {
  1940. return __o->fetch_add(__op, __m);
  1941. }
  1942. // atomic_fetch_sub
  1943. template <class _Tp>
  1944. _LIBCPP_INLINE_VISIBILITY
  1945. _Tp atomic_fetch_sub(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
  1946. {
  1947. return __o->fetch_sub(__op);
  1948. }
  1949. template <class _Tp>
  1950. _LIBCPP_INLINE_VISIBILITY
  1951. _Tp atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
  1952. {
  1953. return __o->fetch_sub(__op);
  1954. }
  1955. // atomic_fetch_sub_explicit
  1956. template <class _Tp>
  1957. _LIBCPP_INLINE_VISIBILITY
  1958. _Tp atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
  1959. {
  1960. return __o->fetch_sub(__op, __m);
  1961. }
  1962. template <class _Tp>
  1963. _LIBCPP_INLINE_VISIBILITY
  1964. _Tp atomic_fetch_sub_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
  1965. {
  1966. return __o->fetch_sub(__op, __m);
  1967. }
  1968. // atomic_fetch_and
  1969. template <class _Tp>
  1970. _LIBCPP_INLINE_VISIBILITY
  1971. typename enable_if
  1972. <
  1973. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  1974. _Tp
  1975. >::type
  1976. atomic_fetch_and(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
  1977. {
  1978. return __o->fetch_and(__op);
  1979. }
  1980. template <class _Tp>
  1981. _LIBCPP_INLINE_VISIBILITY
  1982. typename enable_if
  1983. <
  1984. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  1985. _Tp
  1986. >::type
  1987. atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
  1988. {
  1989. return __o->fetch_and(__op);
  1990. }
  1991. // atomic_fetch_and_explicit
  1992. template <class _Tp>
  1993. _LIBCPP_INLINE_VISIBILITY
  1994. typename enable_if
  1995. <
  1996. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  1997. _Tp
  1998. >::type
  1999. atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
  2000. {
  2001. return __o->fetch_and(__op, __m);
  2002. }
  2003. template <class _Tp>
  2004. _LIBCPP_INLINE_VISIBILITY
  2005. typename enable_if
  2006. <
  2007. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2008. _Tp
  2009. >::type
  2010. atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
  2011. {
  2012. return __o->fetch_and(__op, __m);
  2013. }
  2014. // atomic_fetch_or
  2015. template <class _Tp>
  2016. _LIBCPP_INLINE_VISIBILITY
  2017. typename enable_if
  2018. <
  2019. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2020. _Tp
  2021. >::type
  2022. atomic_fetch_or(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
  2023. {
  2024. return __o->fetch_or(__op);
  2025. }
  2026. template <class _Tp>
  2027. _LIBCPP_INLINE_VISIBILITY
  2028. typename enable_if
  2029. <
  2030. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2031. _Tp
  2032. >::type
  2033. atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
  2034. {
  2035. return __o->fetch_or(__op);
  2036. }
  2037. // atomic_fetch_or_explicit
  2038. template <class _Tp>
  2039. _LIBCPP_INLINE_VISIBILITY
  2040. typename enable_if
  2041. <
  2042. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2043. _Tp
  2044. >::type
  2045. atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
  2046. {
  2047. return __o->fetch_or(__op, __m);
  2048. }
  2049. template <class _Tp>
  2050. _LIBCPP_INLINE_VISIBILITY
  2051. typename enable_if
  2052. <
  2053. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2054. _Tp
  2055. >::type
  2056. atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
  2057. {
  2058. return __o->fetch_or(__op, __m);
  2059. }
  2060. // atomic_fetch_xor
  2061. template <class _Tp>
  2062. _LIBCPP_INLINE_VISIBILITY
  2063. typename enable_if
  2064. <
  2065. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2066. _Tp
  2067. >::type
  2068. atomic_fetch_xor(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
  2069. {
  2070. return __o->fetch_xor(__op);
  2071. }
  2072. template <class _Tp>
  2073. _LIBCPP_INLINE_VISIBILITY
  2074. typename enable_if
  2075. <
  2076. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2077. _Tp
  2078. >::type
  2079. atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
  2080. {
  2081. return __o->fetch_xor(__op);
  2082. }
  2083. // atomic_fetch_xor_explicit
  2084. template <class _Tp>
  2085. _LIBCPP_INLINE_VISIBILITY
  2086. typename enable_if
  2087. <
  2088. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2089. _Tp
  2090. >::type
  2091. atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
  2092. {
  2093. return __o->fetch_xor(__op, __m);
  2094. }
  2095. template <class _Tp>
  2096. _LIBCPP_INLINE_VISIBILITY
  2097. typename enable_if
  2098. <
  2099. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2100. _Tp
  2101. >::type
  2102. atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
  2103. {
  2104. return __o->fetch_xor(__op, __m);
  2105. }
  2106. // flag type and operations
  2107. typedef struct atomic_flag
  2108. {
  2109. __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;
  2110. _LIBCPP_INLINE_VISIBILITY
  2111. bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
  2112. {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
  2113. _LIBCPP_INLINE_VISIBILITY
  2114. bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
  2115. {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
  2116. _LIBCPP_INLINE_VISIBILITY
  2117. bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  2118. {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
  2119. _LIBCPP_INLINE_VISIBILITY
  2120. bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
  2121. {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
  2122. _LIBCPP_INLINE_VISIBILITY
  2123. void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  2124. {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
  2125. _LIBCPP_INLINE_VISIBILITY
  2126. void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
  2127. {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
  2128. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  2129. void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
  2130. {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
  2131. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  2132. void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
  2133. {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
  2134. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  2135. void notify_one() volatile _NOEXCEPT
  2136. {__cxx_atomic_notify_one(&__a_);}
  2137. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  2138. void notify_one() _NOEXCEPT
  2139. {__cxx_atomic_notify_one(&__a_);}
  2140. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  2141. void notify_all() volatile _NOEXCEPT
  2142. {__cxx_atomic_notify_all(&__a_);}
  2143. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  2144. void notify_all() _NOEXCEPT
  2145. {__cxx_atomic_notify_all(&__a_);}
  2146. #if _LIBCPP_STD_VER > 17
  2147. _LIBCPP_INLINE_VISIBILITY constexpr
  2148. atomic_flag() _NOEXCEPT : __a_(false) {}
  2149. #else
  2150. _LIBCPP_INLINE_VISIBILITY
  2151. atomic_flag() _NOEXCEPT = default;
  2152. #endif
  2153. _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
  2154. atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION
  2155. atomic_flag(const atomic_flag&) = delete;
  2156. atomic_flag& operator=(const atomic_flag&) = delete;
  2157. atomic_flag& operator=(const atomic_flag&) volatile = delete;
  2158. } atomic_flag;
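// Illustrative sketch (not from the original header): atomic_flag is the minimal
// always-lock-free type and is commonly used to build a spinlock:
//
//   std::atomic_flag lk = ATOMIC_FLAG_INIT;
//   void enter() { while (lk.test_and_set(std::memory_order_acquire)) { /* spin */ } }
//   void leave() { lk.clear(std::memory_order_release); }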
  2159. inline _LIBCPP_INLINE_VISIBILITY
  2160. bool
  2161. atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT
  2162. {
  2163. return __o->test();
  2164. }
  2165. inline _LIBCPP_INLINE_VISIBILITY
  2166. bool
  2167. atomic_flag_test(const atomic_flag* __o) _NOEXCEPT
  2168. {
  2169. return __o->test();
  2170. }
  2171. inline _LIBCPP_INLINE_VISIBILITY
  2172. bool
  2173. atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
  2174. {
  2175. return __o->test(__m);
  2176. }
  2177. inline _LIBCPP_INLINE_VISIBILITY
  2178. bool
  2179. atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT
  2180. {
  2181. return __o->test(__m);
  2182. }
  2183. inline _LIBCPP_INLINE_VISIBILITY
  2184. bool
  2185. atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
  2186. {
  2187. return __o->test_and_set();
  2188. }
  2189. inline _LIBCPP_INLINE_VISIBILITY
  2190. bool
  2191. atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
  2192. {
  2193. return __o->test_and_set();
  2194. }
  2195. inline _LIBCPP_INLINE_VISIBILITY
  2196. bool
  2197. atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
  2198. {
  2199. return __o->test_and_set(__m);
  2200. }
  2201. inline _LIBCPP_INLINE_VISIBILITY
  2202. bool
  2203. atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
  2204. {
  2205. return __o->test_and_set(__m);
  2206. }
  2207. inline _LIBCPP_INLINE_VISIBILITY
  2208. void
  2209. atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
  2210. {
  2211. __o->clear();
  2212. }
  2213. inline _LIBCPP_INLINE_VISIBILITY
  2214. void
  2215. atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
  2216. {
  2217. __o->clear();
  2218. }
  2219. inline _LIBCPP_INLINE_VISIBILITY
  2220. void
  2221. atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
  2222. {
  2223. __o->clear(__m);
  2224. }
  2225. inline _LIBCPP_INLINE_VISIBILITY
  2226. void
  2227. atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
  2228. {
  2229. __o->clear(__m);
  2230. }
  2231. inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
  2232. void
  2233. atomic_flag_wait(const volatile atomic_flag* __o, bool __v) _NOEXCEPT
  2234. {
  2235. __o->wait(__v);
  2236. }
  2237. inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
  2238. void
  2239. atomic_flag_wait(const atomic_flag* __o, bool __v) _NOEXCEPT
  2240. {
  2241. __o->wait(__v);
  2242. }
  2243. inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
  2244. void
  2245. atomic_flag_wait_explicit(const volatile atomic_flag* __o,
  2246. bool __v, memory_order __m) _NOEXCEPT
  2247. {
  2248. __o->wait(__v, __m);
  2249. }
  2250. inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
  2251. void
  2252. atomic_flag_wait_explicit(const atomic_flag* __o,
  2253. bool __v, memory_order __m) _NOEXCEPT
  2254. {
  2255. __o->wait(__v, __m);
  2256. }
  2257. inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
  2258. void
  2259. atomic_flag_notify_one(volatile atomic_flag* __o) _NOEXCEPT
  2260. {
  2261. __o->notify_one();
  2262. }
  2263. inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
  2264. void
  2265. atomic_flag_notify_one(atomic_flag* __o) _NOEXCEPT
  2266. {
  2267. __o->notify_one();
  2268. }
  2269. inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
  2270. void
  2271. atomic_flag_notify_all(volatile atomic_flag* __o) _NOEXCEPT
  2272. {
  2273. __o->notify_all();
  2274. }
  2275. inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
  2276. void
  2277. atomic_flag_notify_all(atomic_flag* __o) _NOEXCEPT
  2278. {
  2279. __o->notify_all();
  2280. }
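
// Usage sketch (illustrative only, not part of this header; the names below are
// hypothetical): the free functions above simply forward to the atomic_flag
// members, so a minimal spin lock can be written against the C-style API.
// The C++20 wait()/notify_one() pair keeps a blocked thread from busy-spinning:
//
//     struct spin_lock {
//         std::atomic_flag flag_ = ATOMIC_FLAG_INIT;   // starts clear (unlocked)
//         void lock() {
//             // keep trying to set the flag; sleep while it is still set
//             while (std::atomic_flag_test_and_set_explicit(&flag_, std::memory_order_acquire))
//                 std::atomic_flag_wait(&flag_, true);
//         }
//         void unlock() {
//             std::atomic_flag_clear_explicit(&flag_, std::memory_order_release);
//             std::atomic_flag_notify_one(&flag_);     // wake one waiter, if any
//         }
//     };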

// fences

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
    __cxx_atomic_thread_fence(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
    __cxx_atomic_signal_fence(__m);
}
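
// Usage sketch (illustrative only, not part of this header; the variables are
// hypothetical): atomic_thread_fence lets relaxed accesses synchronize through
// an explicit release/acquire fence pair, e.g. publishing plain data through a
// relaxed flag:
//
//     int data = 0;
//     std::atomic<bool> ready(false);
//
//     // producer thread
//     data = 42;
//     std::atomic_thread_fence(std::memory_order_release);
//     ready.store(true, std::memory_order_relaxed);
//
//     // consumer thread
//     while (!ready.load(std::memory_order_relaxed)) { /* spin */ }
//     std::atomic_thread_fence(std::memory_order_acquire);
//     assert(data == 42);   // ordered by the release/acquire fence pair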

// Atomics for standard typedef types

typedef atomic<bool> atomic_bool;
typedef atomic<char> atomic_char;
typedef atomic<signed char> atomic_schar;
typedef atomic<unsigned char> atomic_uchar;
typedef atomic<short> atomic_short;
typedef atomic<unsigned short> atomic_ushort;
typedef atomic<int> atomic_int;
typedef atomic<unsigned int> atomic_uint;
typedef atomic<long> atomic_long;
typedef atomic<unsigned long> atomic_ulong;
typedef atomic<long long> atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
#ifndef _LIBCPP_HAS_NO_CHAR8_T
typedef atomic<char8_t> atomic_char8_t;
#endif
typedef atomic<char16_t> atomic_char16_t;
typedef atomic<char32_t> atomic_char32_t;
#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
typedef atomic<wchar_t> atomic_wchar_t;
#endif

typedef atomic<int_least8_t> atomic_int_least8_t;
typedef atomic<uint_least8_t> atomic_uint_least8_t;
typedef atomic<int_least16_t> atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t> atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t> atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t> atomic_int_fast8_t;
typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
typedef atomic<int_fast16_t> atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t> atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t> atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic< int8_t> atomic_int8_t;
typedef atomic<uint8_t> atomic_uint8_t;
typedef atomic< int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic< int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic< int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

typedef atomic<intptr_t> atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t> atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t> atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// atomic_*_lock_free : prefer the contention type most highly, then the largest lock-free type

#ifdef __cpp_lib_atomic_is_always_lock_free
# define _LIBCPP_CONTENTION_LOCK_FREE __atomic_always_lock_free(sizeof(__cxx_contention_t), 0)
#else
# define _LIBCPP_CONTENTION_LOCK_FREE false
#endif

#if ATOMIC_LLONG_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, long long>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned long long>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_INT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, int>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned int>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_SHORT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, short>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned short>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_CHAR_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, char>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned char>::type __libcpp_unsigned_lock_free;
#else
// No signed/unsigned lock-free types
# define _LIBCPP_NO_LOCK_FREE_TYPES
#endif

#if !defined(_LIBCPP_NO_LOCK_FREE_TYPES)
typedef atomic<__libcpp_signed_lock_free> atomic_signed_lock_free;
typedef atomic<__libcpp_unsigned_lock_free> atomic_unsigned_lock_free;
#endif
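
// Usage sketch (illustrative only, not part of this header; the counter is
// hypothetical): preferring __cxx_contention_t above means atomic_signed_lock_free
// and atomic_unsigned_lock_free are the integer atomics whose wait()/notify()
// are expected to be cheapest, so they are a natural choice for counters that
// threads block on:
//
//     std::atomic_unsigned_lock_free pending(0);
//     pending.fetch_add(1, std::memory_order_release);
//     pending.notify_all();   // wake threads blocked in pending.wait(0)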

#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}

#if _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)
# if defined(_LIBCPP_CLANG_VER) && _LIBCPP_CLANG_VER >= 1400
#   pragma clang deprecated(ATOMIC_VAR_INIT)
# endif
#endif // _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)
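
// Usage sketch (illustrative only, not part of this header; the variable names
// are hypothetical): both macros expand to brace initializers. ATOMIC_VAR_INIT
// is deprecated from C++20 onward (hence the pragma above) because ordinary
// initialization is equivalent:
//
//     std::atomic_flag guard = ATOMIC_FLAG_INIT;   // starts in the clear state
//     std::atomic<int> counter = ATOMIC_VAR_INIT(5);
//     std::atomic<int> counter20(5);               // preferred spelling in C++20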

_LIBCPP_END_NAMESPACE_STD

#endif // _LIBCPP_ATOMIC