// -*- C++ -*-
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP_ATOMIC
#define _LIBCPP_ATOMIC

/*
    atomic synopsis

namespace std
{

// feature test macro [version.syn]

#define __cpp_lib_atomic_is_always_lock_free
#define __cpp_lib_atomic_flag_test
#define __cpp_lib_atomic_lock_free_type_aliases
#define __cpp_lib_atomic_wait

// order and consistency

enum memory_order: unspecified // enum class in C++20
{
    relaxed,
    consume, // load-consume
    acquire, // load-acquire
    release, // store-release
    acq_rel, // store-release load-acquire
    seq_cst  // store-release load-acquire
};

inline constexpr auto memory_order_relaxed = memory_order::relaxed;
inline constexpr auto memory_order_consume = memory_order::consume;
inline constexpr auto memory_order_acquire = memory_order::acquire;
inline constexpr auto memory_order_release = memory_order::release;
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

template <class T> T kill_dependency(T y) noexcept;

// lock-free property

#define ATOMIC_BOOL_LOCK_FREE unspecified
#define ATOMIC_CHAR_LOCK_FREE unspecified
#define ATOMIC_CHAR8_T_LOCK_FREE unspecified // C++20
#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
#define ATOMIC_SHORT_LOCK_FREE unspecified
#define ATOMIC_INT_LOCK_FREE unspecified
#define ATOMIC_LONG_LOCK_FREE unspecified
#define ATOMIC_LLONG_LOCK_FREE unspecified
#define ATOMIC_POINTER_LOCK_FREE unspecified

template <class T>
struct atomic
{
    using value_type = T;

    static constexpr bool is_always_lock_free;
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;

    atomic() noexcept = default; // until C++20
    constexpr atomic() noexcept(is_nothrow_default_constructible_v<T>); // since C++20
    constexpr atomic(T desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    T load(memory_order m = memory_order_seq_cst) const noexcept;
    operator T() const volatile noexcept;
    operator T() const noexcept;
    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
    T operator=(T) volatile noexcept;
    T operator=(T) noexcept;

    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    void wait(T, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(T, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
};
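
// Illustrative usage (not part of the standard synopsis): a typical
// compare_exchange_weak retry loop on an atomic of a user-defined,
// trivially copyable type.
//
//   struct Point { int x; int y; };
//   std::atomic<Point> p{Point{0, 0}};
//
//   void bump_x(std::atomic<Point>& a) {
//       Point expected = a.load(std::memory_order_relaxed);
//       Point desired;
//       do {
//           desired = expected;
//           desired.x += 1; // recompute from the most recently observed value
//       } while (!a.compare_exchange_weak(expected, desired,
//                                         std::memory_order_release,
//                                         std::memory_order_relaxed));
//   }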

template <>
struct atomic<integral>
{
    using value_type = integral;
    using difference_type = value_type;

    static constexpr bool is_always_lock_free;
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;

    atomic() noexcept = default;
    constexpr atomic(integral desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    integral load(memory_order m = memory_order_seq_cst) const noexcept;
    operator integral() const volatile noexcept;
    operator integral() const noexcept;
    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    integral operator=(integral desr) volatile noexcept;
    integral operator=(integral desr) noexcept;

    integral exchange(integral desr,
                      memory_order m = memory_order_seq_cst) volatile noexcept;
    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;

    integral operator++(int) volatile noexcept;
    integral operator++(int) noexcept;
    integral operator--(int) volatile noexcept;
    integral operator--(int) noexcept;
    integral operator++() volatile noexcept;
    integral operator++() noexcept;
    integral operator--() volatile noexcept;
    integral operator--() noexcept;
    integral operator+=(integral op) volatile noexcept;
    integral operator+=(integral op) noexcept;
    integral operator-=(integral op) volatile noexcept;
    integral operator-=(integral op) noexcept;
    integral operator&=(integral op) volatile noexcept;
    integral operator&=(integral op) noexcept;
    integral operator|=(integral op) volatile noexcept;
    integral operator|=(integral op) noexcept;
    integral operator^=(integral op) volatile noexcept;
    integral operator^=(integral op) noexcept;

    void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(integral, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
};
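
// Illustrative usage (not part of the standard synopsis): a relaxed event
// counter built on the integral specialization; fetch_add returns the value
// held before the addition.
//
//   std::atomic<unsigned long> hits{0};
//
//   void record_hit() {
//       hits.fetch_add(1, std::memory_order_relaxed); // counting only, no ordering needed
//   }
//
//   unsigned long snapshot() {
//       return hits.load(std::memory_order_relaxed);
//   }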

template <class T>
struct atomic<T*>
{
    using value_type = T*;
    using difference_type = ptrdiff_t;

    static constexpr bool is_always_lock_free;
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;

    atomic() noexcept = default; // until C++20
    constexpr atomic() noexcept; // since C++20
    constexpr atomic(T* desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    T* load(memory_order m = memory_order_seq_cst) const noexcept;
    operator T*() const volatile noexcept;
    operator T*() const noexcept;
    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    T* operator=(T*) volatile noexcept;
    T* operator=(T*) noexcept;

    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;

    T* operator++(int) volatile noexcept;
    T* operator++(int) noexcept;
    T* operator--(int) volatile noexcept;
    T* operator--(int) noexcept;
    T* operator++() volatile noexcept;
    T* operator++() noexcept;
    T* operator--() volatile noexcept;
    T* operator--() noexcept;
    T* operator+=(ptrdiff_t op) volatile noexcept;
    T* operator+=(ptrdiff_t op) noexcept;
    T* operator-=(ptrdiff_t op) volatile noexcept;
    T* operator-=(ptrdiff_t op) noexcept;

    void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(T*, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
};
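
// Illustrative usage (not part of the standard synopsis): fetch_add on
// atomic<T*> advances the pointer by whole elements (difference_type is
// ptrdiff_t), so each call below hands a distinct slot of the buffer to the
// calling thread (bounds checking omitted).
//
//   int buffer[1024];
//   std::atomic<int*> cursor{buffer};
//
//   int* claim_slot() {
//       return cursor.fetch_add(1, std::memory_order_relaxed); // moves by sizeof(int) bytes
//   }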

// [atomics.nonmembers], non-member functions
template<class T>
  bool atomic_is_lock_free(const volatile atomic<T>*) noexcept;
template<class T>
  bool atomic_is_lock_free(const atomic<T>*) noexcept;
template<class T>
  void atomic_store(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  void atomic_store(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  void atomic_store_explicit(volatile atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  void atomic_store_explicit(atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  T atomic_load(const volatile atomic<T>*) noexcept;
template<class T>
  T atomic_load(const atomic<T>*) noexcept;
template<class T>
  T atomic_load_explicit(const volatile atomic<T>*, memory_order) noexcept;
template<class T>
  T atomic_load_explicit(const atomic<T>*, memory_order) noexcept;
template<class T>
  T atomic_exchange(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_exchange(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_exchange_explicit(volatile atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  T atomic_exchange_explicit(atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  bool atomic_compare_exchange_weak(volatile atomic<T>*, atomic<T>::value_type*,
                                    atomic<T>::value_type) noexcept;
template<class T>
  bool atomic_compare_exchange_weak(atomic<T>*, atomic<T>::value_type*,
                                    atomic<T>::value_type) noexcept;
template<class T>
  bool atomic_compare_exchange_strong(volatile atomic<T>*, atomic<T>::value_type*,
                                      atomic<T>::value_type) noexcept;
template<class T>
  bool atomic_compare_exchange_strong(atomic<T>*, atomic<T>::value_type*,
                                      atomic<T>::value_type) noexcept;
template<class T>
  bool atomic_compare_exchange_weak_explicit(volatile atomic<T>*, atomic<T>::value_type*,
                                             atomic<T>::value_type,
                                             memory_order, memory_order) noexcept;
template<class T>
  bool atomic_compare_exchange_weak_explicit(atomic<T>*, atomic<T>::value_type*,
                                             atomic<T>::value_type,
                                             memory_order, memory_order) noexcept;
template<class T>
  bool atomic_compare_exchange_strong_explicit(volatile atomic<T>*, atomic<T>::value_type*,
                                               atomic<T>::value_type,
                                               memory_order, memory_order) noexcept;
template<class T>
  bool atomic_compare_exchange_strong_explicit(atomic<T>*, atomic<T>::value_type*,
                                               atomic<T>::value_type,
                                               memory_order, memory_order) noexcept;
template<class T>
  T atomic_fetch_add(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
  T atomic_fetch_add(atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
  T atomic_fetch_add_explicit(volatile atomic<T>*, atomic<T>::difference_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_add_explicit(atomic<T>*, atomic<T>::difference_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_sub(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
  T atomic_fetch_sub(atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
  T atomic_fetch_sub_explicit(volatile atomic<T>*, atomic<T>::difference_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_sub_explicit(atomic<T>*, atomic<T>::difference_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_and(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_and(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_and_explicit(volatile atomic<T>*, atomic<T>::value_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_and_explicit(atomic<T>*, atomic<T>::value_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_or(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_or(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_or_explicit(volatile atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  T atomic_fetch_or_explicit(atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  T atomic_fetch_xor(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_xor(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_xor_explicit(volatile atomic<T>*, atomic<T>::value_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_xor_explicit(atomic<T>*, atomic<T>::value_type,
                              memory_order) noexcept;
template<class T>
  void atomic_wait(const volatile atomic<T>*, atomic<T>::value_type);
template<class T>
  void atomic_wait(const atomic<T>*, atomic<T>::value_type);
template<class T>
  void atomic_wait_explicit(const volatile atomic<T>*, atomic<T>::value_type,
                            memory_order);
template<class T>
  void atomic_wait_explicit(const atomic<T>*, atomic<T>::value_type,
                            memory_order);
template<class T>
  void atomic_notify_one(volatile atomic<T>*);
template<class T>
  void atomic_notify_one(atomic<T>*);
template<class T>
  void atomic_notify_all(volatile atomic<T>*);
template<class T>
  void atomic_notify_all(atomic<T>*);
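
// Illustrative usage (not part of the standard synopsis): blocking on an
// atomic value with the C++20 wait/notify members; wait returns once the
// stored value no longer compares equal to the argument.
//
//   std::atomic<bool> ready{false};
//
//   void consumer() {
//       ready.wait(false, std::memory_order_acquire); // blocks while ready == false
//       // data published before the release store below is now visible
//   }
//
//   void producer() {
//       ready.store(true, std::memory_order_release);
//       ready.notify_one();
//   }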

// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char8_t>            atomic_char8_t; // C++20
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic<int8_t>   atomic_int8_t;
typedef atomic<uint8_t>  atomic_uint8_t;
typedef atomic<int16_t>  atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic<int32_t>  atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic<int64_t>  atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// flag type and operations

typedef struct atomic_flag
{
    atomic_flag() noexcept = default; // until C++20
    constexpr atomic_flag() noexcept; // since C++20
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    bool test(memory_order m = memory_order_seq_cst) volatile noexcept;
    bool test(memory_order m = memory_order_seq_cst) noexcept;
    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
    void clear(memory_order m = memory_order_seq_cst) noexcept;

    void wait(bool, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(bool, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
} atomic_flag;
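
// Illustrative usage (not part of the standard synopsis): a minimal spinlock
// built on atomic_flag; test_and_set(acquire) takes the lock, clear(release)
// drops it.
//
//   class spinlock {
//       std::atomic_flag flag_ = ATOMIC_FLAG_INIT;
//   public:
//       void lock() {
//           while (flag_.test_and_set(std::memory_order_acquire))
//               ; // spin until the previous holder clears the flag
//       }
//       void unlock() {
//           flag_.clear(std::memory_order_release);
//       }
//   };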

bool atomic_flag_test(volatile atomic_flag* obj) noexcept;
bool atomic_flag_test(atomic_flag* obj) noexcept;
bool atomic_flag_test_explicit(volatile atomic_flag* obj,
                               memory_order m) noexcept;
bool atomic_flag_test_explicit(atomic_flag* obj, memory_order m) noexcept;
bool atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
bool atomic_flag_test_and_set(atomic_flag* obj) noexcept;
bool atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
                                       memory_order m) noexcept;
bool atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
void atomic_flag_clear(volatile atomic_flag* obj) noexcept;
void atomic_flag_clear(atomic_flag* obj) noexcept;
void atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
void atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
void atomic_flag_wait(const volatile atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait(const atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait_explicit(const volatile atomic_flag* obj,
                               bool old, memory_order m) noexcept;
void atomic_flag_wait_explicit(const atomic_flag* obj,
                               bool old, memory_order m) noexcept;
void atomic_flag_notify_one(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_one(atomic_flag* obj) noexcept;
void atomic_flag_notify_all(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_all(atomic_flag* obj) noexcept;

// fences

void atomic_thread_fence(memory_order m) noexcept;
void atomic_signal_fence(memory_order m) noexcept;
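
// Illustrative usage (not part of the standard synopsis): pairing a release
// fence with an acquire fence so that the non-atomic write to `payload` is
// ordered across threads by the fences rather than by the atomic accesses.
//
//   int payload = 0;
//   std::atomic<bool> flag{false};
//
//   void writer() {
//       payload = 42;
//       std::atomic_thread_fence(std::memory_order_release);
//       flag.store(true, std::memory_order_relaxed);
//   }
//
//   void reader() {
//       while (!flag.load(std::memory_order_relaxed))
//           ;
//       std::atomic_thread_fence(std::memory_order_acquire);
//       // payload == 42 is now guaranteed to be visible
//   }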

// deprecated

template <class T>
  void atomic_init(volatile atomic<T>* obj, atomic<T>::value_type desr) noexcept;
template <class T>
  void atomic_init(atomic<T>* obj, atomic<T>::value_type desr) noexcept;

#define ATOMIC_VAR_INIT(value) see below
#define ATOMIC_FLAG_INIT see below

} // std

*/

#include <__availability>
#include <__config>
#include <__thread/poll_with_backoff.h>
#include <__thread/timed_backoff_policy.h>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <type_traits>
#include <version>

#ifndef _LIBCPP_HAS_NO_THREADS
# include <__threading_support>
#endif

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
# pragma GCC system_header
#endif

#ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
# error <atomic> is not implemented
#endif
#ifdef kill_dependency
# error C++ standard library is incompatible with <stdatomic.h>
#endif

#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
                           __m == memory_order_acquire || \
                           __m == memory_order_acq_rel, \
                           "memory order argument to atomic operation is invalid")

#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
                           __m == memory_order_acq_rel, \
                           "memory order argument to atomic operation is invalid")

#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
  _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
                           __f == memory_order_acq_rel, \
                           "memory order argument to atomic operation is invalid")
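
// Illustrative (non-normative) sketch of the misuse these checks are meant to
// flag; the orders below are invalid for the respective operations:
//
//   std::atomic<int> x{0};
//   x.store(1, std::memory_order_acquire);      // acquire is not a valid store order
//   int v = x.load(std::memory_order_release);  // release is not a valid load order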

_LIBCPP_BEGIN_NAMESPACE_STD

// Figure out what the underlying type for `memory_order` would be if it were
// declared as an unscoped enum (accounting for -fshort-enums). Use this result
// to pin the underlying type in C++20.
enum __legacy_memory_order {
    __mo_relaxed,
    __mo_consume,
    __mo_acquire,
    __mo_release,
    __mo_acq_rel,
    __mo_seq_cst
};

typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;

#if _LIBCPP_STD_VER > 17

enum class memory_order : __memory_order_underlying_t {
  relaxed = __mo_relaxed,
  consume = __mo_consume,
  acquire = __mo_acquire,
  release = __mo_release,
  acq_rel = __mo_acq_rel,
  seq_cst = __mo_seq_cst
};

inline constexpr auto memory_order_relaxed = memory_order::relaxed;
inline constexpr auto memory_order_consume = memory_order::consume;
inline constexpr auto memory_order_acquire = memory_order::acquire;
inline constexpr auto memory_order_release = memory_order::release;
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

#else

typedef enum memory_order {
  memory_order_relaxed = __mo_relaxed,
  memory_order_consume = __mo_consume,
  memory_order_acquire = __mo_acquire,
  memory_order_release = __mo_release,
  memory_order_acq_rel = __mo_acq_rel,
  memory_order_seq_cst = __mo_seq_cst,
} memory_order;

#endif // _LIBCPP_STD_VER > 17

template <typename _Tp> _LIBCPP_INLINE_VISIBILITY
bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
  return _VSTD::memcmp(&__lhs, &__rhs, sizeof(_Tp)) == 0;
}

static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
              "unexpected underlying type for std::memory_order");

#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
    defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)

// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
// the default operator= in an object is not volatile, a byte-by-byte copy
// is required.
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
  __a_value = __val;
}
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
  volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
  volatile char* __end = __to + sizeof(_Tp);
  volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
  while (__to != __end)
    *__to++ = *__from++;
}

#endif

#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)

template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;
};

_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELEASE:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
              __ATOMIC_CONSUME))));
}

_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELAXED:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
              __ATOMIC_CONSUME))));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __a->__a_value = __val;
}

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__to_gcc_order(__order));
}

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
                      memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
                          memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
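
// Illustrative note (non-normative): __skip_amt scales the delta handed to the
// GCC fetch_add/fetch_sub builtins so that atomic<T*> advances by whole
// objects rather than bytes. For example, with _Tp = int*,
// __skip_amt<int*>::value == sizeof(int), so a call such as
//
//   // __cxx_atomic_fetch_add(&a, ptrdiff_t(1), memory_order_relaxed);
//
// is forwarded to the builtin with a byte delta of 1 * sizeof(int).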

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                          memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                           memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
  780. #elif defined(_LIBCPP_HAS_C_ATOMIC_IMP) || defined(_LIBCPP_COMPILER_MSVC)
  781. #if defined(_LIBCPP_COMPILER_MSVC)
  782. _LIBCPP_END_NAMESPACE_STD
  783. #include <__support/win32/atomic_win32.h>
  784. _LIBCPP_BEGIN_NAMESPACE_STD
  785. #endif
  786. template <typename _Tp>
  787. struct __cxx_atomic_base_impl {
  788. _LIBCPP_INLINE_VISIBILITY
  789. #ifndef _LIBCPP_CXX03_LANG
  790. __cxx_atomic_base_impl() _NOEXCEPT = default;
  791. #else
  792. __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
  793. #endif // _LIBCPP_CXX03_LANG
  794. _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
  795. : __a_value(value) {}
  796. _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
  797. };
  798. #define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
  799. #ifdef _LIBCPP_COMPILER_MSVC
  800. #pragma warning ( push )
  801. #pragma warning ( disable : 4141 )
  802. #endif
  803. _LIBCPP_INLINE_VISIBILITY inline
  804. void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
  805. __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
  806. }
  807. _LIBCPP_INLINE_VISIBILITY inline
  808. void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
  809. __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
  810. }
  811. #ifdef _LIBCPP_COMPILER_MSVC
  812. #pragma warning ( pop )
  813. #endif
  814. template<class _Tp>
  815. _LIBCPP_INLINE_VISIBILITY
  816. void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
  817. __c11_atomic_init(&__a->__a_value, __val);
  818. }
  819. template<class _Tp>
  820. _LIBCPP_INLINE_VISIBILITY
  821. void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
  822. __c11_atomic_init(&__a->__a_value, __val);
  823. }
  824. template<class _Tp>
  825. _LIBCPP_INLINE_VISIBILITY
  826. void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
  827. __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
  828. }
  829. template<class _Tp>
  830. _LIBCPP_INLINE_VISIBILITY
  831. void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
  832. __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
  833. }
  834. template<class _Tp>
  835. _LIBCPP_INLINE_VISIBILITY
  836. _Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
  837. using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  838. return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
  839. }
  840. template<class _Tp>
  841. _LIBCPP_INLINE_VISIBILITY
  842. _Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
  843. using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  844. return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
  845. }
  846. template<class _Tp>
  847. _LIBCPP_INLINE_VISIBILITY
  848. _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
  849. return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
  850. }
  851. template<class _Tp>
  852. _LIBCPP_INLINE_VISIBILITY
  853. _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
  854. return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
  855. }
  856. // Disable double inline warning
  857. #ifdef _LIBCPP_COMPILER_MSVC
  858. #pragma warning ( push )
  859. #pragma warning ( disable : 4141 )
  860. #endif
  861. _LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __order) {
  862. // Avoid switch statement to make this a constexpr.
  863. return __order == memory_order_release ? memory_order_relaxed:
  864. (__order == memory_order_acq_rel ? memory_order_acquire:
  865. __order);
  866. }
  867. #ifdef _LIBCPP_COMPILER_MSVC
  868. #pragma warning ( pop )
  869. #endif
  870. template<class _Tp>
  871. _LIBCPP_INLINE_VISIBILITY
  872. bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  873. return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
  874. }
  875. template<class _Tp>
  876. _LIBCPP_INLINE_VISIBILITY
  877. bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  878. return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
  879. }
  880. template<class _Tp>
  881. _LIBCPP_INLINE_VISIBILITY
  882. bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  883. return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
  884. }
  885. template<class _Tp>
  886. _LIBCPP_INLINE_VISIBILITY
  887. bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  888. return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
  889. }
  890. template<class _Tp>
  891. _LIBCPP_INLINE_VISIBILITY
  892. _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  893. return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  894. }
  895. template<class _Tp>
  896. _LIBCPP_INLINE_VISIBILITY
  897. _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  898. return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  899. }
  900. template<class _Tp>
  901. _LIBCPP_INLINE_VISIBILITY
  902. _Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  903. return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  904. }
  905. template<class _Tp>
  906. _LIBCPP_INLINE_VISIBILITY
  907. _Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  908. return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  909. }
  910. template<class _Tp>
  911. _LIBCPP_INLINE_VISIBILITY
  912. _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  913. return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  914. }
  915. template<class _Tp>
  916. _LIBCPP_INLINE_VISIBILITY
  917. _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  918. return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  919. }
  920. template<class _Tp>
  921. _LIBCPP_INLINE_VISIBILITY
  922. _Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  923. return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  924. }
  925. template<class _Tp>
  926. _LIBCPP_INLINE_VISIBILITY
  927. _Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  928. return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
  929. }
  930. template<class _Tp>
  931. _LIBCPP_INLINE_VISIBILITY
  932. _Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  933. return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
  934. }
  935. template<class _Tp>
  936. _LIBCPP_INLINE_VISIBILITY
  937. _Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  938. return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
  939. }
  940. template<class _Tp>
  941. _LIBCPP_INLINE_VISIBILITY
  942. _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  943. return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
  944. }
  945. template<class _Tp>
  946. _LIBCPP_INLINE_VISIBILITY
  947. _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  948. return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
  949. }
  950. template<class _Tp>
  951. _LIBCPP_INLINE_VISIBILITY
  952. _Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  953. return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
  954. }
  955. template<class _Tp>
  956. _LIBCPP_INLINE_VISIBILITY
  957. _Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  958. return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
  959. }
  960. #endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
  961. template <class _Tp>
  962. _LIBCPP_INLINE_VISIBILITY
  963. _Tp kill_dependency(_Tp __y) _NOEXCEPT
  964. {
  965. return __y;
  966. }
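// Editorial illustration (not part of <atomic>): kill_dependency() simply returns its
// argument; its purpose is to mark the end of a memory_order_consume dependency chain.
// The type and function names below are hypothetical user-level code, shown only as a
// usage sketch.
#if 0
struct node { int index; };
int lookup(atomic<node*>& head, const int* table) {
  node* p = head.load(memory_order_consume);
  // p->index carries a dependency from the consume load; passing it through
  // kill_dependency tells the implementation the chain may end here, so the
  // table[] access need not be dependency-ordered after the load.
  return table[kill_dependency(p->index)];
}
#endif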
  967. #if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
  968. # define ATOMIC_BOOL_LOCK_FREE __CLANG_ATOMIC_BOOL_LOCK_FREE
  969. # define ATOMIC_CHAR_LOCK_FREE __CLANG_ATOMIC_CHAR_LOCK_FREE
  970. #ifndef _LIBCPP_HAS_NO_CHAR8_T
  971. # define ATOMIC_CHAR8_T_LOCK_FREE __CLANG_ATOMIC_CHAR8_T_LOCK_FREE
  972. #endif
  973. # define ATOMIC_CHAR16_T_LOCK_FREE __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
  974. # define ATOMIC_CHAR32_T_LOCK_FREE __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
  975. # define ATOMIC_WCHAR_T_LOCK_FREE __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
  976. # define ATOMIC_SHORT_LOCK_FREE __CLANG_ATOMIC_SHORT_LOCK_FREE
  977. # define ATOMIC_INT_LOCK_FREE __CLANG_ATOMIC_INT_LOCK_FREE
  978. # define ATOMIC_LONG_LOCK_FREE __CLANG_ATOMIC_LONG_LOCK_FREE
  979. # define ATOMIC_LLONG_LOCK_FREE __CLANG_ATOMIC_LLONG_LOCK_FREE
  980. # define ATOMIC_POINTER_LOCK_FREE __CLANG_ATOMIC_POINTER_LOCK_FREE
  981. #elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
  982. # define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
  983. # define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
  984. #ifndef _LIBCPP_HAS_NO_CHAR8_T
  985. # define ATOMIC_CHAR8_T_LOCK_FREE __GCC_ATOMIC_CHAR8_T_LOCK_FREE
  986. #endif
  987. # define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
  988. # define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
  989. # define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
  990. # define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
  991. # define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
  992. # define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
  993. # define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
  994. # define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
  995. #endif
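// Editorial illustration (not part of <atomic>): like their C counterparts, these macros
// expand to 0 (never lock-free), 1 (sometimes lock-free) or 2 (always lock-free), so a
// hypothetical user can gate code on them at preprocessing time:
#if 0
#  if ATOMIC_LLONG_LOCK_FREE == 2
     // atomic<long long> is always lock-free on this target.
#  else
     // it may fall back to a lock; avoid relying on it in signal handlers.
#  endif
#endif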
  996. #ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
  997. template<typename _Tp>
  998. struct __cxx_atomic_lock_impl {
  999. _LIBCPP_INLINE_VISIBILITY
  1000. __cxx_atomic_lock_impl() _NOEXCEPT
  1001. : __a_value(), __a_lock(0) {}
  1002. _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
  1003. __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
  1004. : __a_value(value), __a_lock(0) {}
  1005. _Tp __a_value;
  1006. mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;
  1007. _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
  1008. while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
  1009. /*spin*/;
  1010. }
  1011. _LIBCPP_INLINE_VISIBILITY void __lock() const {
  1012. while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
  1013. /*spin*/;
  1014. }
  1015. _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
  1016. __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  1017. }
  1018. _LIBCPP_INLINE_VISIBILITY void __unlock() const {
  1019. __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  1020. }
  1021. _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
  1022. __lock();
  1023. _Tp __old;
  1024. __cxx_atomic_assign_volatile(__old, __a_value);
  1025. __unlock();
  1026. return __old;
  1027. }
  1028. _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
  1029. __lock();
  1030. _Tp __old = __a_value;
  1031. __unlock();
  1032. return __old;
  1033. }
  1034. };
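// Editorial illustration (not part of <atomic>): __lock()/__unlock() above are a plain
// test-and-set spinlock. The same pattern expressed with the public interface, as a
// hypothetical sketch:
#if 0
struct spinlock_sketch {
  atomic<bool> locked{false};
  void lock() {
    // exchange() returns the previous value; spin while it was already true.
    // acquire on lock pairs with release on unlock.
    while (locked.exchange(true, memory_order_acquire))
      /* spin */;
  }
  void unlock() { locked.store(false, memory_order_release); }
};
#endif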
  1035. template <typename _Tp>
  1036. _LIBCPP_INLINE_VISIBILITY
  1037. void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
  1038. __cxx_atomic_assign_volatile(__a->__a_value, __val);
  1039. }
  1040. template <typename _Tp>
  1041. _LIBCPP_INLINE_VISIBILITY
  1042. void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
  1043. __a->__a_value = __val;
  1044. }
  1045. template <typename _Tp>
  1046. _LIBCPP_INLINE_VISIBILITY
  1047. void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
  1048. __a->__lock();
  1049. __cxx_atomic_assign_volatile(__a->__a_value, __val);
  1050. __a->__unlock();
  1051. }
  1052. template <typename _Tp>
  1053. _LIBCPP_INLINE_VISIBILITY
  1054. void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
  1055. __a->__lock();
  1056. __a->__a_value = __val;
  1057. __a->__unlock();
  1058. }
  1059. template <typename _Tp>
  1060. _LIBCPP_INLINE_VISIBILITY
  1061. _Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  1062. return __a->__read();
  1063. }
  1064. template <typename _Tp>
  1065. _LIBCPP_INLINE_VISIBILITY
  1066. _Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  1067. return __a->__read();
  1068. }
  1069. template <typename _Tp>
  1070. _LIBCPP_INLINE_VISIBILITY
  1071. _Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  1072. __a->__lock();
  1073. _Tp __old;
  1074. __cxx_atomic_assign_volatile(__old, __a->__a_value);
  1075. __cxx_atomic_assign_volatile(__a->__a_value, __value);
  1076. __a->__unlock();
  1077. return __old;
  1078. }
  1079. template <typename _Tp>
  1080. _LIBCPP_INLINE_VISIBILITY
  1081. _Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  1082. __a->__lock();
  1083. _Tp __old = __a->__a_value;
  1084. __a->__a_value = __value;
  1085. __a->__unlock();
  1086. return __old;
  1087. }
  1088. template <typename _Tp>
  1089. _LIBCPP_INLINE_VISIBILITY
  1090. bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1091. _Tp* __expected, _Tp __value, memory_order, memory_order) {
  1092. _Tp __temp;
  1093. __a->__lock();
  1094. __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  1095. bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  1096. if(__ret)
  1097. __cxx_atomic_assign_volatile(__a->__a_value, __value);
  1098. else
  1099. __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  1100. __a->__unlock();
  1101. return __ret;
  1102. }
  1103. template <typename _Tp>
  1104. _LIBCPP_INLINE_VISIBILITY
  1105. bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
  1106. _Tp* __expected, _Tp __value, memory_order, memory_order) {
  1107. __a->__lock();
  1108. bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  1109. if(__ret)
  1110. _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  1111. else
  1112. _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  1113. __a->__unlock();
  1114. return __ret;
  1115. }
  1116. template <typename _Tp>
  1117. _LIBCPP_INLINE_VISIBILITY
  1118. bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1119. _Tp* __expected, _Tp __value, memory_order, memory_order) {
  1120. _Tp __temp;
  1121. __a->__lock();
  1122. __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  1123. bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  1124. if(__ret)
  1125. __cxx_atomic_assign_volatile(__a->__a_value, __value);
  1126. else
  1127. __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  1128. __a->__unlock();
  1129. return __ret;
  1130. }
  1131. template <typename _Tp>
  1132. _LIBCPP_INLINE_VISIBILITY
  1133. bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
  1134. _Tp* __expected, _Tp __value, memory_order, memory_order) {
  1135. __a->__lock();
  1136. bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  1137. if(__ret)
  1138. _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  1139. else
  1140. _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  1141. __a->__unlock();
  1142. return __ret;
  1143. }
  1144. template <typename _Tp, typename _Td>
  1145. _LIBCPP_INLINE_VISIBILITY
  1146. _Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1147. _Td __delta, memory_order) {
  1148. __a->__lock();
  1149. _Tp __old;
  1150. __cxx_atomic_assign_volatile(__old, __a->__a_value);
  1151. __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
  1152. __a->__unlock();
  1153. return __old;
  1154. }
  1155. template <typename _Tp, typename _Td>
  1156. _LIBCPP_INLINE_VISIBILITY
  1157. _Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
  1158. _Td __delta, memory_order) {
  1159. __a->__lock();
  1160. _Tp __old = __a->__a_value;
  1161. __a->__a_value += __delta;
  1162. __a->__unlock();
  1163. return __old;
  1164. }
  1165. template <typename _Tp, typename _Td>
  1166. _LIBCPP_INLINE_VISIBILITY
  1167. _Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
  1168. ptrdiff_t __delta, memory_order) {
  1169. __a->__lock();
  1170. _Tp* __old;
  1171. __cxx_atomic_assign_volatile(__old, __a->__a_value);
  1172. __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
  1173. __a->__unlock();
  1174. return __old;
  1175. }
  1176. template <typename _Tp, typename _Td>
  1177. _LIBCPP_INLINE_VISIBILITY
  1178. _Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
  1179. ptrdiff_t __delta, memory_order) {
  1180. __a->__lock();
  1181. _Tp* __old = __a->__a_value;
  1182. __a->__a_value += __delta;
  1183. __a->__unlock();
  1184. return __old;
  1185. }
  1186. template <typename _Tp, typename _Td>
  1187. _LIBCPP_INLINE_VISIBILITY
  1188. _Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1189. _Td __delta, memory_order) {
  1190. __a->__lock();
  1191. _Tp __old;
  1192. __cxx_atomic_assign_volatile(__old, __a->__a_value);
  1193. __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
  1194. __a->__unlock();
  1195. return __old;
  1196. }
  1197. template <typename _Tp, typename _Td>
  1198. _LIBCPP_INLINE_VISIBILITY
  1199. _Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
  1200. _Td __delta, memory_order) {
  1201. __a->__lock();
  1202. _Tp __old = __a->__a_value;
  1203. __a->__a_value -= __delta;
  1204. __a->__unlock();
  1205. return __old;
  1206. }
  1207. template <typename _Tp>
  1208. _LIBCPP_INLINE_VISIBILITY
  1209. _Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1210. _Tp __pattern, memory_order) {
  1211. __a->__lock();
  1212. _Tp __old;
  1213. __cxx_atomic_assign_volatile(__old, __a->__a_value);
  1214. __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
  1215. __a->__unlock();
  1216. return __old;
  1217. }
  1218. template <typename _Tp>
  1219. _LIBCPP_INLINE_VISIBILITY
  1220. _Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
  1221. _Tp __pattern, memory_order) {
  1222. __a->__lock();
  1223. _Tp __old = __a->__a_value;
  1224. __a->__a_value &= __pattern;
  1225. __a->__unlock();
  1226. return __old;
  1227. }
  1228. template <typename _Tp>
  1229. _LIBCPP_INLINE_VISIBILITY
  1230. _Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1231. _Tp __pattern, memory_order) {
  1232. __a->__lock();
  1233. _Tp __old;
  1234. __cxx_atomic_assign_volatile(__old, __a->__a_value);
  1235. __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
  1236. __a->__unlock();
  1237. return __old;
  1238. }
  1239. template <typename _Tp>
  1240. _LIBCPP_INLINE_VISIBILITY
  1241. _Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
  1242. _Tp __pattern, memory_order) {
  1243. __a->__lock();
  1244. _Tp __old = __a->__a_value;
  1245. __a->__a_value |= __pattern;
  1246. __a->__unlock();
  1247. return __old;
  1248. }
  1249. template <typename _Tp>
  1250. _LIBCPP_INLINE_VISIBILITY
  1251. _Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1252. _Tp __pattern, memory_order) {
  1253. __a->__lock();
  1254. _Tp __old;
  1255. __cxx_atomic_assign_volatile(__old, __a->__a_value);
  1256. __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
  1257. __a->__unlock();
  1258. return __old;
  1259. }
  1260. template <typename _Tp>
  1261. _LIBCPP_INLINE_VISIBILITY
  1262. _Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
  1263. _Tp __pattern, memory_order) {
  1264. __a->__lock();
  1265. _Tp __old = __a->__a_value;
  1266. __a->__a_value ^= __pattern;
  1267. __a->__unlock();
  1268. return __old;
  1269. }
  1270. #ifdef __cpp_lib_atomic_is_always_lock_free
  1271. template<typename _Tp> struct __cxx_is_always_lock_free {
  1272. enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; };
  1273. #else
  1274. template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; };
  1275. // Implementations must match the C ATOMIC_*_LOCK_FREE macro values.
  1276. template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; };
  1277. template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
  1278. template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
  1279. template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
  1280. #ifndef _LIBCPP_HAS_NO_CHAR8_T
  1281. template<> struct __cxx_is_always_lock_free<char8_t> { enum { __value = 2 == ATOMIC_CHAR8_T_LOCK_FREE }; };
  1282. #endif
  1283. template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; };
  1284. template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; };
  1285. #ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
  1286. template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; };
  1287. #endif
  1288. template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
  1289. template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
  1290. template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
  1291. template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
  1292. template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
  1293. template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
  1294. template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
  1295. template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
  1296. template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
  1297. template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
  1298. #endif //__cpp_lib_atomic_is_always_lock_free
  1299. template <typename _Tp,
  1300. typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value,
  1301. __cxx_atomic_base_impl<_Tp>,
  1302. __cxx_atomic_lock_impl<_Tp> >::type>
  1303. #else
  1304. template <typename _Tp,
  1305. typename _Base = __cxx_atomic_base_impl<_Tp> >
  1306. #endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
  1307. struct __cxx_atomic_impl : public _Base {
  1308. static_assert(is_trivially_copyable<_Tp>::value,
  1309. "std::atomic<T> requires that 'T' be a trivially copyable type");
  1310. _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT = default;
  1311. _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp value) _NOEXCEPT
  1312. : _Base(value) {}
  1313. };
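// Editorial illustration (not part of <atomic>): whichever base class is selected, the
// static_assert above rejects payloads that are not trivially copyable. A hypothetical
// example of what does and does not instantiate:
#if 0
struct has_vtable { virtual ~has_vtable(); int value; }; // polymorphic => not trivially copyable
__cxx_atomic_impl<has_vtable> broken;   // error: the static_assert fires
__cxx_atomic_impl<int>        fine;     // OK: int is trivially copyable
#endif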
  1314. #ifdef __linux__
  1315. using __cxx_contention_t = int32_t;
  1316. #else
  1317. using __cxx_contention_t = int64_t;
  1318. #endif //__linux__
  1319. using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>;
  1320. #if defined(_LIBCPP_HAS_NO_THREADS)
  1321. # define _LIBCPP_HAS_NO_PLATFORM_WAIT
  1322. #endif
  1323. // TODO:
  1324. // _LIBCPP_HAS_NO_PLATFORM_WAIT is currently a "dead" macro, in the sense that
  1325. // it is not tied anywhere into the build system or even documented. We should
  1326. // clean it up because it is technically never defined except when threads are
1326. clean it up because it is technically never defined except when threads are
1327. disabled, and do the cleanup in its own changeset in case we break "bad"
1328. users.
  1329. #ifndef _LIBCPP_HAS_NO_PLATFORM_WAIT
  1330. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*);
  1331. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*);
  1332. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*);
  1333. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t);
  1334. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*);
  1335. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*);
  1336. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*);
  1337. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t);
  1338. template <class _Atp, class _Fn>
  1339. struct __libcpp_atomic_wait_backoff_impl {
  1340. _Atp* __a;
  1341. _Fn __test_fn;
  1342. _LIBCPP_AVAILABILITY_SYNC
  1343. _LIBCPP_INLINE_VISIBILITY bool operator()(chrono::nanoseconds __elapsed) const
  1344. {
  1345. if(__elapsed > chrono::microseconds(64))
  1346. {
  1347. auto const __monitor = __libcpp_atomic_monitor(__a);
  1348. if(__test_fn())
  1349. return true;
  1350. __libcpp_atomic_wait(__a, __monitor);
  1351. }
  1352. else if(__elapsed > chrono::microseconds(4))
  1353. __libcpp_thread_yield();
  1354. else
  1355. {} // poll
  1356. return false;
  1357. }
  1358. };
  1359. template <class _Atp, class _Fn>
  1360. _LIBCPP_AVAILABILITY_SYNC
  1361. _LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Fn && __test_fn)
  1362. {
  1363. __libcpp_atomic_wait_backoff_impl<_Atp, typename decay<_Fn>::type> __backoff_fn = {__a, __test_fn};
  1364. return __libcpp_thread_poll_with_backoff(__test_fn, __backoff_fn);
  1365. }
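// Editorial illustration (not part of <atomic>): a simplified model of how the backoff
// functor above is driven. This is NOT the real __libcpp_thread_poll_with_backoff; it
// only sketches the contract assumed here: the poll function is tried first, and the
// backoff policy is called with the elapsed time, returning true to stop waiting.
#if 0
template <class _Poll, class _Backoff>
bool poll_with_backoff_sketch(_Poll __poll, _Backoff __backoff) {
  auto const __start = chrono::high_resolution_clock::now();
  while (true) {
    if (__poll())
      return true;                                   // condition already satisfied
    chrono::nanoseconds const __elapsed = chrono::duration_cast<chrono::nanoseconds>(
        chrono::high_resolution_clock::now() - __start);
    // The backoff above polls for the first ~4us, yields up to ~64us, then blocks
    // on the platform wait primitive.
    if (__backoff(__elapsed))
      return true;
  }
}
#endif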
  1366. #else // _LIBCPP_HAS_NO_PLATFORM_WAIT
  1367. template <class _Tp>
  1368. _LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_all(__cxx_atomic_impl<_Tp> const volatile*) { }
  1369. template <class _Tp>
  1370. _LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_one(__cxx_atomic_impl<_Tp> const volatile*) { }
  1371. template <class _Atp, class _Fn>
  1372. _LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp*, _Fn && __test_fn)
  1373. {
  1374. #if defined(_LIBCPP_HAS_NO_THREADS)
  1375. using _Policy = __spinning_backoff_policy;
  1376. #else
  1377. using _Policy = __libcpp_timed_backoff_policy;
  1378. #endif
  1379. return __libcpp_thread_poll_with_backoff(__test_fn, _Policy());
  1380. }
  1381. #endif // _LIBCPP_HAS_NO_PLATFORM_WAIT
  1382. template <class _Atp, class _Tp>
  1383. struct __cxx_atomic_wait_test_fn_impl {
  1384. _Atp* __a;
  1385. _Tp __val;
  1386. memory_order __order;
  1387. _LIBCPP_INLINE_VISIBILITY bool operator()() const
  1388. {
  1389. return !__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val);
  1390. }
  1391. };
  1392. template <class _Atp, class _Tp>
  1393. _LIBCPP_AVAILABILITY_SYNC
  1394. _LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order)
  1395. {
  1396. __cxx_atomic_wait_test_fn_impl<_Atp, _Tp> __test_fn = {__a, __val, __order};
  1397. return __cxx_atomic_wait(__a, __test_fn);
  1398. }
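// Editorial illustration (not part of <atomic>): the overload above gives atomic waits
// their user-visible meaning - block until a load of the object observes a value that
// no longer compares (bitwise) equal to __val. Ignoring blocking, it behaves like this
// hypothetical spin form:
#if 0
template <class _Atp, class _Tp>
void wait_semantics_sketch(_Atp* __a, _Tp __val, memory_order __order) {
  // The real code blocks in the backoff instead of spinning here.
  while (__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val))
    /* keep waiting */;
}
#endif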
  1399. // general atomic<T>
  1400. template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
  1401. struct __atomic_base // false
  1402. {
  1403. mutable __cxx_atomic_impl<_Tp> __a_;
  1404. #if defined(__cpp_lib_atomic_is_always_lock_free)
  1405. static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
  1406. #endif
  1407. _LIBCPP_INLINE_VISIBILITY
  1408. bool is_lock_free() const volatile _NOEXCEPT
  1409. {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
  1410. _LIBCPP_INLINE_VISIBILITY
  1411. bool is_lock_free() const _NOEXCEPT
  1412. {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
  1413. _LIBCPP_INLINE_VISIBILITY
  1414. void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1415. _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
  1416. {__cxx_atomic_store(&__a_, __d, __m);}
  1417. _LIBCPP_INLINE_VISIBILITY
  1418. void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1419. _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
  1420. {__cxx_atomic_store(&__a_, __d, __m);}
  1421. _LIBCPP_INLINE_VISIBILITY
  1422. _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
  1423. _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
  1424. {return __cxx_atomic_load(&__a_, __m);}
  1425. _LIBCPP_INLINE_VISIBILITY
  1426. _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
  1427. _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
  1428. {return __cxx_atomic_load(&__a_, __m);}
  1429. _LIBCPP_INLINE_VISIBILITY
  1430. operator _Tp() const volatile _NOEXCEPT {return load();}
  1431. _LIBCPP_INLINE_VISIBILITY
  1432. operator _Tp() const _NOEXCEPT {return load();}
  1433. _LIBCPP_INLINE_VISIBILITY
  1434. _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1435. {return __cxx_atomic_exchange(&__a_, __d, __m);}
  1436. _LIBCPP_INLINE_VISIBILITY
  1437. _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1438. {return __cxx_atomic_exchange(&__a_, __d, __m);}
  1439. _LIBCPP_INLINE_VISIBILITY
  1440. bool compare_exchange_weak(_Tp& __e, _Tp __d,
  1441. memory_order __s, memory_order __f) volatile _NOEXCEPT
  1442. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1443. {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
  1444. _LIBCPP_INLINE_VISIBILITY
  1445. bool compare_exchange_weak(_Tp& __e, _Tp __d,
  1446. memory_order __s, memory_order __f) _NOEXCEPT
  1447. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1448. {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
  1449. _LIBCPP_INLINE_VISIBILITY
  1450. bool compare_exchange_strong(_Tp& __e, _Tp __d,
  1451. memory_order __s, memory_order __f) volatile _NOEXCEPT
  1452. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1453. {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
  1454. _LIBCPP_INLINE_VISIBILITY
  1455. bool compare_exchange_strong(_Tp& __e, _Tp __d,
  1456. memory_order __s, memory_order __f) _NOEXCEPT
  1457. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1458. {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
  1459. _LIBCPP_INLINE_VISIBILITY
  1460. bool compare_exchange_weak(_Tp& __e, _Tp __d,
  1461. memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1462. {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
  1463. _LIBCPP_INLINE_VISIBILITY
  1464. bool compare_exchange_weak(_Tp& __e, _Tp __d,
  1465. memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1466. {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
  1467. _LIBCPP_INLINE_VISIBILITY
  1468. bool compare_exchange_strong(_Tp& __e, _Tp __d,
  1469. memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1470. {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
  1471. _LIBCPP_INLINE_VISIBILITY
  1472. bool compare_exchange_strong(_Tp& __e, _Tp __d,
  1473. memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1474. {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
  1475. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
  1476. {__cxx_atomic_wait(&__a_, __v, __m);}
  1477. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
  1478. {__cxx_atomic_wait(&__a_, __v, __m);}
  1479. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() volatile _NOEXCEPT
  1480. {__cxx_atomic_notify_one(&__a_);}
  1481. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() _NOEXCEPT
  1482. {__cxx_atomic_notify_one(&__a_);}
  1483. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() volatile _NOEXCEPT
  1484. {__cxx_atomic_notify_all(&__a_);}
  1485. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() _NOEXCEPT
  1486. {__cxx_atomic_notify_all(&__a_);}
  1487. #if _LIBCPP_STD_VER > 17
  1488. _LIBCPP_INLINE_VISIBILITY constexpr
  1489. __atomic_base() noexcept(is_nothrow_default_constructible_v<_Tp>) : __a_(_Tp()) {}
  1490. #else
  1491. _LIBCPP_INLINE_VISIBILITY
  1492. __atomic_base() _NOEXCEPT = default;
  1493. #endif
  1494. _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
  1495. __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
  1496. #ifdef _LIBCPP_COMPILER_MSVC
  1497. #pragma warning ( push )
  1498. #pragma warning ( disable : 4522 )
  1499. #endif
  1500. __atomic_base(const __atomic_base&) = delete;
  1501. #ifdef _LIBCPP_COMPILER_MSVC
  1502. #pragma warning ( pop )
  1503. #endif
  1504. };
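// Editorial illustration (not part of <atomic>): the compare_exchange_weak overloads
// above are typically used in a retry loop, because the weak form may fail spuriously.
// Hypothetical user-level sketch:
#if 0
int fetch_multiply(atomic<int>& counter, int factor) {
  int expected = counter.load(memory_order_relaxed);
  // On failure, expected is refreshed with the current value, so the loop simply
  // retries with up-to-date data.
  while (!counter.compare_exchange_weak(expected, expected * factor,
                                        memory_order_acq_rel,
                                        memory_order_relaxed))
    ;
  return expected;   // the value observed just before the successful exchange
}
#endif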
  1505. #if defined(__cpp_lib_atomic_is_always_lock_free)
  1506. template <class _Tp, bool __b>
  1507. _LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
  1508. #endif
  1509. // atomic<Integral>
  1510. template <class _Tp>
  1511. struct __atomic_base<_Tp, true>
  1512. : public __atomic_base<_Tp, false>
  1513. {
  1514. typedef __atomic_base<_Tp, false> __base;
  1515. _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR_AFTER_CXX17
  1516. __atomic_base() _NOEXCEPT = default;
  1517. _LIBCPP_INLINE_VISIBILITY
  1518. _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
  1519. _LIBCPP_INLINE_VISIBILITY
  1520. _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1521. {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
  1522. _LIBCPP_INLINE_VISIBILITY
  1523. _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1524. {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
  1525. _LIBCPP_INLINE_VISIBILITY
  1526. _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1527. {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
  1528. _LIBCPP_INLINE_VISIBILITY
  1529. _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1530. {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
  1531. _LIBCPP_INLINE_VISIBILITY
  1532. _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1533. {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
  1534. _LIBCPP_INLINE_VISIBILITY
  1535. _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1536. {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
  1537. _LIBCPP_INLINE_VISIBILITY
  1538. _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1539. {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
  1540. _LIBCPP_INLINE_VISIBILITY
  1541. _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1542. {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
  1543. _LIBCPP_INLINE_VISIBILITY
  1544. _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  1545. {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
  1546. _LIBCPP_INLINE_VISIBILITY
  1547. _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
  1548. {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
  1549. _LIBCPP_INLINE_VISIBILITY
  1550. _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
  1551. _LIBCPP_INLINE_VISIBILITY
  1552. _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
  1553. _LIBCPP_INLINE_VISIBILITY
  1554. _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
  1555. _LIBCPP_INLINE_VISIBILITY
  1556. _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
  1557. _LIBCPP_INLINE_VISIBILITY
  1558. _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
  1559. _LIBCPP_INLINE_VISIBILITY
  1560. _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
  1561. _LIBCPP_INLINE_VISIBILITY
  1562. _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
  1563. _LIBCPP_INLINE_VISIBILITY
  1564. _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
  1565. _LIBCPP_INLINE_VISIBILITY
  1566. _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
  1567. _LIBCPP_INLINE_VISIBILITY
  1568. _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
  1569. _LIBCPP_INLINE_VISIBILITY
  1570. _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
  1571. _LIBCPP_INLINE_VISIBILITY
  1572. _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
  1573. _LIBCPP_INLINE_VISIBILITY
  1574. _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
  1575. _LIBCPP_INLINE_VISIBILITY
  1576. _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
  1577. _LIBCPP_INLINE_VISIBILITY
  1578. _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
  1579. _LIBCPP_INLINE_VISIBILITY
  1580. _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
  1581. _LIBCPP_INLINE_VISIBILITY
  1582. _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
  1583. _LIBCPP_INLINE_VISIBILITY
  1584. _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
  1585. };
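// Editorial illustration (not part of <atomic>): the fetch_* members return the value
// held *before* the operation, while the compound operators above recompute and return
// the *new* value. Hypothetical sketch:
#if 0
void counter_example(atomic<unsigned>& n) {   // assume n == 5 on entry
  unsigned old_value = n.fetch_add(2);        // old_value == 5, n is now 7
  unsigned new_value = (n += 2);              // new_value == 9, n is now 9
  (void)old_value; (void)new_value;
}
#endif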
  1586. // atomic<T>
  1587. template <class _Tp>
  1588. struct atomic
  1589. : public __atomic_base<_Tp>
  1590. {
  1591. typedef __atomic_base<_Tp> __base;
  1592. typedef _Tp value_type;
  1593. typedef value_type difference_type;
  1594. #if _LIBCPP_STD_VER > 17
  1595. _LIBCPP_INLINE_VISIBILITY
  1596. atomic() = default;
  1597. #else
  1598. _LIBCPP_INLINE_VISIBILITY
  1599. atomic() _NOEXCEPT = default;
  1600. #endif
  1601. _LIBCPP_INLINE_VISIBILITY
  1602. _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
  1603. _LIBCPP_INLINE_VISIBILITY
  1604. _Tp operator=(_Tp __d) volatile _NOEXCEPT
  1605. {__base::store(__d); return __d;}
  1606. _LIBCPP_INLINE_VISIBILITY
  1607. _Tp operator=(_Tp __d) _NOEXCEPT
  1608. {__base::store(__d); return __d;}
  1609. atomic& operator=(const atomic&) = delete;
  1610. atomic& operator=(const atomic&) volatile = delete;
  1611. };
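// Editorial illustration (not part of <atomic>): the primary template accepts any
// trivially copyable type, not just integers; such specializations provide load, store,
// exchange and compare_exchange, but none of the fetch_* arithmetic. Hypothetical:
#if 0
struct pair32 { int lo; int hi; };          // trivially copyable, 8 bytes
atomic<pair32> slot;                        // usually lock-free on 64-bit targets
void publish(int a, int b) {
  slot.store(pair32{a, b}, memory_order_release);
}
pair32 snapshot() {
  return slot.load(memory_order_acquire);
}
#endif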
  1612. // atomic<T*>
  1613. template <class _Tp>
  1614. struct atomic<_Tp*>
  1615. : public __atomic_base<_Tp*>
  1616. {
  1617. typedef __atomic_base<_Tp*> __base;
  1618. typedef _Tp* value_type;
  1619. typedef ptrdiff_t difference_type;
  1620. _LIBCPP_INLINE_VISIBILITY
  1621. atomic() _NOEXCEPT = default;
  1622. _LIBCPP_INLINE_VISIBILITY
  1623. _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
  1624. _LIBCPP_INLINE_VISIBILITY
  1625. _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
  1626. {__base::store(__d); return __d;}
  1627. _LIBCPP_INLINE_VISIBILITY
  1628. _Tp* operator=(_Tp* __d) _NOEXCEPT
  1629. {__base::store(__d); return __d;}
  1630. _LIBCPP_INLINE_VISIBILITY
  1631. _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
  1632. // __atomic_fetch_add accepts function pointers, guard against them.
  1633. static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
  1634. return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
  1635. }
  1636. _LIBCPP_INLINE_VISIBILITY
  1637. _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
  1638. // __atomic_fetch_add accepts function pointers, guard against them.
  1639. static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
  1640. return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
  1641. }
  1642. _LIBCPP_INLINE_VISIBILITY
  1643. _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
1644. // __atomic_fetch_sub accepts function pointers, guard against them.
  1645. static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
  1646. return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
  1647. }
  1648. _LIBCPP_INLINE_VISIBILITY
  1649. _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
1650. // __atomic_fetch_sub accepts function pointers, guard against them.
  1651. static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
  1652. return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
  1653. }
  1654. _LIBCPP_INLINE_VISIBILITY
  1655. _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
  1656. _LIBCPP_INLINE_VISIBILITY
  1657. _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
  1658. _LIBCPP_INLINE_VISIBILITY
  1659. _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
  1660. _LIBCPP_INLINE_VISIBILITY
  1661. _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
  1662. _LIBCPP_INLINE_VISIBILITY
  1663. _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
  1664. _LIBCPP_INLINE_VISIBILITY
  1665. _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
  1666. _LIBCPP_INLINE_VISIBILITY
  1667. _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
  1668. _LIBCPP_INLINE_VISIBILITY
  1669. _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
  1670. _LIBCPP_INLINE_VISIBILITY
  1671. _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
  1672. _LIBCPP_INLINE_VISIBILITY
  1673. _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
  1674. _LIBCPP_INLINE_VISIBILITY
  1675. _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
  1676. _LIBCPP_INLINE_VISIBILITY
  1677. _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
  1678. atomic& operator=(const atomic&) = delete;
  1679. atomic& operator=(const atomic&) volatile = delete;
  1680. };
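// Editorial illustration (not part of <atomic>): fetch_add/fetch_sub on atomic<T*> step
// in units of sizeof(T), exactly like ordinary pointer arithmetic. Hypothetical
// work-claiming cursor:
#if 0
int* claim_next(atomic<int*>& cursor) {
  // Atomically returns the pointer held before the call and advances the shared
  // cursor by one element (sizeof(int) bytes).
  return cursor.fetch_add(1, memory_order_relaxed);
}
#endif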
  1681. // atomic_is_lock_free
  1682. template <class _Tp>
  1683. _LIBCPP_INLINE_VISIBILITY
  1684. bool
  1685. atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
  1686. {
  1687. return __o->is_lock_free();
  1688. }
  1689. template <class _Tp>
  1690. _LIBCPP_INLINE_VISIBILITY
  1691. bool
  1692. atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
  1693. {
  1694. return __o->is_lock_free();
  1695. }
  1696. // atomic_init
  1697. template <class _Tp>
  1698. _LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
  1699. void
  1700. atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1701. {
  1702. __cxx_atomic_init(&__o->__a_, __d);
  1703. }
  1704. template <class _Tp>
  1705. _LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
  1706. void
  1707. atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1708. {
  1709. __cxx_atomic_init(&__o->__a_, __d);
  1710. }
  1711. // atomic_store
  1712. template <class _Tp>
  1713. _LIBCPP_INLINE_VISIBILITY
  1714. void
  1715. atomic_store(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1716. {
  1717. __o->store(__d);
  1718. }
  1719. template <class _Tp>
  1720. _LIBCPP_INLINE_VISIBILITY
  1721. void
  1722. atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1723. {
  1724. __o->store(__d);
  1725. }
  1726. // atomic_store_explicit
  1727. template <class _Tp>
  1728. _LIBCPP_INLINE_VISIBILITY
  1729. void
  1730. atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  1731. _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
  1732. {
  1733. __o->store(__d, __m);
  1734. }
  1735. template <class _Tp>
  1736. _LIBCPP_INLINE_VISIBILITY
  1737. void
  1738. atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  1739. _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
  1740. {
  1741. __o->store(__d, __m);
  1742. }
  1743. // atomic_load
  1744. template <class _Tp>
  1745. _LIBCPP_INLINE_VISIBILITY
  1746. _Tp
  1747. atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
  1748. {
  1749. return __o->load();
  1750. }
  1751. template <class _Tp>
  1752. _LIBCPP_INLINE_VISIBILITY
  1753. _Tp
  1754. atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
  1755. {
  1756. return __o->load();
  1757. }
  1758. // atomic_load_explicit
  1759. template <class _Tp>
  1760. _LIBCPP_INLINE_VISIBILITY
  1761. _Tp
  1762. atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  1763. _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
  1764. {
  1765. return __o->load(__m);
  1766. }
  1767. template <class _Tp>
  1768. _LIBCPP_INLINE_VISIBILITY
  1769. _Tp
  1770. atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  1771. _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
  1772. {
  1773. return __o->load(__m);
  1774. }
  1775. // atomic_exchange
  1776. template <class _Tp>
  1777. _LIBCPP_INLINE_VISIBILITY
  1778. _Tp
  1779. atomic_exchange(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1780. {
  1781. return __o->exchange(__d);
  1782. }
  1783. template <class _Tp>
  1784. _LIBCPP_INLINE_VISIBILITY
  1785. _Tp
  1786. atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1787. {
  1788. return __o->exchange(__d);
  1789. }
  1790. // atomic_exchange_explicit
  1791. template <class _Tp>
  1792. _LIBCPP_INLINE_VISIBILITY
  1793. _Tp
  1794. atomic_exchange_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  1795. {
  1796. return __o->exchange(__d, __m);
  1797. }
  1798. template <class _Tp>
  1799. _LIBCPP_INLINE_VISIBILITY
  1800. _Tp
  1801. atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  1802. {
  1803. return __o->exchange(__d, __m);
  1804. }
  1805. // atomic_compare_exchange_weak
  1806. template <class _Tp>
  1807. _LIBCPP_INLINE_VISIBILITY
  1808. bool
  1809. atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1810. {
  1811. return __o->compare_exchange_weak(*__e, __d);
  1812. }
  1813. template <class _Tp>
  1814. _LIBCPP_INLINE_VISIBILITY
  1815. bool
  1816. atomic_compare_exchange_weak(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1817. {
  1818. return __o->compare_exchange_weak(*__e, __d);
  1819. }
  1820. // atomic_compare_exchange_strong
  1821. template <class _Tp>
  1822. _LIBCPP_INLINE_VISIBILITY
  1823. bool
  1824. atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1825. {
  1826. return __o->compare_exchange_strong(*__e, __d);
  1827. }
  1828. template <class _Tp>
  1829. _LIBCPP_INLINE_VISIBILITY
  1830. bool
  1831. atomic_compare_exchange_strong(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
  1832. {
  1833. return __o->compare_exchange_strong(*__e, __d);
  1834. }
  1835. // atomic_compare_exchange_weak_explicit
  1836. template <class _Tp>
  1837. _LIBCPP_INLINE_VISIBILITY
  1838. bool
  1839. atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
  1840. typename atomic<_Tp>::value_type __d,
  1841. memory_order __s, memory_order __f) _NOEXCEPT
  1842. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1843. {
  1844. return __o->compare_exchange_weak(*__e, __d, __s, __f);
  1845. }
  1846. template <class _Tp>
  1847. _LIBCPP_INLINE_VISIBILITY
  1848. bool
  1849. atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
  1850. memory_order __s, memory_order __f) _NOEXCEPT
  1851. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1852. {
  1853. return __o->compare_exchange_weak(*__e, __d, __s, __f);
  1854. }
  1855. // atomic_compare_exchange_strong_explicit
  1856. template <class _Tp>
  1857. _LIBCPP_INLINE_VISIBILITY
  1858. bool
  1859. atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
  1860. typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
  1861. memory_order __s, memory_order __f) _NOEXCEPT
  1862. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1863. {
  1864. return __o->compare_exchange_strong(*__e, __d, __s, __f);
  1865. }
  1866. template <class _Tp>
  1867. _LIBCPP_INLINE_VISIBILITY
  1868. bool
  1869. atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
  1870. typename atomic<_Tp>::value_type __d,
  1871. memory_order __s, memory_order __f) _NOEXCEPT
  1872. _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
  1873. {
  1874. return __o->compare_exchange_strong(*__e, __d, __s, __f);
  1875. }
  1876. // atomic_wait
  1877. template <class _Tp>
  1878. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1879. void atomic_wait(const volatile atomic<_Tp>* __o,
  1880. typename atomic<_Tp>::value_type __v) _NOEXCEPT
  1881. {
  1882. return __o->wait(__v);
  1883. }
  1884. template <class _Tp>
  1885. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1886. void atomic_wait(const atomic<_Tp>* __o,
  1887. typename atomic<_Tp>::value_type __v) _NOEXCEPT
  1888. {
  1889. return __o->wait(__v);
  1890. }
  1891. // atomic_wait_explicit
  1892. template <class _Tp>
  1893. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1894. void atomic_wait_explicit(const volatile atomic<_Tp>* __o,
  1895. typename atomic<_Tp>::value_type __v,
  1896. memory_order __m) _NOEXCEPT
  1897. _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
  1898. {
  1899. return __o->wait(__v, __m);
  1900. }
  1901. template <class _Tp>
  1902. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1903. void atomic_wait_explicit(const atomic<_Tp>* __o,
  1904. typename atomic<_Tp>::value_type __v,
  1905. memory_order __m) _NOEXCEPT
  1906. _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
  1907. {
  1908. return __o->wait(__v, __m);
  1909. }
  1910. // atomic_notify_one
  1911. template <class _Tp>
  1912. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1913. void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT
  1914. {
  1915. __o->notify_one();
  1916. }
  1917. template <class _Tp>
  1918. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1919. void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT
  1920. {
  1921. __o->notify_one();
  1922. }
1923. // atomic_notify_all
  1924. template <class _Tp>
  1925. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1926. void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT
  1927. {
  1928. __o->notify_all();
  1929. }
  1930. template <class _Tp>
  1931. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  1932. void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT
  1933. {
  1934. __o->notify_all();
  1935. }
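// Editorial illustration (not part of <atomic>): the free functions above pair the same
// way the members do - a waiter blocks while the value still equals its argument, and a
// notifier wakes it after changing the value. Hypothetical two-thread sketch (each
// function run on its own thread):
#if 0
atomic<int> ready{0};
void waiter() {
  atomic_wait(&ready, 0);        // blocks while ready is still 0
  // ... ready has been observed != 0 here ...
}
void notifier() {
  atomic_store(&ready, 1);
  atomic_notify_one(&ready);     // wake one thread blocked in atomic_wait
}
#endif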
  1936. // atomic_fetch_add
  1937. template <class _Tp>
  1938. _LIBCPP_INLINE_VISIBILITY
  1939. _Tp
  1940. atomic_fetch_add(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
  1941. {
  1942. return __o->fetch_add(__op);
  1943. }
  1944. template <class _Tp>
  1945. _LIBCPP_INLINE_VISIBILITY
  1946. _Tp
  1947. atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
  1948. {
  1949. return __o->fetch_add(__op);
  1950. }
  1951. // atomic_fetch_add_explicit
  1952. template <class _Tp>
  1953. _LIBCPP_INLINE_VISIBILITY
  1954. _Tp atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
  1955. {
  1956. return __o->fetch_add(__op, __m);
  1957. }
  1958. template <class _Tp>
  1959. _LIBCPP_INLINE_VISIBILITY
  1960. _Tp atomic_fetch_add_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
  1961. {
  1962. return __o->fetch_add(__op, __m);
  1963. }
  1964. // atomic_fetch_sub
  1965. template <class _Tp>
  1966. _LIBCPP_INLINE_VISIBILITY
  1967. _Tp atomic_fetch_sub(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
  1968. {
  1969. return __o->fetch_sub(__op);
  1970. }
  1971. template <class _Tp>
  1972. _LIBCPP_INLINE_VISIBILITY
  1973. _Tp atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
  1974. {
  1975. return __o->fetch_sub(__op);
  1976. }
  1977. // atomic_fetch_sub_explicit
  1978. template <class _Tp>
  1979. _LIBCPP_INLINE_VISIBILITY
  1980. _Tp atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
  1981. {
  1982. return __o->fetch_sub(__op, __m);
  1983. }
  1984. template <class _Tp>
  1985. _LIBCPP_INLINE_VISIBILITY
  1986. _Tp atomic_fetch_sub_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
  1987. {
  1988. return __o->fetch_sub(__op, __m);
  1989. }
  1990. // atomic_fetch_and
  1991. template <class _Tp>
  1992. _LIBCPP_INLINE_VISIBILITY
  1993. typename enable_if
  1994. <
  1995. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  1996. _Tp
  1997. >::type
  1998. atomic_fetch_and(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
  1999. {
  2000. return __o->fetch_and(__op);
  2001. }
  2002. template <class _Tp>
  2003. _LIBCPP_INLINE_VISIBILITY
  2004. typename enable_if
  2005. <
  2006. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2007. _Tp
  2008. >::type
  2009. atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
  2010. {
  2011. return __o->fetch_and(__op);
  2012. }
  2013. // atomic_fetch_and_explicit
  2014. template <class _Tp>
  2015. _LIBCPP_INLINE_VISIBILITY
  2016. typename enable_if
  2017. <
  2018. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2019. _Tp
  2020. >::type
  2021. atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
  2022. {
  2023. return __o->fetch_and(__op, __m);
  2024. }
  2025. template <class _Tp>
  2026. _LIBCPP_INLINE_VISIBILITY
  2027. typename enable_if
  2028. <
  2029. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2030. _Tp
  2031. >::type
  2032. atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
  2033. {
  2034. return __o->fetch_and(__op, __m);
  2035. }
  2036. // atomic_fetch_or
  2037. template <class _Tp>
  2038. _LIBCPP_INLINE_VISIBILITY
  2039. typename enable_if
  2040. <
  2041. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2042. _Tp
  2043. >::type
  2044. atomic_fetch_or(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
  2045. {
  2046. return __o->fetch_or(__op);
  2047. }
  2048. template <class _Tp>
  2049. _LIBCPP_INLINE_VISIBILITY
  2050. typename enable_if
  2051. <
  2052. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2053. _Tp
  2054. >::type
  2055. atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
  2056. {
  2057. return __o->fetch_or(__op);
  2058. }
  2059. // atomic_fetch_or_explicit
  2060. template <class _Tp>
  2061. _LIBCPP_INLINE_VISIBILITY
  2062. typename enable_if
  2063. <
  2064. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2065. _Tp
  2066. >::type
  2067. atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
  2068. {
  2069. return __o->fetch_or(__op, __m);
  2070. }
  2071. template <class _Tp>
  2072. _LIBCPP_INLINE_VISIBILITY
  2073. typename enable_if
  2074. <
  2075. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2076. _Tp
  2077. >::type
  2078. atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
  2079. {
  2080. return __o->fetch_or(__op, __m);
  2081. }
  2082. // atomic_fetch_xor
  2083. template <class _Tp>
  2084. _LIBCPP_INLINE_VISIBILITY
  2085. typename enable_if
  2086. <
  2087. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2088. _Tp
  2089. >::type
  2090. atomic_fetch_xor(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
  2091. {
  2092. return __o->fetch_xor(__op);
  2093. }
  2094. template <class _Tp>
  2095. _LIBCPP_INLINE_VISIBILITY
  2096. typename enable_if
  2097. <
  2098. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2099. _Tp
  2100. >::type
  2101. atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
  2102. {
  2103. return __o->fetch_xor(__op);
  2104. }
  2105. // atomic_fetch_xor_explicit
  2106. template <class _Tp>
  2107. _LIBCPP_INLINE_VISIBILITY
  2108. typename enable_if
  2109. <
  2110. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2111. _Tp
  2112. >::type
  2113. atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
  2114. {
  2115. return __o->fetch_xor(__op, __m);
  2116. }
  2117. template <class _Tp>
  2118. _LIBCPP_INLINE_VISIBILITY
  2119. typename enable_if
  2120. <
  2121. is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  2122. _Tp
  2123. >::type
  2124. atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
  2125. {
  2126. return __o->fetch_xor(__op, __m);
  2127. }
  2128. // flag type and operations
  2129. typedef struct atomic_flag
  2130. {
  2131. __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;
  2132. _LIBCPP_INLINE_VISIBILITY
  2133. bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
  2134. {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
  2135. _LIBCPP_INLINE_VISIBILITY
  2136. bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
  2137. {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
  2138. _LIBCPP_INLINE_VISIBILITY
  2139. bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  2140. {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
  2141. _LIBCPP_INLINE_VISIBILITY
  2142. bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
  2143. {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
  2144. _LIBCPP_INLINE_VISIBILITY
  2145. void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
  2146. {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
  2147. _LIBCPP_INLINE_VISIBILITY
  2148. void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
  2149. {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
  2150. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  2151. void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
  2152. {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
  2153. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  2154. void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
  2155. {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
  2156. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  2157. void notify_one() volatile _NOEXCEPT
  2158. {__cxx_atomic_notify_one(&__a_);}
  2159. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  2160. void notify_one() _NOEXCEPT
  2161. {__cxx_atomic_notify_one(&__a_);}
  2162. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  2163. void notify_all() volatile _NOEXCEPT
  2164. {__cxx_atomic_notify_all(&__a_);}
  2165. _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  2166. void notify_all() _NOEXCEPT
  2167. {__cxx_atomic_notify_all(&__a_);}
  2168. #if _LIBCPP_STD_VER > 17
  2169. _LIBCPP_INLINE_VISIBILITY constexpr
  2170. atomic_flag() _NOEXCEPT : __a_(false) {}
  2171. #else
  2172. _LIBCPP_INLINE_VISIBILITY
  2173. atomic_flag() _NOEXCEPT = default;
  2174. #endif
  2175. _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
  2176. atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION
  2177. #ifdef _LIBCPP_COMPILER_MSVC
  2178. #pragma warning ( push )
  2179. #pragma warning ( disable : 4522 )
  2180. #endif
  2181. atomic_flag(const atomic_flag&) = delete;
  2182. atomic_flag& operator=(const atomic_flag&) = delete;
  2183. atomic_flag& operator=(const atomic_flag&) volatile = delete;
  2184. #ifdef _LIBCPP_COMPILER_MSVC
  2185. #pragma warning ( pop )
  2186. #endif
  2187. } atomic_flag;
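// Editorial illustration (not part of <atomic>): atomic_flag is the classic building
// block for a spinlock; since C++20 the wait/notify members let the contended path
// block instead of burning CPU. Hypothetical sketch:
#if 0
struct flag_lock {
  atomic_flag f;                                   // since C++20, default-constructed clear
  void lock() {
    while (f.test_and_set(memory_order_acquire))   // previous value true => still held
      f.wait(true, memory_order_relaxed);          // sleep until the flag is cleared
  }
  void unlock() {
    f.clear(memory_order_release);
    f.notify_one();                                // wake one blocked waiter
  }
};
#endif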
  2188. inline _LIBCPP_INLINE_VISIBILITY
  2189. bool
  2190. atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT
  2191. {
  2192. return __o->test();
  2193. }
  2194. inline _LIBCPP_INLINE_VISIBILITY
  2195. bool
  2196. atomic_flag_test(const atomic_flag* __o) _NOEXCEPT
  2197. {
  2198. return __o->test();
  2199. }
  2200. inline _LIBCPP_INLINE_VISIBILITY
  2201. bool
  2202. atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
  2203. {
  2204. return __o->test(__m);
  2205. }
  2206. inline _LIBCPP_INLINE_VISIBILITY
  2207. bool
  2208. atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT
  2209. {
  2210. return __o->test(__m);
  2211. }
  2212. inline _LIBCPP_INLINE_VISIBILITY
  2213. bool
  2214. atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
  2215. {
  2216. return __o->test_and_set();
  2217. }
  2218. inline _LIBCPP_INLINE_VISIBILITY
  2219. bool
  2220. atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
  2221. {
  2222. return __o->test_and_set();
  2223. }
  2224. inline _LIBCPP_INLINE_VISIBILITY
  2225. bool
  2226. atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
  2227. {
  2228. return __o->test_and_set(__m);
  2229. }
  2230. inline _LIBCPP_INLINE_VISIBILITY
  2231. bool
  2232. atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
  2233. {
  2234. return __o->test_and_set(__m);
  2235. }
  2236. inline _LIBCPP_INLINE_VISIBILITY
  2237. void
  2238. atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
  2239. {
  2240. __o->clear();
  2241. }
  2242. inline _LIBCPP_INLINE_VISIBILITY
  2243. void
  2244. atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
  2245. {
  2246. __o->clear();
  2247. }
  2248. inline _LIBCPP_INLINE_VISIBILITY
  2249. void
  2250. atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
  2251. {
  2252. __o->clear(__m);
  2253. }
  2254. inline _LIBCPP_INLINE_VISIBILITY
  2255. void
  2256. atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
  2257. {
  2258. __o->clear(__m);
  2259. }
inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait(const volatile atomic_flag* __o, bool __v) _NOEXCEPT
{
    __o->wait(__v);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait(const atomic_flag* __o, bool __v) _NOEXCEPT
{
    __o->wait(__v);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait_explicit(const volatile atomic_flag* __o,
                          bool __v, memory_order __m) _NOEXCEPT
{
    __o->wait(__v, __m);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait_explicit(const atomic_flag* __o,
                          bool __v, memory_order __m) _NOEXCEPT
{
    __o->wait(__v, __m);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_notify_one(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->notify_one();
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_notify_one(atomic_flag* __o) _NOEXCEPT
{
    __o->notify_one();
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_notify_all(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->notify_all();
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_notify_all(atomic_flag* __o) _NOEXCEPT
{
    __o->notify_all();
}
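
// Illustrative sketch (not part of this header): the free functions above mirror
// the atomic_flag member functions and exist for C compatibility. A hypothetical
// one-shot completion signal using them, assuming C++20 for the wait/notify forms:
//
//     #include <atomic>
//
//     std::atomic_flag __done;                       // default-constructed clear (C++20)
//
//     void __worker() {
//         // ... do work ...
//         std::atomic_flag_test_and_set(&__done);    // publish completion
//         std::atomic_flag_notify_all(&__done);      // wake all waiters
//     }
//
//     void __waiter() {
//         std::atomic_flag_wait(&__done, false);     // block while the flag is still false
//         // __done.test() now returns true
//     }
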
// fences

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
    __cxx_atomic_thread_fence(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
    __cxx_atomic_signal_fence(__m);
}
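
// Illustrative sketch (not part of this header): a release fence paired with an
// acquire fence can order otherwise-relaxed operations. The names __payload and
// __ready are hypothetical.
//
//     #include <atomic>
//
//     int __payload = 0;
//     std::atomic<bool> __ready{false};
//
//     void __producer() {
//         __payload = 42;                                          // plain store
//         std::atomic_thread_fence(std::memory_order_release);     // release fence
//         __ready.store(true, std::memory_order_relaxed);
//     }
//
//     void __consumer() {
//         while (!__ready.load(std::memory_order_relaxed)) { }     // spin until published
//         std::atomic_thread_fence(std::memory_order_acquire);     // acquire fence
//         // __payload is now guaranteed to be 42
//     }
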
// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
#ifndef _LIBCPP_HAS_NO_CHAR8_T
typedef atomic<char8_t>            atomic_char8_t;
#endif
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
typedef atomic<wchar_t>            atomic_wchar_t;
#endif

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic< int8_t>  atomic_int8_t;
typedef atomic<uint8_t>  atomic_uint8_t;
typedef atomic< int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic< int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic< int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;
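
// Illustrative note (not part of this header): each of these typedefs names the
// corresponding specialization of atomic, not a distinct type. For example:
//
//     #include <atomic>
//     #include <type_traits>
//
//     static_assert(std::is_same<std::atomic_int, std::atomic<int> >::value,
//                   "atomic_int is an alias for atomic<int>");
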
// atomic_*_lock_free : prefer the contention type most highly, then the largest lock-free type

#ifdef __cpp_lib_atomic_is_always_lock_free
# define _LIBCPP_CONTENTION_LOCK_FREE __atomic_always_lock_free(sizeof(__cxx_contention_t), 0)
#else
# define _LIBCPP_CONTENTION_LOCK_FREE false
#endif

#if ATOMIC_LLONG_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, long long>::type          __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned long long>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_INT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, int>::type                __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned int>::type       __libcpp_unsigned_lock_free;
#elif ATOMIC_SHORT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, short>::type              __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned short>::type     __libcpp_unsigned_lock_free;
#elif ATOMIC_CHAR_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, char>::type               __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned char>::type      __libcpp_unsigned_lock_free;
#else
// No signed/unsigned lock-free types
#define _LIBCPP_NO_LOCK_FREE_TYPES
#endif

#if !defined(_LIBCPP_NO_LOCK_FREE_TYPES)
typedef atomic<__libcpp_signed_lock_free>   atomic_signed_lock_free;
typedef atomic<__libcpp_unsigned_lock_free> atomic_unsigned_lock_free;
#endif
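
// Illustrative usage sketch (not part of this header): atomic_signed_lock_free and
// atomic_unsigned_lock_free pick the element type that is cheapest to wait on, so
// they are a natural fit for wait()/notify() counters. The names below are
// hypothetical and assume C++20.
//
//     #include <atomic>
//
//     std::atomic_unsigned_lock_free __pending{0};
//
//     void __publish_work() {
//         __pending.fetch_add(1, std::memory_order_release);
//         __pending.notify_one();                    // wake one waiting consumer
//     }
//
//     void __wait_for_work() {
//         auto __observed = __pending.load(std::memory_order_acquire);
//         while (__observed == 0) {
//             __pending.wait(0);                     // block while the count is still 0
//             __observed = __pending.load(std::memory_order_acquire);
//         }
//     }
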
#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}

#if _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)
# if defined(_LIBCPP_CLANG_VER) && _LIBCPP_CLANG_VER >= 1400
#   pragma clang deprecated(ATOMIC_FLAG_INIT)
#   pragma clang deprecated(ATOMIC_VAR_INIT)
# endif
#endif // _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)
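
// Illustrative note (not part of this header): the macros above are only needed
// before C++20; from C++20 on they are deprecated and plain initialization works.
//
//     std::atomic_flag __f1 = ATOMIC_FLAG_INIT;    // pre-C++20 style (deprecated in C++20)
//     std::atomic_flag __f2;                       // C++20: default-constructed clear
//     std::atomic<int> __i1 = ATOMIC_VAR_INIT(5);  // pre-C++20 style (deprecated in C++20)
//     std::atomic<int> __i2{5};                    // C++20: direct initialization
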
_LIBCPP_END_NAMESPACE_STD

#endif // _LIBCPP_ATOMIC