3 // Copyright (C) 2008-2023 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
/** @file include/atomic
 *  This is a Standard C++ Library header.
 *  Do not attempt to use it directly. @headername{atomic}
 */
29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
35 #pragma GCC system_header
37 #if __cplusplus < 201103L
38 # include <bits/c++0x_warning.h>
41 #define __glibcxx_want_atomic_is_always_lock_free
42 #define __glibcxx_want_atomic_flag_test
43 #define __glibcxx_want_atomic_float
44 #define __glibcxx_want_atomic_ref
45 #define __glibcxx_want_atomic_lock_free_type_aliases
46 #define __glibcxx_want_atomic_value_initialization
47 #define __glibcxx_want_atomic_wait
48 #include <bits/version.h>
50 #include <bits/atomic_base.h>
52 namespace std _GLIBCXX_VISIBILITY(default)
54 _GLIBCXX_BEGIN_NAMESPACE_VERSION
61 template<typename _Tp>
// NOTE(review): this dump is missing lines from the upstream header -- the
// "struct atomic;" forward declaration, the "template<> struct atomic<bool> {"
// opener, the member return types (bool/void) and the closing "};" are gone.
// Comments below describe the surviving members; confirm against upstream.
//
// atomic<bool>: wrapper that forwards every operation to a
// __atomic_base<bool> data member.
65 // NB: No operators or fetch-operations for this type.
69 using value_type = bool;
// Sole data member: the base implementation object.
72 __atomic_base<bool> _M_base;
// Default-constructible/destructible; copying and copy-assignment deleted.
75 atomic() noexcept = default;
76 ~atomic() noexcept = default;
77 atomic(const atomic&) = delete;
78 atomic& operator=(const atomic&) = delete;
79 atomic& operator=(const atomic&) volatile = delete;
// Implicit conversion from bool.
81 constexpr atomic(bool __i) noexcept : _M_base(__i) { }
// Assignment and conversion-to-bool delegate to _M_base.
84 operator=(bool __i) noexcept
85 { return _M_base.operator=(__i); }
88 operator=(bool __i) volatile noexcept
89 { return _M_base.operator=(__i); }
91 operator bool() const noexcept
92 { return _M_base.load(); }
94 operator bool() const volatile noexcept
95 { return _M_base.load(); }
98 is_lock_free() const noexcept { return _M_base.is_lock_free(); }
101 is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
103 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
104 static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
// store/load/exchange with an explicit (defaulted seq_cst) memory order,
// all forwarded to _M_base.
108 store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
109 { _M_base.store(__i, __m); }
112 store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
113 { _M_base.store(__i, __m); }
116 load(memory_order __m = memory_order_seq_cst) const noexcept
117 { return _M_base.load(__m); }
120 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
121 { return _M_base.load(__m); }
124 exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
125 { return _M_base.exchange(__i, __m); }
// NOTE(review): the first line of the volatile exchange overload is missing
// here; only its trailing parameter line and body survive.
129 memory_order __m = memory_order_seq_cst) volatile noexcept
130 { return _M_base.exchange(__i, __m); }
// compare_exchange_{weak,strong}: two-order and single-order forms, plus
// volatile overloads, all forwarded to _M_base.
133 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
134 memory_order __m2) noexcept
135 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
138 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
139 memory_order __m2) volatile noexcept
140 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
143 compare_exchange_weak(bool& __i1, bool __i2,
144 memory_order __m = memory_order_seq_cst) noexcept
145 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
148 compare_exchange_weak(bool& __i1, bool __i2,
149 memory_order __m = memory_order_seq_cst) volatile noexcept
150 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
153 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
154 memory_order __m2) noexcept
155 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
158 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
159 memory_order __m2) volatile noexcept
160 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
163 compare_exchange_strong(bool& __i1, bool __i2,
164 memory_order __m = memory_order_seq_cst) noexcept
165 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
168 compare_exchange_strong(bool& __i1, bool __i2,
169 memory_order __m = memory_order_seq_cst) volatile noexcept
170 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
// C++20 wait/notify support, forwarded to _M_base.
172 #if __cpp_lib_atomic_wait
174 wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
175 { _M_base.wait(__old, __m); }
177 // TODO add const volatile overload
180 notify_one() noexcept
181 { _M_base.notify_one(); }
184 notify_all() noexcept
185 { _M_base.notify_all(); }
186 #endif // __cpp_lib_atomic_wait
189 /// @cond undocumented
// _GLIBCXX20_INIT: expands to "= I" when C++20 value-initialization of
// atomics is in effect, otherwise to nothing.
// NOTE(review): the "#else" and "#endif" lines of this conditional are
// missing from this dump.
190 #if __cpp_lib_atomic_value_initialization
191 # define _GLIBCXX20_INIT(I) = I
193 # define _GLIBCXX20_INIT(I)
// NOTE(review): the "/**" opener and "*/" closer of the Doxygen block below,
// and the "struct atomic {" class opener after the template line, were lost
// in extraction; member return types and closing braces are likewise absent.
198 * @brief Generic atomic type, primary class template.
200 * @tparam _Tp Type to be made atomic, must be trivially copyable.
202 template<typename _Tp>
205 using value_type = _Tp;
// _S_min_alignment/_S_alignment: power-of-two sizes up to 16 bytes are
// aligned to at least their own size so the compiler builtins can be
// lock-free where the target supports it.
208 // Align 1/2/4/8/16-byte types to at least their size.
209 static constexpr int _S_min_alignment
210 = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
213 static constexpr int _S_alignment
214 = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
// The stored value; value-initialized in C++20 mode via _GLIBCXX20_INIT.
216 alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());
// Compile-time requirements on _Tp.
218 static_assert(__is_trivially_copyable(_Tp),
219 "std::atomic requires a trivially copyable type");
221 static_assert(sizeof(_Tp) > 0,
222 "Incomplete or zero-sized types are not supported");
224 #if __cplusplus > 201703L
225 static_assert(is_copy_constructible_v<_Tp>);
226 static_assert(is_move_constructible_v<_Tp>);
227 static_assert(is_copy_assignable_v<_Tp>);
228 static_assert(is_move_assignable_v<_Tp>);
233 ~atomic() noexcept = default;
234 atomic(const atomic&) = delete;
235 atomic& operator=(const atomic&) = delete;
236 atomic& operator=(const atomic&) volatile = delete;
// Converting constructor: clears padding bits (when the builtin exists) so
// compare_exchange on the stored representation behaves predictably.
238 constexpr atomic(_Tp __i) noexcept : _M_i(__i)
240 #if __cplusplus >= 201402L && __has_builtin(__builtin_clear_padding)
241 if _GLIBCXX17_CONSTEXPR (__atomic_impl::__maybe_has_padding<_Tp>())
242 __builtin_clear_padding(std::__addressof(_M_i));
246 operator _Tp() const noexcept
249 operator _Tp() const volatile noexcept
253 operator=(_Tp __i) noexcept
254 { store(__i); return __i; }
257 operator=(_Tp __i) volatile noexcept
258 { store(__i); return __i; }
261 is_lock_free() const noexcept
263 // Produce a fake, minimally aligned pointer.
264 return __atomic_is_lock_free(sizeof(_M_i),
265 reinterpret_cast<void *>(-_S_alignment));
269 is_lock_free() const volatile noexcept
271 // Produce a fake, minimally aligned pointer.
272 return __atomic_is_lock_free(sizeof(_M_i),
273 reinterpret_cast<void *>(-_S_alignment));
276 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
277 static constexpr bool is_always_lock_free
278 = __atomic_always_lock_free(sizeof(_M_i), 0);
// store: forwards to the __atomic_store builtin after clearing padding.
282 store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
284 __atomic_store(std::__addressof(_M_i),
285 __atomic_impl::__clear_padding(__i),
290 store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
292 __atomic_store(std::__addressof(_M_i),
293 __atomic_impl::__clear_padding(__i),
// load: reads into a suitably aligned local buffer via __atomic_load.
// NOTE(review): the "return *__ptr;" lines are missing from this dump.
298 load(memory_order __m = memory_order_seq_cst) const noexcept
300 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
301 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
302 __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
307 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
309 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
310 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
311 __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
// exchange: same buffering scheme as load, via __atomic_exchange.
316 exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
318 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
319 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
320 __atomic_exchange(std::__addressof(_M_i),
321 __atomic_impl::__clear_padding(__i),
// NOTE(review): the first line of the volatile exchange overload is missing.
328 memory_order __m = memory_order_seq_cst) volatile noexcept
330 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
331 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
332 __atomic_exchange(std::__addressof(_M_i),
333 __atomic_impl::__clear_padding(__i),
// compare_exchange_{weak,strong}: the fourth argument of
// __atomic_impl::__compare_exchange selects weak (true) vs strong (false).
339 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
340 memory_order __f) noexcept
342 return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
347 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
348 memory_order __f) volatile noexcept
350 return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
// Single-order forms derive the failure order via __cmpexch_failure_order.
355 compare_exchange_weak(_Tp& __e, _Tp __i,
356 memory_order __m = memory_order_seq_cst) noexcept
357 { return compare_exchange_weak(__e, __i, __m,
358 __cmpexch_failure_order(__m)); }
361 compare_exchange_weak(_Tp& __e, _Tp __i,
362 memory_order __m = memory_order_seq_cst) volatile noexcept
363 { return compare_exchange_weak(__e, __i, __m,
364 __cmpexch_failure_order(__m)); }
367 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
368 memory_order __f) noexcept
370 return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
375 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
376 memory_order __f) volatile noexcept
378 return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
383 compare_exchange_strong(_Tp& __e, _Tp __i,
384 memory_order __m = memory_order_seq_cst) noexcept
385 { return compare_exchange_strong(__e, __i, __m,
386 __cmpexch_failure_order(__m)); }
389 compare_exchange_strong(_Tp& __e, _Tp __i,
390 memory_order __m = memory_order_seq_cst) volatile noexcept
391 { return compare_exchange_strong(__e, __i, __m,
392 __cmpexch_failure_order(__m)); }
// C++20 wait/notify: wait polls via this->load(__m) inside the helper.
394 #if __cpp_lib_atomic_wait // C++ >= 20
396 wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
398 std::__atomic_wait_address_v(&_M_i, __old,
399 [__m, this] { return this->load(__m); });
402 // TODO add const volatile overload
405 notify_one() noexcept
406 { std::__atomic_notify_address(&_M_i, false); }
409 notify_all() noexcept
410 { std::__atomic_notify_address(&_M_i, true); }
411 #endif // __cpp_lib_atomic_wait
// The value-initialization helper macro is scoped to this class template.
414 #undef _GLIBCXX20_INIT
416 /// Partial specialization for pointer types.
// NOTE(review): the "struct atomic<_Tp*> {" opener, the "__base_type _M_b;"
// member declaration, member return types and the closing "};" are missing
// from this dump; everything below delegates to the _M_b base object.
417 template<typename _Tp>
420 using value_type = _Tp*;
421 using difference_type = ptrdiff_t;
423 typedef _Tp* __pointer_type;
424 typedef __atomic_base<_Tp*> __base_type;
427 atomic() noexcept = default;
428 ~atomic() noexcept = default;
429 atomic(const atomic&) = delete;
430 atomic& operator=(const atomic&) = delete;
431 atomic& operator=(const atomic&) volatile = delete;
// Implicit conversion from the raw pointer value.
433 constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
435 operator __pointer_type() const noexcept
436 { return __pointer_type(_M_b); }
438 operator __pointer_type() const volatile noexcept
439 { return __pointer_type(_M_b); }
442 operator=(__pointer_type __p) noexcept
443 { return _M_b.operator=(__p); }
446 operator=(__pointer_type __p) volatile noexcept
447 { return _M_b.operator=(__p); }
// Pointer arithmetic operators.  Since C++17 each one static_asserts that
// _Tp is an object type (arithmetic on pointer-to-function/void is invalid).
// NOTE(review): the "#endif" lines and the delegating return statements of
// the ++/-- operators are missing from this dump.
450 operator++(int) noexcept
452 #if __cplusplus >= 201703L
453 static_assert( is_object<_Tp>::value, "pointer to object type" );
459 operator++(int) volatile noexcept
461 #if __cplusplus >= 201703L
462 static_assert( is_object<_Tp>::value, "pointer to object type" );
468 operator--(int) noexcept
470 #if __cplusplus >= 201703L
471 static_assert( is_object<_Tp>::value, "pointer to object type" );
477 operator--(int) volatile noexcept
479 #if __cplusplus >= 201703L
480 static_assert( is_object<_Tp>::value, "pointer to object type" );
486 operator++() noexcept
488 #if __cplusplus >= 201703L
489 static_assert( is_object<_Tp>::value, "pointer to object type" );
495 operator++() volatile noexcept
497 #if __cplusplus >= 201703L
498 static_assert( is_object<_Tp>::value, "pointer to object type" );
504 operator--() noexcept
506 #if __cplusplus >= 201703L
507 static_assert( is_object<_Tp>::value, "pointer to object type" );
513 operator--() volatile noexcept
515 #if __cplusplus >= 201703L
516 static_assert( is_object<_Tp>::value, "pointer to object type" );
522 operator+=(ptrdiff_t __d) noexcept
524 #if __cplusplus >= 201703L
525 static_assert( is_object<_Tp>::value, "pointer to object type" );
527 return _M_b.operator+=(__d);
531 operator+=(ptrdiff_t __d) volatile noexcept
533 #if __cplusplus >= 201703L
534 static_assert( is_object<_Tp>::value, "pointer to object type" );
536 return _M_b.operator+=(__d);
540 operator-=(ptrdiff_t __d) noexcept
542 #if __cplusplus >= 201703L
543 static_assert( is_object<_Tp>::value, "pointer to object type" );
545 return _M_b.operator-=(__d);
549 operator-=(ptrdiff_t __d) volatile noexcept
551 #if __cplusplus >= 201703L
552 static_assert( is_object<_Tp>::value, "pointer to object type" );
554 return _M_b.operator-=(__d);
558 is_lock_free() const noexcept
559 { return _M_b.is_lock_free(); }
562 is_lock_free() const volatile noexcept
563 { return _M_b.is_lock_free(); }
565 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
566 static constexpr bool is_always_lock_free
567 = ATOMIC_POINTER_LOCK_FREE == 2;
// store/load/exchange with defaulted seq_cst order, forwarded to _M_b.
571 store(__pointer_type __p,
572 memory_order __m = memory_order_seq_cst) noexcept
573 { return _M_b.store(__p, __m); }
576 store(__pointer_type __p,
577 memory_order __m = memory_order_seq_cst) volatile noexcept
578 { return _M_b.store(__p, __m); }
581 load(memory_order __m = memory_order_seq_cst) const noexcept
582 { return _M_b.load(__m); }
585 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
586 { return _M_b.load(__m); }
589 exchange(__pointer_type __p,
590 memory_order __m = memory_order_seq_cst) noexcept
591 { return _M_b.exchange(__p, __m); }
594 exchange(__pointer_type __p,
595 memory_order __m = memory_order_seq_cst) volatile noexcept
596 { return _M_b.exchange(__p, __m); }
// compare_exchange family, forwarded to _M_b; single-order forms derive the
// failure order via __cmpexch_failure_order.
599 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
600 memory_order __m1, memory_order __m2) noexcept
601 { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }
604 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
606 memory_order __m2) volatile noexcept
607 { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }
610 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
611 memory_order __m = memory_order_seq_cst) noexcept
613 return compare_exchange_weak(__p1, __p2, __m,
614 __cmpexch_failure_order(__m));
618 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
619 memory_order __m = memory_order_seq_cst) volatile noexcept
621 return compare_exchange_weak(__p1, __p2, __m,
622 __cmpexch_failure_order(__m));
626 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
627 memory_order __m1, memory_order __m2) noexcept
628 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
631 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
633 memory_order __m2) volatile noexcept
634 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
637 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
638 memory_order __m = memory_order_seq_cst) noexcept
640 return _M_b.compare_exchange_strong(__p1, __p2, __m,
641 __cmpexch_failure_order(__m));
645 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
646 memory_order __m = memory_order_seq_cst) volatile noexcept
648 return _M_b.compare_exchange_strong(__p1, __p2, __m,
649 __cmpexch_failure_order(__m));
// C++20 wait/notify, forwarded to _M_b.
652 #if __cpp_lib_atomic_wait
654 wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) const noexcept
655 { _M_b.wait(__old, __m); }
657 // TODO add const volatile overload
660 notify_one() noexcept
661 { _M_b.notify_one(); }
664 notify_all() noexcept
665 { _M_b.notify_all(); }
666 #endif // __cpp_lib_atomic_wait
// fetch_add/fetch_sub (offsets in units of _Tp); object type required as for
// the arithmetic operators above.
669 fetch_add(ptrdiff_t __d,
670 memory_order __m = memory_order_seq_cst) noexcept
672 #if __cplusplus >= 201703L
673 static_assert( is_object<_Tp>::value, "pointer to object type" );
675 return _M_b.fetch_add(__d, __m);
679 fetch_add(ptrdiff_t __d,
680 memory_order __m = memory_order_seq_cst) volatile noexcept
682 #if __cplusplus >= 201703L
683 static_assert( is_object<_Tp>::value, "pointer to object type" );
685 return _M_b.fetch_add(__d, __m);
689 fetch_sub(ptrdiff_t __d,
690 memory_order __m = memory_order_seq_cst) noexcept
692 #if __cplusplus >= 201703L
693 static_assert( is_object<_Tp>::value, "pointer to object type" );
695 return _M_b.fetch_sub(__d, __m);
699 fetch_sub(ptrdiff_t __d,
700 memory_order __m = memory_order_seq_cst) volatile noexcept
702 #if __cplusplus >= 201703L
703 static_assert( is_object<_Tp>::value, "pointer to object type" );
705 return _M_b.fetch_sub(__d, __m);
710 /// Explicit specialization for char.
712 struct atomic<char> : __atomic_base<char>
714 typedef char __integral_type;
715 typedef __atomic_base<char> __base_type;
717 atomic() noexcept = default;
718 ~atomic() noexcept = default;
719 atomic(const atomic&) = delete;
720 atomic& operator=(const atomic&) = delete;
721 atomic& operator=(const atomic&) volatile = delete;
723 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
725 using __base_type::operator __integral_type;
726 using __base_type::operator=;
728 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
729 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
733 /// Explicit specialization for signed char.
735 struct atomic<signed char> : __atomic_base<signed char>
737 typedef signed char __integral_type;
738 typedef __atomic_base<signed char> __base_type;
740 atomic() noexcept= default;
741 ~atomic() noexcept = default;
742 atomic(const atomic&) = delete;
743 atomic& operator=(const atomic&) = delete;
744 atomic& operator=(const atomic&) volatile = delete;
746 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
748 using __base_type::operator __integral_type;
749 using __base_type::operator=;
751 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
752 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
756 /// Explicit specialization for unsigned char.
758 struct atomic<unsigned char> : __atomic_base<unsigned char>
760 typedef unsigned char __integral_type;
761 typedef __atomic_base<unsigned char> __base_type;
763 atomic() noexcept= default;
764 ~atomic() noexcept = default;
765 atomic(const atomic&) = delete;
766 atomic& operator=(const atomic&) = delete;
767 atomic& operator=(const atomic&) volatile = delete;
769 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
771 using __base_type::operator __integral_type;
772 using __base_type::operator=;
774 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
775 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
779 /// Explicit specialization for short.
781 struct atomic<short> : __atomic_base<short>
783 typedef short __integral_type;
784 typedef __atomic_base<short> __base_type;
786 atomic() noexcept = default;
787 ~atomic() noexcept = default;
788 atomic(const atomic&) = delete;
789 atomic& operator=(const atomic&) = delete;
790 atomic& operator=(const atomic&) volatile = delete;
792 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
794 using __base_type::operator __integral_type;
795 using __base_type::operator=;
797 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
798 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
802 /// Explicit specialization for unsigned short.
804 struct atomic<unsigned short> : __atomic_base<unsigned short>
806 typedef unsigned short __integral_type;
807 typedef __atomic_base<unsigned short> __base_type;
809 atomic() noexcept = default;
810 ~atomic() noexcept = default;
811 atomic(const atomic&) = delete;
812 atomic& operator=(const atomic&) = delete;
813 atomic& operator=(const atomic&) volatile = delete;
815 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
817 using __base_type::operator __integral_type;
818 using __base_type::operator=;
820 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
821 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
825 /// Explicit specialization for int.
827 struct atomic<int> : __atomic_base<int>
829 typedef int __integral_type;
830 typedef __atomic_base<int> __base_type;
832 atomic() noexcept = default;
833 ~atomic() noexcept = default;
834 atomic(const atomic&) = delete;
835 atomic& operator=(const atomic&) = delete;
836 atomic& operator=(const atomic&) volatile = delete;
838 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
840 using __base_type::operator __integral_type;
841 using __base_type::operator=;
843 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
844 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
848 /// Explicit specialization for unsigned int.
850 struct atomic<unsigned int> : __atomic_base<unsigned int>
852 typedef unsigned int __integral_type;
853 typedef __atomic_base<unsigned int> __base_type;
855 atomic() noexcept = default;
856 ~atomic() noexcept = default;
857 atomic(const atomic&) = delete;
858 atomic& operator=(const atomic&) = delete;
859 atomic& operator=(const atomic&) volatile = delete;
861 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
863 using __base_type::operator __integral_type;
864 using __base_type::operator=;
866 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
867 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
871 /// Explicit specialization for long.
873 struct atomic<long> : __atomic_base<long>
875 typedef long __integral_type;
876 typedef __atomic_base<long> __base_type;
878 atomic() noexcept = default;
879 ~atomic() noexcept = default;
880 atomic(const atomic&) = delete;
881 atomic& operator=(const atomic&) = delete;
882 atomic& operator=(const atomic&) volatile = delete;
884 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
886 using __base_type::operator __integral_type;
887 using __base_type::operator=;
889 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
890 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
894 /// Explicit specialization for unsigned long.
896 struct atomic<unsigned long> : __atomic_base<unsigned long>
898 typedef unsigned long __integral_type;
899 typedef __atomic_base<unsigned long> __base_type;
901 atomic() noexcept = default;
902 ~atomic() noexcept = default;
903 atomic(const atomic&) = delete;
904 atomic& operator=(const atomic&) = delete;
905 atomic& operator=(const atomic&) volatile = delete;
907 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
909 using __base_type::operator __integral_type;
910 using __base_type::operator=;
912 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
913 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
917 /// Explicit specialization for long long.
919 struct atomic<long long> : __atomic_base<long long>
921 typedef long long __integral_type;
922 typedef __atomic_base<long long> __base_type;
924 atomic() noexcept = default;
925 ~atomic() noexcept = default;
926 atomic(const atomic&) = delete;
927 atomic& operator=(const atomic&) = delete;
928 atomic& operator=(const atomic&) volatile = delete;
930 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
932 using __base_type::operator __integral_type;
933 using __base_type::operator=;
935 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
936 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
940 /// Explicit specialization for unsigned long long.
942 struct atomic<unsigned long long> : __atomic_base<unsigned long long>
944 typedef unsigned long long __integral_type;
945 typedef __atomic_base<unsigned long long> __base_type;
947 atomic() noexcept = default;
948 ~atomic() noexcept = default;
949 atomic(const atomic&) = delete;
950 atomic& operator=(const atomic&) = delete;
951 atomic& operator=(const atomic&) volatile = delete;
953 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
955 using __base_type::operator __integral_type;
956 using __base_type::operator=;
958 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
959 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
963 /// Explicit specialization for wchar_t.
965 struct atomic<wchar_t> : __atomic_base<wchar_t>
967 typedef wchar_t __integral_type;
968 typedef __atomic_base<wchar_t> __base_type;
970 atomic() noexcept = default;
971 ~atomic() noexcept = default;
972 atomic(const atomic&) = delete;
973 atomic& operator=(const atomic&) = delete;
974 atomic& operator=(const atomic&) volatile = delete;
976 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
978 using __base_type::operator __integral_type;
979 using __base_type::operator=;
981 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
982 static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#ifdef _GLIBCXX_USE_CHAR8_T
/// Explicit specialization for char8_t.
//
// Restored structure (missing "template<>", braces and both "#endif"s);
// operations are inherited from __atomic_base<char8_t>.  Only present when
// the char8_t type is available.
template<>
struct atomic<char8_t> : __atomic_base<char8_t>
{
  typedef char8_t __integral_type;
  typedef __atomic_base<char8_t> __base_type;

  atomic() noexcept = default;
  ~atomic() noexcept = default;
  atomic(const atomic&) = delete;
  atomic& operator=(const atomic&) = delete;
  atomic& operator=(const atomic&) volatile = delete;

  // Implicit conversion from the underlying integral type.
  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

  using __base_type::operator __integral_type;
  using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
  static constexpr bool is_always_lock_free
    = ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
};
#endif // _GLIBCXX_USE_CHAR8_T
1012 /// Explicit specialization for char16_t.
1014 struct atomic<char16_t> : __atomic_base<char16_t>
1016 typedef char16_t __integral_type;
1017 typedef __atomic_base<char16_t> __base_type;
1019 atomic() noexcept = default;
1020 ~atomic() noexcept = default;
1021 atomic(const atomic&) = delete;
1022 atomic& operator=(const atomic&) = delete;
1023 atomic& operator=(const atomic&) volatile = delete;
1025 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1027 using __base_type::operator __integral_type;
1028 using __base_type::operator=;
1030 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
1031 static constexpr bool is_always_lock_free
1032 = ATOMIC_CHAR16_T_LOCK_FREE == 2;
1036 /// Explicit specialization for char32_t.
1038 struct atomic<char32_t> : __atomic_base<char32_t>
1040 typedef char32_t __integral_type;
1041 typedef __atomic_base<char32_t> __base_type;
1043 atomic() noexcept = default;
1044 ~atomic() noexcept = default;
1045 atomic(const atomic&) = delete;
1046 atomic& operator=(const atomic&) = delete;
1047 atomic& operator=(const atomic&) volatile = delete;
1049 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1051 using __base_type::operator __integral_type;
1052 using __base_type::operator=;
1054 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
1055 static constexpr bool is_always_lock_free
1056 = ATOMIC_CHAR32_T_LOCK_FREE == 2;
// Convenience typedefs for the standard atomic specializations.  The
// upstream "/// atomic_xxx" Doxygen lines were lost in this dump and are
// restored here as comments.
/// atomic_bool
1062 typedef atomic<bool> atomic_bool;
/// atomic_char
1065 typedef atomic<char> atomic_char;
/// atomic_schar
1068 typedef atomic<signed char> atomic_schar;
/// atomic_uchar
1071 typedef atomic<unsigned char> atomic_uchar;
/// atomic_short
1074 typedef atomic<short> atomic_short;
/// atomic_ushort
1077 typedef atomic<unsigned short> atomic_ushort;
/// atomic_int
1080 typedef atomic<int> atomic_int;
/// atomic_uint
1083 typedef atomic<unsigned int> atomic_uint;
/// atomic_long
1086 typedef atomic<long> atomic_long;
/// atomic_ulong
1089 typedef atomic<unsigned long> atomic_ulong;
/// atomic_llong
1092 typedef atomic<long long> atomic_llong;
/// atomic_ullong
1095 typedef atomic<unsigned long long> atomic_ullong;
/// atomic_wchar_t
1098 typedef atomic<wchar_t> atomic_wchar_t;
1100 #ifdef _GLIBCXX_USE_CHAR8_T
/// atomic_char8_t
1102 typedef atomic<char8_t> atomic_char8_t;
// NOTE(review): the "#endif" matching _GLIBCXX_USE_CHAR8_T is missing from
// this dump; atomic_char16_t/atomic_char32_t are unconditional upstream.
/// atomic_char16_t
1106 typedef atomic<char16_t> atomic_char16_t;
/// atomic_char32_t
1109 typedef atomic<char32_t> atomic_char32_t;
// Typedefs for the <cstdint> atomic aliases.  NOTE(review): several "#endif"
// lines and some "/// atomic_xxx" Doxygen lines are missing from this dump.
1111 #ifdef _GLIBCXX_USE_C99_STDINT
1112 // _GLIBCXX_RESOLVE_LIB_DEFECTS
1113 // 2441. Exact-width atomic typedefs should be provided
/// atomic_int8_t
1116 typedef atomic<int8_t> atomic_int8_t;
/// atomic_uint8_t
1119 typedef atomic<uint8_t> atomic_uint8_t;
/// atomic_int16_t
1122 typedef atomic<int16_t> atomic_int16_t;
/// atomic_uint16_t
1125 typedef atomic<uint16_t> atomic_uint16_t;
/// atomic_int32_t
1128 typedef atomic<int32_t> atomic_int32_t;
/// atomic_uint32_t
1131 typedef atomic<uint32_t> atomic_uint32_t;
/// atomic_int64_t
1134 typedef atomic<int64_t> atomic_int64_t;
/// atomic_uint64_t
1137 typedef atomic<uint64_t> atomic_uint64_t;
1140 /// atomic_int_least8_t
1141 typedef atomic<int_least8_t> atomic_int_least8_t;
1143 /// atomic_uint_least8_t
1144 typedef atomic<uint_least8_t> atomic_uint_least8_t;
1146 /// atomic_int_least16_t
1147 typedef atomic<int_least16_t> atomic_int_least16_t;
1149 /// atomic_uint_least16_t
1150 typedef atomic<uint_least16_t> atomic_uint_least16_t;
1152 /// atomic_int_least32_t
1153 typedef atomic<int_least32_t> atomic_int_least32_t;
1155 /// atomic_uint_least32_t
1156 typedef atomic<uint_least32_t> atomic_uint_least32_t;
1158 /// atomic_int_least64_t
1159 typedef atomic<int_least64_t> atomic_int_least64_t;
1161 /// atomic_uint_least64_t
1162 typedef atomic<uint_least64_t> atomic_uint_least64_t;
1165 /// atomic_int_fast8_t
1166 typedef atomic<int_fast8_t> atomic_int_fast8_t;
1168 /// atomic_uint_fast8_t
1169 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
1171 /// atomic_int_fast16_t
1172 typedef atomic<int_fast16_t> atomic_int_fast16_t;
1174 /// atomic_uint_fast16_t
1175 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
1177 /// atomic_int_fast32_t
1178 typedef atomic<int_fast32_t> atomic_int_fast32_t;
1180 /// atomic_uint_fast32_t
1181 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
1183 /// atomic_int_fast64_t
1184 typedef atomic<int_fast64_t> atomic_int_fast64_t;
1186 /// atomic_uint_fast64_t
1187 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
/// atomic_intptr_t
1191 typedef atomic<intptr_t> atomic_intptr_t;
1193 /// atomic_uintptr_t
1194 typedef atomic<uintptr_t> atomic_uintptr_t;
/// atomic_size_t
1197 typedef atomic<size_t> atomic_size_t;
1199 /// atomic_ptrdiff_t
1200 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
/// atomic_intmax_t
1203 typedef atomic<intmax_t> atomic_intmax_t;
1205 /// atomic_uintmax_t
1206 typedef atomic<uintmax_t> atomic_uintmax_t;
1208 // Function definitions, atomic_flag operations.
1210 atomic_flag_test_and_set_explicit(atomic_flag* __a,
1211 memory_order __m) noexcept
1212 { return __a->test_and_set(__m); }
1215 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
1216 memory_order __m) noexcept
1217 { return __a->test_and_set(__m); }
1219 #if __cpp_lib_atomic_flag_test
1221 atomic_flag_test(const atomic_flag* __a) noexcept
1222 { return __a->test(); }
1225 atomic_flag_test(const volatile atomic_flag* __a) noexcept
1226 { return __a->test(); }
1229 atomic_flag_test_explicit(const atomic_flag* __a,
1230 memory_order __m) noexcept
1231 { return __a->test(__m); }
1234 atomic_flag_test_explicit(const volatile atomic_flag* __a,
1235 memory_order __m) noexcept
1236 { return __a->test(__m); }
1240 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
1241 { __a->clear(__m); }
1244 atomic_flag_clear_explicit(volatile atomic_flag* __a,
1245 memory_order __m) noexcept
1246 { __a->clear(__m); }
1249 atomic_flag_test_and_set(atomic_flag* __a) noexcept
1250 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1253 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1254 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1257 atomic_flag_clear(atomic_flag* __a) noexcept
1258 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1261 atomic_flag_clear(volatile atomic_flag* __a) noexcept
1262 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
#if __cpp_lib_atomic_wait
/// Block until the flag's observed value differs from __old (seq_cst load).
inline void
atomic_flag_wait(atomic_flag* __a, bool __old) noexcept
{ __a->wait(__old); }

/// Block until the flag's observed value differs from __old, loading
/// with the given memory ordering.
inline void
atomic_flag_wait_explicit(atomic_flag* __a, bool __old,
                          memory_order __m) noexcept
{ __a->wait(__old, __m); }

/// Wake at least one thread blocked in atomic_flag_wait on *__a.
inline void
atomic_flag_notify_one(atomic_flag* __a) noexcept
{ __a->notify_one(); }

/// Wake all threads blocked in atomic_flag_wait on *__a.
inline void
atomic_flag_notify_all(atomic_flag* __a) noexcept
{ __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
1283 /// @cond undocumented
1284 // _GLIBCXX_RESOLVE_LIB_DEFECTS
1285 // 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
1286 template<typename _Tp>
1287 using __atomic_val_t = __type_identity_t<_Tp>;
1288 template<typename _Tp>
1289 using __atomic_diff_t = typename atomic<_Tp>::difference_type;
1292 // [atomics.nonmembers] Non-member functions.
1293 // Function templates generally applicable to atomic types.
1294 template<typename _ITp>
1296 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1297 { return __a->is_lock_free(); }
1299 template<typename _ITp>
1301 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1302 { return __a->is_lock_free(); }
1304 template<typename _ITp>
1306 atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1307 { __a->store(__i, memory_order_relaxed); }
1309 template<typename _ITp>
1311 atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1312 { __a->store(__i, memory_order_relaxed); }
1314 template<typename _ITp>
1316 atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1317 memory_order __m) noexcept
1318 { __a->store(__i, __m); }
1320 template<typename _ITp>
1322 atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1323 memory_order __m) noexcept
1324 { __a->store(__i, __m); }
1326 template<typename _ITp>
1328 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1329 { return __a->load(__m); }
1331 template<typename _ITp>
1333 atomic_load_explicit(const volatile atomic<_ITp>* __a,
1334 memory_order __m) noexcept
1335 { return __a->load(__m); }
1337 template<typename _ITp>
1339 atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1340 memory_order __m) noexcept
1341 { return __a->exchange(__i, __m); }
1343 template<typename _ITp>
1345 atomic_exchange_explicit(volatile atomic<_ITp>* __a,
1346 __atomic_val_t<_ITp> __i,
1347 memory_order __m) noexcept
1348 { return __a->exchange(__i, __m); }
1350 template<typename _ITp>
1352 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1353 __atomic_val_t<_ITp>* __i1,
1354 __atomic_val_t<_ITp> __i2,
1356 memory_order __m2) noexcept
1357 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1359 template<typename _ITp>
1361 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1362 __atomic_val_t<_ITp>* __i1,
1363 __atomic_val_t<_ITp> __i2,
1365 memory_order __m2) noexcept
1366 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1368 template<typename _ITp>
1370 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1371 __atomic_val_t<_ITp>* __i1,
1372 __atomic_val_t<_ITp> __i2,
1374 memory_order __m2) noexcept
1375 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1377 template<typename _ITp>
1379 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1380 __atomic_val_t<_ITp>* __i1,
1381 __atomic_val_t<_ITp> __i2,
1383 memory_order __m2) noexcept
1384 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1387 template<typename _ITp>
1389 atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1390 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1392 template<typename _ITp>
1394 atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1395 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1397 template<typename _ITp>
1399 atomic_load(const atomic<_ITp>* __a) noexcept
1400 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1402 template<typename _ITp>
1404 atomic_load(const volatile atomic<_ITp>* __a) noexcept
1405 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1407 template<typename _ITp>
1409 atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1410 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1412 template<typename _ITp>
1414 atomic_exchange(volatile atomic<_ITp>* __a,
1415 __atomic_val_t<_ITp> __i) noexcept
1416 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1418 template<typename _ITp>
1420 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1421 __atomic_val_t<_ITp>* __i1,
1422 __atomic_val_t<_ITp> __i2) noexcept
1424 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1425 memory_order_seq_cst,
1426 memory_order_seq_cst);
1429 template<typename _ITp>
1431 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1432 __atomic_val_t<_ITp>* __i1,
1433 __atomic_val_t<_ITp> __i2) noexcept
1435 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1436 memory_order_seq_cst,
1437 memory_order_seq_cst);
1440 template<typename _ITp>
1442 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1443 __atomic_val_t<_ITp>* __i1,
1444 __atomic_val_t<_ITp> __i2) noexcept
1446 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1447 memory_order_seq_cst,
1448 memory_order_seq_cst);
1451 template<typename _ITp>
1453 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1454 __atomic_val_t<_ITp>* __i1,
1455 __atomic_val_t<_ITp> __i2) noexcept
1457 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1458 memory_order_seq_cst,
1459 memory_order_seq_cst);
#if __cpp_lib_atomic_wait
/// Block until the observed value of *__a differs from __old.
template<typename _Tp>
  inline void
  atomic_wait(const atomic<_Tp>* __a,
              typename std::atomic<_Tp>::value_type __old) noexcept
  { __a->wait(__old); }

/// Block until the observed value of *__a differs from __old, loading
/// with the given memory ordering.
template<typename _Tp>
  inline void
  atomic_wait_explicit(const atomic<_Tp>* __a,
                       typename std::atomic<_Tp>::value_type __old,
                       std::memory_order __m) noexcept
  { __a->wait(__old, __m); }

/// Wake at least one thread blocked in atomic_wait on *__a.
template<typename _Tp>
  inline void
  atomic_notify_one(atomic<_Tp>* __a) noexcept
  { __a->notify_one(); }

/// Wake all threads blocked in atomic_wait on *__a.
template<typename _Tp>
  inline void
  atomic_notify_all(atomic<_Tp>* __a) noexcept
  { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
1488 // Function templates for atomic_integral and atomic_pointer operations only.
1489 // Some operations (and, or, xor) are only available for atomic integrals,
1490 // which is implemented by taking a parameter of type __atomic_base<_ITp>*.
1492 template<typename _ITp>
1494 atomic_fetch_add_explicit(atomic<_ITp>* __a,
1495 __atomic_diff_t<_ITp> __i,
1496 memory_order __m) noexcept
1497 { return __a->fetch_add(__i, __m); }
1499 template<typename _ITp>
1501 atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
1502 __atomic_diff_t<_ITp> __i,
1503 memory_order __m) noexcept
1504 { return __a->fetch_add(__i, __m); }
1506 template<typename _ITp>
1508 atomic_fetch_sub_explicit(atomic<_ITp>* __a,
1509 __atomic_diff_t<_ITp> __i,
1510 memory_order __m) noexcept
1511 { return __a->fetch_sub(__i, __m); }
1513 template<typename _ITp>
1515 atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
1516 __atomic_diff_t<_ITp> __i,
1517 memory_order __m) noexcept
1518 { return __a->fetch_sub(__i, __m); }
1520 template<typename _ITp>
1522 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
1523 __atomic_val_t<_ITp> __i,
1524 memory_order __m) noexcept
1525 { return __a->fetch_and(__i, __m); }
1527 template<typename _ITp>
1529 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
1530 __atomic_val_t<_ITp> __i,
1531 memory_order __m) noexcept
1532 { return __a->fetch_and(__i, __m); }
1534 template<typename _ITp>
1536 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
1537 __atomic_val_t<_ITp> __i,
1538 memory_order __m) noexcept
1539 { return __a->fetch_or(__i, __m); }
1541 template<typename _ITp>
1543 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
1544 __atomic_val_t<_ITp> __i,
1545 memory_order __m) noexcept
1546 { return __a->fetch_or(__i, __m); }
1548 template<typename _ITp>
1550 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
1551 __atomic_val_t<_ITp> __i,
1552 memory_order __m) noexcept
1553 { return __a->fetch_xor(__i, __m); }
1555 template<typename _ITp>
1557 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
1558 __atomic_val_t<_ITp> __i,
1559 memory_order __m) noexcept
1560 { return __a->fetch_xor(__i, __m); }
1562 template<typename _ITp>
1564 atomic_fetch_add(atomic<_ITp>* __a,
1565 __atomic_diff_t<_ITp> __i) noexcept
1566 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1568 template<typename _ITp>
1570 atomic_fetch_add(volatile atomic<_ITp>* __a,
1571 __atomic_diff_t<_ITp> __i) noexcept
1572 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1574 template<typename _ITp>
1576 atomic_fetch_sub(atomic<_ITp>* __a,
1577 __atomic_diff_t<_ITp> __i) noexcept
1578 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1580 template<typename _ITp>
1582 atomic_fetch_sub(volatile atomic<_ITp>* __a,
1583 __atomic_diff_t<_ITp> __i) noexcept
1584 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1586 template<typename _ITp>
1588 atomic_fetch_and(__atomic_base<_ITp>* __a,
1589 __atomic_val_t<_ITp> __i) noexcept
1590 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1592 template<typename _ITp>
1594 atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
1595 __atomic_val_t<_ITp> __i) noexcept
1596 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1598 template<typename _ITp>
1600 atomic_fetch_or(__atomic_base<_ITp>* __a,
1601 __atomic_val_t<_ITp> __i) noexcept
1602 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1604 template<typename _ITp>
1606 atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
1607 __atomic_val_t<_ITp> __i) noexcept
1608 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1610 template<typename _ITp>
1612 atomic_fetch_xor(__atomic_base<_ITp>* __a,
1613 __atomic_val_t<_ITp> __i) noexcept
1614 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1616 template<typename _ITp>
1618 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
1619 __atomic_val_t<_ITp> __i) noexcept
1620 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
#ifdef __cpp_lib_atomic_float
  // Explicit specializations of std::atomic for floating-point types,
  // all implemented in terms of the __atomic_float base.
  template<>
    struct atomic<float> : __atomic_float<float>
    {
      atomic() noexcept = default;

      constexpr
      atomic(float __fp) noexcept : __atomic_float<float>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<float>::operator=;
    };

  template<>
    struct atomic<double> : __atomic_float<double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(double __fp) noexcept : __atomic_float<double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<double>::operator=;
    };

  template<>
    struct atomic<long double> : __atomic_float<long double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<long double>::operator=;
    };

#ifdef __STDCPP_FLOAT16_T__
  template<>
    struct atomic<_Float16> : __atomic_float<_Float16>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float16 __fp) noexcept : __atomic_float<_Float16>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float16>::operator=;
    };
#endif

#ifdef __STDCPP_FLOAT32_T__
  template<>
    struct atomic<_Float32> : __atomic_float<_Float32>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float32 __fp) noexcept : __atomic_float<_Float32>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float32>::operator=;
    };
#endif

#ifdef __STDCPP_FLOAT64_T__
  template<>
    struct atomic<_Float64> : __atomic_float<_Float64>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float64 __fp) noexcept : __atomic_float<_Float64>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float64>::operator=;
    };
#endif

#ifdef __STDCPP_FLOAT128_T__
  template<>
    struct atomic<_Float128> : __atomic_float<_Float128>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float128 __fp) noexcept : __atomic_float<_Float128>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float128>::operator=;
    };
#endif

#ifdef __STDCPP_BFLOAT16_T__
  template<>
    struct atomic<__gnu_cxx::__bfloat16_t> : __atomic_float<__gnu_cxx::__bfloat16_t>
    {
      atomic() noexcept = default;

      constexpr
      atomic(__gnu_cxx::__bfloat16_t __fp) noexcept : __atomic_float<__gnu_cxx::__bfloat16_t>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<__gnu_cxx::__bfloat16_t>::operator=;
    };
#endif
#endif // __cpp_lib_atomic_float
#ifdef __cpp_lib_atomic_ref
  /// Class template to provide atomic operations on a non-atomic variable.
  template<typename _Tp>
    struct atomic_ref : __atomic_ref<_Tp>
    {
      explicit
      atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
      { }

      atomic_ref& operator=(const atomic_ref&) = delete;

      atomic_ref(const atomic_ref&) = default;

      using __atomic_ref<_Tp>::operator=;
    };
#endif // __cpp_lib_atomic_ref
1771 #ifdef __cpp_lib_atomic_lock_free_type_aliases
1772 # ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
1773 using atomic_signed_lock_free
1774 = atomic<make_signed_t<__detail::__platform_wait_t>>;
1775 using atomic_unsigned_lock_free
1776 = atomic<make_unsigned_t<__detail::__platform_wait_t>>;
1777 # elif ATOMIC_INT_LOCK_FREE || !(ATOMIC_LONG_LOCK_FREE || ATOMIC_CHAR_LOCK_FREE)
1778 using atomic_signed_lock_free = atomic<signed int>;
1779 using atomic_unsigned_lock_free = atomic<unsigned int>;
1780 # elif ATOMIC_LONG_LOCK_FREE
1781 using atomic_signed_lock_free = atomic<signed long>;
1782 using atomic_unsigned_lock_free = atomic<unsigned long>;
1783 # elif ATOMIC_CHAR_LOCK_FREE
1784 using atomic_signed_lock_free = atomic<signed char>;
1785 using atomic_unsigned_lock_free = atomic<unsigned char>;
1787 # error "libstdc++ bug: no lock-free atomics but they were emitted in <version>"
1791 /// @} group atomics
1793 _GLIBCXX_END_NAMESPACE_VERSION
1798 #endif // _GLIBCXX_ATOMIC