// -*- C++ -*- header.

// Copyright (C) 2008-2019 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.
/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */
#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((__always_inline__))
#endif
namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;

  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };
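  // Example: the HLE bits sit above __memory_order_mask, so they can be
  // ORed onto a standard order without altering it; on targets with
  // hardware lock elision (e.g. Intel TSX) this requests an elided lock
  // acquisition/release (illustrative sketch, not a definitive pattern):
  //
  //   std::atomic<int> lock_word{0};
  //   lock_word.exchange(1, std::memory_order_acquire
  //                         | std::__memory_order_hle_acquire);
  //   lock_word.store(0, std::memory_order_release
  //                      | std::__memory_order_hle_release);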
  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m | int(__mod));
  }

  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m & int(__mod));
  }
  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | (__m & __memory_order_modifier_mask));
  }
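  // For instance, the mapping implemented above gives (illustrative):
  //
  //   static_assert(__cmpexch_failure_order(memory_order_acq_rel)
  //                 == memory_order_acquire, "");
  //   static_assert(__cmpexch_failure_order(memory_order_release)
  //                 == memory_order_relaxed, "");
  //   static_assert(__cmpexch_failure_order(memory_order_seq_cst)
  //                 == memory_order_seq_cst, "");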
  _GLIBCXX_ALWAYS_INLINE void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  _GLIBCXX_ALWAYS_INLINE void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
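  // Example: a release fence before a relaxed store pairs with an acquire
  // fence after a relaxed load in another thread (illustrative sketch;
  // data and flag are assumed shared variables):
  //
  //   // Thread 1:
  //   data = 42;
  //   std::atomic_thread_fence(std::memory_order_release);
  //   flag.store(1, std::memory_order_relaxed);
  //
  //   // Thread 2:
  //   while (!flag.load(std::memory_order_relaxed)) { }
  //   std::atomic_thread_fence(std::memory_order_acquire);
  //   assert(data == 42);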
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
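  // Example: kill_dependency ends a memory_order_consume dependency chain,
  // so subsequent accesses are not dependency-ordered (illustrative sketch):
  //
  //   int* p = ptr.load(std::memory_order_consume);
  //   int  a = p[0];                      // carries a dependency from p
  //   int  b = kill_dependency(p[1]);     // dependency chain ends here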
  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;

#define ATOMIC_VAR_INIT(_VI) { _VI }

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;

  /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif
  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
   */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }
  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
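  // Example: atomic_flag as a minimal spin lock (illustrative sketch):
  //
  //   std::atomic_flag lock = ATOMIC_FLAG_INIT;
  //
  //   void critical_section()
  //   {
  //     while (lock.test_and_set(std::memory_order_acquire))
  //       { }                              // spin until previously clear
  //     // ... exclusive work ...
  //     lock.clear(std::memory_order_release);
  //   }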
  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
      using value_type = _ITp;
      using difference_type = value_type;

    private:
      typedef _ITp __int_type;

      static constexpr int _S_alignment =
        sizeof(_ITp) > alignof(_ITp) ? sizeof(_ITp) : alignof(_ITp);

      alignas(_S_alignment) __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }
      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }
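      // Note: interpreted as an address, the value -_S_alignment has all
      // bits set above log2(_S_alignment) and all bits clear below it, so
      // it is aligned to exactly _S_alignment and no more (e.g. for
      // _S_alignment == 4 it ends in ...fffc).  That makes it a
      // conservative stand-in for the address of any correctly aligned
      // _M_i.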
      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }
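      // Example: compare_exchange_weak may fail spuriously, so its
      // canonical use is a retry loop; on failure the current value is
      // written back into the first argument (illustrative sketch):
      //
      //   std::atomic<int> counter{0};
      //   int expected = counter.load(std::memory_order_relaxed);
      //   while (!counter.compare_exchange_weak(expected, expected + 1,
      //                                         std::memory_order_acq_rel,
      //                                         std::memory_order_relaxed))
      //     { }  // expected now holds the current value; retry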
      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
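  // Example: the compound operators above are sequentially consistent
  // read-modify-writes, while the fetch_* forms take an explicit order
  // (illustrative sketch using std::atomic<int>, which derives from
  // __atomic_base<int>):
  //
  //   std::atomic<int> flags{0};
  //   flags |= 0x4;                                    // seq_cst or-fetch
  //   int old = flags.fetch_or(0x1, std::memory_order_relaxed);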
  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const volatile { return __d * sizeof(_PTp); }
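      // Example: _M_type_size converts an element count to a byte count
      // for the GCC builtins, which do untyped byte arithmetic on pointer
      // operands; for _PTp == int, fetch_add(3) becomes (illustrative
      // sketch):
      //
      //   __atomic_fetch_add(&_M_p, 3 * sizeof(int), __m)  // +12 bytes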
    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }
      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_p),
            reinterpret_cast<void *>(-__alignof(_M_p)));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_p),
            reinterpret_cast<void *>(-__alignof(_M_p)));
      }
      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
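  // Example: atomic pointer arithmetic counts in elements of the pointee
  // type, mirroring ordinary pointer arithmetic (illustrative sketch):
  //
  //   int buf[8] = { };
  //   std::atomic<int*> cursor{buf};
  //   int* prev = cursor.fetch_add(2);   // prev == buf, cursor == buf + 2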
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif