1 // -*- C++ -*- header.
2
3 // Copyright (C) 2008-2014 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
9 // any later version.
10
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
15
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
19
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
24
25 // ????????????????????????????????????????????????????????????????????
26 //
27 // This is a copy of the libstdc++ header, with the trivial modification
28 // of ignoring the c++config.h include. If and when the top-level build is
29 // fixed so that target libraries can be built using the newly built
30 // compiler, we can delete this file.
31 //
32 // ????????????????????????????????????????????????????????????????????
33
34 /** @file include/atomic
35 * This is a Standard C++ Library header.
36 */
37
38 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
39 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
40
41 #ifndef _GLIBCXX_ATOMIC
42 #define _GLIBCXX_ATOMIC 1
43
44 #undef __always_inline
45 #define __always_inline __attribute__((always_inline))
46
47 // #pragma GCC system_header
48
49 // #ifndef __GXX_EXPERIMENTAL_CXX0X__
50 // # include <bits/c++0x_warning.h>
51 // #endif
52
53 // #include <bits/atomic_base.h>
54
55 namespace std // _GLIBCXX_VISIBILITY(default)
56 {
57 // _GLIBCXX_BEGIN_NAMESPACE_VERSION
58
59 /**
60 * @defgroup atomics Atomics
61 *
62 * Components for performing atomic operations.
63 * @{
64 */
65
66 /// Enumeration for memory_order
67 typedef enum memory_order
68 {
69 memory_order_relaxed,
70 memory_order_consume,
71 memory_order_acquire,
72 memory_order_release,
73 memory_order_acq_rel,
74 memory_order_seq_cst
75 } memory_order;
76
77 inline __always_inline memory_order
78 __calculate_memory_order(memory_order __m) noexcept
79 {
80 const bool __cond1 = __m == memory_order_release;
81 const bool __cond2 = __m == memory_order_acq_rel;
82 memory_order __mo1(__cond1 ? memory_order_relaxed : __m);
83 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
84 return __mo2;
85 }
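
// Illustrative note: this helper derives a valid failure order from a given
// success order for the compare_exchange overloads below that take a single
// memory_order, since a CAS failure order may not include a release
// component:
//
//   __calculate_memory_order(memory_order_release) == memory_order_relaxed
//   __calculate_memory_order(memory_order_acq_rel) == memory_order_acquire
//
// All other orders are returned unchanged.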
86
87 inline __always_inline void
88 atomic_thread_fence(memory_order __m) noexcept
89 {
90 __atomic_thread_fence (__m);
91 }
92
93 inline __always_inline void
94 atomic_signal_fence(memory_order __m) noexcept
95 {
96 __atomic_signal_fence (__m);
97 }
98
99 /// kill_dependency
100 template<typename _Tp>
101 inline _Tp
102 kill_dependency(_Tp __y) noexcept
103 {
104 _Tp __ret(__y);
105 return __ret;
106 }
107
108 /// Lock-free Property
109
110
111 #define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
112 #define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
113 #define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
114 #define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
115 #define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
116 #define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
117 #define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
118 #define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
119 #define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
120 #define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
121
122 // Base types for atomics.
123 template<typename _IntTp>
124 struct __atomic_base;
125
126 /// atomic_char
127 typedef __atomic_base<char> atomic_char;
128
129 /// atomic_schar
130 typedef __atomic_base<signed char> atomic_schar;
131
132 /// atomic_uchar
133 typedef __atomic_base<unsigned char> atomic_uchar;
134
135 /// atomic_short
136 typedef __atomic_base<short> atomic_short;
137
138 /// atomic_ushort
139 typedef __atomic_base<unsigned short> atomic_ushort;
140
141 /// atomic_int
142 typedef __atomic_base<int> atomic_int;
143
144 /// atomic_uint
145 typedef __atomic_base<unsigned int> atomic_uint;
146
147 /// atomic_long
148 typedef __atomic_base<long> atomic_long;
149
150 /// atomic_ulong
151 typedef __atomic_base<unsigned long> atomic_ulong;
152
153 /// atomic_llong
154 typedef __atomic_base<long long> atomic_llong;
155
156 /// atomic_ullong
157 typedef __atomic_base<unsigned long long> atomic_ullong;
158
159 /// atomic_wchar_t
160 typedef __atomic_base<wchar_t> atomic_wchar_t;
161
162 /// atomic_char16_t
163 typedef __atomic_base<char16_t> atomic_char16_t;
164
165 /// atomic_char32_t
166 typedef __atomic_base<char32_t> atomic_char32_t;
167
171
172 /// atomic_int_least8_t
173 typedef __atomic_base<int_least8_t> atomic_int_least8_t;
174
175 /// atomic_uint_least8_t
176 typedef __atomic_base<uint_least8_t> atomic_uint_least8_t;
177
178 /// atomic_int_least16_t
179 typedef __atomic_base<int_least16_t> atomic_int_least16_t;
180
181 /// atomic_uint_least16_t
182 typedef __atomic_base<uint_least16_t> atomic_uint_least16_t;
183
184 /// atomic_int_least32_t
185 typedef __atomic_base<int_least32_t> atomic_int_least32_t;
186
187 /// atomic_uint_least32_t
188 typedef __atomic_base<uint_least32_t> atomic_uint_least32_t;
189
190 /// atomic_int_least64_t
191 typedef __atomic_base<int_least64_t> atomic_int_least64_t;
192
193 /// atomic_uint_least64_t
194 typedef __atomic_base<uint_least64_t> atomic_uint_least64_t;
195
196
197 /// atomic_int_fast8_t
198 typedef __atomic_base<int_fast8_t> atomic_int_fast8_t;
199
200 /// atomic_uint_fast8_t
201 typedef __atomic_base<uint_fast8_t> atomic_uint_fast8_t;
202
203 /// atomic_int_fast16_t
204 typedef __atomic_base<int_fast16_t> atomic_int_fast16_t;
205
206 /// atomic_uint_fast16_t
207 typedef __atomic_base<uint_fast16_t> atomic_uint_fast16_t;
208
209 /// atomic_int_fast32_t
210 typedef __atomic_base<int_fast32_t> atomic_int_fast32_t;
211
212 /// atomic_uint_fast32_t
213 typedef __atomic_base<uint_fast32_t> atomic_uint_fast32_t;
214
215 /// atomic_int_fast64_t
216 typedef __atomic_base<int_fast64_t> atomic_int_fast64_t;
217
218 /// atomic_uint_fast64_t
219 typedef __atomic_base<uint_fast64_t> atomic_uint_fast64_t;
220
221
222 /// atomic_intptr_t
223 typedef __atomic_base<intptr_t> atomic_intptr_t;
224
225 /// atomic_uintptr_t
226 typedef __atomic_base<uintptr_t> atomic_uintptr_t;
227
228 /// atomic_size_t
229 typedef __atomic_base<size_t> atomic_size_t;
230
231 /// atomic_intmax_t
232 typedef __atomic_base<intmax_t> atomic_intmax_t;
233
234 /// atomic_uintmax_t
235 typedef __atomic_base<uintmax_t> atomic_uintmax_t;
236
237 /// atomic_ptrdiff_t
238 typedef __atomic_base<ptrdiff_t> atomic_ptrdiff_t;
239
240
241 #define ATOMIC_VAR_INIT(_VI) { _VI }
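
// Usage sketch (illustrative only; __counter is a hypothetical name):
//
//   atomic_int __counter = ATOMIC_VAR_INIT(0);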
242
243 template<typename _Tp>
244 struct atomic;
245
246 template<typename _Tp>
247 struct atomic<_Tp*>;
248
249
250 /**
251 * @brief Base type for atomic_flag.
252 *
253 * Base type is POD with data, allowing atomic_flag to derive from
254 * it and meet the standard layout type requirement. In addition to
255 * compatibility with a C interface, this allows different
256 * implementations of atomic_flag to use the same atomic operation
257 * functions, via a standard conversion to the __atomic_flag_base
258 * argument.
259 */
260 // _GLIBCXX_BEGIN_EXTERN_C
261
262 struct __atomic_flag_base
263 {
264 bool _M_i;
265 };
266
267 // _GLIBCXX_END_EXTERN_C
268
269 #define ATOMIC_FLAG_INIT { false }
270
271 /// atomic_flag
272 struct atomic_flag : public __atomic_flag_base
273 {
274 atomic_flag() noexcept = default;
275 ~atomic_flag() noexcept = default;
276 atomic_flag(const atomic_flag&) = delete;
277 atomic_flag& operator=(const atomic_flag&) = delete;
278 atomic_flag& operator=(const atomic_flag&) volatile = delete;
279
280 // Conversion to ATOMIC_FLAG_INIT.
281 atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { }
282
283 __always_inline bool
284 test_and_set(memory_order __m = memory_order_seq_cst) noexcept
285 {
286 return __atomic_test_and_set (&_M_i, __m);
287 }
288
289 __always_inline bool
290 test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
291 {
292 return __atomic_test_and_set (&_M_i, __m);
293 }
294
295 __always_inline void
296 clear(memory_order __m = memory_order_seq_cst) noexcept
297 {
298 // __glibcxx_assert(__m != memory_order_consume);
299 // __glibcxx_assert(__m != memory_order_acquire);
300 // __glibcxx_assert(__m != memory_order_acq_rel);
301
302 __atomic_clear (&_M_i, __m);
303 }
304
305 __always_inline void
306 clear(memory_order __m = memory_order_seq_cst) volatile noexcept
307 {
308 // __glibcxx_assert(__m != memory_order_consume);
309 // __glibcxx_assert(__m != memory_order_acquire);
310 // __glibcxx_assert(__m != memory_order_acq_rel);
311
312 __atomic_clear (&_M_i, __m);
313 }
314 };
315
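// Usage sketch (illustrative only; the names __lock and __locked_work are
// hypothetical): atomic_flag as a minimal test-and-set spinlock.
//
//   atomic_flag __lock = ATOMIC_FLAG_INIT;
//
//   void __locked_work()
//   {
//     while (__lock.test_and_set(memory_order_acquire))
//       ;  // spin until the current holder clears the flag
//     // ... critical section ...
//     __lock.clear(memory_order_release);
//   }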
316
317 /// Base class for atomic integrals.
318 //
319 // For each of the integral types, define atomic_[integral type] struct
320 //
321 // atomic_bool bool
322 // atomic_char char
323 // atomic_schar signed char
324 // atomic_uchar unsigned char
325 // atomic_short short
326 // atomic_ushort unsigned short
327 // atomic_int int
328 // atomic_uint unsigned int
329 // atomic_long long
330 // atomic_ulong unsigned long
331 // atomic_llong long long
332 // atomic_ullong unsigned long long
333 // atomic_char16_t char16_t
334 // atomic_char32_t char32_t
335 // atomic_wchar_t wchar_t
336 //
337 // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
338 // 8 bytes, since that is what GCC built-in functions for atomic
339 // memory access expect.
340 template<typename _ITp>
341 struct __atomic_base
342 {
343 private:
344 typedef _ITp __int_type;
345
346 __int_type _M_i;
347
348 public:
349 __atomic_base() noexcept = default;
350 ~__atomic_base() noexcept = default;
351 __atomic_base(const __atomic_base&) = delete;
352 __atomic_base& operator=(const __atomic_base&) = delete;
353 __atomic_base& operator=(const __atomic_base&) volatile = delete;
354
355 // Requires __int_type convertible to _M_i.
356 constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }
357
358 operator __int_type() const noexcept
359 { return load(); }
360
361 operator __int_type() const volatile noexcept
362 { return load(); }
363
364 __int_type
365 operator=(__int_type __i) noexcept
366 {
367 store(__i);
368 return __i;
369 }
370
371 __int_type
372 operator=(__int_type __i) volatile noexcept
373 {
374 store(__i);
375 return __i;
376 }
377
378 __int_type
379 operator++(int) noexcept
380 { return fetch_add(1); }
381
382 __int_type
383 operator++(int) volatile noexcept
384 { return fetch_add(1); }
385
386 __int_type
387 operator--(int) noexcept
388 { return fetch_sub(1); }
389
390 __int_type
391 operator--(int) volatile noexcept
392 { return fetch_sub(1); }
393
394 __int_type
395 operator++() noexcept
396 { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }
397
398 __int_type
399 operator++() volatile noexcept
400 { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }
401
402 __int_type
403 operator--() noexcept
404 { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
405
406 __int_type
407 operator--() volatile noexcept
408 { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
409
410 __int_type
411 operator+=(__int_type __i) noexcept
412 { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }
413
414 __int_type
415 operator+=(__int_type __i) volatile noexcept
416 { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }
417
418 __int_type
419 operator-=(__int_type __i) noexcept
420 { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
421
422 __int_type
423 operator-=(__int_type __i) volatile noexcept
424 { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
425
426 __int_type
427 operator&=(__int_type __i) noexcept
428 { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }
429
430 __int_type
431 operator&=(__int_type __i) volatile noexcept
432 { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }
433
434 __int_type
435 operator|=(__int_type __i) noexcept
436 { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }
437
438 __int_type
439 operator|=(__int_type __i) volatile noexcept
440 { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }
441
442 __int_type
443 operator^=(__int_type __i) noexcept
444 { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
445
446 __int_type
447 operator^=(__int_type __i) volatile noexcept
448 { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
449
450 bool
451 is_lock_free() const noexcept
452 { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }
453
454 bool
455 is_lock_free() const volatile noexcept
456 { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }
457
458 __always_inline void
459 store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
460 {
461 // __glibcxx_assert(__m != memory_order_acquire);
462 // __glibcxx_assert(__m != memory_order_acq_rel);
463 // __glibcxx_assert(__m != memory_order_consume);
464
465 __atomic_store_n(&_M_i, __i, __m);
466 }
467
468 __always_inline void
469 store(__int_type __i,
470 memory_order __m = memory_order_seq_cst) volatile noexcept
471 {
472 // __glibcxx_assert(__m != memory_order_acquire);
473 // __glibcxx_assert(__m != memory_order_acq_rel);
474 // __glibcxx_assert(__m != memory_order_consume);
475
476 __atomic_store_n(&_M_i, __i, __m);
477 }
478
479 __always_inline __int_type
480 load(memory_order __m = memory_order_seq_cst) const noexcept
481 {
482 // __glibcxx_assert(__m != memory_order_release);
483 // __glibcxx_assert(__m != memory_order_acq_rel);
484
485 return __atomic_load_n(&_M_i, __m);
486 }
487
488 __always_inline __int_type
489 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
490 {
491 // __glibcxx_assert(__m != memory_order_release);
492 // __glibcxx_assert(__m != memory_order_acq_rel);
493
494 return __atomic_load_n(&_M_i, __m);
495 }
496
497 __always_inline __int_type
498 exchange(__int_type __i,
499 memory_order __m = memory_order_seq_cst) noexcept
500 {
501 return __atomic_exchange_n(&_M_i, __i, __m);
502 }
503
504 __always_inline __int_type
505 exchange(__int_type __i,
506 memory_order __m = memory_order_seq_cst) volatile noexcept
507 {
508 return __atomic_exchange_n(&_M_i, __i, __m);
509 }
510
511 __always_inline bool
512 compare_exchange_weak(__int_type& __i1, __int_type __i2,
513 memory_order __m1, memory_order __m2) noexcept
514 {
515 // __glibcxx_assert(__m2 != memory_order_release);
516 // __glibcxx_assert(__m2 != memory_order_acq_rel);
517 // __glibcxx_assert(__m2 <= __m1);
518
519 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
520 }
521
522 __always_inline bool
523 compare_exchange_weak(__int_type& __i1, __int_type __i2,
524 memory_order __m1,
525 memory_order __m2) volatile noexcept
526 {
527 // __glibcxx_assert(__m2 != memory_order_release);
528 // __glibcxx_assert(__m2 != memory_order_acq_rel);
529 // __glibcxx_assert(__m2 <= __m1);
530
531 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
532 }
533
534 __always_inline bool
535 compare_exchange_weak(__int_type& __i1, __int_type __i2,
536 memory_order __m = memory_order_seq_cst) noexcept
537 {
538 return compare_exchange_weak(__i1, __i2, __m,
539 __calculate_memory_order(__m));
540 }
541
542 __always_inline bool
543 compare_exchange_weak(__int_type& __i1, __int_type __i2,
544 memory_order __m = memory_order_seq_cst) volatile noexcept
545 {
546 return compare_exchange_weak(__i1, __i2, __m,
547 __calculate_memory_order(__m));
548 }
549
550 __always_inline bool
551 compare_exchange_strong(__int_type& __i1, __int_type __i2,
552 memory_order __m1, memory_order __m2) noexcept
553 {
554 // __glibcxx_assert(__m2 != memory_order_release);
555 // __glibcxx_assert(__m2 != memory_order_acq_rel);
556 // __glibcxx_assert(__m2 <= __m1);
557
558 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
559 }
560
561 __always_inline bool
562 compare_exchange_strong(__int_type& __i1, __int_type __i2,
563 memory_order __m1,
564 memory_order __m2) volatile noexcept
565 {
566 // __glibcxx_assert(__m2 != memory_order_release);
567 // __glibcxx_assert(__m2 != memory_order_acq_rel);
568 // __glibcxx_assert(__m2 <= __m1);
569
570 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
571 }
572
573 __always_inline bool
574 compare_exchange_strong(__int_type& __i1, __int_type __i2,
575 memory_order __m = memory_order_seq_cst) noexcept
576 {
577 return compare_exchange_strong(__i1, __i2, __m,
578 __calculate_memory_order(__m));
579 }
580
581 __always_inline bool
582 compare_exchange_strong(__int_type& __i1, __int_type __i2,
583 memory_order __m = memory_order_seq_cst) volatile noexcept
584 {
585 return compare_exchange_strong(__i1, __i2, __m,
586 __calculate_memory_order(__m));
587 }
588
589 __always_inline __int_type
590 fetch_add(__int_type __i,
591 memory_order __m = memory_order_seq_cst) noexcept
592 { return __atomic_fetch_add(&_M_i, __i, __m); }
593
594 __always_inline __int_type
595 fetch_add(__int_type __i,
596 memory_order __m = memory_order_seq_cst) volatile noexcept
597 { return __atomic_fetch_add(&_M_i, __i, __m); }
598
599 __always_inline __int_type
600 fetch_sub(__int_type __i,
601 memory_order __m = memory_order_seq_cst) noexcept
602 { return __atomic_fetch_sub(&_M_i, __i, __m); }
603
604 __always_inline __int_type
605 fetch_sub(__int_type __i,
606 memory_order __m = memory_order_seq_cst) volatile noexcept
607 { return __atomic_fetch_sub(&_M_i, __i, __m); }
608
609 __always_inline __int_type
610 fetch_and(__int_type __i,
611 memory_order __m = memory_order_seq_cst) noexcept
612 { return __atomic_fetch_and(&_M_i, __i, __m); }
613
614 __always_inline __int_type
615 fetch_and(__int_type __i,
616 memory_order __m = memory_order_seq_cst) volatile noexcept
617 { return __atomic_fetch_and(&_M_i, __i, __m); }
618
619 __always_inline __int_type
620 fetch_or(__int_type __i,
621 memory_order __m = memory_order_seq_cst) noexcept
622 { return __atomic_fetch_or(&_M_i, __i, __m); }
623
624 __always_inline __int_type
625 fetch_or(__int_type __i,
626 memory_order __m = memory_order_seq_cst) volatile noexcept
627 { return __atomic_fetch_or(&_M_i, __i, __m); }
628
629 __always_inline __int_type
630 fetch_xor(__int_type __i,
631 memory_order __m = memory_order_seq_cst) noexcept
632 { return __atomic_fetch_xor(&_M_i, __i, __m); }
633
634 __always_inline __int_type
635 fetch_xor(__int_type __i,
636 memory_order __m = memory_order_seq_cst) volatile noexcept
637 { return __atomic_fetch_xor(&_M_i, __i, __m); }
638 };
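
// Usage sketch (illustrative only; the names are hypothetical): a shared
// event counter built on the integral base.  Relaxed ordering suffices when
// the count itself is the only shared state.
//
//   atomic_ulong __events = ATOMIC_VAR_INIT(0);
//
//   void __record_event()
//   { __events.fetch_add(1, memory_order_relaxed); }
//
//   unsigned long __event_count()
//   { return __events.load(memory_order_relaxed); }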
639
640
641 /// Partial specialization for pointer types.
642 template<typename _PTp>
643 struct __atomic_base<_PTp*>
644 {
645 private:
646 typedef _PTp* __pointer_type;
647
648 __pointer_type _M_p;
649
650 public:
651 __atomic_base() noexcept = default;
652 ~__atomic_base() noexcept = default;
653 __atomic_base(const __atomic_base&) = delete;
654 __atomic_base& operator=(const __atomic_base&) = delete;
655 __atomic_base& operator=(const __atomic_base&) volatile = delete;
656
657 // Requires __pointer_type convertible to _M_p.
658 constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }
659
660 operator __pointer_type() const noexcept
661 { return load(); }
662
663 operator __pointer_type() const volatile noexcept
664 { return load(); }
665
666 __pointer_type
667 operator=(__pointer_type __p) noexcept
668 {
669 store(__p);
670 return __p;
671 }
672
673 __pointer_type
674 operator=(__pointer_type __p) volatile noexcept
675 {
676 store(__p);
677 return __p;
678 }
679
680 __pointer_type
681 operator++(int) noexcept
682 { return fetch_add(1); }
683
684 __pointer_type
685 operator++(int) volatile noexcept
686 { return fetch_add(1); }
687
688 __pointer_type
689 operator--(int) noexcept
690 { return fetch_sub(1); }
691
692 __pointer_type
693 operator--(int) volatile noexcept
694 { return fetch_sub(1); }
695
696 __pointer_type
697 operator++() noexcept
698 { return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }
699
700 __pointer_type
701 operator++() volatile noexcept
702 { return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }
703
704 __pointer_type
705 operator--() noexcept
706 { return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }
707
708 __pointer_type
709 operator--() volatile noexcept
710 { return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }
711
712 __pointer_type
713 operator+=(ptrdiff_t __d) noexcept
714 { return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }
715
716 __pointer_type
717 operator+=(ptrdiff_t __d) volatile noexcept
718 { return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }
719
720 __pointer_type
721 operator-=(ptrdiff_t __d) noexcept
722 { return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }
723
724 __pointer_type
725 operator-=(ptrdiff_t __d) volatile noexcept
726 { return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }
727
728 bool
729 is_lock_free() const noexcept
730 { return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }
731
732 bool
733 is_lock_free() const volatile noexcept
734 { return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }
735
736 __always_inline void
737 store(__pointer_type __p,
738 memory_order __m = memory_order_seq_cst) noexcept
739 {
740 // __glibcxx_assert(__m != memory_order_acquire);
741 // __glibcxx_assert(__m != memory_order_acq_rel);
742 // __glibcxx_assert(__m != memory_order_consume);
743
744 __atomic_store_n(&_M_p, __p, __m);
745 }
746
747 __always_inline void
748 store(__pointer_type __p,
749 memory_order __m = memory_order_seq_cst) volatile noexcept
750 {
751 // __glibcxx_assert(__m != memory_order_acquire);
752 // __glibcxx_assert(__m != memory_order_acq_rel);
753 // __glibcxx_assert(__m != memory_order_consume);
754
755 __atomic_store_n(&_M_p, __p, __m);
756 }
757
758 __always_inline __pointer_type
759 load(memory_order __m = memory_order_seq_cst) const noexcept
760 {
761 // __glibcxx_assert(__m != memory_order_release);
762 // __glibcxx_assert(__m != memory_order_acq_rel);
763
764 return __atomic_load_n(&_M_p, __m);
765 }
766
767 __always_inline __pointer_type
768 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
769 {
770 // __glibcxx_assert(__m != memory_order_release);
771 // __glibcxx_assert(__m != memory_order_acq_rel);
772
773 return __atomic_load_n(&_M_p, __m);
774 }
775
776 __always_inline __pointer_type
777 exchange(__pointer_type __p,
778 memory_order __m = memory_order_seq_cst) noexcept
779 {
780 return __atomic_exchange_n(&_M_p, __p, __m);
781 }
782
783 __always_inline __pointer_type
784 exchange(__pointer_type __p,
785 memory_order __m = memory_order_seq_cst) volatile noexcept
786 {
787 return __atomic_exchange_n(&_M_p, __p, __m);
788 }
789
790 __always_inline bool
791 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
792 memory_order __m1,
793 memory_order __m2) noexcept
794 {
795 // __glibcxx_assert(__m2 != memory_order_release);
796 // __glibcxx_assert(__m2 != memory_order_acq_rel);
797 // __glibcxx_assert(__m2 <= __m1);
798
799 return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
800 }
801
802 __always_inline bool
803 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
804 memory_order __m1,
805 memory_order __m2) volatile noexcept
806 {
807 // __glibcxx_assert(__m2 != memory_order_release);
808 // __glibcxx_assert(__m2 != memory_order_acq_rel);
809 // __glibcxx_assert(__m2 <= __m1);
810
811 return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
812 }
813
814 __always_inline __pointer_type
815 fetch_add(ptrdiff_t __d,
816 memory_order __m = memory_order_seq_cst) noexcept
817 { return __atomic_fetch_add(&_M_p, __d, __m); }
818
819 __always_inline __pointer_type
820 fetch_add(ptrdiff_t __d,
821 memory_order __m = memory_order_seq_cst) volatile noexcept
822 { return __atomic_fetch_add(&_M_p, __d, __m); }
823
824 __always_inline __pointer_type
825 fetch_sub(ptrdiff_t __d,
826 memory_order __m = memory_order_seq_cst) noexcept
827 { return __atomic_fetch_sub(&_M_p, __d, __m); }
828
829 __always_inline __pointer_type
830 fetch_sub(ptrdiff_t __d,
831 memory_order __m = memory_order_seq_cst) volatile noexcept
832 { return __atomic_fetch_sub(&_M_p, __d, __m); }
833 };
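
// NB: the arithmetic members above pass the ptrdiff_t operand straight to
// the __atomic builtins, which operate on the pointer's object
// representation.  The offset is therefore applied in bytes and is not
// scaled by sizeof(_PTp); current libstdc++ scales the operand before
// calling the builtin.  For example (illustrative only), given an
// __atomic_base<int*> __p, __p.fetch_add(1) advances the stored pointer by
// one byte, not by one int.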
834
835
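// @} group atomics
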
836 /**
837 * @addtogroup atomics
838 * @{
839 */
840
841 /// atomic_bool
842 // NB: No operators or fetch-operations for this type.
843 struct atomic_bool
844 {
845 private:
846 __atomic_base<bool> _M_base;
847
848 public:
849 atomic_bool() noexcept = default;
850 ~atomic_bool() noexcept = default;
851 atomic_bool(const atomic_bool&) = delete;
852 atomic_bool& operator=(const atomic_bool&) = delete;
853 atomic_bool& operator=(const atomic_bool&) volatile = delete;
854
855 constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }
856
857 bool
858 operator=(bool __i) noexcept
859 { return _M_base.operator=(__i); }
860
861 operator bool() const noexcept
862 { return _M_base.load(); }
863
864 operator bool() const volatile noexcept
865 { return _M_base.load(); }
866
867 bool
868 is_lock_free() const noexcept { return _M_base.is_lock_free(); }
869
870 bool
871 is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
872
873 __always_inline void
874 store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
875 { _M_base.store(__i, __m); }
876
877 __always_inline void
878 store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
879 { _M_base.store(__i, __m); }
880
881 __always_inline bool
882 load(memory_order __m = memory_order_seq_cst) const noexcept
883 { return _M_base.load(__m); }
884
885 __always_inline bool
886 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
887 { return _M_base.load(__m); }
888
889 __always_inline bool
890 exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
891 { return _M_base.exchange(__i, __m); }
892
893 __always_inline bool
894 exchange(bool __i,
895 memory_order __m = memory_order_seq_cst) volatile noexcept
896 { return _M_base.exchange(__i, __m); }
897
898 __always_inline bool
899 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
900 memory_order __m2) noexcept
901 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
902
903 __always_inline bool
904 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
905 memory_order __m2) volatile noexcept
906 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
907
908 __always_inline bool
909 compare_exchange_weak(bool& __i1, bool __i2,
910 memory_order __m = memory_order_seq_cst) noexcept
911 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
912
913 __always_inline bool
914 compare_exchange_weak(bool& __i1, bool __i2,
915 memory_order __m = memory_order_seq_cst) volatile noexcept
916 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
917
918 __always_inline bool
919 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
920 memory_order __m2) noexcept
921 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
922
923 __always_inline bool
924 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
925 memory_order __m2) volatile noexcept
926 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
927
928 __always_inline bool
929 compare_exchange_strong(bool& __i1, bool __i2,
930 memory_order __m = memory_order_seq_cst) noexcept
931 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
932
933 __always_inline bool
934 compare_exchange_strong(bool& __i1, bool __i2,
935 memory_order __m = memory_order_seq_cst) volatile noexcept
936 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
937 };
938
939
940 /// atomic
941 /// 29.4.3, Generic atomic type, primary class template.
942 template<typename _Tp>
943 struct atomic
944 {
945 private:
946 _Tp _M_i;
947
948 public:
949 atomic() noexcept = default;
950 ~atomic() noexcept = default;
951 atomic(const atomic&) = delete;
952 atomic& operator=(const atomic&) = delete;
953 atomic& operator=(const atomic&) volatile = delete;
954
955 constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
956
957 operator _Tp() const noexcept
958 { return load(); }
959
960 operator _Tp() const volatile noexcept
961 { return load(); }
962
963 _Tp
964 operator=(_Tp __i) noexcept
965 { store(__i); return __i; }
966
967 _Tp
968 operator=(_Tp __i) volatile noexcept
969 { store(__i); return __i; }
970
971 bool
972 is_lock_free() const noexcept
973 { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }
974
975 bool
976 is_lock_free() const volatile noexcept
977 { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }
978
979 __always_inline void
980 store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
981 { __atomic_store(&_M_i, &__i, _m); }
982
983 __always_inline void
984 store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
985 { __atomic_store(&_M_i, &__i, _m); }
986
987 __always_inline _Tp
988 load(memory_order _m = memory_order_seq_cst) const noexcept
989 {
990 _Tp tmp;
991 __atomic_load(&_M_i, &tmp, _m);
992 return tmp;
993 }
994
995 __always_inline _Tp
996 load(memory_order _m = memory_order_seq_cst) const volatile noexcept
997 {
998 _Tp tmp;
999 __atomic_load(&_M_i, &tmp, _m);
1000 return tmp;
1001 }
1002
1003 __always_inline _Tp
1004 exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
1005 {
1006 _Tp tmp;
1007 __atomic_exchange(&_M_i, &__i, &tmp, _m);
1008 return tmp;
1009 }
1010
1011 __always_inline _Tp
1012 exchange(_Tp __i,
1013 memory_order _m = memory_order_seq_cst) volatile noexcept
1014 {
1015 _Tp tmp;
1016 __atomic_exchange(&_M_i, &__i, &tmp, _m);
1017 return tmp;
1018 }
1019
1020 __always_inline bool
1021 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
1022 memory_order __f) noexcept
1023 {
1024 return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
1025 }
1026
1027 __always_inline bool
1028 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
1029 memory_order __f) volatile noexcept
1030 {
1031 return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
1032 }
1033
1034 __always_inline bool
1035 compare_exchange_weak(_Tp& __e, _Tp __i,
1036 memory_order __m = memory_order_seq_cst) noexcept
1037 { return compare_exchange_weak(__e, __i, __m, __m); }
1038
1039 __always_inline bool
1040 compare_exchange_weak(_Tp& __e, _Tp __i,
1041 memory_order __m = memory_order_seq_cst) volatile noexcept
1042 { return compare_exchange_weak(__e, __i, __m, __m); }
1043
1044 __always_inline bool
1045 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
1046 memory_order __f) noexcept
1047 {
1048 return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
1049 }
1050
1051 __always_inline bool
1052 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
1053 memory_order __f) volatile noexcept
1054 {
1055 return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
1056 }
1057
1058 __always_inline bool
1059 compare_exchange_strong(_Tp& __e, _Tp __i,
1060 memory_order __m = memory_order_seq_cst) noexcept
1061 { return compare_exchange_strong(__e, __i, __m, __m); }
1062
1063 __always_inline bool
1064 compare_exchange_strong(_Tp& __e, _Tp __i,
1065 memory_order __m = memory_order_seq_cst) volatile noexcept
1066 { return compare_exchange_strong(__e, __i, __m, __m); }
1067 };
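
// Usage sketch (illustrative only; __pair, __val and __bump_x are
// hypothetical): the idiomatic compare_exchange_weak retry loop over a
// trivially copyable value type.
//
//   struct __pair { int __x; int __y; };
//   atomic<__pair> __val;
//
//   void __bump_x()
//   {
//     __pair __old = __val.load(memory_order_relaxed);
//     __pair __next;
//     do {
//       __next = __old;           // __old is refreshed on each failure
//       ++__next.__x;
//     } while (!__val.compare_exchange_weak(__old, __next,
//                                           memory_order_acq_rel,
//                                           memory_order_relaxed));
//   }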
1068
1069
1070 /// Partial specialization for pointer types.
1071 template<typename _Tp>
1072 struct atomic<_Tp*>
1073 {
1074 typedef _Tp* __pointer_type;
1075 typedef __atomic_base<_Tp*> __base_type;
1076 __base_type _M_b;
1077
1078 atomic() noexcept = default;
1079 ~atomic() noexcept = default;
1080 atomic(const atomic&) = delete;
1081 atomic& operator=(const atomic&) = delete;
1082 atomic& operator=(const atomic&) volatile = delete;
1083
1084 constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
1085
1086 operator __pointer_type() const noexcept
1087 { return __pointer_type(_M_b); }
1088
1089 operator __pointer_type() const volatile noexcept
1090 { return __pointer_type(_M_b); }
1091
1092 __pointer_type
1093 operator=(__pointer_type __p) noexcept
1094 { return _M_b.operator=(__p); }
1095
1096 __pointer_type
1097 operator=(__pointer_type __p) volatile noexcept
1098 { return _M_b.operator=(__p); }
1099
1100 __pointer_type
1101 operator++(int) noexcept
1102 { return _M_b++; }
1103
1104 __pointer_type
1105 operator++(int) volatile noexcept
1106 { return _M_b++; }
1107
1108 __pointer_type
1109 operator--(int) noexcept
1110 { return _M_b--; }
1111
1112 __pointer_type
1113 operator--(int) volatile noexcept
1114 { return _M_b--; }
1115
1116 __pointer_type
1117 operator++() noexcept
1118 { return ++_M_b; }
1119
1120 __pointer_type
1121 operator++() volatile noexcept
1122 { return ++_M_b; }
1123
1124 __pointer_type
1125 operator--() noexcept
1126 { return --_M_b; }
1127
1128 __pointer_type
1129 operator--() volatile noexcept
1130 { return --_M_b; }
1131
1132 __pointer_type
1133 operator+=(ptrdiff_t __d) noexcept
1134 { return _M_b.operator+=(__d); }
1135
1136 __pointer_type
1137 operator+=(ptrdiff_t __d) volatile noexcept
1138 { return _M_b.operator+=(__d); }
1139
1140 __pointer_type
1141 operator-=(ptrdiff_t __d) noexcept
1142 { return _M_b.operator-=(__d); }
1143
1144 __pointer_type
1145 operator-=(ptrdiff_t __d) volatile noexcept
1146 { return _M_b.operator-=(__d); }
1147
1148 bool
1149 is_lock_free() const noexcept
1150 { return _M_b.is_lock_free(); }
1151
1152 bool
1153 is_lock_free() const volatile noexcept
1154 { return _M_b.is_lock_free(); }
1155
1156 __always_inline void
1157 store(__pointer_type __p,
1158 memory_order __m = memory_order_seq_cst) noexcept
1159 { return _M_b.store(__p, __m); }
1160
1161 __always_inline void
1162 store(__pointer_type __p,
1163 memory_order __m = memory_order_seq_cst) volatile noexcept
1164 { return _M_b.store(__p, __m); }
1165
1166 __always_inline __pointer_type
1167 load(memory_order __m = memory_order_seq_cst) const noexcept
1168 { return _M_b.load(__m); }
1169
1170 __always_inline __pointer_type
1171 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
1172 { return _M_b.load(__m); }
1173
1174 __always_inline __pointer_type
1175 exchange(__pointer_type __p,
1176 memory_order __m = memory_order_seq_cst) noexcept
1177 { return _M_b.exchange(__p, __m); }
1178
1179 __always_inline __pointer_type
1180 exchange(__pointer_type __p,
1181 memory_order __m = memory_order_seq_cst) volatile noexcept
1182 { return _M_b.exchange(__p, __m); }
1183
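// NB: __atomic_base<_PTp*> provides only compare_exchange_strong, so the
// weak forms below forward to the strong form.  This is conforming (a
// strong CAS never fails spuriously) but may be pessimal on LL/SC targets.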
1184 __always_inline bool
1185 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
1186 memory_order __m1, memory_order __m2) noexcept
1187 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
1188
1189 __always_inline bool
1190 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
1191 memory_order __m1,
1192 memory_order __m2) volatile noexcept
1193 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
1194
1195 __always_inline bool
1196 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
1197 memory_order __m = memory_order_seq_cst) noexcept
1198 {
1199 return compare_exchange_weak(__p1, __p2, __m,
1200 __calculate_memory_order(__m));
1201 }
1202
1203 __always_inline bool
1204 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
1205 memory_order __m = memory_order_seq_cst) volatile noexcept
1206 {
1207 return compare_exchange_weak(__p1, __p2, __m,
1208 __calculate_memory_order(__m));
1209 }
1210
1211 __always_inline bool
1212 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
1213 memory_order __m1, memory_order __m2) noexcept
1214 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
1215
1216 __always_inline bool
1217 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
1218 memory_order __m1,
1219 memory_order __m2) volatile noexcept
1220 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
1221
1222 __always_inline bool
1223 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
1224 memory_order __m = memory_order_seq_cst) noexcept
1225 {
1226 return _M_b.compare_exchange_strong(__p1, __p2, __m,
1227 __calculate_memory_order(__m));
1228 }
1229
1230 __always_inline bool
1231 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
1232 memory_order __m = memory_order_seq_cst) volatile noexcept
1233 {
1234 return _M_b.compare_exchange_strong(__p1, __p2, __m,
1235 __calculate_memory_order(__m));
1236 }
1237
1238 __always_inline __pointer_type
1239 fetch_add(ptrdiff_t __d,
1240 memory_order __m = memory_order_seq_cst) noexcept
1241 { return _M_b.fetch_add(__d, __m); }
1242
1243 __always_inline __pointer_type
1244 fetch_add(ptrdiff_t __d,
1245 memory_order __m = memory_order_seq_cst) volatile noexcept
1246 { return _M_b.fetch_add(__d, __m); }
1247
1248 __always_inline __pointer_type
1249 fetch_sub(ptrdiff_t __d,
1250 memory_order __m = memory_order_seq_cst) noexcept
1251 { return _M_b.fetch_sub(__d, __m); }
1252
1253 __always_inline __pointer_type
1254 fetch_sub(ptrdiff_t __d,
1255 memory_order __m = memory_order_seq_cst) volatile noexcept
1256 { return _M_b.fetch_sub(__d, __m); }
1257 };
1258
1259
1260 /// Explicit specialization for bool.
1261 template<>
1262 struct atomic<bool> : public atomic_bool
1263 {
1264 typedef bool __integral_type;
1265 typedef atomic_bool __base_type;
1266
1267 atomic() noexcept = default;
1268 ~atomic() noexcept = default;
1269 atomic(const atomic&) = delete;
1270 atomic& operator=(const atomic&) = delete;
1271 atomic& operator=(const atomic&) volatile = delete;
1272
1273 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1274
1275 using __base_type::operator __integral_type;
1276 using __base_type::operator=;
1277 };
1278
1279 /// Explicit specialization for char.
1280 template<>
1281 struct atomic<char> : public atomic_char
1282 {
1283 typedef char __integral_type;
1284 typedef atomic_char __base_type;
1285
1286 atomic() noexcept = default;
1287 ~atomic() noexcept = default;
1288 atomic(const atomic&) = delete;
1289 atomic& operator=(const atomic&) = delete;
1290 atomic& operator=(const atomic&) volatile = delete;
1291
1292 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1293
1294 using __base_type::operator __integral_type;
1295 using __base_type::operator=;
1296 };
1297
1298 /// Explicit specialization for signed char.
1299 template<>
1300 struct atomic<signed char> : public atomic_schar
1301 {
1302 typedef signed char __integral_type;
1303 typedef atomic_schar __base_type;
1304
1305 atomic() noexcept = default;
1306 ~atomic() noexcept = default;
1307 atomic(const atomic&) = delete;
1308 atomic& operator=(const atomic&) = delete;
1309 atomic& operator=(const atomic&) volatile = delete;
1310
1311 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1312
1313 using __base_type::operator __integral_type;
1314 using __base_type::operator=;
1315 };
1316
1317 /// Explicit specialization for unsigned char.
1318 template<>
1319 struct atomic<unsigned char> : public atomic_uchar
1320 {
1321 typedef unsigned char __integral_type;
1322 typedef atomic_uchar __base_type;
1323
1324 atomic() noexcept = default;
1325 ~atomic() noexcept = default;
1326 atomic(const atomic&) = delete;
1327 atomic& operator=(const atomic&) = delete;
1328 atomic& operator=(const atomic&) volatile = delete;
1329
1330 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1331
1332 using __base_type::operator __integral_type;
1333 using __base_type::operator=;
1334 };
1335
1336 /// Explicit specialization for short.
1337 template<>
1338 struct atomic<short> : public atomic_short
1339 {
1340 typedef short __integral_type;
1341 typedef atomic_short __base_type;
1342
1343 atomic() noexcept = default;
1344 ~atomic() noexcept = default;
1345 atomic(const atomic&) = delete;
1346 atomic& operator=(const atomic&) = delete;
1347 atomic& operator=(const atomic&) volatile = delete;
1348
1349 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1350
1351 using __base_type::operator __integral_type;
1352 using __base_type::operator=;
1353 };
1354
1355 /// Explicit specialization for unsigned short.
1356 template<>
1357 struct atomic<unsigned short> : public atomic_ushort
1358 {
1359 typedef unsigned short __integral_type;
1360 typedef atomic_ushort __base_type;
1361
1362 atomic() noexcept = default;
1363 ~atomic() noexcept = default;
1364 atomic(const atomic&) = delete;
1365 atomic& operator=(const atomic&) = delete;
1366 atomic& operator=(const atomic&) volatile = delete;
1367
1368 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1369
1370 using __base_type::operator __integral_type;
1371 using __base_type::operator=;
1372 };
1373
1374 /// Explicit specialization for int.
1375 template<>
1376 struct atomic<int> : public atomic_int
1377 {
1378 typedef int __integral_type;
1379 typedef atomic_int __base_type;
1380
1381 atomic() noexcept = default;
1382 ~atomic() noexcept = default;
1383 atomic(const atomic&) = delete;
1384 atomic& operator=(const atomic&) = delete;
1385 atomic& operator=(const atomic&) volatile = delete;
1386
1387 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1388
1389 using __base_type::operator __integral_type;
1390 using __base_type::operator=;
1391 };
1392
1393 /// Explicit specialization for unsigned int.
1394 template<>
1395 struct atomic<unsigned int> : public atomic_uint
1396 {
1397 typedef unsigned int __integral_type;
1398 typedef atomic_uint __base_type;
1399
1400 atomic() noexcept = default;
1401 ~atomic() noexcept = default;
1402 atomic(const atomic&) = delete;
1403 atomic& operator=(const atomic&) = delete;
1404 atomic& operator=(const atomic&) volatile = delete;
1405
1406 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1407
1408 using __base_type::operator __integral_type;
1409 using __base_type::operator=;
1410 };
1411
1412 /// Explicit specialization for long.
1413 template<>
1414 struct atomic<long> : public atomic_long
1415 {
1416 typedef long __integral_type;
1417 typedef atomic_long __base_type;
1418
1419 atomic() noexcept = default;
1420 ~atomic() noexcept = default;
1421 atomic(const atomic&) = delete;
1422 atomic& operator=(const atomic&) = delete;
1423 atomic& operator=(const atomic&) volatile = delete;
1424
1425 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1426
1427 using __base_type::operator __integral_type;
1428 using __base_type::operator=;
1429 };
1430
1431 /// Explicit specialization for unsigned long.
1432 template<>
1433 struct atomic<unsigned long> : public atomic_ulong
1434 {
1435 typedef unsigned long __integral_type;
1436 typedef atomic_ulong __base_type;
1437
1438 atomic() noexcept = default;
1439 ~atomic() noexcept = default;
1440 atomic(const atomic&) = delete;
1441 atomic& operator=(const atomic&) = delete;
1442 atomic& operator=(const atomic&) volatile = delete;
1443
1444 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1445
1446 using __base_type::operator __integral_type;
1447 using __base_type::operator=;
1448 };
1449
1450 /// Explicit specialization for long long.
1451 template<>
1452 struct atomic<long long> : public atomic_llong
1453 {
1454 typedef long long __integral_type;
1455 typedef atomic_llong __base_type;
1456
1457 atomic() noexcept = default;
1458 ~atomic() noexcept = default;
1459 atomic(const atomic&) = delete;
1460 atomic& operator=(const atomic&) = delete;
1461 atomic& operator=(const atomic&) volatile = delete;
1462
1463 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1464
1465 using __base_type::operator __integral_type;
1466 using __base_type::operator=;
1467 };
1468
1469 /// Explicit specialization for unsigned long long.
1470 template<>
1471 struct atomic<unsigned long long> : public atomic_ullong
1472 {
1473 typedef unsigned long long __integral_type;
1474 typedef atomic_ullong __base_type;
1475
1476 atomic() noexcept = default;
1477 ~atomic() noexcept = default;
1478 atomic(const atomic&) = delete;
1479 atomic& operator=(const atomic&) = delete;
1480 atomic& operator=(const atomic&) volatile = delete;
1481
1482 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1483
1484 using __base_type::operator __integral_type;
1485 using __base_type::operator=;
1486 };
1487
1488 /// Explicit specialization for wchar_t.
1489 template<>
1490 struct atomic<wchar_t> : public atomic_wchar_t
1491 {
1492 typedef wchar_t __integral_type;
1493 typedef atomic_wchar_t __base_type;
1494
1495 atomic() noexcept = default;
1496 ~atomic() noexcept = default;
1497 atomic(const atomic&) = delete;
1498 atomic& operator=(const atomic&) = delete;
1499 atomic& operator=(const atomic&) volatile = delete;
1500
1501 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1502
1503 using __base_type::operator __integral_type;
1504 using __base_type::operator=;
1505 };
1506
1507 /// Explicit specialization for char16_t.
1508 template<>
1509 struct atomic<char16_t> : public atomic_char16_t
1510 {
1511 typedef char16_t __integral_type;
1512 typedef atomic_char16_t __base_type;
1513
1514 atomic() noexcept = default;
1515 ~atomic() noexcept = default;
1516 atomic(const atomic&) = delete;
1517 atomic& operator=(const atomic&) = delete;
1518 atomic& operator=(const atomic&) volatile = delete;
1519
1520 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1521
1522 using __base_type::operator __integral_type;
1523 using __base_type::operator=;
1524 };
1525
1526 /// Explicit specialization for char32_t.
1527 template<>
1528 struct atomic<char32_t> : public atomic_char32_t
1529 {
1530 typedef char32_t __integral_type;
1531 typedef atomic_char32_t __base_type;
1532
1533 atomic() noexcept = default;
1534 ~atomic() noexcept = default;
1535 atomic(const atomic&) = delete;
1536 atomic& operator=(const atomic&) = delete;
1537 atomic& operator=(const atomic&) volatile = delete;
1538
1539 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1540
1541 using __base_type::operator __integral_type;
1542 using __base_type::operator=;
1543 };
1544
1545
1546 // Function definitions, atomic_flag operations.
1547 inline __always_inline bool
1548 atomic_flag_test_and_set_explicit(atomic_flag* __a,
1549 memory_order __m) noexcept
1550 { return __a->test_and_set(__m); }
1551
1552 inline __always_inline bool
1553 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
1554 memory_order __m) noexcept
1555 { return __a->test_and_set(__m); }
1556
1557 inline __always_inline void
1558 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
1559 { __a->clear(__m); }
1560
1561 inline __always_inline void
1562 atomic_flag_clear_explicit(volatile atomic_flag* __a,
1563 memory_order __m) noexcept
1564 { __a->clear(__m); }
1565
1566 inline __always_inline bool
1567 atomic_flag_test_and_set(atomic_flag* __a) noexcept
1568 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1569
1570 inline __always_inline bool
1571 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1572 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1573
1574 inline __always_inline void
1575 atomic_flag_clear(atomic_flag* __a) noexcept
1576 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1577
1578 inline __always_inline void
1579 atomic_flag_clear(volatile atomic_flag* __a) noexcept
1580 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1581
1582
1583 // Function templates generally applicable to atomic types.
1584 template<typename _ITp>
1585 __always_inline bool
1586 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1587 { return __a->is_lock_free(); }
1588
1589 template<typename _ITp>
1590 __always_inline bool
1591 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1592 { return __a->is_lock_free(); }
1593
1594 template<typename _ITp>
1595 __always_inline void
1596 atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;
1597
1598 template<typename _ITp>
1599 __always_inline void
1600 atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;
1601
1602 template<typename _ITp>
1603 __always_inline void
1604 atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
1605 memory_order __m) noexcept
1606 { __a->store(__i, __m); }
1607
1608 template<typename _ITp>
1609 __always_inline void
1610 atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
1611 memory_order __m) noexcept
1612 { __a->store(__i, __m); }
1613
1614 template<typename _ITp>
1615 __always_inline _ITp
1616 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1617 { return __a->load(__m); }
1618
1619 template<typename _ITp>
1620 __always_inline _ITp
1621 atomic_load_explicit(const volatile atomic<_ITp>* __a,
1622 memory_order __m) noexcept
1623 { return __a->load(__m); }
1624
1625 template<typename _ITp>
1626 __always_inline _ITp
1627 atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
1628 memory_order __m) noexcept
1629 { return __a->exchange(__i, __m); }
1630
1631 template<typename _ITp>
1632 __always_inline _ITp
1633 atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
1634 memory_order __m) noexcept
1635 { return __a->exchange(__i, __m); }
1636
1637 template<typename _ITp>
1638 __always_inline bool
1639 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1640 _ITp* __i1, _ITp __i2,
1641 memory_order __m1,
1642 memory_order __m2) noexcept
1643 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1644
1645 template<typename _ITp>
1646 __always_inline bool
1647 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1648 _ITp* __i1, _ITp __i2,
1649 memory_order __m1,
1650 memory_order __m2) noexcept
1651 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1652
1653 template<typename _ITp>
1654 __always_inline bool
1655 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1656 _ITp* __i1, _ITp __i2,
1657 memory_order __m1,
1658 memory_order __m2) noexcept
1659 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1660
1661 template<typename _ITp>
1662 __always_inline bool
1663 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1664 _ITp* __i1, _ITp __i2,
1665 memory_order __m1,
1666 memory_order __m2) noexcept
1667 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1668
1669
1670 template<typename _ITp>
1671 __always_inline void
1672 atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
1673 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1674
1675 template<typename _ITp>
1676 __always_inline void
1677 atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
1678 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1679
1680 template<typename _ITp>
1681 __always_inline _ITp
1682 atomic_load(const atomic<_ITp>* __a) noexcept
1683 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1684
1685 template<typename _ITp>
1686 __always_inline _ITp
1687 atomic_load(const volatile atomic<_ITp>* __a) noexcept
1688 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1689
1690 template<typename _ITp>
1691 __always_inline _ITp
1692 atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
1693 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1694
1695 template<typename _ITp>
1696 __always_inline _ITp
1697 atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
1698 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1699
1700 template<typename _ITp>
1701 __always_inline bool
1702 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1703 _ITp* __i1, _ITp __i2) noexcept
1704 {
1705 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1706 memory_order_seq_cst,
1707 memory_order_seq_cst);
1708 }
1709
1710 template<typename _ITp>
1711 __always_inline bool
1712 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1713 _ITp* __i1, _ITp __i2) noexcept
1714 {
1715 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1716 memory_order_seq_cst,
1717 memory_order_seq_cst);
1718 }
1719
1720 template<typename _ITp>
1721 __always_inline bool
1722 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1723 _ITp* __i1, _ITp __i2) noexcept
1724 {
1725 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1726 memory_order_seq_cst,
1727 memory_order_seq_cst);
1728 }
1729
1730 template<typename _ITp>
1731 __always_inline bool
1732 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1733 _ITp* __i1, _ITp __i2) noexcept
1734 {
1735 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1736 memory_order_seq_cst,
1737 memory_order_seq_cst);
1738 }
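
// Usage sketch (illustrative only; the names are hypothetical): the C-style
// free functions mirror the member API.
//
//   atomic<int> __v(5);
//   int __expected = 5;
//   bool __ok = atomic_compare_exchange_strong(&__v, &__expected, 6);
//   // __ok == true and __v holds 6; had it failed, __expected would have
//   // been updated to the observed value.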
1739
1740 // Function templates for atomic_integral operations only, using
1741 // __atomic_base. Template argument should be restricted to
1742 // integral types as specified in the standard, excluding address
1743 // types.
1744 template<typename _ITp>
1745 __always_inline _ITp
1746 atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1747 memory_order __m) noexcept
1748 { return __a->fetch_add(__i, __m); }
1749
1750 template<typename _ITp>
1751 __always_inline _ITp
1752 atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1753 memory_order __m) noexcept
1754 { return __a->fetch_add(__i, __m); }
1755
1756 template<typename _ITp>
1757 __always_inline _ITp
1758 atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1759 memory_order __m) noexcept
1760 { return __a->fetch_sub(__i, __m); }
1761
1762 template<typename _ITp>
1763 __always_inline _ITp
1764 atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1765 memory_order __m) noexcept
1766 { return __a->fetch_sub(__i, __m); }
1767
1768 template<typename _ITp>
1769 __always_inline _ITp
1770 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1771 memory_order __m) noexcept
1772 { return __a->fetch_and(__i, __m); }
1773
1774 template<typename _ITp>
1775 __always_inline _ITp
1776 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1777 memory_order __m) noexcept
1778 { return __a->fetch_and(__i, __m); }
1779
1780 template<typename _ITp>
1781 __always_inline _ITp
1782 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1783 memory_order __m) noexcept
1784 { return __a->fetch_or(__i, __m); }
1785
1786 template<typename _ITp>
1787 __always_inline _ITp
1788 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1789 memory_order __m) noexcept
1790 { return __a->fetch_or(__i, __m); }
1791
1792 template<typename _ITp>
1793 __always_inline _ITp
1794 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1795 memory_order __m) noexcept
1796 { return __a->fetch_xor(__i, __m); }
1797
1798 template<typename _ITp>
1799 __always_inline _ITp
1800 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1801 memory_order __m) noexcept
1802 { return __a->fetch_xor(__i, __m); }
1803
1804 template<typename _ITp>
1805 __always_inline _ITp
1806 atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1807 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1808
1809 template<typename _ITp>
1810 __always_inline _ITp
1811 atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1812 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1813
1814 template<typename _ITp>
1815 __always_inline _ITp
1816 atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1817 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1818
1819 template<typename _ITp>
1820 __always_inline _ITp
1821 atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1822 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1823
1824 template<typename _ITp>
1825 __always_inline _ITp
1826 atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1827 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1828
1829 template<typename _ITp>
1830 __always_inline _ITp
1831 atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1832 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1833
1834 template<typename _ITp>
1835 __always_inline _ITp
1836 atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1837 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1838
1839 template<typename _ITp>
1840 __always_inline _ITp
1841 atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1842 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1843
1844 template<typename _ITp>
1845 __always_inline _ITp
1846 atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1847 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1848
1849 template<typename _ITp>
1850 __always_inline _ITp
1851 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1852 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1853
1854
1855 // Partial specializations for pointers.
1856 template<typename _ITp>
1857 __always_inline _ITp*
1858 atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1859 memory_order __m) noexcept
1860 { return __a->fetch_add(__d, __m); }
1861
1862 template<typename _ITp>
1863 __always_inline _ITp*
1864 atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
1865 memory_order __m) noexcept
1866 { return __a->fetch_add(__d, __m); }
1867
1868 template<typename _ITp>
1869 __always_inline _ITp*
1870 atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1871 { return __a->fetch_add(__d); }
1872
1873 template<typename _ITp>
1874 __always_inline _ITp*
1875 atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1876 { return __a->fetch_add(__d); }
1877
1878 template<typename _ITp>
1879 __always_inline _ITp*
1880 atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
1881 ptrdiff_t __d, memory_order __m) noexcept
1882 { return __a->fetch_sub(__d, __m); }
1883
1884 template<typename _ITp>
1885 __always_inline _ITp*
1886 atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1887 memory_order __m) noexcept
1888 { return __a->fetch_sub(__d, __m); }
1889
1890 template<typename _ITp>
1891 __always_inline _ITp*
1892 atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1893 { return __a->fetch_sub(__d); }
1894
1895 template<typename _ITp>
1896 __always_inline _ITp*
1897 atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1898 { return __a->fetch_sub(__d); }
1899 // @} group atomics
1900
1901 // _GLIBCXX_END_NAMESPACE_VERSION
1902 } // namespace
1903
1904 #endif