1 // -*- C++ -*- header.
2
3 // Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
9 // any later version.
10
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
15
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
19
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
24
25 /** @file include/atomic
26 * This is a Standard C++ Library header.
27 */
28
29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
31
32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
34
35 #pragma GCC system_header
36
37 #ifndef __GXX_EXPERIMENTAL_CXX0X__
38 # include <bits/c++0x_warning.h>
39 #endif
40
41 #include <bits/atomic_base.h>
42 #include <bits/atomic_0.h>
43 #include <bits/atomic_2.h>
44
45 namespace std _GLIBCXX_VISIBILITY(default)
46 {
47 _GLIBCXX_BEGIN_NAMESPACE_VERSION
48
49 /**
50 * @addtogroup atomics
51 * @{
52 */
53
54 /// atomic_bool
55 // NB: No operators or fetch-operations for this type.
56 struct atomic_bool
57 {
58 private:
59 __atomic_base<bool> _M_base;
60
61 public:
62 atomic_bool() = default;
63 ~atomic_bool() = default;
64 atomic_bool(const atomic_bool&) = delete;
65 atomic_bool& operator=(const atomic_bool&) = delete;
66 atomic_bool& operator=(const atomic_bool&) volatile = delete;
67
68 constexpr atomic_bool(bool __i) : _M_base(__i) { }
69
70 bool
71 operator=(bool __i)
72 { return _M_base.operator=(__i); }
73
74 operator bool() const
75 { return _M_base.load(); }
76
77 operator bool() const volatile
78 { return _M_base.load(); }
79
80 bool
81 is_lock_free() const { return _M_base.is_lock_free(); }
82
83 bool
84 is_lock_free() const volatile { return _M_base.is_lock_free(); }
85
86 void
87 store(bool __i, memory_order __m = memory_order_seq_cst)
88 { _M_base.store(__i, __m); }
89
90 void
91 store(bool __i, memory_order __m = memory_order_seq_cst) volatile
92 { _M_base.store(__i, __m); }
93
94 bool
95 load(memory_order __m = memory_order_seq_cst) const
96 { return _M_base.load(__m); }
97
98 bool
99 load(memory_order __m = memory_order_seq_cst) const volatile
100 { return _M_base.load(__m); }
101
102 bool
103 exchange(bool __i, memory_order __m = memory_order_seq_cst)
104 { return _M_base.exchange(__i, __m); }
105
106 bool
107 exchange(bool __i, memory_order __m = memory_order_seq_cst) volatile
108 { return _M_base.exchange(__i, __m); }
109
110 bool
111 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
112 memory_order __m2)
113 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
114
115 bool
116 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
117 memory_order __m2) volatile
118 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
119
120 bool
121 compare_exchange_weak(bool& __i1, bool __i2,
122 memory_order __m = memory_order_seq_cst)
123 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
124
125 bool
126 compare_exchange_weak(bool& __i1, bool __i2,
127 memory_order __m = memory_order_seq_cst) volatile
128 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
129
130 bool
131 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
132 memory_order __m2)
133 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
134
135 bool
136 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
137 memory_order __m2) volatile
138 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
139
140 bool
141 compare_exchange_strong(bool& __i1, bool __i2,
142 memory_order __m = memory_order_seq_cst)
143 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
144
145 bool
146 compare_exchange_strong(bool& __i1, bool __i2,
147 memory_order __m = memory_order_seq_cst) volatile
148 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
149 };
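
  // A minimal usage sketch of atomic_bool as a release/acquire "ready"
  // flag.  The names __ready, __publish and __poll are illustrative only,
  // and user code is assumed to include <atomic>.
  //
  //   std::atomic_bool __ready(false);
  //
  //   void __publish()                      // producer thread
  //   { __ready.store(true, std::memory_order_release); }
  //
  //   bool __poll()                         // consumer thread
  //   { return __ready.load(std::memory_order_acquire); }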
150
151
152 /// atomic
153 /// 29.4.3, Generic atomic type, primary class template.
154 template<typename _Tp>
155 struct atomic
156 {
157 private:
158 _Tp _M_i;
159
160 public:
161 atomic() = default;
162 ~atomic() = default;
163 atomic(const atomic&) = delete;
164 atomic& operator=(const atomic&) = delete;
165 atomic& operator=(const atomic&) volatile = delete;
166
167 constexpr atomic(_Tp __i) : _M_i(__i) { }
168
169 operator _Tp() const;
170
171 operator _Tp() const volatile;
172
173 _Tp
174 operator=(_Tp __i) { store(__i); return __i; }
175
176 _Tp
177 operator=(_Tp __i) volatile { store(__i); return __i; }
178
179 bool
180 is_lock_free() const;
181
182 bool
183 is_lock_free() const volatile;
184
185 void
186 store(_Tp, memory_order = memory_order_seq_cst);
187
188 void
189 store(_Tp, memory_order = memory_order_seq_cst) volatile;
190
191 _Tp
192 load(memory_order = memory_order_seq_cst) const;
193
194 _Tp
195 load(memory_order = memory_order_seq_cst) const volatile;
196
197 _Tp
198 exchange(_Tp __i, memory_order = memory_order_seq_cst);
199
200 _Tp
201 exchange(_Tp __i, memory_order = memory_order_seq_cst) volatile;
202
203 bool
204 compare_exchange_weak(_Tp&, _Tp, memory_order, memory_order);
205
206 bool
207 compare_exchange_weak(_Tp&, _Tp, memory_order, memory_order) volatile;
208
209 bool
210 compare_exchange_weak(_Tp&, _Tp, memory_order = memory_order_seq_cst);
211
212 bool
213 compare_exchange_weak(_Tp&, _Tp,
214 memory_order = memory_order_seq_cst) volatile;
215
216 bool
217 compare_exchange_strong(_Tp&, _Tp, memory_order, memory_order);
218
219 bool
220 compare_exchange_strong(_Tp&, _Tp, memory_order, memory_order) volatile;
221
222 bool
223 compare_exchange_strong(_Tp&, _Tp, memory_order = memory_order_seq_cst);
224
225 bool
226 compare_exchange_strong(_Tp&, _Tp,
227 memory_order = memory_order_seq_cst) volatile;
228 };
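
  // A sketch of the canonical compare-exchange retry loop written against
  // the interface declared above.  The type __counter and the function
  // __record_hit are illustrative only; _Tp must be trivially copyable,
  // and whether definitions of these members exist for arbitrary _Tp
  // depends on the rest of the implementation.
  //
  //   struct __counter { int _M_hits; int _M_misses; };
  //
  //   void __record_hit(std::atomic<__counter>& __c)
  //   {
  //     __counter __old = __c.load(std::memory_order_relaxed);
  //     __counter __next;
  //     do
  //       {
  //         __next = __old;
  //         ++__next._M_hits;
  //       }
  //     while (!__c.compare_exchange_weak(__old, __next,
  //                                       std::memory_order_acq_rel,
  //                                       std::memory_order_relaxed));
  //   }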
229
230
231 /// Partial specialization for pointer types.
232 template<typename _Tp>
233 struct atomic<_Tp*> : atomic_address
234 {
235 atomic() = default;
236 ~atomic() = default;
237 atomic(const atomic&) = delete;
238 atomic& operator=(const atomic&) volatile = delete;
239
240 constexpr atomic(_Tp* __v) : atomic_address(__v) { }
241
242 void
243 store(_Tp*, memory_order = memory_order_seq_cst);
244
245 void
246 store(_Tp*, memory_order = memory_order_seq_cst) volatile;
247
248 _Tp*
249 load(memory_order = memory_order_seq_cst) const;
250
251 _Tp*
252 load(memory_order = memory_order_seq_cst) const volatile;
253
254 _Tp*
255 exchange(_Tp*, memory_order = memory_order_seq_cst);
256
257 _Tp*
258 exchange(_Tp*, memory_order = memory_order_seq_cst) volatile;
259
260 bool
261 compare_exchange_weak(_Tp*&, _Tp*, memory_order, memory_order);
262
263 bool
264 compare_exchange_weak(_Tp*&, _Tp*, memory_order, memory_order) volatile;
265
266 bool
267 compare_exchange_weak(_Tp*&, _Tp*, memory_order = memory_order_seq_cst);
268
269 bool
270 compare_exchange_weak(_Tp*&, _Tp*,
271 memory_order = memory_order_seq_cst) volatile;
272
273 bool
274 compare_exchange_strong(_Tp*&, _Tp*, memory_order, memory_order);
275
276 bool
277 compare_exchange_strong(_Tp*&, _Tp*, memory_order, memory_order) volatile;
278
279 bool
280 compare_exchange_strong(_Tp*&, _Tp*, memory_order = memory_order_seq_cst);
281
282 bool
283 compare_exchange_strong(_Tp*&, _Tp*,
284 memory_order = memory_order_seq_cst) volatile;
285
286 _Tp*
287 fetch_add(ptrdiff_t, memory_order = memory_order_seq_cst);
288
289 _Tp*
290 fetch_add(ptrdiff_t, memory_order = memory_order_seq_cst) volatile;
291
292 _Tp*
293 fetch_sub(ptrdiff_t, memory_order = memory_order_seq_cst);
294
295 _Tp*
296 fetch_sub(ptrdiff_t, memory_order = memory_order_seq_cst) volatile;
297
298 operator _Tp*() const
299 { return load(); }
300
301 operator _Tp*() const volatile
302 { return load(); }
303
304 _Tp*
305 operator=(_Tp* __v)
306 {
307 store(__v);
308 return __v;
309 }
310
311 _Tp*
312 operator=(_Tp* __v) volatile
313 {
314 store(__v);
315 return __v;
316 }
317
318 _Tp*
319 operator++(int) { return fetch_add(1); }
320
321 _Tp*
322 operator++(int) volatile { return fetch_add(1); }
323
324 _Tp*
325 operator--(int) { return fetch_sub(1); }
326
327 _Tp*
328 operator--(int) volatile { return fetch_sub(1); }
329
330 _Tp*
331 operator++() { return fetch_add(1) + 1; }
332
333 _Tp*
334 operator++() volatile { return fetch_add(1) + 1; }
335
336 _Tp*
337 operator--() { return fetch_sub(1) - 1; }
338
339 _Tp*
340 operator--() volatile { return fetch_sub(1) - 1; }
341
342 _Tp*
343 operator+=(ptrdiff_t __d)
344 { return fetch_add(__d) + __d; }
345
346 _Tp*
347 operator+=(ptrdiff_t __d) volatile
348 { return fetch_add(__d) + __d; }
349
350 _Tp*
351 operator-=(ptrdiff_t __d)
352 { return fetch_sub(__d) - __d; }
353
354 _Tp*
355 operator-=(ptrdiff_t __d) volatile
356 { return fetch_sub(__d) - __d; }
357 };
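
  // A minimal sketch of the pointer specialization.  fetch_add and the
  // +=/++ operators take an element count, not a byte count (the
  // out-of-line definitions below scale by sizeof(_Tp)); __buf, __cursor
  // and __claim_two are illustrative names only.
  //
  //   int __buf[16];
  //   std::atomic<int*> __cursor(__buf);
  //
  //   int* __claim_two()
  //   { return __cursor.fetch_add(2); }   // returns the previous pointer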
358
359 /// Explicit specialization for bool.
360 template<>
361 struct atomic<bool> : public atomic_bool
362 {
363 typedef bool __integral_type;
364 typedef atomic_bool __base_type;
365
366 atomic() = default;
367 ~atomic() = default;
368 atomic(const atomic&) = delete;
369 atomic& operator=(const atomic&) = delete;
370 atomic& operator=(const atomic&) volatile = delete;
371
372 constexpr atomic(__integral_type __i) : __base_type(__i) { }
373
374 using __base_type::operator __integral_type;
375 using __base_type::operator=;
376 };
377
378 /// Explicit specialization for char.
379 template<>
380 struct atomic<char> : public atomic_char
381 {
382 typedef char __integral_type;
383 typedef atomic_char __base_type;
384
385 atomic() = default;
386 ~atomic() = default;
387 atomic(const atomic&) = delete;
388 atomic& operator=(const atomic&) = delete;
389 atomic& operator=(const atomic&) volatile = delete;
390
391 constexpr atomic(__integral_type __i) : __base_type(__i) { }
392
393 using __base_type::operator __integral_type;
394 using __base_type::operator=;
395 };
396
397 /// Explicit specialization for signed char.
398 template<>
399 struct atomic<signed char> : public atomic_schar
400 {
401 typedef signed char __integral_type;
402 typedef atomic_schar __base_type;
403
404 atomic() = default;
405 ~atomic() = default;
406 atomic(const atomic&) = delete;
407 atomic& operator=(const atomic&) = delete;
408 atomic& operator=(const atomic&) volatile = delete;
409
410 constexpr atomic(__integral_type __i) : __base_type(__i) { }
411
412 using __base_type::operator __integral_type;
413 using __base_type::operator=;
414 };
415
416 /// Explicit specialization for unsigned char.
417 template<>
418 struct atomic<unsigned char> : public atomic_uchar
419 {
420 typedef unsigned char __integral_type;
421 typedef atomic_uchar __base_type;
422
423 atomic() = default;
424 ~atomic() = default;
425 atomic(const atomic&) = delete;
426 atomic& operator=(const atomic&) = delete;
427 atomic& operator=(const atomic&) volatile = delete;
428
429 constexpr atomic(__integral_type __i) : __base_type(__i) { }
430
431 using __base_type::operator __integral_type;
432 using __base_type::operator=;
433 };
434
435 /// Explicit specialization for short.
436 template<>
437 struct atomic<short> : public atomic_short
438 {
439 typedef short __integral_type;
440 typedef atomic_short __base_type;
441
442 atomic() = default;
443 ~atomic() = default;
444 atomic(const atomic&) = delete;
445 atomic& operator=(const atomic&) = delete;
446 atomic& operator=(const atomic&) volatile = delete;
447
448 constexpr atomic(__integral_type __i) : __base_type(__i) { }
449
450 using __base_type::operator __integral_type;
451 using __base_type::operator=;
452 };
453
454 /// Explicit specialization for unsigned short.
455 template<>
456 struct atomic<unsigned short> : public atomic_ushort
457 {
458 typedef unsigned short __integral_type;
459 typedef atomic_ushort __base_type;
460
461 atomic() = default;
462 ~atomic() = default;
463 atomic(const atomic&) = delete;
464 atomic& operator=(const atomic&) = delete;
465 atomic& operator=(const atomic&) volatile = delete;
466
467 constexpr atomic(__integral_type __i) : __base_type(__i) { }
468
469 using __base_type::operator __integral_type;
470 using __base_type::operator=;
471 };
472
473 /// Explicit specialization for int.
474 template<>
475 struct atomic<int> : atomic_int
476 {
477 typedef int __integral_type;
478 typedef atomic_int __base_type;
479
480 atomic() = default;
481 ~atomic() = default;
482 atomic(const atomic&) = delete;
483 atomic& operator=(const atomic&) = delete;
484 atomic& operator=(const atomic&) volatile = delete;
485
486 constexpr atomic(__integral_type __i) : __base_type(__i) { }
487
488 using __base_type::operator __integral_type;
489 using __base_type::operator=;
490 };
491
492 /// Explicit specialization for unsigned int.
493 template<>
494 struct atomic<unsigned int> : public atomic_uint
495 {
496 typedef unsigned int __integral_type;
497 typedef atomic_uint __base_type;
498
499 atomic() = default;
500 ~atomic() = default;
501 atomic(const atomic&) = delete;
502 atomic& operator=(const atomic&) = delete;
503 atomic& operator=(const atomic&) volatile = delete;
504
505 constexpr atomic(__integral_type __i) : __base_type(__i) { }
506
507 using __base_type::operator __integral_type;
508 using __base_type::operator=;
509 };
510
511 /// Explicit specialization for long.
512 template<>
513 struct atomic<long> : public atomic_long
514 {
515 typedef long __integral_type;
516 typedef atomic_long __base_type;
517
518 atomic() = default;
519 ~atomic() = default;
520 atomic(const atomic&) = delete;
521 atomic& operator=(const atomic&) = delete;
522 atomic& operator=(const atomic&) volatile = delete;
523
524 constexpr atomic(__integral_type __i) : __base_type(__i) { }
525
526 using __base_type::operator __integral_type;
527 using __base_type::operator=;
528 };
529
530 /// Explicit specialization for unsigned long.
531 template<>
532 struct atomic<unsigned long> : public atomic_ulong
533 {
534 typedef unsigned long __integral_type;
535 typedef atomic_ulong __base_type;
536
537 atomic() = default;
538 ~atomic() = default;
539 atomic(const atomic&) = delete;
540 atomic& operator=(const atomic&) = delete;
541 atomic& operator=(const atomic&) volatile = delete;
542
543 constexpr atomic(__integral_type __i) : __base_type(__i) { }
544
545 using __base_type::operator __integral_type;
546 using __base_type::operator=;
547 };
548
549 /// Explicit specialization for long long.
550 template<>
551 struct atomic<long long> : public atomic_llong
552 {
553 typedef long long __integral_type;
554 typedef atomic_llong __base_type;
555
556 atomic() = default;
557 ~atomic() = default;
558 atomic(const atomic&) = delete;
559 atomic& operator=(const atomic&) = delete;
560 atomic& operator=(const atomic&) volatile = delete;
561
562 constexpr atomic(__integral_type __i) : __base_type(__i) { }
563
564 using __base_type::operator __integral_type;
565 using __base_type::operator=;
566 };
567
568 /// Explicit specialization for unsigned long long.
569 template<>
570 struct atomic<unsigned long long> : public atomic_ullong
571 {
572 typedef unsigned long long __integral_type;
573 typedef atomic_ullong __base_type;
574
575 atomic() = default;
576 ~atomic() = default;
577 atomic(const atomic&) = delete;
578 atomic& operator=(const atomic&) = delete;
579 atomic& operator=(const atomic&) volatile = delete;
580
581 constexpr atomic(__integral_type __i) : __base_type(__i) { }
582
583 using __base_type::operator __integral_type;
584 using __base_type::operator=;
585 };
586
587 /// Explicit specialization for wchar_t.
588 template<>
589 struct atomic<wchar_t> : public atomic_wchar_t
590 {
591 typedef wchar_t __integral_type;
592 typedef atomic_wchar_t __base_type;
593
594 atomic() = default;
595 ~atomic() = default;
596 atomic(const atomic&) = delete;
597 atomic& operator=(const atomic&) = delete;
598 atomic& operator=(const atomic&) volatile = delete;
599
600 constexpr atomic(__integral_type __i) : __base_type(__i) { }
601
602 using __base_type::operator __integral_type;
603 using __base_type::operator=;
604 };
605
606 /// Explicit specialization for char16_t.
607 template<>
608 struct atomic<char16_t> : public atomic_char16_t
609 {
610 typedef char16_t __integral_type;
611 typedef atomic_char16_t __base_type;
612
613 atomic() = default;
614 ~atomic() = default;
615 atomic(const atomic&) = delete;
616 atomic& operator=(const atomic&) = delete;
617 atomic& operator=(const atomic&) volatile = delete;
618
619 constexpr atomic(__integral_type __i) : __base_type(__i) { }
620
621 using __base_type::operator __integral_type;
622 using __base_type::operator=;
623 };
624
625 /// Explicit specialization for char32_t.
626 template<>
627 struct atomic<char32_t> : public atomic_char32_t
628 {
629 typedef char32_t __integral_type;
630 typedef atomic_char32_t __base_type;
631
632 atomic() = default;
633 ~atomic() = default;
634 atomic(const atomic&) = delete;
635 atomic& operator=(const atomic&) = delete;
636 atomic& operator=(const atomic&) volatile = delete;
637
638 constexpr atomic(__integral_type __i) : __base_type(__i) { }
639
640 using __base_type::operator __integral_type;
641 using __base_type::operator=;
642 };
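
  // Each integral specialization only re-exports the conversion and
  // assignment operators; everything else (loads, stores, fetch-ops,
  // ++/--, |= and so on) is inherited from its __base_type.  A minimal
  // sketch, with __hits and __count_one as illustrative names:
  //
  //   std::atomic<int> __hits(0);
  //
  //   void __count_one()
  //   {
  //     __hits.fetch_add(1, std::memory_order_relaxed);  // explicit order
  //     ++__hits;                                        // seq_cst
  //   }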
643
644
645 template<typename _Tp>
646 _Tp*
647 atomic<_Tp*>::load(memory_order __m) const
648 { return static_cast<_Tp*>(atomic_address::load(__m)); }
649
650 template<typename _Tp>
651 _Tp*
652 atomic<_Tp*>::load(memory_order __m) const volatile
653 { return static_cast<_Tp*>(atomic_address::load(__m)); }
654
655 template<typename _Tp>
656 _Tp*
657 atomic<_Tp*>::exchange(_Tp* __v, memory_order __m)
658 { return static_cast<_Tp*>(atomic_address::exchange(__v, __m)); }
659
660 template<typename _Tp>
661 _Tp*
662 atomic<_Tp*>::exchange(_Tp* __v, memory_order __m) volatile
663 { return static_cast<_Tp*>(atomic_address::exchange(__v, __m)); }
664
665 template<typename _Tp>
666 bool
667 atomic<_Tp*>::compare_exchange_weak(_Tp*& __r, _Tp* __v, memory_order __m1,
668 memory_order __m2)
669 {
670 void** __vr = reinterpret_cast<void**>(&__r);
671 void* __vv = static_cast<void*>(__v);
672 return atomic_address::compare_exchange_weak(*__vr, __vv, __m1, __m2);
673 }
674
675 template<typename _Tp>
676 bool
677 atomic<_Tp*>::compare_exchange_weak(_Tp*& __r, _Tp* __v, memory_order __m1,
678 memory_order __m2) volatile
679 {
680 void** __vr = reinterpret_cast<void**>(&__r);
681 void* __vv = static_cast<void*>(__v);
682 return atomic_address::compare_exchange_weak(*__vr, __vv, __m1, __m2);
683 }
684
685 template<typename _Tp>
686 bool
687 atomic<_Tp*>::compare_exchange_weak(_Tp*& __r, _Tp* __v, memory_order __m)
688 {
689 return compare_exchange_weak(__r, __v, __m,
690 __calculate_memory_order(__m));
691 }
692
693 template<typename _Tp>
694 bool
695 atomic<_Tp*>::compare_exchange_weak(_Tp*& __r, _Tp* __v,
696 memory_order __m) volatile
697 {
698 return compare_exchange_weak(__r, __v, __m,
699 __calculate_memory_order(__m));
700 }
701
702 template<typename _Tp>
703 bool
704 atomic<_Tp*>::compare_exchange_strong(_Tp*& __r, _Tp* __v,
705 memory_order __m1,
706 memory_order __m2)
707 {
708 void** __vr = reinterpret_cast<void**>(&__r);
709 void* __vv = static_cast<void*>(__v);
710 return atomic_address::compare_exchange_strong(*__vr, __vv, __m1, __m2);
711 }
712
713 template<typename _Tp>
714 bool
715 atomic<_Tp*>::compare_exchange_strong(_Tp*& __r, _Tp* __v,
716 memory_order __m1,
717 memory_order __m2) volatile
718 {
719 void** __vr = reinterpret_cast<void**>(&__r);
720 void* __vv = static_cast<void*>(__v);
721 return atomic_address::compare_exchange_strong(*__vr, __vv, __m1, __m2);
722 }
723
724 template<typename _Tp>
725 bool
726 atomic<_Tp*>::compare_exchange_strong(_Tp*& __r, _Tp* __v,
727 memory_order __m)
728 {
729 return compare_exchange_strong(__r, __v, __m,
730 __calculate_memory_order(__m));
731 }
732
733 template<typename _Tp>
734 bool
735 atomic<_Tp*>::compare_exchange_strong(_Tp*& __r, _Tp* __v,
736 memory_order __m) volatile
737 {
738 return compare_exchange_strong(__r, __v, __m,
739 __calculate_memory_order(__m));
740 }
741
742 template<typename _Tp>
743 _Tp*
744 atomic<_Tp*>::fetch_add(ptrdiff_t __d, memory_order __m)
745 {
746 void* __p = atomic_fetch_add_explicit(this, sizeof(_Tp) * __d, __m);
747 return static_cast<_Tp*>(__p);
748 }
749
750 template<typename _Tp>
751 _Tp*
752 atomic<_Tp*>::fetch_add(ptrdiff_t __d, memory_order __m) volatile
753 {
754 void* __p = atomic_fetch_add_explicit(this, sizeof(_Tp) * __d, __m);
755 return static_cast<_Tp*>(__p);
756 }
757
758 template<typename _Tp>
759 _Tp*
760 atomic<_Tp*>::fetch_sub(ptrdiff_t __d, memory_order __m)
761 {
762 void* __p = atomic_fetch_sub_explicit(this, sizeof(_Tp) * __d, __m);
763 return static_cast<_Tp*>(__p);
764 }
765
766 template<typename _Tp>
767 _Tp*
768 atomic<_Tp*>::fetch_sub(ptrdiff_t __d, memory_order __m) volatile
769 {
770 void* __p = atomic_fetch_sub_explicit(this, sizeof(_Tp) * __d, __m);
771 return static_cast<_Tp*>(__p);
772 }
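
  // The out-of-line definitions above route every typed pointer operation
  // through atomic_address: compare-exchange goes through void*, and
  // fetch_add/fetch_sub convert the element count into a byte offset.  A
  // minimal sketch of the usual compare_exchange_weak publication loop,
  // with __node, __head and __push as illustrative names:
  //
  //   struct __node { int _M_value; __node* _M_next; };
  //
  //   std::atomic<__node*> __head(0);
  //
  //   void __push(__node* __n)
  //   {
  //     __n->_M_next = __head.load(std::memory_order_relaxed);
  //     while (!__head.compare_exchange_weak(__n->_M_next, __n,
  //                                          std::memory_order_release,
  //                                          std::memory_order_relaxed))
  //       { }
  //   }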
773
774
775 // Function definitions, atomic_flag operations.
776 inline bool
777 atomic_flag_test_and_set_explicit(atomic_flag* __a, memory_order __m)
778 { return __a->test_and_set(__m); }
779
780 inline bool
781 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
782 memory_order __m)
783 { return __a->test_and_set(__m); }
784
785 inline void
786 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m)
787 { __a->clear(__m); }
788
789 inline void
790 atomic_flag_clear_explicit(volatile atomic_flag* __a, memory_order __m)
791 { __a->clear(__m); }
792
793 inline bool
794 atomic_flag_test_and_set(atomic_flag* __a)
795 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
796
797 inline bool
798 atomic_flag_test_and_set(volatile atomic_flag* __a)
799 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
800
801 inline void
802 atomic_flag_clear(atomic_flag* __a)
803 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
804
805 inline void
806 atomic_flag_clear(volatile atomic_flag* __a)
807 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
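
  // A minimal spin-lock sketch built from the atomic_flag free functions
  // above.  __lock, __acquire and __release are illustrative names, and
  // ATOMIC_FLAG_INIT is assumed to come from <bits/atomic_base.h>.
  //
  //   std::atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   void __acquire()
  //   {
  //     while (std::atomic_flag_test_and_set_explicit(&__lock,
  //                                                   std::memory_order_acquire))
  //       { }
  //   }
  //
  //   void __release()
  //   { std::atomic_flag_clear_explicit(&__lock, std::memory_order_release); }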
808
809
810 // Function definitions, atomic_address operations.
811 inline bool
812 atomic_is_lock_free(const atomic_address* __a)
813 { return __a->is_lock_free(); }
814
815 inline bool
816 atomic_is_lock_free(const volatile atomic_address* __a)
817 { return __a->is_lock_free(); }
818
819 inline void
820 atomic_init(atomic_address* __a, void* __v);
821
822 inline void
823 atomic_init(volatile atomic_address* __a, void* __v);
824
825 inline void
826 atomic_store_explicit(atomic_address* __a, void* __v, memory_order __m)
827 { __a->store(__v, __m); }
828
829 inline void
830 atomic_store_explicit(volatile atomic_address* __a, void* __v,
831 memory_order __m)
832 { __a->store(__v, __m); }
833
834 inline void
835 atomic_store(atomic_address* __a, void* __v)
836 { __a->store(__v); }
837
838 inline void
839 atomic_store(volatile atomic_address* __a, void* __v)
840 { __a->store(__v); }
841
842 inline void*
843 atomic_load_explicit(const atomic_address* __a, memory_order __m)
844 { return __a->load(__m); }
845
846 inline void*
847 atomic_load_explicit(const volatile atomic_address* __a, memory_order __m)
848 { return __a->load(__m); }
849
850 inline void*
851 atomic_load(const atomic_address* __a)
852 { return __a->load(); }
853
854 inline void*
855 atomic_load(const volatile atomic_address* __a)
856 { return __a->load(); }
857
858 inline void*
859 atomic_exchange_explicit(atomic_address* __a, void* __v, memory_order __m)
860 { return __a->exchange(__v, __m); }
861
862 inline void*
863 atomic_exchange_explicit(volatile atomic_address* __a, void* __v,
864 memory_order __m)
865 { return __a->exchange(__v, __m); }
866
867 inline void*
868 atomic_exchange(atomic_address* __a, void* __v)
869 { return __a->exchange(__v); }
870
871 inline void*
872 atomic_exchange(volatile atomic_address* __a, void* __v)
873 { return __a->exchange(__v); }
874
875
876 inline bool
877 atomic_compare_exchange_weak_explicit(atomic_address* __a,
878 void** __v1, void* __v2,
879 memory_order __m1, memory_order __m2)
880 { return __a->compare_exchange_weak(*__v1, __v2, __m1, __m2); }
881
882 inline bool
883 atomic_compare_exchange_weak_explicit(volatile atomic_address* __a,
884 void** __v1, void* __v2,
885 memory_order __m1, memory_order __m2)
886 { return __a->compare_exchange_weak(*__v1, __v2, __m1, __m2); }
887
888 inline bool
889 atomic_compare_exchange_weak(atomic_address* __a, void** __v1, void* __v2)
890 {
891 return __a->compare_exchange_weak(*__v1, __v2, memory_order_seq_cst,
892 memory_order_seq_cst);
893 }
894
895 inline bool
896 atomic_compare_exchange_weak(volatile atomic_address* __a, void** __v1,
897 void* __v2)
898 {
899 return __a->compare_exchange_weak(*__v1, __v2, memory_order_seq_cst,
900 memory_order_seq_cst);
901 }
902
903 inline bool
904 atomic_compare_exchange_strong_explicit(atomic_address* __a,
905 void** __v1, void* __v2,
906 memory_order __m1, memory_order __m2)
907 { return __a->compare_exchange_strong(*__v1, __v2, __m1, __m2); }
908
909 inline bool
910 atomic_compare_exchange_strong_explicit(volatile atomic_address* __a,
911 void** __v1, void* __v2,
912 memory_order __m1, memory_order __m2)
913 { return __a->compare_exchange_strong(*__v1, __v2, __m1, __m2); }
914
915 inline bool
916 atomic_compare_exchange_strong(atomic_address* __a, void** __v1, void* __v2)
917 {
918 return __a->compare_exchange_strong(*__v1, __v2, memory_order_seq_cst,
919 memory_order_seq_cst);
920 }
921
922 inline bool
923 atomic_compare_exchange_strong(volatile atomic_address* __a,
924 void** __v1, void* __v2)
925 {
926 return __a->compare_exchange_strong(*__v1, __v2, memory_order_seq_cst,
927 memory_order_seq_cst);
928 }
929
930 inline void*
931 atomic_fetch_add_explicit(atomic_address* __a, ptrdiff_t __d,
932 memory_order __m)
933 { return __a->fetch_add(__d, __m); }
934
935 inline void*
936 atomic_fetch_add_explicit(volatile atomic_address* __a, ptrdiff_t __d,
937 memory_order __m)
938 { return __a->fetch_add(__d, __m); }
939
940 inline void*
941 atomic_fetch_add(atomic_address* __a, ptrdiff_t __d)
942 { return __a->fetch_add(__d); }
943
944 inline void*
945 atomic_fetch_add(volatile atomic_address* __a, ptrdiff_t __d)
946 { return __a->fetch_add(__d); }
947
948 inline void*
949 atomic_fetch_sub_explicit(atomic_address* __a, ptrdiff_t __d,
950 memory_order __m)
951 { return __a->fetch_sub(__d, __m); }
952
953 inline void*
954 atomic_fetch_sub_explicit(volatile atomic_address* __a, ptrdiff_t __d,
955 memory_order __m)
956 { return __a->fetch_sub(__d, __m); }
957
958 inline void*
959 atomic_fetch_sub(atomic_address* __a, ptrdiff_t __d)
960 { return __a->fetch_sub(__d); }
961
962 inline void*
963 atomic_fetch_sub(volatile atomic_address* __a, ptrdiff_t __d)
964 { return __a->fetch_sub(__d); }
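
  // A minimal sketch of the C-style atomic_address interface above:
  // install a pointer only if the slot is still empty.  __slot and
  // __install are illustrative names, and atomic_address is assumed to be
  // constructible from void* as provided by the bits/ headers.
  //
  //   std::atomic_address __slot(static_cast<void*>(0));
  //
  //   bool __install(void* __p)
  //   {
  //     void* __expected = 0;
  //     return std::atomic_compare_exchange_strong(&__slot, &__expected, __p);
  //   }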
965
966
967 // Function definitions, atomic_bool operations.
968 inline bool
969 atomic_is_lock_free(const atomic_bool* __a)
970 { return __a->is_lock_free(); }
971
972 inline bool
973 atomic_is_lock_free(const volatile atomic_bool* __a)
974 { return __a->is_lock_free(); }
975
976 inline void
977 atomic_init(atomic_bool* __a, bool __b);
978
979 inline void
980 atomic_init(volatile atomic_bool* __a, bool __b);
981
982 inline void
983 atomic_store_explicit(atomic_bool* __a, bool __i, memory_order __m)
984 { __a->store(__i, __m); }
985
986 inline void
987 atomic_store_explicit(volatile atomic_bool* __a, bool __i, memory_order __m)
988 { __a->store(__i, __m); }
989
990 inline void
991 atomic_store(atomic_bool* __a, bool __i)
992 { __a->store(__i); }
993
994 inline void
995 atomic_store(volatile atomic_bool* __a, bool __i)
996 { __a->store(__i); }
997
998 inline bool
999 atomic_load_explicit(const atomic_bool* __a, memory_order __m)
1000 { return __a->load(__m); }
1001
1002 inline bool
1003 atomic_load_explicit(const volatile atomic_bool* __a, memory_order __m)
1004 { return __a->load(__m); }
1005
1006 inline bool
1007 atomic_load(const atomic_bool* __a)
1008 { return __a->load(); }
1009
1010 inline bool
1011 atomic_load(const volatile atomic_bool* __a)
1012 { return __a->load(); }
1013
1014 inline bool
1015 atomic_exchange_explicit(atomic_bool* __a, bool __i, memory_order __m)
1016 { return __a->exchange(__i, __m); }
1017
1018 inline bool
1019 atomic_exchange_explicit(volatile atomic_bool* __a, bool __i,
1020 memory_order __m)
1021 { return __a->exchange(__i, __m); }
1022
1023 inline bool
1024 atomic_exchange(atomic_bool* __a, bool __i)
1025 { return __a->exchange(__i); }
1026
1027 inline bool
1028 atomic_exchange(volatile atomic_bool* __a, bool __i)
1029 { return __a->exchange(__i); }
1030
1031 inline bool
1032 atomic_compare_exchange_weak_explicit(atomic_bool* __a, bool* __i1,
1033 bool __i2, memory_order __m1,
1034 memory_order __m2)
1035 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1036
1037 inline bool
1038 atomic_compare_exchange_weak_explicit(volatile atomic_bool* __a, bool* __i1,
1039 bool __i2, memory_order __m1,
1040 memory_order __m2)
1041 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1042
1043 inline bool
1044 atomic_compare_exchange_weak(atomic_bool* __a, bool* __i1, bool __i2)
1045 {
1046 return __a->compare_exchange_weak(*__i1, __i2, memory_order_seq_cst,
1047 memory_order_seq_cst);
1048 }
1049
1050 inline bool
1051 atomic_compare_exchange_weak(volatile atomic_bool* __a, bool* __i1, bool __i2)
1052 {
1053 return __a->compare_exchange_weak(*__i1, __i2, memory_order_seq_cst,
1054 memory_order_seq_cst);
1055 }
1056
1057 inline bool
1058 atomic_compare_exchange_strong_explicit(atomic_bool* __a,
1059 bool* __i1, bool __i2,
1060 memory_order __m1, memory_order __m2)
1061 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1062
1063 inline bool
1064 atomic_compare_exchange_strong_explicit(volatile atomic_bool* __a,
1065 bool* __i1, bool __i2,
1066 memory_order __m1, memory_order __m2)
1067 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1068
1069 inline bool
1070 atomic_compare_exchange_strong(atomic_bool* __a, bool* __i1, bool __i2)
1071 {
1072 return __a->compare_exchange_strong(*__i1, __i2, memory_order_seq_cst,
1073 memory_order_seq_cst);
1074 }
1075
1076 inline bool
1077 atomic_compare_exchange_strong(volatile atomic_bool* __a,
1078 bool* __i1, bool __i2)
1079 {
1080 return __a->compare_exchange_strong(*__i1, __i2, memory_order_seq_cst,
1081 memory_order_seq_cst);
1082 }
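
  // A minimal sketch of the C-style atomic_bool interface above: a flag
  // that exactly one caller can claim.  __claimed and __try_claim are
  // illustrative names.
  //
  //   std::atomic_bool __claimed(false);
  //
  //   bool __try_claim()
  //   {
  //     bool __expected = false;
  //     return std::atomic_compare_exchange_strong(&__claimed, &__expected,
  //                                                true);
  //   }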
1083
1084
1085 // Function templates for atomic_integral operations, using
1086 // __atomic_base.  The template argument should be constrained to
1087 // integral types as specified in the standard.
1088 template<typename _ITp>
1089 inline bool
1090 atomic_is_lock_free(const __atomic_base<_ITp>* __a)
1091 { return __a->is_lock_free(); }
1092
1093 template<typename _ITp>
1094 inline bool
1095 atomic_is_lock_free(const volatile __atomic_base<_ITp>* __a)
1096 { return __a->is_lock_free(); }
1097
1098 template<typename _ITp>
1099 inline void
1100 atomic_init(__atomic_base<_ITp>* __a, _ITp __i);
1101
1102 template<typename _ITp>
1103 inline void
1104 atomic_init(volatile __atomic_base<_ITp>* __a, _ITp __i);
1105
1106 template<typename _ITp>
1107 inline void
1108 atomic_store_explicit(__atomic_base<_ITp>* __a, _ITp __i, memory_order __m)
1109 { __a->store(__i, __m); }
1110
1111 template<typename _ITp>
1112 inline void
1113 atomic_store_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1114 memory_order __m)
1115 { __a->store(__i, __m); }
1116
1117 template<typename _ITp>
1118 inline _ITp
1119 atomic_load_explicit(const __atomic_base<_ITp>* __a, memory_order __m)
1120 { return __a->load(__m); }
1121
1122 template<typename _ITp>
1123 inline _ITp
1124 atomic_load_explicit(const volatile __atomic_base<_ITp>* __a,
1125 memory_order __m)
1126 { return __a->load(__m); }
1127
1128 template<typename _ITp>
1129 inline _ITp
1130 atomic_exchange_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1131 memory_order __m)
1132 { return __a->exchange(__i, __m); }
1133
1134 template<typename _ITp>
1135 inline _ITp
1136 atomic_exchange_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1137 memory_order __m)
1138 { return __a->exchange(__i, __m); }
1139
1140 template<typename _ITp>
1141 inline bool
1142 atomic_compare_exchange_weak_explicit(__atomic_base<_ITp>* __a,
1143 _ITp* __i1, _ITp __i2,
1144 memory_order __m1, memory_order __m2)
1145 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1146
1147 template<typename _ITp>
1148 inline bool
1149 atomic_compare_exchange_weak_explicit(volatile __atomic_base<_ITp>* __a,
1150 _ITp* __i1, _ITp __i2,
1151 memory_order __m1, memory_order __m2)
1152 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1153
1154 template<typename _ITp>
1155 inline bool
1156 atomic_compare_exchange_strong_explicit(__atomic_base<_ITp>* __a,
1157 _ITp* __i1, _ITp __i2,
1158 memory_order __m1,
1159 memory_order __m2)
1160 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1161
1162 template<typename _ITp>
1163 inline bool
1164 atomic_compare_exchange_strong_explicit(volatile __atomic_base<_ITp>* __a,
1165 _ITp* __i1, _ITp __i2,
1166 memory_order __m1,
1167 memory_order __m2)
1168 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1169
1170 template<typename _ITp>
1171 inline _ITp
1172 atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1173 memory_order __m)
1174 { return __a->fetch_add(__i, __m); }
1175
1176 template<typename _ITp>
1177 inline _ITp
1178 atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1179 memory_order __m)
1180 { return __a->fetch_add(__i, __m); }
1181
1182 template<typename _ITp>
1183 inline _ITp
1184 atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1185 memory_order __m)
1186 { return __a->fetch_sub(__i, __m); }
1187
1188 template<typename _ITp>
1189 inline _ITp
1190 atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1191 memory_order __m)
1192 { return __a->fetch_sub(__i, __m); }
1193
1194 template<typename _ITp>
1195 inline _ITp
1196 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1197 memory_order __m)
1198 { return __a->fetch_and(__i, __m); }
1199
1200 template<typename _ITp>
1201 inline _ITp
1202 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1203 memory_order __m)
1204 { return __a->fetch_and(__i, __m); }
1205
1206 template<typename _ITp>
1207 inline _ITp
1208 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1209 memory_order __m)
1210 { return __a->fetch_or(__i, __m); }
1211
1212 template<typename _ITp>
1213 inline _ITp
1214 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1215 memory_order __m)
1216 { return __a->fetch_or(__i, __m); }
1217
1218 template<typename _ITp>
1219 inline _ITp
1220 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1221 memory_order __m)
1222 { return __a->fetch_xor(__i, __m); }
1223
1224 template<typename _ITp>
1225 inline _ITp
1226 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1227 memory_order __m)
1228 { return __a->fetch_xor(__i, __m); }
1229
1230 template<typename _ITp>
1231 inline void
1232 atomic_store(__atomic_base<_ITp>* __a, _ITp __i)
1233 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1234
1235 template<typename _ITp>
1236 inline void
1237 atomic_store(volatile __atomic_base<_ITp>* __a, _ITp __i)
1238 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1239
1240 template<typename _ITp>
1241 inline _ITp
1242 atomic_load(const __atomic_base<_ITp>* __a)
1243 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1244
1245 template<typename _ITp>
1246 inline _ITp
1247 atomic_load(const volatile __atomic_base<_ITp>* __a)
1248 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1249
1250 template<typename _ITp>
1251 inline _ITp
1252 atomic_exchange(__atomic_base<_ITp>* __a, _ITp __i)
1253 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1254
1255 template<typename _ITp>
1256 inline _ITp
1257 atomic_exchange(volatile __atomic_base<_ITp>* __a, _ITp __i)
1258 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1259
1260 template<typename _ITp>
1261 inline bool
1262 atomic_compare_exchange_weak(__atomic_base<_ITp>* __a,
1263 _ITp* __i1, _ITp __i2)
1264 {
1265 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1266 memory_order_seq_cst,
1267 memory_order_seq_cst);
1268 }
1269
1270 template<typename _ITp>
1271 inline bool
1272 atomic_compare_exchange_weak(volatile __atomic_base<_ITp>* __a,
1273 _ITp* __i1, _ITp __i2)
1274 {
1275 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1276 memory_order_seq_cst,
1277 memory_order_seq_cst);
1278 }
1279
1280 template<typename _ITp>
1281 inline bool
1282 atomic_compare_exchange_strong(__atomic_base<_ITp>* __a,
1283 _ITp* __i1, _ITp __i2)
1284 {
1285 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1286 memory_order_seq_cst,
1287 memory_order_seq_cst);
1288 }
1289
1290 template<typename _ITp>
1291 inline bool
1292 atomic_compare_exchange_strong(volatile __atomic_base<_ITp>* __a,
1293 _ITp* __i1, _ITp __i2)
1294 {
1295 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1296 memory_order_seq_cst,
1297 memory_order_seq_cst);
1298 }
1299
1300 template<typename _ITp>
1301 inline _ITp
1302 atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i)
1303 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1304
1305 template<typename _ITp>
1306 inline _ITp
1307 atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i)
1308 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1309
1310 template<typename _ITp>
1311 inline _ITp
1312 atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i)
1313 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1314
1315 template<typename _ITp>
1316 inline _ITp
1317 atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i)
1318 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1319
1320 template<typename _ITp>
1321 inline _ITp
1322 atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i)
1323 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1324
1325 template<typename _ITp>
1326 inline _ITp
1327 atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i)
1328 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1329
1330 template<typename _ITp>
1331 inline _ITp
1332 atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i)
1333 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1334
1335 template<typename _ITp>
1336 inline _ITp
1337 atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i)
1338 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1339
1340 template<typename _ITp>
1341 inline _ITp
1342 atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i)
1343 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1344
1345 template<typename _ITp>
1346 inline _ITp
1347 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i)
1348 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
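
  // A minimal sketch of the free-function templates above, assuming the
  // named integral atomics (here atomic_uint) are __atomic_base
  // instantiations provided by <bits/atomic_base.h>.  __flags, __set_bit
  // and __snapshot are illustrative names.
  //
  //   std::atomic_uint __flags(0u);
  //
  //   void __set_bit(unsigned __mask)
  //   {
  //     std::atomic_fetch_or_explicit(&__flags, __mask,
  //                                   std::memory_order_acq_rel);
  //   }
  //
  //   unsigned __snapshot()
  //   { return std::atomic_load(&__flags); }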
1349
1350 // @} group atomics
1351
1352 _GLIBCXX_END_NAMESPACE_VERSION
1353 } // namespace
1354
1355 #endif