1 // Support for atomic operations -*- C++ -*-
3 // Copyright (C) 2004-2022 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
/** @file ext/atomicity.h
 *  This file is a GNU extension to the Standard C++ Library.
 */
29 #ifndef _GLIBCXX_ATOMICITY_H
30 #define _GLIBCXX_ATOMICITY_H 1
32 #pragma GCC system_header
34 #include <bits/c++config.h>
35 #include <bits/gthr.h>
36 #include <bits/atomic_word.h>
// glibc 2.32+ provides __libc_single_threaded here; guard the include so
// the header still builds on C libraries that lack it.
#if __has_include(<sys/single_threaded.h>)
# include <sys/single_threaded.h>
#endif
41 namespace __gnu_cxx
_GLIBCXX_VISIBILITY(default)
43 _GLIBCXX_BEGIN_NAMESPACE_VERSION
45 __attribute__((__always_inline__
))
47 __is_single_threaded() _GLIBCXX_NOTHROW
51 #elif __has_include(<sys/single_threaded.h>)
52 return ::__libc_single_threaded
;
54 return !__gthread_active_p();
58 // Functions for portable atomic access.
59 // To abstract locking primitives across all thread policies, use:
60 // __exchange_and_add_dispatch
61 // __atomic_add_dispatch
62 #ifdef _GLIBCXX_ATOMIC_BUILTINS
64 __attribute__((__always_inline__
))
65 __exchange_and_add(volatile _Atomic_word
* __mem
, int __val
)
66 { return __atomic_fetch_add(__mem
, __val
, __ATOMIC_ACQ_REL
); }
69 __attribute__((__always_inline__
))
70 __atomic_add(volatile _Atomic_word
* __mem
, int __val
)
71 { __atomic_fetch_add(__mem
, __val
, __ATOMIC_ACQ_REL
); }
74 __exchange_and_add(volatile _Atomic_word
*, int) _GLIBCXX_NOTHROW
;
77 __atomic_add(volatile _Atomic_word
*, int) _GLIBCXX_NOTHROW
;
81 __attribute__((__always_inline__
))
82 __exchange_and_add_single(_Atomic_word
* __mem
, int __val
)
84 _Atomic_word __result
= *__mem
;
90 __attribute__((__always_inline__
))
91 __atomic_add_single(_Atomic_word
* __mem
, int __val
)
95 __attribute__ ((__always_inline__
))
96 __exchange_and_add_dispatch(_Atomic_word
* __mem
, int __val
)
98 if (__is_single_threaded())
99 return __exchange_and_add_single(__mem
, __val
);
101 return __exchange_and_add(__mem
, __val
);
105 __attribute__ ((__always_inline__
))
106 __atomic_add_dispatch(_Atomic_word
* __mem
, int __val
)
108 if (__is_single_threaded())
109 __atomic_add_single(__mem
, __val
);
111 __atomic_add(__mem
, __val
);
114 _GLIBCXX_END_NAMESPACE_VERSION
117 // Even if the CPU doesn't need a memory barrier, we need to ensure
118 // that the compiler doesn't reorder memory accesses across the
120 #ifndef _GLIBCXX_READ_MEM_BARRIER
121 #define _GLIBCXX_READ_MEM_BARRIER __atomic_thread_fence (__ATOMIC_ACQUIRE)
123 #ifndef _GLIBCXX_WRITE_MEM_BARRIER
124 #define _GLIBCXX_WRITE_MEM_BARRIER __atomic_thread_fence (__ATOMIC_RELEASE)