From: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=61164
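
This patch renames libitm's private always-inline macro from __always_inline to
__libitm_always_inline throughout local_atomic, so the header no longer
#undef's and redefines a name the C library also uses (glibc's <sys/cdefs.h>,
for instance, defines __always_inline itself). Below is a minimal sketch of the
clash the rename avoids, assuming a glibc-style definition; it is illustrative
commentary only and not part of the patch:

    /* What the C library typically provides (roughly): */
    #define __always_inline __inline __attribute__ ((__always_inline__))

    /* The old libitm header clobbered that name: */
    #undef __always_inline
    #define __always_inline __attribute__((always_inline))
    /* ...so any libc header included afterwards saw the wrong expansion. */

    /* With this patch, libitm only defines its own reserved name: */
    #define __libitm_always_inline __attribute__((always_inline))

    __libitm_always_inline int add_one(int x) { return x + 1; }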

--- trunk/libitm/local_atomic 2015/08/20 17:43:55 227039
+++ trunk/libitm/local_atomic 2015/08/20 17:55:24 227040
@@ -41,8 +41,7 @@
#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

-#undef __always_inline
-#define __always_inline __attribute__((always_inline))
+#define __libitm_always_inline __attribute__((always_inline))

// #pragma GCC system_header

@@ -74,7 +73,7 @@
memory_order_seq_cst
} memory_order;

- inline __always_inline memory_order
+ inline __libitm_always_inline memory_order
__calculate_memory_order(memory_order __m) noexcept
{
const bool __cond1 = __m == memory_order_release;
@@ -84,13 +83,13 @@
return __mo2;
}

- inline __always_inline void
+ inline __libitm_always_inline void
atomic_thread_fence(memory_order __m) noexcept
{
__atomic_thread_fence (__m);
}

- inline __always_inline void
+ inline __libitm_always_inline void
atomic_signal_fence(memory_order __m) noexcept
{
__atomic_thread_fence (__m);
@@ -280,19 +279,19 @@
// Conversion to ATOMIC_FLAG_INIT.
atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { }

- __always_inline bool
+ __libitm_always_inline bool
test_and_set(memory_order __m = memory_order_seq_cst) noexcept
{
return __atomic_test_and_set (&_M_i, __m);
}

- __always_inline bool
+ __libitm_always_inline bool
test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
{
return __atomic_test_and_set (&_M_i, __m);
}

- __always_inline void
+ __libitm_always_inline void
clear(memory_order __m = memory_order_seq_cst) noexcept
{
// __glibcxx_assert(__m != memory_order_consume);
@@ -302,7 +301,7 @@
__atomic_clear (&_M_i, __m);
}

- __always_inline void
+ __libitm_always_inline void
clear(memory_order __m = memory_order_seq_cst) volatile noexcept
{
// __glibcxx_assert(__m != memory_order_consume);
@@ -455,7 +454,7 @@
is_lock_free() const volatile noexcept
{ return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

- __always_inline void
+ __libitm_always_inline void
store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
{
// __glibcxx_assert(__m != memory_order_acquire);
@@ -465,7 +464,7 @@
__atomic_store_n(&_M_i, __i, __m);
}

- __always_inline void
+ __libitm_always_inline void
store(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -476,7 +475,7 @@
__atomic_store_n(&_M_i, __i, __m);
}

- __always_inline __int_type
+ __libitm_always_inline __int_type
load(memory_order __m = memory_order_seq_cst) const noexcept
{
// __glibcxx_assert(__m != memory_order_release);
@@ -485,7 +484,7 @@
return __atomic_load_n(&_M_i, __m);
}

- __always_inline __int_type
+ __libitm_always_inline __int_type
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{
// __glibcxx_assert(__m != memory_order_release);
@@ -494,21 +493,21 @@
return __atomic_load_n(&_M_i, __m);
}

- __always_inline __int_type
+ __libitm_always_inline __int_type
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{
return __atomic_exchange_n(&_M_i, __i, __m);
}

- __always_inline __int_type
+ __libitm_always_inline __int_type
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
return __atomic_exchange_n(&_M_i, __i, __m);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2) noexcept
{
@@ -519,7 +518,7 @@
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m1,
memory_order __m2) volatile noexcept
@@ -531,7 +530,7 @@
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) noexcept
{
@@ -539,7 +538,7 @@
__calculate_memory_order(__m));
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -547,7 +546,7 @@
__calculate_memory_order(__m));
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2) noexcept
{
@@ -558,7 +557,7 @@
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m1,
memory_order __m2) volatile noexcept
@@ -570,7 +569,7 @@
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) noexcept
{
@@ -578,7 +577,7 @@
__calculate_memory_order(__m));
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -586,52 +585,52 @@
__calculate_memory_order(__m));
}

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_add(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_add(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_sub(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_sub(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_and(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_and(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_or(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_or(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_xor(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_xor(&_M_i, __i, __m); }
@@ -733,7 +732,7 @@
is_lock_free() const volatile noexcept
{ return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }

- __always_inline void
+ __libitm_always_inline void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{
@@ -744,7 +743,7 @@
__atomic_store_n(&_M_p, __p, __m);
}

- __always_inline void
+ __libitm_always_inline void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -755,7 +754,7 @@
__atomic_store_n(&_M_p, __p, __m);
}

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const noexcept
{
// __glibcxx_assert(__m != memory_order_release);
@@ -764,7 +763,7 @@
return __atomic_load_n(&_M_p, __m);
}

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{
// __glibcxx_assert(__m != memory_order_release);
@@ -773,21 +772,21 @@
return __atomic_load_n(&_M_p, __m);
}

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{
return __atomic_exchange_n(&_M_p, __p, __m);
}

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
return __atomic_exchange_n(&_M_p, __p, __m);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1,
memory_order __m2) noexcept
@@ -799,7 +798,7 @@
return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1,
memory_order __m2) volatile noexcept
@@ -811,22 +810,22 @@
return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
}

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_add(&_M_p, __d, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_add(&_M_p, __d, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_sub(&_M_p, __d, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_sub(&_M_p, __d, __m); }
@@ -870,67 +869,67 @@
bool
is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

- __always_inline void
+ __libitm_always_inline void
store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
{ _M_base.store(__i, __m); }

- __always_inline void
+ __libitm_always_inline void
store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
{ _M_base.store(__i, __m); }

- __always_inline bool
+ __libitm_always_inline bool
load(memory_order __m = memory_order_seq_cst) const noexcept
{ return _M_base.load(__m); }

- __always_inline bool
+ __libitm_always_inline bool
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{ return _M_base.load(__m); }

- __always_inline bool
+ __libitm_always_inline bool
exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
{ return _M_base.exchange(__i, __m); }

- __always_inline bool
+ __libitm_always_inline bool
exchange(bool __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_base.exchange(__i, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) volatile noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) volatile noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m); }
@@ -980,11 +979,11 @@
store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
{ __atomic_store(&_M_i, &__i, _m); }

- __always_inline void
+ __libitm_always_inline void
store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
{ __atomic_store(&_M_i, &__i, _m); }

- __always_inline _Tp
+ __libitm_always_inline _Tp
load(memory_order _m = memory_order_seq_cst) const noexcept
{
_Tp tmp;
@@ -992,7 +991,7 @@
return tmp;
}

- __always_inline _Tp
+ __libitm_always_inline _Tp
load(memory_order _m = memory_order_seq_cst) const volatile noexcept
{
_Tp tmp;
@@ -1000,7 +999,7 @@
return tmp;
}

- __always_inline _Tp
+ __libitm_always_inline _Tp
exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
{
_Tp tmp;
@@ -1008,7 +1007,7 @@
return tmp;
}

- __always_inline _Tp
+ __libitm_always_inline _Tp
exchange(_Tp __i,
memory_order _m = memory_order_seq_cst) volatile noexcept
{
@@ -1017,50 +1016,50 @@
return tmp;
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) noexcept
{
return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) volatile noexcept
{
return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return compare_exchange_weak(__e, __i, __m, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return compare_exchange_weak(__e, __i, __m, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) noexcept
{
return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) volatile noexcept
{
return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return compare_exchange_strong(__e, __i, __m, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return compare_exchange_strong(__e, __i, __m, __m); }
@@ -1153,46 +1152,46 @@
is_lock_free() const volatile noexcept
{ return _M_b.is_lock_free(); }

- __always_inline void
+ __libitm_always_inline void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.store(__p, __m); }

- __always_inline void
+ __libitm_always_inline void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.store(__p, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const noexcept
{ return _M_b.load(__m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{ return _M_b.load(__m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.exchange(__p, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.exchange(__p, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1, memory_order __m2) noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1,
memory_order __m2) volatile noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) noexcept
{
@@ -1200,7 +1199,7 @@
__calculate_memory_order(__m));
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -1208,18 +1207,18 @@
__calculate_memory_order(__m));
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1, memory_order __m2) noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1,
memory_order __m2) volatile noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) noexcept
{
@@ -1227,7 +1226,7 @@
__calculate_memory_order(__m));
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -1235,22 +1234,22 @@
__calculate_memory_order(__m));
}

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.fetch_add(__d, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.fetch_add(__d, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.fetch_sub(__d, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.fetch_sub(__d, __m); }
@@ -1544,98 +1543,98 @@


// Function definitions, atomic_flag operations.
- inline __always_inline bool
+ inline __libitm_always_inline bool
atomic_flag_test_and_set_explicit(atomic_flag* __a,
memory_order __m) noexcept
{ return __a->test_and_set(__m); }

- inline __always_inline bool
+ inline __libitm_always_inline bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
memory_order __m) noexcept
{ return __a->test_and_set(__m); }

- inline __always_inline void
+ inline __libitm_always_inline void
atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
{ __a->clear(__m); }

- inline __always_inline void
+ inline __libitm_always_inline void
atomic_flag_clear_explicit(volatile atomic_flag* __a,
memory_order __m) noexcept
{ __a->clear(__m); }

- inline __always_inline bool
+ inline __libitm_always_inline bool
atomic_flag_test_and_set(atomic_flag* __a) noexcept
{ return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

- inline __always_inline bool
+ inline __libitm_always_inline bool
atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
{ return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

- inline __always_inline void
+ inline __libitm_always_inline void
atomic_flag_clear(atomic_flag* __a) noexcept
{ atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

- inline __always_inline void
+ inline __libitm_always_inline void
atomic_flag_clear(volatile atomic_flag* __a) noexcept
{ atomic_flag_clear_explicit(__a, memory_order_seq_cst); }


// Function templates generally applicable to atomic types.
template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
{ return __a->is_lock_free(); }

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
{ return __a->is_lock_free(); }

template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;

template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;

template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ __a->store(__i, __m); }

template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ __a->store(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
{ return __a->load(__m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_load_explicit(const volatile atomic<_ITp>* __a,
memory_order __m) noexcept
{ return __a->load(__m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->exchange(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->exchange(__i, __m); }

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2,
memory_order __m1,
@@ -1643,7 +1642,7 @@
{ return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2,
memory_order __m1,
@@ -1651,7 +1650,7 @@
{ return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2,
memory_order __m1,
@@ -1659,7 +1658,7 @@
{ return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2,
memory_order __m1,
@@ -1668,37 +1667,37 @@


template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
{ atomic_store_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
{ atomic_store_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_load(const atomic<_ITp>* __a) noexcept
{ return atomic_load_explicit(__a, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_load(const volatile atomic<_ITp>* __a) noexcept
{ return atomic_load_explicit(__a, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
{ return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
{ return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_weak(atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2) noexcept
{
@@ -1708,7 +1707,7 @@
}

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2) noexcept
{
@@ -1718,7 +1717,7 @@
}

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_strong(atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2) noexcept
{
@@ -1728,7 +1727,7 @@
}

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2) noexcept
{
@@ -1742,158 +1741,158 @@
// intergral types as specified in the standard, excluding address
// types.
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_add(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_add(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_sub(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_sub(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_and(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_and(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_or(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_or(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_xor(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_xor(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }


// Partial specializations for pointers.
template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
memory_order __m) noexcept
{ return __a->fetch_add(__d, __m); }

template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
memory_order __m) noexcept
{ return __a->fetch_add(__d, __m); }

template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
{ return __a->fetch_add(__d); }

template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
{ return __a->fetch_add(__d); }

template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
ptrdiff_t __d, memory_order __m) noexcept
{ return __a->fetch_sub(__d, __m); }

template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
memory_order __m) noexcept
{ return __a->fetch_sub(__d, __m); }

template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
{ return __a->fetch_sub(__d); }

template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
{ return __a->fetch_sub(__d); }
// @} group atomics