/*
 * This file provides wrappers with KASAN instrumentation for atomic operations.
 * To use this functionality, an arch's atomic.h file needs to define all
 * atomic operations with an arch_ prefix (e.g. arch_atomic_read()) and include
 * this file at the end. This file then provides atomic_read(), which forwards
 * to arch_atomic_read() for the actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. an atomic_read()/atomic_cmpxchg() loop), then it needs to
 * use the arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to
 * avoid double instrumentation.
 */

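/*
 * For illustration only: a hypothetical arch's asm/atomic.h would be wired
 * up roughly as follows (most arches implement arch_atomic_read() with
 * READ_ONCE() on the counter field, but the details are arch-specific):
 *
 *	static __always_inline int arch_atomic_read(const atomic_t *v)
 *	{
 *		return READ_ONCE(v->counter);
 *	}
 *
 *	...all other arch_atomic*() and arch_atomic64*() operations...
 *
 *	#include <asm-generic/atomic-instrumented.h>
 */
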
#ifndef _LINUX_ATOMIC_INSTRUMENTED_H
#define _LINUX_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/kasan-checks.h>

static __always_inline int atomic_read(const atomic_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic_read(v);
}

static __always_inline s64 atomic64_read(const atomic64_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic64_read(v);
}

static __always_inline void atomic_set(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_set(v, i);
}

static __always_inline void atomic64_set(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_set(v, i);
}

static __always_inline int atomic_xchg(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_xchg(v, i);
}

static __always_inline s64 atomic64_xchg(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_xchg(v, i);
}

static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg(v, old, new);
}

static __always_inline s64 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg(v, old, new);
}

#ifdef arch_atomic_try_cmpxchg
#define atomic_try_cmpxchg atomic_try_cmpxchg
static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_read(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#endif

#ifdef arch_atomic64_try_cmpxchg
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_read(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#endif
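
/*
 * For illustration only: the canonical try_cmpxchg() retry loop. On failure,
 * *old is updated to the value the variable actually contained, so the
 * caller does not need to re-read it each time around the loop:
 *
 *	int new, old = atomic_read(v);
 *
 *	do {
 *		new = old + 1;
 *	} while (!atomic_try_cmpxchg(v, &old, new));
 */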

#ifdef arch_atomic_fetch_add_unless
#define atomic_fetch_add_unless atomic_fetch_add_unless
static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_unless(v, a, u);
}
#endif

#ifdef arch_atomic64_fetch_add_unless
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
static __always_inline s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_unless(v, a, u);
}
#endif
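
/*
 * For illustration only: fetch_add_unless() supports "add unless the value
 * is u" patterns, e.g. taking a reference only if the refcount has not
 * already dropped to zero (obj being some hypothetical refcounted object):
 *
 *	if (atomic_fetch_add_unless(&obj->refs, 1, 0) == 0)
 *		return NULL;
 */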

#ifdef arch_atomic_inc
#define atomic_inc atomic_inc
static __always_inline void atomic_inc(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_inc(v);
}
#endif

#ifdef arch_atomic64_inc
#define atomic64_inc atomic64_inc
static __always_inline void atomic64_inc(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}
#endif

#ifdef arch_atomic_dec
#define atomic_dec atomic_dec
static __always_inline void atomic_dec(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_dec(v);
}
#endif

#ifdef arch_atomic64_dec
#define atomic64_dec atomic64_dec
static __always_inline void atomic64_dec(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}
#endif

static __always_inline void atomic_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_add(i, v);
}

static __always_inline void atomic64_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_add(i, v);
}

static __always_inline void atomic_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_sub(i, v);
}

static __always_inline void atomic64_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_sub(i, v);
}

static __always_inline void atomic_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_and(i, v);
}

static __always_inline void atomic64_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_and(i, v);
}

static __always_inline void atomic_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_or(i, v);
}

static __always_inline void atomic64_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_or(i, v);
}

static __always_inline void atomic_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_xor(i, v);
}

static __always_inline void atomic64_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_xor(i, v);
}

#ifdef arch_atomic_inc_return
#define atomic_inc_return atomic_inc_return
static __always_inline int atomic_inc_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}
#endif

#ifdef arch_atomic64_inc_return
#define atomic64_inc_return atomic64_inc_return
static __always_inline s64 atomic64_inc_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}
#endif

#ifdef arch_atomic_dec_return
#define atomic_dec_return atomic_dec_return
static __always_inline int atomic_dec_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}
#endif

#ifdef arch_atomic64_dec_return
#define atomic64_dec_return atomic64_dec_return
static __always_inline s64 atomic64_dec_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}
#endif

#ifdef arch_atomic64_inc_not_zero
#define atomic64_inc_not_zero atomic64_inc_not_zero
static __always_inline bool atomic64_inc_not_zero(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}
#endif

#ifdef arch_atomic64_dec_if_positive
#define atomic64_dec_if_positive atomic64_dec_if_positive
static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
#endif

#ifdef arch_atomic_dec_and_test
#define atomic_dec_and_test atomic_dec_and_test
static __always_inline bool atomic_dec_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}
#endif

#ifdef arch_atomic64_dec_and_test
#define atomic64_dec_and_test atomic64_dec_and_test
static __always_inline bool atomic64_dec_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}
#endif

#ifdef arch_atomic_inc_and_test
#define atomic_inc_and_test atomic_inc_and_test
static __always_inline bool atomic_inc_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}
#endif

#ifdef arch_atomic64_inc_and_test
#define atomic64_inc_and_test atomic64_inc_and_test
static __always_inline bool atomic64_inc_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}
#endif

static __always_inline int atomic_add_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_return(i, v);
}

static __always_inline s64 atomic64_add_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_return(i, v);
}

static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_return(i, v);
}

static __always_inline s64 atomic64_sub_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return(i, v);
}

static __always_inline int atomic_fetch_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add(i, v);
}

static __always_inline s64 atomic64_fetch_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add(i, v);
}

static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub(i, v);
}

static __always_inline s64 atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub(i, v);
}

static __always_inline int atomic_fetch_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and(i, v);
}

static __always_inline s64 atomic64_fetch_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and(i, v);
}

static __always_inline int atomic_fetch_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or(i, v);
}

static __always_inline s64 atomic64_fetch_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or(i, v);
}

static __always_inline int atomic_fetch_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor(i, v);
}

static __always_inline s64 atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor(i, v);
}

#ifdef arch_atomic_sub_and_test
#define atomic_sub_and_test atomic_sub_and_test
static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}
#endif

#ifdef arch_atomic64_sub_and_test
#define atomic64_sub_and_test atomic64_sub_and_test
static __always_inline bool atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}
#endif

#ifdef arch_atomic_add_negative
#define atomic_add_negative atomic_add_negative
static __always_inline bool atomic_add_negative(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}
#endif

#ifdef arch_atomic64_add_negative
#define atomic64_add_negative atomic64_add_negative
static __always_inline bool atomic64_add_negative(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}
#endif
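
/*
 * The xchg()/cmpxchg() family below evaluates the ptr argument exactly once,
 * into __ai_ptr, so that an argument with side effects is not evaluated
 * twice: once by the KASAN check and again by the arch_ operation.
 */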

#define xchg(ptr, new)						\
({								\
	typeof(ptr) __ai_ptr = (ptr);				\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg(__ai_ptr, (new));				\
})

#define cmpxchg(ptr, old, new)					\
({								\
	typeof(ptr) __ai_ptr = (ptr);				\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg(__ai_ptr, (old), (new));			\
})

#define sync_cmpxchg(ptr, old, new)				\
({								\
	typeof(ptr) __ai_ptr = (ptr);				\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_sync_cmpxchg(__ai_ptr, (old), (new));		\
})

#define cmpxchg_local(ptr, old, new)				\
({								\
	typeof(ptr) __ai_ptr = (ptr);				\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_local(__ai_ptr, (old), (new));		\
})

#define cmpxchg64(ptr, old, new)				\
({								\
	typeof(ptr) __ai_ptr = (ptr);				\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64(__ai_ptr, (old), (new));			\
})

#define cmpxchg64_local(ptr, old, new)				\
({								\
	typeof(ptr) __ai_ptr = (ptr);				\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_local(__ai_ptr, (old), (new));		\
})
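
/*
 * cmpxchg_double() operates on a pair of adjacent machine words: p2 must
 * immediately follow p1 in memory. Checking 2 * sizeof(*p1) bytes at p1
 * therefore covers both words with a single KASAN check.
 */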

#define cmpxchg_double(p1, p2, o1, o2, n1, n2)				\
({									\
	typeof(p1) __ai_p1 = (p1);					\
	kasan_check_write(__ai_p1, 2 * sizeof(*__ai_p1));		\
	arch_cmpxchg_double(__ai_p1, (p2), (o1), (o2), (n1), (n2));	\
})

#define cmpxchg_double_local(p1, p2, o1, o2, n1, n2)			\
({									\
	typeof(p1) __ai_p1 = (p1);					\
	kasan_check_write(__ai_p1, 2 * sizeof(*__ai_p1));		\
	arch_cmpxchg_double_local(__ai_p1, (p2), (o1), (o2), (n1), (n2)); \
})

#endif /* _LINUX_ATOMIC_INSTRUMENTED_H */