/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_FUTEX_H
#define __ASM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>

#include <asm/errno.h>

#define FUTEX_MAX_LOOPS	128 /* What's the largest number you can think of? */

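/*
 * __futex_atomic_op() performs one read-modify-write on the user futex
 * word as an LDXR/STLXR exclusive-access loop.  Operand mapping in the
 * asm below: %0 = ret, %1 = oldval, %2 = *uaddr, %3 = tmp, %4 = loops,
 * %5 = oparg, %6 = -EFAULT, %7 = -EAGAIN.  A faulting access lands in
 * the fixup at label 4 and returns -EFAULT; if the exclusive store
 * keeps failing, the loop gives up after FUTEX_MAX_LOOPS iterations and
 * returns -EAGAIN rather than spinning with page faults disabled.
 */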
#define __futex_atomic_op(insn, ret, oldval, uaddr, tmp, oparg)	\
do {									\
	unsigned int loops = FUTEX_MAX_LOOPS;				\
									\
	uaccess_enable();						\
	asm volatile(							\
"	prfm	pstl1strm, %2\n"					\
"1:	ldxr	%w1, %2\n"						\
	insn "\n"							\
"2:	stlxr	%w0, %w3, %2\n"						\
"	cbz	%w0, 3f\n"						\
"	sub	%w4, %w4, %w0\n"					\
"	cbnz	%w4, 1b\n"						\
"	mov	%w0, %w7\n"						\
"3:\n"									\
"	dmb	ish\n"							\
"	.pushsection .fixup,\"ax\"\n"					\
"	.align	2\n"							\
"4:	mov	%w0, %w6\n"						\
"	b	3b\n"							\
"	.popsection\n"							\
	_ASM_EXTABLE(1b, 4b)						\
	_ASM_EXTABLE(2b, 4b)						\
	: "=&r" (ret), "=&r" (oldval), "+Q" (*uaddr), "=&r" (tmp),	\
	  "+r" (loops)							\
	: "r" (oparg), "Ir" (-EFAULT), "Ir" (-EAGAIN)			\
	: "memory");							\
	uaccess_disable();						\
} while (0)

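/*
 * Each case in arch_futex_atomic_op_inuser() below instantiates the
 * macro above with a single modifying instruction.  A rough C-level
 * sketch of the resulting loop (illustrative only, not the generated
 * code; load_exclusive()/store_release_exclusive() are hypothetical
 * names for the LDXR/STLXR pair):
 *
 *	loops = FUTEX_MAX_LOOPS;
 *	do {
 *		oldval = load_exclusive(uaddr);
 *		tmp = insn(oldval, oparg);	// mov/add/orr/and/eor
 *		failed = store_release_exclusive(uaddr, tmp);
 *	} while (failed && --loops);
 *	ret = failed ? -EAGAIN : 0;	// -EFAULT if an access faulted
 */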
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *_uaddr)
{
	int oldval = 0, ret, tmp;
	u32 __user *uaddr = __uaccess_mask_ptr(_uaddr);

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%w3, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* ANDN is implemented as AND with the inverted operand */
		__futex_atomic_op("and	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret)
		*oval = oldval;

	return ret;
}

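/*
 * Illustrative only (a sketch of the expected calling pattern, not the
 * exact generic futex code): a -EFAULT return means the user page was
 * not accessible with page faults disabled, so the caller faults it in
 * and retries; -EAGAIN signals LL/SC contention.
 *
 *	int oldval;
 *	int ret = arch_futex_atomic_op_inuser(FUTEX_OP_ADD, 1, &oldval, uaddr);
 *	if (ret == -EFAULT)
 *		;	// fault the page in and retry
 */

/*
 * futex_atomic_cmpxchg_inatomic(): if the futex word equals oldval,
 * atomically replace it with newval, and return the value read through
 * uval.  Operand mapping in the asm below: %0 = ret, %1 = val,
 * %2 = *uaddr, %3 = tmp, %4 = loops, %5 = oldval, %6 = newval,
 * %7 = -EFAULT, %8 = -EAGAIN.  A comparison failure branches straight
 * to label 4, skipping both the store and the barrier.
 */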
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	unsigned int loops = FUTEX_MAX_LOOPS;
	u32 val, tmp;
	u32 __user *uaddr;

	if (!access_ok(_uaddr, sizeof(u32)))
		return -EFAULT;

	uaddr = __uaccess_mask_ptr(_uaddr);
	uaccess_enable();
	asm volatile("// futex_atomic_cmpxchg_inatomic\n"
"	prfm	pstl1strm, %2\n"
"1:	ldxr	%w1, %2\n"
"	sub	%w3, %w1, %w5\n"
"	cbnz	%w3, 4f\n"
"2:	stlxr	%w3, %w6, %2\n"
"	cbz	%w3, 3f\n"
"	sub	%w4, %w4, %w3\n"
"	cbnz	%w4, 1b\n"
"	mov	%w0, %w8\n"
"3:\n"
"	dmb	ish\n"
"4:\n"
"	.pushsection .fixup,\"ax\"\n"
"5:	mov	%w0, %w7\n"
"	b	4b\n"
"	.popsection\n"
	_ASM_EXTABLE(1b, 5b)
	_ASM_EXTABLE(2b, 5b)
	: "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
	: "r" (oldval), "r" (newval), "Ir" (-EFAULT), "Ir" (-EAGAIN)
	: "memory");
	uaccess_disable();

	if (!ret)
		*uval = val;

	return ret;
}
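
/*
 * Illustrative only: a minimal caller sketch for the helper above
 * (hypothetical values, not a real call site).  A zero return with
 * *uval != oldval means the comparison failed and nothing was stored.
 *
 *	u32 cur;
 *	if (!futex_atomic_cmpxchg_inatomic(&cur, uaddr, 0, 1) && cur == 0)
 *		;	// the word was 0 and is now 1
 */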

#endif /* __KERNEL__ */
#endif /* __ASM_FUTEX_H */