/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ARCH_S390_PERCPU__
#define __ARCH_S390_PERCPU__

#include <linux/preempt.h>
#include <asm/cmpxchg.h>

/*
 * s390 uses its own implementation for per cpu data, the offset of
 * the cpu local data area is cached in the cpu's lowcore memory.
 */
#define __my_cpu_offset S390_lowcore.percpu_offset

/*
 * For 64 bit module code, the module may be more than 4G above the
 * per cpu area, use weak definitions to force the compiler to
 * generate external references.
 */
#if defined(CONFIG_SMP) && defined(MODULE)
#define ARCH_NEEDS_WEAK_PER_CPU
#endif

/*
 * We use a compare-and-swap loop since that uses less cpu cycles than
 * disabling and enabling interrupts like the generic variant would do.
 *
 * The loop retries until cmpxchg() succeeds without interference; the
 * whole statement expression evaluates to the updated value (new__).
 */
#define arch_this_cpu_to_op_simple(pcp, val, op)			\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ old__, new__, prev__;				\
	pcp_op_T__ *ptr__;						\
	/* keep ptr__ pointing at this cpu's instance */		\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	prev__ = *ptr__;						\
	do {								\
		old__ = prev__;						\
		new__ = old__ op (val);					\
		prev__ = cmpxchg(ptr__, old__, new__);			\
	} while (prev__ != old__);					\
	preempt_enable();						\
	new__;								\
})

/* 1 and 2 byte sized ops always use the cmpxchg based variant. */
#define this_cpu_add_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, |)

#ifndef CONFIG_HAVE_MARCH_Z196_FEATURES

/* Pre-z196 machines: 4 and 8 byte ops also use the cmpxchg loop. */
#define this_cpu_add_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, |)

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

/*
 * Add val to a per cpu location using z196+ instructions.  For constant
 * values that fit into a signed byte ((szcast)val in -128..127) the
 * add-immediate form op2 (asi/agsi) is used; otherwise the register
 * form op1 (laa/laag) is used and the previous value ends up in old__
 * (discarded).  This is a statement macro, it yields no value.
 */
#define arch_this_cpu_add(pcp, val, op1, op2, szcast)			\
{									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	if (__builtin_constant_p(val__) &&				\
	    ((szcast)val__ > -129) && ((szcast)val__ < 128)) {		\
		asm volatile(						\
			op2 " %[ptr__],%[val__]\n"			\
			: [ptr__] "+Q" (*ptr__)				\
			: [val__] "i" ((szcast)val__)			\
			: "cc");					\
	} else {							\
		asm volatile(						\
			op1 " %[old__],%[val__],%[ptr__]\n"		\
			: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)	\
			: [val__] "d" (val__)				\
			: "cc");					\
	}								\
	preempt_enable();						\
}

#define this_cpu_add_4(pcp, val) arch_this_cpu_add(pcp, val, "laa", "asi", int)
#define this_cpu_add_8(pcp, val) arch_this_cpu_add(pcp, val, "laag", "agsi", long)

/*
 * Like arch_this_cpu_add(), but a statement expression evaluating to
 * the new value: op leaves the old value in old__, so the result is
 * old__ + val__.
 */
#define arch_this_cpu_add_return(pcp, val, op)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op " %[old__],%[val__],%[ptr__]\n"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable();						\
	old__ + val__;							\
})

#define this_cpu_add_return_4(pcp, val) arch_this_cpu_add_return(pcp, val, "laa")
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_add_return(pcp, val, "laag")

/*
 * Apply a logical op (and/or) to a per cpu location with a single
 * instruction.  The fetched old value in old__ is discarded; this is a
 * statement macro with no result.
 */
#define arch_this_cpu_to_op(pcp, val, op)				\
{									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op " %[old__],%[val__],%[ptr__]\n"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable();						\
}

#define this_cpu_and_4(pcp, val) arch_this_cpu_to_op(pcp, val, "lan")
#define this_cpu_and_8(pcp, val) arch_this_cpu_to_op(pcp, val, "lang")
#define this_cpu_or_4(pcp, val) arch_this_cpu_to_op(pcp, val, "lao")
#define this_cpu_or_8(pcp, val) arch_this_cpu_to_op(pcp, val, "laog")

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

/*
 * Compare-and-swap on a per cpu location; evaluates to the previous
 * value (which equals oval on success), same contract as cmpxchg().
 */
#define arch_this_cpu_cmpxchg(pcp, oval, nval)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ ret__;						\
	pcp_op_T__ *ptr__;						\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = cmpxchg(ptr__, oval, nval);				\
	preempt_enable();						\
	ret__;								\
})

#define this_cpu_cmpxchg_1(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_2(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_4(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_8(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)

/*
 * Exchange nval into a per cpu location; evaluates to the previous
 * value, same contract as xchg().
 */
#define arch_this_cpu_xchg(pcp, nval)					\
({									\
	typeof(pcp) *ptr__;						\
	typeof(pcp) ret__;						\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = xchg(ptr__, nval);					\
	preempt_enable();						\
	ret__;								\
})

#define this_cpu_xchg_1(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_2(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_4(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_8(pcp, nval) arch_this_cpu_xchg(pcp, nval)

/*
 * Double-width compare-and-swap on a pair of per cpu locations.
 * Delegates to __cmpxchg_double(); evaluates to its int return value
 * (nonzero/zero success indication per the cmpxchg_double contract).
 */
#define arch_this_cpu_cmpxchg_double(pcp1, pcp2, o1, o2, n1, n2)	\
({									\
	typeof(pcp1) o1__ = (o1), n1__ = (n1);				\
	typeof(pcp2) o2__ = (o2), n2__ = (n2);				\
	typeof(pcp1) *p1__;						\
	typeof(pcp2) *p2__;						\
	int ret__;							\
	preempt_disable();						\
	p1__ = raw_cpu_ptr(&(pcp1));					\
	p2__ = raw_cpu_ptr(&(pcp2));					\
	ret__ = __cmpxchg_double(p1__, p2__, o1__, o2__, n1__, n2__);	\
	preempt_enable();						\
	ret__;								\
})

#define this_cpu_cmpxchg_double_8 arch_this_cpu_cmpxchg_double

#include <asm-generic/percpu.h>

#endif /* __ARCH_S390_PERCPU__ */