/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Author: Lu Zeng <zenglu@loongson.cn>
 *         Pei Huang <huangpei@loongson.cn>
 *         Huacai Chen <chenhuacai@loongson.cn>
 *
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/asm-extable.h>
#include <asm/asm-offsets.h>
#include <asm/errno.h>
#include <asm/fpregdef.h>
#include <asm/loongarch.h>
#include <asm/regdef.h>

#define FPU_REG_WIDTH		8
#define LSX_REG_WIDTH		16
#define LASX_REG_WIDTH		32

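/*
 * Wrap a single load/store in an exception-table entry: if the access
 * faults, the fixup branches to .L_fpu_fault, which returns -EFAULT
 * to the caller.
 */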
	.macro	EX insn, reg, src, offs
.ex\@:	\insn	\reg, \src, \offs
	_asm_extable .ex\@, .L_fpu_fault
	.endm

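/* Save all 32 double-precision FP registers to \base. */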
	.macro	sc_save_fp base
	EX	fst.d	$f0,  \base, (0 * FPU_REG_WIDTH)
	EX	fst.d	$f1,  \base, (1 * FPU_REG_WIDTH)
	EX	fst.d	$f2,  \base, (2 * FPU_REG_WIDTH)
	EX	fst.d	$f3,  \base, (3 * FPU_REG_WIDTH)
	EX	fst.d	$f4,  \base, (4 * FPU_REG_WIDTH)
	EX	fst.d	$f5,  \base, (5 * FPU_REG_WIDTH)
	EX	fst.d	$f6,  \base, (6 * FPU_REG_WIDTH)
	EX	fst.d	$f7,  \base, (7 * FPU_REG_WIDTH)
	EX	fst.d	$f8,  \base, (8 * FPU_REG_WIDTH)
	EX	fst.d	$f9,  \base, (9 * FPU_REG_WIDTH)
	EX	fst.d	$f10, \base, (10 * FPU_REG_WIDTH)
	EX	fst.d	$f11, \base, (11 * FPU_REG_WIDTH)
	EX	fst.d	$f12, \base, (12 * FPU_REG_WIDTH)
	EX	fst.d	$f13, \base, (13 * FPU_REG_WIDTH)
	EX	fst.d	$f14, \base, (14 * FPU_REG_WIDTH)
	EX	fst.d	$f15, \base, (15 * FPU_REG_WIDTH)
	EX	fst.d	$f16, \base, (16 * FPU_REG_WIDTH)
	EX	fst.d	$f17, \base, (17 * FPU_REG_WIDTH)
	EX	fst.d	$f18, \base, (18 * FPU_REG_WIDTH)
	EX	fst.d	$f19, \base, (19 * FPU_REG_WIDTH)
	EX	fst.d	$f20, \base, (20 * FPU_REG_WIDTH)
	EX	fst.d	$f21, \base, (21 * FPU_REG_WIDTH)
	EX	fst.d	$f22, \base, (22 * FPU_REG_WIDTH)
	EX	fst.d	$f23, \base, (23 * FPU_REG_WIDTH)
	EX	fst.d	$f24, \base, (24 * FPU_REG_WIDTH)
	EX	fst.d	$f25, \base, (25 * FPU_REG_WIDTH)
	EX	fst.d	$f26, \base, (26 * FPU_REG_WIDTH)
	EX	fst.d	$f27, \base, (27 * FPU_REG_WIDTH)
	EX	fst.d	$f28, \base, (28 * FPU_REG_WIDTH)
	EX	fst.d	$f29, \base, (29 * FPU_REG_WIDTH)
	EX	fst.d	$f30, \base, (30 * FPU_REG_WIDTH)
	EX	fst.d	$f31, \base, (31 * FPU_REG_WIDTH)
	.endm

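/* Restore all 32 double-precision FP registers from \base. */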
	.macro	sc_restore_fp base
	EX	fld.d	$f0,  \base, (0 * FPU_REG_WIDTH)
	EX	fld.d	$f1,  \base, (1 * FPU_REG_WIDTH)
	EX	fld.d	$f2,  \base, (2 * FPU_REG_WIDTH)
	EX	fld.d	$f3,  \base, (3 * FPU_REG_WIDTH)
	EX	fld.d	$f4,  \base, (4 * FPU_REG_WIDTH)
	EX	fld.d	$f5,  \base, (5 * FPU_REG_WIDTH)
	EX	fld.d	$f6,  \base, (6 * FPU_REG_WIDTH)
	EX	fld.d	$f7,  \base, (7 * FPU_REG_WIDTH)
	EX	fld.d	$f8,  \base, (8 * FPU_REG_WIDTH)
	EX	fld.d	$f9,  \base, (9 * FPU_REG_WIDTH)
	EX	fld.d	$f10, \base, (10 * FPU_REG_WIDTH)
	EX	fld.d	$f11, \base, (11 * FPU_REG_WIDTH)
	EX	fld.d	$f12, \base, (12 * FPU_REG_WIDTH)
	EX	fld.d	$f13, \base, (13 * FPU_REG_WIDTH)
	EX	fld.d	$f14, \base, (14 * FPU_REG_WIDTH)
	EX	fld.d	$f15, \base, (15 * FPU_REG_WIDTH)
	EX	fld.d	$f16, \base, (16 * FPU_REG_WIDTH)
	EX	fld.d	$f17, \base, (17 * FPU_REG_WIDTH)
	EX	fld.d	$f18, \base, (18 * FPU_REG_WIDTH)
	EX	fld.d	$f19, \base, (19 * FPU_REG_WIDTH)
	EX	fld.d	$f20, \base, (20 * FPU_REG_WIDTH)
	EX	fld.d	$f21, \base, (21 * FPU_REG_WIDTH)
	EX	fld.d	$f22, \base, (22 * FPU_REG_WIDTH)
	EX	fld.d	$f23, \base, (23 * FPU_REG_WIDTH)
	EX	fld.d	$f24, \base, (24 * FPU_REG_WIDTH)
	EX	fld.d	$f25, \base, (25 * FPU_REG_WIDTH)
	EX	fld.d	$f26, \base, (26 * FPU_REG_WIDTH)
	EX	fld.d	$f27, \base, (27 * FPU_REG_WIDTH)
	EX	fld.d	$f28, \base, (28 * FPU_REG_WIDTH)
	EX	fld.d	$f29, \base, (29 * FPU_REG_WIDTH)
	EX	fld.d	$f30, \base, (30 * FPU_REG_WIDTH)
	EX	fld.d	$f31, \base, (31 * FPU_REG_WIDTH)
	.endm

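/*
 * Pack the eight 1-bit condition flags $fcc0..$fcc7 into one 64-bit
 * word (one byte per flag, $fcc0 in the lowest byte) and store it at
 * \base.
 */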
	.macro	sc_save_fcc base, tmp0, tmp1
	movcf2gr	\tmp0, $fcc0
	move		\tmp1, \tmp0
	movcf2gr	\tmp0, $fcc1
	bstrins.d	\tmp1, \tmp0, 15, 8
	movcf2gr	\tmp0, $fcc2
	bstrins.d	\tmp1, \tmp0, 23, 16
	movcf2gr	\tmp0, $fcc3
	bstrins.d	\tmp1, \tmp0, 31, 24
	movcf2gr	\tmp0, $fcc4
	bstrins.d	\tmp1, \tmp0, 39, 32
	movcf2gr	\tmp0, $fcc5
	bstrins.d	\tmp1, \tmp0, 47, 40
	movcf2gr	\tmp0, $fcc6
	bstrins.d	\tmp1, \tmp0, 55, 48
	movcf2gr	\tmp0, $fcc7
	bstrins.d	\tmp1, \tmp0, 63, 56
	EX	st.d	\tmp1, \base, 0
	.endm

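/* Unpack the 64-bit word at \base back into $fcc0..$fcc7. */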
	.macro	sc_restore_fcc base, tmp0, tmp1
	EX	ld.d	\tmp0, \base, 0
	bstrpick.d	\tmp1, \tmp0, 7, 0
	movgr2cf	$fcc0, \tmp1
	bstrpick.d	\tmp1, \tmp0, 15, 8
	movgr2cf	$fcc1, \tmp1
	bstrpick.d	\tmp1, \tmp0, 23, 16
	movgr2cf	$fcc2, \tmp1
	bstrpick.d	\tmp1, \tmp0, 31, 24
	movgr2cf	$fcc3, \tmp1
	bstrpick.d	\tmp1, \tmp0, 39, 32
	movgr2cf	$fcc4, \tmp1
	bstrpick.d	\tmp1, \tmp0, 47, 40
	movgr2cf	$fcc5, \tmp1
	bstrpick.d	\tmp1, \tmp0, 55, 48
	movgr2cf	$fcc6, \tmp1
	bstrpick.d	\tmp1, \tmp0, 63, 56
	movgr2cf	$fcc7, \tmp1
	.endm

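/*
 * Save the FCSR to \base. When LBT is present, also clear x86 Top
 * Mode (x86clrtm) if the TM bit is set in the saved FCSR.
 */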
	.macro	sc_save_fcsr base, tmp0
	movfcsr2gr	\tmp0, fcsr0
	EX	st.w	\tmp0, \base, 0
#if defined(CONFIG_CPU_HAS_LBT)
	/* TM bit is always 0 if LBT is not supported */
	andi	\tmp0, \tmp0, FPU_CSR_TM
	beqz	\tmp0, 1f
	x86clrtm
1:
#endif
	.endm

	.macro	sc_restore_fcsr base, tmp0
	EX	ld.w	\tmp0, \base, 0
	movgr2fcsr	fcsr0, \tmp0
	.endm

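/* Save all 32 128-bit LSX vector registers to \base. */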
	.macro	sc_save_lsx base
#ifdef CONFIG_CPU_HAS_LSX
	EX	vst	$vr0,  \base, (0 * LSX_REG_WIDTH)
	EX	vst	$vr1,  \base, (1 * LSX_REG_WIDTH)
	EX	vst	$vr2,  \base, (2 * LSX_REG_WIDTH)
	EX	vst	$vr3,  \base, (3 * LSX_REG_WIDTH)
	EX	vst	$vr4,  \base, (4 * LSX_REG_WIDTH)
	EX	vst	$vr5,  \base, (5 * LSX_REG_WIDTH)
	EX	vst	$vr6,  \base, (6 * LSX_REG_WIDTH)
	EX	vst	$vr7,  \base, (7 * LSX_REG_WIDTH)
	EX	vst	$vr8,  \base, (8 * LSX_REG_WIDTH)
	EX	vst	$vr9,  \base, (9 * LSX_REG_WIDTH)
	EX	vst	$vr10, \base, (10 * LSX_REG_WIDTH)
	EX	vst	$vr11, \base, (11 * LSX_REG_WIDTH)
	EX	vst	$vr12, \base, (12 * LSX_REG_WIDTH)
	EX	vst	$vr13, \base, (13 * LSX_REG_WIDTH)
	EX	vst	$vr14, \base, (14 * LSX_REG_WIDTH)
	EX	vst	$vr15, \base, (15 * LSX_REG_WIDTH)
	EX	vst	$vr16, \base, (16 * LSX_REG_WIDTH)
	EX	vst	$vr17, \base, (17 * LSX_REG_WIDTH)
	EX	vst	$vr18, \base, (18 * LSX_REG_WIDTH)
	EX	vst	$vr19, \base, (19 * LSX_REG_WIDTH)
	EX	vst	$vr20, \base, (20 * LSX_REG_WIDTH)
	EX	vst	$vr21, \base, (21 * LSX_REG_WIDTH)
	EX	vst	$vr22, \base, (22 * LSX_REG_WIDTH)
	EX	vst	$vr23, \base, (23 * LSX_REG_WIDTH)
	EX	vst	$vr24, \base, (24 * LSX_REG_WIDTH)
	EX	vst	$vr25, \base, (25 * LSX_REG_WIDTH)
	EX	vst	$vr26, \base, (26 * LSX_REG_WIDTH)
	EX	vst	$vr27, \base, (27 * LSX_REG_WIDTH)
	EX	vst	$vr28, \base, (28 * LSX_REG_WIDTH)
	EX	vst	$vr29, \base, (29 * LSX_REG_WIDTH)
	EX	vst	$vr30, \base, (30 * LSX_REG_WIDTH)
	EX	vst	$vr31, \base, (31 * LSX_REG_WIDTH)
#endif
	.endm

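/* Restore all 32 128-bit LSX vector registers from \base. */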
	.macro	sc_restore_lsx base
#ifdef CONFIG_CPU_HAS_LSX
	EX	vld	$vr0,  \base, (0 * LSX_REG_WIDTH)
	EX	vld	$vr1,  \base, (1 * LSX_REG_WIDTH)
	EX	vld	$vr2,  \base, (2 * LSX_REG_WIDTH)
	EX	vld	$vr3,  \base, (3 * LSX_REG_WIDTH)
	EX	vld	$vr4,  \base, (4 * LSX_REG_WIDTH)
	EX	vld	$vr5,  \base, (5 * LSX_REG_WIDTH)
	EX	vld	$vr6,  \base, (6 * LSX_REG_WIDTH)
	EX	vld	$vr7,  \base, (7 * LSX_REG_WIDTH)
	EX	vld	$vr8,  \base, (8 * LSX_REG_WIDTH)
	EX	vld	$vr9,  \base, (9 * LSX_REG_WIDTH)
	EX	vld	$vr10, \base, (10 * LSX_REG_WIDTH)
	EX	vld	$vr11, \base, (11 * LSX_REG_WIDTH)
	EX	vld	$vr12, \base, (12 * LSX_REG_WIDTH)
	EX	vld	$vr13, \base, (13 * LSX_REG_WIDTH)
	EX	vld	$vr14, \base, (14 * LSX_REG_WIDTH)
	EX	vld	$vr15, \base, (15 * LSX_REG_WIDTH)
	EX	vld	$vr16, \base, (16 * LSX_REG_WIDTH)
	EX	vld	$vr17, \base, (17 * LSX_REG_WIDTH)
	EX	vld	$vr18, \base, (18 * LSX_REG_WIDTH)
	EX	vld	$vr19, \base, (19 * LSX_REG_WIDTH)
	EX	vld	$vr20, \base, (20 * LSX_REG_WIDTH)
	EX	vld	$vr21, \base, (21 * LSX_REG_WIDTH)
	EX	vld	$vr22, \base, (22 * LSX_REG_WIDTH)
	EX	vld	$vr23, \base, (23 * LSX_REG_WIDTH)
	EX	vld	$vr24, \base, (24 * LSX_REG_WIDTH)
	EX	vld	$vr25, \base, (25 * LSX_REG_WIDTH)
	EX	vld	$vr26, \base, (26 * LSX_REG_WIDTH)
	EX	vld	$vr27, \base, (27 * LSX_REG_WIDTH)
	EX	vld	$vr28, \base, (28 * LSX_REG_WIDTH)
	EX	vld	$vr29, \base, (29 * LSX_REG_WIDTH)
	EX	vld	$vr30, \base, (30 * LSX_REG_WIDTH)
	EX	vld	$vr31, \base, (31 * LSX_REG_WIDTH)
#endif
	.endm

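/* Save all 32 256-bit LASX vector registers to \base. */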
	.macro	sc_save_lasx base
#ifdef CONFIG_CPU_HAS_LASX
	EX	xvst	$xr0,  \base, (0 * LASX_REG_WIDTH)
	EX	xvst	$xr1,  \base, (1 * LASX_REG_WIDTH)
	EX	xvst	$xr2,  \base, (2 * LASX_REG_WIDTH)
	EX	xvst	$xr3,  \base, (3 * LASX_REG_WIDTH)
	EX	xvst	$xr4,  \base, (4 * LASX_REG_WIDTH)
	EX	xvst	$xr5,  \base, (5 * LASX_REG_WIDTH)
	EX	xvst	$xr6,  \base, (6 * LASX_REG_WIDTH)
	EX	xvst	$xr7,  \base, (7 * LASX_REG_WIDTH)
	EX	xvst	$xr8,  \base, (8 * LASX_REG_WIDTH)
	EX	xvst	$xr9,  \base, (9 * LASX_REG_WIDTH)
	EX	xvst	$xr10, \base, (10 * LASX_REG_WIDTH)
	EX	xvst	$xr11, \base, (11 * LASX_REG_WIDTH)
	EX	xvst	$xr12, \base, (12 * LASX_REG_WIDTH)
	EX	xvst	$xr13, \base, (13 * LASX_REG_WIDTH)
	EX	xvst	$xr14, \base, (14 * LASX_REG_WIDTH)
	EX	xvst	$xr15, \base, (15 * LASX_REG_WIDTH)
	EX	xvst	$xr16, \base, (16 * LASX_REG_WIDTH)
	EX	xvst	$xr17, \base, (17 * LASX_REG_WIDTH)
	EX	xvst	$xr18, \base, (18 * LASX_REG_WIDTH)
	EX	xvst	$xr19, \base, (19 * LASX_REG_WIDTH)
	EX	xvst	$xr20, \base, (20 * LASX_REG_WIDTH)
	EX	xvst	$xr21, \base, (21 * LASX_REG_WIDTH)
	EX	xvst	$xr22, \base, (22 * LASX_REG_WIDTH)
	EX	xvst	$xr23, \base, (23 * LASX_REG_WIDTH)
	EX	xvst	$xr24, \base, (24 * LASX_REG_WIDTH)
	EX	xvst	$xr25, \base, (25 * LASX_REG_WIDTH)
	EX	xvst	$xr26, \base, (26 * LASX_REG_WIDTH)
	EX	xvst	$xr27, \base, (27 * LASX_REG_WIDTH)
	EX	xvst	$xr28, \base, (28 * LASX_REG_WIDTH)
	EX	xvst	$xr29, \base, (29 * LASX_REG_WIDTH)
	EX	xvst	$xr30, \base, (30 * LASX_REG_WIDTH)
	EX	xvst	$xr31, \base, (31 * LASX_REG_WIDTH)
#endif
	.endm

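/* Restore all 32 256-bit LASX vector registers from \base. */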
	.macro	sc_restore_lasx base
#ifdef CONFIG_CPU_HAS_LASX
	EX	xvld	$xr0,  \base, (0 * LASX_REG_WIDTH)
	EX	xvld	$xr1,  \base, (1 * LASX_REG_WIDTH)
	EX	xvld	$xr2,  \base, (2 * LASX_REG_WIDTH)
	EX	xvld	$xr3,  \base, (3 * LASX_REG_WIDTH)
	EX	xvld	$xr4,  \base, (4 * LASX_REG_WIDTH)
	EX	xvld	$xr5,  \base, (5 * LASX_REG_WIDTH)
	EX	xvld	$xr6,  \base, (6 * LASX_REG_WIDTH)
	EX	xvld	$xr7,  \base, (7 * LASX_REG_WIDTH)
	EX	xvld	$xr8,  \base, (8 * LASX_REG_WIDTH)
	EX	xvld	$xr9,  \base, (9 * LASX_REG_WIDTH)
	EX	xvld	$xr10, \base, (10 * LASX_REG_WIDTH)
	EX	xvld	$xr11, \base, (11 * LASX_REG_WIDTH)
	EX	xvld	$xr12, \base, (12 * LASX_REG_WIDTH)
	EX	xvld	$xr13, \base, (13 * LASX_REG_WIDTH)
	EX	xvld	$xr14, \base, (14 * LASX_REG_WIDTH)
	EX	xvld	$xr15, \base, (15 * LASX_REG_WIDTH)
	EX	xvld	$xr16, \base, (16 * LASX_REG_WIDTH)
	EX	xvld	$xr17, \base, (17 * LASX_REG_WIDTH)
	EX	xvld	$xr18, \base, (18 * LASX_REG_WIDTH)
	EX	xvld	$xr19, \base, (19 * LASX_REG_WIDTH)
	EX	xvld	$xr20, \base, (20 * LASX_REG_WIDTH)
	EX	xvld	$xr21, \base, (21 * LASX_REG_WIDTH)
	EX	xvld	$xr22, \base, (22 * LASX_REG_WIDTH)
	EX	xvld	$xr23, \base, (23 * LASX_REG_WIDTH)
	EX	xvld	$xr24, \base, (24 * LASX_REG_WIDTH)
	EX	xvld	$xr25, \base, (25 * LASX_REG_WIDTH)
	EX	xvld	$xr26, \base, (26 * LASX_REG_WIDTH)
	EX	xvld	$xr27, \base, (27 * LASX_REG_WIDTH)
	EX	xvld	$xr28, \base, (28 * LASX_REG_WIDTH)
	EX	xvld	$xr29, \base, (29 * LASX_REG_WIDTH)
	EX	xvld	$xr30, \base, (30 * LASX_REG_WIDTH)
	EX	xvld	$xr31, \base, (31 * LASX_REG_WIDTH)
#endif
	.endm

/*
 * Save a thread's fp context.
 */
SYM_FUNC_START(_save_fp)
	fpu_save_csr	a0 t1
	fpu_save_double	a0 t1			# clobbers t1
	fpu_save_cc	a0 t1 t2		# clobbers t1, t2
	jr	ra
SYM_FUNC_END(_save_fp)
EXPORT_SYMBOL(_save_fp)

/*
 * Restore a thread's fp context.
 */
SYM_FUNC_START(_restore_fp)
	fpu_restore_double	a0 t1		# clobbers t1
	fpu_restore_csr		a0 t1 t2
	fpu_restore_cc		a0 t1 t2	# clobbers t1, t2
	jr	ra
SYM_FUNC_END(_restore_fp)

#ifdef CONFIG_CPU_HAS_LSX

/*
 * Save a thread's LSX vector context.
 */
SYM_FUNC_START(_save_lsx)
	lsx_save_all	a0 t1 t2
	jr	ra
SYM_FUNC_END(_save_lsx)
EXPORT_SYMBOL(_save_lsx)

/*
 * Restore a thread's LSX vector context.
 */
SYM_FUNC_START(_restore_lsx)
	lsx_restore_all	a0 t1 t2
	jr	ra
SYM_FUNC_END(_restore_lsx)

SYM_FUNC_START(_save_lsx_upper)
	lsx_save_all_upper	a0 t0 t1
	jr	ra
SYM_FUNC_END(_save_lsx_upper)

SYM_FUNC_START(_restore_lsx_upper)
	lsx_restore_all_upper	a0 t0 t1
	jr	ra
SYM_FUNC_END(_restore_lsx_upper)

SYM_FUNC_START(_init_lsx_upper)
	lsx_init_all_upper	t1
	jr	ra
SYM_FUNC_END(_init_lsx_upper)
#endif

#ifdef CONFIG_CPU_HAS_LASX

/*
 * Save a thread's LASX vector context.
 */
SYM_FUNC_START(_save_lasx)
	lasx_save_all	a0 t1 t2
	jr	ra
SYM_FUNC_END(_save_lasx)
EXPORT_SYMBOL(_save_lasx)

/*
 * Restore a thread's LASX vector context.
 */
SYM_FUNC_START(_restore_lasx)
	lasx_restore_all	a0 t1 t2
	jr	ra
SYM_FUNC_END(_restore_lasx)

SYM_FUNC_START(_save_lasx_upper)
	lasx_save_all_upper	a0 t0 t1
	jr	ra
SYM_FUNC_END(_save_lasx_upper)

SYM_FUNC_START(_restore_lasx_upper)
	lasx_restore_all_upper	a0 t0 t1
	jr	ra
SYM_FUNC_END(_restore_lasx_upper)

SYM_FUNC_START(_init_lasx_upper)
	lasx_init_all_upper	t1
	jr	ra
SYM_FUNC_END(_init_lasx_upper)
#endif

/*
 * Load the FPU with signalling NaNs. The bit pattern we use represents
 * a signalling NaN whether it is interpreted as single or as double
 * precision.
 *
 * The value to initialize fcsr0 with comes in $a0.
 */

SYM_FUNC_START(_init_fpu)
	li.w	t1, CSR_EUEN_FPEN
	csrxchg	t1, t1, LOONGARCH_CSR_EUEN

	movgr2fcsr	fcsr0, a0

	li.w	t1, -1				# SNaN

	movgr2fr.d	$f0, t1
	movgr2fr.d	$f1, t1
	movgr2fr.d	$f2, t1
	movgr2fr.d	$f3, t1
	movgr2fr.d	$f4, t1
	movgr2fr.d	$f5, t1
	movgr2fr.d	$f6, t1
	movgr2fr.d	$f7, t1
	movgr2fr.d	$f8, t1
	movgr2fr.d	$f9, t1
	movgr2fr.d	$f10, t1
	movgr2fr.d	$f11, t1
	movgr2fr.d	$f12, t1
	movgr2fr.d	$f13, t1
	movgr2fr.d	$f14, t1
	movgr2fr.d	$f15, t1
	movgr2fr.d	$f16, t1
	movgr2fr.d	$f17, t1
	movgr2fr.d	$f18, t1
	movgr2fr.d	$f19, t1
	movgr2fr.d	$f20, t1
	movgr2fr.d	$f21, t1
	movgr2fr.d	$f22, t1
	movgr2fr.d	$f23, t1
	movgr2fr.d	$f24, t1
	movgr2fr.d	$f25, t1
	movgr2fr.d	$f26, t1
	movgr2fr.d	$f27, t1
	movgr2fr.d	$f28, t1
	movgr2fr.d	$f29, t1
	movgr2fr.d	$f30, t1
	movgr2fr.d	$f31, t1

	jr	ra
SYM_FUNC_END(_init_fpu)

/*
 * a0: fpregs
 * a1: fcc
 * a2: fcsr
 */
SYM_FUNC_START(_save_fp_context)
	sc_save_fcc	a1 t1 t2
	sc_save_fcsr	a2 t1
	sc_save_fp	a0
	li.w	a0, 0				# success
	jr	ra
SYM_FUNC_END(_save_fp_context)

/*
 * a0: fpregs
 * a1: fcc
 * a2: fcsr
 */
SYM_FUNC_START(_restore_fp_context)
	sc_restore_fp	a0
	sc_restore_fcc	a1 t1 t2
	sc_restore_fcsr	a2 t1
	li.w	a0, 0				# success
	jr	ra
SYM_FUNC_END(_restore_fp_context)

/*
 * a0: fpregs
 * a1: fcc
 * a2: fcsr
 */
SYM_FUNC_START(_save_lsx_context)
	sc_save_fcc	a1, t0, t1
	sc_save_fcsr	a2, t0
	sc_save_lsx	a0
	li.w	a0, 0				# success
	jr	ra
SYM_FUNC_END(_save_lsx_context)

/*
 * a0: fpregs
 * a1: fcc
 * a2: fcsr
 */
SYM_FUNC_START(_restore_lsx_context)
	sc_restore_lsx	a0
	sc_restore_fcc	a1, t1, t2
	sc_restore_fcsr	a2, t1
	li.w	a0, 0				# success
	jr	ra
SYM_FUNC_END(_restore_lsx_context)

/*
 * a0: fpregs
 * a1: fcc
 * a2: fcsr
 */
SYM_FUNC_START(_save_lasx_context)
	sc_save_fcc	a1, t0, t1
	sc_save_fcsr	a2, t0
	sc_save_lasx	a0
	li.w	a0, 0				# success
	jr	ra
SYM_FUNC_END(_save_lasx_context)

/*
 * a0: fpregs
 * a1: fcc
 * a2: fcsr
 */
SYM_FUNC_START(_restore_lasx_context)
	sc_restore_lasx	a0
	sc_restore_fcc	a1, t1, t2
	sc_restore_fcsr	a2, t1
	li.w	a0, 0				# success
	jr	ra
SYM_FUNC_END(_restore_lasx_context)

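/*
 * Common fixup target for all EX-wrapped accesses above: report the
 * fault to the caller.
 */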
.L_fpu_fault:
	li.w	a0, -EFAULT			# failure
	jr	ra