/* Save current context and jump to a new context.
   Copyright (C) 2005-2018 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

/* This is the common implementation of swapcontext for powerpc32.
   It is not complete in itself and should be included into a framework
   that defines:
     __CONTEXT_FUNC_NAME
   and if appropriate:
     __CONTEXT_ENABLE_FPRS
     __CONTEXT_ENABLE_VRS
   Any architecture that implements the Vector unit is assumed to also
   implement the floating-point unit.  */

/* Stack frame offsets.  */
#define _FRAME_BACKCHAIN	0
#define _FRAME_LR_SAVE	4
#define _FRAME_PARM_SAVE1	8
#define _FRAME_PARM_SAVE2	12
#define _FRAME_PARM_SAVE3	16
#define _FRAME_PARM_SAVE4	20

#ifdef __CONTEXT_ENABLE_VRS
	.machine "altivec"
#endif
ENTRY(__CONTEXT_FUNC_NAME)
	stwu r1,-16(r1)
	cfi_adjust_cfa_offset (16)
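	/* Push a small 16-byte frame so the incoming parameter registers can
	   be spilled and reloaded across the call to __sigprocmask below.  */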
	/* Ensure that the _UC_REGS start on a quadword boundary.  */
	stw r3,_FRAME_PARM_SAVE1(r1)
	addi r3,r3,_UC_REG_SPACE+12
	stw r4,_FRAME_PARM_SAVE2(r1)	/* new context pointer */
	clrrwi r3,r3,4

	/* Save the general purpose registers */
	stw r0,_UC_GREGS+(PT_R0*4)(r3)
	mflr r0
	stw r2,_UC_GREGS+(PT_R2*4)(r3)
	stw r4,_UC_GREGS+(PT_R4*4)(r3)
	/* Set the caller's LR_SAVE, and the ucontext LR and NIP to the
	   caller's return address.  */
	stw r0,_UC_GREGS+(PT_LNK*4)(r3)
	stw r0,_UC_GREGS+(PT_NIP*4)(r3)
	stw r0,_FRAME_LR_SAVE+16(r1)
	cfi_offset (lr, _FRAME_LR_SAVE)
	stw r5,_UC_GREGS+(PT_R5*4)(r3)
	stw r6,_UC_GREGS+(PT_R6*4)(r3)
	stw r7,_UC_GREGS+(PT_R7*4)(r3)
	stw r8,_UC_GREGS+(PT_R8*4)(r3)
	stw r9,_UC_GREGS+(PT_R9*4)(r3)
	stw r10,_UC_GREGS+(PT_R10*4)(r3)
	stw r11,_UC_GREGS+(PT_R11*4)(r3)
	stw r12,_UC_GREGS+(PT_R12*4)(r3)
	stw r13,_UC_GREGS+(PT_R13*4)(r3)
	stw r14,_UC_GREGS+(PT_R14*4)(r3)
	stw r15,_UC_GREGS+(PT_R15*4)(r3)
	stw r16,_UC_GREGS+(PT_R16*4)(r3)
	stw r17,_UC_GREGS+(PT_R17*4)(r3)
	stw r18,_UC_GREGS+(PT_R18*4)(r3)
	stw r19,_UC_GREGS+(PT_R19*4)(r3)
	stw r20,_UC_GREGS+(PT_R20*4)(r3)
	stw r21,_UC_GREGS+(PT_R21*4)(r3)
	stw r22,_UC_GREGS+(PT_R22*4)(r3)
	stw r23,_UC_GREGS+(PT_R23*4)(r3)
	stw r24,_UC_GREGS+(PT_R24*4)(r3)
	stw r25,_UC_GREGS+(PT_R25*4)(r3)
	stw r26,_UC_GREGS+(PT_R26*4)(r3)
	stw r27,_UC_GREGS+(PT_R27*4)(r3)
	stw r28,_UC_GREGS+(PT_R28*4)(r3)
	stw r29,_UC_GREGS+(PT_R29*4)(r3)
	stw r30,_UC_GREGS+(PT_R30*4)(r3)
	stw r31,_UC_GREGS+(PT_R31*4)(r3)

	/* Save the value of R1.  We had to push the stack before we
	   had the address of uc_reg_space.  So compute the address of
	   the caller's stack pointer and save it as R1.  */
	addi r8,r1,16
	li r0,0
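	/* r0 = 0 is stored below as the saved context's return value and into
	   the kernel-only MSR and MQ slots.  */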
	/* Save the count, exception and condition registers.  */
	mfctr r11
	mfxer r10
	mfcr r9
	stw r8,_UC_GREGS+(PT_R1*4)(r3)
	stw r11,_UC_GREGS+(PT_CTR*4)(r3)
	stw r10,_UC_GREGS+(PT_XER*4)(r3)
	stw r9,_UC_GREGS+(PT_CCR*4)(r3)
	/* Set the return value of getcontext to "success".  R3 is the only
	   register whose value is not preserved in the saved context.  */
	stw r0,_UC_GREGS+(PT_R3*4)(r3)

	/* Zero fill fields that can't be set in user state.  */
	stw r0,_UC_GREGS+(PT_MSR*4)(r3)
	stw r0,_UC_GREGS+(PT_MQ*4)(r3)

#ifdef __CONTEXT_ENABLE_FPRS
	/* Save the floating-point registers */
	stfd fp0,_UC_FREGS+(0*8)(r3)
	stfd fp1,_UC_FREGS+(1*8)(r3)
	stfd fp2,_UC_FREGS+(2*8)(r3)
	stfd fp3,_UC_FREGS+(3*8)(r3)
	stfd fp4,_UC_FREGS+(4*8)(r3)
	stfd fp5,_UC_FREGS+(5*8)(r3)
	stfd fp6,_UC_FREGS+(6*8)(r3)
	stfd fp7,_UC_FREGS+(7*8)(r3)
	stfd fp8,_UC_FREGS+(8*8)(r3)
	stfd fp9,_UC_FREGS+(9*8)(r3)
	stfd fp10,_UC_FREGS+(10*8)(r3)
	stfd fp11,_UC_FREGS+(11*8)(r3)
	stfd fp12,_UC_FREGS+(12*8)(r3)
	stfd fp13,_UC_FREGS+(13*8)(r3)
	stfd fp14,_UC_FREGS+(14*8)(r3)
	stfd fp15,_UC_FREGS+(15*8)(r3)
	stfd fp16,_UC_FREGS+(16*8)(r3)
	stfd fp17,_UC_FREGS+(17*8)(r3)
	stfd fp18,_UC_FREGS+(18*8)(r3)
	stfd fp19,_UC_FREGS+(19*8)(r3)
	stfd fp20,_UC_FREGS+(20*8)(r3)
	stfd fp21,_UC_FREGS+(21*8)(r3)
	stfd fp22,_UC_FREGS+(22*8)(r3)
	stfd fp23,_UC_FREGS+(23*8)(r3)
	stfd fp24,_UC_FREGS+(24*8)(r3)
	stfd fp25,_UC_FREGS+(25*8)(r3)
	stfd fp26,_UC_FREGS+(26*8)(r3)
	stfd fp27,_UC_FREGS+(27*8)(r3)
	stfd fp28,_UC_FREGS+(28*8)(r3)
	stfd fp29,_UC_FREGS+(29*8)(r3)
	mffs fp0
	stfd fp30,_UC_FREGS+(30*8)(r3)
	stfd fp31,_UC_FREGS+(31*8)(r3)
	stfd fp0,_UC_FREGS+(32*8)(r3)

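	/* Load the low word of the hwcap mask so the AltiVec feature bit can
	   be tested below; shared builds read it from _rtld_global_ro, other
	   builds read _dl_hwcap directly.  */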
# ifdef PIC
	mflr r8
#  define got_label GENERATE_GOT_LABEL (__CONTEXT_FUNC_NAME)
	SETUP_GOT_ACCESS(r7,got_label)
	addis r7,r7,_GLOBAL_OFFSET_TABLE_-got_label@ha
	addi r7,r7,_GLOBAL_OFFSET_TABLE_-got_label@l
#  ifdef SHARED
	lwz r7,_rtld_global_ro@got(r7)
	mtlr r8
	lwz r7,RTLD_GLOBAL_RO_DL_HWCAP_OFFSET+LOWORD(r7)
#  else
	lwz r7,_dl_hwcap@got(r7)
	mtlr r8
	lwz r7,LOWORD(r7)
#  endif
# else
	lis r7,(_dl_hwcap+LOWORD)@ha
	lwz r7,(_dl_hwcap+LOWORD)@l(r7)
# endif

# ifdef __CONTEXT_ENABLE_VRS
	andis. r6,r7,(PPC_FEATURE_HAS_ALTIVEC >> 16)

	la r10,(_UC_VREGS)(r3)
	la r9,(_UC_VREGS+16)(r3)
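	/* r10 and r9 walk the even and odd 16-byte save slots so the vector
	   stores below can proceed in pairs.  */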

	/* beq L(no_vec)*/
	beq 2f
	/* address of the combined VSCR/VSAVE quadword.  */
	la r8,(_UC_VREGS+512)(r3)

	/* Save the vector registers */
	stvx v0,0,r10
	stvx v1,0,r9
	addi r10,r10,32
	addi r9,r9,32
	/* We need to get the Vector Status and Control Register early to avoid
	   store order problems later with the VSAVE register that shares the
	   same quadword.  */
	mfvscr v0

	stvx v2,0,r10
	stvx v3,0,r9
	addi r10,r10,32
	addi r9,r9,32

	stvx v0,0,r8

	stvx v4,0,r10
	stvx v5,0,r9
	addi r10,r10,32
	addi r9,r9,32

	stvx v6,0,r10
	stvx v7,0,r9
	addi r10,r10,32
	addi r9,r9,32

	stvx v8,0,r10
	stvx v9,0,r9
	addi r10,r10,32
	addi r9,r9,32

	stvx v10,0,r10
	stvx v11,0,r9
	addi r10,r10,32
	addi r9,r9,32

	stvx v12,0,r10
	stvx v13,0,r9
	addi r10,r10,32
	addi r9,r9,32

	stvx v14,0,r10
	stvx v15,0,r9
	addi r10,r10,32
	addi r9,r9,32

	stvx v16,0,r10
	stvx v17,0,r9
	addi r10,r10,32
	addi r9,r9,32

	stvx v18,0,r10
	stvx v19,0,r9
	addi r10,r10,32
	addi r9,r9,32

	stvx v20,0,r10
	stvx v21,0,r9
	addi r10,r10,32
	addi r9,r9,32

	stvx v22,0,r10
	stvx v23,0,r9
	addi r10,r10,32
	addi r9,r9,32

	stvx v24,0,r10
	stvx v25,0,r9
	addi r10,r10,32
	addi r9,r9,32

	stvx v26,0,r10
	stvx v27,0,r9
	addi r10,r10,32
	addi r9,r9,32

	stvx v28,0,r10
	stvx v29,0,r9
	addi r10,r10,32
	addi r9,r9,32

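	/* Store VRSAVE into the first word of the VSCR/VSAVE quadword written
	   above.  */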
	mfspr r0,VRSAVE
	stvx v30,0,r10
	stvx v31,0,r9
	stw r0,0(r8)

2:	/*L(no_vec):*/
# endif /* __CONTEXT_ENABLE_VRS */
#endif /* __CONTEXT_ENABLE_FPRS */

#ifdef __CONTEXT_ENABLE_E500
	getcontext_e500
#endif

	/* Restore ucontext (parm1) from stack.  */
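	/* Record the quadword-aligned register area in the saved context,
	   then switch signal masks: install the new context's mask and write
	   the old mask back into the saved context.  */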
	lwz r12,_FRAME_PARM_SAVE1(r1)
	lwz r4,_FRAME_PARM_SAVE2(r1)
	addi r4,r4,_UC_SIGMASK
	stw r3,_UC_REGS_PTR(r12)
	addi r5,r12,_UC_SIGMASK
	li r3,SIG_SETMASK
	bl __sigprocmask@local
	cmpwi r3,0
	bne 3f /* L(error_exit) */

	/*
	 * If the new ucontext refers to the point where we were interrupted
	 * by a signal, we have to use the rt_sigreturn system call to
	 * return to the context so we get both LR and CTR restored.
	 *
	 * Otherwise, the context we are restoring is either just after
	 * a procedure call (getcontext/swapcontext) or at the beginning
	 * of a procedure call (makecontext), so we don't need to restore
	 * r0, xer, ctr.  We don't restore r2 since it will be used as
	 * the TLS pointer.
	 */
	lwz r4,_FRAME_PARM_SAVE2(r1)
	lwz r31,_UC_REGS_PTR(r4)
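	/* getcontext/swapcontext store a zero MSR above, so a nonzero value
	   marks a context saved by the kernel on signal delivery; such a
	   context must be resumed with rt_sigreturn.  */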
	lwz r0,_UC_GREGS+(PT_MSR*4)(r31)
	cmpwi r0,0
	bne 4f /* L(do_sigret) */

#ifdef __CONTEXT_ENABLE_FPRS
# ifdef __CONTEXT_ENABLE_VRS

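	/* Reload the hwcap word, as in the save path, to decide whether the
	   vector state has to be restored.  */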
#  ifdef PIC
	mflr r8
	SETUP_GOT_ACCESS(r7,got_label)
	addis r7,r7,_GLOBAL_OFFSET_TABLE_-got_label@ha
	addi r7,r7,_GLOBAL_OFFSET_TABLE_-got_label@l
	mtlr r8
#   ifdef SHARED
	lwz r7,_rtld_global_ro@got(r7)
	lwz r7,RTLD_GLOBAL_RO_DL_HWCAP_OFFSET+LOWORD(r7)
#   else
	lwz r7,_dl_hwcap@got(r7)
	lwz r7,LOWORD(r7)
#   endif
#  else
	lis r7,(_dl_hwcap+LOWORD)@ha
	lwz r7,(_dl_hwcap+LOWORD)@l(r7)
#  endif
	andis. r7,r7,(PPC_FEATURE_HAS_ALTIVEC >> 16)
	la r10,(_UC_VREGS)(r31)
	beq 6f /* L(has_no_vec) */

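	/* The word after the 32 vector save slots holds VRSAVE; if it is zero
	   no vector registers were live, so skip the individual reloads.  */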
	lwz r0,(32*16)(r10)
	li r9,(32*16)
	cmpwi r0,0
	mtspr VRSAVE,r0
	beq 6f /* L(has_no_vec) */

	lvx v19,r9,r10
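	/* v19 is borrowed to hold the saved VSCR image from the quadword at
	   offset 512; its real value is reloaded in sequence below.  */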
	la r9,(16)(r10)

	lvx v0,0,r10
	lvx v1,0,r9
	addi r10,r10,32
	addi r9,r9,32

	mtvscr v19
	lvx v2,0,r10
	lvx v3,0,r9
	addi r10,r10,32
	addi r9,r9,32

	lvx v4,0,r10
	lvx v5,0,r9
	addi r10,r10,32
	addi r9,r9,32

	lvx v6,0,r10
	lvx v7,0,r9
	addi r10,r10,32
	addi r9,r9,32

	lvx v8,0,r10
	lvx v9,0,r9
	addi r10,r10,32
	addi r9,r9,32

	lvx v10,0,r10
	lvx v11,0,r9
	addi r10,r10,32
	addi r9,r9,32

	lvx v12,0,r10
	lvx v13,0,r9
	addi r10,r10,32
	addi r9,r9,32

	lvx v14,0,r10
	lvx v15,0,r9
	addi r10,r10,32
	addi r9,r9,32

	lvx v16,0,r10
	lvx v17,0,r9
	addi r10,r10,32
	addi r9,r9,32

	lvx v18,0,r10
	lvx v19,0,r9
	addi r10,r10,32
	addi r9,r9,32

	lvx v20,0,r10
	lvx v21,0,r9
	addi r10,r10,32
	addi r9,r9,32

	lvx v22,0,r10
	lvx v23,0,r9
	addi r10,r10,32
	addi r9,r9,32

	lvx v24,0,r10
	lvx v25,0,r9
	addi r10,r10,32
	addi r9,r9,32

	lvx v26,0,r10
	lvx v27,0,r9
	addi r10,r10,32
	addi r9,r9,32

	lvx v28,0,r10
	lvx v29,0,r9
	addi r10,r10,32
	addi r9,r9,32

	lvx v30,0,r10
	lvx v31,0,r9
	addi r10,r10,32
	addi r9,r9,32

	lvx v10,0,r10
	lvx v11,0,r9

6:	/* L(has_no_vec): */
# endif /* __CONTEXT_ENABLE_VRS */
	/* Restore the floating-point registers */
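	/* fp31 is borrowed to hold the saved FPSCR image (slot 32); its real
	   value is reloaded at the end of the sequence.  */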
	lfd fp31,_UC_FREGS+(32*8)(r31)
	lfd fp0,_UC_FREGS+(0*8)(r31)
# ifdef _ARCH_PWR6
	/* Use the extended four-operand version of the mtfsf insn.  */
	mtfsf 0xff,fp31,1,0
# else
	.machine push
	.machine "power6"
	/* Availability of DFP indicates a 64-bit FPSCR.  */
	andi. r6,r7,PPC_FEATURE_HAS_DFP
	beq 7f
	/* Use the extended four-operand version of the mtfsf insn.  */
	mtfsf 0xff,fp31,1,0
	b 8f
	/* Continue to operate on the FPSCR as if it were 32-bits.  */
7:	mtfsf 0xff,fp31
8:	.machine pop
# endif /* _ARCH_PWR6 */
	lfd fp1,_UC_FREGS+(1*8)(r31)
	lfd fp2,_UC_FREGS+(2*8)(r31)
	lfd fp3,_UC_FREGS+(3*8)(r31)
	lfd fp4,_UC_FREGS+(4*8)(r31)
	lfd fp5,_UC_FREGS+(5*8)(r31)
	lfd fp6,_UC_FREGS+(6*8)(r31)
	lfd fp7,_UC_FREGS+(7*8)(r31)
	lfd fp8,_UC_FREGS+(8*8)(r31)
	lfd fp9,_UC_FREGS+(9*8)(r31)
	lfd fp10,_UC_FREGS+(10*8)(r31)
	lfd fp11,_UC_FREGS+(11*8)(r31)
	lfd fp12,_UC_FREGS+(12*8)(r31)
	lfd fp13,_UC_FREGS+(13*8)(r31)
	lfd fp14,_UC_FREGS+(14*8)(r31)
	lfd fp15,_UC_FREGS+(15*8)(r31)
	lfd fp16,_UC_FREGS+(16*8)(r31)
	lfd fp17,_UC_FREGS+(17*8)(r31)
	lfd fp18,_UC_FREGS+(18*8)(r31)
	lfd fp19,_UC_FREGS+(19*8)(r31)
	lfd fp20,_UC_FREGS+(20*8)(r31)
	lfd fp21,_UC_FREGS+(21*8)(r31)
	lfd fp22,_UC_FREGS+(22*8)(r31)
	lfd fp23,_UC_FREGS+(23*8)(r31)
	lfd fp24,_UC_FREGS+(24*8)(r31)
	lfd fp25,_UC_FREGS+(25*8)(r31)
	lfd fp26,_UC_FREGS+(26*8)(r31)
	lfd fp27,_UC_FREGS+(27*8)(r31)
	lfd fp28,_UC_FREGS+(28*8)(r31)
	lfd fp29,_UC_FREGS+(29*8)(r31)
	lfd fp30,_UC_FREGS+(30*8)(r31)
	lfd fp31,_UC_FREGS+(31*8)(r31)
#endif /* __CONTEXT_ENABLE_FPRS */

#ifdef __CONTEXT_ENABLE_E500
	setcontext_e500
#endif

	/* Restore LR and CCR, and set CTR to the NIP value */
	lwz r3,_UC_GREGS+(PT_LNK*4)(r31)
	lwz r4,_UC_GREGS+(PT_NIP*4)(r31)
	lwz r5,_UC_GREGS+(PT_CCR*4)(r31)
	mtlr r3
	mtctr r4
	mtcr r5

	/* Restore the general registers */
	lwz r3,_UC_GREGS+(PT_R3*4)(r31)
	lwz r4,_UC_GREGS+(PT_R4*4)(r31)
	lwz r5,_UC_GREGS+(PT_R5*4)(r31)
	lwz r6,_UC_GREGS+(PT_R6*4)(r31)
	lwz r7,_UC_GREGS+(PT_R7*4)(r31)
	lwz r8,_UC_GREGS+(PT_R8*4)(r31)
	lwz r9,_UC_GREGS+(PT_R9*4)(r31)
	lwz r10,_UC_GREGS+(PT_R10*4)(r31)
	lwz r11,_UC_GREGS+(PT_R11*4)(r31)
	lwz r12,_UC_GREGS+(PT_R12*4)(r31)
	lwz r13,_UC_GREGS+(PT_R13*4)(r31)
	lwz r14,_UC_GREGS+(PT_R14*4)(r31)
	lwz r15,_UC_GREGS+(PT_R15*4)(r31)
	lwz r16,_UC_GREGS+(PT_R16*4)(r31)
	lwz r17,_UC_GREGS+(PT_R17*4)(r31)
	lwz r18,_UC_GREGS+(PT_R18*4)(r31)
	lwz r19,_UC_GREGS+(PT_R19*4)(r31)
	lwz r20,_UC_GREGS+(PT_R20*4)(r31)
	lwz r21,_UC_GREGS+(PT_R21*4)(r31)
	lwz r22,_UC_GREGS+(PT_R22*4)(r31)
	lwz r23,_UC_GREGS+(PT_R23*4)(r31)
	lwz r24,_UC_GREGS+(PT_R24*4)(r31)
	lwz r25,_UC_GREGS+(PT_R25*4)(r31)
	lwz r26,_UC_GREGS+(PT_R26*4)(r31)
	lwz r27,_UC_GREGS+(PT_R27*4)(r31)
	lwz r28,_UC_GREGS+(PT_R28*4)(r31)
	lwz r29,_UC_GREGS+(PT_R29*4)(r31)
	lwz r30,_UC_GREGS+(PT_R30*4)(r31)
	lwz r1,_UC_GREGS+(PT_R1*4)(r31)
	lwz r31,_UC_GREGS+(PT_R31*4)(r31)

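	/* Jump to the restored NIP via CTR; LR already holds the context's
	   link register value.  */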
	bctr

3:	/*L(error_exit):*/
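	/* __sigprocmask failed; pop the frame and return its error value,
	   still in r3, to the caller.  */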
	lwz r0,_FRAME_LR_SAVE+16(r1)
	addi r1,r1,16
	mtlr r0
	blr

4:	/*L(do_sigret):*/
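	/* The target context was saved by the kernel at signal delivery;
	   rebuild the stack pointer rt_sigreturn expects from the ucontext
	   address and let the kernel restore the full register state.  */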
	addi r1,r4,-0xd0
	li r0,SYS_ify(rt_sigreturn)
	sc
	/* NOTREACHED */

END(__CONTEXT_FUNC_NAME)