/* sysdeps/powerpc/powerpc64/setjmp-common.S (thirdparty/glibc),
   revision "PowerPC64 ABI fixes".  */
1 /* setjmp for PowerPC64.
2 Copyright (C) 1995-2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3 This file is part of the GNU C Library.
4
5 The GNU C Library is free software; you can redistribute it and/or
6 modify it under the terms of the GNU Lesser General Public
7 License as published by the Free Software Foundation; either
8 version 2.1 of the License, or (at your option) any later version.
9
10 The GNU C Library is distributed in the hope that it will be useful,
11 but WITHOUT ANY WARRANTY; without even the implied warranty of
12 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 Lesser General Public License for more details.
14
15 You should have received a copy of the GNU Lesser General Public
16 License along with the GNU C Library; if not, write to the Free
17 Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
18 02111-1307 USA. */
19
20 #include <sysdep.h>
21 #define _ASM
22 #ifdef __NO_VMX__
23 #include <novmxsetjmp.h>
24 #else
25 #include <jmpbuf-offsets.h>
26 #endif
27 #include <bp-sym.h>
28 #include <bp-asm.h>
29
/* TOC entry used by the VMX save path below to locate the hardware
   capability word so AltiVec support can be tested at runtime.  In
   shared builds the hwcap lives inside _rtld_global_ro (read via
   RTLD_GLOBAL_RO_DL_HWCAP_OFFSET); in static builds it is the plain
   _dl_hwcap symbol, loaded directly.  */
30 #ifndef __NO_VMX__
31 .section ".toc","aw"
32 .LC__dl_hwcap:
33 # ifdef SHARED
34 .tc _rtld_global_ro[TC],_rtld_global_ro
35 # else
36 .tc _dl_hwcap[TC],_dl_hwcap
37 # endif
38 .section ".text"
39 #endif
40
/* int setjmp (jmp_buf env)
   Public setjmp: save context AND the signal mask.  Implemented as a
   tail branch to the common __sigsetjmp entry point with the savemask
   argument (r4) preset to 1; r3 (env) passes through unchanged.
   The "altivec" machine directive permits the AltiVec instructions
   used later in this file.  */
41 .machine "altivec"
42 ENTRY (setjmp)
43 CALL_MCOUNT 1
44 li r4,1 /* Set second argument to 1. */
45 b JUMPTARGET (GLUE(__sigsetjmp,_ent))
46 END (setjmp)
47
48 #if defined SHARED && !defined IS_IN_rtld && !defined __NO_VMX__
49 /* When called from within libc we need a special version of _setjmp
50 that saves r2 since the call won't go via a plt call stub. See
51 bugz #269. __GI__setjmp is used in csu/libc-start.c when
52 HAVE_CLEANUP_JMP_BUF is defined. */
/* This entry stores the caller's TOC pointer (r2) into the TOC save
   slot of the caller's stack frame (r1+40), then falls straight
   through into _setjmp below: END_2 (after cfi_endproc) closes the
   symbol without emitting a blr, so no branch is needed.  The
   __sigsetjmp common code later reloads r2 from r1+40 — see the
   SHARED case there.  */
53 ENTRY (BP_SYM (__GI__setjmp))
54 std r2,40(r1) /* Save the callers TOC in the save area. */
55 cfi_endproc
56 END_2 (BP_SYM (__GI__setjmp))
57 /* Fall thru. */
58 #endif
59
/* int _setjmp (jmp_buf env)
   Like setjmp but does NOT save the signal mask: tail branch to the
   common __sigsetjmp entry with savemask (r4) preset to 0.  Also the
   fall-through target of __GI__setjmp above in shared builds.  */
60 ENTRY (BP_SYM (_setjmp))
61 CALL_MCOUNT 1
62 li r4,0 /* Set second argument to 0. */
63 b JUMPTARGET (GLUE(__sigsetjmp,_ent))
64 END (BP_SYM (_setjmp))
65 libc_hidden_def (_setjmp)
66
/* int __sigsetjmp (jmp_buf env, int savemask)
   In:  r3 = env (jmp_buf), r4 = savemask (preset by the setjmp/_setjmp
        stubs that branch to the __sigsetjmp_ent label below).
   Saves the ABI's non-volatile state into *env: r1 (SP), r2 (TOC),
   LR, CR, r14-r31, fp14-fp31, and — when the hardware has AltiVec —
   VRSAVE and v20-v31.  Finishes by handing off to __sigjmp_save
   (except inside ld.so, which returns 0 directly).  */
67 ENTRY (BP_SYM (__sigsetjmp))
68 CALL_MCOUNT 2
69 JUMPTARGET(GLUE(__sigsetjmp,_ent)):
70 CHECK_BOUNDS_BOTH_WIDE_LIT (r3, r8, r9, JB_SIZE)
/* Save the stack pointer, mangled when PTR_MANGLE hardening is
   enabled so a corrupted jmp_buf cannot be used to pivot the stack.  */
71 #ifdef PTR_MANGLE
72 mr r5, r1
73 PTR_MANGLE (r5, r6)
74 std r5,(JB_GPR1*8)(3)
75 #else
76 std r1,(JB_GPR1*8)(3)
77 #endif
78 mflr r0
/* In shared libc the call arrived without a PLT stub, so r2 holds
   libc's own TOC; the CALLER's TOC was stashed at r1+40 (the stack
   frame's TOC save slot) by the __GI_* entry above — save that one.  */
79 #if defined SHARED && !defined IS_IN_rtld
80 ld r5,40(r1) /* Retrieve the callers TOC. */
81 std r5,(JB_GPR2*8)(3)
82 #else
83 std r2,(JB_GPR2*8)(3)
84 #endif
/* Save the non-volatile GPRs r14-r31 and FPRs fp14-fp31, with the
   GPR/FPR stores interleaved; LR (mangled if enabled) and CR are
   saved along the way.  */
85 std r14,((JB_GPRS+0)*8)(3)
86 stfd fp14,((JB_FPRS+0)*8)(3)
87 #ifdef PTR_MANGLE
88 PTR_MANGLE2 (r0, r6)
89 #endif
90 std r0,(JB_LR*8)(3)
91 std r15,((JB_GPRS+1)*8)(3)
92 stfd fp15,((JB_FPRS+1)*8)(3)
93 mfcr r0
94 std r16,((JB_GPRS+2)*8)(3)
95 stfd fp16,((JB_FPRS+2)*8)(3)
96 std r0,(JB_CR*8)(3)
97 std r17,((JB_GPRS+3)*8)(3)
98 stfd fp17,((JB_FPRS+3)*8)(3)
99 std r18,((JB_GPRS+4)*8)(3)
100 stfd fp18,((JB_FPRS+4)*8)(3)
101 std r19,((JB_GPRS+5)*8)(3)
102 stfd fp19,((JB_FPRS+5)*8)(3)
103 std r20,((JB_GPRS+6)*8)(3)
104 stfd fp20,((JB_FPRS+6)*8)(3)
105 std r21,((JB_GPRS+7)*8)(3)
106 stfd fp21,((JB_FPRS+7)*8)(3)
107 std r22,((JB_GPRS+8)*8)(3)
108 stfd fp22,((JB_FPRS+8)*8)(3)
109 std r23,((JB_GPRS+9)*8)(3)
110 stfd fp23,((JB_FPRS+9)*8)(3)
111 std r24,((JB_GPRS+10)*8)(3)
112 stfd fp24,((JB_FPRS+10)*8)(3)
113 std r25,((JB_GPRS+11)*8)(3)
114 stfd fp25,((JB_FPRS+11)*8)(3)
115 std r26,((JB_GPRS+12)*8)(3)
116 stfd fp26,((JB_FPRS+12)*8)(3)
117 std r27,((JB_GPRS+13)*8)(3)
118 stfd fp27,((JB_FPRS+13)*8)(3)
119 std r28,((JB_GPRS+14)*8)(3)
120 stfd fp28,((JB_FPRS+14)*8)(3)
121 std r29,((JB_GPRS+15)*8)(3)
122 stfd fp29,((JB_FPRS+15)*8)(3)
123 std r30,((JB_GPRS+16)*8)(3)
124 stfd fp30,((JB_FPRS+16)*8)(3)
125 std r31,((JB_GPRS+17)*8)(3)
126 stfd fp31,((JB_FPRS+17)*8)(3)
/* If the hardware supports AltiVec (tested via _dl_hwcap, reached
   through the .LC__dl_hwcap TOC entry defined at the top of this
   file), also save VRSAVE and the non-volatile vectors v20-v31.  */
127 #ifndef __NO_VMX__
128 ld r6,.LC__dl_hwcap@toc(r2)
129 # ifdef SHARED
130 /* Load _rtld-global._dl_hwcap. */
131 ld r6,RTLD_GLOBAL_RO_DL_HWCAP_OFFSET(r6)
132 # else
133 ld r6,0(r6) /* Load extern _dl_hwcap. */
134 # endif
135 andis. r6,r6,(PPC_FEATURE_HAS_ALTIVEC >> 16)
136 beq L(no_vmx)
137 la r5,((JB_VRS)*8)(3)
/* stvx stores only to 16-byte-aligned addresses, but the vector save
   area inside the jmp_buf may not be 16-byte aligned: test the low
   four address bits and take a merging slow path if misaligned.
   r5/r6 are kept 16 bytes apart and leapfrog by 32 in both paths.  */
138 andi. r6,r5,0xf
139 mfspr r0,VRSAVE
140 stw r0,((JB_VRSAVE)*8)(3)
141 addi r6,r5,16
142 beq+ L(aligned_save_vmx)
/* Misaligned save: lvsr yields a rotate-control for the misalignment,
   and v3 becomes a byte-select mask (all-ones bytes on the left for
   the misaligned prefix), so each rotated vector can be merged with
   its neighbour and written with aligned stvx stores.  */
143 lvsr v0,0,r5
144 vspltisb v1,-1 /* set v1 to all 1's */
145 vspltisb v2,0 /* set v2 to all 0's */
146 vperm v3,v2,v1,v0 /* v3 contains shift mask with num all 1 bytes
147 on left = misalignment */
148
149
150 /* Special case for v20 we need to preserve what is in save area
151 below v20 before obliterating it */
152 lvx v5,0,r5
153 vperm v20,v20,v20,v0
154 vsel v5,v5,v20,v3
155 vsel v20,v20,v2,v3
156 stvx v5,0,r5
157
/* Rotate savevr, select its bytes against the tail of the previously
   rotated vector (prev_savevr) and store one aligned 16-byte chunk;
   addgpr advances by 32 so r5/r6 leapfrog through the save area.  */
158 # define save_2vmx_partial(savevr,prev_savevr,hivr,shiftvr,maskvr,savegpr,addgpr) \
159 addi addgpr,addgpr,32; \
160 vperm savevr,savevr,savevr,shiftvr; \
161 vsel hivr,prev_savevr,savevr,maskvr; \
162 stvx hivr,0,savegpr;
163
164 save_2vmx_partial(v21,v20,v5,v0,v3,r6,r5)
165 save_2vmx_partial(v22,v21,v5,v0,v3,r5,r6)
166 save_2vmx_partial(v23,v22,v5,v0,v3,r6,r5)
167 save_2vmx_partial(v24,v23,v5,v0,v3,r5,r6)
168 save_2vmx_partial(v25,v24,v5,v0,v3,r6,r5)
169 save_2vmx_partial(v26,v25,v5,v0,v3,r5,r6)
170 save_2vmx_partial(v27,v26,v5,v0,v3,r6,r5)
171 save_2vmx_partial(v28,v27,v5,v0,v3,r5,r6)
172 save_2vmx_partial(v29,v28,v5,v0,v3,r6,r5)
173 save_2vmx_partial(v30,v29,v5,v0,v3,r5,r6)
174
175 /* Special case for r31 we need to preserve what is in save area
176 above v31 before obliterating it */
177 addi r5,r5,32
178 vperm v31,v31,v31,v0
179 lvx v4,0,r5
180 vsel v5,v30,v31,v3
181 stvx v5,0,r6
182 vsel v4,v31,v4,v3
183 stvx v4,0,r5
184 b L(no_vmx)
185
/* Aligned fast path: plain stvx of v20-v31, r5/r6 leapfrogging by 32
   through the 16-byte-spaced vector slots.  */
186 L(aligned_save_vmx):
187 stvx 20,0,r5
188 addi r5,r5,32
189 stvx 21,0,r6
190 addi r6,r6,32
191 stvx 22,0,r5
192 addi r5,r5,32
193 stvx 23,0,r6
194 addi r6,r6,32
195 stvx 24,0,r5
196 addi r5,r5,32
197 stvx 25,0,r6
198 addi r6,r6,32
199 stvx 26,0,r5
200 addi r5,r5,32
201 stvx 27,0,r6
202 addi r6,r6,32
203 stvx 28,0,r5
204 addi r5,r5,32
205 stvx 29,0,r6
206 addi r6,r6,32
207 stvx 30,0,r5
208 stvx 31,0,r6
209 L(no_vmx):
210 #else
/* NOTE(review): VMX disabled; r6 is cleared here but its consumer is
   not visible in this file — presumably matches the VMX path's exit
   state.  Confirm against the jmp_buf consumers.  */
211 li r6,0
212 #endif
/* Hand off: inside ld.so just return 0 without touching the signal
   mask; in shared libc tail-call __sigjmp_save (r3 = env and r4 =
   savemask are still live); in static builds make a real call with a
   minimal 112-byte stack frame, saving and restoring LR around it.  */
213 #if defined NOT_IN_libc && defined IS_IN_rtld
214 li r3,0
215 blr
216 #elif defined SHARED
217 b JUMPTARGET (BP_SYM (__sigjmp_save))
218 #else
219 mflr r0
220 std r0,16(r1)
221 stdu r1,-112(r1)
222 cfi_adjust_cfa_offset(112)
223 cfi_offset(lr,16)
224 bl JUMPTARGET (BP_SYM (__sigjmp_save))
225 nop
226 ld r0,112+16(r1)
227 addi r1,r1,112
228 mtlr r0
229 blr
230 #endif
231 END (BP_SYM (__sigsetjmp))