/* nptl/sysdeps/unix/sysv/linux/sh/pthread_once.S  */

/* Copyright (C) 2003-2014 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#include <unwindbuf.h>
#include <sysdep.h>
#include <kernel-features.h>
#include <lowlevellock.h>
#include "lowlevel-atomic.h"


        .comm   __fork_generation, 4, 4
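
/* Layout of the once-control value as used below (this summary is
   inferred from the code, not a public ABI):
     bit 0      an initializer is currently running
     bit 1      initialization has completed
     bits 2-31  fork generation at the time the initializer started
   __fork_generation is expected to be advanced by the fork machinery
   in multiples of 4, so the low two bits stay clear and an initializer
   that was interrupted by fork can be detected and restarted.

   Rough C-style sketch of the control flow (helper names such as
   compare_and_swap, futex_wait and futex_wake are illustrative only,
   not the real libc internals):

     while (1)
       {
         oldval = *once;
         if (oldval & 2)                      // already initialized
           return 0;
         newval = __fork_generation | (oldval & 3) | 1;
         if (! compare_and_swap (once, oldval, newval))
           continue;                          // raced, retry
         if ((oldval & 1) == 0                // nobody running it yet
             || ((oldval ^ newval) & -4))     // or interrupted by fork
           break;                             // we run the initializer
         futex_wait (once, newval);
       }
     init_routine ();                         // under a cancel handler
     *once += 1;                              // generation|1 -> generation|2
     futex_wake (once, INT_MAX);
     return 0;

   If the initializer is cancelled, *once is reset to 0 and all waiters
   are woken (label 7 below).  */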

        .text
        .globl  __pthread_once
        .type   __pthread_once,@function
        .align  5
        cfi_startproc
__pthread_once:
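        /* Fast path: if bit 1 is already set the initializer has run,
           so return 0 immediately (the mov in the rts delay slot sets
           the return value).  */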
        mov.l   @r4, r0
        tst     #2, r0
        bt      1f
        rts
        mov     #0, r0

1:
        mov.l   r12, @-r15
        cfi_adjust_cfa_offset (4)
        cfi_rel_offset (r12, 0)
        mov.l   r9, @-r15
        cfi_adjust_cfa_offset (4)
        cfi_rel_offset (r9, 0)
        mov.l   r8, @-r15
        cfi_adjust_cfa_offset (4)
        cfi_rel_offset (r8, 0)
        sts.l   pr, @-r15
        cfi_adjust_cfa_offset (4)
        cfi_rel_offset (pr, 0)
        mov     r5, r8
        mov     r4, r9

        /* Not yet initialized or initialization in progress.
           Get the fork generation counter now.  */
6:
        mov.l   @r4, r1
        mova    .Lgot, r0
        mov.l   .Lgot, r12
        add     r0, r12

5:
        mov     r1, r0

        tst     #2, r0
        bf      4f

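        /* Mark the initialization as in progress: build
           __fork_generation | (oldval & 3) | 1 in r3 and try to install
           it atomically.  CMPXCHG leaves the previous value of the
           once-control word in r2; the bf after it retries at label 5
           if the exchange did not succeed.  */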
        and     #3, r0
        mov.l   .Lfgen, r2
#ifdef PIC
        add     r12, r2
#endif
        mov.l   @r2, r3
        or      r3, r0
        or      #1, r0
        mov     r0, r3
        mov     r1, r5

        CMPXCHG (r5, @r4, r3, r2)
        bf      5b

        /* Check whether another thread already runs the initializer.  */
        mov     r2, r0
        tst     #1, r0
        bt      3f      /* No -> do it.  */

        /* Check whether the initializer execution was interrupted
           by a fork.  */
        xor     r3, r0
        mov     #-4, r1 /* -4 = 0xfffffffc */
        tst     r1, r0
        bf      3f      /* Generation differs -> run the initializer.  */

        /* Somebody else got here first.  Wait.  */
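        /* FUTEX_WAIT on the once-control word as long as it still
           contains the value we just stored: arguments go in r4-r7
           (uaddr, op, val, timeout = NULL) and the syscall number in
           r3, zero-extended because mov #imm8 sign-extends.  trapa
           #0x14 enters the kernel (on this port the trap number is by
           convention 0x10 plus the number of arguments).  */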
#ifdef __ASSUME_PRIVATE_FUTEX
        mov     #(FUTEX_PRIVATE_FLAG|FUTEX_WAIT), r5
        extu.b  r5, r5
#else
        stc     gbr, r1
        mov.w   .Lpfoff, r2
        add     r2, r1
        mov.l   @r1, r5
# if FUTEX_WAIT != 0
        mov     #FUTEX_WAIT, r0
        or      r0, r5
# endif
#endif
        mov     r3, r6
        mov     #0, r7
        mov     #SYS_futex, r3
        extu.b  r3, r3
        trapa   #0x14
        SYSCALL_INST_PAD
        bra     6b
        nop

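        /* Constant pool: SH cannot encode 32-bit immediates in an
           instruction, so the GOT address and the address of
           __fork_generation are loaded PC-relative from here.  */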
        .align  2
.Lgot:
        .long   _GLOBAL_OFFSET_TABLE_
#ifdef PIC
.Lfgen:
        .long   __fork_generation@GOTOFF
#else
.Lfgen:
        .long   __fork_generation
#endif

3:
        /* Call the initializer function after setting up the
           cancellation handler.  Note that it is not possible here
           to use the unwind-based cleanup handling.  This would require
           that the user-provided function and all the code it calls
           is compiled with exceptions.  Unfortunately this cannot be
           guaranteed.  */
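        /* The sequence below roughly mirrors what the C-level cleanup
           macros expand to: __sigsetjmp saves the context into an
           unwind buffer on the stack and __pthread_register_cancel
           links it into the thread's cancellation handler chain.  If
           the thread is cancelled while the initializer runs, the
           unwind machinery makes __sigsetjmp return a second time and
           we continue at label 7 below.  */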
        add     #-UNWINDBUFSIZE, r15
        cfi_adjust_cfa_offset (UNWINDBUFSIZE)

        mov.l   .Lsigsetjmp, r1
        mov     #UWJMPBUF, r4
        add     r15, r4
        bsrf    r1
        mov     #0, r5
.Lsigsetjmp0:
        tst     r0, r0
        bf      7f

        mov.l   .Lcpush, r1
        bsrf    r1
        mov     r15, r4
.Lcpush0:

        /* Call the user-provided initialization function.  */
        jsr     @r8
        nop

        /* Pop the cleanup handler.  */
        mov.l   .Lcpop, r1
        bsrf    r1
        mov     r15, r4
.Lcpop0:

        add     #UNWINDBUFSIZE, r15
        cfi_adjust_cfa_offset (-UNWINDBUFSIZE)

        /* Successful run of the initializer.  Signal that we are done.  */
        INC (@r9, r2)
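        /* Adding 1 turns "generation | 1" into "generation | 2": the
           in-progress bit is cleared and the done bit is set, since
           the fork generation only occupies bits 2-31.  */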
        /* Wake up all other threads.  */
        mov     r9, r4
#ifdef __ASSUME_PRIVATE_FUTEX
        mov     #(FUTEX_PRIVATE_FLAG|FUTEX_WAKE), r5
        extu.b  r5, r5
#else
        stc     gbr, r1
        mov.w   .Lpfoff, r2
        add     r2, r1
        mov.l   @r1, r5
        mov     #FUTEX_WAKE, r0
        or      r0, r5
#endif
        mov     #-1, r6
        shlr    r6      /* r6 = 0x7fffffff */
        mov     #0, r7
        mov     #SYS_futex, r3
        extu.b  r3, r3
        trapa   #0x14
        SYSCALL_INST_PAD

4:
        lds.l   @r15+, pr
        cfi_adjust_cfa_offset (-4)
        cfi_restore (pr)
        mov.l   @r15+, r8
        cfi_adjust_cfa_offset (-4)
        cfi_restore (r8)
        mov.l   @r15+, r9
        cfi_adjust_cfa_offset (-4)
        cfi_restore (r9)
        mov.l   @r15+, r12
        cfi_adjust_cfa_offset (-4)
        cfi_restore (r12)
        rts
        mov     #0, r0

7:
        /* __sigsetjmp returned for the second time.  */
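        /* I.e. the thread was cancelled while running the initializer.
           Reset *once_control to zero so that a later pthread_once
           call retries the initialization, wake all waiters, and
           continue the cancellation unwind via __pthread_unwind_next.  */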
        cfi_adjust_cfa_offset (UNWINDBUFSIZE+16)
        cfi_offset (r12, -4)
        cfi_offset (r9, -8)
        cfi_offset (r8, -12)
        cfi_offset (pr, -16)
        mov     #0, r7
        mov.l   r7, @r9
        mov     r9, r4
#ifdef __ASSUME_PRIVATE_FUTEX
        mov     #(FUTEX_PRIVATE_FLAG|FUTEX_WAKE), r5
#else
        stc     gbr, r1
        mov.w   .Lpfoff, r2
        add     r2, r1
        mov.l   @r1, r5
        mov     #FUTEX_WAKE, r0
        or      r0, r5
#endif
        extu.b  r5, r5
        mov     #-1, r6
        shlr    r6      /* r6 = 0x7fffffff */
        mov     #SYS_futex, r3
        extu.b  r3, r3
        trapa   #0x14
        SYSCALL_INST_PAD

        mov.l   .Lunext, r1
        bsrf    r1
        mov     r15, r4
.Lunext0:
        /* NOTREACHED */
        sleep
        cfi_endproc

#ifndef __ASSUME_PRIVATE_FUTEX
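/* Offset of the cached private-futex flag relative to gbr.  On SH the
   gbr register holds the thread pointer, and struct pthread (which
   contains the flag) sits TLS_PRE_TCB_SIZE bytes below it, assuming
   the usual NPTL layout for this port; hence the negative offset.  */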
.Lpfoff:
        .word   PRIVATE_FUTEX - TLS_PRE_TCB_SIZE
#endif
        .align  2
.Lsigsetjmp:
        .long   __sigsetjmp@PLT-(.Lsigsetjmp0-.)
.Lcpush:
        .long   HIDDEN_JUMPTARGET(__pthread_register_cancel)-.Lcpush0
.Lcpop:
        .long   HIDDEN_JUMPTARGET(__pthread_unregister_cancel)-.Lcpop0
.Lunext:
        .long   HIDDEN_JUMPTARGET(__pthread_unwind_next)-.Lunext0
        .size   __pthread_once,.-__pthread_once

hidden_def (__pthread_once)
strong_alias (__pthread_once, pthread_once)