/* Miscellaneous BPABI functions.

   Copyright (C) 2003-2019 Free Software Foundation, Inc.
   Contributed by CodeSourcery, LLC.

   This file is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 3, or (at your option) any
   later version.

   This file is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   Under Section 7 of GPL version 3, you are granted additional
   permissions described in the GCC Runtime Library Exception, version
   3.1, as published by the Free Software Foundation.

   You should have received a copy of the GNU General Public License and
   a copy of the GCC Runtime Library Exception along with this program;
   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
   <http://www.gnu.org/licenses/>.  */

        .cfi_sections .debug_frame

#ifdef __ARM_EABI__
/* Some attributes that are common to all routines in this file.  */
        /* Tag_ABI_align_needed: This code does not require 8-byte
           alignment from the caller.  */
        /* .eabi_attribute 24, 0  -- default setting.  */
        /* Tag_ABI_align_preserved: This code preserves 8-byte
           alignment in any callee.  */
        .eabi_attribute 25, 1
#endif /* __ARM_EABI__ */

#ifdef L_aeabi_lcmp

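/* Signed 64-bit three-way comparison.  The first operand arrives in
   {r0, r1} and the second in {r2, r3}; xxh/xxl and yyh/yyl name their
   high and low words (the exact register mapping depends on endianness).
   The result in r0 is negative, zero or positive according to whether
   the first operand is less than, equal to or greater than the second.
   A rough C-level sketch, for illustration only (the run-time ABI only
   requires the sign of the result, and the operand names are arbitrary):

     int __aeabi_lcmp (long long a, long long b)
     {
       return (a < b) ? -1 : (a > b) ? 1 : 0;
     }
 */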
ARM_FUNC_START aeabi_lcmp
        cmp     xxh, yyh
        do_it   lt
        movlt   r0, #-1
        do_it   gt
        movgt   r0, #1
        do_it   ne
        RETc(ne)
        subs    r0, xxl, yyl
        do_it   lo
        movlo   r0, #-1
        do_it   hi
        movhi   r0, #1
        RET
        FUNC_END aeabi_lcmp

#endif /* L_aeabi_lcmp */

#ifdef L_aeabi_ulcmp

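/* Unsigned 64-bit three-way comparison, with the same register
   conventions as __aeabi_lcmp above.  A rough C-level sketch, for
   illustration only (the run-time ABI only requires the sign of the
   result):

     int __aeabi_ulcmp (unsigned long long a, unsigned long long b)
     {
       return (a < b) ? -1 : (a > b) ? 1 : 0;
     }
 */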
ARM_FUNC_START aeabi_ulcmp
        cmp     xxh, yyh
        do_it   lo
        movlo   r0, #-1
        do_it   hi
        movhi   r0, #1
        do_it   ne
        RETc(ne)
        cmp     xxl, yyl
        do_it   lo
        movlo   r0, #-1
        do_it   hi
        movhi   r0, #1
        do_it   eq
        moveq   r0, #0
        RET
        FUNC_END aeabi_ulcmp

#endif /* L_aeabi_ulcmp */

.macro test_div_by_zero signed
/* Tail-call to divide-by-zero handlers which may be overridden by the user,
   so unwinding works properly.  */
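/* A sketch of the behaviour, derived from the code below: when the
   divisor yyh:yyl is zero, the quotient handed to __aeabi_ldiv0 is 0
   for a zero numerator, the maximum unsigned value for a non-zero
   numerator in the unsigned case, and LLONG_MAX or LLONG_MIN (for a
   positive or negative numerator) in the signed case.  When the
   divisor is non-zero, control falls through to label 2 and the
   division proceeds normally.  */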
#if defined(__thumb2__)
        cbnz    yyh, 2f
        cbnz    yyl, 2f
        cmp     xxh, #0
        .ifc    \signed, unsigned
        do_it   eq
        cmpeq   xxl, #0
        do_it   ne, t
        movne   xxh, #0xffffffff
        movne   xxl, #0xffffffff
        .else
        do_it   lt, tt
        movlt   xxl, #0
        movlt   xxh, #0x80000000
        blt     1f
        do_it   eq
        cmpeq   xxl, #0
        do_it   ne, t
        movne   xxh, #0x7fffffff
        movne   xxl, #0xffffffff
        .endif
1:
        b       SYM (__aeabi_ldiv0) __PLT__
2:
#else
        /* Note: Thumb-1 code calls via an ARM shim on processors which
           support ARM mode.  */
        cmp     yyh, #0
        cmpeq   yyl, #0
        bne     2f
        cmp     xxh, #0
        .ifc    \signed, unsigned
        cmpeq   xxl, #0
        movne   xxh, #0xffffffff
        movne   xxl, #0xffffffff
        .else
        movlt   xxh, #0x80000000
        movlt   xxl, #0
        blt     1f
        cmpeq   xxl, #0
        movne   xxh, #0x7fffffff
        movne   xxl, #0xffffffff
        .endif
1:
        b       SYM (__aeabi_ldiv0) __PLT__
2:
#endif
.endm

/* We can use STRD/LDRD on v5TE and later, and any Thumb-2 architecture.  */
#if (defined(__ARM_EABI__) \
     && (defined(__thumb2__) \
         || (__ARM_ARCH >= 5 && defined(__TARGET_FEATURE_DSP))))
#define CAN_USE_LDRD 1
#else
#define CAN_USE_LDRD 0
#endif


/* Set up the stack frame for the call to __udivmoddi4.  At the end of the
   macro the stack is arranged as follows:
        sp+12   / space for remainder
        sp+8    \ (written by __udivmoddi4)
        sp+4    lr
        sp+0    sp+8 [rp (remainder pointer) argument for __udivmoddi4]  */
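/* For reference, the helper called through this frame is libgcc's
   __udivmoddi4; a rough C-level sketch of its interface (types spelled
   as unsigned long long here for illustration):

     unsigned long long __udivmoddi4 (unsigned long long n,
                                      unsigned long long d,
                                      unsigned long long *rp);

   It returns the quotient and, when rp is non-null, stores the
   remainder at *rp -- here the two words reserved at sp+8/sp+12.  */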
.macro push_for_divide fname
#if defined(__thumb2__) && CAN_USE_LDRD
        sub     ip, sp, #8
        strd    ip, lr, [sp, #-16]!
#else
        sub     sp, sp, #8
        do_push {sp, lr}
#endif
        .cfi_adjust_cfa_offset 16
        .cfi_offset 14, -12
.endm

/* Restore the stack.  */
.macro pop_for_divide
        ldr     lr, [sp, #4]
#if CAN_USE_LDRD
        ldrd    r2, r3, [sp, #8]
        add     sp, sp, #16
#else
        add     sp, sp, #8
        do_pop  {r2, r3}
#endif
        .cfi_restore 14
        .cfi_adjust_cfa_offset 0
.endm

#ifdef L_aeabi_ldivmod

/* Perform 64-bit signed division.
   Inputs:
     r0:r1   numerator
     r2:r3   denominator
   Outputs:
     r0:r1   quotient
     r2:r3   remainder  */
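/* In C terms the entry point below behaves roughly as the following
   sketch (illustrative only; the struct name is arbitrary, and the
   run-time ABI describes the result as a quotient/remainder pair
   returned in r0-r3):

     typedef struct { long long quot; long long rem; } lldiv_return;
     lldiv_return __aeabi_ldivmod (long long n, long long d);
 */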
ARM_FUNC_START aeabi_ldivmod
        .cfi_startproc
        test_div_by_zero signed

        push_for_divide __aeabi_ldivmod
        cmp     xxh, #0
        blt     1f
        cmp     yyh, #0
        blt     2f
        /* arguments in (r0:r1), (r2:r3) and *sp */
        bl      SYM(__udivmoddi4) __PLT__
        .cfi_remember_state
        pop_for_divide
        RET

1:      /* xxh:xxl is negative */
        .cfi_restore_state
        negs    xxl, xxl
        sbc     xxh, xxh, xxh, lsl #1   /* Thumb-2 has no RSC, so use X - 2X */
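        /* A note on the idiom above: NEGS leaves C set exactly when xxl
           was zero (no borrow), so the SBC computes
           xxh - (xxh << 1) - (1 - C) = -xxh - (1 - C), which is the high
           word of the two's-complement negation of xxh:xxl.  */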
        cmp     yyh, #0
        blt     3f
        /* arguments in (r0:r1), (r2:r3) and *sp */
        bl      SYM(__udivmoddi4) __PLT__
        .cfi_remember_state
        pop_for_divide
        negs    xxl, xxl
        sbc     xxh, xxh, xxh, lsl #1   /* Thumb-2 has no RSC, so use X - 2X */
        negs    yyl, yyl
        sbc     yyh, yyh, yyh, lsl #1   /* Thumb-2 has no RSC, so use X - 2X */
        RET

2:      /* only yyh:yyl is negative */
        .cfi_restore_state
        negs    yyl, yyl
        sbc     yyh, yyh, yyh, lsl #1   /* Thumb-2 has no RSC, so use X - 2X */
        /* arguments in (r0:r1), (r2:r3) and *sp */
        bl      SYM(__udivmoddi4) __PLT__
        .cfi_remember_state
        pop_for_divide
        negs    xxl, xxl
        sbc     xxh, xxh, xxh, lsl #1   /* Thumb-2 has no RSC, so use X - 2X */
        RET

3:      /* both xxh:xxl and yyh:yyl are negative */
        .cfi_restore_state
        negs    yyl, yyl
        sbc     yyh, yyh, yyh, lsl #1   /* Thumb-2 has no RSC, so use X - 2X */
        /* arguments in (r0:r1), (r2:r3) and *sp */
        bl      SYM(__udivmoddi4) __PLT__
        pop_for_divide
        negs    yyl, yyl
        sbc     yyh, yyh, yyh, lsl #1   /* Thumb-2 has no RSC, so use X - 2X */
        RET

        .cfi_endproc

#endif /* L_aeabi_ldivmod */

#ifdef L_aeabi_uldivmod

/* Perform 64-bit unsigned division.
   Inputs:
     r0:r1   numerator
     r2:r3   denominator
   Outputs:
     r0:r1   quotient
     r2:r3   remainder  */
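/* As with __aeabi_ldivmod above, a rough C-level sketch (illustrative
   only; the struct name is arbitrary):

     typedef struct { unsigned long long quot;
                      unsigned long long rem; } ulldiv_return;
     ulldiv_return __aeabi_uldivmod (unsigned long long n,
                                     unsigned long long d);
 */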
ARM_FUNC_START aeabi_uldivmod
        .cfi_startproc
        test_div_by_zero unsigned

        push_for_divide __aeabi_uldivmod
        /* arguments in (r0:r1), (r2:r3) and *sp */
        bl      SYM(__udivmoddi4) __PLT__
        pop_for_divide
        RET
        .cfi_endproc

#endif /* L_aeabi_uldivmod */