/* Miscellaneous BPABI functions.

   Copyright (C) 2003-2017 Free Software Foundation, Inc.
   Contributed by CodeSourcery, LLC.

   This file is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 3, or (at your option) any
   later version.

   This file is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   Under Section 7 of GPL version 3, you are granted additional
   permissions described in the GCC Runtime Library Exception, version
   3.1, as published by the Free Software Foundation.

   You should have received a copy of the GNU General Public License and
   a copy of the GCC Runtime Library Exception along with this program;
   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
   <http://www.gnu.org/licenses/>.  */
b3f8d95d 24
158ef346
CB
25 .cfi_sections .debug_frame
26
6f0668cf
RE
27#ifdef __ARM_EABI__
28/* Some attributes that are common to all routines in this file. */
29 /* Tag_ABI_align_needed: This code does not require 8-byte
30 alignment from the caller. */
31 /* .eabi_attribute 24, 0 -- default setting. */
32 /* Tag_ABI_align_preserved: This code preserves 8-byte
33 alignment in any callee. */
34 .eabi_attribute 25, 1
35#endif /* __ARM_EABI__ */
36
#ifdef L_aeabi_lcmp

/* int __aeabi_lcmp (long long x, long long y)
   Signed 64-bit three-way comparison.  Operands arrive in the
   xxh:xxl and yyh:yyl register pairs (aliases defined in lib1funcs.S);
   returns -1, 0 or +1 in r0 for x < y, x == y, x > y respectively.  */
ARM_FUNC_START aeabi_lcmp
	/* Compare the high (signed) words first.  */
	cmp	xxh, yyh
	do_it	lt
	movlt	r0, #-1
	do_it	gt
	movgt	r0, #1
	do_it	ne
	RETc(ne)
	/* High words equal: compare the low words as unsigned.  The subs
	   leaves 0 in r0 when the values are fully equal.  */
	subs	r0, xxl, yyl
	do_it	lo
	movlo	r0, #-1
	do_it	hi
	movhi	r0, #1
	RET
	FUNC_END aeabi_lcmp

#endif /* L_aeabi_lcmp */
56
#ifdef L_aeabi_ulcmp

/* int __aeabi_ulcmp (unsigned long long x, unsigned long long y)
   Unsigned 64-bit three-way comparison.  Operands arrive in the
   xxh:xxl and yyh:yyl register pairs (aliases defined in lib1funcs.S);
   returns -1, 0 or +1 in r0 for x < y, x == y, x > y respectively.  */
ARM_FUNC_START aeabi_ulcmp
	/* Compare the high words as unsigned.  */
	cmp	xxh, yyh
	do_it	lo
	movlo	r0, #-1
	do_it	hi
	movhi	r0, #1
	do_it	ne
	RETc(ne)
	/* High words equal: the low words decide.  */
	cmp	xxl, yyl
	do_it	lo
	movlo	r0, #-1
	do_it	hi
	movhi	r0, #1
	do_it	eq
	moveq	r0, #0
	RET
	FUNC_END aeabi_ulcmp

#endif /* L_aeabi_ulcmp */
78
/* Check the 64-bit divisor yyh:yyl for zero and, if it is zero, set up
   the RTABI-specified result in xxh:xxl (largest magnitude value of the
   appropriate sign for signed; all-ones for unsigned) and tail-call
   __aeabi_ldiv0 so a user-overridden handler unwinds properly.
   Falls through to label 2 when the divisor is non-zero.  */
.macro test_div_by_zero signed
/* Tail-call to divide-by-zero handlers which may be overridden by the user,
   so unwinding works properly.  */
#if defined(__thumb2__)
	cbnz	yyh, 2f
	cbnz	yyl, 2f
	cmp	xxh, #0
	.ifc	\signed, unsigned
	do_it	eq
	cmpeq	xxl, #0
	/* x / 0 with x != 0: result is ~0 (maximum unsigned value).  */
	do_it	ne, t
	movne	xxh, #0xffffffff
	movne	xxl, #0xffffffff
	.else
	/* Negative numerator: result is the most negative value.  */
	do_it	lt, tt
	movlt	xxl, #0
	movlt	xxh, #0x80000000
	blt	1f
	do_it	eq
	cmpeq	xxl, #0
	/* Positive numerator: result is the most positive value.  */
	do_it	ne, t
	movne	xxh, #0x7fffffff
	movne	xxl, #0xffffffff
	.endif
1:
	b	SYM (__aeabi_ldiv0) __PLT__
2:
#else
	/* Note: Thumb-1 code calls via an ARM shim on processors which
	   support ARM mode.  */
	cmp	yyh, #0
	cmpeq	yyl, #0
	bne	2f
	cmp	xxh, #0
	.ifc	\signed, unsigned
	cmpeq	xxl, #0
	/* x / 0 with x != 0: result is ~0 (maximum unsigned value).  */
	movne	xxh, #0xffffffff
	movne	xxl, #0xffffffff
	.else
	/* Negative numerator: result is the most negative value.  */
	movlt	xxh, #0x80000000
	movlt	xxl, #0
	blt	1f
	cmpeq	xxl, #0
	/* Positive numerator: result is the most positive value.  */
	movne	xxh, #0x7fffffff
	movne	xxl, #0xffffffff
	.endif
1:
	b	SYM (__aeabi_ldiv0) __PLT__
2:
#endif
.endm
130
/* we can use STRD/LDRD on v5TE and later, and any Thumb-2 architecture. */
#if (defined(__ARM_EABI__) \
     && (defined(__thumb2__) \
	 || (__ARM_ARCH >= 5 && defined(__TARGET_FEATURE_DSP))))
#define CAN_USE_LDRD 1
#else
#define CAN_USE_LDRD 0
#endif
/* Set up a stack frame for the call to __udivmoddi4.  At the end of the
   macro the stack is arranged as follows:
		sp+12	/ space for remainder
		sp+8	\ (written by __udivmoddi4)
		sp+4	lr
		sp+0	sp+8 [rp (remainder pointer) argument for __udivmoddi4]

 */
.macro push_for_divide fname
#if defined(__thumb2__) && CAN_USE_LDRD
	/* One STRD stores both the remainder pointer (ip = old sp - 8,
	   i.e. new sp + 8) and lr, pre-decrementing sp by 16.  */
	sub	ip, sp, #8
	strd	ip, lr, [sp, #-16]!
#else
	sub	sp, sp, #8
	do_push	{sp, lr}
#endif
	.cfi_adjust_cfa_offset 16
	.cfi_offset 14, -12
.endm
/* Restore the stack after push_for_divide, loading the remainder
   written by __udivmoddi4 (at sp+8..sp+15) into r2:r3 and restoring lr.  */
.macro pop_for_divide
	ldr	lr, [sp, #4]
#if CAN_USE_LDRD
	ldrd	r2, r3, [sp, #8]
	add	sp, sp, #16
#else
	/* Skip the two stored words, then pop the remainder pair.  */
	add	sp, sp, #8
	do_pop	{r2, r3}
#endif
	.cfi_restore 14
	.cfi_adjust_cfa_offset 0
.endm
173
#ifdef L_aeabi_ldivmod

/* Perform 64 bit signed division.
   Inputs:
	r0:r1	numerator
	r2:r3	denominator
   Outputs:
	r0:r1	quotient
	r2:r3	remainder
   Implemented by negating negative operands, calling the unsigned
   __udivmoddi4, then fixing up the signs: the quotient is negated when
   the operands' signs differ, and the remainder takes the numerator's
   sign.  */
ARM_FUNC_START aeabi_ldivmod
	.cfi_startproc
	test_div_by_zero	signed

	push_for_divide	__aeabi_ldivmod
	cmp	xxh, #0
	blt	1f
	cmp	yyh, #0
	blt	2f
	/* Both operands non-negative: result needs no fix-up.  */
	/* arguments in (r0:r1), (r2:r3) and *sp */
	bl	SYM(__udivmoddi4) __PLT__
	.cfi_remember_state
	pop_for_divide
	RET

1: /* xxh:xxl is negative */
	.cfi_restore_state
	negs	xxl, xxl
	sbc	xxh, xxh, xxh, lsl #1	/* Thumb-2 has no RSC, so use X - 2X */
	cmp	yyh, #0
	blt	3f
	/* Negative / positive: negate both quotient and remainder.  */
	/* arguments in (r0:r1), (r2:r3) and *sp */
	bl	SYM(__udivmoddi4) __PLT__
	.cfi_remember_state
	pop_for_divide
	negs	xxl, xxl
	sbc	xxh, xxh, xxh, lsl #1	/* Thumb-2 has no RSC, so use X - 2X */
	negs	yyl, yyl
	sbc	yyh, yyh, yyh, lsl #1	/* Thumb-2 has no RSC, so use X - 2X */
	RET

2: /* only yyh:yyl is negative */
	.cfi_restore_state
	negs	yyl, yyl
	sbc	yyh, yyh, yyh, lsl #1	/* Thumb-2 has no RSC, so use X - 2X */
	/* Positive / negative: negate the quotient only.  */
	/* arguments in (r0:r1), (r2:r3) and *sp */
	bl	SYM(__udivmoddi4) __PLT__
	.cfi_remember_state
	pop_for_divide
	negs	xxl, xxl
	sbc	xxh, xxh, xxh, lsl #1	/* Thumb-2 has no RSC, so use X - 2X */
	RET

3: /* both xxh:xxl and yyh:yyl are negative */
	.cfi_restore_state
	negs	yyl, yyl
	sbc	yyh, yyh, yyh, lsl #1	/* Thumb-2 has no RSC, so use X - 2X */
	/* Negative / negative: quotient is positive, negate the remainder.  */
	/* arguments in (r0:r1), (r2:r3) and *sp */
	bl	SYM(__udivmoddi4) __PLT__
	pop_for_divide
	negs	yyl, yyl
	sbc	yyh, yyh, yyh, lsl #1	/* Thumb-2 has no RSC, so use X - 2X */
	RET

	.cfi_endproc

#endif /* L_aeabi_ldivmod */
241
#ifdef L_aeabi_uldivmod

/* Perform 64 bit unsigned division.
   Inputs:
	r0:r1	numerator
	r2:r3	denominator
   Outputs:
	r0:r1	quotient
	r2:r3	remainder
   */
ARM_FUNC_START aeabi_uldivmod
	.cfi_startproc
	test_div_by_zero	unsigned

	push_for_divide	__aeabi_uldivmod
	/* arguments in (r0:r1), (r2:r3) and *sp */
	bl	SYM(__udivmoddi4) __PLT__
	pop_for_divide
	RET
	.cfi_endproc

#endif /* L_aeabi_uldivmod */