/* mc68020 __mpn_rshift -- Shift right a low-level natural-number integer.

Copyright (C) 1996-2014 Free Software Foundation, Inc.

This file is part of the GNU MP Library.

The GNU MP Library is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version.

The GNU MP Library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
License for more details.

You should have received a copy of the GNU Lesser General Public License
along with the GNU MP Library.  If not, see <http://www.gnu.org/licenses/>.  */

/*
  INPUT PARAMETERS
  res_ptr	(sp + 4)
  s_ptr		(sp + 8)
  s_size	(sp + 12)
  cnt		(sp + 16)
*/
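
/* For reference only: a rough C equivalent of what this routine computes.
   It is not part of the build; the limb type and the "_ref" name are
   illustrative assumptions (m68k is ILP32, so limbs are 32 bits).  It
   assumes s_size >= 1 and 1 <= cnt < 32, and returns the bits shifted out
   of the low limb, left-justified.

   typedef unsigned long mp_limb_t;		// 32-bit limb on m68k

   mp_limb_t
   __mpn_rshift_ref (mp_limb_t *res_ptr, const mp_limb_t *s_ptr,
		     long s_size, unsigned int cnt)
   {
     mp_limb_t retval = s_ptr[0] << (32 - cnt);	// bits shifted out
     long i;

     for (i = 0; i < s_size - 1; i++)
       res_ptr[i] = (s_ptr[i] >> cnt) | (s_ptr[i + 1] << (32 - cnt));
     res_ptr[s_size - 1] = s_ptr[s_size - 1] >> cnt;
     return retval;
   }
*/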

#include "sysdep.h"
#include "asm-syntax.h"

#define res_ptr a1
#define s_ptr a0
#define s_size d6
#define cnt d4

	TEXT
ENTRY(__mpn_rshift)
/* Save used registers on the stack.  */
	moveml	R(d2)-R(d6)/R(a2),MEM_PREDEC(sp)
	cfi_adjust_cfa_offset (6*4)
	cfi_rel_offset (R(d2), 0)
	cfi_rel_offset (R(d3), 4)
	cfi_rel_offset (R(d4), 8)
	cfi_rel_offset (R(d5), 12)
	cfi_rel_offset (R(d6), 16)
	cfi_rel_offset (R(a2), 20)

/* Copy the arguments to registers.  */
	movel	MEM_DISP(sp,28),R(res_ptr)
	movel	MEM_DISP(sp,32),R(s_ptr)
	movel	MEM_DISP(sp,36),R(s_size)
	movel	MEM_DISP(sp,40),R(cnt)

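/* A shift count of exactly one bit gets a dedicated path: when cnt == 1 and
   either res_ptr >= s_ptr or the operands do not overlap at all, the checks
   below branch to Lspecial, which shifts through the X flag with roxr.
   Every other case takes the Lnormal path.  */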
	moveql	#1,R(d5)
	cmpl	R(d5),R(cnt)
	bne	L(Lnormal)
	cmpl	R(res_ptr),R(s_ptr)
	bls	L(Lspecial)		/* jump if res_ptr >= s_ptr */
#if (defined (__mc68020__) || defined (__NeXT__) || defined(mc68020))
	lea	MEM_INDX1(res_ptr,s_size,l,4),R(a2)
#else /* not mc68020 */
	movel	R(s_size),R(d0)
	asll	#2,R(d0)
	lea	MEM_INDX(res_ptr,d0,l),R(a2)
#endif
	cmpl	R(s_ptr),R(a2)
	bls	L(Lspecial)		/* jump if s_ptr >= res_ptr + s_size */

L(Lnormal:)
	moveql	#32,R(d5)
	subl	R(cnt),R(d5)
	movel	MEM_POSTINC(s_ptr),R(d2)
	movel	R(d2),R(d0)
	lsll	R(d5),R(d0)		/* compute carry limb */

	lsrl	R(cnt),R(d2)
	movel	R(d2),R(d1)
	subql	#1,R(s_size)
	beq	L(Lend)
	lsrl	#1,R(s_size)
	bcs	L(L1)
	subql	#1,R(s_size)

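/* Main loop, unrolled twice: each pass reads two source limbs and writes two
   result limbs, with d1 and d2 alternating as the limb still waiting for its
   high bits from the next source limb.  */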
L(Loop:)
	movel	MEM_POSTINC(s_ptr),R(d2)
	movel	R(d2),R(d3)
	lsll	R(d5),R(d3)
	orl	R(d3),R(d1)
	movel	R(d1),MEM_POSTINC(res_ptr)
	lsrl	R(cnt),R(d2)
L(L1:)
	movel	MEM_POSTINC(s_ptr),R(d1)
	movel	R(d1),R(d3)
	lsll	R(d5),R(d3)
	orl	R(d3),R(d2)
	movel	R(d2),MEM_POSTINC(res_ptr)
	lsrl	R(cnt),R(d1)

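/* dbf counts down only the low 16 bits of s_size; the subl/bcc pair extends
   the loop count to the full 32-bit size.  */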
	dbf	R(s_size),L(Loop)
	subl	#0x10000,R(s_size)
	bcc	L(Loop)

L(Lend:)
	movel	R(d1),MEM(res_ptr)	/* store most significant limb */

/* Restore used registers from stack frame.  */
	moveml	MEM_POSTINC(sp),R(d2)-R(d6)/R(a2)
	cfi_remember_state
	cfi_adjust_cfa_offset (-6*4)
	cfi_restore (R(d2))
	cfi_restore (R(d3))
	cfi_restore (R(d4))
	cfi_restore (R(d5))
	cfi_restore (R(d6))
	cfi_restore (R(a2))
	rts

/* We loop from the most significant end of the arrays here, which is only
   permissible if the source and destination don't overlap, since the
   function is documented to work for overlapping source and destination.  */
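/* Concretely, this path is entered only for cnt == 1, and only when
   res_ptr >= s_ptr or the operands are disjoint, so each store below lands
   on a source limb that has already been read.  */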

	cfi_restore_state
L(Lspecial:)
#if (defined (__mc68020__) || defined (__NeXT__) || defined(mc68020))
	lea	MEM_INDX1(s_ptr,s_size,l,4),R(s_ptr)
	lea	MEM_INDX1(res_ptr,s_size,l,4),R(res_ptr)
#else /* not mc68020 */
	movel	R(s_size),R(d0)
	asll	#2,R(d0)
	addl	R(d0),R(s_ptr)
	addl	R(d0),R(res_ptr)
#endif

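/* Shift right by exactly one bit, two limbs per pass, by rotating each limb
   through the X flag with roxr; the eorw/lsrl sequence below derives the
   iteration count from s_size and picks the entry point when the number of
   limbs is odd.  */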
	clrl	R(d0)			/* initialize carry */
	eorw	#1,R(s_size)
	lsrl	#1,R(s_size)
	bcc	L(LL1)
	subql	#1,R(s_size)

L(LLoop:)
	movel	MEM_PREDEC(s_ptr),R(d2)
	roxrl	#1,R(d2)
	movel	R(d2),MEM_PREDEC(res_ptr)
L(LL1:)
	movel	MEM_PREDEC(s_ptr),R(d2)
	roxrl	#1,R(d2)
	movel	R(d2),MEM_PREDEC(res_ptr)

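/* d0 carries the X flag across the counter arithmetic: roxr parks the
   pending carry in the msb of d0 before subl clobbers X, and addl d0,d0
   moves it back into X before re-entering the loop.  On exit, d0 is also
   the return value: the bit shifted out, left-justified.  */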
	dbf	R(s_size),L(LLoop)
	roxrl	#1,R(d0)		/* save cy in msb */
	subl	#0x10000,R(s_size)
	bcs	L(LLend)
	addl	R(d0),R(d0)		/* restore cy */
	bra	L(LLoop)

L(LLend:)
/* Restore used registers from stack frame.  */
	moveml	MEM_POSTINC(sp),R(d2)-R(d6)/R(a2)
	cfi_adjust_cfa_offset (-6*4)
	cfi_restore (R(d2))
	cfi_restore (R(d3))
	cfi_restore (R(d4))
	cfi_restore (R(d5))
	cfi_restore (R(d6))
	cfi_restore (R(a2))
	rts
END(__mpn_rshift)