/*
 * linux/arch/arm/vfp/vfphw.S
 *
 * Copyright (C) 2004 ARM Limited.
 * Written by Deep Blue Solutions Limited.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This code is called from the kernel's undefined instruction trap.
 * r9 holds the return address for successful handling.
 * lr holds the return address for unrecognised instructions.
 * r10 points at the start of the private FP workspace in the thread structure
 * sp points to a struct pt_regs (as defined in include/asm/proc/ptrace.h)
 */
#include <asm/thread_info.h>
#include <asm/vfpmacros.h>
#include "../kernel/entry-header.S"

        .macro  DBGSTR, str
#ifdef DEBUG
        stmfd   sp!, {r0-r3, ip, lr}
        add     r0, pc, #4
        bl      printk
        b       1f
        .asciz  "<7>VFP: \str\n"
        .balign 4
1:      ldmfd   sp!, {r0-r3, ip, lr}
#endif
        .endm

        .macro  DBGSTR1, str, arg
#ifdef DEBUG
        stmfd   sp!, {r0-r3, ip, lr}
        mov     r1, \arg
        add     r0, pc, #4
        bl      printk
        b       1f
        .asciz  "<7>VFP: \str\n"
        .balign 4
1:      ldmfd   sp!, {r0-r3, ip, lr}
#endif
        .endm

        .macro  DBGSTR3, str, arg1, arg2, arg3
#ifdef DEBUG
        stmfd   sp!, {r0-r3, ip, lr}
        mov     r3, \arg3
        mov     r2, \arg2
        mov     r1, \arg1
        add     r0, pc, #4
        bl      printk
        b       1f
        .asciz  "<7>VFP: \str\n"
        .balign 4
1:      ldmfd   sp!, {r0-r3, ip, lr}
#endif
        .endm

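@ Note on the DBGSTR macros above: "add r0, pc, #4" points r0 at the .asciz
@ string embedded after the "b 1f" (the ARM pc reads as the address of the
@ current instruction plus 8), printk is then called with that string (and
@ with any arguments already moved into r1-r3), and the branch skips over
@ the inline string data to the register restore at label 1.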

@ VFP hardware support entry point.
@
@  r0  = faulted instruction
@  r2  = faulted PC+4
@  r9  = successful return
@  r10 = vfp_state union
@  lr  = failure return

        .globl  vfp_support_entry
vfp_support_entry:
        DBGSTR3 "instr %08x pc %08x state %p", r0, r2, r10

        VFPFMRX r1, FPEXC               @ Is the VFP enabled?
        DBGSTR1 "fpexc %08x", r1
        tst     r1, #FPEXC_ENABLE
        bne     look_for_VFP_exceptions @ VFP is already enabled

        DBGSTR1 "enable %x", r10
        ldr     r3, last_VFP_context_address
        orr     r1, r1, #FPEXC_ENABLE   @ user FPEXC has the enable bit set
        ldr     r4, [r3]                @ last_VFP_context pointer
        bic     r5, r1, #FPEXC_EXCEPTION @ make sure exceptions are disabled
        cmp     r4, r10
        beq     check_for_exception     @ we are returning to the same
                                        @ process, so the registers are
                                        @ still there.  In this case, we do
                                        @ not want to drop a pending exception.

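@ Note: last_VFP_context records which thread's vfp_state currently lives in
@ the hardware registers.  When the VFP is being re-enabled for that same
@ thread, its state is already in the hardware, so the save/reload sequence
@ below is skipped and any pending exception it left behind is preserved.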
        VFPFMXR FPEXC, r5               @ enable VFP, disable any pending
                                        @ exceptions, so we can get at the
                                        @ rest of it

        @ Save out the current registers to the old thread state

        DBGSTR1 "save old state %p", r4
        cmp     r4, #0
        beq     no_old_VFP_process
        VFPFMRX r5, FPSCR               @ current status
        VFPFMRX r6, FPINST              @ FPINST (always there, rev0 onwards)
        tst     r1, #FPEXC_FPV2         @ is there an FPINST2 to read?
        VFPFMRX r8, FPINST2, NE         @ FPINST2 if needed - avoids reading
                                        @ nonexistent reg on rev0
        VFPFSTMIA r4                    @ save the working registers
        add     r4, r4, #8*16+4
        stmia   r4, {r1, r5, r6, r8}    @ save FPEXC, FPSCR, FPINST, FPINST2
                                        @ and point r4 at the word at the
                                        @ start of the register dump

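@ Note: the save path above and the reload path below both use offset
@ 8*16+4 into the vfp_state area: the first 8*16 bytes hold the sixteen
@ double-precision working registers handled by VFPFSTMIA/VFPFLDMIA, and
@ FPEXC, FPSCR, FPINST and FPINST2 are kept together at that offset, as
@ laid out by the vfp_state union in the thread structure.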
no_old_VFP_process:
        DBGSTR1 "load state %p", r10
        str     r10, [r3]               @ update the last_VFP_context pointer
                                        @ Load the saved state back into the VFP
        add     r4, r10, #8*16+4
        ldmia   r4, {r1, r5, r6, r8}    @ load FPEXC, FPSCR, FPINST, FPINST2
        VFPFLDMIA r10                   @ reload the working registers while
                                        @ FPEXC is in a safe state
        tst     r1, #FPEXC_FPV2         @ is there an FPINST2 to write?
        VFPFMXR FPINST2, r8, NE         @ FPINST2 if needed - avoids writing
                                        @ nonexistent reg on rev0
        VFPFMXR FPINST, r6
        VFPFMXR FPSCR, r5               @ restore status

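@ If an exception is still flagged in FPEXC we bounce it to the support code
@ first; otherwise FPEXC is written back with the enable bit set, the saved
@ PC in the pt_regs on the stack is wound back by 4, and we return via r9 so
@ that the trapped VFP instruction is retried with the VFP now enabled.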
check_for_exception:
        tst     r1, #FPEXC_EXCEPTION
        bne     process_exception       @ might as well handle the pending
                                        @ exception before retrying the
                                        @ instruction - branch out before
                                        @ setting an FPEXC that stops us
                                        @ reading stuff
        VFPFMXR FPEXC, r1               @ restore FPEXC last
        sub     r2, r2, #4
        str     r2, [sp, #S_PC]         @ retry the instruction
        mov     pc, r9                  @ we think we have handled things


look_for_VFP_exceptions:
        tst     r1, #FPEXC_EXCEPTION
        bne     process_exception
        VFPFMRX r5, FPSCR
        tst     r5, #FPSCR_IXE          @ IXE doesn't set FPEXC_EXCEPTION !
        bne     process_exception

        @ Fall through to hand on to the next handler - appropriate coproc
        @ instr not recognised by VFP

        DBGSTR  "not VFP"
        mov     pc, lr

process_exception:
        DBGSTR  "bounce"
        sub     r2, r2, #4
        str     r2, [sp, #S_PC]         @ retry the instruction on exit from
                                        @ the imprecise exception handling in
                                        @ the support code
        mov     r2, sp                  @ nothing stacked - regdump is at TOS
        mov     lr, r9                  @ setup for a return to the user code.

        @ Now call the C code to package up the bounce to the support code
        @   r0 holds the trigger instruction
        @   r1 holds the FPEXC value
        @   r2 pointer to register dump
        b       VFP9_bounce             @ we have handled this - the support
                                        @ code will raise an exception if
                                        @ required.  If not, the user code will
                                        @ retry the faulted instruction
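                                        @ Note: this is a plain branch, not a
                                        @ bl - lr was preset to r9 above, so
                                        @ when the C bounce code returns it
                                        @ goes straight back to the successful
                                        @ return address in the trap handler.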

last_VFP_context_address:
        .word   last_VFP_context

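@ The helpers below are small dispatch tables used by the VFP support code to
@ read or write an individual VFP register: r0 selects the register, and
@ "add pc, pc, r0, lsl #3" branches into a table of two-instruction (8 byte)
@ entries generated by .irp.  Since the ARM pc reads as the address of the
@ add plus 8, index 0 lands on the first entry, just past the padding
@ "mov r0, r0"; each entry transfers one register and returns.  The double
@ variants first halve the incoming index with "lsr #1" before indexing
@ their 8-byte entries.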
        .globl  vfp_get_float
vfp_get_float:
        add     pc, pc, r0, lsl #3
        mov     r0, r0
        .irp    dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
        mrc     p10, 0, r0, c\dr, c0, 0 @ fmrs  r0, s0
        mov     pc, lr
        mrc     p10, 0, r0, c\dr, c0, 4 @ fmrs  r0, s1
        mov     pc, lr
        .endr

        .globl  vfp_put_float
vfp_put_float:
        add     pc, pc, r0, lsl #3
        mov     r0, r0
        .irp    dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
        mcr     p10, 0, r1, c\dr, c0, 0 @ fmsr  s0, r1
        mov     pc, lr
        mcr     p10, 0, r1, c\dr, c0, 4 @ fmsr  s1, r1
        mov     pc, lr
        .endr

        .globl  vfp_get_double
vfp_get_double:
        mov     r0, r0, lsr #1
        add     pc, pc, r0, lsl #3
        mov     r0, r0
        .irp    dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
        mrrc    p10, 1, r0, r1, c\dr    @ fmrrd r0, r1, d\dr
        mov     pc, lr
        .endr

        @ virtual register 16 for compare with zero
        mov     r0, #0
        mov     r1, #0
        mov     pc, lr

        .globl  vfp_put_double
vfp_put_double:
        mov     r0, r0, lsr #1
        add     pc, pc, r0, lsl #3
        mov     r0, r0
        .irp    dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
        mcrr    p10, 1, r1, r2, c\dr    @ fmdrr d\dr, r1, r2
        mov     pc, lr
        .endr