Commit 138de1c44a8e0606501cd8593407e9248e84f1b7

Authored by Russell King
1 parent 31f4671799

ARM: VFP: Fix vfp_put_double() for d16-d31

vfp_put_double() takes the double value in r0,r1, not r1,r2.

Reported-by: Tarun Kanti DebBarma <tarun.kanti@ti.com>
Cc: <stable@kernel.org>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
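
For reference, under the AAPCS a 64-bit argument is passed in the even/odd register pair r0,r1, and the following 32-bit argument goes in r2; that is why vfp_put_double() indexes its branch table with r2 and must read the value from r0,r1, and why the d16-d31 mcrr below must name r0,r1. A minimal sketch of the assumed C-side prototypes (the in-tree declarations live in arch/arm/vfp/vfp.h; the comments are an illustration of the register assignment, not part of the source):

#include <linux/types.h>

/*
 * Assumed prototypes for illustration, matching the register usage in
 * vfphw.S: the u64 value occupies r0,r1 and the register index dn is
 * passed in r2 (for the get variant, dn is in r0 and the result is
 * returned in r0,r1).
 */
extern void vfp_put_double(u64 val, unsigned int dn); /* val in r0,r1; dn in r2 */
extern u64 vfp_get_double(unsigned int dn);           /* dn in r0; result in r0,r1 */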

Showing 1 changed file with 1 addition and 1 deletion

arch/arm/vfp/vfphw.S
/*
 * linux/arch/arm/vfp/vfphw.S
 *
 * Copyright (C) 2004 ARM Limited.
 * Written by Deep Blue Solutions Limited.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This code is called from the kernel's undefined instruction trap.
 * r9 holds the return address for successful handling.
 * lr holds the return address for unrecognised instructions.
 * r10 points at the start of the private FP workspace in the thread structure
 * sp points to a struct pt_regs (as defined in include/asm/proc/ptrace.h)
 */
#include <asm/thread_info.h>
#include <asm/vfpmacros.h>
#include "../kernel/entry-header.S"

        .macro  DBGSTR, str
#ifdef DEBUG
        stmfd   sp!, {r0-r3, ip, lr}
        add     r0, pc, #4
        bl      printk
        b       1f
        .asciz  "<7>VFP: \str\n"
        .balign 4
1:      ldmfd   sp!, {r0-r3, ip, lr}
#endif
        .endm

        .macro  DBGSTR1, str, arg
#ifdef DEBUG
        stmfd   sp!, {r0-r3, ip, lr}
        mov     r1, \arg
        add     r0, pc, #4
        bl      printk
        b       1f
        .asciz  "<7>VFP: \str\n"
        .balign 4
1:      ldmfd   sp!, {r0-r3, ip, lr}
#endif
        .endm

        .macro  DBGSTR3, str, arg1, arg2, arg3
#ifdef DEBUG
        stmfd   sp!, {r0-r3, ip, lr}
        mov     r3, \arg3
        mov     r2, \arg2
        mov     r1, \arg1
        add     r0, pc, #4
        bl      printk
        b       1f
        .asciz  "<7>VFP: \str\n"
        .balign 4
1:      ldmfd   sp!, {r0-r3, ip, lr}
#endif
        .endm


@ VFP hardware support entry point.
@
@  r0  = faulted instruction
@  r2  = faulted PC+4
@  r9  = successful return
@  r10 = vfp_state union
@  r11 = CPU number
@  lr  = failure return

ENTRY(vfp_support_entry)
        DBGSTR3 "instr %08x pc %08x state %p", r0, r2, r10

        VFPFMRX r1, FPEXC               @ Is the VFP enabled?
        DBGSTR1 "fpexc %08x", r1
        tst     r1, #FPEXC_EN
        bne     look_for_VFP_exceptions @ VFP is already enabled

        DBGSTR1 "enable %x", r10
        ldr     r3, last_VFP_context_address
        orr     r1, r1, #FPEXC_EN       @ user FPEXC has the enable bit set
        ldr     r4, [r3, r11, lsl #2]   @ last_VFP_context pointer
        bic     r5, r1, #FPEXC_EX       @ make sure exceptions are disabled
        cmp     r4, r10
        beq     check_for_exception     @ we are returning to the same
                                        @ process, so the registers are
                                        @ still there. In this case, we do
                                        @ not want to drop a pending exception.

        VFPFMXR FPEXC, r5               @ enable VFP, disable any pending
                                        @ exceptions, so we can get at the
                                        @ rest of it

#ifndef CONFIG_SMP
        @ Save out the current registers to the old thread state
        @ No need for SMP since this is not done lazily

        DBGSTR1 "save old state %p", r4
        cmp     r4, #0
        beq     no_old_VFP_process
        VFPFSTMIA r4, r5                @ save the working registers
        VFPFMRX r5, FPSCR               @ current status
#ifndef CONFIG_CPU_FEROCEON
        tst     r1, #FPEXC_EX           @ is there additional state to save?
        beq     1f
        VFPFMRX r6, FPINST              @ FPINST (only if FPEXC.EX is set)
        tst     r1, #FPEXC_FP2V         @ is there an FPINST2 to read?
        beq     1f
        VFPFMRX r8, FPINST2             @ FPINST2 if needed (and present)
1:
#endif
        stmia   r4, {r1, r5, r6, r8}    @ save FPEXC, FPSCR, FPINST, FPINST2
                                        @ and point r4 at the word at the
                                        @ start of the register dump
#endif

no_old_VFP_process:
        DBGSTR1 "load state %p", r10
        str     r10, [r3, r11, lsl #2]  @ update the last_VFP_context pointer
                                        @ Load the saved state back into the VFP
        VFPFLDMIA r10, r5               @ reload the working registers while
                                        @ FPEXC is in a safe state
        ldmia   r10, {r1, r5, r6, r8}   @ load FPEXC, FPSCR, FPINST, FPINST2
#ifndef CONFIG_CPU_FEROCEON
        tst     r1, #FPEXC_EX           @ is there additional state to restore?
        beq     1f
        VFPFMXR FPINST, r6              @ restore FPINST (only if FPEXC.EX is set)
        tst     r1, #FPEXC_FP2V         @ is there an FPINST2 to write?
        beq     1f
        VFPFMXR FPINST2, r8             @ FPINST2 if needed (and present)
1:
#endif
        VFPFMXR FPSCR, r5               @ restore status

check_for_exception:
        tst     r1, #FPEXC_EX
        bne     process_exception       @ might as well handle the pending
                                        @ exception before retrying branch
                                        @ out before setting an FPEXC that
                                        @ stops us reading stuff
        VFPFMXR FPEXC, r1               @ restore FPEXC last
        sub     r2, r2, #4
        str     r2, [sp, #S_PC]         @ retry the instruction
#ifdef CONFIG_PREEMPT
        get_thread_info r10
        ldr     r4, [r10, #TI_PREEMPT]  @ get preempt count
        sub     r11, r4, #1             @ decrement it
        str     r11, [r10, #TI_PREEMPT]
#endif
        mov     pc, r9                  @ we think we have handled things


look_for_VFP_exceptions:
        @ Check for synchronous or asynchronous exception
        tst     r1, #FPEXC_EX | FPEXC_DEX
        bne     process_exception
        @ On some implementations of the VFP subarch 1, setting FPSCR.IXE
        @ causes all the CDP instructions to be bounced synchronously without
        @ setting the FPEXC.EX bit
        VFPFMRX r5, FPSCR
        tst     r5, #FPSCR_IXE
        bne     process_exception

        @ Fall into hand on to next handler - appropriate coproc instr
        @ not recognised by VFP

        DBGSTR  "not VFP"
#ifdef CONFIG_PREEMPT
        get_thread_info r10
        ldr     r4, [r10, #TI_PREEMPT]  @ get preempt count
        sub     r11, r4, #1             @ decrement it
        str     r11, [r10, #TI_PREEMPT]
#endif
        mov     pc, lr

process_exception:
        DBGSTR  "bounce"
        mov     r2, sp                  @ nothing stacked - regdump is at TOS
        mov     lr, r9                  @ setup for a return to the user code.

        @ Now call the C code to package up the bounce to the support code
        @   r0 holds the trigger instruction
        @   r1 holds the FPEXC value
        @   r2 pointer to register dump
        b       VFP_bounce              @ we have handled this - the support
                                        @ code will raise an exception if
                                        @ required. If not, the user code will
                                        @ retry the faulted instruction
ENDPROC(vfp_support_entry)

ENTRY(vfp_save_state)
        @ Save the current VFP state
        @ r0 - save location
        @ r1 - FPEXC
        DBGSTR1 "save VFP state %p", r0
        VFPFSTMIA r0, r2                @ save the working registers
        VFPFMRX r2, FPSCR               @ current status
        tst     r1, #FPEXC_EX           @ is there additional state to save?
        beq     1f
        VFPFMRX r3, FPINST              @ FPINST (only if FPEXC.EX is set)
        tst     r1, #FPEXC_FP2V         @ is there an FPINST2 to read?
        beq     1f
        VFPFMRX r12, FPINST2            @ FPINST2 if needed (and present)
1:
        stmia   r0, {r1, r2, r3, r12}   @ save FPEXC, FPSCR, FPINST, FPINST2
        mov     pc, lr
ENDPROC(vfp_save_state)

last_VFP_context_address:
        .word   last_VFP_context

        .macro  tbl_branch, base, tmp, shift
#ifdef CONFIG_THUMB2_KERNEL
        adr     \tmp, 1f
        add     \tmp, \tmp, \base, lsl \shift
        mov     pc, \tmp
#else
        add     pc, pc, \base, lsl \shift
        mov     r0, r0
#endif
1:
        .endm

ENTRY(vfp_get_float)
        tbl_branch r0, r3, #3
        .irp    dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
1:      mrc     p10, 0, r0, c\dr, c0, 0 @ fmrs  r0, s0
        mov     pc, lr
        .org    1b + 8
1:      mrc     p10, 0, r0, c\dr, c0, 4 @ fmrs  r0, s1
        mov     pc, lr
        .org    1b + 8
        .endr
ENDPROC(vfp_get_float)

ENTRY(vfp_put_float)
        tbl_branch r1, r3, #3
        .irp    dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
1:      mcr     p10, 0, r0, c\dr, c0, 0 @ fmsr  r0, s0
        mov     pc, lr
        .org    1b + 8
1:      mcr     p10, 0, r0, c\dr, c0, 4 @ fmsr  r0, s1
        mov     pc, lr
        .org    1b + 8
        .endr
ENDPROC(vfp_put_float)

ENTRY(vfp_get_double)
        tbl_branch r0, r3, #3
        .irp    dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
1:      fmrrd   r0, r1, d\dr
        mov     pc, lr
        .org    1b + 8
        .endr
#ifdef CONFIG_VFPv3
        @ d16 - d31 registers
        .irp    dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
1:      mrrc    p11, 3, r0, r1, c\dr    @ fmrrd r0, r1, d\dr
        mov     pc, lr
        .org    1b + 8
        .endr
#endif

        @ virtual register 16 (or 32 if VFPv3) for compare with zero
        mov     r0, #0
        mov     r1, #0
        mov     pc, lr
ENDPROC(vfp_get_double)

ENTRY(vfp_put_double)
        tbl_branch r2, r3, #3
        .irp    dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
1:      fmdrr   d\dr, r0, r1
        mov     pc, lr
        .org    1b + 8
        .endr
#ifdef CONFIG_VFPv3
        @ d16 - d31 registers
        .irp    dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
-1:     mcrr    p11, 3, r1, r2, c\dr    @ fmdrr r1, r2, d\dr
+1:     mcrr    p11, 3, r0, r1, c\dr    @ fmdrr r0, r1, d\dr
        mov     pc, lr
        .org    1b + 8
        .endr
#endif
ENDPROC(vfp_put_double)