Commit 842dfc11ea9a21f9825167c8a4f2834b205b0a79

Authored by Manuel Lauss
Committed by Ralf Baechle
1 parent 491a48aa52

MIPS: Fix build with binutils 2.24.51+

Starting with version 2.24.51.20140728, MIPS binutils complains loudly
about mixing soft-float and hard-float object files, which leads to the
following build failure, since GCC is invoked with "-msoft-float" on MIPS:

{standard input}: Warning: .gnu_attribute 4,3 requires `softfloat'
  LD      arch/mips/alchemy/common/built-in.o
mipsel-softfloat-linux-gnu-ld: Warning: arch/mips/alchemy/common/built-in.o
 uses -msoft-float (set by arch/mips/alchemy/common/prom.o),
 arch/mips/alchemy/common/sleeper.o uses -mhard-float

To fix this, we detect whether GAS is new enough to support the "-msoft-float"
command-line option, and if it is, we let GCC pass it through to GAS; but then
we also need to sprinkle the files which make use of floating-point registers
with the necessary ".set hardfloat" directives.

Signed-off-by: Manuel Lauss <manuel.lauss@gmail.com>
Cc: Linux-MIPS <linux-mips@linux-mips.org>
Cc: Matthew Fortune <Matthew.Fortune@imgtec.com>
Cc: Markos Chandras <Markos.Chandras@imgtec.com>
Cc: Maciej W. Rozycki <macro@linux-mips.org>
Patchwork: https://patchwork.linux-mips.org/patch/8355/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
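
For context: the ".set hardfloat" directives in the hunks below are wrapped in a
SET_HARDFLOAT helper macro, so kernels built with an older GAS (which neither
needs nor understands the directive) keep assembling. A minimal sketch of how
such a helper can be defined, keyed off the GAS_HAS_SET_HARDFLOAT define added
in the Makefile hunk (the header that actually carries the definition is one of
the changed files not reproduced in this excerpt, so treat the exact form as an
assumption):

	/* Sketch only: emit ".set hardfloat" only when GAS understands it. */
	#if defined(__ASSEMBLY__) && defined(GAS_HAS_SET_HARDFLOAT)
	#define SET_HARDFLOAT .set hardfloat
	#else
	#define SET_HARDFLOAT
	#endif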

Showing 14 changed files with 118 additions and 17 deletions

arch/mips/Makefile
1 # 1 #
2 # This file is subject to the terms and conditions of the GNU General Public 2 # This file is subject to the terms and conditions of the GNU General Public
3 # License. See the file "COPYING" in the main directory of this archive 3 # License. See the file "COPYING" in the main directory of this archive
4 # for more details. 4 # for more details.
5 # 5 #
6 # Copyright (C) 1994, 95, 96, 2003 by Ralf Baechle 6 # Copyright (C) 1994, 95, 96, 2003 by Ralf Baechle
7 # DECStation modifications by Paul M. Antoine, 1996 7 # DECStation modifications by Paul M. Antoine, 1996
8 # Copyright (C) 2002, 2003, 2004 Maciej W. Rozycki 8 # Copyright (C) 2002, 2003, 2004 Maciej W. Rozycki
9 # 9 #
10 # This file is included by the global makefile so that you can add your own 10 # This file is included by the global makefile so that you can add your own
11 # architecture-specific flags and dependencies. Remember to do have actions 11 # architecture-specific flags and dependencies. Remember to do have actions
12 # for "archclean" cleaning up for this architecture. 12 # for "archclean" cleaning up for this architecture.
13 # 13 #
14 14
15 KBUILD_DEFCONFIG := ip22_defconfig 15 KBUILD_DEFCONFIG := ip22_defconfig
16 16
17 # 17 #
18 # Select the object file format to substitute into the linker script. 18 # Select the object file format to substitute into the linker script.
19 # 19 #
20 ifdef CONFIG_CPU_LITTLE_ENDIAN 20 ifdef CONFIG_CPU_LITTLE_ENDIAN
21 32bit-tool-archpref = mipsel 21 32bit-tool-archpref = mipsel
22 64bit-tool-archpref = mips64el 22 64bit-tool-archpref = mips64el
23 32bit-bfd = elf32-tradlittlemips 23 32bit-bfd = elf32-tradlittlemips
24 64bit-bfd = elf64-tradlittlemips 24 64bit-bfd = elf64-tradlittlemips
25 32bit-emul = elf32ltsmip 25 32bit-emul = elf32ltsmip
26 64bit-emul = elf64ltsmip 26 64bit-emul = elf64ltsmip
27 else 27 else
28 32bit-tool-archpref = mips 28 32bit-tool-archpref = mips
29 64bit-tool-archpref = mips64 29 64bit-tool-archpref = mips64
30 32bit-bfd = elf32-tradbigmips 30 32bit-bfd = elf32-tradbigmips
31 64bit-bfd = elf64-tradbigmips 31 64bit-bfd = elf64-tradbigmips
32 32bit-emul = elf32btsmip 32 32bit-emul = elf32btsmip
33 64bit-emul = elf64btsmip 33 64bit-emul = elf64btsmip
34 endif 34 endif
35 35
36 ifdef CONFIG_32BIT 36 ifdef CONFIG_32BIT
37 tool-archpref = $(32bit-tool-archpref) 37 tool-archpref = $(32bit-tool-archpref)
38 UTS_MACHINE := mips 38 UTS_MACHINE := mips
39 endif 39 endif
40 ifdef CONFIG_64BIT 40 ifdef CONFIG_64BIT
41 tool-archpref = $(64bit-tool-archpref) 41 tool-archpref = $(64bit-tool-archpref)
42 UTS_MACHINE := mips64 42 UTS_MACHINE := mips64
43 endif 43 endif
44 44
45 ifneq ($(SUBARCH),$(ARCH)) 45 ifneq ($(SUBARCH),$(ARCH))
46 ifeq ($(CROSS_COMPILE),) 46 ifeq ($(CROSS_COMPILE),)
47 CROSS_COMPILE := $(call cc-cross-prefix, $(tool-archpref)-linux- $(tool-archpref)-linux-gnu- $(tool-archpref)-unknown-linux-gnu-) 47 CROSS_COMPILE := $(call cc-cross-prefix, $(tool-archpref)-linux- $(tool-archpref)-linux-gnu- $(tool-archpref)-unknown-linux-gnu-)
48 endif 48 endif
49 endif 49 endif
50 50
51 ifdef CONFIG_FUNCTION_GRAPH_TRACER 51 ifdef CONFIG_FUNCTION_GRAPH_TRACER
52 ifndef KBUILD_MCOUNT_RA_ADDRESS 52 ifndef KBUILD_MCOUNT_RA_ADDRESS
53 ifeq ($(call cc-option-yn,-mmcount-ra-address), y) 53 ifeq ($(call cc-option-yn,-mmcount-ra-address), y)
54 cflags-y += -mmcount-ra-address -DKBUILD_MCOUNT_RA_ADDRESS 54 cflags-y += -mmcount-ra-address -DKBUILD_MCOUNT_RA_ADDRESS
55 endif 55 endif
56 endif 56 endif
57 endif 57 endif
58 cflags-y += $(call cc-option, -mno-check-zero-division) 58 cflags-y += $(call cc-option, -mno-check-zero-division)
59 59
60 ifdef CONFIG_32BIT 60 ifdef CONFIG_32BIT
61 ld-emul = $(32bit-emul) 61 ld-emul = $(32bit-emul)
62 vmlinux-32 = vmlinux 62 vmlinux-32 = vmlinux
63 vmlinux-64 = vmlinux.64 63 vmlinux-64 = vmlinux.64
64 64
65 cflags-y += -mabi=32 65 cflags-y += -mabi=32
66 endif 66 endif
67 67
68 ifdef CONFIG_64BIT 68 ifdef CONFIG_64BIT
69 ld-emul = $(64bit-emul) 69 ld-emul = $(64bit-emul)
70 vmlinux-32 = vmlinux.32 70 vmlinux-32 = vmlinux.32
71 vmlinux-64 = vmlinux 71 vmlinux-64 = vmlinux
72 72
73 cflags-y += -mabi=64 73 cflags-y += -mabi=64
74 endif 74 endif
75 75
76 all-$(CONFIG_BOOT_ELF32) := $(vmlinux-32) 76 all-$(CONFIG_BOOT_ELF32) := $(vmlinux-32)
77 all-$(CONFIG_BOOT_ELF64) := $(vmlinux-64) 77 all-$(CONFIG_BOOT_ELF64) := $(vmlinux-64)
78 all-$(CONFIG_SYS_SUPPORTS_ZBOOT)+= vmlinuz 78 all-$(CONFIG_SYS_SUPPORTS_ZBOOT)+= vmlinuz
79 79
80 # 80 #
81 # GCC uses -G 0 -mabicalls -fpic as default. We don't want PIC in the kernel 81 # GCC uses -G 0 -mabicalls -fpic as default. We don't want PIC in the kernel
82 # code since it only slows down the whole thing. At some point we might make 82 # code since it only slows down the whole thing. At some point we might make
83 # use of global pointer optimizations but their use of $28 conflicts with 83 # use of global pointer optimizations but their use of $28 conflicts with
84 # the current pointer optimization. 84 # the current pointer optimization.
85 # 85 #
86 # The DECStation requires an ECOFF kernel for remote booting, other MIPS 86 # The DECStation requires an ECOFF kernel for remote booting, other MIPS
87 # machines may also. Since BFD is incredibly buggy with respect to 87 # machines may also. Since BFD is incredibly buggy with respect to
88 # crossformat linking we rely on the elf2ecoff tool for format conversion. 88 # crossformat linking we rely on the elf2ecoff tool for format conversion.
89 # 89 #
90 cflags-y += -G 0 -mno-abicalls -fno-pic -pipe 90 cflags-y += -G 0 -mno-abicalls -fno-pic -pipe
91 cflags-y += -msoft-float 91 cflags-y += -msoft-float
92 LDFLAGS_vmlinux += -G 0 -static -n -nostdlib 92 LDFLAGS_vmlinux += -G 0 -static -n -nostdlib
93 KBUILD_AFLAGS_MODULE += -mlong-calls 93 KBUILD_AFLAGS_MODULE += -mlong-calls
94 KBUILD_CFLAGS_MODULE += -mlong-calls 94 KBUILD_CFLAGS_MODULE += -mlong-calls
95 95
96 #
97 # pass -msoft-float to GAS if it supports it. However on newer binutils
98 # (specifically newer than 2.24.51.20140728) we then also need to explicitly
99 # set ".set hardfloat" in all files which manipulate floating point registers.
100 #
101 ifneq ($(call as-option,-Wa$(comma)-msoft-float,),)
102 cflags-y += -DGAS_HAS_SET_HARDFLOAT -Wa,-msoft-float
103 endif
104
96 cflags-y += -ffreestanding 105 cflags-y += -ffreestanding
97 106
98 # 107 #
99 # We explicitly add the endianness specifier if needed, this allows 108 # We explicitly add the endianness specifier if needed, this allows
100 # to compile kernels with a toolchain for the other endianness. We 109 # to compile kernels with a toolchain for the other endianness. We
101 # carefully avoid to add it redundantly because gcc 3.3/3.4 complains 110 # carefully avoid to add it redundantly because gcc 3.3/3.4 complains
102 # when fed the toolchain default! 111 # when fed the toolchain default!
103 # 112 #
104 # Certain gcc versions up to gcc 4.1.1 (probably 4.2-subversion as of 113 # Certain gcc versions up to gcc 4.1.1 (probably 4.2-subversion as of
105 # 2006-10-10 don't properly change the predefined symbols if -EB / -EL 114 # 2006-10-10 don't properly change the predefined symbols if -EB / -EL
106 # are used, so we kludge that here. A bug has been filed at 115 # are used, so we kludge that here. A bug has been filed at
107 # http://gcc.gnu.org/bugzilla/show_bug.cgi?id=29413. 116 # http://gcc.gnu.org/bugzilla/show_bug.cgi?id=29413.
108 # 117 #
109 undef-all += -UMIPSEB -U_MIPSEB -U__MIPSEB -U__MIPSEB__ 118 undef-all += -UMIPSEB -U_MIPSEB -U__MIPSEB -U__MIPSEB__
110 undef-all += -UMIPSEL -U_MIPSEL -U__MIPSEL -U__MIPSEL__ 119 undef-all += -UMIPSEL -U_MIPSEL -U__MIPSEL -U__MIPSEL__
111 predef-be += -DMIPSEB -D_MIPSEB -D__MIPSEB -D__MIPSEB__ 120 predef-be += -DMIPSEB -D_MIPSEB -D__MIPSEB -D__MIPSEB__
112 predef-le += -DMIPSEL -D_MIPSEL -D__MIPSEL -D__MIPSEL__ 121 predef-le += -DMIPSEL -D_MIPSEL -D__MIPSEL -D__MIPSEL__
113 cflags-$(CONFIG_CPU_BIG_ENDIAN) += $(shell $(CC) -dumpmachine |grep -q 'mips.*el-.*' && echo -EB $(undef-all) $(predef-be)) 122 cflags-$(CONFIG_CPU_BIG_ENDIAN) += $(shell $(CC) -dumpmachine |grep -q 'mips.*el-.*' && echo -EB $(undef-all) $(predef-be))
114 cflags-$(CONFIG_CPU_LITTLE_ENDIAN) += $(shell $(CC) -dumpmachine |grep -q 'mips.*el-.*' || echo -EL $(undef-all) $(predef-le)) 123 cflags-$(CONFIG_CPU_LITTLE_ENDIAN) += $(shell $(CC) -dumpmachine |grep -q 'mips.*el-.*' || echo -EL $(undef-all) $(predef-le))
115 124
116 # For smartmips configurations, there are hundreds of warnings due to ISA overrides 125 # For smartmips configurations, there are hundreds of warnings due to ISA overrides
117 # in assembly and header files. smartmips is only supported for MIPS32r1 onwards 126 # in assembly and header files. smartmips is only supported for MIPS32r1 onwards
118 # and there is no support for 64-bit. Various '.set mips2' or '.set mips3' or 127 # and there is no support for 64-bit. Various '.set mips2' or '.set mips3' or
119 # similar directives in the kernel will spam the build logs with the following warnings: 128 # similar directives in the kernel will spam the build logs with the following warnings:
120 # Warning: the `smartmips' extension requires MIPS32 revision 1 or greater 129 # Warning: the `smartmips' extension requires MIPS32 revision 1 or greater
121 # or 130 # or
122 # Warning: the 64-bit MIPS architecture does not support the `smartmips' extension 131 # Warning: the 64-bit MIPS architecture does not support the `smartmips' extension
123 # Pass -Wa,--no-warn to disable all assembler warnings until the kernel code has 132 # Pass -Wa,--no-warn to disable all assembler warnings until the kernel code has
124 # been fixed properly. 133 # been fixed properly.
125 cflags-$(CONFIG_CPU_HAS_SMARTMIPS) += $(call cc-option,-msmartmips) -Wa,--no-warn 134 cflags-$(CONFIG_CPU_HAS_SMARTMIPS) += $(call cc-option,-msmartmips) -Wa,--no-warn
126 cflags-$(CONFIG_CPU_MICROMIPS) += $(call cc-option,-mmicromips) 135 cflags-$(CONFIG_CPU_MICROMIPS) += $(call cc-option,-mmicromips)
127 136
128 cflags-$(CONFIG_SB1XXX_CORELIS) += $(call cc-option,-mno-sched-prolog) \ 137 cflags-$(CONFIG_SB1XXX_CORELIS) += $(call cc-option,-mno-sched-prolog) \
129 -fno-omit-frame-pointer 138 -fno-omit-frame-pointer
130 139
131 ifeq ($(CONFIG_CPU_HAS_MSA),y) 140 ifeq ($(CONFIG_CPU_HAS_MSA),y)
132 toolchain-msa := $(call cc-option-yn,-mhard-float -mfp64 -Wa$(comma)-mmsa) 141 toolchain-msa := $(call cc-option-yn,-mhard-float -mfp64 -Wa$(comma)-mmsa)
133 cflags-$(toolchain-msa) += -DTOOLCHAIN_SUPPORTS_MSA 142 cflags-$(toolchain-msa) += -DTOOLCHAIN_SUPPORTS_MSA
134 endif 143 endif
135 144
136 # 145 #
137 # CPU-dependent compiler/assembler options for optimization. 146 # CPU-dependent compiler/assembler options for optimization.
138 # 147 #
139 cflags-$(CONFIG_CPU_R3000) += -march=r3000 148 cflags-$(CONFIG_CPU_R3000) += -march=r3000
140 cflags-$(CONFIG_CPU_TX39XX) += -march=r3900 149 cflags-$(CONFIG_CPU_TX39XX) += -march=r3900
141 cflags-$(CONFIG_CPU_R6000) += -march=r6000 -Wa,--trap 150 cflags-$(CONFIG_CPU_R6000) += -march=r6000 -Wa,--trap
142 cflags-$(CONFIG_CPU_R4300) += -march=r4300 -Wa,--trap 151 cflags-$(CONFIG_CPU_R4300) += -march=r4300 -Wa,--trap
143 cflags-$(CONFIG_CPU_VR41XX) += -march=r4100 -Wa,--trap 152 cflags-$(CONFIG_CPU_VR41XX) += -march=r4100 -Wa,--trap
144 cflags-$(CONFIG_CPU_R4X00) += -march=r4600 -Wa,--trap 153 cflags-$(CONFIG_CPU_R4X00) += -march=r4600 -Wa,--trap
145 cflags-$(CONFIG_CPU_TX49XX) += -march=r4600 -Wa,--trap 154 cflags-$(CONFIG_CPU_TX49XX) += -march=r4600 -Wa,--trap
146 cflags-$(CONFIG_CPU_MIPS32_R1) += $(call cc-option,-march=mips32,-mips32 -U_MIPS_ISA -D_MIPS_ISA=_MIPS_ISA_MIPS32) \ 155 cflags-$(CONFIG_CPU_MIPS32_R1) += $(call cc-option,-march=mips32,-mips32 -U_MIPS_ISA -D_MIPS_ISA=_MIPS_ISA_MIPS32) \
147 -Wa,-mips32 -Wa,--trap 156 -Wa,-mips32 -Wa,--trap
148 cflags-$(CONFIG_CPU_MIPS32_R2) += $(call cc-option,-march=mips32r2,-mips32r2 -U_MIPS_ISA -D_MIPS_ISA=_MIPS_ISA_MIPS32) \ 157 cflags-$(CONFIG_CPU_MIPS32_R2) += $(call cc-option,-march=mips32r2,-mips32r2 -U_MIPS_ISA -D_MIPS_ISA=_MIPS_ISA_MIPS32) \
149 -Wa,-mips32r2 -Wa,--trap 158 -Wa,-mips32r2 -Wa,--trap
150 cflags-$(CONFIG_CPU_MIPS64_R1) += $(call cc-option,-march=mips64,-mips64 -U_MIPS_ISA -D_MIPS_ISA=_MIPS_ISA_MIPS64) \ 159 cflags-$(CONFIG_CPU_MIPS64_R1) += $(call cc-option,-march=mips64,-mips64 -U_MIPS_ISA -D_MIPS_ISA=_MIPS_ISA_MIPS64) \
151 -Wa,-mips64 -Wa,--trap 160 -Wa,-mips64 -Wa,--trap
152 cflags-$(CONFIG_CPU_MIPS64_R2) += $(call cc-option,-march=mips64r2,-mips64r2 -U_MIPS_ISA -D_MIPS_ISA=_MIPS_ISA_MIPS64) \ 161 cflags-$(CONFIG_CPU_MIPS64_R2) += $(call cc-option,-march=mips64r2,-mips64r2 -U_MIPS_ISA -D_MIPS_ISA=_MIPS_ISA_MIPS64) \
153 -Wa,-mips64r2 -Wa,--trap 162 -Wa,-mips64r2 -Wa,--trap
154 cflags-$(CONFIG_CPU_R5000) += -march=r5000 -Wa,--trap 163 cflags-$(CONFIG_CPU_R5000) += -march=r5000 -Wa,--trap
155 cflags-$(CONFIG_CPU_R5432) += $(call cc-option,-march=r5400,-march=r5000) \ 164 cflags-$(CONFIG_CPU_R5432) += $(call cc-option,-march=r5400,-march=r5000) \
156 -Wa,--trap 165 -Wa,--trap
157 cflags-$(CONFIG_CPU_R5500) += $(call cc-option,-march=r5500,-march=r5000) \ 166 cflags-$(CONFIG_CPU_R5500) += $(call cc-option,-march=r5500,-march=r5000) \
158 -Wa,--trap 167 -Wa,--trap
159 cflags-$(CONFIG_CPU_NEVADA) += $(call cc-option,-march=rm5200,-march=r5000) \ 168 cflags-$(CONFIG_CPU_NEVADA) += $(call cc-option,-march=rm5200,-march=r5000) \
160 -Wa,--trap 169 -Wa,--trap
161 cflags-$(CONFIG_CPU_RM7000) += $(call cc-option,-march=rm7000,-march=r5000) \ 170 cflags-$(CONFIG_CPU_RM7000) += $(call cc-option,-march=rm7000,-march=r5000) \
162 -Wa,--trap 171 -Wa,--trap
163 cflags-$(CONFIG_CPU_SB1) += $(call cc-option,-march=sb1,-march=r5000) \ 172 cflags-$(CONFIG_CPU_SB1) += $(call cc-option,-march=sb1,-march=r5000) \
164 -Wa,--trap 173 -Wa,--trap
165 cflags-$(CONFIG_CPU_SB1) += $(call cc-option,-mno-mdmx) 174 cflags-$(CONFIG_CPU_SB1) += $(call cc-option,-mno-mdmx)
166 cflags-$(CONFIG_CPU_SB1) += $(call cc-option,-mno-mips3d) 175 cflags-$(CONFIG_CPU_SB1) += $(call cc-option,-mno-mips3d)
167 cflags-$(CONFIG_CPU_R8000) += -march=r8000 -Wa,--trap 176 cflags-$(CONFIG_CPU_R8000) += -march=r8000 -Wa,--trap
168 cflags-$(CONFIG_CPU_R10000) += $(call cc-option,-march=r10000,-march=r8000) \ 177 cflags-$(CONFIG_CPU_R10000) += $(call cc-option,-march=r10000,-march=r8000) \
169 -Wa,--trap 178 -Wa,--trap
170 cflags-$(CONFIG_CPU_CAVIUM_OCTEON) += $(call cc-option,-march=octeon) -Wa,--trap 179 cflags-$(CONFIG_CPU_CAVIUM_OCTEON) += $(call cc-option,-march=octeon) -Wa,--trap
171 ifeq (,$(findstring march=octeon, $(cflags-$(CONFIG_CPU_CAVIUM_OCTEON)))) 180 ifeq (,$(findstring march=octeon, $(cflags-$(CONFIG_CPU_CAVIUM_OCTEON))))
172 cflags-$(CONFIG_CPU_CAVIUM_OCTEON) += -Wa,-march=octeon 181 cflags-$(CONFIG_CPU_CAVIUM_OCTEON) += -Wa,-march=octeon
173 endif 182 endif
174 cflags-$(CONFIG_CAVIUM_CN63XXP1) += -Wa,-mfix-cn63xxp1 183 cflags-$(CONFIG_CAVIUM_CN63XXP1) += -Wa,-mfix-cn63xxp1
175 cflags-$(CONFIG_CPU_BMIPS) += -march=mips32 -Wa,-mips32 -Wa,--trap 184 cflags-$(CONFIG_CPU_BMIPS) += -march=mips32 -Wa,-mips32 -Wa,--trap
176 185
177 cflags-$(CONFIG_CPU_R4000_WORKAROUNDS) += $(call cc-option,-mfix-r4000,) 186 cflags-$(CONFIG_CPU_R4000_WORKAROUNDS) += $(call cc-option,-mfix-r4000,)
178 cflags-$(CONFIG_CPU_R4400_WORKAROUNDS) += $(call cc-option,-mfix-r4400,) 187 cflags-$(CONFIG_CPU_R4400_WORKAROUNDS) += $(call cc-option,-mfix-r4400,)
179 cflags-$(CONFIG_CPU_DADDI_WORKAROUNDS) += $(call cc-option,-mno-daddi,) 188 cflags-$(CONFIG_CPU_DADDI_WORKAROUNDS) += $(call cc-option,-mno-daddi,)
180 189
181 ifdef CONFIG_CPU_SB1 190 ifdef CONFIG_CPU_SB1
182 ifdef CONFIG_SB1_PASS_1_WORKAROUNDS 191 ifdef CONFIG_SB1_PASS_1_WORKAROUNDS
183 KBUILD_AFLAGS_MODULE += -msb1-pass1-workarounds 192 KBUILD_AFLAGS_MODULE += -msb1-pass1-workarounds
184 KBUILD_CFLAGS_MODULE += -msb1-pass1-workarounds 193 KBUILD_CFLAGS_MODULE += -msb1-pass1-workarounds
185 endif 194 endif
186 endif 195 endif
187 196
188 # 197 #
189 # Firmware support 198 # Firmware support
190 # 199 #
191 libs-$(CONFIG_FW_ARC) += arch/mips/fw/arc/ 200 libs-$(CONFIG_FW_ARC) += arch/mips/fw/arc/
192 libs-$(CONFIG_FW_CFE) += arch/mips/fw/cfe/ 201 libs-$(CONFIG_FW_CFE) += arch/mips/fw/cfe/
193 libs-$(CONFIG_FW_SNIPROM) += arch/mips/fw/sni/ 202 libs-$(CONFIG_FW_SNIPROM) += arch/mips/fw/sni/
194 libs-y += arch/mips/fw/lib/ 203 libs-y += arch/mips/fw/lib/
195 204
196 # 205 #
197 # Kernel compression 206 # Kernel compression
198 # 207 #
199 ifdef SYS_SUPPORTS_ZBOOT 208 ifdef SYS_SUPPORTS_ZBOOT
200 COMPRESSION_FNAME = vmlinuz 209 COMPRESSION_FNAME = vmlinuz
201 else 210 else
202 COMPRESSION_FNAME = vmlinux 211 COMPRESSION_FNAME = vmlinux
203 endif 212 endif
204 213
205 # 214 #
206 # Board-dependent options and extra files 215 # Board-dependent options and extra files
207 # 216 #
208 include $(srctree)/arch/mips/Kbuild.platforms 217 include $(srctree)/arch/mips/Kbuild.platforms
209 218
210 ifdef CONFIG_PHYSICAL_START 219 ifdef CONFIG_PHYSICAL_START
211 load-y = $(CONFIG_PHYSICAL_START) 220 load-y = $(CONFIG_PHYSICAL_START)
212 endif 221 endif
213 entry-y = 0x$(shell $(NM) vmlinux 2>/dev/null \ 222 entry-y = 0x$(shell $(NM) vmlinux 2>/dev/null \
214 | grep "\bkernel_entry\b" | cut -f1 -d \ ) 223 | grep "\bkernel_entry\b" | cut -f1 -d \ )
215 224
216 cflags-y += -I$(srctree)/arch/mips/include/asm/mach-generic 225 cflags-y += -I$(srctree)/arch/mips/include/asm/mach-generic
217 drivers-$(CONFIG_PCI) += arch/mips/pci/ 226 drivers-$(CONFIG_PCI) += arch/mips/pci/
218 227
219 # 228 #
220 # Automatically detect the build format. By default we choose 229 # Automatically detect the build format. By default we choose
221 # the elf format according to the load address. 230 # the elf format according to the load address.
222 # We can always force a build with a 64-bits symbol format by 231 # We can always force a build with a 64-bits symbol format by
223 # passing 'KBUILD_SYM32=no' option to the make's command line. 232 # passing 'KBUILD_SYM32=no' option to the make's command line.
224 # 233 #
225 ifdef CONFIG_64BIT 234 ifdef CONFIG_64BIT
226 ifndef KBUILD_SYM32 235 ifndef KBUILD_SYM32
227 ifeq ($(shell expr $(load-y) \< 0xffffffff80000000), 0) 236 ifeq ($(shell expr $(load-y) \< 0xffffffff80000000), 0)
228 KBUILD_SYM32 = y 237 KBUILD_SYM32 = y
229 endif 238 endif
230 endif 239 endif
231 240
232 ifeq ($(KBUILD_SYM32)$(call cc-option-yn,-msym32), yy) 241 ifeq ($(KBUILD_SYM32)$(call cc-option-yn,-msym32), yy)
233 cflags-y += -msym32 -DKBUILD_64BIT_SYM32 242 cflags-y += -msym32 -DKBUILD_64BIT_SYM32
234 else 243 else
235 ifeq ($(CONFIG_CPU_DADDI_WORKAROUNDS), y) 244 ifeq ($(CONFIG_CPU_DADDI_WORKAROUNDS), y)
236 $(error CONFIG_CPU_DADDI_WORKAROUNDS unsupported without -msym32) 245 $(error CONFIG_CPU_DADDI_WORKAROUNDS unsupported without -msym32)
237 endif 246 endif
238 endif 247 endif
239 endif 248 endif
240 249
241 KBUILD_AFLAGS += $(cflags-y) 250 KBUILD_AFLAGS += $(cflags-y)
242 KBUILD_CFLAGS += $(cflags-y) 251 KBUILD_CFLAGS += $(cflags-y)
243 KBUILD_CPPFLAGS += -DVMLINUX_LOAD_ADDRESS=$(load-y) 252 KBUILD_CPPFLAGS += -DVMLINUX_LOAD_ADDRESS=$(load-y)
244 KBUILD_CPPFLAGS += -DDATAOFFSET=$(if $(dataoffset-y),$(dataoffset-y),0) 253 KBUILD_CPPFLAGS += -DDATAOFFSET=$(if $(dataoffset-y),$(dataoffset-y),0)
245 254
246 bootvars-y = VMLINUX_LOAD_ADDRESS=$(load-y) \ 255 bootvars-y = VMLINUX_LOAD_ADDRESS=$(load-y) \
247 VMLINUX_ENTRY_ADDRESS=$(entry-y) 256 VMLINUX_ENTRY_ADDRESS=$(entry-y)
248 257
249 LDFLAGS += -m $(ld-emul) 258 LDFLAGS += -m $(ld-emul)
250 259
251 ifdef CONFIG_MIPS 260 ifdef CONFIG_MIPS
252 CHECKFLAGS += $(shell $(CC) $(KBUILD_CFLAGS) -dM -E -x c /dev/null | \ 261 CHECKFLAGS += $(shell $(CC) $(KBUILD_CFLAGS) -dM -E -x c /dev/null | \
253 egrep -vw '__GNUC_(|MINOR_|PATCHLEVEL_)_' | \ 262 egrep -vw '__GNUC_(|MINOR_|PATCHLEVEL_)_' | \
254 sed -e "s/^\#define /-D'/" -e "s/ /'='/" -e "s/$$/'/") 263 sed -e "s/^\#define /-D'/" -e "s/ /'='/" -e "s/$$/'/")
255 ifdef CONFIG_64BIT 264 ifdef CONFIG_64BIT
256 CHECKFLAGS += -m64 265 CHECKFLAGS += -m64
257 endif 266 endif
258 endif 267 endif
259 268
260 OBJCOPYFLAGS += --remove-section=.reginfo 269 OBJCOPYFLAGS += --remove-section=.reginfo
261 270
262 head-y := arch/mips/kernel/head.o 271 head-y := arch/mips/kernel/head.o
263 272
264 libs-y += arch/mips/lib/ 273 libs-y += arch/mips/lib/
265 libs-y += arch/mips/math-emu/ 274 libs-y += arch/mips/math-emu/
266 275
267 # See arch/mips/Kbuild for content of core part of the kernel 276 # See arch/mips/Kbuild for content of core part of the kernel
268 core-y += arch/mips/ 277 core-y += arch/mips/
269 278
270 drivers-$(CONFIG_OPROFILE) += arch/mips/oprofile/ 279 drivers-$(CONFIG_OPROFILE) += arch/mips/oprofile/
271 280
272 # suspend and hibernation support 281 # suspend and hibernation support
273 drivers-$(CONFIG_PM) += arch/mips/power/ 282 drivers-$(CONFIG_PM) += arch/mips/power/
274 283
275 # boot image targets (arch/mips/boot/) 284 # boot image targets (arch/mips/boot/)
276 boot-y := vmlinux.bin 285 boot-y := vmlinux.bin
277 boot-y += vmlinux.ecoff 286 boot-y += vmlinux.ecoff
278 boot-y += vmlinux.srec 287 boot-y += vmlinux.srec
279 ifeq ($(shell expr $(load-y) \< 0xffffffff80000000 2> /dev/null), 0) 288 ifeq ($(shell expr $(load-y) \< 0xffffffff80000000 2> /dev/null), 0)
280 boot-y += uImage 289 boot-y += uImage
281 boot-y += uImage.gz 290 boot-y += uImage.gz
282 endif 291 endif
283 292
284 # compressed boot image targets (arch/mips/boot/compressed/) 293 # compressed boot image targets (arch/mips/boot/compressed/)
285 bootz-y := vmlinuz 294 bootz-y := vmlinuz
286 bootz-y += vmlinuz.bin 295 bootz-y += vmlinuz.bin
287 bootz-y += vmlinuz.ecoff 296 bootz-y += vmlinuz.ecoff
288 bootz-y += vmlinuz.srec 297 bootz-y += vmlinuz.srec
289 298
290 ifdef CONFIG_LASAT 299 ifdef CONFIG_LASAT
291 rom.bin rom.sw: vmlinux 300 rom.bin rom.sw: vmlinux
292 $(Q)$(MAKE) $(build)=arch/mips/lasat/image \ 301 $(Q)$(MAKE) $(build)=arch/mips/lasat/image \
293 $(bootvars-y) $@ 302 $(bootvars-y) $@
294 endif 303 endif
295 304
296 # 305 #
297 # Some machines like the Indy need 32-bit ELF binaries for booting purposes. 306 # Some machines like the Indy need 32-bit ELF binaries for booting purposes.
298 # Other need ECOFF, so we build a 32-bit ELF binary for them which we then 307 # Other need ECOFF, so we build a 32-bit ELF binary for them which we then
299 # convert to ECOFF using elf2ecoff. 308 # convert to ECOFF using elf2ecoff.
300 # 309 #
301 quiet_cmd_32 = OBJCOPY $@ 310 quiet_cmd_32 = OBJCOPY $@
302 cmd_32 = $(OBJCOPY) -O $(32bit-bfd) $(OBJCOPYFLAGS) $< $@ 311 cmd_32 = $(OBJCOPY) -O $(32bit-bfd) $(OBJCOPYFLAGS) $< $@
303 vmlinux.32: vmlinux 312 vmlinux.32: vmlinux
304 $(call cmd,32) 313 $(call cmd,32)
305 314
306 # 315 #
307 # The 64-bit ELF tools are pretty broken so at this time we generate 64-bit 316 # The 64-bit ELF tools are pretty broken so at this time we generate 64-bit
308 # ELF files from 32-bit files by conversion. 317 # ELF files from 32-bit files by conversion.
309 # 318 #
310 quiet_cmd_64 = OBJCOPY $@ 319 quiet_cmd_64 = OBJCOPY $@
311 cmd_64 = $(OBJCOPY) -O $(64bit-bfd) $(OBJCOPYFLAGS) $< $@ 320 cmd_64 = $(OBJCOPY) -O $(64bit-bfd) $(OBJCOPYFLAGS) $< $@
312 vmlinux.64: vmlinux 321 vmlinux.64: vmlinux
313 $(call cmd,64) 322 $(call cmd,64)
314 323
315 all: $(all-y) 324 all: $(all-y)
316 325
317 # boot 326 # boot
318 $(boot-y): $(vmlinux-32) FORCE 327 $(boot-y): $(vmlinux-32) FORCE
319 $(Q)$(MAKE) $(build)=arch/mips/boot VMLINUX=$(vmlinux-32) \ 328 $(Q)$(MAKE) $(build)=arch/mips/boot VMLINUX=$(vmlinux-32) \
320 $(bootvars-y) arch/mips/boot/$@ 329 $(bootvars-y) arch/mips/boot/$@
321 330
322 ifdef CONFIG_SYS_SUPPORTS_ZBOOT 331 ifdef CONFIG_SYS_SUPPORTS_ZBOOT
323 # boot/compressed 332 # boot/compressed
324 $(bootz-y): $(vmlinux-32) FORCE 333 $(bootz-y): $(vmlinux-32) FORCE
325 $(Q)$(MAKE) $(build)=arch/mips/boot/compressed \ 334 $(Q)$(MAKE) $(build)=arch/mips/boot/compressed \
326 $(bootvars-y) 32bit-bfd=$(32bit-bfd) $@ 335 $(bootvars-y) 32bit-bfd=$(32bit-bfd) $@
327 else 336 else
328 vmlinuz: FORCE 337 vmlinuz: FORCE
329 @echo ' CONFIG_SYS_SUPPORTS_ZBOOT is not enabled' 338 @echo ' CONFIG_SYS_SUPPORTS_ZBOOT is not enabled'
330 /bin/false 339 /bin/false
331 endif 340 endif
332 341
333 342
334 CLEAN_FILES += vmlinux.32 vmlinux.64 343 CLEAN_FILES += vmlinux.32 vmlinux.64
335 344
336 # device-trees 345 # device-trees
337 core-$(CONFIG_BUILTIN_DTB) += arch/mips/boot/dts/ 346 core-$(CONFIG_BUILTIN_DTB) += arch/mips/boot/dts/
338 347
339 %.dtb %.dtb.S %.dtb.o: | scripts 348 %.dtb %.dtb.S %.dtb.o: | scripts
340 $(Q)$(MAKE) $(build)=arch/mips/boot/dts arch/mips/boot/dts/$@ 349 $(Q)$(MAKE) $(build)=arch/mips/boot/dts arch/mips/boot/dts/$@
341 350
342 PHONY += dtbs 351 PHONY += dtbs
343 dtbs: scripts 352 dtbs: scripts
344 $(Q)$(MAKE) $(build)=arch/mips/boot/dts dtbs 353 $(Q)$(MAKE) $(build)=arch/mips/boot/dts dtbs
345 354
346 archprepare: 355 archprepare:
347 ifdef CONFIG_MIPS32_N32 356 ifdef CONFIG_MIPS32_N32
348 @echo ' Checking missing-syscalls for N32' 357 @echo ' Checking missing-syscalls for N32'
349 $(Q)$(MAKE) $(build)=. missing-syscalls missing_syscalls_flags="-mabi=n32" 358 $(Q)$(MAKE) $(build)=. missing-syscalls missing_syscalls_flags="-mabi=n32"
350 endif 359 endif
351 ifdef CONFIG_MIPS32_O32 360 ifdef CONFIG_MIPS32_O32
352 @echo ' Checking missing-syscalls for O32' 361 @echo ' Checking missing-syscalls for O32'
353 $(Q)$(MAKE) $(build)=. missing-syscalls missing_syscalls_flags="-mabi=32" 362 $(Q)$(MAKE) $(build)=. missing-syscalls missing_syscalls_flags="-mabi=32"
354 endif 363 endif
355 364
356 install: 365 install:
357 $(Q)install -D -m 755 vmlinux $(INSTALL_PATH)/vmlinux-$(KERNELRELEASE) 366 $(Q)install -D -m 755 vmlinux $(INSTALL_PATH)/vmlinux-$(KERNELRELEASE)
358 ifdef CONFIG_SYS_SUPPORTS_ZBOOT 367 ifdef CONFIG_SYS_SUPPORTS_ZBOOT
359 $(Q)install -D -m 755 vmlinuz $(INSTALL_PATH)/vmlinuz-$(KERNELRELEASE) 368 $(Q)install -D -m 755 vmlinuz $(INSTALL_PATH)/vmlinuz-$(KERNELRELEASE)
360 endif 369 endif
361 $(Q)install -D -m 644 .config $(INSTALL_PATH)/config-$(KERNELRELEASE) 370 $(Q)install -D -m 644 .config $(INSTALL_PATH)/config-$(KERNELRELEASE)
362 $(Q)install -D -m 644 System.map $(INSTALL_PATH)/System.map-$(KERNELRELEASE) 371 $(Q)install -D -m 644 System.map $(INSTALL_PATH)/System.map-$(KERNELRELEASE)
363 372
364 archclean: 373 archclean:
365 $(Q)$(MAKE) $(clean)=arch/mips/boot 374 $(Q)$(MAKE) $(clean)=arch/mips/boot
366 $(Q)$(MAKE) $(clean)=arch/mips/boot/compressed 375 $(Q)$(MAKE) $(clean)=arch/mips/boot/compressed
367 $(Q)$(MAKE) $(clean)=arch/mips/lasat 376 $(Q)$(MAKE) $(clean)=arch/mips/lasat
368 377
369 define archhelp 378 define archhelp
370 echo ' install - install kernel into $(INSTALL_PATH)' 379 echo ' install - install kernel into $(INSTALL_PATH)'
371 echo ' vmlinux.ecoff - ECOFF boot image' 380 echo ' vmlinux.ecoff - ECOFF boot image'
372 echo ' vmlinux.bin - Raw binary boot image' 381 echo ' vmlinux.bin - Raw binary boot image'
373 echo ' vmlinux.srec - SREC boot image' 382 echo ' vmlinux.srec - SREC boot image'
374 echo ' vmlinuz - Compressed boot(zboot) image' 383 echo ' vmlinuz - Compressed boot(zboot) image'
375 echo ' vmlinuz.ecoff - ECOFF zboot image' 384 echo ' vmlinuz.ecoff - ECOFF zboot image'
376 echo ' vmlinuz.bin - Raw binary zboot image' 385 echo ' vmlinuz.bin - Raw binary zboot image'
377 echo ' vmlinuz.srec - SREC zboot image' 386 echo ' vmlinuz.srec - SREC zboot image'
378 echo ' uImage - U-Boot image' 387 echo ' uImage - U-Boot image'
379 echo ' uImage.gz - U-Boot image (gzip)' 388 echo ' uImage.gz - U-Boot image (gzip)'
380 echo ' dtbs - Device-tree blobs for enabled boards' 389 echo ' dtbs - Device-tree blobs for enabled boards'
381 echo 390 echo
382 echo ' These will be default as appropriate for a configured platform.' 391 echo ' These will be default as appropriate for a configured platform.'
383 endef 392 endef
384 393
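
Each of the assembler-macro hunks that follow uses the same pattern: push the
current ".set" state, force hardfloat around the FPU/MSA register accesses, and
pop it again. Distilled to a standalone example (using the SET_HARDFLOAT helper
sketched above and the same asm-offsets symbols the real hunks use; the macro
name is illustrative only):

	.macro	fpu_touch_example thread tmp=t0
	.set	push
	SET_HARDFLOAT			# file is built -msoft-float; allow FPU insns here
	cfc1	\tmp, fcr31
	sw	\tmp, THREAD_FCR31(\thread)
	.set	pop
	.endm
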
arch/mips/include/asm/asmmacro-32.h
1 /* 1 /*
2 * asmmacro.h: Assembler macros to make things easier to read. 2 * asmmacro.h: Assembler macros to make things easier to read.
3 * 3 *
4 * Copyright (C) 1996 David S. Miller (davem@davemloft.net) 4 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
5 * Copyright (C) 1998, 1999, 2003 Ralf Baechle 5 * Copyright (C) 1998, 1999, 2003 Ralf Baechle
6 */ 6 */
7 #ifndef _ASM_ASMMACRO_32_H 7 #ifndef _ASM_ASMMACRO_32_H
8 #define _ASM_ASMMACRO_32_H 8 #define _ASM_ASMMACRO_32_H
9 9
10 #include <asm/asm-offsets.h> 10 #include <asm/asm-offsets.h>
11 #include <asm/regdef.h> 11 #include <asm/regdef.h>
12 #include <asm/fpregdef.h> 12 #include <asm/fpregdef.h>
13 #include <asm/mipsregs.h> 13 #include <asm/mipsregs.h>
14 14
15 .macro fpu_save_single thread tmp=t0 15 .macro fpu_save_single thread tmp=t0
16 .set push
17 SET_HARDFLOAT
16 cfc1 \tmp, fcr31 18 cfc1 \tmp, fcr31
17 swc1 $f0, THREAD_FPR0_LS64(\thread) 19 swc1 $f0, THREAD_FPR0_LS64(\thread)
18 swc1 $f1, THREAD_FPR1_LS64(\thread) 20 swc1 $f1, THREAD_FPR1_LS64(\thread)
19 swc1 $f2, THREAD_FPR2_LS64(\thread) 21 swc1 $f2, THREAD_FPR2_LS64(\thread)
20 swc1 $f3, THREAD_FPR3_LS64(\thread) 22 swc1 $f3, THREAD_FPR3_LS64(\thread)
21 swc1 $f4, THREAD_FPR4_LS64(\thread) 23 swc1 $f4, THREAD_FPR4_LS64(\thread)
22 swc1 $f5, THREAD_FPR5_LS64(\thread) 24 swc1 $f5, THREAD_FPR5_LS64(\thread)
23 swc1 $f6, THREAD_FPR6_LS64(\thread) 25 swc1 $f6, THREAD_FPR6_LS64(\thread)
24 swc1 $f7, THREAD_FPR7_LS64(\thread) 26 swc1 $f7, THREAD_FPR7_LS64(\thread)
25 swc1 $f8, THREAD_FPR8_LS64(\thread) 27 swc1 $f8, THREAD_FPR8_LS64(\thread)
26 swc1 $f9, THREAD_FPR9_LS64(\thread) 28 swc1 $f9, THREAD_FPR9_LS64(\thread)
27 swc1 $f10, THREAD_FPR10_LS64(\thread) 29 swc1 $f10, THREAD_FPR10_LS64(\thread)
28 swc1 $f11, THREAD_FPR11_LS64(\thread) 30 swc1 $f11, THREAD_FPR11_LS64(\thread)
29 swc1 $f12, THREAD_FPR12_LS64(\thread) 31 swc1 $f12, THREAD_FPR12_LS64(\thread)
30 swc1 $f13, THREAD_FPR13_LS64(\thread) 32 swc1 $f13, THREAD_FPR13_LS64(\thread)
31 swc1 $f14, THREAD_FPR14_LS64(\thread) 33 swc1 $f14, THREAD_FPR14_LS64(\thread)
32 swc1 $f15, THREAD_FPR15_LS64(\thread) 34 swc1 $f15, THREAD_FPR15_LS64(\thread)
33 swc1 $f16, THREAD_FPR16_LS64(\thread) 35 swc1 $f16, THREAD_FPR16_LS64(\thread)
34 swc1 $f17, THREAD_FPR17_LS64(\thread) 36 swc1 $f17, THREAD_FPR17_LS64(\thread)
35 swc1 $f18, THREAD_FPR18_LS64(\thread) 37 swc1 $f18, THREAD_FPR18_LS64(\thread)
36 swc1 $f19, THREAD_FPR19_LS64(\thread) 38 swc1 $f19, THREAD_FPR19_LS64(\thread)
37 swc1 $f20, THREAD_FPR20_LS64(\thread) 39 swc1 $f20, THREAD_FPR20_LS64(\thread)
38 swc1 $f21, THREAD_FPR21_LS64(\thread) 40 swc1 $f21, THREAD_FPR21_LS64(\thread)
39 swc1 $f22, THREAD_FPR22_LS64(\thread) 41 swc1 $f22, THREAD_FPR22_LS64(\thread)
40 swc1 $f23, THREAD_FPR23_LS64(\thread) 42 swc1 $f23, THREAD_FPR23_LS64(\thread)
41 swc1 $f24, THREAD_FPR24_LS64(\thread) 43 swc1 $f24, THREAD_FPR24_LS64(\thread)
42 swc1 $f25, THREAD_FPR25_LS64(\thread) 44 swc1 $f25, THREAD_FPR25_LS64(\thread)
43 swc1 $f26, THREAD_FPR26_LS64(\thread) 45 swc1 $f26, THREAD_FPR26_LS64(\thread)
44 swc1 $f27, THREAD_FPR27_LS64(\thread) 46 swc1 $f27, THREAD_FPR27_LS64(\thread)
45 swc1 $f28, THREAD_FPR28_LS64(\thread) 47 swc1 $f28, THREAD_FPR28_LS64(\thread)
46 swc1 $f29, THREAD_FPR29_LS64(\thread) 48 swc1 $f29, THREAD_FPR29_LS64(\thread)
47 swc1 $f30, THREAD_FPR30_LS64(\thread) 49 swc1 $f30, THREAD_FPR30_LS64(\thread)
48 swc1 $f31, THREAD_FPR31_LS64(\thread) 50 swc1 $f31, THREAD_FPR31_LS64(\thread)
49 sw \tmp, THREAD_FCR31(\thread) 51 sw \tmp, THREAD_FCR31(\thread)
52 .set pop
50 .endm 53 .endm
51 54
52 .macro fpu_restore_single thread tmp=t0 55 .macro fpu_restore_single thread tmp=t0
56 .set push
57 SET_HARDFLOAT
53 lw \tmp, THREAD_FCR31(\thread) 58 lw \tmp, THREAD_FCR31(\thread)
54 lwc1 $f0, THREAD_FPR0_LS64(\thread) 59 lwc1 $f0, THREAD_FPR0_LS64(\thread)
55 lwc1 $f1, THREAD_FPR1_LS64(\thread) 60 lwc1 $f1, THREAD_FPR1_LS64(\thread)
56 lwc1 $f2, THREAD_FPR2_LS64(\thread) 61 lwc1 $f2, THREAD_FPR2_LS64(\thread)
57 lwc1 $f3, THREAD_FPR3_LS64(\thread) 62 lwc1 $f3, THREAD_FPR3_LS64(\thread)
58 lwc1 $f4, THREAD_FPR4_LS64(\thread) 63 lwc1 $f4, THREAD_FPR4_LS64(\thread)
59 lwc1 $f5, THREAD_FPR5_LS64(\thread) 64 lwc1 $f5, THREAD_FPR5_LS64(\thread)
60 lwc1 $f6, THREAD_FPR6_LS64(\thread) 65 lwc1 $f6, THREAD_FPR6_LS64(\thread)
61 lwc1 $f7, THREAD_FPR7_LS64(\thread) 66 lwc1 $f7, THREAD_FPR7_LS64(\thread)
62 lwc1 $f8, THREAD_FPR8_LS64(\thread) 67 lwc1 $f8, THREAD_FPR8_LS64(\thread)
63 lwc1 $f9, THREAD_FPR9_LS64(\thread) 68 lwc1 $f9, THREAD_FPR9_LS64(\thread)
64 lwc1 $f10, THREAD_FPR10_LS64(\thread) 69 lwc1 $f10, THREAD_FPR10_LS64(\thread)
65 lwc1 $f11, THREAD_FPR11_LS64(\thread) 70 lwc1 $f11, THREAD_FPR11_LS64(\thread)
66 lwc1 $f12, THREAD_FPR12_LS64(\thread) 71 lwc1 $f12, THREAD_FPR12_LS64(\thread)
67 lwc1 $f13, THREAD_FPR13_LS64(\thread) 72 lwc1 $f13, THREAD_FPR13_LS64(\thread)
68 lwc1 $f14, THREAD_FPR14_LS64(\thread) 73 lwc1 $f14, THREAD_FPR14_LS64(\thread)
69 lwc1 $f15, THREAD_FPR15_LS64(\thread) 74 lwc1 $f15, THREAD_FPR15_LS64(\thread)
70 lwc1 $f16, THREAD_FPR16_LS64(\thread) 75 lwc1 $f16, THREAD_FPR16_LS64(\thread)
71 lwc1 $f17, THREAD_FPR17_LS64(\thread) 76 lwc1 $f17, THREAD_FPR17_LS64(\thread)
72 lwc1 $f18, THREAD_FPR18_LS64(\thread) 77 lwc1 $f18, THREAD_FPR18_LS64(\thread)
73 lwc1 $f19, THREAD_FPR19_LS64(\thread) 78 lwc1 $f19, THREAD_FPR19_LS64(\thread)
74 lwc1 $f20, THREAD_FPR20_LS64(\thread) 79 lwc1 $f20, THREAD_FPR20_LS64(\thread)
75 lwc1 $f21, THREAD_FPR21_LS64(\thread) 80 lwc1 $f21, THREAD_FPR21_LS64(\thread)
76 lwc1 $f22, THREAD_FPR22_LS64(\thread) 81 lwc1 $f22, THREAD_FPR22_LS64(\thread)
77 lwc1 $f23, THREAD_FPR23_LS64(\thread) 82 lwc1 $f23, THREAD_FPR23_LS64(\thread)
78 lwc1 $f24, THREAD_FPR24_LS64(\thread) 83 lwc1 $f24, THREAD_FPR24_LS64(\thread)
79 lwc1 $f25, THREAD_FPR25_LS64(\thread) 84 lwc1 $f25, THREAD_FPR25_LS64(\thread)
80 lwc1 $f26, THREAD_FPR26_LS64(\thread) 85 lwc1 $f26, THREAD_FPR26_LS64(\thread)
81 lwc1 $f27, THREAD_FPR27_LS64(\thread) 86 lwc1 $f27, THREAD_FPR27_LS64(\thread)
82 lwc1 $f28, THREAD_FPR28_LS64(\thread) 87 lwc1 $f28, THREAD_FPR28_LS64(\thread)
83 lwc1 $f29, THREAD_FPR29_LS64(\thread) 88 lwc1 $f29, THREAD_FPR29_LS64(\thread)
84 lwc1 $f30, THREAD_FPR30_LS64(\thread) 89 lwc1 $f30, THREAD_FPR30_LS64(\thread)
85 lwc1 $f31, THREAD_FPR31_LS64(\thread) 90 lwc1 $f31, THREAD_FPR31_LS64(\thread)
86 ctc1 \tmp, fcr31 91 ctc1 \tmp, fcr31
92 .set pop
87 .endm 93 .endm
88 94
89 .macro cpu_save_nonscratch thread 95 .macro cpu_save_nonscratch thread
90 LONG_S s0, THREAD_REG16(\thread) 96 LONG_S s0, THREAD_REG16(\thread)
91 LONG_S s1, THREAD_REG17(\thread) 97 LONG_S s1, THREAD_REG17(\thread)
92 LONG_S s2, THREAD_REG18(\thread) 98 LONG_S s2, THREAD_REG18(\thread)
93 LONG_S s3, THREAD_REG19(\thread) 99 LONG_S s3, THREAD_REG19(\thread)
94 LONG_S s4, THREAD_REG20(\thread) 100 LONG_S s4, THREAD_REG20(\thread)
95 LONG_S s5, THREAD_REG21(\thread) 101 LONG_S s5, THREAD_REG21(\thread)
96 LONG_S s6, THREAD_REG22(\thread) 102 LONG_S s6, THREAD_REG22(\thread)
97 LONG_S s7, THREAD_REG23(\thread) 103 LONG_S s7, THREAD_REG23(\thread)
98 LONG_S sp, THREAD_REG29(\thread) 104 LONG_S sp, THREAD_REG29(\thread)
99 LONG_S fp, THREAD_REG30(\thread) 105 LONG_S fp, THREAD_REG30(\thread)
100 .endm 106 .endm
101 107
102 .macro cpu_restore_nonscratch thread 108 .macro cpu_restore_nonscratch thread
103 LONG_L s0, THREAD_REG16(\thread) 109 LONG_L s0, THREAD_REG16(\thread)
104 LONG_L s1, THREAD_REG17(\thread) 110 LONG_L s1, THREAD_REG17(\thread)
105 LONG_L s2, THREAD_REG18(\thread) 111 LONG_L s2, THREAD_REG18(\thread)
106 LONG_L s3, THREAD_REG19(\thread) 112 LONG_L s3, THREAD_REG19(\thread)
107 LONG_L s4, THREAD_REG20(\thread) 113 LONG_L s4, THREAD_REG20(\thread)
108 LONG_L s5, THREAD_REG21(\thread) 114 LONG_L s5, THREAD_REG21(\thread)
109 LONG_L s6, THREAD_REG22(\thread) 115 LONG_L s6, THREAD_REG22(\thread)
110 LONG_L s7, THREAD_REG23(\thread) 116 LONG_L s7, THREAD_REG23(\thread)
111 LONG_L sp, THREAD_REG29(\thread) 117 LONG_L sp, THREAD_REG29(\thread)
112 LONG_L fp, THREAD_REG30(\thread) 118 LONG_L fp, THREAD_REG30(\thread)
113 LONG_L ra, THREAD_REG31(\thread) 119 LONG_L ra, THREAD_REG31(\thread)
114 .endm 120 .endm
115 121
116 #endif /* _ASM_ASMMACRO_32_H */ 122 #endif /* _ASM_ASMMACRO_32_H */
117 123
arch/mips/include/asm/asmmacro.h
1 /* 1 /*
2 * This file is subject to the terms and conditions of the GNU General Public 2 * This file is subject to the terms and conditions of the GNU General Public
3 * License. See the file "COPYING" in the main directory of this archive 3 * License. See the file "COPYING" in the main directory of this archive
4 * for more details. 4 * for more details.
5 * 5 *
6 * Copyright (C) 2003 Ralf Baechle 6 * Copyright (C) 2003 Ralf Baechle
7 */ 7 */
8 #ifndef _ASM_ASMMACRO_H 8 #ifndef _ASM_ASMMACRO_H
9 #define _ASM_ASMMACRO_H 9 #define _ASM_ASMMACRO_H
10 10
11 #include <asm/hazards.h> 11 #include <asm/hazards.h>
12 #include <asm/asm-offsets.h> 12 #include <asm/asm-offsets.h>
13 #include <asm/msa.h> 13 #include <asm/msa.h>
14 14
15 #ifdef CONFIG_32BIT 15 #ifdef CONFIG_32BIT
16 #include <asm/asmmacro-32.h> 16 #include <asm/asmmacro-32.h>
17 #endif 17 #endif
18 #ifdef CONFIG_64BIT 18 #ifdef CONFIG_64BIT
19 #include <asm/asmmacro-64.h> 19 #include <asm/asmmacro-64.h>
20 #endif 20 #endif
21 21
22 #ifdef CONFIG_CPU_MIPSR2 22 #ifdef CONFIG_CPU_MIPSR2
23 .macro local_irq_enable reg=t0 23 .macro local_irq_enable reg=t0
24 ei 24 ei
25 irq_enable_hazard 25 irq_enable_hazard
26 .endm 26 .endm
27 27
28 .macro local_irq_disable reg=t0 28 .macro local_irq_disable reg=t0
29 di 29 di
30 irq_disable_hazard 30 irq_disable_hazard
31 .endm 31 .endm
32 #else 32 #else
33 .macro local_irq_enable reg=t0 33 .macro local_irq_enable reg=t0
34 mfc0 \reg, CP0_STATUS 34 mfc0 \reg, CP0_STATUS
35 ori \reg, \reg, 1 35 ori \reg, \reg, 1
36 mtc0 \reg, CP0_STATUS 36 mtc0 \reg, CP0_STATUS
37 irq_enable_hazard 37 irq_enable_hazard
38 .endm 38 .endm
39 39
40 .macro local_irq_disable reg=t0 40 .macro local_irq_disable reg=t0
41 #ifdef CONFIG_PREEMPT 41 #ifdef CONFIG_PREEMPT
42 lw \reg, TI_PRE_COUNT($28) 42 lw \reg, TI_PRE_COUNT($28)
43 addi \reg, \reg, 1 43 addi \reg, \reg, 1
44 sw \reg, TI_PRE_COUNT($28) 44 sw \reg, TI_PRE_COUNT($28)
45 #endif 45 #endif
46 mfc0 \reg, CP0_STATUS 46 mfc0 \reg, CP0_STATUS
47 ori \reg, \reg, 1 47 ori \reg, \reg, 1
48 xori \reg, \reg, 1 48 xori \reg, \reg, 1
49 mtc0 \reg, CP0_STATUS 49 mtc0 \reg, CP0_STATUS
50 irq_disable_hazard 50 irq_disable_hazard
51 #ifdef CONFIG_PREEMPT 51 #ifdef CONFIG_PREEMPT
52 lw \reg, TI_PRE_COUNT($28) 52 lw \reg, TI_PRE_COUNT($28)
53 addi \reg, \reg, -1 53 addi \reg, \reg, -1
54 sw \reg, TI_PRE_COUNT($28) 54 sw \reg, TI_PRE_COUNT($28)
55 #endif 55 #endif
56 .endm 56 .endm
57 #endif /* CONFIG_CPU_MIPSR2 */ 57 #endif /* CONFIG_CPU_MIPSR2 */
58 58
59 .macro fpu_save_16even thread tmp=t0 59 .macro fpu_save_16even thread tmp=t0
60 .set push
61 SET_HARDFLOAT
60 cfc1 \tmp, fcr31 62 cfc1 \tmp, fcr31
61 sdc1 $f0, THREAD_FPR0_LS64(\thread) 63 sdc1 $f0, THREAD_FPR0_LS64(\thread)
62 sdc1 $f2, THREAD_FPR2_LS64(\thread) 64 sdc1 $f2, THREAD_FPR2_LS64(\thread)
63 sdc1 $f4, THREAD_FPR4_LS64(\thread) 65 sdc1 $f4, THREAD_FPR4_LS64(\thread)
64 sdc1 $f6, THREAD_FPR6_LS64(\thread) 66 sdc1 $f6, THREAD_FPR6_LS64(\thread)
65 sdc1 $f8, THREAD_FPR8_LS64(\thread) 67 sdc1 $f8, THREAD_FPR8_LS64(\thread)
66 sdc1 $f10, THREAD_FPR10_LS64(\thread) 68 sdc1 $f10, THREAD_FPR10_LS64(\thread)
67 sdc1 $f12, THREAD_FPR12_LS64(\thread) 69 sdc1 $f12, THREAD_FPR12_LS64(\thread)
68 sdc1 $f14, THREAD_FPR14_LS64(\thread) 70 sdc1 $f14, THREAD_FPR14_LS64(\thread)
69 sdc1 $f16, THREAD_FPR16_LS64(\thread) 71 sdc1 $f16, THREAD_FPR16_LS64(\thread)
70 sdc1 $f18, THREAD_FPR18_LS64(\thread) 72 sdc1 $f18, THREAD_FPR18_LS64(\thread)
71 sdc1 $f20, THREAD_FPR20_LS64(\thread) 73 sdc1 $f20, THREAD_FPR20_LS64(\thread)
72 sdc1 $f22, THREAD_FPR22_LS64(\thread) 74 sdc1 $f22, THREAD_FPR22_LS64(\thread)
73 sdc1 $f24, THREAD_FPR24_LS64(\thread) 75 sdc1 $f24, THREAD_FPR24_LS64(\thread)
74 sdc1 $f26, THREAD_FPR26_LS64(\thread) 76 sdc1 $f26, THREAD_FPR26_LS64(\thread)
75 sdc1 $f28, THREAD_FPR28_LS64(\thread) 77 sdc1 $f28, THREAD_FPR28_LS64(\thread)
76 sdc1 $f30, THREAD_FPR30_LS64(\thread) 78 sdc1 $f30, THREAD_FPR30_LS64(\thread)
77 sw \tmp, THREAD_FCR31(\thread) 79 sw \tmp, THREAD_FCR31(\thread)
80 .set pop
78 .endm 81 .endm
79 82
80 .macro fpu_save_16odd thread 83 .macro fpu_save_16odd thread
81 .set push 84 .set push
82 .set mips64r2 85 .set mips64r2
86 SET_HARDFLOAT
83 sdc1 $f1, THREAD_FPR1_LS64(\thread) 87 sdc1 $f1, THREAD_FPR1_LS64(\thread)
84 sdc1 $f3, THREAD_FPR3_LS64(\thread) 88 sdc1 $f3, THREAD_FPR3_LS64(\thread)
85 sdc1 $f5, THREAD_FPR5_LS64(\thread) 89 sdc1 $f5, THREAD_FPR5_LS64(\thread)
86 sdc1 $f7, THREAD_FPR7_LS64(\thread) 90 sdc1 $f7, THREAD_FPR7_LS64(\thread)
87 sdc1 $f9, THREAD_FPR9_LS64(\thread) 91 sdc1 $f9, THREAD_FPR9_LS64(\thread)
88 sdc1 $f11, THREAD_FPR11_LS64(\thread) 92 sdc1 $f11, THREAD_FPR11_LS64(\thread)
89 sdc1 $f13, THREAD_FPR13_LS64(\thread) 93 sdc1 $f13, THREAD_FPR13_LS64(\thread)
90 sdc1 $f15, THREAD_FPR15_LS64(\thread) 94 sdc1 $f15, THREAD_FPR15_LS64(\thread)
91 sdc1 $f17, THREAD_FPR17_LS64(\thread) 95 sdc1 $f17, THREAD_FPR17_LS64(\thread)
92 sdc1 $f19, THREAD_FPR19_LS64(\thread) 96 sdc1 $f19, THREAD_FPR19_LS64(\thread)
93 sdc1 $f21, THREAD_FPR21_LS64(\thread) 97 sdc1 $f21, THREAD_FPR21_LS64(\thread)
94 sdc1 $f23, THREAD_FPR23_LS64(\thread) 98 sdc1 $f23, THREAD_FPR23_LS64(\thread)
95 sdc1 $f25, THREAD_FPR25_LS64(\thread) 99 sdc1 $f25, THREAD_FPR25_LS64(\thread)
96 sdc1 $f27, THREAD_FPR27_LS64(\thread) 100 sdc1 $f27, THREAD_FPR27_LS64(\thread)
97 sdc1 $f29, THREAD_FPR29_LS64(\thread) 101 sdc1 $f29, THREAD_FPR29_LS64(\thread)
98 sdc1 $f31, THREAD_FPR31_LS64(\thread) 102 sdc1 $f31, THREAD_FPR31_LS64(\thread)
99 .set pop 103 .set pop
100 .endm 104 .endm
101 105
102 .macro fpu_save_double thread status tmp 106 .macro fpu_save_double thread status tmp
103 #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) 107 #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
104 sll \tmp, \status, 5 108 sll \tmp, \status, 5
105 bgez \tmp, 10f 109 bgez \tmp, 10f
106 fpu_save_16odd \thread 110 fpu_save_16odd \thread
107 10: 111 10:
108 #endif 112 #endif
109 fpu_save_16even \thread \tmp 113 fpu_save_16even \thread \tmp
110 .endm 114 .endm
111 115
112 .macro fpu_restore_16even thread tmp=t0 116 .macro fpu_restore_16even thread tmp=t0
117 .set push
118 SET_HARDFLOAT
113 lw \tmp, THREAD_FCR31(\thread) 119 lw \tmp, THREAD_FCR31(\thread)
114 ldc1 $f0, THREAD_FPR0_LS64(\thread) 120 ldc1 $f0, THREAD_FPR0_LS64(\thread)
115 ldc1 $f2, THREAD_FPR2_LS64(\thread) 121 ldc1 $f2, THREAD_FPR2_LS64(\thread)
116 ldc1 $f4, THREAD_FPR4_LS64(\thread) 122 ldc1 $f4, THREAD_FPR4_LS64(\thread)
117 ldc1 $f6, THREAD_FPR6_LS64(\thread) 123 ldc1 $f6, THREAD_FPR6_LS64(\thread)
118 ldc1 $f8, THREAD_FPR8_LS64(\thread) 124 ldc1 $f8, THREAD_FPR8_LS64(\thread)
119 ldc1 $f10, THREAD_FPR10_LS64(\thread) 125 ldc1 $f10, THREAD_FPR10_LS64(\thread)
120 ldc1 $f12, THREAD_FPR12_LS64(\thread) 126 ldc1 $f12, THREAD_FPR12_LS64(\thread)
121 ldc1 $f14, THREAD_FPR14_LS64(\thread) 127 ldc1 $f14, THREAD_FPR14_LS64(\thread)
122 ldc1 $f16, THREAD_FPR16_LS64(\thread) 128 ldc1 $f16, THREAD_FPR16_LS64(\thread)
123 ldc1 $f18, THREAD_FPR18_LS64(\thread) 129 ldc1 $f18, THREAD_FPR18_LS64(\thread)
124 ldc1 $f20, THREAD_FPR20_LS64(\thread) 130 ldc1 $f20, THREAD_FPR20_LS64(\thread)
125 ldc1 $f22, THREAD_FPR22_LS64(\thread) 131 ldc1 $f22, THREAD_FPR22_LS64(\thread)
126 ldc1 $f24, THREAD_FPR24_LS64(\thread) 132 ldc1 $f24, THREAD_FPR24_LS64(\thread)
127 ldc1 $f26, THREAD_FPR26_LS64(\thread) 133 ldc1 $f26, THREAD_FPR26_LS64(\thread)
128 ldc1 $f28, THREAD_FPR28_LS64(\thread) 134 ldc1 $f28, THREAD_FPR28_LS64(\thread)
129 ldc1 $f30, THREAD_FPR30_LS64(\thread) 135 ldc1 $f30, THREAD_FPR30_LS64(\thread)
130 ctc1 \tmp, fcr31 136 ctc1 \tmp, fcr31
131 .endm 137 .endm
132 138
133 .macro fpu_restore_16odd thread 139 .macro fpu_restore_16odd thread
134 .set push 140 .set push
135 .set mips64r2 141 .set mips64r2
142 SET_HARDFLOAT
136 ldc1 $f1, THREAD_FPR1_LS64(\thread) 143 ldc1 $f1, THREAD_FPR1_LS64(\thread)
137 ldc1 $f3, THREAD_FPR3_LS64(\thread) 144 ldc1 $f3, THREAD_FPR3_LS64(\thread)
138 ldc1 $f5, THREAD_FPR5_LS64(\thread) 145 ldc1 $f5, THREAD_FPR5_LS64(\thread)
139 ldc1 $f7, THREAD_FPR7_LS64(\thread) 146 ldc1 $f7, THREAD_FPR7_LS64(\thread)
140 ldc1 $f9, THREAD_FPR9_LS64(\thread) 147 ldc1 $f9, THREAD_FPR9_LS64(\thread)
141 ldc1 $f11, THREAD_FPR11_LS64(\thread) 148 ldc1 $f11, THREAD_FPR11_LS64(\thread)
142 ldc1 $f13, THREAD_FPR13_LS64(\thread) 149 ldc1 $f13, THREAD_FPR13_LS64(\thread)
143 ldc1 $f15, THREAD_FPR15_LS64(\thread) 150 ldc1 $f15, THREAD_FPR15_LS64(\thread)
144 ldc1 $f17, THREAD_FPR17_LS64(\thread) 151 ldc1 $f17, THREAD_FPR17_LS64(\thread)
145 ldc1 $f19, THREAD_FPR19_LS64(\thread) 152 ldc1 $f19, THREAD_FPR19_LS64(\thread)
146 ldc1 $f21, THREAD_FPR21_LS64(\thread) 153 ldc1 $f21, THREAD_FPR21_LS64(\thread)
147 ldc1 $f23, THREAD_FPR23_LS64(\thread) 154 ldc1 $f23, THREAD_FPR23_LS64(\thread)
148 ldc1 $f25, THREAD_FPR25_LS64(\thread) 155 ldc1 $f25, THREAD_FPR25_LS64(\thread)
149 ldc1 $f27, THREAD_FPR27_LS64(\thread) 156 ldc1 $f27, THREAD_FPR27_LS64(\thread)
150 ldc1 $f29, THREAD_FPR29_LS64(\thread) 157 ldc1 $f29, THREAD_FPR29_LS64(\thread)
151 ldc1 $f31, THREAD_FPR31_LS64(\thread) 158 ldc1 $f31, THREAD_FPR31_LS64(\thread)
152 .set pop 159 .set pop
153 .endm 160 .endm
154 161
155 .macro fpu_restore_double thread status tmp 162 .macro fpu_restore_double thread status tmp
156 #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) 163 #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
157 sll \tmp, \status, 5 164 sll \tmp, \status, 5
158 bgez \tmp, 10f # 16 register mode? 165 bgez \tmp, 10f # 16 register mode?
159 166
160 fpu_restore_16odd \thread 167 fpu_restore_16odd \thread
161 10: 168 10:
162 #endif 169 #endif
163 fpu_restore_16even \thread \tmp 170 fpu_restore_16even \thread \tmp
164 .endm 171 .endm
165 172
166 #ifdef CONFIG_CPU_MIPSR2 173 #ifdef CONFIG_CPU_MIPSR2
167 .macro _EXT rd, rs, p, s 174 .macro _EXT rd, rs, p, s
168 ext \rd, \rs, \p, \s 175 ext \rd, \rs, \p, \s
169 .endm 176 .endm
170 #else /* !CONFIG_CPU_MIPSR2 */ 177 #else /* !CONFIG_CPU_MIPSR2 */
171 .macro _EXT rd, rs, p, s 178 .macro _EXT rd, rs, p, s
172 srl \rd, \rs, \p 179 srl \rd, \rs, \p
173 andi \rd, \rd, (1 << \s) - 1 180 andi \rd, \rd, (1 << \s) - 1
174 .endm 181 .endm
175 #endif /* !CONFIG_CPU_MIPSR2 */ 182 #endif /* !CONFIG_CPU_MIPSR2 */
176 183
177 /* 184 /*
178 * Temporary until all gas have MT ASE support 185 * Temporary until all gas have MT ASE support
179 */ 186 */
180 .macro DMT reg=0 187 .macro DMT reg=0
181 .word 0x41600bc1 | (\reg << 16) 188 .word 0x41600bc1 | (\reg << 16)
182 .endm 189 .endm
183 190
184 .macro EMT reg=0 191 .macro EMT reg=0
185 .word 0x41600be1 | (\reg << 16) 192 .word 0x41600be1 | (\reg << 16)
186 .endm 193 .endm
187 194
188 .macro DVPE reg=0 195 .macro DVPE reg=0
189 .word 0x41600001 | (\reg << 16) 196 .word 0x41600001 | (\reg << 16)
190 .endm 197 .endm
191 198
192 .macro EVPE reg=0 199 .macro EVPE reg=0
193 .word 0x41600021 | (\reg << 16) 200 .word 0x41600021 | (\reg << 16)
194 .endm 201 .endm
195 202
196 .macro MFTR rt=0, rd=0, u=0, sel=0 203 .macro MFTR rt=0, rd=0, u=0, sel=0
197 .word 0x41000000 | (\rt << 16) | (\rd << 11) | (\u << 5) | (\sel) 204 .word 0x41000000 | (\rt << 16) | (\rd << 11) | (\u << 5) | (\sel)
198 .endm 205 .endm
199 206
200 .macro MTTR rt=0, rd=0, u=0, sel=0 207 .macro MTTR rt=0, rd=0, u=0, sel=0
201 .word 0x41800000 | (\rt << 16) | (\rd << 11) | (\u << 5) | (\sel) 208 .word 0x41800000 | (\rt << 16) | (\rd << 11) | (\u << 5) | (\sel)
202 .endm 209 .endm
203 210
204 #ifdef TOOLCHAIN_SUPPORTS_MSA 211 #ifdef TOOLCHAIN_SUPPORTS_MSA
205 .macro ld_d wd, off, base 212 .macro ld_d wd, off, base
206 .set push 213 .set push
207 .set mips32r2 214 .set mips32r2
208 .set msa 215 .set msa
209 ld.d $w\wd, \off(\base) 216 ld.d $w\wd, \off(\base)
210 .set pop 217 .set pop
211 .endm 218 .endm
212 219
213 .macro st_d wd, off, base 220 .macro st_d wd, off, base
214 .set push 221 .set push
215 .set mips32r2 222 .set mips32r2
216 .set msa 223 .set msa
217 st.d $w\wd, \off(\base) 224 st.d $w\wd, \off(\base)
218 .set pop 225 .set pop
219 .endm 226 .endm
220 227
221 .macro copy_u_w rd, ws, n 228 .macro copy_u_w rd, ws, n
222 .set push 229 .set push
223 .set mips32r2 230 .set mips32r2
224 .set msa 231 .set msa
225 copy_u.w \rd, $w\ws[\n] 232 copy_u.w \rd, $w\ws[\n]
226 .set pop 233 .set pop
227 .endm 234 .endm
228 235
229 .macro copy_u_d rd, ws, n 236 .macro copy_u_d rd, ws, n
230 .set push 237 .set push
231 .set mips64r2 238 .set mips64r2
232 .set msa 239 .set msa
233 copy_u.d \rd, $w\ws[\n] 240 copy_u.d \rd, $w\ws[\n]
234 .set pop 241 .set pop
235 .endm 242 .endm
236 243
237 .macro insert_w wd, n, rs 244 .macro insert_w wd, n, rs
238 .set push 245 .set push
239 .set mips32r2 246 .set mips32r2
240 .set msa 247 .set msa
241 insert.w $w\wd[\n], \rs 248 insert.w $w\wd[\n], \rs
242 .set pop 249 .set pop
243 .endm 250 .endm
244 251
245 .macro insert_d wd, n, rs 252 .macro insert_d wd, n, rs
246 .set push 253 .set push
247 .set mips64r2 254 .set mips64r2
248 .set msa 255 .set msa
249 insert.d $w\wd[\n], \rs 256 insert.d $w\wd[\n], \rs
250 .set pop 257 .set pop
251 .endm 258 .endm
252 #else 259 #else
253 260
254 #ifdef CONFIG_CPU_MICROMIPS 261 #ifdef CONFIG_CPU_MICROMIPS
255 #define CFC_MSA_INSN 0x587e0056 262 #define CFC_MSA_INSN 0x587e0056
256 #define CTC_MSA_INSN 0x583e0816 263 #define CTC_MSA_INSN 0x583e0816
257 #define LDD_MSA_INSN 0x58000837 264 #define LDD_MSA_INSN 0x58000837
258 #define STD_MSA_INSN 0x5800083f 265 #define STD_MSA_INSN 0x5800083f
259 #define COPY_UW_MSA_INSN 0x58f00056 266 #define COPY_UW_MSA_INSN 0x58f00056
260 #define COPY_UD_MSA_INSN 0x58f80056 267 #define COPY_UD_MSA_INSN 0x58f80056
261 #define INSERT_W_MSA_INSN 0x59300816 268 #define INSERT_W_MSA_INSN 0x59300816
262 #define INSERT_D_MSA_INSN 0x59380816 269 #define INSERT_D_MSA_INSN 0x59380816
263 #else 270 #else
264 #define CFC_MSA_INSN 0x787e0059 271 #define CFC_MSA_INSN 0x787e0059
265 #define CTC_MSA_INSN 0x783e0819 272 #define CTC_MSA_INSN 0x783e0819
266 #define LDD_MSA_INSN 0x78000823 273 #define LDD_MSA_INSN 0x78000823
267 #define STD_MSA_INSN 0x78000827 274 #define STD_MSA_INSN 0x78000827
268 #define COPY_UW_MSA_INSN 0x78f00059 275 #define COPY_UW_MSA_INSN 0x78f00059
269 #define COPY_UD_MSA_INSN 0x78f80059 276 #define COPY_UD_MSA_INSN 0x78f80059
270 #define INSERT_W_MSA_INSN 0x79300819 277 #define INSERT_W_MSA_INSN 0x79300819
271 #define INSERT_D_MSA_INSN 0x79380819 278 #define INSERT_D_MSA_INSN 0x79380819
272 #endif 279 #endif
273 280
274 /* 281 /*
275 * Temporary until all toolchains in use include MSA support. 282 * Temporary until all toolchains in use include MSA support.
276 */ 283 */
277 .macro cfcmsa rd, cs 284 .macro cfcmsa rd, cs
278 .set push 285 .set push
279 .set noat 286 .set noat
287 SET_HARDFLOAT
280 .insn 288 .insn
281 .word CFC_MSA_INSN | (\cs << 11) 289 .word CFC_MSA_INSN | (\cs << 11)
282 move \rd, $1 290 move \rd, $1
283 .set pop 291 .set pop
284 .endm 292 .endm
285 293
286 .macro ctcmsa cd, rs 294 .macro ctcmsa cd, rs
287 .set push 295 .set push
288 .set noat 296 .set noat
297 SET_HARDFLOAT
289 move $1, \rs 298 move $1, \rs
290 .word CTC_MSA_INSN | (\cd << 6) 299 .word CTC_MSA_INSN | (\cd << 6)
291 .set pop 300 .set pop
292 .endm 301 .endm
293 302
294 .macro ld_d wd, off, base 303 .macro ld_d wd, off, base
295 .set push 304 .set push
296 .set noat 305 .set noat
306 SET_HARDFLOAT
297 add $1, \base, \off 307 add $1, \base, \off
298 .word LDD_MSA_INSN | (\wd << 6) 308 .word LDD_MSA_INSN | (\wd << 6)
299 .set pop 309 .set pop
300 .endm 310 .endm
301 311
302 .macro st_d wd, off, base 312 .macro st_d wd, off, base
303 .set push 313 .set push
304 .set noat 314 .set noat
315 SET_HARDFLOAT
305 add $1, \base, \off 316 add $1, \base, \off
306 .word STD_MSA_INSN | (\wd << 6) 317 .word STD_MSA_INSN | (\wd << 6)
307 .set pop 318 .set pop
308 .endm 319 .endm
309 320
310 .macro copy_u_w rd, ws, n 321 .macro copy_u_w rd, ws, n
311 .set push 322 .set push
312 .set noat 323 .set noat
324 SET_HARDFLOAT
313 .insn 325 .insn
314 .word COPY_UW_MSA_INSN | (\n << 16) | (\ws << 11) 326 .word COPY_UW_MSA_INSN | (\n << 16) | (\ws << 11)
315 /* move triggers an assembler bug... */ 327 /* move triggers an assembler bug... */
316 or \rd, $1, zero 328 or \rd, $1, zero
317 .set pop 329 .set pop
318 .endm 330 .endm
319 331
320 .macro copy_u_d rd, ws, n 332 .macro copy_u_d rd, ws, n
321 .set push 333 .set push
322 .set noat 334 .set noat
335 SET_HARDFLOAT
323 .insn 336 .insn
324 .word COPY_UD_MSA_INSN | (\n << 16) | (\ws << 11) 337 .word COPY_UD_MSA_INSN | (\n << 16) | (\ws << 11)
325 /* move triggers an assembler bug... */ 338 /* move triggers an assembler bug... */
326 or \rd, $1, zero 339 or \rd, $1, zero
327 .set pop 340 .set pop
328 .endm 341 .endm
329 342
330 .macro insert_w wd, n, rs 343 .macro insert_w wd, n, rs
331 .set push 344 .set push
332 .set noat 345 .set noat
346 SET_HARDFLOAT
333 /* move triggers an assembler bug... */ 347 /* move triggers an assembler bug... */
334 or $1, \rs, zero 348 or $1, \rs, zero
335 .word INSERT_W_MSA_INSN | (\n << 16) | (\wd << 6) 349 .word INSERT_W_MSA_INSN | (\n << 16) | (\wd << 6)
336 .set pop 350 .set pop
337 .endm 351 .endm
338 352
339 .macro insert_d wd, n, rs 353 .macro insert_d wd, n, rs
340 .set push 354 .set push
341 .set noat 355 .set noat
356 SET_HARDFLOAT
342 /* move triggers an assembler bug... */ 357 /* move triggers an assembler bug... */
343 or $1, \rs, zero 358 or $1, \rs, zero
344 .word INSERT_D_MSA_INSN | (\n << 16) | (\wd << 6) 359 .word INSERT_D_MSA_INSN | (\n << 16) | (\wd << 6)
345 .set pop 360 .set pop
346 .endm 361 .endm
347 #endif 362 #endif
348 363
349 .macro msa_save_all thread 364 .macro msa_save_all thread
350 st_d 0, THREAD_FPR0, \thread 365 st_d 0, THREAD_FPR0, \thread
351 st_d 1, THREAD_FPR1, \thread 366 st_d 1, THREAD_FPR1, \thread
352 st_d 2, THREAD_FPR2, \thread 367 st_d 2, THREAD_FPR2, \thread
353 st_d 3, THREAD_FPR3, \thread 368 st_d 3, THREAD_FPR3, \thread
354 st_d 4, THREAD_FPR4, \thread 369 st_d 4, THREAD_FPR4, \thread
355 st_d 5, THREAD_FPR5, \thread 370 st_d 5, THREAD_FPR5, \thread
356 st_d 6, THREAD_FPR6, \thread 371 st_d 6, THREAD_FPR6, \thread
357 st_d 7, THREAD_FPR7, \thread 372 st_d 7, THREAD_FPR7, \thread
358 st_d 8, THREAD_FPR8, \thread 373 st_d 8, THREAD_FPR8, \thread
359 st_d 9, THREAD_FPR9, \thread 374 st_d 9, THREAD_FPR9, \thread
360 st_d 10, THREAD_FPR10, \thread 375 st_d 10, THREAD_FPR10, \thread
361 st_d 11, THREAD_FPR11, \thread 376 st_d 11, THREAD_FPR11, \thread
362 st_d 12, THREAD_FPR12, \thread 377 st_d 12, THREAD_FPR12, \thread
363 st_d 13, THREAD_FPR13, \thread 378 st_d 13, THREAD_FPR13, \thread
364 st_d 14, THREAD_FPR14, \thread 379 st_d 14, THREAD_FPR14, \thread
365 st_d 15, THREAD_FPR15, \thread 380 st_d 15, THREAD_FPR15, \thread
366 st_d 16, THREAD_FPR16, \thread 381 st_d 16, THREAD_FPR16, \thread
367 st_d 17, THREAD_FPR17, \thread 382 st_d 17, THREAD_FPR17, \thread
368 st_d 18, THREAD_FPR18, \thread 383 st_d 18, THREAD_FPR18, \thread
369 st_d 19, THREAD_FPR19, \thread 384 st_d 19, THREAD_FPR19, \thread
370 st_d 20, THREAD_FPR20, \thread 385 st_d 20, THREAD_FPR20, \thread
371 st_d 21, THREAD_FPR21, \thread 386 st_d 21, THREAD_FPR21, \thread
372 st_d 22, THREAD_FPR22, \thread 387 st_d 22, THREAD_FPR22, \thread
373 st_d 23, THREAD_FPR23, \thread 388 st_d 23, THREAD_FPR23, \thread
374 st_d 24, THREAD_FPR24, \thread 389 st_d 24, THREAD_FPR24, \thread
375 st_d 25, THREAD_FPR25, \thread 390 st_d 25, THREAD_FPR25, \thread
376 st_d 26, THREAD_FPR26, \thread 391 st_d 26, THREAD_FPR26, \thread
377 st_d 27, THREAD_FPR27, \thread 392 st_d 27, THREAD_FPR27, \thread
378 st_d 28, THREAD_FPR28, \thread 393 st_d 28, THREAD_FPR28, \thread
379 st_d 29, THREAD_FPR29, \thread 394 st_d 29, THREAD_FPR29, \thread
380 st_d 30, THREAD_FPR30, \thread 395 st_d 30, THREAD_FPR30, \thread
381 st_d 31, THREAD_FPR31, \thread 396 st_d 31, THREAD_FPR31, \thread
382 .set push 397 .set push
383 .set noat 398 .set noat
399 SET_HARDFLOAT
384 cfcmsa $1, MSA_CSR 400 cfcmsa $1, MSA_CSR
385 sw $1, THREAD_MSA_CSR(\thread) 401 sw $1, THREAD_MSA_CSR(\thread)
386 .set pop 402 .set pop
387 .endm 403 .endm
388 404
389 .macro msa_restore_all thread 405 .macro msa_restore_all thread
390 .set push 406 .set push
391 .set noat 407 .set noat
408 SET_HARDFLOAT
392 lw $1, THREAD_MSA_CSR(\thread) 409 lw $1, THREAD_MSA_CSR(\thread)
393 ctcmsa MSA_CSR, $1 410 ctcmsa MSA_CSR, $1
394 .set pop 411 .set pop
395 ld_d 0, THREAD_FPR0, \thread 412 ld_d 0, THREAD_FPR0, \thread
396 ld_d 1, THREAD_FPR1, \thread 413 ld_d 1, THREAD_FPR1, \thread
397 ld_d 2, THREAD_FPR2, \thread 414 ld_d 2, THREAD_FPR2, \thread
398 ld_d 3, THREAD_FPR3, \thread 415 ld_d 3, THREAD_FPR3, \thread
399 ld_d 4, THREAD_FPR4, \thread 416 ld_d 4, THREAD_FPR4, \thread
400 ld_d 5, THREAD_FPR5, \thread 417 ld_d 5, THREAD_FPR5, \thread
401 ld_d 6, THREAD_FPR6, \thread 418 ld_d 6, THREAD_FPR6, \thread
402 ld_d 7, THREAD_FPR7, \thread 419 ld_d 7, THREAD_FPR7, \thread
403 ld_d 8, THREAD_FPR8, \thread 420 ld_d 8, THREAD_FPR8, \thread
404 ld_d 9, THREAD_FPR9, \thread 421 ld_d 9, THREAD_FPR9, \thread
405 ld_d 10, THREAD_FPR10, \thread 422 ld_d 10, THREAD_FPR10, \thread
406 ld_d 11, THREAD_FPR11, \thread 423 ld_d 11, THREAD_FPR11, \thread
407 ld_d 12, THREAD_FPR12, \thread 424 ld_d 12, THREAD_FPR12, \thread
408 ld_d 13, THREAD_FPR13, \thread 425 ld_d 13, THREAD_FPR13, \thread
409 ld_d 14, THREAD_FPR14, \thread 426 ld_d 14, THREAD_FPR14, \thread
410 ld_d 15, THREAD_FPR15, \thread 427 ld_d 15, THREAD_FPR15, \thread
411 ld_d 16, THREAD_FPR16, \thread 428 ld_d 16, THREAD_FPR16, \thread
412 ld_d 17, THREAD_FPR17, \thread 429 ld_d 17, THREAD_FPR17, \thread
413 ld_d 18, THREAD_FPR18, \thread 430 ld_d 18, THREAD_FPR18, \thread
414 ld_d 19, THREAD_FPR19, \thread 431 ld_d 19, THREAD_FPR19, \thread
415 ld_d 20, THREAD_FPR20, \thread 432 ld_d 20, THREAD_FPR20, \thread
416 ld_d 21, THREAD_FPR21, \thread 433 ld_d 21, THREAD_FPR21, \thread
417 ld_d 22, THREAD_FPR22, \thread 434 ld_d 22, THREAD_FPR22, \thread
418 ld_d 23, THREAD_FPR23, \thread 435 ld_d 23, THREAD_FPR23, \thread
419 ld_d 24, THREAD_FPR24, \thread 436 ld_d 24, THREAD_FPR24, \thread
420 ld_d 25, THREAD_FPR25, \thread 437 ld_d 25, THREAD_FPR25, \thread
421 ld_d 26, THREAD_FPR26, \thread 438 ld_d 26, THREAD_FPR26, \thread
422 ld_d 27, THREAD_FPR27, \thread 439 ld_d 27, THREAD_FPR27, \thread
423 ld_d 28, THREAD_FPR28, \thread 440 ld_d 28, THREAD_FPR28, \thread
424 ld_d 29, THREAD_FPR29, \thread 441 ld_d 29, THREAD_FPR29, \thread
425 ld_d 30, THREAD_FPR30, \thread 442 ld_d 30, THREAD_FPR30, \thread
426 ld_d 31, THREAD_FPR31, \thread 443 ld_d 31, THREAD_FPR31, \thread
427 .endm 444 .endm
428 445
429 .macro msa_init_upper wd 446 .macro msa_init_upper wd
430 #ifdef CONFIG_64BIT 447 #ifdef CONFIG_64BIT
431 insert_d \wd, 1 448 insert_d \wd, 1
432 #else 449 #else
433 insert_w \wd, 2 450 insert_w \wd, 2
434 insert_w \wd, 3 451 insert_w \wd, 3
435 #endif 452 #endif
436 .if 31-\wd 453 .if 31-\wd
437 msa_init_upper (\wd+1) 454 msa_init_upper (\wd+1)
438 .endif 455 .endif
439 .endm 456 .endm
440 457
441 .macro msa_init_all_upper 458 .macro msa_init_all_upper
442 .set push 459 .set push
443 .set noat 460 .set noat
461 SET_HARDFLOAT
444 not $1, zero 462 not $1, zero
445 msa_init_upper 0 463 msa_init_upper 0
446 .set pop 464 .set pop
447 .endm 465 .endm
448 466
449 #endif /* _ASM_ASMMACRO_H */ 467 #endif /* _ASM_ASMMACRO_H */
450 468
arch/mips/include/asm/fpregdef.h
1 /* 1 /*
2 * Definitions for the FPU register names 2 * Definitions for the FPU register names
3 * 3 *
4 * This file is subject to the terms and conditions of the GNU General Public 4 * This file is subject to the terms and conditions of the GNU General Public
5 * License. See the file "COPYING" in the main directory of this archive 5 * License. See the file "COPYING" in the main directory of this archive
6 * for more details. 6 * for more details.
7 * 7 *
8 * Copyright (C) 1995, 1999 Ralf Baechle 8 * Copyright (C) 1995, 1999 Ralf Baechle
9 * Copyright (C) 1985 MIPS Computer Systems, Inc. 9 * Copyright (C) 1985 MIPS Computer Systems, Inc.
10 * Copyright (C) 1990 - 1992, 1999 Silicon Graphics, Inc. 10 * Copyright (C) 1990 - 1992, 1999 Silicon Graphics, Inc.
11 */ 11 */
12 #ifndef _ASM_FPREGDEF_H 12 #ifndef _ASM_FPREGDEF_H
13 #define _ASM_FPREGDEF_H 13 #define _ASM_FPREGDEF_H
14 14
15 #include <asm/sgidefs.h> 15 #include <asm/sgidefs.h>
16 16
17 /*
18 * starting with binutils 2.24.51.20140729, MIPS binutils warn about mixing
19 * hardfloat and softfloat object files. The kernel build uses soft-float by
20 * default, so we also need to pass -msoft-float along to GAS if it supports it.
21 * But this in turn causes assembler errors in files which access hardfloat
22 * registers. We detect if GAS supports "-msoft-float" in the Makefile and
23 * explicitly put ".set hardfloat" where floating point registers are touched.
24 */
25 #ifdef GAS_HAS_SET_HARDFLOAT
26 #define SET_HARDFLOAT .set hardfloat
27 #else
28 #define SET_HARDFLOAT
29 #endif
30
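[Editor's note] A minimal sketch of how the new SET_HARDFLOAT hook is meant to be used, following the same pattern as the asmmacro.h hunks above: the directive expands to ".set hardfloat" only when the Makefile has detected that GAS accepts -msoft-float (GAS_HAS_SET_HARDFLOAT), and to nothing otherwise, so any assembly macro that touches FPU/MSA registers brackets the access like this. The macro name read_fcsr_into below is illustrative and not part of the patch.

	.macro	read_fcsr_into rd	/* illustrative only, not from the patch */
	.set	push
	.set	noat
	SET_HARDFLOAT			/* ".set hardfloat" only if GAS_HAS_SET_HARDFLOAT */
	cfc1	$1, $31			/* read the FPU control/status register into $at */
	or	\rd, $1, zero
	.set	pop
	.endm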
17 #if _MIPS_SIM == _MIPS_SIM_ABI32 31 #if _MIPS_SIM == _MIPS_SIM_ABI32
18 32
19 /* 33 /*
20 * These definitions only cover the R3000-ish 16/32 register model. 34 * These definitions only cover the R3000-ish 16/32 register model.
21 * But we're trying to be R3000 friendly anyway ... 35 * But we're trying to be R3000 friendly anyway ...
22 */ 36 */
23 #define fv0 $f0 /* return value */ 37 #define fv0 $f0 /* return value */
24 #define fv0f $f1 38 #define fv0f $f1
25 #define fv1 $f2 39 #define fv1 $f2
26 #define fv1f $f3 40 #define fv1f $f3
27 #define fa0 $f12 /* argument registers */ 41 #define fa0 $f12 /* argument registers */
28 #define fa0f $f13 42 #define fa0f $f13
29 #define fa1 $f14 43 #define fa1 $f14
30 #define fa1f $f15 44 #define fa1f $f15
31 #define ft0 $f4 /* caller saved */ 45 #define ft0 $f4 /* caller saved */
32 #define ft0f $f5 46 #define ft0f $f5
33 #define ft1 $f6 47 #define ft1 $f6
34 #define ft1f $f7 48 #define ft1f $f7
35 #define ft2 $f8 49 #define ft2 $f8
36 #define ft2f $f9 50 #define ft2f $f9
37 #define ft3 $f10 51 #define ft3 $f10
38 #define ft3f $f11 52 #define ft3f $f11
39 #define ft4 $f16 53 #define ft4 $f16
40 #define ft4f $f17 54 #define ft4f $f17
41 #define ft5 $f18 55 #define ft5 $f18
42 #define ft5f $f19 56 #define ft5f $f19
43 #define fs0 $f20 /* callee saved */ 57 #define fs0 $f20 /* callee saved */
44 #define fs0f $f21 58 #define fs0f $f21
45 #define fs1 $f22 59 #define fs1 $f22
46 #define fs1f $f23 60 #define fs1f $f23
47 #define fs2 $f24 61 #define fs2 $f24
48 #define fs2f $f25 62 #define fs2f $f25
49 #define fs3 $f26 63 #define fs3 $f26
50 #define fs3f $f27 64 #define fs3f $f27
51 #define fs4 $f28 65 #define fs4 $f28
52 #define fs4f $f29 66 #define fs4f $f29
53 #define fs5 $f30 67 #define fs5 $f30
54 #define fs5f $f31 68 #define fs5f $f31
55 69
56 #define fcr31 $31 /* FPU status register */ 70 #define fcr31 $31 /* FPU status register */
57 71
58 #endif /* _MIPS_SIM == _MIPS_SIM_ABI32 */ 72 #endif /* _MIPS_SIM == _MIPS_SIM_ABI32 */
59 73
60 #if _MIPS_SIM == _MIPS_SIM_ABI64 || _MIPS_SIM == _MIPS_SIM_NABI32 74 #if _MIPS_SIM == _MIPS_SIM_ABI64 || _MIPS_SIM == _MIPS_SIM_NABI32
61 75
62 #define fv0 $f0 /* return value */ 76 #define fv0 $f0 /* return value */
63 #define fv1 $f2 77 #define fv1 $f2
64 #define fa0 $f12 /* argument registers */ 78 #define fa0 $f12 /* argument registers */
65 #define fa1 $f13 79 #define fa1 $f13
66 #define fa2 $f14 80 #define fa2 $f14
67 #define fa3 $f15 81 #define fa3 $f15
68 #define fa4 $f16 82 #define fa4 $f16
69 #define fa5 $f17 83 #define fa5 $f17
70 #define fa6 $f18 84 #define fa6 $f18
71 #define fa7 $f19 85 #define fa7 $f19
72 #define ft0 $f4 /* caller saved */ 86 #define ft0 $f4 /* caller saved */
73 #define ft1 $f5 87 #define ft1 $f5
74 #define ft2 $f6 88 #define ft2 $f6
75 #define ft3 $f7 89 #define ft3 $f7
76 #define ft4 $f8 90 #define ft4 $f8
77 #define ft5 $f9 91 #define ft5 $f9
78 #define ft6 $f10 92 #define ft6 $f10
79 #define ft7 $f11 93 #define ft7 $f11
80 #define ft8 $f20 94 #define ft8 $f20
81 #define ft9 $f21 95 #define ft9 $f21
82 #define ft10 $f22 96 #define ft10 $f22
83 #define ft11 $f23 97 #define ft11 $f23
84 #define ft12 $f1 98 #define ft12 $f1
85 #define ft13 $f3 99 #define ft13 $f3
86 #define fs0 $f24 /* callee saved */ 100 #define fs0 $f24 /* callee saved */
87 #define fs1 $f25 101 #define fs1 $f25
88 #define fs2 $f26 102 #define fs2 $f26
89 #define fs3 $f27 103 #define fs3 $f27
90 #define fs4 $f28 104 #define fs4 $f28
91 #define fs5 $f29 105 #define fs5 $f29
92 #define fs6 $f30 106 #define fs6 $f30
93 #define fs7 $f31 107 #define fs7 $f31
94 108
95 #define fcr31 $31 109 #define fcr31 $31
96 110
97 #endif /* _MIPS_SIM == _MIPS_SIM_ABI64 || _MIPS_SIM == _MIPS_SIM_NABI32 */ 111 #endif /* _MIPS_SIM == _MIPS_SIM_ABI64 || _MIPS_SIM == _MIPS_SIM_NABI32 */
98 112
99 #endif /* _ASM_FPREGDEF_H */ 113 #endif /* _ASM_FPREGDEF_H */
100 114
arch/mips/include/asm/fpu.h
1 /* 1 /*
2 * Copyright (C) 2002 MontaVista Software Inc. 2 * Copyright (C) 2002 MontaVista Software Inc.
3 * Author: Jun Sun, jsun@mvista.com or jsun@junsun.net 3 * Author: Jun Sun, jsun@mvista.com or jsun@junsun.net
4 * 4 *
5 * This program is free software; you can redistribute it and/or modify it 5 * This program is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License as published by the 6 * under the terms of the GNU General Public License as published by the
7 * Free Software Foundation; either version 2 of the License, or (at your 7 * Free Software Foundation; either version 2 of the License, or (at your
8 * option) any later version. 8 * option) any later version.
9 */ 9 */
10 #ifndef _ASM_FPU_H 10 #ifndef _ASM_FPU_H
11 #define _ASM_FPU_H 11 #define _ASM_FPU_H
12 12
13 #include <linux/sched.h> 13 #include <linux/sched.h>
14 #include <linux/thread_info.h> 14 #include <linux/thread_info.h>
15 #include <linux/bitops.h> 15 #include <linux/bitops.h>
16 16
17 #include <asm/mipsregs.h> 17 #include <asm/mipsregs.h>
18 #include <asm/cpu.h> 18 #include <asm/cpu.h>
19 #include <asm/cpu-features.h> 19 #include <asm/cpu-features.h>
20 #include <asm/fpu_emulator.h> 20 #include <asm/fpu_emulator.h>
21 #include <asm/hazards.h> 21 #include <asm/hazards.h>
22 #include <asm/processor.h> 22 #include <asm/processor.h>
23 #include <asm/current.h> 23 #include <asm/current.h>
24 #include <asm/msa.h> 24 #include <asm/msa.h>
25 25
26 #ifdef CONFIG_MIPS_MT_FPAFF 26 #ifdef CONFIG_MIPS_MT_FPAFF
27 #include <asm/mips_mt.h> 27 #include <asm/mips_mt.h>
28 #endif 28 #endif
29 29
30 struct sigcontext; 30 struct sigcontext;
31 struct sigcontext32; 31 struct sigcontext32;
32 32
33 extern void _init_fpu(void); 33 extern void _init_fpu(void);
34 extern void _save_fp(struct task_struct *); 34 extern void _save_fp(struct task_struct *);
35 extern void _restore_fp(struct task_struct *); 35 extern void _restore_fp(struct task_struct *);
36 36
37 /* 37 /*
38 * This enum specifies a mode in which we want the FPU to operate, for cores 38 * This enum specifies a mode in which we want the FPU to operate, for cores
39 * which implement the Status.FR bit. Note that FPU_32BIT & FPU_64BIT 39 * which implement the Status.FR bit. Note that FPU_32BIT & FPU_64BIT
40 * purposefully have the values 0 & 1 respectively, so that an integer value 40 * purposefully have the values 0 & 1 respectively, so that an integer value
41 * of Status.FR can be trivially casted to the corresponding enum fpu_mode. 41 * of Status.FR can be trivially casted to the corresponding enum fpu_mode.
42 */ 42 */
43 enum fpu_mode { 43 enum fpu_mode {
44 FPU_32BIT = 0, /* FR = 0 */ 44 FPU_32BIT = 0, /* FR = 0 */
45 FPU_64BIT, /* FR = 1 */ 45 FPU_64BIT, /* FR = 1 */
46 FPU_AS_IS, 46 FPU_AS_IS,
47 }; 47 };
48 48
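[Editor's note] For illustration only (not part of the patch): because FPU_32BIT is 0 and FPU_64BIT is 1, the current Status.FR setting maps onto the enum with a plain cast, which is what __enable_fpu() and __own_fpu() below rely on. A hypothetical helper built on read_c0_status() and ST0_FR would look like:

	/* Hypothetical helper, assuming the definitions in this header and mipsregs.h. */
	static inline enum fpu_mode current_fr_mode(void)
	{
		/* FR set -> FPU_64BIT (1), FR clear -> FPU_32BIT (0) */
		return (enum fpu_mode)!!(read_c0_status() & ST0_FR);
	}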
49 static inline int __enable_fpu(enum fpu_mode mode) 49 static inline int __enable_fpu(enum fpu_mode mode)
50 { 50 {
51 int fr; 51 int fr;
52 52
53 switch (mode) { 53 switch (mode) {
54 case FPU_AS_IS: 54 case FPU_AS_IS:
55 /* just enable the FPU in its current mode */ 55 /* just enable the FPU in its current mode */
56 set_c0_status(ST0_CU1); 56 set_c0_status(ST0_CU1);
57 enable_fpu_hazard(); 57 enable_fpu_hazard();
58 return 0; 58 return 0;
59 59
60 case FPU_64BIT: 60 case FPU_64BIT:
61 #if !(defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_64BIT)) 61 #if !(defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_64BIT))
62 /* we only have a 32-bit FPU */ 62 /* we only have a 32-bit FPU */
63 return SIGFPE; 63 return SIGFPE;
64 #endif 64 #endif
65 /* fall through */ 65 /* fall through */
66 case FPU_32BIT: 66 case FPU_32BIT:
67 /* set CU1 & change FR appropriately */ 67 /* set CU1 & change FR appropriately */
68 fr = (int)mode; 68 fr = (int)mode;
69 change_c0_status(ST0_CU1 | ST0_FR, ST0_CU1 | (fr ? ST0_FR : 0)); 69 change_c0_status(ST0_CU1 | ST0_FR, ST0_CU1 | (fr ? ST0_FR : 0));
70 enable_fpu_hazard(); 70 enable_fpu_hazard();
71 71
72 /* check FR has the desired value */ 72 /* check FR has the desired value */
73 return (!!(read_c0_status() & ST0_FR) == !!fr) ? 0 : SIGFPE; 73 return (!!(read_c0_status() & ST0_FR) == !!fr) ? 0 : SIGFPE;
74 74
75 default: 75 default:
76 BUG(); 76 BUG();
77 } 77 }
78 78
79 return SIGFPE; 79 return SIGFPE;
80 } 80 }
81 81
82 #define __disable_fpu() \ 82 #define __disable_fpu() \
83 do { \ 83 do { \
84 clear_c0_status(ST0_CU1); \ 84 clear_c0_status(ST0_CU1); \
85 disable_fpu_hazard(); \ 85 disable_fpu_hazard(); \
86 } while (0) 86 } while (0)
87 87
88 #define clear_fpu_owner() clear_thread_flag(TIF_USEDFPU) 88 #define clear_fpu_owner() clear_thread_flag(TIF_USEDFPU)
89 89
90 static inline int __is_fpu_owner(void) 90 static inline int __is_fpu_owner(void)
91 { 91 {
92 return test_thread_flag(TIF_USEDFPU); 92 return test_thread_flag(TIF_USEDFPU);
93 } 93 }
94 94
95 static inline int is_fpu_owner(void) 95 static inline int is_fpu_owner(void)
96 { 96 {
97 return cpu_has_fpu && __is_fpu_owner(); 97 return cpu_has_fpu && __is_fpu_owner();
98 } 98 }
99 99
100 static inline int __own_fpu(void) 100 static inline int __own_fpu(void)
101 { 101 {
102 enum fpu_mode mode; 102 enum fpu_mode mode;
103 int ret; 103 int ret;
104 104
105 mode = !test_thread_flag(TIF_32BIT_FPREGS); 105 mode = !test_thread_flag(TIF_32BIT_FPREGS);
106 ret = __enable_fpu(mode); 106 ret = __enable_fpu(mode);
107 if (ret) 107 if (ret)
108 return ret; 108 return ret;
109 109
110 KSTK_STATUS(current) |= ST0_CU1; 110 KSTK_STATUS(current) |= ST0_CU1;
111 if (mode == FPU_64BIT) 111 if (mode == FPU_64BIT)
112 KSTK_STATUS(current) |= ST0_FR; 112 KSTK_STATUS(current) |= ST0_FR;
113 else /* mode == FPU_32BIT */ 113 else /* mode == FPU_32BIT */
114 KSTK_STATUS(current) &= ~ST0_FR; 114 KSTK_STATUS(current) &= ~ST0_FR;
115 115
116 set_thread_flag(TIF_USEDFPU); 116 set_thread_flag(TIF_USEDFPU);
117 return 0; 117 return 0;
118 } 118 }
119 119
120 static inline int own_fpu_inatomic(int restore) 120 static inline int own_fpu_inatomic(int restore)
121 { 121 {
122 int ret = 0; 122 int ret = 0;
123 123
124 if (cpu_has_fpu && !__is_fpu_owner()) { 124 if (cpu_has_fpu && !__is_fpu_owner()) {
125 ret = __own_fpu(); 125 ret = __own_fpu();
126 if (restore && !ret) 126 if (restore && !ret)
127 _restore_fp(current); 127 _restore_fp(current);
128 } 128 }
129 return ret; 129 return ret;
130 } 130 }
131 131
132 static inline int own_fpu(int restore) 132 static inline int own_fpu(int restore)
133 { 133 {
134 int ret; 134 int ret;
135 135
136 preempt_disable(); 136 preempt_disable();
137 ret = own_fpu_inatomic(restore); 137 ret = own_fpu_inatomic(restore);
138 preempt_enable(); 138 preempt_enable();
139 return ret; 139 return ret;
140 } 140 }
141 141
142 static inline void lose_fpu(int save) 142 static inline void lose_fpu(int save)
143 { 143 {
144 preempt_disable(); 144 preempt_disable();
145 if (is_msa_enabled()) { 145 if (is_msa_enabled()) {
146 if (save) { 146 if (save) {
147 save_msa(current); 147 save_msa(current);
148 asm volatile("cfc1 %0, $31" 148 current->thread.fpu.fcr31 =
149 : "=r"(current->thread.fpu.fcr31)); 149 read_32bit_cp1_register(CP1_STATUS);
150 } 150 }
151 disable_msa(); 151 disable_msa();
152 clear_thread_flag(TIF_USEDMSA); 152 clear_thread_flag(TIF_USEDMSA);
153 } else if (is_fpu_owner()) { 153 } else if (is_fpu_owner()) {
154 if (save) 154 if (save)
155 _save_fp(current); 155 _save_fp(current);
156 __disable_fpu(); 156 __disable_fpu();
157 } 157 }
158 KSTK_STATUS(current) &= ~ST0_CU1; 158 KSTK_STATUS(current) &= ~ST0_CU1;
159 clear_thread_flag(TIF_USEDFPU); 159 clear_thread_flag(TIF_USEDFPU);
160 preempt_enable(); 160 preempt_enable();
161 } 161 }
162 162
163 static inline int init_fpu(void) 163 static inline int init_fpu(void)
164 { 164 {
165 int ret = 0; 165 int ret = 0;
166 166
167 if (cpu_has_fpu) { 167 if (cpu_has_fpu) {
168 ret = __own_fpu(); 168 ret = __own_fpu();
169 if (!ret) 169 if (!ret)
170 _init_fpu(); 170 _init_fpu();
171 } else 171 } else
172 fpu_emulator_init_fpu(); 172 fpu_emulator_init_fpu();
173 173
174 return ret; 174 return ret;
175 } 175 }
176 176
177 static inline void save_fp(struct task_struct *tsk) 177 static inline void save_fp(struct task_struct *tsk)
178 { 178 {
179 if (cpu_has_fpu) 179 if (cpu_has_fpu)
180 _save_fp(tsk); 180 _save_fp(tsk);
181 } 181 }
182 182
183 static inline void restore_fp(struct task_struct *tsk) 183 static inline void restore_fp(struct task_struct *tsk)
184 { 184 {
185 if (cpu_has_fpu) 185 if (cpu_has_fpu)
186 _restore_fp(tsk); 186 _restore_fp(tsk);
187 } 187 }
188 188
189 static inline union fpureg *get_fpu_regs(struct task_struct *tsk) 189 static inline union fpureg *get_fpu_regs(struct task_struct *tsk)
190 { 190 {
191 if (tsk == current) { 191 if (tsk == current) {
192 preempt_disable(); 192 preempt_disable();
193 if (is_fpu_owner()) 193 if (is_fpu_owner())
194 _save_fp(current); 194 _save_fp(current);
195 preempt_enable(); 195 preempt_enable();
196 } 196 }
197 197
198 return tsk->thread.fpu.fpr; 198 return tsk->thread.fpu.fpr;
199 } 199 }
200 200
201 #endif /* _ASM_FPU_H */ 201 #endif /* _ASM_FPU_H */
202 202
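[Editor's note] The lose_fpu() hunk above replaces the raw "cfc1 %0, $31" inline asm with read_32bit_cp1_register(CP1_STATUS), so the ".set hardfloat" handling lives in one place. That macro is defined in mipsregs.h outside the portion of the file shown here; a rough sketch of the idea, using the STR() stringification helper from mipsregs.h and the hypothetical names sketch_read_32bit_cp1_register and __GAS_HARDFLOAT (this is not the patch's exact definition), could be:

	/* Sketch only: emit ".set hardfloat" around the cfc1 read when, and
	 * only when, GAS was given -msoft-float by the Makefile. */
	#ifdef GAS_HAS_SET_HARDFLOAT
	# define __GAS_HARDFLOAT	".set	hardfloat\n\t"	/* hypothetical name */
	#else
	# define __GAS_HARDFLOAT	""
	#endif

	#define sketch_read_32bit_cp1_register(source)			\
	({								\
		unsigned int __res;					\
									\
		__asm__ __volatile__(					\
		"	.set	push\n\t"				\
			__GAS_HARDFLOAT					\
		"	cfc1	%0, " STR(source) "\n\t"		\
		"	.set	pop"					\
		: "=r" (__res));					\
		__res;							\
	})

Used as sketch_read_32bit_cp1_register(CP1_STATUS), STR() stringifies CP1_STATUS to "$31", so the generated assembly reads the FPU control/status register exactly as the removed inline asm did, but with the hardfloat directive applied consistently.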
arch/mips/include/asm/mipsregs.h
1 /* 1 /*
2 * This file is subject to the terms and conditions of the GNU General Public 2 * This file is subject to the terms and conditions of the GNU General Public
3 * License. See the file "COPYING" in the main directory of this archive 3 * License. See the file "COPYING" in the main directory of this archive
4 * for more details. 4 * for more details.
5 * 5 *
6 * Copyright (C) 1994, 1995, 1996, 1997, 2000, 2001 by Ralf Baechle 6 * Copyright (C) 1994, 1995, 1996, 1997, 2000, 2001 by Ralf Baechle
7 * Copyright (C) 2000 Silicon Graphics, Inc. 7 * Copyright (C) 2000 Silicon Graphics, Inc.
8 * Modified for further R[236]000 support by Paul M. Antoine, 1996. 8 * Modified for further R[236]000 support by Paul M. Antoine, 1996.
9 * Kevin D. Kissell, kevink@mips.com and Carsten Langgaard, carstenl@mips.com 9 * Kevin D. Kissell, kevink@mips.com and Carsten Langgaard, carstenl@mips.com
10 * Copyright (C) 2000, 07 MIPS Technologies, Inc. 10 * Copyright (C) 2000, 07 MIPS Technologies, Inc.
11 * Copyright (C) 2003, 2004 Maciej W. Rozycki 11 * Copyright (C) 2003, 2004 Maciej W. Rozycki
12 */ 12 */
13 #ifndef _ASM_MIPSREGS_H 13 #ifndef _ASM_MIPSREGS_H
14 #define _ASM_MIPSREGS_H 14 #define _ASM_MIPSREGS_H
15 15
16 #include <linux/linkage.h> 16 #include <linux/linkage.h>
17 #include <linux/types.h> 17 #include <linux/types.h>
18 #include <asm/hazards.h> 18 #include <asm/hazards.h>
19 #include <asm/war.h> 19 #include <asm/war.h>
20 20
21 /* 21 /*
22 * The following macros are especially useful for __asm__ 22 * The following macros are especially useful for __asm__
23 * inline assembler. 23 * inline assembler.
24 */ 24 */
25 #ifndef __STR 25 #ifndef __STR
26 #define __STR(x) #x 26 #define __STR(x) #x
27 #endif 27 #endif
28 #ifndef STR 28 #ifndef STR
29 #define STR(x) __STR(x) 29 #define STR(x) __STR(x)
30 #endif 30 #endif
31 31
32 /* 32 /*
33 * Configure language 33 * Configure language
34 */ 34 */
35 #ifdef __ASSEMBLY__ 35 #ifdef __ASSEMBLY__
36 #define _ULCAST_ 36 #define _ULCAST_
37 #else 37 #else
38 #define _ULCAST_ (unsigned long) 38 #define _ULCAST_ (unsigned long)
39 #endif 39 #endif
40 40
41 /* 41 /*
42 * Coprocessor 0 register names 42 * Coprocessor 0 register names
43 */ 43 */
44 #define CP0_INDEX $0 44 #define CP0_INDEX $0
45 #define CP0_RANDOM $1 45 #define CP0_RANDOM $1
46 #define CP0_ENTRYLO0 $2 46 #define CP0_ENTRYLO0 $2
47 #define CP0_ENTRYLO1 $3 47 #define CP0_ENTRYLO1 $3
48 #define CP0_CONF $3 48 #define CP0_CONF $3
49 #define CP0_CONTEXT $4 49 #define CP0_CONTEXT $4
50 #define CP0_PAGEMASK $5 50 #define CP0_PAGEMASK $5
51 #define CP0_WIRED $6 51 #define CP0_WIRED $6
52 #define CP0_INFO $7 52 #define CP0_INFO $7
53 #define CP0_BADVADDR $8 53 #define CP0_BADVADDR $8
54 #define CP0_COUNT $9 54 #define CP0_COUNT $9
55 #define CP0_ENTRYHI $10 55 #define CP0_ENTRYHI $10
56 #define CP0_COMPARE $11 56 #define CP0_COMPARE $11
57 #define CP0_STATUS $12 57 #define CP0_STATUS $12
58 #define CP0_CAUSE $13 58 #define CP0_CAUSE $13
59 #define CP0_EPC $14 59 #define CP0_EPC $14
60 #define CP0_PRID $15 60 #define CP0_PRID $15
61 #define CP0_CONFIG $16 61 #define CP0_CONFIG $16
62 #define CP0_LLADDR $17 62 #define CP0_LLADDR $17
63 #define CP0_WATCHLO $18 63 #define CP0_WATCHLO $18
64 #define CP0_WATCHHI $19 64 #define CP0_WATCHHI $19
65 #define CP0_XCONTEXT $20 65 #define CP0_XCONTEXT $20
66 #define CP0_FRAMEMASK $21 66 #define CP0_FRAMEMASK $21
67 #define CP0_DIAGNOSTIC $22 67 #define CP0_DIAGNOSTIC $22
68 #define CP0_DEBUG $23 68 #define CP0_DEBUG $23
69 #define CP0_DEPC $24 69 #define CP0_DEPC $24
70 #define CP0_PERFORMANCE $25 70 #define CP0_PERFORMANCE $25
71 #define CP0_ECC $26 71 #define CP0_ECC $26
72 #define CP0_CACHEERR $27 72 #define CP0_CACHEERR $27
73 #define CP0_TAGLO $28 73 #define CP0_TAGLO $28
74 #define CP0_TAGHI $29 74 #define CP0_TAGHI $29
75 #define CP0_ERROREPC $30 75 #define CP0_ERROREPC $30
76 #define CP0_DESAVE $31 76 #define CP0_DESAVE $31
77 77
78 /* 78 /*
79 * R4640/R4650 cp0 register names. These registers are listed 79 * R4640/R4650 cp0 register names. These registers are listed
80 * here only for completeness; without MMU these CPUs are not useable 80 * here only for completeness; without MMU these CPUs are not useable
81 * by Linux. A future ELKS port might take make Linux run on them 81 * by Linux. A future ELKS port might take make Linux run on them
82 * though ... 82 * though ...
83 */ 83 */
84 #define CP0_IBASE $0 84 #define CP0_IBASE $0
85 #define CP0_IBOUND $1 85 #define CP0_IBOUND $1
86 #define CP0_DBASE $2 86 #define CP0_DBASE $2
87 #define CP0_DBOUND $3 87 #define CP0_DBOUND $3
88 #define CP0_CALG $17 88 #define CP0_CALG $17
89 #define CP0_IWATCH $18 89 #define CP0_IWATCH $18
90 #define CP0_DWATCH $19 90 #define CP0_DWATCH $19
91 91
92 /* 92 /*
93 * Coprocessor 0 Set 1 register names 93 * Coprocessor 0 Set 1 register names
94 */ 94 */
95 #define CP0_S1_DERRADDR0 $26 95 #define CP0_S1_DERRADDR0 $26
96 #define CP0_S1_DERRADDR1 $27 96 #define CP0_S1_DERRADDR1 $27
97 #define CP0_S1_INTCONTROL $20 97 #define CP0_S1_INTCONTROL $20
98 98
99 /* 99 /*
100 * Coprocessor 0 Set 2 register names 100 * Coprocessor 0 Set 2 register names
101 */ 101 */
102 #define CP0_S2_SRSCTL $12 /* MIPSR2 */ 102 #define CP0_S2_SRSCTL $12 /* MIPSR2 */
103 103
104 /* 104 /*
105 * Coprocessor 0 Set 3 register names 105 * Coprocessor 0 Set 3 register names
106 */ 106 */
107 #define CP0_S3_SRSMAP $12 /* MIPSR2 */ 107 #define CP0_S3_SRSMAP $12 /* MIPSR2 */
108 108
109 /* 109 /*
110 * TX39 Series 110 * TX39 Series
111 */ 111 */
112 #define CP0_TX39_CACHE $7 112 #define CP0_TX39_CACHE $7
113 113
114 /* 114 /*
115 * Coprocessor 1 (FPU) register names 115 * Coprocessor 1 (FPU) register names
116 */ 116 */
117 #define CP1_REVISION $0 117 #define CP1_REVISION $0
118 #define CP1_STATUS $31 118 #define CP1_STATUS $31
119 119
120 /* 120 /*
121 * FPU Status Register Values 121 * FPU Status Register Values
122 */ 122 */
123 /* 123 /*
124 * Status Register Values 124 * Status Register Values
125 */ 125 */
126 126
127 #define FPU_CSR_FLUSH 0x01000000 /* flush denormalised results to 0 */ 127 #define FPU_CSR_FLUSH 0x01000000 /* flush denormalised results to 0 */
128 #define FPU_CSR_COND 0x00800000 /* $fcc0 */ 128 #define FPU_CSR_COND 0x00800000 /* $fcc0 */
129 #define FPU_CSR_COND0 0x00800000 /* $fcc0 */ 129 #define FPU_CSR_COND0 0x00800000 /* $fcc0 */
130 #define FPU_CSR_COND1 0x02000000 /* $fcc1 */ 130 #define FPU_CSR_COND1 0x02000000 /* $fcc1 */
131 #define FPU_CSR_COND2 0x04000000 /* $fcc2 */ 131 #define FPU_CSR_COND2 0x04000000 /* $fcc2 */
132 #define FPU_CSR_COND3 0x08000000 /* $fcc3 */ 132 #define FPU_CSR_COND3 0x08000000 /* $fcc3 */
133 #define FPU_CSR_COND4 0x10000000 /* $fcc4 */ 133 #define FPU_CSR_COND4 0x10000000 /* $fcc4 */
134 #define FPU_CSR_COND5 0x20000000 /* $fcc5 */ 134 #define FPU_CSR_COND5 0x20000000 /* $fcc5 */
135 #define FPU_CSR_COND6 0x40000000 /* $fcc6 */ 135 #define FPU_CSR_COND6 0x40000000 /* $fcc6 */
136 #define FPU_CSR_COND7 0x80000000 /* $fcc7 */ 136 #define FPU_CSR_COND7 0x80000000 /* $fcc7 */
137 137
138 /* 138 /*
139 * Bits 18 - 20 of the FPU Status Register will be read as 0, 139 * Bits 18 - 20 of the FPU Status Register will be read as 0,
140 * and should be written as zero. 140 * and should be written as zero.
141 */ 141 */
142 #define FPU_CSR_RSVD 0x001c0000 142 #define FPU_CSR_RSVD 0x001c0000
143 143
144 /* 144 /*
145 * X the exception cause indicator 145 * X the exception cause indicator
146 * E the exception enable 146 * E the exception enable
147 * S the sticky/flag bit 147 * S the sticky/flag bit
148 */ 148 */
149 #define FPU_CSR_ALL_X 0x0003f000 149 #define FPU_CSR_ALL_X 0x0003f000
150 #define FPU_CSR_UNI_X 0x00020000 150 #define FPU_CSR_UNI_X 0x00020000
151 #define FPU_CSR_INV_X 0x00010000 151 #define FPU_CSR_INV_X 0x00010000
152 #define FPU_CSR_DIV_X 0x00008000 152 #define FPU_CSR_DIV_X 0x00008000
153 #define FPU_CSR_OVF_X 0x00004000 153 #define FPU_CSR_OVF_X 0x00004000
154 #define FPU_CSR_UDF_X 0x00002000 154 #define FPU_CSR_UDF_X 0x00002000
155 #define FPU_CSR_INE_X 0x00001000 155 #define FPU_CSR_INE_X 0x00001000
156 156
157 #define FPU_CSR_ALL_E 0x00000f80 157 #define FPU_CSR_ALL_E 0x00000f80
158 #define FPU_CSR_INV_E 0x00000800 158 #define FPU_CSR_INV_E 0x00000800
159 #define FPU_CSR_DIV_E 0x00000400 159 #define FPU_CSR_DIV_E 0x00000400
160 #define FPU_CSR_OVF_E 0x00000200 160 #define FPU_CSR_OVF_E 0x00000200
161 #define FPU_CSR_UDF_E 0x00000100 161 #define FPU_CSR_UDF_E 0x00000100
162 #define FPU_CSR_INE_E 0x00000080 162 #define FPU_CSR_INE_E 0x00000080
163 163
164 #define FPU_CSR_ALL_S 0x0000007c 164 #define FPU_CSR_ALL_S 0x0000007c
165 #define FPU_CSR_INV_S 0x00000040 165 #define FPU_CSR_INV_S 0x00000040
166 #define FPU_CSR_DIV_S 0x00000020 166 #define FPU_CSR_DIV_S 0x00000020
167 #define FPU_CSR_OVF_S 0x00000010 167 #define FPU_CSR_OVF_S 0x00000010
168 #define FPU_CSR_UDF_S 0x00000008 168 #define FPU_CSR_UDF_S 0x00000008
169 #define FPU_CSR_INE_S 0x00000004 169 #define FPU_CSR_INE_S 0x00000004
170 170
171 /* Bits 0 and 1 of FPU Status Register specify the rounding mode */ 171 /* Bits 0 and 1 of FPU Status Register specify the rounding mode */
172 #define FPU_CSR_RM 0x00000003 172 #define FPU_CSR_RM 0x00000003
173 #define FPU_CSR_RN 0x0 /* nearest */ 173 #define FPU_CSR_RN 0x0 /* nearest */
174 #define FPU_CSR_RZ 0x1 /* towards zero */ 174 #define FPU_CSR_RZ 0x1 /* towards zero */
175 #define FPU_CSR_RU 0x2 /* towards +Infinity */ 175 #define FPU_CSR_RU 0x2 /* towards +Infinity */
176 #define FPU_CSR_RD 0x3 /* towards -Infinity */ 176 #define FPU_CSR_RD 0x3 /* towards -Infinity */
177 177
178 178
179 /* 179 /*
180 * Values for PageMask register 180 * Values for PageMask register
181 */ 181 */
182 #ifdef CONFIG_CPU_VR41XX 182 #ifdef CONFIG_CPU_VR41XX
183 183
184 /* Why doesn't stupidity hurt ... */ 184 /* Why doesn't stupidity hurt ... */
185 185
186 #define PM_1K 0x00000000 186 #define PM_1K 0x00000000
187 #define PM_4K 0x00001800 187 #define PM_4K 0x00001800
188 #define PM_16K 0x00007800 188 #define PM_16K 0x00007800
189 #define PM_64K 0x0001f800 189 #define PM_64K 0x0001f800
190 #define PM_256K 0x0007f800 190 #define PM_256K 0x0007f800
191 191
192 #else 192 #else
193 193
194 #define PM_4K 0x00000000 194 #define PM_4K 0x00000000
195 #define PM_8K 0x00002000 195 #define PM_8K 0x00002000
196 #define PM_16K 0x00006000 196 #define PM_16K 0x00006000
197 #define PM_32K 0x0000e000 197 #define PM_32K 0x0000e000
198 #define PM_64K 0x0001e000 198 #define PM_64K 0x0001e000
199 #define PM_128K 0x0003e000 199 #define PM_128K 0x0003e000
200 #define PM_256K 0x0007e000 200 #define PM_256K 0x0007e000
201 #define PM_512K 0x000fe000 201 #define PM_512K 0x000fe000
202 #define PM_1M 0x001fe000 202 #define PM_1M 0x001fe000
203 #define PM_2M 0x003fe000 203 #define PM_2M 0x003fe000
204 #define PM_4M 0x007fe000 204 #define PM_4M 0x007fe000
205 #define PM_8M 0x00ffe000 205 #define PM_8M 0x00ffe000
206 #define PM_16M 0x01ffe000 206 #define PM_16M 0x01ffe000
207 #define PM_32M 0x03ffe000 207 #define PM_32M 0x03ffe000
208 #define PM_64M 0x07ffe000 208 #define PM_64M 0x07ffe000
209 #define PM_256M 0x1fffe000 209 #define PM_256M 0x1fffe000
210 #define PM_1G 0x7fffe000 210 #define PM_1G 0x7fffe000
211 211
212 #endif 212 #endif
213 213
214 /* 214 /*
215 * Default page size for a given kernel configuration 215 * Default page size for a given kernel configuration
216 */ 216 */
217 #ifdef CONFIG_PAGE_SIZE_4KB 217 #ifdef CONFIG_PAGE_SIZE_4KB
218 #define PM_DEFAULT_MASK PM_4K 218 #define PM_DEFAULT_MASK PM_4K
219 #elif defined(CONFIG_PAGE_SIZE_8KB) 219 #elif defined(CONFIG_PAGE_SIZE_8KB)
220 #define PM_DEFAULT_MASK PM_8K 220 #define PM_DEFAULT_MASK PM_8K
221 #elif defined(CONFIG_PAGE_SIZE_16KB) 221 #elif defined(CONFIG_PAGE_SIZE_16KB)
222 #define PM_DEFAULT_MASK PM_16K 222 #define PM_DEFAULT_MASK PM_16K
223 #elif defined(CONFIG_PAGE_SIZE_32KB) 223 #elif defined(CONFIG_PAGE_SIZE_32KB)
224 #define PM_DEFAULT_MASK PM_32K 224 #define PM_DEFAULT_MASK PM_32K
225 #elif defined(CONFIG_PAGE_SIZE_64KB) 225 #elif defined(CONFIG_PAGE_SIZE_64KB)
226 #define PM_DEFAULT_MASK PM_64K 226 #define PM_DEFAULT_MASK PM_64K
227 #else 227 #else
228 #error Bad page size configuration! 228 #error Bad page size configuration!
229 #endif 229 #endif
230 230
231 /* 231 /*
232 * Default huge tlb size for a given kernel configuration 232 * Default huge tlb size for a given kernel configuration
233 */ 233 */
234 #ifdef CONFIG_PAGE_SIZE_4KB 234 #ifdef CONFIG_PAGE_SIZE_4KB
235 #define PM_HUGE_MASK PM_1M 235 #define PM_HUGE_MASK PM_1M
236 #elif defined(CONFIG_PAGE_SIZE_8KB) 236 #elif defined(CONFIG_PAGE_SIZE_8KB)
237 #define PM_HUGE_MASK PM_4M 237 #define PM_HUGE_MASK PM_4M
238 #elif defined(CONFIG_PAGE_SIZE_16KB) 238 #elif defined(CONFIG_PAGE_SIZE_16KB)
239 #define PM_HUGE_MASK PM_16M 239 #define PM_HUGE_MASK PM_16M
240 #elif defined(CONFIG_PAGE_SIZE_32KB) 240 #elif defined(CONFIG_PAGE_SIZE_32KB)
241 #define PM_HUGE_MASK PM_64M 241 #define PM_HUGE_MASK PM_64M
242 #elif defined(CONFIG_PAGE_SIZE_64KB) 242 #elif defined(CONFIG_PAGE_SIZE_64KB)
243 #define PM_HUGE_MASK PM_256M 243 #define PM_HUGE_MASK PM_256M
244 #elif defined(CONFIG_MIPS_HUGE_TLB_SUPPORT) 244 #elif defined(CONFIG_MIPS_HUGE_TLB_SUPPORT)
245 #error Bad page size configuration for hugetlbfs! 245 #error Bad page size configuration for hugetlbfs!
246 #endif 246 #endif
247 247
248 /* 248 /*
249 * Values used for computation of new tlb entries 249 * Values used for computation of new tlb entries
250 */ 250 */
251 #define PL_4K 12 251 #define PL_4K 12
252 #define PL_16K 14 252 #define PL_16K 14
253 #define PL_64K 16 253 #define PL_64K 16
254 #define PL_256K 18 254 #define PL_256K 18
255 #define PL_1M 20 255 #define PL_1M 20
256 #define PL_4M 22 256 #define PL_4M 22
257 #define PL_16M 24 257 #define PL_16M 24
258 #define PL_64M 26 258 #define PL_64M 26
259 #define PL_256M 28 259 #define PL_256M 28
260 260
261 /* 261 /*
262 * PageGrain bits 262 * PageGrain bits
263 */ 263 */
264 #define PG_RIE (_ULCAST_(1) << 31) 264 #define PG_RIE (_ULCAST_(1) << 31)
265 #define PG_XIE (_ULCAST_(1) << 30) 265 #define PG_XIE (_ULCAST_(1) << 30)
266 #define PG_ELPA (_ULCAST_(1) << 29) 266 #define PG_ELPA (_ULCAST_(1) << 29)
267 #define PG_ESP (_ULCAST_(1) << 28) 267 #define PG_ESP (_ULCAST_(1) << 28)
268 #define PG_IEC (_ULCAST_(1) << 27) 268 #define PG_IEC (_ULCAST_(1) << 27)
269 269
270 /* 270 /*
271 * R4x00 interrupt enable / cause bits 271 * R4x00 interrupt enable / cause bits
272 */ 272 */
273 #define IE_SW0 (_ULCAST_(1) << 8) 273 #define IE_SW0 (_ULCAST_(1) << 8)
274 #define IE_SW1 (_ULCAST_(1) << 9) 274 #define IE_SW1 (_ULCAST_(1) << 9)
275 #define IE_IRQ0 (_ULCAST_(1) << 10) 275 #define IE_IRQ0 (_ULCAST_(1) << 10)
276 #define IE_IRQ1 (_ULCAST_(1) << 11) 276 #define IE_IRQ1 (_ULCAST_(1) << 11)
277 #define IE_IRQ2 (_ULCAST_(1) << 12) 277 #define IE_IRQ2 (_ULCAST_(1) << 12)
278 #define IE_IRQ3 (_ULCAST_(1) << 13) 278 #define IE_IRQ3 (_ULCAST_(1) << 13)
279 #define IE_IRQ4 (_ULCAST_(1) << 14) 279 #define IE_IRQ4 (_ULCAST_(1) << 14)
280 #define IE_IRQ5 (_ULCAST_(1) << 15) 280 #define IE_IRQ5 (_ULCAST_(1) << 15)
281 281
282 /* 282 /*
283 * R4x00 interrupt cause bits 283 * R4x00 interrupt cause bits
284 */ 284 */
285 #define C_SW0 (_ULCAST_(1) << 8) 285 #define C_SW0 (_ULCAST_(1) << 8)
286 #define C_SW1 (_ULCAST_(1) << 9) 286 #define C_SW1 (_ULCAST_(1) << 9)
287 #define C_IRQ0 (_ULCAST_(1) << 10) 287 #define C_IRQ0 (_ULCAST_(1) << 10)
288 #define C_IRQ1 (_ULCAST_(1) << 11) 288 #define C_IRQ1 (_ULCAST_(1) << 11)
289 #define C_IRQ2 (_ULCAST_(1) << 12) 289 #define C_IRQ2 (_ULCAST_(1) << 12)
290 #define C_IRQ3 (_ULCAST_(1) << 13) 290 #define C_IRQ3 (_ULCAST_(1) << 13)
291 #define C_IRQ4 (_ULCAST_(1) << 14) 291 #define C_IRQ4 (_ULCAST_(1) << 14)
292 #define C_IRQ5 (_ULCAST_(1) << 15) 292 #define C_IRQ5 (_ULCAST_(1) << 15)
293 293
294 /* 294 /*
295 * Bitfields in the R4xx0 cp0 status register 295 * Bitfields in the R4xx0 cp0 status register
296 */ 296 */
297 #define ST0_IE 0x00000001 297 #define ST0_IE 0x00000001
298 #define ST0_EXL 0x00000002 298 #define ST0_EXL 0x00000002
299 #define ST0_ERL 0x00000004 299 #define ST0_ERL 0x00000004
300 #define ST0_KSU 0x00000018 300 #define ST0_KSU 0x00000018
301 # define KSU_USER 0x00000010 301 # define KSU_USER 0x00000010
302 # define KSU_SUPERVISOR 0x00000008 302 # define KSU_SUPERVISOR 0x00000008
303 # define KSU_KERNEL 0x00000000 303 # define KSU_KERNEL 0x00000000
304 #define ST0_UX 0x00000020 304 #define ST0_UX 0x00000020
305 #define ST0_SX 0x00000040 305 #define ST0_SX 0x00000040
306 #define ST0_KX 0x00000080 306 #define ST0_KX 0x00000080
307 #define ST0_DE 0x00010000 307 #define ST0_DE 0x00010000
308 #define ST0_CE 0x00020000 308 #define ST0_CE 0x00020000
309 309
310 /* 310 /*
311 * Setting c0_status.co enables Hit_Writeback and Hit_Writeback_Invalidate 311 * Setting c0_status.co enables Hit_Writeback and Hit_Writeback_Invalidate
312 * cacheops in userspace. This bit exists only on RM7000 and RM9000 312 * cacheops in userspace. This bit exists only on RM7000 and RM9000
313 * processors. 313 * processors.
314 */ 314 */
315 #define ST0_CO 0x08000000 315 #define ST0_CO 0x08000000
316 316
317 /* 317 /*
318 * Bitfields in the R[23]000 cp0 status register. 318 * Bitfields in the R[23]000 cp0 status register.
319 */ 319 */
320 #define ST0_IEC 0x00000001 320 #define ST0_IEC 0x00000001
321 #define ST0_KUC 0x00000002 321 #define ST0_KUC 0x00000002
322 #define ST0_IEP 0x00000004 322 #define ST0_IEP 0x00000004
323 #define ST0_KUP 0x00000008 323 #define ST0_KUP 0x00000008
324 #define ST0_IEO 0x00000010 324 #define ST0_IEO 0x00000010
325 #define ST0_KUO 0x00000020 325 #define ST0_KUO 0x00000020
326 /* bits 6 & 7 are reserved on R[23]000 */ 326 /* bits 6 & 7 are reserved on R[23]000 */
327 #define ST0_ISC 0x00010000 327 #define ST0_ISC 0x00010000
328 #define ST0_SWC 0x00020000 328 #define ST0_SWC 0x00020000
329 #define ST0_CM 0x00080000 329 #define ST0_CM 0x00080000
330 330
331 /* 331 /*
332 * Bits specific to the R4640/R4650 332 * Bits specific to the R4640/R4650
333 */ 333 */
334 #define ST0_UM (_ULCAST_(1) << 4) 334 #define ST0_UM (_ULCAST_(1) << 4)
335 #define ST0_IL (_ULCAST_(1) << 23) 335 #define ST0_IL (_ULCAST_(1) << 23)
336 #define ST0_DL (_ULCAST_(1) << 24) 336 #define ST0_DL (_ULCAST_(1) << 24)
337 337
338 /* 338 /*
339 * Enable the MIPS MDMX and DSP ASEs 339 * Enable the MIPS MDMX and DSP ASEs
340 */ 340 */
341 #define ST0_MX 0x01000000 341 #define ST0_MX 0x01000000
342 342
343 /* 343 /*
344 * Bitfields in the TX39 family CP0 Configuration Register 3 344 * Bitfields in the TX39 family CP0 Configuration Register 3
345 */ 345 */
346 #define TX39_CONF_ICS_SHIFT 19 346 #define TX39_CONF_ICS_SHIFT 19
347 #define TX39_CONF_ICS_MASK 0x00380000 347 #define TX39_CONF_ICS_MASK 0x00380000
348 #define TX39_CONF_ICS_1KB 0x00000000 348 #define TX39_CONF_ICS_1KB 0x00000000
349 #define TX39_CONF_ICS_2KB 0x00080000 349 #define TX39_CONF_ICS_2KB 0x00080000
350 #define TX39_CONF_ICS_4KB 0x00100000 350 #define TX39_CONF_ICS_4KB 0x00100000
351 #define TX39_CONF_ICS_8KB 0x00180000 351 #define TX39_CONF_ICS_8KB 0x00180000
352 #define TX39_CONF_ICS_16KB 0x00200000 352 #define TX39_CONF_ICS_16KB 0x00200000
353 353
354 #define TX39_CONF_DCS_SHIFT 16 354 #define TX39_CONF_DCS_SHIFT 16
355 #define TX39_CONF_DCS_MASK 0x00070000 355 #define TX39_CONF_DCS_MASK 0x00070000
356 #define TX39_CONF_DCS_1KB 0x00000000 356 #define TX39_CONF_DCS_1KB 0x00000000
357 #define TX39_CONF_DCS_2KB 0x00010000 357 #define TX39_CONF_DCS_2KB 0x00010000
358 #define TX39_CONF_DCS_4KB 0x00020000 358 #define TX39_CONF_DCS_4KB 0x00020000
359 #define TX39_CONF_DCS_8KB 0x00030000 359 #define TX39_CONF_DCS_8KB 0x00030000
360 #define TX39_CONF_DCS_16KB 0x00040000 360 #define TX39_CONF_DCS_16KB 0x00040000
361 361
362 #define TX39_CONF_CWFON 0x00004000 362 #define TX39_CONF_CWFON 0x00004000
363 #define TX39_CONF_WBON 0x00002000 363 #define TX39_CONF_WBON 0x00002000
364 #define TX39_CONF_RF_SHIFT 10 364 #define TX39_CONF_RF_SHIFT 10
365 #define TX39_CONF_RF_MASK 0x00000c00 365 #define TX39_CONF_RF_MASK 0x00000c00
366 #define TX39_CONF_DOZE 0x00000200 366 #define TX39_CONF_DOZE 0x00000200
367 #define TX39_CONF_HALT 0x00000100 367 #define TX39_CONF_HALT 0x00000100
368 #define TX39_CONF_LOCK 0x00000080 368 #define TX39_CONF_LOCK 0x00000080
369 #define TX39_CONF_ICE 0x00000020 369 #define TX39_CONF_ICE 0x00000020
370 #define TX39_CONF_DCE 0x00000010 370 #define TX39_CONF_DCE 0x00000010
371 #define TX39_CONF_IRSIZE_SHIFT 2 371 #define TX39_CONF_IRSIZE_SHIFT 2
372 #define TX39_CONF_IRSIZE_MASK 0x0000000c 372 #define TX39_CONF_IRSIZE_MASK 0x0000000c
373 #define TX39_CONF_DRSIZE_SHIFT 0 373 #define TX39_CONF_DRSIZE_SHIFT 0
374 #define TX39_CONF_DRSIZE_MASK 0x00000003 374 #define TX39_CONF_DRSIZE_MASK 0x00000003
375 375
376 /* 376 /*
377 * Status register bits available in all MIPS CPUs. 377 * Status register bits available in all MIPS CPUs.
378 */ 378 */
379 #define ST0_IM 0x0000ff00 379 #define ST0_IM 0x0000ff00
380 #define STATUSB_IP0 8 380 #define STATUSB_IP0 8
381 #define STATUSF_IP0 (_ULCAST_(1) << 8) 381 #define STATUSF_IP0 (_ULCAST_(1) << 8)
382 #define STATUSB_IP1 9 382 #define STATUSB_IP1 9
383 #define STATUSF_IP1 (_ULCAST_(1) << 9) 383 #define STATUSF_IP1 (_ULCAST_(1) << 9)
384 #define STATUSB_IP2 10 384 #define STATUSB_IP2 10
385 #define STATUSF_IP2 (_ULCAST_(1) << 10) 385 #define STATUSF_IP2 (_ULCAST_(1) << 10)
386 #define STATUSB_IP3 11 386 #define STATUSB_IP3 11
387 #define STATUSF_IP3 (_ULCAST_(1) << 11) 387 #define STATUSF_IP3 (_ULCAST_(1) << 11)
388 #define STATUSB_IP4 12 388 #define STATUSB_IP4 12
389 #define STATUSF_IP4 (_ULCAST_(1) << 12) 389 #define STATUSF_IP4 (_ULCAST_(1) << 12)
390 #define STATUSB_IP5 13 390 #define STATUSB_IP5 13
391 #define STATUSF_IP5 (_ULCAST_(1) << 13) 391 #define STATUSF_IP5 (_ULCAST_(1) << 13)
392 #define STATUSB_IP6 14 392 #define STATUSB_IP6 14
393 #define STATUSF_IP6 (_ULCAST_(1) << 14) 393 #define STATUSF_IP6 (_ULCAST_(1) << 14)
394 #define STATUSB_IP7 15 394 #define STATUSB_IP7 15
395 #define STATUSF_IP7 (_ULCAST_(1) << 15) 395 #define STATUSF_IP7 (_ULCAST_(1) << 15)
396 #define STATUSB_IP8 0 396 #define STATUSB_IP8 0
397 #define STATUSF_IP8 (_ULCAST_(1) << 0) 397 #define STATUSF_IP8 (_ULCAST_(1) << 0)
398 #define STATUSB_IP9 1 398 #define STATUSB_IP9 1
399 #define STATUSF_IP9 (_ULCAST_(1) << 1) 399 #define STATUSF_IP9 (_ULCAST_(1) << 1)
400 #define STATUSB_IP10 2 400 #define STATUSB_IP10 2
401 #define STATUSF_IP10 (_ULCAST_(1) << 2) 401 #define STATUSF_IP10 (_ULCAST_(1) << 2)
402 #define STATUSB_IP11 3 402 #define STATUSB_IP11 3
403 #define STATUSF_IP11 (_ULCAST_(1) << 3) 403 #define STATUSF_IP11 (_ULCAST_(1) << 3)
404 #define STATUSB_IP12 4 404 #define STATUSB_IP12 4
405 #define STATUSF_IP12 (_ULCAST_(1) << 4) 405 #define STATUSF_IP12 (_ULCAST_(1) << 4)
406 #define STATUSB_IP13 5 406 #define STATUSB_IP13 5
407 #define STATUSF_IP13 (_ULCAST_(1) << 5) 407 #define STATUSF_IP13 (_ULCAST_(1) << 5)
408 #define STATUSB_IP14 6 408 #define STATUSB_IP14 6
409 #define STATUSF_IP14 (_ULCAST_(1) << 6) 409 #define STATUSF_IP14 (_ULCAST_(1) << 6)
410 #define STATUSB_IP15 7 410 #define STATUSB_IP15 7
411 #define STATUSF_IP15 (_ULCAST_(1) << 7) 411 #define STATUSF_IP15 (_ULCAST_(1) << 7)
412 #define ST0_CH 0x00040000 412 #define ST0_CH 0x00040000
413 #define ST0_NMI 0x00080000 413 #define ST0_NMI 0x00080000
414 #define ST0_SR 0x00100000 414 #define ST0_SR 0x00100000
415 #define ST0_TS 0x00200000 415 #define ST0_TS 0x00200000
416 #define ST0_BEV 0x00400000 416 #define ST0_BEV 0x00400000
417 #define ST0_RE 0x02000000 417 #define ST0_RE 0x02000000
418 #define ST0_FR 0x04000000 418 #define ST0_FR 0x04000000
419 #define ST0_CU 0xf0000000 419 #define ST0_CU 0xf0000000
420 #define ST0_CU0 0x10000000 420 #define ST0_CU0 0x10000000
421 #define ST0_CU1 0x20000000 421 #define ST0_CU1 0x20000000
422 #define ST0_CU2 0x40000000 422 #define ST0_CU2 0x40000000
423 #define ST0_CU3 0x80000000 423 #define ST0_CU3 0x80000000
424 #define ST0_XX 0x80000000 /* MIPS IV naming */ 424 #define ST0_XX 0x80000000 /* MIPS IV naming */
425 425
426 /* 426 /*
427 * Bitfields and bit numbers in the coprocessor 0 IntCtl register. (MIPSR2) 427 * Bitfields and bit numbers in the coprocessor 0 IntCtl register. (MIPSR2)
428 * 428 *
429 * Refer to your MIPS R4xx0 manual, chapter 5 for explanation. 429 * Refer to your MIPS R4xx0 manual, chapter 5 for explanation.
430 */ 430 */
431 #define INTCTLB_IPPCI 26 431 #define INTCTLB_IPPCI 26
432 #define INTCTLF_IPPCI (_ULCAST_(7) << INTCTLB_IPPCI) 432 #define INTCTLF_IPPCI (_ULCAST_(7) << INTCTLB_IPPCI)
433 #define INTCTLB_IPTI 29 433 #define INTCTLB_IPTI 29
434 #define INTCTLF_IPTI (_ULCAST_(7) << INTCTLB_IPTI) 434 #define INTCTLF_IPTI (_ULCAST_(7) << INTCTLB_IPTI)
435 435
436 /* 436 /*
437 * Bitfields and bit numbers in the coprocessor 0 cause register. 437 * Bitfields and bit numbers in the coprocessor 0 cause register.
438 * 438 *
439 * Refer to your MIPS R4xx0 manual, chapter 5 for explanation. 439 * Refer to your MIPS R4xx0 manual, chapter 5 for explanation.
440 */ 440 */
441 #define CAUSEB_EXCCODE 2 441 #define CAUSEB_EXCCODE 2
442 #define CAUSEF_EXCCODE (_ULCAST_(31) << 2) 442 #define CAUSEF_EXCCODE (_ULCAST_(31) << 2)
443 #define CAUSEB_IP 8 443 #define CAUSEB_IP 8
444 #define CAUSEF_IP (_ULCAST_(255) << 8) 444 #define CAUSEF_IP (_ULCAST_(255) << 8)
445 #define CAUSEB_IP0 8 445 #define CAUSEB_IP0 8
446 #define CAUSEF_IP0 (_ULCAST_(1) << 8) 446 #define CAUSEF_IP0 (_ULCAST_(1) << 8)
447 #define CAUSEB_IP1 9 447 #define CAUSEB_IP1 9
448 #define CAUSEF_IP1 (_ULCAST_(1) << 9) 448 #define CAUSEF_IP1 (_ULCAST_(1) << 9)
449 #define CAUSEB_IP2 10 449 #define CAUSEB_IP2 10
450 #define CAUSEF_IP2 (_ULCAST_(1) << 10) 450 #define CAUSEF_IP2 (_ULCAST_(1) << 10)
451 #define CAUSEB_IP3 11 451 #define CAUSEB_IP3 11
452 #define CAUSEF_IP3 (_ULCAST_(1) << 11) 452 #define CAUSEF_IP3 (_ULCAST_(1) << 11)
453 #define CAUSEB_IP4 12 453 #define CAUSEB_IP4 12
454 #define CAUSEF_IP4 (_ULCAST_(1) << 12) 454 #define CAUSEF_IP4 (_ULCAST_(1) << 12)
455 #define CAUSEB_IP5 13 455 #define CAUSEB_IP5 13
456 #define CAUSEF_IP5 (_ULCAST_(1) << 13) 456 #define CAUSEF_IP5 (_ULCAST_(1) << 13)
457 #define CAUSEB_IP6 14 457 #define CAUSEB_IP6 14
458 #define CAUSEF_IP6 (_ULCAST_(1) << 14) 458 #define CAUSEF_IP6 (_ULCAST_(1) << 14)
459 #define CAUSEB_IP7 15 459 #define CAUSEB_IP7 15
460 #define CAUSEF_IP7 (_ULCAST_(1) << 15) 460 #define CAUSEF_IP7 (_ULCAST_(1) << 15)
461 #define CAUSEB_IV 23 461 #define CAUSEB_IV 23
462 #define CAUSEF_IV (_ULCAST_(1) << 23) 462 #define CAUSEF_IV (_ULCAST_(1) << 23)
463 #define CAUSEB_PCI 26 463 #define CAUSEB_PCI 26
464 #define CAUSEF_PCI (_ULCAST_(1) << 26) 464 #define CAUSEF_PCI (_ULCAST_(1) << 26)
465 #define CAUSEB_CE 28 465 #define CAUSEB_CE 28
466 #define CAUSEF_CE (_ULCAST_(3) << 28) 466 #define CAUSEF_CE (_ULCAST_(3) << 28)
467 #define CAUSEB_TI 30 467 #define CAUSEB_TI 30
468 #define CAUSEF_TI (_ULCAST_(1) << 30) 468 #define CAUSEF_TI (_ULCAST_(1) << 30)
469 #define CAUSEB_BD 31 469 #define CAUSEB_BD 31
470 #define CAUSEF_BD (_ULCAST_(1) << 31) 470 #define CAUSEF_BD (_ULCAST_(1) << 31)
471 471
472 /* 472 /*
473 * Bits in the coprocessor 0 config register. 473 * Bits in the coprocessor 0 config register.
474 */ 474 */
475 /* Generic bits. */ 475 /* Generic bits. */
476 #define CONF_CM_CACHABLE_NO_WA 0 476 #define CONF_CM_CACHABLE_NO_WA 0
477 #define CONF_CM_CACHABLE_WA 1 477 #define CONF_CM_CACHABLE_WA 1
478 #define CONF_CM_UNCACHED 2 478 #define CONF_CM_UNCACHED 2
479 #define CONF_CM_CACHABLE_NONCOHERENT 3 479 #define CONF_CM_CACHABLE_NONCOHERENT 3
480 #define CONF_CM_CACHABLE_CE 4 480 #define CONF_CM_CACHABLE_CE 4
481 #define CONF_CM_CACHABLE_COW 5 481 #define CONF_CM_CACHABLE_COW 5
482 #define CONF_CM_CACHABLE_CUW 6 482 #define CONF_CM_CACHABLE_CUW 6
483 #define CONF_CM_CACHABLE_ACCELERATED 7 483 #define CONF_CM_CACHABLE_ACCELERATED 7
484 #define CONF_CM_CMASK 7 484 #define CONF_CM_CMASK 7
485 #define CONF_BE (_ULCAST_(1) << 15) 485 #define CONF_BE (_ULCAST_(1) << 15)
486 486
487 /* Bits common to various processors. */ 487 /* Bits common to various processors. */
488 #define CONF_CU (_ULCAST_(1) << 3) 488 #define CONF_CU (_ULCAST_(1) << 3)
489 #define CONF_DB (_ULCAST_(1) << 4) 489 #define CONF_DB (_ULCAST_(1) << 4)
490 #define CONF_IB (_ULCAST_(1) << 5) 490 #define CONF_IB (_ULCAST_(1) << 5)
491 #define CONF_DC (_ULCAST_(7) << 6) 491 #define CONF_DC (_ULCAST_(7) << 6)
492 #define CONF_IC (_ULCAST_(7) << 9) 492 #define CONF_IC (_ULCAST_(7) << 9)
493 #define CONF_EB (_ULCAST_(1) << 13) 493 #define CONF_EB (_ULCAST_(1) << 13)
494 #define CONF_EM (_ULCAST_(1) << 14) 494 #define CONF_EM (_ULCAST_(1) << 14)
495 #define CONF_SM (_ULCAST_(1) << 16) 495 #define CONF_SM (_ULCAST_(1) << 16)
496 #define CONF_SC (_ULCAST_(1) << 17) 496 #define CONF_SC (_ULCAST_(1) << 17)
497 #define CONF_EW (_ULCAST_(3) << 18) 497 #define CONF_EW (_ULCAST_(3) << 18)
498 #define CONF_EP (_ULCAST_(15)<< 24) 498 #define CONF_EP (_ULCAST_(15)<< 24)
499 #define CONF_EC (_ULCAST_(7) << 28) 499 #define CONF_EC (_ULCAST_(7) << 28)
500 #define CONF_CM (_ULCAST_(1) << 31) 500 #define CONF_CM (_ULCAST_(1) << 31)
501 501
502 /* Bits specific to the R4xx0. */ 502 /* Bits specific to the R4xx0. */
503 #define R4K_CONF_SW (_ULCAST_(1) << 20) 503 #define R4K_CONF_SW (_ULCAST_(1) << 20)
504 #define R4K_CONF_SS (_ULCAST_(1) << 21) 504 #define R4K_CONF_SS (_ULCAST_(1) << 21)
505 #define R4K_CONF_SB (_ULCAST_(3) << 22) 505 #define R4K_CONF_SB (_ULCAST_(3) << 22)
506 506
507 /* Bits specific to the R5000. */ 507 /* Bits specific to the R5000. */
508 #define R5K_CONF_SE (_ULCAST_(1) << 12) 508 #define R5K_CONF_SE (_ULCAST_(1) << 12)
509 #define R5K_CONF_SS (_ULCAST_(3) << 20) 509 #define R5K_CONF_SS (_ULCAST_(3) << 20)
510 510
511 /* Bits specific to the RM7000. */ 511 /* Bits specific to the RM7000. */
512 #define RM7K_CONF_SE (_ULCAST_(1) << 3) 512 #define RM7K_CONF_SE (_ULCAST_(1) << 3)
513 #define RM7K_CONF_TE (_ULCAST_(1) << 12) 513 #define RM7K_CONF_TE (_ULCAST_(1) << 12)
514 #define RM7K_CONF_CLK (_ULCAST_(1) << 16) 514 #define RM7K_CONF_CLK (_ULCAST_(1) << 16)
515 #define RM7K_CONF_TC (_ULCAST_(1) << 17) 515 #define RM7K_CONF_TC (_ULCAST_(1) << 17)
516 #define RM7K_CONF_SI (_ULCAST_(3) << 20) 516 #define RM7K_CONF_SI (_ULCAST_(3) << 20)
517 #define RM7K_CONF_SC (_ULCAST_(1) << 31) 517 #define RM7K_CONF_SC (_ULCAST_(1) << 31)
518 518
519 /* Bits specific to the R10000. */ 519 /* Bits specific to the R10000. */
520 #define R10K_CONF_DN (_ULCAST_(3) << 3) 520 #define R10K_CONF_DN (_ULCAST_(3) << 3)
521 #define R10K_CONF_CT (_ULCAST_(1) << 5) 521 #define R10K_CONF_CT (_ULCAST_(1) << 5)
522 #define R10K_CONF_PE (_ULCAST_(1) << 6) 522 #define R10K_CONF_PE (_ULCAST_(1) << 6)
523 #define R10K_CONF_PM (_ULCAST_(3) << 7) 523 #define R10K_CONF_PM (_ULCAST_(3) << 7)
524 #define R10K_CONF_EC (_ULCAST_(15)<< 9) 524 #define R10K_CONF_EC (_ULCAST_(15)<< 9)
525 #define R10K_CONF_SB (_ULCAST_(1) << 13) 525 #define R10K_CONF_SB (_ULCAST_(1) << 13)
526 #define R10K_CONF_SK (_ULCAST_(1) << 14) 526 #define R10K_CONF_SK (_ULCAST_(1) << 14)
527 #define R10K_CONF_SS (_ULCAST_(7) << 16) 527 #define R10K_CONF_SS (_ULCAST_(7) << 16)
528 #define R10K_CONF_SC (_ULCAST_(7) << 19) 528 #define R10K_CONF_SC (_ULCAST_(7) << 19)
529 #define R10K_CONF_DC (_ULCAST_(7) << 26) 529 #define R10K_CONF_DC (_ULCAST_(7) << 26)
530 #define R10K_CONF_IC (_ULCAST_(7) << 29) 530 #define R10K_CONF_IC (_ULCAST_(7) << 29)
531 531
532 /* Bits specific to the VR41xx. */ 532 /* Bits specific to the VR41xx. */
533 #define VR41_CONF_CS (_ULCAST_(1) << 12) 533 #define VR41_CONF_CS (_ULCAST_(1) << 12)
534 #define VR41_CONF_P4K (_ULCAST_(1) << 13) 534 #define VR41_CONF_P4K (_ULCAST_(1) << 13)
535 #define VR41_CONF_BP (_ULCAST_(1) << 16) 535 #define VR41_CONF_BP (_ULCAST_(1) << 16)
536 #define VR41_CONF_M16 (_ULCAST_(1) << 20) 536 #define VR41_CONF_M16 (_ULCAST_(1) << 20)
537 #define VR41_CONF_AD (_ULCAST_(1) << 23) 537 #define VR41_CONF_AD (_ULCAST_(1) << 23)
538 538
539 /* Bits specific to the R30xx. */ 539 /* Bits specific to the R30xx. */
540 #define R30XX_CONF_FDM (_ULCAST_(1) << 19) 540 #define R30XX_CONF_FDM (_ULCAST_(1) << 19)
541 #define R30XX_CONF_REV (_ULCAST_(1) << 22) 541 #define R30XX_CONF_REV (_ULCAST_(1) << 22)
542 #define R30XX_CONF_AC (_ULCAST_(1) << 23) 542 #define R30XX_CONF_AC (_ULCAST_(1) << 23)
543 #define R30XX_CONF_RF (_ULCAST_(1) << 24) 543 #define R30XX_CONF_RF (_ULCAST_(1) << 24)
544 #define R30XX_CONF_HALT (_ULCAST_(1) << 25) 544 #define R30XX_CONF_HALT (_ULCAST_(1) << 25)
545 #define R30XX_CONF_FPINT (_ULCAST_(7) << 26) 545 #define R30XX_CONF_FPINT (_ULCAST_(7) << 26)
546 #define R30XX_CONF_DBR (_ULCAST_(1) << 29) 546 #define R30XX_CONF_DBR (_ULCAST_(1) << 29)
547 #define R30XX_CONF_SB (_ULCAST_(1) << 30) 547 #define R30XX_CONF_SB (_ULCAST_(1) << 30)
548 #define R30XX_CONF_LOCK (_ULCAST_(1) << 31) 548 #define R30XX_CONF_LOCK (_ULCAST_(1) << 31)
549 549
550 /* Bits specific to the TX49. */ 550 /* Bits specific to the TX49. */
551 #define TX49_CONF_DC (_ULCAST_(1) << 16) 551 #define TX49_CONF_DC (_ULCAST_(1) << 16)
552 #define TX49_CONF_IC (_ULCAST_(1) << 17) /* conflict with CONF_SC */ 552 #define TX49_CONF_IC (_ULCAST_(1) << 17) /* conflict with CONF_SC */
553 #define TX49_CONF_HALT (_ULCAST_(1) << 18) 553 #define TX49_CONF_HALT (_ULCAST_(1) << 18)
554 #define TX49_CONF_CWFON (_ULCAST_(1) << 27) 554 #define TX49_CONF_CWFON (_ULCAST_(1) << 27)
555 555
556 /* Bits specific to the MIPS32/64 PRA. */ 556 /* Bits specific to the MIPS32/64 PRA. */
557 #define MIPS_CONF_MT (_ULCAST_(7) << 7) 557 #define MIPS_CONF_MT (_ULCAST_(7) << 7)
558 #define MIPS_CONF_AR (_ULCAST_(7) << 10) 558 #define MIPS_CONF_AR (_ULCAST_(7) << 10)
559 #define MIPS_CONF_AT (_ULCAST_(3) << 13) 559 #define MIPS_CONF_AT (_ULCAST_(3) << 13)
560 #define MIPS_CONF_M (_ULCAST_(1) << 31) 560 #define MIPS_CONF_M (_ULCAST_(1) << 31)
561 561
562 /* 562 /*
563 * Bits in the MIPS32/64 PRA coprocessor 0 config registers 1 and above. 563 * Bits in the MIPS32/64 PRA coprocessor 0 config registers 1 and above.
564 */ 564 */
565 #define MIPS_CONF1_FP (_ULCAST_(1) << 0) 565 #define MIPS_CONF1_FP (_ULCAST_(1) << 0)
566 #define MIPS_CONF1_EP (_ULCAST_(1) << 1) 566 #define MIPS_CONF1_EP (_ULCAST_(1) << 1)
567 #define MIPS_CONF1_CA (_ULCAST_(1) << 2) 567 #define MIPS_CONF1_CA (_ULCAST_(1) << 2)
568 #define MIPS_CONF1_WR (_ULCAST_(1) << 3) 568 #define MIPS_CONF1_WR (_ULCAST_(1) << 3)
569 #define MIPS_CONF1_PC (_ULCAST_(1) << 4) 569 #define MIPS_CONF1_PC (_ULCAST_(1) << 4)
570 #define MIPS_CONF1_MD (_ULCAST_(1) << 5) 570 #define MIPS_CONF1_MD (_ULCAST_(1) << 5)
571 #define MIPS_CONF1_C2 (_ULCAST_(1) << 6) 571 #define MIPS_CONF1_C2 (_ULCAST_(1) << 6)
572 #define MIPS_CONF1_DA_SHF 7 572 #define MIPS_CONF1_DA_SHF 7
573 #define MIPS_CONF1_DA_SZ 3 573 #define MIPS_CONF1_DA_SZ 3
574 #define MIPS_CONF1_DA (_ULCAST_(7) << 7) 574 #define MIPS_CONF1_DA (_ULCAST_(7) << 7)
575 #define MIPS_CONF1_DL_SHF 10 575 #define MIPS_CONF1_DL_SHF 10
576 #define MIPS_CONF1_DL_SZ 3 576 #define MIPS_CONF1_DL_SZ 3
577 #define MIPS_CONF1_DL (_ULCAST_(7) << 10) 577 #define MIPS_CONF1_DL (_ULCAST_(7) << 10)
578 #define MIPS_CONF1_DS_SHF 13 578 #define MIPS_CONF1_DS_SHF 13
579 #define MIPS_CONF1_DS_SZ 3 579 #define MIPS_CONF1_DS_SZ 3
580 #define MIPS_CONF1_DS (_ULCAST_(7) << 13) 580 #define MIPS_CONF1_DS (_ULCAST_(7) << 13)
581 #define MIPS_CONF1_IA_SHF 16 581 #define MIPS_CONF1_IA_SHF 16
582 #define MIPS_CONF1_IA_SZ 3 582 #define MIPS_CONF1_IA_SZ 3
583 #define MIPS_CONF1_IA (_ULCAST_(7) << 16) 583 #define MIPS_CONF1_IA (_ULCAST_(7) << 16)
584 #define MIPS_CONF1_IL_SHF 19 584 #define MIPS_CONF1_IL_SHF 19
585 #define MIPS_CONF1_IL_SZ 3 585 #define MIPS_CONF1_IL_SZ 3
586 #define MIPS_CONF1_IL (_ULCAST_(7) << 19) 586 #define MIPS_CONF1_IL (_ULCAST_(7) << 19)
587 #define MIPS_CONF1_IS_SHF 22 587 #define MIPS_CONF1_IS_SHF 22
588 #define MIPS_CONF1_IS_SZ 3 588 #define MIPS_CONF1_IS_SZ 3
589 #define MIPS_CONF1_IS (_ULCAST_(7) << 22) 589 #define MIPS_CONF1_IS (_ULCAST_(7) << 22)
590 #define MIPS_CONF1_TLBS_SHIFT (25) 590 #define MIPS_CONF1_TLBS_SHIFT (25)
591 #define MIPS_CONF1_TLBS_SIZE (6) 591 #define MIPS_CONF1_TLBS_SIZE (6)
592 #define MIPS_CONF1_TLBS (_ULCAST_(63) << MIPS_CONF1_TLBS_SHIFT) 592 #define MIPS_CONF1_TLBS (_ULCAST_(63) << MIPS_CONF1_TLBS_SHIFT)
593 593
594 #define MIPS_CONF2_SA (_ULCAST_(15)<< 0) 594 #define MIPS_CONF2_SA (_ULCAST_(15)<< 0)
595 #define MIPS_CONF2_SL (_ULCAST_(15)<< 4) 595 #define MIPS_CONF2_SL (_ULCAST_(15)<< 4)
596 #define MIPS_CONF2_SS (_ULCAST_(15)<< 8) 596 #define MIPS_CONF2_SS (_ULCAST_(15)<< 8)
597 #define MIPS_CONF2_SU (_ULCAST_(15)<< 12) 597 #define MIPS_CONF2_SU (_ULCAST_(15)<< 12)
598 #define MIPS_CONF2_TA (_ULCAST_(15)<< 16) 598 #define MIPS_CONF2_TA (_ULCAST_(15)<< 16)
599 #define MIPS_CONF2_TL (_ULCAST_(15)<< 20) 599 #define MIPS_CONF2_TL (_ULCAST_(15)<< 20)
600 #define MIPS_CONF2_TS (_ULCAST_(15)<< 24) 600 #define MIPS_CONF2_TS (_ULCAST_(15)<< 24)
601 #define MIPS_CONF2_TU (_ULCAST_(7) << 28) 601 #define MIPS_CONF2_TU (_ULCAST_(7) << 28)
602 602
603 #define MIPS_CONF3_TL (_ULCAST_(1) << 0) 603 #define MIPS_CONF3_TL (_ULCAST_(1) << 0)
604 #define MIPS_CONF3_SM (_ULCAST_(1) << 1) 604 #define MIPS_CONF3_SM (_ULCAST_(1) << 1)
605 #define MIPS_CONF3_MT (_ULCAST_(1) << 2) 605 #define MIPS_CONF3_MT (_ULCAST_(1) << 2)
606 #define MIPS_CONF3_CDMM (_ULCAST_(1) << 3) 606 #define MIPS_CONF3_CDMM (_ULCAST_(1) << 3)
607 #define MIPS_CONF3_SP (_ULCAST_(1) << 4) 607 #define MIPS_CONF3_SP (_ULCAST_(1) << 4)
608 #define MIPS_CONF3_VINT (_ULCAST_(1) << 5) 608 #define MIPS_CONF3_VINT (_ULCAST_(1) << 5)
609 #define MIPS_CONF3_VEIC (_ULCAST_(1) << 6) 609 #define MIPS_CONF3_VEIC (_ULCAST_(1) << 6)
610 #define MIPS_CONF3_LPA (_ULCAST_(1) << 7) 610 #define MIPS_CONF3_LPA (_ULCAST_(1) << 7)
611 #define MIPS_CONF3_ITL (_ULCAST_(1) << 8) 611 #define MIPS_CONF3_ITL (_ULCAST_(1) << 8)
612 #define MIPS_CONF3_CTXTC (_ULCAST_(1) << 9) 612 #define MIPS_CONF3_CTXTC (_ULCAST_(1) << 9)
613 #define MIPS_CONF3_DSP (_ULCAST_(1) << 10) 613 #define MIPS_CONF3_DSP (_ULCAST_(1) << 10)
614 #define MIPS_CONF3_DSP2P (_ULCAST_(1) << 11) 614 #define MIPS_CONF3_DSP2P (_ULCAST_(1) << 11)
615 #define MIPS_CONF3_RXI (_ULCAST_(1) << 12) 615 #define MIPS_CONF3_RXI (_ULCAST_(1) << 12)
616 #define MIPS_CONF3_ULRI (_ULCAST_(1) << 13) 616 #define MIPS_CONF3_ULRI (_ULCAST_(1) << 13)
617 #define MIPS_CONF3_ISA (_ULCAST_(3) << 14) 617 #define MIPS_CONF3_ISA (_ULCAST_(3) << 14)
618 #define MIPS_CONF3_ISA_OE (_ULCAST_(1) << 16) 618 #define MIPS_CONF3_ISA_OE (_ULCAST_(1) << 16)
619 #define MIPS_CONF3_MCU (_ULCAST_(1) << 17) 619 #define MIPS_CONF3_MCU (_ULCAST_(1) << 17)
620 #define MIPS_CONF3_MMAR (_ULCAST_(7) << 18) 620 #define MIPS_CONF3_MMAR (_ULCAST_(7) << 18)
621 #define MIPS_CONF3_IPLW (_ULCAST_(3) << 21) 621 #define MIPS_CONF3_IPLW (_ULCAST_(3) << 21)
622 #define MIPS_CONF3_VZ (_ULCAST_(1) << 23) 622 #define MIPS_CONF3_VZ (_ULCAST_(1) << 23)
623 #define MIPS_CONF3_PW (_ULCAST_(1) << 24) 623 #define MIPS_CONF3_PW (_ULCAST_(1) << 24)
624 #define MIPS_CONF3_SC (_ULCAST_(1) << 25) 624 #define MIPS_CONF3_SC (_ULCAST_(1) << 25)
625 #define MIPS_CONF3_BI (_ULCAST_(1) << 26) 625 #define MIPS_CONF3_BI (_ULCAST_(1) << 26)
626 #define MIPS_CONF3_BP (_ULCAST_(1) << 27) 626 #define MIPS_CONF3_BP (_ULCAST_(1) << 27)
627 #define MIPS_CONF3_MSA (_ULCAST_(1) << 28) 627 #define MIPS_CONF3_MSA (_ULCAST_(1) << 28)
628 #define MIPS_CONF3_CMGCR (_ULCAST_(1) << 29) 628 #define MIPS_CONF3_CMGCR (_ULCAST_(1) << 29)
629 #define MIPS_CONF3_BPG (_ULCAST_(1) << 30) 629 #define MIPS_CONF3_BPG (_ULCAST_(1) << 30)
630 630
631 #define MIPS_CONF4_MMUSIZEEXT_SHIFT (0) 631 #define MIPS_CONF4_MMUSIZEEXT_SHIFT (0)
632 #define MIPS_CONF4_MMUSIZEEXT (_ULCAST_(255) << 0) 632 #define MIPS_CONF4_MMUSIZEEXT (_ULCAST_(255) << 0)
633 #define MIPS_CONF4_FTLBSETS_SHIFT (0) 633 #define MIPS_CONF4_FTLBSETS_SHIFT (0)
634 #define MIPS_CONF4_FTLBSETS (_ULCAST_(15) << MIPS_CONF4_FTLBSETS_SHIFT) 634 #define MIPS_CONF4_FTLBSETS (_ULCAST_(15) << MIPS_CONF4_FTLBSETS_SHIFT)
635 #define MIPS_CONF4_FTLBWAYS_SHIFT (4) 635 #define MIPS_CONF4_FTLBWAYS_SHIFT (4)
636 #define MIPS_CONF4_FTLBWAYS (_ULCAST_(15) << MIPS_CONF4_FTLBWAYS_SHIFT) 636 #define MIPS_CONF4_FTLBWAYS (_ULCAST_(15) << MIPS_CONF4_FTLBWAYS_SHIFT)
637 #define MIPS_CONF4_FTLBPAGESIZE_SHIFT (8) 637 #define MIPS_CONF4_FTLBPAGESIZE_SHIFT (8)
638 /* bits 10:8 in FTLB-only configurations */ 638 /* bits 10:8 in FTLB-only configurations */
639 #define MIPS_CONF4_FTLBPAGESIZE (_ULCAST_(7) << MIPS_CONF4_FTLBPAGESIZE_SHIFT) 639 #define MIPS_CONF4_FTLBPAGESIZE (_ULCAST_(7) << MIPS_CONF4_FTLBPAGESIZE_SHIFT)
640 /* bits 12:8 in VTLB-FTLB only configurations */ 640 /* bits 12:8 in VTLB-FTLB only configurations */
641 #define MIPS_CONF4_VFTLBPAGESIZE (_ULCAST_(31) << MIPS_CONF4_FTLBPAGESIZE_SHIFT) 641 #define MIPS_CONF4_VFTLBPAGESIZE (_ULCAST_(31) << MIPS_CONF4_FTLBPAGESIZE_SHIFT)
642 #define MIPS_CONF4_MMUEXTDEF (_ULCAST_(3) << 14) 642 #define MIPS_CONF4_MMUEXTDEF (_ULCAST_(3) << 14)
643 #define MIPS_CONF4_MMUEXTDEF_MMUSIZEEXT (_ULCAST_(1) << 14) 643 #define MIPS_CONF4_MMUEXTDEF_MMUSIZEEXT (_ULCAST_(1) << 14)
644 #define MIPS_CONF4_MMUEXTDEF_FTLBSIZEEXT (_ULCAST_(2) << 14) 644 #define MIPS_CONF4_MMUEXTDEF_FTLBSIZEEXT (_ULCAST_(2) << 14)
645 #define MIPS_CONF4_MMUEXTDEF_VTLBSIZEEXT (_ULCAST_(3) << 14) 645 #define MIPS_CONF4_MMUEXTDEF_VTLBSIZEEXT (_ULCAST_(3) << 14)
646 #define MIPS_CONF4_KSCREXIST (_ULCAST_(255) << 16) 646 #define MIPS_CONF4_KSCREXIST (_ULCAST_(255) << 16)
647 #define MIPS_CONF4_VTLBSIZEEXT_SHIFT (24) 647 #define MIPS_CONF4_VTLBSIZEEXT_SHIFT (24)
648 #define MIPS_CONF4_VTLBSIZEEXT (_ULCAST_(15) << MIPS_CONF4_VTLBSIZEEXT_SHIFT) 648 #define MIPS_CONF4_VTLBSIZEEXT (_ULCAST_(15) << MIPS_CONF4_VTLBSIZEEXT_SHIFT)
649 #define MIPS_CONF4_AE (_ULCAST_(1) << 28) 649 #define MIPS_CONF4_AE (_ULCAST_(1) << 28)
650 #define MIPS_CONF4_IE (_ULCAST_(3) << 29) 650 #define MIPS_CONF4_IE (_ULCAST_(3) << 29)
651 #define MIPS_CONF4_TLBINV (_ULCAST_(2) << 29) 651 #define MIPS_CONF4_TLBINV (_ULCAST_(2) << 29)
652 652
653 #define MIPS_CONF5_NF (_ULCAST_(1) << 0) 653 #define MIPS_CONF5_NF (_ULCAST_(1) << 0)
654 #define MIPS_CONF5_UFR (_ULCAST_(1) << 2) 654 #define MIPS_CONF5_UFR (_ULCAST_(1) << 2)
655 #define MIPS_CONF5_MRP (_ULCAST_(1) << 3) 655 #define MIPS_CONF5_MRP (_ULCAST_(1) << 3)
656 #define MIPS_CONF5_MSAEN (_ULCAST_(1) << 27) 656 #define MIPS_CONF5_MSAEN (_ULCAST_(1) << 27)
657 #define MIPS_CONF5_EVA (_ULCAST_(1) << 28) 657 #define MIPS_CONF5_EVA (_ULCAST_(1) << 28)
658 #define MIPS_CONF5_CV (_ULCAST_(1) << 29) 658 #define MIPS_CONF5_CV (_ULCAST_(1) << 29)
659 #define MIPS_CONF5_K (_ULCAST_(1) << 30) 659 #define MIPS_CONF5_K (_ULCAST_(1) << 30)
660 660
661 #define MIPS_CONF6_SYND (_ULCAST_(1) << 13) 661 #define MIPS_CONF6_SYND (_ULCAST_(1) << 13)
662 /* proAptiv FTLB on/off bit */ 662 /* proAptiv FTLB on/off bit */
663 #define MIPS_CONF6_FTLBEN (_ULCAST_(1) << 15) 663 #define MIPS_CONF6_FTLBEN (_ULCAST_(1) << 15)
664 664
665 #define MIPS_CONF7_WII (_ULCAST_(1) << 31) 665 #define MIPS_CONF7_WII (_ULCAST_(1) << 31)
666 666
667 #define MIPS_CONF7_RPS (_ULCAST_(1) << 2) 667 #define MIPS_CONF7_RPS (_ULCAST_(1) << 2)
668 668
669 #define MIPS_CONF7_IAR (_ULCAST_(1) << 10) 669 #define MIPS_CONF7_IAR (_ULCAST_(1) << 10)
670 #define MIPS_CONF7_AR (_ULCAST_(1) << 16) 670 #define MIPS_CONF7_AR (_ULCAST_(1) << 16)
671 671
672 /* MAAR bit definitions */ 672 /* MAAR bit definitions */
673 #define MIPS_MAAR_ADDR ((BIT_ULL(BITS_PER_LONG - 12) - 1) << 12) 673 #define MIPS_MAAR_ADDR ((BIT_ULL(BITS_PER_LONG - 12) - 1) << 12)
674 #define MIPS_MAAR_ADDR_SHIFT 12 674 #define MIPS_MAAR_ADDR_SHIFT 12
675 #define MIPS_MAAR_S (_ULCAST_(1) << 1) 675 #define MIPS_MAAR_S (_ULCAST_(1) << 1)
676 #define MIPS_MAAR_V (_ULCAST_(1) << 0) 676 #define MIPS_MAAR_V (_ULCAST_(1) << 0)
677 677
678 /* EntryHI bit definition */ 678 /* EntryHI bit definition */
679 #define MIPS_ENTRYHI_EHINV (_ULCAST_(1) << 10) 679 #define MIPS_ENTRYHI_EHINV (_ULCAST_(1) << 10)
680 680
681 /* CMGCRBase bit definitions */ 681 /* CMGCRBase bit definitions */
682 #define MIPS_CMGCRB_BASE 11 682 #define MIPS_CMGCRB_BASE 11
683 #define MIPS_CMGCRF_BASE (~_ULCAST_((1 << MIPS_CMGCRB_BASE) - 1)) 683 #define MIPS_CMGCRF_BASE (~_ULCAST_((1 << MIPS_CMGCRB_BASE) - 1))
684 684
685 /* 685 /*
686 * Bits in the MIPS32/64 coprocessor 1 (FPU) revision register. 686 * Bits in the MIPS32/64 coprocessor 1 (FPU) revision register.
687 */ 687 */
688 #define MIPS_FPIR_S (_ULCAST_(1) << 16) 688 #define MIPS_FPIR_S (_ULCAST_(1) << 16)
689 #define MIPS_FPIR_D (_ULCAST_(1) << 17) 689 #define MIPS_FPIR_D (_ULCAST_(1) << 17)
690 #define MIPS_FPIR_PS (_ULCAST_(1) << 18) 690 #define MIPS_FPIR_PS (_ULCAST_(1) << 18)
691 #define MIPS_FPIR_3D (_ULCAST_(1) << 19) 691 #define MIPS_FPIR_3D (_ULCAST_(1) << 19)
692 #define MIPS_FPIR_W (_ULCAST_(1) << 20) 692 #define MIPS_FPIR_W (_ULCAST_(1) << 20)
693 #define MIPS_FPIR_L (_ULCAST_(1) << 21) 693 #define MIPS_FPIR_L (_ULCAST_(1) << 21)
694 #define MIPS_FPIR_F64 (_ULCAST_(1) << 22) 694 #define MIPS_FPIR_F64 (_ULCAST_(1) << 22)
695 695
696 /* 696 /*
697 * Bits in the MIPS32 Memory Segmentation registers. 697 * Bits in the MIPS32 Memory Segmentation registers.
698 */ 698 */
699 #define MIPS_SEGCFG_PA_SHIFT 9 699 #define MIPS_SEGCFG_PA_SHIFT 9
700 #define MIPS_SEGCFG_PA (_ULCAST_(127) << MIPS_SEGCFG_PA_SHIFT) 700 #define MIPS_SEGCFG_PA (_ULCAST_(127) << MIPS_SEGCFG_PA_SHIFT)
701 #define MIPS_SEGCFG_AM_SHIFT 4 701 #define MIPS_SEGCFG_AM_SHIFT 4
702 #define MIPS_SEGCFG_AM (_ULCAST_(7) << MIPS_SEGCFG_AM_SHIFT) 702 #define MIPS_SEGCFG_AM (_ULCAST_(7) << MIPS_SEGCFG_AM_SHIFT)
703 #define MIPS_SEGCFG_EU_SHIFT 3 703 #define MIPS_SEGCFG_EU_SHIFT 3
704 #define MIPS_SEGCFG_EU (_ULCAST_(1) << MIPS_SEGCFG_EU_SHIFT) 704 #define MIPS_SEGCFG_EU (_ULCAST_(1) << MIPS_SEGCFG_EU_SHIFT)
705 #define MIPS_SEGCFG_C_SHIFT 0 705 #define MIPS_SEGCFG_C_SHIFT 0
706 #define MIPS_SEGCFG_C (_ULCAST_(7) << MIPS_SEGCFG_C_SHIFT) 706 #define MIPS_SEGCFG_C (_ULCAST_(7) << MIPS_SEGCFG_C_SHIFT)
707 707
708 #define MIPS_SEGCFG_UUSK _ULCAST_(7) 708 #define MIPS_SEGCFG_UUSK _ULCAST_(7)
709 #define MIPS_SEGCFG_USK _ULCAST_(5) 709 #define MIPS_SEGCFG_USK _ULCAST_(5)
710 #define MIPS_SEGCFG_MUSUK _ULCAST_(4) 710 #define MIPS_SEGCFG_MUSUK _ULCAST_(4)
711 #define MIPS_SEGCFG_MUSK _ULCAST_(3) 711 #define MIPS_SEGCFG_MUSK _ULCAST_(3)
712 #define MIPS_SEGCFG_MSK _ULCAST_(2) 712 #define MIPS_SEGCFG_MSK _ULCAST_(2)
713 #define MIPS_SEGCFG_MK _ULCAST_(1) 713 #define MIPS_SEGCFG_MK _ULCAST_(1)
714 #define MIPS_SEGCFG_UK _ULCAST_(0) 714 #define MIPS_SEGCFG_UK _ULCAST_(0)
715 715
716 #define MIPS_PWFIELD_GDI_SHIFT 24 716 #define MIPS_PWFIELD_GDI_SHIFT 24
717 #define MIPS_PWFIELD_GDI_MASK 0x3f000000 717 #define MIPS_PWFIELD_GDI_MASK 0x3f000000
718 #define MIPS_PWFIELD_UDI_SHIFT 18 718 #define MIPS_PWFIELD_UDI_SHIFT 18
719 #define MIPS_PWFIELD_UDI_MASK 0x00fc0000 719 #define MIPS_PWFIELD_UDI_MASK 0x00fc0000
720 #define MIPS_PWFIELD_MDI_SHIFT 12 720 #define MIPS_PWFIELD_MDI_SHIFT 12
721 #define MIPS_PWFIELD_MDI_MASK 0x0003f000 721 #define MIPS_PWFIELD_MDI_MASK 0x0003f000
722 #define MIPS_PWFIELD_PTI_SHIFT 6 722 #define MIPS_PWFIELD_PTI_SHIFT 6
723 #define MIPS_PWFIELD_PTI_MASK 0x00000fc0 723 #define MIPS_PWFIELD_PTI_MASK 0x00000fc0
724 #define MIPS_PWFIELD_PTEI_SHIFT 0 724 #define MIPS_PWFIELD_PTEI_SHIFT 0
725 #define MIPS_PWFIELD_PTEI_MASK 0x0000003f 725 #define MIPS_PWFIELD_PTEI_MASK 0x0000003f
726 726
727 #define MIPS_PWSIZE_GDW_SHIFT 24 727 #define MIPS_PWSIZE_GDW_SHIFT 24
728 #define MIPS_PWSIZE_GDW_MASK 0x3f000000 728 #define MIPS_PWSIZE_GDW_MASK 0x3f000000
729 #define MIPS_PWSIZE_UDW_SHIFT 18 729 #define MIPS_PWSIZE_UDW_SHIFT 18
730 #define MIPS_PWSIZE_UDW_MASK 0x00fc0000 730 #define MIPS_PWSIZE_UDW_MASK 0x00fc0000
731 #define MIPS_PWSIZE_MDW_SHIFT 12 731 #define MIPS_PWSIZE_MDW_SHIFT 12
732 #define MIPS_PWSIZE_MDW_MASK 0x0003f000 732 #define MIPS_PWSIZE_MDW_MASK 0x0003f000
733 #define MIPS_PWSIZE_PTW_SHIFT 6 733 #define MIPS_PWSIZE_PTW_SHIFT 6
734 #define MIPS_PWSIZE_PTW_MASK 0x00000fc0 734 #define MIPS_PWSIZE_PTW_MASK 0x00000fc0
735 #define MIPS_PWSIZE_PTEW_SHIFT 0 735 #define MIPS_PWSIZE_PTEW_SHIFT 0
736 #define MIPS_PWSIZE_PTEW_MASK 0x0000003f 736 #define MIPS_PWSIZE_PTEW_MASK 0x0000003f
737 737
738 #define MIPS_PWCTL_PWEN_SHIFT 31 738 #define MIPS_PWCTL_PWEN_SHIFT 31
739 #define MIPS_PWCTL_PWEN_MASK 0x80000000 739 #define MIPS_PWCTL_PWEN_MASK 0x80000000
740 #define MIPS_PWCTL_DPH_SHIFT 7 740 #define MIPS_PWCTL_DPH_SHIFT 7
741 #define MIPS_PWCTL_DPH_MASK 0x00000080 741 #define MIPS_PWCTL_DPH_MASK 0x00000080
742 #define MIPS_PWCTL_HUGEPG_SHIFT 6 742 #define MIPS_PWCTL_HUGEPG_SHIFT 6
743 #define MIPS_PWCTL_HUGEPG_MASK 0x00000060 743 #define MIPS_PWCTL_HUGEPG_MASK 0x00000060
744 #define MIPS_PWCTL_PSN_SHIFT 0 744 #define MIPS_PWCTL_PSN_SHIFT 0
745 #define MIPS_PWCTL_PSN_MASK 0x0000003f 745 #define MIPS_PWCTL_PSN_MASK 0x0000003f
746 746
747 #ifndef __ASSEMBLY__ 747 #ifndef __ASSEMBLY__
748 748
749 /* 749 /*
750 * Macros for handling the ISA mode bit for MIPS16 and microMIPS. 750 * Macros for handling the ISA mode bit for MIPS16 and microMIPS.
751 */ 751 */
752 #if defined(CONFIG_SYS_SUPPORTS_MIPS16) || \ 752 #if defined(CONFIG_SYS_SUPPORTS_MIPS16) || \
753 defined(CONFIG_SYS_SUPPORTS_MICROMIPS) 753 defined(CONFIG_SYS_SUPPORTS_MICROMIPS)
754 #define get_isa16_mode(x) ((x) & 0x1) 754 #define get_isa16_mode(x) ((x) & 0x1)
755 #define msk_isa16_mode(x) ((x) & ~0x1) 755 #define msk_isa16_mode(x) ((x) & ~0x1)
756 #define set_isa16_mode(x) do { (x) |= 0x1; } while(0) 756 #define set_isa16_mode(x) do { (x) |= 0x1; } while(0)
757 #else 757 #else
758 #define get_isa16_mode(x) 0 758 #define get_isa16_mode(x) 0
759 #define msk_isa16_mode(x) (x) 759 #define msk_isa16_mode(x) (x)
760 #define set_isa16_mode(x) do { } while(0) 760 #define set_isa16_mode(x) do { } while(0)
761 #endif 761 #endif
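Editorial usage note (not part of this diff): these helpers fold the ISA-mode bit (bit 0) into program-counter style values. A minimal sketch using the read_c0_epc()/write_c0_epc() accessors defined further down; example_resume_to_isa16() is a hypothetical illustration only:

static inline unsigned long example_resume_to_isa16(void)
{
	unsigned long epc = read_c0_epc();	/* EPC may carry the ISA-mode bit */

	if (get_isa16_mode(epc))
		epc = msk_isa16_mode(epc);	/* strip bit 0 to get the fetch address */

	set_isa16_mode(epc);			/* re-tag before resuming 16-bit code */
	write_c0_epc(epc);
	return epc;
}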
762 762
763 /* 763 /*
764 * microMIPS instructions can be 16-bit or 32-bit in length. This 764 * microMIPS instructions can be 16-bit or 32-bit in length. This
765 * returns a 1 if the instruction is 16-bit and a 0 if 32-bit. 765 * returns a 1 if the instruction is 16-bit and a 0 if 32-bit.
766 */ 766 */
767 static inline int mm_insn_16bit(u16 insn) 767 static inline int mm_insn_16bit(u16 insn)
768 { 768 {
769 u16 opcode = (insn >> 10) & 0x7; 769 u16 opcode = (insn >> 10) & 0x7;
770 770
771 return (opcode >= 1 && opcode <= 3) ? 1 : 0; 771 return (opcode >= 1 && opcode <= 3) ? 1 : 0;
772 } 772 }
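A hedged sketch of how a decoder might consume this while stepping through a microMIPS instruction stream; example_mm_insn_size() and the ip buffer are hypothetical:

static inline unsigned int example_mm_insn_size(const u16 *ip)
{
	/* only the first halfword is needed to classify the encoding */
	return mm_insn_16bit(ip[0]) ? 2 : 4;	/* bytes to advance by */
}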
773 773
774 /* 774 /*
775 * TLB Invalidate Flush 775 * TLB Invalidate Flush
776 */ 776 */
777 static inline void tlbinvf(void) 777 static inline void tlbinvf(void)
778 { 778 {
779 __asm__ __volatile__( 779 __asm__ __volatile__(
780 ".set push\n\t" 780 ".set push\n\t"
781 ".set noreorder\n\t" 781 ".set noreorder\n\t"
782 ".word 0x42000004\n\t" /* tlbinvf */ 782 ".word 0x42000004\n\t" /* tlbinvf */
783 ".set pop"); 783 ".set pop");
784 } 784 }
785 785
786 786
787 /* 787 /*
788 * Functions to access the R10000 performance counters. These are basically 788 * Functions to access the R10000 performance counters. These are basically
789 * mfc0 and mtc0 instructions from and to coprocessor register with a 5-bit 789 * mfc0 and mtc0 instructions from and to coprocessor register with a 5-bit
790 * performance counter number encoded into bits 1 ... 5 of the instruction. 790 * performance counter number encoded into bits 1 ... 5 of the instruction.
791 * Only performance counters 0 to 1 actually exist, so for a non-R10000 aware 791 * Only performance counters 0 to 1 actually exist, so for a non-R10000 aware
792 * disassembler these will look like an access to sel 0 or 1. 792 * disassembler these will look like an access to sel 0 or 1.
793 */ 793 */
794 #define read_r10k_perf_cntr(counter) \ 794 #define read_r10k_perf_cntr(counter) \
795 ({ \ 795 ({ \
796 unsigned int __res; \ 796 unsigned int __res; \
797 __asm__ __volatile__( \ 797 __asm__ __volatile__( \
798 "mfpc\t%0, %1" \ 798 "mfpc\t%0, %1" \
799 : "=r" (__res) \ 799 : "=r" (__res) \
800 : "i" (counter)); \ 800 : "i" (counter)); \
801 \ 801 \
802 __res; \ 802 __res; \
803 }) 803 })
804 804
805 #define write_r10k_perf_cntr(counter,val) \ 805 #define write_r10k_perf_cntr(counter,val) \
806 do { \ 806 do { \
807 __asm__ __volatile__( \ 807 __asm__ __volatile__( \
808 "mtpc\t%0, %1" \ 808 "mtpc\t%0, %1" \
809 : \ 809 : \
810 : "r" (val), "i" (counter)); \ 810 : "r" (val), "i" (counter)); \
811 } while (0) 811 } while (0)
812 812
813 #define read_r10k_perf_event(counter) \ 813 #define read_r10k_perf_event(counter) \
814 ({ \ 814 ({ \
815 unsigned int __res; \ 815 unsigned int __res; \
816 __asm__ __volatile__( \ 816 __asm__ __volatile__( \
817 "mfps\t%0, %1" \ 817 "mfps\t%0, %1" \
818 : "=r" (__res) \ 818 : "=r" (__res) \
819 : "i" (counter)); \ 819 : "i" (counter)); \
820 \ 820 \
821 __res; \ 821 __res; \
822 }) 822 })
823 823
824 #define write_r10k_perf_cntl(counter,val) \ 824 #define write_r10k_perf_cntl(counter,val) \
825 do { \ 825 do { \
826 __asm__ __volatile__( \ 826 __asm__ __volatile__( \
827 "mtps\t%0, %1" \ 827 "mtps\t%0, %1" \
828 : \ 828 : \
829 : "r" (val), "i" (counter)); \ 829 : "r" (val), "i" (counter)); \
830 } while (0) 830 } while (0)
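A rough sketch (editorial) of how these wrappers pair up: the counter number is folded into the instruction encoding through the "i" constraint, so it must be a compile-time constant; the event code 0 below is purely illustrative:

static inline unsigned int example_r10k_sample_counter0(void)
{
	write_r10k_perf_cntl(0, 0);	/* select an event for counter 0 (value illustrative) */
	write_r10k_perf_cntr(0, 0);	/* zero the counter */
	/* ... run the code being measured ... */
	return read_r10k_perf_cntr(0);
}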
831 831
832 832
833 /* 833 /*
834 * Macros to access the system control coprocessor 834 * Macros to access the system control coprocessor
835 */ 835 */
836 836
837 #define __read_32bit_c0_register(source, sel) \ 837 #define __read_32bit_c0_register(source, sel) \
838 ({ int __res; \ 838 ({ int __res; \
839 if (sel == 0) \ 839 if (sel == 0) \
840 __asm__ __volatile__( \ 840 __asm__ __volatile__( \
841 "mfc0\t%0, " #source "\n\t" \ 841 "mfc0\t%0, " #source "\n\t" \
842 : "=r" (__res)); \ 842 : "=r" (__res)); \
843 else \ 843 else \
844 __asm__ __volatile__( \ 844 __asm__ __volatile__( \
845 ".set\tmips32\n\t" \ 845 ".set\tmips32\n\t" \
846 "mfc0\t%0, " #source ", " #sel "\n\t" \ 846 "mfc0\t%0, " #source ", " #sel "\n\t" \
847 ".set\tmips0\n\t" \ 847 ".set\tmips0\n\t" \
848 : "=r" (__res)); \ 848 : "=r" (__res)); \
849 __res; \ 849 __res; \
850 }) 850 })
851 851
852 #define __read_64bit_c0_register(source, sel) \ 852 #define __read_64bit_c0_register(source, sel) \
853 ({ unsigned long long __res; \ 853 ({ unsigned long long __res; \
854 if (sizeof(unsigned long) == 4) \ 854 if (sizeof(unsigned long) == 4) \
855 __res = __read_64bit_c0_split(source, sel); \ 855 __res = __read_64bit_c0_split(source, sel); \
856 else if (sel == 0) \ 856 else if (sel == 0) \
857 __asm__ __volatile__( \ 857 __asm__ __volatile__( \
858 ".set\tmips3\n\t" \ 858 ".set\tmips3\n\t" \
859 "dmfc0\t%0, " #source "\n\t" \ 859 "dmfc0\t%0, " #source "\n\t" \
860 ".set\tmips0" \ 860 ".set\tmips0" \
861 : "=r" (__res)); \ 861 : "=r" (__res)); \
862 else \ 862 else \
863 __asm__ __volatile__( \ 863 __asm__ __volatile__( \
864 ".set\tmips64\n\t" \ 864 ".set\tmips64\n\t" \
865 "dmfc0\t%0, " #source ", " #sel "\n\t" \ 865 "dmfc0\t%0, " #source ", " #sel "\n\t" \
866 ".set\tmips0" \ 866 ".set\tmips0" \
867 : "=r" (__res)); \ 867 : "=r" (__res)); \
868 __res; \ 868 __res; \
869 }) 869 })
870 870
871 #define __write_32bit_c0_register(register, sel, value) \ 871 #define __write_32bit_c0_register(register, sel, value) \
872 do { \ 872 do { \
873 if (sel == 0) \ 873 if (sel == 0) \
874 __asm__ __volatile__( \ 874 __asm__ __volatile__( \
875 "mtc0\t%z0, " #register "\n\t" \ 875 "mtc0\t%z0, " #register "\n\t" \
876 : : "Jr" ((unsigned int)(value))); \ 876 : : "Jr" ((unsigned int)(value))); \
877 else \ 877 else \
878 __asm__ __volatile__( \ 878 __asm__ __volatile__( \
879 ".set\tmips32\n\t" \ 879 ".set\tmips32\n\t" \
880 "mtc0\t%z0, " #register ", " #sel "\n\t" \ 880 "mtc0\t%z0, " #register ", " #sel "\n\t" \
881 ".set\tmips0" \ 881 ".set\tmips0" \
882 : : "Jr" ((unsigned int)(value))); \ 882 : : "Jr" ((unsigned int)(value))); \
883 } while (0) 883 } while (0)
884 884
885 #define __write_64bit_c0_register(register, sel, value) \ 885 #define __write_64bit_c0_register(register, sel, value) \
886 do { \ 886 do { \
887 if (sizeof(unsigned long) == 4) \ 887 if (sizeof(unsigned long) == 4) \
888 __write_64bit_c0_split(register, sel, value); \ 888 __write_64bit_c0_split(register, sel, value); \
889 else if (sel == 0) \ 889 else if (sel == 0) \
890 __asm__ __volatile__( \ 890 __asm__ __volatile__( \
891 ".set\tmips3\n\t" \ 891 ".set\tmips3\n\t" \
892 "dmtc0\t%z0, " #register "\n\t" \ 892 "dmtc0\t%z0, " #register "\n\t" \
893 ".set\tmips0" \ 893 ".set\tmips0" \
894 : : "Jr" (value)); \ 894 : : "Jr" (value)); \
895 else \ 895 else \
896 __asm__ __volatile__( \ 896 __asm__ __volatile__( \
897 ".set\tmips64\n\t" \ 897 ".set\tmips64\n\t" \
898 "dmtc0\t%z0, " #register ", " #sel "\n\t" \ 898 "dmtc0\t%z0, " #register ", " #sel "\n\t" \
899 ".set\tmips0" \ 899 ".set\tmips0" \
900 : : "Jr" (value)); \ 900 : : "Jr" (value)); \
901 } while (0) 901 } while (0)
902 902
903 #define __read_ulong_c0_register(reg, sel) \ 903 #define __read_ulong_c0_register(reg, sel) \
904 ((sizeof(unsigned long) == 4) ? \ 904 ((sizeof(unsigned long) == 4) ? \
905 (unsigned long) __read_32bit_c0_register(reg, sel) : \ 905 (unsigned long) __read_32bit_c0_register(reg, sel) : \
906 (unsigned long) __read_64bit_c0_register(reg, sel)) 906 (unsigned long) __read_64bit_c0_register(reg, sel))
907 907
908 #define __write_ulong_c0_register(reg, sel, val) \ 908 #define __write_ulong_c0_register(reg, sel, val) \
909 do { \ 909 do { \
910 if (sizeof(unsigned long) == 4) \ 910 if (sizeof(unsigned long) == 4) \
911 __write_32bit_c0_register(reg, sel, val); \ 911 __write_32bit_c0_register(reg, sel, val); \
912 else \ 912 else \
913 __write_64bit_c0_register(reg, sel, val); \ 913 __write_64bit_c0_register(reg, sel, val); \
914 } while (0) 914 } while (0)
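Editorial note: because the register name is pasted into the asm string via #source / #register, callers must pass a literal $n coprocessor register and a constant select; the named wrappers below are all built this way. A minimal sketch mirroring the pattern (the example_* names are hypothetical and equivalent to the EntryHi accessors defined further down):

#define example_read_entryhi()		__read_ulong_c0_register($10, 0)
#define example_write_entryhi(val)	__write_ulong_c0_register($10, 0, val)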
915 915
916 /* 916 /*
917 * On RM7000/RM9000 these are used to access cop0 set 1 registers 917 * On RM7000/RM9000 these are used to access cop0 set 1 registers
918 */ 918 */
919 #define __read_32bit_c0_ctrl_register(source) \ 919 #define __read_32bit_c0_ctrl_register(source) \
920 ({ int __res; \ 920 ({ int __res; \
921 __asm__ __volatile__( \ 921 __asm__ __volatile__( \
922 "cfc0\t%0, " #source "\n\t" \ 922 "cfc0\t%0, " #source "\n\t" \
923 : "=r" (__res)); \ 923 : "=r" (__res)); \
924 __res; \ 924 __res; \
925 }) 925 })
926 926
927 #define __write_32bit_c0_ctrl_register(register, value) \ 927 #define __write_32bit_c0_ctrl_register(register, value) \
928 do { \ 928 do { \
929 __asm__ __volatile__( \ 929 __asm__ __volatile__( \
930 "ctc0\t%z0, " #register "\n\t" \ 930 "ctc0\t%z0, " #register "\n\t" \
931 : : "Jr" ((unsigned int)(value))); \ 931 : : "Jr" ((unsigned int)(value))); \
932 } while (0) 932 } while (0)
933 933
934 /* 934 /*
935 * These versions are only needed for systems with more than 38 bits of 935 * These versions are only needed for systems with more than 38 bits of
936 * physical address space running the 32-bit kernel. That's none atm :-) 936 * physical address space running the 32-bit kernel. That's none atm :-)
937 */ 937 */
938 #define __read_64bit_c0_split(source, sel) \ 938 #define __read_64bit_c0_split(source, sel) \
939 ({ \ 939 ({ \
940 unsigned long long __val; \ 940 unsigned long long __val; \
941 unsigned long __flags; \ 941 unsigned long __flags; \
942 \ 942 \
943 local_irq_save(__flags); \ 943 local_irq_save(__flags); \
944 if (sel == 0) \ 944 if (sel == 0) \
945 __asm__ __volatile__( \ 945 __asm__ __volatile__( \
946 ".set\tmips64\n\t" \ 946 ".set\tmips64\n\t" \
947 "dmfc0\t%M0, " #source "\n\t" \ 947 "dmfc0\t%M0, " #source "\n\t" \
948 "dsll\t%L0, %M0, 32\n\t" \ 948 "dsll\t%L0, %M0, 32\n\t" \
949 "dsra\t%M0, %M0, 32\n\t" \ 949 "dsra\t%M0, %M0, 32\n\t" \
950 "dsra\t%L0, %L0, 32\n\t" \ 950 "dsra\t%L0, %L0, 32\n\t" \
951 ".set\tmips0" \ 951 ".set\tmips0" \
952 : "=r" (__val)); \ 952 : "=r" (__val)); \
953 else \ 953 else \
954 __asm__ __volatile__( \ 954 __asm__ __volatile__( \
955 ".set\tmips64\n\t" \ 955 ".set\tmips64\n\t" \
956 "dmfc0\t%M0, " #source ", " #sel "\n\t" \ 956 "dmfc0\t%M0, " #source ", " #sel "\n\t" \
957 "dsll\t%L0, %M0, 32\n\t" \ 957 "dsll\t%L0, %M0, 32\n\t" \
958 "dsra\t%M0, %M0, 32\n\t" \ 958 "dsra\t%M0, %M0, 32\n\t" \
959 "dsra\t%L0, %L0, 32\n\t" \ 959 "dsra\t%L0, %L0, 32\n\t" \
960 ".set\tmips0" \ 960 ".set\tmips0" \
961 : "=r" (__val)); \ 961 : "=r" (__val)); \
962 local_irq_restore(__flags); \ 962 local_irq_restore(__flags); \
963 \ 963 \
964 __val; \ 964 __val; \
965 }) 965 })
966 966
967 #define __write_64bit_c0_split(source, sel, val) \ 967 #define __write_64bit_c0_split(source, sel, val) \
968 do { \ 968 do { \
969 unsigned long __flags; \ 969 unsigned long __flags; \
970 \ 970 \
971 local_irq_save(__flags); \ 971 local_irq_save(__flags); \
972 if (sel == 0) \ 972 if (sel == 0) \
973 __asm__ __volatile__( \ 973 __asm__ __volatile__( \
974 ".set\tmips64\n\t" \ 974 ".set\tmips64\n\t" \
975 "dsll\t%L0, %L0, 32\n\t" \ 975 "dsll\t%L0, %L0, 32\n\t" \
976 "dsrl\t%L0, %L0, 32\n\t" \ 976 "dsrl\t%L0, %L0, 32\n\t" \
977 "dsll\t%M0, %M0, 32\n\t" \ 977 "dsll\t%M0, %M0, 32\n\t" \
978 "or\t%L0, %L0, %M0\n\t" \ 978 "or\t%L0, %L0, %M0\n\t" \
979 "dmtc0\t%L0, " #source "\n\t" \ 979 "dmtc0\t%L0, " #source "\n\t" \
980 ".set\tmips0" \ 980 ".set\tmips0" \
981 : : "r" (val)); \ 981 : : "r" (val)); \
982 else \ 982 else \
983 __asm__ __volatile__( \ 983 __asm__ __volatile__( \
984 ".set\tmips64\n\t" \ 984 ".set\tmips64\n\t" \
985 "dsll\t%L0, %L0, 32\n\t" \ 985 "dsll\t%L0, %L0, 32\n\t" \
986 "dsrl\t%L0, %L0, 32\n\t" \ 986 "dsrl\t%L0, %L0, 32\n\t" \
987 "dsll\t%M0, %M0, 32\n\t" \ 987 "dsll\t%M0, %M0, 32\n\t" \
988 "or\t%L0, %L0, %M0\n\t" \ 988 "or\t%L0, %L0, %M0\n\t" \
989 "dmtc0\t%L0, " #source ", " #sel "\n\t" \ 989 "dmtc0\t%L0, " #source ", " #sel "\n\t" \
990 ".set\tmips0" \ 990 ".set\tmips0" \
991 : : "r" (val)); \ 991 : : "r" (val)); \
992 local_irq_restore(__flags); \ 992 local_irq_restore(__flags); \
993 } while (0) 993 } while (0)
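On a 32-bit kernel sizeof(unsigned long) == 4, so the 64-bit accessors above route through these split sequences under local_irq_save(). A hedged sketch; XContext ($20, select 0) is used purely as an example of a 64-bit-wide register, and example_read_xcontext_wide() is hypothetical:

static inline unsigned long long example_read_xcontext_wide(void)
{
	/* expands to __read_64bit_c0_split($20, 0) on a 32-bit kernel */
	return __read_64bit_c0_register($20, 0);
}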
994 994
995 #define read_c0_index() __read_32bit_c0_register($0, 0) 995 #define read_c0_index() __read_32bit_c0_register($0, 0)
996 #define write_c0_index(val) __write_32bit_c0_register($0, 0, val) 996 #define write_c0_index(val) __write_32bit_c0_register($0, 0, val)
997 997
998 #define read_c0_random() __read_32bit_c0_register($1, 0) 998 #define read_c0_random() __read_32bit_c0_register($1, 0)
999 #define write_c0_random(val) __write_32bit_c0_register($1, 0, val) 999 #define write_c0_random(val) __write_32bit_c0_register($1, 0, val)
1000 1000
1001 #define read_c0_entrylo0() __read_ulong_c0_register($2, 0) 1001 #define read_c0_entrylo0() __read_ulong_c0_register($2, 0)
1002 #define write_c0_entrylo0(val) __write_ulong_c0_register($2, 0, val) 1002 #define write_c0_entrylo0(val) __write_ulong_c0_register($2, 0, val)
1003 1003
1004 #define read_c0_entrylo1() __read_ulong_c0_register($3, 0) 1004 #define read_c0_entrylo1() __read_ulong_c0_register($3, 0)
1005 #define write_c0_entrylo1(val) __write_ulong_c0_register($3, 0, val) 1005 #define write_c0_entrylo1(val) __write_ulong_c0_register($3, 0, val)
1006 1006
1007 #define read_c0_conf() __read_32bit_c0_register($3, 0) 1007 #define read_c0_conf() __read_32bit_c0_register($3, 0)
1008 #define write_c0_conf(val) __write_32bit_c0_register($3, 0, val) 1008 #define write_c0_conf(val) __write_32bit_c0_register($3, 0, val)
1009 1009
1010 #define read_c0_context() __read_ulong_c0_register($4, 0) 1010 #define read_c0_context() __read_ulong_c0_register($4, 0)
1011 #define write_c0_context(val) __write_ulong_c0_register($4, 0, val) 1011 #define write_c0_context(val) __write_ulong_c0_register($4, 0, val)
1012 1012
1013 #define read_c0_userlocal() __read_ulong_c0_register($4, 2) 1013 #define read_c0_userlocal() __read_ulong_c0_register($4, 2)
1014 #define write_c0_userlocal(val) __write_ulong_c0_register($4, 2, val) 1014 #define write_c0_userlocal(val) __write_ulong_c0_register($4, 2, val)
1015 1015
1016 #define read_c0_pagemask() __read_32bit_c0_register($5, 0) 1016 #define read_c0_pagemask() __read_32bit_c0_register($5, 0)
1017 #define write_c0_pagemask(val) __write_32bit_c0_register($5, 0, val) 1017 #define write_c0_pagemask(val) __write_32bit_c0_register($5, 0, val)
1018 1018
1019 #define read_c0_pagegrain() __read_32bit_c0_register($5, 1) 1019 #define read_c0_pagegrain() __read_32bit_c0_register($5, 1)
1020 #define write_c0_pagegrain(val) __write_32bit_c0_register($5, 1, val) 1020 #define write_c0_pagegrain(val) __write_32bit_c0_register($5, 1, val)
1021 1021
1022 #define read_c0_wired() __read_32bit_c0_register($6, 0) 1022 #define read_c0_wired() __read_32bit_c0_register($6, 0)
1023 #define write_c0_wired(val) __write_32bit_c0_register($6, 0, val) 1023 #define write_c0_wired(val) __write_32bit_c0_register($6, 0, val)
1024 1024
1025 #define read_c0_info() __read_32bit_c0_register($7, 0) 1025 #define read_c0_info() __read_32bit_c0_register($7, 0)
1026 1026
1027 #define read_c0_cache() __read_32bit_c0_register($7, 0) /* TX39xx */ 1027 #define read_c0_cache() __read_32bit_c0_register($7, 0) /* TX39xx */
1028 #define write_c0_cache(val) __write_32bit_c0_register($7, 0, val) 1028 #define write_c0_cache(val) __write_32bit_c0_register($7, 0, val)
1029 1029
1030 #define read_c0_badvaddr() __read_ulong_c0_register($8, 0) 1030 #define read_c0_badvaddr() __read_ulong_c0_register($8, 0)
1031 #define write_c0_badvaddr(val) __write_ulong_c0_register($8, 0, val) 1031 #define write_c0_badvaddr(val) __write_ulong_c0_register($8, 0, val)
1032 1032
1033 #define read_c0_count() __read_32bit_c0_register($9, 0) 1033 #define read_c0_count() __read_32bit_c0_register($9, 0)
1034 #define write_c0_count(val) __write_32bit_c0_register($9, 0, val) 1034 #define write_c0_count(val) __write_32bit_c0_register($9, 0, val)
1035 1035
1036 #define read_c0_count2() __read_32bit_c0_register($9, 6) /* pnx8550 */ 1036 #define read_c0_count2() __read_32bit_c0_register($9, 6) /* pnx8550 */
1037 #define write_c0_count2(val) __write_32bit_c0_register($9, 6, val) 1037 #define write_c0_count2(val) __write_32bit_c0_register($9, 6, val)
1038 1038
1039 #define read_c0_count3() __read_32bit_c0_register($9, 7) /* pnx8550 */ 1039 #define read_c0_count3() __read_32bit_c0_register($9, 7) /* pnx8550 */
1040 #define write_c0_count3(val) __write_32bit_c0_register($9, 7, val) 1040 #define write_c0_count3(val) __write_32bit_c0_register($9, 7, val)
1041 1041
1042 #define read_c0_entryhi() __read_ulong_c0_register($10, 0) 1042 #define read_c0_entryhi() __read_ulong_c0_register($10, 0)
1043 #define write_c0_entryhi(val) __write_ulong_c0_register($10, 0, val) 1043 #define write_c0_entryhi(val) __write_ulong_c0_register($10, 0, val)
1044 1044
1045 #define read_c0_compare() __read_32bit_c0_register($11, 0) 1045 #define read_c0_compare() __read_32bit_c0_register($11, 0)
1046 #define write_c0_compare(val) __write_32bit_c0_register($11, 0, val) 1046 #define write_c0_compare(val) __write_32bit_c0_register($11, 0, val)
1047 1047
1048 #define read_c0_compare2() __read_32bit_c0_register($11, 6) /* pnx8550 */ 1048 #define read_c0_compare2() __read_32bit_c0_register($11, 6) /* pnx8550 */
1049 #define write_c0_compare2(val) __write_32bit_c0_register($11, 6, val) 1049 #define write_c0_compare2(val) __write_32bit_c0_register($11, 6, val)
1050 1050
1051 #define read_c0_compare3() __read_32bit_c0_register($11, 7) /* pnx8550 */ 1051 #define read_c0_compare3() __read_32bit_c0_register($11, 7) /* pnx8550 */
1052 #define write_c0_compare3(val) __write_32bit_c0_register($11, 7, val) 1052 #define write_c0_compare3(val) __write_32bit_c0_register($11, 7, val)
1053 1053
1054 #define read_c0_status() __read_32bit_c0_register($12, 0) 1054 #define read_c0_status() __read_32bit_c0_register($12, 0)
1055 1055
1056 #define write_c0_status(val) __write_32bit_c0_register($12, 0, val) 1056 #define write_c0_status(val) __write_32bit_c0_register($12, 0, val)
1057 1057
1058 #define read_c0_cause() __read_32bit_c0_register($13, 0) 1058 #define read_c0_cause() __read_32bit_c0_register($13, 0)
1059 #define write_c0_cause(val) __write_32bit_c0_register($13, 0, val) 1059 #define write_c0_cause(val) __write_32bit_c0_register($13, 0, val)
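Status and Cause are normally updated read-modify-write with the accessors above. A minimal sketch; the 0x1 constant is the architectural Status.IE bit written out explicitly here (the kernel's symbolic ST0_* names live elsewhere in this header):

static inline void example_enable_interrupts(void)
{
	unsigned int status = read_c0_status();

	write_c0_status(status | 0x1);	/* Status.IE per the MIPS32 PRA */
}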
1060 1060
1061 #define read_c0_epc() __read_ulong_c0_register($14, 0) 1061 #define read_c0_epc() __read_ulong_c0_register($14, 0)
1062 #define write_c0_epc(val) __write_ulong_c0_register($14, 0, val) 1062 #define write_c0_epc(val) __write_ulong_c0_register($14, 0, val)
1063 1063
1064 #define read_c0_prid() __read_32bit_c0_register($15, 0) 1064 #define read_c0_prid() __read_32bit_c0_register($15, 0)
1065 1065
1066 #define read_c0_cmgcrbase() __read_ulong_c0_register($15, 3) 1066 #define read_c0_cmgcrbase() __read_ulong_c0_register($15, 3)
1067 1067
1068 #define read_c0_config() __read_32bit_c0_register($16, 0) 1068 #define read_c0_config() __read_32bit_c0_register($16, 0)
1069 #define read_c0_config1() __read_32bit_c0_register($16, 1) 1069 #define read_c0_config1() __read_32bit_c0_register($16, 1)
1070 #define read_c0_config2() __read_32bit_c0_register($16, 2) 1070 #define read_c0_config2() __read_32bit_c0_register($16, 2)
1071 #define read_c0_config3() __read_32bit_c0_register($16, 3) 1071 #define read_c0_config3() __read_32bit_c0_register($16, 3)
1072 #define read_c0_config4() __read_32bit_c0_register($16, 4) 1072 #define read_c0_config4() __read_32bit_c0_register($16, 4)
1073 #define read_c0_config5() __read_32bit_c0_register($16, 5) 1073 #define read_c0_config5() __read_32bit_c0_register($16, 5)
1074 #define read_c0_config6() __read_32bit_c0_register($16, 6) 1074 #define read_c0_config6() __read_32bit_c0_register($16, 6)
1075 #define read_c0_config7() __read_32bit_c0_register($16, 7) 1075 #define read_c0_config7() __read_32bit_c0_register($16, 7)
1076 #define write_c0_config(val) __write_32bit_c0_register($16, 0, val) 1076 #define write_c0_config(val) __write_32bit_c0_register($16, 0, val)
1077 #define write_c0_config1(val) __write_32bit_c0_register($16, 1, val) 1077 #define write_c0_config1(val) __write_32bit_c0_register($16, 1, val)
1078 #define write_c0_config2(val) __write_32bit_c0_register($16, 2, val) 1078 #define write_c0_config2(val) __write_32bit_c0_register($16, 2, val)
1079 #define write_c0_config3(val) __write_32bit_c0_register($16, 3, val) 1079 #define write_c0_config3(val) __write_32bit_c0_register($16, 3, val)
1080 #define write_c0_config4(val) __write_32bit_c0_register($16, 4, val) 1080 #define write_c0_config4(val) __write_32bit_c0_register($16, 4, val)
1081 #define write_c0_config5(val) __write_32bit_c0_register($16, 5, val) 1081 #define write_c0_config5(val) __write_32bit_c0_register($16, 5, val)
1082 #define write_c0_config6(val) __write_32bit_c0_register($16, 6, val) 1082 #define write_c0_config6(val) __write_32bit_c0_register($16, 6, val)
1083 #define write_c0_config7(val) __write_32bit_c0_register($16, 7, val) 1083 #define write_c0_config7(val) __write_32bit_c0_register($16, 7, val)
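A hedged sketch of decoding primary I-cache geometry from Config1 with the MIPS_CONF1_I* fields defined earlier; the formulas follow the MIPS32 PRA encodings, and the special cases (IL = 0 meaning no I-cache, IS = 7 meaning 32 sets) are deliberately glossed over:

static inline unsigned int example_icache_size(void)
{
	unsigned int cfg1 = read_c0_config1();
	unsigned int sets = 64 << ((cfg1 & MIPS_CONF1_IS) >> MIPS_CONF1_IS_SHF);
	unsigned int line = 2 << ((cfg1 & MIPS_CONF1_IL) >> MIPS_CONF1_IL_SHF);
	unsigned int ways = 1 + ((cfg1 & MIPS_CONF1_IA) >> MIPS_CONF1_IA_SHF);

	return sets * line * ways;
}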
1084 1084
1085 #define read_c0_maar() __read_ulong_c0_register($17, 1) 1085 #define read_c0_maar() __read_ulong_c0_register($17, 1)
1086 #define write_c0_maar(val) __write_ulong_c0_register($17, 1, val) 1086 #define write_c0_maar(val) __write_ulong_c0_register($17, 1, val)
1087 #define read_c0_maari() __read_32bit_c0_register($17, 2) 1087 #define read_c0_maari() __read_32bit_c0_register($17, 2)
1088 #define write_c0_maari(val) __write_32bit_c0_register($17, 2, val) 1088 #define write_c0_maari(val) __write_32bit_c0_register($17, 2, val)
1089 1089
1090 /* 1090 /*
1091 * The WatchLo register. There may be up to 8 of them. 1091 * The WatchLo register. There may be up to 8 of them.
1092 */ 1092 */
1093 #define read_c0_watchlo0() __read_ulong_c0_register($18, 0) 1093 #define read_c0_watchlo0() __read_ulong_c0_register($18, 0)
1094 #define read_c0_watchlo1() __read_ulong_c0_register($18, 1) 1094 #define read_c0_watchlo1() __read_ulong_c0_register($18, 1)
1095 #define read_c0_watchlo2() __read_ulong_c0_register($18, 2) 1095 #define read_c0_watchlo2() __read_ulong_c0_register($18, 2)
1096 #define read_c0_watchlo3() __read_ulong_c0_register($18, 3) 1096 #define read_c0_watchlo3() __read_ulong_c0_register($18, 3)
1097 #define read_c0_watchlo4() __read_ulong_c0_register($18, 4) 1097 #define read_c0_watchlo4() __read_ulong_c0_register($18, 4)
1098 #define read_c0_watchlo5() __read_ulong_c0_register($18, 5) 1098 #define read_c0_watchlo5() __read_ulong_c0_register($18, 5)
1099 #define read_c0_watchlo6() __read_ulong_c0_register($18, 6) 1099 #define read_c0_watchlo6() __read_ulong_c0_register($18, 6)
1100 #define read_c0_watchlo7() __read_ulong_c0_register($18, 7) 1100 #define read_c0_watchlo7() __read_ulong_c0_register($18, 7)
1101 #define write_c0_watchlo0(val) __write_ulong_c0_register($18, 0, val) 1101 #define write_c0_watchlo0(val) __write_ulong_c0_register($18, 0, val)
1102 #define write_c0_watchlo1(val) __write_ulong_c0_register($18, 1, val) 1102 #define write_c0_watchlo1(val) __write_ulong_c0_register($18, 1, val)
1103 #define write_c0_watchlo2(val) __write_ulong_c0_register($18, 2, val) 1103 #define write_c0_watchlo2(val) __write_ulong_c0_register($18, 2, val)
1104 #define write_c0_watchlo3(val) __write_ulong_c0_register($18, 3, val) 1104 #define write_c0_watchlo3(val) __write_ulong_c0_register($18, 3, val)
1105 #define write_c0_watchlo4(val) __write_ulong_c0_register($18, 4, val) 1105 #define write_c0_watchlo4(val) __write_ulong_c0_register($18, 4, val)
1106 #define write_c0_watchlo5(val) __write_ulong_c0_register($18, 5, val) 1106 #define write_c0_watchlo5(val) __write_ulong_c0_register($18, 5, val)
1107 #define write_c0_watchlo6(val) __write_ulong_c0_register($18, 6, val) 1107 #define write_c0_watchlo6(val) __write_ulong_c0_register($18, 6, val)
1108 #define write_c0_watchlo7(val) __write_ulong_c0_register($18, 7, val) 1108 #define write_c0_watchlo7(val) __write_ulong_c0_register($18, 7, val)
1109 1109
1110 /* 1110 /*
1111 * The WatchHi register. There may be up to 8 of them. 1111 * The WatchHi register. There may be up to 8 of them.
1112 */ 1112 */
1113 #define read_c0_watchhi0() __read_32bit_c0_register($19, 0) 1113 #define read_c0_watchhi0() __read_32bit_c0_register($19, 0)
1114 #define read_c0_watchhi1() __read_32bit_c0_register($19, 1) 1114 #define read_c0_watchhi1() __read_32bit_c0_register($19, 1)
1115 #define read_c0_watchhi2() __read_32bit_c0_register($19, 2) 1115 #define read_c0_watchhi2() __read_32bit_c0_register($19, 2)
1116 #define read_c0_watchhi3() __read_32bit_c0_register($19, 3) 1116 #define read_c0_watchhi3() __read_32bit_c0_register($19, 3)
1117 #define read_c0_watchhi4() __read_32bit_c0_register($19, 4) 1117 #define read_c0_watchhi4() __read_32bit_c0_register($19, 4)
1118 #define read_c0_watchhi5() __read_32bit_c0_register($19, 5) 1118 #define read_c0_watchhi5() __read_32bit_c0_register($19, 5)
1119 #define read_c0_watchhi6() __read_32bit_c0_register($19, 6) 1119 #define read_c0_watchhi6() __read_32bit_c0_register($19, 6)
1120 #define read_c0_watchhi7() __read_32bit_c0_register($19, 7) 1120 #define read_c0_watchhi7() __read_32bit_c0_register($19, 7)
1121 1121
1122 #define write_c0_watchhi0(val) __write_32bit_c0_register($19, 0, val) 1122 #define write_c0_watchhi0(val) __write_32bit_c0_register($19, 0, val)
1123 #define write_c0_watchhi1(val) __write_32bit_c0_register($19, 1, val) 1123 #define write_c0_watchhi1(val) __write_32bit_c0_register($19, 1, val)
1124 #define write_c0_watchhi2(val) __write_32bit_c0_register($19, 2, val) 1124 #define write_c0_watchhi2(val) __write_32bit_c0_register($19, 2, val)
1125 #define write_c0_watchhi3(val) __write_32bit_c0_register($19, 3, val) 1125 #define write_c0_watchhi3(val) __write_32bit_c0_register($19, 3, val)
1126 #define write_c0_watchhi4(val) __write_32bit_c0_register($19, 4, val) 1126 #define write_c0_watchhi4(val) __write_32bit_c0_register($19, 4, val)
1127 #define write_c0_watchhi5(val) __write_32bit_c0_register($19, 5, val) 1127 #define write_c0_watchhi5(val) __write_32bit_c0_register($19, 5, val)
1128 #define write_c0_watchhi6(val) __write_32bit_c0_register($19, 6, val) 1128 #define write_c0_watchhi6(val) __write_32bit_c0_register($19, 6, val)
1129 #define write_c0_watchhi7(val) __write_32bit_c0_register($19, 7, val) 1129 #define write_c0_watchhi7(val) __write_32bit_c0_register($19, 7, val)
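A rough sketch of arming watch pair 0 to trap on stores; the bit positions (W = bit 0 in WatchLo, G = bit 30 in WatchHi) follow the MIPS32 PRA and are written as raw constants because this section does not define symbolic names for them:

static inline void example_arm_watch0(unsigned long addr)
{
	write_c0_watchlo0((addr & ~0x7UL) | 0x1);	/* address + W (watch stores) */
	write_c0_watchhi0(1u << 30);			/* G: match regardless of ASID */
}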
1130 1130
1131 #define read_c0_xcontext() __read_ulong_c0_register($20, 0) 1131 #define read_c0_xcontext() __read_ulong_c0_register($20, 0)
1132 #define write_c0_xcontext(val) __write_ulong_c0_register($20, 0, val) 1132 #define write_c0_xcontext(val) __write_ulong_c0_register($20, 0, val)
1133 1133
1134 #define read_c0_intcontrol() __read_32bit_c0_ctrl_register($20) 1134 #define read_c0_intcontrol() __read_32bit_c0_ctrl_register($20)
1135 #define write_c0_intcontrol(val) __write_32bit_c0_ctrl_register($20, val) 1135 #define write_c0_intcontrol(val) __write_32bit_c0_ctrl_register($20, val)
1136 1136
1137 #define read_c0_framemask() __read_32bit_c0_register($21, 0) 1137 #define read_c0_framemask() __read_32bit_c0_register($21, 0)
1138 #define write_c0_framemask(val) __write_32bit_c0_register($21, 0, val) 1138 #define write_c0_framemask(val) __write_32bit_c0_register($21, 0, val)
1139 1139
1140 #define read_c0_diag() __read_32bit_c0_register($22, 0) 1140 #define read_c0_diag() __read_32bit_c0_register($22, 0)
1141 #define write_c0_diag(val) __write_32bit_c0_register($22, 0, val) 1141 #define write_c0_diag(val) __write_32bit_c0_register($22, 0, val)
1142 1142
1143 #define read_c0_diag1() __read_32bit_c0_register($22, 1) 1143 #define read_c0_diag1() __read_32bit_c0_register($22, 1)
1144 #define write_c0_diag1(val) __write_32bit_c0_register($22, 1, val) 1144 #define write_c0_diag1(val) __write_32bit_c0_register($22, 1, val)
1145 1145
1146 #define read_c0_diag2() __read_32bit_c0_register($22, 2) 1146 #define read_c0_diag2() __read_32bit_c0_register($22, 2)
1147 #define write_c0_diag2(val) __write_32bit_c0_register($22, 2, val) 1147 #define write_c0_diag2(val) __write_32bit_c0_register($22, 2, val)
1148 1148
1149 #define read_c0_diag3() __read_32bit_c0_register($22, 3) 1149 #define read_c0_diag3() __read_32bit_c0_register($22, 3)
1150 #define write_c0_diag3(val) __write_32bit_c0_register($22, 3, val) 1150 #define write_c0_diag3(val) __write_32bit_c0_register($22, 3, val)
1151 1151
1152 #define read_c0_diag4() __read_32bit_c0_register($22, 4) 1152 #define read_c0_diag4() __read_32bit_c0_register($22, 4)
1153 #define write_c0_diag4(val) __write_32bit_c0_register($22, 4, val) 1153 #define write_c0_diag4(val) __write_32bit_c0_register($22, 4, val)
1154 1154
1155 #define read_c0_diag5() __read_32bit_c0_register($22, 5) 1155 #define read_c0_diag5() __read_32bit_c0_register($22, 5)
1156 #define write_c0_diag5(val) __write_32bit_c0_register($22, 5, val) 1156 #define write_c0_diag5(val) __write_32bit_c0_register($22, 5, val)
1157 1157
1158 #define read_c0_debug() __read_32bit_c0_register($23, 0) 1158 #define read_c0_debug() __read_32bit_c0_register($23, 0)
1159 #define write_c0_debug(val) __write_32bit_c0_register($23, 0, val) 1159 #define write_c0_debug(val) __write_32bit_c0_register($23, 0, val)
1160 1160
1161 #define read_c0_depc() __read_ulong_c0_register($24, 0) 1161 #define read_c0_depc() __read_ulong_c0_register($24, 0)
1162 #define write_c0_depc(val) __write_ulong_c0_register($24, 0, val) 1162 #define write_c0_depc(val) __write_ulong_c0_register($24, 0, val)
1163 1163
1164 /* 1164 /*
1165 * MIPS32 / MIPS64 performance counters 1165 * MIPS32 / MIPS64 performance counters
1166 */ 1166 */
1167 #define read_c0_perfctrl0() __read_32bit_c0_register($25, 0) 1167 #define read_c0_perfctrl0() __read_32bit_c0_register($25, 0)
1168 #define write_c0_perfctrl0(val) __write_32bit_c0_register($25, 0, val) 1168 #define write_c0_perfctrl0(val) __write_32bit_c0_register($25, 0, val)
1169 #define read_c0_perfcntr0() __read_32bit_c0_register($25, 1) 1169 #define read_c0_perfcntr0() __read_32bit_c0_register($25, 1)
1170 #define write_c0_perfcntr0(val) __write_32bit_c0_register($25, 1, val) 1170 #define write_c0_perfcntr0(val) __write_32bit_c0_register($25, 1, val)
1171 #define read_c0_perfcntr0_64() __read_64bit_c0_register($25, 1) 1171 #define read_c0_perfcntr0_64() __read_64bit_c0_register($25, 1)
1172 #define write_c0_perfcntr0_64(val) __write_64bit_c0_register($25, 1, val) 1172 #define write_c0_perfcntr0_64(val) __write_64bit_c0_register($25, 1, val)
1173 #define read_c0_perfctrl1() __read_32bit_c0_register($25, 2) 1173 #define read_c0_perfctrl1() __read_32bit_c0_register($25, 2)
1174 #define write_c0_perfctrl1(val) __write_32bit_c0_register($25, 2, val) 1174 #define write_c0_perfctrl1(val) __write_32bit_c0_register($25, 2, val)
1175 #define read_c0_perfcntr1() __read_32bit_c0_register($25, 3) 1175 #define read_c0_perfcntr1() __read_32bit_c0_register($25, 3)
1176 #define write_c0_perfcntr1(val) __write_32bit_c0_register($25, 3, val) 1176 #define write_c0_perfcntr1(val) __write_32bit_c0_register($25, 3, val)
1177 #define read_c0_perfcntr1_64() __read_64bit_c0_register($25, 3) 1177 #define read_c0_perfcntr1_64() __read_64bit_c0_register($25, 3)
1178 #define write_c0_perfcntr1_64(val) __write_64bit_c0_register($25, 3, val) 1178 #define write_c0_perfcntr1_64(val) __write_64bit_c0_register($25, 3, val)
1179 #define read_c0_perfctrl2() __read_32bit_c0_register($25, 4) 1179 #define read_c0_perfctrl2() __read_32bit_c0_register($25, 4)
1180 #define write_c0_perfctrl2(val) __write_32bit_c0_register($25, 4, val) 1180 #define write_c0_perfctrl2(val) __write_32bit_c0_register($25, 4, val)
1181 #define read_c0_perfcntr2() __read_32bit_c0_register($25, 5) 1181 #define read_c0_perfcntr2() __read_32bit_c0_register($25, 5)
1182 #define write_c0_perfcntr2(val) __write_32bit_c0_register($25, 5, val) 1182 #define write_c0_perfcntr2(val) __write_32bit_c0_register($25, 5, val)
1183 #define read_c0_perfcntr2_64() __read_64bit_c0_register($25, 5) 1183 #define read_c0_perfcntr2_64() __read_64bit_c0_register($25, 5)
1184 #define write_c0_perfcntr2_64(val) __write_64bit_c0_register($25, 5, val) 1184 #define write_c0_perfcntr2_64(val) __write_64bit_c0_register($25, 5, val)
1185 #define read_c0_perfctrl3() __read_32bit_c0_register($25, 6) 1185 #define read_c0_perfctrl3() __read_32bit_c0_register($25, 6)
1186 #define write_c0_perfctrl3(val) __write_32bit_c0_register($25, 6, val) 1186 #define write_c0_perfctrl3(val) __write_32bit_c0_register($25, 6, val)
1187 #define read_c0_perfcntr3() __read_32bit_c0_register($25, 7) 1187 #define read_c0_perfcntr3() __read_32bit_c0_register($25, 7)
1188 #define write_c0_perfcntr3(val) __write_32bit_c0_register($25, 7, val) 1188 #define write_c0_perfcntr3(val) __write_32bit_c0_register($25, 7, val)
1189 #define read_c0_perfcntr3_64() __read_64bit_c0_register($25, 7) 1189 #define read_c0_perfcntr3_64() __read_64bit_c0_register($25, 7)
1190 #define write_c0_perfcntr3_64(val) __write_64bit_c0_register($25, 7, val) 1190 #define write_c0_perfcntr3_64(val) __write_64bit_c0_register($25, 7, val)
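A hedged sketch of the usual counter 0 sequence; the control-register layout (event select shifted by 5, mode bits in the low nibble) and the meaning of the event number are core-specific, so both constants below are illustrative only:

static inline unsigned int example_count_kernel_events(unsigned int event)
{
	write_c0_perfctrl0((event << 5) | (1 << 1));	/* event select + count-in-kernel bit */
	write_c0_perfcntr0(0);
	/* ... workload ... */
	return read_c0_perfcntr0();
}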
1191 1191
1192 #define read_c0_ecc() __read_32bit_c0_register($26, 0) 1192 #define read_c0_ecc() __read_32bit_c0_register($26, 0)
1193 #define write_c0_ecc(val) __write_32bit_c0_register($26, 0, val) 1193 #define write_c0_ecc(val) __write_32bit_c0_register($26, 0, val)
1194 1194
1195 #define read_c0_derraddr0() __read_ulong_c0_register($26, 1) 1195 #define read_c0_derraddr0() __read_ulong_c0_register($26, 1)
1196 #define write_c0_derraddr0(val) __write_ulong_c0_register($26, 1, val) 1196 #define write_c0_derraddr0(val) __write_ulong_c0_register($26, 1, val)
1197 1197
1198 #define read_c0_cacheerr() __read_32bit_c0_register($27, 0) 1198 #define read_c0_cacheerr() __read_32bit_c0_register($27, 0)
1199 1199
1200 #define read_c0_derraddr1() __read_ulong_c0_register($27, 1) 1200 #define read_c0_derraddr1() __read_ulong_c0_register($27, 1)
1201 #define write_c0_derraddr1(val) __write_ulong_c0_register($27, 1, val) 1201 #define write_c0_derraddr1(val) __write_ulong_c0_register($27, 1, val)
1202 1202
1203 #define read_c0_taglo() __read_32bit_c0_register($28, 0) 1203 #define read_c0_taglo() __read_32bit_c0_register($28, 0)
1204 #define write_c0_taglo(val) __write_32bit_c0_register($28, 0, val) 1204 #define write_c0_taglo(val) __write_32bit_c0_register($28, 0, val)
1205 1205
1206 #define read_c0_dtaglo() __read_32bit_c0_register($28, 2) 1206 #define read_c0_dtaglo() __read_32bit_c0_register($28, 2)
1207 #define write_c0_dtaglo(val) __write_32bit_c0_register($28, 2, val) 1207 #define write_c0_dtaglo(val) __write_32bit_c0_register($28, 2, val)
1208 1208
1209 #define read_c0_ddatalo() __read_32bit_c0_register($28, 3) 1209 #define read_c0_ddatalo() __read_32bit_c0_register($28, 3)
1210 #define write_c0_ddatalo(val) __write_32bit_c0_register($28, 3, val) 1210 #define write_c0_ddatalo(val) __write_32bit_c0_register($28, 3, val)
1211 1211
1212 #define read_c0_staglo() __read_32bit_c0_register($28, 4) 1212 #define read_c0_staglo() __read_32bit_c0_register($28, 4)
1213 #define write_c0_staglo(val) __write_32bit_c0_register($28, 4, val) 1213 #define write_c0_staglo(val) __write_32bit_c0_register($28, 4, val)
1214 1214
1215 #define read_c0_taghi() __read_32bit_c0_register($29, 0) 1215 #define read_c0_taghi() __read_32bit_c0_register($29, 0)
1216 #define write_c0_taghi(val) __write_32bit_c0_register($29, 0, val) 1216 #define write_c0_taghi(val) __write_32bit_c0_register($29, 0, val)
1217 1217
1218 #define read_c0_errorepc() __read_ulong_c0_register($30, 0) 1218 #define read_c0_errorepc() __read_ulong_c0_register($30, 0)
1219 #define write_c0_errorepc(val) __write_ulong_c0_register($30, 0, val) 1219 #define write_c0_errorepc(val) __write_ulong_c0_register($30, 0, val)
1220 1220
1221 /* MIPSR2 */ 1221 /* MIPSR2 */
1222 #define read_c0_hwrena() __read_32bit_c0_register($7, 0) 1222 #define read_c0_hwrena() __read_32bit_c0_register($7, 0)
1223 #define write_c0_hwrena(val) __write_32bit_c0_register($7, 0, val) 1223 #define write_c0_hwrena(val) __write_32bit_c0_register($7, 0, val)
1224 1224
1225 #define read_c0_intctl() __read_32bit_c0_register($12, 1) 1225 #define read_c0_intctl() __read_32bit_c0_register($12, 1)
1226 #define write_c0_intctl(val) __write_32bit_c0_register($12, 1, val) 1226 #define write_c0_intctl(val) __write_32bit_c0_register($12, 1, val)
1227 1227
1228 #define read_c0_srsctl() __read_32bit_c0_register($12, 2) 1228 #define read_c0_srsctl() __read_32bit_c0_register($12, 2)
1229 #define write_c0_srsctl(val) __write_32bit_c0_register($12, 2, val) 1229 #define write_c0_srsctl(val) __write_32bit_c0_register($12, 2, val)
1230 1230
1231 #define read_c0_srsmap() __read_32bit_c0_register($12, 3) 1231 #define read_c0_srsmap() __read_32bit_c0_register($12, 3)
1232 #define write_c0_srsmap(val) __write_32bit_c0_register($12, 3, val) 1232 #define write_c0_srsmap(val) __write_32bit_c0_register($12, 3, val)
1233 1233
1234 #define read_c0_ebase() __read_32bit_c0_register($15, 1) 1234 #define read_c0_ebase() __read_32bit_c0_register($15, 1)
1235 #define write_c0_ebase(val) __write_32bit_c0_register($15, 1, val) 1235 #define write_c0_ebase(val) __write_32bit_c0_register($15, 1, val)
1236 1236
1237 /* MIPSR3 */ 1237 /* MIPSR3 */
1238 #define read_c0_segctl0() __read_32bit_c0_register($5, 2) 1238 #define read_c0_segctl0() __read_32bit_c0_register($5, 2)
1239 #define write_c0_segctl0(val) __write_32bit_c0_register($5, 2, val) 1239 #define write_c0_segctl0(val) __write_32bit_c0_register($5, 2, val)
1240 1240
1241 #define read_c0_segctl1() __read_32bit_c0_register($5, 3) 1241 #define read_c0_segctl1() __read_32bit_c0_register($5, 3)
1242 #define write_c0_segctl1(val) __write_32bit_c0_register($5, 3, val) 1242 #define write_c0_segctl1(val) __write_32bit_c0_register($5, 3, val)
1243 1243
1244 #define read_c0_segctl2() __read_32bit_c0_register($5, 4) 1244 #define read_c0_segctl2() __read_32bit_c0_register($5, 4)
1245 #define write_c0_segctl2(val) __write_32bit_c0_register($5, 4, val) 1245 #define write_c0_segctl2(val) __write_32bit_c0_register($5, 4, val)
1246 1246
1247 /* Hardware Page Table Walker */ 1247 /* Hardware Page Table Walker */
1248 #define read_c0_pwbase() __read_ulong_c0_register($5, 5) 1248 #define read_c0_pwbase() __read_ulong_c0_register($5, 5)
1249 #define write_c0_pwbase(val) __write_ulong_c0_register($5, 5, val) 1249 #define write_c0_pwbase(val) __write_ulong_c0_register($5, 5, val)
1250 1250
1251 #define read_c0_pwfield() __read_ulong_c0_register($5, 6) 1251 #define read_c0_pwfield() __read_ulong_c0_register($5, 6)
1252 #define write_c0_pwfield(val) __write_ulong_c0_register($5, 6, val) 1252 #define write_c0_pwfield(val) __write_ulong_c0_register($5, 6, val)
1253 1253
1254 #define read_c0_pwsize() __read_ulong_c0_register($5, 7) 1254 #define read_c0_pwsize() __read_ulong_c0_register($5, 7)
1255 #define write_c0_pwsize(val) __write_ulong_c0_register($5, 7, val) 1255 #define write_c0_pwsize(val) __write_ulong_c0_register($5, 7, val)
1256 1256
1257 #define read_c0_pwctl() __read_32bit_c0_register($6, 6) 1257 #define read_c0_pwctl() __read_32bit_c0_register($6, 6)
1258 #define write_c0_pwctl(val) __write_32bit_c0_register($6, 6, val) 1258 #define write_c0_pwctl(val) __write_32bit_c0_register($6, 6, val)
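A very rough sketch of bringing the hardware page table walker up with these accessors; programming PWField/PWSize to match the software page-table layout is omitted, and pgd is a hypothetical root pointer:

static inline void example_enable_htw(unsigned long pgd)
{
	write_c0_pwbase(pgd);		/* root of the page-table tree */
	/* write_c0_pwfield()/write_c0_pwsize() would describe the PTE layout here */
	write_c0_pwctl(read_c0_pwctl() | MIPS_PWCTL_PWEN_MASK);
}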
1259 1259
1260 /* Cavium OCTEON (cnMIPS) */ 1260 /* Cavium OCTEON (cnMIPS) */
1261 #define read_c0_cvmcount() __read_ulong_c0_register($9, 6) 1261 #define read_c0_cvmcount() __read_ulong_c0_register($9, 6)
1262 #define write_c0_cvmcount(val) __write_ulong_c0_register($9, 6, val) 1262 #define write_c0_cvmcount(val) __write_ulong_c0_register($9, 6, val)
1263 1263
1264 #define read_c0_cvmctl() __read_64bit_c0_register($9, 7) 1264 #define read_c0_cvmctl() __read_64bit_c0_register($9, 7)
1265 #define write_c0_cvmctl(val) __write_64bit_c0_register($9, 7, val) 1265 #define write_c0_cvmctl(val) __write_64bit_c0_register($9, 7, val)
1266 1266
1267 #define read_c0_cvmmemctl() __read_64bit_c0_register($11, 7) 1267 #define read_c0_cvmmemctl() __read_64bit_c0_register($11, 7)
1268 #define write_c0_cvmmemctl(val) __write_64bit_c0_register($11, 7, val) 1268 #define write_c0_cvmmemctl(val) __write_64bit_c0_register($11, 7, val)
1269 /* 1269 /*
1270 * The cacheerr registers are not standardized. On OCTEON, they are 1270 * The cacheerr registers are not standardized. On OCTEON, they are
1271 * 64 bits wide. 1271 * 64 bits wide.
1272 */ 1272 */
1273 #define read_octeon_c0_icacheerr() __read_64bit_c0_register($27, 0) 1273 #define read_octeon_c0_icacheerr() __read_64bit_c0_register($27, 0)
1274 #define write_octeon_c0_icacheerr(val) __write_64bit_c0_register($27, 0, val) 1274 #define write_octeon_c0_icacheerr(val) __write_64bit_c0_register($27, 0, val)
1275 1275
1276 #define read_octeon_c0_dcacheerr() __read_64bit_c0_register($27, 1) 1276 #define read_octeon_c0_dcacheerr() __read_64bit_c0_register($27, 1)
1277 #define write_octeon_c0_dcacheerr(val) __write_64bit_c0_register($27, 1, val) 1277 #define write_octeon_c0_dcacheerr(val) __write_64bit_c0_register($27, 1, val)
1278 1278
1279 /* BMIPS3300 */ 1279 /* BMIPS3300 */
1280 #define read_c0_brcm_config_0() __read_32bit_c0_register($22, 0) 1280 #define read_c0_brcm_config_0() __read_32bit_c0_register($22, 0)
1281 #define write_c0_brcm_config_0(val) __write_32bit_c0_register($22, 0, val) 1281 #define write_c0_brcm_config_0(val) __write_32bit_c0_register($22, 0, val)
1282 1282
1283 #define read_c0_brcm_bus_pll() __read_32bit_c0_register($22, 4) 1283 #define read_c0_brcm_bus_pll() __read_32bit_c0_register($22, 4)
1284 #define write_c0_brcm_bus_pll(val) __write_32bit_c0_register($22, 4, val) 1284 #define write_c0_brcm_bus_pll(val) __write_32bit_c0_register($22, 4, val)
1285 1285
1286 #define read_c0_brcm_reset() __read_32bit_c0_register($22, 5) 1286 #define read_c0_brcm_reset() __read_32bit_c0_register($22, 5)
1287 #define write_c0_brcm_reset(val) __write_32bit_c0_register($22, 5, val) 1287 #define write_c0_brcm_reset(val) __write_32bit_c0_register($22, 5, val)
1288 1288
1289 /* BMIPS43xx */ 1289 /* BMIPS43xx */
1290 #define read_c0_brcm_cmt_intr() __read_32bit_c0_register($22, 1) 1290 #define read_c0_brcm_cmt_intr() __read_32bit_c0_register($22, 1)
1291 #define write_c0_brcm_cmt_intr(val) __write_32bit_c0_register($22, 1, val) 1291 #define write_c0_brcm_cmt_intr(val) __write_32bit_c0_register($22, 1, val)
1292 1292
1293 #define read_c0_brcm_cmt_ctrl() __read_32bit_c0_register($22, 2) 1293 #define read_c0_brcm_cmt_ctrl() __read_32bit_c0_register($22, 2)
1294 #define write_c0_brcm_cmt_ctrl(val) __write_32bit_c0_register($22, 2, val) 1294 #define write_c0_brcm_cmt_ctrl(val) __write_32bit_c0_register($22, 2, val)
1295 1295
1296 #define read_c0_brcm_cmt_local() __read_32bit_c0_register($22, 3) 1296 #define read_c0_brcm_cmt_local() __read_32bit_c0_register($22, 3)
1297 #define write_c0_brcm_cmt_local(val) __write_32bit_c0_register($22, 3, val) 1297 #define write_c0_brcm_cmt_local(val) __write_32bit_c0_register($22, 3, val)
1298 1298
1299 #define read_c0_brcm_config_1() __read_32bit_c0_register($22, 5) 1299 #define read_c0_brcm_config_1() __read_32bit_c0_register($22, 5)
1300 #define write_c0_brcm_config_1(val) __write_32bit_c0_register($22, 5, val) 1300 #define write_c0_brcm_config_1(val) __write_32bit_c0_register($22, 5, val)
1301 1301
1302 #define read_c0_brcm_cbr() __read_32bit_c0_register($22, 6) 1302 #define read_c0_brcm_cbr() __read_32bit_c0_register($22, 6)
1303 #define write_c0_brcm_cbr(val) __write_32bit_c0_register($22, 6, val) 1303 #define write_c0_brcm_cbr(val) __write_32bit_c0_register($22, 6, val)
1304 1304
1305 /* BMIPS5000 */ 1305 /* BMIPS5000 */
1306 #define read_c0_brcm_config() __read_32bit_c0_register($22, 0) 1306 #define read_c0_brcm_config() __read_32bit_c0_register($22, 0)
1307 #define write_c0_brcm_config(val) __write_32bit_c0_register($22, 0, val) 1307 #define write_c0_brcm_config(val) __write_32bit_c0_register($22, 0, val)
1308 1308
1309 #define read_c0_brcm_mode() __read_32bit_c0_register($22, 1) 1309 #define read_c0_brcm_mode() __read_32bit_c0_register($22, 1)
1310 #define write_c0_brcm_mode(val) __write_32bit_c0_register($22, 1, val) 1310 #define write_c0_brcm_mode(val) __write_32bit_c0_register($22, 1, val)
1311 1311
1312 #define read_c0_brcm_action() __read_32bit_c0_register($22, 2) 1312 #define read_c0_brcm_action() __read_32bit_c0_register($22, 2)
1313 #define write_c0_brcm_action(val) __write_32bit_c0_register($22, 2, val) 1313 #define write_c0_brcm_action(val) __write_32bit_c0_register($22, 2, val)
1314 1314
1315 #define read_c0_brcm_edsp() __read_32bit_c0_register($22, 3) 1315 #define read_c0_brcm_edsp() __read_32bit_c0_register($22, 3)
1316 #define write_c0_brcm_edsp(val) __write_32bit_c0_register($22, 3, val) 1316 #define write_c0_brcm_edsp(val) __write_32bit_c0_register($22, 3, val)
1317 1317
1318 #define read_c0_brcm_bootvec() __read_32bit_c0_register($22, 4) 1318 #define read_c0_brcm_bootvec() __read_32bit_c0_register($22, 4)
1319 #define write_c0_brcm_bootvec(val) __write_32bit_c0_register($22, 4, val) 1319 #define write_c0_brcm_bootvec(val) __write_32bit_c0_register($22, 4, val)
1320 1320
1321 #define read_c0_brcm_sleepcount() __read_32bit_c0_register($22, 7) 1321 #define read_c0_brcm_sleepcount() __read_32bit_c0_register($22, 7)
1322 #define write_c0_brcm_sleepcount(val) __write_32bit_c0_register($22, 7, val) 1322 #define write_c0_brcm_sleepcount(val) __write_32bit_c0_register($22, 7, val)
1323 1323
1324 /* 1324 /*
1325 * Macros to access the floating point coprocessor control registers 1325 * Macros to access the floating point coprocessor control registers
1326 */ 1326 */
1327 #define read_32bit_cp1_register(source) \ 1327 #define _read_32bit_cp1_register(source, gas_hardfloat) \
1328 ({ \ 1328 ({ \
1329 int __res; \ 1329 int __res; \
1330 \ 1330 \
1331 __asm__ __volatile__( \ 1331 __asm__ __volatile__( \
1332 " .set push \n" \ 1332 " .set push \n" \
1333 " .set reorder \n" \ 1333 " .set reorder \n" \
1334 " # gas fails to assemble cfc1 for some archs, \n" \ 1334 " # gas fails to assemble cfc1 for some archs, \n" \
1335 " # like Octeon. \n" \ 1335 " # like Octeon. \n" \
1336 " .set mips1 \n" \ 1336 " .set mips1 \n" \
1337 " "STR(gas_hardfloat)" \n" \
1337 " cfc1 %0,"STR(source)" \n" \ 1338 " cfc1 %0,"STR(source)" \n" \
1338 " .set pop \n" \ 1339 " .set pop \n" \
1339 : "=r" (__res)); \ 1340 : "=r" (__res)); \
1340 __res; \ 1341 __res; \
1341 }) 1342 })
1343
1344 #ifdef GAS_HAS_SET_HARDFLOAT
1345 #define read_32bit_cp1_register(source) \
1346 _read_32bit_cp1_register(source, .set hardfloat)
1347 #else
1348 #define read_32bit_cp1_register(source) \
1349 _read_32bit_cp1_register(source, )
1350 #endif
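
The new gas_hardfloat parameter is pasted verbatim into the inline assembly string, so a single macro body serves both assembler generations: with GAS_HAS_SET_HARDFLOAT defined it injects a ".set hardfloat" override ahead of the cfc1, otherwise it expands to nothing and the emitted sequence matches the old read_32bit_cp1_register(). A minimal sketch of what read_32bit_cp1_register(CP1_STATUS) boils down to after expansion (illustrative only; the helper name is made up, and CP1_STATUS is assumed to be coprocessor 1 register $31 as defined elsewhere in this header):

	static inline unsigned int sketch_read_fcsr(void)
	{
		unsigned int fcsr;

		__asm__ __volatile__(
		"	.set	push				\n"
		"	.set	reorder				\n"
		"	.set	mips1				\n"
	#ifdef GAS_HAS_SET_HARDFLOAT
		"	.set	hardfloat			\n"	/* pasted in via STR(gas_hardfloat) */
	#endif
		"	cfc1	%0, $31				\n"	/* FCSR, i.e. CP1_STATUS */
		"	.set	pop				\n"
		: "=r" (fcsr));

		return fcsr;
	}
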
1342 1351
1343 #ifdef HAVE_AS_DSP 1352 #ifdef HAVE_AS_DSP
1344 #define rddsp(mask) \ 1353 #define rddsp(mask) \
1345 ({ \ 1354 ({ \
1346 unsigned int __dspctl; \ 1355 unsigned int __dspctl; \
1347 \ 1356 \
1348 __asm__ __volatile__( \ 1357 __asm__ __volatile__( \
1349 " .set push \n" \ 1358 " .set push \n" \
1350 " .set dsp \n" \ 1359 " .set dsp \n" \
1351 " rddsp %0, %x1 \n" \ 1360 " rddsp %0, %x1 \n" \
1352 " .set pop \n" \ 1361 " .set pop \n" \
1353 : "=r" (__dspctl) \ 1362 : "=r" (__dspctl) \
1354 : "i" (mask)); \ 1363 : "i" (mask)); \
1355 __dspctl; \ 1364 __dspctl; \
1356 }) 1365 })
1357 1366
1358 #define wrdsp(val, mask) \ 1367 #define wrdsp(val, mask) \
1359 do { \ 1368 do { \
1360 __asm__ __volatile__( \ 1369 __asm__ __volatile__( \
1361 " .set push \n" \ 1370 " .set push \n" \
1362 " .set dsp \n" \ 1371 " .set dsp \n" \
1363 " wrdsp %0, %x1 \n" \ 1372 " wrdsp %0, %x1 \n" \
1364 " .set pop \n" \ 1373 " .set pop \n" \
1365 : \ 1374 : \
1366 : "r" (val), "i" (mask)); \ 1375 : "r" (val), "i" (mask)); \
1367 } while (0) 1376 } while (0)
1368 1377
1369 #define mflo0() \ 1378 #define mflo0() \
1370 ({ \ 1379 ({ \
1371 long mflo0; \ 1380 long mflo0; \
1372 __asm__( \ 1381 __asm__( \
1373 " .set push \n" \ 1382 " .set push \n" \
1374 " .set dsp \n" \ 1383 " .set dsp \n" \
1375 " mflo %0, $ac0 \n" \ 1384 " mflo %0, $ac0 \n" \
1376 " .set pop \n" \ 1385 " .set pop \n" \
1377 : "=r" (mflo0)); \ 1386 : "=r" (mflo0)); \
1378 mflo0; \ 1387 mflo0; \
1379 }) 1388 })
1380 1389
1381 #define mflo1() \ 1390 #define mflo1() \
1382 ({ \ 1391 ({ \
1383 long mflo1; \ 1392 long mflo1; \
1384 __asm__( \ 1393 __asm__( \
1385 " .set push \n" \ 1394 " .set push \n" \
1386 " .set dsp \n" \ 1395 " .set dsp \n" \
1387 " mflo %0, $ac1 \n" \ 1396 " mflo %0, $ac1 \n" \
1388 " .set pop \n" \ 1397 " .set pop \n" \
1389 : "=r" (mflo1)); \ 1398 : "=r" (mflo1)); \
1390 mflo1; \ 1399 mflo1; \
1391 }) 1400 })
1392 1401
1393 #define mflo2() \ 1402 #define mflo2() \
1394 ({ \ 1403 ({ \
1395 long mflo2; \ 1404 long mflo2; \
1396 __asm__( \ 1405 __asm__( \
1397 " .set push \n" \ 1406 " .set push \n" \
1398 " .set dsp \n" \ 1407 " .set dsp \n" \
1399 " mflo %0, $ac2 \n" \ 1408 " mflo %0, $ac2 \n" \
1400 " .set pop \n" \ 1409 " .set pop \n" \
1401 : "=r" (mflo2)); \ 1410 : "=r" (mflo2)); \
1402 mflo2; \ 1411 mflo2; \
1403 }) 1412 })
1404 1413
1405 #define mflo3() \ 1414 #define mflo3() \
1406 ({ \ 1415 ({ \
1407 long mflo3; \ 1416 long mflo3; \
1408 __asm__( \ 1417 __asm__( \
1409 " .set push \n" \ 1418 " .set push \n" \
1410 " .set dsp \n" \ 1419 " .set dsp \n" \
1411 " mflo %0, $ac3 \n" \ 1420 " mflo %0, $ac3 \n" \
1412 " .set pop \n" \ 1421 " .set pop \n" \
1413 : "=r" (mflo3)); \ 1422 : "=r" (mflo3)); \
1414 mflo3; \ 1423 mflo3; \
1415 }) 1424 })
1416 1425
1417 #define mfhi0() \ 1426 #define mfhi0() \
1418 ({ \ 1427 ({ \
1419 long mfhi0; \ 1428 long mfhi0; \
1420 __asm__( \ 1429 __asm__( \
1421 " .set push \n" \ 1430 " .set push \n" \
1422 " .set dsp \n" \ 1431 " .set dsp \n" \
1423 " mfhi %0, $ac0 \n" \ 1432 " mfhi %0, $ac0 \n" \
1424 " .set pop \n" \ 1433 " .set pop \n" \
1425 : "=r" (mfhi0)); \ 1434 : "=r" (mfhi0)); \
1426 mfhi0; \ 1435 mfhi0; \
1427 }) 1436 })
1428 1437
1429 #define mfhi1() \ 1438 #define mfhi1() \
1430 ({ \ 1439 ({ \
1431 long mfhi1; \ 1440 long mfhi1; \
1432 __asm__( \ 1441 __asm__( \
1433 " .set push \n" \ 1442 " .set push \n" \
1434 " .set dsp \n" \ 1443 " .set dsp \n" \
1435 " mfhi %0, $ac1 \n" \ 1444 " mfhi %0, $ac1 \n" \
1436 " .set pop \n" \ 1445 " .set pop \n" \
1437 : "=r" (mfhi1)); \ 1446 : "=r" (mfhi1)); \
1438 mfhi1; \ 1447 mfhi1; \
1439 }) 1448 })
1440 1449
1441 #define mfhi2() \ 1450 #define mfhi2() \
1442 ({ \ 1451 ({ \
1443 long mfhi2; \ 1452 long mfhi2; \
1444 __asm__( \ 1453 __asm__( \
1445 " .set push \n" \ 1454 " .set push \n" \
1446 " .set dsp \n" \ 1455 " .set dsp \n" \
1447 " mfhi %0, $ac2 \n" \ 1456 " mfhi %0, $ac2 \n" \
1448 " .set pop \n" \ 1457 " .set pop \n" \
1449 : "=r" (mfhi2)); \ 1458 : "=r" (mfhi2)); \
1450 mfhi2; \ 1459 mfhi2; \
1451 }) 1460 })
1452 1461
1453 #define mfhi3() \ 1462 #define mfhi3() \
1454 ({ \ 1463 ({ \
1455 long mfhi3; \ 1464 long mfhi3; \
1456 __asm__( \ 1465 __asm__( \
1457 " .set push \n" \ 1466 " .set push \n" \
1458 " .set dsp \n" \ 1467 " .set dsp \n" \
1459 " mfhi %0, $ac3 \n" \ 1468 " mfhi %0, $ac3 \n" \
1460 " .set pop \n" \ 1469 " .set pop \n" \
1461 : "=r" (mfhi3)); \ 1470 : "=r" (mfhi3)); \
1462 mfhi3; \ 1471 mfhi3; \
1463 }) 1472 })
1464 1473
1465 1474
1466 #define mtlo0(x) \ 1475 #define mtlo0(x) \
1467 ({ \ 1476 ({ \
1468 __asm__( \ 1477 __asm__( \
1469 " .set push \n" \ 1478 " .set push \n" \
1470 " .set dsp \n" \ 1479 " .set dsp \n" \
1471 " mtlo %0, $ac0 \n" \ 1480 " mtlo %0, $ac0 \n" \
1472 " .set pop \n" \ 1481 " .set pop \n" \
1473 : \ 1482 : \
1474 : "r" (x)); \ 1483 : "r" (x)); \
1475 }) 1484 })
1476 1485
1477 #define mtlo1(x) \ 1486 #define mtlo1(x) \
1478 ({ \ 1487 ({ \
1479 __asm__( \ 1488 __asm__( \
1480 " .set push \n" \ 1489 " .set push \n" \
1481 " .set dsp \n" \ 1490 " .set dsp \n" \
1482 " mtlo %0, $ac1 \n" \ 1491 " mtlo %0, $ac1 \n" \
1483 " .set pop \n" \ 1492 " .set pop \n" \
1484 : \ 1493 : \
1485 : "r" (x)); \ 1494 : "r" (x)); \
1486 }) 1495 })
1487 1496
1488 #define mtlo2(x) \ 1497 #define mtlo2(x) \
1489 ({ \ 1498 ({ \
1490 __asm__( \ 1499 __asm__( \
1491 " .set push \n" \ 1500 " .set push \n" \
1492 " .set dsp \n" \ 1501 " .set dsp \n" \
1493 " mtlo %0, $ac2 \n" \ 1502 " mtlo %0, $ac2 \n" \
1494 " .set pop \n" \ 1503 " .set pop \n" \
1495 : \ 1504 : \
1496 : "r" (x)); \ 1505 : "r" (x)); \
1497 }) 1506 })
1498 1507
1499 #define mtlo3(x) \ 1508 #define mtlo3(x) \
1500 ({ \ 1509 ({ \
1501 __asm__( \ 1510 __asm__( \
1502 " .set push \n" \ 1511 " .set push \n" \
1503 " .set dsp \n" \ 1512 " .set dsp \n" \
1504 " mtlo %0, $ac3 \n" \ 1513 " mtlo %0, $ac3 \n" \
1505 " .set pop \n" \ 1514 " .set pop \n" \
1506 : \ 1515 : \
1507 : "r" (x)); \ 1516 : "r" (x)); \
1508 }) 1517 })
1509 1518
1510 #define mthi0(x) \ 1519 #define mthi0(x) \
1511 ({ \ 1520 ({ \
1512 __asm__( \ 1521 __asm__( \
1513 " .set push \n" \ 1522 " .set push \n" \
1514 " .set dsp \n" \ 1523 " .set dsp \n" \
1515 " mthi %0, $ac0 \n" \ 1524 " mthi %0, $ac0 \n" \
1516 " .set pop \n" \ 1525 " .set pop \n" \
1517 : \ 1526 : \
1518 : "r" (x)); \ 1527 : "r" (x)); \
1519 }) 1528 })
1520 1529
1521 #define mthi1(x) \ 1530 #define mthi1(x) \
1522 ({ \ 1531 ({ \
1523 __asm__( \ 1532 __asm__( \
1524 " .set push \n" \ 1533 " .set push \n" \
1525 " .set dsp \n" \ 1534 " .set dsp \n" \
1526 " mthi %0, $ac1 \n" \ 1535 " mthi %0, $ac1 \n" \
1527 " .set pop \n" \ 1536 " .set pop \n" \
1528 : \ 1537 : \
1529 : "r" (x)); \ 1538 : "r" (x)); \
1530 }) 1539 })
1531 1540
1532 #define mthi2(x) \ 1541 #define mthi2(x) \
1533 ({ \ 1542 ({ \
1534 __asm__( \ 1543 __asm__( \
1535 " .set push \n" \ 1544 " .set push \n" \
1536 " .set dsp \n" \ 1545 " .set dsp \n" \
1537 " mthi %0, $ac2 \n" \ 1546 " mthi %0, $ac2 \n" \
1538 " .set pop \n" \ 1547 " .set pop \n" \
1539 : \ 1548 : \
1540 : "r" (x)); \ 1549 : "r" (x)); \
1541 }) 1550 })
1542 1551
1543 #define mthi3(x) \ 1552 #define mthi3(x) \
1544 ({ \ 1553 ({ \
1545 __asm__( \ 1554 __asm__( \
1546 " .set push \n" \ 1555 " .set push \n" \
1547 " .set dsp \n" \ 1556 " .set dsp \n" \
1548 " mthi %0, $ac3 \n" \ 1557 " mthi %0, $ac3 \n" \
1549 " .set pop \n" \ 1558 " .set pop \n" \
1550 : \ 1559 : \
1551 : "r" (x)); \ 1560 : "r" (x)); \
1552 }) 1561 })
1553 1562
1554 #else 1563 #else
1555 1564
1556 #ifdef CONFIG_CPU_MICROMIPS 1565 #ifdef CONFIG_CPU_MICROMIPS
1557 #define rddsp(mask) \ 1566 #define rddsp(mask) \
1558 ({ \ 1567 ({ \
1559 unsigned int __res; \ 1568 unsigned int __res; \
1560 \ 1569 \
1561 __asm__ __volatile__( \ 1570 __asm__ __volatile__( \
1562 " .set push \n" \ 1571 " .set push \n" \
1563 " .set noat \n" \ 1572 " .set noat \n" \
1564 " # rddsp $1, %x1 \n" \ 1573 " # rddsp $1, %x1 \n" \
1565 " .hword ((0x0020067c | (%x1 << 14)) >> 16) \n" \ 1574 " .hword ((0x0020067c | (%x1 << 14)) >> 16) \n" \
1566 " .hword ((0x0020067c | (%x1 << 14)) & 0xffff) \n" \ 1575 " .hword ((0x0020067c | (%x1 << 14)) & 0xffff) \n" \
1567 " move %0, $1 \n" \ 1576 " move %0, $1 \n" \
1568 " .set pop \n" \ 1577 " .set pop \n" \
1569 : "=r" (__res) \ 1578 : "=r" (__res) \
1570 : "i" (mask)); \ 1579 : "i" (mask)); \
1571 __res; \ 1580 __res; \
1572 }) 1581 })
1573 1582
1574 #define wrdsp(val, mask) \ 1583 #define wrdsp(val, mask) \
1575 do { \ 1584 do { \
1576 __asm__ __volatile__( \ 1585 __asm__ __volatile__( \
1577 " .set push \n" \ 1586 " .set push \n" \
1578 " .set noat \n" \ 1587 " .set noat \n" \
1579 " move $1, %0 \n" \ 1588 " move $1, %0 \n" \
1580 " # wrdsp $1, %x1 \n" \ 1589 " # wrdsp $1, %x1 \n" \
1581 " .hword ((0x0020167c | (%x1 << 14)) >> 16) \n" \ 1590 " .hword ((0x0020167c | (%x1 << 14)) >> 16) \n" \
1582 " .hword ((0x0020167c | (%x1 << 14)) & 0xffff) \n" \ 1591 " .hword ((0x0020167c | (%x1 << 14)) & 0xffff) \n" \
1583 " .set pop \n" \ 1592 " .set pop \n" \
1584 : \ 1593 : \
1585 : "r" (val), "i" (mask)); \ 1594 : "r" (val), "i" (mask)); \
1586 } while (0) 1595 } while (0)
1587 1596
1588 #define _umips_dsp_mfxxx(ins) \ 1597 #define _umips_dsp_mfxxx(ins) \
1589 ({ \ 1598 ({ \
1590 unsigned long __treg; \ 1599 unsigned long __treg; \
1591 \ 1600 \
1592 __asm__ __volatile__( \ 1601 __asm__ __volatile__( \
1593 " .set push \n" \ 1602 " .set push \n" \
1594 " .set noat \n" \ 1603 " .set noat \n" \
1595 " .hword 0x0001 \n" \ 1604 " .hword 0x0001 \n" \
1596 " .hword %x1 \n" \ 1605 " .hword %x1 \n" \
1597 " move %0, $1 \n" \ 1606 " move %0, $1 \n" \
1598 " .set pop \n" \ 1607 " .set pop \n" \
1599 : "=r" (__treg) \ 1608 : "=r" (__treg) \
1600 : "i" (ins)); \ 1609 : "i" (ins)); \
1601 __treg; \ 1610 __treg; \
1602 }) 1611 })
1603 1612
1604 #define _umips_dsp_mtxxx(val, ins) \ 1613 #define _umips_dsp_mtxxx(val, ins) \
1605 do { \ 1614 do { \
1606 __asm__ __volatile__( \ 1615 __asm__ __volatile__( \
1607 " .set push \n" \ 1616 " .set push \n" \
1608 " .set noat \n" \ 1617 " .set noat \n" \
1609 " move $1, %0 \n" \ 1618 " move $1, %0 \n" \
1610 " .hword 0x0001 \n" \ 1619 " .hword 0x0001 \n" \
1611 " .hword %x1 \n" \ 1620 " .hword %x1 \n" \
1612 " .set pop \n" \ 1621 " .set pop \n" \
1613 : \ 1622 : \
1614 : "r" (val), "i" (ins)); \ 1623 : "r" (val), "i" (ins)); \
1615 } while (0) 1624 } while (0)
1616 1625
1617 #define _umips_dsp_mflo(reg) _umips_dsp_mfxxx((reg << 14) | 0x107c) 1626 #define _umips_dsp_mflo(reg) _umips_dsp_mfxxx((reg << 14) | 0x107c)
1618 #define _umips_dsp_mfhi(reg) _umips_dsp_mfxxx((reg << 14) | 0x007c) 1627 #define _umips_dsp_mfhi(reg) _umips_dsp_mfxxx((reg << 14) | 0x007c)
1619 1628
1620 #define _umips_dsp_mtlo(val, reg) _umips_dsp_mtxxx(val, ((reg << 14) | 0x307c)) 1629 #define _umips_dsp_mtlo(val, reg) _umips_dsp_mtxxx(val, ((reg << 14) | 0x307c))
1621 #define _umips_dsp_mthi(val, reg) _umips_dsp_mtxxx(val, ((reg << 14) | 0x207c)) 1630 #define _umips_dsp_mthi(val, reg) _umips_dsp_mtxxx(val, ((reg << 14) | 0x207c))
1622 1631
1623 #define mflo0() _umips_dsp_mflo(0) 1632 #define mflo0() _umips_dsp_mflo(0)
1624 #define mflo1() _umips_dsp_mflo(1) 1633 #define mflo1() _umips_dsp_mflo(1)
1625 #define mflo2() _umips_dsp_mflo(2) 1634 #define mflo2() _umips_dsp_mflo(2)
1626 #define mflo3() _umips_dsp_mflo(3) 1635 #define mflo3() _umips_dsp_mflo(3)
1627 1636
1628 #define mfhi0() _umips_dsp_mfhi(0) 1637 #define mfhi0() _umips_dsp_mfhi(0)
1629 #define mfhi1() _umips_dsp_mfhi(1) 1638 #define mfhi1() _umips_dsp_mfhi(1)
1630 #define mfhi2() _umips_dsp_mfhi(2) 1639 #define mfhi2() _umips_dsp_mfhi(2)
1631 #define mfhi3() _umips_dsp_mfhi(3) 1640 #define mfhi3() _umips_dsp_mfhi(3)
1632 1641
1633 #define mtlo0(x) _umips_dsp_mtlo(x, 0) 1642 #define mtlo0(x) _umips_dsp_mtlo(x, 0)
1634 #define mtlo1(x) _umips_dsp_mtlo(x, 1) 1643 #define mtlo1(x) _umips_dsp_mtlo(x, 1)
1635 #define mtlo2(x) _umips_dsp_mtlo(x, 2) 1644 #define mtlo2(x) _umips_dsp_mtlo(x, 2)
1636 #define mtlo3(x) _umips_dsp_mtlo(x, 3) 1645 #define mtlo3(x) _umips_dsp_mtlo(x, 3)
1637 1646
1638 #define mthi0(x) _umips_dsp_mthi(x, 0) 1647 #define mthi0(x) _umips_dsp_mthi(x, 0)
1639 #define mthi1(x) _umips_dsp_mthi(x, 1) 1648 #define mthi1(x) _umips_dsp_mthi(x, 1)
1640 #define mthi2(x) _umips_dsp_mthi(x, 2) 1649 #define mthi2(x) _umips_dsp_mthi(x, 2)
1641 #define mthi3(x) _umips_dsp_mthi(x, 3) 1650 #define mthi3(x) _umips_dsp_mthi(x, 3)
1642 1651
1643 #else /* !CONFIG_CPU_MICROMIPS */ 1652 #else /* !CONFIG_CPU_MICROMIPS */
1644 #define rddsp(mask) \ 1653 #define rddsp(mask) \
1645 ({ \ 1654 ({ \
1646 unsigned int __res; \ 1655 unsigned int __res; \
1647 \ 1656 \
1648 __asm__ __volatile__( \ 1657 __asm__ __volatile__( \
1649 " .set push \n" \ 1658 " .set push \n" \
1650 " .set noat \n" \ 1659 " .set noat \n" \
1651 " # rddsp $1, %x1 \n" \ 1660 " # rddsp $1, %x1 \n" \
1652 " .word 0x7c000cb8 | (%x1 << 16) \n" \ 1661 " .word 0x7c000cb8 | (%x1 << 16) \n" \
1653 " move %0, $1 \n" \ 1662 " move %0, $1 \n" \
1654 " .set pop \n" \ 1663 " .set pop \n" \
1655 : "=r" (__res) \ 1664 : "=r" (__res) \
1656 : "i" (mask)); \ 1665 : "i" (mask)); \
1657 __res; \ 1666 __res; \
1658 }) 1667 })
1659 1668
1660 #define wrdsp(val, mask) \ 1669 #define wrdsp(val, mask) \
1661 do { \ 1670 do { \
1662 __asm__ __volatile__( \ 1671 __asm__ __volatile__( \
1663 " .set push \n" \ 1672 " .set push \n" \
1664 " .set noat \n" \ 1673 " .set noat \n" \
1665 " move $1, %0 \n" \ 1674 " move $1, %0 \n" \
1666 " # wrdsp $1, %x1 \n" \ 1675 " # wrdsp $1, %x1 \n" \
1667 " .word 0x7c2004f8 | (%x1 << 11) \n" \ 1676 " .word 0x7c2004f8 | (%x1 << 11) \n" \
1668 " .set pop \n" \ 1677 " .set pop \n" \
1669 : \ 1678 : \
1670 : "r" (val), "i" (mask)); \ 1679 : "r" (val), "i" (mask)); \
1671 } while (0) 1680 } while (0)
1672 1681
1673 #define _dsp_mfxxx(ins) \ 1682 #define _dsp_mfxxx(ins) \
1674 ({ \ 1683 ({ \
1675 unsigned long __treg; \ 1684 unsigned long __treg; \
1676 \ 1685 \
1677 __asm__ __volatile__( \ 1686 __asm__ __volatile__( \
1678 " .set push \n" \ 1687 " .set push \n" \
1679 " .set noat \n" \ 1688 " .set noat \n" \
1680 " .word (0x00000810 | %1) \n" \ 1689 " .word (0x00000810 | %1) \n" \
1681 " move %0, $1 \n" \ 1690 " move %0, $1 \n" \
1682 " .set pop \n" \ 1691 " .set pop \n" \
1683 : "=r" (__treg) \ 1692 : "=r" (__treg) \
1684 : "i" (ins)); \ 1693 : "i" (ins)); \
1685 __treg; \ 1694 __treg; \
1686 }) 1695 })
1687 1696
1688 #define _dsp_mtxxx(val, ins) \ 1697 #define _dsp_mtxxx(val, ins) \
1689 do { \ 1698 do { \
1690 __asm__ __volatile__( \ 1699 __asm__ __volatile__( \
1691 " .set push \n" \ 1700 " .set push \n" \
1692 " .set noat \n" \ 1701 " .set noat \n" \
1693 " move $1, %0 \n" \ 1702 " move $1, %0 \n" \
1694 " .word (0x00200011 | %1) \n" \ 1703 " .word (0x00200011 | %1) \n" \
1695 " .set pop \n" \ 1704 " .set pop \n" \
1696 : \ 1705 : \
1697 : "r" (val), "i" (ins)); \ 1706 : "r" (val), "i" (ins)); \
1698 } while (0) 1707 } while (0)
1699 1708
1700 #define _dsp_mflo(reg) _dsp_mfxxx((reg << 21) | 0x0002) 1709 #define _dsp_mflo(reg) _dsp_mfxxx((reg << 21) | 0x0002)
1701 #define _dsp_mfhi(reg) _dsp_mfxxx((reg << 21) | 0x0000) 1710 #define _dsp_mfhi(reg) _dsp_mfxxx((reg << 21) | 0x0000)
1702 1711
1703 #define _dsp_mtlo(val, reg) _dsp_mtxxx(val, ((reg << 11) | 0x0002)) 1712 #define _dsp_mtlo(val, reg) _dsp_mtxxx(val, ((reg << 11) | 0x0002))
1704 #define _dsp_mthi(val, reg) _dsp_mtxxx(val, ((reg << 11) | 0x0000)) 1713 #define _dsp_mthi(val, reg) _dsp_mtxxx(val, ((reg << 11) | 0x0000))
1705 1714
1706 #define mflo0() _dsp_mflo(0) 1715 #define mflo0() _dsp_mflo(0)
1707 #define mflo1() _dsp_mflo(1) 1716 #define mflo1() _dsp_mflo(1)
1708 #define mflo2() _dsp_mflo(2) 1717 #define mflo2() _dsp_mflo(2)
1709 #define mflo3() _dsp_mflo(3) 1718 #define mflo3() _dsp_mflo(3)
1710 1719
1711 #define mfhi0() _dsp_mfhi(0) 1720 #define mfhi0() _dsp_mfhi(0)
1712 #define mfhi1() _dsp_mfhi(1) 1721 #define mfhi1() _dsp_mfhi(1)
1713 #define mfhi2() _dsp_mfhi(2) 1722 #define mfhi2() _dsp_mfhi(2)
1714 #define mfhi3() _dsp_mfhi(3) 1723 #define mfhi3() _dsp_mfhi(3)
1715 1724
1716 #define mtlo0(x) _dsp_mtlo(x, 0) 1725 #define mtlo0(x) _dsp_mtlo(x, 0)
1717 #define mtlo1(x) _dsp_mtlo(x, 1) 1726 #define mtlo1(x) _dsp_mtlo(x, 1)
1718 #define mtlo2(x) _dsp_mtlo(x, 2) 1727 #define mtlo2(x) _dsp_mtlo(x, 2)
1719 #define mtlo3(x) _dsp_mtlo(x, 3) 1728 #define mtlo3(x) _dsp_mtlo(x, 3)
1720 1729
1721 #define mthi0(x) _dsp_mthi(x, 0) 1730 #define mthi0(x) _dsp_mthi(x, 0)
1722 #define mthi1(x) _dsp_mthi(x, 1) 1731 #define mthi1(x) _dsp_mthi(x, 1)
1723 #define mthi2(x) _dsp_mthi(x, 2) 1732 #define mthi2(x) _dsp_mthi(x, 2)
1724 #define mthi3(x) _dsp_mthi(x, 3) 1733 #define mthi3(x) _dsp_mthi(x, 3)
1725 1734
1726 #endif /* CONFIG_CPU_MICROMIPS */ 1735 #endif /* CONFIG_CPU_MICROMIPS */
1727 #endif 1736 #endif
1728 1737
1729 /* 1738 /*
1730 * TLB operations. 1739 * TLB operations.
1731 * 1740 *
1732 * It is the responsibility of the caller to take care of any TLB hazards. 1741 * It is the responsibility of the caller to take care of any TLB hazards.
1733 */ 1742 */
1734 static inline void tlb_probe(void) 1743 static inline void tlb_probe(void)
1735 { 1744 {
1736 __asm__ __volatile__( 1745 __asm__ __volatile__(
1737 ".set noreorder\n\t" 1746 ".set noreorder\n\t"
1738 "tlbp\n\t" 1747 "tlbp\n\t"
1739 ".set reorder"); 1748 ".set reorder");
1740 } 1749 }
1741 1750
1742 static inline void tlb_read(void) 1751 static inline void tlb_read(void)
1743 { 1752 {
1744 #if MIPS34K_MISSED_ITLB_WAR 1753 #if MIPS34K_MISSED_ITLB_WAR
1745 int res = 0; 1754 int res = 0;
1746 1755
1747 __asm__ __volatile__( 1756 __asm__ __volatile__(
1748 " .set push \n" 1757 " .set push \n"
1749 " .set noreorder \n" 1758 " .set noreorder \n"
1750 " .set noat \n" 1759 " .set noat \n"
1751 " .set mips32r2 \n" 1760 " .set mips32r2 \n"
1752 " .word 0x41610001 # dvpe $1 \n" 1761 " .word 0x41610001 # dvpe $1 \n"
1753 " move %0, $1 \n" 1762 " move %0, $1 \n"
1754 " ehb \n" 1763 " ehb \n"
1755 " .set pop \n" 1764 " .set pop \n"
1756 : "=r" (res)); 1765 : "=r" (res));
1757 1766
1758 instruction_hazard(); 1767 instruction_hazard();
1759 #endif 1768 #endif
1760 1769
1761 __asm__ __volatile__( 1770 __asm__ __volatile__(
1762 ".set noreorder\n\t" 1771 ".set noreorder\n\t"
1763 "tlbr\n\t" 1772 "tlbr\n\t"
1764 ".set reorder"); 1773 ".set reorder");
1765 1774
1766 #if MIPS34K_MISSED_ITLB_WAR 1775 #if MIPS34K_MISSED_ITLB_WAR
1767 if ((res & _ULCAST_(1))) 1776 if ((res & _ULCAST_(1)))
1768 __asm__ __volatile__( 1777 __asm__ __volatile__(
1769 " .set push \n" 1778 " .set push \n"
1770 " .set noreorder \n" 1779 " .set noreorder \n"
1771 " .set noat \n" 1780 " .set noat \n"
1772 " .set mips32r2 \n" 1781 " .set mips32r2 \n"
1773 " .word 0x41600021 # evpe \n" 1782 " .word 0x41600021 # evpe \n"
1774 " ehb \n" 1783 " ehb \n"
1775 " .set pop \n"); 1784 " .set pop \n");
1776 #endif 1785 #endif
1777 } 1786 }
1778 1787
1779 static inline void tlb_write_indexed(void) 1788 static inline void tlb_write_indexed(void)
1780 { 1789 {
1781 __asm__ __volatile__( 1790 __asm__ __volatile__(
1782 ".set noreorder\n\t" 1791 ".set noreorder\n\t"
1783 "tlbwi\n\t" 1792 "tlbwi\n\t"
1784 ".set reorder"); 1793 ".set reorder");
1785 } 1794 }
1786 1795
1787 static inline void tlb_write_random(void) 1796 static inline void tlb_write_random(void)
1788 { 1797 {
1789 __asm__ __volatile__( 1798 __asm__ __volatile__(
1790 ".set noreorder\n\t" 1799 ".set noreorder\n\t"
1791 "tlbwr\n\t" 1800 "tlbwr\n\t"
1792 ".set reorder"); 1801 ".set reorder");
1793 } 1802 }
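
As the comment above notes, these TLB helpers deliberately leave hazard handling to the caller. A typical probe-and-read sequence looks roughly like the sketch below (illustrative only; the helper name is invented, and the hazard barriers are assumed to come from <asm/hazards.h>, which is not part of this diff):

	static inline int sketch_probe_tlb(unsigned long entryhi)
	{
		int idx;

		write_c0_entryhi(entryhi);	/* VA | ASID to look up */
		mtc0_tlbw_hazard();		/* let the EntryHi write settle */
		tlb_probe();
		tlb_probe_hazard();		/* Index is now valid */
		idx = read_c0_index();		/* negative means no matching entry */
		if (idx >= 0)
			tlb_read();		/* fills EntryHi/EntryLo0/EntryLo1/PageMask */
		return idx;
	}
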
1794 1803
1795 /* 1804 /*
1796 * Manipulate bits in a c0 register. 1805 * Manipulate bits in a c0 register.
1797 */ 1806 */
1798 #define __BUILD_SET_C0(name) \ 1807 #define __BUILD_SET_C0(name) \
1799 static inline unsigned int \ 1808 static inline unsigned int \
1800 set_c0_##name(unsigned int set) \ 1809 set_c0_##name(unsigned int set) \
1801 { \ 1810 { \
1802 unsigned int res, new; \ 1811 unsigned int res, new; \
1803 \ 1812 \
1804 res = read_c0_##name(); \ 1813 res = read_c0_##name(); \
1805 new = res | set; \ 1814 new = res | set; \
1806 write_c0_##name(new); \ 1815 write_c0_##name(new); \
1807 \ 1816 \
1808 return res; \ 1817 return res; \
1809 } \ 1818 } \
1810 \ 1819 \
1811 static inline unsigned int \ 1820 static inline unsigned int \
1812 clear_c0_##name(unsigned int clear) \ 1821 clear_c0_##name(unsigned int clear) \
1813 { \ 1822 { \
1814 unsigned int res, new; \ 1823 unsigned int res, new; \
1815 \ 1824 \
1816 res = read_c0_##name(); \ 1825 res = read_c0_##name(); \
1817 new = res & ~clear; \ 1826 new = res & ~clear; \
1818 write_c0_##name(new); \ 1827 write_c0_##name(new); \
1819 \ 1828 \
1820 return res; \ 1829 return res; \
1821 } \ 1830 } \
1822 \ 1831 \
1823 static inline unsigned int \ 1832 static inline unsigned int \
1824 change_c0_##name(unsigned int change, unsigned int val) \ 1833 change_c0_##name(unsigned int change, unsigned int val) \
1825 { \ 1834 { \
1826 unsigned int res, new; \ 1835 unsigned int res, new; \
1827 \ 1836 \
1828 res = read_c0_##name(); \ 1837 res = read_c0_##name(); \
1829 new = res & ~change; \ 1838 new = res & ~change; \
1830 new |= (val & change); \ 1839 new |= (val & change); \
1831 write_c0_##name(new); \ 1840 write_c0_##name(new); \
1832 \ 1841 \
1833 return res; \ 1842 return res; \
1834 } 1843 }
1835 1844
1836 __BUILD_SET_C0(status) 1845 __BUILD_SET_C0(status)
1837 __BUILD_SET_C0(cause) 1846 __BUILD_SET_C0(cause)
1838 __BUILD_SET_C0(config) 1847 __BUILD_SET_C0(config)
1839 __BUILD_SET_C0(config5) 1848 __BUILD_SET_C0(config5)
1840 __BUILD_SET_C0(intcontrol) 1849 __BUILD_SET_C0(intcontrol)
1841 __BUILD_SET_C0(intctl) 1850 __BUILD_SET_C0(intctl)
1842 __BUILD_SET_C0(srsmap) 1851 __BUILD_SET_C0(srsmap)
1843 __BUILD_SET_C0(brcm_config_0) 1852 __BUILD_SET_C0(brcm_config_0)
1844 __BUILD_SET_C0(brcm_bus_pll) 1853 __BUILD_SET_C0(brcm_bus_pll)
1845 __BUILD_SET_C0(brcm_reset) 1854 __BUILD_SET_C0(brcm_reset)
1846 __BUILD_SET_C0(brcm_cmt_intr) 1855 __BUILD_SET_C0(brcm_cmt_intr)
1847 __BUILD_SET_C0(brcm_cmt_ctrl) 1856 __BUILD_SET_C0(brcm_cmt_ctrl)
1848 __BUILD_SET_C0(brcm_config) 1857 __BUILD_SET_C0(brcm_config)
1849 __BUILD_SET_C0(brcm_mode) 1858 __BUILD_SET_C0(brcm_mode)
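
Each __BUILD_SET_C0(name) above expands to three accessors, set_c0_<name>(), clear_c0_<name>() and change_c0_<name>(), all of which hand back the previous register value. A small usage sketch (illustrative only; the helper names are made up, and ST0_IM is assumed to be the Status interrupt-mask field defined earlier in this header):

	/* Mask all CP0 Status IM bits and remember the old value. */
	static inline unsigned int sketch_mask_interrupts(void)
	{
		return clear_c0_status(ST0_IM);
	}

	/* Later: restore exactly the IM bits that were set before. */
	static inline void sketch_restore_interrupts(unsigned int old_status)
	{
		change_c0_status(ST0_IM, old_status);
	}
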
1850 1859
1851 /* 1860 /*
1852 * Return low 10 bits of ebase. 1861 * Return low 10 bits of ebase.
1853 * Note that under KVM (MIPSVZ) this returns vcpu id. 1862 * Note that under KVM (MIPSVZ) this returns vcpu id.
1854 */ 1863 */
1855 static inline unsigned int get_ebase_cpunum(void) 1864 static inline unsigned int get_ebase_cpunum(void)
1856 { 1865 {
1857 return read_c0_ebase() & 0x3ff; 1866 return read_c0_ebase() & 0x3ff;
1858 } 1867 }
1859 1868
1860 #endif /* !__ASSEMBLY__ */ 1869 #endif /* !__ASSEMBLY__ */
1861 1870
1862 #endif /* _ASM_MIPSREGS_H */ 1871 #endif /* _ASM_MIPSREGS_H */
1863 1872
arch/mips/kernel/branch.c
1 /* 1 /*
2 * This file is subject to the terms and conditions of the GNU General Public 2 * This file is subject to the terms and conditions of the GNU General Public
3 * License. See the file "COPYING" in the main directory of this archive 3 * License. See the file "COPYING" in the main directory of this archive
4 * for more details. 4 * for more details.
5 * 5 *
6 * Copyright (C) 1996, 97, 2000, 2001 by Ralf Baechle 6 * Copyright (C) 1996, 97, 2000, 2001 by Ralf Baechle
7 * Copyright (C) 2001 MIPS Technologies, Inc. 7 * Copyright (C) 2001 MIPS Technologies, Inc.
8 */ 8 */
9 #include <linux/kernel.h> 9 #include <linux/kernel.h>
10 #include <linux/sched.h> 10 #include <linux/sched.h>
11 #include <linux/signal.h> 11 #include <linux/signal.h>
12 #include <linux/module.h> 12 #include <linux/module.h>
13 #include <asm/branch.h> 13 #include <asm/branch.h>
14 #include <asm/cpu.h> 14 #include <asm/cpu.h>
15 #include <asm/cpu-features.h> 15 #include <asm/cpu-features.h>
16 #include <asm/fpu.h> 16 #include <asm/fpu.h>
17 #include <asm/fpu_emulator.h> 17 #include <asm/fpu_emulator.h>
18 #include <asm/inst.h> 18 #include <asm/inst.h>
19 #include <asm/ptrace.h> 19 #include <asm/ptrace.h>
20 #include <asm/uaccess.h> 20 #include <asm/uaccess.h>
21 21
22 /* 22 /*
23 * Calculate and return exception PC in case of branch delay slot 23 * Calculate and return exception PC in case of branch delay slot
24 * for microMIPS and MIPS16e. It does not clear the ISA mode bit. 24 * for microMIPS and MIPS16e. It does not clear the ISA mode bit.
25 */ 25 */
26 int __isa_exception_epc(struct pt_regs *regs) 26 int __isa_exception_epc(struct pt_regs *regs)
27 { 27 {
28 unsigned short inst; 28 unsigned short inst;
29 long epc = regs->cp0_epc; 29 long epc = regs->cp0_epc;
30 30
31 /* Calculate exception PC in branch delay slot. */ 31 /* Calculate exception PC in branch delay slot. */
32 if (__get_user(inst, (u16 __user *) msk_isa16_mode(epc))) { 32 if (__get_user(inst, (u16 __user *) msk_isa16_mode(epc))) {
33 /* This should never happen because delay slot was checked. */ 33 /* This should never happen because delay slot was checked. */
34 force_sig(SIGSEGV, current); 34 force_sig(SIGSEGV, current);
35 return epc; 35 return epc;
36 } 36 }
37 if (cpu_has_mips16) { 37 if (cpu_has_mips16) {
38 if (((union mips16e_instruction)inst).ri.opcode 38 if (((union mips16e_instruction)inst).ri.opcode
39 == MIPS16e_jal_op) 39 == MIPS16e_jal_op)
40 epc += 4; 40 epc += 4;
41 else 41 else
42 epc += 2; 42 epc += 2;
43 } else if (mm_insn_16bit(inst)) 43 } else if (mm_insn_16bit(inst))
44 epc += 2; 44 epc += 2;
45 else 45 else
46 epc += 4; 46 epc += 4;
47 47
48 return epc; 48 return epc;
49 } 49 }
50 50
51 /* (microMIPS) Convert 16-bit register encoding to 32-bit register encoding. */ 51 /* (microMIPS) Convert 16-bit register encoding to 32-bit register encoding. */
52 static const unsigned int reg16to32map[8] = {16, 17, 2, 3, 4, 5, 6, 7}; 52 static const unsigned int reg16to32map[8] = {16, 17, 2, 3, 4, 5, 6, 7};
53 53
54 int __mm_isBranchInstr(struct pt_regs *regs, struct mm_decoded_insn dec_insn, 54 int __mm_isBranchInstr(struct pt_regs *regs, struct mm_decoded_insn dec_insn,
55 unsigned long *contpc) 55 unsigned long *contpc)
56 { 56 {
57 union mips_instruction insn = (union mips_instruction)dec_insn.insn; 57 union mips_instruction insn = (union mips_instruction)dec_insn.insn;
58 int bc_false = 0; 58 int bc_false = 0;
59 unsigned int fcr31; 59 unsigned int fcr31;
60 unsigned int bit; 60 unsigned int bit;
61 61
62 if (!cpu_has_mmips) 62 if (!cpu_has_mmips)
63 return 0; 63 return 0;
64 64
65 switch (insn.mm_i_format.opcode) { 65 switch (insn.mm_i_format.opcode) {
66 case mm_pool32a_op: 66 case mm_pool32a_op:
67 if ((insn.mm_i_format.simmediate & MM_POOL32A_MINOR_MASK) == 67 if ((insn.mm_i_format.simmediate & MM_POOL32A_MINOR_MASK) ==
68 mm_pool32axf_op) { 68 mm_pool32axf_op) {
69 switch (insn.mm_i_format.simmediate >> 69 switch (insn.mm_i_format.simmediate >>
70 MM_POOL32A_MINOR_SHIFT) { 70 MM_POOL32A_MINOR_SHIFT) {
71 case mm_jalr_op: 71 case mm_jalr_op:
72 case mm_jalrhb_op: 72 case mm_jalrhb_op:
73 case mm_jalrs_op: 73 case mm_jalrs_op:
74 case mm_jalrshb_op: 74 case mm_jalrshb_op:
75 if (insn.mm_i_format.rt != 0) /* Not mm_jr */ 75 if (insn.mm_i_format.rt != 0) /* Not mm_jr */
76 regs->regs[insn.mm_i_format.rt] = 76 regs->regs[insn.mm_i_format.rt] =
77 regs->cp0_epc + 77 regs->cp0_epc +
78 dec_insn.pc_inc + 78 dec_insn.pc_inc +
79 dec_insn.next_pc_inc; 79 dec_insn.next_pc_inc;
80 *contpc = regs->regs[insn.mm_i_format.rs]; 80 *contpc = regs->regs[insn.mm_i_format.rs];
81 return 1; 81 return 1;
82 } 82 }
83 } 83 }
84 break; 84 break;
85 case mm_pool32i_op: 85 case mm_pool32i_op:
86 switch (insn.mm_i_format.rt) { 86 switch (insn.mm_i_format.rt) {
87 case mm_bltzals_op: 87 case mm_bltzals_op:
88 case mm_bltzal_op: 88 case mm_bltzal_op:
89 regs->regs[31] = regs->cp0_epc + 89 regs->regs[31] = regs->cp0_epc +
90 dec_insn.pc_inc + 90 dec_insn.pc_inc +
91 dec_insn.next_pc_inc; 91 dec_insn.next_pc_inc;
92 /* Fall through */ 92 /* Fall through */
93 case mm_bltz_op: 93 case mm_bltz_op:
94 if ((long)regs->regs[insn.mm_i_format.rs] < 0) 94 if ((long)regs->regs[insn.mm_i_format.rs] < 0)
95 *contpc = regs->cp0_epc + 95 *contpc = regs->cp0_epc +
96 dec_insn.pc_inc + 96 dec_insn.pc_inc +
97 (insn.mm_i_format.simmediate << 1); 97 (insn.mm_i_format.simmediate << 1);
98 else 98 else
99 *contpc = regs->cp0_epc + 99 *contpc = regs->cp0_epc +
100 dec_insn.pc_inc + 100 dec_insn.pc_inc +
101 dec_insn.next_pc_inc; 101 dec_insn.next_pc_inc;
102 return 1; 102 return 1;
103 case mm_bgezals_op: 103 case mm_bgezals_op:
104 case mm_bgezal_op: 104 case mm_bgezal_op:
105 regs->regs[31] = regs->cp0_epc + 105 regs->regs[31] = regs->cp0_epc +
106 dec_insn.pc_inc + 106 dec_insn.pc_inc +
107 dec_insn.next_pc_inc; 107 dec_insn.next_pc_inc;
108 /* Fall through */ 108 /* Fall through */
109 case mm_bgez_op: 109 case mm_bgez_op:
110 if ((long)regs->regs[insn.mm_i_format.rs] >= 0) 110 if ((long)regs->regs[insn.mm_i_format.rs] >= 0)
111 *contpc = regs->cp0_epc + 111 *contpc = regs->cp0_epc +
112 dec_insn.pc_inc + 112 dec_insn.pc_inc +
113 (insn.mm_i_format.simmediate << 1); 113 (insn.mm_i_format.simmediate << 1);
114 else 114 else
115 *contpc = regs->cp0_epc + 115 *contpc = regs->cp0_epc +
116 dec_insn.pc_inc + 116 dec_insn.pc_inc +
117 dec_insn.next_pc_inc; 117 dec_insn.next_pc_inc;
118 return 1; 118 return 1;
119 case mm_blez_op: 119 case mm_blez_op:
120 if ((long)regs->regs[insn.mm_i_format.rs] <= 0) 120 if ((long)regs->regs[insn.mm_i_format.rs] <= 0)
121 *contpc = regs->cp0_epc + 121 *contpc = regs->cp0_epc +
122 dec_insn.pc_inc + 122 dec_insn.pc_inc +
123 (insn.mm_i_format.simmediate << 1); 123 (insn.mm_i_format.simmediate << 1);
124 else 124 else
125 *contpc = regs->cp0_epc + 125 *contpc = regs->cp0_epc +
126 dec_insn.pc_inc + 126 dec_insn.pc_inc +
127 dec_insn.next_pc_inc; 127 dec_insn.next_pc_inc;
128 return 1; 128 return 1;
129 case mm_bgtz_op: 129 case mm_bgtz_op:
130 if ((long)regs->regs[insn.mm_i_format.rs] <= 0) 130 if ((long)regs->regs[insn.mm_i_format.rs] <= 0)
131 *contpc = regs->cp0_epc + 131 *contpc = regs->cp0_epc +
132 dec_insn.pc_inc + 132 dec_insn.pc_inc +
133 (insn.mm_i_format.simmediate << 1); 133 (insn.mm_i_format.simmediate << 1);
134 else 134 else
135 *contpc = regs->cp0_epc + 135 *contpc = regs->cp0_epc +
136 dec_insn.pc_inc + 136 dec_insn.pc_inc +
137 dec_insn.next_pc_inc; 137 dec_insn.next_pc_inc;
138 return 1; 138 return 1;
139 case mm_bc2f_op: 139 case mm_bc2f_op:
140 case mm_bc1f_op: 140 case mm_bc1f_op:
141 bc_false = 1; 141 bc_false = 1;
142 /* Fall through */ 142 /* Fall through */
143 case mm_bc2t_op: 143 case mm_bc2t_op:
144 case mm_bc1t_op: 144 case mm_bc1t_op:
145 preempt_disable(); 145 preempt_disable();
146 if (is_fpu_owner()) 146 if (is_fpu_owner())
147 asm volatile("cfc1\t%0,$31" : "=r" (fcr31)); 147 fcr31 = read_32bit_cp1_register(CP1_STATUS);
148 else 148 else
149 fcr31 = current->thread.fpu.fcr31; 149 fcr31 = current->thread.fpu.fcr31;
150 preempt_enable(); 150 preempt_enable();
151 151
152 if (bc_false) 152 if (bc_false)
153 fcr31 = ~fcr31; 153 fcr31 = ~fcr31;
154 154
155 bit = (insn.mm_i_format.rs >> 2); 155 bit = (insn.mm_i_format.rs >> 2);
156 bit += (bit != 0); 156 bit += (bit != 0);
157 bit += 23; 157 bit += 23;
158 if (fcr31 & (1 << bit)) 158 if (fcr31 & (1 << bit))
159 *contpc = regs->cp0_epc + 159 *contpc = regs->cp0_epc +
160 dec_insn.pc_inc + 160 dec_insn.pc_inc +
161 (insn.mm_i_format.simmediate << 1); 161 (insn.mm_i_format.simmediate << 1);
162 else 162 else
163 *contpc = regs->cp0_epc + 163 *contpc = regs->cp0_epc +
164 dec_insn.pc_inc + dec_insn.next_pc_inc; 164 dec_insn.pc_inc + dec_insn.next_pc_inc;
165 return 1; 165 return 1;
166 } 166 }
167 break; 167 break;
168 case mm_pool16c_op: 168 case mm_pool16c_op:
169 switch (insn.mm_i_format.rt) { 169 switch (insn.mm_i_format.rt) {
170 case mm_jalr16_op: 170 case mm_jalr16_op:
171 case mm_jalrs16_op: 171 case mm_jalrs16_op:
172 regs->regs[31] = regs->cp0_epc + 172 regs->regs[31] = regs->cp0_epc +
173 dec_insn.pc_inc + dec_insn.next_pc_inc; 173 dec_insn.pc_inc + dec_insn.next_pc_inc;
174 /* Fall through */ 174 /* Fall through */
175 case mm_jr16_op: 175 case mm_jr16_op:
176 *contpc = regs->regs[insn.mm_i_format.rs]; 176 *contpc = regs->regs[insn.mm_i_format.rs];
177 return 1; 177 return 1;
178 } 178 }
179 break; 179 break;
180 case mm_beqz16_op: 180 case mm_beqz16_op:
181 if ((long)regs->regs[reg16to32map[insn.mm_b1_format.rs]] == 0) 181 if ((long)regs->regs[reg16to32map[insn.mm_b1_format.rs]] == 0)
182 *contpc = regs->cp0_epc + 182 *contpc = regs->cp0_epc +
183 dec_insn.pc_inc + 183 dec_insn.pc_inc +
184 (insn.mm_b1_format.simmediate << 1); 184 (insn.mm_b1_format.simmediate << 1);
185 else 185 else
186 *contpc = regs->cp0_epc + 186 *contpc = regs->cp0_epc +
187 dec_insn.pc_inc + dec_insn.next_pc_inc; 187 dec_insn.pc_inc + dec_insn.next_pc_inc;
188 return 1; 188 return 1;
189 case mm_bnez16_op: 189 case mm_bnez16_op:
190 if ((long)regs->regs[reg16to32map[insn.mm_b1_format.rs]] != 0) 190 if ((long)regs->regs[reg16to32map[insn.mm_b1_format.rs]] != 0)
191 *contpc = regs->cp0_epc + 191 *contpc = regs->cp0_epc +
192 dec_insn.pc_inc + 192 dec_insn.pc_inc +
193 (insn.mm_b1_format.simmediate << 1); 193 (insn.mm_b1_format.simmediate << 1);
194 else 194 else
195 *contpc = regs->cp0_epc + 195 *contpc = regs->cp0_epc +
196 dec_insn.pc_inc + dec_insn.next_pc_inc; 196 dec_insn.pc_inc + dec_insn.next_pc_inc;
197 return 1; 197 return 1;
198 case mm_b16_op: 198 case mm_b16_op:
199 *contpc = regs->cp0_epc + dec_insn.pc_inc + 199 *contpc = regs->cp0_epc + dec_insn.pc_inc +
200 (insn.mm_b0_format.simmediate << 1); 200 (insn.mm_b0_format.simmediate << 1);
201 return 1; 201 return 1;
202 case mm_beq32_op: 202 case mm_beq32_op:
203 if (regs->regs[insn.mm_i_format.rs] == 203 if (regs->regs[insn.mm_i_format.rs] ==
204 regs->regs[insn.mm_i_format.rt]) 204 regs->regs[insn.mm_i_format.rt])
205 *contpc = regs->cp0_epc + 205 *contpc = regs->cp0_epc +
206 dec_insn.pc_inc + 206 dec_insn.pc_inc +
207 (insn.mm_i_format.simmediate << 1); 207 (insn.mm_i_format.simmediate << 1);
208 else 208 else
209 *contpc = regs->cp0_epc + 209 *contpc = regs->cp0_epc +
210 dec_insn.pc_inc + 210 dec_insn.pc_inc +
211 dec_insn.next_pc_inc; 211 dec_insn.next_pc_inc;
212 return 1; 212 return 1;
213 case mm_bne32_op: 213 case mm_bne32_op:
214 if (regs->regs[insn.mm_i_format.rs] != 214 if (regs->regs[insn.mm_i_format.rs] !=
215 regs->regs[insn.mm_i_format.rt]) 215 regs->regs[insn.mm_i_format.rt])
216 *contpc = regs->cp0_epc + 216 *contpc = regs->cp0_epc +
217 dec_insn.pc_inc + 217 dec_insn.pc_inc +
218 (insn.mm_i_format.simmediate << 1); 218 (insn.mm_i_format.simmediate << 1);
219 else 219 else
220 *contpc = regs->cp0_epc + 220 *contpc = regs->cp0_epc +
221 dec_insn.pc_inc + dec_insn.next_pc_inc; 221 dec_insn.pc_inc + dec_insn.next_pc_inc;
222 return 1; 222 return 1;
223 case mm_jalx32_op: 223 case mm_jalx32_op:
224 regs->regs[31] = regs->cp0_epc + 224 regs->regs[31] = regs->cp0_epc +
225 dec_insn.pc_inc + dec_insn.next_pc_inc; 225 dec_insn.pc_inc + dec_insn.next_pc_inc;
226 *contpc = regs->cp0_epc + dec_insn.pc_inc; 226 *contpc = regs->cp0_epc + dec_insn.pc_inc;
227 *contpc >>= 28; 227 *contpc >>= 28;
228 *contpc <<= 28; 228 *contpc <<= 28;
229 *contpc |= (insn.j_format.target << 2); 229 *contpc |= (insn.j_format.target << 2);
230 return 1; 230 return 1;
231 case mm_jals32_op: 231 case mm_jals32_op:
232 case mm_jal32_op: 232 case mm_jal32_op:
233 regs->regs[31] = regs->cp0_epc + 233 regs->regs[31] = regs->cp0_epc +
234 dec_insn.pc_inc + dec_insn.next_pc_inc; 234 dec_insn.pc_inc + dec_insn.next_pc_inc;
235 /* Fall through */ 235 /* Fall through */
236 case mm_j32_op: 236 case mm_j32_op:
237 *contpc = regs->cp0_epc + dec_insn.pc_inc; 237 *contpc = regs->cp0_epc + dec_insn.pc_inc;
238 *contpc >>= 27; 238 *contpc >>= 27;
239 *contpc <<= 27; 239 *contpc <<= 27;
240 *contpc |= (insn.j_format.target << 1); 240 *contpc |= (insn.j_format.target << 1);
241 set_isa16_mode(*contpc); 241 set_isa16_mode(*contpc);
242 return 1; 242 return 1;
243 } 243 }
244 return 0; 244 return 0;
245 } 245 }
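
Two details of the bc1x handling above are worth spelling out. First, the open-coded cfc1 is gone: the live FCSR is now read through read_32bit_cp1_register(CP1_STATUS), which picks up the ".set hardfloat" directive when the assembler requires it. Second, the "bit" arithmetic maps a condition-code number onto its FCSR bit: cc0 lives in bit 23 and cc1..cc7 in bits 25..31, so bit 24 (the FS flag) has to be skipped. Condensed into a sketch (illustrative helper, not part of the patch):

	static inline unsigned int sketch_fpu_cc_bit(unsigned int cc)
	{
		return cc ? cc + 24 : 23;	/* cc0 -> 23, cc1..cc7 -> 25..31 */
	}
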
246 246
247 /* 247 /*
248 * Compute return address and emulate branch in microMIPS mode after an 248 * Compute return address and emulate branch in microMIPS mode after an
249 * exception only. It does not handle compact branches/jumps and cannot 249 * exception only. It does not handle compact branches/jumps and cannot
250 * be used in interrupt context. (Compact branches/jumps do not cause 250 * be used in interrupt context. (Compact branches/jumps do not cause
251 * exceptions.) 251 * exceptions.)
252 */ 252 */
253 int __microMIPS_compute_return_epc(struct pt_regs *regs) 253 int __microMIPS_compute_return_epc(struct pt_regs *regs)
254 { 254 {
255 u16 __user *pc16; 255 u16 __user *pc16;
256 u16 halfword; 256 u16 halfword;
257 unsigned int word; 257 unsigned int word;
258 unsigned long contpc; 258 unsigned long contpc;
259 struct mm_decoded_insn mminsn = { 0 }; 259 struct mm_decoded_insn mminsn = { 0 };
260 260
261 mminsn.micro_mips_mode = 1; 261 mminsn.micro_mips_mode = 1;
262 262
263 /* This load never faults. */ 263 /* This load never faults. */
264 pc16 = (unsigned short __user *)msk_isa16_mode(regs->cp0_epc); 264 pc16 = (unsigned short __user *)msk_isa16_mode(regs->cp0_epc);
265 __get_user(halfword, pc16); 265 __get_user(halfword, pc16);
266 pc16++; 266 pc16++;
267 contpc = regs->cp0_epc + 2; 267 contpc = regs->cp0_epc + 2;
268 word = ((unsigned int)halfword << 16); 268 word = ((unsigned int)halfword << 16);
269 mminsn.pc_inc = 2; 269 mminsn.pc_inc = 2;
270 270
271 if (!mm_insn_16bit(halfword)) { 271 if (!mm_insn_16bit(halfword)) {
272 __get_user(halfword, pc16); 272 __get_user(halfword, pc16);
273 pc16++; 273 pc16++;
274 contpc = regs->cp0_epc + 4; 274 contpc = regs->cp0_epc + 4;
275 mminsn.pc_inc = 4; 275 mminsn.pc_inc = 4;
276 word |= halfword; 276 word |= halfword;
277 } 277 }
278 mminsn.insn = word; 278 mminsn.insn = word;
279 279
280 if (get_user(halfword, pc16)) 280 if (get_user(halfword, pc16))
281 goto sigsegv; 281 goto sigsegv;
282 mminsn.next_pc_inc = 2; 282 mminsn.next_pc_inc = 2;
283 word = ((unsigned int)halfword << 16); 283 word = ((unsigned int)halfword << 16);
284 284
285 if (!mm_insn_16bit(halfword)) { 285 if (!mm_insn_16bit(halfword)) {
286 pc16++; 286 pc16++;
287 if (get_user(halfword, pc16)) 287 if (get_user(halfword, pc16))
288 goto sigsegv; 288 goto sigsegv;
289 mminsn.next_pc_inc = 4; 289 mminsn.next_pc_inc = 4;
290 word |= halfword; 290 word |= halfword;
291 } 291 }
292 mminsn.next_insn = word; 292 mminsn.next_insn = word;
293 293
294 mm_isBranchInstr(regs, mminsn, &contpc); 294 mm_isBranchInstr(regs, mminsn, &contpc);
295 295
296 regs->cp0_epc = contpc; 296 regs->cp0_epc = contpc;
297 297
298 return 0; 298 return 0;
299 299
300 sigsegv: 300 sigsegv:
301 force_sig(SIGSEGV, current); 301 force_sig(SIGSEGV, current);
302 return -EFAULT; 302 return -EFAULT;
303 } 303 }
304 304
305 /* 305 /*
306 * Compute return address and emulate branch in MIPS16e mode after an 306 * Compute return address and emulate branch in MIPS16e mode after an
307 * exception only. It does not handle compact branches/jumps and cannot 307 * exception only. It does not handle compact branches/jumps and cannot
308 * be used in interrupt context. (Compact branches/jumps do not cause 308 * be used in interrupt context. (Compact branches/jumps do not cause
309 * exceptions.) 309 * exceptions.)
310 */ 310 */
311 int __MIPS16e_compute_return_epc(struct pt_regs *regs) 311 int __MIPS16e_compute_return_epc(struct pt_regs *regs)
312 { 312 {
313 u16 __user *addr; 313 u16 __user *addr;
314 union mips16e_instruction inst; 314 union mips16e_instruction inst;
315 u16 inst2; 315 u16 inst2;
316 u32 fullinst; 316 u32 fullinst;
317 long epc; 317 long epc;
318 318
319 epc = regs->cp0_epc; 319 epc = regs->cp0_epc;
320 320
321 /* Read the instruction. */ 321 /* Read the instruction. */
322 addr = (u16 __user *)msk_isa16_mode(epc); 322 addr = (u16 __user *)msk_isa16_mode(epc);
323 if (__get_user(inst.full, addr)) { 323 if (__get_user(inst.full, addr)) {
324 force_sig(SIGSEGV, current); 324 force_sig(SIGSEGV, current);
325 return -EFAULT; 325 return -EFAULT;
326 } 326 }
327 327
328 switch (inst.ri.opcode) { 328 switch (inst.ri.opcode) {
329 case MIPS16e_extend_op: 329 case MIPS16e_extend_op:
330 regs->cp0_epc += 4; 330 regs->cp0_epc += 4;
331 return 0; 331 return 0;
332 332
333 /* 333 /*
334 * JAL and JALX in MIPS16e mode 334 * JAL and JALX in MIPS16e mode
335 */ 335 */
336 case MIPS16e_jal_op: 336 case MIPS16e_jal_op:
337 addr += 1; 337 addr += 1;
338 if (__get_user(inst2, addr)) { 338 if (__get_user(inst2, addr)) {
339 force_sig(SIGSEGV, current); 339 force_sig(SIGSEGV, current);
340 return -EFAULT; 340 return -EFAULT;
341 } 341 }
342 fullinst = ((unsigned)inst.full << 16) | inst2; 342 fullinst = ((unsigned)inst.full << 16) | inst2;
343 regs->regs[31] = epc + 6; 343 regs->regs[31] = epc + 6;
344 epc += 4; 344 epc += 4;
345 epc >>= 28; 345 epc >>= 28;
346 epc <<= 28; 346 epc <<= 28;
347 /* 347 /*
348 * JAL:5 X:1 TARGET[20-16]:5 TARGET[25:21]:5 TARGET[15:0]:16 348 * JAL:5 X:1 TARGET[20-16]:5 TARGET[25:21]:5 TARGET[15:0]:16
349 * 349 *
350 * ......TARGET[15:0].................TARGET[20:16]........... 350 * ......TARGET[15:0].................TARGET[20:16]...........
351 * ......TARGET[25:21] 351 * ......TARGET[25:21]
352 */ 352 */
353 epc |= 353 epc |=
354 ((fullinst & 0xffff) << 2) | ((fullinst & 0x3e00000) >> 3) | 354 ((fullinst & 0xffff) << 2) | ((fullinst & 0x3e00000) >> 3) |
355 ((fullinst & 0x1f0000) << 7); 355 ((fullinst & 0x1f0000) << 7);
356 if (!inst.jal.x) 356 if (!inst.jal.x)
357 set_isa16_mode(epc); /* Set ISA mode bit. */ 357 set_isa16_mode(epc); /* Set ISA mode bit. */
358 regs->cp0_epc = epc; 358 regs->cp0_epc = epc;
359 return 0; 359 return 0;
360 360
361 /* 361 /*
362 * J(AL)R(C) 362 * J(AL)R(C)
363 */ 363 */
364 case MIPS16e_rr_op: 364 case MIPS16e_rr_op:
365 if (inst.rr.func == MIPS16e_jr_func) { 365 if (inst.rr.func == MIPS16e_jr_func) {
366 366
367 if (inst.rr.ra) 367 if (inst.rr.ra)
368 regs->cp0_epc = regs->regs[31]; 368 regs->cp0_epc = regs->regs[31];
369 else 369 else
370 regs->cp0_epc = 370 regs->cp0_epc =
371 regs->regs[reg16to32[inst.rr.rx]]; 371 regs->regs[reg16to32[inst.rr.rx]];
372 372
373 if (inst.rr.l) { 373 if (inst.rr.l) {
374 if (inst.rr.nd) 374 if (inst.rr.nd)
375 regs->regs[31] = epc + 2; 375 regs->regs[31] = epc + 2;
376 else 376 else
377 regs->regs[31] = epc + 4; 377 regs->regs[31] = epc + 4;
378 } 378 }
379 return 0; 379 return 0;
380 } 380 }
381 break; 381 break;
382 } 382 }
383 383
384 /* 384 /*
385 * All other cases have no branch delay slot and are 16 bits wide. 385 * All other cases have no branch delay slot and are 16 bits wide.
386 * Branches do not cause an exception. 386 * Branches do not cause an exception.
387 */ 387 */
388 regs->cp0_epc += 2; 388 regs->cp0_epc += 2;
389 389
390 return 0; 390 return 0;
391 } 391 }
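
The JAL/JALX target reassembly in __MIPS16e_compute_return_epc() above follows the field layout spelled out in its comment: the 32-bit extended instruction carries TARGET[20:16] in bits 25..21, TARGET[25:21] in bits 20..16 and TARGET[15:0] in the low halfword, and the result replaces the low 28 bits of the address following the delay slot. As a self-contained sketch (illustrative helper only; u32 is assumed from the kernel's <linux/types.h>):

	static inline unsigned long sketch_mips16e_jal_target(unsigned long epc,
							      u32 fullinst)
	{
		unsigned long target;

		target	= (epc + 4) & ~0xfffffffUL;	/* keep the 256 MB region */
		target |= (fullinst & 0x0000ffff) << 2;	/* TARGET[15:0]  -> addr bits 17..2  */
		target |= (fullinst & 0x03e00000) >> 3;	/* TARGET[20:16] -> addr bits 22..18 */
		target |= (fullinst & 0x001f0000) << 7;	/* TARGET[25:21] -> addr bits 27..23 */
		return target;
	}
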
392 392
393 /** 393 /**
394 * __compute_return_epc_for_insn - Computes the return address and emulates 394 * __compute_return_epc_for_insn - Computes the return address and emulates
395 * the branch, if required. 395 * the branch, if required.
396 * 396 *
397 * @regs: Pointer to pt_regs 397 * @regs: Pointer to pt_regs
398 * @insn: branch instruction to decode 398 * @insn: branch instruction to decode
399 * @returns: -EFAULT on error (SIGBUS is forced), and on success 399 * @returns: -EFAULT on error (SIGBUS is forced), and on success
400 * 0 or BRANCH_LIKELY_TAKEN as appropriate after 400 * 0 or BRANCH_LIKELY_TAKEN as appropriate after
401 * evaluating the branch. 401 * evaluating the branch.
402 */ 402 */
403 int __compute_return_epc_for_insn(struct pt_regs *regs, 403 int __compute_return_epc_for_insn(struct pt_regs *regs,
404 union mips_instruction insn) 404 union mips_instruction insn)
405 { 405 {
406 unsigned int bit, fcr31, dspcontrol; 406 unsigned int bit, fcr31, dspcontrol;
407 long epc = regs->cp0_epc; 407 long epc = regs->cp0_epc;
408 int ret = 0; 408 int ret = 0;
409 409
410 switch (insn.i_format.opcode) { 410 switch (insn.i_format.opcode) {
411 /* 411 /*
412 * jr and jalr are in r_format format. 412 * jr and jalr are in r_format format.
413 */ 413 */
414 case spec_op: 414 case spec_op:
415 switch (insn.r_format.func) { 415 switch (insn.r_format.func) {
416 case jalr_op: 416 case jalr_op:
417 regs->regs[insn.r_format.rd] = epc + 8; 417 regs->regs[insn.r_format.rd] = epc + 8;
418 /* Fall through */ 418 /* Fall through */
419 case jr_op: 419 case jr_op:
420 regs->cp0_epc = regs->regs[insn.r_format.rs]; 420 regs->cp0_epc = regs->regs[insn.r_format.rs];
421 break; 421 break;
422 } 422 }
423 break; 423 break;
424 424
425 /* 425 /*
426 * This group contains: 426 * This group contains:
427 * bltz_op, bgez_op, bltzl_op, bgezl_op, 427 * bltz_op, bgez_op, bltzl_op, bgezl_op,
428 * bltzal_op, bgezal_op, bltzall_op, bgezall_op. 428 * bltzal_op, bgezal_op, bltzall_op, bgezall_op.
429 */ 429 */
430 case bcond_op: 430 case bcond_op:
431 switch (insn.i_format.rt) { 431 switch (insn.i_format.rt) {
432 case bltz_op: 432 case bltz_op:
433 case bltzl_op: 433 case bltzl_op:
434 if ((long)regs->regs[insn.i_format.rs] < 0) { 434 if ((long)regs->regs[insn.i_format.rs] < 0) {
435 epc = epc + 4 + (insn.i_format.simmediate << 2); 435 epc = epc + 4 + (insn.i_format.simmediate << 2);
436 if (insn.i_format.rt == bltzl_op) 436 if (insn.i_format.rt == bltzl_op)
437 ret = BRANCH_LIKELY_TAKEN; 437 ret = BRANCH_LIKELY_TAKEN;
438 } else 438 } else
439 epc += 8; 439 epc += 8;
440 regs->cp0_epc = epc; 440 regs->cp0_epc = epc;
441 break; 441 break;
442 442
443 case bgez_op: 443 case bgez_op:
444 case bgezl_op: 444 case bgezl_op:
445 if ((long)regs->regs[insn.i_format.rs] >= 0) { 445 if ((long)regs->regs[insn.i_format.rs] >= 0) {
446 epc = epc + 4 + (insn.i_format.simmediate << 2); 446 epc = epc + 4 + (insn.i_format.simmediate << 2);
447 if (insn.i_format.rt == bgezl_op) 447 if (insn.i_format.rt == bgezl_op)
448 ret = BRANCH_LIKELY_TAKEN; 448 ret = BRANCH_LIKELY_TAKEN;
449 } else 449 } else
450 epc += 8; 450 epc += 8;
451 regs->cp0_epc = epc; 451 regs->cp0_epc = epc;
452 break; 452 break;
453 453
454 case bltzal_op: 454 case bltzal_op:
455 case bltzall_op: 455 case bltzall_op:
456 regs->regs[31] = epc + 8; 456 regs->regs[31] = epc + 8;
457 if ((long)regs->regs[insn.i_format.rs] < 0) { 457 if ((long)regs->regs[insn.i_format.rs] < 0) {
458 epc = epc + 4 + (insn.i_format.simmediate << 2); 458 epc = epc + 4 + (insn.i_format.simmediate << 2);
459 if (insn.i_format.rt == bltzall_op) 459 if (insn.i_format.rt == bltzall_op)
460 ret = BRANCH_LIKELY_TAKEN; 460 ret = BRANCH_LIKELY_TAKEN;
461 } else 461 } else
462 epc += 8; 462 epc += 8;
463 regs->cp0_epc = epc; 463 regs->cp0_epc = epc;
464 break; 464 break;
465 465
466 case bgezal_op: 466 case bgezal_op:
467 case bgezall_op: 467 case bgezall_op:
468 regs->regs[31] = epc + 8; 468 regs->regs[31] = epc + 8;
469 if ((long)regs->regs[insn.i_format.rs] >= 0) { 469 if ((long)regs->regs[insn.i_format.rs] >= 0) {
470 epc = epc + 4 + (insn.i_format.simmediate << 2); 470 epc = epc + 4 + (insn.i_format.simmediate << 2);
471 if (insn.i_format.rt == bgezall_op) 471 if (insn.i_format.rt == bgezall_op)
472 ret = BRANCH_LIKELY_TAKEN; 472 ret = BRANCH_LIKELY_TAKEN;
473 } else 473 } else
474 epc += 8; 474 epc += 8;
475 regs->cp0_epc = epc; 475 regs->cp0_epc = epc;
476 break; 476 break;
477 477
478 case bposge32_op: 478 case bposge32_op:
479 if (!cpu_has_dsp) 479 if (!cpu_has_dsp)
480 goto sigill; 480 goto sigill;
481 481
482 dspcontrol = rddsp(0x01); 482 dspcontrol = rddsp(0x01);
483 483
484 if (dspcontrol >= 32) { 484 if (dspcontrol >= 32) {
485 epc = epc + 4 + (insn.i_format.simmediate << 2); 485 epc = epc + 4 + (insn.i_format.simmediate << 2);
486 } else 486 } else
487 epc += 8; 487 epc += 8;
488 regs->cp0_epc = epc; 488 regs->cp0_epc = epc;
489 break; 489 break;
490 } 490 }
491 break; 491 break;
492 492
493 /* 493 /*
494 * These are unconditional and in j_format. 494 * These are unconditional and in j_format.
495 */ 495 */
496 case jal_op: 496 case jal_op:
497 regs->regs[31] = regs->cp0_epc + 8; 497 regs->regs[31] = regs->cp0_epc + 8;
498 case j_op: 498 case j_op:
499 epc += 4; 499 epc += 4;
500 epc >>= 28; 500 epc >>= 28;
501 epc <<= 28; 501 epc <<= 28;
502 epc |= (insn.j_format.target << 2); 502 epc |= (insn.j_format.target << 2);
503 regs->cp0_epc = epc; 503 regs->cp0_epc = epc;
504 if (insn.i_format.opcode == jalx_op) 504 if (insn.i_format.opcode == jalx_op)
505 set_isa16_mode(regs->cp0_epc); 505 set_isa16_mode(regs->cp0_epc);
506 break; 506 break;
507 507
508 /* 508 /*
509 * These are conditional and in i_format. 509 * These are conditional and in i_format.
510 */ 510 */
511 case beq_op: 511 case beq_op:
512 case beql_op: 512 case beql_op:
513 if (regs->regs[insn.i_format.rs] == 513 if (regs->regs[insn.i_format.rs] ==
514 regs->regs[insn.i_format.rt]) { 514 regs->regs[insn.i_format.rt]) {
515 epc = epc + 4 + (insn.i_format.simmediate << 2); 515 epc = epc + 4 + (insn.i_format.simmediate << 2);
516 if (insn.i_format.opcode == beql_op) 516 if (insn.i_format.opcode == beql_op)
517 ret = BRANCH_LIKELY_TAKEN; 517 ret = BRANCH_LIKELY_TAKEN;
518 } else 518 } else
519 epc += 8; 519 epc += 8;
520 regs->cp0_epc = epc; 520 regs->cp0_epc = epc;
521 break; 521 break;
522 522
523 case bne_op: 523 case bne_op:
524 case bnel_op: 524 case bnel_op:
525 if (regs->regs[insn.i_format.rs] != 525 if (regs->regs[insn.i_format.rs] !=
526 regs->regs[insn.i_format.rt]) { 526 regs->regs[insn.i_format.rt]) {
527 epc = epc + 4 + (insn.i_format.simmediate << 2); 527 epc = epc + 4 + (insn.i_format.simmediate << 2);
528 if (insn.i_format.opcode == bnel_op) 528 if (insn.i_format.opcode == bnel_op)
529 ret = BRANCH_LIKELY_TAKEN; 529 ret = BRANCH_LIKELY_TAKEN;
530 } else 530 } else
531 epc += 8; 531 epc += 8;
532 regs->cp0_epc = epc; 532 regs->cp0_epc = epc;
533 break; 533 break;
534 534
535 case blez_op: /* not really i_format */ 535 case blez_op: /* not really i_format */
536 case blezl_op: 536 case blezl_op:
537 /* rt field assumed to be zero */ 537 /* rt field assumed to be zero */
538 if ((long)regs->regs[insn.i_format.rs] <= 0) { 538 if ((long)regs->regs[insn.i_format.rs] <= 0) {
539 epc = epc + 4 + (insn.i_format.simmediate << 2); 539 epc = epc + 4 + (insn.i_format.simmediate << 2);
540 if (insn.i_format.opcode == blezl_op) 540 if (insn.i_format.opcode == blezl_op)
541 ret = BRANCH_LIKELY_TAKEN; 541 ret = BRANCH_LIKELY_TAKEN;
542 } else 542 } else
543 epc += 8; 543 epc += 8;
544 regs->cp0_epc = epc; 544 regs->cp0_epc = epc;
545 break; 545 break;
546 546
547 case bgtz_op: 547 case bgtz_op:
548 case bgtzl_op: 548 case bgtzl_op:
549 /* rt field assumed to be zero */ 549 /* rt field assumed to be zero */
550 if ((long)regs->regs[insn.i_format.rs] > 0) { 550 if ((long)regs->regs[insn.i_format.rs] > 0) {
551 epc = epc + 4 + (insn.i_format.simmediate << 2); 551 epc = epc + 4 + (insn.i_format.simmediate << 2);
552 if (insn.i_format.opcode == bgtzl_op) 552 if (insn.i_format.opcode == bgtzl_op)
553 ret = BRANCH_LIKELY_TAKEN; 553 ret = BRANCH_LIKELY_TAKEN;
554 } else 554 } else
555 epc += 8; 555 epc += 8;
556 regs->cp0_epc = epc; 556 regs->cp0_epc = epc;
557 break; 557 break;
558 558
559 /* 559 /*
560 * And now the FPA/cp1 branch instructions. 560 * And now the FPA/cp1 branch instructions.
561 */ 561 */
562 case cop1_op: 562 case cop1_op:
563 preempt_disable(); 563 preempt_disable();
564 if (is_fpu_owner()) 564 if (is_fpu_owner())
565 asm volatile( 565 fcr31 = read_32bit_cp1_register(CP1_STATUS);
566 ".set push\n"
567 "\t.set mips1\n"
568 "\tcfc1\t%0,$31\n"
569 "\t.set pop" : "=r" (fcr31));
570 else 566 else
571 fcr31 = current->thread.fpu.fcr31; 567 fcr31 = current->thread.fpu.fcr31;
572 preempt_enable(); 568 preempt_enable();
573 569
574 bit = (insn.i_format.rt >> 2); 570 bit = (insn.i_format.rt >> 2);
575 bit += (bit != 0); 571 bit += (bit != 0);
576 bit += 23; 572 bit += 23;
577 switch (insn.i_format.rt & 3) { 573 switch (insn.i_format.rt & 3) {
578 case 0: /* bc1f */ 574 case 0: /* bc1f */
579 case 2: /* bc1fl */ 575 case 2: /* bc1fl */
580 if (~fcr31 & (1 << bit)) { 576 if (~fcr31 & (1 << bit)) {
581 epc = epc + 4 + (insn.i_format.simmediate << 2); 577 epc = epc + 4 + (insn.i_format.simmediate << 2);
582 if (insn.i_format.rt == 2) 578 if (insn.i_format.rt == 2)
583 ret = BRANCH_LIKELY_TAKEN; 579 ret = BRANCH_LIKELY_TAKEN;
584 } else 580 } else
585 epc += 8; 581 epc += 8;
586 regs->cp0_epc = epc; 582 regs->cp0_epc = epc;
587 break; 583 break;
588 584
589 case 1: /* bc1t */ 585 case 1: /* bc1t */
590 case 3: /* bc1tl */ 586 case 3: /* bc1tl */
591 if (fcr31 & (1 << bit)) { 587 if (fcr31 & (1 << bit)) {
592 epc = epc + 4 + (insn.i_format.simmediate << 2); 588 epc = epc + 4 + (insn.i_format.simmediate << 2);
593 if (insn.i_format.rt == 3) 589 if (insn.i_format.rt == 3)
594 ret = BRANCH_LIKELY_TAKEN; 590 ret = BRANCH_LIKELY_TAKEN;
595 } else 591 } else
596 epc += 8; 592 epc += 8;
597 regs->cp0_epc = epc; 593 regs->cp0_epc = epc;
598 break; 594 break;
599 } 595 }
600 break; 596 break;
601 #ifdef CONFIG_CPU_CAVIUM_OCTEON 597 #ifdef CONFIG_CPU_CAVIUM_OCTEON
602 case lwc2_op: /* This is bbit0 on Octeon */ 598 case lwc2_op: /* This is bbit0 on Octeon */
603 if ((regs->regs[insn.i_format.rs] & (1ull<<insn.i_format.rt)) 599 if ((regs->regs[insn.i_format.rs] & (1ull<<insn.i_format.rt))
604 == 0) 600 == 0)
605 epc = epc + 4 + (insn.i_format.simmediate << 2); 601 epc = epc + 4 + (insn.i_format.simmediate << 2);
606 else 602 else
607 epc += 8; 603 epc += 8;
608 regs->cp0_epc = epc; 604 regs->cp0_epc = epc;
609 break; 605 break;
610 case ldc2_op: /* This is bbit032 on Octeon */ 606 case ldc2_op: /* This is bbit032 on Octeon */
611 if ((regs->regs[insn.i_format.rs] & 607 if ((regs->regs[insn.i_format.rs] &
612 (1ull<<(insn.i_format.rt+32))) == 0) 608 (1ull<<(insn.i_format.rt+32))) == 0)
613 epc = epc + 4 + (insn.i_format.simmediate << 2); 609 epc = epc + 4 + (insn.i_format.simmediate << 2);
614 else 610 else
615 epc += 8; 611 epc += 8;
616 regs->cp0_epc = epc; 612 regs->cp0_epc = epc;
617 break; 613 break;
618 case swc2_op: /* This is bbit1 on Octeon */ 614 case swc2_op: /* This is bbit1 on Octeon */
619 if (regs->regs[insn.i_format.rs] & (1ull<<insn.i_format.rt)) 615 if (regs->regs[insn.i_format.rs] & (1ull<<insn.i_format.rt))
620 epc = epc + 4 + (insn.i_format.simmediate << 2); 616 epc = epc + 4 + (insn.i_format.simmediate << 2);
621 else 617 else
622 epc += 8; 618 epc += 8;
623 regs->cp0_epc = epc; 619 regs->cp0_epc = epc;
624 break; 620 break;
625 case sdc2_op: /* This is bbit132 on Octeon */ 621 case sdc2_op: /* This is bbit132 on Octeon */
626 if (regs->regs[insn.i_format.rs] & 622 if (regs->regs[insn.i_format.rs] &
627 (1ull<<(insn.i_format.rt+32))) 623 (1ull<<(insn.i_format.rt+32)))
628 epc = epc + 4 + (insn.i_format.simmediate << 2); 624 epc = epc + 4 + (insn.i_format.simmediate << 2);
629 else 625 else
630 epc += 8; 626 epc += 8;
631 regs->cp0_epc = epc; 627 regs->cp0_epc = epc;
632 break; 628 break;
633 #endif 629 #endif
634 } 630 }
635 631
636 return ret; 632 return ret;
637 633
638 sigill: 634 sigill:
639 printk("%s: DSP branch but not DSP ASE - sending SIGBUS.\n", current->comm); 635 printk("%s: DSP branch but not DSP ASE - sending SIGBUS.\n", current->comm);
640 force_sig(SIGBUS, current); 636 force_sig(SIGBUS, current);
641 return -EFAULT; 637 return -EFAULT;
642 } 638 }
643 EXPORT_SYMBOL_GPL(__compute_return_epc_for_insn); 639 EXPORT_SYMBOL_GPL(__compute_return_epc_for_insn);
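
The expression repeated throughout __compute_return_epc_for_insn() above is the standard MIPS delay-slot arithmetic: a taken branch resumes at the instruction after the delay slot plus the sign-extended, word-scaled offset, while a not-taken (or failed branch-likely) path simply continues past the delay slot. Condensed into a sketch (illustrative helper only; "simmediate" stands for the 16-bit signed offset field of the branch):

	static inline long sketch_branch_target(long epc, int simmediate, int taken)
	{
		return taken ? epc + 4 + ((long)simmediate << 2)	/* into the target */
			     : epc + 8;					/* past the delay slot */
	}
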
644 640
645 int __compute_return_epc(struct pt_regs *regs) 641 int __compute_return_epc(struct pt_regs *regs)
646 { 642 {
647 unsigned int __user *addr; 643 unsigned int __user *addr;
648 long epc; 644 long epc;
649 union mips_instruction insn; 645 union mips_instruction insn;
650 646
651 epc = regs->cp0_epc; 647 epc = regs->cp0_epc;
652 if (epc & 3) 648 if (epc & 3)
653 goto unaligned; 649 goto unaligned;
654 650
655 /* 651 /*
656 * Read the instruction 652 * Read the instruction
657 */ 653 */
658 addr = (unsigned int __user *) epc; 654 addr = (unsigned int __user *) epc;
659 if (__get_user(insn.word, addr)) { 655 if (__get_user(insn.word, addr)) {
660 force_sig(SIGSEGV, current); 656 force_sig(SIGSEGV, current);
661 return -EFAULT; 657 return -EFAULT;
662 } 658 }
663 659
664 return __compute_return_epc_for_insn(regs, insn); 660 return __compute_return_epc_for_insn(regs, insn);
665 661
666 unaligned: 662 unaligned:
667 printk("%s: unaligned epc - sending SIGBUS.\n", current->comm); 663 printk("%s: unaligned epc - sending SIGBUS.\n", current->comm);
668 force_sig(SIGBUS, current); 664 force_sig(SIGBUS, current);
669 return -EFAULT; 665 return -EFAULT;
670 } 666 }
671 667
arch/mips/kernel/genex.S
1 /* 1 /*
2 * This file is subject to the terms and conditions of the GNU General Public 2 * This file is subject to the terms and conditions of the GNU General Public
3 * License. See the file "COPYING" in the main directory of this archive 3 * License. See the file "COPYING" in the main directory of this archive
4 * for more details. 4 * for more details.
5 * 5 *
6 * Copyright (C) 1994 - 2000, 2001, 2003 Ralf Baechle 6 * Copyright (C) 1994 - 2000, 2001, 2003 Ralf Baechle
7 * Copyright (C) 1999, 2000 Silicon Graphics, Inc. 7 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
8 * Copyright (C) 2002, 2007 Maciej W. Rozycki 8 * Copyright (C) 2002, 2007 Maciej W. Rozycki
9 * Copyright (C) 2001, 2012 MIPS Technologies, Inc. All rights reserved. 9 * Copyright (C) 2001, 2012 MIPS Technologies, Inc. All rights reserved.
10 */ 10 */
11 #include <linux/init.h> 11 #include <linux/init.h>
12 12
13 #include <asm/asm.h> 13 #include <asm/asm.h>
14 #include <asm/asmmacro.h> 14 #include <asm/asmmacro.h>
15 #include <asm/cacheops.h> 15 #include <asm/cacheops.h>
16 #include <asm/irqflags.h> 16 #include <asm/irqflags.h>
17 #include <asm/regdef.h> 17 #include <asm/regdef.h>
18 #include <asm/fpregdef.h> 18 #include <asm/fpregdef.h>
19 #include <asm/mipsregs.h> 19 #include <asm/mipsregs.h>
20 #include <asm/stackframe.h> 20 #include <asm/stackframe.h>
21 #include <asm/war.h> 21 #include <asm/war.h>
22 #include <asm/thread_info.h> 22 #include <asm/thread_info.h>
23 23
24 __INIT 24 __INIT
25 25
26 /* 26 /*
27 * General exception vector for all other CPUs. 27 * General exception vector for all other CPUs.
28 * 28 *
29 * Be careful when changing this, it has to be at most 128 bytes 29 * Be careful when changing this, it has to be at most 128 bytes
30 * to fit into space reserved for the exception handler. 30 * to fit into space reserved for the exception handler.
31 */ 31 */
32 NESTED(except_vec3_generic, 0, sp) 32 NESTED(except_vec3_generic, 0, sp)
33 .set push 33 .set push
34 .set noat 34 .set noat
35 #if R5432_CP0_INTERRUPT_WAR 35 #if R5432_CP0_INTERRUPT_WAR
36 mfc0 k0, CP0_INDEX 36 mfc0 k0, CP0_INDEX
37 #endif 37 #endif
38 mfc0 k1, CP0_CAUSE 38 mfc0 k1, CP0_CAUSE
39 andi k1, k1, 0x7c 39 andi k1, k1, 0x7c
40 #ifdef CONFIG_64BIT 40 #ifdef CONFIG_64BIT
41 dsll k1, k1, 1 41 dsll k1, k1, 1
42 #endif 42 #endif
43 PTR_L k0, exception_handlers(k1) 43 PTR_L k0, exception_handlers(k1)
44 jr k0 44 jr k0
45 .set pop 45 .set pop
46 END(except_vec3_generic) 46 END(except_vec3_generic)
47 47
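The dispatch in except_vec3_generic amounts to an indexed load from exception_handlers[]; a hedged C rendering (exception_handlers is the kernel's real table, the helper is illustrative):

/*
 * (cause & 0x7c) keeps ExcCode already shifted left by 2, i.e. a byte
 * offset into a table of 4-byte pointers; the extra dsll on 64-bit
 * kernels doubles it because the table then holds 8-byte pointers.
 */
extern unsigned long exception_handlers[];

static unsigned long pick_exception_handler(unsigned int cause)
{
        unsigned long offset = cause & 0x7c;
#ifdef CONFIG_64BIT
        offset <<= 1;
#endif
        return *(unsigned long *)((char *)exception_handlers + offset);
}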
48 /* 48 /*
49 * General exception handler for CPUs with virtual coherency exception. 49 * General exception handler for CPUs with virtual coherency exception.
50 * 50 *
51 * Be careful when changing this, it has to be at most 256 (as a special 51 * Be careful when changing this, it has to be at most 256 (as a special
52 * exception) bytes to fit into space reserved for the exception handler. 52 * exception) bytes to fit into space reserved for the exception handler.
53 */ 53 */
54 NESTED(except_vec3_r4000, 0, sp) 54 NESTED(except_vec3_r4000, 0, sp)
55 .set push 55 .set push
56 .set arch=r4000 56 .set arch=r4000
57 .set noat 57 .set noat
58 mfc0 k1, CP0_CAUSE 58 mfc0 k1, CP0_CAUSE
59 li k0, 31<<2 59 li k0, 31<<2
60 andi k1, k1, 0x7c 60 andi k1, k1, 0x7c
61 .set push 61 .set push
62 .set noreorder 62 .set noreorder
63 .set nomacro 63 .set nomacro
64 beq k1, k0, handle_vced 64 beq k1, k0, handle_vced
65 li k0, 14<<2 65 li k0, 14<<2
66 beq k1, k0, handle_vcei 66 beq k1, k0, handle_vcei
67 #ifdef CONFIG_64BIT 67 #ifdef CONFIG_64BIT
68 dsll k1, k1, 1 68 dsll k1, k1, 1
69 #endif 69 #endif
70 .set pop 70 .set pop
71 PTR_L k0, exception_handlers(k1) 71 PTR_L k0, exception_handlers(k1)
72 jr k0 72 jr k0
73 73
74 /* 74 /*
75 * Big shit, we now may have two dirty primary cache lines for the same 75 * Big shit, we now may have two dirty primary cache lines for the same
76 * physical address. We can safely invalidate the line pointed to by 76 * physical address. We can safely invalidate the line pointed to by
77 * c0_badvaddr because after return from this exception handler the 77 * c0_badvaddr because after return from this exception handler the
78 * load / store will be re-executed. 78 * load / store will be re-executed.
79 */ 79 */
80 handle_vced: 80 handle_vced:
81 MFC0 k0, CP0_BADVADDR 81 MFC0 k0, CP0_BADVADDR
82 li k1, -4 # Is this ... 82 li k1, -4 # Is this ...
83 and k0, k1 # ... really needed? 83 and k0, k1 # ... really needed?
84 mtc0 zero, CP0_TAGLO 84 mtc0 zero, CP0_TAGLO
85 cache Index_Store_Tag_D, (k0) 85 cache Index_Store_Tag_D, (k0)
86 cache Hit_Writeback_Inv_SD, (k0) 86 cache Hit_Writeback_Inv_SD, (k0)
87 #ifdef CONFIG_PROC_FS 87 #ifdef CONFIG_PROC_FS
88 PTR_LA k0, vced_count 88 PTR_LA k0, vced_count
89 lw k1, (k0) 89 lw k1, (k0)
90 addiu k1, 1 90 addiu k1, 1
91 sw k1, (k0) 91 sw k1, (k0)
92 #endif 92 #endif
93 eret 93 eret
94 94
95 handle_vcei: 95 handle_vcei:
96 MFC0 k0, CP0_BADVADDR 96 MFC0 k0, CP0_BADVADDR
97 cache Hit_Writeback_Inv_SD, (k0) # also cleans pi 97 cache Hit_Writeback_Inv_SD, (k0) # also cleans pi
98 #ifdef CONFIG_PROC_FS 98 #ifdef CONFIG_PROC_FS
99 PTR_LA k0, vcei_count 99 PTR_LA k0, vcei_count
100 lw k1, (k0) 100 lw k1, (k0)
101 addiu k1, 1 101 addiu k1, 1
102 sw k1, (k0) 102 sw k1, (k0)
103 #endif 103 #endif
104 eret 104 eret
105 .set pop 105 .set pop
106 END(except_vec3_r4000) 106 END(except_vec3_r4000)
107 107
108 __FINIT 108 __FINIT
109 109
110 .align 5 /* 32 byte rollback region */ 110 .align 5 /* 32 byte rollback region */
111 LEAF(__r4k_wait) 111 LEAF(__r4k_wait)
112 .set push 112 .set push
113 .set noreorder 113 .set noreorder
114 /* start of rollback region */ 114 /* start of rollback region */
115 LONG_L t0, TI_FLAGS($28) 115 LONG_L t0, TI_FLAGS($28)
116 nop 116 nop
117 andi t0, _TIF_NEED_RESCHED 117 andi t0, _TIF_NEED_RESCHED
118 bnez t0, 1f 118 bnez t0, 1f
119 nop 119 nop
120 nop 120 nop
121 nop 121 nop
122 #ifdef CONFIG_CPU_MICROMIPS 122 #ifdef CONFIG_CPU_MICROMIPS
123 nop 123 nop
124 nop 124 nop
125 nop 125 nop
126 nop 126 nop
127 #endif 127 #endif
128 .set arch=r4000 128 .set arch=r4000
129 wait 129 wait
130 /* end of rollback region (the region size must be power of two) */ 130 /* end of rollback region (the region size must be power of two) */
131 1: 131 1:
132 jr ra 132 jr ra
133 nop 133 nop
134 .set pop 134 .set pop
135 END(__r4k_wait) 135 END(__r4k_wait)
136 136
137 .macro BUILD_ROLLBACK_PROLOGUE handler 137 .macro BUILD_ROLLBACK_PROLOGUE handler
138 FEXPORT(rollback_\handler) 138 FEXPORT(rollback_\handler)
139 .set push 139 .set push
140 .set noat 140 .set noat
141 MFC0 k0, CP0_EPC 141 MFC0 k0, CP0_EPC
142 PTR_LA k1, __r4k_wait 142 PTR_LA k1, __r4k_wait
143 ori k0, 0x1f /* 32 byte rollback region */ 143 ori k0, 0x1f /* 32 byte rollback region */
144 xori k0, 0x1f 144 xori k0, 0x1f
145 bne k0, k1, 9f 145 bne k0, k1, 9f
146 MTC0 k0, CP0_EPC 146 MTC0 k0, CP0_EPC
147 9: 147 9:
148 .set pop 148 .set pop
149 .endm 149 .endm
150 150
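The ori/xori pair in BUILD_ROLLBACK_PROLOGUE rounds EPC down to a 32-byte boundary and compares it against __r4k_wait; roughly, in C (function name illustrative):

/*
 * If the interrupt hit anywhere inside the 32-byte __r4k_wait region,
 * rewind EPC to the start of that region so the TIF_NEED_RESCHED check
 * runs again before the CPU re-enters the wait instruction.
 */
static unsigned long rollback_wait_epc(unsigned long epc,
                                       unsigned long r4k_wait_start)
{
        if ((epc & ~0x1fUL) == r4k_wait_start)
                return r4k_wait_start;
        return epc;
}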
151 .align 5 151 .align 5
152 BUILD_ROLLBACK_PROLOGUE handle_int 152 BUILD_ROLLBACK_PROLOGUE handle_int
153 NESTED(handle_int, PT_SIZE, sp) 153 NESTED(handle_int, PT_SIZE, sp)
154 #ifdef CONFIG_TRACE_IRQFLAGS 154 #ifdef CONFIG_TRACE_IRQFLAGS
155 /* 155 /*
156 * Check to see if the interrupted code has just disabled 156 * Check to see if the interrupted code has just disabled
157 * interrupts and ignore this interrupt for now if so. 157 * interrupts and ignore this interrupt for now if so.
158 * 158 *
159 * local_irq_disable() disables interrupts and then calls 159 * local_irq_disable() disables interrupts and then calls
160 * trace_hardirqs_off() to track the state. If an interrupt is taken 160 * trace_hardirqs_off() to track the state. If an interrupt is taken
161 * after interrupts are disabled but before the state is updated 161 * after interrupts are disabled but before the state is updated
162 * it will appear to restore_all that it is incorrectly returning with 162 * it will appear to restore_all that it is incorrectly returning with
163 * interrupts disabled 163 * interrupts disabled
164 */ 164 */
165 .set push 165 .set push
166 .set noat 166 .set noat
167 mfc0 k0, CP0_STATUS 167 mfc0 k0, CP0_STATUS
168 #if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX) 168 #if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
169 and k0, ST0_IEP 169 and k0, ST0_IEP
170 bnez k0, 1f 170 bnez k0, 1f
171 171
172 mfc0 k0, CP0_EPC 172 mfc0 k0, CP0_EPC
173 .set noreorder 173 .set noreorder
174 j k0 174 j k0
175 rfe 175 rfe
176 #else 176 #else
177 and k0, ST0_IE 177 and k0, ST0_IE
178 bnez k0, 1f 178 bnez k0, 1f
179 179
180 eret 180 eret
181 #endif 181 #endif
182 1: 182 1:
183 .set pop 183 .set pop
184 #endif 184 #endif
185 SAVE_ALL 185 SAVE_ALL
186 CLI 186 CLI
187 TRACE_IRQS_OFF 187 TRACE_IRQS_OFF
188 188
189 LONG_L s0, TI_REGS($28) 189 LONG_L s0, TI_REGS($28)
190 LONG_S sp, TI_REGS($28) 190 LONG_S sp, TI_REGS($28)
191 PTR_LA ra, ret_from_irq 191 PTR_LA ra, ret_from_irq
192 PTR_LA v0, plat_irq_dispatch 192 PTR_LA v0, plat_irq_dispatch
193 jr v0 193 jr v0
194 #ifdef CONFIG_CPU_MICROMIPS 194 #ifdef CONFIG_CPU_MICROMIPS
195 nop 195 nop
196 #endif 196 #endif
197 END(handle_int) 197 END(handle_int)
198 198
199 __INIT 199 __INIT
200 200
201 /* 201 /*
202 * Special interrupt vector for MIPS64 ISA & embedded MIPS processors. 202 * Special interrupt vector for MIPS64 ISA & embedded MIPS processors.
203 * This is a dedicated interrupt exception vector which reduces the 203 * This is a dedicated interrupt exception vector which reduces the
204 * interrupt processing overhead. The jump instruction will be replaced 204 * interrupt processing overhead. The jump instruction will be replaced
205 * at initialization time. 205 * at initialization time.
206 * 206 *
207 * Be careful when changing this, it has to be at most 128 bytes 207 * Be careful when changing this, it has to be at most 128 bytes
208 * to fit into space reserved for the exception handler. 208 * to fit into space reserved for the exception handler.
209 */ 209 */
210 NESTED(except_vec4, 0, sp) 210 NESTED(except_vec4, 0, sp)
211 1: j 1b /* Dummy, will be replaced */ 211 1: j 1b /* Dummy, will be replaced */
212 END(except_vec4) 212 END(except_vec4)
213 213
214 /* 214 /*
215 * EJTAG debug exception handler. 215 * EJTAG debug exception handler.
216 * The EJTAG debug exception entry point is 0xbfc00480, which 216 * The EJTAG debug exception entry point is 0xbfc00480, which
217 * normally is in the boot PROM, so the boot PROM must do an 217 * normally is in the boot PROM, so the boot PROM must do an
218 * unconditional jump to this vector. 218 * unconditional jump to this vector.
219 */ 219 */
220 NESTED(except_vec_ejtag_debug, 0, sp) 220 NESTED(except_vec_ejtag_debug, 0, sp)
221 j ejtag_debug_handler 221 j ejtag_debug_handler
222 #ifdef CONFIG_CPU_MICROMIPS 222 #ifdef CONFIG_CPU_MICROMIPS
223 nop 223 nop
224 #endif 224 #endif
225 END(except_vec_ejtag_debug) 225 END(except_vec_ejtag_debug)
226 226
227 __FINIT 227 __FINIT
228 228
229 /* 229 /*
230 * Vectored interrupt handler. 230 * Vectored interrupt handler.
231 * This prototype is copied to ebase + n*IntCtl.VS and patched 231 * This prototype is copied to ebase + n*IntCtl.VS and patched
232 * to invoke the handler 232 * to invoke the handler
233 */ 233 */
234 BUILD_ROLLBACK_PROLOGUE except_vec_vi 234 BUILD_ROLLBACK_PROLOGUE except_vec_vi
235 NESTED(except_vec_vi, 0, sp) 235 NESTED(except_vec_vi, 0, sp)
236 SAVE_SOME 236 SAVE_SOME
237 SAVE_AT 237 SAVE_AT
238 .set push 238 .set push
239 .set noreorder 239 .set noreorder
240 PTR_LA v1, except_vec_vi_handler 240 PTR_LA v1, except_vec_vi_handler
241 FEXPORT(except_vec_vi_lui) 241 FEXPORT(except_vec_vi_lui)
242 lui v0, 0 /* Patched */ 242 lui v0, 0 /* Patched */
243 jr v1 243 jr v1
244 FEXPORT(except_vec_vi_ori) 244 FEXPORT(except_vec_vi_ori)
245 ori v0, 0 /* Patched */ 245 ori v0, 0 /* Patched */
246 .set pop 246 .set pop
247 END(except_vec_vi) 247 END(except_vec_vi)
248 EXPORT(except_vec_vi_end) 248 EXPORT(except_vec_vi_end)
249 249
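The two instructions marked "Patched" in except_vec_vi are rewritten at boot so that v0 receives a per-vector 32-bit value; conceptually the patching splits that value across the two immediates (illustrative helper, not the kernel's actual patch routine):

/*
 * A lui/ori pair builds a 32-bit constant 16 bits at a time, so only
 * the immediate field of each instruction needs to be rewritten.
 */
static void patch_lui_ori(unsigned int *lui, unsigned int *ori,
                          unsigned int value)
{
        *lui = (*lui & ~0xffffu) | (value >> 16);       /* upper halfword */
        *ori = (*ori & ~0xffffu) | (value & 0xffffu);   /* lower halfword */
}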
250 /* 250 /*
251 * Common Vectored Interrupt code 251 * Common Vectored Interrupt code
252 * Complete the register saves and invoke the handler which is passed in $v0 252 * Complete the register saves and invoke the handler which is passed in $v0
253 */ 253 */
254 NESTED(except_vec_vi_handler, 0, sp) 254 NESTED(except_vec_vi_handler, 0, sp)
255 SAVE_TEMP 255 SAVE_TEMP
256 SAVE_STATIC 256 SAVE_STATIC
257 CLI 257 CLI
258 #ifdef CONFIG_TRACE_IRQFLAGS 258 #ifdef CONFIG_TRACE_IRQFLAGS
259 move s0, v0 259 move s0, v0
260 TRACE_IRQS_OFF 260 TRACE_IRQS_OFF
261 move v0, s0 261 move v0, s0
262 #endif 262 #endif
263 263
264 LONG_L s0, TI_REGS($28) 264 LONG_L s0, TI_REGS($28)
265 LONG_S sp, TI_REGS($28) 265 LONG_S sp, TI_REGS($28)
266 PTR_LA ra, ret_from_irq 266 PTR_LA ra, ret_from_irq
267 jr v0 267 jr v0
268 END(except_vec_vi_handler) 268 END(except_vec_vi_handler)
269 269
270 /* 270 /*
271 * EJTAG debug exception handler. 271 * EJTAG debug exception handler.
272 */ 272 */
273 NESTED(ejtag_debug_handler, PT_SIZE, sp) 273 NESTED(ejtag_debug_handler, PT_SIZE, sp)
274 .set push 274 .set push
275 .set noat 275 .set noat
276 MTC0 k0, CP0_DESAVE 276 MTC0 k0, CP0_DESAVE
277 mfc0 k0, CP0_DEBUG 277 mfc0 k0, CP0_DEBUG
278 278
279 sll k0, k0, 30 # Check for SDBBP. 279 sll k0, k0, 30 # Check for SDBBP.
280 bgez k0, ejtag_return 280 bgez k0, ejtag_return
281 281
282 PTR_LA k0, ejtag_debug_buffer 282 PTR_LA k0, ejtag_debug_buffer
283 LONG_S k1, 0(k0) 283 LONG_S k1, 0(k0)
284 SAVE_ALL 284 SAVE_ALL
285 move a0, sp 285 move a0, sp
286 jal ejtag_exception_handler 286 jal ejtag_exception_handler
287 RESTORE_ALL 287 RESTORE_ALL
288 PTR_LA k0, ejtag_debug_buffer 288 PTR_LA k0, ejtag_debug_buffer
289 LONG_L k1, 0(k0) 289 LONG_L k1, 0(k0)
290 290
291 ejtag_return: 291 ejtag_return:
292 MFC0 k0, CP0_DESAVE 292 MFC0 k0, CP0_DESAVE
293 .set mips32 293 .set mips32
294 deret 294 deret
295 .set pop 295 .set pop
296 END(ejtag_debug_handler) 296 END(ejtag_debug_handler)
297 297
298 /* 298 /*
299 * This buffer is reserved for the use of the EJTAG debug 299 * This buffer is reserved for the use of the EJTAG debug
300 * handler. 300 * handler.
301 */ 301 */
302 .data 302 .data
303 EXPORT(ejtag_debug_buffer) 303 EXPORT(ejtag_debug_buffer)
304 .fill LONGSIZE 304 .fill LONGSIZE
305 .previous 305 .previous
306 306
307 __INIT 307 __INIT
308 308
309 /* 309 /*
310 * NMI debug exception handler for MIPS reference boards. 310 * NMI debug exception handler for MIPS reference boards.
311 * The NMI debug exception entry point is 0xbfc00000, which 311 * The NMI debug exception entry point is 0xbfc00000, which
312 * normally is in the boot PROM, so the boot PROM must do an 312 * normally is in the boot PROM, so the boot PROM must do an
313 * unconditional jump to this vector. 313 * unconditional jump to this vector.
314 */ 314 */
315 NESTED(except_vec_nmi, 0, sp) 315 NESTED(except_vec_nmi, 0, sp)
316 j nmi_handler 316 j nmi_handler
317 #ifdef CONFIG_CPU_MICROMIPS 317 #ifdef CONFIG_CPU_MICROMIPS
318 nop 318 nop
319 #endif 319 #endif
320 END(except_vec_nmi) 320 END(except_vec_nmi)
321 321
322 __FINIT 322 __FINIT
323 323
324 NESTED(nmi_handler, PT_SIZE, sp) 324 NESTED(nmi_handler, PT_SIZE, sp)
325 .set push 325 .set push
326 .set noat 326 .set noat
327 /* 327 /*
328 * Clear ERL - restore segment mapping 328 * Clear ERL - restore segment mapping
329 * Clear BEV - required for page fault exception handler to work 329 * Clear BEV - required for page fault exception handler to work
330 */ 330 */
331 mfc0 k0, CP0_STATUS 331 mfc0 k0, CP0_STATUS
332 ori k0, k0, ST0_EXL 332 ori k0, k0, ST0_EXL
333 li k1, ~(ST0_BEV | ST0_ERL) 333 li k1, ~(ST0_BEV | ST0_ERL)
334 and k0, k0, k1 334 and k0, k0, k1
335 mtc0 k0, CP0_STATUS 335 mtc0 k0, CP0_STATUS
336 _ehb 336 _ehb
337 SAVE_ALL 337 SAVE_ALL
338 move a0, sp 338 move a0, sp
339 jal nmi_exception_handler 339 jal nmi_exception_handler
340 /* nmi_exception_handler never returns */ 340 /* nmi_exception_handler never returns */
341 .set pop 341 .set pop
342 END(nmi_handler) 342 END(nmi_handler)
343 343
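In C terms, the STATUS fixup at NMI entry does the following (sketch only; ST0_EXL, ST0_ERL and ST0_BEV are the kernel's masks from <asm/mipsregs.h>):

/*
 * Set EXL to stay in exception mode, clear ERL to restore the normal
 * segment mapping, and clear BEV so exceptions raised while handling
 * the NMI (e.g. page faults) use the normal RAM vectors.
 */
static unsigned int nmi_fixup_status(unsigned int c0_status)
{
        c0_status |= ST0_EXL;
        c0_status &= ~(ST0_BEV | ST0_ERL);
        return c0_status;
}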
344 .macro __build_clear_none 344 .macro __build_clear_none
345 .endm 345 .endm
346 346
347 .macro __build_clear_sti 347 .macro __build_clear_sti
348 TRACE_IRQS_ON 348 TRACE_IRQS_ON
349 STI 349 STI
350 .endm 350 .endm
351 351
352 .macro __build_clear_cli 352 .macro __build_clear_cli
353 CLI 353 CLI
354 TRACE_IRQS_OFF 354 TRACE_IRQS_OFF
355 .endm 355 .endm
356 356
357 .macro __build_clear_fpe 357 .macro __build_clear_fpe
358 .set push 358 .set push
359 /* gas fails to assemble cfc1 for some archs (octeon).*/ \ 359 /* gas fails to assemble cfc1 for some archs (octeon).*/ \
360 .set mips1 360 .set mips1
361 SET_HARDFLOAT
361 cfc1 a1, fcr31 362 cfc1 a1, fcr31
362 li a2, ~(0x3f << 12) 363 li a2, ~(0x3f << 12)
363 and a2, a1 364 and a2, a1
364 ctc1 a2, fcr31 365 ctc1 a2, fcr31
365 .set pop 366 .set pop
366 TRACE_IRQS_ON 367 TRACE_IRQS_ON
367 STI 368 STI
368 .endm 369 .endm
369 370
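The fcr31 manipulation in __build_clear_fpe is equivalent to clearing the FCSR Cause field; a C sketch (helper name illustrative):

/*
 * Bits 17..12 of the FP control/status register form the Cause field;
 * clearing them keeps the just-taken FP exception from re-asserting as
 * soon as interrupts are enabled again.
 */
static unsigned int clear_fpu_cause_bits(unsigned int fcr31)
{
        return fcr31 & ~(0x3f << 12);
}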
370 .macro __build_clear_ade 371 .macro __build_clear_ade
371 MFC0 t0, CP0_BADVADDR 372 MFC0 t0, CP0_BADVADDR
372 PTR_S t0, PT_BVADDR(sp) 373 PTR_S t0, PT_BVADDR(sp)
373 KMODE 374 KMODE
374 .endm 375 .endm
375 376
376 .macro __BUILD_silent exception 377 .macro __BUILD_silent exception
377 .endm 378 .endm
378 379
379 /* Gas tries to parse the PRINT argument as a string containing 380 /* Gas tries to parse the PRINT argument as a string containing
380 string escapes and emits bogus warnings if it believes to 381 string escapes and emits bogus warnings if it believes to
381 recognize an unknown escape code. So make the arguments 382 recognize an unknown escape code. So make the arguments
382 start with an n and gas will believe \n is ok ... */ 383 start with an n and gas will believe \n is ok ... */
383 .macro __BUILD_verbose nexception 384 .macro __BUILD_verbose nexception
384 LONG_L a1, PT_EPC(sp) 385 LONG_L a1, PT_EPC(sp)
385 #ifdef CONFIG_32BIT 386 #ifdef CONFIG_32BIT
386 PRINT("Got \nexception at %08lx\012") 387 PRINT("Got \nexception at %08lx\012")
387 #endif 388 #endif
388 #ifdef CONFIG_64BIT 389 #ifdef CONFIG_64BIT
389 PRINT("Got \nexception at %016lx\012") 390 PRINT("Got \nexception at %016lx\012")
390 #endif 391 #endif
391 .endm 392 .endm
392 393
393 .macro __BUILD_count exception 394 .macro __BUILD_count exception
394 LONG_L t0,exception_count_\exception 395 LONG_L t0,exception_count_\exception
395 LONG_ADDIU t0, 1 396 LONG_ADDIU t0, 1
396 LONG_S t0,exception_count_\exception 397 LONG_S t0,exception_count_\exception
397 .comm exception_count\exception, 8, 8 398 .comm exception_count\exception, 8, 8
398 .endm 399 .endm
399 400
400 .macro __BUILD_HANDLER exception handler clear verbose ext 401 .macro __BUILD_HANDLER exception handler clear verbose ext
401 .align 5 402 .align 5
402 NESTED(handle_\exception, PT_SIZE, sp) 403 NESTED(handle_\exception, PT_SIZE, sp)
403 .set noat 404 .set noat
404 SAVE_ALL 405 SAVE_ALL
405 FEXPORT(handle_\exception\ext) 406 FEXPORT(handle_\exception\ext)
406 __BUILD_clear_\clear 407 __BUILD_clear_\clear
407 .set at 408 .set at
408 __BUILD_\verbose \exception 409 __BUILD_\verbose \exception
409 move a0, sp 410 move a0, sp
410 PTR_LA ra, ret_from_exception 411 PTR_LA ra, ret_from_exception
411 j do_\handler 412 j do_\handler
412 END(handle_\exception) 413 END(handle_\exception)
413 .endm 414 .endm
414 415
415 .macro BUILD_HANDLER exception handler clear verbose 416 .macro BUILD_HANDLER exception handler clear verbose
416 __BUILD_HANDLER \exception \handler \clear \verbose _int 417 __BUILD_HANDLER \exception \handler \clear \verbose _int
417 .endm 418 .endm
418 419
419 BUILD_HANDLER adel ade ade silent /* #4 */ 420 BUILD_HANDLER adel ade ade silent /* #4 */
420 BUILD_HANDLER ades ade ade silent /* #5 */ 421 BUILD_HANDLER ades ade ade silent /* #5 */
421 BUILD_HANDLER ibe be cli silent /* #6 */ 422 BUILD_HANDLER ibe be cli silent /* #6 */
422 BUILD_HANDLER dbe be cli silent /* #7 */ 423 BUILD_HANDLER dbe be cli silent /* #7 */
423 BUILD_HANDLER bp bp sti silent /* #9 */ 424 BUILD_HANDLER bp bp sti silent /* #9 */
424 BUILD_HANDLER ri ri sti silent /* #10 */ 425 BUILD_HANDLER ri ri sti silent /* #10 */
425 BUILD_HANDLER cpu cpu sti silent /* #11 */ 426 BUILD_HANDLER cpu cpu sti silent /* #11 */
426 BUILD_HANDLER ov ov sti silent /* #12 */ 427 BUILD_HANDLER ov ov sti silent /* #12 */
427 BUILD_HANDLER tr tr sti silent /* #13 */ 428 BUILD_HANDLER tr tr sti silent /* #13 */
428 BUILD_HANDLER msa_fpe msa_fpe sti silent /* #14 */ 429 BUILD_HANDLER msa_fpe msa_fpe sti silent /* #14 */
429 BUILD_HANDLER fpe fpe fpe silent /* #15 */ 430 BUILD_HANDLER fpe fpe fpe silent /* #15 */
430 BUILD_HANDLER ftlb ftlb none silent /* #16 */ 431 BUILD_HANDLER ftlb ftlb none silent /* #16 */
431 BUILD_HANDLER msa msa sti silent /* #21 */ 432 BUILD_HANDLER msa msa sti silent /* #21 */
432 BUILD_HANDLER mdmx mdmx sti silent /* #22 */ 433 BUILD_HANDLER mdmx mdmx sti silent /* #22 */
433 #ifdef CONFIG_HARDWARE_WATCHPOINTS 434 #ifdef CONFIG_HARDWARE_WATCHPOINTS
434 /* 435 /*
435 * For watch, interrupts will be enabled after the watch 436 * For watch, interrupts will be enabled after the watch
436 * registers are read. 437 * registers are read.
437 */ 438 */
438 BUILD_HANDLER watch watch cli silent /* #23 */ 439 BUILD_HANDLER watch watch cli silent /* #23 */
439 #else 440 #else
440 BUILD_HANDLER watch watch sti verbose /* #23 */ 441 BUILD_HANDLER watch watch sti verbose /* #23 */
441 #endif 442 #endif
442 BUILD_HANDLER mcheck mcheck cli verbose /* #24 */ 443 BUILD_HANDLER mcheck mcheck cli verbose /* #24 */
443 BUILD_HANDLER mt mt sti silent /* #25 */ 444 BUILD_HANDLER mt mt sti silent /* #25 */
444 BUILD_HANDLER dsp dsp sti silent /* #26 */ 445 BUILD_HANDLER dsp dsp sti silent /* #26 */
445 BUILD_HANDLER reserved reserved sti verbose /* others */ 446 BUILD_HANDLER reserved reserved sti verbose /* others */
446 447
447 .align 5 448 .align 5
448 LEAF(handle_ri_rdhwr_vivt) 449 LEAF(handle_ri_rdhwr_vivt)
449 .set push 450 .set push
450 .set noat 451 .set noat
451 .set noreorder 452 .set noreorder
452 /* check if TLB contains an entry for EPC */ 453 /* check if TLB contains an entry for EPC */
453 MFC0 k1, CP0_ENTRYHI 454 MFC0 k1, CP0_ENTRYHI
454 andi k1, 0xff /* ASID_MASK */ 455 andi k1, 0xff /* ASID_MASK */
455 MFC0 k0, CP0_EPC 456 MFC0 k0, CP0_EPC
456 PTR_SRL k0, _PAGE_SHIFT + 1 457 PTR_SRL k0, _PAGE_SHIFT + 1
457 PTR_SLL k0, _PAGE_SHIFT + 1 458 PTR_SLL k0, _PAGE_SHIFT + 1
458 or k1, k0 459 or k1, k0
459 MTC0 k1, CP0_ENTRYHI 460 MTC0 k1, CP0_ENTRYHI
460 mtc0_tlbw_hazard 461 mtc0_tlbw_hazard
461 tlbp 462 tlbp
462 tlb_probe_hazard 463 tlb_probe_hazard
463 mfc0 k1, CP0_INDEX 464 mfc0 k1, CP0_INDEX
464 .set pop 465 .set pop
465 bltz k1, handle_ri /* slow path */ 466 bltz k1, handle_ri /* slow path */
466 /* fall thru */ 467 /* fall thru */
467 END(handle_ri_rdhwr_vivt) 468 END(handle_ri_rdhwr_vivt)
468 469
469 LEAF(handle_ri_rdhwr) 470 LEAF(handle_ri_rdhwr)
470 .set push 471 .set push
471 .set noat 472 .set noat
472 .set noreorder 473 .set noreorder
473 /* MIPS32: 0x7c03e83b: rdhwr v1,$29 */ 474 /* MIPS32: 0x7c03e83b: rdhwr v1,$29 */
474 /* microMIPS: 0x007d6b3c: rdhwr v1,$29 */ 475 /* microMIPS: 0x007d6b3c: rdhwr v1,$29 */
475 MFC0 k1, CP0_EPC 476 MFC0 k1, CP0_EPC
476 #if defined(CONFIG_CPU_MICROMIPS) || defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS64_R2) 477 #if defined(CONFIG_CPU_MICROMIPS) || defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS64_R2)
477 and k0, k1, 1 478 and k0, k1, 1
478 beqz k0, 1f 479 beqz k0, 1f
479 xor k1, k0 480 xor k1, k0
480 lhu k0, (k1) 481 lhu k0, (k1)
481 lhu k1, 2(k1) 482 lhu k1, 2(k1)
482 ins k1, k0, 16, 16 483 ins k1, k0, 16, 16
483 lui k0, 0x007d 484 lui k0, 0x007d
484 b docheck 485 b docheck
485 ori k0, 0x6b3c 486 ori k0, 0x6b3c
486 1: 487 1:
487 lui k0, 0x7c03 488 lui k0, 0x7c03
488 lw k1, (k1) 489 lw k1, (k1)
489 ori k0, 0xe83b 490 ori k0, 0xe83b
490 #else 491 #else
491 andi k0, k1, 1 492 andi k0, k1, 1
492 bnez k0, handle_ri 493 bnez k0, handle_ri
493 lui k0, 0x7c03 494 lui k0, 0x7c03
494 lw k1, (k1) 495 lw k1, (k1)
495 ori k0, 0xe83b 496 ori k0, 0xe83b
496 #endif 497 #endif
497 .set reorder 498 .set reorder
498 docheck: 499 docheck:
499 bne k0, k1, handle_ri /* if not ours */ 500 bne k0, k1, handle_ri /* if not ours */
500 501
501 isrdhwr: 502 isrdhwr:
502 /* The insn is rdhwr. No need to check CAUSE.BD here. */ 503 /* The insn is rdhwr. No need to check CAUSE.BD here. */
503 get_saved_sp /* k1 := current_thread_info */ 504 get_saved_sp /* k1 := current_thread_info */
504 .set noreorder 505 .set noreorder
505 MFC0 k0, CP0_EPC 506 MFC0 k0, CP0_EPC
506 #if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX) 507 #if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
507 ori k1, _THREAD_MASK 508 ori k1, _THREAD_MASK
508 xori k1, _THREAD_MASK 509 xori k1, _THREAD_MASK
509 LONG_L v1, TI_TP_VALUE(k1) 510 LONG_L v1, TI_TP_VALUE(k1)
510 LONG_ADDIU k0, 4 511 LONG_ADDIU k0, 4
511 jr k0 512 jr k0
512 rfe 513 rfe
513 #else 514 #else
514 #ifndef CONFIG_CPU_DADDI_WORKAROUNDS 515 #ifndef CONFIG_CPU_DADDI_WORKAROUNDS
515 LONG_ADDIU k0, 4 /* stall on $k0 */ 516 LONG_ADDIU k0, 4 /* stall on $k0 */
516 #else 517 #else
517 .set at=v1 518 .set at=v1
518 LONG_ADDIU k0, 4 519 LONG_ADDIU k0, 4
519 .set noat 520 .set noat
520 #endif 521 #endif
521 MTC0 k0, CP0_EPC 522 MTC0 k0, CP0_EPC
522 /* I hope three instructions between MTC0 and ERET are enough... */ 523 /* I hope three instructions between MTC0 and ERET are enough... */
523 ori k1, _THREAD_MASK 524 ori k1, _THREAD_MASK
524 xori k1, _THREAD_MASK 525 xori k1, _THREAD_MASK
525 LONG_L v1, TI_TP_VALUE(k1) 526 LONG_L v1, TI_TP_VALUE(k1)
526 .set arch=r4000 527 .set arch=r4000
527 eret 528 eret
528 .set mips0 529 .set mips0
529 #endif 530 #endif
530 .set pop 531 .set pop
531 END(handle_ri_rdhwr) 532 END(handle_ri_rdhwr)
532 533
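handle_ri_rdhwr only emulates the exact "rdhwr v1, $29" encodings quoted in its comments; anything else is punted to the generic handle_ri path. A hedged C equivalent of the match (constants copied from the comments above):

/* 0x7c03e83b is the MIPS32 encoding, 0x007d6b3c the microMIPS one. */
static int insn_is_rdhwr_v1_29(unsigned int insn, int micromips)
{
        return insn == (micromips ? 0x007d6b3cu : 0x7c03e83bu);
}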
533 #ifdef CONFIG_64BIT 534 #ifdef CONFIG_64BIT
534 /* A temporary overflow handler used by check_daddi(). */ 535 /* A temporary overflow handler used by check_daddi(). */
535 536
536 __INIT 537 __INIT
537 538
538 BUILD_HANDLER daddi_ov daddi_ov none silent /* #12 */ 539 BUILD_HANDLER daddi_ov daddi_ov none silent /* #12 */
539 #endif 540 #endif
540 541
arch/mips/kernel/r2300_fpu.S
1 /* 1 /*
2 * This file is subject to the terms and conditions of the GNU General Public 2 * This file is subject to the terms and conditions of the GNU General Public
3 * License. See the file "COPYING" in the main directory of this archive 3 * License. See the file "COPYING" in the main directory of this archive
4 * for more details. 4 * for more details.
5 * 5 *
6 * Copyright (C) 1996, 1998 by Ralf Baechle 6 * Copyright (C) 1996, 1998 by Ralf Baechle
7 * 7 *
8 * Multi-arch abstraction and asm macros for easier reading: 8 * Multi-arch abstraction and asm macros for easier reading:
9 * Copyright (C) 1996 David S. Miller (davem@davemloft.net) 9 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
10 * 10 *
11 * Further modifications to make this work: 11 * Further modifications to make this work:
12 * Copyright (c) 1998 Harald Koerfgen 12 * Copyright (c) 1998 Harald Koerfgen
13 */ 13 */
14 #include <asm/asm.h> 14 #include <asm/asm.h>
15 #include <asm/errno.h> 15 #include <asm/errno.h>
16 #include <asm/fpregdef.h> 16 #include <asm/fpregdef.h>
17 #include <asm/mipsregs.h> 17 #include <asm/mipsregs.h>
18 #include <asm/asm-offsets.h> 18 #include <asm/asm-offsets.h>
19 #include <asm/regdef.h> 19 #include <asm/regdef.h>
20 20
21 #define EX(a,b) \ 21 #define EX(a,b) \
22 9: a,##b; \ 22 9: a,##b; \
23 .section __ex_table,"a"; \ 23 .section __ex_table,"a"; \
24 PTR 9b,bad_stack; \ 24 PTR 9b,bad_stack; \
25 .previous 25 .previous
26 26
27 .set noreorder 27 .set noreorder
28 .set mips1 28 .set mips1
29 /* Save floating point context */ 29 /* Save floating point context */
30 LEAF(_save_fp_context) 30 LEAF(_save_fp_context)
31 .set push
32 SET_HARDFLOAT
31 li v0, 0 # assume success 33 li v0, 0 # assume success
32 cfc1 t1,fcr31 34 cfc1 t1,fcr31
33 EX(swc1 $f0,(SC_FPREGS+0)(a0)) 35 EX(swc1 $f0,(SC_FPREGS+0)(a0))
34 EX(swc1 $f1,(SC_FPREGS+8)(a0)) 36 EX(swc1 $f1,(SC_FPREGS+8)(a0))
35 EX(swc1 $f2,(SC_FPREGS+16)(a0)) 37 EX(swc1 $f2,(SC_FPREGS+16)(a0))
36 EX(swc1 $f3,(SC_FPREGS+24)(a0)) 38 EX(swc1 $f3,(SC_FPREGS+24)(a0))
37 EX(swc1 $f4,(SC_FPREGS+32)(a0)) 39 EX(swc1 $f4,(SC_FPREGS+32)(a0))
38 EX(swc1 $f5,(SC_FPREGS+40)(a0)) 40 EX(swc1 $f5,(SC_FPREGS+40)(a0))
39 EX(swc1 $f6,(SC_FPREGS+48)(a0)) 41 EX(swc1 $f6,(SC_FPREGS+48)(a0))
40 EX(swc1 $f7,(SC_FPREGS+56)(a0)) 42 EX(swc1 $f7,(SC_FPREGS+56)(a0))
41 EX(swc1 $f8,(SC_FPREGS+64)(a0)) 43 EX(swc1 $f8,(SC_FPREGS+64)(a0))
42 EX(swc1 $f9,(SC_FPREGS+72)(a0)) 44 EX(swc1 $f9,(SC_FPREGS+72)(a0))
43 EX(swc1 $f10,(SC_FPREGS+80)(a0)) 45 EX(swc1 $f10,(SC_FPREGS+80)(a0))
44 EX(swc1 $f11,(SC_FPREGS+88)(a0)) 46 EX(swc1 $f11,(SC_FPREGS+88)(a0))
45 EX(swc1 $f12,(SC_FPREGS+96)(a0)) 47 EX(swc1 $f12,(SC_FPREGS+96)(a0))
46 EX(swc1 $f13,(SC_FPREGS+104)(a0)) 48 EX(swc1 $f13,(SC_FPREGS+104)(a0))
47 EX(swc1 $f14,(SC_FPREGS+112)(a0)) 49 EX(swc1 $f14,(SC_FPREGS+112)(a0))
48 EX(swc1 $f15,(SC_FPREGS+120)(a0)) 50 EX(swc1 $f15,(SC_FPREGS+120)(a0))
49 EX(swc1 $f16,(SC_FPREGS+128)(a0)) 51 EX(swc1 $f16,(SC_FPREGS+128)(a0))
50 EX(swc1 $f17,(SC_FPREGS+136)(a0)) 52 EX(swc1 $f17,(SC_FPREGS+136)(a0))
51 EX(swc1 $f18,(SC_FPREGS+144)(a0)) 53 EX(swc1 $f18,(SC_FPREGS+144)(a0))
52 EX(swc1 $f19,(SC_FPREGS+152)(a0)) 54 EX(swc1 $f19,(SC_FPREGS+152)(a0))
53 EX(swc1 $f20,(SC_FPREGS+160)(a0)) 55 EX(swc1 $f20,(SC_FPREGS+160)(a0))
54 EX(swc1 $f21,(SC_FPREGS+168)(a0)) 56 EX(swc1 $f21,(SC_FPREGS+168)(a0))
55 EX(swc1 $f22,(SC_FPREGS+176)(a0)) 57 EX(swc1 $f22,(SC_FPREGS+176)(a0))
56 EX(swc1 $f23,(SC_FPREGS+184)(a0)) 58 EX(swc1 $f23,(SC_FPREGS+184)(a0))
57 EX(swc1 $f24,(SC_FPREGS+192)(a0)) 59 EX(swc1 $f24,(SC_FPREGS+192)(a0))
58 EX(swc1 $f25,(SC_FPREGS+200)(a0)) 60 EX(swc1 $f25,(SC_FPREGS+200)(a0))
59 EX(swc1 $f26,(SC_FPREGS+208)(a0)) 61 EX(swc1 $f26,(SC_FPREGS+208)(a0))
60 EX(swc1 $f27,(SC_FPREGS+216)(a0)) 62 EX(swc1 $f27,(SC_FPREGS+216)(a0))
61 EX(swc1 $f28,(SC_FPREGS+224)(a0)) 63 EX(swc1 $f28,(SC_FPREGS+224)(a0))
62 EX(swc1 $f29,(SC_FPREGS+232)(a0)) 64 EX(swc1 $f29,(SC_FPREGS+232)(a0))
63 EX(swc1 $f30,(SC_FPREGS+240)(a0)) 65 EX(swc1 $f30,(SC_FPREGS+240)(a0))
64 EX(swc1 $f31,(SC_FPREGS+248)(a0)) 66 EX(swc1 $f31,(SC_FPREGS+248)(a0))
65 EX(sw t1,(SC_FPC_CSR)(a0)) 67 EX(sw t1,(SC_FPC_CSR)(a0))
66 cfc1 t0,$0 # implementation/version 68 cfc1 t0,$0 # implementation/version
67 jr ra 69 jr ra
70 .set pop
68 .set nomacro 71 .set nomacro
69 EX(sw t0,(SC_FPC_EIR)(a0)) 72 EX(sw t0,(SC_FPC_EIR)(a0))
70 .set macro 73 .set macro
71 END(_save_fp_context) 74 END(_save_fp_context)
72 75
73 /* 76 /*
74 * Restore FPU state: 77 * Restore FPU state:
75 * - fp gp registers 78 * - fp gp registers
76 * - cp1 status/control register 79 * - cp1 status/control register
77 * 80 *
78 * We base the decision which registers to restore from the signal stack 81 * We base the decision which registers to restore from the signal stack
79 * frame on the current content of c0_status, not on the content of the 82 * frame on the current content of c0_status, not on the content of the
80 * stack frame which might have been changed by the user. 83 * stack frame which might have been changed by the user.
81 */ 84 */
82 LEAF(_restore_fp_context) 85 LEAF(_restore_fp_context)
86 .set push
87 SET_HARDFLOAT
83 li v0, 0 # assume success 88 li v0, 0 # assume success
84 EX(lw t0,(SC_FPC_CSR)(a0)) 89 EX(lw t0,(SC_FPC_CSR)(a0))
85 EX(lwc1 $f0,(SC_FPREGS+0)(a0)) 90 EX(lwc1 $f0,(SC_FPREGS+0)(a0))
86 EX(lwc1 $f1,(SC_FPREGS+8)(a0)) 91 EX(lwc1 $f1,(SC_FPREGS+8)(a0))
87 EX(lwc1 $f2,(SC_FPREGS+16)(a0)) 92 EX(lwc1 $f2,(SC_FPREGS+16)(a0))
88 EX(lwc1 $f3,(SC_FPREGS+24)(a0)) 93 EX(lwc1 $f3,(SC_FPREGS+24)(a0))
89 EX(lwc1 $f4,(SC_FPREGS+32)(a0)) 94 EX(lwc1 $f4,(SC_FPREGS+32)(a0))
90 EX(lwc1 $f5,(SC_FPREGS+40)(a0)) 95 EX(lwc1 $f5,(SC_FPREGS+40)(a0))
91 EX(lwc1 $f6,(SC_FPREGS+48)(a0)) 96 EX(lwc1 $f6,(SC_FPREGS+48)(a0))
92 EX(lwc1 $f7,(SC_FPREGS+56)(a0)) 97 EX(lwc1 $f7,(SC_FPREGS+56)(a0))
93 EX(lwc1 $f8,(SC_FPREGS+64)(a0)) 98 EX(lwc1 $f8,(SC_FPREGS+64)(a0))
94 EX(lwc1 $f9,(SC_FPREGS+72)(a0)) 99 EX(lwc1 $f9,(SC_FPREGS+72)(a0))
95 EX(lwc1 $f10,(SC_FPREGS+80)(a0)) 100 EX(lwc1 $f10,(SC_FPREGS+80)(a0))
96 EX(lwc1 $f11,(SC_FPREGS+88)(a0)) 101 EX(lwc1 $f11,(SC_FPREGS+88)(a0))
97 EX(lwc1 $f12,(SC_FPREGS+96)(a0)) 102 EX(lwc1 $f12,(SC_FPREGS+96)(a0))
98 EX(lwc1 $f13,(SC_FPREGS+104)(a0)) 103 EX(lwc1 $f13,(SC_FPREGS+104)(a0))
99 EX(lwc1 $f14,(SC_FPREGS+112)(a0)) 104 EX(lwc1 $f14,(SC_FPREGS+112)(a0))
100 EX(lwc1 $f15,(SC_FPREGS+120)(a0)) 105 EX(lwc1 $f15,(SC_FPREGS+120)(a0))
101 EX(lwc1 $f16,(SC_FPREGS+128)(a0)) 106 EX(lwc1 $f16,(SC_FPREGS+128)(a0))
102 EX(lwc1 $f17,(SC_FPREGS+136)(a0)) 107 EX(lwc1 $f17,(SC_FPREGS+136)(a0))
103 EX(lwc1 $f18,(SC_FPREGS+144)(a0)) 108 EX(lwc1 $f18,(SC_FPREGS+144)(a0))
104 EX(lwc1 $f19,(SC_FPREGS+152)(a0)) 109 EX(lwc1 $f19,(SC_FPREGS+152)(a0))
105 EX(lwc1 $f20,(SC_FPREGS+160)(a0)) 110 EX(lwc1 $f20,(SC_FPREGS+160)(a0))
106 EX(lwc1 $f21,(SC_FPREGS+168)(a0)) 111 EX(lwc1 $f21,(SC_FPREGS+168)(a0))
107 EX(lwc1 $f22,(SC_FPREGS+176)(a0)) 112 EX(lwc1 $f22,(SC_FPREGS+176)(a0))
108 EX(lwc1 $f23,(SC_FPREGS+184)(a0)) 113 EX(lwc1 $f23,(SC_FPREGS+184)(a0))
109 EX(lwc1 $f24,(SC_FPREGS+192)(a0)) 114 EX(lwc1 $f24,(SC_FPREGS+192)(a0))
110 EX(lwc1 $f25,(SC_FPREGS+200)(a0)) 115 EX(lwc1 $f25,(SC_FPREGS+200)(a0))
111 EX(lwc1 $f26,(SC_FPREGS+208)(a0)) 116 EX(lwc1 $f26,(SC_FPREGS+208)(a0))
112 EX(lwc1 $f27,(SC_FPREGS+216)(a0)) 117 EX(lwc1 $f27,(SC_FPREGS+216)(a0))
113 EX(lwc1 $f28,(SC_FPREGS+224)(a0)) 118 EX(lwc1 $f28,(SC_FPREGS+224)(a0))
114 EX(lwc1 $f29,(SC_FPREGS+232)(a0)) 119 EX(lwc1 $f29,(SC_FPREGS+232)(a0))
115 EX(lwc1 $f30,(SC_FPREGS+240)(a0)) 120 EX(lwc1 $f30,(SC_FPREGS+240)(a0))
116 EX(lwc1 $f31,(SC_FPREGS+248)(a0)) 121 EX(lwc1 $f31,(SC_FPREGS+248)(a0))
117 jr ra 122 jr ra
118 ctc1 t0,fcr31 123 ctc1 t0,fcr31
124 .set pop
119 END(_restore_fp_context) 125 END(_restore_fp_context)
120 .set reorder 126 .set reorder
121 127
122 .type fault@function 128 .type fault@function
123 .ent fault 129 .ent fault
124 fault: li v0, -EFAULT 130 fault: li v0, -EFAULT
125 jr ra 131 jr ra
126 .end fault 132 .end fault
127 133
arch/mips/kernel/r2300_switch.S
1 /* 1 /*
2 * r2300_switch.S: R2300 specific task switching code. 2 * r2300_switch.S: R2300 specific task switching code.
3 * 3 *
4 * Copyright (C) 1994, 1995, 1996, 1999 by Ralf Baechle 4 * Copyright (C) 1994, 1995, 1996, 1999 by Ralf Baechle
5 * Copyright (C) 1994, 1995, 1996 by Andreas Busse 5 * Copyright (C) 1994, 1995, 1996 by Andreas Busse
6 * 6 *
7 * Multi-cpu abstraction and macros for easier reading: 7 * Multi-cpu abstraction and macros for easier reading:
8 * Copyright (C) 1996 David S. Miller (davem@davemloft.net) 8 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
9 * 9 *
10 * Further modifications to make this work: 10 * Further modifications to make this work:
11 * Copyright (c) 1998-2000 Harald Koerfgen 11 * Copyright (c) 1998-2000 Harald Koerfgen
12 */ 12 */
13 #include <asm/asm.h> 13 #include <asm/asm.h>
14 #include <asm/cachectl.h> 14 #include <asm/cachectl.h>
15 #include <asm/fpregdef.h> 15 #include <asm/fpregdef.h>
16 #include <asm/mipsregs.h> 16 #include <asm/mipsregs.h>
17 #include <asm/asm-offsets.h> 17 #include <asm/asm-offsets.h>
18 #include <asm/regdef.h> 18 #include <asm/regdef.h>
19 #include <asm/stackframe.h> 19 #include <asm/stackframe.h>
20 #include <asm/thread_info.h> 20 #include <asm/thread_info.h>
21 21
22 #include <asm/asmmacro.h> 22 #include <asm/asmmacro.h>
23 23
24 .set mips1 24 .set mips1
25 .align 5 25 .align 5
26 26
27 /* 27 /*
28 * Offset to the current process status flags, the first 32 bytes of the 28 * Offset to the current process status flags, the first 32 bytes of the
29 * stack are not used. 29 * stack are not used.
30 */ 30 */
31 #define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS) 31 #define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS)
32 32
33 /* 33 /*
34 * FPU context is saved iff the process has used its FPU in the current 34 * FPU context is saved iff the process has used its FPU in the current
35 * time slice as indicated by TIF_USEDFPU. In any case, the CU1 bit for user 35 * time slice as indicated by TIF_USEDFPU. In any case, the CU1 bit for user
36 * space STATUS register should be 0, so that a process *always* starts its 36 * space STATUS register should be 0, so that a process *always* starts its
37 * userland with FPU disabled after each context switch. 37 * userland with FPU disabled after each context switch.
38 * 38 *
39 * FPU will be enabled as soon as the process accesses FPU again, through 39 * FPU will be enabled as soon as the process accesses FPU again, through
40 * do_cpu() trap. 40 * do_cpu() trap.
41 */ 41 */
42 42
43 /* 43 /*
44 * task_struct *resume(task_struct *prev, task_struct *next, 44 * task_struct *resume(task_struct *prev, task_struct *next,
45 * struct thread_info *next_ti, int usedfpu) 45 * struct thread_info *next_ti, int usedfpu)
46 */ 46 */
47 LEAF(resume) 47 LEAF(resume)
48 mfc0 t1, CP0_STATUS 48 mfc0 t1, CP0_STATUS
49 sw t1, THREAD_STATUS(a0) 49 sw t1, THREAD_STATUS(a0)
50 cpu_save_nonscratch a0 50 cpu_save_nonscratch a0
51 sw ra, THREAD_REG31(a0) 51 sw ra, THREAD_REG31(a0)
52 52
53 beqz a3, 1f 53 beqz a3, 1f
54 54
55 PTR_L t3, TASK_THREAD_INFO(a0) 55 PTR_L t3, TASK_THREAD_INFO(a0)
56 56
57 /* 57 /*
58 * clear saved user stack CU1 bit 58 * clear saved user stack CU1 bit
59 */ 59 */
60 lw t0, ST_OFF(t3) 60 lw t0, ST_OFF(t3)
61 li t1, ~ST0_CU1 61 li t1, ~ST0_CU1
62 and t0, t0, t1 62 and t0, t0, t1
63 sw t0, ST_OFF(t3) 63 sw t0, ST_OFF(t3)
64 64
65 fpu_save_single a0, t0 # clobbers t0 65 fpu_save_single a0, t0 # clobbers t0
66 66
67 1: 67 1:
68 68
69 #if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP) 69 #if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
70 PTR_LA t8, __stack_chk_guard 70 PTR_LA t8, __stack_chk_guard
71 LONG_L t9, TASK_STACK_CANARY(a1) 71 LONG_L t9, TASK_STACK_CANARY(a1)
72 LONG_S t9, 0(t8) 72 LONG_S t9, 0(t8)
73 #endif 73 #endif
74 74
75 /* 75 /*
76 * The order of restoring the registers takes care of the race 76 * The order of restoring the registers takes care of the race
77 * updating $28, $29 and kernelsp without disabling ints. 77 * updating $28, $29 and kernelsp without disabling ints.
78 */ 78 */
79 move $28, a2 79 move $28, a2
80 cpu_restore_nonscratch a1 80 cpu_restore_nonscratch a1
81 81
82 addiu t1, $28, _THREAD_SIZE - 32 82 addiu t1, $28, _THREAD_SIZE - 32
83 sw t1, kernelsp 83 sw t1, kernelsp
84 84
85 mfc0 t1, CP0_STATUS /* Do we really need this? */ 85 mfc0 t1, CP0_STATUS /* Do we really need this? */
86 li a3, 0xff01 86 li a3, 0xff01
87 and t1, a3 87 and t1, a3
88 lw a2, THREAD_STATUS(a1) 88 lw a2, THREAD_STATUS(a1)
89 nor a3, $0, a3 89 nor a3, $0, a3
90 and a2, a3 90 and a2, a3
91 or a2, t1 91 or a2, t1
92 mtc0 a2, CP0_STATUS 92 mtc0 a2, CP0_STATUS
93 move v0, a0 93 move v0, a0
94 jr ra 94 jr ra
95 END(resume) 95 END(resume)
96 96
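The CU1 handling in resume() implements lazy FPU switching; roughly, in C (ST0_CU1 is the kernel's real mask, the helper is illustrative):

/*
 * Clearing CU1 in the saved user STATUS guarantees that the next FP
 * instruction executed by this task traps to do_cpu(), which restores
 * the FP context on demand instead of on every context switch.
 */
static unsigned int drop_user_fpu_ownership(unsigned int saved_status)
{
        return saved_status & ~ST0_CU1;
}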
97 /* 97 /*
98 * Save a thread's fp context. 98 * Save a thread's fp context.
99 */ 99 */
100 LEAF(_save_fp) 100 LEAF(_save_fp)
101 fpu_save_single a0, t1 # clobbers t1 101 fpu_save_single a0, t1 # clobbers t1
102 jr ra 102 jr ra
103 END(_save_fp) 103 END(_save_fp)
104 104
105 /* 105 /*
106 * Restore a thread's fp context. 106 * Restore a thread's fp context.
107 */ 107 */
108 LEAF(_restore_fp) 108 LEAF(_restore_fp)
109 fpu_restore_single a0, t1 # clobbers t1 109 fpu_restore_single a0, t1 # clobbers t1
110 jr ra 110 jr ra
111 END(_restore_fp) 111 END(_restore_fp)
112 112
113 /* 113 /*
114 * Load the FPU with signalling NANS. This bit pattern we're using has 114 * Load the FPU with signalling NANS. This bit pattern we're using has
115 * the property that no matter whether considered as single or as double 115 * the property that no matter whether considered as single or as double
116 * precision represents signaling NANS. 116 * precision represents signaling NANS.
117 * 117 *
118 * We initialize fcr31 to rounding to nearest, no exceptions. 118 * We initialize fcr31 to rounding to nearest, no exceptions.
119 */ 119 */
120 120
121 #define FPU_DEFAULT 0x00000000 121 #define FPU_DEFAULT 0x00000000
122 122
123 .set push
124 SET_HARDFLOAT
125
123 LEAF(_init_fpu) 126 LEAF(_init_fpu)
124 mfc0 t0, CP0_STATUS 127 mfc0 t0, CP0_STATUS
125 li t1, ST0_CU1 128 li t1, ST0_CU1
126 or t0, t1 129 or t0, t1
127 mtc0 t0, CP0_STATUS 130 mtc0 t0, CP0_STATUS
128 131
129 li t1, FPU_DEFAULT 132 li t1, FPU_DEFAULT
130 ctc1 t1, fcr31 133 ctc1 t1, fcr31
131 134
132 li t0, -1 135 li t0, -1
133 136
134 mtc1 t0, $f0 137 mtc1 t0, $f0
135 mtc1 t0, $f1 138 mtc1 t0, $f1
136 mtc1 t0, $f2 139 mtc1 t0, $f2
137 mtc1 t0, $f3 140 mtc1 t0, $f3
138 mtc1 t0, $f4 141 mtc1 t0, $f4
139 mtc1 t0, $f5 142 mtc1 t0, $f5
140 mtc1 t0, $f6 143 mtc1 t0, $f6
141 mtc1 t0, $f7 144 mtc1 t0, $f7
142 mtc1 t0, $f8 145 mtc1 t0, $f8
143 mtc1 t0, $f9 146 mtc1 t0, $f9
144 mtc1 t0, $f10 147 mtc1 t0, $f10
145 mtc1 t0, $f11 148 mtc1 t0, $f11
146 mtc1 t0, $f12 149 mtc1 t0, $f12
147 mtc1 t0, $f13 150 mtc1 t0, $f13
148 mtc1 t0, $f14 151 mtc1 t0, $f14
149 mtc1 t0, $f15 152 mtc1 t0, $f15
150 mtc1 t0, $f16 153 mtc1 t0, $f16
151 mtc1 t0, $f17 154 mtc1 t0, $f17
152 mtc1 t0, $f18 155 mtc1 t0, $f18
153 mtc1 t0, $f19 156 mtc1 t0, $f19
154 mtc1 t0, $f20 157 mtc1 t0, $f20
155 mtc1 t0, $f21 158 mtc1 t0, $f21
156 mtc1 t0, $f22 159 mtc1 t0, $f22
157 mtc1 t0, $f23 160 mtc1 t0, $f23
158 mtc1 t0, $f24 161 mtc1 t0, $f24
159 mtc1 t0, $f25 162 mtc1 t0, $f25
160 mtc1 t0, $f26 163 mtc1 t0, $f26
161 mtc1 t0, $f27 164 mtc1 t0, $f27
162 mtc1 t0, $f28 165 mtc1 t0, $f28
163 mtc1 t0, $f29 166 mtc1 t0, $f29
164 mtc1 t0, $f30 167 mtc1 t0, $f30
165 mtc1 t0, $f31 168 mtc1 t0, $f31
166 jr ra 169 jr ra
167 END(_init_fpu) 170 END(_init_fpu)
171
172 .set pop
168 173
arch/mips/kernel/r4k_fpu.S
1 /* 1 /*
2 * This file is subject to the terms and conditions of the GNU General Public 2 * This file is subject to the terms and conditions of the GNU General Public
3 * License. See the file "COPYING" in the main directory of this archive 3 * License. See the file "COPYING" in the main directory of this archive
4 * for more details. 4 * for more details.
5 * 5 *
6 * Copyright (C) 1996, 98, 99, 2000, 01 Ralf Baechle 6 * Copyright (C) 1996, 98, 99, 2000, 01 Ralf Baechle
7 * 7 *
8 * Multi-arch abstraction and asm macros for easier reading: 8 * Multi-arch abstraction and asm macros for easier reading:
9 * Copyright (C) 1996 David S. Miller (davem@davemloft.net) 9 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
10 * 10 *
11 * Carsten Langgaard, carstenl@mips.com 11 * Carsten Langgaard, carstenl@mips.com
12 * Copyright (C) 2000 MIPS Technologies, Inc. 12 * Copyright (C) 2000 MIPS Technologies, Inc.
13 * Copyright (C) 1999, 2001 Silicon Graphics, Inc. 13 * Copyright (C) 1999, 2001 Silicon Graphics, Inc.
14 */ 14 */
15 #include <asm/asm.h> 15 #include <asm/asm.h>
16 #include <asm/errno.h> 16 #include <asm/errno.h>
17 #include <asm/fpregdef.h> 17 #include <asm/fpregdef.h>
18 #include <asm/mipsregs.h> 18 #include <asm/mipsregs.h>
19 #include <asm/asm-offsets.h> 19 #include <asm/asm-offsets.h>
20 #include <asm/regdef.h> 20 #include <asm/regdef.h>
21 21
22 /* preprocessor replaces the fp in ".set fp=64" with $30 otherwise */
23 #undef fp
24
22 .macro EX insn, reg, src 25 .macro EX insn, reg, src
23 .set push 26 .set push
27 SET_HARDFLOAT
24 .set nomacro 28 .set nomacro
25 .ex\@: \insn \reg, \src 29 .ex\@: \insn \reg, \src
26 .set pop 30 .set pop
27 .section __ex_table,"a" 31 .section __ex_table,"a"
28 PTR .ex\@, fault 32 PTR .ex\@, fault
29 .previous 33 .previous
30 .endm 34 .endm
31 35
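Each FP load/store below is wrapped in EX, which records the instruction in __ex_table; conceptually every entry is just an (address, fixup) pair (the struct below is illustrative, the real table is emitted by the assembler directives):

/*
 * If an EX-wrapped ldc1/sdc1 (or lw/sw) faults on a bad sigcontext
 * pointer, the fixup redirects execution to the "fault:" stub, making
 * the whole save/restore routine return -EFAULT instead of oopsing.
 */
struct ex_table_entry_sketch {
        unsigned long insn;     /* address of the access that may fault */
        unsigned long fixup;    /* address of the recovery code */
};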
32 .set noreorder 36 .set noreorder
33 .set arch=r4000 37 .set arch=r4000
34 38
35 LEAF(_save_fp_context) 39 LEAF(_save_fp_context)
40 .set push
41 SET_HARDFLOAT
36 cfc1 t1, fcr31 42 cfc1 t1, fcr31
43 .set pop
37 44
38 #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) 45 #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
39 .set push 46 .set push
47 SET_HARDFLOAT
40 #ifdef CONFIG_CPU_MIPS32_R2 48 #ifdef CONFIG_CPU_MIPS32_R2
41 .set mips64r2 49 .set mips32r2
50 .set fp=64
42 mfc0 t0, CP0_STATUS 51 mfc0 t0, CP0_STATUS
43 sll t0, t0, 5 52 sll t0, t0, 5
44 bgez t0, 1f # skip storing odd if FR=0 53 bgez t0, 1f # skip storing odd if FR=0
45 nop 54 nop
46 #endif 55 #endif
47 /* Store the 16 odd double precision registers */ 56 /* Store the 16 odd double precision registers */
48 EX sdc1 $f1, SC_FPREGS+8(a0) 57 EX sdc1 $f1, SC_FPREGS+8(a0)
49 EX sdc1 $f3, SC_FPREGS+24(a0) 58 EX sdc1 $f3, SC_FPREGS+24(a0)
50 EX sdc1 $f5, SC_FPREGS+40(a0) 59 EX sdc1 $f5, SC_FPREGS+40(a0)
51 EX sdc1 $f7, SC_FPREGS+56(a0) 60 EX sdc1 $f7, SC_FPREGS+56(a0)
52 EX sdc1 $f9, SC_FPREGS+72(a0) 61 EX sdc1 $f9, SC_FPREGS+72(a0)
53 EX sdc1 $f11, SC_FPREGS+88(a0) 62 EX sdc1 $f11, SC_FPREGS+88(a0)
54 EX sdc1 $f13, SC_FPREGS+104(a0) 63 EX sdc1 $f13, SC_FPREGS+104(a0)
55 EX sdc1 $f15, SC_FPREGS+120(a0) 64 EX sdc1 $f15, SC_FPREGS+120(a0)
56 EX sdc1 $f17, SC_FPREGS+136(a0) 65 EX sdc1 $f17, SC_FPREGS+136(a0)
57 EX sdc1 $f19, SC_FPREGS+152(a0) 66 EX sdc1 $f19, SC_FPREGS+152(a0)
58 EX sdc1 $f21, SC_FPREGS+168(a0) 67 EX sdc1 $f21, SC_FPREGS+168(a0)
59 EX sdc1 $f23, SC_FPREGS+184(a0) 68 EX sdc1 $f23, SC_FPREGS+184(a0)
60 EX sdc1 $f25, SC_FPREGS+200(a0) 69 EX sdc1 $f25, SC_FPREGS+200(a0)
61 EX sdc1 $f27, SC_FPREGS+216(a0) 70 EX sdc1 $f27, SC_FPREGS+216(a0)
62 EX sdc1 $f29, SC_FPREGS+232(a0) 71 EX sdc1 $f29, SC_FPREGS+232(a0)
63 EX sdc1 $f31, SC_FPREGS+248(a0) 72 EX sdc1 $f31, SC_FPREGS+248(a0)
64 1: .set pop 73 1: .set pop
65 #endif 74 #endif
66 75
76 .set push
77 SET_HARDFLOAT
67 /* Store the 16 even double precision registers */ 78 /* Store the 16 even double precision registers */
68 EX sdc1 $f0, SC_FPREGS+0(a0) 79 EX sdc1 $f0, SC_FPREGS+0(a0)
69 EX sdc1 $f2, SC_FPREGS+16(a0) 80 EX sdc1 $f2, SC_FPREGS+16(a0)
70 EX sdc1 $f4, SC_FPREGS+32(a0) 81 EX sdc1 $f4, SC_FPREGS+32(a0)
71 EX sdc1 $f6, SC_FPREGS+48(a0) 82 EX sdc1 $f6, SC_FPREGS+48(a0)
72 EX sdc1 $f8, SC_FPREGS+64(a0) 83 EX sdc1 $f8, SC_FPREGS+64(a0)
73 EX sdc1 $f10, SC_FPREGS+80(a0) 84 EX sdc1 $f10, SC_FPREGS+80(a0)
74 EX sdc1 $f12, SC_FPREGS+96(a0) 85 EX sdc1 $f12, SC_FPREGS+96(a0)
75 EX sdc1 $f14, SC_FPREGS+112(a0) 86 EX sdc1 $f14, SC_FPREGS+112(a0)
76 EX sdc1 $f16, SC_FPREGS+128(a0) 87 EX sdc1 $f16, SC_FPREGS+128(a0)
77 EX sdc1 $f18, SC_FPREGS+144(a0) 88 EX sdc1 $f18, SC_FPREGS+144(a0)
78 EX sdc1 $f20, SC_FPREGS+160(a0) 89 EX sdc1 $f20, SC_FPREGS+160(a0)
79 EX sdc1 $f22, SC_FPREGS+176(a0) 90 EX sdc1 $f22, SC_FPREGS+176(a0)
80 EX sdc1 $f24, SC_FPREGS+192(a0) 91 EX sdc1 $f24, SC_FPREGS+192(a0)
81 EX sdc1 $f26, SC_FPREGS+208(a0) 92 EX sdc1 $f26, SC_FPREGS+208(a0)
82 EX sdc1 $f28, SC_FPREGS+224(a0) 93 EX sdc1 $f28, SC_FPREGS+224(a0)
83 EX sdc1 $f30, SC_FPREGS+240(a0) 94 EX sdc1 $f30, SC_FPREGS+240(a0)
84 EX sw t1, SC_FPC_CSR(a0) 95 EX sw t1, SC_FPC_CSR(a0)
85 jr ra 96 jr ra
86 li v0, 0 # success 97 li v0, 0 # success
98 .set pop
87 END(_save_fp_context) 99 END(_save_fp_context)
88 100
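The "sll t0, t0, 5; bgez t0, 1f" sequence above tests Status.FR; in C (bit 26 is the FR bit of CP0_STATUS, the helper is illustrative):

/*
 * Shifting the status word left by 5 moves bit 26 (FR) into the sign
 * bit, so bgez is taken when FR=0, i.e. when the odd-numbered FP
 * registers are not independent 64-bit registers and must be skipped.
 */
static int fpu_has_odd_fp_regs(unsigned int c0_status)
{
        return (c0_status & (1u << 26)) != 0;
}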
89 #ifdef CONFIG_MIPS32_COMPAT 101 #ifdef CONFIG_MIPS32_COMPAT
90 /* Save 32-bit process floating point context */ 102 /* Save 32-bit process floating point context */
91 LEAF(_save_fp_context32) 103 LEAF(_save_fp_context32)
104 .set push
105 SET_HARDFLOAT
92 cfc1 t1, fcr31 106 cfc1 t1, fcr31
93 107
94 mfc0 t0, CP0_STATUS 108 mfc0 t0, CP0_STATUS
95 sll t0, t0, 5 109 sll t0, t0, 5
96 bgez t0, 1f # skip storing odd if FR=0 110 bgez t0, 1f # skip storing odd if FR=0
97 nop 111 nop
98 112
99 /* Store the 16 odd double precision registers */ 113 /* Store the 16 odd double precision registers */
100 EX sdc1 $f1, SC32_FPREGS+8(a0) 114 EX sdc1 $f1, SC32_FPREGS+8(a0)
101 EX sdc1 $f3, SC32_FPREGS+24(a0) 115 EX sdc1 $f3, SC32_FPREGS+24(a0)
102 EX sdc1 $f5, SC32_FPREGS+40(a0) 116 EX sdc1 $f5, SC32_FPREGS+40(a0)
103 EX sdc1 $f7, SC32_FPREGS+56(a0) 117 EX sdc1 $f7, SC32_FPREGS+56(a0)
104 EX sdc1 $f9, SC32_FPREGS+72(a0) 118 EX sdc1 $f9, SC32_FPREGS+72(a0)
105 EX sdc1 $f11, SC32_FPREGS+88(a0) 119 EX sdc1 $f11, SC32_FPREGS+88(a0)
106 EX sdc1 $f13, SC32_FPREGS+104(a0) 120 EX sdc1 $f13, SC32_FPREGS+104(a0)
107 EX sdc1 $f15, SC32_FPREGS+120(a0) 121 EX sdc1 $f15, SC32_FPREGS+120(a0)
108 EX sdc1 $f17, SC32_FPREGS+136(a0) 122 EX sdc1 $f17, SC32_FPREGS+136(a0)
109 EX sdc1 $f19, SC32_FPREGS+152(a0) 123 EX sdc1 $f19, SC32_FPREGS+152(a0)
110 EX sdc1 $f21, SC32_FPREGS+168(a0) 124 EX sdc1 $f21, SC32_FPREGS+168(a0)
111 EX sdc1 $f23, SC32_FPREGS+184(a0) 125 EX sdc1 $f23, SC32_FPREGS+184(a0)
112 EX sdc1 $f25, SC32_FPREGS+200(a0) 126 EX sdc1 $f25, SC32_FPREGS+200(a0)
113 EX sdc1 $f27, SC32_FPREGS+216(a0) 127 EX sdc1 $f27, SC32_FPREGS+216(a0)
114 EX sdc1 $f29, SC32_FPREGS+232(a0) 128 EX sdc1 $f29, SC32_FPREGS+232(a0)
115 EX sdc1 $f31, SC32_FPREGS+248(a0) 129 EX sdc1 $f31, SC32_FPREGS+248(a0)
116 130
117 /* Store the 16 even double precision registers */ 131 /* Store the 16 even double precision registers */
118 1: EX sdc1 $f0, SC32_FPREGS+0(a0) 132 1: EX sdc1 $f0, SC32_FPREGS+0(a0)
119 EX sdc1 $f2, SC32_FPREGS+16(a0) 133 EX sdc1 $f2, SC32_FPREGS+16(a0)
120 EX sdc1 $f4, SC32_FPREGS+32(a0) 134 EX sdc1 $f4, SC32_FPREGS+32(a0)
121 EX sdc1 $f6, SC32_FPREGS+48(a0) 135 EX sdc1 $f6, SC32_FPREGS+48(a0)
122 EX sdc1 $f8, SC32_FPREGS+64(a0) 136 EX sdc1 $f8, SC32_FPREGS+64(a0)
123 EX sdc1 $f10, SC32_FPREGS+80(a0) 137 EX sdc1 $f10, SC32_FPREGS+80(a0)
124 EX sdc1 $f12, SC32_FPREGS+96(a0) 138 EX sdc1 $f12, SC32_FPREGS+96(a0)
125 EX sdc1 $f14, SC32_FPREGS+112(a0) 139 EX sdc1 $f14, SC32_FPREGS+112(a0)
126 EX sdc1 $f16, SC32_FPREGS+128(a0) 140 EX sdc1 $f16, SC32_FPREGS+128(a0)
127 EX sdc1 $f18, SC32_FPREGS+144(a0) 141 EX sdc1 $f18, SC32_FPREGS+144(a0)
128 EX sdc1 $f20, SC32_FPREGS+160(a0) 142 EX sdc1 $f20, SC32_FPREGS+160(a0)
129 EX sdc1 $f22, SC32_FPREGS+176(a0) 143 EX sdc1 $f22, SC32_FPREGS+176(a0)
130 EX sdc1 $f24, SC32_FPREGS+192(a0) 144 EX sdc1 $f24, SC32_FPREGS+192(a0)
131 EX sdc1 $f26, SC32_FPREGS+208(a0) 145 EX sdc1 $f26, SC32_FPREGS+208(a0)
132 EX sdc1 $f28, SC32_FPREGS+224(a0) 146 EX sdc1 $f28, SC32_FPREGS+224(a0)
133 EX sdc1 $f30, SC32_FPREGS+240(a0) 147 EX sdc1 $f30, SC32_FPREGS+240(a0)
134 EX sw t1, SC32_FPC_CSR(a0) 148 EX sw t1, SC32_FPC_CSR(a0)
135 cfc1 t0, $0 # implementation/version 149 cfc1 t0, $0 # implementation/version
136 EX sw t0, SC32_FPC_EIR(a0) 150 EX sw t0, SC32_FPC_EIR(a0)
151 .set pop
137 152
138 jr ra 153 jr ra
139 li v0, 0 # success 154 li v0, 0 # success
140 END(_save_fp_context32) 155 END(_save_fp_context32)
141 #endif 156 #endif
142 157
143 /* 158 /*
144 * Restore FPU state: 159 * Restore FPU state:
145 * - fp gp registers 160 * - fp gp registers
146 * - cp1 status/control register 161 * - cp1 status/control register
147 */ 162 */
148 LEAF(_restore_fp_context) 163 LEAF(_restore_fp_context)
149 EX lw t1, SC_FPC_CSR(a0) 164 EX lw t1, SC_FPC_CSR(a0)
150 165
151 #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) 166 #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
152 .set push 167 .set push
168 SET_HARDFLOAT
153 #ifdef CONFIG_CPU_MIPS32_R2 169 #ifdef CONFIG_CPU_MIPS32_R2
154 .set mips64r2 170 .set mips32r2
171 .set fp=64
155 mfc0 t0, CP0_STATUS 172 mfc0 t0, CP0_STATUS
156 sll t0, t0, 5 173 sll t0, t0, 5
157 bgez t0, 1f # skip loading odd if FR=0 174 bgez t0, 1f # skip loading odd if FR=0
158 nop 175 nop
159 #endif 176 #endif
160 EX ldc1 $f1, SC_FPREGS+8(a0) 177 EX ldc1 $f1, SC_FPREGS+8(a0)
161 EX ldc1 $f3, SC_FPREGS+24(a0) 178 EX ldc1 $f3, SC_FPREGS+24(a0)
162 EX ldc1 $f5, SC_FPREGS+40(a0) 179 EX ldc1 $f5, SC_FPREGS+40(a0)
163 EX ldc1 $f7, SC_FPREGS+56(a0) 180 EX ldc1 $f7, SC_FPREGS+56(a0)
164 EX ldc1 $f9, SC_FPREGS+72(a0) 181 EX ldc1 $f9, SC_FPREGS+72(a0)
165 EX ldc1 $f11, SC_FPREGS+88(a0) 182 EX ldc1 $f11, SC_FPREGS+88(a0)
166 EX ldc1 $f13, SC_FPREGS+104(a0) 183 EX ldc1 $f13, SC_FPREGS+104(a0)
167 EX ldc1 $f15, SC_FPREGS+120(a0) 184 EX ldc1 $f15, SC_FPREGS+120(a0)
168 EX ldc1 $f17, SC_FPREGS+136(a0) 185 EX ldc1 $f17, SC_FPREGS+136(a0)
169 EX ldc1 $f19, SC_FPREGS+152(a0) 186 EX ldc1 $f19, SC_FPREGS+152(a0)
170 EX ldc1 $f21, SC_FPREGS+168(a0) 187 EX ldc1 $f21, SC_FPREGS+168(a0)
171 EX ldc1 $f23, SC_FPREGS+184(a0) 188 EX ldc1 $f23, SC_FPREGS+184(a0)
172 EX ldc1 $f25, SC_FPREGS+200(a0) 189 EX ldc1 $f25, SC_FPREGS+200(a0)
173 EX ldc1 $f27, SC_FPREGS+216(a0) 190 EX ldc1 $f27, SC_FPREGS+216(a0)
174 EX ldc1 $f29, SC_FPREGS+232(a0) 191 EX ldc1 $f29, SC_FPREGS+232(a0)
175 EX ldc1 $f31, SC_FPREGS+248(a0) 192 EX ldc1 $f31, SC_FPREGS+248(a0)
176 1: .set pop 193 1: .set pop
177 #endif 194 #endif
195 .set push
196 SET_HARDFLOAT
178 EX ldc1 $f0, SC_FPREGS+0(a0) 197 EX ldc1 $f0, SC_FPREGS+0(a0)
179 EX ldc1 $f2, SC_FPREGS+16(a0) 198 EX ldc1 $f2, SC_FPREGS+16(a0)
180 EX ldc1 $f4, SC_FPREGS+32(a0) 199 EX ldc1 $f4, SC_FPREGS+32(a0)
181 EX ldc1 $f6, SC_FPREGS+48(a0) 200 EX ldc1 $f6, SC_FPREGS+48(a0)
182 EX ldc1 $f8, SC_FPREGS+64(a0) 201 EX ldc1 $f8, SC_FPREGS+64(a0)
183 EX ldc1 $f10, SC_FPREGS+80(a0) 202 EX ldc1 $f10, SC_FPREGS+80(a0)
184 EX ldc1 $f12, SC_FPREGS+96(a0) 203 EX ldc1 $f12, SC_FPREGS+96(a0)
185 EX ldc1 $f14, SC_FPREGS+112(a0) 204 EX ldc1 $f14, SC_FPREGS+112(a0)
186 EX ldc1 $f16, SC_FPREGS+128(a0) 205 EX ldc1 $f16, SC_FPREGS+128(a0)
187 EX ldc1 $f18, SC_FPREGS+144(a0) 206 EX ldc1 $f18, SC_FPREGS+144(a0)
188 EX ldc1 $f20, SC_FPREGS+160(a0) 207 EX ldc1 $f20, SC_FPREGS+160(a0)
189 EX ldc1 $f22, SC_FPREGS+176(a0) 208 EX ldc1 $f22, SC_FPREGS+176(a0)
190 EX ldc1 $f24, SC_FPREGS+192(a0) 209 EX ldc1 $f24, SC_FPREGS+192(a0)
191 EX ldc1 $f26, SC_FPREGS+208(a0) 210 EX ldc1 $f26, SC_FPREGS+208(a0)
192 EX ldc1 $f28, SC_FPREGS+224(a0) 211 EX ldc1 $f28, SC_FPREGS+224(a0)
193 EX ldc1 $f30, SC_FPREGS+240(a0) 212 EX ldc1 $f30, SC_FPREGS+240(a0)
194 ctc1 t1, fcr31 213 ctc1 t1, fcr31
214 .set pop
195 jr ra 215 jr ra
196 li v0, 0 # success 216 li v0, 0 # success
197 END(_restore_fp_context) 217 END(_restore_fp_context)
198 218
199 #ifdef CONFIG_MIPS32_COMPAT 219 #ifdef CONFIG_MIPS32_COMPAT
200 LEAF(_restore_fp_context32) 220 LEAF(_restore_fp_context32)
201 /* Restore an o32 sigcontext. */ 221 /* Restore an o32 sigcontext. */
222 .set push
223 SET_HARDFLOAT
202 EX lw t1, SC32_FPC_CSR(a0) 224 EX lw t1, SC32_FPC_CSR(a0)
203 225
204 mfc0 t0, CP0_STATUS 226 mfc0 t0, CP0_STATUS
205 sll t0, t0, 5 227 sll t0, t0, 5
206 bgez t0, 1f # skip loading odd if FR=0 228 bgez t0, 1f # skip loading odd if FR=0
207 nop 229 nop
208 230
209 EX ldc1 $f1, SC32_FPREGS+8(a0) 231 EX ldc1 $f1, SC32_FPREGS+8(a0)
210 EX ldc1 $f3, SC32_FPREGS+24(a0) 232 EX ldc1 $f3, SC32_FPREGS+24(a0)
211 EX ldc1 $f5, SC32_FPREGS+40(a0) 233 EX ldc1 $f5, SC32_FPREGS+40(a0)
212 EX ldc1 $f7, SC32_FPREGS+56(a0) 234 EX ldc1 $f7, SC32_FPREGS+56(a0)
213 EX ldc1 $f9, SC32_FPREGS+72(a0) 235 EX ldc1 $f9, SC32_FPREGS+72(a0)
214 EX ldc1 $f11, SC32_FPREGS+88(a0) 236 EX ldc1 $f11, SC32_FPREGS+88(a0)
215 EX ldc1 $f13, SC32_FPREGS+104(a0) 237 EX ldc1 $f13, SC32_FPREGS+104(a0)
216 EX ldc1 $f15, SC32_FPREGS+120(a0) 238 EX ldc1 $f15, SC32_FPREGS+120(a0)
217 EX ldc1 $f17, SC32_FPREGS+136(a0) 239 EX ldc1 $f17, SC32_FPREGS+136(a0)
218 EX ldc1 $f19, SC32_FPREGS+152(a0) 240 EX ldc1 $f19, SC32_FPREGS+152(a0)
219 EX ldc1 $f21, SC32_FPREGS+168(a0) 241 EX ldc1 $f21, SC32_FPREGS+168(a0)
220 EX ldc1 $f23, SC32_FPREGS+184(a0) 242 EX ldc1 $f23, SC32_FPREGS+184(a0)
221 EX ldc1 $f25, SC32_FPREGS+200(a0) 243 EX ldc1 $f25, SC32_FPREGS+200(a0)
222 EX ldc1 $f27, SC32_FPREGS+216(a0) 244 EX ldc1 $f27, SC32_FPREGS+216(a0)
223 EX ldc1 $f29, SC32_FPREGS+232(a0) 245 EX ldc1 $f29, SC32_FPREGS+232(a0)
224 EX ldc1 $f31, SC32_FPREGS+248(a0) 246 EX ldc1 $f31, SC32_FPREGS+248(a0)
225 247
226 1: EX ldc1 $f0, SC32_FPREGS+0(a0) 248 1: EX ldc1 $f0, SC32_FPREGS+0(a0)
227 EX ldc1 $f2, SC32_FPREGS+16(a0) 249 EX ldc1 $f2, SC32_FPREGS+16(a0)
228 EX ldc1 $f4, SC32_FPREGS+32(a0) 250 EX ldc1 $f4, SC32_FPREGS+32(a0)
229 EX ldc1 $f6, SC32_FPREGS+48(a0) 251 EX ldc1 $f6, SC32_FPREGS+48(a0)
230 EX ldc1 $f8, SC32_FPREGS+64(a0) 252 EX ldc1 $f8, SC32_FPREGS+64(a0)
231 EX ldc1 $f10, SC32_FPREGS+80(a0) 253 EX ldc1 $f10, SC32_FPREGS+80(a0)
232 EX ldc1 $f12, SC32_FPREGS+96(a0) 254 EX ldc1 $f12, SC32_FPREGS+96(a0)
233 EX ldc1 $f14, SC32_FPREGS+112(a0) 255 EX ldc1 $f14, SC32_FPREGS+112(a0)
234 EX ldc1 $f16, SC32_FPREGS+128(a0) 256 EX ldc1 $f16, SC32_FPREGS+128(a0)
235 EX ldc1 $f18, SC32_FPREGS+144(a0) 257 EX ldc1 $f18, SC32_FPREGS+144(a0)
236 EX ldc1 $f20, SC32_FPREGS+160(a0) 258 EX ldc1 $f20, SC32_FPREGS+160(a0)
237 EX ldc1 $f22, SC32_FPREGS+176(a0) 259 EX ldc1 $f22, SC32_FPREGS+176(a0)
238 EX ldc1 $f24, SC32_FPREGS+192(a0) 260 EX ldc1 $f24, SC32_FPREGS+192(a0)
239 EX ldc1 $f26, SC32_FPREGS+208(a0) 261 EX ldc1 $f26, SC32_FPREGS+208(a0)
240 EX ldc1 $f28, SC32_FPREGS+224(a0) 262 EX ldc1 $f28, SC32_FPREGS+224(a0)
241 EX ldc1 $f30, SC32_FPREGS+240(a0) 263 EX ldc1 $f30, SC32_FPREGS+240(a0)
242 ctc1 t1, fcr31 264 ctc1 t1, fcr31
243 jr ra 265 jr ra
244 li v0, 0 # success 266 li v0, 0 # success
267 .set pop
245 END(_restore_fp_context32) 268 END(_restore_fp_context32)
246 #endif 269 #endif
247 270
248 .set reorder 271 .set reorder
249 272
250 .type fault@function 273 .type fault@function
251 .ent fault 274 .ent fault
252 fault: li v0, -EFAULT # failure 275 fault: li v0, -EFAULT # failure
253 jr ra 276 jr ra
254 .end fault 277 .end fault
255 278
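Note on the pattern added by the hunks above: each run of FPU accesses (the EX ldc1 sequences and the cfc1/ctc1 control-register accesses) is now bracketed by ".set push" / SET_HARDFLOAT / ".set pop", so the override is scoped to exactly those instructions. The sketch below is illustrative only and not part of this patch: it shows the same scoping idiom from C inline assembly, with the ".set hardfloat" directive written out literally on the assumption that this is what SET_HARDFLOAT expands to on an assembler that knows the option (the kernel hides it behind the macro so it vanishes on older assemblers). The function name read_fcr31 is made up for the example; it builds only for a MIPS target with an FPU.

    #include <stdio.h>

    /* Illustrative sketch, not part of this patch. */
    static unsigned int read_fcr31(void)
    {
            unsigned int val;

            __asm__ __volatile__(
            "       .set    push            \n"
            "       .set    hardfloat       \n"
            "       cfc1    %0, $31         \n"     /* FP control/status register */
            "       .set    pop             \n"
            : "=r" (val));
            return val;
    }

    int main(void)
    {
            printf("fcr31 = 0x%08x\n", read_fcr31());
            return 0;
    }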
arch/mips/kernel/r4k_switch.S
1 /* 1 /*
2 * This file is subject to the terms and conditions of the GNU General Public 2 * This file is subject to the terms and conditions of the GNU General Public
3 * License. See the file "COPYING" in the main directory of this archive 3 * License. See the file "COPYING" in the main directory of this archive
4 * for more details. 4 * for more details.
5 * 5 *
6 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle 6 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
7 * Copyright (C) 1996 David S. Miller (davem@davemloft.net) 7 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
8 * Copyright (C) 1994, 1995, 1996, by Andreas Busse 8 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
9 * Copyright (C) 1999 Silicon Graphics, Inc. 9 * Copyright (C) 1999 Silicon Graphics, Inc.
10 * Copyright (C) 2000 MIPS Technologies, Inc. 10 * Copyright (C) 2000 MIPS Technologies, Inc.
11 * written by Carsten Langgaard, carstenl@mips.com 11 * written by Carsten Langgaard, carstenl@mips.com
12 */ 12 */
13 #include <asm/asm.h> 13 #include <asm/asm.h>
14 #include <asm/cachectl.h> 14 #include <asm/cachectl.h>
15 #include <asm/fpregdef.h> 15 #include <asm/fpregdef.h>
16 #include <asm/mipsregs.h> 16 #include <asm/mipsregs.h>
17 #include <asm/asm-offsets.h> 17 #include <asm/asm-offsets.h>
18 #include <asm/pgtable-bits.h> 18 #include <asm/pgtable-bits.h>
19 #include <asm/regdef.h> 19 #include <asm/regdef.h>
20 #include <asm/stackframe.h> 20 #include <asm/stackframe.h>
21 #include <asm/thread_info.h> 21 #include <asm/thread_info.h>
22 22
23 #include <asm/asmmacro.h> 23 #include <asm/asmmacro.h>
24 24
25 /* preprocessor replaces the fp in ".set fp=64" with $30 otherwise */
26 #undef fp
27
25 /* 28 /*
26 * Offset to the current process status flags; the first 32 bytes of the 29 * Offset to the current process status flags; the first 32 bytes of the
27 * stack are not used. 30 * stack are not used.
28 */ 31 */
29 #define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS) 32 #define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS)
30 33
31 #ifndef USE_ALTERNATE_RESUME_IMPL 34 #ifndef USE_ALTERNATE_RESUME_IMPL
32 /* 35 /*
33 * task_struct *resume(task_struct *prev, task_struct *next, 36 * task_struct *resume(task_struct *prev, task_struct *next,
34 * struct thread_info *next_ti, s32 fp_save) 37 * struct thread_info *next_ti, s32 fp_save)
35 */ 38 */
36 .align 5 39 .align 5
37 LEAF(resume) 40 LEAF(resume)
38 mfc0 t1, CP0_STATUS 41 mfc0 t1, CP0_STATUS
39 LONG_S t1, THREAD_STATUS(a0) 42 LONG_S t1, THREAD_STATUS(a0)
40 cpu_save_nonscratch a0 43 cpu_save_nonscratch a0
41 LONG_S ra, THREAD_REG31(a0) 44 LONG_S ra, THREAD_REG31(a0)
42 45
43 /* 46 /*
44 * Check whether we need to save any FP context. FP context is saved 47 * Check whether we need to save any FP context. FP context is saved
45 * iff the process has used the context with the scalar FPU or the MSA 48 * iff the process has used the context with the scalar FPU or the MSA
46 * ASE in the current time slice, as indicated by _TIF_USEDFPU and 49 * ASE in the current time slice, as indicated by _TIF_USEDFPU and
47 * _TIF_USEDMSA respectively. switch_to will have set fp_save 50 * _TIF_USEDMSA respectively. switch_to will have set fp_save
48 * accordingly to an FP_SAVE_ enum value. 51 * accordingly to an FP_SAVE_ enum value.
49 */ 52 */
50 beqz a3, 2f 53 beqz a3, 2f
51 54
52 /* 55 /*
53 * We do. Clear the saved CU1 bit for prev, such that next time it is 56 * We do. Clear the saved CU1 bit for prev, such that next time it is
54 * scheduled it will start in userland with the FPU disabled. If the 57 * scheduled it will start in userland with the FPU disabled. If the
55 * task uses the FPU then it will be enabled again via the do_cpu trap. 58 * task uses the FPU then it will be enabled again via the do_cpu trap.
56 * This allows us to lazily restore the FP context. 59 * This allows us to lazily restore the FP context.
57 */ 60 */
58 PTR_L t3, TASK_THREAD_INFO(a0) 61 PTR_L t3, TASK_THREAD_INFO(a0)
59 LONG_L t0, ST_OFF(t3) 62 LONG_L t0, ST_OFF(t3)
60 li t1, ~ST0_CU1 63 li t1, ~ST0_CU1
61 and t0, t0, t1 64 and t0, t0, t1
62 LONG_S t0, ST_OFF(t3) 65 LONG_S t0, ST_OFF(t3)
63 66
64 /* Check whether we're saving scalar or vector context. */ 67 /* Check whether we're saving scalar or vector context. */
65 bgtz a3, 1f 68 bgtz a3, 1f
66 69
67 /* Save 128b MSA vector context + scalar FP control & status. */ 70 /* Save 128b MSA vector context + scalar FP control & status. */
71 .set push
72 SET_HARDFLOAT
68 cfc1 t1, fcr31 73 cfc1 t1, fcr31
69 msa_save_all a0 74 msa_save_all a0
75 .set pop /* SET_HARDFLOAT */
76
70 sw t1, THREAD_FCR31(a0) 77 sw t1, THREAD_FCR31(a0)
71 b 2f 78 b 2f
72 79
73 1: /* Save 32b/64b scalar FP context. */ 80 1: /* Save 32b/64b scalar FP context. */
74 fpu_save_double a0 t0 t1 # c0_status passed in t0 81 fpu_save_double a0 t0 t1 # c0_status passed in t0
75 # clobbers t1 82 # clobbers t1
76 2: 83 2:
77 84
78 #if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP) 85 #if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
79 PTR_LA t8, __stack_chk_guard 86 PTR_LA t8, __stack_chk_guard
80 LONG_L t9, TASK_STACK_CANARY(a1) 87 LONG_L t9, TASK_STACK_CANARY(a1)
81 LONG_S t9, 0(t8) 88 LONG_S t9, 0(t8)
82 #endif 89 #endif
83 90
84 /* 91 /*
85 * The order of restoring the registers takes care of the race 92 * The order of restoring the registers takes care of the race
86 * updating $28, $29 and kernelsp without disabling ints. 93 * updating $28, $29 and kernelsp without disabling ints.
87 */ 94 */
88 move $28, a2 95 move $28, a2
89 cpu_restore_nonscratch a1 96 cpu_restore_nonscratch a1
90 97
91 PTR_ADDU t0, $28, _THREAD_SIZE - 32 98 PTR_ADDU t0, $28, _THREAD_SIZE - 32
92 set_saved_sp t0, t1, t2 99 set_saved_sp t0, t1, t2
93 mfc0 t1, CP0_STATUS /* Do we really need this? */ 100 mfc0 t1, CP0_STATUS /* Do we really need this? */
94 li a3, 0xff01 101 li a3, 0xff01
95 and t1, a3 102 and t1, a3
96 LONG_L a2, THREAD_STATUS(a1) 103 LONG_L a2, THREAD_STATUS(a1)
97 nor a3, $0, a3 104 nor a3, $0, a3
98 and a2, a3 105 and a2, a3
99 or a2, t1 106 or a2, t1
100 mtc0 a2, CP0_STATUS 107 mtc0 a2, CP0_STATUS
101 move v0, a0 108 move v0, a0
102 jr ra 109 jr ra
103 END(resume) 110 END(resume)
104 111
105 #endif /* USE_ALTERNATE_RESUME_IMPL */ 112 #endif /* USE_ALTERNATE_RESUME_IMPL */
106 113
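For reference, the beqz/bgtz pair near the top of resume() above implements a three-way dispatch on the fp_save argument described in its comment: zero means there is nothing to save, a positive value selects the scalar FP path (fpu_save_double), and a negative value selects the MSA vector path (msa_save_all plus the FCSR). A minimal stand-alone rendering of that sign convention; the enum names are stand-ins consistent with the branch directions shown above, not quoted from the real FP_SAVE_ definitions, which live outside this diff.

    #include <stdio.h>

    /* Stand-in values chosen to match the beqz/bgtz dispatch in resume(). */
    enum fp_save { FP_SAVE_VECTOR = -1, FP_SAVE_NONE = 0, FP_SAVE_SCALAR = 1 };

    static const char *fp_save_action(int fp_save)
    {
            if (fp_save == 0)       /* beqz a3, 2f  -> nothing to save      */
                    return "skip";
            if (fp_save > 0)        /* bgtz a3, 1f  -> scalar FP context    */
                    return "save scalar FP (fpu_save_double)";
            return "save MSA vector state + FCSR (msa_save_all)";
    }

    int main(void)
    {
            printf("%s\n", fp_save_action(FP_SAVE_NONE));
            printf("%s\n", fp_save_action(FP_SAVE_SCALAR));
            printf("%s\n", fp_save_action(FP_SAVE_VECTOR));
            return 0;
    }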
107 /* 114 /*
108 * Save a thread's fp context. 115 * Save a thread's fp context.
109 */ 116 */
110 LEAF(_save_fp) 117 LEAF(_save_fp)
111 #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) 118 #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
112 mfc0 t0, CP0_STATUS 119 mfc0 t0, CP0_STATUS
113 #endif 120 #endif
114 fpu_save_double a0 t0 t1 # clobbers t1 121 fpu_save_double a0 t0 t1 # clobbers t1
115 jr ra 122 jr ra
116 END(_save_fp) 123 END(_save_fp)
117 124
118 /* 125 /*
119 * Restore a thread's fp context. 126 * Restore a thread's fp context.
120 */ 127 */
121 LEAF(_restore_fp) 128 LEAF(_restore_fp)
122 #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) 129 #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
123 mfc0 t0, CP0_STATUS 130 mfc0 t0, CP0_STATUS
124 #endif 131 #endif
125 fpu_restore_double a0 t0 t1 # clobbers t1 132 fpu_restore_double a0 t0 t1 # clobbers t1
126 jr ra 133 jr ra
127 END(_restore_fp) 134 END(_restore_fp)
128 135
129 #ifdef CONFIG_CPU_HAS_MSA 136 #ifdef CONFIG_CPU_HAS_MSA
130 137
131 /* 138 /*
132 * Save a thread's MSA vector context. 139 * Save a thread's MSA vector context.
133 */ 140 */
134 LEAF(_save_msa) 141 LEAF(_save_msa)
135 msa_save_all a0 142 msa_save_all a0
136 jr ra 143 jr ra
137 END(_save_msa) 144 END(_save_msa)
138 145
139 /* 146 /*
140 * Restore a thread's MSA vector context. 147 * Restore a thread's MSA vector context.
141 */ 148 */
142 LEAF(_restore_msa) 149 LEAF(_restore_msa)
143 msa_restore_all a0 150 msa_restore_all a0
144 jr ra 151 jr ra
145 END(_restore_msa) 152 END(_restore_msa)
146 153
147 LEAF(_init_msa_upper) 154 LEAF(_init_msa_upper)
148 msa_init_all_upper 155 msa_init_all_upper
149 jr ra 156 jr ra
150 END(_init_msa_upper) 157 END(_init_msa_upper)
151 158
152 #endif 159 #endif
153 160
154 /* 161 /*
155 * Load the FPU with signalling NaNs. The bit pattern we use represents a 162 * Load the FPU with signalling NaNs. The bit pattern we use represents a
156 * signalling NaN no matter whether it is interpreted as single or as 163 * signalling NaN no matter whether it is interpreted as single or as
157 * double precision. 164 * double precision.
158 * 165 *
159 * We initialize fcr31 to rounding to nearest, no exceptions. 166 * We initialize fcr31 to rounding to nearest, no exceptions.
160 */ 167 */
161 168
162 #define FPU_DEFAULT 0x00000000 169 #define FPU_DEFAULT 0x00000000
163 170
171 .set push
172 SET_HARDFLOAT
173
164 LEAF(_init_fpu) 174 LEAF(_init_fpu)
165 mfc0 t0, CP0_STATUS 175 mfc0 t0, CP0_STATUS
166 li t1, ST0_CU1 176 li t1, ST0_CU1
167 or t0, t1 177 or t0, t1
168 mtc0 t0, CP0_STATUS 178 mtc0 t0, CP0_STATUS
169 enable_fpu_hazard 179 enable_fpu_hazard
170 180
171 li t1, FPU_DEFAULT 181 li t1, FPU_DEFAULT
172 ctc1 t1, fcr31 182 ctc1 t1, fcr31
173 183
174 li t1, -1 # SNaN 184 li t1, -1 # SNaN
175 185
176 #ifdef CONFIG_64BIT 186 #ifdef CONFIG_64BIT
177 sll t0, t0, 5 187 sll t0, t0, 5
178 bgez t0, 1f # 16 / 32 register mode? 188 bgez t0, 1f # 16 / 32 register mode?
179 189
180 dmtc1 t1, $f1 190 dmtc1 t1, $f1
181 dmtc1 t1, $f3 191 dmtc1 t1, $f3
182 dmtc1 t1, $f5 192 dmtc1 t1, $f5
183 dmtc1 t1, $f7 193 dmtc1 t1, $f7
184 dmtc1 t1, $f9 194 dmtc1 t1, $f9
185 dmtc1 t1, $f11 195 dmtc1 t1, $f11
186 dmtc1 t1, $f13 196 dmtc1 t1, $f13
187 dmtc1 t1, $f15 197 dmtc1 t1, $f15
188 dmtc1 t1, $f17 198 dmtc1 t1, $f17
189 dmtc1 t1, $f19 199 dmtc1 t1, $f19
190 dmtc1 t1, $f21 200 dmtc1 t1, $f21
191 dmtc1 t1, $f23 201 dmtc1 t1, $f23
192 dmtc1 t1, $f25 202 dmtc1 t1, $f25
193 dmtc1 t1, $f27 203 dmtc1 t1, $f27
194 dmtc1 t1, $f29 204 dmtc1 t1, $f29
195 dmtc1 t1, $f31 205 dmtc1 t1, $f31
196 1: 206 1:
197 #endif 207 #endif
198 208
199 #ifdef CONFIG_CPU_MIPS32 209 #ifdef CONFIG_CPU_MIPS32
200 mtc1 t1, $f0 210 mtc1 t1, $f0
201 mtc1 t1, $f1 211 mtc1 t1, $f1
202 mtc1 t1, $f2 212 mtc1 t1, $f2
203 mtc1 t1, $f3 213 mtc1 t1, $f3
204 mtc1 t1, $f4 214 mtc1 t1, $f4
205 mtc1 t1, $f5 215 mtc1 t1, $f5
206 mtc1 t1, $f6 216 mtc1 t1, $f6
207 mtc1 t1, $f7 217 mtc1 t1, $f7
208 mtc1 t1, $f8 218 mtc1 t1, $f8
209 mtc1 t1, $f9 219 mtc1 t1, $f9
210 mtc1 t1, $f10 220 mtc1 t1, $f10
211 mtc1 t1, $f11 221 mtc1 t1, $f11
212 mtc1 t1, $f12 222 mtc1 t1, $f12
213 mtc1 t1, $f13 223 mtc1 t1, $f13
214 mtc1 t1, $f14 224 mtc1 t1, $f14
215 mtc1 t1, $f15 225 mtc1 t1, $f15
216 mtc1 t1, $f16 226 mtc1 t1, $f16
217 mtc1 t1, $f17 227 mtc1 t1, $f17
218 mtc1 t1, $f18 228 mtc1 t1, $f18
219 mtc1 t1, $f19 229 mtc1 t1, $f19
220 mtc1 t1, $f20 230 mtc1 t1, $f20
221 mtc1 t1, $f21 231 mtc1 t1, $f21
222 mtc1 t1, $f22 232 mtc1 t1, $f22
223 mtc1 t1, $f23 233 mtc1 t1, $f23
224 mtc1 t1, $f24 234 mtc1 t1, $f24
225 mtc1 t1, $f25 235 mtc1 t1, $f25
226 mtc1 t1, $f26 236 mtc1 t1, $f26
227 mtc1 t1, $f27 237 mtc1 t1, $f27
228 mtc1 t1, $f28 238 mtc1 t1, $f28
229 mtc1 t1, $f29 239 mtc1 t1, $f29
230 mtc1 t1, $f30 240 mtc1 t1, $f30
231 mtc1 t1, $f31 241 mtc1 t1, $f31
232 242
233 #ifdef CONFIG_CPU_MIPS32_R2 243 #ifdef CONFIG_CPU_MIPS32_R2
234 .set push 244 .set push
235 .set mips64r2 245 .set mips32r2
246 .set fp=64
236 sll t0, t0, 5 # is Status.FR set? 247 sll t0, t0, 5 # is Status.FR set?
237 bgez t0, 1f # no: skip setting upper 32b 248 bgez t0, 1f # no: skip setting upper 32b
238 249
239 mthc1 t1, $f0 250 mthc1 t1, $f0
240 mthc1 t1, $f1 251 mthc1 t1, $f1
241 mthc1 t1, $f2 252 mthc1 t1, $f2
242 mthc1 t1, $f3 253 mthc1 t1, $f3
243 mthc1 t1, $f4 254 mthc1 t1, $f4
244 mthc1 t1, $f5 255 mthc1 t1, $f5
245 mthc1 t1, $f6 256 mthc1 t1, $f6
246 mthc1 t1, $f7 257 mthc1 t1, $f7
247 mthc1 t1, $f8 258 mthc1 t1, $f8
248 mthc1 t1, $f9 259 mthc1 t1, $f9
249 mthc1 t1, $f10 260 mthc1 t1, $f10
250 mthc1 t1, $f11 261 mthc1 t1, $f11
251 mthc1 t1, $f12 262 mthc1 t1, $f12
252 mthc1 t1, $f13 263 mthc1 t1, $f13
253 mthc1 t1, $f14 264 mthc1 t1, $f14
254 mthc1 t1, $f15 265 mthc1 t1, $f15
255 mthc1 t1, $f16 266 mthc1 t1, $f16
256 mthc1 t1, $f17 267 mthc1 t1, $f17
257 mthc1 t1, $f18 268 mthc1 t1, $f18
258 mthc1 t1, $f19 269 mthc1 t1, $f19
259 mthc1 t1, $f20 270 mthc1 t1, $f20
260 mthc1 t1, $f21 271 mthc1 t1, $f21
261 mthc1 t1, $f22 272 mthc1 t1, $f22
262 mthc1 t1, $f23 273 mthc1 t1, $f23
263 mthc1 t1, $f24 274 mthc1 t1, $f24
264 mthc1 t1, $f25 275 mthc1 t1, $f25
265 mthc1 t1, $f26 276 mthc1 t1, $f26
266 mthc1 t1, $f27 277 mthc1 t1, $f27
267 mthc1 t1, $f28 278 mthc1 t1, $f28
268 mthc1 t1, $f29 279 mthc1 t1, $f29
269 mthc1 t1, $f30 280 mthc1 t1, $f30
270 mthc1 t1, $f31 281 mthc1 t1, $f31
271 1: .set pop 282 1: .set pop
272 #endif /* CONFIG_CPU_MIPS32_R2 */ 283 #endif /* CONFIG_CPU_MIPS32_R2 */
273 #else 284 #else
274 .set arch=r4000 285 .set arch=r4000
275 dmtc1 t1, $f0 286 dmtc1 t1, $f0
276 dmtc1 t1, $f2 287 dmtc1 t1, $f2
277 dmtc1 t1, $f4 288 dmtc1 t1, $f4
278 dmtc1 t1, $f6 289 dmtc1 t1, $f6
279 dmtc1 t1, $f8 290 dmtc1 t1, $f8
280 dmtc1 t1, $f10 291 dmtc1 t1, $f10
281 dmtc1 t1, $f12 292 dmtc1 t1, $f12
282 dmtc1 t1, $f14 293 dmtc1 t1, $f14
283 dmtc1 t1, $f16 294 dmtc1 t1, $f16
284 dmtc1 t1, $f18 295 dmtc1 t1, $f18
285 dmtc1 t1, $f20 296 dmtc1 t1, $f20
286 dmtc1 t1, $f22 297 dmtc1 t1, $f22
287 dmtc1 t1, $f24 298 dmtc1 t1, $f24
288 dmtc1 t1, $f26 299 dmtc1 t1, $f26
289 dmtc1 t1, $f28 300 dmtc1 t1, $f28
290 dmtc1 t1, $f30 301 dmtc1 t1, $f30
291 #endif 302 #endif
292 jr ra 303 jr ra
293 END(_init_fpu) 304 END(_init_fpu)
305
306 .set pop /* SET_HARDFLOAT */
294 307
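_init_fpu above seeds every FP register with an all-ones word ("li t1, -1"), the pattern its comment describes as a signalling NaN in both precisions. The self-contained check below, not part of the patch, decomposes that pattern into IEEE-754 fields and confirms it is a NaN under both the single- and the double-precision view.

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
            uint32_t s = 0xffffffffu;                       /* single view */
            uint64_t d = 0xffffffffffffffffull;             /* double view */

            unsigned s_exp  = (s >> 23) & 0xff;
            unsigned s_frac = s & 0x7fffffu;
            unsigned long long d_exp  = (d >> 52) & 0x7ff;
            unsigned long long d_frac = d & 0xfffffffffffffull;

            /* NaN <=> exponent field all ones and fraction field non-zero */
            printf("single: exp=0x%x frac=0x%x NaN=%d frac-MSB=%u\n",
                   s_exp, s_frac, s_exp == 0xff && s_frac != 0,
                   (s_frac >> 22) & 1u);
            printf("double: exp=0x%llx frac=0x%llx NaN=%d frac-MSB=%llu\n",
                   d_exp, d_frac, d_exp == 0x7ff && d_frac != 0,
                   (d_frac >> 51) & 1ull);
            return 0;
    }

Whether a NaN with the fraction MSB set counts as signalling or quiet depends on the FPU's NaN encoding; the comment's claim matches the legacy (pre-NaN2008) MIPS convention, which is an assumption about the targets of this code rather than something the diff itself states.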
arch/mips/kernel/r6000_fpu.S
1 /* 1 /*
2 * r6000_fpu.S: Save/restore floating point context for signal handlers. 2 * r6000_fpu.S: Save/restore floating point context for signal handlers.
3 * 3 *
4 * This file is subject to the terms and conditions of the GNU General Public 4 * This file is subject to the terms and conditions of the GNU General Public
5 * License. See the file "COPYING" in the main directory of this archive 5 * License. See the file "COPYING" in the main directory of this archive
6 * for more details. 6 * for more details.
7 * 7 *
8 * Copyright (C) 1996 by Ralf Baechle 8 * Copyright (C) 1996 by Ralf Baechle
9 * 9 *
10 * Multi-arch abstraction and asm macros for easier reading: 10 * Multi-arch abstraction and asm macros for easier reading:
11 * Copyright (C) 1996 David S. Miller (davem@davemloft.net) 11 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
12 */ 12 */
13 #include <asm/asm.h> 13 #include <asm/asm.h>
14 #include <asm/fpregdef.h> 14 #include <asm/fpregdef.h>
15 #include <asm/mipsregs.h> 15 #include <asm/mipsregs.h>
16 #include <asm/asm-offsets.h> 16 #include <asm/asm-offsets.h>
17 #include <asm/regdef.h> 17 #include <asm/regdef.h>
18 18
19 .set noreorder 19 .set noreorder
20 .set mips2 20 .set mips2
21 .set push
22 SET_HARDFLOAT
23
21 /* Save floating point context */ 24 /* Save floating point context */
22 LEAF(_save_fp_context) 25 LEAF(_save_fp_context)
23 mfc0 t0,CP0_STATUS 26 mfc0 t0,CP0_STATUS
24 sll t0,t0,2 27 sll t0,t0,2
25 bgez t0,1f 28 bgez t0,1f
26 nop 29 nop
27 30
28 cfc1 t1,fcr31 31 cfc1 t1,fcr31
29 /* Store the 16 double precision registers */ 32 /* Store the 16 double precision registers */
30 sdc1 $f0,(SC_FPREGS+0)(a0) 33 sdc1 $f0,(SC_FPREGS+0)(a0)
31 sdc1 $f2,(SC_FPREGS+16)(a0) 34 sdc1 $f2,(SC_FPREGS+16)(a0)
32 sdc1 $f4,(SC_FPREGS+32)(a0) 35 sdc1 $f4,(SC_FPREGS+32)(a0)
33 sdc1 $f6,(SC_FPREGS+48)(a0) 36 sdc1 $f6,(SC_FPREGS+48)(a0)
34 sdc1 $f8,(SC_FPREGS+64)(a0) 37 sdc1 $f8,(SC_FPREGS+64)(a0)
35 sdc1 $f10,(SC_FPREGS+80)(a0) 38 sdc1 $f10,(SC_FPREGS+80)(a0)
36 sdc1 $f12,(SC_FPREGS+96)(a0) 39 sdc1 $f12,(SC_FPREGS+96)(a0)
37 sdc1 $f14,(SC_FPREGS+112)(a0) 40 sdc1 $f14,(SC_FPREGS+112)(a0)
38 sdc1 $f16,(SC_FPREGS+128)(a0) 41 sdc1 $f16,(SC_FPREGS+128)(a0)
39 sdc1 $f18,(SC_FPREGS+144)(a0) 42 sdc1 $f18,(SC_FPREGS+144)(a0)
40 sdc1 $f20,(SC_FPREGS+160)(a0) 43 sdc1 $f20,(SC_FPREGS+160)(a0)
41 sdc1 $f22,(SC_FPREGS+176)(a0) 44 sdc1 $f22,(SC_FPREGS+176)(a0)
42 sdc1 $f24,(SC_FPREGS+192)(a0) 45 sdc1 $f24,(SC_FPREGS+192)(a0)
43 sdc1 $f26,(SC_FPREGS+208)(a0) 46 sdc1 $f26,(SC_FPREGS+208)(a0)
44 sdc1 $f28,(SC_FPREGS+224)(a0) 47 sdc1 $f28,(SC_FPREGS+224)(a0)
45 sdc1 $f30,(SC_FPREGS+240)(a0) 48 sdc1 $f30,(SC_FPREGS+240)(a0)
46 jr ra 49 jr ra
47 sw t0,SC_FPC_CSR(a0) 50 sw t0,SC_FPC_CSR(a0)
48 1: jr ra 51 1: jr ra
49 nop 52 nop
50 END(_save_fp_context) 53 END(_save_fp_context)
51 54
52 /* Restore FPU state: 55 /* Restore FPU state:
53 * - fp gp registers 56 * - fp gp registers
54 * - cp1 status/control register 57 * - cp1 status/control register
55 * 58 *
56 * We base the decision of which registers to restore from the signal 59 * We base the decision of which registers to restore from the signal
57 * stack frame on the current content of c0_status, not on the content of 60 * stack frame on the current content of c0_status, not on the content of
58 * the stack frame, which might have been changed by the user. 61 * the stack frame, which might have been changed by the user.
59 */ 62 */
60 LEAF(_restore_fp_context) 63 LEAF(_restore_fp_context)
61 mfc0 t0,CP0_STATUS 64 mfc0 t0,CP0_STATUS
62 sll t0,t0,2 65 sll t0,t0,2
63 66
64 bgez t0,1f 67 bgez t0,1f
65 lw t0,SC_FPC_CSR(a0) 68 lw t0,SC_FPC_CSR(a0)
66 /* Restore the 16 double precision registers */ 69 /* Restore the 16 double precision registers */
67 ldc1 $f0,(SC_FPREGS+0)(a0) 70 ldc1 $f0,(SC_FPREGS+0)(a0)
68 ldc1 $f2,(SC_FPREGS+16)(a0) 71 ldc1 $f2,(SC_FPREGS+16)(a0)
69 ldc1 $f4,(SC_FPREGS+32)(a0) 72 ldc1 $f4,(SC_FPREGS+32)(a0)
70 ldc1 $f6,(SC_FPREGS+48)(a0) 73 ldc1 $f6,(SC_FPREGS+48)(a0)
71 ldc1 $f8,(SC_FPREGS+64)(a0) 74 ldc1 $f8,(SC_FPREGS+64)(a0)
72 ldc1 $f10,(SC_FPREGS+80)(a0) 75 ldc1 $f10,(SC_FPREGS+80)(a0)
73 ldc1 $f12,(SC_FPREGS+96)(a0) 76 ldc1 $f12,(SC_FPREGS+96)(a0)
74 ldc1 $f14,(SC_FPREGS+112)(a0) 77 ldc1 $f14,(SC_FPREGS+112)(a0)
75 ldc1 $f16,(SC_FPREGS+128)(a0) 78 ldc1 $f16,(SC_FPREGS+128)(a0)
76 ldc1 $f18,(SC_FPREGS+144)(a0) 79 ldc1 $f18,(SC_FPREGS+144)(a0)
77 ldc1 $f20,(SC_FPREGS+160)(a0) 80 ldc1 $f20,(SC_FPREGS+160)(a0)
78 ldc1 $f22,(SC_FPREGS+176)(a0) 81 ldc1 $f22,(SC_FPREGS+176)(a0)
79 ldc1 $f24,(SC_FPREGS+192)(a0) 82 ldc1 $f24,(SC_FPREGS+192)(a0)
80 ldc1 $f26,(SC_FPREGS+208)(a0) 83 ldc1 $f26,(SC_FPREGS+208)(a0)
81 ldc1 $f28,(SC_FPREGS+224)(a0) 84 ldc1 $f28,(SC_FPREGS+224)(a0)
82 ldc1 $f30,(SC_FPREGS+240)(a0) 85 ldc1 $f30,(SC_FPREGS+240)(a0)
83 jr ra 86 jr ra
84 ctc1 t0,fcr31 87 ctc1 t0,fcr31
85 1: jr ra 88 1: jr ra
86 nop 89 nop
87 END(_restore_fp_context) 90 END(_restore_fp_context)
91
92 .set pop /* SET_HARDFLOAT */
88 93
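Both signal-context restore paths above gate the FP loads on a CP0 Status bit by shifting that bit into the sign position and branching on the sign: r4k_fpu.S uses "sll t0, t0, 5" before testing Status.FR, while r6000_fpu.S uses "sll t0, t0, 2" before testing the CU1 enable. A minimal stand-alone check of that equivalence; the bit positions (CU1 at bit 29, FR at bit 26) are the conventional Status layout and are an assumption here, since the diff does not restate them.

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Assumed CP0 Status bit positions; not defined anywhere in this diff. */
    #define ST0_FR  (1u << 26)
    #define ST0_CU1 (1u << 29)

    /* "sll t0, t0, N; bgez t0, skip": the branch falls through iff the
     * tested bit ends up in the sign position and is set. */
    static int bit_set_via_shift(uint32_t status, unsigned shift)
    {
            return ((status << shift) & 0x80000000u) != 0;
    }

    int main(void)
    {
            for (uint32_t hi = 0; hi < (1u << 16); hi++) {
                    uint32_t st = hi << 16;         /* exercise the high Status bits */
                    assert(bit_set_via_shift(st, 5) == !!(st & ST0_FR));
                    assert(bit_set_via_shift(st, 2) == !!(st & ST0_CU1));
            }
            printf("shift-based Status tests match the mask-based ones\n");
            return 0;
    }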
arch/mips/math-emu/cp1emu.c
1 /* 1 /*
2 * cp1emu.c: a MIPS coprocessor 1 (FPU) instruction emulator 2 * cp1emu.c: a MIPS coprocessor 1 (FPU) instruction emulator
3 * 3 *
4 * MIPS floating point support 4 * MIPS floating point support
5 * Copyright (C) 1994-2000 Algorithmics Ltd. 5 * Copyright (C) 1994-2000 Algorithmics Ltd.
6 * 6 *
7 * Kevin D. Kissell, kevink@mips.com and Carsten Langgaard, carstenl@mips.com 7 * Kevin D. Kissell, kevink@mips.com and Carsten Langgaard, carstenl@mips.com
8 * Copyright (C) 2000 MIPS Technologies, Inc. 8 * Copyright (C) 2000 MIPS Technologies, Inc.
9 * 9 *
10 * This program is free software; you can distribute it and/or modify it 10 * This program is free software; you can distribute it and/or modify it
11 * under the terms of the GNU General Public License (Version 2) as 11 * under the terms of the GNU General Public License (Version 2) as
12 * published by the Free Software Foundation. 12 * published by the Free Software Foundation.
13 * 13 *
14 * This program is distributed in the hope it will be useful, but WITHOUT 14 * This program is distributed in the hope it will be useful, but WITHOUT
15 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 15 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 16 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 * for more details. 17 * for more details.
18 * 18 *
19 * You should have received a copy of the GNU General Public License along 19 * You should have received a copy of the GNU General Public License along
20 * with this program; if not, write to the Free Software Foundation, Inc., 20 * with this program; if not, write to the Free Software Foundation, Inc.,
21 * 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 21 * 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
22 * 22 *
23 * A complete emulator for MIPS coprocessor 1 instructions. This is 23 * A complete emulator for MIPS coprocessor 1 instructions. This is
24 * required for #float(switch) or #float(trap), where it catches all 24 * required for #float(switch) or #float(trap), where it catches all
25 * COP1 instructions via the "CoProcessor Unusable" exception. 25 * COP1 instructions via the "CoProcessor Unusable" exception.
26 * 26 *
27 * More surprisingly it is also required for #float(ieee), to help out 27 * More surprisingly it is also required for #float(ieee), to help out
28 * the hardware FPU at the boundaries of the IEEE-754 representation 28 * the hardware FPU at the boundaries of the IEEE-754 representation
29 * (denormalised values, infinities, underflow, etc). It is made 29 * (denormalised values, infinities, underflow, etc). It is made
30 * quite nasty because emulation of some non-COP1 instructions is 30 * quite nasty because emulation of some non-COP1 instructions is
31 * required, e.g. in branch delay slots. 31 * required, e.g. in branch delay slots.
32 * 32 *
33 * Note if you know that you won't have an FPU, then you'll get much 33 * Note if you know that you won't have an FPU, then you'll get much
34 * better performance by compiling with -msoft-float! 34 * better performance by compiling with -msoft-float!
35 */ 35 */
36 #include <linux/sched.h> 36 #include <linux/sched.h>
37 #include <linux/debugfs.h> 37 #include <linux/debugfs.h>
38 #include <linux/kconfig.h> 38 #include <linux/kconfig.h>
39 #include <linux/percpu-defs.h> 39 #include <linux/percpu-defs.h>
40 #include <linux/perf_event.h> 40 #include <linux/perf_event.h>
41 41
42 #include <asm/branch.h> 42 #include <asm/branch.h>
43 #include <asm/inst.h> 43 #include <asm/inst.h>
44 #include <asm/ptrace.h> 44 #include <asm/ptrace.h>
45 #include <asm/signal.h> 45 #include <asm/signal.h>
46 #include <asm/uaccess.h> 46 #include <asm/uaccess.h>
47 47
48 #include <asm/processor.h> 48 #include <asm/processor.h>
49 #include <asm/fpu_emulator.h> 49 #include <asm/fpu_emulator.h>
50 #include <asm/fpu.h> 50 #include <asm/fpu.h>
51 51
52 #include "ieee754.h" 52 #include "ieee754.h"
53 53
54 /* Function which emulates a floating point instruction. */ 54 /* Function which emulates a floating point instruction. */
55 55
56 static int fpu_emu(struct pt_regs *, struct mips_fpu_struct *, 56 static int fpu_emu(struct pt_regs *, struct mips_fpu_struct *,
57 mips_instruction); 57 mips_instruction);
58 58
59 static int fpux_emu(struct pt_regs *, 59 static int fpux_emu(struct pt_regs *,
60 struct mips_fpu_struct *, mips_instruction, void *__user *); 60 struct mips_fpu_struct *, mips_instruction, void *__user *);
61 61
62 /* Control registers */ 62 /* Control registers */
63 63
64 #define FPCREG_RID 0 /* $0 = revision id */ 64 #define FPCREG_RID 0 /* $0 = revision id */
65 #define FPCREG_CSR 31 /* $31 = csr */ 65 #define FPCREG_CSR 31 /* $31 = csr */
66 66
67 /* Determine rounding mode from the RM bits of the FCSR */ 67 /* Determine rounding mode from the RM bits of the FCSR */
68 #define modeindex(v) ((v) & FPU_CSR_RM) 68 #define modeindex(v) ((v) & FPU_CSR_RM)
69 69
70 /* convert condition code register number to csr bit */ 70 /* convert condition code register number to csr bit */
71 static const unsigned int fpucondbit[8] = { 71 static const unsigned int fpucondbit[8] = {
72 FPU_CSR_COND0, 72 FPU_CSR_COND0,
73 FPU_CSR_COND1, 73 FPU_CSR_COND1,
74 FPU_CSR_COND2, 74 FPU_CSR_COND2,
75 FPU_CSR_COND3, 75 FPU_CSR_COND3,
76 FPU_CSR_COND4, 76 FPU_CSR_COND4,
77 FPU_CSR_COND5, 77 FPU_CSR_COND5,
78 FPU_CSR_COND6, 78 FPU_CSR_COND6,
79 FPU_CSR_COND7 79 FPU_CSR_COND7
80 }; 80 };
81 81
82 /* (microMIPS) Convert certain microMIPS instructions to MIPS32 format. */ 82 /* (microMIPS) Convert certain microMIPS instructions to MIPS32 format. */
83 static const int sd_format[] = {16, 17, 0, 0, 0, 0, 0, 0}; 83 static const int sd_format[] = {16, 17, 0, 0, 0, 0, 0, 0};
84 static const int sdps_format[] = {16, 17, 22, 0, 0, 0, 0, 0}; 84 static const int sdps_format[] = {16, 17, 22, 0, 0, 0, 0, 0};
85 static const int dwl_format[] = {17, 20, 21, 0, 0, 0, 0, 0}; 85 static const int dwl_format[] = {17, 20, 21, 0, 0, 0, 0, 0};
86 static const int swl_format[] = {16, 20, 21, 0, 0, 0, 0, 0}; 86 static const int swl_format[] = {16, 20, 21, 0, 0, 0, 0, 0};
87 87
88 /* 88 /*
89 * This function translates a 32-bit microMIPS instruction 89 * This function translates a 32-bit microMIPS instruction
90 * into a 32-bit MIPS32 instruction. Returns 0 on success 90 * into a 32-bit MIPS32 instruction. Returns 0 on success
91 * and SIGILL otherwise. 91 * and SIGILL otherwise.
92 */ 92 */
93 static int microMIPS32_to_MIPS32(union mips_instruction *insn_ptr) 93 static int microMIPS32_to_MIPS32(union mips_instruction *insn_ptr)
94 { 94 {
95 union mips_instruction insn = *insn_ptr; 95 union mips_instruction insn = *insn_ptr;
96 union mips_instruction mips32_insn = insn; 96 union mips_instruction mips32_insn = insn;
97 int func, fmt, op; 97 int func, fmt, op;
98 98
99 switch (insn.mm_i_format.opcode) { 99 switch (insn.mm_i_format.opcode) {
100 case mm_ldc132_op: 100 case mm_ldc132_op:
101 mips32_insn.mm_i_format.opcode = ldc1_op; 101 mips32_insn.mm_i_format.opcode = ldc1_op;
102 mips32_insn.mm_i_format.rt = insn.mm_i_format.rs; 102 mips32_insn.mm_i_format.rt = insn.mm_i_format.rs;
103 mips32_insn.mm_i_format.rs = insn.mm_i_format.rt; 103 mips32_insn.mm_i_format.rs = insn.mm_i_format.rt;
104 break; 104 break;
105 case mm_lwc132_op: 105 case mm_lwc132_op:
106 mips32_insn.mm_i_format.opcode = lwc1_op; 106 mips32_insn.mm_i_format.opcode = lwc1_op;
107 mips32_insn.mm_i_format.rt = insn.mm_i_format.rs; 107 mips32_insn.mm_i_format.rt = insn.mm_i_format.rs;
108 mips32_insn.mm_i_format.rs = insn.mm_i_format.rt; 108 mips32_insn.mm_i_format.rs = insn.mm_i_format.rt;
109 break; 109 break;
110 case mm_sdc132_op: 110 case mm_sdc132_op:
111 mips32_insn.mm_i_format.opcode = sdc1_op; 111 mips32_insn.mm_i_format.opcode = sdc1_op;
112 mips32_insn.mm_i_format.rt = insn.mm_i_format.rs; 112 mips32_insn.mm_i_format.rt = insn.mm_i_format.rs;
113 mips32_insn.mm_i_format.rs = insn.mm_i_format.rt; 113 mips32_insn.mm_i_format.rs = insn.mm_i_format.rt;
114 break; 114 break;
115 case mm_swc132_op: 115 case mm_swc132_op:
116 mips32_insn.mm_i_format.opcode = swc1_op; 116 mips32_insn.mm_i_format.opcode = swc1_op;
117 mips32_insn.mm_i_format.rt = insn.mm_i_format.rs; 117 mips32_insn.mm_i_format.rt = insn.mm_i_format.rs;
118 mips32_insn.mm_i_format.rs = insn.mm_i_format.rt; 118 mips32_insn.mm_i_format.rs = insn.mm_i_format.rt;
119 break; 119 break;
120 case mm_pool32i_op: 120 case mm_pool32i_op:
121 /* NOTE: offset is << by 1 if in microMIPS mode. */ 121 /* NOTE: offset is << by 1 if in microMIPS mode. */
122 if ((insn.mm_i_format.rt == mm_bc1f_op) || 122 if ((insn.mm_i_format.rt == mm_bc1f_op) ||
123 (insn.mm_i_format.rt == mm_bc1t_op)) { 123 (insn.mm_i_format.rt == mm_bc1t_op)) {
124 mips32_insn.fb_format.opcode = cop1_op; 124 mips32_insn.fb_format.opcode = cop1_op;
125 mips32_insn.fb_format.bc = bc_op; 125 mips32_insn.fb_format.bc = bc_op;
126 mips32_insn.fb_format.flag = 126 mips32_insn.fb_format.flag =
127 (insn.mm_i_format.rt == mm_bc1t_op) ? 1 : 0; 127 (insn.mm_i_format.rt == mm_bc1t_op) ? 1 : 0;
128 } else 128 } else
129 return SIGILL; 129 return SIGILL;
130 break; 130 break;
131 case mm_pool32f_op: 131 case mm_pool32f_op:
132 switch (insn.mm_fp0_format.func) { 132 switch (insn.mm_fp0_format.func) {
133 case mm_32f_01_op: 133 case mm_32f_01_op:
134 case mm_32f_11_op: 134 case mm_32f_11_op:
135 case mm_32f_02_op: 135 case mm_32f_02_op:
136 case mm_32f_12_op: 136 case mm_32f_12_op:
137 case mm_32f_41_op: 137 case mm_32f_41_op:
138 case mm_32f_51_op: 138 case mm_32f_51_op:
139 case mm_32f_42_op: 139 case mm_32f_42_op:
140 case mm_32f_52_op: 140 case mm_32f_52_op:
141 op = insn.mm_fp0_format.func; 141 op = insn.mm_fp0_format.func;
142 if (op == mm_32f_01_op) 142 if (op == mm_32f_01_op)
143 func = madd_s_op; 143 func = madd_s_op;
144 else if (op == mm_32f_11_op) 144 else if (op == mm_32f_11_op)
145 func = madd_d_op; 145 func = madd_d_op;
146 else if (op == mm_32f_02_op) 146 else if (op == mm_32f_02_op)
147 func = nmadd_s_op; 147 func = nmadd_s_op;
148 else if (op == mm_32f_12_op) 148 else if (op == mm_32f_12_op)
149 func = nmadd_d_op; 149 func = nmadd_d_op;
150 else if (op == mm_32f_41_op) 150 else if (op == mm_32f_41_op)
151 func = msub_s_op; 151 func = msub_s_op;
152 else if (op == mm_32f_51_op) 152 else if (op == mm_32f_51_op)
153 func = msub_d_op; 153 func = msub_d_op;
154 else if (op == mm_32f_42_op) 154 else if (op == mm_32f_42_op)
155 func = nmsub_s_op; 155 func = nmsub_s_op;
156 else 156 else
157 func = nmsub_d_op; 157 func = nmsub_d_op;
158 mips32_insn.fp6_format.opcode = cop1x_op; 158 mips32_insn.fp6_format.opcode = cop1x_op;
159 mips32_insn.fp6_format.fr = insn.mm_fp6_format.fr; 159 mips32_insn.fp6_format.fr = insn.mm_fp6_format.fr;
160 mips32_insn.fp6_format.ft = insn.mm_fp6_format.ft; 160 mips32_insn.fp6_format.ft = insn.mm_fp6_format.ft;
161 mips32_insn.fp6_format.fs = insn.mm_fp6_format.fs; 161 mips32_insn.fp6_format.fs = insn.mm_fp6_format.fs;
162 mips32_insn.fp6_format.fd = insn.mm_fp6_format.fd; 162 mips32_insn.fp6_format.fd = insn.mm_fp6_format.fd;
163 mips32_insn.fp6_format.func = func; 163 mips32_insn.fp6_format.func = func;
164 break; 164 break;
165 case mm_32f_10_op: 165 case mm_32f_10_op:
166 func = -1; /* Invalid */ 166 func = -1; /* Invalid */
167 op = insn.mm_fp5_format.op & 0x7; 167 op = insn.mm_fp5_format.op & 0x7;
168 if (op == mm_ldxc1_op) 168 if (op == mm_ldxc1_op)
169 func = ldxc1_op; 169 func = ldxc1_op;
170 else if (op == mm_sdxc1_op) 170 else if (op == mm_sdxc1_op)
171 func = sdxc1_op; 171 func = sdxc1_op;
172 else if (op == mm_lwxc1_op) 172 else if (op == mm_lwxc1_op)
173 func = lwxc1_op; 173 func = lwxc1_op;
174 else if (op == mm_swxc1_op) 174 else if (op == mm_swxc1_op)
175 func = swxc1_op; 175 func = swxc1_op;
176 176
177 if (func != -1) { 177 if (func != -1) {
178 mips32_insn.r_format.opcode = cop1x_op; 178 mips32_insn.r_format.opcode = cop1x_op;
179 mips32_insn.r_format.rs = 179 mips32_insn.r_format.rs =
180 insn.mm_fp5_format.base; 180 insn.mm_fp5_format.base;
181 mips32_insn.r_format.rt = 181 mips32_insn.r_format.rt =
182 insn.mm_fp5_format.index; 182 insn.mm_fp5_format.index;
183 mips32_insn.r_format.rd = 0; 183 mips32_insn.r_format.rd = 0;
184 mips32_insn.r_format.re = insn.mm_fp5_format.fd; 184 mips32_insn.r_format.re = insn.mm_fp5_format.fd;
185 mips32_insn.r_format.func = func; 185 mips32_insn.r_format.func = func;
186 } else 186 } else
187 return SIGILL; 187 return SIGILL;
188 break; 188 break;
189 case mm_32f_40_op: 189 case mm_32f_40_op:
190 op = -1; /* Invalid */ 190 op = -1; /* Invalid */
191 if (insn.mm_fp2_format.op == mm_fmovt_op) 191 if (insn.mm_fp2_format.op == mm_fmovt_op)
192 op = 1; 192 op = 1;
193 else if (insn.mm_fp2_format.op == mm_fmovf_op) 193 else if (insn.mm_fp2_format.op == mm_fmovf_op)
194 op = 0; 194 op = 0;
195 if (op != -1) { 195 if (op != -1) {
196 mips32_insn.fp0_format.opcode = cop1_op; 196 mips32_insn.fp0_format.opcode = cop1_op;
197 mips32_insn.fp0_format.fmt = 197 mips32_insn.fp0_format.fmt =
198 sdps_format[insn.mm_fp2_format.fmt]; 198 sdps_format[insn.mm_fp2_format.fmt];
199 mips32_insn.fp0_format.ft = 199 mips32_insn.fp0_format.ft =
200 (insn.mm_fp2_format.cc<<2) + op; 200 (insn.mm_fp2_format.cc<<2) + op;
201 mips32_insn.fp0_format.fs = 201 mips32_insn.fp0_format.fs =
202 insn.mm_fp2_format.fs; 202 insn.mm_fp2_format.fs;
203 mips32_insn.fp0_format.fd = 203 mips32_insn.fp0_format.fd =
204 insn.mm_fp2_format.fd; 204 insn.mm_fp2_format.fd;
205 mips32_insn.fp0_format.func = fmovc_op; 205 mips32_insn.fp0_format.func = fmovc_op;
206 } else 206 } else
207 return SIGILL; 207 return SIGILL;
208 break; 208 break;
209 case mm_32f_60_op: 209 case mm_32f_60_op:
210 func = -1; /* Invalid */ 210 func = -1; /* Invalid */
211 if (insn.mm_fp0_format.op == mm_fadd_op) 211 if (insn.mm_fp0_format.op == mm_fadd_op)
212 func = fadd_op; 212 func = fadd_op;
213 else if (insn.mm_fp0_format.op == mm_fsub_op) 213 else if (insn.mm_fp0_format.op == mm_fsub_op)
214 func = fsub_op; 214 func = fsub_op;
215 else if (insn.mm_fp0_format.op == mm_fmul_op) 215 else if (insn.mm_fp0_format.op == mm_fmul_op)
216 func = fmul_op; 216 func = fmul_op;
217 else if (insn.mm_fp0_format.op == mm_fdiv_op) 217 else if (insn.mm_fp0_format.op == mm_fdiv_op)
218 func = fdiv_op; 218 func = fdiv_op;
219 if (func != -1) { 219 if (func != -1) {
220 mips32_insn.fp0_format.opcode = cop1_op; 220 mips32_insn.fp0_format.opcode = cop1_op;
221 mips32_insn.fp0_format.fmt = 221 mips32_insn.fp0_format.fmt =
222 sdps_format[insn.mm_fp0_format.fmt]; 222 sdps_format[insn.mm_fp0_format.fmt];
223 mips32_insn.fp0_format.ft = 223 mips32_insn.fp0_format.ft =
224 insn.mm_fp0_format.ft; 224 insn.mm_fp0_format.ft;
225 mips32_insn.fp0_format.fs = 225 mips32_insn.fp0_format.fs =
226 insn.mm_fp0_format.fs; 226 insn.mm_fp0_format.fs;
227 mips32_insn.fp0_format.fd = 227 mips32_insn.fp0_format.fd =
228 insn.mm_fp0_format.fd; 228 insn.mm_fp0_format.fd;
229 mips32_insn.fp0_format.func = func; 229 mips32_insn.fp0_format.func = func;
230 } else 230 } else
231 return SIGILL; 231 return SIGILL;
232 break; 232 break;
233 case mm_32f_70_op: 233 case mm_32f_70_op:
234 func = -1; /* Invalid */ 234 func = -1; /* Invalid */
235 if (insn.mm_fp0_format.op == mm_fmovn_op) 235 if (insn.mm_fp0_format.op == mm_fmovn_op)
236 func = fmovn_op; 236 func = fmovn_op;
237 else if (insn.mm_fp0_format.op == mm_fmovz_op) 237 else if (insn.mm_fp0_format.op == mm_fmovz_op)
238 func = fmovz_op; 238 func = fmovz_op;
239 if (func != -1) { 239 if (func != -1) {
240 mips32_insn.fp0_format.opcode = cop1_op; 240 mips32_insn.fp0_format.opcode = cop1_op;
241 mips32_insn.fp0_format.fmt = 241 mips32_insn.fp0_format.fmt =
242 sdps_format[insn.mm_fp0_format.fmt]; 242 sdps_format[insn.mm_fp0_format.fmt];
243 mips32_insn.fp0_format.ft = 243 mips32_insn.fp0_format.ft =
244 insn.mm_fp0_format.ft; 244 insn.mm_fp0_format.ft;
245 mips32_insn.fp0_format.fs = 245 mips32_insn.fp0_format.fs =
246 insn.mm_fp0_format.fs; 246 insn.mm_fp0_format.fs;
247 mips32_insn.fp0_format.fd = 247 mips32_insn.fp0_format.fd =
248 insn.mm_fp0_format.fd; 248 insn.mm_fp0_format.fd;
249 mips32_insn.fp0_format.func = func; 249 mips32_insn.fp0_format.func = func;
250 } else 250 } else
251 return SIGILL; 251 return SIGILL;
252 break; 252 break;
253 case mm_32f_73_op: /* POOL32FXF */ 253 case mm_32f_73_op: /* POOL32FXF */
254 switch (insn.mm_fp1_format.op) { 254 switch (insn.mm_fp1_format.op) {
255 case mm_movf0_op: 255 case mm_movf0_op:
256 case mm_movf1_op: 256 case mm_movf1_op:
257 case mm_movt0_op: 257 case mm_movt0_op:
258 case mm_movt1_op: 258 case mm_movt1_op:
259 if ((insn.mm_fp1_format.op & 0x7f) == 259 if ((insn.mm_fp1_format.op & 0x7f) ==
260 mm_movf0_op) 260 mm_movf0_op)
261 op = 0; 261 op = 0;
262 else 262 else
263 op = 1; 263 op = 1;
264 mips32_insn.r_format.opcode = spec_op; 264 mips32_insn.r_format.opcode = spec_op;
265 mips32_insn.r_format.rs = insn.mm_fp4_format.fs; 265 mips32_insn.r_format.rs = insn.mm_fp4_format.fs;
266 mips32_insn.r_format.rt = 266 mips32_insn.r_format.rt =
267 (insn.mm_fp4_format.cc << 2) + op; 267 (insn.mm_fp4_format.cc << 2) + op;
268 mips32_insn.r_format.rd = insn.mm_fp4_format.rt; 268 mips32_insn.r_format.rd = insn.mm_fp4_format.rt;
269 mips32_insn.r_format.re = 0; 269 mips32_insn.r_format.re = 0;
270 mips32_insn.r_format.func = movc_op; 270 mips32_insn.r_format.func = movc_op;
271 break; 271 break;
272 case mm_fcvtd0_op: 272 case mm_fcvtd0_op:
273 case mm_fcvtd1_op: 273 case mm_fcvtd1_op:
274 case mm_fcvts0_op: 274 case mm_fcvts0_op:
275 case mm_fcvts1_op: 275 case mm_fcvts1_op:
276 if ((insn.mm_fp1_format.op & 0x7f) == 276 if ((insn.mm_fp1_format.op & 0x7f) ==
277 mm_fcvtd0_op) { 277 mm_fcvtd0_op) {
278 func = fcvtd_op; 278 func = fcvtd_op;
279 fmt = swl_format[insn.mm_fp3_format.fmt]; 279 fmt = swl_format[insn.mm_fp3_format.fmt];
280 } else { 280 } else {
281 func = fcvts_op; 281 func = fcvts_op;
282 fmt = dwl_format[insn.mm_fp3_format.fmt]; 282 fmt = dwl_format[insn.mm_fp3_format.fmt];
283 } 283 }
284 mips32_insn.fp0_format.opcode = cop1_op; 284 mips32_insn.fp0_format.opcode = cop1_op;
285 mips32_insn.fp0_format.fmt = fmt; 285 mips32_insn.fp0_format.fmt = fmt;
286 mips32_insn.fp0_format.ft = 0; 286 mips32_insn.fp0_format.ft = 0;
287 mips32_insn.fp0_format.fs = 287 mips32_insn.fp0_format.fs =
288 insn.mm_fp3_format.fs; 288 insn.mm_fp3_format.fs;
289 mips32_insn.fp0_format.fd = 289 mips32_insn.fp0_format.fd =
290 insn.mm_fp3_format.rt; 290 insn.mm_fp3_format.rt;
291 mips32_insn.fp0_format.func = func; 291 mips32_insn.fp0_format.func = func;
292 break; 292 break;
293 case mm_fmov0_op: 293 case mm_fmov0_op:
294 case mm_fmov1_op: 294 case mm_fmov1_op:
295 case mm_fabs0_op: 295 case mm_fabs0_op:
296 case mm_fabs1_op: 296 case mm_fabs1_op:
297 case mm_fneg0_op: 297 case mm_fneg0_op:
298 case mm_fneg1_op: 298 case mm_fneg1_op:
299 if ((insn.mm_fp1_format.op & 0x7f) == 299 if ((insn.mm_fp1_format.op & 0x7f) ==
300 mm_fmov0_op) 300 mm_fmov0_op)
301 func = fmov_op; 301 func = fmov_op;
302 else if ((insn.mm_fp1_format.op & 0x7f) == 302 else if ((insn.mm_fp1_format.op & 0x7f) ==
303 mm_fabs0_op) 303 mm_fabs0_op)
304 func = fabs_op; 304 func = fabs_op;
305 else 305 else
306 func = fneg_op; 306 func = fneg_op;
307 mips32_insn.fp0_format.opcode = cop1_op; 307 mips32_insn.fp0_format.opcode = cop1_op;
308 mips32_insn.fp0_format.fmt = 308 mips32_insn.fp0_format.fmt =
309 sdps_format[insn.mm_fp3_format.fmt]; 309 sdps_format[insn.mm_fp3_format.fmt];
310 mips32_insn.fp0_format.ft = 0; 310 mips32_insn.fp0_format.ft = 0;
311 mips32_insn.fp0_format.fs = 311 mips32_insn.fp0_format.fs =
312 insn.mm_fp3_format.fs; 312 insn.mm_fp3_format.fs;
313 mips32_insn.fp0_format.fd = 313 mips32_insn.fp0_format.fd =
314 insn.mm_fp3_format.rt; 314 insn.mm_fp3_format.rt;
315 mips32_insn.fp0_format.func = func; 315 mips32_insn.fp0_format.func = func;
316 break; 316 break;
317 case mm_ffloorl_op: 317 case mm_ffloorl_op:
318 case mm_ffloorw_op: 318 case mm_ffloorw_op:
319 case mm_fceill_op: 319 case mm_fceill_op:
320 case mm_fceilw_op: 320 case mm_fceilw_op:
321 case mm_ftruncl_op: 321 case mm_ftruncl_op:
322 case mm_ftruncw_op: 322 case mm_ftruncw_op:
323 case mm_froundl_op: 323 case mm_froundl_op:
324 case mm_froundw_op: 324 case mm_froundw_op:
325 case mm_fcvtl_op: 325 case mm_fcvtl_op:
326 case mm_fcvtw_op: 326 case mm_fcvtw_op:
327 if (insn.mm_fp1_format.op == mm_ffloorl_op) 327 if (insn.mm_fp1_format.op == mm_ffloorl_op)
328 func = ffloorl_op; 328 func = ffloorl_op;
329 else if (insn.mm_fp1_format.op == mm_ffloorw_op) 329 else if (insn.mm_fp1_format.op == mm_ffloorw_op)
330 func = ffloor_op; 330 func = ffloor_op;
331 else if (insn.mm_fp1_format.op == mm_fceill_op) 331 else if (insn.mm_fp1_format.op == mm_fceill_op)
332 func = fceill_op; 332 func = fceill_op;
333 else if (insn.mm_fp1_format.op == mm_fceilw_op) 333 else if (insn.mm_fp1_format.op == mm_fceilw_op)
334 func = fceil_op; 334 func = fceil_op;
335 else if (insn.mm_fp1_format.op == mm_ftruncl_op) 335 else if (insn.mm_fp1_format.op == mm_ftruncl_op)
336 func = ftruncl_op; 336 func = ftruncl_op;
337 else if (insn.mm_fp1_format.op == mm_ftruncw_op) 337 else if (insn.mm_fp1_format.op == mm_ftruncw_op)
338 func = ftrunc_op; 338 func = ftrunc_op;
339 else if (insn.mm_fp1_format.op == mm_froundl_op) 339 else if (insn.mm_fp1_format.op == mm_froundl_op)
340 func = froundl_op; 340 func = froundl_op;
341 else if (insn.mm_fp1_format.op == mm_froundw_op) 341 else if (insn.mm_fp1_format.op == mm_froundw_op)
342 func = fround_op; 342 func = fround_op;
343 else if (insn.mm_fp1_format.op == mm_fcvtl_op) 343 else if (insn.mm_fp1_format.op == mm_fcvtl_op)
344 func = fcvtl_op; 344 func = fcvtl_op;
345 else 345 else
346 func = fcvtw_op; 346 func = fcvtw_op;
347 mips32_insn.fp0_format.opcode = cop1_op; 347 mips32_insn.fp0_format.opcode = cop1_op;
348 mips32_insn.fp0_format.fmt = 348 mips32_insn.fp0_format.fmt =
349 sd_format[insn.mm_fp1_format.fmt]; 349 sd_format[insn.mm_fp1_format.fmt];
350 mips32_insn.fp0_format.ft = 0; 350 mips32_insn.fp0_format.ft = 0;
351 mips32_insn.fp0_format.fs = 351 mips32_insn.fp0_format.fs =
352 insn.mm_fp1_format.fs; 352 insn.mm_fp1_format.fs;
353 mips32_insn.fp0_format.fd = 353 mips32_insn.fp0_format.fd =
354 insn.mm_fp1_format.rt; 354 insn.mm_fp1_format.rt;
355 mips32_insn.fp0_format.func = func; 355 mips32_insn.fp0_format.func = func;
356 break; 356 break;
357 case mm_frsqrt_op: 357 case mm_frsqrt_op:
358 case mm_fsqrt_op: 358 case mm_fsqrt_op:
359 case mm_frecip_op: 359 case mm_frecip_op:
360 if (insn.mm_fp1_format.op == mm_frsqrt_op) 360 if (insn.mm_fp1_format.op == mm_frsqrt_op)
361 func = frsqrt_op; 361 func = frsqrt_op;
362 else if (insn.mm_fp1_format.op == mm_fsqrt_op) 362 else if (insn.mm_fp1_format.op == mm_fsqrt_op)
363 func = fsqrt_op; 363 func = fsqrt_op;
364 else 364 else
365 func = frecip_op; 365 func = frecip_op;
366 mips32_insn.fp0_format.opcode = cop1_op; 366 mips32_insn.fp0_format.opcode = cop1_op;
367 mips32_insn.fp0_format.fmt = 367 mips32_insn.fp0_format.fmt =
368 sdps_format[insn.mm_fp1_format.fmt]; 368 sdps_format[insn.mm_fp1_format.fmt];
369 mips32_insn.fp0_format.ft = 0; 369 mips32_insn.fp0_format.ft = 0;
370 mips32_insn.fp0_format.fs = 370 mips32_insn.fp0_format.fs =
371 insn.mm_fp1_format.fs; 371 insn.mm_fp1_format.fs;
372 mips32_insn.fp0_format.fd = 372 mips32_insn.fp0_format.fd =
373 insn.mm_fp1_format.rt; 373 insn.mm_fp1_format.rt;
374 mips32_insn.fp0_format.func = func; 374 mips32_insn.fp0_format.func = func;
375 break; 375 break;
376 case mm_mfc1_op: 376 case mm_mfc1_op:
377 case mm_mtc1_op: 377 case mm_mtc1_op:
378 case mm_cfc1_op: 378 case mm_cfc1_op:
379 case mm_ctc1_op: 379 case mm_ctc1_op:
380 case mm_mfhc1_op: 380 case mm_mfhc1_op:
381 case mm_mthc1_op: 381 case mm_mthc1_op:
382 if (insn.mm_fp1_format.op == mm_mfc1_op) 382 if (insn.mm_fp1_format.op == mm_mfc1_op)
383 op = mfc_op; 383 op = mfc_op;
384 else if (insn.mm_fp1_format.op == mm_mtc1_op) 384 else if (insn.mm_fp1_format.op == mm_mtc1_op)
385 op = mtc_op; 385 op = mtc_op;
386 else if (insn.mm_fp1_format.op == mm_cfc1_op) 386 else if (insn.mm_fp1_format.op == mm_cfc1_op)
387 op = cfc_op; 387 op = cfc_op;
388 else if (insn.mm_fp1_format.op == mm_ctc1_op) 388 else if (insn.mm_fp1_format.op == mm_ctc1_op)
389 op = ctc_op; 389 op = ctc_op;
390 else if (insn.mm_fp1_format.op == mm_mfhc1_op) 390 else if (insn.mm_fp1_format.op == mm_mfhc1_op)
391 op = mfhc_op; 391 op = mfhc_op;
392 else 392 else
393 op = mthc_op; 393 op = mthc_op;
394 mips32_insn.fp1_format.opcode = cop1_op; 394 mips32_insn.fp1_format.opcode = cop1_op;
395 mips32_insn.fp1_format.op = op; 395 mips32_insn.fp1_format.op = op;
396 mips32_insn.fp1_format.rt = 396 mips32_insn.fp1_format.rt =
397 insn.mm_fp1_format.rt; 397 insn.mm_fp1_format.rt;
398 mips32_insn.fp1_format.fs = 398 mips32_insn.fp1_format.fs =
399 insn.mm_fp1_format.fs; 399 insn.mm_fp1_format.fs;
400 mips32_insn.fp1_format.fd = 0; 400 mips32_insn.fp1_format.fd = 0;
401 mips32_insn.fp1_format.func = 0; 401 mips32_insn.fp1_format.func = 0;
402 break; 402 break;
403 default: 403 default:
404 return SIGILL; 404 return SIGILL;
405 } 405 }
406 break; 406 break;
407 case mm_32f_74_op: /* c.cond.fmt */ 407 case mm_32f_74_op: /* c.cond.fmt */
408 mips32_insn.fp0_format.opcode = cop1_op; 408 mips32_insn.fp0_format.opcode = cop1_op;
409 mips32_insn.fp0_format.fmt = 409 mips32_insn.fp0_format.fmt =
410 sdps_format[insn.mm_fp4_format.fmt]; 410 sdps_format[insn.mm_fp4_format.fmt];
411 mips32_insn.fp0_format.ft = insn.mm_fp4_format.rt; 411 mips32_insn.fp0_format.ft = insn.mm_fp4_format.rt;
412 mips32_insn.fp0_format.fs = insn.mm_fp4_format.fs; 412 mips32_insn.fp0_format.fs = insn.mm_fp4_format.fs;
413 mips32_insn.fp0_format.fd = insn.mm_fp4_format.cc << 2; 413 mips32_insn.fp0_format.fd = insn.mm_fp4_format.cc << 2;
414 mips32_insn.fp0_format.func = 414 mips32_insn.fp0_format.func =
415 insn.mm_fp4_format.cond | MM_MIPS32_COND_FC; 415 insn.mm_fp4_format.cond | MM_MIPS32_COND_FC;
416 break; 416 break;
417 default: 417 default:
418 return SIGILL; 418 return SIGILL;
419 } 419 }
420 break; 420 break;
421 default: 421 default:
422 return SIGILL; 422 return SIGILL;
423 } 423 }
424 424
425 *insn_ptr = mips32_insn; 425 *insn_ptr = mips32_insn;
426 return 0; 426 return 0;
427 } 427 }
428 428
429 /* 429 /*
430 * Redundant with logic already in kernel/branch.c, 430 * Redundant with logic already in kernel/branch.c,
431 * embedded in compute_return_epc. At some point, 431 * embedded in compute_return_epc. At some point,
432 * a single subroutine should be used across both 432 * a single subroutine should be used across both
433 * modules. 433 * modules.
434 */ 434 */
435 static int isBranchInstr(struct pt_regs *regs, struct mm_decoded_insn dec_insn, 435 static int isBranchInstr(struct pt_regs *regs, struct mm_decoded_insn dec_insn,
436 unsigned long *contpc) 436 unsigned long *contpc)
437 { 437 {
438 union mips_instruction insn = (union mips_instruction)dec_insn.insn; 438 union mips_instruction insn = (union mips_instruction)dec_insn.insn;
439 unsigned int fcr31; 439 unsigned int fcr31;
440 unsigned int bit = 0; 440 unsigned int bit = 0;
441 441
442 switch (insn.i_format.opcode) { 442 switch (insn.i_format.opcode) {
443 case spec_op: 443 case spec_op:
444 switch (insn.r_format.func) { 444 switch (insn.r_format.func) {
445 case jalr_op: 445 case jalr_op:
446 regs->regs[insn.r_format.rd] = 446 regs->regs[insn.r_format.rd] =
447 regs->cp0_epc + dec_insn.pc_inc + 447 regs->cp0_epc + dec_insn.pc_inc +
448 dec_insn.next_pc_inc; 448 dec_insn.next_pc_inc;
449 /* Fall through */ 449 /* Fall through */
450 case jr_op: 450 case jr_op:
451 *contpc = regs->regs[insn.r_format.rs]; 451 *contpc = regs->regs[insn.r_format.rs];
452 return 1; 452 return 1;
453 } 453 }
454 break; 454 break;
455 case bcond_op: 455 case bcond_op:
456 switch (insn.i_format.rt) { 456 switch (insn.i_format.rt) {
457 case bltzal_op: 457 case bltzal_op:
458 case bltzall_op: 458 case bltzall_op:
459 regs->regs[31] = regs->cp0_epc + 459 regs->regs[31] = regs->cp0_epc +
460 dec_insn.pc_inc + 460 dec_insn.pc_inc +
461 dec_insn.next_pc_inc; 461 dec_insn.next_pc_inc;
462 /* Fall through */ 462 /* Fall through */
463 case bltz_op: 463 case bltz_op:
464 case bltzl_op: 464 case bltzl_op:
465 if ((long)regs->regs[insn.i_format.rs] < 0) 465 if ((long)regs->regs[insn.i_format.rs] < 0)
466 *contpc = regs->cp0_epc + 466 *contpc = regs->cp0_epc +
467 dec_insn.pc_inc + 467 dec_insn.pc_inc +
468 (insn.i_format.simmediate << 2); 468 (insn.i_format.simmediate << 2);
469 else 469 else
470 *contpc = regs->cp0_epc + 470 *contpc = regs->cp0_epc +
471 dec_insn.pc_inc + 471 dec_insn.pc_inc +
472 dec_insn.next_pc_inc; 472 dec_insn.next_pc_inc;
473 return 1; 473 return 1;
474 case bgezal_op: 474 case bgezal_op:
475 case bgezall_op: 475 case bgezall_op:
476 regs->regs[31] = regs->cp0_epc + 476 regs->regs[31] = regs->cp0_epc +
477 dec_insn.pc_inc + 477 dec_insn.pc_inc +
478 dec_insn.next_pc_inc; 478 dec_insn.next_pc_inc;
479 /* Fall through */ 479 /* Fall through */
480 case bgez_op: 480 case bgez_op:
481 case bgezl_op: 481 case bgezl_op:
482 if ((long)regs->regs[insn.i_format.rs] >= 0) 482 if ((long)regs->regs[insn.i_format.rs] >= 0)
483 *contpc = regs->cp0_epc + 483 *contpc = regs->cp0_epc +
484 dec_insn.pc_inc + 484 dec_insn.pc_inc +
485 (insn.i_format.simmediate << 2); 485 (insn.i_format.simmediate << 2);
486 else 486 else
487 *contpc = regs->cp0_epc + 487 *contpc = regs->cp0_epc +
488 dec_insn.pc_inc + 488 dec_insn.pc_inc +
489 dec_insn.next_pc_inc; 489 dec_insn.next_pc_inc;
490 return 1; 490 return 1;
491 } 491 }
492 break; 492 break;
493 case jalx_op: 493 case jalx_op:
494 set_isa16_mode(bit); 494 set_isa16_mode(bit);
495 case jal_op: 495 case jal_op:
496 regs->regs[31] = regs->cp0_epc + 496 regs->regs[31] = regs->cp0_epc +
497 dec_insn.pc_inc + 497 dec_insn.pc_inc +
498 dec_insn.next_pc_inc; 498 dec_insn.next_pc_inc;
499 /* Fall through */ 499 /* Fall through */
500 case j_op: 500 case j_op:
501 *contpc = regs->cp0_epc + dec_insn.pc_inc; 501 *contpc = regs->cp0_epc + dec_insn.pc_inc;
502 *contpc >>= 28; 502 *contpc >>= 28;
503 *contpc <<= 28; 503 *contpc <<= 28;
504 *contpc |= (insn.j_format.target << 2); 504 *contpc |= (insn.j_format.target << 2);
505 /* Set microMIPS mode bit: XOR for jalx. */ 505 /* Set microMIPS mode bit: XOR for jalx. */
506 *contpc ^= bit; 506 *contpc ^= bit;
507 return 1; 507 return 1;
508 case beq_op: 508 case beq_op:
509 case beql_op: 509 case beql_op:
510 if (regs->regs[insn.i_format.rs] == 510 if (regs->regs[insn.i_format.rs] ==
511 regs->regs[insn.i_format.rt]) 511 regs->regs[insn.i_format.rt])
512 *contpc = regs->cp0_epc + 512 *contpc = regs->cp0_epc +
513 dec_insn.pc_inc + 513 dec_insn.pc_inc +
514 (insn.i_format.simmediate << 2); 514 (insn.i_format.simmediate << 2);
515 else 515 else
516 *contpc = regs->cp0_epc + 516 *contpc = regs->cp0_epc +
517 dec_insn.pc_inc + 517 dec_insn.pc_inc +
518 dec_insn.next_pc_inc; 518 dec_insn.next_pc_inc;
519 return 1; 519 return 1;
520 case bne_op: 520 case bne_op:
521 case bnel_op: 521 case bnel_op:
522 if (regs->regs[insn.i_format.rs] != 522 if (regs->regs[insn.i_format.rs] !=
523 regs->regs[insn.i_format.rt]) 523 regs->regs[insn.i_format.rt])
524 *contpc = regs->cp0_epc + 524 *contpc = regs->cp0_epc +
525 dec_insn.pc_inc + 525 dec_insn.pc_inc +
526 (insn.i_format.simmediate << 2); 526 (insn.i_format.simmediate << 2);
527 else 527 else
528 *contpc = regs->cp0_epc + 528 *contpc = regs->cp0_epc +
529 dec_insn.pc_inc + 529 dec_insn.pc_inc +
530 dec_insn.next_pc_inc; 530 dec_insn.next_pc_inc;
531 return 1; 531 return 1;
532 case blez_op: 532 case blez_op:
533 case blezl_op: 533 case blezl_op:
534 if ((long)regs->regs[insn.i_format.rs] <= 0) 534 if ((long)regs->regs[insn.i_format.rs] <= 0)
535 *contpc = regs->cp0_epc + 535 *contpc = regs->cp0_epc +
536 dec_insn.pc_inc + 536 dec_insn.pc_inc +
537 (insn.i_format.simmediate << 2); 537 (insn.i_format.simmediate << 2);
538 else 538 else
539 *contpc = regs->cp0_epc + 539 *contpc = regs->cp0_epc +
540 dec_insn.pc_inc + 540 dec_insn.pc_inc +
541 dec_insn.next_pc_inc; 541 dec_insn.next_pc_inc;
542 return 1; 542 return 1;
543 case bgtz_op: 543 case bgtz_op:
544 case bgtzl_op: 544 case bgtzl_op:
545 if ((long)regs->regs[insn.i_format.rs] > 0) 545 if ((long)regs->regs[insn.i_format.rs] > 0)
546 *contpc = regs->cp0_epc + 546 *contpc = regs->cp0_epc +
547 dec_insn.pc_inc + 547 dec_insn.pc_inc +
548 (insn.i_format.simmediate << 2); 548 (insn.i_format.simmediate << 2);
549 else 549 else
550 *contpc = regs->cp0_epc + 550 *contpc = regs->cp0_epc +
551 dec_insn.pc_inc + 551 dec_insn.pc_inc +
552 dec_insn.next_pc_inc; 552 dec_insn.next_pc_inc;
553 return 1; 553 return 1;
554 #ifdef CONFIG_CPU_CAVIUM_OCTEON 554 #ifdef CONFIG_CPU_CAVIUM_OCTEON
555 case lwc2_op: /* This is bbit0 on Octeon */ 555 case lwc2_op: /* This is bbit0 on Octeon */
556 if ((regs->regs[insn.i_format.rs] & (1ull<<insn.i_format.rt)) == 0) 556 if ((regs->regs[insn.i_format.rs] & (1ull<<insn.i_format.rt)) == 0)
557 *contpc = regs->cp0_epc + 4 + (insn.i_format.simmediate << 2); 557 *contpc = regs->cp0_epc + 4 + (insn.i_format.simmediate << 2);
558 else 558 else
559 *contpc = regs->cp0_epc + 8; 559 *contpc = regs->cp0_epc + 8;
560 return 1; 560 return 1;
561 case ldc2_op: /* This is bbit032 on Octeon */ 561 case ldc2_op: /* This is bbit032 on Octeon */
562 if ((regs->regs[insn.i_format.rs] & (1ull<<(insn.i_format.rt + 32))) == 0) 562 if ((regs->regs[insn.i_format.rs] & (1ull<<(insn.i_format.rt + 32))) == 0)
563 *contpc = regs->cp0_epc + 4 + (insn.i_format.simmediate << 2); 563 *contpc = regs->cp0_epc + 4 + (insn.i_format.simmediate << 2);
564 else 564 else
565 *contpc = regs->cp0_epc + 8; 565 *contpc = regs->cp0_epc + 8;
566 return 1; 566 return 1;
567 case swc2_op: /* This is bbit1 on Octeon */ 567 case swc2_op: /* This is bbit1 on Octeon */
568 if (regs->regs[insn.i_format.rs] & (1ull<<insn.i_format.rt)) 568 if (regs->regs[insn.i_format.rs] & (1ull<<insn.i_format.rt))
569 *contpc = regs->cp0_epc + 4 + (insn.i_format.simmediate << 2); 569 *contpc = regs->cp0_epc + 4 + (insn.i_format.simmediate << 2);
570 else 570 else
571 *contpc = regs->cp0_epc + 8; 571 *contpc = regs->cp0_epc + 8;
572 return 1; 572 return 1;
573 case sdc2_op: /* This is bbit132 on Octeon */ 573 case sdc2_op: /* This is bbit132 on Octeon */
574 if (regs->regs[insn.i_format.rs] & (1ull<<(insn.i_format.rt + 32))) 574 if (regs->regs[insn.i_format.rs] & (1ull<<(insn.i_format.rt + 32)))
575 *contpc = regs->cp0_epc + 4 + (insn.i_format.simmediate << 2); 575 *contpc = regs->cp0_epc + 4 + (insn.i_format.simmediate << 2);
576 else 576 else
577 *contpc = regs->cp0_epc + 8; 577 *contpc = regs->cp0_epc + 8;
578 return 1; 578 return 1;
579 #endif 579 #endif
580 case cop0_op: 580 case cop0_op:
581 case cop1_op: 581 case cop1_op:
582 case cop2_op: 582 case cop2_op:
583 case cop1x_op: 583 case cop1x_op:
584 if (insn.i_format.rs == bc_op) { 584 if (insn.i_format.rs == bc_op) {
585 preempt_disable(); 585 preempt_disable();
586 if (is_fpu_owner()) 586 if (is_fpu_owner())
587 asm volatile( 587 fcr31 = read_32bit_cp1_register(CP1_STATUS);
588 ".set push\n"
589 "\t.set mips1\n"
590 "\tcfc1\t%0,$31\n"
591 "\t.set pop" : "=r" (fcr31));
592 else 588 else
593 fcr31 = current->thread.fpu.fcr31; 589 fcr31 = current->thread.fpu.fcr31;
594 preempt_enable(); 590 preempt_enable();
595 591
596 bit = (insn.i_format.rt >> 2); 592 bit = (insn.i_format.rt >> 2);
597 bit += (bit != 0); 593 bit += (bit != 0);
598 bit += 23; 594 bit += 23;
599 switch (insn.i_format.rt & 3) { 595 switch (insn.i_format.rt & 3) {
600 case 0: /* bc1f */ 596 case 0: /* bc1f */
601 case 2: /* bc1fl */ 597 case 2: /* bc1fl */
602 if (~fcr31 & (1 << bit)) 598 if (~fcr31 & (1 << bit))
603 *contpc = regs->cp0_epc + 599 *contpc = regs->cp0_epc +
604 dec_insn.pc_inc + 600 dec_insn.pc_inc +
605 (insn.i_format.simmediate << 2); 601 (insn.i_format.simmediate << 2);
606 else 602 else
607 *contpc = regs->cp0_epc + 603 *contpc = regs->cp0_epc +
608 dec_insn.pc_inc + 604 dec_insn.pc_inc +
609 dec_insn.next_pc_inc; 605 dec_insn.next_pc_inc;
610 return 1; 606 return 1;
611 case 1: /* bc1t */ 607 case 1: /* bc1t */
612 case 3: /* bc1tl */ 608 case 3: /* bc1tl */
613 if (fcr31 & (1 << bit)) 609 if (fcr31 & (1 << bit))
614 *contpc = regs->cp0_epc + 610 *contpc = regs->cp0_epc +
615 dec_insn.pc_inc + 611 dec_insn.pc_inc +
616 (insn.i_format.simmediate << 2); 612 (insn.i_format.simmediate << 2);
617 else 613 else
618 *contpc = regs->cp0_epc + 614 *contpc = regs->cp0_epc +
619 dec_insn.pc_inc + 615 dec_insn.pc_inc +
620 dec_insn.next_pc_inc; 616 dec_insn.next_pc_inc;
621 return 1; 617 return 1;
622 } 618 }
623 } 619 }
624 break; 620 break;
625 } 621 }
626 return 0; 622 return 0;
627 } 623 }
628 624
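The bc1f/bc1t handling in isBranchInstr() above turns the condition-code index encoded in the rt field into an FCSR bit number with "bit = rt >> 2; bit += (bit != 0); bit += 23", i.e. condition code 0 lives at bit 23 and codes 1-7 at bits 25-31, matching the fpucondbit[] table near the top of the file. A small stand-alone check of that arithmetic; the FPU_CSR_COND* values are written out numerically here because their definitions are outside this diff.

    #include <assert.h>
    #include <stdio.h>

    /* Bit positions mirroring FPU_CSR_COND0..FPU_CSR_COND7 (defined elsewhere). */
    static const unsigned fcsr_cond_bit[8] = { 23, 25, 26, 27, 28, 29, 30, 31 };

    /* Same computation as isBranchInstr(): rt = (cc << 2) | branch-type bits. */
    static unsigned cc_to_fcsr_bit(unsigned cc)
    {
            unsigned rt = cc << 2;          /* low two bits pick bc1f/bc1t[l] */
            unsigned bit = rt >> 2;

            bit += (bit != 0);
            bit += 23;
            return bit;
    }

    int main(void)
    {
            for (unsigned cc = 0; cc < 8; cc++)
                    assert(cc_to_fcsr_bit(cc) == fcsr_cond_bit[cc]);
            printf("cc -> FCSR bit mapping matches fpucondbit[]\n");
            return 0;
    }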
629 /* 625 /*
630 * In the Linux kernel, we support selection of FPR format on the 626 * In the Linux kernel, we support selection of FPR format on the
631 * basis of the Status.FR bit. If an FPU is not present, the FR bit 627 * basis of the Status.FR bit. If an FPU is not present, the FR bit
632 * is hardwired to zero, which would imply a 32-bit FPU even for 628 * is hardwired to zero, which would imply a 32-bit FPU even for
633 * 64-bit CPUs, so we look at TIF_32BIT_FPREGS instead. 629 * 64-bit CPUs, so we look at TIF_32BIT_FPREGS instead.
634 * FPU emu is slow and bulky and optimizing this function offers fairly 630 * FPU emu is slow and bulky and optimizing this function offers fairly
635 * sizeable benefits so we try to be clever and make this function return 631 * sizeable benefits so we try to be clever and make this function return
636 * a constant whenever possible, that is on 64-bit kernels without O32 632 * a constant whenever possible, that is on 64-bit kernels without O32
637 * compatibility enabled and on 32-bit without 64-bit FPU support. 633 * compatibility enabled and on 32-bit without 64-bit FPU support.
638 */ 634 */
639 static inline int cop1_64bit(struct pt_regs *xcp) 635 static inline int cop1_64bit(struct pt_regs *xcp)
640 { 636 {
641 if (config_enabled(CONFIG_64BIT) && !config_enabled(CONFIG_MIPS32_O32)) 637 if (config_enabled(CONFIG_64BIT) && !config_enabled(CONFIG_MIPS32_O32))
642 return 1; 638 return 1;
643 else if (config_enabled(CONFIG_32BIT) && 639 else if (config_enabled(CONFIG_32BIT) &&
644 !config_enabled(CONFIG_MIPS_O32_FP64_SUPPORT)) 640 !config_enabled(CONFIG_MIPS_O32_FP64_SUPPORT))
645 return 0; 641 return 0;
646 642
647 return !test_thread_flag(TIF_32BIT_FPREGS); 643 return !test_thread_flag(TIF_32BIT_FPREGS);
648 } 644 }
649 645
650 #define SIFROMREG(si, x) \ 646 #define SIFROMREG(si, x) \
651 do { \ 647 do { \
652 if (cop1_64bit(xcp)) \ 648 if (cop1_64bit(xcp)) \
653 (si) = (int)get_fpr32(&ctx->fpr[x], 0); \ 649 (si) = (int)get_fpr32(&ctx->fpr[x], 0); \
654 else \ 650 else \
655 (si) = (int)get_fpr32(&ctx->fpr[(x) & ~1], (x) & 1); \ 651 (si) = (int)get_fpr32(&ctx->fpr[(x) & ~1], (x) & 1); \
656 } while (0) 652 } while (0)
657 653
658 #define SITOREG(si, x) \ 654 #define SITOREG(si, x) \
659 do { \ 655 do { \
660 if (cop1_64bit(xcp)) { \ 656 if (cop1_64bit(xcp)) { \
661 unsigned i; \ 657 unsigned i; \
662 set_fpr32(&ctx->fpr[x], 0, si); \ 658 set_fpr32(&ctx->fpr[x], 0, si); \
663 for (i = 1; i < ARRAY_SIZE(ctx->fpr[x].val32); i++) \ 659 for (i = 1; i < ARRAY_SIZE(ctx->fpr[x].val32); i++) \
664 set_fpr32(&ctx->fpr[x], i, 0); \ 660 set_fpr32(&ctx->fpr[x], i, 0); \
665 } else { \ 661 } else { \
666 set_fpr32(&ctx->fpr[(x) & ~1], (x) & 1, si); \ 662 set_fpr32(&ctx->fpr[(x) & ~1], (x) & 1, si); \
667 } \ 663 } \
668 } while (0) 664 } while (0)
669 665
670 #define SIFROMHREG(si, x) ((si) = (int)get_fpr32(&ctx->fpr[x], 1)) 666 #define SIFROMHREG(si, x) ((si) = (int)get_fpr32(&ctx->fpr[x], 1))
671 667
672 #define SITOHREG(si, x) \ 668 #define SITOHREG(si, x) \
673 do { \ 669 do { \
674 unsigned i; \ 670 unsigned i; \
675 set_fpr32(&ctx->fpr[x], 1, si); \ 671 set_fpr32(&ctx->fpr[x], 1, si); \
676 for (i = 2; i < ARRAY_SIZE(ctx->fpr[x].val32); i++) \ 672 for (i = 2; i < ARRAY_SIZE(ctx->fpr[x].val32); i++) \
677 set_fpr32(&ctx->fpr[x], i, 0); \ 673 set_fpr32(&ctx->fpr[x], i, 0); \
678 } while (0) 674 } while (0)
679 675
680 #define DIFROMREG(di, x) \ 676 #define DIFROMREG(di, x) \
681 ((di) = get_fpr64(&ctx->fpr[(x) & ~(cop1_64bit(xcp) == 0)], 0)) 677 ((di) = get_fpr64(&ctx->fpr[(x) & ~(cop1_64bit(xcp) == 0)], 0))
682 678
683 #define DITOREG(di, x) \ 679 #define DITOREG(di, x) \
684 do { \ 680 do { \
685 unsigned fpr, i; \ 681 unsigned fpr, i; \
686 fpr = (x) & ~(cop1_64bit(xcp) == 0); \ 682 fpr = (x) & ~(cop1_64bit(xcp) == 0); \
687 set_fpr64(&ctx->fpr[fpr], 0, di); \ 683 set_fpr64(&ctx->fpr[fpr], 0, di); \
688 for (i = 1; i < ARRAY_SIZE(ctx->fpr[x].val64); i++) \ 684 for (i = 1; i < ARRAY_SIZE(ctx->fpr[x].val64); i++) \
689 set_fpr64(&ctx->fpr[fpr], i, 0); \ 685 set_fpr64(&ctx->fpr[fpr], i, 0); \
690 } while (0) 686 } while (0)
691 687
692 #define SPFROMREG(sp, x) SIFROMREG((sp).bits, x) 688 #define SPFROMREG(sp, x) SIFROMREG((sp).bits, x)
693 #define SPTOREG(sp, x) SITOREG((sp).bits, x) 689 #define SPTOREG(sp, x) SITOREG((sp).bits, x)
694 #define DPFROMREG(dp, x) DIFROMREG((dp).bits, x) 690 #define DPFROMREG(dp, x) DIFROMREG((dp).bits, x)
695 #define DPTOREG(dp, x) DITOREG((dp).bits, x) 691 #define DPTOREG(dp, x) DITOREG((dp).bits, x)
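/*
 * Illustrative sketch (not part of this patch), with a hypothetical helper
 * name: how the accessor macros above resolve a single-precision register
 * index in the two FPR modes distinguished by cop1_64bit().
 */
static inline u32 example_get_single(struct mips_fpu_struct *ctx,
				     struct pt_regs *xcp, unsigned int fpr)
{
	if (cop1_64bit(xcp))
		/* FR=1: 32 independent 64-bit FPRs, take the low word */
		return get_fpr32(&ctx->fpr[fpr], 0);

	/* FR=0: singles are packed in even/odd pairs; an odd index selects
	 * the second word of the even register */
	return get_fpr32(&ctx->fpr[fpr & ~1], fpr & 1);
}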
696 692
697 /* 693 /*
698 * Emulate the single floating point instruction pointed at by EPC. 694 * Emulate the single floating point instruction pointed at by EPC.
699 * Two instructions if the instruction is in a branch delay slot. 695 * Two instructions if the instruction is in a branch delay slot.
700 */ 696 */
701 697
702 static int cop1Emulate(struct pt_regs *xcp, struct mips_fpu_struct *ctx, 698 static int cop1Emulate(struct pt_regs *xcp, struct mips_fpu_struct *ctx,
703 struct mm_decoded_insn dec_insn, void *__user *fault_addr) 699 struct mm_decoded_insn dec_insn, void *__user *fault_addr)
704 { 700 {
705 unsigned long contpc = xcp->cp0_epc + dec_insn.pc_inc; 701 unsigned long contpc = xcp->cp0_epc + dec_insn.pc_inc;
706 unsigned int cond, cbit; 702 unsigned int cond, cbit;
707 mips_instruction ir; 703 mips_instruction ir;
708 int likely, pc_inc; 704 int likely, pc_inc;
709 u32 __user *wva; 705 u32 __user *wva;
710 u64 __user *dva; 706 u64 __user *dva;
711 u32 value; 707 u32 value;
712 u32 wval; 708 u32 wval;
713 u64 dval; 709 u64 dval;
714 int sig; 710 int sig;
715 711
716 /* 712 /*
717 * This gives gcc a gentle hint about what to expect in 713 * This gives gcc a gentle hint about what to expect in
718 * dec_insn in order to do better optimization. 714 * dec_insn in order to do better optimization.
719 */ 715 */
720 if (!cpu_has_mmips && dec_insn.micro_mips_mode) 716 if (!cpu_has_mmips && dec_insn.micro_mips_mode)
721 unreachable(); 717 unreachable();
722 718
723 /* XXX NEC Vr54xx bug workaround */ 719 /* XXX NEC Vr54xx bug workaround */
724 if (delay_slot(xcp)) { 720 if (delay_slot(xcp)) {
725 if (dec_insn.micro_mips_mode) { 721 if (dec_insn.micro_mips_mode) {
726 if (!mm_isBranchInstr(xcp, dec_insn, &contpc)) 722 if (!mm_isBranchInstr(xcp, dec_insn, &contpc))
727 clear_delay_slot(xcp); 723 clear_delay_slot(xcp);
728 } else { 724 } else {
729 if (!isBranchInstr(xcp, dec_insn, &contpc)) 725 if (!isBranchInstr(xcp, dec_insn, &contpc))
730 clear_delay_slot(xcp); 726 clear_delay_slot(xcp);
731 } 727 }
732 } 728 }
733 729
734 if (delay_slot(xcp)) { 730 if (delay_slot(xcp)) {
735 /* 731 /*
736 * The instruction to be emulated is in a branch delay slot 732 * The instruction to be emulated is in a branch delay slot
737 * which means that we have to emulate the branch instruction 733 * which means that we have to emulate the branch instruction
738 * BEFORE we do the cop1 instruction. 734 * BEFORE we do the cop1 instruction.
739 * 735 *
740 * This branch could be a COP1 branch, but in that case we 736 * This branch could be a COP1 branch, but in that case we
741 * would have had a trap for that instruction, and would not 737 * would have had a trap for that instruction, and would not
742 * come through this route. 738 * come through this route.
743 * 739 *
744 * The Linux MIPS branch emulator operates on context, updating the 740 * The Linux MIPS branch emulator operates on context, updating the
745 * cp0_epc. 741 * cp0_epc.
746 */ 742 */
747 ir = dec_insn.next_insn; /* process delay slot instr */ 743 ir = dec_insn.next_insn; /* process delay slot instr */
748 pc_inc = dec_insn.next_pc_inc; 744 pc_inc = dec_insn.next_pc_inc;
749 } else { 745 } else {
750 ir = dec_insn.insn; /* process current instr */ 746 ir = dec_insn.insn; /* process current instr */
751 pc_inc = dec_insn.pc_inc; 747 pc_inc = dec_insn.pc_inc;
752 } 748 }
753 749
754 /* 750 /*
755 * Since microMIPS FPU instructions are a subset of MIPS32 FPU 751 * Since microMIPS FPU instructions are a subset of MIPS32 FPU
756 * instructions, we want to convert microMIPS FPU instructions 752 * instructions, we want to convert microMIPS FPU instructions
757 * into MIPS32 instructions so that we could reuse all of the 753 * into MIPS32 instructions so that we could reuse all of the
758 * FPU emulation code. 754 * FPU emulation code.
759 * 755 *
760 * NOTE: We cannot do this for branch instructions since they 756 * NOTE: We cannot do this for branch instructions since they
761 * are not a subset. Example: Cannot emulate a 16-bit 757 * are not a subset. Example: Cannot emulate a 16-bit
762 * aligned target address with a MIPS32 instruction. 758 * aligned target address with a MIPS32 instruction.
763 */ 759 */
764 if (dec_insn.micro_mips_mode) { 760 if (dec_insn.micro_mips_mode) {
765 /* 761 /*
766 * If the next instruction is a 16-bit instruction, then it 762 * If the next instruction is a 16-bit instruction, then it
767 * cannot be an FPU instruction. This could happen 763 * cannot be an FPU instruction. This could happen
768 * since we can be called for non-FPU instructions. 764 * since we can be called for non-FPU instructions.
769 */ 765 */
770 if ((pc_inc == 2) || 766 if ((pc_inc == 2) ||
771 (microMIPS32_to_MIPS32((union mips_instruction *)&ir) 767 (microMIPS32_to_MIPS32((union mips_instruction *)&ir)
772 == SIGILL)) 768 == SIGILL))
773 return SIGILL; 769 return SIGILL;
774 } 770 }
775 771
776 emul: 772 emul:
777 perf_sw_event(PERF_COUNT_SW_EMULATION_FAULTS, 1, xcp, 0); 773 perf_sw_event(PERF_COUNT_SW_EMULATION_FAULTS, 1, xcp, 0);
778 MIPS_FPU_EMU_INC_STATS(emulated); 774 MIPS_FPU_EMU_INC_STATS(emulated);
779 switch (MIPSInst_OPCODE(ir)) { 775 switch (MIPSInst_OPCODE(ir)) {
780 case ldc1_op: 776 case ldc1_op:
781 dva = (u64 __user *) (xcp->regs[MIPSInst_RS(ir)] + 777 dva = (u64 __user *) (xcp->regs[MIPSInst_RS(ir)] +
782 MIPSInst_SIMM(ir)); 778 MIPSInst_SIMM(ir));
783 MIPS_FPU_EMU_INC_STATS(loads); 779 MIPS_FPU_EMU_INC_STATS(loads);
784 780
785 if (!access_ok(VERIFY_READ, dva, sizeof(u64))) { 781 if (!access_ok(VERIFY_READ, dva, sizeof(u64))) {
786 MIPS_FPU_EMU_INC_STATS(errors); 782 MIPS_FPU_EMU_INC_STATS(errors);
787 *fault_addr = dva; 783 *fault_addr = dva;
788 return SIGBUS; 784 return SIGBUS;
789 } 785 }
790 if (__get_user(dval, dva)) { 786 if (__get_user(dval, dva)) {
791 MIPS_FPU_EMU_INC_STATS(errors); 787 MIPS_FPU_EMU_INC_STATS(errors);
792 *fault_addr = dva; 788 *fault_addr = dva;
793 return SIGSEGV; 789 return SIGSEGV;
794 } 790 }
795 DITOREG(dval, MIPSInst_RT(ir)); 791 DITOREG(dval, MIPSInst_RT(ir));
796 break; 792 break;
797 793
798 case sdc1_op: 794 case sdc1_op:
799 dva = (u64 __user *) (xcp->regs[MIPSInst_RS(ir)] + 795 dva = (u64 __user *) (xcp->regs[MIPSInst_RS(ir)] +
800 MIPSInst_SIMM(ir)); 796 MIPSInst_SIMM(ir));
801 MIPS_FPU_EMU_INC_STATS(stores); 797 MIPS_FPU_EMU_INC_STATS(stores);
802 DIFROMREG(dval, MIPSInst_RT(ir)); 798 DIFROMREG(dval, MIPSInst_RT(ir));
803 if (!access_ok(VERIFY_WRITE, dva, sizeof(u64))) { 799 if (!access_ok(VERIFY_WRITE, dva, sizeof(u64))) {
804 MIPS_FPU_EMU_INC_STATS(errors); 800 MIPS_FPU_EMU_INC_STATS(errors);
805 *fault_addr = dva; 801 *fault_addr = dva;
806 return SIGBUS; 802 return SIGBUS;
807 } 803 }
808 if (__put_user(dval, dva)) { 804 if (__put_user(dval, dva)) {
809 MIPS_FPU_EMU_INC_STATS(errors); 805 MIPS_FPU_EMU_INC_STATS(errors);
810 *fault_addr = dva; 806 *fault_addr = dva;
811 return SIGSEGV; 807 return SIGSEGV;
812 } 808 }
813 break; 809 break;
814 810
815 case lwc1_op: 811 case lwc1_op:
816 wva = (u32 __user *) (xcp->regs[MIPSInst_RS(ir)] + 812 wva = (u32 __user *) (xcp->regs[MIPSInst_RS(ir)] +
817 MIPSInst_SIMM(ir)); 813 MIPSInst_SIMM(ir));
818 MIPS_FPU_EMU_INC_STATS(loads); 814 MIPS_FPU_EMU_INC_STATS(loads);
819 if (!access_ok(VERIFY_READ, wva, sizeof(u32))) { 815 if (!access_ok(VERIFY_READ, wva, sizeof(u32))) {
820 MIPS_FPU_EMU_INC_STATS(errors); 816 MIPS_FPU_EMU_INC_STATS(errors);
821 *fault_addr = wva; 817 *fault_addr = wva;
822 return SIGBUS; 818 return SIGBUS;
823 } 819 }
824 if (__get_user(wval, wva)) { 820 if (__get_user(wval, wva)) {
825 MIPS_FPU_EMU_INC_STATS(errors); 821 MIPS_FPU_EMU_INC_STATS(errors);
826 *fault_addr = wva; 822 *fault_addr = wva;
827 return SIGSEGV; 823 return SIGSEGV;
828 } 824 }
829 SITOREG(wval, MIPSInst_RT(ir)); 825 SITOREG(wval, MIPSInst_RT(ir));
830 break; 826 break;
831 827
832 case swc1_op: 828 case swc1_op:
833 wva = (u32 __user *) (xcp->regs[MIPSInst_RS(ir)] + 829 wva = (u32 __user *) (xcp->regs[MIPSInst_RS(ir)] +
834 MIPSInst_SIMM(ir)); 830 MIPSInst_SIMM(ir));
835 MIPS_FPU_EMU_INC_STATS(stores); 831 MIPS_FPU_EMU_INC_STATS(stores);
836 SIFROMREG(wval, MIPSInst_RT(ir)); 832 SIFROMREG(wval, MIPSInst_RT(ir));
837 if (!access_ok(VERIFY_WRITE, wva, sizeof(u32))) { 833 if (!access_ok(VERIFY_WRITE, wva, sizeof(u32))) {
838 MIPS_FPU_EMU_INC_STATS(errors); 834 MIPS_FPU_EMU_INC_STATS(errors);
839 *fault_addr = wva; 835 *fault_addr = wva;
840 return SIGBUS; 836 return SIGBUS;
841 } 837 }
842 if (__put_user(wval, wva)) { 838 if (__put_user(wval, wva)) {
843 MIPS_FPU_EMU_INC_STATS(errors); 839 MIPS_FPU_EMU_INC_STATS(errors);
844 *fault_addr = wva; 840 *fault_addr = wva;
845 return SIGSEGV; 841 return SIGSEGV;
846 } 842 }
847 break; 843 break;
848 844
849 case cop1_op: 845 case cop1_op:
850 switch (MIPSInst_RS(ir)) { 846 switch (MIPSInst_RS(ir)) {
851 case dmfc_op: 847 case dmfc_op:
852 if (!cpu_has_mips_3_4_5 && !cpu_has_mips64) 848 if (!cpu_has_mips_3_4_5 && !cpu_has_mips64)
853 return SIGILL; 849 return SIGILL;
854 850
855 /* copregister fs -> gpr[rt] */ 851 /* copregister fs -> gpr[rt] */
856 if (MIPSInst_RT(ir) != 0) { 852 if (MIPSInst_RT(ir) != 0) {
857 DIFROMREG(xcp->regs[MIPSInst_RT(ir)], 853 DIFROMREG(xcp->regs[MIPSInst_RT(ir)],
858 MIPSInst_RD(ir)); 854 MIPSInst_RD(ir));
859 } 855 }
860 break; 856 break;
861 857
862 case dmtc_op: 858 case dmtc_op:
863 if (!cpu_has_mips_3_4_5 && !cpu_has_mips64) 859 if (!cpu_has_mips_3_4_5 && !cpu_has_mips64)
864 return SIGILL; 860 return SIGILL;
865 861
866 /* copregister fs <- rt */ 862 /* copregister fs <- rt */
867 DITOREG(xcp->regs[MIPSInst_RT(ir)], MIPSInst_RD(ir)); 863 DITOREG(xcp->regs[MIPSInst_RT(ir)], MIPSInst_RD(ir));
868 break; 864 break;
869 865
870 case mfhc_op: 866 case mfhc_op:
871 if (!cpu_has_mips_r2) 867 if (!cpu_has_mips_r2)
872 goto sigill; 868 goto sigill;
873 869
874 /* copregister rd -> gpr[rt] */ 870 /* copregister rd -> gpr[rt] */
875 if (MIPSInst_RT(ir) != 0) { 871 if (MIPSInst_RT(ir) != 0) {
876 SIFROMHREG(xcp->regs[MIPSInst_RT(ir)], 872 SIFROMHREG(xcp->regs[MIPSInst_RT(ir)],
877 MIPSInst_RD(ir)); 873 MIPSInst_RD(ir));
878 } 874 }
879 break; 875 break;
880 876
881 case mthc_op: 877 case mthc_op:
882 if (!cpu_has_mips_r2) 878 if (!cpu_has_mips_r2)
883 goto sigill; 879 goto sigill;
884 880
885 /* copregister rd <- gpr[rt] */ 881 /* copregister rd <- gpr[rt] */
886 SITOHREG(xcp->regs[MIPSInst_RT(ir)], MIPSInst_RD(ir)); 882 SITOHREG(xcp->regs[MIPSInst_RT(ir)], MIPSInst_RD(ir));
887 break; 883 break;
888 884
889 case mfc_op: 885 case mfc_op:
890 /* copregister rd -> gpr[rt] */ 886 /* copregister rd -> gpr[rt] */
891 if (MIPSInst_RT(ir) != 0) { 887 if (MIPSInst_RT(ir) != 0) {
892 SIFROMREG(xcp->regs[MIPSInst_RT(ir)], 888 SIFROMREG(xcp->regs[MIPSInst_RT(ir)],
893 MIPSInst_RD(ir)); 889 MIPSInst_RD(ir));
894 } 890 }
895 break; 891 break;
896 892
897 case mtc_op: 893 case mtc_op:
898 /* copregister rd <- rt */ 894 /* copregister rd <- rt */
899 SITOREG(xcp->regs[MIPSInst_RT(ir)], MIPSInst_RD(ir)); 895 SITOREG(xcp->regs[MIPSInst_RT(ir)], MIPSInst_RD(ir));
900 break; 896 break;
901 897
902 case cfc_op: 898 case cfc_op:
903 /* cop control register rd -> gpr[rt] */ 899 /* cop control register rd -> gpr[rt] */
904 if (MIPSInst_RD(ir) == FPCREG_CSR) { 900 if (MIPSInst_RD(ir) == FPCREG_CSR) {
905 value = ctx->fcr31; 901 value = ctx->fcr31;
906 value = (value & ~FPU_CSR_RM) | modeindex(value); 902 value = (value & ~FPU_CSR_RM) | modeindex(value);
907 pr_debug("%p gpr[%d]<-csr=%08x\n", 903 pr_debug("%p gpr[%d]<-csr=%08x\n",
908 (void *) (xcp->cp0_epc), 904 (void *) (xcp->cp0_epc),
909 MIPSInst_RT(ir), value); 905 MIPSInst_RT(ir), value);
910 } 906 }
911 else if (MIPSInst_RD(ir) == FPCREG_RID) 907 else if (MIPSInst_RD(ir) == FPCREG_RID)
912 value = 0; 908 value = 0;
913 else 909 else
914 value = 0; 910 value = 0;
915 if (MIPSInst_RT(ir)) 911 if (MIPSInst_RT(ir))
916 xcp->regs[MIPSInst_RT(ir)] = value; 912 xcp->regs[MIPSInst_RT(ir)] = value;
917 break; 913 break;
918 914
919 case ctc_op: 915 case ctc_op:
920 /* copregister rd <- rt */ 916 /* copregister rd <- rt */
921 if (MIPSInst_RT(ir) == 0) 917 if (MIPSInst_RT(ir) == 0)
922 value = 0; 918 value = 0;
923 else 919 else
924 value = xcp->regs[MIPSInst_RT(ir)]; 920 value = xcp->regs[MIPSInst_RT(ir)];
925 921
926 /* we only have one writable control reg 922 /* we only have one writable control reg
927 */ 923 */
928 if (MIPSInst_RD(ir) == FPCREG_CSR) { 924 if (MIPSInst_RD(ir) == FPCREG_CSR) {
929 pr_debug("%p gpr[%d]->csr=%08x\n", 925 pr_debug("%p gpr[%d]->csr=%08x\n",
930 (void *) (xcp->cp0_epc), 926 (void *) (xcp->cp0_epc),
931 MIPSInst_RT(ir), value); 927 MIPSInst_RT(ir), value);
932 928
933 /* 929 /*
934 * Don't write reserved bits, 930 * Don't write reserved bits,
935 * and convert to ieee library modes 931 * and convert to ieee library modes
936 */ 932 */
937 ctx->fcr31 = (value & ~(FPU_CSR_RSVD | FPU_CSR_RM)) | 933 ctx->fcr31 = (value & ~(FPU_CSR_RSVD | FPU_CSR_RM)) |
938 modeindex(value); 934 modeindex(value);
939 } 935 }
940 if ((ctx->fcr31 >> 5) & ctx->fcr31 & FPU_CSR_ALL_E) { 936 if ((ctx->fcr31 >> 5) & ctx->fcr31 & FPU_CSR_ALL_E) {
941 return SIGFPE; 937 return SIGFPE;
942 } 938 }
943 break; 939 break;
944 940
945 case bc_op: 941 case bc_op:
946 if (delay_slot(xcp)) 942 if (delay_slot(xcp))
947 return SIGILL; 943 return SIGILL;
948 944
949 if (cpu_has_mips_4_5_r) 945 if (cpu_has_mips_4_5_r)
950 cbit = fpucondbit[MIPSInst_RT(ir) >> 2]; 946 cbit = fpucondbit[MIPSInst_RT(ir) >> 2];
951 else 947 else
952 cbit = FPU_CSR_COND; 948 cbit = FPU_CSR_COND;
953 cond = ctx->fcr31 & cbit; 949 cond = ctx->fcr31 & cbit;
954 950
955 likely = 0; 951 likely = 0;
956 switch (MIPSInst_RT(ir) & 3) { 952 switch (MIPSInst_RT(ir) & 3) {
957 case bcfl_op: 953 case bcfl_op:
958 likely = 1; 954 likely = 1;
959 case bcf_op: 955 case bcf_op:
960 cond = !cond; 956 cond = !cond;
961 break; 957 break;
962 case bctl_op: 958 case bctl_op:
963 likely = 1; 959 likely = 1;
964 case bct_op: 960 case bct_op:
965 break; 961 break;
966 default: 962 default:
967 /* that's an illegal instruction */ 963 /* that's an illegal instruction */
968 return SIGILL; 964 return SIGILL;
969 } 965 }
970 966
971 set_delay_slot(xcp); 967 set_delay_slot(xcp);
972 if (cond) { 968 if (cond) {
973 /* 969 /*
974 * Branch taken: emulate dslot instruction 970 * Branch taken: emulate dslot instruction
975 */ 971 */
976 xcp->cp0_epc += dec_insn.pc_inc; 972 xcp->cp0_epc += dec_insn.pc_inc;
977 973
978 contpc = MIPSInst_SIMM(ir); 974 contpc = MIPSInst_SIMM(ir);
979 ir = dec_insn.next_insn; 975 ir = dec_insn.next_insn;
980 if (dec_insn.micro_mips_mode) { 976 if (dec_insn.micro_mips_mode) {
981 contpc = (xcp->cp0_epc + (contpc << 1)); 977 contpc = (xcp->cp0_epc + (contpc << 1));
982 978
983 /* If 16-bit instruction, not FPU. */ 979 /* If 16-bit instruction, not FPU. */
984 if ((dec_insn.next_pc_inc == 2) || 980 if ((dec_insn.next_pc_inc == 2) ||
985 (microMIPS32_to_MIPS32((union mips_instruction *)&ir) == SIGILL)) { 981 (microMIPS32_to_MIPS32((union mips_instruction *)&ir) == SIGILL)) {
986 982
987 /* 983 /*
988 * Since this instruction will 984 * Since this instruction will
989 * be put on the stack with 985 * be put on the stack with
990 * 32-bit words, get around 986 * 32-bit words, get around
991 * this problem by putting a 987 * this problem by putting a
992 * NOP16 as the second one. 988 * NOP16 as the second one.
993 */ 989 */
994 if (dec_insn.next_pc_inc == 2) 990 if (dec_insn.next_pc_inc == 2)
995 ir = (ir & (~0xffff)) | MM_NOP16; 991 ir = (ir & (~0xffff)) | MM_NOP16;
996 992
997 /* 993 /*
998 * Single step the non-CP1 994 * Single step the non-CP1
999 * instruction in the dslot. 995 * instruction in the dslot.
1000 */ 996 */
1001 return mips_dsemul(xcp, ir, contpc); 997 return mips_dsemul(xcp, ir, contpc);
1002 } 998 }
1003 } else 999 } else
1004 contpc = (xcp->cp0_epc + (contpc << 2)); 1000 contpc = (xcp->cp0_epc + (contpc << 2));
1005 1001
1006 switch (MIPSInst_OPCODE(ir)) { 1002 switch (MIPSInst_OPCODE(ir)) {
1007 case lwc1_op: 1003 case lwc1_op:
1008 goto emul; 1004 goto emul;
1009 1005
1010 case swc1_op: 1006 case swc1_op:
1011 goto emul; 1007 goto emul;
1012 1008
1013 case ldc1_op: 1009 case ldc1_op:
1014 case sdc1_op: 1010 case sdc1_op:
1015 if (cpu_has_mips_2_3_4_5 || 1011 if (cpu_has_mips_2_3_4_5 ||
1016 cpu_has_mips64) 1012 cpu_has_mips64)
1017 goto emul; 1013 goto emul;
1018 1014
1019 return SIGILL; 1015 return SIGILL;
1020 goto emul; 1016 goto emul;
1021 1017
1022 case cop1_op: 1018 case cop1_op:
1023 goto emul; 1019 goto emul;
1024 1020
1025 case cop1x_op: 1021 case cop1x_op:
1026 if (cpu_has_mips_4_5 || cpu_has_mips64 || cpu_has_mips32r2) 1022 if (cpu_has_mips_4_5 || cpu_has_mips64 || cpu_has_mips32r2)
1027 /* it's one of ours */ 1023 /* it's one of ours */
1028 goto emul; 1024 goto emul;
1029 1025
1030 return SIGILL; 1026 return SIGILL;
1031 1027
1032 case spec_op: 1028 case spec_op:
1033 if (!cpu_has_mips_4_5_r) 1029 if (!cpu_has_mips_4_5_r)
1034 return SIGILL; 1030 return SIGILL;
1035 1031
1036 if (MIPSInst_FUNC(ir) == movc_op) 1032 if (MIPSInst_FUNC(ir) == movc_op)
1037 goto emul; 1033 goto emul;
1038 break; 1034 break;
1039 } 1035 }
1040 1036
1041 /* 1037 /*
1042 * Single step the non-cp1 1038 * Single step the non-cp1
1043 * instruction in the dslot 1039 * instruction in the dslot
1044 */ 1040 */
1045 return mips_dsemul(xcp, ir, contpc); 1041 return mips_dsemul(xcp, ir, contpc);
1046 } else if (likely) { /* branch not taken */ 1042 } else if (likely) { /* branch not taken */
1047 /* 1043 /*
1048 * branch likely nullifies 1044 * branch likely nullifies
1049 * dslot if not taken 1045 * dslot if not taken
1050 */ 1046 */
1051 xcp->cp0_epc += dec_insn.pc_inc; 1047 xcp->cp0_epc += dec_insn.pc_inc;
1052 contpc += dec_insn.pc_inc; 1048 contpc += dec_insn.pc_inc;
1053 /* 1049 /*
1054 * else continue & execute 1050 * else continue & execute
1055 * dslot as normal insn 1051 * dslot as normal insn
1056 */ 1052 */
1057 } 1053 }
1058 break; 1054 break;
1059 1055
1060 default: 1056 default:
1061 if (!(MIPSInst_RS(ir) & 0x10)) 1057 if (!(MIPSInst_RS(ir) & 0x10))
1062 return SIGILL; 1058 return SIGILL;
1063 1059
1064 /* a real fpu computation instruction */ 1060 /* a real fpu computation instruction */
1065 if ((sig = fpu_emu(xcp, ctx, ir))) 1061 if ((sig = fpu_emu(xcp, ctx, ir)))
1066 return sig; 1062 return sig;
1067 } 1063 }
1068 break; 1064 break;
1069 1065
1070 case cop1x_op: 1066 case cop1x_op:
1071 if (!cpu_has_mips_4_5 && !cpu_has_mips64 && !cpu_has_mips32r2) 1067 if (!cpu_has_mips_4_5 && !cpu_has_mips64 && !cpu_has_mips32r2)
1072 return SIGILL; 1068 return SIGILL;
1073 1069
1074 sig = fpux_emu(xcp, ctx, ir, fault_addr); 1070 sig = fpux_emu(xcp, ctx, ir, fault_addr);
1075 if (sig) 1071 if (sig)
1076 return sig; 1072 return sig;
1077 break; 1073 break;
1078 1074
1079 case spec_op: 1075 case spec_op:
1080 if (!cpu_has_mips_4_5_r) 1076 if (!cpu_has_mips_4_5_r)
1081 return SIGILL; 1077 return SIGILL;
1082 1078
1083 if (MIPSInst_FUNC(ir) != movc_op) 1079 if (MIPSInst_FUNC(ir) != movc_op)
1084 return SIGILL; 1080 return SIGILL;
1085 cond = fpucondbit[MIPSInst_RT(ir) >> 2]; 1081 cond = fpucondbit[MIPSInst_RT(ir) >> 2];
1086 if (((ctx->fcr31 & cond) != 0) == ((MIPSInst_RT(ir) & 1) != 0)) 1082 if (((ctx->fcr31 & cond) != 0) == ((MIPSInst_RT(ir) & 1) != 0))
1087 xcp->regs[MIPSInst_RD(ir)] = 1083 xcp->regs[MIPSInst_RD(ir)] =
1088 xcp->regs[MIPSInst_RS(ir)]; 1084 xcp->regs[MIPSInst_RS(ir)];
1089 break; 1085 break;
1090 default: 1086 default:
1091 sigill: 1087 sigill:
1092 return SIGILL; 1088 return SIGILL;
1093 } 1089 }
1094 1090
1095 /* we did it !! */ 1091 /* we did it !! */
1096 xcp->cp0_epc = contpc; 1092 xcp->cp0_epc = contpc;
1097 clear_delay_slot(xcp); 1093 clear_delay_slot(xcp);
1098 1094
1099 return 0; 1095 return 0;
1100 } 1096 }
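/*
 * Illustrative sketch (not part of this patch): the recurring
 * "(fcr31 >> 5) & fcr31 & FPU_CSR_ALL_E" test used above. Shifting the FCSR
 * right by five aligns the Cause bits with the Enable bits, so the
 * expression is non-zero exactly when a freshly raised cause bit has its
 * exception enabled and a SIGFPE must be delivered. The helper name is
 * hypothetical.
 */
static inline int fpe_signal_pending(u32 fcr31)
{
	return (fcr31 >> 5) & fcr31 & FPU_CSR_ALL_E;
}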
1101 1097
1102 /* 1098 /*
1103 * Conversion table from MIPS compare ops 48-63 1099 * Conversion table from MIPS compare ops 48-63
1104 * cond = ieee754dp_cmp(x,y,IEEE754_UN,sig); 1100 * cond = ieee754dp_cmp(x,y,IEEE754_UN,sig);
1105 */ 1101 */
1106 static const unsigned char cmptab[8] = { 1102 static const unsigned char cmptab[8] = {
1107 0, /* cmp_0 (sig) cmp_sf */ 1103 0, /* cmp_0 (sig) cmp_sf */
1108 IEEE754_CUN, /* cmp_un (sig) cmp_ngle */ 1104 IEEE754_CUN, /* cmp_un (sig) cmp_ngle */
1109 IEEE754_CEQ, /* cmp_eq (sig) cmp_seq */ 1105 IEEE754_CEQ, /* cmp_eq (sig) cmp_seq */
1110 IEEE754_CEQ | IEEE754_CUN, /* cmp_ueq (sig) cmp_ngl */ 1106 IEEE754_CEQ | IEEE754_CUN, /* cmp_ueq (sig) cmp_ngl */
1111 IEEE754_CLT, /* cmp_olt (sig) cmp_lt */ 1107 IEEE754_CLT, /* cmp_olt (sig) cmp_lt */
1112 IEEE754_CLT | IEEE754_CUN, /* cmp_ult (sig) cmp_nge */ 1108 IEEE754_CLT | IEEE754_CUN, /* cmp_ult (sig) cmp_nge */
1113 IEEE754_CLT | IEEE754_CEQ, /* cmp_ole (sig) cmp_le */ 1109 IEEE754_CLT | IEEE754_CEQ, /* cmp_ole (sig) cmp_le */
1114 IEEE754_CLT | IEEE754_CEQ | IEEE754_CUN, /* cmp_ule (sig) cmp_ngt */ 1110 IEEE754_CLT | IEEE754_CEQ | IEEE754_CUN, /* cmp_ule (sig) cmp_ngt */
1115 }; 1111 };
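/*
 * Illustrative sketch (not part of this patch): how fpu_emu() below turns a
 * compare function code from the 48..63 range into an ieee754 predicate via
 * cmptab. The wrapper name is hypothetical; c.olt.s, for instance, is
 * fcmp_op + 4 and selects IEEE754_CLT with the signalling bit clear.
 */
static int example_sp_compare(union ieee754sp fs, union ieee754sp ft,
			      unsigned int func)
{
	unsigned int cmpop = func - fcmp_op;

	return ieee754sp_cmp(fs, ft,
			     cmptab[cmpop & 0x7],	/* predicate bits */
			     cmpop & 0x8);		/* signalling compare? */
}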
1116 1112
1117 1113
1118 /* 1114 /*
1119 * Additional MIPS4 instructions 1115 * Additional MIPS4 instructions
1120 */ 1116 */
1121 1117
1122 #define DEF3OP(name, p, f1, f2, f3) \ 1118 #define DEF3OP(name, p, f1, f2, f3) \
1123 static union ieee754##p fpemu_##p##_##name(union ieee754##p r, \ 1119 static union ieee754##p fpemu_##p##_##name(union ieee754##p r, \
1124 union ieee754##p s, union ieee754##p t) \ 1120 union ieee754##p s, union ieee754##p t) \
1125 { \ 1121 { \
1126 struct _ieee754_csr ieee754_csr_save; \ 1122 struct _ieee754_csr ieee754_csr_save; \
1127 s = f1(s, t); \ 1123 s = f1(s, t); \
1128 ieee754_csr_save = ieee754_csr; \ 1124 ieee754_csr_save = ieee754_csr; \
1129 s = f2(s, r); \ 1125 s = f2(s, r); \
1130 ieee754_csr_save.cx |= ieee754_csr.cx; \ 1126 ieee754_csr_save.cx |= ieee754_csr.cx; \
1131 ieee754_csr_save.sx |= ieee754_csr.sx; \ 1127 ieee754_csr_save.sx |= ieee754_csr.sx; \
1132 s = f3(s); \ 1128 s = f3(s); \
1133 ieee754_csr.cx |= ieee754_csr_save.cx; \ 1129 ieee754_csr.cx |= ieee754_csr_save.cx; \
1134 ieee754_csr.sx |= ieee754_csr_save.sx; \ 1130 ieee754_csr.sx |= ieee754_csr_save.sx; \
1135 return s; \ 1131 return s; \
1136 } 1132 }
1137 1133
1138 static union ieee754dp fpemu_dp_recip(union ieee754dp d) 1134 static union ieee754dp fpemu_dp_recip(union ieee754dp d)
1139 { 1135 {
1140 return ieee754dp_div(ieee754dp_one(0), d); 1136 return ieee754dp_div(ieee754dp_one(0), d);
1141 } 1137 }
1142 1138
1143 static union ieee754dp fpemu_dp_rsqrt(union ieee754dp d) 1139 static union ieee754dp fpemu_dp_rsqrt(union ieee754dp d)
1144 { 1140 {
1145 return ieee754dp_div(ieee754dp_one(0), ieee754dp_sqrt(d)); 1141 return ieee754dp_div(ieee754dp_one(0), ieee754dp_sqrt(d));
1146 } 1142 }
1147 1143
1148 static union ieee754sp fpemu_sp_recip(union ieee754sp s) 1144 static union ieee754sp fpemu_sp_recip(union ieee754sp s)
1149 { 1145 {
1150 return ieee754sp_div(ieee754sp_one(0), s); 1146 return ieee754sp_div(ieee754sp_one(0), s);
1151 } 1147 }
1152 1148
1153 static union ieee754sp fpemu_sp_rsqrt(union ieee754sp s) 1149 static union ieee754sp fpemu_sp_rsqrt(union ieee754sp s)
1154 { 1150 {
1155 return ieee754sp_div(ieee754sp_one(0), ieee754sp_sqrt(s)); 1151 return ieee754sp_div(ieee754sp_one(0), ieee754sp_sqrt(s));
1156 } 1152 }
1157 1153
1158 DEF3OP(madd, sp, ieee754sp_mul, ieee754sp_add, ); 1154 DEF3OP(madd, sp, ieee754sp_mul, ieee754sp_add, );
1159 DEF3OP(msub, sp, ieee754sp_mul, ieee754sp_sub, ); 1155 DEF3OP(msub, sp, ieee754sp_mul, ieee754sp_sub, );
1160 DEF3OP(nmadd, sp, ieee754sp_mul, ieee754sp_add, ieee754sp_neg); 1156 DEF3OP(nmadd, sp, ieee754sp_mul, ieee754sp_add, ieee754sp_neg);
1161 DEF3OP(nmsub, sp, ieee754sp_mul, ieee754sp_sub, ieee754sp_neg); 1157 DEF3OP(nmsub, sp, ieee754sp_mul, ieee754sp_sub, ieee754sp_neg);
1162 DEF3OP(madd, dp, ieee754dp_mul, ieee754dp_add, ); 1158 DEF3OP(madd, dp, ieee754dp_mul, ieee754dp_add, );
1163 DEF3OP(msub, dp, ieee754dp_mul, ieee754dp_sub, ); 1159 DEF3OP(msub, dp, ieee754dp_mul, ieee754dp_sub, );
1164 DEF3OP(nmadd, dp, ieee754dp_mul, ieee754dp_add, ieee754dp_neg); 1160 DEF3OP(nmadd, dp, ieee754dp_mul, ieee754dp_add, ieee754dp_neg);
1165 DEF3OP(nmsub, dp, ieee754dp_mul, ieee754dp_sub, ieee754dp_neg); 1161 DEF3OP(nmsub, dp, ieee754dp_mul, ieee754dp_sub, ieee754dp_neg);
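/*
 * Illustrative sketch (not part of this patch): roughly what
 * DEF3OP(madd, sp, ...) above expands to. fpemu_sp_madd(r, s, t) computes
 * (s * t) + r while merging the exception bits raised by both intermediate
 * steps; the nmadd/nmsub variants just add a negation as the final f3 step.
 * The function name below is hypothetical to avoid clashing with the
 * generated one.
 */
static union ieee754sp example_sp_madd(union ieee754sp r, union ieee754sp s,
				       union ieee754sp t)
{
	struct _ieee754_csr csr_save;

	s = ieee754sp_mul(s, t);
	csr_save = ieee754_csr;
	s = ieee754sp_add(s, r);
	csr_save.cx |= ieee754_csr.cx;	/* accumulate cause bits */
	csr_save.sx |= ieee754_csr.sx;	/* accumulate sticky bits */
	/* f3 is empty for madd/msub, so there is no final transform here */
	ieee754_csr.cx |= csr_save.cx;
	ieee754_csr.sx |= csr_save.sx;
	return s;
}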
1166 1162
1167 static int fpux_emu(struct pt_regs *xcp, struct mips_fpu_struct *ctx, 1163 static int fpux_emu(struct pt_regs *xcp, struct mips_fpu_struct *ctx,
1168 mips_instruction ir, void *__user *fault_addr) 1164 mips_instruction ir, void *__user *fault_addr)
1169 { 1165 {
1170 unsigned rcsr = 0; /* resulting csr */ 1166 unsigned rcsr = 0; /* resulting csr */
1171 1167
1172 MIPS_FPU_EMU_INC_STATS(cp1xops); 1168 MIPS_FPU_EMU_INC_STATS(cp1xops);
1173 1169
1174 switch (MIPSInst_FMA_FFMT(ir)) { 1170 switch (MIPSInst_FMA_FFMT(ir)) {
1175 case s_fmt:{ /* 0 */ 1171 case s_fmt:{ /* 0 */
1176 1172
1177 union ieee754sp(*handler) (union ieee754sp, union ieee754sp, union ieee754sp); 1173 union ieee754sp(*handler) (union ieee754sp, union ieee754sp, union ieee754sp);
1178 union ieee754sp fd, fr, fs, ft; 1174 union ieee754sp fd, fr, fs, ft;
1179 u32 __user *va; 1175 u32 __user *va;
1180 u32 val; 1176 u32 val;
1181 1177
1182 switch (MIPSInst_FUNC(ir)) { 1178 switch (MIPSInst_FUNC(ir)) {
1183 case lwxc1_op: 1179 case lwxc1_op:
1184 va = (void __user *) (xcp->regs[MIPSInst_FR(ir)] + 1180 va = (void __user *) (xcp->regs[MIPSInst_FR(ir)] +
1185 xcp->regs[MIPSInst_FT(ir)]); 1181 xcp->regs[MIPSInst_FT(ir)]);
1186 1182
1187 MIPS_FPU_EMU_INC_STATS(loads); 1183 MIPS_FPU_EMU_INC_STATS(loads);
1188 if (!access_ok(VERIFY_READ, va, sizeof(u32))) { 1184 if (!access_ok(VERIFY_READ, va, sizeof(u32))) {
1189 MIPS_FPU_EMU_INC_STATS(errors); 1185 MIPS_FPU_EMU_INC_STATS(errors);
1190 *fault_addr = va; 1186 *fault_addr = va;
1191 return SIGBUS; 1187 return SIGBUS;
1192 } 1188 }
1193 if (__get_user(val, va)) { 1189 if (__get_user(val, va)) {
1194 MIPS_FPU_EMU_INC_STATS(errors); 1190 MIPS_FPU_EMU_INC_STATS(errors);
1195 *fault_addr = va; 1191 *fault_addr = va;
1196 return SIGSEGV; 1192 return SIGSEGV;
1197 } 1193 }
1198 SITOREG(val, MIPSInst_FD(ir)); 1194 SITOREG(val, MIPSInst_FD(ir));
1199 break; 1195 break;
1200 1196
1201 case swxc1_op: 1197 case swxc1_op:
1202 va = (void __user *) (xcp->regs[MIPSInst_FR(ir)] + 1198 va = (void __user *) (xcp->regs[MIPSInst_FR(ir)] +
1203 xcp->regs[MIPSInst_FT(ir)]); 1199 xcp->regs[MIPSInst_FT(ir)]);
1204 1200
1205 MIPS_FPU_EMU_INC_STATS(stores); 1201 MIPS_FPU_EMU_INC_STATS(stores);
1206 1202
1207 SIFROMREG(val, MIPSInst_FS(ir)); 1203 SIFROMREG(val, MIPSInst_FS(ir));
1208 if (!access_ok(VERIFY_WRITE, va, sizeof(u32))) { 1204 if (!access_ok(VERIFY_WRITE, va, sizeof(u32))) {
1209 MIPS_FPU_EMU_INC_STATS(errors); 1205 MIPS_FPU_EMU_INC_STATS(errors);
1210 *fault_addr = va; 1206 *fault_addr = va;
1211 return SIGBUS; 1207 return SIGBUS;
1212 } 1208 }
1213 if (put_user(val, va)) { 1209 if (put_user(val, va)) {
1214 MIPS_FPU_EMU_INC_STATS(errors); 1210 MIPS_FPU_EMU_INC_STATS(errors);
1215 *fault_addr = va; 1211 *fault_addr = va;
1216 return SIGSEGV; 1212 return SIGSEGV;
1217 } 1213 }
1218 break; 1214 break;
1219 1215
1220 case madd_s_op: 1216 case madd_s_op:
1221 handler = fpemu_sp_madd; 1217 handler = fpemu_sp_madd;
1222 goto scoptop; 1218 goto scoptop;
1223 case msub_s_op: 1219 case msub_s_op:
1224 handler = fpemu_sp_msub; 1220 handler = fpemu_sp_msub;
1225 goto scoptop; 1221 goto scoptop;
1226 case nmadd_s_op: 1222 case nmadd_s_op:
1227 handler = fpemu_sp_nmadd; 1223 handler = fpemu_sp_nmadd;
1228 goto scoptop; 1224 goto scoptop;
1229 case nmsub_s_op: 1225 case nmsub_s_op:
1230 handler = fpemu_sp_nmsub; 1226 handler = fpemu_sp_nmsub;
1231 goto scoptop; 1227 goto scoptop;
1232 1228
1233 scoptop: 1229 scoptop:
1234 SPFROMREG(fr, MIPSInst_FR(ir)); 1230 SPFROMREG(fr, MIPSInst_FR(ir));
1235 SPFROMREG(fs, MIPSInst_FS(ir)); 1231 SPFROMREG(fs, MIPSInst_FS(ir));
1236 SPFROMREG(ft, MIPSInst_FT(ir)); 1232 SPFROMREG(ft, MIPSInst_FT(ir));
1237 fd = (*handler) (fr, fs, ft); 1233 fd = (*handler) (fr, fs, ft);
1238 SPTOREG(fd, MIPSInst_FD(ir)); 1234 SPTOREG(fd, MIPSInst_FD(ir));
1239 1235
1240 copcsr: 1236 copcsr:
1241 if (ieee754_cxtest(IEEE754_INEXACT)) { 1237 if (ieee754_cxtest(IEEE754_INEXACT)) {
1242 MIPS_FPU_EMU_INC_STATS(ieee754_inexact); 1238 MIPS_FPU_EMU_INC_STATS(ieee754_inexact);
1243 rcsr |= FPU_CSR_INE_X | FPU_CSR_INE_S; 1239 rcsr |= FPU_CSR_INE_X | FPU_CSR_INE_S;
1244 } 1240 }
1245 if (ieee754_cxtest(IEEE754_UNDERFLOW)) { 1241 if (ieee754_cxtest(IEEE754_UNDERFLOW)) {
1246 MIPS_FPU_EMU_INC_STATS(ieee754_underflow); 1242 MIPS_FPU_EMU_INC_STATS(ieee754_underflow);
1247 rcsr |= FPU_CSR_UDF_X | FPU_CSR_UDF_S; 1243 rcsr |= FPU_CSR_UDF_X | FPU_CSR_UDF_S;
1248 } 1244 }
1249 if (ieee754_cxtest(IEEE754_OVERFLOW)) { 1245 if (ieee754_cxtest(IEEE754_OVERFLOW)) {
1250 MIPS_FPU_EMU_INC_STATS(ieee754_overflow); 1246 MIPS_FPU_EMU_INC_STATS(ieee754_overflow);
1251 rcsr |= FPU_CSR_OVF_X | FPU_CSR_OVF_S; 1247 rcsr |= FPU_CSR_OVF_X | FPU_CSR_OVF_S;
1252 } 1248 }
1253 if (ieee754_cxtest(IEEE754_INVALID_OPERATION)) { 1249 if (ieee754_cxtest(IEEE754_INVALID_OPERATION)) {
1254 MIPS_FPU_EMU_INC_STATS(ieee754_invalidop); 1250 MIPS_FPU_EMU_INC_STATS(ieee754_invalidop);
1255 rcsr |= FPU_CSR_INV_X | FPU_CSR_INV_S; 1251 rcsr |= FPU_CSR_INV_X | FPU_CSR_INV_S;
1256 } 1252 }
1257 1253
1258 ctx->fcr31 = (ctx->fcr31 & ~FPU_CSR_ALL_X) | rcsr; 1254 ctx->fcr31 = (ctx->fcr31 & ~FPU_CSR_ALL_X) | rcsr;
1259 if ((ctx->fcr31 >> 5) & ctx->fcr31 & FPU_CSR_ALL_E) { 1255 if ((ctx->fcr31 >> 5) & ctx->fcr31 & FPU_CSR_ALL_E) {
1260 /*printk ("SIGFPE: FPU csr = %08x\n", 1256 /*printk ("SIGFPE: FPU csr = %08x\n",
1261 ctx->fcr31); */ 1257 ctx->fcr31); */
1262 return SIGFPE; 1258 return SIGFPE;
1263 } 1259 }
1264 1260
1265 break; 1261 break;
1266 1262
1267 default: 1263 default:
1268 return SIGILL; 1264 return SIGILL;
1269 } 1265 }
1270 break; 1266 break;
1271 } 1267 }
1272 1268
1273 case d_fmt:{ /* 1 */ 1269 case d_fmt:{ /* 1 */
1274 union ieee754dp(*handler) (union ieee754dp, union ieee754dp, union ieee754dp); 1270 union ieee754dp(*handler) (union ieee754dp, union ieee754dp, union ieee754dp);
1275 union ieee754dp fd, fr, fs, ft; 1271 union ieee754dp fd, fr, fs, ft;
1276 u64 __user *va; 1272 u64 __user *va;
1277 u64 val; 1273 u64 val;
1278 1274
1279 switch (MIPSInst_FUNC(ir)) { 1275 switch (MIPSInst_FUNC(ir)) {
1280 case ldxc1_op: 1276 case ldxc1_op:
1281 va = (void __user *) (xcp->regs[MIPSInst_FR(ir)] + 1277 va = (void __user *) (xcp->regs[MIPSInst_FR(ir)] +
1282 xcp->regs[MIPSInst_FT(ir)]); 1278 xcp->regs[MIPSInst_FT(ir)]);
1283 1279
1284 MIPS_FPU_EMU_INC_STATS(loads); 1280 MIPS_FPU_EMU_INC_STATS(loads);
1285 if (!access_ok(VERIFY_READ, va, sizeof(u64))) { 1281 if (!access_ok(VERIFY_READ, va, sizeof(u64))) {
1286 MIPS_FPU_EMU_INC_STATS(errors); 1282 MIPS_FPU_EMU_INC_STATS(errors);
1287 *fault_addr = va; 1283 *fault_addr = va;
1288 return SIGBUS; 1284 return SIGBUS;
1289 } 1285 }
1290 if (__get_user(val, va)) { 1286 if (__get_user(val, va)) {
1291 MIPS_FPU_EMU_INC_STATS(errors); 1287 MIPS_FPU_EMU_INC_STATS(errors);
1292 *fault_addr = va; 1288 *fault_addr = va;
1293 return SIGSEGV; 1289 return SIGSEGV;
1294 } 1290 }
1295 DITOREG(val, MIPSInst_FD(ir)); 1291 DITOREG(val, MIPSInst_FD(ir));
1296 break; 1292 break;
1297 1293
1298 case sdxc1_op: 1294 case sdxc1_op:
1299 va = (void __user *) (xcp->regs[MIPSInst_FR(ir)] + 1295 va = (void __user *) (xcp->regs[MIPSInst_FR(ir)] +
1300 xcp->regs[MIPSInst_FT(ir)]); 1296 xcp->regs[MIPSInst_FT(ir)]);
1301 1297
1302 MIPS_FPU_EMU_INC_STATS(stores); 1298 MIPS_FPU_EMU_INC_STATS(stores);
1303 DIFROMREG(val, MIPSInst_FS(ir)); 1299 DIFROMREG(val, MIPSInst_FS(ir));
1304 if (!access_ok(VERIFY_WRITE, va, sizeof(u64))) { 1300 if (!access_ok(VERIFY_WRITE, va, sizeof(u64))) {
1305 MIPS_FPU_EMU_INC_STATS(errors); 1301 MIPS_FPU_EMU_INC_STATS(errors);
1306 *fault_addr = va; 1302 *fault_addr = va;
1307 return SIGBUS; 1303 return SIGBUS;
1308 } 1304 }
1309 if (__put_user(val, va)) { 1305 if (__put_user(val, va)) {
1310 MIPS_FPU_EMU_INC_STATS(errors); 1306 MIPS_FPU_EMU_INC_STATS(errors);
1311 *fault_addr = va; 1307 *fault_addr = va;
1312 return SIGSEGV; 1308 return SIGSEGV;
1313 } 1309 }
1314 break; 1310 break;
1315 1311
1316 case madd_d_op: 1312 case madd_d_op:
1317 handler = fpemu_dp_madd; 1313 handler = fpemu_dp_madd;
1318 goto dcoptop; 1314 goto dcoptop;
1319 case msub_d_op: 1315 case msub_d_op:
1320 handler = fpemu_dp_msub; 1316 handler = fpemu_dp_msub;
1321 goto dcoptop; 1317 goto dcoptop;
1322 case nmadd_d_op: 1318 case nmadd_d_op:
1323 handler = fpemu_dp_nmadd; 1319 handler = fpemu_dp_nmadd;
1324 goto dcoptop; 1320 goto dcoptop;
1325 case nmsub_d_op: 1321 case nmsub_d_op:
1326 handler = fpemu_dp_nmsub; 1322 handler = fpemu_dp_nmsub;
1327 goto dcoptop; 1323 goto dcoptop;
1328 1324
1329 dcoptop: 1325 dcoptop:
1330 DPFROMREG(fr, MIPSInst_FR(ir)); 1326 DPFROMREG(fr, MIPSInst_FR(ir));
1331 DPFROMREG(fs, MIPSInst_FS(ir)); 1327 DPFROMREG(fs, MIPSInst_FS(ir));
1332 DPFROMREG(ft, MIPSInst_FT(ir)); 1328 DPFROMREG(ft, MIPSInst_FT(ir));
1333 fd = (*handler) (fr, fs, ft); 1329 fd = (*handler) (fr, fs, ft);
1334 DPTOREG(fd, MIPSInst_FD(ir)); 1330 DPTOREG(fd, MIPSInst_FD(ir));
1335 goto copcsr; 1331 goto copcsr;
1336 1332
1337 default: 1333 default:
1338 return SIGILL; 1334 return SIGILL;
1339 } 1335 }
1340 break; 1336 break;
1341 } 1337 }
1342 1338
1343 case 0x3: 1339 case 0x3:
1344 if (MIPSInst_FUNC(ir) != pfetch_op) 1340 if (MIPSInst_FUNC(ir) != pfetch_op)
1345 return SIGILL; 1341 return SIGILL;
1346 1342
1347 /* ignore prefx operation */ 1343 /* ignore prefx operation */
1348 break; 1344 break;
1349 1345
1350 default: 1346 default:
1351 return SIGILL; 1347 return SIGILL;
1352 } 1348 }
1353 1349
1354 return 0; 1350 return 0;
1355 } 1351 }
1356 1352
1357 1353
1358 1354
1359 /* 1355 /*
1360 * Emulate a single COP1 arithmetic instruction. 1356 * Emulate a single COP1 arithmetic instruction.
1361 */ 1357 */
1362 static int fpu_emu(struct pt_regs *xcp, struct mips_fpu_struct *ctx, 1358 static int fpu_emu(struct pt_regs *xcp, struct mips_fpu_struct *ctx,
1363 mips_instruction ir) 1359 mips_instruction ir)
1364 { 1360 {
1365 int rfmt; /* resulting format */ 1361 int rfmt; /* resulting format */
1366 unsigned rcsr = 0; /* resulting csr */ 1362 unsigned rcsr = 0; /* resulting csr */
1367 unsigned int oldrm; 1363 unsigned int oldrm;
1368 unsigned int cbit; 1364 unsigned int cbit;
1369 unsigned cond; 1365 unsigned cond;
1370 union { 1366 union {
1371 union ieee754dp d; 1367 union ieee754dp d;
1372 union ieee754sp s; 1368 union ieee754sp s;
1373 int w; 1369 int w;
1374 s64 l; 1370 s64 l;
1375 } rv; /* resulting value */ 1371 } rv; /* resulting value */
1376 u64 bits; 1372 u64 bits;
1377 1373
1378 MIPS_FPU_EMU_INC_STATS(cp1ops); 1374 MIPS_FPU_EMU_INC_STATS(cp1ops);
1379 switch (rfmt = (MIPSInst_FFMT(ir) & 0xf)) { 1375 switch (rfmt = (MIPSInst_FFMT(ir) & 0xf)) {
1380 case s_fmt: { /* 0 */ 1376 case s_fmt: { /* 0 */
1381 union { 1377 union {
1382 union ieee754sp(*b) (union ieee754sp, union ieee754sp); 1378 union ieee754sp(*b) (union ieee754sp, union ieee754sp);
1383 union ieee754sp(*u) (union ieee754sp); 1379 union ieee754sp(*u) (union ieee754sp);
1384 } handler; 1380 } handler;
1385 union ieee754sp fs, ft; 1381 union ieee754sp fs, ft;
1386 1382
1387 switch (MIPSInst_FUNC(ir)) { 1383 switch (MIPSInst_FUNC(ir)) {
1388 /* binary ops */ 1384 /* binary ops */
1389 case fadd_op: 1385 case fadd_op:
1390 handler.b = ieee754sp_add; 1386 handler.b = ieee754sp_add;
1391 goto scopbop; 1387 goto scopbop;
1392 case fsub_op: 1388 case fsub_op:
1393 handler.b = ieee754sp_sub; 1389 handler.b = ieee754sp_sub;
1394 goto scopbop; 1390 goto scopbop;
1395 case fmul_op: 1391 case fmul_op:
1396 handler.b = ieee754sp_mul; 1392 handler.b = ieee754sp_mul;
1397 goto scopbop; 1393 goto scopbop;
1398 case fdiv_op: 1394 case fdiv_op:
1399 handler.b = ieee754sp_div; 1395 handler.b = ieee754sp_div;
1400 goto scopbop; 1396 goto scopbop;
1401 1397
1402 /* unary ops */ 1398 /* unary ops */
1403 case fsqrt_op: 1399 case fsqrt_op:
1404 if (!cpu_has_mips_4_5_r) 1400 if (!cpu_has_mips_4_5_r)
1405 return SIGILL; 1401 return SIGILL;
1406 1402
1407 handler.u = ieee754sp_sqrt; 1403 handler.u = ieee754sp_sqrt;
1408 goto scopuop; 1404 goto scopuop;
1409 1405
1410 /* 1406 /*
1411 * Note that on some MIPS IV implementations such as the 1407 * Note that on some MIPS IV implementations such as the
1412 * R5000 and R8000 the FSQRT and FRECIP instructions do not 1408 * R5000 and R8000 the FSQRT and FRECIP instructions do not
1413 * achieve full IEEE-754 accuracy - however this emulator does. 1409 * achieve full IEEE-754 accuracy - however this emulator does.
1414 */ 1410 */
1415 case frsqrt_op: 1411 case frsqrt_op:
1416 if (!cpu_has_mips_4_5_r2) 1412 if (!cpu_has_mips_4_5_r2)
1417 return SIGILL; 1413 return SIGILL;
1418 1414
1419 handler.u = fpemu_sp_rsqrt; 1415 handler.u = fpemu_sp_rsqrt;
1420 goto scopuop; 1416 goto scopuop;
1421 1417
1422 case frecip_op: 1418 case frecip_op:
1423 if (!cpu_has_mips_4_5_r2) 1419 if (!cpu_has_mips_4_5_r2)
1424 return SIGILL; 1420 return SIGILL;
1425 1421
1426 handler.u = fpemu_sp_recip; 1422 handler.u = fpemu_sp_recip;
1427 goto scopuop; 1423 goto scopuop;
1428 1424
1429 case fmovc_op: 1425 case fmovc_op:
1430 if (!cpu_has_mips_4_5_r) 1426 if (!cpu_has_mips_4_5_r)
1431 return SIGILL; 1427 return SIGILL;
1432 1428
1433 cond = fpucondbit[MIPSInst_FT(ir) >> 2]; 1429 cond = fpucondbit[MIPSInst_FT(ir) >> 2];
1434 if (((ctx->fcr31 & cond) != 0) != 1430 if (((ctx->fcr31 & cond) != 0) !=
1435 ((MIPSInst_FT(ir) & 1) != 0)) 1431 ((MIPSInst_FT(ir) & 1) != 0))
1436 return 0; 1432 return 0;
1437 SPFROMREG(rv.s, MIPSInst_FS(ir)); 1433 SPFROMREG(rv.s, MIPSInst_FS(ir));
1438 break; 1434 break;
1439 1435
1440 case fmovz_op: 1436 case fmovz_op:
1441 if (!cpu_has_mips_4_5_r) 1437 if (!cpu_has_mips_4_5_r)
1442 return SIGILL; 1438 return SIGILL;
1443 1439
1444 if (xcp->regs[MIPSInst_FT(ir)] != 0) 1440 if (xcp->regs[MIPSInst_FT(ir)] != 0)
1445 return 0; 1441 return 0;
1446 SPFROMREG(rv.s, MIPSInst_FS(ir)); 1442 SPFROMREG(rv.s, MIPSInst_FS(ir));
1447 break; 1443 break;
1448 1444
1449 case fmovn_op: 1445 case fmovn_op:
1450 if (!cpu_has_mips_4_5_r) 1446 if (!cpu_has_mips_4_5_r)
1451 return SIGILL; 1447 return SIGILL;
1452 1448
1453 if (xcp->regs[MIPSInst_FT(ir)] == 0) 1449 if (xcp->regs[MIPSInst_FT(ir)] == 0)
1454 return 0; 1450 return 0;
1455 SPFROMREG(rv.s, MIPSInst_FS(ir)); 1451 SPFROMREG(rv.s, MIPSInst_FS(ir));
1456 break; 1452 break;
1457 1453
1458 case fabs_op: 1454 case fabs_op:
1459 handler.u = ieee754sp_abs; 1455 handler.u = ieee754sp_abs;
1460 goto scopuop; 1456 goto scopuop;
1461 1457
1462 case fneg_op: 1458 case fneg_op:
1463 handler.u = ieee754sp_neg; 1459 handler.u = ieee754sp_neg;
1464 goto scopuop; 1460 goto scopuop;
1465 1461
1466 case fmov_op: 1462 case fmov_op:
1467 /* an easy one */ 1463 /* an easy one */
1468 SPFROMREG(rv.s, MIPSInst_FS(ir)); 1464 SPFROMREG(rv.s, MIPSInst_FS(ir));
1469 goto copcsr; 1465 goto copcsr;
1470 1466
1471 /* binary op on handler */ 1467 /* binary op on handler */
1472 scopbop: 1468 scopbop:
1473 SPFROMREG(fs, MIPSInst_FS(ir)); 1469 SPFROMREG(fs, MIPSInst_FS(ir));
1474 SPFROMREG(ft, MIPSInst_FT(ir)); 1470 SPFROMREG(ft, MIPSInst_FT(ir));
1475 1471
1476 rv.s = (*handler.b) (fs, ft); 1472 rv.s = (*handler.b) (fs, ft);
1477 goto copcsr; 1473 goto copcsr;
1478 scopuop: 1474 scopuop:
1479 SPFROMREG(fs, MIPSInst_FS(ir)); 1475 SPFROMREG(fs, MIPSInst_FS(ir));
1480 rv.s = (*handler.u) (fs); 1476 rv.s = (*handler.u) (fs);
1481 goto copcsr; 1477 goto copcsr;
1482 copcsr: 1478 copcsr:
1483 if (ieee754_cxtest(IEEE754_INEXACT)) { 1479 if (ieee754_cxtest(IEEE754_INEXACT)) {
1484 MIPS_FPU_EMU_INC_STATS(ieee754_inexact); 1480 MIPS_FPU_EMU_INC_STATS(ieee754_inexact);
1485 rcsr |= FPU_CSR_INE_X | FPU_CSR_INE_S; 1481 rcsr |= FPU_CSR_INE_X | FPU_CSR_INE_S;
1486 } 1482 }
1487 if (ieee754_cxtest(IEEE754_UNDERFLOW)) { 1483 if (ieee754_cxtest(IEEE754_UNDERFLOW)) {
1488 MIPS_FPU_EMU_INC_STATS(ieee754_underflow); 1484 MIPS_FPU_EMU_INC_STATS(ieee754_underflow);
1489 rcsr |= FPU_CSR_UDF_X | FPU_CSR_UDF_S; 1485 rcsr |= FPU_CSR_UDF_X | FPU_CSR_UDF_S;
1490 } 1486 }
1491 if (ieee754_cxtest(IEEE754_OVERFLOW)) { 1487 if (ieee754_cxtest(IEEE754_OVERFLOW)) {
1492 MIPS_FPU_EMU_INC_STATS(ieee754_overflow); 1488 MIPS_FPU_EMU_INC_STATS(ieee754_overflow);
1493 rcsr |= FPU_CSR_OVF_X | FPU_CSR_OVF_S; 1489 rcsr |= FPU_CSR_OVF_X | FPU_CSR_OVF_S;
1494 } 1490 }
1495 if (ieee754_cxtest(IEEE754_ZERO_DIVIDE)) { 1491 if (ieee754_cxtest(IEEE754_ZERO_DIVIDE)) {
1496 MIPS_FPU_EMU_INC_STATS(ieee754_zerodiv); 1492 MIPS_FPU_EMU_INC_STATS(ieee754_zerodiv);
1497 rcsr |= FPU_CSR_DIV_X | FPU_CSR_DIV_S; 1493 rcsr |= FPU_CSR_DIV_X | FPU_CSR_DIV_S;
1498 } 1494 }
1499 if (ieee754_cxtest(IEEE754_INVALID_OPERATION)) { 1495 if (ieee754_cxtest(IEEE754_INVALID_OPERATION)) {
1500 MIPS_FPU_EMU_INC_STATS(ieee754_invalidop); 1496 MIPS_FPU_EMU_INC_STATS(ieee754_invalidop);
1501 rcsr |= FPU_CSR_INV_X | FPU_CSR_INV_S; 1497 rcsr |= FPU_CSR_INV_X | FPU_CSR_INV_S;
1502 } 1498 }
1503 break; 1499 break;
1504 1500
1505 /* unary conv ops */ 1501 /* unary conv ops */
1506 case fcvts_op: 1502 case fcvts_op:
1507 return SIGILL; /* not defined */ 1503 return SIGILL; /* not defined */
1508 1504
1509 case fcvtd_op: 1505 case fcvtd_op:
1510 SPFROMREG(fs, MIPSInst_FS(ir)); 1506 SPFROMREG(fs, MIPSInst_FS(ir));
1511 rv.d = ieee754dp_fsp(fs); 1507 rv.d = ieee754dp_fsp(fs);
1512 rfmt = d_fmt; 1508 rfmt = d_fmt;
1513 goto copcsr; 1509 goto copcsr;
1514 1510
1515 case fcvtw_op: 1511 case fcvtw_op:
1516 SPFROMREG(fs, MIPSInst_FS(ir)); 1512 SPFROMREG(fs, MIPSInst_FS(ir));
1517 rv.w = ieee754sp_tint(fs); 1513 rv.w = ieee754sp_tint(fs);
1518 rfmt = w_fmt; 1514 rfmt = w_fmt;
1519 goto copcsr; 1515 goto copcsr;
1520 1516
1521 case fround_op: 1517 case fround_op:
1522 case ftrunc_op: 1518 case ftrunc_op:
1523 case fceil_op: 1519 case fceil_op:
1524 case ffloor_op: 1520 case ffloor_op:
1525 if (!cpu_has_mips_2_3_4_5 && !cpu_has_mips64) 1521 if (!cpu_has_mips_2_3_4_5 && !cpu_has_mips64)
1526 return SIGILL; 1522 return SIGILL;
1527 1523
1528 oldrm = ieee754_csr.rm; 1524 oldrm = ieee754_csr.rm;
1529 SPFROMREG(fs, MIPSInst_FS(ir)); 1525 SPFROMREG(fs, MIPSInst_FS(ir));
1530 ieee754_csr.rm = modeindex(MIPSInst_FUNC(ir)); 1526 ieee754_csr.rm = modeindex(MIPSInst_FUNC(ir));
1531 rv.w = ieee754sp_tint(fs); 1527 rv.w = ieee754sp_tint(fs);
1532 ieee754_csr.rm = oldrm; 1528 ieee754_csr.rm = oldrm;
1533 rfmt = w_fmt; 1529 rfmt = w_fmt;
1534 goto copcsr; 1530 goto copcsr;
1535 1531
1536 case fcvtl_op: 1532 case fcvtl_op:
1537 if (!cpu_has_mips_3_4_5 && !cpu_has_mips64) 1533 if (!cpu_has_mips_3_4_5 && !cpu_has_mips64)
1538 return SIGILL; 1534 return SIGILL;
1539 1535
1540 SPFROMREG(fs, MIPSInst_FS(ir)); 1536 SPFROMREG(fs, MIPSInst_FS(ir));
1541 rv.l = ieee754sp_tlong(fs); 1537 rv.l = ieee754sp_tlong(fs);
1542 rfmt = l_fmt; 1538 rfmt = l_fmt;
1543 goto copcsr; 1539 goto copcsr;
1544 1540
1545 case froundl_op: 1541 case froundl_op:
1546 case ftruncl_op: 1542 case ftruncl_op:
1547 case fceill_op: 1543 case fceill_op:
1548 case ffloorl_op: 1544 case ffloorl_op:
1549 if (!cpu_has_mips_3_4_5 && !cpu_has_mips64) 1545 if (!cpu_has_mips_3_4_5 && !cpu_has_mips64)
1550 return SIGILL; 1546 return SIGILL;
1551 1547
1552 oldrm = ieee754_csr.rm; 1548 oldrm = ieee754_csr.rm;
1553 SPFROMREG(fs, MIPSInst_FS(ir)); 1549 SPFROMREG(fs, MIPSInst_FS(ir));
1554 ieee754_csr.rm = modeindex(MIPSInst_FUNC(ir)); 1550 ieee754_csr.rm = modeindex(MIPSInst_FUNC(ir));
1555 rv.l = ieee754sp_tlong(fs); 1551 rv.l = ieee754sp_tlong(fs);
1556 ieee754_csr.rm = oldrm; 1552 ieee754_csr.rm = oldrm;
1557 rfmt = l_fmt; 1553 rfmt = l_fmt;
1558 goto copcsr; 1554 goto copcsr;
1559 1555
1560 default: 1556 default:
1561 if (MIPSInst_FUNC(ir) >= fcmp_op) { 1557 if (MIPSInst_FUNC(ir) >= fcmp_op) {
1562 unsigned cmpop = MIPSInst_FUNC(ir) - fcmp_op; 1558 unsigned cmpop = MIPSInst_FUNC(ir) - fcmp_op;
1563 union ieee754sp fs, ft; 1559 union ieee754sp fs, ft;
1564 1560
1565 SPFROMREG(fs, MIPSInst_FS(ir)); 1561 SPFROMREG(fs, MIPSInst_FS(ir));
1566 SPFROMREG(ft, MIPSInst_FT(ir)); 1562 SPFROMREG(ft, MIPSInst_FT(ir));
1567 rv.w = ieee754sp_cmp(fs, ft, 1563 rv.w = ieee754sp_cmp(fs, ft,
1568 cmptab[cmpop & 0x7], cmpop & 0x8); 1564 cmptab[cmpop & 0x7], cmpop & 0x8);
1569 rfmt = -1; 1565 rfmt = -1;
1570 if ((cmpop & 0x8) && ieee754_cxtest 1566 if ((cmpop & 0x8) && ieee754_cxtest
1571 (IEEE754_INVALID_OPERATION)) 1567 (IEEE754_INVALID_OPERATION))
1572 rcsr = FPU_CSR_INV_X | FPU_CSR_INV_S; 1568 rcsr = FPU_CSR_INV_X | FPU_CSR_INV_S;
1573 else 1569 else
1574 goto copcsr; 1570 goto copcsr;
1575 1571
1576 } else 1572 } else
1577 return SIGILL; 1573 return SIGILL;
1578 break; 1574 break;
1579 } 1575 }
1580 break; 1576 break;
1581 } 1577 }
1582 1578
1583 case d_fmt: { 1579 case d_fmt: {
1584 union ieee754dp fs, ft; 1580 union ieee754dp fs, ft;
1585 union { 1581 union {
1586 union ieee754dp(*b) (union ieee754dp, union ieee754dp); 1582 union ieee754dp(*b) (union ieee754dp, union ieee754dp);
1587 union ieee754dp(*u) (union ieee754dp); 1583 union ieee754dp(*u) (union ieee754dp);
1588 } handler; 1584 } handler;
1589 1585
1590 switch (MIPSInst_FUNC(ir)) { 1586 switch (MIPSInst_FUNC(ir)) {
1591 /* binary ops */ 1587 /* binary ops */
1592 case fadd_op: 1588 case fadd_op:
1593 handler.b = ieee754dp_add; 1589 handler.b = ieee754dp_add;
1594 goto dcopbop; 1590 goto dcopbop;
1595 case fsub_op: 1591 case fsub_op:
1596 handler.b = ieee754dp_sub; 1592 handler.b = ieee754dp_sub;
1597 goto dcopbop; 1593 goto dcopbop;
1598 case fmul_op: 1594 case fmul_op:
1599 handler.b = ieee754dp_mul; 1595 handler.b = ieee754dp_mul;
1600 goto dcopbop; 1596 goto dcopbop;
1601 case fdiv_op: 1597 case fdiv_op:
1602 handler.b = ieee754dp_div; 1598 handler.b = ieee754dp_div;
1603 goto dcopbop; 1599 goto dcopbop;
1604 1600
1605 /* unary ops */ 1601 /* unary ops */
1606 case fsqrt_op: 1602 case fsqrt_op:
1607 if (!cpu_has_mips_2_3_4_5_r) 1603 if (!cpu_has_mips_2_3_4_5_r)
1608 return SIGILL; 1604 return SIGILL;
1609 1605
1610 handler.u = ieee754dp_sqrt; 1606 handler.u = ieee754dp_sqrt;
1611 goto dcopuop; 1607 goto dcopuop;
1612 /* 1608 /*
1613 * Note that on some MIPS IV implementations such as the 1609 * Note that on some MIPS IV implementations such as the
1614 * R5000 and R8000 the FSQRT and FRECIP instructions do not 1610 * R5000 and R8000 the FSQRT and FRECIP instructions do not
1615 * achieve full IEEE-754 accuracy - however this emulator does. 1611 * achieve full IEEE-754 accuracy - however this emulator does.
1616 */ 1612 */
1617 case frsqrt_op: 1613 case frsqrt_op:
1618 if (!cpu_has_mips_4_5_r2) 1614 if (!cpu_has_mips_4_5_r2)
1619 return SIGILL; 1615 return SIGILL;
1620 1616
1621 handler.u = fpemu_dp_rsqrt; 1617 handler.u = fpemu_dp_rsqrt;
1622 goto dcopuop; 1618 goto dcopuop;
1623 case frecip_op: 1619 case frecip_op:
1624 if (!cpu_has_mips_4_5_r2) 1620 if (!cpu_has_mips_4_5_r2)
1625 return SIGILL; 1621 return SIGILL;
1626 1622
1627 handler.u = fpemu_dp_recip; 1623 handler.u = fpemu_dp_recip;
1628 goto dcopuop; 1624 goto dcopuop;
1629 case fmovc_op: 1625 case fmovc_op:
1630 if (!cpu_has_mips_4_5_r) 1626 if (!cpu_has_mips_4_5_r)
1631 return SIGILL; 1627 return SIGILL;
1632 1628
1633 cond = fpucondbit[MIPSInst_FT(ir) >> 2]; 1629 cond = fpucondbit[MIPSInst_FT(ir) >> 2];
1634 if (((ctx->fcr31 & cond) != 0) != 1630 if (((ctx->fcr31 & cond) != 0) !=
1635 ((MIPSInst_FT(ir) & 1) != 0)) 1631 ((MIPSInst_FT(ir) & 1) != 0))
1636 return 0; 1632 return 0;
1637 DPFROMREG(rv.d, MIPSInst_FS(ir)); 1633 DPFROMREG(rv.d, MIPSInst_FS(ir));
1638 break; 1634 break;
1639 case fmovz_op: 1635 case fmovz_op:
1640 if (!cpu_has_mips_4_5_r) 1636 if (!cpu_has_mips_4_5_r)
1641 return SIGILL; 1637 return SIGILL;
1642 1638
1643 if (xcp->regs[MIPSInst_FT(ir)] != 0) 1639 if (xcp->regs[MIPSInst_FT(ir)] != 0)
1644 return 0; 1640 return 0;
1645 DPFROMREG(rv.d, MIPSInst_FS(ir)); 1641 DPFROMREG(rv.d, MIPSInst_FS(ir));
1646 break; 1642 break;
1647 case fmovn_op: 1643 case fmovn_op:
1648 if (!cpu_has_mips_4_5_r) 1644 if (!cpu_has_mips_4_5_r)
1649 return SIGILL; 1645 return SIGILL;
1650 1646
1651 if (xcp->regs[MIPSInst_FT(ir)] == 0) 1647 if (xcp->regs[MIPSInst_FT(ir)] == 0)
1652 return 0; 1648 return 0;
1653 DPFROMREG(rv.d, MIPSInst_FS(ir)); 1649 DPFROMREG(rv.d, MIPSInst_FS(ir));
1654 break; 1650 break;
1655 case fabs_op: 1651 case fabs_op:
1656 handler.u = ieee754dp_abs; 1652 handler.u = ieee754dp_abs;
1657 goto dcopuop; 1653 goto dcopuop;
1658 1654
1659 case fneg_op: 1655 case fneg_op:
1660 handler.u = ieee754dp_neg; 1656 handler.u = ieee754dp_neg;
1661 goto dcopuop; 1657 goto dcopuop;
1662 1658
1663 case fmov_op: 1659 case fmov_op:
1664 /* an easy one */ 1660 /* an easy one */
1665 DPFROMREG(rv.d, MIPSInst_FS(ir)); 1661 DPFROMREG(rv.d, MIPSInst_FS(ir));
1666 goto copcsr; 1662 goto copcsr;
1667 1663
1668 /* binary op on handler */ 1664 /* binary op on handler */
1669 dcopbop: 1665 dcopbop:
			DPFROMREG(fs, MIPSInst_FS(ir));
			DPFROMREG(ft, MIPSInst_FT(ir));

			rv.d = (*handler.b) (fs, ft);
			goto copcsr;
		dcopuop:
			DPFROMREG(fs, MIPSInst_FS(ir));
			rv.d = (*handler.u) (fs);
			goto copcsr;

			/*
			 * unary conv ops
			 */
		case fcvts_op:
			DPFROMREG(fs, MIPSInst_FS(ir));
			rv.s = ieee754sp_fdp(fs);
			rfmt = s_fmt;
			goto copcsr;

		case fcvtd_op:
			return SIGILL;	/* not defined */

		case fcvtw_op:
			DPFROMREG(fs, MIPSInst_FS(ir));
			rv.w = ieee754dp_tint(fs);	/* wrong */
			rfmt = w_fmt;
			goto copcsr;

		case fround_op:
		case ftrunc_op:
		case fceil_op:
		case ffloor_op:
			if (!cpu_has_mips_2_3_4_5_r)
				return SIGILL;

			oldrm = ieee754_csr.rm;
			DPFROMREG(fs, MIPSInst_FS(ir));
			ieee754_csr.rm = modeindex(MIPSInst_FUNC(ir));
			rv.w = ieee754dp_tint(fs);
			ieee754_csr.rm = oldrm;
			rfmt = w_fmt;
			goto copcsr;

		case fcvtl_op:
			if (!cpu_has_mips_3_4_5 && !cpu_has_mips64)
				return SIGILL;

			DPFROMREG(fs, MIPSInst_FS(ir));
			rv.l = ieee754dp_tlong(fs);
			rfmt = l_fmt;
			goto copcsr;

		case froundl_op:
		case ftruncl_op:
		case fceill_op:
		case ffloorl_op:
			if (!cpu_has_mips_3_4_5 && !cpu_has_mips64)
				return SIGILL;

			oldrm = ieee754_csr.rm;
			DPFROMREG(fs, MIPSInst_FS(ir));
			ieee754_csr.rm = modeindex(MIPSInst_FUNC(ir));
			rv.l = ieee754dp_tlong(fs);
			ieee754_csr.rm = oldrm;
			rfmt = l_fmt;
			goto copcsr;

		default:
			if (MIPSInst_FUNC(ir) >= fcmp_op) {
				unsigned cmpop = MIPSInst_FUNC(ir) - fcmp_op;
				union ieee754dp fs, ft;

				DPFROMREG(fs, MIPSInst_FS(ir));
				DPFROMREG(ft, MIPSInst_FT(ir));
				rv.w = ieee754dp_cmp(fs, ft,
					cmptab[cmpop & 0x7], cmpop & 0x8);
				rfmt = -1;
				if ((cmpop & 0x8)
				    &&
				    ieee754_cxtest
				    (IEEE754_INVALID_OPERATION))
					rcsr = FPU_CSR_INV_X | FPU_CSR_INV_S;
				else
					goto copcsr;

			}
			else {
				return SIGILL;
			}
			break;
		}
		break;

	case w_fmt:
		switch (MIPSInst_FUNC(ir)) {
		case fcvts_op:
			/* convert word to single precision real */
			SPFROMREG(fs, MIPSInst_FS(ir));
			rv.s = ieee754sp_fint(fs.bits);
			rfmt = s_fmt;
			goto copcsr;
		case fcvtd_op:
			/* convert word to double precision real */
			SPFROMREG(fs, MIPSInst_FS(ir));
			rv.d = ieee754dp_fint(fs.bits);
			rfmt = d_fmt;
			goto copcsr;
		default:
			return SIGILL;
		}
		break;
	}

	case l_fmt:

		if (!cpu_has_mips_3_4_5 && !cpu_has_mips64)
			return SIGILL;

		DIFROMREG(bits, MIPSInst_FS(ir));

		switch (MIPSInst_FUNC(ir)) {
		case fcvts_op:
			/* convert long to single precision real */
			rv.s = ieee754sp_flong(bits);
			rfmt = s_fmt;
			goto copcsr;
		case fcvtd_op:
			/* convert long to double precision real */
			rv.d = ieee754dp_flong(bits);
			rfmt = d_fmt;
			goto copcsr;
		default:
			return SIGILL;
		}
		break;

	default:
		return SIGILL;
	}

	/*
	 * Update the fpu CSR register for this operation.
	 * If an exception is required, generate a tidy SIGFPE exception,
	 * without updating the result register.
	 * Note: cause exception bits do not accumulate, they are rewritten
	 * for each op; only the flag/sticky bits accumulate.
	 */
	ctx->fcr31 = (ctx->fcr31 & ~FPU_CSR_ALL_X) | rcsr;
	if ((ctx->fcr31 >> 5) & ctx->fcr31 & FPU_CSR_ALL_E) {
		/*printk ("SIGFPE: FPU csr = %08x\n",ctx->fcr31); */
		return SIGFPE;
	}

	/*
	 * Now we can safely write the result back to the register file.
	 */
	switch (rfmt) {
	case -1:

		if (cpu_has_mips_4_5_r)
			cbit = fpucondbit[MIPSInst_FD(ir) >> 2];
		else
			cbit = FPU_CSR_COND;
		if (rv.w)
			ctx->fcr31 |= cbit;
		else
			ctx->fcr31 &= ~cbit;
		break;

	case d_fmt:
		DPTOREG(rv.d, MIPSInst_FD(ir));
		break;
	case s_fmt:
		SPTOREG(rv.s, MIPSInst_FD(ir));
		break;
	case w_fmt:
		SITOREG(rv.w, MIPSInst_FD(ir));
		break;
	case l_fmt:
		if (!cpu_has_mips_3_4_5 && !cpu_has_mips64)
			return SIGILL;

		DITOREG(rv.l, MIPSInst_FD(ir));
		break;
	default:
		return SIGILL;
	}

	return 0;
}
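The cause/enable test above is compact enough to misread: each operation rewrites the FCSR cause field (only the sticky flags accumulate), and SIGFPE is raised only when some cause bit has its matching enable bit set. The snippet below is a minimal stand-alone sketch of that check, assuming the usual MIPS FCSR layout (enable bits 7..11, cause bits 12..17, so each cause bit sits exactly five positions above its enable bit); the mask value mirrors the kernel's FPU_CSR_ALL_E but is redefined here so the sketch is self-contained.

/* Hypothetical illustration of the fcr31 cause/enable check; not part of this commit. */
#include <stdint.h>

#define FCSR_ENABLES	0x00000f80u	/* enable bits 7..11, like FPU_CSR_ALL_E */

static int fcsr_wants_sigfpe(uint32_t fcr31)
{
	/*
	 * Shift the cause field down by 5 so each cause bit lines up with
	 * its enable bit, then keep only the pairs where both are set.
	 */
	return ((fcr31 >> 5) & fcr31 & FCSR_ENABLES) != 0;
}

/*
 * Example: an inexact cause (bit 12) together with the inexact enable
 * (bit 7) arms the trap; the cause bit alone does not:
 *
 *	fcsr_wants_sigfpe(0x00001080) == 1
 *	fcsr_wants_sigfpe(0x00001000) == 0
 */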

int fpu_emulator_cop1Handler(struct pt_regs *xcp, struct mips_fpu_struct *ctx,
			     int has_fpu, void *__user *fault_addr)
{
	unsigned long oldepc, prevepc;
	struct mm_decoded_insn dec_insn;
	u16 instr[4];
	u16 *instr_ptr;
	int sig = 0;

	oldepc = xcp->cp0_epc;
	do {
		prevepc = xcp->cp0_epc;

		if (get_isa16_mode(prevepc) && cpu_has_mmips) {
			/*
			 * Get next 2 microMIPS instructions and convert them
			 * into 32-bit instructions.
			 */
			if ((get_user(instr[0], (u16 __user *)msk_isa16_mode(xcp->cp0_epc))) ||
			    (get_user(instr[1], (u16 __user *)msk_isa16_mode(xcp->cp0_epc + 2))) ||
			    (get_user(instr[2], (u16 __user *)msk_isa16_mode(xcp->cp0_epc + 4))) ||
			    (get_user(instr[3], (u16 __user *)msk_isa16_mode(xcp->cp0_epc + 6)))) {
				MIPS_FPU_EMU_INC_STATS(errors);
				return SIGBUS;
			}
			instr_ptr = instr;

			/* Get first instruction. */
			if (mm_insn_16bit(*instr_ptr)) {
				/* Duplicate the half-word. */
				dec_insn.insn = (*instr_ptr << 16) |
					(*instr_ptr);
				/* 16-bit instruction. */
				dec_insn.pc_inc = 2;
				instr_ptr += 1;
			} else {
				dec_insn.insn = (*instr_ptr << 16) |
					*(instr_ptr+1);
				/* 32-bit instruction. */
				dec_insn.pc_inc = 4;
				instr_ptr += 2;
			}
			/* Get second instruction. */
			if (mm_insn_16bit(*instr_ptr)) {
				/* Duplicate the half-word. */
				dec_insn.next_insn = (*instr_ptr << 16) |
					(*instr_ptr);
				/* 16-bit instruction. */
				dec_insn.next_pc_inc = 2;
			} else {
				dec_insn.next_insn = (*instr_ptr << 16) |
					*(instr_ptr+1);
				/* 32-bit instruction. */
				dec_insn.next_pc_inc = 4;
			}
			dec_insn.micro_mips_mode = 1;
		} else {
			if ((get_user(dec_insn.insn,
			    (mips_instruction __user *) xcp->cp0_epc)) ||
			    (get_user(dec_insn.next_insn,
			    (mips_instruction __user *)(xcp->cp0_epc+4)))) {
				MIPS_FPU_EMU_INC_STATS(errors);
				return SIGBUS;
			}
			dec_insn.pc_inc = 4;
			dec_insn.next_pc_inc = 4;
			dec_insn.micro_mips_mode = 0;
		}

		if ((dec_insn.insn == 0) ||
		   ((dec_insn.pc_inc == 2) &&
		   ((dec_insn.insn & 0xffff) == MM_NOP16)))
			xcp->cp0_epc += dec_insn.pc_inc;	/* Skip NOPs */
		else {
			/*
			 * The 'ieee754_csr' is an alias of
			 * ctx->fcr31.	No need to copy ctx->fcr31 to
			 * ieee754_csr.	But ieee754_csr.rm is ieee
			 * library modes. (not mips rounding mode)
			 */
			sig = cop1Emulate(xcp, ctx, dec_insn, fault_addr);
		}

		if (has_fpu)
			break;
		if (sig)
			break;

		cond_resched();
	} while (xcp->cp0_epc > prevepc);

	/* SIGILL indicates a non-fpu instruction */
	if (sig == SIGILL && xcp->cp0_epc != oldepc)
		/* but if EPC has advanced, then ignore it */
		sig = 0;

	return sig;
}
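For the microMIPS path above, the handler always fetches four halfwords and packs them into the two 32-bit slots of dec_insn: a 32-bit instruction is simply "first halfword << 16 | second halfword", while a 16-bit instruction is duplicated into both halves and advances EPC by only two bytes. Below is a minimal sketch of that packing; the 16/32-bit decision (made by mm_insn_16bit() in the code above) is passed in as a flag, and the struct and function names are hypothetical.

/* Hypothetical sketch of the microMIPS halfword packing; not part of this commit. */
#include <stdint.h>

struct packed_insn {
	uint32_t insn;		/* 32-bit value handed to the emulator  */
	unsigned int pc_inc;	/* how far the PC advances for this insn */
};

/*
 * Pack one microMIPS instruction starting at hw[0]; is_16bit mirrors what
 * mm_insn_16bit() would report for hw[0].
 */
static struct packed_insn pack_mm_insn(const uint16_t *hw, int is_16bit)
{
	struct packed_insn p;

	if (is_16bit) {
		p.insn = ((uint32_t)hw[0] << 16) | hw[0];	/* duplicate the halfword */
		p.pc_inc = 2;
	} else {
		p.insn = ((uint32_t)hw[0] << 16) | hw[1];	/* high | low halfword */
		p.pc_inc = 4;
	}
	return p;
}

The duplicated halfword is also what makes the later NOP test work: a 16-bit MM_NOP16 ends up in the low half of dec_insn.insn, which is exactly the (dec_insn.insn & 0xffff) == MM_NOP16 comparison guarded by pc_inc == 2.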