Commit 8b592783a2e8b7721a99730bd549aab5208f36af
1 parent
347c8b70b1
Exists in
master
and in
40 other branches
Thumb-2: Implement the unified arch/arm/lib functions
This patch adds the ARM/Thumb-2 unified support for the arch/arm/lib/* files. Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Showing 20 changed files with 229 additions and 98 deletions Side-by-side Diff
- arch/arm/include/asm/assembler.h
- arch/arm/include/asm/uaccess.h
- arch/arm/lib/ashldi3.S
- arch/arm/lib/ashrdi3.S
- arch/arm/lib/backtrace.S
- arch/arm/lib/clear_user.S
- arch/arm/lib/copy_from_user.S
- arch/arm/lib/copy_template.S
- arch/arm/lib/copy_to_user.S
- arch/arm/lib/csumpartialcopyuser.S
- arch/arm/lib/div64.S
- arch/arm/lib/findbit.S
- arch/arm/lib/getuser.S
- arch/arm/lib/io-writesw-armv4.S
- arch/arm/lib/lshrdi3.S
- arch/arm/lib/memcpy.S
- arch/arm/lib/memmove.S
- arch/arm/lib/putuser.S
- arch/arm/lib/strncpy_from_user.S
- arch/arm/lib/strnlen_user.S
arch/arm/include/asm/assembler.h
... | ... | @@ -138,4 +138,77 @@ |
138 | 138 | msr cpsr_c, #\mode |
139 | 139 | .endm |
140 | 140 | #endif |
141 | + | |
142 | +/* | |
143 | + * STRT/LDRT access macros with ARM and Thumb-2 variants | |
144 | + */ | |
145 | +#ifdef CONFIG_THUMB2_KERNEL | |
146 | + | |
147 | + .macro usraccoff, instr, reg, ptr, inc, off, cond, abort | |
148 | +9999: | |
149 | + .if \inc == 1 | |
150 | + \instr\cond\()bt \reg, [\ptr, #\off] | |
151 | + .elseif \inc == 4 | |
152 | + \instr\cond\()t \reg, [\ptr, #\off] | |
153 | + .else | |
154 | + .error "Unsupported inc macro argument" | |
155 | + .endif | |
156 | + | |
157 | + .section __ex_table,"a" | |
158 | + .align 3 | |
159 | + .long 9999b, \abort | |
160 | + .previous | |
161 | + .endm | |
162 | + | |
163 | + .macro usracc, instr, reg, ptr, inc, cond, rept, abort | |
164 | + @ explicit IT instruction needed because of the label | |
165 | + @ introduced by the USER macro | |
166 | + .ifnc \cond,al | |
167 | + .if \rept == 1 | |
168 | + itt \cond | |
169 | + .elseif \rept == 2 | |
170 | + ittt \cond | |
171 | + .else | |
172 | + .error "Unsupported rept macro argument" | |
173 | + .endif | |
174 | + .endif | |
175 | + | |
176 | + @ Slightly optimised to avoid incrementing the pointer twice | |
177 | + usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort | |
178 | + .if \rept == 2 | |
179 | + usraccoff \instr, \reg, \ptr, \inc, 4, \cond, \abort | |
180 | + .endif | |
181 | + | |
182 | + add\cond \ptr, #\rept * \inc | |
183 | + .endm | |
184 | + | |
185 | +#else /* !CONFIG_THUMB2_KERNEL */ | |
186 | + | |
187 | + .macro usracc, instr, reg, ptr, inc, cond, rept, abort | |
188 | + .rept \rept | |
189 | +9999: | |
190 | + .if \inc == 1 | |
191 | + \instr\cond\()bt \reg, [\ptr], #\inc | |
192 | + .elseif \inc == 4 | |
193 | + \instr\cond\()t \reg, [\ptr], #\inc | |
194 | + .else | |
195 | + .error "Unsupported inc macro argument" | |
196 | + .endif | |
197 | + | |
198 | + .section __ex_table,"a" | |
199 | + .align 3 | |
200 | + .long 9999b, \abort | |
201 | + .previous | |
202 | + .endr | |
203 | + .endm | |
204 | + | |
205 | +#endif /* CONFIG_THUMB2_KERNEL */ | |
206 | + | |
207 | + .macro strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f | |
208 | + usracc str, \reg, \ptr, \inc, \cond, \rept, \abort | |
209 | + .endm | |
210 | + | |
211 | + .macro ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f | |
212 | + usracc ldr, \reg, \ptr, \inc, \cond, \rept, \abort | |
213 | + .endm |
arch/arm/include/asm/uaccess.h
... | ... | @@ -17,6 +17,7 @@ |
17 | 17 | #include <asm/memory.h> |
18 | 18 | #include <asm/domain.h> |
19 | 19 | #include <asm/system.h> |
20 | +#include <asm/unified.h> | |
20 | 21 | |
21 | 22 | #define VERIFY_READ 0 |
22 | 23 | #define VERIFY_WRITE 1 |
... | ... | @@ -365,8 +366,10 @@ |
365 | 366 | |
366 | 367 | #define __put_user_asm_dword(x,__pu_addr,err) \ |
367 | 368 | __asm__ __volatile__( \ |
368 | - "1: strt " __reg_oper1 ", [%1], #4\n" \ | |
369 | - "2: strt " __reg_oper0 ", [%1]\n" \ | |
369 | + ARM( "1: strt " __reg_oper1 ", [%1], #4\n" ) \ | |
370 | + ARM( "2: strt " __reg_oper0 ", [%1]\n" ) \ | |
371 | + THUMB( "1: strt " __reg_oper1 ", [%1]\n" ) \ | |
372 | + THUMB( "2: strt " __reg_oper0 ", [%1, #4]\n" ) \ | |
370 | 373 | "3:\n" \ |
371 | 374 | " .section .fixup,\"ax\"\n" \ |
372 | 375 | " .align 2\n" \ |
arch/arm/lib/ashldi3.S
arch/arm/lib/ashrdi3.S
arch/arm/lib/backtrace.S
... | ... | @@ -38,7 +38,9 @@ |
38 | 38 | beq no_frame @ we have no stack frames |
39 | 39 | |
40 | 40 | tst r1, #0x10 @ 26 or 32-bit mode? |
41 | - moveq mask, #0xfc000003 @ mask for 26-bit | |
41 | + ARM( moveq mask, #0xfc000003 ) | |
42 | + THUMB( moveq mask, #0xfc000000 ) | |
43 | + THUMB( orreq mask, #0x03 ) | |
42 | 44 | movne mask, #0 @ mask for 32-bit |
43 | 45 | |
44 | 46 | 1: stmfd sp!, {pc} @ calculate offset of PC stored |
... | ... | @@ -126,7 +128,9 @@ |
126 | 128 | mov reg, #10 |
127 | 129 | mov r7, #0 |
128 | 130 | 1: mov r3, #1 |
129 | - tst instr, r3, lsl reg | |
131 | + ARM( tst instr, r3, lsl reg ) | |
132 | + THUMB( lsl r3, reg ) | |
133 | + THUMB( tst instr, r3 ) | |
130 | 134 | beq 2f |
131 | 135 | add r7, r7, #1 |
132 | 136 | teq r7, #6 |
arch/arm/lib/clear_user.S
... | ... | @@ -27,21 +27,20 @@ |
27 | 27 | ands ip, r0, #3 |
28 | 28 | beq 1f |
29 | 29 | cmp ip, #2 |
30 | -USER( strbt r2, [r0], #1) | |
31 | -USER( strlebt r2, [r0], #1) | |
32 | -USER( strltbt r2, [r0], #1) | |
30 | + strusr r2, r0, 1 | |
31 | + strusr r2, r0, 1, le | |
32 | + strusr r2, r0, 1, lt | |
33 | 33 | rsb ip, ip, #4 |
34 | 34 | sub r1, r1, ip @ 7 6 5 4 3 2 1 |
35 | 35 | 1: subs r1, r1, #8 @ -1 -2 -3 -4 -5 -6 -7 |
36 | -USER( strplt r2, [r0], #4) | |
37 | -USER( strplt r2, [r0], #4) | |
36 | + strusr r2, r0, 4, pl, rept=2 | |
38 | 37 | bpl 1b |
39 | 38 | adds r1, r1, #4 @ 3 2 1 0 -1 -2 -3 |
40 | -USER( strplt r2, [r0], #4) | |
39 | + strusr r2, r0, 4, pl | |
41 | 40 | 2: tst r1, #2 @ 1x 1x 0x 0x 1x 1x 0x |
42 | -USER( strnebt r2, [r0], #1) | |
43 | -USER( strnebt r2, [r0], #1) | |
41 | + strusr r2, r0, 1, ne, rept=2 | |
44 | 42 | tst r1, #1 @ x1 x0 x1 x0 x1 x0 x1 |
43 | + it ne @ explicit IT needed for the label | |
45 | 44 | USER( strnebt r2, [r0]) |
46 | 45 | mov r0, #0 |
47 | 46 | ldmfd sp!, {r1, pc} |
arch/arm/lib/copy_from_user.S
... | ... | @@ -33,11 +33,15 @@ |
33 | 33 | * Number of bytes NOT copied. |
34 | 34 | */ |
35 | 35 | |
36 | +#ifndef CONFIG_THUMB2_KERNEL | |
37 | +#define LDR1W_SHIFT 0 | |
38 | +#else | |
39 | +#define LDR1W_SHIFT 1 | |
40 | +#endif | |
41 | +#define STR1W_SHIFT 0 | |
42 | + | |
36 | 43 | .macro ldr1w ptr reg abort |
37 | -100: ldrt \reg, [\ptr], #4 | |
38 | - .section __ex_table, "a" | |
39 | - .long 100b, \abort | |
40 | - .previous | |
44 | + ldrusr \reg, \ptr, 4, abort=\abort | |
41 | 45 | .endm |
42 | 46 | |
43 | 47 | .macro ldr4w ptr reg1 reg2 reg3 reg4 abort |
44 | 48 | |
... | ... | @@ -53,14 +57,11 @@ |
53 | 57 | .endm |
54 | 58 | |
55 | 59 | .macro ldr1b ptr reg cond=al abort |
56 | -100: ldr\cond\()bt \reg, [\ptr], #1 | |
57 | - .section __ex_table, "a" | |
58 | - .long 100b, \abort | |
59 | - .previous | |
60 | + ldrusr \reg, \ptr, 1, \cond, abort=\abort | |
60 | 61 | .endm |
61 | 62 | |
62 | 63 | .macro str1w ptr reg abort |
63 | - str \reg, [\ptr], #4 | |
64 | + W(str) \reg, [\ptr], #4 | |
64 | 65 | .endm |
65 | 66 | |
66 | 67 | .macro str8w ptr reg1 reg2 reg3 reg4 reg5 reg6 reg7 reg8 abort |
arch/arm/lib/copy_template.S
... | ... | @@ -57,6 +57,13 @@ |
57 | 57 | * |
58 | 58 | * Restore registers with the values previously saved with the |
59 | 59 | * 'preserv' macro. Called upon code termination. |
60 | + * | |
61 | + * LDR1W_SHIFT | |
62 | + * STR1W_SHIFT | |
63 | + * | |
64 | + * Correction to be applied to the "ip" register when branching into | |
65 | + * the ldr1w or str1w instructions (some of these macros may expand to | |
66 | + * more than one 32bit instruction in Thumb-2) | 
60 | 67 | */ |
61 | 68 | |
62 | 69 | |
63 | 70 | |
... | ... | @@ -99,9 +106,15 @@ |
99 | 106 | |
100 | 107 | 5: ands ip, r2, #28 |
101 | 108 | rsb ip, ip, #32 |
109 | +#if LDR1W_SHIFT > 0 | |
110 | + lsl ip, ip, #LDR1W_SHIFT | |
111 | +#endif | |
102 | 112 | addne pc, pc, ip @ C is always clear here |
103 | 113 | b 7f |
104 | -6: nop | |
114 | +6: | |
115 | + .rept (1 << LDR1W_SHIFT) | |
116 | + W(nop) | |
117 | + .endr | |
105 | 118 | ldr1w r1, r3, abort=20f |
106 | 119 | ldr1w r1, r4, abort=20f |
107 | 120 | ldr1w r1, r5, abort=20f |
108 | 121 | |
... | ... | @@ -110,9 +123,16 @@ |
110 | 123 | ldr1w r1, r8, abort=20f |
111 | 124 | ldr1w r1, lr, abort=20f |
112 | 125 | |
126 | +#if LDR1W_SHIFT < STR1W_SHIFT | |
127 | + lsl ip, ip, #STR1W_SHIFT - LDR1W_SHIFT | |
128 | +#elif LDR1W_SHIFT > STR1W_SHIFT | |
129 | + lsr ip, ip, #LDR1W_SHIFT - STR1W_SHIFT | |
130 | +#endif | |
113 | 131 | add pc, pc, ip |
114 | 132 | nop |
115 | - nop | |
133 | + .rept (1 << STR1W_SHIFT) | |
134 | + W(nop) | |
135 | + .endr | |
116 | 136 | str1w r0, r3, abort=20f |
117 | 137 | str1w r0, r4, abort=20f |
118 | 138 | str1w r0, r5, abort=20f |
arch/arm/lib/copy_to_user.S
... | ... | @@ -33,8 +33,15 @@ |
33 | 33 | * Number of bytes NOT copied. |
34 | 34 | */ |
35 | 35 | |
36 | +#define LDR1W_SHIFT 0 | |
37 | +#ifndef CONFIG_THUMB2_KERNEL | |
38 | +#define STR1W_SHIFT 0 | |
39 | +#else | |
40 | +#define STR1W_SHIFT 1 | |
41 | +#endif | |
42 | + | |
36 | 43 | .macro ldr1w ptr reg abort |
37 | - ldr \reg, [\ptr], #4 | |
44 | + W(ldr) \reg, [\ptr], #4 | |
38 | 45 | .endm |
39 | 46 | |
40 | 47 | .macro ldr4w ptr reg1 reg2 reg3 reg4 abort |
... | ... | @@ -50,10 +57,7 @@ |
50 | 57 | .endm |
51 | 58 | |
52 | 59 | .macro str1w ptr reg abort |
53 | -100: strt \reg, [\ptr], #4 | |
54 | - .section __ex_table, "a" | |
55 | - .long 100b, \abort | |
56 | - .previous | |
60 | + strusr \reg, \ptr, 4, abort=\abort | |
57 | 61 | .endm |
58 | 62 | |
59 | 63 | .macro str8w ptr reg1 reg2 reg3 reg4 reg5 reg6 reg7 reg8 abort |
... | ... | @@ -68,10 +72,7 @@ |
68 | 72 | .endm |
69 | 73 | |
70 | 74 | .macro str1b ptr reg cond=al abort |
71 | -100: str\cond\()bt \reg, [\ptr], #1 | |
72 | - .section __ex_table, "a" | |
73 | - .long 100b, \abort | |
74 | - .previous | |
75 | + strusr \reg, \ptr, 1, \cond, abort=\abort | |
75 | 76 | .endm |
76 | 77 | |
77 | 78 | .macro enter reg1 reg2 |
arch/arm/lib/csumpartialcopyuser.S
... | ... | @@ -26,50 +26,28 @@ |
26 | 26 | .endm |
27 | 27 | |
28 | 28 | .macro load1b, reg1 |
29 | -9999: ldrbt \reg1, [r0], $1 | |
30 | - .section __ex_table, "a" | |
31 | - .align 3 | |
32 | - .long 9999b, 6001f | |
33 | - .previous | |
29 | + ldrusr \reg1, r0, 1 | |
34 | 30 | .endm |
35 | 31 | |
36 | 32 | .macro load2b, reg1, reg2 |
37 | -9999: ldrbt \reg1, [r0], $1 | |
38 | -9998: ldrbt \reg2, [r0], $1 | |
39 | - .section __ex_table, "a" | |
40 | - .long 9999b, 6001f | |
41 | - .long 9998b, 6001f | |
42 | - .previous | |
33 | + ldrusr \reg1, r0, 1 | |
34 | + ldrusr \reg2, r0, 1 | |
43 | 35 | .endm |
44 | 36 | |
45 | 37 | .macro load1l, reg1 |
46 | -9999: ldrt \reg1, [r0], $4 | |
47 | - .section __ex_table, "a" | |
48 | - .align 3 | |
49 | - .long 9999b, 6001f | |
50 | - .previous | |
38 | + ldrusr \reg1, r0, 4 | |
51 | 39 | .endm |
52 | 40 | |
53 | 41 | .macro load2l, reg1, reg2 |
54 | -9999: ldrt \reg1, [r0], $4 | |
55 | -9998: ldrt \reg2, [r0], $4 | |
56 | - .section __ex_table, "a" | |
57 | - .long 9999b, 6001f | |
58 | - .long 9998b, 6001f | |
59 | - .previous | |
42 | + ldrusr \reg1, r0, 4 | |
43 | + ldrusr \reg2, r0, 4 | |
60 | 44 | .endm |
61 | 45 | |
62 | 46 | .macro load4l, reg1, reg2, reg3, reg4 |
63 | -9999: ldrt \reg1, [r0], $4 | |
64 | -9998: ldrt \reg2, [r0], $4 | |
65 | -9997: ldrt \reg3, [r0], $4 | |
66 | -9996: ldrt \reg4, [r0], $4 | |
67 | - .section __ex_table, "a" | |
68 | - .long 9999b, 6001f | |
69 | - .long 9998b, 6001f | |
70 | - .long 9997b, 6001f | |
71 | - .long 9996b, 6001f | |
72 | - .previous | |
47 | + ldrusr \reg1, r0, 4 | |
48 | + ldrusr \reg2, r0, 4 | |
49 | + ldrusr \reg3, r0, 4 | |
50 | + ldrusr \reg4, r0, 4 | |
73 | 51 | .endm |
74 | 52 | |
75 | 53 | /* |
76 | 54 | |
77 | 55 | |
... | ... | @@ -92,15 +70,15 @@ |
92 | 70 | */ |
93 | 71 | .section .fixup,"ax" |
94 | 72 | .align 4 |
95 | -6001: mov r4, #-EFAULT | |
73 | +9001: mov r4, #-EFAULT | |
96 | 74 | ldr r5, [fp, #4] @ *err_ptr |
97 | 75 | str r4, [r5] |
98 | 76 | ldmia sp, {r1, r2} @ retrieve dst, len |
99 | 77 | add r2, r2, r1 |
100 | 78 | mov r0, #0 @ zero the buffer |
101 | -6002: teq r2, r1 | |
79 | +9002: teq r2, r1 | |
102 | 80 | strneb r0, [r1], #1 |
103 | - bne 6002b | |
81 | + bne 9002b | |
104 | 82 | load_regs |
105 | 83 | .previous |
arch/arm/lib/div64.S
... | ... | @@ -177,7 +177,9 @@ |
177 | 177 | mov yh, xh, lsr ip |
178 | 178 | mov yl, xl, lsr ip |
179 | 179 | rsb ip, ip, #32 |
180 | - orr yl, yl, xh, lsl ip | |
180 | + ARM( orr yl, yl, xh, lsl ip ) | |
181 | + THUMB( lsl xh, xh, ip ) | |
182 | + THUMB( orr yl, yl, xh ) | |
181 | 183 | mov xh, xl, lsl ip |
182 | 184 | mov xh, xh, lsr ip |
183 | 185 | mov pc, lr |
arch/arm/lib/findbit.S
... | ... | @@ -25,7 +25,10 @@ |
25 | 25 | teq r1, #0 |
26 | 26 | beq 3f |
27 | 27 | mov r2, #0 |
28 | -1: ldrb r3, [r0, r2, lsr #3] | |
28 | +1: | |
29 | + ARM( ldrb r3, [r0, r2, lsr #3] ) | |
30 | + THUMB( lsr r3, r2, #3 ) | |
31 | + THUMB( ldrb r3, [r0, r3] ) | |
29 | 32 | eors r3, r3, #0xff @ invert bits |
30 | 33 | bne .L_found @ any now set - found zero bit |
31 | 34 | add r2, r2, #8 @ next bit pointer |
... | ... | @@ -44,7 +47,9 @@ |
44 | 47 | beq 3b |
45 | 48 | ands ip, r2, #7 |
46 | 49 | beq 1b @ If new byte, goto old routine |
47 | - ldrb r3, [r0, r2, lsr #3] | |
50 | + ARM( ldrb r3, [r0, r2, lsr #3] ) | |
51 | + THUMB( lsr r3, r2, #3 ) | |
52 | + THUMB( ldrb r3, [r0, r3] ) | |
48 | 53 | eor r3, r3, #0xff @ now looking for a 1 bit |
49 | 54 | movs r3, r3, lsr ip @ shift off unused bits |
50 | 55 | bne .L_found |
... | ... | @@ -61,7 +66,10 @@ |
61 | 66 | teq r1, #0 |
62 | 67 | beq 3f |
63 | 68 | mov r2, #0 |
64 | -1: ldrb r3, [r0, r2, lsr #3] | |
69 | +1: | |
70 | + ARM( ldrb r3, [r0, r2, lsr #3] ) | |
71 | + THUMB( lsr r3, r2, #3 ) | |
72 | + THUMB( ldrb r3, [r0, r3] ) | |
65 | 73 | movs r3, r3 |
66 | 74 | bne .L_found @ any now set - found zero bit |
67 | 75 | add r2, r2, #8 @ next bit pointer |
... | ... | @@ -80,7 +88,9 @@ |
80 | 88 | beq 3b |
81 | 89 | ands ip, r2, #7 |
82 | 90 | beq 1b @ If new byte, goto old routine |
83 | - ldrb r3, [r0, r2, lsr #3] | |
91 | + ARM( ldrb r3, [r0, r2, lsr #3] ) | |
92 | + THUMB( lsr r3, r2, #3 ) | |
93 | + THUMB( ldrb r3, [r0, r3] ) | |
84 | 94 | movs r3, r3, lsr ip @ shift off unused bits |
85 | 95 | bne .L_found |
86 | 96 | orr r2, r2, #7 @ if zero, then no bits here |
... | ... | @@ -95,7 +105,9 @@ |
95 | 105 | beq 3f |
96 | 106 | mov r2, #0 |
97 | 107 | 1: eor r3, r2, #0x18 @ big endian byte ordering |
98 | - ldrb r3, [r0, r3, lsr #3] | |
108 | + ARM( ldrb r3, [r0, r3, lsr #3] ) | |
109 | + THUMB( lsr r3, #3 ) | |
110 | + THUMB( ldrb r3, [r0, r3] ) | |
99 | 111 | eors r3, r3, #0xff @ invert bits |
100 | 112 | bne .L_found @ any now set - found zero bit |
101 | 113 | add r2, r2, #8 @ next bit pointer |
... | ... | @@ -111,7 +123,9 @@ |
111 | 123 | ands ip, r2, #7 |
112 | 124 | beq 1b @ If new byte, goto old routine |
113 | 125 | eor r3, r2, #0x18 @ big endian byte ordering |
114 | - ldrb r3, [r0, r3, lsr #3] | |
126 | + ARM( ldrb r3, [r0, r3, lsr #3] ) | |
127 | + THUMB( lsr r3, #3 ) | |
128 | + THUMB( ldrb r3, [r0, r3] ) | |
115 | 129 | eor r3, r3, #0xff @ now looking for a 1 bit |
116 | 130 | movs r3, r3, lsr ip @ shift off unused bits |
117 | 131 | bne .L_found |
... | ... | @@ -125,7 +139,9 @@ |
125 | 139 | beq 3f |
126 | 140 | mov r2, #0 |
127 | 141 | 1: eor r3, r2, #0x18 @ big endian byte ordering |
128 | - ldrb r3, [r0, r3, lsr #3] | |
142 | + ARM( ldrb r3, [r0, r3, lsr #3] ) | |
143 | + THUMB( lsr r3, #3 ) | |
144 | + THUMB( ldrb r3, [r0, r3] ) | |
129 | 145 | movs r3, r3 |
130 | 146 | bne .L_found @ any now set - found zero bit |
131 | 147 | add r2, r2, #8 @ next bit pointer |
... | ... | @@ -141,7 +157,9 @@ |
141 | 157 | ands ip, r2, #7 |
142 | 158 | beq 1b @ If new byte, goto old routine |
143 | 159 | eor r3, r2, #0x18 @ big endian byte ordering |
144 | - ldrb r3, [r0, r3, lsr #3] | |
160 | + ARM( ldrb r3, [r0, r3, lsr #3] ) | |
161 | + THUMB( lsr r3, #3 ) | |
162 | + THUMB( ldrb r3, [r0, r3] ) | |
145 | 163 | movs r3, r3, lsr ip @ shift off unused bits |
146 | 164 | bne .L_found |
147 | 165 | orr r2, r2, #7 @ if zero, then no bits here |
arch/arm/lib/getuser.S
... | ... | @@ -36,8 +36,13 @@ |
36 | 36 | ENDPROC(__get_user_1) |
37 | 37 | |
38 | 38 | ENTRY(__get_user_2) |
39 | +#ifdef CONFIG_THUMB2_KERNEL | |
40 | +2: ldrbt r2, [r0] | |
41 | +3: ldrbt r3, [r0, #1] | |
42 | +#else | |
39 | 43 | 2: ldrbt r2, [r0], #1 |
40 | 44 | 3: ldrbt r3, [r0] |
45 | +#endif | |
41 | 46 | #ifndef __ARMEB__ |
42 | 47 | orr r2, r2, r3, lsl #8 |
43 | 48 | #else |
arch/arm/lib/io-writesw-armv4.S
arch/arm/lib/lshrdi3.S
arch/arm/lib/memcpy.S
... | ... | @@ -13,8 +13,11 @@ |
13 | 13 | #include <linux/linkage.h> |
14 | 14 | #include <asm/assembler.h> |
15 | 15 | |
16 | +#define LDR1W_SHIFT 0 | |
17 | +#define STR1W_SHIFT 0 | |
18 | + | |
16 | 19 | .macro ldr1w ptr reg abort |
17 | - ldr \reg, [\ptr], #4 | |
20 | + W(ldr) \reg, [\ptr], #4 | |
18 | 21 | .endm |
19 | 22 | |
20 | 23 | .macro ldr4w ptr reg1 reg2 reg3 reg4 abort |
... | ... | @@ -30,7 +33,7 @@ |
30 | 33 | .endm |
31 | 34 | |
32 | 35 | .macro str1w ptr reg abort |
33 | - str \reg, [\ptr], #4 | |
36 | + W(str) \reg, [\ptr], #4 | |
34 | 37 | .endm |
35 | 38 | |
36 | 39 | .macro str8w ptr reg1 reg2 reg3 reg4 reg5 reg6 reg7 reg8 abort |
arch/arm/lib/memmove.S
... | ... | @@ -75,24 +75,24 @@ |
75 | 75 | addne pc, pc, ip @ C is always clear here |
76 | 76 | b 7f |
77 | 77 | 6: nop |
78 | - ldr r3, [r1, #-4]! | |
79 | - ldr r4, [r1, #-4]! | |
80 | - ldr r5, [r1, #-4]! | |
81 | - ldr r6, [r1, #-4]! | |
82 | - ldr r7, [r1, #-4]! | |
83 | - ldr r8, [r1, #-4]! | |
84 | - ldr lr, [r1, #-4]! | |
78 | + W(ldr) r3, [r1, #-4]! | |
79 | + W(ldr) r4, [r1, #-4]! | |
80 | + W(ldr) r5, [r1, #-4]! | |
81 | + W(ldr) r6, [r1, #-4]! | |
82 | + W(ldr) r7, [r1, #-4]! | |
83 | + W(ldr) r8, [r1, #-4]! | |
84 | + W(ldr) lr, [r1, #-4]! | |
85 | 85 | |
86 | 86 | add pc, pc, ip |
87 | 87 | nop |
88 | 88 | nop |
89 | - str r3, [r0, #-4]! | |
90 | - str r4, [r0, #-4]! | |
91 | - str r5, [r0, #-4]! | |
92 | - str r6, [r0, #-4]! | |
93 | - str r7, [r0, #-4]! | |
94 | - str r8, [r0, #-4]! | |
95 | - str lr, [r0, #-4]! | |
89 | + W(str) r3, [r0, #-4]! | |
90 | + W(str) r4, [r0, #-4]! | |
91 | + W(str) r5, [r0, #-4]! | |
92 | + W(str) r6, [r0, #-4]! | |
93 | + W(str) r7, [r0, #-4]! | |
94 | + W(str) r8, [r0, #-4]! | |
95 | + W(str) lr, [r0, #-4]! | |
96 | 96 | |
97 | 97 | CALGN( bcs 2b ) |
98 | 98 |
arch/arm/lib/putuser.S
... | ... | @@ -37,13 +37,23 @@ |
37 | 37 | |
38 | 38 | ENTRY(__put_user_2) |
39 | 39 | mov ip, r2, lsr #8 |
40 | +#ifdef CONFIG_THUMB2_KERNEL | |
40 | 41 | #ifndef __ARMEB__ |
42 | +2: strbt r2, [r0] | |
43 | +3: strbt ip, [r0, #1] | |
44 | +#else | |
45 | +2: strbt ip, [r0] | |
46 | +3: strbt r2, [r0, #1] | |
47 | +#endif | |
48 | +#else /* !CONFIG_THUMB2_KERNEL */ | |
49 | +#ifndef __ARMEB__ | |
41 | 50 | 2: strbt r2, [r0], #1 |
42 | 51 | 3: strbt ip, [r0] |
43 | 52 | #else |
44 | 53 | 2: strbt ip, [r0], #1 |
45 | 54 | 3: strbt r2, [r0] |
46 | 55 | #endif |
56 | +#endif /* CONFIG_THUMB2_KERNEL */ | |
47 | 57 | mov r0, #0 |
48 | 58 | mov pc, lr |
49 | 59 | ENDPROC(__put_user_2) |
50 | 60 | |
... | ... | @@ -55,8 +65,13 @@ |
55 | 65 | ENDPROC(__put_user_4) |
56 | 66 | |
57 | 67 | ENTRY(__put_user_8) |
68 | +#ifdef CONFIG_THUMB2_KERNEL | |
69 | +5: strt r2, [r0] | |
70 | +6: strt r3, [r0, #4] | |
71 | +#else | |
58 | 72 | 5: strt r2, [r0], #4 |
59 | 73 | 6: strt r3, [r0] |
74 | +#endif | |
60 | 75 | mov r0, #0 |
61 | 76 | mov pc, lr |
62 | 77 | ENDPROC(__put_user_8) |
arch/arm/lib/strncpy_from_user.S