Commit 2501cf768e4009a06287a5ee842fd93dd4fd690e
Parent: c9034c3a1d
m68k: Fix xchg/cmpxchg to fail to link if given an inappropriate pointer
Fix the m68k versions of xchg() and cmpxchg() to fail to link if given an
inappropriately sized pointer rather than BUG()'ing at runtime.

Signed-off-by: David Howells <dhowells@redhat.com>
Acked-by: Greg Ungerer <gerg@uclinux.org>
cc: linux-m68k@lists.linux-m68k.org
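The fix relies on a common kernel idiom: __invalid_xchg_size() and __invalid_cmpxchg_size() are declared but deliberately never defined. For the sizes the switch handles, the compiler constant-folds the size argument and eliminates the default branch, so the call disappears entirely; only an xchg() or cmpxchg() on an unsupported pointer size leaves an undefined reference behind, and the build then fails at link time instead of BUG()'ing at runtime. A minimal sketch of the idiom, with illustrative names that are not part of this patch:

/* Declared but never defined: any reference that survives
 * compilation becomes an "undefined reference" link error.
 */
extern unsigned long __bad_access_size(void);

static inline unsigned long fetch(const volatile void *ptr, int size)
{
	switch (size) {
	case 1:
		return *(const volatile unsigned char *)ptr;
	case 4:
		return *(const volatile unsigned int *)ptr;
	default:
		/* For the constant sizes handled above this branch is
		 * dead code and is optimized away; the reference only
		 * survives when a caller passes an unhandled size.
		 */
		return __bad_access_size();
	}
}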
1 changed file with 16 additions and 4 deletions
arch/m68k/include/asm/system.h
@@ -68,6 +68,8 @@
 struct __xchg_dummy { unsigned long a[100]; };
 #define __xg(x) ((volatile struct __xchg_dummy *)(x))
 
+extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int);
+
 #ifndef CONFIG_RMW_INSNS
 static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
 {
@@ -92,7 +94,8 @@
 		x = tmp;
 		break;
 	default:
-		BUG();
+		tmp = __invalid_xchg_size(x, ptr, size);
+		break;
 	}
 
 	local_irq_restore(flags);
@@ -102,7 +105,7 @@
 static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
 {
 	switch (size) {
-	    case 1:
+	case 1:
 		__asm__ __volatile__
 			("moveb %2,%0\n\t"
 			 "1:\n\t"
@@ -110,7 +113,7 @@
 			 "jne 1b"
 			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
 		break;
-	    case 2:
+	case 2:
 		__asm__ __volatile__
 			("movew %2,%0\n\t"
 			 "1:\n\t"
@@ -118,7 +121,7 @@
 			 "jne 1b"
 			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
 		break;
-	    case 4:
+	case 4:
 		__asm__ __volatile__
 			("movel %2,%0\n\t"
 			 "1:\n\t"
@@ -126,6 +129,9 @@
 			 "jne 1b"
 			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
 		break;
+	default:
+		x = __invalid_xchg_size(x, ptr, size);
+		break;
 	}
 	return x;
 }
@@ -135,6 +141,9 @@
 
 #define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
 
+extern unsigned long __invalid_cmpxchg_size(volatile void *,
+					    unsigned long, unsigned long, int);
+
 /*
  * Atomic compare and exchange. Compare OLD with MEM, if identical,
  * store NEW in MEM. Return the initial value in MEM. Success is
@@ -161,6 +170,9 @@
 		__asm__ __volatile__ ("casl %0,%2,%1"
 				      : "=d" (old), "=m" (*(int *)p)
 				      : "d" (new), "0" (old), "m" (*(int *)p));
+		break;
+	default:
+		old = __invalid_cmpxchg_size(p, old, new, size);
 		break;
 	}
 	return old;
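From a caller's point of view, the effect of the patch can be reproduced outside the kernel. The following is a hypothetical stand-alone demonstration, not kernel code; it must be built with optimization (e.g. gcc -O2), because the idiom depends on the optimizer deleting the default branch for valid sizes:

/* demo.c -- build with: gcc -O2 demo.c */
extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int);

static inline unsigned long my_xchg(unsigned long x, volatile void *ptr, int size)
{
	switch (size) {
	case 4: {
		unsigned int tmp = *(volatile unsigned int *)ptr;

		*(volatile unsigned int *)ptr = x;	/* not atomic; demo only */
		return tmp;
	}
	default:
		return __invalid_xchg_size(x, ptr, size);
	}
}

int main(void)
{
	unsigned int ok = 1;
	unsigned long long bad = 2;

	my_xchg(5, &ok, sizeof(ok));	/* default branch folded away */
	my_xchg(5, &bad, sizeof(bad));	/* undefined reference to
					 * `__invalid_xchg_size' at link time */
	return 0;
}

At -O0 no dead-code elimination happens, so even the valid call can leave the reference behind; the kernel always builds with optimization enabled, so the idiom is safe in that context.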