Commit c5386c200f55940eeeb827df172edf2e0305f23b
Committed by: Ingo Molnar
Parent: 26b7fcc4bd
Exists in: master and 7 other branches
include/asm-x86/system.h: checkpatch cleanups - formatting only
Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Showing 1 changed file with 51 additions and 53 deletions
include/asm-x86/system.h
@@ -38,35 +38,33 @@
 	 */ \
 	unsigned long ebx, ecx, edx, esi, edi; \
 	\
-	asm volatile( \
-		"pushfl \n\t" /* save flags */ \
-		"pushl %%ebp \n\t" /* save EBP */ \
-		"movl %%esp,%[prev_sp] \n\t" /* save ESP */ \
-		"movl %[next_sp],%%esp \n\t" /* restore ESP */ \
-		"movl $1f,%[prev_ip] \n\t" /* save EIP */ \
-		"pushl %[next_ip] \n\t" /* restore EIP */ \
-		"jmp __switch_to \n" /* regparm call */ \
-		"1: \t" \
-		"popl %%ebp \n\t" /* restore EBP */ \
-		"popfl \n" /* restore flags */ \
+	asm volatile("pushfl\n\t" /* save flags */ \
+		     "pushl %%ebp\n\t" /* save EBP */ \
+		     "movl %%esp,%[prev_sp]\n\t" /* save ESP */ \
+		     "movl %[next_sp],%%esp\n\t" /* restore ESP */ \
+		     "movl $1f,%[prev_ip]\n\t" /* save EIP */ \
+		     "pushl %[next_ip]\n\t" /* restore EIP */ \
+		     "jmp __switch_to\n" /* regparm call */ \
+		     "1:\t" \
+		     "popl %%ebp\n\t" /* restore EBP */ \
+		     "popfl\n" /* restore flags */ \
 	\
-	/* output parameters */ \
-	: [prev_sp] "=m" (prev->thread.sp), \
-	  [prev_ip] "=m" (prev->thread.ip), \
-	  "=a" (last), \
+		     /* output parameters */ \
+		     : [prev_sp] "=m" (prev->thread.sp), \
+		       [prev_ip] "=m" (prev->thread.ip), \
+		       "=a" (last), \
 	\
-	/* clobbered output registers: */ \
-	"=b" (ebx), "=c" (ecx), "=d" (edx), \
-	"=S" (esi), "=D" (edi) \
-	\
-	/* input parameters: */ \
-	: [next_sp] "m" (next->thread.sp), \
-	  [next_ip] "m" (next->thread.ip), \
-	\
-	/* regparm parameters for __switch_to(): */ \
-	[prev] "a" (prev), \
-	[next] "d" (next) \
-	); \
+		       /* clobbered output registers: */ \
+		       "=b" (ebx), "=c" (ecx), "=d" (edx), \
+		       "=S" (esi), "=D" (edi) \
+	\
+		     /* input parameters: */ \
+		     : [next_sp] "m" (next->thread.sp), \
+		       [next_ip] "m" (next->thread.ip), \
+	\
+		     /* regparm parameters for __switch_to(): */ \
+		     [prev] "a" (prev), \
+		     [next] "d" (next)); \
 } while (0)
 
 /*
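Note on the "regparm call": on 32-bit x86 the kernel is built with gcc's regparm(3) convention, so a function's first three arguments arrive in %eax, %edx and %ecx rather than on the stack. That is why the asm above binds [prev] to %eax and [next] to %edx before jumping to __switch_to. A sketch of how such a function is declared (illustrative only, not the exact kernel prototype):

/* Sketch: a regparm(3) function on 32-bit x86. arg1 arrives in %eax,
 * arg2 in %edx -- matching the [prev] "a" (prev) / [next] "d" (next)
 * constraints used by the switch_to() asm above. */
struct task_struct;

__attribute__((regparm(3)))
struct task_struct *__switch_to(struct task_struct *prev,
				struct task_struct *next);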
@@ -146,35 +144,34 @@
  */
 #define loadsegment(seg, value) \
 	asm volatile("\n" \
-		"1:\t" \
-		"movl %k0,%%" #seg "\n" \
-		"2:\n" \
-		".section .fixup,\"ax\"\n" \
-		"3:\t" \
-		"movl %k1, %%" #seg "\n\t" \
-		"jmp 2b\n" \
-		".previous\n" \
-		_ASM_EXTABLE(1b,3b) \
-		: :"r" (value), "r" (0))
+		     "1:\t" \
+		     "movl %k0,%%" #seg "\n" \
+		     "2:\n" \
+		     ".section .fixup,\"ax\"\n" \
+		     "3:\t" \
+		     "movl %k1, %%" #seg "\n\t" \
+		     "jmp 2b\n" \
+		     ".previous\n" \
+		     _ASM_EXTABLE(1b,3b) \
+		     : :"r" (value), "r" (0))
 
 
 /*
  * Save a segment register away
  */
-#define savesegment(seg, value) \
+#define savesegment(seg, value)				\
 	asm volatile("mov %%" #seg ",%0":"=rm" (value))
 
 static inline unsigned long get_limit(unsigned long segment)
 {
 	unsigned long __limit;
-	__asm__("lsll %1,%0"
-		:"=r" (__limit):"r" (segment));
-	return __limit+1;
+	asm("lsll %1,%0" : "=r" (__limit) : "r" (segment));
+	return __limit + 1;
 }
 
 static inline void native_clts(void)
 {
-	asm volatile ("clts");
+	asm volatile("clts");
 }
 
 /*
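Aside: get_limit() is a thin wrapper around the lsl (load segment limit) instruction. A self-contained sketch of the same idiom, assuming the caller passes a valid selector (lsl sets ZF on success, which this minimal version, like get_limit() itself, does not check):

/* Standalone sketch, not kernel code: return the size in bytes of the
 * segment described by selector `sel`. The limit field holds the last
 * valid byte offset, hence the +1. */
static inline unsigned long seg_limit(unsigned long sel)
{
	unsigned long limit;

	asm("lsll %1,%0" : "=r" (limit) : "r" (sel));
	return limit + 1;
}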
@@ -189,43 +186,43 @@
 static inline unsigned long native_read_cr0(void)
 {
 	unsigned long val;
-	asm volatile("mov %%cr0,%0\n\t" :"=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
 	return val;
 }
 
 static inline void native_write_cr0(unsigned long val)
 {
-	asm volatile("mov %0,%%cr0": :"r" (val), "m" (__force_order));
+	asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
 }
 
 static inline unsigned long native_read_cr2(void)
 {
 	unsigned long val;
-	asm volatile("mov %%cr2,%0\n\t" :"=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
 	return val;
 }
 
 static inline void native_write_cr2(unsigned long val)
 {
-	asm volatile("mov %0,%%cr2": :"r" (val), "m" (__force_order));
+	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
 }
 
 static inline unsigned long native_read_cr3(void)
 {
 	unsigned long val;
-	asm volatile("mov %%cr3,%0\n\t" :"=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
 	return val;
 }
 
 static inline void native_write_cr3(unsigned long val)
 {
-	asm volatile("mov %0,%%cr3": :"r" (val), "m" (__force_order));
+	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
 }
 
 static inline unsigned long native_read_cr4(void)
 {
 	unsigned long val;
-	asm volatile("mov %%cr4,%0\n\t" :"=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
 	return val;
 }
 
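All of the control-register accessors pair the real operand with a dummy "m" (__force_order) operand. Because every accessor reads or writes the same dummy variable, the compiler sees a data dependency between them and cannot reorder control-register reads and writes against each other. A minimal user-space analog of the idiom (with nop standing in for the privileged mov, since %crN is not accessible outside ring 0):

/* Sketch of the ordering idiom: each asm touches the same dummy
 * variable, chaining the statements together. */
static unsigned long force_order;

static inline void ordered_write(unsigned long val)
{
	asm volatile("nop" /* stands in for mov %0,%%crN */
		     : : "r" (val), "m" (force_order));
}

static inline unsigned long ordered_read(void)
{
	unsigned long val = 0;

	asm volatile("nop" /* stands in for mov %%crN,%0 */
		     : "=r" (val), "=m" (force_order));
	return val;
}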
@@ -237,7 +234,7 @@
 #ifdef CONFIG_X86_32
 	asm volatile("1: mov %%cr4, %0\n"
 		     "2:\n"
-		     _ASM_EXTABLE(1b,2b)
+		     _ASM_EXTABLE(1b, 2b)
 		     : "=r" (val), "=m" (__force_order) : "0" (0));
 #else
 	val = native_read_cr4();
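This is native_read_cr4_safe(), which tolerates CPUs without CR4: if the mov at label 1 faults, the trap handler finds 1b in the exception table and resumes at 2b, leaving the "0" (0) preload of val as the result. _ASM_EXTABLE(from, to) simply records that (fault address, fixup address) pair; roughly, on 32-bit (an approximation for illustration, not the exact macro body):

/* Approximate expansion of _ASM_EXTABLE on 32-bit -- illustrative: */
#define _ASM_EXTABLE(from, to)			\
	" .section __ex_table,\"a\"\n"		\
	" .balign 4\n"				\
	" .long " #from "," #to "\n"		\
	" .previous\n"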
@@ -247,7 +244,7 @@
 
 static inline void native_write_cr4(unsigned long val)
 {
-	asm volatile("mov %0,%%cr4": :"r" (val), "m" (__force_order));
+	asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
 }
 
 #ifdef CONFIG_X86_64
@@ -268,6 +265,7 @@
 {
 	asm volatile("wbinvd": : :"memory");
 }
+
 #ifdef CONFIG_PARAVIRT
 #include <asm/paravirt.h>
 #else
@@ -300,7 +298,7 @@
 	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
 }
 
-#define nop() __asm__ __volatile__ ("nop")
+#define nop() asm volatile ("nop")
 
 void disable_hlt(void);
 void enable_hlt(void);
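The clflush() wrapper seen in this hunk uses a "+m" constraint on the flushed line, telling the compiler that byte may be both read and written, rather than resorting to a blanket "memory" clobber. A hedged usage sketch, assuming clflush() as defined here and a 64-byte cache line (real callers should take the line size from CPUID):

/* Hypothetical caller: write back and invalidate every cache line
 * covering buf[0..len). Assumes 64-byte lines. */
static void flush_buffer(void *buf, unsigned long len)
{
	char *p = buf;
	char *end = p + len;

	for (; p < end; p += 64)
		clflush(p);
}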
@@ -399,7 +397,7 @@
 # define smp_wmb() barrier()
 #endif
 #define smp_read_barrier_depends() read_barrier_depends()
-#define set_mb(var, value) do { (void) xchg(&var, value); } while (0)
+#define set_mb(var, value) do { (void)xchg(&var, value); } while (0)
 #else
 #define smp_mb() barrier()
 #define smp_rmb() barrier()
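set_mb() stores a value and doubles as a full memory barrier in one step: on x86, xchg with a memory operand carries an implicit lock prefix, so the store is globally ordered. A minimal sketch of the underlying trick (x86-specific; not the kernel's actual xchg(), which dispatches on operand size):

/* Store v to *p with full-barrier semantics via implicitly locked
 * xchg. The "memory" clobber also stops compiler reordering. */
static inline void store_mb(int *p, int v)
{
	asm volatile("xchgl %0,%1"
		     : "+r" (v), "+m" (*p)
		     : : "memory");
}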