Commit 346050952cac11b25a98c7e1743412b416827314
Committed by
Ingo Molnar
1 parent
86d8a08616
Exists in
master
and in
7 other branches
include/asm-x86/byteorder.h: checkpatch cleanups - formatting only
Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Showing 1 changed file with 24 additions and 15 deletions
Side-by-side Diff
include/asm-x86/byteorder.h
... | ... | @@ -8,50 +8,59 @@ |
8 | 8 | |
9 | 9 | #ifdef __i386__ |
10 | 10 | |
11 | -static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 x) | |
11 | +static inline __attribute_const__ __u32 ___arch__swab32(__u32 x) | |
12 | 12 | { |
13 | 13 | #ifdef CONFIG_X86_BSWAP |
14 | - __asm__("bswap %0" : "=r" (x) : "0" (x)); | |
14 | + asm("bswap %0" : "=r" (x) : "0" (x)); | |
15 | 15 | #else |
16 | - __asm__("xchgb %b0,%h0\n\t" /* swap lower bytes */ | |
17 | - "rorl $16,%0\n\t" /* swap words */ | |
18 | - "xchgb %b0,%h0" /* swap higher bytes */ | |
19 | - :"=q" (x) | |
20 | - : "0" (x)); | |
16 | + asm("xchgb %b0,%h0\n\t" /* swap lower bytes */ | |
17 | + "rorl $16,%0\n\t" /* swap words */ | |
18 | + "xchgb %b0,%h0" /* swap higher bytes */ | |
19 | + : "=q" (x) | |
20 | + : "0" (x)); | |
21 | 21 | #endif |
22 | 22 | return x; |
23 | 23 | } |
24 | 24 | |
25 | -static __inline__ __attribute_const__ __u64 ___arch__swab64(__u64 val) | |
25 | +static inline __attribute_const__ __u64 ___arch__swab64(__u64 val) | |
26 | 26 | { |
27 | 27 | union { |
28 | - struct { __u32 a,b; } s; | |
28 | + struct { | |
29 | + __u32 a; | |
30 | + __u32 b; | |
31 | + } s; | |
29 | 32 | __u64 u; |
30 | 33 | } v; |
31 | 34 | v.u = val; |
32 | 35 | #ifdef CONFIG_X86_BSWAP |
33 | - __asm__("bswapl %0 ; bswapl %1 ; xchgl %0,%1" | |
36 | + asm("bswapl %0 ; bswapl %1 ; xchgl %0,%1" | |
34 | 37 | : "=r" (v.s.a), "=r" (v.s.b) |
35 | 38 | : "0" (v.s.a), "1" (v.s.b)); |
36 | 39 | #else |
37 | 40 | v.s.a = ___arch__swab32(v.s.a); |
38 | 41 | v.s.b = ___arch__swab32(v.s.b); |
39 | - __asm__("xchgl %0,%1" : "=r" (v.s.a), "=r" (v.s.b) : "0" (v.s.a), "1" (v.s.b)); | |
42 | + asm("xchgl %0,%1" | |
43 | + : "=r" (v.s.a), "=r" (v.s.b) | |
44 | + : "0" (v.s.a), "1" (v.s.b)); | |
40 | 45 | #endif |
41 | 46 | return v.u; |
42 | 47 | } |
43 | 48 | |
44 | 49 | #else /* __i386__ */ |
45 | 50 | |
46 | -static __inline__ __attribute_const__ __u64 ___arch__swab64(__u64 x) | |
51 | +static inline __attribute_const__ __u64 ___arch__swab64(__u64 x) | |
47 | 52 | { |
48 | - __asm__("bswapq %0" : "=r" (x) : "0" (x)); | |
53 | + asm("bswapq %0" | |
54 | + : "=r" (x) | |
55 | + : "0" (x)); | |
49 | 56 | return x; |
50 | 57 | } |
51 | 58 | |
52 | -static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 x) | |
59 | +static inline __attribute_const__ __u32 ___arch__swab32(__u32 x) | |
53 | 60 | { |
54 | - __asm__("bswapl %0" : "=r" (x) : "0" (x)); | |
61 | + asm("bswapl %0" | |
62 | + : "=r" (x) | |
63 | + : "0" (x)); | |
55 | 64 | return x; |
56 | 65 | } |
57 | 66 |