Commit a559c91d77c3220be521453bd23815e1e1980a82

Authored by Becky Bruce
Committed by Paul Mackerras
1 parent 6b67f62cf6

[PATCH] powerpc: merge byteorder.h

powerpc: Merge byteorder.h

Essentially adopts the 64-bit version of this file.  The 32-bit version had
been using unsigned ints for arguments/return values that were actually
only 16 bits - the new file uses __u16 for these items as in the 64-bit
version of the header.  The order of some of the asm constraints
in the 64-bit version was slightly different from the 32-bit version,
but both orderings produce identical code.

Signed-off-by: Becky Bruce <becky.bruce@freescale.com>
Signed-off-by: Kumar Gala <kumar.gala@freescale.com>
Signed-off-by: Paul Mackerras <paulus@samba.org>

Showing 3 changed files with 89 additions and 162 deletions Side-by-side Diff

include/asm-powerpc/byteorder.h
  1 +#ifndef _ASM_POWERPC_BYTEORDER_H
  2 +#define _ASM_POWERPC_BYTEORDER_H
  3 +
  4 +/*
  5 + * This program is free software; you can redistribute it and/or
  6 + * modify it under the terms of the GNU General Public License
  7 + * as published by the Free Software Foundation; either version
  8 + * 2 of the License, or (at your option) any later version.
  9 + */
  10 +
  11 +#include <asm/types.h>
  12 +#include <linux/compiler.h>
  13 +
  14 +#ifdef __GNUC__
  15 +#ifdef __KERNEL__
  16 +
/*
 * Load a 16-bit little-endian value from *addr on this big-endian CPU.
 *
 * lhbrx is the PowerPC "load halfword byte-reverse indexed" instruction;
 * the dummy "m" (*addr) input tells the compiler the asm reads the
 * memory at *addr, so stores to it are not reordered past this load.
 */
static __inline__ __u16 ld_le16(const volatile __u16 *addr)
{
	__u16 val;

	__asm__ __volatile__ ("lhbrx %0,0,%1" : "=r" (val) : "r" (addr), "m" (*addr));
	return val;
}
  24 +
/*
 * Store val to *addr byte-reversed, i.e. write a 16-bit little-endian
 * value on this big-endian CPU.
 *
 * sthbrx is "store halfword byte-reverse indexed"; the "=m" (*addr)
 * output tells the compiler the asm writes the memory at *addr.
 */
static __inline__ void st_le16(volatile __u16 *addr, const __u16 val)
{
	__asm__ __volatile__ ("sthbrx %1,0,%2" : "=m" (*addr) : "r" (val), "r" (addr));
}
  29 +
/*
 * Load a 32-bit little-endian value from *addr on this big-endian CPU.
 *
 * lwbrx is "load word byte-reverse indexed"; the dummy "m" (*addr)
 * input tells the compiler the asm reads the memory at *addr.
 */
static __inline__ __u32 ld_le32(const volatile __u32 *addr)
{
	__u32 val;

	__asm__ __volatile__ ("lwbrx %0,0,%1" : "=r" (val) : "r" (addr), "m" (*addr));
	return val;
}
  37 +
/*
 * Store val to *addr byte-reversed, i.e. write a 32-bit little-endian
 * value on this big-endian CPU.
 *
 * stwbrx is "store word byte-reverse indexed"; the "=m" (*addr)
 * output tells the compiler the asm writes the memory at *addr.
 */
static __inline__ void st_le32(volatile __u32 *addr, const __u32 val)
{
	__asm__ __volatile__ ("stwbrx %1,0,%2" : "=m" (*addr) : "r" (val), "r" (addr));
}
  42 +
/*
 * Byte-swap a 16-bit value entirely in registers (no memory access).
 *
 * The "0" matching constraint seeds %0 with value >> 8, i.e. the high
 * byte already moved down; the single rlwimi (rotate-left-word-
 * immediate-then-mask-insert) then rotates value left by 8 and inserts
 * its original low byte into bits 16..23 (IBM bit numbering), which is
 * the high byte of the low halfword — completing the swap.
 */
static __inline__ __attribute_const__ __u16 ___arch__swab16(__u16 value)
{
	__u16 result;

	__asm__("rlwimi %0,%1,8,16,23"
	    : "=r" (result)
	    : "r" (value), "0" (value >> 8));
	return result;
}
  52 +
/*
 * Byte-swap a 32-bit value entirely in registers (no memory access).
 *
 * The "0" matching constraint seeds %0 with value >> 24, placing the
 * original high byte in the lowest byte position.  The three rlwimi
 * (rotate-left-then-mask-insert) instructions then insert each of the
 * remaining three bytes of value, rotated into its mirrored position
 * (IBM bit-numbered fields 16..23, 8..15 and 0..7), completing the
 * byte reversal.
 */
static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 value)
{
	__u32 result;

	__asm__("rlwimi %0,%1,24,16,23\n\t"
	    "rlwimi %0,%1,8,8,15\n\t"
	    "rlwimi %0,%1,24,0,7"
	    : "=r" (result)
	    : "r" (value), "0" (value >> 24));
	return result;
}
  64 +
/* Hook the generic linux/byteorder machinery up to the helpers above. */
#define __arch__swab16(x) ___arch__swab16(x)
#define __arch__swab32(x) ___arch__swab32(x)

/* The same, but returns the converted value from the location pointed to by addr. */
#define __arch__swab16p(addr) ld_le16(addr)
#define __arch__swab32p(addr) ld_le32(addr)

/* The same, but do the conversion in situ, ie. put the value back to addr. */
#define __arch__swab16s(addr) st_le16(addr,*addr)
#define __arch__swab32s(addr) st_le32(addr,*addr)
  75 +
  76 +#endif /* __KERNEL__ */
  77 +
  78 +#ifndef __STRICT_ANSI__
  79 +#define __BYTEORDER_HAS_U64__
  80 +#ifndef __powerpc64__
  81 +#define __SWAB_64_THRU_32__
  82 +#endif /* __powerpc64__ */
  83 +#endif /* __STRICT_ANSI__ */
  84 +
  85 +#endif /* __GNUC__ */
  86 +
  87 +#include <linux/byteorder/big_endian.h>
  88 +
  89 +#endif /* _ASM_POWERPC_BYTEORDER_H */
include/asm-ppc/byteorder.h
1   -#ifndef _PPC_BYTEORDER_H
2   -#define _PPC_BYTEORDER_H
3   -
4   -#include <asm/types.h>
5   -#include <linux/compiler.h>
6   -
7   -#ifdef __GNUC__
8   -#ifdef __KERNEL__
9   -
10   -extern __inline__ unsigned ld_le16(const volatile unsigned short *addr)
11   -{
12   - unsigned val;
13   -
14   - __asm__ __volatile__ ("lhbrx %0,0,%1" : "=r" (val) : "r" (addr), "m" (*addr));
15   - return val;
16   -}
17   -
18   -extern __inline__ void st_le16(volatile unsigned short *addr, const unsigned val)
19   -{
20   - __asm__ __volatile__ ("sthbrx %1,0,%2" : "=m" (*addr) : "r" (val), "r" (addr));
21   -}
22   -
23   -extern __inline__ unsigned ld_le32(const volatile unsigned *addr)
24   -{
25   - unsigned val;
26   -
27   - __asm__ __volatile__ ("lwbrx %0,0,%1" : "=r" (val) : "r" (addr), "m" (*addr));
28   - return val;
29   -}
30   -
31   -extern __inline__ void st_le32(volatile unsigned *addr, const unsigned val)
32   -{
33   - __asm__ __volatile__ ("stwbrx %1,0,%2" : "=m" (*addr) : "r" (val), "r" (addr));
34   -}
35   -
36   -static __inline__ __attribute_const__ __u16 ___arch__swab16(__u16 value)
37   -{
38   - __u16 result;
39   -
40   - __asm__("rlwimi %0,%2,8,16,23" : "=&r" (result) : "0" (value >> 8), "r" (value));
41   - return result;
42   -}
43   -
44   -static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 value)
45   -{
46   - __u32 result;
47   -
48   - __asm__("rlwimi %0,%2,24,16,23" : "=&r" (result) : "0" (value>>24), "r" (value));
49   - __asm__("rlwimi %0,%2,8,8,15" : "=&r" (result) : "0" (result), "r" (value));
50   - __asm__("rlwimi %0,%2,24,0,7" : "=&r" (result) : "0" (result), "r" (value));
51   -
52   - return result;
53   -}
54   -#define __arch__swab32(x) ___arch__swab32(x)
55   -#define __arch__swab16(x) ___arch__swab16(x)
56   -
57   -/* The same, but returns converted value from the location pointer by addr. */
58   -#define __arch__swab16p(addr) ld_le16(addr)
59   -#define __arch__swab32p(addr) ld_le32(addr)
60   -
61   -/* The same, but do the conversion in situ, ie. put the value back to addr. */
62   -#define __arch__swab16s(addr) st_le16(addr,*addr)
63   -#define __arch__swab32s(addr) st_le32(addr,*addr)
64   -
65   -#endif /* __KERNEL__ */
66   -
67   -#if !defined(__STRICT_ANSI__) || defined(__KERNEL__)
68   -# define __BYTEORDER_HAS_U64__
69   -# define __SWAB_64_THRU_32__
70   -#endif
71   -
72   -#endif /* __GNUC__ */
73   -
74   -#include <linux/byteorder/big_endian.h>
75   -
76   -#endif /* _PPC_BYTEORDER_H */
include/asm-ppc64/byteorder.h
1   -#ifndef _PPC64_BYTEORDER_H
2   -#define _PPC64_BYTEORDER_H
3   -
4   -/*
5   - * This program is free software; you can redistribute it and/or
6   - * modify it under the terms of the GNU General Public License
7   - * as published by the Free Software Foundation; either version
8   - * 2 of the License, or (at your option) any later version.
9   - */
10   -
11   -#include <asm/types.h>
12   -#include <linux/compiler.h>
13   -
14   -#ifdef __GNUC__
15   -#ifdef __KERNEL__
16   -
17   -static __inline__ __u16 ld_le16(const volatile __u16 *addr)
18   -{
19   - __u16 val;
20   -
21   - __asm__ __volatile__ ("lhbrx %0,0,%1" : "=r" (val) : "r" (addr), "m" (*addr));
22   - return val;
23   -}
24   -
25   -static __inline__ void st_le16(volatile __u16 *addr, const __u16 val)
26   -{
27   - __asm__ __volatile__ ("sthbrx %1,0,%2" : "=m" (*addr) : "r" (val), "r" (addr));
28   -}
29   -
30   -static __inline__ __u32 ld_le32(const volatile __u32 *addr)
31   -{
32   - __u32 val;
33   -
34   - __asm__ __volatile__ ("lwbrx %0,0,%1" : "=r" (val) : "r" (addr), "m" (*addr));
35   - return val;
36   -}
37   -
38   -static __inline__ void st_le32(volatile __u32 *addr, const __u32 val)
39   -{
40   - __asm__ __volatile__ ("stwbrx %1,0,%2" : "=m" (*addr) : "r" (val), "r" (addr));
41   -}
42   -
43   -static __inline__ __attribute_const__ __u16 ___arch__swab16(__u16 value)
44   -{
45   - __u16 result;
46   -
47   - __asm__("rlwimi %0,%1,8,16,23"
48   - : "=r" (result)
49   - : "r" (value), "0" (value >> 8));
50   - return result;
51   -}
52   -
53   -static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 value)
54   -{
55   - __u32 result;
56   -
57   - __asm__("rlwimi %0,%1,24,16,23\n\t"
58   - "rlwimi %0,%1,8,8,15\n\t"
59   - "rlwimi %0,%1,24,0,7"
60   - : "=r" (result)
61   - : "r" (value), "0" (value >> 24));
62   - return result;
63   -}
64   -
65   -#define __arch__swab16(x) ___arch__swab16(x)
66   -#define __arch__swab32(x) ___arch__swab32(x)
67   -
68   -/* The same, but returns converted value from the location pointer by addr. */
69   -#define __arch__swab16p(addr) ld_le16(addr)
70   -#define __arch__swab32p(addr) ld_le32(addr)
71   -
72   -/* The same, but do the conversion in situ, ie. put the value back to addr. */
73   -#define __arch__swab16s(addr) st_le16(addr,*addr)
74   -#define __arch__swab32s(addr) st_le32(addr,*addr)
75   -
76   -#endif /* __KERNEL__ */
77   -
78   -#ifndef __STRICT_ANSI__
79   -#define __BYTEORDER_HAS_U64__
80   -#endif
81   -
82   -#endif /* __GNUC__ */
83   -
84   -#include <linux/byteorder/big_endian.h>
85   -
86   -#endif /* _PPC64_BYTEORDER_H */