Commit ffbf670f5cd50501a34a5187981460da2216071e

Authored by Ingo Molnar
Committed by Ingo Molnar
1 parent f17578decc

[PATCH] mutex subsystem, add atomic_xchg() to all arches

add atomic_xchg() to all the architectures. Needed by the new mutex code.

Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Arjan van de Ven <arjan@infradead.org>
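
The mutex code that motivates this change relies on atomic_xchg() storing a new value into an atomic_t and handing back the previous value in one indivisible step, which is exactly what an xchg-based lock fast path needs. The following is only a minimal userspace sketch of that usage pattern, not the kernel mutex implementation: C11 atomics stand in for atomic_t/atomic_xchg(), and the names mutex_sketch, trylock_sketch and unlock_sketch are invented for illustration.

#include <stdatomic.h>
#include <stdio.h>

/* Stand-in for the kernel's atomic_t counter: 1 = unlocked, 0 = locked. */
struct mutex_sketch {
	atomic_int count;
};

/* xchg-style fast path: unconditionally store 0 and inspect the old value.
 * Old value 1 means we took the lock; anything else means it was already
 * held, and real mutex code would fall back to a slow path here. */
static int trylock_sketch(struct mutex_sketch *m)
{
	return atomic_exchange(&m->count, 0) == 1;
}

static void unlock_sketch(struct mutex_sketch *m)
{
	atomic_store(&m->count, 1);
}

int main(void)
{
	struct mutex_sketch m = { .count = 1 };

	printf("first trylock:  %d\n", trylock_sketch(&m));   /* prints 1 */
	printf("second trylock: %d\n", trylock_sketch(&m));   /* prints 0 */
	unlock_sketch(&m);
	return 0;
}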

Showing 22 changed files with 30 additions and 0 deletions

include/asm-alpha/atomic.h
@@ -176,6 +176,7 @@
 }
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 #define atomic_add_unless(v, a, u) \
 ({ \
include/asm-arm/atomic.h
@@ -175,6 +175,8 @@
 
 #endif /* __LINUX_ARM_ARCH__ */
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
include/asm-arm26/atomic.h
@@ -76,6 +76,8 @@
 	return ret;
 }
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
include/asm-cris/atomic.h
@@ -136,6 +136,8 @@
 	return ret;
 }
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
include/asm-frv/atomic.h
@@ -328,6 +328,7 @@
 #endif
 
 #define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 #define atomic_add_unless(v, a, u) \
 ({ \
include/asm-h8300/atomic.h
@@ -95,6 +95,8 @@
 	return ret;
 }
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
include/asm-i386/atomic.h
@@ -216,6 +216,7 @@
 }
 
 #define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
include/asm-ia64/atomic.h
@@ -89,6 +89,7 @@
 }
 
 #define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 #define atomic_add_unless(v, a, u) \
 ({ \
include/asm-m32r/atomic.h
@@ -243,6 +243,7 @@
 #define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
include/asm-m68k/atomic.h
@@ -140,6 +140,7 @@
 }
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 #define atomic_add_unless(v, a, u) \
 ({ \
include/asm-m68knommu/atomic.h
@@ -129,6 +129,7 @@
 }
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 #define atomic_add_unless(v, a, u) \
 ({ \
include/asm-mips/atomic.h
@@ -289,6 +289,7 @@
 }
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
include/asm-parisc/atomic.h
@@ -165,6 +165,7 @@
 
 /* exported interface */
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
include/asm-powerpc/atomic.h
@@ -165,6 +165,7 @@
 }
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
include/asm-s390/atomic.h
@@ -75,6 +75,8 @@
 	__CS_LOOP(v, mask, "or");
 }
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static __inline__ int atomic_cmpxchg(atomic_t *v, int old, int new)
 {
 	__asm__ __volatile__(" cs %0,%3,0(%2)\n"
include/asm-sh/atomic.h
@@ -101,6 +101,8 @@
 	return ret;
 }
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
include/asm-sh64/atomic.h
@@ -113,6 +113,8 @@
 	return ret;
 }
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
include/asm-sparc/atomic.h
@@ -20,6 +20,7 @@
 
 extern int __atomic_add_return(int, atomic_t *);
 extern int atomic_cmpxchg(atomic_t *, int, int);
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 extern int atomic_add_unless(atomic_t *, int, int);
 extern void atomic_set(atomic_t *, int);
 
include/asm-sparc64/atomic.h
@@ -72,6 +72,7 @@
 #define atomic64_add_negative(i, v) (atomic64_add_ret(i, v) < 0)
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 #define atomic_add_unless(v, a, u) \
 ({ \
include/asm-v850/atomic.h
@@ -104,6 +104,8 @@
 	return ret;
 }
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
include/asm-x86_64/atomic.h
@@ -389,6 +389,7 @@
 #define atomic64_dec_return(v) (atomic64_sub_return(1,v))
 
 #define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
include/asm-xtensa/atomic.h
@@ -224,6 +224,7 @@
 #define atomic_add_negative(i,v) (atomic_add_return((i),(v)) < 0)
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
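
Every hunk above adds the same one-line macro: atomic_xchg(v, new) simply forwards to the architecture's existing xchg() primitive applied to the counter field of atomic_t, so the previous counter value comes back and the new one is stored in a single atomic step. A rough userspace analogue of that semantic, for illustration only (fake_atomic_t and fake_atomic_xchg are invented names; the real atomic_t and xchg() exist only inside the kernel):

#include <stdatomic.h>
#include <assert.h>

/* Invented analogue of atomic_t and the atomic_xchg() macro added above,
 * written with C11 atomics purely to show the return-old-value semantic. */
typedef struct { atomic_int counter; } fake_atomic_t;

#define fake_atomic_xchg(v, new_val) \
	atomic_exchange(&((v)->counter), (new_val))

int main(void)
{
	fake_atomic_t v = { .counter = 5 };

	int old = fake_atomic_xchg(&v, 7);  /* stores 7, hands back the old 5 */
	assert(old == 5);
	assert(atomic_load(&v.counter) == 7);
	return 0;
}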