Commit d3cb487149bd706aa6aeb02042332a450978dc1c

Authored by Christoph Lameter
Committed by Linus Torvalds
1 parent 070f80326a

[PATCH] atomic_long_t & include/asm-generic/atomic.h V2

Several counters already have the need to use 64 atomic variables on 64 bit
platforms (see mm_counter_t in sched.h).  We have to do ugly ifdefs to fall
back to 32 bit atomic on 32 bit platforms.

The VM statistics patch that I am working on will also make more extensive
use of atomic64.

This patch introduces a new type atomic_long_t by providing definitions in
asm-generic/atomic.h that work similarly to the C "long" type.  It is 32 bits
on 32 bit platforms and 64 bits on 64 bit platforms.

Also cleans up the determination of the mm_counter_t in sched.h.

Signed-off-by: Christoph Lameter <clameter@sgi.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>

Showing 24 changed files with 144 additions and 19 deletions Side-by-side Diff

include/asm-alpha/atomic.h
... ... @@ -216,5 +216,6 @@
216 216 #define smp_mb__before_atomic_inc() smp_mb()
217 217 #define smp_mb__after_atomic_inc() smp_mb()
218 218  
  219 +#include <asm-generic/atomic.h>
219 220 #endif /* _ALPHA_ATOMIC_H */
include/asm-arm/atomic.h
... ... @@ -205,6 +205,7 @@
205 205 #define smp_mb__before_atomic_inc() barrier()
206 206 #define smp_mb__after_atomic_inc() barrier()
207 207  
  208 +#include <asm-generic/atomic.h>
208 209 #endif
209 210 #endif
include/asm-arm26/atomic.h
... ... @@ -118,6 +118,7 @@
118 118 #define smp_mb__before_atomic_inc() barrier()
119 119 #define smp_mb__after_atomic_inc() barrier()
120 120  
  121 +#include <asm-generic/atomic.h>
121 122 #endif
122 123 #endif
include/asm-cris/atomic.h
... ... @@ -156,5 +156,6 @@
156 156 #define smp_mb__before_atomic_inc() barrier()
157 157 #define smp_mb__after_atomic_inc() barrier()
158 158  
  159 +#include <asm-generic/atomic.h>
159 160 #endif
include/asm-frv/atomic.h
... ... @@ -426,5 +426,6 @@
426 426 })
427 427 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
428 428  
  429 +#include <asm-generic/atomic.h>
429 430 #endif /* _ASM_ATOMIC_H */
include/asm-generic/atomic.h
  1 +#ifndef _ASM_GENERIC_ATOMIC_H
  2 +#define _ASM_GENERIC_ATOMIC_H
  3 +/*
  4 + * Copyright (C) 2005 Silicon Graphics, Inc.
  5 + * Christoph Lameter <clameter@sgi.com>
  6 + *
  7 + * Allows providing arch independent atomic definitions without the need to
  8 + * edit all arch specific atomic.h files.
  9 + */
  10 +
  11 +
  12 +/*
  13 + * Support for atomic_long_t
  14 + *
  15 + * Casts for parameters are avoided for existing atomic functions in order to
  16 + * avoid issues with cast-as-lval under gcc 4.x and other limitations that the
  17 + * macros of a platform may have.
  18 + */
  19 +
  20 +#if BITS_PER_LONG == 64
  21 +
  22 +typedef atomic64_t atomic_long_t;
  23 +
  24 +#define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
  25 +
  26 +static inline long atomic_long_read(atomic_long_t *l)
  27 +{
  28 + atomic64_t *v = (atomic64_t *)l;
  29 +
  30 + return (long)atomic64_read(v);
  31 +}
  32 +
  33 +static inline void atomic_long_set(atomic_long_t *l, long i)
  34 +{
  35 + atomic64_t *v = (atomic64_t *)l;
  36 +
  37 + atomic64_set(v, i);
  38 +}
  39 +
  40 +static inline void atomic_long_inc(atomic_long_t *l)
  41 +{
  42 + atomic64_t *v = (atomic64_t *)l;
  43 +
  44 + atomic64_inc(v);
  45 +}
  46 +
  47 +static inline void atomic_long_dec(atomic_long_t *l)
  48 +{
  49 + atomic64_t *v = (atomic64_t *)l;
  50 +
  51 + atomic64_dec(v);
  52 +}
  53 +
  54 +static inline void atomic_long_add(long i, atomic_long_t *l)
  55 +{
  56 + atomic64_t *v = (atomic64_t *)l;
  57 +
  58 + atomic64_add(i, v);
  59 +}
  60 +
  61 +static inline void atomic_long_sub(long i, atomic_long_t *l)
  62 +{
  63 + atomic64_t *v = (atomic64_t *)l;
  64 +
  65 + atomic64_sub(i, v);
  66 +}
  67 +
  68 +#else
  69 +
  70 +typedef atomic_t atomic_long_t;
  71 +
  72 +#define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
  73 +static inline long atomic_long_read(atomic_long_t *l)
  74 +{
  75 + atomic_t *v = (atomic_t *)l;
  76 +
  77 + return (long)atomic_read(v);
  78 +}
  79 +
  80 +static inline void atomic_long_set(atomic_long_t *l, long i)
  81 +{
  82 + atomic_t *v = (atomic_t *)l;
  83 +
  84 + atomic_set(v, i);
  85 +}
  86 +
  87 +static inline void atomic_long_inc(atomic_long_t *l)
  88 +{
  89 + atomic_t *v = (atomic_t *)l;
  90 +
  91 + atomic_inc(v);
  92 +}
  93 +
  94 +static inline void atomic_long_dec(atomic_long_t *l)
  95 +{
  96 + atomic_t *v = (atomic_t *)l;
  97 +
  98 + atomic_dec(v);
  99 +}
  100 +
  101 +static inline void atomic_long_add(long i, atomic_long_t *l)
  102 +{
  103 + atomic_t *v = (atomic_t *)l;
  104 +
  105 + atomic_add(i, v);
  106 +}
  107 +
  108 +static inline void atomic_long_sub(long i, atomic_long_t *l)
  109 +{
  110 + atomic_t *v = (atomic_t *)l;
  111 +
  112 + atomic_sub(i, v);
  113 +}
  114 +
  115 +#endif
  116 +#endif
include/asm-h8300/atomic.h
... ... @@ -137,5 +137,6 @@
137 137 #define smp_mb__before_atomic_inc() barrier()
138 138 #define smp_mb__after_atomic_inc() barrier()
139 139  
  140 +#include <asm-generic/atomic.h>
140 141 #endif /* __ARCH_H8300_ATOMIC __ */
include/asm-i386/atomic.h
... ... @@ -254,5 +254,6 @@
254 254 #define smp_mb__before_atomic_inc() barrier()
255 255 #define smp_mb__after_atomic_inc() barrier()
256 256  
  257 +#include <asm-generic/atomic.h>
257 258 #endif
include/asm-ia64/atomic.h
... ... @@ -192,5 +192,6 @@
192 192 #define smp_mb__before_atomic_inc() barrier()
193 193 #define smp_mb__after_atomic_inc() barrier()
194 194  
  195 +#include <asm-generic/atomic.h>
195 196 #endif /* _ASM_IA64_ATOMIC_H */
include/asm-m32r/atomic.h
... ... @@ -313,5 +313,6 @@
313 313 #define smp_mb__before_atomic_inc() barrier()
314 314 #define smp_mb__after_atomic_inc() barrier()
315 315  
  316 +#include <asm-generic/atomic.h>
316 317 #endif /* _ASM_M32R_ATOMIC_H */
include/asm-m68k/atomic.h
... ... @@ -157,5 +157,6 @@
157 157 #define smp_mb__before_atomic_inc() barrier()
158 158 #define smp_mb__after_atomic_inc() barrier()
159 159  
  160 +#include <asm-generic/atomic.h>
160 161 #endif /* __ARCH_M68K_ATOMIC __ */
include/asm-m68knommu/atomic.h
... ... @@ -143,5 +143,6 @@
143 143 #define atomic_dec_return(v) atomic_sub_return(1,(v))
144 144 #define atomic_inc_return(v) atomic_add_return(1,(v))
145 145  
  146 +#include <asm-generic/atomic.h>
146 147 #endif /* __ARCH_M68KNOMMU_ATOMIC __ */
include/asm-mips/atomic.h
... ... @@ -713,5 +713,6 @@
713 713 #define smp_mb__before_atomic_inc() smp_mb()
714 714 #define smp_mb__after_atomic_inc() smp_mb()
715 715  
  716 +#include <asm-generic/atomic.h>
716 717 #endif /* _ASM_ATOMIC_H */
include/asm-parisc/atomic.h
... ... @@ -216,5 +216,6 @@
216 216 #define smp_mb__before_atomic_inc() smp_mb()
217 217 #define smp_mb__after_atomic_inc() smp_mb()
218 218  
  219 +#include <asm-generic/atomic.h>
219 220 #endif
include/asm-powerpc/atomic.h
... ... @@ -402,6 +402,7 @@
402 402  
403 403 #endif /* __powerpc64__ */
404 404  
  405 +#include <asm-generic/atomic.h>
405 406 #endif /* __KERNEL__ */
406 407 #endif /* _ASM_POWERPC_ATOMIC_H_ */
include/asm-s390/atomic.h
... ... @@ -215,6 +215,7 @@
215 215 #define smp_mb__before_atomic_inc() smp_mb()
216 216 #define smp_mb__after_atomic_inc() smp_mb()
217 217  
  218 +#include <asm-generic/atomic.h>
218 219 #endif /* __KERNEL__ */
219 220 #endif /* __ARCH_S390_ATOMIC__ */
include/asm-sh/atomic.h
... ... @@ -140,5 +140,6 @@
140 140 #define smp_mb__before_atomic_inc() barrier()
141 141 #define smp_mb__after_atomic_inc() barrier()
142 142  
  143 +#include <asm-generic/atomic.h>
143 144 #endif /* __ASM_SH_ATOMIC_H */
include/asm-sh64/atomic.h
... ... @@ -152,5 +152,6 @@
152 152 #define smp_mb__before_atomic_inc() barrier()
153 153 #define smp_mb__after_atomic_inc() barrier()
154 154  
  155 +#include <asm-generic/atomic.h>
155 156 #endif /* __ASM_SH64_ATOMIC_H */
include/asm-sparc/atomic.h
... ... @@ -159,5 +159,6 @@
159 159  
160 160 #endif /* !(__KERNEL__) */
161 161  
  162 +#include <asm-generic/atomic.h>
162 163 #endif /* !(__ARCH_SPARC_ATOMIC__) */
include/asm-sparc64/atomic.h
... ... @@ -96,5 +96,6 @@
96 96 #define smp_mb__after_atomic_inc() barrier()
97 97 #endif
98 98  
  99 +#include <asm-generic/atomic.h>
99 100 #endif /* !(__ARCH_SPARC64_ATOMIC__) */
include/asm-v850/atomic.h
... ... @@ -126,5 +126,6 @@
126 126 #define smp_mb__before_atomic_inc() barrier()
127 127 #define smp_mb__after_atomic_inc() barrier()
128 128  
  129 +#include <asm-generic/atomic.h>
129 130 #endif /* __V850_ATOMIC_H__ */
include/asm-x86_64/atomic.h
... ... @@ -424,5 +424,6 @@
424 424 #define smp_mb__before_atomic_inc() barrier()
425 425 #define smp_mb__after_atomic_inc() barrier()
426 426  
  427 +#include <asm-generic/atomic.h>
427 428 #endif
include/asm-xtensa/atomic.h
... ... @@ -286,6 +286,7 @@
286 286 #define smp_mb__before_atomic_inc() barrier()
287 287 #define smp_mb__after_atomic_inc() barrier()
288 288  
  289 +#include <asm-generic/atomic.h>
289 290 #endif /* __KERNEL__ */
290 291  
291 292 #endif /* _XTENSA_ATOMIC_H */
include/linux/sched.h
... ... @@ -254,25 +254,12 @@
254 254 * The mm counters are not protected by its page_table_lock,
255 255 * so must be incremented atomically.
256 256 */
257   -#ifdef ATOMIC64_INIT
258   -#define set_mm_counter(mm, member, value) atomic64_set(&(mm)->_##member, value)
259   -#define get_mm_counter(mm, member) ((unsigned long)atomic64_read(&(mm)->_##member))
260   -#define add_mm_counter(mm, member, value) atomic64_add(value, &(mm)->_##member)
261   -#define inc_mm_counter(mm, member) atomic64_inc(&(mm)->_##member)
262   -#define dec_mm_counter(mm, member) atomic64_dec(&(mm)->_##member)
263   -typedef atomic64_t mm_counter_t;
264   -#else /* !ATOMIC64_INIT */
265   -/*
266   - * The counters wrap back to 0 at 2^32 * PAGE_SIZE,
267   - * that is, at 16TB if using 4kB page size.
268   - */
269   -#define set_mm_counter(mm, member, value) atomic_set(&(mm)->_##member, value)
270   -#define get_mm_counter(mm, member) ((unsigned long)atomic_read(&(mm)->_##member))
271   -#define add_mm_counter(mm, member, value) atomic_add(value, &(mm)->_##member)
272   -#define inc_mm_counter(mm, member) atomic_inc(&(mm)->_##member)
273   -#define dec_mm_counter(mm, member) atomic_dec(&(mm)->_##member)
274   -typedef atomic_t mm_counter_t;
275   -#endif /* !ATOMIC64_INIT */
  257 +#define set_mm_counter(mm, member, value) atomic_long_set(&(mm)->_##member, value)
  258 +#define get_mm_counter(mm, member) ((unsigned long)atomic_long_read(&(mm)->_##member))
  259 +#define add_mm_counter(mm, member, value) atomic_long_add(value, &(mm)->_##member)
  260 +#define inc_mm_counter(mm, member) atomic_long_inc(&(mm)->_##member)
  261 +#define dec_mm_counter(mm, member) atomic_long_dec(&(mm)->_##member)
  262 +typedef atomic_long_t mm_counter_t;
276 263  
277 264 #else /* NR_CPUS < CONFIG_SPLIT_PTLOCK_CPUS */
278 265 /*