Commit 715b49ef2de6fcead0776d9349071670282faf65

Authored by Alan Cox
Committed by Linus Torvalds
1 parent 3213e913b0

[PATCH] EDAC: atomic scrub operations

EDAC requires a way to scrub memory if an ECC error is found and the chipset
does not do the work automatically.  That means rewriting memory locations
atomically with respect to all CPUs _and_ bus masters, which means we cannot
use atomic_add(foo, 0): on non-SMP builds it is optimised to a plain
(unlocked) add, losing atomicity with respect to bus masters.

This adds a function, atomic_scrub(), to include/asm-foo/atomic.h for the
currently supported platforms; it implements a scrub of a mapped block.

It also adjusts the include order in a few other files where atomic.h was
included before types.h, since this now causes an error because
atomic_scrub() uses u32.

Signed-off-by: Alan Cox <alan@redhat.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
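
For illustration, a caller that has resolved an ECC error to a physical page
number might drive the new helper roughly as below.  This is a hedged sketch,
not code from this commit: edac_example_scrub_page is an invented name and
the sketch assumes the faulting page is in lowmem (no kmap handling).

/*
 * Hypothetical caller sketch -- not part of this commit.  The name
 * edac_example_scrub_page is invented; assumes a lowmem page.
 */
#include <linux/mm.h>
#include <asm/types.h>
#include <asm/atomic.h>

static void edac_example_scrub_page(unsigned long pfn, unsigned long offset,
				    u32 size)
{
	void *virt;

	/* The reported error may not be in RAM we manage */
	if (!pfn_valid(pfn))
		return;

	/* Lowmem pages have a permanent kernel mapping */
	virt = page_address(pfn_to_page(pfn));
	if (!virt)
		return;

	/* A locked read-modify-write of each word rewrites the data and
	 * forces the ECC check bits to be regenerated */
	atomic_scrub(virt + offset, size);
}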

Showing 7 changed files with 29 additions and 3 deletions

... ... @@ -8,6 +8,7 @@
8 8 * completion notification.
9 9 */
10 10  
  11 +#include <asm/types.h>
11 12 #include <asm/atomic.h>
12 13  
13 14 #include <linux/blkdev.h>
... ... @@ -5,6 +5,7 @@
5 5 *
6 6 */
7 7 #include <linux/config.h>
  8 +#include <linux/types.h>
8 9 #include <linux/file.h>
9 10 #include <linux/fs.h>
10 11 #include <linux/sunrpc/svc.h>
include/asm-i386/atomic.h
... ... @@ -255,6 +255,18 @@
255 255 #define smp_mb__before_atomic_inc() barrier()
256 256 #define smp_mb__after_atomic_inc() barrier()
257 257  
  258 +/* ECC atomic, DMA, SMP and interrupt safe scrub function */
  259 +
  260 +static __inline__ void atomic_scrub(unsigned long *virt_addr, u32 size)
  261 +{
  262 + u32 i;
  263 + for (i = 0; i < size / 4; i++, virt_addr++)
  264 + /* Very carefully read and write to memory atomically
  265 + * so we are interrupt, DMA and SMP safe.
  266 + */
  267 + __asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
  268 +}
  269 +
258 270 #include <asm-generic/atomic.h>
259 271 #endif
include/asm-x86_64/atomic.h
... ... @@ -426,6 +426,18 @@
426 426 #define smp_mb__before_atomic_inc() barrier()
427 427 #define smp_mb__after_atomic_inc() barrier()
428 428  
  429 +/* ECC atomic, DMA, SMP and interrupt safe scrub function */
  430 +
  431 +static __inline__ void atomic_scrub(u32 *virt_addr, u32 size)
  432 +{
  433 + u32 i;
  434 + for (i = 0; i < size / 4; i++, virt_addr++)
  435 + /* Very carefully read and write to memory atomically
  436 + * so we are interrupt, DMA and SMP safe.
  437 + */
  438 + __asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
  439 +}
  440 +
429 441 #include <asm-generic/atomic.h>
430 442 #endif
... ... @@ -42,8 +42,8 @@
42 42 */
43 43  
44 44 #include <linux/init.h>
45   -#include <asm/atomic.h>
46 45 #include <asm/types.h>
  46 +#include <asm/atomic.h>
47 47 #include <linux/mm.h>
48 48 #include <linux/module.h>
49 49 #include <linux/err.h>
... ... @@ -30,8 +30,8 @@
30 30 */
31 31  
32 32 #include <linux/init.h>
33   -#include <asm/atomic.h>
34 33 #include <asm/types.h>
  34 +#include <asm/atomic.h>
35 35 #include <linux/mm.h>
36 36 #include <linux/module.h>
37 37 #include <linux/mount.h>
... ... @@ -40,12 +40,12 @@
40 40 */
41 41  
42 42 #include <linux/config.h>
  43 +#include <linux/types.h>
43 44 #include <asm/atomic.h>
44 45 #include <asm/byteorder.h>
45 46 #include <asm/current.h>
46 47 #include <asm/uaccess.h>
47 48 #include <asm/ioctls.h>
48   -#include <linux/types.h>
49 49 #include <linux/stddef.h>
50 50 #include <linux/slab.h>
51 51 #include <linux/errno.h>
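
The x86 implementations above rely on "lock; addl $0, mem": a locked
read-modify-write that leaves the value unchanged but rewrites each word
atomically with respect to other CPUs, interrupts and DMA, so the memory
controller regenerates the ECC check bits.  Purely as an illustration of the
same idea (not part of this commit, and assuming a compiler that provides the
GCC __sync builtins), an architecture-neutral version could look like:

/* Illustration only -- not from this commit.  An atomic add of zero is a
 * locked read-modify-write of each 32-bit word, so the data is written
 * back and the ECC check bits are regenerated. */
#include <stdint.h>

static inline void scrub_block_sketch(uint32_t *virt_addr, uint32_t size)
{
	uint32_t i;

	for (i = 0; i < size / 4; i++, virt_addr++)
		(void)__sync_fetch_and_add(virt_addr, 0);
}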