Commit ef6edc9746dc2bfdacf44eefd5f881179971c478

Authored by Martin Schwidefsky
Committed by Linus Torvalds
1 parent e3e5fc91d9

[PATCH] Directed yield: cpu_relax variants for spinlocks and rw-locks

On systems running with virtual cpus there is optimization potential in
regard to spinlocks and rw-locks.  If the virtual cpu that has taken a lock
is known to a cpu that wants to acquire the same lock it is beneficial to
yield the timeslice of the virtual cpu in favour of the cpu that has the
lock (directed yield).

With CONFIG_PREEMPT="n" this can be implemented by the architecture without
common code changes.  Powerpc already does this.

With CONFIG_PREEMPT="y" the lock loops are coded with _raw_spin_trylock,
_raw_read_trylock and _raw_write_trylock in kernel/spinlock.c.  If the lock
could not be taken cpu_relax is called.  A directed yield is not possible
because cpu_relax doesn't know anything about the lock.  To be able to
yield the lock in favour of the current lock holder variants of cpu_relax
for spinlocks and rw-locks are needed.  The new _raw_spin_relax,
_raw_read_relax and _raw_write_relax primitives differ from cpu_relax
insofar as they have an argument: a pointer to the lock structure.

Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
Cc: Ingo Molnar <mingo@elte.hu>
Cc: Paul Mackerras <paulus@samba.org>
Cc: Haavard Skinnemoen <hskinnemoen@atmel.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>

Showing 16 changed files with 62 additions and 2 deletions

include/asm-alpha/spinlock.h
... ... @@ -166,5 +166,9 @@
166 166 lock->lock = 0;
167 167 }
168 168  
  169 +#define _raw_spin_relax(lock) cpu_relax()
  170 +#define _raw_read_relax(lock) cpu_relax()
  171 +#define _raw_write_relax(lock) cpu_relax()
  172 +
169 173 #endif /* _ALPHA_SPINLOCK_H */
include/asm-arm/spinlock.h
... ... @@ -218,5 +218,9 @@
218 218 /* read_can_lock - would read_trylock() succeed? */
219 219 #define __raw_read_can_lock(x) ((x)->lock < 0x80000000)
220 220  
  221 +#define _raw_spin_relax(lock) cpu_relax()
  222 +#define _raw_read_relax(lock) cpu_relax()
  223 +#define _raw_write_relax(lock) cpu_relax()
  224 +
221 225 #endif /* __ASM_SPINLOCK_H */
include/asm-cris/arch-v32/spinlock.h
... ... @@ -160,5 +160,9 @@
160 160 return rw->counter < 0;
161 161 }
162 162  
  163 +#define _raw_spin_relax(lock) cpu_relax()
  164 +#define _raw_read_relax(lock) cpu_relax()
  165 +#define _raw_write_relax(lock) cpu_relax()
  166 +
163 167 #endif /* __ASM_ARCH_SPINLOCK_H */
include/asm-i386/spinlock.h
... ... @@ -205,5 +205,9 @@
205 205 : "+m" (rw->lock) : : "memory");
206 206 }
207 207  
  208 +#define _raw_spin_relax(lock) cpu_relax()
  209 +#define _raw_read_relax(lock) cpu_relax()
  210 +#define _raw_write_relax(lock) cpu_relax()
  211 +
208 212 #endif /* __ASM_SPINLOCK_H */
include/asm-ia64/spinlock.h
... ... @@ -213,5 +213,9 @@
213 213 return (u32)ia64_cmpxchg4_acq((__u32 *)(x), new.word, old.word) == old.word;
214 214 }
215 215  
  216 +#define _raw_spin_relax(lock) cpu_relax()
  217 +#define _raw_read_relax(lock) cpu_relax()
  218 +#define _raw_write_relax(lock) cpu_relax()
  219 +
216 220 #endif /* _ASM_IA64_SPINLOCK_H */
include/asm-m32r/spinlock.h
... ... @@ -316,5 +316,9 @@
316 316 return 0;
317 317 }
318 318  
  319 +#define _raw_spin_relax(lock) cpu_relax()
  320 +#define _raw_read_relax(lock) cpu_relax()
  321 +#define _raw_write_relax(lock) cpu_relax()
  322 +
319 323 #endif /* _ASM_M32R_SPINLOCK_H */
include/asm-mips/spinlock.h
... ... @@ -328,5 +328,9 @@
328 328 }
329 329  
330 330  
  331 +#define _raw_spin_relax(lock) cpu_relax()
  332 +#define _raw_read_relax(lock) cpu_relax()
  333 +#define _raw_write_relax(lock) cpu_relax()
  334 +
331 335 #endif /* _ASM_SPINLOCK_H */
include/asm-parisc/spinlock.h
... ... @@ -152,5 +152,9 @@
152 152 return !rw->counter;
153 153 }
154 154  
  155 +#define _raw_spin_relax(lock) cpu_relax()
  156 +#define _raw_read_relax(lock) cpu_relax()
  157 +#define _raw_write_relax(lock) cpu_relax()
  158 +
155 159 #endif /* __ASM_SPINLOCK_H */
include/asm-powerpc/spinlock.h
... ... @@ -285,6 +285,10 @@
285 285 rw->lock = 0;
286 286 }
287 287  
  288 +#define _raw_spin_relax(lock) cpu_relax()
  289 +#define _raw_read_relax(lock) cpu_relax()
  290 +#define _raw_write_relax(lock) cpu_relax()
  291 +
288 292 #endif /* __KERNEL__ */
289 293 #endif /* __ASM_SPINLOCK_H */
include/asm-ppc/spinlock.h
... ... @@ -161,5 +161,9 @@
161 161 rw->lock = 0;
162 162 }
163 163  
  164 +#define _raw_spin_relax(lock) cpu_relax()
  165 +#define _raw_read_relax(lock) cpu_relax()
  166 +#define _raw_write_relax(lock) cpu_relax()
  167 +
164 168 #endif /* __ASM_SPINLOCK_H */
include/asm-s390/spinlock.h
... ... @@ -154,5 +154,9 @@
154 154 return _raw_write_trylock_retry(rw);
155 155 }
156 156  
  157 +#define _raw_spin_relax(lock) cpu_relax()
  158 +#define _raw_read_relax(lock) cpu_relax()
  159 +#define _raw_write_relax(lock) cpu_relax()
  160 +
157 161 #endif /* __ASM_SPINLOCK_H */
include/asm-sh/spinlock.h
... ... @@ -100,5 +100,9 @@
100 100 return 0;
101 101 }
102 102  
  103 +#define _raw_spin_relax(lock) cpu_relax()
  104 +#define _raw_read_relax(lock) cpu_relax()
  105 +#define _raw_write_relax(lock) cpu_relax()
  106 +
103 107 #endif /* __ASM_SH_SPINLOCK_H */
include/asm-sparc/spinlock.h
... ... @@ -154,6 +154,10 @@
154 154 #define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
155 155 #define __raw_read_trylock(lock) generic__raw_read_trylock(lock)
156 156  
  157 +#define _raw_spin_relax(lock) cpu_relax()
  158 +#define _raw_read_relax(lock) cpu_relax()
  159 +#define _raw_write_relax(lock) cpu_relax()
  160 +
157 161 #define __raw_read_can_lock(rw) (!((rw)->lock & 0xff))
158 162 #define __raw_write_can_lock(rw) (!(rw)->lock)
159 163  
include/asm-sparc64/spinlock.h
... ... @@ -241,6 +241,10 @@
241 241 #define __raw_read_can_lock(rw) (!((rw)->lock & 0x80000000UL))
242 242 #define __raw_write_can_lock(rw) (!(rw)->lock)
243 243  
  244 +#define _raw_spin_relax(lock) cpu_relax()
  245 +#define _raw_read_relax(lock) cpu_relax()
  246 +#define _raw_write_relax(lock) cpu_relax()
  247 +
244 248 #endif /* !(__ASSEMBLY__) */
245 249  
246 250 #endif /* !(__SPARC64_SPINLOCK_H) */
include/asm-x86_64/spinlock.h
... ... @@ -133,5 +133,9 @@
133 133 : "=m" (rw->lock) : : "memory");
134 134 }
135 135  
  136 +#define _raw_spin_relax(lock) cpu_relax()
  137 +#define _raw_read_relax(lock) cpu_relax()
  138 +#define _raw_write_relax(lock) cpu_relax()
  139 +
136 140 #endif /* __ASM_SPINLOCK_H */
kernel/spinlock.c
... ... @@ -215,7 +215,7 @@
215 215 if (!(lock)->break_lock) \
216 216 (lock)->break_lock = 1; \
217 217 while (!op##_can_lock(lock) && (lock)->break_lock) \
218   - cpu_relax(); \
  218 + _raw_##op##_relax(&lock->raw_lock); \
219 219 } \
220 220 (lock)->break_lock = 0; \
221 221 } \
... ... @@ -237,7 +237,7 @@
237 237 if (!(lock)->break_lock) \
238 238 (lock)->break_lock = 1; \
239 239 while (!op##_can_lock(lock) && (lock)->break_lock) \
240   - cpu_relax(); \
  240 + _raw_##op##_relax(&lock->raw_lock); \
241 241 } \
242 242 (lock)->break_lock = 0; \
243 243 return flags; \