Commit 841be8ddf92578e5b481ed9f9abb85649fc13238

Authored by Andi Kleen
Committed by Linus Torvalds
1 parent ceee882230

[PATCH] x86_64: Remove alternative_smp

The .fill causes miscompilations with some binutils versions.

Instead just patch the lock prefix in the lock constructs. That is the
majority of the cost and should be good enough.

Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>

Showing 2 changed files with 4 additions and 27 deletions (side-by-side diff)

include/asm-x86_64/alternative.h
... ... @@ -103,9 +103,6 @@
103 103 /*
104 104 * Alternative inline assembly for SMP.
105 105 *
106   - * alternative_smp() takes two versions (SMP first, UP second) and is
107   - * for more complex stuff such as spinlocks.
108   - *
109 106 * The LOCK_PREFIX macro defined here replaces the LOCK and
110 107 * LOCK_PREFIX macros used everywhere in the source tree.
111 108 *
... ... @@ -125,21 +122,6 @@
125 122 */
126 123  
127 124 #ifdef CONFIG_SMP
128   -#define alternative_smp(smpinstr, upinstr, args...) \
129   - asm volatile ("661:\n\t" smpinstr "\n662:\n" \
130   - ".section .smp_altinstructions,\"a\"\n" \
131   - " .align 8\n" \
132   - " .quad 661b\n" /* label */ \
133   - " .quad 663f\n" /* new instruction */ \
134   - " .byte " __stringify(X86_FEATURE_UP) "\n" \
135   - " .byte 662b-661b\n" /* sourcelen */ \
136   - " .byte 664f-663f\n" /* replacementlen */ \
137   - ".previous\n" \
138   - ".section .smp_altinstr_replacement,\"awx\"\n" \
139   - "663:\n\t" upinstr "\n" /* replacement */ \
140   - "664:\n\t.fill 662b-661b,1,0x42\n" /* space for original */ \
141   - ".previous" : args)
142   -
143 125 #define LOCK_PREFIX \
144 126 ".section .smp_locks,\"a\"\n" \
145 127 " .align 8\n" \
... ... @@ -148,8 +130,6 @@
148 130 "661:\n\tlock; "
149 131  
150 132 #else /* ! CONFIG_SMP */
151   -#define alternative_smp(smpinstr, upinstr, args...) \
152   - asm volatile (upinstr : args)
153 133 #define LOCK_PREFIX ""
154 134 #endif
155 135  
include/asm-x86_64/spinlock.h
... ... @@ -21,7 +21,7 @@
21 21  
22 22 #define __raw_spin_lock_string \
23 23 "\n1:\t" \
24   - "lock ; decl %0\n\t" \
  24 + LOCK_PREFIX " ; decl %0\n\t" \
25 25 "js 2f\n" \
26 26 LOCK_SECTION_START("") \
27 27 "2:\t" \
... ... @@ -40,10 +40,7 @@
40 40  
41 41 static inline void __raw_spin_lock(raw_spinlock_t *lock)
42 42 {
43   - alternative_smp(
44   - __raw_spin_lock_string,
45   - __raw_spin_lock_string_up,
46   - "=m" (lock->slock) : : "memory");
  43 + asm volatile(__raw_spin_lock_string : "=m" (lock->slock) : : "memory");
47 44 }
48 45  
49 46 #define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
50 47  
... ... @@ -125,12 +122,12 @@
125 122  
126 123 static inline void __raw_read_unlock(raw_rwlock_t *rw)
127 124 {
128   - asm volatile("lock ; incl %0" :"=m" (rw->lock) : : "memory");
  125 + asm volatile(LOCK_PREFIX " ; incl %0" :"=m" (rw->lock) : : "memory");
129 126 }
130 127  
131 128 static inline void __raw_write_unlock(raw_rwlock_t *rw)
132 129 {
133   - asm volatile("lock ; addl $" RW_LOCK_BIAS_STR ",%0"
  130 + asm volatile(LOCK_PREFIX " ; addl $" RW_LOCK_BIAS_STR ",%0"
134 131 : "=m" (rw->lock) : : "memory");
135 132 }
136 133