Commit 7fda20f146d5d217684ffbc37c6b6c5f82c2dffd
1 parent: d93c870bad
Exists in master and in 7 other branches
x86: spinlock ops are always-inlined
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Showing 1 changed file with 6 additions and 6 deletions
include/asm-x86/spinlock.h
@@ -78,7 +78,7 @@
         return (((tmp >> 8) & 0xff) - (tmp & 0xff)) > 1;
 }
 
-static inline void __raw_spin_lock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
         short inc = 0x0100;
 
@@ -99,7 +99,7 @@
 
 #define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
 
-static inline int __raw_spin_trylock(raw_spinlock_t *lock)
+static __always_inline int __raw_spin_trylock(raw_spinlock_t *lock)
 {
         int tmp;
         short new;
@@ -120,7 +120,7 @@
         return tmp;
 }
 
-static inline void __raw_spin_unlock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
         asm volatile(UNLOCK_LOCK_PREFIX "incb %0"
                      : "+m" (lock->slock)
@@ -142,7 +142,7 @@
         return (((tmp >> 16) & 0xffff) - (tmp & 0xffff)) > 1;
 }
 
-static inline void __raw_spin_lock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
         int inc = 0x00010000;
         int tmp;
@@ -165,7 +165,7 @@
 
 #define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
 
-static inline int __raw_spin_trylock(raw_spinlock_t *lock)
+static __always_inline int __raw_spin_trylock(raw_spinlock_t *lock)
 {
         int tmp;
         int new;
@@ -187,7 +187,7 @@
         return tmp;
 }
 
-static inline void __raw_spin_unlock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
         asm volatile(UNLOCK_LOCK_PREFIX "incw %0"
                      : "+m" (lock->slock)
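
The change is mechanical, but the reasoning deserves a note. Plain "static inline" is only a hint: gcc's inlining heuristics (especially under -Os or -O0) may still emit an out-of-line copy of the function and call it. "__always_inline" attaches gcc's always_inline function attribute, which forces the body to be expanded at every call site. The fragment below is a minimal standalone sketch of that distinction, assuming a gcc-compatible compiler; the fallback macro mirrors the spirit of the kernel's compiler headers rather than including them, and add_one()/caller() are made-up names for illustration.

/* sketch.c: build with "gcc -Os -c sketch.c" and inspect with
 * "objdump -d sketch.o" -- no out-of-line add_one() and no call
 * instruction should appear, only the inlined addition. */

/* Fallback in the spirit of the kernel's compiler headers: inline
 * plus the attribute that makes inlining mandatory, not advisory. */
#ifndef __always_inline
#define __always_inline inline __attribute__((__always_inline__))
#endif

/* With plain "static inline", gcc may still decide to emit this as
 * a real function and call it; __always_inline removes that choice. */
static __always_inline int add_one(int x)
{
        return x + 1;
}

int caller(int x)
{
        return add_one(x);      /* always expanded in place */
}

For lock primitives this small (a few instructions around a locked inc), an accidental out-of-line copy would add call/return overhead on some of the hottest paths in the kernel, which is presumably what this commit guards against.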