include/linux/spinlock_api_smp.h
#ifndef __LINUX_SPINLOCK_API_SMP_H
#define __LINUX_SPINLOCK_API_SMP_H

#ifndef __LINUX_SPINLOCK_H
# error "please don't include this file directly"
#endif

/*
 * include/linux/spinlock_api_smp.h
 *
 * spinlock API declarations on SMP (and debug)
 * (implemented in kernel/spinlock.c)
 *
 * portions Copyright 2005, Red Hat, Inc., Ingo Molnar
 * Released under the General Public License (GPL).
 */

int in_lock_functions(unsigned long addr);

#define assert_raw_spin_locked(x)	BUG_ON(!raw_spin_is_locked(x))

void __lockfunc _raw_spin_lock(raw_spinlock_t *lock)		__acquires(lock);
void __lockfunc _raw_spin_lock_nested(raw_spinlock_t *lock, int subclass)
								__acquires(lock);
void __lockfunc
_raw_spin_lock_nest_lock(raw_spinlock_t *lock, struct lockdep_map *map)
								__acquires(lock);
void __lockfunc _raw_spin_lock_bh(raw_spinlock_t *lock)	__acquires(lock);
void __lockfunc _raw_spin_lock_irq(raw_spinlock_t *lock)
								__acquires(lock);

unsigned long __lockfunc _raw_spin_lock_irqsave(raw_spinlock_t *lock)
								__acquires(lock);
unsigned long __lockfunc
_raw_spin_lock_irqsave_nested(raw_spinlock_t *lock, int subclass)
								__acquires(lock);
int __lockfunc _raw_spin_trylock(raw_spinlock_t *lock);
int __lockfunc _raw_spin_trylock_bh(raw_spinlock_t *lock);
void __lockfunc _raw_spin_unlock(raw_spinlock_t *lock)		__releases(lock);
void __lockfunc _raw_spin_unlock_bh(raw_spinlock_t *lock)	__releases(lock);
void __lockfunc _raw_spin_unlock_irq(raw_spinlock_t *lock)	__releases(lock);
void __lockfunc
_raw_spin_unlock_irqrestore(raw_spinlock_t *lock, unsigned long flags)
								__releases(lock);
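
/*
 * Caller-side sketch (illustrative; "dev" is a hypothetical structure
 * with an embedded lock): kernel code normally reaches the entry points
 * above through the spin_lock_*() wrappers in <linux/spinlock.h>.
 *
 *	unsigned long flags;
 *
 *	spin_lock_irqsave(&dev->lock, flags);
 *	...critical section, safe against local interrupt handlers...
 *	spin_unlock_irqrestore(&dev->lock, flags);
 */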

#ifdef CONFIG_INLINE_SPIN_LOCK
#define _raw_spin_lock(lock) __raw_spin_lock(lock)
#endif
#ifdef CONFIG_INLINE_SPIN_LOCK_BH
#define _raw_spin_lock_bh(lock) __raw_spin_lock_bh(lock)
#endif
#ifdef CONFIG_INLINE_SPIN_LOCK_IRQ
#define _raw_spin_lock_irq(lock) __raw_spin_lock_irq(lock)
#endif
#ifdef CONFIG_INLINE_SPIN_LOCK_IRQSAVE
#define _raw_spin_lock_irqsave(lock) __raw_spin_lock_irqsave(lock)
#endif
#ifdef CONFIG_INLINE_SPIN_TRYLOCK
#define _raw_spin_trylock(lock) __raw_spin_trylock(lock)
#endif
#ifdef CONFIG_INLINE_SPIN_TRYLOCK_BH
#define _raw_spin_trylock_bh(lock) __raw_spin_trylock_bh(lock)
#endif
#ifdef CONFIG_INLINE_SPIN_UNLOCK
#define _raw_spin_unlock(lock) __raw_spin_unlock(lock)
#endif
#ifdef CONFIG_INLINE_SPIN_UNLOCK_BH
#define _raw_spin_unlock_bh(lock) __raw_spin_unlock_bh(lock)
#endif
#ifdef CONFIG_INLINE_SPIN_UNLOCK_IRQ
#define _raw_spin_unlock_irq(lock) __raw_spin_unlock_irq(lock)
#endif
#ifdef CONFIG_INLINE_SPIN_UNLOCK_IRQRESTORE
#define _raw_spin_unlock_irqrestore(lock, flags) __raw_spin_unlock_irqrestore(lock, flags)
#endif
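
/*
 * Each CONFIG_INLINE_SPIN_* option above replaces the out-of-line entry
 * point (declared earlier, defined in kernel/spinlock.c) with the
 * corresponding __raw_* inline defined below, shortening the call chain:
 *
 *	raw_spin_lock(lock)
 *	  -> _raw_spin_lock(lock)	(macro, CONFIG_INLINE_SPIN_LOCK=y)
 *	    -> __raw_spin_lock(lock)	(inlined into the caller)
 */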

static inline int __raw_spin_trylock(raw_spinlock_t *lock)
{
	preempt_disable();
	if (do_raw_spin_trylock(lock)) {
		spin_acquire(&lock->dep_map, 0, 1, _RET_IP_);
		return 1;
	}
	preempt_enable();
	return 0;
}
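
/*
 * Trylock sketch (illustrative; "dev" is hypothetical): a caller that
 * must not spin, e.g. to avoid inverting an established lock order,
 * attempts the lock and backs off on failure:
 *
 *	if (spin_trylock(&dev->lock)) {
 *		...short critical section...
 *		spin_unlock(&dev->lock);
 *	} else {
 *		...lock was busy: take a fallback path or retry later...
 *	}
 */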

/*
 * If lockdep is enabled then we use the non-preemption spin-ops
 * even on CONFIG_PREEMPT, because lockdep assumes that interrupts are
 * not re-enabled during lock-acquire (which the preempt-spin-ops do):
 */
#if !defined(CONFIG_GENERIC_LOCKBREAK) || defined(CONFIG_DEBUG_LOCK_ALLOC)
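
/*
 * The lock-side helpers below share one shape: first disable preemption
 * (plus local irqs or bottom halves where the variant needs them), then
 * tell lockdep about the acquire, and finally take the lock through
 * LOCK_CONTENDED(), which may attempt do_raw_spin_trylock() first so
 * that contention events are accounted when lock statistics are enabled.
 */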

static inline unsigned long __raw_spin_lock_irqsave(raw_spinlock_t *lock)
{
	unsigned long flags;

	local_irq_save(flags);
	preempt_disable();
	spin_acquire(&lock->dep_map, 0, 0, _RET_IP_);
	/*
	 * On lockdep we don't want the hand-coded irq-enable of
	 * do_raw_spin_lock_flags() code, because lockdep assumes
	 * that interrupts are not re-enabled during lock-acquire:
	 */
#ifdef CONFIG_LOCKDEP
	LOCK_CONTENDED(lock, do_raw_spin_trylock, do_raw_spin_lock);
#else
	do_raw_spin_lock_flags(lock, &flags);
#endif
	return flags;
}

static inline void __raw_spin_lock_irq(raw_spinlock_t *lock)
{
	local_irq_disable();
	preempt_disable();
	spin_acquire(&lock->dep_map, 0, 0, _RET_IP_);
	LOCK_CONTENDED(lock, do_raw_spin_trylock, do_raw_spin_lock);
}

static inline void __raw_spin_lock_bh(raw_spinlock_t *lock)
{
	local_bh_disable();
	preempt_disable();
	spin_acquire(&lock->dep_map, 0, 0, _RET_IP_);
	LOCK_CONTENDED(lock, do_raw_spin_trylock, do_raw_spin_lock);
}

static inline void __raw_spin_lock(raw_spinlock_t *lock)
{
	preempt_disable();
	spin_acquire(&lock->dep_map, 0, 0, _RET_IP_);
	LOCK_CONTENDED(lock, do_raw_spin_trylock, do_raw_spin_lock);
}

#endif /* !CONFIG_GENERIC_LOCKBREAK || CONFIG_DEBUG_LOCK_ALLOC */
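
/*
 * When the #if above is false (CONFIG_GENERIC_LOCKBREAK on a preemptible
 * kernel without lockdep), these lock-side inlines are not provided;
 * kernel/spinlock.c instead builds out-of-line, preemption-friendly
 * versions that drop out of the spin loop to allow preemption.
 */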

static inline void __raw_spin_unlock(raw_spinlock_t *lock)
{
	spin_release(&lock->dep_map, 1, _RET_IP_);
	do_raw_spin_unlock(lock);
	preempt_enable();
}

static inline void __raw_spin_unlock_irqrestore(raw_spinlock_t *lock,
					    unsigned long flags)
{
	spin_release(&lock->dep_map, 1, _RET_IP_);
	do_raw_spin_unlock(lock);
	local_irq_restore(flags);
	preempt_enable();
}

static inline void __raw_spin_unlock_irq(raw_spinlock_t *lock)
{
	spin_release(&lock->dep_map, 1, _RET_IP_);
	do_raw_spin_unlock(lock);
	local_irq_enable();
	preempt_enable();
}

static inline void __raw_spin_unlock_bh(raw_spinlock_t *lock)
{
	spin_release(&lock->dep_map, 1, _RET_IP_);
	do_raw_spin_unlock(lock);
	preempt_enable_no_resched();
	local_bh_enable_ip((unsigned long)__builtin_return_address(0));
}
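
/*
 * Note the release order in the _bh variants: preemption is re-enabled
 * with preempt_enable_no_resched() so that no preemption point is hit
 * while bottom halves are still disabled; local_bh_enable_ip() then
 * re-enables softirq processing and performs the deferred resched check.
 */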

static inline int __raw_spin_trylock_bh(raw_spinlock_t *lock)
{
	local_bh_disable();
	preempt_disable();
	if (do_raw_spin_trylock(lock)) {
		spin_acquire(&lock->dep_map, 0, 1, _RET_IP_);
		return 1;
	}
	preempt_enable_no_resched();
	local_bh_enable_ip((unsigned long)__builtin_return_address(0));
	return 0;
}

#include <linux/rwlock_api_smp.h>

#endif /* __LINUX_SPINLOCK_API_SMP_H */