Blame view

lib/atomic64.c 4.46 KB
2874c5fd2   Thomas Gleixner   treewide: Replace...
1
  // SPDX-License-Identifier: GPL-2.0-or-later
09d4e0edd   Paul Mackerras   lib: Provide gene...
2
3
4
5
6
  /*
   * Generic implementation of 64-bit atomics using spinlocks,
   * useful on processors that don't have 64-bit atomic instructions.
   *
   * Copyright © 2009 Paul Mackerras, IBM Corp. <paulus@au1.ibm.com>
09d4e0edd   Paul Mackerras   lib: Provide gene...
7
8
9
10
11
   */
  #include <linux/types.h>
  #include <linux/cache.h>
  #include <linux/spinlock.h>
  #include <linux/init.h>
8bc3bcc93   Paul Gortmaker   lib: reduce the u...
12
  #include <linux/export.h>
60063497a   Arun Sharma   atomic: use <linu...
13
  #include <linux/atomic.h>
09d4e0edd   Paul Mackerras   lib: Provide gene...
14
15
16
17
18
19
20
21
22
23
24
25
26
27
  
/*
 * We use a hashed array of spinlocks to provide exclusive access
 * to each atomic64_t variable.  Since this is expected to used on
 * systems with small numbers of CPUs (<= 4 or so), we use a
 * relatively small array of 16 spinlocks to avoid wasting too much
 * memory on the spinlock array.
 */
/* Must stay a power of two: the lock hash masks with (NR_LOCKS - 1). */
#define NR_LOCKS	16

/*
 * Ensure each lock is in a separate cacheline.
 */
static union {
	raw_spinlock_t lock;
	/* Pad each entry to a full cacheline to avoid false sharing. */
	char pad[L1_CACHE_BYTES];
} atomic64_lock[NR_LOCKS] __cacheline_aligned_in_smp = {
	[0 ... (NR_LOCKS - 1)] = {
		.lock =  __RAW_SPIN_LOCK_UNLOCKED(atomic64_lock.lock),
	},
};
09d4e0edd   Paul Mackerras   lib: Provide gene...
35

cb475de3d   Yong Zhang   lib: atomic64: Ch...
36
  static inline raw_spinlock_t *lock_addr(const atomic64_t *v)
09d4e0edd   Paul Mackerras   lib: Provide gene...
37
38
39
40
41
42
43
  {
  	unsigned long addr = (unsigned long) v;
  
  	addr >>= L1_CACHE_SHIFT;
  	addr ^= (addr >> 8) ^ (addr >> 16);
  	return &atomic64_lock[addr & (NR_LOCKS - 1)].lock;
  }
9255813d5   Mark Rutland   locking/atomic: U...
44
  s64 atomic64_read(const atomic64_t *v)
09d4e0edd   Paul Mackerras   lib: Provide gene...
45
46
  {
  	unsigned long flags;
cb475de3d   Yong Zhang   lib: atomic64: Ch...
47
  	raw_spinlock_t *lock = lock_addr(v);
9255813d5   Mark Rutland   locking/atomic: U...
48
  	s64 val;
09d4e0edd   Paul Mackerras   lib: Provide gene...
49

f59ca0587   Shan Hai   locking, lib/atom...
50
  	raw_spin_lock_irqsave(lock, flags);
09d4e0edd   Paul Mackerras   lib: Provide gene...
51
  	val = v->counter;
f59ca0587   Shan Hai   locking, lib/atom...
52
  	raw_spin_unlock_irqrestore(lock, flags);
09d4e0edd   Paul Mackerras   lib: Provide gene...
53
54
  	return val;
  }
3fc7b4b22   Roland Dreier   lib: export gener...
55
  EXPORT_SYMBOL(atomic64_read);
09d4e0edd   Paul Mackerras   lib: Provide gene...
56

9255813d5   Mark Rutland   locking/atomic: U...
57
  void atomic64_set(atomic64_t *v, s64 i)
09d4e0edd   Paul Mackerras   lib: Provide gene...
58
59
  {
  	unsigned long flags;
cb475de3d   Yong Zhang   lib: atomic64: Ch...
60
  	raw_spinlock_t *lock = lock_addr(v);
09d4e0edd   Paul Mackerras   lib: Provide gene...
61

f59ca0587   Shan Hai   locking, lib/atom...
62
  	raw_spin_lock_irqsave(lock, flags);
09d4e0edd   Paul Mackerras   lib: Provide gene...
63
  	v->counter = i;
f59ca0587   Shan Hai   locking, lib/atom...
64
  	raw_spin_unlock_irqrestore(lock, flags);
09d4e0edd   Paul Mackerras   lib: Provide gene...
65
  }
3fc7b4b22   Roland Dreier   lib: export gener...
66
  EXPORT_SYMBOL(atomic64_set);
09d4e0edd   Paul Mackerras   lib: Provide gene...
67

560cb12a4   Peter Zijlstra   locking,arch: Rew...
68
/*
 * ATOMIC64_OP(op, c_op) - generate void atomic64_<op>(), which applies
 * the compound assignment c_op (e.g. +=) to v->counter under the
 * hashed spinlock, and export the resulting symbol.
 */
#define ATOMIC64_OP(op, c_op)						\
void atomic64_##op(s64 a, atomic64_t *v)				\
{									\
	unsigned long flags;						\
	raw_spinlock_t *lock = lock_addr(v);				\
									\
	raw_spin_lock_irqsave(lock, flags);				\
	v->counter c_op a;						\
	raw_spin_unlock_irqrestore(lock, flags);			\
}									\
EXPORT_SYMBOL(atomic64_##op);
  
/*
 * ATOMIC64_OP_RETURN(op, c_op) - generate atomic64_<op>_return(), which
 * applies c_op to v->counter under the hashed spinlock and returns the
 * *new* value of the counter.
 */
#define ATOMIC64_OP_RETURN(op, c_op)					\
s64 atomic64_##op##_return(s64 a, atomic64_t *v)			\
{									\
	unsigned long flags;						\
	raw_spinlock_t *lock = lock_addr(v);				\
	s64 val;							\
									\
	raw_spin_lock_irqsave(lock, flags);				\
	val = (v->counter c_op a);					\
	raw_spin_unlock_irqrestore(lock, flags);			\
	return val;							\
}									\
EXPORT_SYMBOL(atomic64_##op##_return);
28aa2bda2   Peter Zijlstra   locking/atomic: I...
93
/*
 * ATOMIC64_FETCH_OP(op, c_op) - generate atomic64_fetch_<op>(), which
 * applies c_op to v->counter under the hashed spinlock and returns the
 * *old* value (the value read before the update).
 */
#define ATOMIC64_FETCH_OP(op, c_op)					\
s64 atomic64_fetch_##op(s64 a, atomic64_t *v)				\
{									\
	unsigned long flags;						\
	raw_spinlock_t *lock = lock_addr(v);				\
	s64 val;							\
									\
	raw_spin_lock_irqsave(lock, flags);				\
	val = v->counter;						\
	v->counter c_op a;						\
	raw_spin_unlock_irqrestore(lock, flags);			\
	return val;							\
}									\
EXPORT_SYMBOL(atomic64_fetch_##op);
560cb12a4   Peter Zijlstra   locking,arch: Rew...
107
108
/*
 * Instantiate all three variants (void, *_return, fetch_*) for the
 * arithmetic operations.
 */
#define ATOMIC64_OPS(op, c_op)						\
	ATOMIC64_OP(op, c_op)						\
	ATOMIC64_OP_RETURN(op, c_op)					\
	ATOMIC64_FETCH_OP(op, c_op)

ATOMIC64_OPS(add, +=)
ATOMIC64_OPS(sub, -=)

#undef ATOMIC64_OPS
/*
 * Same three variants for the bitwise operations.
 */
#define ATOMIC64_OPS(op, c_op)						\
	ATOMIC64_OP(op, c_op)						\
	ATOMIC64_OP_RETURN(op, c_op)					\
	ATOMIC64_FETCH_OP(op, c_op)

ATOMIC64_OPS(and, &=)
ATOMIC64_OPS(or, |=)
ATOMIC64_OPS(xor, ^=)

/* The generator macros are not needed past this point. */
#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
09d4e0edd   Paul Mackerras   lib: Provide gene...
129

9255813d5   Mark Rutland   locking/atomic: U...
130
  s64 atomic64_dec_if_positive(atomic64_t *v)
09d4e0edd   Paul Mackerras   lib: Provide gene...
131
132
  {
  	unsigned long flags;
cb475de3d   Yong Zhang   lib: atomic64: Ch...
133
  	raw_spinlock_t *lock = lock_addr(v);
9255813d5   Mark Rutland   locking/atomic: U...
134
  	s64 val;
09d4e0edd   Paul Mackerras   lib: Provide gene...
135

f59ca0587   Shan Hai   locking, lib/atom...
136
  	raw_spin_lock_irqsave(lock, flags);
09d4e0edd   Paul Mackerras   lib: Provide gene...
137
138
139
  	val = v->counter - 1;
  	if (val >= 0)
  		v->counter = val;
f59ca0587   Shan Hai   locking, lib/atom...
140
  	raw_spin_unlock_irqrestore(lock, flags);
09d4e0edd   Paul Mackerras   lib: Provide gene...
141
142
  	return val;
  }
3fc7b4b22   Roland Dreier   lib: export gener...
143
  EXPORT_SYMBOL(atomic64_dec_if_positive);
09d4e0edd   Paul Mackerras   lib: Provide gene...
144

9255813d5   Mark Rutland   locking/atomic: U...
145
  s64 atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n)
09d4e0edd   Paul Mackerras   lib: Provide gene...
146
147
  {
  	unsigned long flags;
cb475de3d   Yong Zhang   lib: atomic64: Ch...
148
  	raw_spinlock_t *lock = lock_addr(v);
9255813d5   Mark Rutland   locking/atomic: U...
149
  	s64 val;
09d4e0edd   Paul Mackerras   lib: Provide gene...
150

f59ca0587   Shan Hai   locking, lib/atom...
151
  	raw_spin_lock_irqsave(lock, flags);
09d4e0edd   Paul Mackerras   lib: Provide gene...
152
153
154
  	val = v->counter;
  	if (val == o)
  		v->counter = n;
f59ca0587   Shan Hai   locking, lib/atom...
155
  	raw_spin_unlock_irqrestore(lock, flags);
09d4e0edd   Paul Mackerras   lib: Provide gene...
156
157
  	return val;
  }
3fc7b4b22   Roland Dreier   lib: export gener...
158
  EXPORT_SYMBOL(atomic64_cmpxchg);
09d4e0edd   Paul Mackerras   lib: Provide gene...
159

9255813d5   Mark Rutland   locking/atomic: U...
160
  s64 atomic64_xchg(atomic64_t *v, s64 new)
09d4e0edd   Paul Mackerras   lib: Provide gene...
161
162
  {
  	unsigned long flags;
cb475de3d   Yong Zhang   lib: atomic64: Ch...
163
  	raw_spinlock_t *lock = lock_addr(v);
9255813d5   Mark Rutland   locking/atomic: U...
164
  	s64 val;
09d4e0edd   Paul Mackerras   lib: Provide gene...
165

f59ca0587   Shan Hai   locking, lib/atom...
166
  	raw_spin_lock_irqsave(lock, flags);
09d4e0edd   Paul Mackerras   lib: Provide gene...
167
168
  	val = v->counter;
  	v->counter = new;
f59ca0587   Shan Hai   locking, lib/atom...
169
  	raw_spin_unlock_irqrestore(lock, flags);
09d4e0edd   Paul Mackerras   lib: Provide gene...
170
171
  	return val;
  }
3fc7b4b22   Roland Dreier   lib: export gener...
172
  EXPORT_SYMBOL(atomic64_xchg);
09d4e0edd   Paul Mackerras   lib: Provide gene...
173

9255813d5   Mark Rutland   locking/atomic: U...
174
  s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
09d4e0edd   Paul Mackerras   lib: Provide gene...
175
176
  {
  	unsigned long flags;
cb475de3d   Yong Zhang   lib: atomic64: Ch...
177
  	raw_spinlock_t *lock = lock_addr(v);
9255813d5   Mark Rutland   locking/atomic: U...
178
  	s64 val;
09d4e0edd   Paul Mackerras   lib: Provide gene...
179

f59ca0587   Shan Hai   locking, lib/atom...
180
  	raw_spin_lock_irqsave(lock, flags);
00b808ab7   Mark Rutland   atomics/generic: ...
181
182
  	val = v->counter;
  	if (val != u)
09d4e0edd   Paul Mackerras   lib: Provide gene...
183
  		v->counter += a;
f59ca0587   Shan Hai   locking, lib/atom...
184
  	raw_spin_unlock_irqrestore(lock, flags);
00b808ab7   Mark Rutland   atomics/generic: ...
185
186
  
  	return val;
09d4e0edd   Paul Mackerras   lib: Provide gene...
187
  }
00b808ab7   Mark Rutland   atomics/generic: ...
188
  EXPORT_SYMBOL(atomic64_fetch_add_unless);