Blame view
lib/atomic64.c
4.6 KB
2874c5fd2 treewide: Replace... |
1 |
// SPDX-License-Identifier: GPL-2.0-or-later |
09d4e0edd lib: Provide gene... |
2 3 4 5 6 |
/* * Generic implementation of 64-bit atomics using spinlocks, * useful on processors that don't have 64-bit atomic instructions. * * Copyright © 2009 Paul Mackerras, IBM Corp. <paulus@au1.ibm.com> |
09d4e0edd lib: Provide gene... |
7 8 9 10 11 |
*/ #include <linux/types.h> #include <linux/cache.h> #include <linux/spinlock.h> #include <linux/init.h> |
8bc3bcc93 lib: reduce the u... |
12 |
#include <linux/export.h> |
60063497a atomic: use <linu... |
13 |
#include <linux/atomic.h> |
09d4e0edd lib: Provide gene... |
14 15 16 17 18 19 20 21 22 23 24 25 26 27 |
/*
 * We use a hashed array of spinlocks to provide exclusive access
 * to each atomic64_t variable.  Since this is expected to be used on
 * systems with small numbers of CPUs (<= 4 or so), we use a
 * relatively small array of 16 spinlocks to avoid wasting too much
 * memory on the spinlock array.
 */
#define NR_LOCKS	16

/*
 * Ensure each lock is in a separate cacheline.
 */
static union {
	raw_spinlock_t lock;
	char pad[L1_CACHE_BYTES];	/* pad to a full cacheline: avoids false sharing between locks */
} atomic64_lock[NR_LOCKS] __cacheline_aligned_in_smp = {
	[0 ... (NR_LOCKS - 1)] = {
		.lock =  __RAW_SPIN_LOCK_UNLOCKED(atomic64_lock.lock),
	},
};
09d4e0edd lib: Provide gene... |
35 |
|
cb475de3d lib: atomic64: Ch... |
36 |
static inline raw_spinlock_t *lock_addr(const atomic64_t *v) |
09d4e0edd lib: Provide gene... |
37 38 39 40 41 42 43 |
{ unsigned long addr = (unsigned long) v; addr >>= L1_CACHE_SHIFT; addr ^= (addr >> 8) ^ (addr >> 16); return &atomic64_lock[addr & (NR_LOCKS - 1)].lock; } |
1bdadf46e locking/atomic: a... |
44 |
s64 generic_atomic64_read(const atomic64_t *v) |
09d4e0edd lib: Provide gene... |
45 46 |
{ unsigned long flags; |
cb475de3d lib: atomic64: Ch... |
47 |
raw_spinlock_t *lock = lock_addr(v); |
9255813d5 locking/atomic: U... |
48 |
s64 val; |
09d4e0edd lib: Provide gene... |
49 |
|
f59ca0587 locking, lib/atom... |
50 |
raw_spin_lock_irqsave(lock, flags); |
09d4e0edd lib: Provide gene... |
51 |
val = v->counter; |
f59ca0587 locking, lib/atom... |
52 |
raw_spin_unlock_irqrestore(lock, flags); |
09d4e0edd lib: Provide gene... |
53 54 |
return val; } |
1bdadf46e locking/atomic: a... |
55 |
EXPORT_SYMBOL(generic_atomic64_read); |
09d4e0edd lib: Provide gene... |
56 |
|
1bdadf46e locking/atomic: a... |
57 |
/*
 * Atomically set a 64-bit counter to @i under its hashed spinlock.
 */
void generic_atomic64_set(atomic64_t *v, s64 i)
{
	raw_spinlock_t *slock = lock_addr(v);
	unsigned long flags;

	raw_spin_lock_irqsave(slock, flags);
	v->counter = i;
	raw_spin_unlock_irqrestore(slock, flags);
}
EXPORT_SYMBOL(generic_atomic64_set);
09d4e0edd lib: Provide gene... |
67 |
|
560cb12a4 locking,arch: Rew... |
68 |
/*
 * Template for the void ops: apply "counter c_op a" while holding the
 * variable's hashed spinlock; nothing is returned.
 */
#define ATOMIC64_OP(op, c_op)						\
void generic_atomic64_##op(s64 a, atomic64_t *v)			\
{									\
	raw_spinlock_t *slock = lock_addr(v);				\
	unsigned long flags;						\
									\
	raw_spin_lock_irqsave(slock, flags);				\
	v->counter c_op a;						\
	raw_spin_unlock_irqrestore(slock, flags);			\
}									\
EXPORT_SYMBOL(generic_atomic64_##op);
560cb12a4 locking,arch: Rew... |
79 80 |
/*
 * Template for <op>_return: apply "counter c_op a" under the hashed
 * spinlock and return the *new* value.
 */
#define ATOMIC64_OP_RETURN(op, c_op)					\
s64 generic_atomic64_##op##_return(s64 a, atomic64_t *v)		\
{									\
	raw_spinlock_t *slock = lock_addr(v);				\
	unsigned long flags;						\
	s64 ret;							\
									\
	raw_spin_lock_irqsave(slock, flags);				\
	ret = (v->counter c_op a);					\
	raw_spin_unlock_irqrestore(slock, flags);			\
									\
	return ret;							\
}									\
EXPORT_SYMBOL(generic_atomic64_##op##_return);
560cb12a4 locking,arch: Rew... |
93 |
|
28aa2bda2 locking/atomic: I... |
94 |
/*
 * Template for fetch_<op>: apply "counter c_op a" under the hashed
 * spinlock and return the value the counter held *before* the op.
 */
#define ATOMIC64_FETCH_OP(op, c_op)					\
s64 generic_atomic64_fetch_##op(s64 a, atomic64_t *v)			\
{									\
	raw_spinlock_t *slock = lock_addr(v);				\
	unsigned long flags;						\
	s64 old;							\
									\
	raw_spin_lock_irqsave(slock, flags);				\
	old = v->counter;						\
	v->counter c_op a;						\
	raw_spin_unlock_irqrestore(slock, flags);			\
									\
	return old;							\
}									\
EXPORT_SYMBOL(generic_atomic64_fetch_##op);
28aa2bda2 locking/atomic: I... |
108 |
|
560cb12a4 locking,arch: Rew... |
109 110 |
#define ATOMIC64_OPS(op, c_op) \ ATOMIC64_OP(op, c_op) \ |
28aa2bda2 locking/atomic: I... |
111 112 |
ATOMIC64_OP_RETURN(op, c_op) \ ATOMIC64_FETCH_OP(op, c_op) |
560cb12a4 locking,arch: Rew... |
113 114 115 116 117 |
ATOMIC64_OPS(add, +=) ATOMIC64_OPS(sub, -=) #undef ATOMIC64_OPS |
28aa2bda2 locking/atomic: I... |
118 119 120 121 122 123 124 125 126 127 128 |
#define ATOMIC64_OPS(op, c_op) \ ATOMIC64_OP(op, c_op) \ ATOMIC64_OP_RETURN(op, c_op) \ ATOMIC64_FETCH_OP(op, c_op) ATOMIC64_OPS(and, &=) ATOMIC64_OPS(or, |=) ATOMIC64_OPS(xor, ^=) #undef ATOMIC64_OPS #undef ATOMIC64_FETCH_OP |
560cb12a4 locking,arch: Rew... |
129 130 |
#undef ATOMIC64_OP_RETURN #undef ATOMIC64_OP |
09d4e0edd lib: Provide gene... |
131 |
|
1bdadf46e locking/atomic: a... |
132 |
/*
 * Decrement *v only if the result stays non-negative.  Returns the
 * decremented value either way, so a negative return means the
 * counter was left untouched.
 */
s64 generic_atomic64_dec_if_positive(atomic64_t *v)
{
	raw_spinlock_t *slock = lock_addr(v);
	unsigned long flags;
	s64 ret;

	raw_spin_lock_irqsave(slock, flags);
	ret = v->counter - 1;
	if (ret >= 0)
		v->counter = ret;
	raw_spin_unlock_irqrestore(slock, flags);

	return ret;
}
EXPORT_SYMBOL(generic_atomic64_dec_if_positive);
09d4e0edd lib: Provide gene... |
146 |
|
1bdadf46e locking/atomic: a... |
147 |
/*
 * Compare-and-exchange: if *v equals @o, replace it with @n.  Returns
 * the value *v held before the operation (== @o iff the swap happened).
 */
s64 generic_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n)
{
	raw_spinlock_t *slock = lock_addr(v);
	unsigned long flags;
	s64 old;

	raw_spin_lock_irqsave(slock, flags);
	old = v->counter;
	if (old == o)
		v->counter = n;
	raw_spin_unlock_irqrestore(slock, flags);

	return old;
}
EXPORT_SYMBOL(generic_atomic64_cmpxchg);
09d4e0edd lib: Provide gene... |
161 |
|
1bdadf46e locking/atomic: a... |
162 |
/*
 * Unconditionally store @new into *v and return the previous value.
 */
s64 generic_atomic64_xchg(atomic64_t *v, s64 new)
{
	raw_spinlock_t *slock = lock_addr(v);
	unsigned long flags;
	s64 old;

	raw_spin_lock_irqsave(slock, flags);
	old = v->counter;
	v->counter = new;
	raw_spin_unlock_irqrestore(slock, flags);

	return old;
}
EXPORT_SYMBOL(generic_atomic64_xchg);
09d4e0edd lib: Provide gene... |
175 |
|
1bdadf46e locking/atomic: a... |
176 |
/*
 * Add @a to *v unless *v currently equals @u.  Returns the value *v
 * held before the operation, so callers can tell whether the add
 * happened (return != @u).
 */
s64 generic_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	raw_spinlock_t *slock = lock_addr(v);
	unsigned long flags;
	s64 old;

	raw_spin_lock_irqsave(slock, flags);
	old = v->counter;
	if (old != u)
		v->counter += a;
	raw_spin_unlock_irqrestore(slock, flags);

	return old;
}
EXPORT_SYMBOL(generic_atomic64_fetch_add_unless);