lib/atomic64.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Generic implementation of 64-bit atomics using spinlocks,
 * useful on processors that don't have 64-bit atomic instructions.
 *
 * Copyright © 2009 Paul Mackerras, IBM Corp. <paulus@au1.ibm.com>
 */
#include <linux/types.h>
#include <linux/cache.h>
#include <linux/spinlock.h>
#include <linux/init.h>
#include <linux/export.h>
#include <linux/atomic.h>

/*
 * We use a hashed array of spinlocks to provide exclusive access
 * to each atomic64_t variable.  Since this is expected to be used on
 * systems with small numbers of CPUs (<= 4 or so), we use a
 * relatively small array of 16 spinlocks to avoid wasting too much
 * memory on the spinlock array.
 */
#define NR_LOCKS	16

/*
 * Ensure each lock is in a separate cacheline.
 */
static union {
	raw_spinlock_t lock;
	char pad[L1_CACHE_BYTES];
} atomic64_lock[NR_LOCKS] __cacheline_aligned_in_smp = {
	[0 ... (NR_LOCKS - 1)] = {
		.lock = __RAW_SPIN_LOCK_UNLOCKED(atomic64_lock.lock),
	},
};
|
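/*
 * Editorial note, not in the original file: lock_addr() below picks a
 * lock by hashing the variable's address. As a worked example, assuming
 * L1_CACHE_SHIFT == 6 and a hypothetical atomic64_t at address 0x12345678:
 *
 *	addr = 0x12345678 >> 6;			// 0x48d159, cacheline offset gone
 *	addr ^= (addr >> 8) ^ (addr >> 16);	// 0x48d159 ^ 0x48d1 ^ 0x48 = 0x4899c0
 *	index = addr & (NR_LOCKS - 1);		// 0x4899c0 & 0xf = 0
 *
 * so this variable would share atomic64_lock[0] with every other
 * atomic64_t hashing to the same slot.
 */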
static inline raw_spinlock_t *lock_addr(const atomic64_t *v)
{
	unsigned long addr = (unsigned long) v;

	addr >>= L1_CACHE_SHIFT;
	addr ^= (addr >> 8) ^ (addr >> 16);
	return &atomic64_lock[addr & (NR_LOCKS - 1)].lock;
}

long long atomic64_read(const atomic64_t *v)
{
	unsigned long flags;
	raw_spinlock_t *lock = lock_addr(v);
	long long val;

	raw_spin_lock_irqsave(lock, flags);
	val = v->counter;
	raw_spin_unlock_irqrestore(lock, flags);
	return val;
}
EXPORT_SYMBOL(atomic64_read);

void atomic64_set(atomic64_t *v, long long i)
{
	unsigned long flags;
	raw_spinlock_t *lock = lock_addr(v);
|
	raw_spin_lock_irqsave(lock, flags);
	v->counter = i;
	raw_spin_unlock_irqrestore(lock, flags);
}
EXPORT_SYMBOL(atomic64_set);
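
/*
 * Editorial sketch, not in the original file: on a 32-bit machine even a
 * plain 64-bit load or store can tear into two bus accesses, which is why
 * atomic64_read() and atomic64_set() above take the hashed lock too. A
 * hypothetical caller (names are illustrative only):
 *
 *	static atomic64_t bytes_transferred = ATOMIC64_INIT(0);
 *
 *	atomic64_set(&bytes_transferred, 0);
 *	total = atomic64_read(&bytes_transferred);
 *
 * Both calls serialize on lock_addr(&bytes_transferred).
 */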
|
#define ATOMIC64_OP(op, c_op)						\
void atomic64_##op(long long a, atomic64_t *v)				\
{									\
	unsigned long flags;						\
	raw_spinlock_t *lock = lock_addr(v);				\
									\
	raw_spin_lock_irqsave(lock, flags);				\
	v->counter c_op a;						\
	raw_spin_unlock_irqrestore(lock, flags);			\
}									\
EXPORT_SYMBOL(atomic64_##op);

#define ATOMIC64_OP_RETURN(op, c_op)					\
long long atomic64_##op##_return(long long a, atomic64_t *v)		\
{									\
	unsigned long flags;						\
	raw_spinlock_t *lock = lock_addr(v);				\
	long long val;							\
									\
	raw_spin_lock_irqsave(lock, flags);				\
	val = (v->counter c_op a);					\
	raw_spin_unlock_irqrestore(lock, flags);			\
	return val;							\
}									\
EXPORT_SYMBOL(atomic64_##op##_return);

#define ATOMIC64_FETCH_OP(op, c_op)					\
long long atomic64_fetch_##op(long long a, atomic64_t *v)		\
{									\
	unsigned long flags;						\
	raw_spinlock_t *lock = lock_addr(v);				\
	long long val;							\
									\
	raw_spin_lock_irqsave(lock, flags);				\
	val = v->counter;						\
	v->counter c_op a;						\
	raw_spin_unlock_irqrestore(lock, flags);			\
	return val;							\
}									\
EXPORT_SYMBOL(atomic64_fetch_##op);

#define ATOMIC64_OPS(op, c_op)						\
	ATOMIC64_OP(op, c_op)						\
	ATOMIC64_OP_RETURN(op, c_op)					\
	ATOMIC64_FETCH_OP(op, c_op)

ATOMIC64_OPS(add, +=)
ATOMIC64_OPS(sub, -=)
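
/*
 * For reference (editorial comment, not in the original file):
 * ATOMIC64_OPS(add, +=) above emits three exported functions,
 * atomic64_add(), atomic64_add_return() and atomic64_fetch_add().
 * The ATOMIC64_OP(add, +=) part, for instance, expands to:
 *
 *	void atomic64_add(long long a, atomic64_t *v)
 *	{
 *		unsigned long flags;
 *		raw_spinlock_t *lock = lock_addr(v);
 *
 *		raw_spin_lock_irqsave(lock, flags);
 *		v->counter += a;
 *		raw_spin_unlock_irqrestore(lock, flags);
 *	}
 *	EXPORT_SYMBOL(atomic64_add);
 */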

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, c_op)						\
	ATOMIC64_OP(op, c_op)						\
	ATOMIC64_OP_RETURN(op, c_op)					\
	ATOMIC64_FETCH_OP(op, c_op)

ATOMIC64_OPS(and, &=)
ATOMIC64_OPS(or, |=)
ATOMIC64_OPS(xor, ^=)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

long long atomic64_dec_if_positive(atomic64_t *v)
{
	unsigned long flags;
	raw_spinlock_t *lock = lock_addr(v);
	long long val;

	raw_spin_lock_irqsave(lock, flags);
	val = v->counter - 1;
	if (val >= 0)
		v->counter = val;
	raw_spin_unlock_irqrestore(lock, flags);
	return val;
}
EXPORT_SYMBOL(atomic64_dec_if_positive);
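
/*
 * Editorial sketch, not in the original file: atomic64_dec_if_positive()
 * only stores the decremented value when the result stays non-negative,
 * so a hypothetical caller can use it as a "take one if any remain"
 * primitive:
 *
 *	if (atomic64_dec_if_positive(&slots_free) < 0)
 *		return -EBUSY;	// counter was left unchanged
 *
 * Note that the return value is the decremented result, not the old value.
 */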

long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n)
{
	unsigned long flags;
	raw_spinlock_t *lock = lock_addr(v);
	long long val;

	raw_spin_lock_irqsave(lock, flags);
	val = v->counter;
	if (val == o)
		v->counter = n;
	raw_spin_unlock_irqrestore(lock, flags);
	return val;
}
EXPORT_SYMBOL(atomic64_cmpxchg);
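
/*
 * Editorial sketch, not in the original file: the usual compare-and-swap
 * retry loop builds richer operations out of atomic64_cmpxchg(). This
 * hypothetical helper (name and function are illustrative only) stores
 * the maximum of the current and new values:
 */
static __maybe_unused void example_atomic64_max(atomic64_t *v, long long new)
{
	long long old = atomic64_read(v);

	while (old < new) {
		long long seen = atomic64_cmpxchg(v, old, new);

		if (seen == old)
			break;		/* swap succeeded */
		old = seen;		/* lost a race; retry with the fresh value */
	}
}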

long long atomic64_xchg(atomic64_t *v, long long new)
{
	unsigned long flags;
	raw_spinlock_t *lock = lock_addr(v);
	long long val;

	raw_spin_lock_irqsave(lock, flags);
	val = v->counter;
	v->counter = new;
	raw_spin_unlock_irqrestore(lock, flags);
	return val;
}
EXPORT_SYMBOL(atomic64_xchg);
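
/*
 * Editorial sketch, not in the original file: atomic64_xchg() is handy
 * for read-and-reset, e.g. draining a hypothetical statistics counter in
 * one atomic step:
 *
 *	long long delta = atomic64_xchg(&stats_bytes, 0);
 *
 * The old count comes back and the counter is zeroed with no window in
 * which another CPU's update could be lost.
 */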
|
long long atomic64_fetch_add_unless(atomic64_t *v, long long a, long long u)
{
	unsigned long flags;
	raw_spinlock_t *lock = lock_addr(v);
	long long val;
|
	raw_spin_lock_irqsave(lock, flags);
	val = v->counter;
	if (val != u)
		v->counter += a;
	raw_spin_unlock_irqrestore(lock, flags);

	return val;
}
EXPORT_SYMBOL(atomic64_fetch_add_unless);
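
/*
 * Editorial note, not in the original file: the generic
 * atomic64_add_unless() and atomic64_inc_not_zero() helpers are built on
 * this primitive; conceptually:
 *
 *	// add a to v unless v == u; true if the add happened
 *	bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
 *	{
 *		return atomic64_fetch_add_unless(v, a, u) != u;
 *	}
 *
 *	atomic64_inc_not_zero(v) == atomic64_add_unless((v), 1, 0)
 *
 * which is the classic pattern for taking a reference only while an
 * object is still live.
 */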