Blame view
lib/atomic64.c
4.78 KB
09d4e0edd lib: Provide gene... |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 |
/* * Generic implementation of 64-bit atomics using spinlocks, * useful on processors that don't have 64-bit atomic instructions. * * Copyright © 2009 Paul Mackerras, IBM Corp. <paulus@au1.ibm.com> * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version * 2 of the License, or (at your option) any later version. */ #include <linux/types.h> #include <linux/cache.h> #include <linux/spinlock.h> #include <linux/init.h> |
8bc3bcc93 lib: reduce the u... |
16 |
#include <linux/export.h> |
60063497a atomic: use <linu... |
17 |
#include <linux/atomic.h> |
/*
 * We use a hashed array of spinlocks to provide exclusive access
 * to each atomic64_t variable.  Since this is expected to be used on
 * systems with small numbers of CPUs (<= 4 or so), we use a
 * relatively small array of 16 spinlocks to avoid wasting too much
 * memory on the spinlock array.
 */
#define NR_LOCKS	16

/*
 * Ensure each lock is in a separate cacheline, so that two CPUs
 * spinning on different locks never false-share a line: the union
 * pads every element out to L1_CACHE_BYTES.
 */
static union {
	raw_spinlock_t lock;
	char pad[L1_CACHE_BYTES];
} atomic64_lock[NR_LOCKS] __cacheline_aligned_in_smp = {
	/* Statically initialise every element's lock to unlocked. */
	[0 ... (NR_LOCKS - 1)] = {
		.lock =  __RAW_SPIN_LOCK_UNLOCKED(atomic64_lock.lock),
	},
};
09d4e0edd lib: Provide gene... |
39 |
|
cb475de3d lib: atomic64: Ch... |
40 |
static inline raw_spinlock_t *lock_addr(const atomic64_t *v) |
09d4e0edd lib: Provide gene... |
41 42 43 44 45 46 47 48 49 50 51 |
{ unsigned long addr = (unsigned long) v; addr >>= L1_CACHE_SHIFT; addr ^= (addr >> 8) ^ (addr >> 16); return &atomic64_lock[addr & (NR_LOCKS - 1)].lock; } long long atomic64_read(const atomic64_t *v) { unsigned long flags; |
cb475de3d lib: atomic64: Ch... |
52 |
raw_spinlock_t *lock = lock_addr(v); |
09d4e0edd lib: Provide gene... |
53 |
long long val; |
f59ca0587 locking, lib/atom... |
54 |
raw_spin_lock_irqsave(lock, flags); |
09d4e0edd lib: Provide gene... |
55 |
val = v->counter; |
f59ca0587 locking, lib/atom... |
56 |
raw_spin_unlock_irqrestore(lock, flags); |
09d4e0edd lib: Provide gene... |
57 58 |
return val; } |
3fc7b4b22 lib: export gener... |
59 |
EXPORT_SYMBOL(atomic64_read); |
09d4e0edd lib: Provide gene... |
60 61 62 63 |
void atomic64_set(atomic64_t *v, long long i) { unsigned long flags; |
cb475de3d lib: atomic64: Ch... |
64 |
raw_spinlock_t *lock = lock_addr(v); |
09d4e0edd lib: Provide gene... |
65 |
|
f59ca0587 locking, lib/atom... |
66 |
raw_spin_lock_irqsave(lock, flags); |
09d4e0edd lib: Provide gene... |
67 |
v->counter = i; |
f59ca0587 locking, lib/atom... |
68 |
raw_spin_unlock_irqrestore(lock, flags); |
09d4e0edd lib: Provide gene... |
69 |
} |
3fc7b4b22 lib: export gener... |
70 |
EXPORT_SYMBOL(atomic64_set); |
/*
 * Generate void atomic64_<op>(a, v): apply "v->counter c_op a" under
 * the hashed spinlock, discarding the result.
 */
#define ATOMIC64_OP(op, c_op)						\
void atomic64_##op(long long a, atomic64_t *v)				\
{									\
	unsigned long flags;						\
	raw_spinlock_t *lock = lock_addr(v);				\
									\
	raw_spin_lock_irqsave(lock, flags);				\
	v->counter c_op a;						\
	raw_spin_unlock_irqrestore(lock, flags);			\
}									\
EXPORT_SYMBOL(atomic64_##op);

/*
 * Generate atomic64_<op>_return(a, v): as ATOMIC64_OP, but return the
 * value of the counter *after* the operation.
 */
#define ATOMIC64_OP_RETURN(op, c_op)					\
long long atomic64_##op##_return(long long a, atomic64_t *v)		\
{									\
	unsigned long flags;						\
	raw_spinlock_t *lock = lock_addr(v);				\
	long long val;							\
									\
	raw_spin_lock_irqsave(lock, flags);				\
	val = (v->counter c_op a);					\
	raw_spin_unlock_irqrestore(lock, flags);			\
	return val;							\
}									\
EXPORT_SYMBOL(atomic64_##op##_return);

/*
 * Generate atomic64_fetch_<op>(a, v): as ATOMIC64_OP, but return the
 * value the counter held *before* the operation.
 */
#define ATOMIC64_FETCH_OP(op, c_op)					\
long long atomic64_fetch_##op(long long a, atomic64_t *v)		\
{									\
	unsigned long flags;						\
	raw_spinlock_t *lock = lock_addr(v);				\
	long long val;							\
									\
	raw_spin_lock_irqsave(lock, flags);				\
	val = v->counter;						\
	v->counter c_op a;						\
	raw_spin_unlock_irqrestore(lock, flags);			\
	return val;							\
}									\
EXPORT_SYMBOL(atomic64_fetch_##op);

/* Instantiate the full op/op_return/fetch_op family for one operator. */
#define ATOMIC64_OPS(op, c_op)						\
	ATOMIC64_OP(op, c_op)						\
	ATOMIC64_OP_RETURN(op, c_op)					\
	ATOMIC64_FETCH_OP(op, c_op)

/* Arithmetic ops: add/sub with all three variants each. */
ATOMIC64_OPS(add, +=)
ATOMIC64_OPS(sub, -=)

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, c_op)						\
	ATOMIC64_OP(op, c_op)						\
	ATOMIC64_OP_RETURN(op, c_op)					\
	ATOMIC64_FETCH_OP(op, c_op)

/* Bitwise ops: and/or/xor with all three variants each. */
ATOMIC64_OPS(and, &=)
ATOMIC64_OPS(or, |=)
ATOMIC64_OPS(xor, ^=)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
09d4e0edd lib: Provide gene... |
133 134 135 136 |
long long atomic64_dec_if_positive(atomic64_t *v) { unsigned long flags; |
cb475de3d lib: atomic64: Ch... |
137 |
raw_spinlock_t *lock = lock_addr(v); |
09d4e0edd lib: Provide gene... |
138 |
long long val; |
f59ca0587 locking, lib/atom... |
139 |
raw_spin_lock_irqsave(lock, flags); |
09d4e0edd lib: Provide gene... |
140 141 142 |
val = v->counter - 1; if (val >= 0) v->counter = val; |
f59ca0587 locking, lib/atom... |
143 |
raw_spin_unlock_irqrestore(lock, flags); |
09d4e0edd lib: Provide gene... |
144 145 |
return val; } |
3fc7b4b22 lib: export gener... |
146 |
EXPORT_SYMBOL(atomic64_dec_if_positive); |
09d4e0edd lib: Provide gene... |
147 148 149 150 |
long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n) { unsigned long flags; |
cb475de3d lib: atomic64: Ch... |
151 |
raw_spinlock_t *lock = lock_addr(v); |
09d4e0edd lib: Provide gene... |
152 |
long long val; |
f59ca0587 locking, lib/atom... |
153 |
raw_spin_lock_irqsave(lock, flags); |
09d4e0edd lib: Provide gene... |
154 155 156 |
val = v->counter; if (val == o) v->counter = n; |
f59ca0587 locking, lib/atom... |
157 |
raw_spin_unlock_irqrestore(lock, flags); |
09d4e0edd lib: Provide gene... |
158 159 |
return val; } |
3fc7b4b22 lib: export gener... |
160 |
EXPORT_SYMBOL(atomic64_cmpxchg); |
09d4e0edd lib: Provide gene... |
161 162 163 164 |
long long atomic64_xchg(atomic64_t *v, long long new) { unsigned long flags; |
cb475de3d lib: atomic64: Ch... |
165 |
raw_spinlock_t *lock = lock_addr(v); |
09d4e0edd lib: Provide gene... |
166 |
long long val; |
f59ca0587 locking, lib/atom... |
167 |
raw_spin_lock_irqsave(lock, flags); |
09d4e0edd lib: Provide gene... |
168 169 |
val = v->counter; v->counter = new; |
f59ca0587 locking, lib/atom... |
170 |
raw_spin_unlock_irqrestore(lock, flags); |
09d4e0edd lib: Provide gene... |
171 172 |
return val; } |
3fc7b4b22 lib: export gener... |
173 |
EXPORT_SYMBOL(atomic64_xchg); |
09d4e0edd lib: Provide gene... |
174 175 176 177 |
int atomic64_add_unless(atomic64_t *v, long long a, long long u) { unsigned long flags; |
cb475de3d lib: atomic64: Ch... |
178 |
raw_spinlock_t *lock = lock_addr(v); |
97577896f lib: Fix atomic64... |
179 |
int ret = 0; |
09d4e0edd lib: Provide gene... |
180 |
|
f59ca0587 locking, lib/atom... |
181 |
raw_spin_lock_irqsave(lock, flags); |
09d4e0edd lib: Provide gene... |
182 183 |
if (v->counter != u) { v->counter += a; |
97577896f lib: Fix atomic64... |
184 |
ret = 1; |
09d4e0edd lib: Provide gene... |
185 |
} |
f59ca0587 locking, lib/atom... |
186 |
raw_spin_unlock_irqrestore(lock, flags); |
09d4e0edd lib: Provide gene... |
187 188 |
return ret; } |
3fc7b4b22 lib: export gener... |
189 |
EXPORT_SYMBOL(atomic64_add_unless); |