Commit fb5eeeee44edb248b4837416966f19731f497f79

Authored by Paul Jackson
Committed by Linus Torvalds
1 parent 28a42b9ea7

[PATCH] cpusets: bitmap and mask remap operators

In the forthcoming task migration support, a key calculation will be
mapping cpu and node numbers from the old set to the new set while
preserving cpuset-relative offset.

For example, if a task and its pages on nodes 8-11 are being migrated to
nodes 24-27, then pages on node 9 (the 2nd node in the old set) should be
moved to node 25 (the 2nd node in the new set).

As with other bitmap operations, the proper way to code this is to provide
the underlying calculation in lib/bitmap.c, and then to provide the usual
cpumask and nodemask wrappers.

This patch provides that.  These operations are termed 'remap' operations.
Remapping both a single bit and a whole set of bits is supported.
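
To make the calculation concrete, here is a rough userspace-style sketch of the bit-ordinal
mapping described above. It is an illustration only, not the lib/bitmap.c implementation
added by this patch, and it skips corner cases such as bits that lie outside the old set.

#include <stdio.h>

#define NBITS ((int)(8 * sizeof(unsigned long)))

/* ordinal position (0-based) of 'bit' among the set bits of 'mask' */
static int bit_ordinal(unsigned long mask, int bit)
{
	int i, n = 0;

	for (i = 0; i < bit; i++)
		if (mask & (1UL << i))
			n++;
	return n;
}

/* the n-th (0-based) set bit of 'mask', or -1 if there is none */
static int nth_set_bit(unsigned long mask, int n)
{
	int i;

	for (i = 0; i < NBITS; i++)
		if ((mask & (1UL << i)) && n-- == 0)
			return i;
	return -1;
}

int main(void)
{
	unsigned long old = 0xfUL << 8;		/* old set: nodes 8-11 */
	unsigned long new = 0xfUL << 24;	/* new set: nodes 24-27 */

	/* node 9 is the 2nd node of the old set, so it maps to node 25,
	 * the 2nd node of the new set */
	printf("%d\n", nth_set_bit(new, bit_ordinal(old, 9)));	/* prints 25 */
	return 0;
}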

Signed-off-by: Paul Jackson <pj@sgi.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>

Showing 4 changed files with 212 additions and 0 deletions

include/linux/bitmap.h
#ifndef __LINUX_BITMAP_H
#define __LINUX_BITMAP_H

#ifndef __ASSEMBLY__

#include <linux/types.h>
#include <linux/bitops.h>
#include <linux/string.h>

/*
 * bitmaps provide bit arrays that consume one or more unsigned
 * longs. The bitmap interface and available operations are listed
 * here, in bitmap.h
 *
 * Function implementations generic to all architectures are in
 * lib/bitmap.c. Functions implementations that are architecture
 * specific are in various include/asm-<arch>/bitops.h headers
 * and other arch/<arch> specific files.
 *
 * See lib/bitmap.c for more details.
 */

/*
 * The available bitmap operations and their rough meaning in the
 * case that the bitmap is a single unsigned long are thus:
 *
 * bitmap_zero(dst, nbits) *dst = 0UL
 * bitmap_fill(dst, nbits) *dst = ~0UL
 * bitmap_copy(dst, src, nbits) *dst = *src
 * bitmap_and(dst, src1, src2, nbits) *dst = *src1 & *src2
 * bitmap_or(dst, src1, src2, nbits) *dst = *src1 | *src2
 * bitmap_xor(dst, src1, src2, nbits) *dst = *src1 ^ *src2
 * bitmap_andnot(dst, src1, src2, nbits) *dst = *src1 & ~(*src2)
 * bitmap_complement(dst, src, nbits) *dst = ~(*src)
 * bitmap_equal(src1, src2, nbits) Are *src1 and *src2 equal?
 * bitmap_intersects(src1, src2, nbits) Do *src1 and *src2 overlap?
 * bitmap_subset(src1, src2, nbits) Is *src1 a subset of *src2?
 * bitmap_empty(src, nbits) Are all bits zero in *src?
 * bitmap_full(src, nbits) Are all bits set in *src?
 * bitmap_weight(src, nbits) Hamming Weight: number set bits
 * bitmap_shift_right(dst, src, n, nbits) *dst = *src >> n
 * bitmap_shift_left(dst, src, n, nbits) *dst = *src << n
+ * bitmap_remap(dst, src, old, new, nbits) *dst = map(old, new)(src)
+ * bitmap_bitremap(oldbit, old, new, nbits) newbit = map(old, new)(oldbit)
 * bitmap_scnprintf(buf, len, src, nbits) Print bitmap src to buf
 * bitmap_parse(ubuf, ulen, dst, nbits) Parse bitmap dst from user buf
 * bitmap_scnlistprintf(buf, len, src, nbits) Print bitmap src as list to buf
 * bitmap_parselist(buf, dst, nbits) Parse bitmap dst from list
 */

/*
 * Also the following operations in asm/bitops.h apply to bitmaps.
 *
 * set_bit(bit, addr) *addr |= bit
 * clear_bit(bit, addr) *addr &= ~bit
 * change_bit(bit, addr) *addr ^= bit
 * test_bit(bit, addr) Is bit set in *addr?
 * test_and_set_bit(bit, addr) Set bit and return old value
 * test_and_clear_bit(bit, addr) Clear bit and return old value
 * test_and_change_bit(bit, addr) Change bit and return old value
 * find_first_zero_bit(addr, nbits) Position first zero bit in *addr
 * find_first_bit(addr, nbits) Position first set bit in *addr
 * find_next_zero_bit(addr, nbits, bit) Position next zero bit in *addr >= bit
 * find_next_bit(addr, nbits, bit) Position next set bit in *addr >= bit
 */

/*
 * The DECLARE_BITMAP(name,bits) macro, in linux/types.h, can be used
 * to declare an array named 'name' of just enough unsigned longs to
 * contain all bit positions from 0 to 'bits' - 1.
 */

/*
 * lib/bitmap.c provides these functions:
 */

extern int __bitmap_empty(const unsigned long *bitmap, int bits);
extern int __bitmap_full(const unsigned long *bitmap, int bits);
extern int __bitmap_equal(const unsigned long *bitmap1,
		const unsigned long *bitmap2, int bits);
extern void __bitmap_complement(unsigned long *dst, const unsigned long *src,
		int bits);
extern void __bitmap_shift_right(unsigned long *dst,
		const unsigned long *src, int shift, int bits);
extern void __bitmap_shift_left(unsigned long *dst,
		const unsigned long *src, int shift, int bits);
extern void __bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
		const unsigned long *bitmap2, int bits);
extern void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
		const unsigned long *bitmap2, int bits);
extern void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
		const unsigned long *bitmap2, int bits);
extern void __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
		const unsigned long *bitmap2, int bits);
extern int __bitmap_intersects(const unsigned long *bitmap1,
		const unsigned long *bitmap2, int bits);
extern int __bitmap_subset(const unsigned long *bitmap1,
		const unsigned long *bitmap2, int bits);
extern int __bitmap_weight(const unsigned long *bitmap, int bits);

extern int bitmap_scnprintf(char *buf, unsigned int len,
		const unsigned long *src, int nbits);
extern int bitmap_parse(const char __user *ubuf, unsigned int ulen,
		unsigned long *dst, int nbits);
extern int bitmap_scnlistprintf(char *buf, unsigned int len,
		const unsigned long *src, int nbits);
extern int bitmap_parselist(const char *buf, unsigned long *maskp,
		int nmaskbits);
+extern void bitmap_remap(unsigned long *dst, const unsigned long *src,
+		const unsigned long *old, const unsigned long *new, int bits);
+extern int bitmap_bitremap(int oldbit,
+		const unsigned long *old, const unsigned long *new, int bits);
extern int bitmap_find_free_region(unsigned long *bitmap, int bits, int order);
extern void bitmap_release_region(unsigned long *bitmap, int pos, int order);
extern int bitmap_allocate_region(unsigned long *bitmap, int pos, int order);

#define BITMAP_LAST_WORD_MASK(nbits) \
( \
	((nbits) % BITS_PER_LONG) ? \
		(1UL<<((nbits) % BITS_PER_LONG))-1 : ~0UL \
)

static inline void bitmap_zero(unsigned long *dst, int nbits)
{
	if (nbits <= BITS_PER_LONG)
		*dst = 0UL;
	else {
		int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);
		memset(dst, 0, len);
	}
}

static inline void bitmap_fill(unsigned long *dst, int nbits)
{
	size_t nlongs = BITS_TO_LONGS(nbits);
	if (nlongs > 1) {
		int len = (nlongs - 1) * sizeof(unsigned long);
		memset(dst, 0xff, len);
	}
	dst[nlongs - 1] = BITMAP_LAST_WORD_MASK(nbits);
}

static inline void bitmap_copy(unsigned long *dst, const unsigned long *src,
		int nbits)
{
	if (nbits <= BITS_PER_LONG)
		*dst = *src;
	else {
		int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);
		memcpy(dst, src, len);
	}
}

static inline void bitmap_and(unsigned long *dst, const unsigned long *src1,
		const unsigned long *src2, int nbits)
{
	if (nbits <= BITS_PER_LONG)
		*dst = *src1 & *src2;
	else
		__bitmap_and(dst, src1, src2, nbits);
}

static inline void bitmap_or(unsigned long *dst, const unsigned long *src1,
		const unsigned long *src2, int nbits)
{
	if (nbits <= BITS_PER_LONG)
		*dst = *src1 | *src2;
	else
		__bitmap_or(dst, src1, src2, nbits);
}

static inline void bitmap_xor(unsigned long *dst, const unsigned long *src1,
		const unsigned long *src2, int nbits)
{
	if (nbits <= BITS_PER_LONG)
		*dst = *src1 ^ *src2;
	else
		__bitmap_xor(dst, src1, src2, nbits);
}

static inline void bitmap_andnot(unsigned long *dst, const unsigned long *src1,
		const unsigned long *src2, int nbits)
{
	if (nbits <= BITS_PER_LONG)
		*dst = *src1 & ~(*src2);
	else
		__bitmap_andnot(dst, src1, src2, nbits);
}

static inline void bitmap_complement(unsigned long *dst, const unsigned long *src,
		int nbits)
{
	if (nbits <= BITS_PER_LONG)
		*dst = ~(*src) & BITMAP_LAST_WORD_MASK(nbits);
	else
		__bitmap_complement(dst, src, nbits);
}

static inline int bitmap_equal(const unsigned long *src1,
		const unsigned long *src2, int nbits)
{
	if (nbits <= BITS_PER_LONG)
		return ! ((*src1 ^ *src2) & BITMAP_LAST_WORD_MASK(nbits));
	else
		return __bitmap_equal(src1, src2, nbits);
}

static inline int bitmap_intersects(const unsigned long *src1,
		const unsigned long *src2, int nbits)
{
	if (nbits <= BITS_PER_LONG)
		return ((*src1 & *src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
	else
		return __bitmap_intersects(src1, src2, nbits);
}

static inline int bitmap_subset(const unsigned long *src1,
		const unsigned long *src2, int nbits)
{
	if (nbits <= BITS_PER_LONG)
		return ! ((*src1 & ~(*src2)) & BITMAP_LAST_WORD_MASK(nbits));
	else
		return __bitmap_subset(src1, src2, nbits);
}

static inline int bitmap_empty(const unsigned long *src, int nbits)
{
	if (nbits <= BITS_PER_LONG)
		return ! (*src & BITMAP_LAST_WORD_MASK(nbits));
	else
		return __bitmap_empty(src, nbits);
}

static inline int bitmap_full(const unsigned long *src, int nbits)
{
	if (nbits <= BITS_PER_LONG)
		return ! (~(*src) & BITMAP_LAST_WORD_MASK(nbits));
	else
		return __bitmap_full(src, nbits);
}

static inline int bitmap_weight(const unsigned long *src, int nbits)
{
	return __bitmap_weight(src, nbits);
}

static inline void bitmap_shift_right(unsigned long *dst,
		const unsigned long *src, int n, int nbits)
{
	if (nbits <= BITS_PER_LONG)
		*dst = *src >> n;
	else
		__bitmap_shift_right(dst, src, n, nbits);
}

static inline void bitmap_shift_left(unsigned long *dst,
		const unsigned long *src, int n, int nbits)
{
	if (nbits <= BITS_PER_LONG)
		*dst = (*src << n) & BITMAP_LAST_WORD_MASK(nbits);
	else
		__bitmap_shift_left(dst, src, n, nbits);
}

#endif /* __ASSEMBLY__ */

#endif /* __LINUX_BITMAP_H */

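For reference, a minimal sketch of how a caller might use the two new lib/bitmap.c entry
points declared above. The function and bitmap names here are invented for illustration
and are not part of the patch; the expected results follow the map(old, new) semantics
listed in the comment block.

#include <linux/bitmap.h>

#define EX_NBITS 128

static int bitmap_remap_example(void)
{
	DECLARE_BITMAP(src, EX_NBITS);	/* a task's old placement: node 9 */
	DECLARE_BITMAP(dst, EX_NBITS);
	DECLARE_BITMAP(old, EX_NBITS);	/* old cpuset: nodes 8-11 */
	DECLARE_BITMAP(new, EX_NBITS);	/* new cpuset: nodes 24-27 */
	int i;

	bitmap_zero(src, EX_NBITS);
	bitmap_zero(old, EX_NBITS);
	bitmap_zero(new, EX_NBITS);
	for (i = 8; i <= 11; i++) {
		set_bit(i, old);
		set_bit(i + 16, new);
	}
	set_bit(9, src);

	/* whole-bitmap form: dst should end up with just bit 25 set */
	bitmap_remap(dst, src, old, new, EX_NBITS);

	/* single-bit form: should also yield 25 (2nd bit of old -> 2nd of new) */
	return bitmap_bitremap(9, old, new, EX_NBITS);
}
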
include/linux/cpumask.h
#ifndef __LINUX_CPUMASK_H
#define __LINUX_CPUMASK_H

/*
 * Cpumasks provide a bitmap suitable for representing the
 * set of CPU's in a system, one bit position per CPU number.
 *
 * See detailed comments in the file linux/bitmap.h describing the
 * data type on which these cpumasks are based.
 *
 * For details of cpumask_scnprintf() and cpumask_parse(),
 * see bitmap_scnprintf() and bitmap_parse() in lib/bitmap.c.
 * For details of cpulist_scnprintf() and cpulist_parse(), see
 * bitmap_scnlistprintf() and bitmap_parselist(), also in bitmap.c.
+ * For details of cpu_remap(), see bitmap_bitremap in lib/bitmap.c
+ * For details of cpus_remap(), see bitmap_remap in lib/bitmap.c.
 *
 * The available cpumask operations are:
 *
 * void cpu_set(cpu, mask) turn on bit 'cpu' in mask
 * void cpu_clear(cpu, mask) turn off bit 'cpu' in mask
 * void cpus_setall(mask) set all bits
 * void cpus_clear(mask) clear all bits
 * int cpu_isset(cpu, mask) true iff bit 'cpu' set in mask
 * int cpu_test_and_set(cpu, mask) test and set bit 'cpu' in mask
 *
 * void cpus_and(dst, src1, src2) dst = src1 & src2 [intersection]
 * void cpus_or(dst, src1, src2) dst = src1 | src2 [union]
 * void cpus_xor(dst, src1, src2) dst = src1 ^ src2
 * void cpus_andnot(dst, src1, src2) dst = src1 & ~src2
 * void cpus_complement(dst, src) dst = ~src
 *
 * int cpus_equal(mask1, mask2) Does mask1 == mask2?
 * int cpus_intersects(mask1, mask2) Do mask1 and mask2 intersect?
 * int cpus_subset(mask1, mask2) Is mask1 a subset of mask2?
 * int cpus_empty(mask) Is mask empty (no bits sets)?
 * int cpus_full(mask) Is mask full (all bits sets)?
 * int cpus_weight(mask) Hamming weigh - number of set bits
 *
 * void cpus_shift_right(dst, src, n) Shift right
 * void cpus_shift_left(dst, src, n) Shift left
 *
 * int first_cpu(mask) Number lowest set bit, or NR_CPUS
 * int next_cpu(cpu, mask) Next cpu past 'cpu', or NR_CPUS
 *
 * cpumask_t cpumask_of_cpu(cpu) Return cpumask with bit 'cpu' set
 * CPU_MASK_ALL Initializer - all bits set
 * CPU_MASK_NONE Initializer - no bits set
 * unsigned long *cpus_addr(mask) Array of unsigned long's in mask
 *
 * int cpumask_scnprintf(buf, len, mask) Format cpumask for printing
 * int cpumask_parse(ubuf, ulen, mask) Parse ascii string as cpumask
 * int cpulist_scnprintf(buf, len, mask) Format cpumask as list for printing
 * int cpulist_parse(buf, map) Parse ascii string as cpulist
+ * int cpu_remap(oldbit, old, new) newbit = map(old, new)(oldbit)
+ * int cpus_remap(dst, src, old, new) *dst = map(old, new)(src)
 *
 * for_each_cpu_mask(cpu, mask) for-loop cpu over mask
 *
 * int num_online_cpus() Number of online CPUs
 * int num_possible_cpus() Number of all possible CPUs
 * int num_present_cpus() Number of present CPUs
 *
 * int cpu_online(cpu) Is some cpu online?
 * int cpu_possible(cpu) Is some cpu possible?
 * int cpu_present(cpu) Is some cpu present (can schedule)?
 *
 * int any_online_cpu(mask) First online cpu in mask
 *
 * for_each_cpu(cpu) for-loop cpu over cpu_possible_map
 * for_each_online_cpu(cpu) for-loop cpu over cpu_online_map
 * for_each_present_cpu(cpu) for-loop cpu over cpu_present_map
 *
 * Subtlety:
 * 1) The 'type-checked' form of cpu_isset() causes gcc (3.3.2, anyway)
 * to generate slightly worse code. Note for example the additional
 * 40 lines of assembly code compiling the "for each possible cpu"
 * loops buried in the disk_stat_read() macros calls when compiling
 * drivers/block/genhd.c (arch i386, CONFIG_SMP=y). So use a simple
 * one-line #define for cpu_isset(), instead of wrapping an inline
 * inside a macro, the way we do the other calls.
 */

#include <linux/kernel.h>
#include <linux/threads.h>
#include <linux/bitmap.h>
#include <asm/bug.h>

typedef struct { DECLARE_BITMAP(bits, NR_CPUS); } cpumask_t;
extern cpumask_t _unused_cpumask_arg_;

#define cpu_set(cpu, dst) __cpu_set((cpu), &(dst))
static inline void __cpu_set(int cpu, volatile cpumask_t *dstp)
{
	set_bit(cpu, dstp->bits);
}

#define cpu_clear(cpu, dst) __cpu_clear((cpu), &(dst))
static inline void __cpu_clear(int cpu, volatile cpumask_t *dstp)
{
	clear_bit(cpu, dstp->bits);
}

#define cpus_setall(dst) __cpus_setall(&(dst), NR_CPUS)
static inline void __cpus_setall(cpumask_t *dstp, int nbits)
{
	bitmap_fill(dstp->bits, nbits);
}

#define cpus_clear(dst) __cpus_clear(&(dst), NR_CPUS)
static inline void __cpus_clear(cpumask_t *dstp, int nbits)
{
	bitmap_zero(dstp->bits, nbits);
}

/* No static inline type checking - see Subtlety (1) above. */
#define cpu_isset(cpu, cpumask) test_bit((cpu), (cpumask).bits)

#define cpu_test_and_set(cpu, cpumask) __cpu_test_and_set((cpu), &(cpumask))
static inline int __cpu_test_and_set(int cpu, cpumask_t *addr)
{
	return test_and_set_bit(cpu, addr->bits);
}

#define cpus_and(dst, src1, src2) __cpus_and(&(dst), &(src1), &(src2), NR_CPUS)
static inline void __cpus_and(cpumask_t *dstp, const cpumask_t *src1p,
		const cpumask_t *src2p, int nbits)
{
	bitmap_and(dstp->bits, src1p->bits, src2p->bits, nbits);
}

#define cpus_or(dst, src1, src2) __cpus_or(&(dst), &(src1), &(src2), NR_CPUS)
static inline void __cpus_or(cpumask_t *dstp, const cpumask_t *src1p,
		const cpumask_t *src2p, int nbits)
{
	bitmap_or(dstp->bits, src1p->bits, src2p->bits, nbits);
}

#define cpus_xor(dst, src1, src2) __cpus_xor(&(dst), &(src1), &(src2), NR_CPUS)
static inline void __cpus_xor(cpumask_t *dstp, const cpumask_t *src1p,
		const cpumask_t *src2p, int nbits)
{
	bitmap_xor(dstp->bits, src1p->bits, src2p->bits, nbits);
}

#define cpus_andnot(dst, src1, src2) \
		__cpus_andnot(&(dst), &(src1), &(src2), NR_CPUS)
static inline void __cpus_andnot(cpumask_t *dstp, const cpumask_t *src1p,
		const cpumask_t *src2p, int nbits)
{
	bitmap_andnot(dstp->bits, src1p->bits, src2p->bits, nbits);
}

#define cpus_complement(dst, src) __cpus_complement(&(dst), &(src), NR_CPUS)
static inline void __cpus_complement(cpumask_t *dstp,
		const cpumask_t *srcp, int nbits)
{
	bitmap_complement(dstp->bits, srcp->bits, nbits);
}

#define cpus_equal(src1, src2) __cpus_equal(&(src1), &(src2), NR_CPUS)
static inline int __cpus_equal(const cpumask_t *src1p,
		const cpumask_t *src2p, int nbits)
{
	return bitmap_equal(src1p->bits, src2p->bits, nbits);
}

#define cpus_intersects(src1, src2) __cpus_intersects(&(src1), &(src2), NR_CPUS)
static inline int __cpus_intersects(const cpumask_t *src1p,
		const cpumask_t *src2p, int nbits)
{
	return bitmap_intersects(src1p->bits, src2p->bits, nbits);
}

#define cpus_subset(src1, src2) __cpus_subset(&(src1), &(src2), NR_CPUS)
static inline int __cpus_subset(const cpumask_t *src1p,
		const cpumask_t *src2p, int nbits)
{
	return bitmap_subset(src1p->bits, src2p->bits, nbits);
}

#define cpus_empty(src) __cpus_empty(&(src), NR_CPUS)
static inline int __cpus_empty(const cpumask_t *srcp, int nbits)
{
	return bitmap_empty(srcp->bits, nbits);
}

#define cpus_full(cpumask) __cpus_full(&(cpumask), NR_CPUS)
static inline int __cpus_full(const cpumask_t *srcp, int nbits)
{
	return bitmap_full(srcp->bits, nbits);
}

#define cpus_weight(cpumask) __cpus_weight(&(cpumask), NR_CPUS)
static inline int __cpus_weight(const cpumask_t *srcp, int nbits)
{
	return bitmap_weight(srcp->bits, nbits);
}

#define cpus_shift_right(dst, src, n) \
		__cpus_shift_right(&(dst), &(src), (n), NR_CPUS)
static inline void __cpus_shift_right(cpumask_t *dstp,
		const cpumask_t *srcp, int n, int nbits)
{
	bitmap_shift_right(dstp->bits, srcp->bits, n, nbits);
}

#define cpus_shift_left(dst, src, n) \
		__cpus_shift_left(&(dst), &(src), (n), NR_CPUS)
static inline void __cpus_shift_left(cpumask_t *dstp,
		const cpumask_t *srcp, int n, int nbits)
{
	bitmap_shift_left(dstp->bits, srcp->bits, n, nbits);
}

#define first_cpu(src) __first_cpu(&(src), NR_CPUS)
static inline int __first_cpu(const cpumask_t *srcp, int nbits)
{
	return min_t(int, nbits, find_first_bit(srcp->bits, nbits));
}

#define next_cpu(n, src) __next_cpu((n), &(src), NR_CPUS)
static inline int __next_cpu(int n, const cpumask_t *srcp, int nbits)
{
	return min_t(int, nbits, find_next_bit(srcp->bits, nbits, n+1));
}

#define cpumask_of_cpu(cpu) \
({ \
	typeof(_unused_cpumask_arg_) m; \
	if (sizeof(m) == sizeof(unsigned long)) { \
		m.bits[0] = 1UL<<(cpu); \
	} else { \
		cpus_clear(m); \
		cpu_set((cpu), m); \
	} \
	m; \
})

#define CPU_MASK_LAST_WORD BITMAP_LAST_WORD_MASK(NR_CPUS)

#if NR_CPUS <= BITS_PER_LONG

#define CPU_MASK_ALL \
(cpumask_t) { { \
	[BITS_TO_LONGS(NR_CPUS)-1] = CPU_MASK_LAST_WORD \
} }

#else

#define CPU_MASK_ALL \
(cpumask_t) { { \
	[0 ... BITS_TO_LONGS(NR_CPUS)-2] = ~0UL, \
	[BITS_TO_LONGS(NR_CPUS)-1] = CPU_MASK_LAST_WORD \
} }

#endif

#define CPU_MASK_NONE \
(cpumask_t) { { \
	[0 ... BITS_TO_LONGS(NR_CPUS)-1] = 0UL \
} }

#define CPU_MASK_CPU0 \
(cpumask_t) { { \
	[0] = 1UL \
} }

#define cpus_addr(src) ((src).bits)

#define cpumask_scnprintf(buf, len, src) \
		__cpumask_scnprintf((buf), (len), &(src), NR_CPUS)
static inline int __cpumask_scnprintf(char *buf, int len,
		const cpumask_t *srcp, int nbits)
{
	return bitmap_scnprintf(buf, len, srcp->bits, nbits);
}

#define cpumask_parse(ubuf, ulen, dst) \
		__cpumask_parse((ubuf), (ulen), &(dst), NR_CPUS)
static inline int __cpumask_parse(const char __user *buf, int len,
		cpumask_t *dstp, int nbits)
{
	return bitmap_parse(buf, len, dstp->bits, nbits);
}

#define cpulist_scnprintf(buf, len, src) \
		__cpulist_scnprintf((buf), (len), &(src), NR_CPUS)
static inline int __cpulist_scnprintf(char *buf, int len,
		const cpumask_t *srcp, int nbits)
{
	return bitmap_scnlistprintf(buf, len, srcp->bits, nbits);
}

#define cpulist_parse(buf, dst) __cpulist_parse((buf), &(dst), NR_CPUS)
static inline int __cpulist_parse(const char *buf, cpumask_t *dstp, int nbits)
{
	return bitmap_parselist(buf, dstp->bits, nbits);
+}
+
+#define cpu_remap(oldbit, old, new) \
+		__cpu_remap((oldbit), &(old), &(new), NR_CPUS)
+static inline int __cpu_remap(int oldbit,
+		const cpumask_t *oldp, const cpumask_t *newp, int nbits)
+{
+	return bitmap_bitremap(oldbit, oldp->bits, newp->bits, nbits);
+}
+
+#define cpus_remap(dst, src, old, new) \
+		__cpus_remap(&(dst), &(src), &(old), &(new), NR_CPUS)
+static inline void __cpus_remap(cpumask_t *dstp, const cpumask_t *srcp,
+		const cpumask_t *oldp, const cpumask_t *newp, int nbits)
+{
+	bitmap_remap(dstp->bits, srcp->bits, oldp->bits, newp->bits, nbits);
}

#if NR_CPUS > 1
#define for_each_cpu_mask(cpu, mask) \
	for ((cpu) = first_cpu(mask); \
		(cpu) < NR_CPUS; \
		(cpu) = next_cpu((cpu), (mask)))
#else /* NR_CPUS == 1 */
#define for_each_cpu_mask(cpu, mask) for ((cpu) = 0; (cpu) < 1; (cpu)++)
#endif /* NR_CPUS */

/*
 * The following particular system cpumasks and operations manage
 * possible, present and online cpus. Each of them is a fixed size
 * bitmap of size NR_CPUS.
 *
 * #ifdef CONFIG_HOTPLUG_CPU
 * cpu_possible_map - all NR_CPUS bits set
 * cpu_present_map - has bit 'cpu' set iff cpu is populated
 * cpu_online_map - has bit 'cpu' set iff cpu available to scheduler
 * #else
 * cpu_possible_map - has bit 'cpu' set iff cpu is populated
 * cpu_present_map - copy of cpu_possible_map
 * cpu_online_map - has bit 'cpu' set iff cpu available to scheduler
 * #endif
 *
 * In either case, NR_CPUS is fixed at compile time, as the static
 * size of these bitmaps. The cpu_possible_map is fixed at boot
 * time, as the set of CPU id's that it is possible might ever
 * be plugged in at anytime during the life of that system boot.
 * The cpu_present_map is dynamic(*), representing which CPUs
 * are currently plugged in. And cpu_online_map is the dynamic
 * subset of cpu_present_map, indicating those CPUs available
 * for scheduling.
 *
 * If HOTPLUG is enabled, then cpu_possible_map is forced to have
 * all NR_CPUS bits set, otherwise it is just the set of CPUs that
 * ACPI reports present at boot.
 *
 * If HOTPLUG is enabled, then cpu_present_map varies dynamically,
 * depending on what ACPI reports as currently plugged in, otherwise
 * cpu_present_map is just a copy of cpu_possible_map.
 *
 * (*) Well, cpu_present_map is dynamic in the hotplug case. If not
 * hotplug, it's a copy of cpu_possible_map, hence fixed at boot.
 *
 * Subtleties:
 * 1) UP arch's (NR_CPUS == 1, CONFIG_SMP not defined) hardcode
 * assumption that their single CPU is online. The UP
 * cpu_{online,possible,present}_maps are placebos. Changing them
 * will have no useful affect on the following num_*_cpus()
 * and cpu_*() macros in the UP case. This ugliness is a UP
 * optimization - don't waste any instructions or memory references
 * asking if you're online or how many CPUs there are if there is
 * only one CPU.
 * 2) Most SMP arch's #define some of these maps to be some
 * other map specific to that arch. Therefore, the following
 * must be #define macros, not inlines. To see why, examine
 * the assembly code produced by the following. Note that
 * set1() writes phys_x_map, but set2() writes x_map:
 * int x_map, phys_x_map;
 * #define set1(a) x_map = a
 * inline void set2(int a) { x_map = a; }
 * #define x_map phys_x_map
 * main(){ set1(3); set2(5); }
 */

extern cpumask_t cpu_possible_map;
extern cpumask_t cpu_online_map;
extern cpumask_t cpu_present_map;

#if NR_CPUS > 1
#define num_online_cpus() cpus_weight(cpu_online_map)
#define num_possible_cpus() cpus_weight(cpu_possible_map)
#define num_present_cpus() cpus_weight(cpu_present_map)
#define cpu_online(cpu) cpu_isset((cpu), cpu_online_map)
#define cpu_possible(cpu) cpu_isset((cpu), cpu_possible_map)
#define cpu_present(cpu) cpu_isset((cpu), cpu_present_map)
#else
#define num_online_cpus() 1
#define num_possible_cpus() 1
#define num_present_cpus() 1
#define cpu_online(cpu) ((cpu) == 0)
#define cpu_possible(cpu) ((cpu) == 0)
#define cpu_present(cpu) ((cpu) == 0)
#endif

#define any_online_cpu(mask) \
({ \
	int cpu; \
	for_each_cpu_mask(cpu, (mask)) \
		if (cpu_online(cpu)) \
			break; \
	cpu; \
})

#define for_each_cpu(cpu) for_each_cpu_mask((cpu), cpu_possible_map)
#define for_each_online_cpu(cpu) for_each_cpu_mask((cpu), cpu_online_map)
#define for_each_present_cpu(cpu) for_each_cpu_mask((cpu), cpu_present_map)

/* Find the highest possible smp_processor_id() */
#define highest_possible_processor_id() \
({ \
	unsigned int cpu, highest = 0; \
	for_each_cpu_mask(cpu, cpu_possible_map) \
		highest = cpu; \
	highest; \
})


#endif /* __LINUX_CPUMASK_H */

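A similarly hypothetical caller of the new cpumask wrappers, assuming NR_CPUS is at least
7 so the CPU numbers used below are valid (again, illustration only, not part of the patch):

#include <linux/cpumask.h>

static int cpus_remap_example(void)
{
	cpumask_t src, dst, old, new;
	int newcpu;

	cpus_clear(src);
	cpus_clear(old);
	cpus_clear(new);
	cpu_set(1, old);	/* old cpuset: cpus 1-2 */
	cpu_set(2, old);
	cpu_set(5, new);	/* new cpuset: cpus 5-6 */
	cpu_set(6, new);
	cpu_set(2, src);	/* the task currently runs on cpu 2 */

	/* whole-mask form: cpu 2 is the 2nd cpu of 'old',
	 * so dst should end up with only cpu 6 set */
	cpus_remap(dst, src, old, new);

	/* single-cpu form: should likewise return 6 */
	newcpu = cpu_remap(2, old, new);

	return newcpu == first_cpu(dst);	/* expected: both equal 6 */
}
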
include/linux/nodemask.h
#ifndef __LINUX_NODEMASK_H
#define __LINUX_NODEMASK_H

/*
 * Nodemasks provide a bitmap suitable for representing the
 * set of Node's in a system, one bit position per Node number.
 *
 * See detailed comments in the file linux/bitmap.h describing the
 * data type on which these nodemasks are based.
 *
 * For details of nodemask_scnprintf() and nodemask_parse(),
 * see bitmap_scnprintf() and bitmap_parse() in lib/bitmap.c.
 * For details of nodelist_scnprintf() and nodelist_parse(), see
 * bitmap_scnlistprintf() and bitmap_parselist(), also in bitmap.c.
+ * For details of node_remap(), see bitmap_bitremap in lib/bitmap.c.
+ * For details of nodes_remap(), see bitmap_remap in lib/bitmap.c.
 *
 * The available nodemask operations are:
 *
 * void node_set(node, mask) turn on bit 'node' in mask
 * void node_clear(node, mask) turn off bit 'node' in mask
 * void nodes_setall(mask) set all bits
 * void nodes_clear(mask) clear all bits
 * int node_isset(node, mask) true iff bit 'node' set in mask
 * int node_test_and_set(node, mask) test and set bit 'node' in mask
 *
 * void nodes_and(dst, src1, src2) dst = src1 & src2 [intersection]
 * void nodes_or(dst, src1, src2) dst = src1 | src2 [union]
 * void nodes_xor(dst, src1, src2) dst = src1 ^ src2
 * void nodes_andnot(dst, src1, src2) dst = src1 & ~src2
 * void nodes_complement(dst, src) dst = ~src
 *
 * int nodes_equal(mask1, mask2) Does mask1 == mask2?
 * int nodes_intersects(mask1, mask2) Do mask1 and mask2 intersect?
 * int nodes_subset(mask1, mask2) Is mask1 a subset of mask2?
 * int nodes_empty(mask) Is mask empty (no bits sets)?
 * int nodes_full(mask) Is mask full (all bits sets)?
 * int nodes_weight(mask) Hamming weight - number of set bits
 *
 * void nodes_shift_right(dst, src, n) Shift right
 * void nodes_shift_left(dst, src, n) Shift left
 *
 * int first_node(mask) Number lowest set bit, or MAX_NUMNODES
 * int next_node(node, mask) Next node past 'node', or MAX_NUMNODES
 * int first_unset_node(mask) First node not set in mask, or
 * MAX_NUMNODES.
 *
 * nodemask_t nodemask_of_node(node) Return nodemask with bit 'node' set
 * NODE_MASK_ALL Initializer - all bits set
 * NODE_MASK_NONE Initializer - no bits set
 * unsigned long *nodes_addr(mask) Array of unsigned long's in mask
 *
 * int nodemask_scnprintf(buf, len, mask) Format nodemask for printing
 * int nodemask_parse(ubuf, ulen, mask) Parse ascii string as nodemask
 * int nodelist_scnprintf(buf, len, mask) Format nodemask as list for printing
 * int nodelist_parse(buf, map) Parse ascii string as nodelist
+ * int node_remap(oldbit, old, new) newbit = map(old, new)(oldbit)
+ * int nodes_remap(dst, src, old, new) *dst = map(old, new)(dst)
 *
 * for_each_node_mask(node, mask) for-loop node over mask
 *
 * int num_online_nodes() Number of online Nodes
 * int num_possible_nodes() Number of all possible Nodes
 *
 * int node_online(node) Is some node online?
 * int node_possible(node) Is some node possible?
 *
 * int any_online_node(mask) First online node in mask
 *
 * node_set_online(node) set bit 'node' in node_online_map
 * node_set_offline(node) clear bit 'node' in node_online_map
 *
 * for_each_node(node) for-loop node over node_possible_map
 * for_each_online_node(node) for-loop node over node_online_map
 *
 * Subtlety:
 * 1) The 'type-checked' form of node_isset() causes gcc (3.3.2, anyway)
 * to generate slightly worse code. So use a simple one-line #define
 * for node_isset(), instead of wrapping an inline inside a macro, the
 * way we do the other calls.
 */

#include <linux/kernel.h>
#include <linux/threads.h>
#include <linux/bitmap.h>
#include <linux/numa.h>
#include <asm/bug.h>

typedef struct { DECLARE_BITMAP(bits, MAX_NUMNODES); } nodemask_t;
extern nodemask_t _unused_nodemask_arg_;

#define node_set(node, dst) __node_set((node), &(dst))
static inline void __node_set(int node, volatile nodemask_t *dstp)
{
	set_bit(node, dstp->bits);
}

#define node_clear(node, dst) __node_clear((node), &(dst))
static inline void __node_clear(int node, volatile nodemask_t *dstp)
{
	clear_bit(node, dstp->bits);
}

#define nodes_setall(dst) __nodes_setall(&(dst), MAX_NUMNODES)
static inline void __nodes_setall(nodemask_t *dstp, int nbits)
{
	bitmap_fill(dstp->bits, nbits);
}

#define nodes_clear(dst) __nodes_clear(&(dst), MAX_NUMNODES)
static inline void __nodes_clear(nodemask_t *dstp, int nbits)
{
	bitmap_zero(dstp->bits, nbits);
}

/* No static inline type checking - see Subtlety (1) above. */
#define node_isset(node, nodemask) test_bit((node), (nodemask).bits)

#define node_test_and_set(node, nodemask) \
		__node_test_and_set((node), &(nodemask))
static inline int __node_test_and_set(int node, nodemask_t *addr)
{
	return test_and_set_bit(node, addr->bits);
}

#define nodes_and(dst, src1, src2) \
		__nodes_and(&(dst), &(src1), &(src2), MAX_NUMNODES)
static inline void __nodes_and(nodemask_t *dstp, const nodemask_t *src1p,
		const nodemask_t *src2p, int nbits)
{
	bitmap_and(dstp->bits, src1p->bits, src2p->bits, nbits);
}

#define nodes_or(dst, src1, src2) \
131 __nodes_or(&(dst), &(src1), &(src2), MAX_NUMNODES) 135 __nodes_or(&(dst), &(src1), &(src2), MAX_NUMNODES)
132 static inline void __nodes_or(nodemask_t *dstp, const nodemask_t *src1p, 136 static inline void __nodes_or(nodemask_t *dstp, const nodemask_t *src1p,
133 const nodemask_t *src2p, int nbits) 137 const nodemask_t *src2p, int nbits)
134 { 138 {
135 bitmap_or(dstp->bits, src1p->bits, src2p->bits, nbits); 139 bitmap_or(dstp->bits, src1p->bits, src2p->bits, nbits);
136 } 140 }
137 141
138 #define nodes_xor(dst, src1, src2) \ 142 #define nodes_xor(dst, src1, src2) \
139 __nodes_xor(&(dst), &(src1), &(src2), MAX_NUMNODES) 143 __nodes_xor(&(dst), &(src1), &(src2), MAX_NUMNODES)
140 static inline void __nodes_xor(nodemask_t *dstp, const nodemask_t *src1p, 144 static inline void __nodes_xor(nodemask_t *dstp, const nodemask_t *src1p,
141 const nodemask_t *src2p, int nbits) 145 const nodemask_t *src2p, int nbits)
142 { 146 {
143 bitmap_xor(dstp->bits, src1p->bits, src2p->bits, nbits); 147 bitmap_xor(dstp->bits, src1p->bits, src2p->bits, nbits);
144 } 148 }
145 149
146 #define nodes_andnot(dst, src1, src2) \ 150 #define nodes_andnot(dst, src1, src2) \
147 __nodes_andnot(&(dst), &(src1), &(src2), MAX_NUMNODES) 151 __nodes_andnot(&(dst), &(src1), &(src2), MAX_NUMNODES)
148 static inline void __nodes_andnot(nodemask_t *dstp, const nodemask_t *src1p, 152 static inline void __nodes_andnot(nodemask_t *dstp, const nodemask_t *src1p,
149 const nodemask_t *src2p, int nbits) 153 const nodemask_t *src2p, int nbits)
150 { 154 {
151 bitmap_andnot(dstp->bits, src1p->bits, src2p->bits, nbits); 155 bitmap_andnot(dstp->bits, src1p->bits, src2p->bits, nbits);
152 } 156 }
153 157
154 #define nodes_complement(dst, src) \ 158 #define nodes_complement(dst, src) \
155 __nodes_complement(&(dst), &(src), MAX_NUMNODES) 159 __nodes_complement(&(dst), &(src), MAX_NUMNODES)
156 static inline void __nodes_complement(nodemask_t *dstp, 160 static inline void __nodes_complement(nodemask_t *dstp,
157 const nodemask_t *srcp, int nbits) 161 const nodemask_t *srcp, int nbits)
158 { 162 {
159 bitmap_complement(dstp->bits, srcp->bits, nbits); 163 bitmap_complement(dstp->bits, srcp->bits, nbits);
160 } 164 }
161 165
162 #define nodes_equal(src1, src2) \ 166 #define nodes_equal(src1, src2) \
163 __nodes_equal(&(src1), &(src2), MAX_NUMNODES) 167 __nodes_equal(&(src1), &(src2), MAX_NUMNODES)
164 static inline int __nodes_equal(const nodemask_t *src1p, 168 static inline int __nodes_equal(const nodemask_t *src1p,
165 const nodemask_t *src2p, int nbits) 169 const nodemask_t *src2p, int nbits)
166 { 170 {
167 return bitmap_equal(src1p->bits, src2p->bits, nbits); 171 return bitmap_equal(src1p->bits, src2p->bits, nbits);
168 } 172 }
169 173
170 #define nodes_intersects(src1, src2) \ 174 #define nodes_intersects(src1, src2) \
171 __nodes_intersects(&(src1), &(src2), MAX_NUMNODES) 175 __nodes_intersects(&(src1), &(src2), MAX_NUMNODES)
172 static inline int __nodes_intersects(const nodemask_t *src1p, 176 static inline int __nodes_intersects(const nodemask_t *src1p,
173 const nodemask_t *src2p, int nbits) 177 const nodemask_t *src2p, int nbits)
174 { 178 {
175 return bitmap_intersects(src1p->bits, src2p->bits, nbits); 179 return bitmap_intersects(src1p->bits, src2p->bits, nbits);
176 } 180 }
177 181
178 #define nodes_subset(src1, src2) \ 182 #define nodes_subset(src1, src2) \
179 __nodes_subset(&(src1), &(src2), MAX_NUMNODES) 183 __nodes_subset(&(src1), &(src2), MAX_NUMNODES)
180 static inline int __nodes_subset(const nodemask_t *src1p, 184 static inline int __nodes_subset(const nodemask_t *src1p,
181 const nodemask_t *src2p, int nbits) 185 const nodemask_t *src2p, int nbits)
182 { 186 {
183 return bitmap_subset(src1p->bits, src2p->bits, nbits); 187 return bitmap_subset(src1p->bits, src2p->bits, nbits);
184 } 188 }
185 189
186 #define nodes_empty(src) __nodes_empty(&(src), MAX_NUMNODES) 190 #define nodes_empty(src) __nodes_empty(&(src), MAX_NUMNODES)
187 static inline int __nodes_empty(const nodemask_t *srcp, int nbits) 191 static inline int __nodes_empty(const nodemask_t *srcp, int nbits)
188 { 192 {
189 return bitmap_empty(srcp->bits, nbits); 193 return bitmap_empty(srcp->bits, nbits);
190 } 194 }
191 195
192 #define nodes_full(nodemask) __nodes_full(&(nodemask), MAX_NUMNODES) 196 #define nodes_full(nodemask) __nodes_full(&(nodemask), MAX_NUMNODES)
193 static inline int __nodes_full(const nodemask_t *srcp, int nbits) 197 static inline int __nodes_full(const nodemask_t *srcp, int nbits)
194 { 198 {
195 return bitmap_full(srcp->bits, nbits); 199 return bitmap_full(srcp->bits, nbits);
196 } 200 }
197 201
198 #define nodes_weight(nodemask) __nodes_weight(&(nodemask), MAX_NUMNODES) 202 #define nodes_weight(nodemask) __nodes_weight(&(nodemask), MAX_NUMNODES)
199 static inline int __nodes_weight(const nodemask_t *srcp, int nbits) 203 static inline int __nodes_weight(const nodemask_t *srcp, int nbits)
200 { 204 {
201 return bitmap_weight(srcp->bits, nbits); 205 return bitmap_weight(srcp->bits, nbits);
202 } 206 }
203 207
204 #define nodes_shift_right(dst, src, n) \ 208 #define nodes_shift_right(dst, src, n) \
205 __nodes_shift_right(&(dst), &(src), (n), MAX_NUMNODES) 209 __nodes_shift_right(&(dst), &(src), (n), MAX_NUMNODES)
206 static inline void __nodes_shift_right(nodemask_t *dstp, 210 static inline void __nodes_shift_right(nodemask_t *dstp,
207 const nodemask_t *srcp, int n, int nbits) 211 const nodemask_t *srcp, int n, int nbits)
208 { 212 {
209 bitmap_shift_right(dstp->bits, srcp->bits, n, nbits); 213 bitmap_shift_right(dstp->bits, srcp->bits, n, nbits);
210 } 214 }
211 215
212 #define nodes_shift_left(dst, src, n) \ 216 #define nodes_shift_left(dst, src, n) \
213 __nodes_shift_left(&(dst), &(src), (n), MAX_NUMNODES) 217 __nodes_shift_left(&(dst), &(src), (n), MAX_NUMNODES)
214 static inline void __nodes_shift_left(nodemask_t *dstp, 218 static inline void __nodes_shift_left(nodemask_t *dstp,
215 const nodemask_t *srcp, int n, int nbits) 219 const nodemask_t *srcp, int n, int nbits)
216 { 220 {
217 bitmap_shift_left(dstp->bits, srcp->bits, n, nbits); 221 bitmap_shift_left(dstp->bits, srcp->bits, n, nbits);
218 } 222 }
219 223
220 /* FIXME: better would be to fix all architectures to never return 224 /* FIXME: better would be to fix all architectures to never return
221 > MAX_NUMNODES, then the silly min_ts could be dropped. */ 225 > MAX_NUMNODES, then the silly min_ts could be dropped. */
222 226
223 #define first_node(src) __first_node(&(src)) 227 #define first_node(src) __first_node(&(src))
224 static inline int __first_node(const nodemask_t *srcp) 228 static inline int __first_node(const nodemask_t *srcp)
225 { 229 {
226 return min_t(int, MAX_NUMNODES, find_first_bit(srcp->bits, MAX_NUMNODES)); 230 return min_t(int, MAX_NUMNODES, find_first_bit(srcp->bits, MAX_NUMNODES));
227 } 231 }
228 232
229 #define next_node(n, src) __next_node((n), &(src)) 233 #define next_node(n, src) __next_node((n), &(src))
230 static inline int __next_node(int n, const nodemask_t *srcp) 234 static inline int __next_node(int n, const nodemask_t *srcp)
231 { 235 {
232 return min_t(int,MAX_NUMNODES,find_next_bit(srcp->bits, MAX_NUMNODES, n+1)); 236 return min_t(int,MAX_NUMNODES,find_next_bit(srcp->bits, MAX_NUMNODES, n+1));
233 } 237 }
234 238
235 #define nodemask_of_node(node) \ 239 #define nodemask_of_node(node) \
236 ({ \ 240 ({ \
237 typeof(_unused_nodemask_arg_) m; \ 241 typeof(_unused_nodemask_arg_) m; \
238 if (sizeof(m) == sizeof(unsigned long)) { \ 242 if (sizeof(m) == sizeof(unsigned long)) { \
239 m.bits[0] = 1UL<<(node); \ 243 m.bits[0] = 1UL<<(node); \
240 } else { \ 244 } else { \
241 nodes_clear(m); \ 245 nodes_clear(m); \
242 node_set((node), m); \ 246 node_set((node), m); \
243 } \ 247 } \
244 m; \ 248 m; \
245 }) 249 })
246 250
247 #define first_unset_node(mask) __first_unset_node(&(mask)) 251 #define first_unset_node(mask) __first_unset_node(&(mask))
248 static inline int __first_unset_node(const nodemask_t *maskp) 252 static inline int __first_unset_node(const nodemask_t *maskp)
249 { 253 {
250 return min_t(int,MAX_NUMNODES, 254 return min_t(int,MAX_NUMNODES,
251 find_first_zero_bit(maskp->bits, MAX_NUMNODES)); 255 find_first_zero_bit(maskp->bits, MAX_NUMNODES));
252 } 256 }
253 257
254 #define NODE_MASK_LAST_WORD BITMAP_LAST_WORD_MASK(MAX_NUMNODES) 258 #define NODE_MASK_LAST_WORD BITMAP_LAST_WORD_MASK(MAX_NUMNODES)
255 259
256 #if MAX_NUMNODES <= BITS_PER_LONG 260 #if MAX_NUMNODES <= BITS_PER_LONG
257 261
258 #define NODE_MASK_ALL \ 262 #define NODE_MASK_ALL \
259 ((nodemask_t) { { \ 263 ((nodemask_t) { { \
260 [BITS_TO_LONGS(MAX_NUMNODES)-1] = NODE_MASK_LAST_WORD \ 264 [BITS_TO_LONGS(MAX_NUMNODES)-1] = NODE_MASK_LAST_WORD \
261 } }) 265 } })
262 266
263 #else 267 #else
264 268
265 #define NODE_MASK_ALL \ 269 #define NODE_MASK_ALL \
266 ((nodemask_t) { { \ 270 ((nodemask_t) { { \
267 [0 ... BITS_TO_LONGS(MAX_NUMNODES)-2] = ~0UL, \ 271 [0 ... BITS_TO_LONGS(MAX_NUMNODES)-2] = ~0UL, \
268 [BITS_TO_LONGS(MAX_NUMNODES)-1] = NODE_MASK_LAST_WORD \ 272 [BITS_TO_LONGS(MAX_NUMNODES)-1] = NODE_MASK_LAST_WORD \
269 } }) 273 } })
270 274
271 #endif 275 #endif
272 276
273 #define NODE_MASK_NONE \ 277 #define NODE_MASK_NONE \
274 ((nodemask_t) { { \ 278 ((nodemask_t) { { \
275 [0 ... BITS_TO_LONGS(MAX_NUMNODES)-1] = 0UL \ 279 [0 ... BITS_TO_LONGS(MAX_NUMNODES)-1] = 0UL \
276 } }) 280 } })
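
   As a concrete illustration (not part of the patch): on a 64-bit build with
   MAX_NUMNODES == 72, BITS_TO_LONGS(72) is 2 and NODE_MASK_LAST_WORD is 0xff,
   so the two initializers expand to

       NODE_MASK_ALL   == ((nodemask_t) { { ~0UL, 0xffUL } })
       NODE_MASK_NONE  == ((nodemask_t) { { 0UL, 0UL } })

   i.e. the partial last word keeps the eight valid node bits set while leaving
   the unused high bits of that word clear.
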
277 281
278 #define nodes_addr(src) ((src).bits) 282 #define nodes_addr(src) ((src).bits)
279 283
280 #define nodemask_scnprintf(buf, len, src) \ 284 #define nodemask_scnprintf(buf, len, src) \
281 __nodemask_scnprintf((buf), (len), &(src), MAX_NUMNODES) 285 __nodemask_scnprintf((buf), (len), &(src), MAX_NUMNODES)
282 static inline int __nodemask_scnprintf(char *buf, int len, 286 static inline int __nodemask_scnprintf(char *buf, int len,
283 const nodemask_t *srcp, int nbits) 287 const nodemask_t *srcp, int nbits)
284 { 288 {
285 return bitmap_scnprintf(buf, len, srcp->bits, nbits); 289 return bitmap_scnprintf(buf, len, srcp->bits, nbits);
286 } 290 }
287 291
288 #define nodemask_parse(ubuf, ulen, dst) \ 292 #define nodemask_parse(ubuf, ulen, dst) \
289 __nodemask_parse((ubuf), (ulen), &(dst), MAX_NUMNODES) 293 __nodemask_parse((ubuf), (ulen), &(dst), MAX_NUMNODES)
290 static inline int __nodemask_parse(const char __user *buf, int len, 294 static inline int __nodemask_parse(const char __user *buf, int len,
291 nodemask_t *dstp, int nbits) 295 nodemask_t *dstp, int nbits)
292 { 296 {
293 return bitmap_parse(buf, len, dstp->bits, nbits); 297 return bitmap_parse(buf, len, dstp->bits, nbits);
294 } 298 }
295 299
296 #define nodelist_scnprintf(buf, len, src) \ 300 #define nodelist_scnprintf(buf, len, src) \
297 __nodelist_scnprintf((buf), (len), &(src), MAX_NUMNODES) 301 __nodelist_scnprintf((buf), (len), &(src), MAX_NUMNODES)
298 static inline int __nodelist_scnprintf(char *buf, int len, 302 static inline int __nodelist_scnprintf(char *buf, int len,
299 const nodemask_t *srcp, int nbits) 303 const nodemask_t *srcp, int nbits)
300 { 304 {
301 return bitmap_scnlistprintf(buf, len, srcp->bits, nbits); 305 return bitmap_scnlistprintf(buf, len, srcp->bits, nbits);
302 } 306 }
303 307
304 #define nodelist_parse(buf, dst) __nodelist_parse((buf), &(dst), MAX_NUMNODES) 308 #define nodelist_parse(buf, dst) __nodelist_parse((buf), &(dst), MAX_NUMNODES)
305 static inline int __nodelist_parse(const char *buf, nodemask_t *dstp, int nbits) 309 static inline int __nodelist_parse(const char *buf, nodemask_t *dstp, int nbits)
306 { 310 {
307 return bitmap_parselist(buf, dstp->bits, nbits); 311 return bitmap_parselist(buf, dstp->bits, nbits);
312 }
313
314 #define node_remap(oldbit, old, new) \
315 __node_remap((oldbit), &(old), &(new), MAX_NUMNODES)
316 static inline int __node_remap(int oldbit,
317 const nodemask_t *oldp, const nodemask_t *newp, int nbits)
318 {
319 return bitmap_bitremap(oldbit, oldp->bits, newp->bits, nbits);
320 }
321
322 #define nodes_remap(dst, src, old, new) \
323 __nodes_remap(&(dst), &(src), &(old), &(new), MAX_NUMNODES)
324 static inline void __nodes_remap(nodemask_t *dstp, const nodemask_t *srcp,
325 const nodemask_t *oldp, const nodemask_t *newp, int nbits)
326 {
327 bitmap_remap(dstp->bits, srcp->bits, oldp->bits, newp->bits, nbits);
308 } 328 }
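
   For illustration, here is a minimal kernel-context sketch of how the two new
   wrappers are intended to be used. The function name, mask names and node
   numbers are hypothetical (and assume MAX_NUMNODES >= 8); only NODE_MASK_NONE,
   node_set(), node_isset(), node_remap() and nodes_remap() come from this header.

       static int remap_example(void)
       {
               nodemask_t from = NODE_MASK_NONE;       /* domain of the map */
               nodemask_t to = NODE_MASK_NONE;         /* range of the map */
               nodemask_t used = NODE_MASK_NONE;       /* nodes currently in use */
               nodemask_t moved;
               int n;

               for (n = 0; n < 4; n++) {
                       node_set(n, from);              /* old set: nodes 0-3 */
                       node_set(n + 4, to);            /* new set: nodes 4-7 */
               }
               node_set(1, used);
               node_set(3, used);

               /* Whole-mask form: 'moved' ends up with bits 5 and 7 set,
                * preserving each node's offset within the old set. */
               nodes_remap(moved, used, from, to);
               WARN_ON(!node_isset(5, moved) || !node_isset(7, moved));

               /* Single-bit form: node 1, the 2nd bit of 'from', maps to
                * node 5, the 2nd bit of 'to'; returns 5 here. */
               return node_remap(1, from, to);
       }
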
309 329
310 #if MAX_NUMNODES > 1 330 #if MAX_NUMNODES > 1
311 #define for_each_node_mask(node, mask) \ 331 #define for_each_node_mask(node, mask) \
312 for ((node) = first_node(mask); \ 332 for ((node) = first_node(mask); \
313 (node) < MAX_NUMNODES; \ 333 (node) < MAX_NUMNODES; \
314 (node) = next_node((node), (mask))) 334 (node) = next_node((node), (mask)))
315 #else /* MAX_NUMNODES == 1 */ 335 #else /* MAX_NUMNODES == 1 */
316 #define for_each_node_mask(node, mask) \ 336 #define for_each_node_mask(node, mask) \
317 if (!nodes_empty(mask)) \ 337 if (!nodes_empty(mask)) \
318 for ((node) = 0; (node) < 1; (node)++) 338 for ((node) = 0; (node) < 1; (node)++)
319 #endif /* MAX_NUMNODES */ 339 #endif /* MAX_NUMNODES */
320 340
321 /* 341 /*
322 * The following particular system nodemasks and operations 342 * The following particular system nodemasks and operations
323 * on them manage all possible and online nodes. 343 * on them manage all possible and online nodes.
324 */ 344 */
325 345
326 extern nodemask_t node_online_map; 346 extern nodemask_t node_online_map;
327 extern nodemask_t node_possible_map; 347 extern nodemask_t node_possible_map;
328 348
329 #if MAX_NUMNODES > 1 349 #if MAX_NUMNODES > 1
330 #define num_online_nodes() nodes_weight(node_online_map) 350 #define num_online_nodes() nodes_weight(node_online_map)
331 #define num_possible_nodes() nodes_weight(node_possible_map) 351 #define num_possible_nodes() nodes_weight(node_possible_map)
332 #define node_online(node) node_isset((node), node_online_map) 352 #define node_online(node) node_isset((node), node_online_map)
333 #define node_possible(node) node_isset((node), node_possible_map) 353 #define node_possible(node) node_isset((node), node_possible_map)
334 #else 354 #else
335 #define num_online_nodes() 1 355 #define num_online_nodes() 1
336 #define num_possible_nodes() 1 356 #define num_possible_nodes() 1
337 #define node_online(node) ((node) == 0) 357 #define node_online(node) ((node) == 0)
338 #define node_possible(node) ((node) == 0) 358 #define node_possible(node) ((node) == 0)
339 #endif 359 #endif
340 360
341 #define any_online_node(mask) \ 361 #define any_online_node(mask) \
342 ({ \ 362 ({ \
343 int node; \ 363 int node; \
344 for_each_node_mask(node, (mask)) \ 364 for_each_node_mask(node, (mask)) \
345 if (node_online(node)) \ 365 if (node_online(node)) \
346 break; \ 366 break; \
347 node; \ 367 node; \
348 }) 368 })
349 369
350 #define node_set_online(node) set_bit((node), node_online_map.bits) 370 #define node_set_online(node) set_bit((node), node_online_map.bits)
351 #define node_set_offline(node) clear_bit((node), node_online_map.bits) 371 #define node_set_offline(node) clear_bit((node), node_online_map.bits)
352 372
353 #define for_each_node(node) for_each_node_mask((node), node_possible_map) 373 #define for_each_node(node) for_each_node_mask((node), node_possible_map)
354 #define for_each_online_node(node) for_each_node_mask((node), node_online_map) 374 #define for_each_online_node(node) for_each_node_mask((node), node_online_map)
355 375
356 #endif /* __LINUX_NODEMASK_H */ 376 #endif /* __LINUX_NODEMASK_H */
357 377
1 /* 1 /*
2 * lib/bitmap.c 2 * lib/bitmap.c
3 * Helper functions for bitmap.h. 3 * Helper functions for bitmap.h.
4 * 4 *
5 * This source code is licensed under the GNU General Public License, 5 * This source code is licensed under the GNU General Public License,
6 * Version 2. See the file COPYING for more details. 6 * Version 2. See the file COPYING for more details.
7 */ 7 */
8 #include <linux/module.h> 8 #include <linux/module.h>
9 #include <linux/ctype.h> 9 #include <linux/ctype.h>
10 #include <linux/errno.h> 10 #include <linux/errno.h>
11 #include <linux/bitmap.h> 11 #include <linux/bitmap.h>
12 #include <linux/bitops.h> 12 #include <linux/bitops.h>
13 #include <asm/uaccess.h> 13 #include <asm/uaccess.h>
14 14
15 /* 15 /*
 16 * bitmaps provide an array of bits, implemented using an 16 * bitmaps provide an array of bits, implemented using an
17 * array of unsigned longs. The number of valid bits in a 17 * array of unsigned longs. The number of valid bits in a
18 * given bitmap does _not_ need to be an exact multiple of 18 * given bitmap does _not_ need to be an exact multiple of
19 * BITS_PER_LONG. 19 * BITS_PER_LONG.
20 * 20 *
21 * The possible unused bits in the last, partially used word 21 * The possible unused bits in the last, partially used word
22 * of a bitmap are 'don't care'. The implementation makes 22 * of a bitmap are 'don't care'. The implementation makes
23 * no particular effort to keep them zero. It ensures that 23 * no particular effort to keep them zero. It ensures that
24 * their value will not affect the results of any operation. 24 * their value will not affect the results of any operation.
25 * The bitmap operations that return Boolean (bitmap_empty, 25 * The bitmap operations that return Boolean (bitmap_empty,
26 * for example) or scalar (bitmap_weight, for example) results 26 * for example) or scalar (bitmap_weight, for example) results
27 * carefully filter out these unused bits from impacting their 27 * carefully filter out these unused bits from impacting their
28 * results. 28 * results.
29 * 29 *
30 * These operations actually hold to a slightly stronger rule: 30 * These operations actually hold to a slightly stronger rule:
31 * if you don't input any bitmaps to these ops that have some 31 * if you don't input any bitmaps to these ops that have some
32 * unused bits set, then they won't output any set unused bits 32 * unused bits set, then they won't output any set unused bits
33 * in output bitmaps. 33 * in output bitmaps.
34 * 34 *
35 * The byte ordering of bitmaps is more natural on little 35 * The byte ordering of bitmaps is more natural on little
36 * endian architectures. See the big-endian headers 36 * endian architectures. See the big-endian headers
37 * include/asm-ppc64/bitops.h and include/asm-s390/bitops.h 37 * include/asm-ppc64/bitops.h and include/asm-s390/bitops.h
38 * for the best explanations of this ordering. 38 * for the best explanations of this ordering.
39 */ 39 */
40 40
41 int __bitmap_empty(const unsigned long *bitmap, int bits) 41 int __bitmap_empty(const unsigned long *bitmap, int bits)
42 { 42 {
43 int k, lim = bits/BITS_PER_LONG; 43 int k, lim = bits/BITS_PER_LONG;
44 for (k = 0; k < lim; ++k) 44 for (k = 0; k < lim; ++k)
45 if (bitmap[k]) 45 if (bitmap[k])
46 return 0; 46 return 0;
47 47
48 if (bits % BITS_PER_LONG) 48 if (bits % BITS_PER_LONG)
49 if (bitmap[k] & BITMAP_LAST_WORD_MASK(bits)) 49 if (bitmap[k] & BITMAP_LAST_WORD_MASK(bits))
50 return 0; 50 return 0;
51 51
52 return 1; 52 return 1;
53 } 53 }
54 EXPORT_SYMBOL(__bitmap_empty); 54 EXPORT_SYMBOL(__bitmap_empty);
55 55
56 int __bitmap_full(const unsigned long *bitmap, int bits) 56 int __bitmap_full(const unsigned long *bitmap, int bits)
57 { 57 {
58 int k, lim = bits/BITS_PER_LONG; 58 int k, lim = bits/BITS_PER_LONG;
59 for (k = 0; k < lim; ++k) 59 for (k = 0; k < lim; ++k)
60 if (~bitmap[k]) 60 if (~bitmap[k])
61 return 0; 61 return 0;
62 62
63 if (bits % BITS_PER_LONG) 63 if (bits % BITS_PER_LONG)
64 if (~bitmap[k] & BITMAP_LAST_WORD_MASK(bits)) 64 if (~bitmap[k] & BITMAP_LAST_WORD_MASK(bits))
65 return 0; 65 return 0;
66 66
67 return 1; 67 return 1;
68 } 68 }
69 EXPORT_SYMBOL(__bitmap_full); 69 EXPORT_SYMBOL(__bitmap_full);
70 70
71 int __bitmap_equal(const unsigned long *bitmap1, 71 int __bitmap_equal(const unsigned long *bitmap1,
72 const unsigned long *bitmap2, int bits) 72 const unsigned long *bitmap2, int bits)
73 { 73 {
74 int k, lim = bits/BITS_PER_LONG; 74 int k, lim = bits/BITS_PER_LONG;
75 for (k = 0; k < lim; ++k) 75 for (k = 0; k < lim; ++k)
76 if (bitmap1[k] != bitmap2[k]) 76 if (bitmap1[k] != bitmap2[k])
77 return 0; 77 return 0;
78 78
79 if (bits % BITS_PER_LONG) 79 if (bits % BITS_PER_LONG)
80 if ((bitmap1[k] ^ bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits)) 80 if ((bitmap1[k] ^ bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
81 return 0; 81 return 0;
82 82
83 return 1; 83 return 1;
84 } 84 }
85 EXPORT_SYMBOL(__bitmap_equal); 85 EXPORT_SYMBOL(__bitmap_equal);
86 86
87 void __bitmap_complement(unsigned long *dst, const unsigned long *src, int bits) 87 void __bitmap_complement(unsigned long *dst, const unsigned long *src, int bits)
88 { 88 {
89 int k, lim = bits/BITS_PER_LONG; 89 int k, lim = bits/BITS_PER_LONG;
90 for (k = 0; k < lim; ++k) 90 for (k = 0; k < lim; ++k)
91 dst[k] = ~src[k]; 91 dst[k] = ~src[k];
92 92
93 if (bits % BITS_PER_LONG) 93 if (bits % BITS_PER_LONG)
94 dst[k] = ~src[k] & BITMAP_LAST_WORD_MASK(bits); 94 dst[k] = ~src[k] & BITMAP_LAST_WORD_MASK(bits);
95 } 95 }
96 EXPORT_SYMBOL(__bitmap_complement); 96 EXPORT_SYMBOL(__bitmap_complement);
97 97
98 /* 98 /*
99 * __bitmap_shift_right - logical right shift of the bits in a bitmap 99 * __bitmap_shift_right - logical right shift of the bits in a bitmap
100 * @dst - destination bitmap 100 * @dst - destination bitmap
101 * @src - source bitmap 101 * @src - source bitmap
102 * @nbits - shift by this many bits 102 * @nbits - shift by this many bits
103 * @bits - bitmap size, in bits 103 * @bits - bitmap size, in bits
104 * 104 *
105 * Shifting right (dividing) means moving bits in the MS -> LS bit 105 * Shifting right (dividing) means moving bits in the MS -> LS bit
106 * direction. Zeros are fed into the vacated MS positions and the 106 * direction. Zeros are fed into the vacated MS positions and the
107 * LS bits shifted off the bottom are lost. 107 * LS bits shifted off the bottom are lost.
108 */ 108 */
109 void __bitmap_shift_right(unsigned long *dst, 109 void __bitmap_shift_right(unsigned long *dst,
110 const unsigned long *src, int shift, int bits) 110 const unsigned long *src, int shift, int bits)
111 { 111 {
112 int k, lim = BITS_TO_LONGS(bits), left = bits % BITS_PER_LONG; 112 int k, lim = BITS_TO_LONGS(bits), left = bits % BITS_PER_LONG;
113 int off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG; 113 int off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
114 unsigned long mask = (1UL << left) - 1; 114 unsigned long mask = (1UL << left) - 1;
115 for (k = 0; off + k < lim; ++k) { 115 for (k = 0; off + k < lim; ++k) {
116 unsigned long upper, lower; 116 unsigned long upper, lower;
117 117
118 /* 118 /*
119 * If shift is not word aligned, take lower rem bits of 119 * If shift is not word aligned, take lower rem bits of
120 * word above and make them the top rem bits of result. 120 * word above and make them the top rem bits of result.
121 */ 121 */
122 if (!rem || off + k + 1 >= lim) 122 if (!rem || off + k + 1 >= lim)
123 upper = 0; 123 upper = 0;
124 else { 124 else {
125 upper = src[off + k + 1]; 125 upper = src[off + k + 1];
126 if (off + k + 1 == lim - 1 && left) 126 if (off + k + 1 == lim - 1 && left)
127 upper &= mask; 127 upper &= mask;
128 } 128 }
129 lower = src[off + k]; 129 lower = src[off + k];
130 if (left && off + k == lim - 1) 130 if (left && off + k == lim - 1)
131 lower &= mask; 131 lower &= mask;
132 dst[k] = upper << (BITS_PER_LONG - rem) | lower >> rem; 132 dst[k] = upper << (BITS_PER_LONG - rem) | lower >> rem;
133 if (left && k == lim - 1) 133 if (left && k == lim - 1)
134 dst[k] &= mask; 134 dst[k] &= mask;
135 } 135 }
136 if (off) 136 if (off)
137 memset(&dst[lim - off], 0, off*sizeof(unsigned long)); 137 memset(&dst[lim - off], 0, off*sizeof(unsigned long));
138 } 138 }
139 EXPORT_SYMBOL(__bitmap_shift_right); 139 EXPORT_SYMBOL(__bitmap_shift_right);
140 140
141 141
142 /* 142 /*
143 * __bitmap_shift_left - logical left shift of the bits in a bitmap 143 * __bitmap_shift_left - logical left shift of the bits in a bitmap
144 * @dst - destination bitmap 144 * @dst - destination bitmap
145 * @src - source bitmap 145 * @src - source bitmap
146 * @nbits - shift by this many bits 146 * @nbits - shift by this many bits
147 * @bits - bitmap size, in bits 147 * @bits - bitmap size, in bits
148 * 148 *
149 * Shifting left (multiplying) means moving bits in the LS -> MS 149 * Shifting left (multiplying) means moving bits in the LS -> MS
150 * direction. Zeros are fed into the vacated LS bit positions 150 * direction. Zeros are fed into the vacated LS bit positions
151 * and those MS bits shifted off the top are lost. 151 * and those MS bits shifted off the top are lost.
152 */ 152 */
153 153
154 void __bitmap_shift_left(unsigned long *dst, 154 void __bitmap_shift_left(unsigned long *dst,
155 const unsigned long *src, int shift, int bits) 155 const unsigned long *src, int shift, int bits)
156 { 156 {
157 int k, lim = BITS_TO_LONGS(bits), left = bits % BITS_PER_LONG; 157 int k, lim = BITS_TO_LONGS(bits), left = bits % BITS_PER_LONG;
158 int off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG; 158 int off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
159 for (k = lim - off - 1; k >= 0; --k) { 159 for (k = lim - off - 1; k >= 0; --k) {
160 unsigned long upper, lower; 160 unsigned long upper, lower;
161 161
162 /* 162 /*
163 * If shift is not word aligned, take upper rem bits of 163 * If shift is not word aligned, take upper rem bits of
164 * word below and make them the bottom rem bits of result. 164 * word below and make them the bottom rem bits of result.
165 */ 165 */
166 if (rem && k > 0) 166 if (rem && k > 0)
167 lower = src[k - 1]; 167 lower = src[k - 1];
168 else 168 else
169 lower = 0; 169 lower = 0;
170 upper = src[k]; 170 upper = src[k];
171 if (left && k == lim - 1) 171 if (left && k == lim - 1)
172 upper &= (1UL << left) - 1; 172 upper &= (1UL << left) - 1;
173 dst[k + off] = lower >> (BITS_PER_LONG - rem) | upper << rem; 173 dst[k + off] = lower >> (BITS_PER_LONG - rem) | upper << rem;
174 if (left && k + off == lim - 1) 174 if (left && k + off == lim - 1)
175 dst[k + off] &= (1UL << left) - 1; 175 dst[k + off] &= (1UL << left) - 1;
176 } 176 }
177 if (off) 177 if (off)
178 memset(dst, 0, off*sizeof(unsigned long)); 178 memset(dst, 0, off*sizeof(unsigned long));
179 } 179 }
180 EXPORT_SYMBOL(__bitmap_shift_left); 180 EXPORT_SYMBOL(__bitmap_shift_left);
181 181
182 void __bitmap_and(unsigned long *dst, const unsigned long *bitmap1, 182 void __bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
183 const unsigned long *bitmap2, int bits) 183 const unsigned long *bitmap2, int bits)
184 { 184 {
185 int k; 185 int k;
186 int nr = BITS_TO_LONGS(bits); 186 int nr = BITS_TO_LONGS(bits);
187 187
188 for (k = 0; k < nr; k++) 188 for (k = 0; k < nr; k++)
189 dst[k] = bitmap1[k] & bitmap2[k]; 189 dst[k] = bitmap1[k] & bitmap2[k];
190 } 190 }
191 EXPORT_SYMBOL(__bitmap_and); 191 EXPORT_SYMBOL(__bitmap_and);
192 192
193 void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1, 193 void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
194 const unsigned long *bitmap2, int bits) 194 const unsigned long *bitmap2, int bits)
195 { 195 {
196 int k; 196 int k;
197 int nr = BITS_TO_LONGS(bits); 197 int nr = BITS_TO_LONGS(bits);
198 198
199 for (k = 0; k < nr; k++) 199 for (k = 0; k < nr; k++)
200 dst[k] = bitmap1[k] | bitmap2[k]; 200 dst[k] = bitmap1[k] | bitmap2[k];
201 } 201 }
202 EXPORT_SYMBOL(__bitmap_or); 202 EXPORT_SYMBOL(__bitmap_or);
203 203
204 void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1, 204 void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
205 const unsigned long *bitmap2, int bits) 205 const unsigned long *bitmap2, int bits)
206 { 206 {
207 int k; 207 int k;
208 int nr = BITS_TO_LONGS(bits); 208 int nr = BITS_TO_LONGS(bits);
209 209
210 for (k = 0; k < nr; k++) 210 for (k = 0; k < nr; k++)
211 dst[k] = bitmap1[k] ^ bitmap2[k]; 211 dst[k] = bitmap1[k] ^ bitmap2[k];
212 } 212 }
213 EXPORT_SYMBOL(__bitmap_xor); 213 EXPORT_SYMBOL(__bitmap_xor);
214 214
215 void __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1, 215 void __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
216 const unsigned long *bitmap2, int bits) 216 const unsigned long *bitmap2, int bits)
217 { 217 {
218 int k; 218 int k;
219 int nr = BITS_TO_LONGS(bits); 219 int nr = BITS_TO_LONGS(bits);
220 220
221 for (k = 0; k < nr; k++) 221 for (k = 0; k < nr; k++)
222 dst[k] = bitmap1[k] & ~bitmap2[k]; 222 dst[k] = bitmap1[k] & ~bitmap2[k];
223 } 223 }
224 EXPORT_SYMBOL(__bitmap_andnot); 224 EXPORT_SYMBOL(__bitmap_andnot);
225 225
226 int __bitmap_intersects(const unsigned long *bitmap1, 226 int __bitmap_intersects(const unsigned long *bitmap1,
227 const unsigned long *bitmap2, int bits) 227 const unsigned long *bitmap2, int bits)
228 { 228 {
229 int k, lim = bits/BITS_PER_LONG; 229 int k, lim = bits/BITS_PER_LONG;
230 for (k = 0; k < lim; ++k) 230 for (k = 0; k < lim; ++k)
231 if (bitmap1[k] & bitmap2[k]) 231 if (bitmap1[k] & bitmap2[k])
232 return 1; 232 return 1;
233 233
234 if (bits % BITS_PER_LONG) 234 if (bits % BITS_PER_LONG)
235 if ((bitmap1[k] & bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits)) 235 if ((bitmap1[k] & bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
236 return 1; 236 return 1;
237 return 0; 237 return 0;
238 } 238 }
239 EXPORT_SYMBOL(__bitmap_intersects); 239 EXPORT_SYMBOL(__bitmap_intersects);
240 240
241 int __bitmap_subset(const unsigned long *bitmap1, 241 int __bitmap_subset(const unsigned long *bitmap1,
242 const unsigned long *bitmap2, int bits) 242 const unsigned long *bitmap2, int bits)
243 { 243 {
244 int k, lim = bits/BITS_PER_LONG; 244 int k, lim = bits/BITS_PER_LONG;
245 for (k = 0; k < lim; ++k) 245 for (k = 0; k < lim; ++k)
246 if (bitmap1[k] & ~bitmap2[k]) 246 if (bitmap1[k] & ~bitmap2[k])
247 return 0; 247 return 0;
248 248
249 if (bits % BITS_PER_LONG) 249 if (bits % BITS_PER_LONG)
250 if ((bitmap1[k] & ~bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits)) 250 if ((bitmap1[k] & ~bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
251 return 0; 251 return 0;
252 return 1; 252 return 1;
253 } 253 }
254 EXPORT_SYMBOL(__bitmap_subset); 254 EXPORT_SYMBOL(__bitmap_subset);
255 255
256 #if BITS_PER_LONG == 32 256 #if BITS_PER_LONG == 32
257 int __bitmap_weight(const unsigned long *bitmap, int bits) 257 int __bitmap_weight(const unsigned long *bitmap, int bits)
258 { 258 {
259 int k, w = 0, lim = bits/BITS_PER_LONG; 259 int k, w = 0, lim = bits/BITS_PER_LONG;
260 260
261 for (k = 0; k < lim; k++) 261 for (k = 0; k < lim; k++)
262 w += hweight32(bitmap[k]); 262 w += hweight32(bitmap[k]);
263 263
264 if (bits % BITS_PER_LONG) 264 if (bits % BITS_PER_LONG)
265 w += hweight32(bitmap[k] & BITMAP_LAST_WORD_MASK(bits)); 265 w += hweight32(bitmap[k] & BITMAP_LAST_WORD_MASK(bits));
266 266
267 return w; 267 return w;
268 } 268 }
269 #else 269 #else
270 int __bitmap_weight(const unsigned long *bitmap, int bits) 270 int __bitmap_weight(const unsigned long *bitmap, int bits)
271 { 271 {
272 int k, w = 0, lim = bits/BITS_PER_LONG; 272 int k, w = 0, lim = bits/BITS_PER_LONG;
273 273
274 for (k = 0; k < lim; k++) 274 for (k = 0; k < lim; k++)
275 w += hweight64(bitmap[k]); 275 w += hweight64(bitmap[k]);
276 276
277 if (bits % BITS_PER_LONG) 277 if (bits % BITS_PER_LONG)
278 w += hweight64(bitmap[k] & BITMAP_LAST_WORD_MASK(bits)); 278 w += hweight64(bitmap[k] & BITMAP_LAST_WORD_MASK(bits));
279 279
280 return w; 280 return w;
281 } 281 }
282 #endif 282 #endif
283 EXPORT_SYMBOL(__bitmap_weight); 283 EXPORT_SYMBOL(__bitmap_weight);
284 284
285 /* 285 /*
286 * Bitmap printing & parsing functions: first version by Bill Irwin, 286 * Bitmap printing & parsing functions: first version by Bill Irwin,
287 * second version by Paul Jackson, third by Joe Korty. 287 * second version by Paul Jackson, third by Joe Korty.
288 */ 288 */
289 289
290 #define CHUNKSZ 32 290 #define CHUNKSZ 32
291 #define nbits_to_hold_value(val) fls(val) 291 #define nbits_to_hold_value(val) fls(val)
292 #define unhex(c) (isdigit(c) ? (c - '0') : (toupper(c) - 'A' + 10)) 292 #define unhex(c) (isdigit(c) ? (c - '0') : (toupper(c) - 'A' + 10))
293 #define BASEDEC 10 /* fancier cpuset lists input in decimal */ 293 #define BASEDEC 10 /* fancier cpuset lists input in decimal */
294 294
295 /** 295 /**
296 * bitmap_scnprintf - convert bitmap to an ASCII hex string. 296 * bitmap_scnprintf - convert bitmap to an ASCII hex string.
297 * @buf: byte buffer into which string is placed 297 * @buf: byte buffer into which string is placed
298 * @buflen: reserved size of @buf, in bytes 298 * @buflen: reserved size of @buf, in bytes
299 * @maskp: pointer to bitmap to convert 299 * @maskp: pointer to bitmap to convert
300 * @nmaskbits: size of bitmap, in bits 300 * @nmaskbits: size of bitmap, in bits
301 * 301 *
302 * Exactly @nmaskbits bits are displayed. Hex digits are grouped into 302 * Exactly @nmaskbits bits are displayed. Hex digits are grouped into
303 * comma-separated sets of eight digits per set. 303 * comma-separated sets of eight digits per set.
304 */ 304 */
305 int bitmap_scnprintf(char *buf, unsigned int buflen, 305 int bitmap_scnprintf(char *buf, unsigned int buflen,
306 const unsigned long *maskp, int nmaskbits) 306 const unsigned long *maskp, int nmaskbits)
307 { 307 {
308 int i, word, bit, len = 0; 308 int i, word, bit, len = 0;
309 unsigned long val; 309 unsigned long val;
310 const char *sep = ""; 310 const char *sep = "";
311 int chunksz; 311 int chunksz;
312 u32 chunkmask; 312 u32 chunkmask;
313 313
314 chunksz = nmaskbits & (CHUNKSZ - 1); 314 chunksz = nmaskbits & (CHUNKSZ - 1);
315 if (chunksz == 0) 315 if (chunksz == 0)
316 chunksz = CHUNKSZ; 316 chunksz = CHUNKSZ;
317 317
318 i = ALIGN(nmaskbits, CHUNKSZ) - CHUNKSZ; 318 i = ALIGN(nmaskbits, CHUNKSZ) - CHUNKSZ;
319 for (; i >= 0; i -= CHUNKSZ) { 319 for (; i >= 0; i -= CHUNKSZ) {
320 chunkmask = ((1ULL << chunksz) - 1); 320 chunkmask = ((1ULL << chunksz) - 1);
321 word = i / BITS_PER_LONG; 321 word = i / BITS_PER_LONG;
322 bit = i % BITS_PER_LONG; 322 bit = i % BITS_PER_LONG;
323 val = (maskp[word] >> bit) & chunkmask; 323 val = (maskp[word] >> bit) & chunkmask;
324 len += scnprintf(buf+len, buflen-len, "%s%0*lx", sep, 324 len += scnprintf(buf+len, buflen-len, "%s%0*lx", sep,
325 (chunksz+3)/4, val); 325 (chunksz+3)/4, val);
326 chunksz = CHUNKSZ; 326 chunksz = CHUNKSZ;
327 sep = ","; 327 sep = ",";
328 } 328 }
329 return len; 329 return len;
330 } 330 }
331 EXPORT_SYMBOL(bitmap_scnprintf); 331 EXPORT_SYMBOL(bitmap_scnprintf);
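
   A hedged kernel-context sketch of the hex output format (the function name,
   mask size and values are illustrative; DECLARE_BITMAP and bitmap_parselist()
   are already provided by bitmap.h / this file):

       static int hex_print_example(void)
       {
               DECLARE_BITMAP(mask, 64);
               char buf[32];

               bitmap_parselist("0-32", mask, 64);     /* bits 0..32 set */

               /* Two 32-bit chunks, most significant first, comma separated:
                * buf becomes "00000001,ffffffff"; the returned length is 17. */
               return bitmap_scnprintf(buf, sizeof(buf), mask, 64);
       }
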
332 332
333 /** 333 /**
334 * bitmap_parse - convert an ASCII hex string into a bitmap. 334 * bitmap_parse - convert an ASCII hex string into a bitmap.
335 * @buf: pointer to buffer in user space containing string. 335 * @buf: pointer to buffer in user space containing string.
336 * @buflen: buffer size in bytes. If string is smaller than this 336 * @buflen: buffer size in bytes. If string is smaller than this
337 * then it must be terminated with a \0. 337 * then it must be terminated with a \0.
338 * @maskp: pointer to bitmap array that will contain result. 338 * @maskp: pointer to bitmap array that will contain result.
339 * @nmaskbits: size of bitmap, in bits. 339 * @nmaskbits: size of bitmap, in bits.
340 * 340 *
341 * Commas group hex digits into chunks. Each chunk defines exactly 32 341 * Commas group hex digits into chunks. Each chunk defines exactly 32
342 * bits of the resultant bitmask. No chunk may specify a value larger 342 * bits of the resultant bitmask. No chunk may specify a value larger
343 * than 32 bits (-EOVERFLOW), and if a chunk specifies a smaller value 343 * than 32 bits (-EOVERFLOW), and if a chunk specifies a smaller value
344 * then leading 0-bits are prepended. -EINVAL is returned for illegal 344 * then leading 0-bits are prepended. -EINVAL is returned for illegal
345 * characters and for grouping errors such as "1,,5", ",44", "," and "". 345 * characters and for grouping errors such as "1,,5", ",44", "," and "".
346 * Leading and trailing whitespace accepted, but not embedded whitespace. 346 * Leading and trailing whitespace accepted, but not embedded whitespace.
347 */ 347 */
348 int bitmap_parse(const char __user *ubuf, unsigned int ubuflen, 348 int bitmap_parse(const char __user *ubuf, unsigned int ubuflen,
349 unsigned long *maskp, int nmaskbits) 349 unsigned long *maskp, int nmaskbits)
350 { 350 {
351 int c, old_c, totaldigits, ndigits, nchunks, nbits; 351 int c, old_c, totaldigits, ndigits, nchunks, nbits;
352 u32 chunk; 352 u32 chunk;
353 353
354 bitmap_zero(maskp, nmaskbits); 354 bitmap_zero(maskp, nmaskbits);
355 355
356 nchunks = nbits = totaldigits = c = 0; 356 nchunks = nbits = totaldigits = c = 0;
357 do { 357 do {
358 chunk = ndigits = 0; 358 chunk = ndigits = 0;
359 359
360 /* Get the next chunk of the bitmap */ 360 /* Get the next chunk of the bitmap */
361 while (ubuflen) { 361 while (ubuflen) {
362 old_c = c; 362 old_c = c;
363 if (get_user(c, ubuf++)) 363 if (get_user(c, ubuf++))
364 return -EFAULT; 364 return -EFAULT;
365 ubuflen--; 365 ubuflen--;
366 if (isspace(c)) 366 if (isspace(c))
367 continue; 367 continue;
368 368
369 /* 369 /*
370 * If the last character was a space and the current 370 * If the last character was a space and the current
371 * character isn't '\0', we've got embedded whitespace. 371 * character isn't '\0', we've got embedded whitespace.
372 * This is a no-no, so throw an error. 372 * This is a no-no, so throw an error.
373 */ 373 */
374 if (totaldigits && c && isspace(old_c)) 374 if (totaldigits && c && isspace(old_c))
375 return -EINVAL; 375 return -EINVAL;
376 376
377 /* A '\0' or a ',' signal the end of the chunk */ 377 /* A '\0' or a ',' signal the end of the chunk */
378 if (c == '\0' || c == ',') 378 if (c == '\0' || c == ',')
379 break; 379 break;
380 380
381 if (!isxdigit(c)) 381 if (!isxdigit(c))
382 return -EINVAL; 382 return -EINVAL;
383 383
384 /* 384 /*
385 * Make sure there are at least 4 free bits in 'chunk'. 385 * Make sure there are at least 4 free bits in 'chunk'.
386 * If not, this hexdigit will overflow 'chunk', so 386 * If not, this hexdigit will overflow 'chunk', so
387 * throw an error. 387 * throw an error.
388 */ 388 */
389 if (chunk & ~((1UL << (CHUNKSZ - 4)) - 1)) 389 if (chunk & ~((1UL << (CHUNKSZ - 4)) - 1))
390 return -EOVERFLOW; 390 return -EOVERFLOW;
391 391
392 chunk = (chunk << 4) | unhex(c); 392 chunk = (chunk << 4) | unhex(c);
393 ndigits++; totaldigits++; 393 ndigits++; totaldigits++;
394 } 394 }
395 if (ndigits == 0) 395 if (ndigits == 0)
396 return -EINVAL; 396 return -EINVAL;
397 if (nchunks == 0 && chunk == 0) 397 if (nchunks == 0 && chunk == 0)
398 continue; 398 continue;
399 399
400 __bitmap_shift_left(maskp, maskp, CHUNKSZ, nmaskbits); 400 __bitmap_shift_left(maskp, maskp, CHUNKSZ, nmaskbits);
401 *maskp |= chunk; 401 *maskp |= chunk;
402 nchunks++; 402 nchunks++;
403 nbits += (nchunks == 1) ? nbits_to_hold_value(chunk) : CHUNKSZ; 403 nbits += (nchunks == 1) ? nbits_to_hold_value(chunk) : CHUNKSZ;
404 if (nbits > nmaskbits) 404 if (nbits > nmaskbits)
405 return -EOVERFLOW; 405 return -EOVERFLOW;
406 } while (ubuflen && c == ','); 406 } while (ubuflen && c == ',');
407 407
408 return 0; 408 return 0;
409 } 409 }
410 EXPORT_SYMBOL(bitmap_parse); 410 EXPORT_SYMBOL(bitmap_parse);
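
   A worked example (hypothetical caller, not part of the patch): parsing the
   user-supplied string "1,00000000" into a 64-bit bitmap, e.g. via
   bitmap_parse(ubuf, 11, mask, 64) where ubuf is the user-space pointer to the
   NUL-terminated string and mask was declared with DECLARE_BITMAP(mask, 64),
   leaves exactly bit 32 set: the high chunk "1" supplies bit 0 of the upper
   32 bits, and the low chunk "00000000" supplies nothing.
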
411 411
412 /* 412 /*
 413 * bscnl_emit(buf, buflen, rbot, rtop, len) 413 * bscnl_emit(buf, buflen, rbot, rtop, len)
414 * 414 *
415 * Helper routine for bitmap_scnlistprintf(). Write decimal number 415 * Helper routine for bitmap_scnlistprintf(). Write decimal number
416 * or range to buf, suppressing output past buf+buflen, with optional 416 * or range to buf, suppressing output past buf+buflen, with optional
417 * comma-prefix. Return len of what would be written to buf, if it 417 * comma-prefix. Return len of what would be written to buf, if it
418 * all fit. 418 * all fit.
419 */ 419 */
420 static inline int bscnl_emit(char *buf, int buflen, int rbot, int rtop, int len) 420 static inline int bscnl_emit(char *buf, int buflen, int rbot, int rtop, int len)
421 { 421 {
422 if (len > 0) 422 if (len > 0)
423 len += scnprintf(buf + len, buflen - len, ","); 423 len += scnprintf(buf + len, buflen - len, ",");
424 if (rbot == rtop) 424 if (rbot == rtop)
425 len += scnprintf(buf + len, buflen - len, "%d", rbot); 425 len += scnprintf(buf + len, buflen - len, "%d", rbot);
426 else 426 else
427 len += scnprintf(buf + len, buflen - len, "%d-%d", rbot, rtop); 427 len += scnprintf(buf + len, buflen - len, "%d-%d", rbot, rtop);
428 return len; 428 return len;
429 } 429 }
430 430
431 /** 431 /**
432 * bitmap_scnlistprintf - convert bitmap to list format ASCII string 432 * bitmap_scnlistprintf - convert bitmap to list format ASCII string
433 * @buf: byte buffer into which string is placed 433 * @buf: byte buffer into which string is placed
434 * @buflen: reserved size of @buf, in bytes 434 * @buflen: reserved size of @buf, in bytes
435 * @maskp: pointer to bitmap to convert 435 * @maskp: pointer to bitmap to convert
436 * @nmaskbits: size of bitmap, in bits 436 * @nmaskbits: size of bitmap, in bits
437 * 437 *
438 * Output format is a comma-separated list of decimal numbers and 438 * Output format is a comma-separated list of decimal numbers and
439 * ranges. Consecutively set bits are shown as two hyphen-separated 439 * ranges. Consecutively set bits are shown as two hyphen-separated
440 * decimal numbers, the smallest and largest bit numbers set in 440 * decimal numbers, the smallest and largest bit numbers set in
441 * the range. Output format is compatible with the format 441 * the range. Output format is compatible with the format
442 * accepted as input by bitmap_parselist(). 442 * accepted as input by bitmap_parselist().
443 * 443 *
444 * The return value is the number of characters which would be 444 * The return value is the number of characters which would be
445 * generated for the given input, excluding the trailing '\0', as 445 * generated for the given input, excluding the trailing '\0', as
446 * per ISO C99. 446 * per ISO C99.
447 */ 447 */
448 int bitmap_scnlistprintf(char *buf, unsigned int buflen, 448 int bitmap_scnlistprintf(char *buf, unsigned int buflen,
449 const unsigned long *maskp, int nmaskbits) 449 const unsigned long *maskp, int nmaskbits)
450 { 450 {
451 int len = 0; 451 int len = 0;
452 /* current bit is 'cur', most recently seen range is [rbot, rtop] */ 452 /* current bit is 'cur', most recently seen range is [rbot, rtop] */
453 int cur, rbot, rtop; 453 int cur, rbot, rtop;
454 454
455 rbot = cur = find_first_bit(maskp, nmaskbits); 455 rbot = cur = find_first_bit(maskp, nmaskbits);
456 while (cur < nmaskbits) { 456 while (cur < nmaskbits) {
457 rtop = cur; 457 rtop = cur;
458 cur = find_next_bit(maskp, nmaskbits, cur+1); 458 cur = find_next_bit(maskp, nmaskbits, cur+1);
459 if (cur >= nmaskbits || cur > rtop + 1) { 459 if (cur >= nmaskbits || cur > rtop + 1) {
460 len = bscnl_emit(buf, buflen, rbot, rtop, len); 460 len = bscnl_emit(buf, buflen, rbot, rtop, len);
461 rbot = cur; 461 rbot = cur;
462 } 462 }
463 } 463 }
464 return len; 464 return len;
465 } 465 }
466 EXPORT_SYMBOL(bitmap_scnlistprintf); 466 EXPORT_SYMBOL(bitmap_scnlistprintf);
467 467
468 /** 468 /**
469 * bitmap_parselist - convert list format ASCII string to bitmap 469 * bitmap_parselist - convert list format ASCII string to bitmap
470 * @buf: read nul-terminated user string from this buffer 470 * @buf: read nul-terminated user string from this buffer
471 * @mask: write resulting mask here 471 * @mask: write resulting mask here
472 * @nmaskbits: number of bits in mask to be written 472 * @nmaskbits: number of bits in mask to be written
473 * 473 *
474 * Input format is a comma-separated list of decimal numbers and 474 * Input format is a comma-separated list of decimal numbers and
475 * ranges. Consecutively set bits are shown as two hyphen-separated 475 * ranges. Consecutively set bits are shown as two hyphen-separated
476 * decimal numbers, the smallest and largest bit numbers set in 476 * decimal numbers, the smallest and largest bit numbers set in
477 * the range. 477 * the range.
478 * 478 *
479 * Returns 0 on success, -errno on invalid input strings: 479 * Returns 0 on success, -errno on invalid input strings:
480 * -EINVAL: second number in range smaller than first 480 * -EINVAL: second number in range smaller than first
481 * -EINVAL: invalid character in string 481 * -EINVAL: invalid character in string
482 * -ERANGE: bit number specified too large for mask 482 * -ERANGE: bit number specified too large for mask
483 */ 483 */
484 int bitmap_parselist(const char *bp, unsigned long *maskp, int nmaskbits) 484 int bitmap_parselist(const char *bp, unsigned long *maskp, int nmaskbits)
485 { 485 {
486 unsigned a, b; 486 unsigned a, b;
487 487
488 bitmap_zero(maskp, nmaskbits); 488 bitmap_zero(maskp, nmaskbits);
489 do { 489 do {
490 if (!isdigit(*bp)) 490 if (!isdigit(*bp))
491 return -EINVAL; 491 return -EINVAL;
492 b = a = simple_strtoul(bp, (char **)&bp, BASEDEC); 492 b = a = simple_strtoul(bp, (char **)&bp, BASEDEC);
493 if (*bp == '-') { 493 if (*bp == '-') {
494 bp++; 494 bp++;
495 if (!isdigit(*bp)) 495 if (!isdigit(*bp))
496 return -EINVAL; 496 return -EINVAL;
497 b = simple_strtoul(bp, (char **)&bp, BASEDEC); 497 b = simple_strtoul(bp, (char **)&bp, BASEDEC);
498 } 498 }
499 if (!(a <= b)) 499 if (!(a <= b))
500 return -EINVAL; 500 return -EINVAL;
501 if (b >= nmaskbits) 501 if (b >= nmaskbits)
502 return -ERANGE; 502 return -ERANGE;
503 while (a <= b) { 503 while (a <= b) {
504 set_bit(a, maskp); 504 set_bit(a, maskp);
505 a++; 505 a++;
506 } 506 }
507 if (*bp == ',') 507 if (*bp == ',')
508 bp++; 508 bp++;
509 } while (*bp != '\0' && *bp != '\n'); 509 } while (*bp != '\0' && *bp != '\n');
510 return 0; 510 return 0;
511 } 511 }
512 EXPORT_SYMBOL(bitmap_parselist); 512 EXPORT_SYMBOL(bitmap_parselist);
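
   A minimal kernel-context sketch of the list-format round trip (the function
   and variable names are illustrative; only the two routines above and
   DECLARE_BITMAP are assumed):

       static void list_roundtrip_example(void)
       {
               DECLARE_BITMAP(mask, 16);
               char buf[32];

               /* Sets bits 0, 1, 2, 5 and 7. */
               bitmap_parselist("0-2,5,7", mask, 16);

               /* Prints the same ranges back: buf becomes "0-2,5,7". */
               bitmap_scnlistprintf(buf, sizeof(buf), mask, 16);
       }
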
513 513
514 /*
515 * bitmap_pos_to_ord(buf, pos, bits)
516 * @buf: pointer to a bitmap
517 * @pos: a bit position in @buf (0 <= @pos < @bits)
518 * @bits: number of valid bit positions in @buf
519 *
520 * Map the bit at position @pos in @buf (of length @bits) to the
 521 * ordinal of that set bit (how many set bits precede it). If it is not set or if @pos
522 * is not a valid bit position, map to zero (0).
523 *
 524 * If, for example, just bits 4 through 7 are set in @buf, then @pos
525 * values 4 through 7 will get mapped to 0 through 3, respectively,
526 * and other @pos values will get mapped to 0. When @pos value 7
527 * gets mapped to (returns) @ord value 3 in this example, that means
528 * that bit 7 is the 3rd (starting with 0th) set bit in @buf.
529 *
 530 * The bit positions 0 through @bits - 1 are valid positions in @buf.
531 */
532 static int bitmap_pos_to_ord(const unsigned long *buf, int pos, int bits)
533 {
534 int ord = 0;
535
536 if (pos >= 0 && pos < bits) {
537 int i;
538
539 for (i = find_first_bit(buf, bits);
540 i < pos;
541 i = find_next_bit(buf, bits, i + 1))
542 ord++;
543 if (i > pos)
544 ord = 0;
545 }
546 return ord;
547 }
548
549 /**
550 * bitmap_ord_to_pos(buf, ord, bits)
551 * @buf: pointer to bitmap
552 * @ord: ordinal bit position (n-th set bit, n >= 0)
553 * @bits: number of valid bit positions in @buf
554 *
555 * Map the ordinal offset of bit @ord in @buf to its position in @buf.
556 * If @ord is not the ordinal offset of a set bit in @buf, map to zero (0).
557 *
 558 * If, for example, just bits 4 through 7 are set in @buf, then @ord
559 * values 0 through 3 will get mapped to 4 through 7, respectively,
 560 * and all other @ord values will get mapped to 0. When @ord value 3
561 * gets mapped to (returns) @pos value 7 in this example, that means
562 * that the 3rd set bit (starting with 0th) is at position 7 in @buf.
563 *
 564 * The bit positions 0 through @bits - 1 are valid positions in @buf.
565 */
566 static int bitmap_ord_to_pos(const unsigned long *buf, int ord, int bits)
567 {
568 int pos = 0;
569
570 if (ord >= 0 && ord < bits) {
571 int i;
572
573 for (i = find_first_bit(buf, bits);
574 i < bits && ord > 0;
575 i = find_next_bit(buf, bits, i + 1))
576 ord--;
577 if (i < bits && ord == 0)
578 pos = i;
579 }
580
581 return pos;
582 }
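
   The two static helpers above are not exported, but the ordinal calculation
   they perform is easy to mirror outside the kernel. The following
   self-contained user-space sketch is illustrative only: it uses single-word
   bitmaps, the GCC/Clang __builtin_popcountl builtin, and, unlike the kernel
   helper, assumes the bit at 'pos' is actually set.

       #include <stdio.h>

       /* Ordinal of a set bit = number of set bits below it. */
       static int pos_to_ord(unsigned long map, int pos)
       {
               return __builtin_popcountl(map & ((1UL << pos) - 1));
       }

       /* Position of the ord-th set bit (0 if there is no such bit). */
       static int ord_to_pos(unsigned long map, int ord)
       {
               int pos;

               for (pos = 0; pos < (int)(sizeof(map) * 8); pos++)
                       if (((map >> pos) & 1) && ord-- == 0)
                               return pos;
               return 0;
       }

       int main(void)
       {
               unsigned long map = 0xf0UL;             /* bits 4-7 set */

               printf("%d\n", pos_to_ord(map, 7));     /* prints 3 */
               printf("%d\n", ord_to_pos(map, 3));     /* prints 7 */
               return 0;
       }
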
583
584 /**
585 * bitmap_remap - Apply map defined by a pair of bitmaps to another bitmap
586 * @src: subset to be remapped
587 * @dst: remapped result
588 * @old: defines domain of map
589 * @new: defines range of map
590 * @bits: number of bits in each of these bitmaps
591 *
592 * Let @old and @new define a mapping of bit positions, such that
593 * whatever position is held by the n-th set bit in @old is mapped
594 * to the n-th set bit in @new. In the more general case, allowing
595 * for the possibility that the weight 'w' of @new is less than the
596 * weight of @old, map the position of the n-th set bit in @old to
597 * the position of the m-th set bit in @new, where m == n % w.
598 *
 599 * If either of the @old and @new bitmaps is empty, or if @src and @dst
600 * point to the same location, then this routine does nothing.
601 *
602 * The positions of unset bits in @old are mapped to the position of
603 * the first set bit in @new.
604 *
605 * Apply the above specified mapping to @src, placing the result in
606 * @dst, clearing any bits previously set in @dst.
607 *
608 * The resulting value of @dst will have either the same weight as
609 * @src, or less weight in the general case that the mapping wasn't
610 * injective due to the weight of @new being less than that of @old.
611 * The resulting value of @dst will never have greater weight than
612 * that of @src, except perhaps in the case that one of the above
613 * conditions was not met and this routine just returned.
614 *
 615 * For example, let's say that @old has bits 4 through 7 set, and
616 * @new has bits 12 through 15 set. This defines the mapping of bit
617 * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, and of all other
 618 * bit positions to 12 (the first set bit in @new). So if, say, @src
619 * comes into this routine with bits 1, 5 and 7 set, then @dst should
620 * leave with bits 12, 13 and 15 set.
621 */
622 void bitmap_remap(unsigned long *dst, const unsigned long *src,
623 const unsigned long *old, const unsigned long *new,
624 int bits)
625 {
626 int s;
627
628 if (bitmap_weight(old, bits) == 0)
629 return;
630 if (bitmap_weight(new, bits) == 0)
631 return;
632 if (dst == src) /* following doesn't handle inplace remaps */
633 return;
634
635 bitmap_zero(dst, bits);
636 for (s = find_first_bit(src, bits);
637 s < bits;
638 s = find_next_bit(src, bits, s + 1)) {
639 int x = bitmap_pos_to_ord(old, s, bits);
640 int y = bitmap_ord_to_pos(new, x, bits);
641 set_bit(y, dst);
642 }
643 }
644 EXPORT_SYMBOL(bitmap_remap);
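
   A minimal kernel-context sketch of the whole-map form, reusing the numbers
   from the comment above (the function and mask names are illustrative;
   bitmap_parselist() is used only to build the example masks):

       static void remap_sketch(void)
       {
               DECLARE_BITMAP(oldmap, 16);
               DECLARE_BITMAP(newmap, 16);
               DECLARE_BITMAP(src, 16);
               DECLARE_BITMAP(dst, 16);

               bitmap_parselist("4-7", oldmap, 16);    /* domain of the map */
               bitmap_parselist("12-15", newmap, 16);  /* range of the map */
               bitmap_parselist("1,5,7", src, 16);     /* bits to remap */

               /* dst comes out with bits 12, 13 and 15 set: 5 -> 13, 7 -> 15,
                * and the unmapped bit 1 falls back to the first set bit of
                * newmap, which is 12. */
               bitmap_remap(dst, src, oldmap, newmap, 16);
       }
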
645
646 /**
647 * bitmap_bitremap - Apply map defined by a pair of bitmaps to a single bit
 648 * @oldbit: bit position to be mapped
649 * @old: defines domain of map
650 * @new: defines range of map
651 * @bits: number of bits in each of these bitmaps
652 *
653 * Let @old and @new define a mapping of bit positions, such that
654 * whatever position is held by the n-th set bit in @old is mapped
655 * to the n-th set bit in @new. In the more general case, allowing
656 * for the possibility that the weight 'w' of @new is less than the
657 * weight of @old, map the position of the n-th set bit in @old to
658 * the position of the m-th set bit in @new, where m == n % w.
659 *
660 * The positions of unset bits in @old are mapped to the position of
661 * the first set bit in @new.
662 *
663 * Apply the above specified mapping to bit position @oldbit, returning
664 * the new bit position.
665 *
 666 * For example, let's say that @old has bits 4 through 7 set, and
667 * @new has bits 12 through 15 set. This defines the mapping of bit
668 * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, and of all other
 669 * bit positions to 12 (the first set bit in @new). So if, say, @oldbit
670 * is 5, then this routine returns 13.
671 */
672 int bitmap_bitremap(int oldbit, const unsigned long *old,
673 const unsigned long *new, int bits)
674 {
675 int x = bitmap_pos_to_ord(old, oldbit, bits);
676 return bitmap_ord_to_pos(new, x, bits);
677 }
678 EXPORT_SYMBOL(bitmap_bitremap);
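
   And the single-bit form, under the same illustrative masks as the previous
   sketch (bits 4-7 set in oldmap, bits 12-15 set in newmap):

       /* Inside the same sketch function as above: */
       int b1 = bitmap_bitremap(5, oldmap, newmap, 16);  /* b1 == 13 */
       int b2 = bitmap_bitremap(2, oldmap, newmap, 16);  /* b2 == 12: bit 2 is not
                                                          * set in oldmap, so it maps
                                                          * to the first set bit of
                                                          * newmap */
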
679
514 /** 680 /**
515 * bitmap_find_free_region - find a contiguous aligned mem region 681 * bitmap_find_free_region - find a contiguous aligned mem region
516 * @bitmap: an array of unsigned longs corresponding to the bitmap 682 * @bitmap: an array of unsigned longs corresponding to the bitmap
517 * @bits: number of bits in the bitmap 683 * @bits: number of bits in the bitmap
518 * @order: region size to find (size is actually 1<<order) 684 * @order: region size to find (size is actually 1<<order)
519 * 685 *
520 * This is used to allocate a memory region from a bitmap. The idea is 686 * This is used to allocate a memory region from a bitmap. The idea is
521 * that the region has to be 1<<order sized and 1<<order aligned (this 687 * that the region has to be 1<<order sized and 1<<order aligned (this
522 * makes the search algorithm much faster). 688 * makes the search algorithm much faster).
523 * 689 *
524 * The region is marked as set bits in the bitmap if a free one is 690 * The region is marked as set bits in the bitmap if a free one is
525 * found. 691 * found.
526 * 692 *
527  * Returns either the beginning of the region or a negative error code 693  * Returns either the beginning of the region or a negative error code
528 */ 694 */
529 int bitmap_find_free_region(unsigned long *bitmap, int bits, int order) 695 int bitmap_find_free_region(unsigned long *bitmap, int bits, int order)
530 { 696 {
531 unsigned long mask; 697 unsigned long mask;
532 int pages = 1 << order; 698 int pages = 1 << order;
533 int i; 699 int i;
534 700
535 	if (pages > BITS_PER_LONG) 701 	if (pages > BITS_PER_LONG)
536 return -EINVAL; 702 return -EINVAL;
537 703
538 /* make a mask of the order */ 704 /* make a mask of the order */
539 mask = (1ul << (pages - 1)); 705 mask = (1ul << (pages - 1));
540 mask += mask - 1; 706 mask += mask - 1;
541 707
542 /* run up the bitmap pages bits at a time */ 708 /* run up the bitmap pages bits at a time */
543 for (i = 0; i < bits; i += pages) { 709 for (i = 0; i < bits; i += pages) {
544 int index = i/BITS_PER_LONG; 710 int index = i/BITS_PER_LONG;
545 int offset = i - (index * BITS_PER_LONG); 711 int offset = i - (index * BITS_PER_LONG);
546 		if ((bitmap[index] & (mask << offset)) == 0) { 712 		if ((bitmap[index] & (mask << offset)) == 0) {
547 			/* set region in bitmap */ 713 			/* set region in bitmap */
548 bitmap[index] |= (mask << offset); 714 bitmap[index] |= (mask << offset);
549 return i; 715 return i;
550 } 716 }
551 } 717 }
552 return -ENOMEM; 718 return -ENOMEM;
553 } 719 }
554 EXPORT_SYMBOL(bitmap_find_free_region); 720 EXPORT_SYMBOL(bitmap_find_free_region);
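
For readers following the mask arithmetic: with order 2, pages is 4, so mask starts as 1ul << 3 = 0x8 and mask += mask - 1 gives 0xf, i.e. four consecutive set bits, which the loop then tests at each aligned offset. A minimal, hypothetical usage sketch (the helper grab_region is not from the source, and a 64-bit map is assumed):

/*
 * Hypothetical illustration: carve an order-2 (4-bit) region out of
 * a 64-bit allocation map.
 */
static int grab_region(unsigned long *map)
{
	int pos = bitmap_find_free_region(map, 64, 2);

	if (pos < 0)
		return pos;		/* -ENOMEM: no aligned free region */
	/* bits pos..pos+3 are now marked allocated */
	return pos;
}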
555 721
556 /** 722 /**
557 * bitmap_release_region - release allocated bitmap region 723 * bitmap_release_region - release allocated bitmap region
558 * @bitmap: a pointer to the bitmap 724 * @bitmap: a pointer to the bitmap
559 * @pos: the beginning of the region 725 * @pos: the beginning of the region
560 * @order: the order of the bits to release (number is 1<<order) 726 * @order: the order of the bits to release (number is 1<<order)
561 * 727 *
562  * This is the complement to bitmap_find_free_region() and releases 728  * This is the complement to bitmap_find_free_region() and releases
563 * the found region (by clearing it in the bitmap). 729 * the found region (by clearing it in the bitmap).
564 */ 730 */
565 void bitmap_release_region(unsigned long *bitmap, int pos, int order) 731 void bitmap_release_region(unsigned long *bitmap, int pos, int order)
566 { 732 {
567 int pages = 1 << order; 733 int pages = 1 << order;
568 unsigned long mask = (1ul << (pages - 1)); 734 unsigned long mask = (1ul << (pages - 1));
569 int index = pos/BITS_PER_LONG; 735 int index = pos/BITS_PER_LONG;
570 int offset = pos - (index * BITS_PER_LONG); 736 int offset = pos - (index * BITS_PER_LONG);
571 mask += mask - 1; 737 mask += mask - 1;
572 bitmap[index] &= ~(mask << offset); 738 bitmap[index] &= ~(mask << offset);
573 } 739 }
574 EXPORT_SYMBOL(bitmap_release_region); 740 EXPORT_SYMBOL(bitmap_release_region);
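
Continuing that hypothetical sketch, releasing is the mirror image: the caller passes back the position and order it obtained earlier, and the same 1<<order bits are cleared:

/* Hypothetical illustration: undo grab_region() above. */
static void drop_region(unsigned long *map, int pos)
{
	bitmap_release_region(map, pos, 2);	/* clears bits pos..pos+3 */
}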
575 741
576 int bitmap_allocate_region(unsigned long *bitmap, int pos, int order) 742 int bitmap_allocate_region(unsigned long *bitmap, int pos, int order)
577 { 743 {
578 int pages = 1 << order; 744 int pages = 1 << order;
579 unsigned long mask = (1ul << (pages - 1)); 745 unsigned long mask = (1ul << (pages - 1));
580 int index = pos/BITS_PER_LONG; 746 int index = pos/BITS_PER_LONG;
581 int offset = pos - (index * BITS_PER_LONG); 747 int offset = pos - (index * BITS_PER_LONG);
582 748
583 /* We don't do regions of pages > BITS_PER_LONG. The 749 /* We don't do regions of pages > BITS_PER_LONG. The
584 * algorithm would be a simple look for multiple zeros in the 750 * algorithm would be a simple look for multiple zeros in the
585 * array, but there's no driver today that needs this. If you 751 * array, but there's no driver today that needs this. If you
586 * trip this BUG(), you get to code it... */ 752 * trip this BUG(), you get to code it... */
587 BUG_ON(pages > BITS_PER_LONG); 753 BUG_ON(pages > BITS_PER_LONG);
588 mask += mask - 1; 754 mask += mask - 1;
589 if (bitmap[index] & (mask << offset)) 755 if (bitmap[index] & (mask << offset))
590 return -EBUSY; 756 return -EBUSY;
591 bitmap[index] |= (mask << offset); 757 bitmap[index] |= (mask << offset);
592 return 0; 758 return 0;
593 } 759 }
594 EXPORT_SYMBOL(bitmap_allocate_region); 760 EXPORT_SYMBOL(bitmap_allocate_region);
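
bitmap_allocate_region() has no kernel-doc block, so a brief note: unlike bitmap_find_free_region() it does not search; it tries to claim the 1<<order bits starting at the caller-supplied pos, returning 0 on success or -EBUSY if any of those bits is already set. A hypothetical sketch (reserve_fixed is not from the source):

/*
 * Hypothetical illustration: reserve bits 8..11 explicitly; fails
 * with -EBUSY if any of them is already set.
 */
static int reserve_fixed(unsigned long *map)
{
	return bitmap_allocate_region(map, 8, 2);
}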
595 761