Commit 323f54ed0f3ce20e9946c961fc928ccdb80d9345
Committed by
Ingo Molnar
1 parent
87e3c8ae1c
Exists in
smarc-imx_3.14.28_1.0.0_ga
and in
1 other branch
numa: Mark __node_set() as __always_inline
It is possible for some compilers to decide that __node_set() does not need to be turned into an inline function. When the compiler does this on an __init function calling it on __initdata we get a section mismatch warning now. Use __always_inline to ensure that we will be inlined. Reported-by: Paul Bolle <pebolle@tiscali.nl> Cc: Jianpeng Ma <majianpeng@gmail.com> Cc: Rusty Russell <rusty@rustcorp.com.au> Cc: Lai Jiangshan <laijs@cn.fujitsu.com> Cc: Yasuaki Ishimatsu <isimatu.yasuaki@jp.fujitsu.com> Cc: Wen Congyang <wency@cn.fujitsu.com> Cc: Jiang Liu <jiang.liu@huawei.com> Cc: KOSAKI Motohiro <kosaki.motohiro@jp.fujitsu.com> Cc: Minchan Kim <minchan.kim@gmail.com> Cc: Mel Gorman <mgorman@suse.de> Cc: David Rientjes <rientjes@google.com> Cc: Yinghai Lu <yinghai@kernel.org> Cc: Greg KH <greg@kroah.com> Signed-off-by: Tom Rini <trini@ti.com> Link: http://lkml.kernel.org/r/1374776770-32361-1-git-send-email-trini@ti.com Signed-off-by: Ingo Molnar <mingo@kernel.org>
Showing 1 changed file with 10 additions and 1 deletion Inline Diff
include/linux/nodemask.h
1 | #ifndef __LINUX_NODEMASK_H | 1 | #ifndef __LINUX_NODEMASK_H |
2 | #define __LINUX_NODEMASK_H | 2 | #define __LINUX_NODEMASK_H |
3 | 3 | ||
4 | /* | 4 | /* |
5 | * Nodemasks provide a bitmap suitable for representing the | 5 | * Nodemasks provide a bitmap suitable for representing the |
6 | * set of Node's in a system, one bit position per Node number. | 6 | * set of Node's in a system, one bit position per Node number. |
7 | * | 7 | * |
8 | * See detailed comments in the file linux/bitmap.h describing the | 8 | * See detailed comments in the file linux/bitmap.h describing the |
9 | * data type on which these nodemasks are based. | 9 | * data type on which these nodemasks are based. |
10 | * | 10 | * |
11 | * For details of nodemask_scnprintf() and nodemask_parse_user(), | 11 | * For details of nodemask_scnprintf() and nodemask_parse_user(), |
12 | * see bitmap_scnprintf() and bitmap_parse_user() in lib/bitmap.c. | 12 | * see bitmap_scnprintf() and bitmap_parse_user() in lib/bitmap.c. |
13 | * For details of nodelist_scnprintf() and nodelist_parse(), see | 13 | * For details of nodelist_scnprintf() and nodelist_parse(), see |
14 | * bitmap_scnlistprintf() and bitmap_parselist(), also in bitmap.c. | 14 | * bitmap_scnlistprintf() and bitmap_parselist(), also in bitmap.c. |
15 | * For details of node_remap(), see bitmap_bitremap in lib/bitmap.c. | 15 | * For details of node_remap(), see bitmap_bitremap in lib/bitmap.c. |
16 | * For details of nodes_remap(), see bitmap_remap in lib/bitmap.c. | 16 | * For details of nodes_remap(), see bitmap_remap in lib/bitmap.c. |
17 | * For details of nodes_onto(), see bitmap_onto in lib/bitmap.c. | 17 | * For details of nodes_onto(), see bitmap_onto in lib/bitmap.c. |
18 | * For details of nodes_fold(), see bitmap_fold in lib/bitmap.c. | 18 | * For details of nodes_fold(), see bitmap_fold in lib/bitmap.c. |
19 | * | 19 | * |
20 | * The available nodemask operations are: | 20 | * The available nodemask operations are: |
21 | * | 21 | * |
22 | * void node_set(node, mask) turn on bit 'node' in mask | 22 | * void node_set(node, mask) turn on bit 'node' in mask |
23 | * void node_clear(node, mask) turn off bit 'node' in mask | 23 | * void node_clear(node, mask) turn off bit 'node' in mask |
24 | * void nodes_setall(mask) set all bits | 24 | * void nodes_setall(mask) set all bits |
25 | * void nodes_clear(mask) clear all bits | 25 | * void nodes_clear(mask) clear all bits |
26 | * int node_isset(node, mask) true iff bit 'node' set in mask | 26 | * int node_isset(node, mask) true iff bit 'node' set in mask |
27 | * int node_test_and_set(node, mask) test and set bit 'node' in mask | 27 | * int node_test_and_set(node, mask) test and set bit 'node' in mask |
28 | * | 28 | * |
29 | * void nodes_and(dst, src1, src2) dst = src1 & src2 [intersection] | 29 | * void nodes_and(dst, src1, src2) dst = src1 & src2 [intersection] |
30 | * void nodes_or(dst, src1, src2) dst = src1 | src2 [union] | 30 | * void nodes_or(dst, src1, src2) dst = src1 | src2 [union] |
31 | * void nodes_xor(dst, src1, src2) dst = src1 ^ src2 | 31 | * void nodes_xor(dst, src1, src2) dst = src1 ^ src2 |
32 | * void nodes_andnot(dst, src1, src2) dst = src1 & ~src2 | 32 | * void nodes_andnot(dst, src1, src2) dst = src1 & ~src2 |
33 | * void nodes_complement(dst, src) dst = ~src | 33 | * void nodes_complement(dst, src) dst = ~src |
34 | * | 34 | * |
35 | * int nodes_equal(mask1, mask2) Does mask1 == mask2? | 35 | * int nodes_equal(mask1, mask2) Does mask1 == mask2? |
36 | * int nodes_intersects(mask1, mask2) Do mask1 and mask2 intersect? | 36 | * int nodes_intersects(mask1, mask2) Do mask1 and mask2 intersect? |
37 | * int nodes_subset(mask1, mask2) Is mask1 a subset of mask2? | 37 | * int nodes_subset(mask1, mask2) Is mask1 a subset of mask2? |
38 | * int nodes_empty(mask) Is mask empty (no bits sets)? | 38 | * int nodes_empty(mask) Is mask empty (no bits sets)? |
39 | * int nodes_full(mask) Is mask full (all bits sets)? | 39 | * int nodes_full(mask) Is mask full (all bits sets)? |
40 | * int nodes_weight(mask) Hamming weight - number of set bits | 40 | * int nodes_weight(mask) Hamming weight - number of set bits |
41 | * | 41 | * |
42 | * void nodes_shift_right(dst, src, n) Shift right | 42 | * void nodes_shift_right(dst, src, n) Shift right |
43 | * void nodes_shift_left(dst, src, n) Shift left | 43 | * void nodes_shift_left(dst, src, n) Shift left |
44 | * | 44 | * |
45 | * int first_node(mask) Number lowest set bit, or MAX_NUMNODES | 45 | * int first_node(mask) Number lowest set bit, or MAX_NUMNODES |
46 | * int next_node(node, mask) Next node past 'node', or MAX_NUMNODES | 46 | * int next_node(node, mask) Next node past 'node', or MAX_NUMNODES |
47 | * int first_unset_node(mask) First node not set in mask, or | 47 | * int first_unset_node(mask) First node not set in mask, or |
48 | * MAX_NUMNODES. | 48 | * MAX_NUMNODES. |
49 | * | 49 | * |
50 | * nodemask_t nodemask_of_node(node) Return nodemask with bit 'node' set | 50 | * nodemask_t nodemask_of_node(node) Return nodemask with bit 'node' set |
51 | * NODE_MASK_ALL Initializer - all bits set | 51 | * NODE_MASK_ALL Initializer - all bits set |
52 | * NODE_MASK_NONE Initializer - no bits set | 52 | * NODE_MASK_NONE Initializer - no bits set |
53 | * unsigned long *nodes_addr(mask) Array of unsigned long's in mask | 53 | * unsigned long *nodes_addr(mask) Array of unsigned long's in mask |
54 | * | 54 | * |
55 | * int nodemask_scnprintf(buf, len, mask) Format nodemask for printing | 55 | * int nodemask_scnprintf(buf, len, mask) Format nodemask for printing |
56 | * int nodemask_parse_user(ubuf, ulen, mask) Parse ascii string as nodemask | 56 | * int nodemask_parse_user(ubuf, ulen, mask) Parse ascii string as nodemask |
57 | * int nodelist_scnprintf(buf, len, mask) Format nodemask as list for printing | 57 | * int nodelist_scnprintf(buf, len, mask) Format nodemask as list for printing |
58 | * int nodelist_parse(buf, map) Parse ascii string as nodelist | 58 | * int nodelist_parse(buf, map) Parse ascii string as nodelist |
59 | * int node_remap(oldbit, old, new) newbit = map(old, new)(oldbit) | 59 | * int node_remap(oldbit, old, new) newbit = map(old, new)(oldbit) |
60 | * void nodes_remap(dst, src, old, new) *dst = map(old, new)(src) | 60 | * void nodes_remap(dst, src, old, new) *dst = map(old, new)(src) |
61 | * void nodes_onto(dst, orig, relmap) *dst = orig relative to relmap | 61 | * void nodes_onto(dst, orig, relmap) *dst = orig relative to relmap |
62 | * void nodes_fold(dst, orig, sz) dst bits = orig bits mod sz | 62 | * void nodes_fold(dst, orig, sz) dst bits = orig bits mod sz |
63 | * | 63 | * |
64 | * for_each_node_mask(node, mask) for-loop node over mask | 64 | * for_each_node_mask(node, mask) for-loop node over mask |
65 | * | 65 | * |
66 | * int num_online_nodes() Number of online Nodes | 66 | * int num_online_nodes() Number of online Nodes |
67 | * int num_possible_nodes() Number of all possible Nodes | 67 | * int num_possible_nodes() Number of all possible Nodes |
68 | * | 68 | * |
69 | * int node_random(mask) Random node with set bit in mask | 69 | * int node_random(mask) Random node with set bit in mask |
70 | * | 70 | * |
71 | * int node_online(node) Is some node online? | 71 | * int node_online(node) Is some node online? |
72 | * int node_possible(node) Is some node possible? | 72 | * int node_possible(node) Is some node possible? |
73 | * | 73 | * |
74 | * node_set_online(node) set bit 'node' in node_online_map | 74 | * node_set_online(node) set bit 'node' in node_online_map |
75 | * node_set_offline(node) clear bit 'node' in node_online_map | 75 | * node_set_offline(node) clear bit 'node' in node_online_map |
76 | * | 76 | * |
77 | * for_each_node(node) for-loop node over node_possible_map | 77 | * for_each_node(node) for-loop node over node_possible_map |
78 | * for_each_online_node(node) for-loop node over node_online_map | 78 | * for_each_online_node(node) for-loop node over node_online_map |
79 | * | 79 | * |
80 | * Subtlety: | 80 | * Subtlety: |
81 | * 1) The 'type-checked' form of node_isset() causes gcc (3.3.2, anyway) | 81 | * 1) The 'type-checked' form of node_isset() causes gcc (3.3.2, anyway) |
82 | * to generate slightly worse code. So use a simple one-line #define | 82 | * to generate slightly worse code. So use a simple one-line #define |
83 | * for node_isset(), instead of wrapping an inline inside a macro, the | 83 | * for node_isset(), instead of wrapping an inline inside a macro, the |
84 | * way we do the other calls. | 84 | * way we do the other calls. |
85 | * | 85 | * |
86 | * NODEMASK_SCRATCH | 86 | * NODEMASK_SCRATCH |
87 | * When doing above logical AND, OR, XOR, Remap operations the callers tend to | 87 | * When doing above logical AND, OR, XOR, Remap operations the callers tend to |
88 | * need temporary nodemask_t's on the stack. But if NODES_SHIFT is large, | 88 | * need temporary nodemask_t's on the stack. But if NODES_SHIFT is large, |
89 | * nodemask_t's consume too much stack space. NODEMASK_SCRATCH is a helper | 89 | * nodemask_t's consume too much stack space. NODEMASK_SCRATCH is a helper |
90 | * for such situations. See below and CPUMASK_ALLOC also. | 90 | * for such situations. See below and CPUMASK_ALLOC also. |
91 | */ | 91 | */ |
92 | 92 | ||
93 | #include <linux/kernel.h> | 93 | #include <linux/kernel.h> |
94 | #include <linux/threads.h> | 94 | #include <linux/threads.h> |
95 | #include <linux/bitmap.h> | 95 | #include <linux/bitmap.h> |
96 | #include <linux/numa.h> | 96 | #include <linux/numa.h> |
97 | 97 | ||
98 | typedef struct { DECLARE_BITMAP(bits, MAX_NUMNODES); } nodemask_t; | 98 | typedef struct { DECLARE_BITMAP(bits, MAX_NUMNODES); } nodemask_t; |
99 | extern nodemask_t _unused_nodemask_arg_; | 99 | extern nodemask_t _unused_nodemask_arg_; |
100 | 100 | ||
101 | /* | ||
102 | * The inline keyword gives the compiler room to decide to inline, or | ||
103 | * not inline a function as it sees best. However, as these functions | ||
104 | * are called in both __init and non-__init functions, if they are not | ||
105 | * inlined we will end up with a section mis-match error (of the type of | ||
106 | * freeable items not being freed). So we must use __always_inline here | ||
107 | * to fix the problem. If other functions in the future also end up in | ||
108 | * this situation they will also need to be annotated as __always_inline | ||
109 | */ | ||
101 | #define node_set(node, dst) __node_set((node), &(dst)) | 110 | #define node_set(node, dst) __node_set((node), &(dst)) |
102 | static inline void __node_set(int node, volatile nodemask_t *dstp) | 111 | static __always_inline void __node_set(int node, volatile nodemask_t *dstp) |
103 | { | 112 | { |
104 | set_bit(node, dstp->bits); | 113 | set_bit(node, dstp->bits); |
105 | } | 114 | } |
106 | 115 | ||
107 | #define node_clear(node, dst) __node_clear((node), &(dst)) | 116 | #define node_clear(node, dst) __node_clear((node), &(dst)) |
108 | static inline void __node_clear(int node, volatile nodemask_t *dstp) | 117 | static inline void __node_clear(int node, volatile nodemask_t *dstp) |
109 | { | 118 | { |
110 | clear_bit(node, dstp->bits); | 119 | clear_bit(node, dstp->bits); |
111 | } | 120 | } |
112 | 121 | ||
113 | #define nodes_setall(dst) __nodes_setall(&(dst), MAX_NUMNODES) | 122 | #define nodes_setall(dst) __nodes_setall(&(dst), MAX_NUMNODES) |
114 | static inline void __nodes_setall(nodemask_t *dstp, int nbits) | 123 | static inline void __nodes_setall(nodemask_t *dstp, int nbits) |
115 | { | 124 | { |
116 | bitmap_fill(dstp->bits, nbits); | 125 | bitmap_fill(dstp->bits, nbits); |
117 | } | 126 | } |
118 | 127 | ||
119 | #define nodes_clear(dst) __nodes_clear(&(dst), MAX_NUMNODES) | 128 | #define nodes_clear(dst) __nodes_clear(&(dst), MAX_NUMNODES) |
120 | static inline void __nodes_clear(nodemask_t *dstp, int nbits) | 129 | static inline void __nodes_clear(nodemask_t *dstp, int nbits) |
121 | { | 130 | { |
122 | bitmap_zero(dstp->bits, nbits); | 131 | bitmap_zero(dstp->bits, nbits); |
123 | } | 132 | } |
124 | 133 | ||
125 | /* No static inline type checking - see Subtlety (1) above. */ | 134 | /* No static inline type checking - see Subtlety (1) above. */ |
126 | #define node_isset(node, nodemask) test_bit((node), (nodemask).bits) | 135 | #define node_isset(node, nodemask) test_bit((node), (nodemask).bits) |
127 | 136 | ||
128 | #define node_test_and_set(node, nodemask) \ | 137 | #define node_test_and_set(node, nodemask) \ |
129 | __node_test_and_set((node), &(nodemask)) | 138 | __node_test_and_set((node), &(nodemask)) |
130 | static inline int __node_test_and_set(int node, nodemask_t *addr) | 139 | static inline int __node_test_and_set(int node, nodemask_t *addr) |
131 | { | 140 | { |
132 | return test_and_set_bit(node, addr->bits); | 141 | return test_and_set_bit(node, addr->bits); |
133 | } | 142 | } |
134 | 143 | ||
135 | #define nodes_and(dst, src1, src2) \ | 144 | #define nodes_and(dst, src1, src2) \ |
136 | __nodes_and(&(dst), &(src1), &(src2), MAX_NUMNODES) | 145 | __nodes_and(&(dst), &(src1), &(src2), MAX_NUMNODES) |
137 | static inline void __nodes_and(nodemask_t *dstp, const nodemask_t *src1p, | 146 | static inline void __nodes_and(nodemask_t *dstp, const nodemask_t *src1p, |
138 | const nodemask_t *src2p, int nbits) | 147 | const nodemask_t *src2p, int nbits) |
139 | { | 148 | { |
140 | bitmap_and(dstp->bits, src1p->bits, src2p->bits, nbits); | 149 | bitmap_and(dstp->bits, src1p->bits, src2p->bits, nbits); |
141 | } | 150 | } |
142 | 151 | ||
143 | #define nodes_or(dst, src1, src2) \ | 152 | #define nodes_or(dst, src1, src2) \ |
144 | __nodes_or(&(dst), &(src1), &(src2), MAX_NUMNODES) | 153 | __nodes_or(&(dst), &(src1), &(src2), MAX_NUMNODES) |
145 | static inline void __nodes_or(nodemask_t *dstp, const nodemask_t *src1p, | 154 | static inline void __nodes_or(nodemask_t *dstp, const nodemask_t *src1p, |
146 | const nodemask_t *src2p, int nbits) | 155 | const nodemask_t *src2p, int nbits) |
147 | { | 156 | { |
148 | bitmap_or(dstp->bits, src1p->bits, src2p->bits, nbits); | 157 | bitmap_or(dstp->bits, src1p->bits, src2p->bits, nbits); |
149 | } | 158 | } |
150 | 159 | ||
151 | #define nodes_xor(dst, src1, src2) \ | 160 | #define nodes_xor(dst, src1, src2) \ |
152 | __nodes_xor(&(dst), &(src1), &(src2), MAX_NUMNODES) | 161 | __nodes_xor(&(dst), &(src1), &(src2), MAX_NUMNODES) |
153 | static inline void __nodes_xor(nodemask_t *dstp, const nodemask_t *src1p, | 162 | static inline void __nodes_xor(nodemask_t *dstp, const nodemask_t *src1p, |
154 | const nodemask_t *src2p, int nbits) | 163 | const nodemask_t *src2p, int nbits) |
155 | { | 164 | { |
156 | bitmap_xor(dstp->bits, src1p->bits, src2p->bits, nbits); | 165 | bitmap_xor(dstp->bits, src1p->bits, src2p->bits, nbits); |
157 | } | 166 | } |
158 | 167 | ||
159 | #define nodes_andnot(dst, src1, src2) \ | 168 | #define nodes_andnot(dst, src1, src2) \ |
160 | __nodes_andnot(&(dst), &(src1), &(src2), MAX_NUMNODES) | 169 | __nodes_andnot(&(dst), &(src1), &(src2), MAX_NUMNODES) |
161 | static inline void __nodes_andnot(nodemask_t *dstp, const nodemask_t *src1p, | 170 | static inline void __nodes_andnot(nodemask_t *dstp, const nodemask_t *src1p, |
162 | const nodemask_t *src2p, int nbits) | 171 | const nodemask_t *src2p, int nbits) |
163 | { | 172 | { |
164 | bitmap_andnot(dstp->bits, src1p->bits, src2p->bits, nbits); | 173 | bitmap_andnot(dstp->bits, src1p->bits, src2p->bits, nbits); |
165 | } | 174 | } |
166 | 175 | ||
167 | #define nodes_complement(dst, src) \ | 176 | #define nodes_complement(dst, src) \ |
168 | __nodes_complement(&(dst), &(src), MAX_NUMNODES) | 177 | __nodes_complement(&(dst), &(src), MAX_NUMNODES) |
169 | static inline void __nodes_complement(nodemask_t *dstp, | 178 | static inline void __nodes_complement(nodemask_t *dstp, |
170 | const nodemask_t *srcp, int nbits) | 179 | const nodemask_t *srcp, int nbits) |
171 | { | 180 | { |
172 | bitmap_complement(dstp->bits, srcp->bits, nbits); | 181 | bitmap_complement(dstp->bits, srcp->bits, nbits); |
173 | } | 182 | } |
174 | 183 | ||
175 | #define nodes_equal(src1, src2) \ | 184 | #define nodes_equal(src1, src2) \ |
176 | __nodes_equal(&(src1), &(src2), MAX_NUMNODES) | 185 | __nodes_equal(&(src1), &(src2), MAX_NUMNODES) |
177 | static inline int __nodes_equal(const nodemask_t *src1p, | 186 | static inline int __nodes_equal(const nodemask_t *src1p, |
178 | const nodemask_t *src2p, int nbits) | 187 | const nodemask_t *src2p, int nbits) |
179 | { | 188 | { |
180 | return bitmap_equal(src1p->bits, src2p->bits, nbits); | 189 | return bitmap_equal(src1p->bits, src2p->bits, nbits); |
181 | } | 190 | } |
182 | 191 | ||
183 | #define nodes_intersects(src1, src2) \ | 192 | #define nodes_intersects(src1, src2) \ |
184 | __nodes_intersects(&(src1), &(src2), MAX_NUMNODES) | 193 | __nodes_intersects(&(src1), &(src2), MAX_NUMNODES) |
185 | static inline int __nodes_intersects(const nodemask_t *src1p, | 194 | static inline int __nodes_intersects(const nodemask_t *src1p, |
186 | const nodemask_t *src2p, int nbits) | 195 | const nodemask_t *src2p, int nbits) |
187 | { | 196 | { |
188 | return bitmap_intersects(src1p->bits, src2p->bits, nbits); | 197 | return bitmap_intersects(src1p->bits, src2p->bits, nbits); |
189 | } | 198 | } |
190 | 199 | ||
191 | #define nodes_subset(src1, src2) \ | 200 | #define nodes_subset(src1, src2) \ |
192 | __nodes_subset(&(src1), &(src2), MAX_NUMNODES) | 201 | __nodes_subset(&(src1), &(src2), MAX_NUMNODES) |
193 | static inline int __nodes_subset(const nodemask_t *src1p, | 202 | static inline int __nodes_subset(const nodemask_t *src1p, |
194 | const nodemask_t *src2p, int nbits) | 203 | const nodemask_t *src2p, int nbits) |
195 | { | 204 | { |
196 | return bitmap_subset(src1p->bits, src2p->bits, nbits); | 205 | return bitmap_subset(src1p->bits, src2p->bits, nbits); |
197 | } | 206 | } |
198 | 207 | ||
199 | #define nodes_empty(src) __nodes_empty(&(src), MAX_NUMNODES) | 208 | #define nodes_empty(src) __nodes_empty(&(src), MAX_NUMNODES) |
200 | static inline int __nodes_empty(const nodemask_t *srcp, int nbits) | 209 | static inline int __nodes_empty(const nodemask_t *srcp, int nbits) |
201 | { | 210 | { |
202 | return bitmap_empty(srcp->bits, nbits); | 211 | return bitmap_empty(srcp->bits, nbits); |
203 | } | 212 | } |
204 | 213 | ||
205 | #define nodes_full(nodemask) __nodes_full(&(nodemask), MAX_NUMNODES) | 214 | #define nodes_full(nodemask) __nodes_full(&(nodemask), MAX_NUMNODES) |
206 | static inline int __nodes_full(const nodemask_t *srcp, int nbits) | 215 | static inline int __nodes_full(const nodemask_t *srcp, int nbits) |
207 | { | 216 | { |
208 | return bitmap_full(srcp->bits, nbits); | 217 | return bitmap_full(srcp->bits, nbits); |
209 | } | 218 | } |
210 | 219 | ||
211 | #define nodes_weight(nodemask) __nodes_weight(&(nodemask), MAX_NUMNODES) | 220 | #define nodes_weight(nodemask) __nodes_weight(&(nodemask), MAX_NUMNODES) |
212 | static inline int __nodes_weight(const nodemask_t *srcp, int nbits) | 221 | static inline int __nodes_weight(const nodemask_t *srcp, int nbits) |
213 | { | 222 | { |
214 | return bitmap_weight(srcp->bits, nbits); | 223 | return bitmap_weight(srcp->bits, nbits); |
215 | } | 224 | } |
216 | 225 | ||
217 | #define nodes_shift_right(dst, src, n) \ | 226 | #define nodes_shift_right(dst, src, n) \ |
218 | __nodes_shift_right(&(dst), &(src), (n), MAX_NUMNODES) | 227 | __nodes_shift_right(&(dst), &(src), (n), MAX_NUMNODES) |
219 | static inline void __nodes_shift_right(nodemask_t *dstp, | 228 | static inline void __nodes_shift_right(nodemask_t *dstp, |
220 | const nodemask_t *srcp, int n, int nbits) | 229 | const nodemask_t *srcp, int n, int nbits) |
221 | { | 230 | { |
222 | bitmap_shift_right(dstp->bits, srcp->bits, n, nbits); | 231 | bitmap_shift_right(dstp->bits, srcp->bits, n, nbits); |
223 | } | 232 | } |
224 | 233 | ||
225 | #define nodes_shift_left(dst, src, n) \ | 234 | #define nodes_shift_left(dst, src, n) \ |
226 | __nodes_shift_left(&(dst), &(src), (n), MAX_NUMNODES) | 235 | __nodes_shift_left(&(dst), &(src), (n), MAX_NUMNODES) |
227 | static inline void __nodes_shift_left(nodemask_t *dstp, | 236 | static inline void __nodes_shift_left(nodemask_t *dstp, |
228 | const nodemask_t *srcp, int n, int nbits) | 237 | const nodemask_t *srcp, int n, int nbits) |
229 | { | 238 | { |
230 | bitmap_shift_left(dstp->bits, srcp->bits, n, nbits); | 239 | bitmap_shift_left(dstp->bits, srcp->bits, n, nbits); |
231 | } | 240 | } |
232 | 241 | ||
233 | /* FIXME: better would be to fix all architectures to never return | 242 | /* FIXME: better would be to fix all architectures to never return |
234 | > MAX_NUMNODES, then the silly min_ts could be dropped. */ | 243 | > MAX_NUMNODES, then the silly min_ts could be dropped. */ |
235 | 244 | ||
236 | #define first_node(src) __first_node(&(src)) | 245 | #define first_node(src) __first_node(&(src)) |
237 | static inline int __first_node(const nodemask_t *srcp) | 246 | static inline int __first_node(const nodemask_t *srcp) |
238 | { | 247 | { |
239 | return min_t(int, MAX_NUMNODES, find_first_bit(srcp->bits, MAX_NUMNODES)); | 248 | return min_t(int, MAX_NUMNODES, find_first_bit(srcp->bits, MAX_NUMNODES)); |
240 | } | 249 | } |
241 | 250 | ||
242 | #define next_node(n, src) __next_node((n), &(src)) | 251 | #define next_node(n, src) __next_node((n), &(src)) |
243 | static inline int __next_node(int n, const nodemask_t *srcp) | 252 | static inline int __next_node(int n, const nodemask_t *srcp) |
244 | { | 253 | { |
245 | return min_t(int,MAX_NUMNODES,find_next_bit(srcp->bits, MAX_NUMNODES, n+1)); | 254 | return min_t(int,MAX_NUMNODES,find_next_bit(srcp->bits, MAX_NUMNODES, n+1)); |
246 | } | 255 | } |
247 | 256 | ||
248 | static inline void init_nodemask_of_node(nodemask_t *mask, int node) | 257 | static inline void init_nodemask_of_node(nodemask_t *mask, int node) |
249 | { | 258 | { |
250 | nodes_clear(*mask); | 259 | nodes_clear(*mask); |
251 | node_set(node, *mask); | 260 | node_set(node, *mask); |
252 | } | 261 | } |
253 | 262 | ||
254 | #define nodemask_of_node(node) \ | 263 | #define nodemask_of_node(node) \ |
255 | ({ \ | 264 | ({ \ |
256 | typeof(_unused_nodemask_arg_) m; \ | 265 | typeof(_unused_nodemask_arg_) m; \ |
257 | if (sizeof(m) == sizeof(unsigned long)) { \ | 266 | if (sizeof(m) == sizeof(unsigned long)) { \ |
258 | m.bits[0] = 1UL << (node); \ | 267 | m.bits[0] = 1UL << (node); \ |
259 | } else { \ | 268 | } else { \ |
260 | init_nodemask_of_node(&m, (node)); \ | 269 | init_nodemask_of_node(&m, (node)); \ |
261 | } \ | 270 | } \ |
262 | m; \ | 271 | m; \ |
263 | }) | 272 | }) |
264 | 273 | ||
265 | #define first_unset_node(mask) __first_unset_node(&(mask)) | 274 | #define first_unset_node(mask) __first_unset_node(&(mask)) |
266 | static inline int __first_unset_node(const nodemask_t *maskp) | 275 | static inline int __first_unset_node(const nodemask_t *maskp) |
267 | { | 276 | { |
268 | return min_t(int,MAX_NUMNODES, | 277 | return min_t(int,MAX_NUMNODES, |
269 | find_first_zero_bit(maskp->bits, MAX_NUMNODES)); | 278 | find_first_zero_bit(maskp->bits, MAX_NUMNODES)); |
270 | } | 279 | } |
271 | 280 | ||
272 | #define NODE_MASK_LAST_WORD BITMAP_LAST_WORD_MASK(MAX_NUMNODES) | 281 | #define NODE_MASK_LAST_WORD BITMAP_LAST_WORD_MASK(MAX_NUMNODES) |
273 | 282 | ||
274 | #if MAX_NUMNODES <= BITS_PER_LONG | 283 | #if MAX_NUMNODES <= BITS_PER_LONG |
275 | 284 | ||
276 | #define NODE_MASK_ALL \ | 285 | #define NODE_MASK_ALL \ |
277 | ((nodemask_t) { { \ | 286 | ((nodemask_t) { { \ |
278 | [BITS_TO_LONGS(MAX_NUMNODES)-1] = NODE_MASK_LAST_WORD \ | 287 | [BITS_TO_LONGS(MAX_NUMNODES)-1] = NODE_MASK_LAST_WORD \ |
279 | } }) | 288 | } }) |
280 | 289 | ||
281 | #else | 290 | #else |
282 | 291 | ||
283 | #define NODE_MASK_ALL \ | 292 | #define NODE_MASK_ALL \ |
284 | ((nodemask_t) { { \ | 293 | ((nodemask_t) { { \ |
285 | [0 ... BITS_TO_LONGS(MAX_NUMNODES)-2] = ~0UL, \ | 294 | [0 ... BITS_TO_LONGS(MAX_NUMNODES)-2] = ~0UL, \ |
286 | [BITS_TO_LONGS(MAX_NUMNODES)-1] = NODE_MASK_LAST_WORD \ | 295 | [BITS_TO_LONGS(MAX_NUMNODES)-1] = NODE_MASK_LAST_WORD \ |
287 | } }) | 296 | } }) |
288 | 297 | ||
289 | #endif | 298 | #endif |
290 | 299 | ||
291 | #define NODE_MASK_NONE \ | 300 | #define NODE_MASK_NONE \ |
292 | ((nodemask_t) { { \ | 301 | ((nodemask_t) { { \ |
293 | [0 ... BITS_TO_LONGS(MAX_NUMNODES)-1] = 0UL \ | 302 | [0 ... BITS_TO_LONGS(MAX_NUMNODES)-1] = 0UL \ |
294 | } }) | 303 | } }) |
295 | 304 | ||
296 | #define nodes_addr(src) ((src).bits) | 305 | #define nodes_addr(src) ((src).bits) |
297 | 306 | ||
298 | #define nodemask_scnprintf(buf, len, src) \ | 307 | #define nodemask_scnprintf(buf, len, src) \ |
299 | __nodemask_scnprintf((buf), (len), &(src), MAX_NUMNODES) | 308 | __nodemask_scnprintf((buf), (len), &(src), MAX_NUMNODES) |
300 | static inline int __nodemask_scnprintf(char *buf, int len, | 309 | static inline int __nodemask_scnprintf(char *buf, int len, |
301 | const nodemask_t *srcp, int nbits) | 310 | const nodemask_t *srcp, int nbits) |
302 | { | 311 | { |
303 | return bitmap_scnprintf(buf, len, srcp->bits, nbits); | 312 | return bitmap_scnprintf(buf, len, srcp->bits, nbits); |
304 | } | 313 | } |
305 | 314 | ||
306 | #define nodemask_parse_user(ubuf, ulen, dst) \ | 315 | #define nodemask_parse_user(ubuf, ulen, dst) \ |
307 | __nodemask_parse_user((ubuf), (ulen), &(dst), MAX_NUMNODES) | 316 | __nodemask_parse_user((ubuf), (ulen), &(dst), MAX_NUMNODES) |
308 | static inline int __nodemask_parse_user(const char __user *buf, int len, | 317 | static inline int __nodemask_parse_user(const char __user *buf, int len, |
309 | nodemask_t *dstp, int nbits) | 318 | nodemask_t *dstp, int nbits) |
310 | { | 319 | { |
311 | return bitmap_parse_user(buf, len, dstp->bits, nbits); | 320 | return bitmap_parse_user(buf, len, dstp->bits, nbits); |
312 | } | 321 | } |
313 | 322 | ||
314 | #define nodelist_scnprintf(buf, len, src) \ | 323 | #define nodelist_scnprintf(buf, len, src) \ |
315 | __nodelist_scnprintf((buf), (len), &(src), MAX_NUMNODES) | 324 | __nodelist_scnprintf((buf), (len), &(src), MAX_NUMNODES) |
316 | static inline int __nodelist_scnprintf(char *buf, int len, | 325 | static inline int __nodelist_scnprintf(char *buf, int len, |
317 | const nodemask_t *srcp, int nbits) | 326 | const nodemask_t *srcp, int nbits) |
318 | { | 327 | { |
319 | return bitmap_scnlistprintf(buf, len, srcp->bits, nbits); | 328 | return bitmap_scnlistprintf(buf, len, srcp->bits, nbits); |
320 | } | 329 | } |
321 | 330 | ||
322 | #define nodelist_parse(buf, dst) __nodelist_parse((buf), &(dst), MAX_NUMNODES) | 331 | #define nodelist_parse(buf, dst) __nodelist_parse((buf), &(dst), MAX_NUMNODES) |
323 | static inline int __nodelist_parse(const char *buf, nodemask_t *dstp, int nbits) | 332 | static inline int __nodelist_parse(const char *buf, nodemask_t *dstp, int nbits) |
324 | { | 333 | { |
325 | return bitmap_parselist(buf, dstp->bits, nbits); | 334 | return bitmap_parselist(buf, dstp->bits, nbits); |
326 | } | 335 | } |
327 | 336 | ||
328 | #define node_remap(oldbit, old, new) \ | 337 | #define node_remap(oldbit, old, new) \ |
329 | __node_remap((oldbit), &(old), &(new), MAX_NUMNODES) | 338 | __node_remap((oldbit), &(old), &(new), MAX_NUMNODES) |
330 | static inline int __node_remap(int oldbit, | 339 | static inline int __node_remap(int oldbit, |
331 | const nodemask_t *oldp, const nodemask_t *newp, int nbits) | 340 | const nodemask_t *oldp, const nodemask_t *newp, int nbits) |
332 | { | 341 | { |
333 | return bitmap_bitremap(oldbit, oldp->bits, newp->bits, nbits); | 342 | return bitmap_bitremap(oldbit, oldp->bits, newp->bits, nbits); |
334 | } | 343 | } |
335 | 344 | ||
336 | #define nodes_remap(dst, src, old, new) \ | 345 | #define nodes_remap(dst, src, old, new) \ |
337 | __nodes_remap(&(dst), &(src), &(old), &(new), MAX_NUMNODES) | 346 | __nodes_remap(&(dst), &(src), &(old), &(new), MAX_NUMNODES) |
338 | static inline void __nodes_remap(nodemask_t *dstp, const nodemask_t *srcp, | 347 | static inline void __nodes_remap(nodemask_t *dstp, const nodemask_t *srcp, |
339 | const nodemask_t *oldp, const nodemask_t *newp, int nbits) | 348 | const nodemask_t *oldp, const nodemask_t *newp, int nbits) |
340 | { | 349 | { |
341 | bitmap_remap(dstp->bits, srcp->bits, oldp->bits, newp->bits, nbits); | 350 | bitmap_remap(dstp->bits, srcp->bits, oldp->bits, newp->bits, nbits); |
342 | } | 351 | } |
343 | 352 | ||
344 | #define nodes_onto(dst, orig, relmap) \ | 353 | #define nodes_onto(dst, orig, relmap) \ |
345 | __nodes_onto(&(dst), &(orig), &(relmap), MAX_NUMNODES) | 354 | __nodes_onto(&(dst), &(orig), &(relmap), MAX_NUMNODES) |
346 | static inline void __nodes_onto(nodemask_t *dstp, const nodemask_t *origp, | 355 | static inline void __nodes_onto(nodemask_t *dstp, const nodemask_t *origp, |
347 | const nodemask_t *relmapp, int nbits) | 356 | const nodemask_t *relmapp, int nbits) |
348 | { | 357 | { |
349 | bitmap_onto(dstp->bits, origp->bits, relmapp->bits, nbits); | 358 | bitmap_onto(dstp->bits, origp->bits, relmapp->bits, nbits); |
350 | } | 359 | } |
351 | 360 | ||
352 | #define nodes_fold(dst, orig, sz) \ | 361 | #define nodes_fold(dst, orig, sz) \ |
353 | __nodes_fold(&(dst), &(orig), sz, MAX_NUMNODES) | 362 | __nodes_fold(&(dst), &(orig), sz, MAX_NUMNODES) |
354 | static inline void __nodes_fold(nodemask_t *dstp, const nodemask_t *origp, | 363 | static inline void __nodes_fold(nodemask_t *dstp, const nodemask_t *origp, |
355 | int sz, int nbits) | 364 | int sz, int nbits) |
356 | { | 365 | { |
357 | bitmap_fold(dstp->bits, origp->bits, sz, nbits); | 366 | bitmap_fold(dstp->bits, origp->bits, sz, nbits); |
358 | } | 367 | } |
359 | 368 | ||
360 | #if MAX_NUMNODES > 1 | 369 | #if MAX_NUMNODES > 1 |
361 | #define for_each_node_mask(node, mask) \ | 370 | #define for_each_node_mask(node, mask) \ |
362 | for ((node) = first_node(mask); \ | 371 | for ((node) = first_node(mask); \ |
363 | (node) < MAX_NUMNODES; \ | 372 | (node) < MAX_NUMNODES; \ |
364 | (node) = next_node((node), (mask))) | 373 | (node) = next_node((node), (mask))) |
365 | #else /* MAX_NUMNODES == 1 */ | 374 | #else /* MAX_NUMNODES == 1 */ |
366 | #define for_each_node_mask(node, mask) \ | 375 | #define for_each_node_mask(node, mask) \ |
367 | if (!nodes_empty(mask)) \ | 376 | if (!nodes_empty(mask)) \ |
368 | for ((node) = 0; (node) < 1; (node)++) | 377 | for ((node) = 0; (node) < 1; (node)++) |
369 | #endif /* MAX_NUMNODES */ | 378 | #endif /* MAX_NUMNODES */ |
370 | 379 | ||
/*
 * Bitmasks that are kept for all the nodes.
 */
enum node_states {
	N_POSSIBLE,		/* The node could become online at some point */
	N_ONLINE,		/* The node is online */
	N_NORMAL_MEMORY,	/* The node has regular memory */
#ifdef CONFIG_HIGHMEM
	N_HIGH_MEMORY,		/* The node has regular or high memory */
#else
	/* Without highmem the two states are the same mask. */
	N_HIGH_MEMORY = N_NORMAL_MEMORY,
#endif
#ifdef CONFIG_MOVABLE_NODE
	N_MEMORY,		/* The node has memory(regular, high, movable) */
#else
	/* Without movable nodes, "has memory" collapses into N_HIGH_MEMORY. */
	N_MEMORY = N_HIGH_MEMORY,
#endif
	N_CPU,			/* The node has one or more cpus */
	NR_NODE_STATES
};

/*
 * The following particular system nodemasks and operations
 * on them manage all possible and online nodes.
 */

/* One nodemask per enum node_states entry above. */
extern nodemask_t node_states[NR_NODE_STATES];
398 | 407 | ||
#if MAX_NUMNODES > 1

/* Test whether @node has @state set in the global node_states[] masks. */
static inline int node_state(int node, enum node_states state)
{
	return node_isset(node, node_states[state]);
}

/* Set @node's bit in the node_states[@state] mask. */
static inline void node_set_state(int node, enum node_states state)
{
	__node_set(node, &node_states[state]);
}

/* Clear @node's bit in the node_states[@state] mask. */
static inline void node_clear_state(int node, enum node_states state)
{
	__node_clear(node, &node_states[state]);
}

/* Number of nodes currently having @state set. */
static inline int num_node_state(enum node_states state)
{
	return nodes_weight(node_states[state]);
}

/* Iterate over every node that has @__state set. */
#define for_each_node_state(__node, __state) \
	for_each_node_mask((__node), node_states[__state])

#define first_online_node	first_node(node_states[N_ONLINE])
#define next_online_node(nid)	next_node((nid), node_states[N_ONLINE])

extern int nr_node_ids;
extern int nr_online_nodes;

/* Mark @nid online and refresh the cached online-node count. */
static inline void node_set_online(int nid)
{
	node_set_state(nid, N_ONLINE);
	nr_online_nodes = num_node_state(N_ONLINE);
}

/* Mark @nid offline and refresh the cached online-node count. */
static inline void node_set_offline(int nid)
{
	node_clear_state(nid, N_ONLINE);
	nr_online_nodes = num_node_state(N_ONLINE);
}

#else /* MAX_NUMNODES == 1 */

/*
 * Single-node stubs: node 0 is the only node, always present in every
 * state, so the state masks never need to be touched.
 */
static inline int node_state(int node, enum node_states state)
{
	return node == 0;
}

static inline void node_set_state(int node, enum node_states state)
{
}

static inline void node_clear_state(int node, enum node_states state)
{
}

static inline int num_node_state(enum node_states state)
{
	return 1;
}

#define for_each_node_state(node, __state) \
	for ( (node) = 0; (node) == 0; (node) = 1)

#define first_online_node	0
#define next_online_node(nid)	(MAX_NUMNODES)
#define nr_node_ids		1
#define nr_online_nodes		1

#define node_set_online(node)	   node_set_state((node), N_ONLINE)
#define node_set_offline(node)	   node_clear_state((node), N_ONLINE)

#endif
473 | 482 | ||
#if defined(CONFIG_NUMA) && (MAX_NUMNODES > 1)
/* Pick a node from @maskp; real implementation lives outside this header. */
extern int node_random(const nodemask_t *maskp);
#else
/* Non-NUMA stub: node 0 is the only possible answer. */
static inline int node_random(const nodemask_t *mask)
{
	return 0;
}
#endif
482 | 491 | ||
/* Convenience wrappers around the N_ONLINE / N_POSSIBLE state masks. */
#define node_online_map 	node_states[N_ONLINE]
#define node_possible_map 	node_states[N_POSSIBLE]

#define num_online_nodes()	num_node_state(N_ONLINE)
#define num_possible_nodes()	num_node_state(N_POSSIBLE)
#define node_online(node)	node_state((node), N_ONLINE)
#define node_possible(node)	node_state((node), N_POSSIBLE)

#define for_each_node(node)	   for_each_node_state(node, N_POSSIBLE)
#define for_each_online_node(node) for_each_node_state(node, N_ONLINE)
493 | 502 | ||
/*
 * For nodemask scratch area.
 * NODEMASK_ALLOC(type, name) allocates an object with a specified type and
 * name.
 */
#if NODES_SHIFT > 8 /* nodemask_t > 32 bytes: too big for the stack */
#define NODEMASK_ALLOC(type, name, gfp_flags)	\
			type *name = kmalloc(sizeof(*name), gfp_flags)
#define NODEMASK_FREE(m)			kfree(m)
#else
/* Small masks: back the pointer with an on-stack object, free is a no-op. */
#define NODEMASK_ALLOC(type, name, gfp_flags) type _##name, *name = &_##name
#define NODEMASK_FREE(m)			do {} while (0)
#endif

/* An example structure for using NODEMASK_ALLOC, used in mempolicy. */
struct nodemask_scratch {
	nodemask_t mask1;
	nodemask_t mask2;
};
513 | 522 | ||
/* Declare a two-mask scratch area @x; pair with NODEMASK_SCRATCH_FREE(). */
#define NODEMASK_SCRATCH(x)						\
			NODEMASK_ALLOC(struct nodemask_scratch, x,	\
					GFP_KERNEL | __GFP_NORETRY)
#define NODEMASK_SCRATCH_FREE(x)	NODEMASK_FREE(x)
518 | 527 | ||
519 | 528 | ||
520 | #endif /* __LINUX_NODEMASK_H */ | 529 | #endif /* __LINUX_NODEMASK_H */ |
521 | 530 |