Blame view
lib/hweight.c
1.95 KB
8bc3bcc93 lib: reduce the u... |
1 |
#include <linux/export.h> |
1977f0327 remove asm/bitops... |
2 |
#include <linux/bitops.h> |
3b9ed1a5d [PATCH] bitops: g... |
3 4 5 6 7 8 9 10 |
#include <asm/types.h> /** * hweightN - returns the hamming weight of a N-bit word * @x: the word to weigh * * The Hamming Weight of a number is the total number of bits set in it. */ |
f5967101e x86/hweight: Get ... |
11 |
#ifndef __HAVE_ARCH_SW_HWEIGHT
/*
 * __sw_hweight32 - software population count of a 32-bit word
 * @w: the word whose set bits are counted
 *
 * Counts bits with the classic parallel divide-and-conquer scheme:
 * sum adjacent bit pairs, then nibbles, then bytes.
 */
unsigned int __sw_hweight32(unsigned int w)
{
#ifdef CONFIG_ARCH_HAS_FAST_MULTIPLIER
	/* With a fast multiplier, one multiply sums all four byte counts. */
	w = w - ((w >> 1) & 0x55555555);
	w = (w & 0x33333333) + ((w >> 2) & 0x33333333);
	w = (w + (w >> 4)) & 0x0f0f0f0f;
	return (w * 0x01010101) >> 24;
#else
	/* Multiplier-free fallback: fold byte counts with shifts/adds. */
	unsigned int count = w - ((w >> 1) & 0x55555555);

	count = (count & 0x33333333) + ((count >> 2) & 0x33333333);
	count = (count + (count >> 4)) & 0x0F0F0F0F;
	count += count >> 8;
	count += count >> 16;
	return count & 0x000000FF;
#endif
}
EXPORT_SYMBOL(__sw_hweight32);
#endif
3b9ed1a5d [PATCH] bitops: g... |
29 |
|
d61931d89 x86: Add optimize... |
30 |
/*
 * __sw_hweight16 - software population count of a 16-bit value
 * @w: value whose low 16 bits are counted
 *
 * Same parallel bit-summing scheme as the 32-bit variant, with
 * masks narrowed to 16 bits.
 */
unsigned int __sw_hweight16(unsigned int w)
{
	unsigned int count = w - ((w >> 1) & 0x5555);

	count = (count & 0x3333) + ((count >> 2) & 0x3333);
	count = (count + (count >> 4)) & 0x0F0F;
	count += count >> 8;
	return count & 0x00FF;
}
EXPORT_SYMBOL(__sw_hweight16);
3b9ed1a5d [PATCH] bitops: g... |
38 |
|
d61931d89 x86: Add optimize... |
39 |
/*
 * __sw_hweight8 - software population count of an 8-bit value
 * @w: value whose low 8 bits are counted
 *
 * Sums bit pairs, then nibbles; the final mask yields the byte's
 * bit count (0..8).
 */
unsigned int __sw_hweight8(unsigned int w)
{
	unsigned int count = w - ((w >> 1) & 0x55);

	count = (count & 0x33) + ((count >> 2) & 0x33);
	count += count >> 4;
	return count & 0x0F;
}
EXPORT_SYMBOL(__sw_hweight8);
3b9ed1a5d [PATCH] bitops: g... |
46 |
|
f5967101e x86/hweight: Get ... |
47 |
#ifndef __HAVE_ARCH_SW_HWEIGHT
/*
 * __sw_hweight64 - software population count of a 64-bit word
 * @w: the word whose set bits are counted
 *
 * On 32-bit longs, delegates to two 32-bit counts; on 64-bit longs,
 * uses the wide parallel bit-summing scheme directly.
 */
unsigned long __sw_hweight64(__u64 w)
{
#if BITS_PER_LONG == 32
	/* Count each 32-bit half separately and add. */
	return __sw_hweight32((unsigned int)(w >> 32)) +
	       __sw_hweight32((unsigned int)w);
#elif BITS_PER_LONG == 64
#ifdef CONFIG_ARCH_HAS_FAST_MULTIPLIER
	/* Multiply gathers all eight byte counts into the top byte. */
	w = w - ((w >> 1) & 0x5555555555555555ul);
	w = (w & 0x3333333333333333ul) + ((w >> 2) & 0x3333333333333333ul);
	w = (w + (w >> 4)) & 0x0f0f0f0f0f0f0f0ful;
	return (w * 0x0101010101010101ul) >> 56;
#else
	/* Multiplier-free fallback: fold byte counts with shifts/adds. */
	__u64 count = w - ((w >> 1) & 0x5555555555555555ul);

	count = (count & 0x3333333333333333ul) +
		((count >> 2) & 0x3333333333333333ul);
	count = (count + (count >> 4)) & 0x0F0F0F0F0F0F0F0Ful;
	count += count >> 8;
	count += count >> 16;
	count += count >> 32;
	return count & 0x00000000000000FFul;
#endif
#endif
}
EXPORT_SYMBOL(__sw_hweight64);
#endif