path: root/arch/riscv/include/asm/arch_hweight.h

/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Based on arch/x86/include/asm/arch_hweight.h
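 *
 * Population-count (Hamming weight) helpers that use the Zbb cpop
 * instructions when the CPU supports them, falling back to the
 * generic software implementations otherwise.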
 */

#ifndef _ASM_RISCV_HWEIGHT_H
#define _ASM_RISCV_HWEIGHT_H

#include <asm/alternative-macros.h>
#include <asm/hwcap.h>

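/*
 * Pick the Zbb instruction that counts the bits of a 32-bit value:
 * on RV64, cpopw operates on the low 32 bits of a register, while on
 * RV32 the full-width cpop is already 32 bits wide.
 */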
#if (BITS_PER_LONG == 64)
#define CPOPW	"cpopw "
#elif (BITS_PER_LONG == 32)
#define CPOPW	"cpop "
#else
#error "Unexpected BITS_PER_LONG"
#endif

static __always_inline unsigned int __arch_hweight32(unsigned int w)
{
#ifdef CONFIG_RISCV_ISA_ZBB
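	/*
	 * The "j %l[legacy]" jump is patched to a nop at boot once the
	 * Zbb extension is detected, so capable hardware falls through
	 * to the cpop sequence below.
	 */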
	asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
				      RISCV_ISA_EXT_ZBB, 1)
			  : : : : legacy);

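	/*
	 * ".option arch,+zbb" lets the assembler accept cpop/cpopw even
	 * when Zbb is not part of the kernel's baseline ISA; the
	 * alternative above ensures this path only runs on Zbb hardware.
	 */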
	asm (".option push\n"
	     ".option arch,+zbb\n"
	     CPOPW "%0, %0\n"
	     ".option pop\n"
	     : "+r" (w) : :);

	return w;

legacy:
#endif
	return __sw_hweight32(w);
}

static inline unsigned int __arch_hweight16(unsigned int w)
{
	return __arch_hweight32(w & 0xffff);
}

static inline unsigned int __arch_hweight8(unsigned int w)
{
	return __arch_hweight32(w & 0xff);
}

#if BITS_PER_LONG == 64
static __always_inline unsigned long __arch_hweight64(__u64 w)
{
# ifdef CONFIG_RISCV_ISA_ZBB
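	/*
	 * Same runtime-patching scheme as __arch_hweight32(), using the
	 * full-width cpop.
	 */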
	asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
				      RISCV_ISA_EXT_ZBB, 1)
			  : : : : legacy);

	asm (".option push\n"
	     ".option arch,+zbb\n"
	     "cpop %0, %0\n"
	     ".option pop\n"
	     : "+r" (w) : :);

	return w;

legacy:
# endif
	return __sw_hweight64(w);
}
#else /* BITS_PER_LONG == 64 */
static inline unsigned long __arch_hweight64(__u64 w)
{
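	/* On RV32, count each 32-bit half separately and add the results. */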
	return  __arch_hweight32((u32)w) +
		__arch_hweight32((u32)(w >> 32));
}
#endif /* !(BITS_PER_LONG == 64) */

#endif /* _ASM_RISCV_HWEIGHT_H */