x86/kconfig: Disable CONFIG_GENERIC_HWEIGHT and remove __HAVE_ARCH_SW_HWEIGHT

Remove an unnecessary arch complication:

arch/x86/include/asm/arch_hweight.h uses __sw_hweight{32,64} as the
fallback alternatives, and they are implemented in arch/x86/lib/hweight.S.

x86 does not rely on the generic C implementation in lib/hweight.c at
all (the file is built only when CONFIG_GENERIC_HWEIGHT is enabled), so
CONFIG_GENERIC_HWEIGHT should be disabled.

__HAVE_ARCH_SW_HWEIGHT is not necessary either: once it is dropped from
the x86 header, nothing defines it, so the #ifndef guards in
lib/hweight.c can go away as well.

No change in functionality intended.
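
For reference, the dispatch in arch/x86/include/asm/arch_hweight.h that
the message refers to looks roughly like this on 64-bit (a simplified
sketch, not the verbatim header; the real code emits POPCNT as raw
instruction bytes and selects the registers via REG_IN/REG_OUT):

  static __always_inline unsigned int __arch_hweight32(unsigned int w)
  {
          unsigned int res;

          /*
           * Default to calling the assembly fallback __sw_hweight32 from
           * arch/x86/lib/hweight.S; alternatives patching replaces the
           * call with a single POPCNT on CPUs with X86_FEATURE_POPCNT.
           */
          asm (ALTERNATIVE("call __sw_hweight32",
                           "popcntl %%edi, %%eax",
                           X86_FEATURE_POPCNT)
               : "=a" (res)
               : "D" (w));

          return res;
  }

So even without POPCNT, the software fallback is the hand-written
assembly in hweight.S, never the C version in lib/hweight.c.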

Signed-off-by: Masahiro Yamada <yamada.masahiro@socionext.com>
Cc: Borislav Petkov <bp@alien8.de>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Uros Bizjak <ubizjak@gmail.com>
Link: http://lkml.kernel.org/r/1557665521-17570-1-git-send-email-yamada.masahiro@socionext.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>

--- a/arch/x86/Kconfig
+++ b/arch/x86/Kconfig
@@ -261,9 +261,6 @@ config GENERIC_BUG
 config GENERIC_BUG_RELATIVE_POINTERS
 	bool
 
-config GENERIC_HWEIGHT
-	def_bool y
-
 config ARCH_MAY_HAVE_PC_FDC
 	def_bool y
 	depends on ISA_DMA_API

--- a/arch/x86/include/asm/arch_hweight.h
+++ b/arch/x86/include/asm/arch_hweight.h
@@ -12,8 +12,6 @@
 #define REG_OUT "a"
 #endif
 
-#define __HAVE_ARCH_SW_HWEIGHT
-
 static __always_inline unsigned int __arch_hweight32(unsigned int w)
 {
 	unsigned int res;

--- a/lib/hweight.c
+++ b/lib/hweight.c
@@ -10,7 +10,6 @@
  * The Hamming Weight of a number is the total number of bits set in it.
  */
 
-#ifndef __HAVE_ARCH_SW_HWEIGHT
 unsigned int __sw_hweight32(unsigned int w)
 {
 #ifdef CONFIG_ARCH_HAS_FAST_MULTIPLIER
@@ -27,7 +26,6 @@ unsigned int __sw_hweight32(unsigned int w)
 #endif
 }
 EXPORT_SYMBOL(__sw_hweight32);
-#endif
 
 unsigned int __sw_hweight16(unsigned int w)
 {
@@ -46,7 +44,6 @@ unsigned int __sw_hweight8(unsigned int w)
 }
 EXPORT_SYMBOL(__sw_hweight8);
 
-#ifndef __HAVE_ARCH_SW_HWEIGHT
 unsigned long __sw_hweight64(__u64 w)
 {
 #if BITS_PER_LONG == 32
@@ -69,4 +66,3 @@ unsigned long __sw_hweight64(__u64 w)
 #endif
 }
 EXPORT_SYMBOL(__sw_hweight64);
-#endif
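
For architectures that do enable CONFIG_GENERIC_HWEIGHT, the 32-bit
helper in lib/hweight.c (now unconditional, its body unchanged by this
patch) is the usual parallel bit count; roughly (a slightly condensed
sketch):

  unsigned int __sw_hweight32(unsigned int w)
  {
  #ifdef CONFIG_ARCH_HAS_FAST_MULTIPLIER
          w -= (w >> 1) & 0x55555555;             /* 2-bit partial sums */
          w =  (w & 0x33333333) + ((w >> 2) & 0x33333333);
          w =  (w + (w >> 4)) & 0x0f0f0f0f;       /* per-byte sums */
          return (w * 0x01010101) >> 24;          /* add the bytes via multiply */
  #else
          unsigned int res = w - ((w >> 1) & 0x55555555);
          res = (res & 0x33333333) + ((res >> 2) & 0x33333333);
          res = (res + (res >> 4)) & 0x0f0f0f0f;
          res = res + (res >> 8);
          return (res + (res >> 16)) & 0x000000ff;
  #endif
  }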