From 64970b68d2b3ed32b964b0b30b1b98518fde388e Mon Sep 17 00:00:00 2001
From: Alexander van Heukelum
Date: Tue, 11 Mar 2008 16:17:19 +0100
Subject: x86, generic: optimize find_next_(zero_)bit for small constant-size
 bitmaps

This moves an optimization for searching constant-sized small bitmaps
from x86_64-specific to generic code.

On an i386 defconfig (the x86#testing one), the size of vmlinux hardly
changes with this applied. I have observed only four places where this
optimization avoids a call into find_next_bit:

In the functions return_unused_surplus_pages, alloc_fresh_huge_page,
and adjust_pool_surplus, this patch avoids a call for a 1-bit bitmap.
In __next_cpu a call is avoided for a 32-bit bitmap. That's it.

On x86_64, 52 locations are optimized with a minimal increase in
code size:

Current #testing defconfig:
	146 x bsf, 27 x find_next_*bit
   text    data     bss     dec     hex filename
5392637  846592  724424 6963653  6a41c5 vmlinux

After removing the x86_64 specific optimization for find_next_*bit:
	94 x bsf, 79 x find_next_*bit
   text    data     bss     dec     hex filename
5392358  846592  724424 6963374  6a40ae vmlinux

After this patch (making the optimization generic):
	146 x bsf, 27 x find_next_*bit
   text    data     bss     dec     hex filename
5392396  846592  724424 6963412  6a40d4 vmlinux

[ tglx@linutronix.de: build fixes ]

Signed-off-by: Ingo Molnar
---
 include/asm-x86/bitops.h    |  6 ------
 include/asm-x86/bitops_64.h | 10 ----------
 2 files changed, 16 deletions(-)

(limited to 'include/asm-x86')

diff --git a/include/asm-x86/bitops.h b/include/asm-x86/bitops.h
index 31e408de90c..1ae7b270a1e 100644
--- a/include/asm-x86/bitops.h
+++ b/include/asm-x86/bitops.h
@@ -306,12 +306,6 @@ static int test_bit(int nr, const volatile unsigned long *addr);
 #undef BIT_ADDR
 #undef ADDR
 
-unsigned long find_next_bit(const unsigned long *addr,
-		unsigned long size, unsigned long offset);
-unsigned long find_next_zero_bit(const unsigned long *addr,
-		unsigned long size, unsigned long offset);
-
-
 #ifdef CONFIG_X86_32
 # include "bitops_32.h"
 #else
diff --git a/include/asm-x86/bitops_64.h b/include/asm-x86/bitops_64.h
index 65b20fb2ae7..7118ef2cc4e 100644
--- a/include/asm-x86/bitops_64.h
+++ b/include/asm-x86/bitops_64.h
@@ -15,16 +15,6 @@ static inline long __scanbit(unsigned long val, unsigned long max)
 	return val;
 }
 
-#define find_next_bit(addr,size,off) \
-((__builtin_constant_p(size) && (size) <= BITS_PER_LONG ? \
-  ((off) + (__scanbit((*(unsigned long *)addr) >> (off),(size)-(off)))) : \
-	find_next_bit(addr,size,off)))
-
-#define find_next_zero_bit(addr,size,off) \
-((__builtin_constant_p(size) && (size) <= BITS_PER_LONG ? \
-  ((off)+(__scanbit(~(((*(unsigned long *)addr)) >> (off)),(size)-(off)))) : \
-	find_next_zero_bit(addr,size,off)))
-
 #define find_first_bit(addr, size) \
 ((__builtin_constant_p((size)) && (size) <= BITS_PER_LONG \
  ? (__scanbit(*(unsigned long *)(addr), (size))) \
-- cgit v1.2.3
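
Note: because this cgit view is limited to 'include/asm-x86', only the removal
side of the change is shown; the generic fast path that replaces these macros
lives outside this directory. As a rough illustration of the idea, and not the
kernel's actual code, the standalone C sketch below shows the single-word fast
path that a constant, word-sized bitmap enables: the search collapses to a
shift plus one bit-scan. The names scanbit_sketch and find_next_bit_small are
invented for this example.

/*
 * Standalone sketch, NOT the kernel's actual code: it illustrates the
 * fast path the removed macros took when the bitmap size is a compile-time
 * constant no larger than one word (BITS_PER_LONG).
 */
#include <stdio.h>

/* Index of the first set bit in 'val', or 'max' if 'val' is zero. */
static inline unsigned long scanbit_sketch(unsigned long val, unsigned long max)
{
	return val ? (unsigned long)__builtin_ctzl(val) : max;
}

/*
 * Single-word fast path: the whole bitmap lives in *addr, so the search
 * reduces to a shift plus one bit-scan instruction (bsf/tzcnt on x86).
 * Returns 'size' when no set bit is found at or above 'offset'.
 */
static inline unsigned long find_next_bit_small(const unsigned long *addr,
						unsigned long size,
						unsigned long offset)
{
	unsigned long idx = offset + scanbit_sketch(*addr >> offset, size - offset);

	return idx < size ? idx : size;
}

int main(void)
{
	unsigned long map = 0x90UL;	/* bits 4 and 7 set */

	/* First set bit at or above bit 5 in an 8-bit map: prints 7. */
	printf("%lu\n", find_next_bit_small(&map, 8, 5));

	/* No set bit at or above bit 5 in a 6-bit map: prints 6 ("not found"). */
	printf("%lu\n", find_next_bit_small(&map, 6, 5));

	return 0;
}

When the size argument is a compile-time constant, code of this shape lets the
compiler drop the out-of-line find_next_bit() call entirely, which is where the
52 converted call sites in the x86_64 numbers above come from.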