
Commit 5757bd61 authored by Linus Torvalds

Merge branch 'core-types-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull inlining tuning from Ingo Molnar:
 "A handful of inlining optimizations inspired by x86 work but
  applicable in general"

* 'core-types-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  jiffies: Force inlining of {m,u}msecs_to_jiffies()
  x86/hweight: Force inlining of __arch_hweight{32,64}()
  linux/bitmap: Force inlining of bitmap weight functions
parents 7073bc66 accd0b9e
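
A note for context, not part of the commit: plain "inline" is only a hint, and gcc's size heuristics are free to ignore it and emit an out-of-line copy instead; __always_inline forces expansion at every call site. A minimal sketch of the difference, assuming gcc or clang:

/* Sketch only: the kernel defines __always_inline roughly like this
 * (see include/linux/compiler.h in trees of this vintage). */
#define __always_inline inline __attribute__((__always_inline__))

static inline unsigned int hint_only(unsigned int w)
{
	return __builtin_popcount(w);	/* compiler may leave this out of line */
}

static __always_inline unsigned int forced(unsigned int w)
{
	return __builtin_popcount(w);	/* always expanded into the caller */
}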
arch/x86/include/asm/arch_hweight.h +8 −5

@@ -21,7 +21,7 @@
  * ARCH_HWEIGHT_CFLAGS in <arch/x86/Kconfig> for the respective
  * compiler switches.
  */
-static inline unsigned int __arch_hweight32(unsigned int w)
+static __always_inline unsigned int __arch_hweight32(unsigned int w)
 {
 	unsigned int res = 0;
 
@@ -42,20 +42,23 @@ static inline unsigned int __arch_hweight8(unsigned int w)
 	return __arch_hweight32(w & 0xff);
 }
 
+#ifdef CONFIG_X86_32
 static inline unsigned long __arch_hweight64(__u64 w)
 {
-	unsigned long res = 0;
-
-#ifdef CONFIG_X86_32
 	return  __arch_hweight32((u32)w) +
 		__arch_hweight32((u32)(w >> 32));
+}
 #else
+static __always_inline unsigned long __arch_hweight64(__u64 w)
+{
+	unsigned long res = 0;
+
 	asm (ALTERNATIVE("call __sw_hweight64", POPCNT64, X86_FEATURE_POPCNT)
 		     : "="REG_OUT (res)
 		     : REG_IN (w));
-#endif /* CONFIG_X86_32 */
 
 	return res;
 }
+#endif /* CONFIG_X86_32 */
 
 #endif
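
On 64-bit, the ALTERNATIVE() above patches in a single popcnt instruction at boot when the CPU advertises X86_FEATURE_POPCNT, and otherwise emits a call to the software fallback __sw_hweight64 in lib/hweight.c. That fallback is the classic parallel bit-count; a portable, illustrative sketch of what it computes (not the kernel's actual code):

#include <stdint.h>

/* SWAR popcount sketch: count bit pairs, then nibbles, then sum bytes. */
static inline unsigned long sw_hweight64_sketch(uint64_t w)
{
	w -= (w >> 1) & 0x5555555555555555ULL;		/* 2-bit counts */
	w  = (w & 0x3333333333333333ULL) + ((w >> 2) & 0x3333333333333333ULL);
	w  = (w + (w >> 4)) & 0x0f0f0f0f0f0f0f0fULL;	/* per-byte counts */
	return (w * 0x0101010101010101ULL) >> 56;	/* sum bytes into top byte */
}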
include/linux/bitmap.h +1 −1

@@ -295,7 +295,7 @@ static inline int bitmap_full(const unsigned long *src, unsigned int nbits)
 	return find_first_zero_bit(src, nbits) == nbits;
 }
 
-static inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
+static __always_inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return hweight_long(*src & BITMAP_LAST_WORD_MASK(nbits));
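
The fast path above only fires if the compiler can still see that nbits is a compile-time constant after inlining, which is exactly what __always_inline guarantees. For reference, the helpers it uses are defined in include/linux/bitmap.h roughly as follows (simplified sketch; details vary by kernel version):

#define small_const_nbits(nbits) \
	(__builtin_constant_p(nbits) && (nbits) <= BITS_PER_LONG)

#define BITMAP_LAST_WORD_MASK(nbits) \
	(~0UL >> (-(nbits) & (BITS_PER_LONG - 1)))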
include/linux/bitops.h +3 −3

@@ -57,7 +57,7 @@ extern unsigned long __sw_hweight64(__u64 w);
 	     (bit) < (size);					\
 	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))
 
-static __inline__ int get_bitmask_order(unsigned int count)
+static inline int get_bitmask_order(unsigned int count)
 {
 	int order;
 
@@ -65,7 +65,7 @@ static __inline__ int get_bitmask_order(unsigned int count)
 	return order;	/* We could be slightly more clever with -1 here... */
 }
 
-static __inline__ int get_count_order(unsigned int count)
+static inline int get_count_order(unsigned int count)
 {
 	int order;
 
@@ -75,7 +75,7 @@ static __inline__ int get_count_order(unsigned int count)
 	return order;
 }
 
-static inline unsigned long hweight_long(unsigned long w)
+static __always_inline unsigned long hweight_long(unsigned long w)
 {
 	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
 }
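
Once inlined, hweight_long() contains no runtime branch at all: sizeof(w) is a compile-time constant, so the ternary folds to a direct hweight32() or hweight64() call. A self-contained userspace sketch, using gcc builtins in place of the kernel's arch-optimized hweight implementations:

#include <stdio.h>

static inline unsigned int hweight32(unsigned int w)
{
	return __builtin_popcount(w);
}

static inline unsigned int hweight64(unsigned long long w)
{
	return __builtin_popcountll(w);
}

static inline unsigned long hweight_long(unsigned long w)
{
	/* sizeof(w) is constant, so this branch folds away at compile time */
	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
}

int main(void)
{
	printf("%lu\n", hweight_long(0xF0UL));	/* prints 4 */
	return 0;
}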
include/linux/jiffies.h +2 −2

@@ -351,7 +351,7 @@ static inline unsigned long _msecs_to_jiffies(const unsigned int m)
  * directly here and from __msecs_to_jiffies() in the case where
  * constant folding is not possible.
  */
-static inline unsigned long msecs_to_jiffies(const unsigned int m)
+static __always_inline unsigned long msecs_to_jiffies(const unsigned int m)
 {
 	if (__builtin_constant_p(m)) {
 		if ((int)m < 0)
@@ -405,7 +405,7 @@ static inline unsigned long _usecs_to_jiffies(const unsigned int u)
  * directly here and from __msecs_to_jiffies() in the case where
  * constant folding is not possible.
  */
-static inline unsigned long usecs_to_jiffies(const unsigned int u)
+static __always_inline unsigned long usecs_to_jiffies(const unsigned int u)
 {
 	if (__builtin_constant_p(u)) {
 		if (u > jiffies_to_usecs(MAX_JIFFY_OFFSET))
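
These two need __always_inline because of the __builtin_constant_p() test: it can only evaluate to true when the body is expanded into a caller that passes a literal, so a compiler that declines the inline hint silently loses the compile-time conversion. A sketch of the pattern, assuming HZ=1000 purely for illustration:

#define HZ 1000				/* assumption for this sketch only */
#define MSEC_PER_SEC 1000

/* Out-of-line runtime conversion, standing in for the kernel's helper
 * in kernel/time/time.c. */
static unsigned long __msecs_to_jiffies_sketch(const unsigned int m)
{
	return m / (MSEC_PER_SEC / HZ);
}

static inline __attribute__((__always_inline__))
unsigned long msecs_to_jiffies_sketch(const unsigned int m)
{
	if (__builtin_constant_p(m))
		return m / (MSEC_PER_SEC / HZ);	/* evaluated at compile time */
	return __msecs_to_jiffies_sketch(m);	/* runtime division */
}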