author     Linus Torvalds <torvalds@linux-foundation.org>  2015-08-31 18:37:33 -0700
committer  Linus Torvalds <torvalds@linux-foundation.org>  2015-08-31 18:37:33 -0700
commit     5757bd6157f523ff0d448a4ec93938523ca441a9 (patch)
tree       713885bff513324e1935874653e546162cdbe540 /include
parent     7073bc66126e3ab742cce9416ad6b4be8b03c4f7 (diff)
parent     accd0b9ec015d611eb7783dd86f1bb31bf8d62ab (diff)
Merge branch 'core-types-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull inlining tuning from Ingo Molnar:
 "A handful of inlining optimizations inspired by x86 work but
  applicable in general"

* 'core-types-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  jiffies: Force inlining of {m,u}msecs_to_jiffies()
  x86/hweight: Force inlining of __arch_hweight{32,64}()
  linux/bitmap: Force inlining of bitmap weight functions
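For background: plain "inline" is only a hint, and under -Os or CONFIG_OPTIMIZE_INLINING the compiler may still emit these helpers out of line, at which point the __builtin_constant_p() checks inside them no longer see a constant argument and the cheap compile-time paths are lost. The kernel's __always_inline expands to the GCC/Clang always_inline function attribute, which removes that freedom. The user-space sketch below illustrates the pattern only; to_ticks() and MY_HZ are made-up stand-ins, not the kernel's real jiffies conversion.

#include <stdio.h>

/* Rough equivalent of the kernel's __always_inline (see include/linux/compiler*.h). */
#define my_always_inline inline __attribute__((__always_inline__))

#define MY_HZ 100 /* made-up stand-in for the kernel's HZ */

/*
 * Made-up helper mimicking the msecs_to_jiffies() pattern: take a cheap,
 * constant-folded path when the argument is a compile-time constant and a
 * generic runtime path otherwise. Forcing inlining guarantees the
 * __builtin_constant_p() test is evaluated in the caller, where a literal
 * argument is still visibly constant.
 */
static my_always_inline unsigned long to_ticks(unsigned int ms)
{
	if (__builtin_constant_p(ms))
		return ms / (1000 / MY_HZ);          /* folds to a constant */
	return (unsigned long)ms * MY_HZ / 1000;     /* generic runtime path */
}

int main(void)
{
	unsigned int runtime_ms = 250;

	printf("%lu\n", to_ticks(1000));       /* constant argument: prints 100 */
	printf("%lu\n", to_ticks(runtime_ms)); /* runtime argument: prints 25 */
	return 0;
}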
Diffstat (limited to 'include')
-rw-r--r--  include/linux/bitmap.h  | 2 +-
-rw-r--r--  include/linux/bitops.h  | 6 +++---
-rw-r--r--  include/linux/jiffies.h | 4 ++--
3 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/include/linux/bitmap.h b/include/linux/bitmap.h
index ea17cca9e685..9653fdb76a42 100644
--- a/include/linux/bitmap.h
+++ b/include/linux/bitmap.h
@@ -295,7 +295,7 @@ static inline int bitmap_full(const unsigned long *src, unsigned int nbits)
 	return find_first_zero_bit(src, nbits) == nbits;
 }
 
-static inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
+static __always_inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return hweight_long(*src & BITMAP_LAST_WORD_MASK(nbits));
diff --git a/include/linux/bitops.h b/include/linux/bitops.h
index 297f5bda4fdf..e63553386ae7 100644
--- a/include/linux/bitops.h
+++ b/include/linux/bitops.h
@@ -57,7 +57,7 @@ extern unsigned long __sw_hweight64(__u64 w);
 	     (bit) < (size); \
 	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))
 
-static __inline__ int get_bitmask_order(unsigned int count)
+static inline int get_bitmask_order(unsigned int count)
 {
 	int order;
@@ -65,7 +65,7 @@ static __inline__ int get_bitmask_order(unsigned int count)
 	return order;	/* We could be slightly more clever with -1 here... */
 }
 
-static __inline__ int get_count_order(unsigned int count)
+static inline int get_count_order(unsigned int count)
 {
 	int order;
@@ -75,7 +75,7 @@ static __inline__ int get_count_order(unsigned int count)
 	return order;
 }
 
-static inline unsigned long hweight_long(unsigned long w)
+static __always_inline unsigned long hweight_long(unsigned long w)
 {
 	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
 }
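The hweight_long() change above follows the same reasoning: sizeof(w) == 4 is a compile-time constant, but the ternary only collapses to a single hweight32() or hweight64() call if the body is actually inlined into the caller. A small user-space sketch of that dispatch shape, using __builtin_popcount*() as made-up stand-ins for the kernel's hweight implementations:

#include <stdio.h>

/* Rough equivalent of the kernel's __always_inline. */
#define my_always_inline inline __attribute__((__always_inline__))

/* Made-up stand-ins for the kernel's hweight32()/hweight64() population counts. */
static my_always_inline unsigned int my_hweight32(unsigned int w)
{
	return (unsigned int)__builtin_popcount(w);
}

static my_always_inline unsigned int my_hweight64(unsigned long long w)
{
	return (unsigned int)__builtin_popcountll(w);
}

/*
 * Same shape as hweight_long(): sizeof(w) is a compile-time constant, so
 * once the body is inlined the ternary collapses and only one of the two
 * popcount paths is emitted at each call site.
 */
static my_always_inline unsigned long my_hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? my_hweight32((unsigned int)w) : my_hweight64(w);
}

int main(void)
{
	printf("%lu\n", my_hweight_long(0xF0F0UL)); /* 8 bits set */
	return 0;
}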
diff --git a/include/linux/jiffies.h b/include/linux/jiffies.h
index 535fd3bb1ba8..1ba48a18c1d7 100644
--- a/include/linux/jiffies.h
+++ b/include/linux/jiffies.h
@@ -351,7 +351,7 @@ static inline unsigned long _msecs_to_jiffies(const unsigned int m)
  * directly here and from __msecs_to_jiffies() in the case where
  * constant folding is not possible.
  */
-static inline unsigned long msecs_to_jiffies(const unsigned int m)
+static __always_inline unsigned long msecs_to_jiffies(const unsigned int m)
 {
 	if (__builtin_constant_p(m)) {
 		if ((int)m < 0)
@@ -405,7 +405,7 @@ static inline unsigned long _usecs_to_jiffies(const unsigned int u)
  * directly here and from __msecs_to_jiffies() in the case where
  * constant folding is not possible.
  */
-static inline unsigned long usecs_to_jiffies(const unsigned int u)
+static __always_inline unsigned long usecs_to_jiffies(const unsigned int u)
 {
 	if (__builtin_constant_p(u)) {
 		if (u > jiffies_to_usecs(MAX_JIFFY_OFFSET))