Diffstat (limited to 'include/asm-x86_64')
-rw-r--r--  include/asm-x86_64/atomic.h |  8 +++++++-
-rw-r--r--  include/asm-x86_64/cache.h  |  2 ++
-rw-r--r--  include/asm-x86_64/percpu.h |  7 +++----
3 files changed, 12 insertions(+), 5 deletions(-)
diff --git a/include/asm-x86_64/atomic.h b/include/asm-x86_64/atomic.h
index 4b5cd553e772..cecbf7baa6aa 100644
--- a/include/asm-x86_64/atomic.h
+++ b/include/asm-x86_64/atomic.h
@@ -405,8 +405,14 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
 ({								\
 	int c, old;						\
 	c = atomic_read(v);					\
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+	for (;;) {						\
+		if (unlikely(c == (u)))				\
+			break;					\
+		old = atomic_cmpxchg((v), c, c + (a));		\
+		if (likely(old == c))				\
+			break;					\
 		c = old;					\
+	}							\
 	c != (u);						\
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
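
The atomic.h hunk rewrites atomic_add_unless() from one compressed while
condition into an explicit for (;;) loop with likely()/unlikely() branch
hints; the semantics are unchanged: atomically add a to v unless v equals
u, and evaluate to nonzero iff the add happened. A minimal usage sketch of
the refcount idiom built on atomic_inc_not_zero() (not part of this
commit; the struct and helper names are hypothetical):

struct obj {
	atomic_t refcount;	/* 0 means the object is being torn down */
};

/* Take a reference only while the object is still live. */
static int obj_get(struct obj *o)
{
	return atomic_inc_not_zero(&o->refcount);
}
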
diff --git a/include/asm-x86_64/cache.h b/include/asm-x86_64/cache.h
index 263f0a211ed7..c8043a16152e 100644
--- a/include/asm-x86_64/cache.h
+++ b/include/asm-x86_64/cache.h
@@ -20,6 +20,8 @@
 	__attribute__((__section__(".data.page_aligned")))
 #endif
 
+#define __read_mostly __attribute__((__section__(".data.read_mostly")))
+
 #endif
 
 #endif
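
The cache.h hunk gives x86-64 a __read_mostly annotation: variables marked
with it are placed in the .data.read_mostly section, so data that is read
often but written rarely is grouped away from write-hot data and its cache
lines are not bounced by unrelated stores. A minimal sketch of the
annotation in use (the variable is hypothetical, not from this commit):

/* Read on every invocation, written only during initialization. */
static unsigned int scan_interval __read_mostly = 10;
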
diff --git a/include/asm-x86_64/percpu.h b/include/asm-x86_64/percpu.h
index 29a6b0408f75..4405b4adeaba 100644
--- a/include/asm-x86_64/percpu.h
+++ b/include/asm-x86_64/percpu.h
@@ -26,10 +26,9 @@
 #define percpu_modcopy(pcpudst, src, size)			\
 do {								\
 	unsigned int __i;					\
-	for (__i = 0; __i < NR_CPUS; __i++)			\
-		if (cpu_possible(__i))				\
-			memcpy((pcpudst)+__per_cpu_offset(__i),	\
-			       (src), (size));			\
+	for_each_cpu(__i)					\
+		memcpy((pcpudst)+__per_cpu_offset(__i),		\
+		       (src), (size));				\
 } while (0)
 
 extern void setup_per_cpu_areas(void);
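
The percpu.h hunk replaces the open-coded NR_CPUS scan plus cpu_possible()
test in percpu_modcopy() with the for_each_cpu() iterator. Assuming
for_each_cpu() in this kernel generation expands over the possible-CPU map
(the iterator was later renamed for_each_possible_cpu()), the two loops
below visit the same CPUs; use_cpu() is a hypothetical stand-in for the
loop body:

unsigned int i;

/* Old form: scan every slot, skipping CPUs that cannot exist. */
for (i = 0; i < NR_CPUS; i++)
	if (cpu_possible(i))
		use_cpu(i);

/* New form: the iterator hides the possible-map test. */
for_each_cpu(i)
	use_cpu(i);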