Diffstat (limited to 'include/asm-i386/cmpxchg.h')
-rw-r--r--  include/asm-i386/cmpxchg.h | 14 +++++---------
1 file changed, 5 insertions(+), 9 deletions(-)
diff --git a/include/asm-i386/cmpxchg.h b/include/asm-i386/cmpxchg.h
index 7adcef0cd53..64dcdf46117 100644
--- a/include/asm-i386/cmpxchg.h
+++ b/include/asm-i386/cmpxchg.h
@@ -3,14 +3,16 @@
#include <linux/bitops.h> /* for LOCK_PREFIX */
+/*
+ * Note: if you use set64_bit(), __cmpxchg64(), or their variants,
+ * you need to test for the feature in boot_cpu_data.
+ */
+
#define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((struct __xchg_dummy *)(x))
-
-#ifdef CONFIG_X86_CMPXCHG64
-
/*
 * The semantics of CMPXCHG8B are a bit strange; this is why
* there is a loop and the loading of %%eax and %%edx has to
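
The loop the comment describes is the one in __set_64bit(): CMPXCHG8B performs the store only when EDX:EAX still matches the target, so both halves must be reloaded on every pass. A minimal sketch of that pattern (the __set_64bit_sketch name is illustrative, not part of this patch):

static inline void __set_64bit_sketch(unsigned long long *ptr,
                                      unsigned int low, unsigned int high)
{
        __asm__ __volatile__(
                "\n1:\t"
                "movl (%0), %%eax\n\t"          /* reload current low half */
                "movl 4(%0), %%edx\n\t"         /* reload current high half */
                "lock cmpxchg8b (%0)\n\t"       /* store ECX:EBX iff EDX:EAX still matches */
                "jnz 1b"                        /* another CPU raced us: retry */
                : /* no outputs */
                : "D" (ptr), "b" (low), "c" (high)
                : "ax", "dx", "memory");
}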
@@ -65,8 +67,6 @@ static inline void __set_64bit_var (unsigned long long *ptr,
__set_64bit(ptr, (unsigned int)(value), (unsigned int)((value)>>32ULL) ) : \
__set_64bit(ptr, ll_low(value), ll_high(value)) )
-#endif
-
/*
* Note: no "lock" prefix even on SMP: xchg always implies lock anyway
 * Note 2: xchg has a side effect, so the volatile attribute is necessary,
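
The "no lock prefix" note applies to the __xchg() helpers below this hunk: XCHG with a memory operand asserts the bus lock implicitly, so no LOCK_PREFIX is emitted even on SMP. A minimal sketch of the 32-bit case (the __xchg_sketch name is illustrative; the real helper dispatches on operand size):

static inline unsigned long __xchg_sketch(unsigned long x, volatile void *ptr)
{
        /* No LOCK_PREFIX here: xchg with a memory operand is
         * implicitly locked by the CPU, even on SMP. */
        __asm__ __volatile__("xchgl %0,%1"
                             : "=r" (x)
                             : "m" (*(volatile unsigned long *)ptr), "0" (x)
                             : "memory");
        return x;
}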
@@ -252,8 +252,6 @@ static inline unsigned long cmpxchg_386(volatile void *ptr, unsigned long old,
})
#endif
-#ifdef CONFIG_X86_CMPXCHG64
-
static inline unsigned long long __cmpxchg64(volatile void *ptr, unsigned long long old,
unsigned long long new)
{
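
With the CONFIG_X86_CMPXCHG64 guard removed, __cmpxchg64() is always compiled in, so the boot_cpu_data feature test the new comment asks for becomes the caller's job at runtime. A hypothetical caller, assuming cpu_has_cx8 from <asm/cpufeature.h> and the cmpxchg64() wrapper defined alongside __cmpxchg64() (add64_sketch and its early-return fallback are ours):

#include <asm/cpufeature.h>     /* assumed: cpu_has_cx8 tests boot_cpu_data */

static unsigned long long add64_sketch(unsigned long long *p,
                                       unsigned long long delta)
{
        unsigned long long old, new;

        if (!cpu_has_cx8)       /* 486/early-586 CPUs lack CMPXCHG8B */
                return 0;       /* real code needs a locking fallback here */
        do {
                old = *p;                       /* snapshot current value */
                new = old + delta;
        } while (cmpxchg64(p, old, new) != old);        /* retry if we raced */
        return new;
}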
@@ -289,5 +287,3 @@ static inline unsigned long long __cmpxchg64_local(volatile void *ptr,
((__typeof__(*(ptr)))__cmpxchg64_local((ptr),(unsigned long long)(o),\
(unsigned long long)(n)))
#endif
-
-#endif
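
The cmpxchg64_local() variant shown in the last hunk omits the LOCK prefix, which is safe only for memory no other CPU can reach. A hypothetical per-CPU use (percpu_bump_sketch is illustrative and assumes preemption is already disabled, so the counter is CPU-private):

static void percpu_bump_sketch(unsigned long long *my_counter)
{
        unsigned long long old;

        /* Unlocked CMPXCHG8B: fine for CPU-private data,
         * never for memory shared across CPUs. */
        do {
                old = *my_counter;
        } while (cmpxchg64_local(my_counter, old, old + 1) != old);
}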