Loading arch/x86/Kconfig.cpu +3 −0 Original line number Diff line number Diff line Loading @@ -312,6 +312,9 @@ config X86_CMPXCHG config CMPXCHG_LOCAL def_bool X86_64 || (X86_32 && !M386) config CMPXCHG_DOUBLE def_bool y config X86_L1_CACHE_SHIFT int default "7" if MPENTIUM4 || MPSC Loading arch/x86/include/asm/cmpxchg_32.h +48 −0 Original line number Diff line number Diff line Loading @@ -280,4 +280,52 @@ static inline unsigned long cmpxchg_386(volatile void *ptr, unsigned long old, #endif #define cmpxchg8b(ptr, o1, o2, n1, n2) \ ({ \ char __ret; \ __typeof__(o2) __dummy; \ __typeof__(*(ptr)) __old1 = (o1); \ __typeof__(o2) __old2 = (o2); \ __typeof__(*(ptr)) __new1 = (n1); \ __typeof__(o2) __new2 = (n2); \ asm volatile(LOCK_PREFIX "cmpxchg8b %2; setz %1" \ : "=d"(__dummy), "=a" (__ret), "+m" (*ptr)\ : "a" (__old1), "d"(__old2), \ "b" (__new1), "c" (__new2) \ : "memory"); \ __ret; }) #define cmpxchg8b_local(ptr, o1, o2, n1, n2) \ ({ \ char __ret; \ __typeof__(o2) __dummy; \ __typeof__(*(ptr)) __old1 = (o1); \ __typeof__(o2) __old2 = (o2); \ __typeof__(*(ptr)) __new1 = (n1); \ __typeof__(o2) __new2 = (n2); \ asm volatile("cmpxchg8b %2; setz %1" \ : "=d"(__dummy), "=a"(__ret), "+m" (*ptr)\ : "a" (__old), "d"(__old2), \ "b" (__new1), "c" (__new2), \ : "memory"); \ __ret; }) #define cmpxchg_double(ptr, o1, o2, n1, n2) \ ({ \ BUILD_BUG_ON(sizeof(*(ptr)) != 4); \ VM_BUG_ON((unsigned long)(ptr) % 8); \ cmpxchg8b((ptr), (o1), (o2), (n1), (n2)); \ }) #define cmpxchg_double_local(ptr, o1, o2, n1, n2) \ ({ \ BUILD_BUG_ON(sizeof(*(ptr)) != 4); \ VM_BUG_ON((unsigned long)(ptr) % 8); \ cmpxchg16b_local((ptr), (o1), (o2), (n1), (n2)); \ }) #define system_has_cmpxchg_double() cpu_has_cx8 #endif /* _ASM_X86_CMPXCHG_32_H */ arch/x86/include/asm/cmpxchg_64.h +45 −0 Original line number Diff line number Diff line Loading @@ -151,4 +151,49 @@ extern void __cmpxchg_wrong_size(void); cmpxchg_local((ptr), (o), (n)); \ }) #define cmpxchg16b(ptr, o1, o2, n1, n2) \ ({ \ char __ret; \ 
__typeof__(o2) __junk; \ __typeof__(*(ptr)) __old1 = (o1); \ __typeof__(o2) __old2 = (o2); \ __typeof__(*(ptr)) __new1 = (n1); \ __typeof__(o2) __new2 = (n2); \ asm volatile(LOCK_PREFIX "cmpxchg16b %2;setz %1" \ : "=d"(__junk), "=a"(__ret), "+m" (*ptr) \ : "b"(__new1), "c"(__new2), \ "a"(__old1), "d"(__old2)); \ __ret; }) #define cmpxchg16b_local(ptr, o1, o2, n1, n2) \ ({ \ char __ret; \ __typeof__(o2) __junk; \ __typeof__(*(ptr)) __old1 = (o1); \ __typeof__(o2) __old2 = (o2); \ __typeof__(*(ptr)) __new1 = (n1); \ __typeof__(o2) __new2 = (n2); \ asm volatile("cmpxchg16b %2;setz %1" \ : "=d"(__junk), "=a"(__ret), "+m" (*ptr) \ : "b"(__new1), "c"(__new2), \ "a"(__old1), "d"(__old2)); \ __ret; }) #define cmpxchg_double(ptr, o1, o2, n1, n2) \ ({ \ BUILD_BUG_ON(sizeof(*(ptr)) != 8); \ VM_BUG_ON((unsigned long)(ptr) % 16); \ cmpxchg16b((ptr), (o1), (o2), (n1), (n2)); \ }) #define cmpxchg_double_local(ptr, o1, o2, n1, n2) \ ({ \ BUILD_BUG_ON(sizeof(*(ptr)) != 8); \ VM_BUG_ON((unsigned long)(ptr) % 16); \ cmpxchg16b_local((ptr), (o1), (o2), (n1), (n2)); \ }) #define system_has_cmpxchg_double() cpu_has_cx16 #endif /* _ASM_X86_CMPXCHG_64_H */ arch/x86/include/asm/cpufeature.h +2 −0 Original line number Diff line number Diff line Loading @@ -288,6 +288,8 @@ extern const char * const x86_power_flags[32]; #define cpu_has_hypervisor boot_cpu_has(X86_FEATURE_HYPERVISOR) #define cpu_has_pclmulqdq boot_cpu_has(X86_FEATURE_PCLMULQDQ) #define cpu_has_perfctr_core boot_cpu_has(X86_FEATURE_PERFCTR_CORE) #define cpu_has_cx8 boot_cpu_has(X86_FEATURE_CX8) #define cpu_has_cx16 boot_cpu_has(X86_FEATURE_CX16) #if defined(CONFIG_X86_INVLPG) || defined(CONFIG_X86_64) # define cpu_has_invlpg 1 Loading Loading
arch/x86/Kconfig.cpu (+3 −0), hunk @@ -312,6 +312,9 @@ — adds CMPXCHG_DOUBLE between the existing CMPXCHG_LOCAL and X86_L1_CACHE_SHIFT entries:

config CMPXCHG_LOCAL
	def_bool X86_64 || (X86_32 && !M386)

config CMPXCHG_DOUBLE
	def_bool y

config X86_L1_CACHE_SHIFT
	int
	default "7" if MPENTIUM4 || MPSC
arch/x86/include/asm/cmpxchg_32.h +48 −0 Original line number Diff line number Diff line Loading @@ -280,4 +280,52 @@ static inline unsigned long cmpxchg_386(volatile void *ptr, unsigned long old, #endif #define cmpxchg8b(ptr, o1, o2, n1, n2) \ ({ \ char __ret; \ __typeof__(o2) __dummy; \ __typeof__(*(ptr)) __old1 = (o1); \ __typeof__(o2) __old2 = (o2); \ __typeof__(*(ptr)) __new1 = (n1); \ __typeof__(o2) __new2 = (n2); \ asm volatile(LOCK_PREFIX "cmpxchg8b %2; setz %1" \ : "=d"(__dummy), "=a" (__ret), "+m" (*ptr)\ : "a" (__old1), "d"(__old2), \ "b" (__new1), "c" (__new2) \ : "memory"); \ __ret; }) #define cmpxchg8b_local(ptr, o1, o2, n1, n2) \ ({ \ char __ret; \ __typeof__(o2) __dummy; \ __typeof__(*(ptr)) __old1 = (o1); \ __typeof__(o2) __old2 = (o2); \ __typeof__(*(ptr)) __new1 = (n1); \ __typeof__(o2) __new2 = (n2); \ asm volatile("cmpxchg8b %2; setz %1" \ : "=d"(__dummy), "=a"(__ret), "+m" (*ptr)\ : "a" (__old), "d"(__old2), \ "b" (__new1), "c" (__new2), \ : "memory"); \ __ret; }) #define cmpxchg_double(ptr, o1, o2, n1, n2) \ ({ \ BUILD_BUG_ON(sizeof(*(ptr)) != 4); \ VM_BUG_ON((unsigned long)(ptr) % 8); \ cmpxchg8b((ptr), (o1), (o2), (n1), (n2)); \ }) #define cmpxchg_double_local(ptr, o1, o2, n1, n2) \ ({ \ BUILD_BUG_ON(sizeof(*(ptr)) != 4); \ VM_BUG_ON((unsigned long)(ptr) % 8); \ cmpxchg16b_local((ptr), (o1), (o2), (n1), (n2)); \ }) #define system_has_cmpxchg_double() cpu_has_cx8 #endif /* _ASM_X86_CMPXCHG_32_H */
arch/x86/include/asm/cmpxchg_64.h +45 −0 Original line number Diff line number Diff line Loading @@ -151,4 +151,49 @@ extern void __cmpxchg_wrong_size(void); cmpxchg_local((ptr), (o), (n)); \ }) #define cmpxchg16b(ptr, o1, o2, n1, n2) \ ({ \ char __ret; \ __typeof__(o2) __junk; \ __typeof__(*(ptr)) __old1 = (o1); \ __typeof__(o2) __old2 = (o2); \ __typeof__(*(ptr)) __new1 = (n1); \ __typeof__(o2) __new2 = (n2); \ asm volatile(LOCK_PREFIX "cmpxchg16b %2;setz %1" \ : "=d"(__junk), "=a"(__ret), "+m" (*ptr) \ : "b"(__new1), "c"(__new2), \ "a"(__old1), "d"(__old2)); \ __ret; }) #define cmpxchg16b_local(ptr, o1, o2, n1, n2) \ ({ \ char __ret; \ __typeof__(o2) __junk; \ __typeof__(*(ptr)) __old1 = (o1); \ __typeof__(o2) __old2 = (o2); \ __typeof__(*(ptr)) __new1 = (n1); \ __typeof__(o2) __new2 = (n2); \ asm volatile("cmpxchg16b %2;setz %1" \ : "=d"(__junk), "=a"(__ret), "+m" (*ptr) \ : "b"(__new1), "c"(__new2), \ "a"(__old1), "d"(__old2)); \ __ret; }) #define cmpxchg_double(ptr, o1, o2, n1, n2) \ ({ \ BUILD_BUG_ON(sizeof(*(ptr)) != 8); \ VM_BUG_ON((unsigned long)(ptr) % 16); \ cmpxchg16b((ptr), (o1), (o2), (n1), (n2)); \ }) #define cmpxchg_double_local(ptr, o1, o2, n1, n2) \ ({ \ BUILD_BUG_ON(sizeof(*(ptr)) != 8); \ VM_BUG_ON((unsigned long)(ptr) % 16); \ cmpxchg16b_local((ptr), (o1), (o2), (n1), (n2)); \ }) #define system_has_cmpxchg_double() cpu_has_cx16 #endif /* _ASM_X86_CMPXCHG_64_H */
/*
 * arch/x86/include/asm/cpufeature.h (+2 −0): expose CPUID feature tests for
 * the CX8 (CMPXCHG8B) and CX16 (CMPXCHG16B) instructions, used by
 * system_has_cmpxchg_double() on 32- and 64-bit respectively.
 * Existing neighbouring defines shown for diff context.
 */
#define cpu_has_hypervisor	boot_cpu_has(X86_FEATURE_HYPERVISOR)
#define cpu_has_pclmulqdq	boot_cpu_has(X86_FEATURE_PCLMULQDQ)
#define cpu_has_perfctr_core	boot_cpu_has(X86_FEATURE_PERFCTR_CORE)
#define cpu_has_cx8		boot_cpu_has(X86_FEATURE_CX8)	/* CMPXCHG8B */
#define cpu_has_cx16		boot_cpu_has(X86_FEATURE_CX16)	/* CMPXCHG16B */