
Commit 67c707e4 authored by Linus Torvalds

Merge branch 'x86-cleanups-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull x86 cleanups from Ingo Molnar:
 "The main changes in this cycle were:

   - code patching and cpu_has cleanups (Borislav Petkov)

   - paravirt cleanups (Juergen Gross)

   - TSC cleanup (Thomas Gleixner)

   - ptrace cleanup (Chen Gang)"

* 'x86-cleanups-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  arch/x86/kernel/ptrace.c: Remove unused arg_offs_table
  x86/mm: Align macro defines
  x86/cpu: Provide a config option to disable static_cpu_has
  x86/cpufeature: Remove unused and seldomly used cpu_has_xx macros
  x86/cpufeature: Cleanup get_cpu_cap()
  x86/cpufeature: Move some of the scattered feature bits to x86_capability
  x86/paravirt: Remove paravirt ops pmd_update[_defer] and pte_update_defer
  x86/paravirt: Remove unused pv_apic_ops structure
  x86/tsc: Remove unused tsc_pre_init() hook
  x86: Remove unused function cpu_has_ht_siblings()
  x86/paravirt: Kill some unused patching functions
parents 463eb8ac 0105c8d8
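
The recurring pattern in the hunks below is the removal of the cpu_has_* wrapper macros in favour of explicit boot_cpu_has(X86_FEATURE_*) tests. A minimal sketch of the conversion, using a hypothetical module init function rather than code from this series:

	#include <asm/cpufeature.h>	/* boot_cpu_has(), X86_FEATURE_* bits */

	static int __init example_mod_init(void)
	{
		/* Old style (removed by this series): if (!cpu_has_ssse3) */
		if (!boot_cpu_has(X86_FEATURE_SSSE3))
			return -ENODEV;	/* boot CPU lacks SSSE3 */
		return 0;
	}

Both forms end up testing the boot CPU's x86_capability bitmap; the open-coded form simply drops one layer of per-feature wrapper macros.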
arch/x86/Kconfig (+11 −0)
@@ -349,6 +349,17 @@ config X86_FEATURE_NAMES

	  If in doubt, say Y.

+config X86_FAST_FEATURE_TESTS
+	bool "Fast CPU feature tests" if EMBEDDED
+	default y
+	---help---
+	  Some fast-paths in the kernel depend on the capabilities of the CPU.
+	  Say Y here for the kernel to patch in the appropriate code at runtime
+	  based on the capabilities of the CPU. The infrastructure for patching
+	  code at runtime takes up some additional space; space-constrained
+	  embedded systems may wish to say N here to produce smaller, slightly
+	  slower code.
+
config X86_X2APIC
	bool "Support x2apic"
	depends on X86_LOCAL_APIC && X86_64 && (IRQ_REMAP || HYPERVISOR_GUEST)
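
As the new help text above describes, this option gates the runtime-patching infrastructure behind static_cpu_has(): with it enabled, a feature test in a hot path is patched at boot into a direct branch; with it disabled, static_cpu_has() presumably falls back to the ordinary boot_cpu_has() bitmap test (a reading of the option, not text taken from the commit). A rough sketch of a caller, with hypothetical helper names:

	/* Hot-path feature test: patched at boot when X86_FAST_FEATURE_TESTS=y,
	 * otherwise an ordinary test of the boot CPU's capability bitmap.
	 */
	if (static_cpu_has(X86_FEATURE_AVX2))
		do_avx2_variant();	/* hypothetical fast path */
	else
		do_generic_variant();	/* hypothetical fallback */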
arch/x86/crypto/chacha20_glue.c (+1 −1)
@@ -125,7 +125,7 @@ static struct crypto_alg alg = {

static int __init chacha20_simd_mod_init(void)
{
-	if (!cpu_has_ssse3)
+	if (!boot_cpu_has(X86_FEATURE_SSSE3))
		return -ENODEV;

#ifdef CONFIG_AS_AVX2
arch/x86/crypto/crc32c-intel_glue.c (+1 −1)
@@ -257,7 +257,7 @@ static int __init crc32c_intel_mod_init(void)
	if (!x86_match_cpu(crc32c_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
-	if (cpu_has_pclmulqdq) {
+	if (boot_cpu_has(X86_FEATURE_PCLMULQDQ)) {
		alg.update = crc32c_pcl_intel_update;
		alg.finup = crc32c_pcl_intel_finup;
		alg.digest = crc32c_pcl_intel_digest;
arch/x86/include/asm/cmpxchg_32.h (+1 −1)
@@ -109,6 +109,6 @@ static inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)

#endif

-#define system_has_cmpxchg_double() cpu_has_cx8
+#define system_has_cmpxchg_double() boot_cpu_has(X86_FEATURE_CX8)

#endif /* _ASM_X86_CMPXCHG_32_H */
arch/x86/include/asm/cmpxchg_64.h (+1 −1)
@@ -18,6 +18,6 @@ static inline void set_64bit(volatile u64 *ptr, u64 val)
	cmpxchg_local((ptr), (o), (n));					\
})

-#define system_has_cmpxchg_double() cpu_has_cx16
+#define system_has_cmpxchg_double() boot_cpu_has(X86_FEATURE_CX16)

#endif /* _ASM_X86_CMPXCHG_64_H */
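
system_has_cmpxchg_double() is how callers such as the SLUB allocator decide at runtime whether a double-word compare-and-exchange (CMPXCHG8B on 32-bit, CMPXCHG16B on 64-bit) is available; after this change the macro resolves to a boot_cpu_has() test instead of the removed cpu_has_cx8/cpu_has_cx16 wrappers. A rough sketch of the calling pattern, with hypothetical helpers loosely modeled on that kind of setup code:

	if (system_has_cmpxchg_double())
		enable_lockless_fastpath();	/* hypothetical: use cmpxchg_double() */
	else
		use_locked_slowpath();		/* hypothetical: fall back to a lock */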