Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit e7037bd9 authored by Marc Zyngier, committed by Greg Kroah-Hartman
Browse files

arm64: Add ARCH_WORKAROUND_2 probing



commit a725e3dda1813ed306734823ac4c65ca04e38500 upstream.

As for Spectre variant-2, we rely on SMCCC 1.1 to provide the
discovery mechanism for detecting the SSBD mitigation.

A new capability is also allocated for that purpose, and a
config option.

Reviewed-by: Julien Grall <julien.grall@arm.com>
Reviewed-by: Mark Rutland <mark.rutland@arm.com>
Acked-by: Will Deacon <will.deacon@arm.com>
Reviewed-by: Suzuki K Poulose <suzuki.poulose@arm.com>
Signed-off-by: Marc Zyngier <marc.zyngier@arm.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Marc Zyngier <marc.zyngier@arm.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
parent d8174bd7
Loading
Loading
Loading
Loading
+9 −0
Original line number Diff line number Diff line
@@ -776,6 +776,15 @@ config HARDEN_BRANCH_PREDICTOR

	  If unsure, say Y.

config ARM64_SSBD
	bool "Speculative Store Bypass Disable" if EXPERT
	default y
	help
	  This enables mitigation of the bypassing of previous stores
	  by speculative loads.

	  If unsure, say Y.

menuconfig ARMV8_DEPRECATED
	bool "Emulate deprecated/obsolete ARMv8 instructions"
	depends on COMPAT
+2 −1
Original line number Diff line number Diff line
@@ -36,7 +36,8 @@
#define ARM64_MISMATCHED_CACHE_LINE_SIZE	15
#define ARM64_UNMAP_KERNEL_AT_EL0		16
#define ARM64_HARDEN_BRANCH_PREDICTOR		17
#define ARM64_SSBD				18

#define ARM64_NCAPS				18
#define ARM64_NCAPS				19

#endif /* __ASM_CPUCAPS_H */
+69 −0
Original line number Diff line number Diff line
@@ -211,6 +211,67 @@ void __init arm64_update_smccc_conduit(struct alt_instr *alt,

	*updptr = cpu_to_le32(insn);
}

static void arm64_set_ssbd_mitigation(bool state)
{
	switch (psci_ops.conduit) {
	case PSCI_CONDUIT_HVC:
		arm_smccc_1_1_hvc(ARM_SMCCC_ARCH_WORKAROUND_2, state, NULL);
		break;

	case PSCI_CONDUIT_SMC:
		arm_smccc_1_1_smc(ARM_SMCCC_ARCH_WORKAROUND_2, state, NULL);
		break;

	default:
		WARN_ON_ONCE(1);
		break;
	}
}

static bool has_ssbd_mitigation(const struct arm64_cpu_capabilities *entry,
				    int scope)
{
	struct arm_smccc_res res;
	bool supported = true;

	WARN_ON(scope != SCOPE_LOCAL_CPU || preemptible());

	if (psci_ops.smccc_version == SMCCC_VERSION_1_0)
		return false;

	/*
	 * The probe function return value is either negative
	 * (unsupported or mitigated), positive (unaffected), or zero
	 * (requires mitigation). We only need to do anything in the
	 * last case.
	 */
	switch (psci_ops.conduit) {
	case PSCI_CONDUIT_HVC:
		arm_smccc_1_1_hvc(ARM_SMCCC_ARCH_FEATURES_FUNC_ID,
				  ARM_SMCCC_ARCH_WORKAROUND_2, &res);
		if ((int)res.a0 != 0)
			supported = false;
		break;

	case PSCI_CONDUIT_SMC:
		arm_smccc_1_1_smc(ARM_SMCCC_ARCH_FEATURES_FUNC_ID,
				  ARM_SMCCC_ARCH_WORKAROUND_2, &res);
		if ((int)res.a0 != 0)
			supported = false;
		break;

	default:
		supported = false;
	}

	if (supported) {
		__this_cpu_write(arm64_ssbd_callback_required, 1);
		arm64_set_ssbd_mitigation(true);
	}

	return supported;
}
#endif	/* CONFIG_ARM64_SSBD */

#define MIDR_RANGE(model, min, max) \
@@ -335,6 +396,14 @@ const struct arm64_cpu_capabilities arm64_errata[] = {
		MIDR_ALL_VERSIONS(MIDR_CAVIUM_THUNDERX2),
		.enable = enable_smccc_arch_workaround_1,
	},
#endif
#ifdef CONFIG_ARM64_SSBD
	{
		.desc = "Speculative Store Bypass Disable",
		.def_scope = SCOPE_LOCAL_CPU,
		.capability = ARM64_SSBD,
		.matches = has_ssbd_mitigation,
	},
#endif
	{
	}