In a similar manner to the renaming of ARM64_HARDEN_BRANCH_PREDICTOR
to ARM64_SPECTRE_V2, rename ARM64_SSBD to ARM64_SPECTRE_V4. This isn't
_entirely_ accurate, as we also need to take into account the interaction
with SSBS, but that will be taken care of in subsequent patches.
Signed-off-by: Will Deacon <will@kernel.org>
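
For context, a minimal sketch (not part of this patch) of what the rename means at a typical call site; the helper name spectre_v4_mitigation_present() is hypothetical, while cpus_have_final_cap() is the standard arm64 cpufeature accessor already used in the KVM hunk below:

	#include <asm/cpufeature.h>
	#include <asm/cpucaps.h>

	/* Hypothetical helper: code that previously tested ARM64_SSBD now
	 * tests ARM64_SPECTRE_V4; the mitigation logic itself is unchanged. */
	static inline bool spectre_v4_mitigation_present(void)
	{
		/* Formerly: cpus_have_final_cap(ARM64_SSBD) */
		return cpus_have_final_cap(ARM64_SPECTRE_V4);
	}
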
#define ARM64_HAS_CACHE_IDC 27
#define ARM64_HAS_CACHE_DIC 28
#define ARM64_HW_DBM 29
-#define ARM64_SSBD 30
+#define ARM64_SPECTRE_V4 30
#define ARM64_MISMATCHED_CACHE_TYPE 31
#define ARM64_HAS_STAGE2_FWB 32
#define ARM64_HAS_CRC32 33
#endif
{
.desc = "Speculative Store Bypass Disable",
- .capability = ARM64_SSBD,
+ .capability = ARM64_SPECTRE_V4,
.type = ARM64_CPUCAP_LOCAL_CPU_ERRATUM,
.matches = has_ssbd_mitigation,
.cpu_enable = cpu_enable_ssbd_mitigation,
static inline bool __needs_ssbd_off(struct kvm_vcpu *vcpu)
{
- if (!cpus_have_final_cap(ARM64_SSBD))
+ if (!cpus_have_final_cap(ARM64_SPECTRE_V4))
return false;
return !(vcpu->arch.workaround_flags & VCPU_WORKAROUND_2_FLAG);