#endif /* GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT */
+ .macro __mitigate_spectre_bhb_loop tmp
+#ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
+ mov \tmp, #32
+.Lspectre_bhb_loop\@:
+ b . + 4
+ subs \tmp, \tmp, #1
+ b.ne .Lspectre_bhb_loop\@
+ sb
+#endif /* CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY */
+ .endm
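As a sketch, here is what one expansion of the loop looks like when invoked
as __mitigate_spectre_bhb_loop x30 (the "0" label suffix stands in for the
unique counter the assembler substitutes for \@):

	// Sketch of one expansion of __mitigate_spectre_bhb_loop x30.
	mov	x30, #32		// 32 loop iterations
.Lspectre_bhb_loop0:
	b	. + 4			// a taken branch to the very next
					// instruction: fills the branch
					// history with benign entries
	subs	x30, x30, #1
	b.ne	.Lspectre_bhb_loop0
	sb				// speculation barrier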
+
+ /* Saves/restores x0-x3 to/from the stack */
+ .macro __mitigate_spectre_bhb_fw
+#ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
+ stp x0, x1, [sp, #-16]!
+ stp x2, x3, [sp, #-16]!
+ mov w0, #ARM_SMCCC_ARCH_WORKAROUND_3
+alternative_cb smccc_patch_fw_mitigation_conduit
+ nop // Patched to SMC/HVC #0
+alternative_cb_end
+ ldp x2, x3, [sp], #16
+ ldp x0, x1, [sp], #16
+#endif /* CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY */
+ .endm
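The nop inside the alternative is live-patched at boot. Assuming
smccc_patch_fw_mitigation_conduit does what its name and the inline comment
suggest, the patched instruction is one of:

	smc	#0		// SMCCC conduit is EL3 firmware
	// ...or, under a hypervisor...
	hvc	#0		// SMCCC conduit is EL2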
#endif /* __ASM_ASSEMBLER_H */
sub \dst, \dst, PAGE_SIZE
.endm
- .macro tramp_ventry, vector_start, regsize, kpti
+
+#define BHB_MITIGATION_NONE 0
+#define BHB_MITIGATION_LOOP 1
+#define BHB_MITIGATION_FW 2
+
+ .macro tramp_ventry, vector_start, regsize, kpti, bhb
.align 7
1:
.if \regsize == 64
msr tpidrro_el0, x30 // Restored in kernel_ventry
.endif
+ .if \bhb == BHB_MITIGATION_LOOP
+ /*
+ * This sequence must appear before the first indirect branch, i.e. the
+ * ret out of tramp_ventry. It appears here because x30 is free.
+ */
+ __mitigate_spectre_bhb_loop x30
+ .endif // \bhb == BHB_MITIGATION_LOOP
+
.if \kpti == 1
/*
* Defend against branch aliasing attacks by pushing a dummy
ldr x30, =vectors
.endif // \kpti == 1
+ .if \bhb == BHB_MITIGATION_FW
+ /*
+ * The firmware sequence must also appear before the first indirect
+ * branch, i.e. the ret out of tramp_ventry, but it additionally needs
+ * the stack to be mapped so it can save/restore the registers the SMC
+ * clobbers.
+ */
+ __mitigate_spectre_bhb_fw
+ .endif // \bhb == BHB_MITIGATION_FW
+
add x30, x30, #(1b - \vector_start + 4)
ret
.org 1b + 128 // Did we overflow the ventry slot?
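Putting the pieces together, a 64-bit ventry generated with
bhb=BHB_MITIGATION_LOOP roughly takes this shape (an illustrative sketch
only; the kpti return-stack defence is elided):

	// Rough shape of one 64-bit loop-mitigated ventry.
1:
	msr	tpidrro_el0, x30		// stash x30; it is now free
	__mitigate_spectre_bhb_loop x30		// scrub the branch history
	ldr	x30, =vectors			// kpti == 1 path
	add	x30, x30, #(1b - \vector_start + 4)
	ret					// first indirect branch; the
						// history is already clean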
.macro tramp_exit, regsize = 64
adr x30, tramp_vectors
+#ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
+ add x30, x30, SZ_4K
+#endif
msr vbar_el1, x30
ldr lr, [sp, #S_LR]
tramp_unmap_kernel x29
sb
.endm
- .macro generate_tramp_vector, kpti
+ .macro generate_tramp_vector, kpti, bhb
.Lvector_start\@:
.space 0x400
.rept 4
- tramp_ventry .Lvector_start\@, 64, \kpti
+ tramp_ventry .Lvector_start\@, 64, \kpti, \bhb
.endr
.rept 4
- tramp_ventry .Lvector_start\@, 32, \kpti
+ tramp_ventry .Lvector_start\@, 32, \kpti, \bhb
.endr
.endm
#ifdef CONFIG_UNMAP_KERNEL_AT_EL0
/*
* Exception vectors trampoline.
+ * The order must match __bp_harden_el1_vectors and the
+ * arm64_bp_harden_el1_vectors enum.
*/
.pushsection ".entry.tramp.text", "ax"
.align 11
SYM_CODE_START_NOALIGN(tramp_vectors)
- generate_tramp_vector kpti=1
+#ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
+ generate_tramp_vector kpti=1, bhb=BHB_MITIGATION_LOOP
+ generate_tramp_vector kpti=1, bhb=BHB_MITIGATION_FW
+#endif /* CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY */
+ generate_tramp_vector kpti=1, bhb=BHB_MITIGATION_NONE
SYM_CODE_END(tramp_vectors)
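The offsets implied here can be derived from the directives above rather
than from anything the patch states outright: each generated set is 0x400
bytes of .space plus eight 128-byte ventries (.align 7, .org 1b + 128),
i.e. 2K per set.

/*
 * Derived layout of tramp_vectors with
 * CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY=y:
 *
 *   tramp_vectors + 0x0000 : BHB_MITIGATION_LOOP set
 *   tramp_vectors + 0x0800 : BHB_MITIGATION_FW   set
 *   tramp_vectors + 0x1000 : BHB_MITIGATION_NONE set
 *
 * This is presumably why tramp_exit adds SZ_4K: the exit path
 * points VBAR_EL1 at the mitigation-free set, the history
 * scrubbing being needed on entry, not on the way out.
 */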
SYM_CODE_START(tramp_exit_native)
* Exception vectors for spectre mitigations on entry from EL1 when
* kpti is not in use.
*/
- .macro generate_el1_vector
+ .macro generate_el1_vector, bhb
.Lvector_start\@:
kernel_ventry 1, t, 64, sync // Synchronous EL1t
kernel_ventry 1, t, 64, irq // IRQ EL1t
kernel_ventry 1, h, 64, error // Error EL1h
.rept 4
- tramp_ventry .Lvector_start\@, 64, kpti=0
+ tramp_ventry .Lvector_start\@, 64, 0, \bhb
.endr
.rept 4
- tramp_ventry .Lvector_start\@, 32, kpti=0
+ tramp_ventry .Lvector_start\@, 32, 0, \bhb
.endr
.endm
+/* The order must match tramp_vectors and the arm64_bp_harden_el1_vectors enum. */
.pushsection ".entry.text", "ax"
.align 11
SYM_CODE_START(__bp_harden_el1_vectors)
- generate_el1_vector
+#ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
+ generate_el1_vector bhb=BHB_MITIGATION_LOOP
+ generate_el1_vector bhb=BHB_MITIGATION_FW
+#endif /* CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY */
SYM_CODE_END(__bp_harden_el1_vectors)
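By the same arithmetic, each EL1 set is also 2K (eight kernel ventries plus
eight tramp ventries, 128 bytes each), giving the presumed correspondence
with the enum named in the comments:

/*
 * Presumed slots of arm64_bp_harden_el1_vectors (inferred from
 * the emission order above, not spelled out in this hunk):
 *
 *   __bp_harden_el1_vectors + 0x0000 : LOOP set
 *   __bp_harden_el1_vectors + 0x0800 : FW set
 */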
.popsection