From cacf7f19b1708a5c97cfdcbda7627c27f5a9c642 Mon Sep 17 00:00:00 2001
From: Mark Brown <broonie@kernel.org>
Date: Tue, 14 Dec 2021 15:27:14 +0000
Subject: [PATCH] arm64: Use BTI C directly and unconditionally

Now that we have a macro for BTI C that looks like a regular
instruction, change all users of the current BTI_C macro to emit a
BTI C directly, and remove the macro.

This does mean that we now unconditionally BTI annotate all assembly
functions, making them worse in this respect than compiler-generated
code, which emits the landing pads only when BTI is enabled for the
build. The overhead should be minimal on CPUs with a reasonable HINT
implementation.

Signed-off-by: Mark Brown <broonie@kernel.org>
Reviewed-by: Ard Biesheuvel
Acked-by: Mark Rutland
Link: https://lore.kernel.org/r/20211214152714.2380849-4-broonie@kernel.org
Signed-off-by: Catalin Marinas
---
 arch/arm64/include/asm/linkage.h | 22 ++++++----------------
 arch/arm64/kernel/entry-ftrace.S |  8 ++------
 arch/arm64/lib/kasan_sw_tags.S   |  4 +---
 3 files changed, 9 insertions(+), 25 deletions(-)

diff --git a/arch/arm64/include/asm/linkage.h b/arch/arm64/include/asm/linkage.h
index 1cfa8bb33eddf..9065e4749b42a 100644
--- a/arch/arm64/include/asm/linkage.h
+++ b/arch/arm64/include/asm/linkage.h
@@ -4,16 +4,6 @@
 #define __ALIGN		.align 2
 #define __ALIGN_STR	".align 2"
 
-#if defined(CONFIG_ARM64_BTI_KERNEL) && defined(__aarch64__)
-
-#define BTI_C bti c ;
-
-#else
-
-#define BTI_C
-
-#endif
-
 /*
  * When using in-kernel BTI we need to ensure that PCS-conformant
  * assembly functions have suitable annotations.  Override
@@ -23,27 +13,27 @@
  */
 #define SYM_FUNC_START(name)				\
 	SYM_START(name, SYM_L_GLOBAL, SYM_A_ALIGN)	\
-	BTI_C
+	bti c ;
 
 #define SYM_FUNC_START_NOALIGN(name)			\
 	SYM_START(name, SYM_L_GLOBAL, SYM_A_NONE)	\
-	BTI_C
+	bti c ;
 
 #define SYM_FUNC_START_LOCAL(name)			\
 	SYM_START(name, SYM_L_LOCAL, SYM_A_ALIGN)	\
-	BTI_C
+	bti c ;
 
 #define SYM_FUNC_START_LOCAL_NOALIGN(name)		\
 	SYM_START(name, SYM_L_LOCAL, SYM_A_NONE)	\
-	BTI_C
+	bti c ;
 
 #define SYM_FUNC_START_WEAK(name)			\
 	SYM_START(name, SYM_L_WEAK, SYM_A_ALIGN)	\
-	BTI_C
+	bti c ;
 
 #define SYM_FUNC_START_WEAK_NOALIGN(name)		\
 	SYM_START(name, SYM_L_WEAK, SYM_A_NONE)	\
-	BTI_C
+	bti c ;
 
 /*
  * Annotate a function as position independent, i.e., safe to be called before
diff --git a/arch/arm64/kernel/entry-ftrace.S b/arch/arm64/kernel/entry-ftrace.S
index 8cf970d219f5d..e535480a4069b 100644
--- a/arch/arm64/kernel/entry-ftrace.S
+++ b/arch/arm64/kernel/entry-ftrace.S
@@ -77,17 +77,13 @@
 	.endm
 
 SYM_CODE_START(ftrace_regs_caller)
-#ifdef BTI_C
-	BTI_C
-#endif
+	bti	c
 	ftrace_regs_entry	1
 	b	ftrace_common
 SYM_CODE_END(ftrace_regs_caller)
 
 SYM_CODE_START(ftrace_caller)
-#ifdef BTI_C
-	BTI_C
-#endif
+	bti	c
 	ftrace_regs_entry	0
 	b	ftrace_common
 SYM_CODE_END(ftrace_caller)
diff --git a/arch/arm64/lib/kasan_sw_tags.S b/arch/arm64/lib/kasan_sw_tags.S
index 5b04464c045eb..20784ce75defb 100644
--- a/arch/arm64/lib/kasan_sw_tags.S
+++ b/arch/arm64/lib/kasan_sw_tags.S
@@ -38,9 +38,7 @@
  * incremented by 256 prior to return).
  */
 SYM_CODE_START(__hwasan_tag_mismatch)
-#ifdef BTI_C
-	BTI_C
-#endif
+	bti	c
 	add	x29, sp, #232
 	stp	x2, x3, [sp, #8 * 2]
 	stp	x4, x5, [sp, #8 * 4]
-- 
2.39.5
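
For illustration, here is roughly what the new SYM_FUNC_START behaviour
looks like at a definition site (a minimal sketch; the function name and
body are hypothetical, not taken from the patch). Every function
declared through these macros now begins with a BTI C landing pad:

	SYM_FUNC_START(example_func)	// expands to the aligned symbol
					// definition followed by "bti c ;"
		mov	x0, #0		// hypothetical body
		ret
	SYM_FUNC_END(example_func)

Functions defined with SYM_CODE_START, such as ftrace_regs_caller and
__hwasan_tag_mismatch above, do not get this automatic annotation, which
is why the patch open-codes "bti c" in them instead. Since BTI
instructions are encoded in the HINT space, "bti c" executes as a NOP on
CPUs without BTI, which is why the unconditional annotation is expected
to cost little.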