u8 mte_get_random_tag(void);
void *mte_set_mem_tag_range(void *addr, size_t size, u8 tag);
+void mte_enable_kernel(void);
+
#else /* CONFIG_ARM64_MTE */
static inline u8 mte_get_ptr_tag(void *ptr)
return addr;
}
+static inline void mte_enable_kernel(void)
+{
+}
+
#endif /* CONFIG_ARM64_MTE */
#endif /* __ASSEMBLY__ */
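The empty stub added for the !CONFIG_ARM64_MTE case keeps callers free of #ifdefs: code elsewhere in the series (for example the KASAN hardware tag-based mode setup, not shown here) can call mte_enable_kernel() unconditionally and the call compiles away when MTE support is not built in. A minimal userspace sketch of that pattern (names reused for illustration only; this is not the kernel implementation):

/*
 * Userspace sketch of the stub pattern above: with the config symbol off,
 * mte_enable_kernel() collapses to an empty static inline, so a caller can
 * invoke it unconditionally. CONFIG_ARM64_MTE is only a stand-in macro here.
 */
#include <stdio.h>

/* #define CONFIG_ARM64_MTE */

#ifdef CONFIG_ARM64_MTE
static inline void mte_enable_kernel(void)
{
	printf("would program SCTLR_EL1 for synchronous tag checking\n");
}
#else
static inline void mte_enable_kernel(void)
{
}
#endif

int main(void)
{
	mte_enable_kernel();	/* builds and runs whether or not the option is set */
	return 0;
}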
return ptr;
}
+void mte_enable_kernel(void)
+{
+ /* Enable MTE Sync Mode for EL1. */
+ sysreg_clear_set(sctlr_el1, SCTLR_ELx_TCF_MASK, SCTLR_ELx_TCF_SYNC);
+ isb();
+}
+
static void update_sctlr_el1_tcf0(u64 tcf0)
{
/* ISB required for the kernel uaccess routines */
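mte_enable_kernel() switches EL1 to synchronous tag check faults by rewriting the SCTLR_EL1.TCF field through sysreg_clear_set(), and the following isb() ensures the new setting takes effect before subsequent kernel accesses. A standalone model of that read-modify-write (the SCTLR_ELx_TCF_* values restate the kernel's definitions and are repeated here only for illustration):

/*
 * Userspace model of sysreg_clear_set(sctlr_el1, SCTLR_ELx_TCF_MASK,
 * SCTLR_ELx_TCF_SYNC): clear the 2-bit TCF field, then select the
 * synchronous (0b01) tag check fault mode.
 */
#include <stdint.h>
#include <stdio.h>

#define SCTLR_ELx_TCF_SHIFT	40
#define SCTLR_ELx_TCF_MASK	(UINT64_C(0x3) << SCTLR_ELx_TCF_SHIFT)
#define SCTLR_ELx_TCF_SYNC	(UINT64_C(0x1) << SCTLR_ELx_TCF_SHIFT)

/* Stand-in for the real macro, which reads and writes the system register. */
static uint64_t sysreg_clear_set_model(uint64_t old, uint64_t clear, uint64_t set)
{
	return (old & ~clear) | set;
}

int main(void)
{
	uint64_t sctlr = UINT64_C(0x2) << SCTLR_ELx_TCF_SHIFT;	/* pretend TCF was async */

	sctlr = sysreg_clear_set_model(sctlr, SCTLR_ELx_TCF_MASK, SCTLR_ELx_TCF_SYNC);
	printf("SCTLR_EL1.TCF = %llu (1 == synchronous tag check faults)\n",
	       (unsigned long long)((sctlr & SCTLR_ELx_TCF_MASK) >> SCTLR_ELx_TCF_SHIFT));
	return 0;
}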
#define TCR_CACHE_FLAGS TCR_IRGN_WBWA | TCR_ORGN_WBWA
#ifdef CONFIG_KASAN_SW_TAGS
-#define TCR_KASAN_FLAGS TCR_TBI1 | TCR_TBID1
+#define TCR_KASAN_SW_FLAGS TCR_TBI1 | TCR_TBID1
#else
-#define TCR_KASAN_FLAGS 0
+#define TCR_KASAN_SW_FLAGS 0
+#endif
+
+#ifdef CONFIG_KASAN_HW_TAGS
+#define TCR_KASAN_HW_FLAGS SYS_TCR_EL1_TCMA1 | TCR_TBI1
+#else
+#define TCR_KASAN_HW_FLAGS 0
#endif
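Renaming TCR_KASAN_FLAGS to TCR_KASAN_SW_FLAGS and adding TCR_KASAN_HW_FLAGS keeps the two tagging modes separate: software tags only need top-byte-ignore for TTBR1 addresses, while in-kernel MTE also wants accesses carrying the match-all tag 0xFF to go unchecked (TCMA1). A small sketch that prints the two masks; the bit positions restate the TCR_EL1 layout assumed by the kernel headers and are repeated purely for illustration:

/*
 * Sketch of the two flag sets defined above (TBI1 bit 38, TBID1 bit 52,
 * TCMA1 bit 58, mirroring the kernel's TCR_EL1 definitions).
 */
#include <stdint.h>
#include <stdio.h>

#define TCR_TBI1		(UINT64_C(1) << 38)	/* top-byte-ignore for TTBR1 addresses */
#define TCR_TBID1		(UINT64_C(1) << 52)	/* TBI1 applies to data accesses only */
#define SYS_TCR_EL1_TCMA1	(UINT64_C(1) << 58)	/* tag 0xFF is unchecked via TTBR1 */

#define TCR_KASAN_SW_FLAGS	(TCR_TBI1 | TCR_TBID1)
#define TCR_KASAN_HW_FLAGS	(SYS_TCR_EL1_TCMA1 | TCR_TBI1)

int main(void)
{
	printf("SW tags (KASAN_SW_TAGS): %#018llx\n",
	       (unsigned long long)TCR_KASAN_SW_FLAGS);
	printf("HW tags (KASAN_HW_TAGS): %#018llx\n",
	       (unsigned long long)TCR_KASAN_HW_FLAGS);
	return 0;
}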
/*
*/
mov_q x5, MAIR_EL1_SET
#ifdef CONFIG_ARM64_MTE
+ mte_tcr .req x20
+
+ mov mte_tcr, #0
+
/*
* Update MAIR_EL1, GCR_EL1 and TFSR*_EL1 if MTE is supported
* (ID_AA64PFR1_EL1[11:8] > 1).
/* clear any pending tag check faults in TFSR*_EL1 */
msr_s SYS_TFSR_EL1, xzr
msr_s SYS_TFSRE0_EL1, xzr
+
+ /* set the TCR_EL1 bits */
+ mov_q mte_tcr, TCR_KASAN_HW_FLAGS
1:
#endif
msr mair_el1, x5
*/
mov_q x10, TCR_TxSZ(VA_BITS) | TCR_CACHE_FLAGS | TCR_SMP_FLAGS | \
TCR_TG_FLAGS | TCR_KASLR_FLAGS | TCR_ASID16 | \
- TCR_TBI0 | TCR_A1 | TCR_KASAN_FLAGS
+ TCR_TBI0 | TCR_A1 | TCR_KASAN_SW_FLAGS
+#ifdef CONFIG_ARM64_MTE
+ orr x10, x10, mte_tcr
+ .unreq mte_tcr
+#endif
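The hardware-tag TCR bits travel through the mte_tcr alias for x20: it starts at zero, is loaded with TCR_KASAN_HW_FLAGS only after the ID_AA64PFR1_EL1 check confirms MTE, and is OR'd into x10 here, so the extra bits are a no-op on CPUs without MTE. A rough C model of that control flow (flag values are placeholders, not the real encodings):

/*
 * Rough C model of the TCR_EL1 composition above: the SW-tags flags are
 * compile-time constants folded into the mov_q, while the HW-tags flags are
 * OR'd in only when ID_AA64PFR1_EL1[11:8] reports MTE support at runtime.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define TCR_BASE_FLAGS		(UINT64_C(1) << 0)	/* stands in for TxSZ/cache/TG/... */
#define TCR_KASAN_SW_FLAGS	(UINT64_C(1) << 1)	/* 0 when KASAN_SW_TAGS is off */
#define TCR_KASAN_HW_FLAGS	(UINT64_C(1) << 2)	/* 0 when KASAN_HW_TAGS is off */

static uint64_t compute_tcr(bool cpu_has_mte)
{
	uint64_t tcr = TCR_BASE_FLAGS | TCR_KASAN_SW_FLAGS;	/* mov_q x10, ... */
	uint64_t mte_tcr = 0;					/* mov mte_tcr, #0 */

	if (cpu_has_mte)					/* ID_AA64PFR1_EL1[11:8] > 1 */
		mte_tcr = TCR_KASAN_HW_FLAGS;			/* mov_q mte_tcr, TCR_KASAN_HW_FLAGS */

	return tcr | mte_tcr;					/* orr x10, x10, mte_tcr */
}

int main(void)
{
	printf("TCR without MTE: %#llx\n", (unsigned long long)compute_tcr(false));
	printf("TCR with MTE:    %#llx\n", (unsigned long long)compute_tcr(true));
	return 0;
}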
tcr_clear_errata_bits x10, x9, x5
#ifdef CONFIG_ARM64_VA_BITS_52