* registers are only 32-bits wide but are stored as 64-bit values for
* convenience
******************************************************************************/
-#define CTX_SYSREGS_OFFSET (CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
+#define CTX_EL1_SYSREGS_OFFSET (CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
#define CTX_SPSR_EL1 U(0x0)
#define CTX_ELR_EL1 U(0x8)
#define CTX_SCTLR_EL1 U(0x10)
#endif /* CTX_INCLUDE_MTE_REGS */
/*
- * S-EL2 register set
+ * End of system registers.
+ */
+#define CTX_EL1_SYSREGS_END CTX_MTE_REGS_END
+
+/*
+ * EL2 register set
*/
#if CTX_INCLUDE_EL2_REGS
* AMEVCNTVOFF1<n>_EL2
* ICH_LR<n>_EL2
*/
-#define CTX_ACTLR_EL2 (CTX_MTE_REGS_END + U(0x0))
-#define CTX_AFSR0_EL2 (CTX_MTE_REGS_END + U(0x8))
-#define CTX_AFSR1_EL2 (CTX_MTE_REGS_END + U(0x10))
-#define CTX_AMAIR_EL2 (CTX_MTE_REGS_END + U(0x18))
-#define CTX_CNTHCTL_EL2 (CTX_MTE_REGS_END + U(0x20))
-#define CTX_CNTHP_CTL_EL2 (CTX_MTE_REGS_END + U(0x28))
-#define CTX_CNTHP_CVAL_EL2 (CTX_MTE_REGS_END + U(0x30))
-#define CTX_CNTHP_TVAL_EL2 (CTX_MTE_REGS_END + U(0x38))
-#define CTX_CNTPOFF_EL2 (CTX_MTE_REGS_END + U(0x40))
-#define CTX_CNTVOFF_EL2 (CTX_MTE_REGS_END + U(0x48))
-#define CTX_CPTR_EL2 (CTX_MTE_REGS_END + U(0x50))
-#define CTX_DBGVCR32_EL2 (CTX_MTE_REGS_END + U(0x58))
-#define CTX_ELR_EL2 (CTX_MTE_REGS_END + U(0x60))
-#define CTX_ESR_EL2 (CTX_MTE_REGS_END + U(0x68))
-#define CTX_FAR_EL2 (CTX_MTE_REGS_END + U(0x70))
-#define CTX_FPEXC32_EL2 (CTX_MTE_REGS_END + U(0x78))
-#define CTX_HACR_EL2 (CTX_MTE_REGS_END + U(0x80))
-#define CTX_HAFGRTR_EL2 (CTX_MTE_REGS_END + U(0x88))
-#define CTX_HCR_EL2 (CTX_MTE_REGS_END + U(0x90))
-#define CTX_HDFGRTR_EL2 (CTX_MTE_REGS_END + U(0x98))
-#define CTX_HDFGWTR_EL2 (CTX_MTE_REGS_END + U(0xA0))
-#define CTX_HFGITR_EL2 (CTX_MTE_REGS_END + U(0xA8))
-#define CTX_HFGRTR_EL2 (CTX_MTE_REGS_END + U(0xB0))
-#define CTX_HFGWTR_EL2 (CTX_MTE_REGS_END + U(0xB8))
-#define CTX_HPFAR_EL2 (CTX_MTE_REGS_END + U(0xC0))
-#define CTX_HSTR_EL2 (CTX_MTE_REGS_END + U(0xC8))
-#define CTX_ICC_SRE_EL2 (CTX_MTE_REGS_END + U(0xD0))
-#define CTX_ICH_EISR_EL2 (CTX_MTE_REGS_END + U(0xD8))
-#define CTX_ICH_ELRSR_EL2 (CTX_MTE_REGS_END + U(0xE0))
-#define CTX_ICH_HCR_EL2 (CTX_MTE_REGS_END + U(0xE8))
-#define CTX_ICH_MISR_EL2 (CTX_MTE_REGS_END + U(0xF0))
-#define CTX_ICH_VMCR_EL2 (CTX_MTE_REGS_END + U(0xF8))
-#define CTX_ICH_VTR_EL2 (CTX_MTE_REGS_END + U(0x100))
-#define CTX_MAIR_EL2 (CTX_MTE_REGS_END + U(0x108))
-#define CTX_MDCR_EL2 (CTX_MTE_REGS_END + U(0x110))
-#define CTX_MPAM2_EL2 (CTX_MTE_REGS_END + U(0x118))
-#define CTX_MPAMHCR_EL2 (CTX_MTE_REGS_END + U(0x120))
-#define CTX_MPAMVPM0_EL2 (CTX_MTE_REGS_END + U(0x128))
-#define CTX_MPAMVPM1_EL2 (CTX_MTE_REGS_END + U(0x130))
-#define CTX_MPAMVPM2_EL2 (CTX_MTE_REGS_END + U(0x138))
-#define CTX_MPAMVPM3_EL2 (CTX_MTE_REGS_END + U(0x140))
-#define CTX_MPAMVPM4_EL2 (CTX_MTE_REGS_END + U(0x148))
-#define CTX_MPAMVPM5_EL2 (CTX_MTE_REGS_END + U(0x150))
-#define CTX_MPAMVPM6_EL2 (CTX_MTE_REGS_END + U(0x158))
-#define CTX_MPAMVPM7_EL2 (CTX_MTE_REGS_END + U(0x160))
-#define CTX_MPAMVPMV_EL2 (CTX_MTE_REGS_END + U(0x168))
-#define CTX_RMR_EL2 (CTX_MTE_REGS_END + U(0x170))
-#define CTX_SCTLR_EL2 (CTX_MTE_REGS_END + U(0x178))
-#define CTX_SPSR_EL2 (CTX_MTE_REGS_END + U(0x180))
-#define CTX_SP_EL2 (CTX_MTE_REGS_END + U(0x188))
-#define CTX_TCR_EL2 (CTX_MTE_REGS_END + U(0x190))
-#define CTX_TPIDR_EL2 (CTX_MTE_REGS_END + U(0x198))
-#define CTX_TTBR0_EL2 (CTX_MTE_REGS_END + U(0x1A0))
-#define CTX_VBAR_EL2 (CTX_MTE_REGS_END + U(0x1A8))
-#define CTX_VMPIDR_EL2 (CTX_MTE_REGS_END + U(0x1B0))
-#define CTX_VPIDR_EL2 (CTX_MTE_REGS_END + U(0x1B8))
-#define CTX_VTCR_EL2 (CTX_MTE_REGS_END + U(0x1C0))
-#define CTX_VTTBR_EL2 (CTX_MTE_REGS_END + U(0x1C8))
-#define CTX_ZCR_EL2 (CTX_MTE_REGS_END + U(0x1B0))
-
+#define CTX_EL2_SYSREGS_OFFSET (CTX_EL1_SYSREGS_OFFSET + CTX_EL1_SYSREGS_END)
+
+#define CTX_ACTLR_EL2 U(0x0)
+#define CTX_AFSR0_EL2 U(0x8)
+#define CTX_AFSR1_EL2 U(0x10)
+#define CTX_AMAIR_EL2 U(0x18)
+#define CTX_CNTHCTL_EL2 U(0x20)
+#define CTX_CNTHP_CTL_EL2 U(0x28)
+#define CTX_CNTHP_CVAL_EL2 U(0x30)
+#define CTX_CNTHP_TVAL_EL2 U(0x38)
+#define CTX_CNTVOFF_EL2 U(0x40)
+#define CTX_CPTR_EL2 U(0x48)
+#define CTX_DBGVCR32_EL2 U(0x50)
+#define CTX_ELR_EL2 U(0x58)
+#define CTX_ESR_EL2 U(0x60)
+#define CTX_FAR_EL2 U(0x68)
+#define CTX_FPEXC32_EL2 U(0x70)
+#define CTX_HACR_EL2 U(0x78)
+#define CTX_HCR_EL2 U(0x80)
+#define CTX_HPFAR_EL2 U(0x88)
+#define CTX_HSTR_EL2 U(0x90)
+#define CTX_ICC_SRE_EL2 U(0x98)
+#define CTX_ICH_HCR_EL2 U(0xa0)
+#define CTX_ICH_VMCR_EL2 U(0xa8)
+#define CTX_MAIR_EL2 U(0xb0)
+#define CTX_MDCR_EL2 U(0xb8)
+#define CTX_PMSCR_EL2 U(0xc0)
+#define CTX_SCTLR_EL2 U(0xc8)
+#define CTX_SPSR_EL2 U(0xd0)
+#define CTX_SP_EL2 U(0xd8)
+#define CTX_TCR_EL2 U(0xe0)
+#define CTX_TRFCR_EL2 U(0xe8)
+#define CTX_TTBR0_EL2 U(0xf0)
+#define CTX_VBAR_EL2 U(0xf8)
+#define CTX_VMPIDR_EL2 U(0x100)
+#define CTX_VPIDR_EL2 U(0x108)
+#define CTX_VTCR_EL2 U(0x110)
+#define CTX_VTTBR_EL2 U(0x118)
+
+/* Only if MTE registers in use */
+#define CTX_TFSR_EL2 U(0x120)
+
+/* Only if ENABLE_MPAM_FOR_LOWER_ELS==1 */
+#define CTX_MPAM2_EL2 U(0x128)
+#define CTX_MPAMHCR_EL2 U(0x130)
+#define CTX_MPAMVPM0_EL2 U(0x138)
+#define CTX_MPAMVPM1_EL2 U(0x140)
+#define CTX_MPAMVPM2_EL2 U(0x148)
+#define CTX_MPAMVPM3_EL2 U(0x150)
+#define CTX_MPAMVPM4_EL2 U(0x158)
+#define CTX_MPAMVPM5_EL2 U(0x160)
+#define CTX_MPAMVPM6_EL2 U(0x168)
+#define CTX_MPAMVPM7_EL2 U(0x170)
+#define CTX_MPAMVPMV_EL2 U(0x178)
+
+/* Starting with Armv8.6 */
+#define CTX_HAFGRTR_EL2 U(0x180)
+#define CTX_HDFGRTR_EL2 U(0x188)
+#define CTX_HDFGWTR_EL2 U(0x190)
+#define CTX_HFGITR_EL2 U(0x198)
+#define CTX_HFGRTR_EL2 U(0x1a0)
+#define CTX_HFGWTR_EL2 U(0x1a8)
+#define CTX_CNTPOFF_EL2 U(0x1b0)
+
+/* Starting with Armv8.4 */
+#define CTX_CNTHPS_CTL_EL2 U(0x1b8)
+#define CTX_CNTHPS_CVAL_EL2 U(0x1c0)
+#define CTX_CNTHPS_TVAL_EL2 U(0x1c8)
+#define CTX_CNTHVS_CTL_EL2 U(0x1d0)
+#define CTX_CNTHVS_CVAL_EL2 U(0x1d8)
+#define CTX_CNTHVS_TVAL_EL2 U(0x1e0)
+#define CTX_CNTHV_CTL_EL2 U(0x1e8)
+#define CTX_CNTHV_CVAL_EL2 U(0x1f0)
+#define CTX_CNTHV_TVAL_EL2 U(0x1f8)
+#define CTX_CONTEXTIDR_EL2 U(0x200)
+#define CTX_SDER32_EL2 U(0x208)
+#define CTX_TTBR1_EL2 U(0x210)
+#define CTX_VDISR_EL2 U(0x218)
+#define CTX_VNCR_EL2 U(0x220)
+#define CTX_VSESR_EL2 U(0x228)
+#define CTX_VSTCR_EL2 U(0x230)
+#define CTX_VSTTBR_EL2 U(0x238)
+
+/* Starting with Armv8.5 */
+#define CTX_SCXTNUM_EL2 U(0x240)
/* Align to the next 16 byte boundary */
-#define CTX_EL2_REGS_END (CTX_MTE_REGS_END + U(0x1C0))
-#else
-#define CTX_EL2_REGS_END CTX_MTE_REGS_END
+#define CTX_EL2_SYSREGS_END U(0x250)
#endif /* CTX_INCLUDE_EL2_REGS */
-/*
- * End of system registers.
- */
-#define CTX_SYSREGS_END CTX_EL2_REGS_END
-
/*******************************************************************************
* Constants that allow assembler code to access members of and the 'fp_regs'
* structure at their correct offsets.
******************************************************************************/
-#define CTX_FPREGS_OFFSET (CTX_SYSREGS_OFFSET + CTX_SYSREGS_END)
+#if CTX_INCLUDE_EL2_REGS
+# define CTX_FPREGS_OFFSET (CTX_EL2_SYSREGS_OFFSET + CTX_EL2_SYSREGS_END)
+#else
+# define CTX_FPREGS_OFFSET (CTX_EL1_SYSREGS_OFFSET + CTX_EL1_SYSREGS_END)
+#endif
#if CTX_INCLUDE_FPREGS
#define CTX_FP_Q0 U(0x0)
#define CTX_FP_Q1 U(0x10)
/* Constants to determine the size of individual context structures */
#define CTX_GPREG_ALL (CTX_GPREGS_END >> DWORD_SHIFT)
-#define CTX_SYSREG_ALL (CTX_SYSREGS_END >> DWORD_SHIFT)
+#define CTX_EL1_SYSREGS_ALL (CTX_EL1_SYSREGS_END >> DWORD_SHIFT)
+#if CTX_INCLUDE_EL2_REGS
+# define CTX_EL2_SYSREGS_ALL (CTX_EL2_SYSREGS_END >> DWORD_SHIFT)
+#endif
#if CTX_INCLUDE_FPREGS
# define CTX_FPREG_ALL (CTX_FPREGS_END >> DWORD_SHIFT)
#endif
DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);
/*
- * AArch64 EL1/EL2 system register context structure for preserving the
+ * AArch64 EL1 system register context structure for preserving the
* architectural state during world switches.
*/
-DEFINE_REG_STRUCT(sys_regs, CTX_SYSREG_ALL);
+DEFINE_REG_STRUCT(el1_sysregs, CTX_EL1_SYSREGS_ALL);
+
+
+/*
+ * AArch64 EL2 system register context structure for preserving the
+ * architectural state during world switches.
+ */
+#if CTX_INCLUDE_EL2_REGS
+DEFINE_REG_STRUCT(el2_sysregs, CTX_EL2_SYSREGS_ALL);
+#endif
/*
* AArch64 floating point register context structure for preserving
typedef struct cpu_context {
gp_regs_t gpregs_ctx;
el3_state_t el3state_ctx;
- sys_regs_t sysregs_ctx;
+ el1_sysregs_t el1_sysregs_ctx;
+#if CTX_INCLUDE_EL2_REGS
+ el2_sysregs_t el2_sysregs_ctx;
+#endif
#if CTX_INCLUDE_FPREGS
fp_regs_t fpregs_ctx;
#endif
#if CTX_INCLUDE_FPREGS
# define get_fpregs_ctx(h) (&((cpu_context_t *) h)->fpregs_ctx)
#endif
-#define get_sysregs_ctx(h) (&((cpu_context_t *) h)->sysregs_ctx)
+#define get_el1_sysregs_ctx(h) (&((cpu_context_t *) h)->el1_sysregs_ctx)
+#if CTX_INCLUDE_EL2_REGS
+# define get_el2_sysregs_ctx(h) (&((cpu_context_t *) h)->el2_sysregs_ctx)
+#endif
#define get_gpregs_ctx(h) (&((cpu_context_t *) h)->gpregs_ctx)
#define get_cve_2018_3639_ctx(h) (&((cpu_context_t *) h)->cve_2018_3639_ctx)
#if CTX_INCLUDE_PAUTH_REGS
*/
CASSERT(CTX_GPREGS_OFFSET == __builtin_offsetof(cpu_context_t, gpregs_ctx), \
assert_core_context_gp_offset_mismatch);
-CASSERT(CTX_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, sysregs_ctx), \
- assert_core_context_sys_offset_mismatch);
+CASSERT(CTX_EL1_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, el1_sysregs_ctx), \
+ assert_core_context_el1_sys_offset_mismatch);
+#if CTX_INCLUDE_EL2_REGS
+CASSERT(CTX_EL2_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, el2_sysregs_ctx), \
+ assert_core_context_el2_sys_offset_mismatch);
+#endif
#if CTX_INCLUDE_FPREGS
CASSERT(CTX_FPREGS_OFFSET == __builtin_offsetof(cpu_context_t, fpregs_ctx), \
assert_core_context_fp_offset_mismatch);
/*******************************************************************************
* Function prototypes
******************************************************************************/
-void el1_sysregs_context_save(sys_regs_t *regs);
-void el1_sysregs_context_restore(sys_regs_t *regs);
+void el1_sysregs_context_save(el1_sysregs_t *regs);
+void el1_sysregs_context_restore(el1_sysregs_t *regs);
#if CTX_INCLUDE_EL2_REGS
-void el2_sysregs_context_save(sys_regs_t *regs);
-void el2_sysregs_context_restore(sys_regs_t *regs);
+void el2_sysregs_context_save(el2_sysregs_t *regs);
+void el2_sysregs_context_restore(el2_sysregs_t *regs);
#endif
#if CTX_INCLUDE_FPREGS
/* -----------------------------------------------------
* The following function strictly follows the AArch64
* PCS to use x9-x17 (temporary caller-saved registers)
- * to save EL1 system register context. It assumes that
- * 'x0' is pointing to a 'el1_sys_regs' structure where
+ * to save EL2 system register context. It assumes that
+ * 'x0' is pointing to a 'el2_sys_regs' structure where
* the register context will be saved.
+ *
+ * The following registers are not added.
+ * AMEVCNTVOFF0<n>_EL2
+ * AMEVCNTVOFF1<n>_EL2
+ * ICH_AP0R<n>_EL2
+ * ICH_AP1R<n>_EL2
+ * ICH_LR<n>_EL2
* -----------------------------------------------------
*/
-func el2_sysregs_context_save
+func el2_sysregs_context_save
mrs x9, actlr_el2
- str x9, [x0, #CTX_ACTLR_EL2]
-
- mrs x9, afsr0_el2
- str x9, [x0, #CTX_AFSR0_EL2]
-
- mrs x9, afsr1_el2
- str x9, [x0, #CTX_AFSR1_EL2]
-
- mrs x9, amair_el2
- str x9, [x0, #CTX_AMAIR_EL2]
-
- mrs x9, cnthctl_el2
- str x9, [x0, #CTX_CNTHCTL_EL2]
-
- mrs x9, cnthp_ctl_el2
- str x9, [x0, #CTX_CNTHP_CTL_EL2]
+ mrs x10, afsr0_el2
+ stp x9, x10, [x0, #CTX_ACTLR_EL2]
- mrs x9, cnthp_cval_el2
- str x9, [x0, #CTX_CNTHP_CVAL_EL2]
+ mrs x11, afsr1_el2
+ mrs x12, amair_el2
+ stp x11, x12, [x0, #CTX_AFSR1_EL2]
- mrs x9, cnthp_tval_el2
- str x9, [x0, #CTX_CNTHP_TVAL_EL2]
+ mrs x13, cnthctl_el2
+ mrs x14, cnthp_ctl_el2
+ stp x13, x14, [x0, #CTX_CNTHCTL_EL2]
- mrs x9, CNTPOFF_EL2
- str x9, [x0, #CTX_CNTPOFF_EL2]
-
- mrs x9, cntvoff_el2
- str x9, [x0, #CTX_CNTVOFF_EL2]
+ mrs x15, cnthp_cval_el2
+ mrs x16, cnthp_tval_el2
+ stp x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
+ mrs x17, cntvoff_el2
mrs x9, cptr_el2
- str x9, [x0, #CTX_CPTR_EL2]
-
- mrs x9, dbgvcr32_el2
- str x9, [x0, #CTX_DBGVCR32_EL2]
-
- mrs x9, elr_el2
- str x9, [x0, #CTX_ELR_EL2]
-
- mrs x9, esr_el2
- str x9, [x0, #CTX_ESR_EL2]
+ stp x17, x9, [x0, #CTX_CNTVOFF_EL2]
- mrs x9, far_el2
- str x9, [x0, #CTX_FAR_EL2]
+ mrs x10, dbgvcr32_el2
+ mrs x11, elr_el2
+ stp x10, x11, [x0, #CTX_DBGVCR32_EL2]
- mrs x9, fpexc32_el2
- str x9, [x0, #CTX_FPEXC32_EL2]
+ mrs x14, esr_el2
+ mrs x15, far_el2
+ stp x14, x15, [x0, #CTX_ESR_EL2]
- mrs x9, hacr_el2
- str x9, [x0, #CTX_HACR_EL2]
-
- mrs x9, HAFGRTR_EL2
- str x9, [x0, #CTX_HAFGRTR_EL2]
+ mrs x16, fpexc32_el2
+ mrs x17, hacr_el2
+ stp x16, x17, [x0, #CTX_FPEXC32_EL2]
mrs x9, hcr_el2
- str x9, [x0, #CTX_HCR_EL2]
-
- mrs x9, HDFGRTR_EL2
- str x9, [x0, #CTX_HDFGRTR_EL2]
-
- mrs x9, HDFGWTR_EL2
- str x9, [x0, #CTX_HDFGWTR_EL2]
-
- mrs x9, HFGITR_EL2
- str x9, [x0, #CTX_HFGITR_EL2]
+ mrs x10, hpfar_el2
+ stp x9, x10, [x0, #CTX_HCR_EL2]
- mrs x9, HFGRTR_EL2
- str x9, [x0, #CTX_HFGRTR_EL2]
+ mrs x11, hstr_el2
+ mrs x12, ICC_SRE_EL2
+ stp x11, x12, [x0, #CTX_HSTR_EL2]
- mrs x9, HFGWTR_EL2
- str x9, [x0, #CTX_HFGWTR_EL2]
+ mrs x13, ICH_HCR_EL2
+ mrs x14, ICH_VMCR_EL2
+ stp x13, x14, [x0, #CTX_ICH_HCR_EL2]
- mrs x9, hpfar_el2
- str x9, [x0, #CTX_HPFAR_EL2]
+ mrs x15, mair_el2
+ mrs x16, mdcr_el2
+ stp x15, x16, [x0, #CTX_MAIR_EL2]
- mrs x9, hstr_el2
- str x9, [x0, #CTX_HSTR_EL2]
-
- mrs x9, ICC_SRE_EL2
- str x9, [x0, #CTX_ICC_SRE_EL2]
-
- mrs x9, ICH_EISR_EL2
- str x9, [x0, #CTX_ICH_EISR_EL2]
-
- mrs x9, ICH_ELRSR_EL2
- str x9, [x0, #CTX_ICH_ELRSR_EL2]
+ mrs x17, PMSCR_EL2
+ mrs x9, sctlr_el2
+ stp x17, x9, [x0, #CTX_PMSCR_EL2]
- mrs x9, ICH_HCR_EL2
- str x9, [x0, #CTX_ICH_HCR_EL2]
+ mrs x10, spsr_el2
+ mrs x11, sp_el2
+ stp x10, x11, [x0, #CTX_SPSR_EL2]
- mrs x9, ICH_MISR_EL2
- str x9, [x0, #CTX_ICH_MISR_EL2]
+ mrs x12, tcr_el2
+ mrs x13, TRFCR_EL2
+ stp x12, x13, [x0, #CTX_TCR_EL2]
- mrs x9, ICH_VMCR_EL2
- str x9, [x0, #CTX_ICH_VMCR_EL2]
+ mrs x14, ttbr0_el2
+ mrs x15, vbar_el2
+ stp x14, x15, [x0, #CTX_TTBR0_EL2]
- mrs x9, ICH_VTR_EL2
- str x9, [x0, #CTX_ICH_VTR_EL2]
+ mrs x16, vmpidr_el2
+ mrs x17, vpidr_el2
+ stp x16, x17, [x0, #CTX_VMPIDR_EL2]
- mrs x9, mair_el2
- str x9, [x0, #CTX_MAIR_EL2]
+ mrs x9, vtcr_el2
+ mrs x10, vttbr_el2
+ stp x9, x10, [x0, #CTX_VTCR_EL2]
- mrs x9, mdcr_el2
- str x9, [x0, #CTX_MDCR_EL2]
+#if CTX_INCLUDE_MTE_REGS
+ mrs x11, TFSR_EL2
+ str x11, [x0, #CTX_TFSR_EL2]
+#endif
+#if ENABLE_MPAM_FOR_LOWER_ELS
mrs x9, MPAM2_EL2
- str x9, [x0, #CTX_MPAM2_EL2]
+ mrs x10, MPAMHCR_EL2
+ stp x9, x10, [x0, #CTX_MPAM2_EL2]
- mrs x9, MPAMHCR_EL2
- str x9, [x0, #CTX_MPAMHCR_EL2]
+ mrs x11, MPAMVPM0_EL2
+ mrs x12, MPAMVPM1_EL2
+ stp x11, x12, [x0, #CTX_MPAMVPM0_EL2]
- mrs x9, MPAMVPM0_EL2
- str x9, [x0, #CTX_MPAMVPM0_EL2]
+ mrs x13, MPAMVPM2_EL2
+ mrs x14, MPAMVPM3_EL2
+ stp x13, x14, [x0, #CTX_MPAMVPM2_EL2]
- mrs x9, MPAMVPM1_EL2
- str x9, [x0, #CTX_MPAMVPM1_EL2]
+ mrs x15, MPAMVPM4_EL2
+ mrs x16, MPAMVPM5_EL2
+ stp x15, x16, [x0, #CTX_MPAMVPM4_EL2]
- mrs x9, MPAMVPM2_EL2
- str x9, [x0, #CTX_MPAMVPM2_EL2]
+ mrs x17, MPAMVPM6_EL2
+ mrs x9, MPAMVPM7_EL2
+ stp x17, x9, [x0, #CTX_MPAMVPM6_EL2]
- mrs x9, MPAMVPM3_EL2
- str x9, [x0, #CTX_MPAMVPM3_EL2]
+ mrs x10, MPAMVPMV_EL2
+ str x10, [x0, #CTX_MPAMVPMV_EL2]
+#endif
- mrs x9, MPAMVPM4_EL2
- str x9, [x0, #CTX_MPAMVPM4_EL2]
- mrs x9, MPAMVPM5_EL2
- str x9, [x0, #CTX_MPAMVPM5_EL2]
+#if ARM_ARCH_AT_LEAST(8, 6)
+ mrs x11, HAFGRTR_EL2
+ mrs x12, HDFGRTR_EL2
+ stp x11, x12, [x0, #CTX_HAFGRTR_EL2]
- mrs x9, MPAMVPM6_EL2
- str x9, [x0, #CTX_MPAMVPM6_EL2]
+ mrs x13, HDFGWTR_EL2
+ mrs x14, HFGITR_EL2
+ stp x13, x14, [x0, #CTX_HDFGWTR_EL2]
- mrs x9, MPAMVPM7_EL2
- str x9, [x0, #CTX_MPAMVPM7_EL2]
+ mrs x15, HFGRTR_EL2
+ mrs x16, HFGWTR_EL2
+ stp x15, x16, [x0, #CTX_HFGRTR_EL2]
- mrs x9, MPAMVPMV_EL2
- str x9, [x0, #CTX_MPAMVPMV_EL2]
+ mrs x17, CNTPOFF_EL2
+ str x17, [x0, #CTX_CNTPOFF_EL2]
+#endif
- mrs x9, rmr_el2
- str x9, [x0, #CTX_RMR_EL2]
+#if ARM_ARCH_AT_LEAST(8, 4)
+ mrs x9, cnthps_ctl_el2
+ mrs x10, cnthps_cval_el2
+ stp x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]
- mrs x9, sctlr_el2
- str x9, [x0, #CTX_SCTLR_EL2]
+ mrs x11, cnthps_tval_el2
+ mrs x12, cnthvs_ctl_el2
+ stp x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]
- mrs x9, spsr_el2
- str x9, [x0, #CTX_SPSR_EL2]
+ mrs x13, cnthvs_cval_el2
+ mrs x14, cnthvs_tval_el2
+ stp x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]
- mrs x9, sp_el2
- str x9, [x0, #CTX_SP_EL2]
+ mrs x15, cnthv_ctl_el2
+ mrs x16, cnthv_cval_el2
+ stp x15, x16, [x0, #CTX_CNTHV_CTL_EL2]
- mrs x9, tcr_el2
- str x9, [x0, #CTX_TCR_EL2]
+ mrs x17, cnthv_tval_el2
+ mrs x9, contextidr_el2
+ stp x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]
- mrs x9, tpidr_el2
- str x9, [x0, #CTX_TPIDR_EL2]
+ mrs x10, sder32_el2
+ str x10, [x0, #CTX_SDER32_EL2]
- mrs x9, ttbr0_el2
- str x9, [x0, #CTX_TTBR0_EL2]
+ mrs x11, ttbr1_el2
+ str x11, [x0, #CTX_TTBR1_EL2]
- mrs x9, vbar_el2
- str x9, [x0, #CTX_VBAR_EL2]
+ mrs x12, vdisr_el2
+ str x12, [x0, #CTX_VDISR_EL2]
- mrs x9, vmpidr_el2
- str x9, [x0, #CTX_VMPIDR_EL2]
+ mrs x13, vncr_el2
+ str x13, [x0, #CTX_VNCR_EL2]
- mrs x9, vpidr_el2
- str x9, [x0, #CTX_VPIDR_EL2]
+ mrs x14, vsesr_el2
+ str x14, [x0, #CTX_VSESR_EL2]
- mrs x9, vtcr_el2
- str x9, [x0, #CTX_VTCR_EL2]
+ mrs x15, vstcr_el2
+ str x15, [x0, #CTX_VSTCR_EL2]
- mrs x9, vttbr_el2
- str x9, [x0, #CTX_VTTBR_EL2]
+ mrs x16, vsttbr_el2
+ str x16, [x0, #CTX_VSTTBR_EL2]
+#endif
- mrs x9, ZCR_EL2
- str x9, [x0, #CTX_ZCR_EL2]
+#if ARM_ARCH_AT_LEAST(8, 5)
+ mrs x17, scxtnum_el2
+ str x17, [x0, #CTX_SCXTNUM_EL2]
+#endif
ret
endfunc el2_sysregs_context_save
/* -----------------------------------------------------
* The following function strictly follows the AArch64
* PCS to use x9-x17 (temporary caller-saved registers)
- * to restore EL1 system register context. It assumes
- * that 'x0' is pointing to a 'el1_sys_regs' structure
+ * to restore EL2 system register context. It assumes
+ * that 'x0' is pointing to a 'el2_sys_regs' structure
* from where the register context will be restored
+ *
+ * The following registers are not restored
+ * AMEVCNTVOFF0<n>_EL2
+ * AMEVCNTVOFF1<n>_EL2
+ * ICH_AP0R<n>_EL2
+ * ICH_AP1R<n>_EL2
+ * ICH_LR<n>_EL2
* -----------------------------------------------------
*/
func el2_sysregs_context_restore
- ldr x9, [x0, #CTX_ACTLR_EL2]
+ ldp x9, x10, [x0, #CTX_ACTLR_EL2]
msr actlr_el2, x9
+ msr afsr0_el2, x10
- ldr x9, [x0, #CTX_AFSR0_EL2]
- msr afsr0_el2, x9
-
- ldr x9, [x0, #CTX_AFSR1_EL2]
- msr afsr1_el2, x9
-
- ldr x9, [x0, #CTX_AMAIR_EL2]
- msr amair_el2, x9
-
- ldr x9, [x0, #CTX_CNTHCTL_EL2]
- msr cnthctl_el2, x9
+ ldp x11, x12, [x0, #CTX_AFSR1_EL2]
+ msr afsr1_el2, x11
+ msr amair_el2, x12
- ldr x9, [x0, #CTX_CNTHP_CTL_EL2]
- msr cnthp_ctl_el2, x9
+ ldp x13, x14, [x0, #CTX_CNTHCTL_EL2]
+ msr cnthctl_el2, x13
+ msr cnthp_ctl_el2, x14
- ldr x9, [x0, #CTX_CNTHP_CVAL_EL2]
- msr cnthp_cval_el2, x9
+ ldp x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
+ msr cnthp_cval_el2, x15
+ msr cnthp_tval_el2, x16
- ldr x9, [x0, #CTX_CNTHP_TVAL_EL2]
- msr cnthp_tval_el2, x9
-
- ldr x9, [x0, #CTX_CNTPOFF_EL2]
- msr CNTPOFF_EL2, x9
-
- ldr x9, [x0, #CTX_CNTVOFF_EL2]
- msr cntvoff_el2, x9
-
- ldr x9, [x0, #CTX_CPTR_EL2]
+ ldp x17, x9, [x0, #CTX_CNTVOFF_EL2]
+ msr cntvoff_el2, x17
msr cptr_el2, x9
- ldr x9, [x0, #CTX_DBGVCR32_EL2]
- msr dbgvcr32_el2, x9
-
- ldr x9, [x0, #CTX_ELR_EL2]
- msr elr_el2, x9
+ ldp x10, x11, [x0, #CTX_DBGVCR32_EL2]
+ msr dbgvcr32_el2, x10
+ msr elr_el2, x11
- ldr x9, [x0, #CTX_ESR_EL2]
- msr esr_el2, x9
+ ldp x14, x15, [x0, #CTX_ESR_EL2]
+ msr esr_el2, x14
+ msr far_el2, x15
- ldr x9, [x0, #CTX_FAR_EL2]
- msr far_el2, x9
+ ldp x16, x17, [x0, #CTX_FPEXC32_EL2]
+ msr fpexc32_el2, x16
+ msr hacr_el2, x17
- ldr x9, [x0, #CTX_FPEXC32_EL2]
- msr fpexc32_el2, x9
-
- ldr x9, [x0, #CTX_HACR_EL2]
- msr hacr_el2, x9
-
- ldr x9, [x0, #CTX_HAFGRTR_EL2]
- msr HAFGRTR_EL2, x9
-
- ldr x9, [x0, #CTX_HCR_EL2]
+ ldp x9, x10, [x0, #CTX_HCR_EL2]
msr hcr_el2, x9
+ msr hpfar_el2, x10
- ldr x9, [x0, #CTX_HDFGRTR_EL2]
- msr HDFGRTR_EL2, x9
-
- ldr x9, [x0, #CTX_HDFGWTR_EL2]
- msr HDFGWTR_EL2, x9
-
- ldr x9, [x0, #CTX_HFGITR_EL2]
- msr HFGITR_EL2, x9
-
- ldr x9, [x0, #CTX_HFGRTR_EL2]
- msr HFGRTR_EL2, x9
-
- ldr x9, [x0, #CTX_HFGWTR_EL2]
- msr HFGWTR_EL2, x9
+ ldp x11, x12, [x0, #CTX_HSTR_EL2]
+ msr hstr_el2, x11
+ msr ICC_SRE_EL2, x12
- ldr x9, [x0, #CTX_HPFAR_EL2]
- msr hpfar_el2, x9
+ ldp x13, x14, [x0, #CTX_ICH_HCR_EL2]
+ msr ICH_HCR_EL2, x13
+ msr ICH_VMCR_EL2, x14
- ldr x9, [x0, #CTX_HSTR_EL2]
- msr hstr_el2, x9
+ ldp x15, x16, [x0, #CTX_MAIR_EL2]
+ msr mair_el2, x15
+ msr mdcr_el2, x16
- ldr x9, [x0, #CTX_ICC_SRE_EL2]
- msr ICC_SRE_EL2, x9
-
- ldr x9, [x0, #CTX_ICH_EISR_EL2]
- msr ICH_EISR_EL2, x9
-
- ldr x9, [x0, #CTX_ICH_ELRSR_EL2]
- msr ICH_ELRSR_EL2, x9
+ ldp x17, x9, [x0, #CTX_PMSCR_EL2]
+ msr PMSCR_EL2, x17
+ msr sctlr_el2, x9
- ldr x9, [x0, #CTX_ICH_HCR_EL2]
- msr ICH_HCR_EL2, x9
+ ldp x10, x11, [x0, #CTX_SPSR_EL2]
+ msr spsr_el2, x10
+ msr sp_el2, x11
- ldr x9, [x0, #CTX_ICH_MISR_EL2]
- msr ICH_MISR_EL2, x9
+ ldp x12, x13, [x0, #CTX_TCR_EL2]
+ msr tcr_el2, x12
+ msr TRFCR_EL2, x13
- ldr x9, [x0, #CTX_ICH_VMCR_EL2]
- msr ICH_VMCR_EL2, x9
+ ldp x14, x15, [x0, #CTX_TTBR0_EL2]
+ msr ttbr0_el2, x14
+ msr vbar_el2, x15
- ldr x9, [x0, #CTX_ICH_VTR_EL2]
- msr ICH_VTR_EL2, x9
+ ldp x16, x17, [x0, #CTX_VMPIDR_EL2]
+ msr vmpidr_el2, x16
+ msr vpidr_el2, x17
- ldr x9, [x0, #CTX_MAIR_EL2]
- msr mair_el2, x9
+ ldp x9, x10, [x0, #CTX_VTCR_EL2]
+ msr vtcr_el2, x9
+ msr vttbr_el2, x10
- ldr x9, [x0, #CTX_MDCR_EL2]
- msr mdcr_el2, x9
+#if CTX_INCLUDE_MTE_REGS
+ ldr x11, [x0, #CTX_TFSR_EL2]
+ msr TFSR_EL2, x11
+#endif
- ldr x9, [x0, #CTX_MPAM2_EL2]
+#if ENABLE_MPAM_FOR_LOWER_ELS
+ ldp x9, x10, [x0, #CTX_MPAM2_EL2]
msr MPAM2_EL2, x9
+ msr MPAMHCR_EL2, x10
- ldr x9, [x0, #CTX_MPAMHCR_EL2]
- msr MPAMHCR_EL2, x9
+ ldp x11, x12, [x0, #CTX_MPAMVPM0_EL2]
+ msr MPAMVPM0_EL2, x11
+ msr MPAMVPM1_EL2, x12
- ldr x9, [x0, #CTX_MPAMVPM0_EL2]
- msr MPAMVPM0_EL2, x9
+ ldp x13, x14, [x0, #CTX_MPAMVPM2_EL2]
+ msr MPAMVPM2_EL2, x13
+ msr MPAMVPM3_EL2, x14
- ldr x9, [x0, #CTX_MPAMVPM1_EL2]
- msr MPAMVPM1_EL2, x9
+ ldp x15, x16, [x0, #CTX_MPAMVPM4_EL2]
+ msr MPAMVPM4_EL2, x15
+ msr MPAMVPM5_EL2, x16
- ldr x9, [x0, #CTX_MPAMVPM2_EL2]
- msr MPAMVPM2_EL2, x9
-
- ldr x9, [x0, #CTX_MPAMVPM3_EL2]
- msr MPAMVPM3_EL2, x9
+ ldp x17, x9, [x0, #CTX_MPAMVPM6_EL2]
+ msr MPAMVPM6_EL2, x17
+ msr MPAMVPM7_EL2, x9
- ldr x9, [x0, #CTX_MPAMVPM4_EL2]
- msr MPAMVPM4_EL2, x9
+ ldr x10, [x0, #CTX_MPAMVPMV_EL2]
+ msr MPAMVPMV_EL2, x10
+#endif
- ldr x9, [x0, #CTX_MPAMVPM5_EL2]
- msr MPAMVPM5_EL2, x9
+#if ARM_ARCH_AT_LEAST(8, 6)
+ ldp x11, x12, [x0, #CTX_HAFGRTR_EL2]
+ msr HAFGRTR_EL2, x11
+ msr HDFGRTR_EL2, x12
- ldr x9, [x0, #CTX_MPAMVPM6_EL2]
- msr MPAMVPM6_EL2, x9
+ ldp x13, x14, [x0, #CTX_HDFGWTR_EL2]
+ msr HDFGWTR_EL2, x13
+ msr HFGITR_EL2, x14
- ldr x9, [x0, #CTX_MPAMVPM7_EL2]
- msr MPAMVPM7_EL2, x9
+ ldp x15, x16, [x0, #CTX_HFGRTR_EL2]
+ msr HFGRTR_EL2, x15
+ msr HFGWTR_EL2, x16
- ldr x9, [x0, #CTX_MPAMVPMV_EL2]
- msr MPAMVPMV_EL2, x9
+ ldr x17, [x0, #CTX_CNTPOFF_EL2]
+ msr CNTPOFF_EL2, x17
+#endif
- ldr x9, [x0, #CTX_RMR_EL2]
- msr rmr_el2, x9
+#if ARM_ARCH_AT_LEAST(8, 4)
+ ldp x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]
+ msr cnthps_ctl_el2, x9
+ msr cnthps_cval_el2, x10
- ldr x9, [x0, #CTX_SCTLR_EL2]
- msr sctlr_el2, x9
+ ldp x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]
+ msr cnthps_tval_el2, x11
+ msr cnthvs_ctl_el2, x12
- ldr x9, [x0, #CTX_SPSR_EL2]
- msr spsr_el2, x9
+ ldp x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]
+ msr cnthvs_cval_el2, x13
+ msr cnthvs_tval_el2, x14
- ldr x9, [x0, #CTX_SP_EL2]
- msr sp_el2, x9
+ ldp x15, x16, [x0, #CTX_CNTHV_CTL_EL2]
+ msr cnthv_ctl_el2, x15
+ msr cnthv_cval_el2, x16
- ldr x9, [x0, #CTX_TCR_EL2]
- msr tcr_el2, x9
+ ldp x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]
+ msr cnthv_tval_el2, x17
+ msr contextidr_el2, x9
- ldr x9, [x0, #CTX_TPIDR_EL2]
- msr tpidr_el2, x9
+ ldr x10, [x0, #CTX_SDER32_EL2]
+ msr sder32_el2, x10
- ldr x9, [x0, #CTX_TTBR0_EL2]
- msr ttbr0_el2, x9
+ ldr x11, [x0, #CTX_TTBR1_EL2]
+ msr ttbr1_el2, x11
- ldr x9, [x0, #CTX_VBAR_EL2]
- msr vbar_el2, x9
+ ldr x12, [x0, #CTX_VDISR_EL2]
+ msr vdisr_el2, x12
- ldr x9, [x0, #CTX_VMPIDR_EL2]
- msr vmpidr_el2, x9
+ ldr x13, [x0, #CTX_VNCR_EL2]
+ msr vncr_el2, x13
- ldr x9, [x0, #CTX_VPIDR_EL2]
- msr vpidr_el2, x9
+ ldr x14, [x0, #CTX_VSESR_EL2]
+ msr vsesr_el2, x14
- ldr x9, [x0, #CTX_VTCR_EL2]
- msr vtcr_el2, x9
+ ldr x15, [x0, #CTX_VSTCR_EL2]
+ msr vstcr_el2, x15
- ldr x9, [x0, #CTX_VTTBR_EL2]
- msr vttbr_el2, x9
+ ldr x16, [x0, #CTX_VSTTBR_EL2]
+ msr vsttbr_el2, x16
+#endif
- ldr x9, [x0, #CTX_ZCR_EL2]
- msr ZCR_EL2, x9
+#if ARM_ARCH_AT_LEAST(8, 5)
+ ldr x17, [x0, #CTX_SCXTNUM_EL2]
+ msr scxtnum_el2, x17
+#endif
ret
endfunc el2_sysregs_context_restore