.globl fiq_aarch32
.globl serror_aarch32
+ /*
+ * Save LR to make x30 available, as most of the routines in the vector
+ * entries need a free register
+ */
+ .macro save_x30
+ str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+ .endm
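+
+ /*
+ * save_x30 must precede any macro that clobbers x30; in the vector entries
+ * below it is therefore invoked before apply_at_speculative_wa, whose "bl"
+ * overwrites x30 with a return address.
+ */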
+
/*
* Macro that prepares entry to EL3 upon taking an exception.
*
/* Unmask the SError interrupt */
msr daifclr, #DAIF_ABT_BIT
- /*
- * Explicitly save x30 so as to free up a register and to enable
- * branching
- */
- str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
-
/* Check for SErrors synchronized by the ESB instruction */
mrs x30, DISR_EL1
tbz x30, #DISR_A_BIT, 1f
/* Use an ISB so that the above unmask operation takes effect immediately */
isb
- /*
- * Refer Note 1.
- * No need to restore X30 as macros following this modify x30 anyway.
- */
- str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+ /* Refer to Note 1. */
mov x30, #1
str x30, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
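+ /* Order the CTX_IS_IN_EL3 update before subsequent memory accesses */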
dmb sy
* triggered due to explicit synchronization in EL3. Refer to Note 1.
*/
/* Assumes SP_EL3 on entry */
- str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+ save_x30
ldr x30, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
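+ /* A non-zero flag means the SError was synchronized from EL3; see Note 1 */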
cbnz x30, 1f
* to a valid cpu context where the general purpose and system register
* state can be saved.
*/
+ save_x30
apply_at_speculative_wa
check_and_unmask_ea
handle_sync_exception
end_vector_entry sync_exception_aarch64
vector_entry irq_aarch64
+ save_x30
apply_at_speculative_wa
check_and_unmask_ea
handle_interrupt_exception irq_aarch64
end_vector_entry irq_aarch64
vector_entry fiq_aarch64
+ save_x30
apply_at_speculative_wa
check_and_unmask_ea
handle_interrupt_exception fiq_aarch64
end_vector_entry fiq_aarch64
vector_entry serror_aarch64
+ save_x30
apply_at_speculative_wa
#if RAS_EXTENSION
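+ /* Unmask the SError interrupt */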
msr daifclr, #DAIF_ABT_BIT
* to a valid cpu context where the general purpose and system register
* state can be saved.
*/
+ save_x30
apply_at_speculative_wa
check_and_unmask_ea
handle_sync_exception
end_vector_entry sync_exception_aarch32
vector_entry irq_aarch32
+ save_x30
apply_at_speculative_wa
check_and_unmask_ea
handle_interrupt_exception irq_aarch32
end_vector_entry irq_aarch32
vector_entry fiq_aarch32
+ save_x30
apply_at_speculative_wa
check_and_unmask_ea
handle_interrupt_exception fiq_aarch32
end_vector_entry fiq_aarch32
vector_entry serror_aarch32
+ save_x30
apply_at_speculative_wa
#if RAS_EXTENSION
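+ /* Unmask the SError interrupt */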
msr daifclr, #DAIF_ABT_BIT
.macro apply_at_speculative_wa
#if ERRATA_SPECULATIVE_AT
/*
- * Explicitly save x30 so as to free up a register and to enable
- * branching and also, save x29 which will be used in the called
- * function
+ * This macro expects that x30 has been saved.
+ * Also save x29, which will be used by the called function.
*/
- stp x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
+ str x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
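+ /* The "bl" below clobbers x30 (LR), hence the save_x30 requirement above */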
bl save_and_update_ptw_el1_sys_regs
- ldp x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
+ ldr x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
#endif
.endm