refactor(el3_runtime): unify handle/enter_lower_el_async_ea

handle_lower_el_async_ea and enter_lower_el_async_ea are the same except for
saving the x30 register. With the previous patch, x30 is now freed before
calling these functions, so we don't need both of them.

This patch also unifies the naming convention; now we have 3 handlers:
 - handle_lower_el_ea_esb
 - handle_lower_el_sync_ea
 - handle_lower_el_async_ea

Signed-off-by: Manish Pandey <manish.pandey2@arm.com>
Change-Id: I63b584cf059bac80195aa334981d50fa6272cf49
This commit is contained in:
Manish Pandey 2023-01-11 21:53:02 +00:00
parent d87c0e277f
commit 6f7de9a871
2 changed files with 19 additions and 30 deletions

View file

@@ -16,9 +16,8 @@
#include <context.h> #include <context.h>
.globl handle_lower_el_ea_esb .globl handle_lower_el_ea_esb
.globl handle_lower_el_sync_ea
.globl handle_lower_el_async_ea .globl handle_lower_el_async_ea
.globl enter_lower_el_sync_ea
.globl enter_lower_el_async_ea
/* /*
@@ -42,17 +41,12 @@ endfunc handle_lower_el_ea_esb
* Implementation Defined Exceptions. If any other kind of exception is detected, * Implementation Defined Exceptions. If any other kind of exception is detected,
* then this function reports unhandled exception. * then this function reports unhandled exception.
* *
* Since it's part of exception vector, this function doesn't expect any GP * It delegates the handling of the EA to platform handler, and upon successfully
* registers to have been saved. It delegates the handling of the EA to platform * handling the EA, exits EL3; otherwise panics.
* handler, and upon successfully handling the EA, exits EL3; otherwise panics. *
* This function assumes x30 has been saved.
*/ */
func enter_lower_el_sync_ea func handle_lower_el_sync_ea
/*
* Explicitly save x30 so as to free up a register and to enable
* branching.
*/
str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
mrs x30, esr_el3 mrs x30, esr_el3
ubfx x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH ubfx x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
@@ -114,24 +108,19 @@ func enter_lower_el_sync_ea
/* Synchronous exceptions other than the above are assumed to be EA */ /* Synchronous exceptions other than the above are assumed to be EA */
ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR] ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
no_ret report_unhandled_exception no_ret report_unhandled_exception
endfunc enter_lower_el_sync_ea endfunc handle_lower_el_sync_ea
/* /*
* This function handles SErrors from lower ELs. * This function handles SErrors from lower ELs.
* *
* Since it's part of exception vector, this function doesn't expect any GP * It delegates the handling of the EA to platform handler, and upon successfully
* registers to have been saved. It delegates the handling of the EA to platform * handling the EA, exits EL3; otherwise panics.
* handler, and upon successfully handling the EA, exits EL3; otherwise panics. *
* This function assumes x30 has been saved.
*/ */
func enter_lower_el_async_ea func handle_lower_el_async_ea
/*
* Explicitly save x30 so as to free up a register and to enable
* branching
*/
str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
handle_lower_el_async_ea:
/* /*
* Save general purpose and ARMv8.3-PAuth registers (if enabled). * Save general purpose and ARMv8.3-PAuth registers (if enabled).
* If Secure Cycle Counter is not disabled in MDCR_EL3 when * If Secure Cycle Counter is not disabled in MDCR_EL3 when
@@ -153,7 +142,7 @@ handle_lower_el_async_ea:
/* el3_exit assumes SP_EL0 on entry */ /* el3_exit assumes SP_EL0 on entry */
msr spsel, #MODE_SP_EL0 msr spsel, #MODE_SP_EL0
b el3_exit b el3_exit
endfunc enter_lower_el_async_ea endfunc handle_lower_el_async_ea
/* /*

View file

@@ -151,7 +151,7 @@
/* Synchronous exceptions other than the above are assumed to be EA */ /* Synchronous exceptions other than the above are assumed to be EA */
ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR] ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
b enter_lower_el_sync_ea b handle_lower_el_sync_ea
.endm .endm
@@ -361,11 +361,11 @@ vector_entry serror_aarch64
apply_at_speculative_wa apply_at_speculative_wa
#if RAS_EXTENSION #if RAS_EXTENSION
msr daifclr, #DAIF_ABT_BIT msr daifclr, #DAIF_ABT_BIT
b enter_lower_el_async_ea
#else #else
check_and_unmask_ea check_and_unmask_ea
b handle_lower_el_async_ea
#endif #endif
b handle_lower_el_async_ea
end_vector_entry serror_aarch64 end_vector_entry serror_aarch64
/* --------------------------------------------------------------------- /* ---------------------------------------------------------------------
@@ -404,11 +404,11 @@ vector_entry serror_aarch32
apply_at_speculative_wa apply_at_speculative_wa
#if RAS_EXTENSION #if RAS_EXTENSION
msr daifclr, #DAIF_ABT_BIT msr daifclr, #DAIF_ABT_BIT
b enter_lower_el_async_ea
#else #else
check_and_unmask_ea check_and_unmask_ea
b handle_lower_el_async_ea
#endif #endif
b handle_lower_el_async_ea
end_vector_entry serror_aarch32 end_vector_entry serror_aarch32
#ifdef MONITOR_TRAPS #ifdef MONITOR_TRAPS