Mirror of https://github.com/ARM-software/arm-trusted-firmware.git (synced 2025-04-19 02:54:24 +00:00)
Merge changes from topic "mp/undef_injection" into integration
* changes:
  feat(el3-runtime): introduce UNDEF injection to lower EL
  feat(cpufeat): added few helper functions
Commit c2f9ba88f4
9 changed files with 356 additions and 19 deletions
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -476,21 +476,33 @@ sysreg_handler64:
 	bl	handle_sysreg_trap
 	/*
 	 * returns:
-	 * -1: unhandled trap, panic
+	 * -1: unhandled trap, UNDEF injection into lower EL
 	 * 0: handled trap, return to the trapping instruction (repeating it)
 	 * 1: handled trap, return to the next instruction
 	 */

 	tst	w0, w0
-	b.mi	elx_panic	/* negative return value: panic */
+	b.mi	2f		/* negative: undefined exception injection */
+
 	b.eq	1f		/* zero: do not change ELR_EL3 */

-	/* advance the PC to continue after the instruction */
+	/* positive: advance the PC to continue after the instruction */
 	ldr	x1, [x19, #CTX_EL3STATE_OFFSET + CTX_ELR_EL3]
 	add	x1, x1, #4
 	str	x1, [x19, #CTX_EL3STATE_OFFSET + CTX_ELR_EL3]
 1:
 	b	el3_exit
+2:
+	/*
+	 * UNDEF injection to lower EL; support is only provided for a lower
+	 * EL in AArch64 mode. For AArch32 mode it will elx_panic as before.
+	 */
+	mrs	x0, spsr_el3
+	tst	x0, #(SPSR_M_MASK << SPSR_M_SHIFT)
+	b.ne	elx_panic
+	/* Pass context pointer as an argument to inject_undef64 */
+	mov	x0, x19
+	bl	inject_undef64
+	b	el3_exit

 smc_unknown:
 	/*
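For orientation: the -1/0/1 values tested above are the return codes produced by handle_sysreg_trap() and the platform trap handlers. A rough sketch of the convention (the constant names are assumed to match include/bl31/sync_handle.h, where TRAP_RET_UNHANDLED is visible later in this diff):

/* Sketch of the return-code convention dispatched on by sysreg_handler64 */
#define TRAP_RET_UNHANDLED	-1	/* b.mi: now UNDEF injection (AArch64) or elx_panic (AArch32) */
#define TRAP_RET_REPEAT		0	/* b.eq: keep ELR_EL3, re-execute the trapping instruction */
#define TRAP_RET_CONTINUE	1	/* fall through: ELR_EL3 += 4, continue after the instruction */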
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2022, ARM Limited. All rights reserved.
+ * Copyright (c) 2022-2024, Arm Limited. All rights reserved.
  * Copyright (c) 2023, NVIDIA Corporation. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
@@ -7,8 +7,11 @@
  * Dispatch synchronous system register traps from lower ELs.
  */

+#include <arch_features.h>
+#include <arch_helpers.h>
 #include <bl31/sync_handle.h>
 #include <context.h>
+#include <lib/el3_runtime/context_mgmt.h>

 int handle_sysreg_trap(uint64_t esr_el3, cpu_context_t *ctx)
 {
@@ -28,3 +31,205 @@ int handle_sysreg_trap(uint64_t esr_el3, cpu_context_t *ctx)

 	return TRAP_RET_UNHANDLED;
 }
+
+static bool is_tge_enabled(void)
+{
+	u_register_t hcr_el2 = read_hcr_el2();
+
+	return ((read_feat_vhe_id_field() != 0U) && ((hcr_el2 & HCR_TGE_BIT) != 0U));
+}
+
+/*
+ * This function ensures that UNDEF injection does not happen into a
+ * non-existent S-EL2. This could happen when a trap is taken from S-EL{1,0}
+ * while the non-secure world is running with the TGE bit set: EL3 does not
+ * save/restore EL2 registers if only one world has EL2 enabled, so reading
+ * hcr_el2.TGE would give the NS world value.
+ */
+static bool is_secure_trap_without_sel2(u_register_t scr)
+{
+	return ((scr & (SCR_NS_BIT | SCR_EEL2_BIT)) == 0);
+}
+
+static unsigned int target_el(unsigned int from_el, u_register_t scr)
+{
+	if (from_el > MODE_EL1) {
+		return from_el;
+	} else if (is_tge_enabled() && !is_secure_trap_without_sel2(scr)) {
+		return MODE_EL2;
+	} else {
+		return MODE_EL1;
+	}
+}
+
+static u_register_t get_elr_el3(u_register_t spsr_el3, u_register_t vbar, unsigned int target_el)
+{
+	unsigned int outgoing_el = GET_EL(spsr_el3);
+	u_register_t elr_el3 = 0;
+
+	if (outgoing_el == target_el) {
+		/*
+		 * Target EL is either EL1 or EL2; the LSB tells us the SPSel:
+		 *  Thread mode  : 0
+		 *  Handler mode : 1
+		 */
+		if ((spsr_el3 & (MODE_SP_MASK << MODE_SP_SHIFT)) == MODE_SP_ELX) {
+			elr_el3 = vbar + CURRENT_EL_SPX;
+		} else {
+			elr_el3 = vbar + CURRENT_EL_SP0;
+		}
+	} else {
+		/* Vector address for a lower EL using AArch64 */
+		elr_el3 = vbar + LOWER_EL_AARCH64;
+	}
+
+	return elr_el3;
+}
+
+/*
+ * Explicitly create all bits of SPSR to get PSTATE at exception return.
+ *
+ * The code is based on "Aarch64.exceptions.takeexception" described in
+ * DDI0602 revision 2023-06:
+ * "https://developer.arm.com/documentation/ddi0602/2023-06/Shared-Pseudocode/
+ * aarch64-exceptions-takeexception"
+ *
+ * NOTE: This piece of code must be reviewed every release to ensure that
+ * we keep up with new architectural features that introduce new SPSR bits.
+ */
+static u_register_t create_spsr(u_register_t old_spsr, unsigned int target_el)
+{
+	u_register_t new_spsr = 0;
+	u_register_t sctlr;
+
+	/* Set M bits for target EL in AArch64 mode, also get sctlr */
+	if (target_el == MODE_EL2) {
+		sctlr = read_sctlr_el2();
+		new_spsr |= (SPSR_M_AARCH64 << SPSR_M_SHIFT) | SPSR_M_EL2H;
+	} else {
+		sctlr = read_sctlr_el1();
+		new_spsr |= (SPSR_M_AARCH64 << SPSR_M_SHIFT) | SPSR_M_EL1H;
+	}
+
+	/* Mask all exceptions, update DAIF bits */
+	new_spsr |= SPSR_DAIF_MASK << SPSR_DAIF_SHIFT;
+
+	/* If FEAT_BTI is present, clear BTYPE bits */
+	new_spsr |= old_spsr & (SPSR_BTYPE_MASK_AARCH64 << SPSR_BTYPE_SHIFT_AARCH64);
+	if (is_armv8_5_bti_present()) {
+		new_spsr &= ~(SPSR_BTYPE_MASK_AARCH64 << SPSR_BTYPE_SHIFT_AARCH64);
+	}
+
+	/* If SSBS is implemented, take the value from SCTLR.DSSBS */
+	new_spsr |= old_spsr & SPSR_SSBS_BIT_AARCH64;
+	if (is_feat_ssbs_present()) {
+		if ((sctlr & SCTLR_DSSBS_BIT) != 0U) {
+			new_spsr |= SPSR_SSBS_BIT_AARCH64;
+		} else {
+			new_spsr &= ~SPSR_SSBS_BIT_AARCH64;
+		}
+	}
+
+	/* If FEAT_NMI is implemented, ALLINT = !(SCTLR.SPINTMASK) */
+	new_spsr |= old_spsr & SPSR_ALLINT_BIT_AARCH64;
+	if (is_feat_nmi_present()) {
+		if ((sctlr & SCTLR_SPINTMASK_BIT) != 0U) {
+			new_spsr &= ~SPSR_ALLINT_BIT_AARCH64;
+		} else {
+			new_spsr |= SPSR_ALLINT_BIT_AARCH64;
+		}
+	}
+
+	/* Clear PSTATE.IL bit explicitly */
+	new_spsr &= ~SPSR_IL_BIT;
+
+	/* Clear PSTATE.SS bit explicitly */
+	new_spsr &= ~SPSR_SS_BIT;
+
+	/* Update PSTATE.PAN bit */
+	new_spsr |= old_spsr & SPSR_PAN_BIT;
+	if (is_feat_pan_present() &&
+	    ((target_el == MODE_EL1) || ((target_el == MODE_EL2) && is_tge_enabled())) &&
+	    ((sctlr & SCTLR_SPAN_BIT) == 0U)) {
+		new_spsr |= SPSR_PAN_BIT;
+	}
+
+	/* Clear UAO bit if FEAT_UAO is present */
+	new_spsr |= old_spsr & SPSR_UAO_BIT_AARCH64;
+	if (is_feat_uao_present()) {
+		new_spsr &= ~SPSR_UAO_BIT_AARCH64;
+	}
+
+	/* DIT bits are unchanged */
+	new_spsr |= old_spsr & SPSR_DIT_BIT;
+
+	/* If FEAT_MTE2 is implemented, mask tag faults by setting TCO bit */
+	new_spsr |= old_spsr & SPSR_TCO_BIT_AARCH64;
+	if (read_feat_mte_id_field() >= MTE_IMPLEMENTED_ELX) {
+		new_spsr |= SPSR_TCO_BIT_AARCH64;
+	}
+
+	/* NZCV bits are unchanged */
+	new_spsr |= old_spsr & SPSR_NZCV;
+
+	/* If FEAT_EBEP is present, set PM bit */
+	new_spsr |= old_spsr & SPSR_PM_BIT_AARCH64;
+	if (is_feat_ebep_present()) {
+		new_spsr |= SPSR_PM_BIT_AARCH64;
+	}
+
+	/* If FEAT_SEBEP is present, clear PPEND bit */
+	new_spsr |= old_spsr & SPSR_PPEND_BIT;
+	if (is_feat_sebep_present()) {
+		new_spsr &= ~SPSR_PPEND_BIT;
+	}
+
+	/* If FEAT_GCS is present, update EXLOCK bit */
+	new_spsr |= old_spsr & SPSR_EXLOCK_BIT_AARCH64;
+	if (is_feat_gcs_present()) {
+		u_register_t gcscr;
+
+		if (target_el == MODE_EL2) {
+			gcscr = read_gcscr_el2();
+		} else {
+			gcscr = read_gcscr_el1();
+		}
+		new_spsr |= (gcscr & GCSCR_EXLOCK_EN_BIT) ? SPSR_EXLOCK_BIT_AARCH64 : 0;
+	}
+
+	return new_spsr;
+}
+
+/*
+ * Handler for injecting an Undefined exception into a lower EL when the
+ * lower EL accesses system registers of which (older) EL3 firmware is
+ * unaware.
+ *
+ * This is a safety net to avoid EL3 panics caused by system register access
+ * that triggers an exception syndrome EC=0x18.
+ */
+void inject_undef64(cpu_context_t *ctx)
+{
+	u_register_t esr = (EC_UNKNOWN << ESR_EC_SHIFT) | ESR_IL_BIT;
+	el3_state_t *state = get_el3state_ctx(ctx);
+	u_register_t elr_el3 = read_ctx_reg(state, CTX_ELR_EL3);
+	u_register_t old_spsr = read_ctx_reg(state, CTX_SPSR_EL3);
+	u_register_t scr_el3 = read_ctx_reg(state, CTX_SCR_EL3);
+	u_register_t new_spsr = 0;
+	unsigned int to_el = target_el(GET_EL(old_spsr), scr_el3);
+
+	if (to_el == MODE_EL2) {
+		write_elr_el2(elr_el3);
+		elr_el3 = get_elr_el3(old_spsr, read_vbar_el2(), to_el);
+		write_esr_el2(esr);
+		write_spsr_el2(old_spsr);
+	} else {
+		write_elr_el1(elr_el3);
+		elr_el3 = get_elr_el3(old_spsr, read_vbar_el1(), to_el);
+		write_esr_el1(esr);
+		write_spsr_el1(old_spsr);
+	}
+
+	new_spsr = create_spsr(old_spsr, to_el);
+
+	write_ctx_reg(state, CTX_SPSR_EL3, new_spsr);
+	write_ctx_reg(state, CTX_ELR_EL3, elr_el3);
+}
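As a quick sanity check of the syndrome that inject_undef64() above fabricates: with EC_UNKNOWN = 0, ESR_EC_SHIFT = 26 and ESR_IL_BIT = (1 << 25) (all visible in this patch), the lower EL reads ESR_ELx == 0x02000000, i.e. EC = 0b000000 ("Unknown reason") with the IL bit set. A standalone check (hypothetical test code, not part of the patch):

#include <assert.h>
#include <stdint.h>

int main(void)
{
	const uint64_t ec_unknown = 0x0;	/* EC_UNKNOWN */
	const uint64_t esr_ec_shift = 26;	/* ESR_EC_SHIFT */
	const uint64_t esr_il_bit = 1ULL << 25;	/* ESR_IL_BIT, added by this patch */

	/* Same expression as in inject_undef64() */
	uint64_t esr = (ec_unknown << esr_ec_shift) | esr_il_bit;

	assert(esr == 0x02000000ULL);	/* what EL1/EL2 sees in ESR_ELx */
	return 0;
}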
@@ -163,6 +163,11 @@
 #define ID_PFR1_SEC_MASK	U(0xf)
 #define ID_PFR1_ELx_ENABLED	U(1)

+/* ID_PFR2 definitions */
+#define ID_PFR2_SSBS_SHIFT	U(4)
+#define ID_PFR2_SSBS_MASK	U(0xf)
+#define SSBS_UNAVAILABLE	U(0)
+
 /* SCTLR definitions */
 #define SCTLR_RES1_DEF		((U(1) << 23) | (U(1) << 22) | (U(1) << 4) | \
 				 (U(1) << 3))
@@ -552,6 +557,7 @@
 #define ID_DFR1		p15, 0, c0, c3, 5
 #define ID_PFR0		p15, 0, c0, c1, 0
 #define ID_PFR1		p15, 0, c0, c1, 1
+#define ID_PFR2		p15, 0, c0, c3, 4
 #define MAIR0		p15, 0, c10, c2, 0
 #define MAIR1		p15, 0, c10, c2, 1
 #define TTBCR		p15, 0, c2, c0, 2
@@ -128,6 +128,17 @@ static inline bool is_feat_pan_supported(void)
 	return read_feat_pan_id_field() != 0U;
 }

+static inline bool is_feat_pan_present(void)
+{
+	return read_feat_pan_id_field() != 0U;
+}
+
+static inline unsigned int is_feat_ssbs_present(void)
+{
+	return ((read_id_pfr2() >> ID_PFR2_SSBS_SHIFT) &
+		ID_PFR2_SSBS_MASK) != SSBS_UNAVAILABLE;
+}
+
 /*
  * TWED, ECV, CSV2, RAS are only used by the AArch64 EL2 context switch
  * code. In fact, EL2 context switching is only needed for AArch64 (since
@@ -164,6 +175,10 @@ static inline bool is_feat_sxpoe_supported(void) { return false; }
 static inline bool is_feat_s2pie_supported(void) { return false; }
 static inline bool is_feat_s1pie_supported(void) { return false; }
 static inline bool is_feat_sxpie_supported(void) { return false; }
+static inline bool is_feat_uao_present(void) { return false; }
+static inline bool is_feat_nmi_present(void) { return false; }
+static inline bool is_feat_ebep_present(void) { return false; }
+static inline bool is_feat_sebep_present(void) { return false; }

 static inline unsigned int read_feat_pmuv3_id_field(void)
 {
@@ -224,6 +224,7 @@ DEFINE_COPROCR_READ_FUNC(id_dfr0, ID_DFR0)
 DEFINE_COPROCR_READ_FUNC(id_dfr1, ID_DFR1)
 DEFINE_COPROCR_READ_FUNC(id_pfr0, ID_PFR0)
 DEFINE_COPROCR_READ_FUNC(id_pfr1, ID_PFR1)
+DEFINE_COPROCR_READ_FUNC(id_pfr2, ID_PFR2)
 DEFINE_COPROCR_READ_FUNC(isr, ISR)
 DEFINE_COPROCR_READ_FUNC(clidr, CLIDR)
 DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64)
@@ -74,6 +74,19 @@
  */
 #define INVALID_MPID		U(0xFFFFFFFF)

+/*******************************************************************************
+ * Definitions for Exception vector offsets
+ ******************************************************************************/
+#define CURRENT_EL_SP0		0x0
+#define CURRENT_EL_SPX		0x200
+#define LOWER_EL_AARCH64	0x400
+#define LOWER_EL_AARCH32	0x600
+
+#define SYNC_EXCEPTION		0x0
+#define IRQ_EXCEPTION		0x80
+#define FIQ_EXCEPTION		0x100
+#define SERROR_EXCEPTION	0x180
+
 /*******************************************************************************
  * Definitions for CPU system register interface to GICv3
  ******************************************************************************/
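These offsets follow the AArch64 vector table layout, which is how get_elr_el3() in bl31_traps.c turns VBAR_ELx into an entry point. A small illustration of the selection (hypothetical helper, mirroring the logic of that function):

#include <stdint.h>

/* Hypothetical sketch: pick the vector the injected UNDEF will enter.
 * Same EL on SP_ELx -> VBAR + 0x200 (CURRENT_EL_SPX);
 * same EL on SP_EL0 -> VBAR + 0x0   (CURRENT_EL_SP0);
 * lower EL, AArch64 -> VBAR + 0x400 (LOWER_EL_AARCH64).
 */
static uint64_t undef_entry(uint64_t vbar, int same_el, int handler_mode)
{
	if (same_el) {
		return vbar + (handler_mode ? 0x200 : 0x0);
	}
	return vbar + 0x400;
}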
@@ -231,6 +244,11 @@
 #define ID_AA64DFR0_PMUVER_PMUV3P7	U(7)
 #define ID_AA64DFR0_PMUVER_IMP_DEF	U(0xf)

+/* ID_AA64DFR0_EL1.SEBEP definitions */
+#define ID_AA64DFR0_SEBEP_SHIFT	U(24)
+#define ID_AA64DFR0_SEBEP_MASK	ULL(0xf)
+#define SEBEP_IMPLEMENTED	ULL(1)
+
 /* ID_AA64DFR0_EL1.PMS definitions (for ARMv8.2+) */
 #define ID_AA64DFR0_PMS_SHIFT	U(32)
 #define ID_AA64DFR0_PMS_MASK	ULL(0xf)
@@ -253,6 +271,11 @@
 #define ID_AA64DFR0_BRBE_MASK	ULL(0xf)
 #define ID_AA64DFR0_BRBE_SUPPORTED	ULL(1)

+/* ID_AA64DFR1_EL1 definitions */
+#define ID_AA64DFR1_EBEP_SHIFT	U(48)
+#define ID_AA64DFR1_EBEP_MASK	ULL(0xf)
+#define EBEP_IMPLEMENTED	ULL(1)
+
 /* ID_AA64ISAR0_EL1 definitions */
 #define ID_AA64ISAR0_RNDR_SHIFT	U(60)
 #define ID_AA64ISAR0_RNDR_MASK	ULL(0xf)
@@ -358,6 +381,9 @@
 #define ID_AA64MMFR2_EL1_CCIDX_MASK	ULL(0xf)
 #define ID_AA64MMFR2_EL1_CCIDX_LENGTH	U(4)

+#define ID_AA64MMFR2_EL1_UAO_SHIFT	U(4)
+#define ID_AA64MMFR2_EL1_UAO_MASK	ULL(0xf)
+
 #define ID_AA64MMFR2_EL1_CNP_SHIFT	U(0)
 #define ID_AA64MMFR2_EL1_CNP_MASK	ULL(0xf)
@@ -386,25 +412,29 @@
 #define ID_AA64MMFR3_EL1_TCRX_MASK	ULL(0xf)

 /* ID_AA64PFR1_EL1 definitions */
-#define ID_AA64PFR1_EL1_GCS_SHIFT	U(44)
-#define ID_AA64PFR1_EL1_GCS_MASK	ULL(0xf)
-
-#define ID_AA64PFR1_EL1_SSBS_SHIFT	U(4)
-#define ID_AA64PFR1_EL1_SSBS_MASK	ULL(0xf)
-
-#define SSBS_UNAVAILABLE	ULL(0)	/* No architectural SSBS support */
-
 #define ID_AA64PFR1_EL1_BT_SHIFT	U(0)
 #define ID_AA64PFR1_EL1_BT_MASK	ULL(0xf)
-
 #define BTI_IMPLEMENTED	ULL(1)	/* The BTI mechanism is implemented */

+#define ID_AA64PFR1_EL1_SSBS_SHIFT	U(4)
+#define ID_AA64PFR1_EL1_SSBS_MASK	ULL(0xf)
+#define SSBS_UNAVAILABLE	ULL(0)	/* No architectural SSBS support */
+
 #define ID_AA64PFR1_EL1_MTE_SHIFT	U(8)
 #define ID_AA64PFR1_EL1_MTE_MASK	ULL(0xf)

 #define ID_AA64PFR1_EL1_RNDR_TRAP_SHIFT	U(28)
 #define ID_AA64PFR1_EL1_RNDR_TRAP_MASK	U(0xf)

+#define ID_AA64PFR1_EL1_NMI_SHIFT	U(36)
+#define ID_AA64PFR1_EL1_NMI_MASK	ULL(0xf)
+#define NMI_IMPLEMENTED	ULL(1)
+
+#define ID_AA64PFR1_EL1_GCS_SHIFT	U(44)
+#define ID_AA64PFR1_EL1_GCS_MASK	ULL(0xf)
+#define GCS_IMPLEMENTED	ULL(1)
+
 #define ID_AA64PFR1_EL1_RNG_TRAP_SUPPORTED	ULL(0x1)
 #define ID_AA64PFR1_EL1_RNG_TRAP_NOT_SUPPORTED	ULL(0x0)
@@ -503,6 +533,7 @@
 #define SCTLR_TCF0_SHIFT	U(38)
 #define SCTLR_TCF0_MASK	ULL(3)
 #define SCTLR_ENTP2_BIT	(ULL(1) << 60)
+#define SCTLR_SPINTMASK_BIT	(ULL(1) << 62)

 /* Tag Check Faults in EL0 have no effect on the PE */
 #define SCTLR_TCF0_NO_EFFECT	U(0)
@@ -730,6 +761,10 @@
 #define DAIF_IRQ_BIT	(U(1) << 1)
 #define DAIF_ABT_BIT	(U(1) << 2)
 #define DAIF_DBG_BIT	(U(1) << 3)
+#define SPSR_V_BIT	(U(1) << 28)
+#define SPSR_C_BIT	(U(1) << 29)
+#define SPSR_Z_BIT	(U(1) << 30)
+#define SPSR_N_BIT	(U(1) << 31)
 #define SPSR_DAIF_SHIFT	U(6)
 #define SPSR_DAIF_MASK	U(0xf)
@@ -750,25 +785,32 @@
 #define SPSR_M_MASK	U(0x1)
 #define SPSR_M_AARCH64	U(0x0)
 #define SPSR_M_AARCH32	U(0x1)
+#define SPSR_M_EL1H	U(0x5)
 #define SPSR_M_EL2H	U(0x9)

 #define SPSR_EL_SHIFT	U(2)
 #define SPSR_EL_WIDTH	U(2)

+#define SPSR_BTYPE_SHIFT_AARCH64	U(10)
+#define SPSR_BTYPE_MASK_AARCH64	U(0x3)
 #define SPSR_SSBS_SHIFT_AARCH64	U(12)
 #define SPSR_SSBS_BIT_AARCH64	(ULL(1) << SPSR_SSBS_SHIFT_AARCH64)
 #define SPSR_SSBS_SHIFT_AARCH32	U(23)
 #define SPSR_SSBS_BIT_AARCH32	(ULL(1) << SPSR_SSBS_SHIFT_AARCH32)
+#define SPSR_ALLINT_BIT_AARCH64	BIT_64(13)
+#define SPSR_IL_BIT	BIT_64(20)
+#define SPSR_SS_BIT	BIT_64(21)
 #define SPSR_PAN_BIT	BIT_64(22)
+#define SPSR_UAO_BIT_AARCH64	BIT_64(23)
 #define SPSR_DIT_BIT	BIT(24)
 #define SPSR_TCO_BIT_AARCH64	BIT_64(25)
+#define SPSR_PM_BIT_AARCH64	BIT_64(32)
+#define SPSR_PPEND_BIT	BIT(33)
+#define SPSR_EXLOCK_BIT_AARCH64	BIT_64(34)
+#define SPSR_NZCV	(SPSR_V_BIT | SPSR_C_BIT | SPSR_Z_BIT | SPSR_N_BIT)

 #define DISABLE_ALL_EXCEPTIONS \
 	(DAIF_FIQ_BIT | DAIF_IRQ_BIT | DAIF_ABT_BIT | DAIF_DBG_BIT)

 #define DISABLE_INTERRUPTS	(DAIF_FIQ_BIT | DAIF_IRQ_BIT)

 /*
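Worked example of what create_spsr() in bl31_traps.c assembles from these definitions: targeting EL1h with no optional features present, M[3:0] = 0x5 (SPSR_M_EL1H) and DAIF = 0xf shifted left by SPSR_DAIF_SHIFT = 6, so the injected SPSR is 0x3c5. A standalone check (hypothetical test code, not part of the patch):

#include <assert.h>
#include <stdint.h>

int main(void)
{
	const uint64_t spsr_m_el1h = 0x5;	/* SPSR_M_EL1H */
	const uint64_t daif = 0xfULL << 6;	/* SPSR_DAIF_MASK << SPSR_DAIF_SHIFT */

	/* Baseline PSTATE for an EL1h target with all exceptions masked */
	uint64_t spsr = spsr_m_el1h | daif;

	assert(spsr == 0x3c5);
	return 0;
}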
@@ -946,6 +988,7 @@
 #define ESR_EC_LENGTH	U(6)
 #define ESR_ISS_SHIFT	U(0)
 #define ESR_ISS_LENGTH	U(25)
+#define ESR_IL_BIT	(U(1) << 25)
 #define EC_UNKNOWN	U(0x0)
 #define EC_WFE_WFI	U(0x1)
 #define EC_AARCH32_CP15_MRC_MCR	U(0x3)
@@ -1408,6 +1451,9 @@
 ******************************************************************************/
 #define GCSCR_EL2	S3_4_C2_C5_0
 #define GCSPR_EL2	S3_4_C2_C5_1
+#define GCSCR_EL1	S3_0_C2_C5_0
+
+#define GCSCR_EXLOCK_EN_BIT	(UL(1) << 6)

 /*******************************************************************************
  * Definitions for DynamicIQ Shared Unit registers
@@ -42,6 +42,11 @@ static inline bool is_armv7_gentimer_present(void)

 CREATE_FEATURE_FUNCS(feat_pan, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_PAN_SHIFT,
 		     ENABLE_FEAT_PAN)
+static inline bool is_feat_pan_present(void)
+{
+	return read_feat_pan_id_field() != 0U;
+}
+
 CREATE_FEATURE_FUNCS(feat_vhe, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_VHE_SHIFT,
 		     ENABLE_FEAT_VHE)
@@ -51,6 +56,12 @@ static inline bool is_armv8_2_ttcnp_present(void)
 		ID_AA64MMFR2_EL1_CNP_MASK) != 0U;
 }

+static inline bool is_feat_uao_present(void)
+{
+	return ((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_UAO_SHIFT) &
+		ID_AA64MMFR2_EL1_UAO_MASK) != 0U;
+}
+
 static inline bool is_feat_pacqarma3_present(void)
 {
 	uint64_t mask_id_aa64isar2 =
@@ -89,6 +100,42 @@ static inline bool is_armv8_5_bti_present(void)
 		ID_AA64PFR1_EL1_BT_MASK) == BTI_IMPLEMENTED;
 }

+static inline unsigned int get_armv8_5_mte_support(void)
+{
+	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_MTE_SHIFT) &
+		ID_AA64PFR1_EL1_MTE_MASK);
+}
+
+static inline bool is_feat_ssbs_present(void)
+{
+	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
+		ID_AA64PFR1_EL1_SSBS_MASK) != SSBS_UNAVAILABLE;
+}
+
+static inline bool is_feat_nmi_present(void)
+{
+	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_NMI_SHIFT) &
+		ID_AA64PFR1_EL1_NMI_MASK) == NMI_IMPLEMENTED;
+}
+
+static inline bool is_feat_gcs_present(void)
+{
+	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_GCS_SHIFT) &
+		ID_AA64PFR1_EL1_GCS_MASK) == GCS_IMPLEMENTED;
+}
+
+static inline bool is_feat_ebep_present(void)
+{
+	return ((read_id_aa64dfr1_el1() >> ID_AA64DFR1_EBEP_SHIFT) &
+		ID_AA64DFR1_EBEP_MASK) == EBEP_IMPLEMENTED;
+}
+
+static inline bool is_feat_sebep_present(void)
+{
+	return ((read_id_aa64dfr0_el1() >> ID_AA64DFR0_SEBEP_SHIFT) &
+		ID_AA64DFR0_SEBEP_MASK) == SEBEP_IMPLEMENTED;
+}
+
 CREATE_FEATURE_FUNCS(feat_mte, id_aa64pfr1_el1, ID_AA64PFR1_EL1_MTE_SHIFT,
 		     ENABLE_FEAT_MTE)
 CREATE_FEATURE_FUNCS_VER(feat_mte2, read_feat_mte_id_field, MTE_IMPLEMENTED_ELX,
@@ -272,6 +272,7 @@ DEFINE_IDREG_READ_FUNC(id_aa64pfr0_el1)
 DEFINE_IDREG_READ_FUNC(id_aa64pfr1_el1)
 DEFINE_RENAME_IDREG_READ_FUNC(id_aa64pfr2_el1, ID_AA64PFR2_EL1)
 DEFINE_IDREG_READ_FUNC(id_aa64dfr0_el1)
+DEFINE_IDREG_READ_FUNC(id_aa64dfr1_el1)
 DEFINE_IDREG_READ_FUNC(id_afr0_el1)
 DEFINE_SYSREG_READ_FUNC(CurrentEl)
 DEFINE_SYSREG_READ_FUNC(ctr_el0)
@@ -646,6 +647,7 @@ DEFINE_RENAME_SYSREG_RW_FUNCS(por_el2, POR_EL2)
 /* FEAT_GCS Registers */
 DEFINE_RENAME_SYSREG_RW_FUNCS(gcscr_el2, GCSCR_EL2)
 DEFINE_RENAME_SYSREG_RW_FUNCS(gcspr_el2, GCSPR_EL2)
+DEFINE_RENAME_SYSREG_RW_FUNCS(gcscr_el1, GCSCR_EL1)

 /* DynamIQ Shared Unit power management */
 DEFINE_RENAME_SYSREG_RW_FUNCS(clusterpwrdn_el1, CLUSTERPWRDN_EL1)
@@ -55,6 +55,9 @@ static inline bool is_sysreg_iss_write(uint64_t esr)
  */
 int handle_sysreg_trap(uint64_t esr_el3, cpu_context_t *ctx);

+/* Handler for injecting UNDEF exception to lower EL */
+void inject_undef64(cpu_context_t *ctx);
+
 /* Prototypes for system register emulation handlers provided by platforms. */
 int plat_handle_impdef_trap(uint64_t esr_el3, cpu_context_t *ctx);
 int plat_handle_rng_trap(uint64_t esr_el3, cpu_context_t *ctx);
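With inject_undef64() exposed here, a trap handler can simply decline a trap: any unhandled result now ends in an UNDEF at the lower EL (for AArch64 callers) instead of an EL3 panic. A hypothetical platform handler sketch, assuming only the declarations visible in this header:

#include <bl31/sync_handle.h>

/* Hypothetical: silently discard writes to an emulated register and
 * leave reads unhandled, letting the generic code inject UNDEF into
 * the lower EL.
 */
int plat_handle_impdef_trap(uint64_t esr_el3, cpu_context_t *ctx)
{
	if (is_sysreg_iss_write(esr_el3)) {
		return TRAP_RET_CONTINUE;	/* skip the trapping write */
	}

	return TRAP_RET_UNHANDLED;	/* read: UNDEF into the lower EL */
}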