refactor(cpufeat): separate the EL2 and EL3 enablement code

Combining the EL2 and EL3 enablement code forces it to be called at
el3_exit, which is the only place with enough context to decide what
needs to be set. Decouple the two paths so that each can be called
independently from elsewhere.

Signed-off-by: Boyan Karatotev <boyan.karatotev@arm.com>
Change-Id: I147764c42771e7d4100699ec8fae98dac0a505c0
This commit is contained in:
Boyan Karatotev 2023-02-16 15:12:45 +00:00 committed by Jayanth Dodderi Chidanand
parent 8e31faa05b
commit 60d330dc4d
23 changed files with 248 additions and 147 deletions

View file

@ -8,9 +8,9 @@
#define BRBE_H #define BRBE_H
#if ENABLE_BRBE_FOR_NS #if ENABLE_BRBE_FOR_NS
void brbe_enable(void); void brbe_init_el3(void);
#else #else
static inline void brbe_enable(void) static inline void brbe_init_el3(void)
{ {
} }
#endif /* ENABLE_BRBE_FOR_NS */ #endif /* ENABLE_BRBE_FOR_NS */

View file

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2018, ARM Limited and Contributors. All rights reserved. * Copyright (c) 2018-2023, Arm Limited and Contributors. All rights reserved.
* *
* SPDX-License-Identifier: BSD-3-Clause * SPDX-License-Identifier: BSD-3-Clause
*/ */
@ -10,11 +10,15 @@
#include <stdbool.h> #include <stdbool.h>
#if ENABLE_MPAM_FOR_LOWER_ELS #if ENABLE_MPAM_FOR_LOWER_ELS
void mpam_enable(bool el2_unused); void mpam_init_el3(void);
void mpam_init_el2_unused(void);
#else #else
static inline void mpam_enable(bool el2_unused) static inline void mpam_init_el3(void)
{ {
} }
#endif static inline void mpam_init_el2_unused(void)
{
}
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */
#endif /* MPAM_H */ #endif /* MPAM_H */

View file

@ -9,7 +9,7 @@
#include <context.h> #include <context.h>
void pmuv3_disable_el3(void); void pmuv3_init_el3(void);
#ifdef __aarch64__ #ifdef __aarch64__
void pmuv3_enable(cpu_context_t *ctx); void pmuv3_enable(cpu_context_t *ctx);

View file

@ -22,11 +22,19 @@
#if ENABLE_SME_FOR_NS #if ENABLE_SME_FOR_NS
void sme_enable(cpu_context_t *context); void sme_enable(cpu_context_t *context);
void sme_init_el3(void);
void sme_init_el2_unused(void);
void sme_disable(cpu_context_t *context); void sme_disable(cpu_context_t *context);
#else #else
static inline void sme_enable(cpu_context_t *context) static inline void sme_enable(cpu_context_t *context)
{ {
} }
static inline void sme_init_el3(void)
{
}
static inline void sme_init_el2_unused(void)
{
}
static inline void sme_disable(cpu_context_t *context) static inline void sme_disable(cpu_context_t *context)
{ {
} }

View file

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved. * Copyright (c) 2017-2023, Arm Limited and Contributors. All rights reserved.
* *
* SPDX-License-Identifier: BSD-3-Clause * SPDX-License-Identifier: BSD-3-Clause
*/ */
@ -10,15 +10,19 @@
#include <stdbool.h> #include <stdbool.h>
#if ENABLE_SPE_FOR_NS #if ENABLE_SPE_FOR_NS
void spe_enable(bool el2_unused); void spe_init_el3(void);
void spe_init_el2_unused(void);
void spe_disable(void); void spe_disable(void);
#else #else
static inline void spe_enable(bool el2_unused) static inline void spe_init_el3(void)
{
}
static inline void spe_init_el2_unused(void)
{ {
} }
static inline void spe_disable(void) static inline void spe_disable(void)
{ {
} }
#endif #endif /* ENABLE_SPE_FOR_NS */
#endif /* SPE_H */ #endif /* SPE_H */

View file

@ -11,11 +11,15 @@
#if (ENABLE_SME_FOR_NS || ENABLE_SVE_FOR_NS) #if (ENABLE_SME_FOR_NS || ENABLE_SVE_FOR_NS)
void sve_enable(cpu_context_t *context); void sve_enable(cpu_context_t *context);
void sve_init_el2_unused(void);
void sve_disable(cpu_context_t *context); void sve_disable(cpu_context_t *context);
#else #else
static inline void sve_enable(cpu_context_t *context) static inline void sve_enable(cpu_context_t *context)
{ {
} }
static inline void sve_init_el2_unused(void)
{
}
static inline void sve_disable(cpu_context_t *context) static inline void sve_disable(cpu_context_t *context)
{ {
} }

View file

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2021, Arm Limited. All rights reserved. * Copyright (c) 2021-2023, Arm Limited. All rights reserved.
* *
* SPDX-License-Identifier: BSD-3-Clause * SPDX-License-Identifier: BSD-3-Clause
*/ */
@ -10,10 +10,12 @@
#include <context.h> #include <context.h>
#if ENABLE_SYS_REG_TRACE_FOR_NS #if ENABLE_SYS_REG_TRACE_FOR_NS
#if __aarch64__ #if __aarch64__
void sys_reg_trace_enable(cpu_context_t *context); void sys_reg_trace_enable(cpu_context_t *context);
void sys_reg_trace_init_el2_unused(void);
#else #else
void sys_reg_trace_enable(void); void sys_reg_trace_init_el3(void);
#endif /* __aarch64__ */ #endif /* __aarch64__ */
#else /* !ENABLE_SYS_REG_TRACE_FOR_NS */ #else /* !ENABLE_SYS_REG_TRACE_FOR_NS */
@ -22,11 +24,18 @@ void sys_reg_trace_enable(void);
static inline void sys_reg_trace_enable(cpu_context_t *context) static inline void sys_reg_trace_enable(cpu_context_t *context)
{ {
} }
static inline void sys_reg_trace_disable(cpu_context_t *context)
{
}
static inline void sys_reg_trace_init_el2_unused(void)
{
}
#else #else
static inline void sys_reg_trace_enable(void) static inline void sys_reg_trace_init_el3(void)
{ {
} }
#endif /* __aarch64__ */ #endif /* __aarch64__ */
#endif /* ENABLE_SYS_REG_TRACE_FOR_NS */ #endif /* ENABLE_SYS_REG_TRACE_FOR_NS */
#endif /* SYS_REG_TRACE_H */ #endif /* SYS_REG_TRACE_H */

View file

@ -8,9 +8,13 @@
#define TRBE_H #define TRBE_H
#if ENABLE_TRBE_FOR_NS #if ENABLE_TRBE_FOR_NS
void trbe_enable(void); void trbe_init_el3(void);
void trbe_init_el2_unused(void);
#else #else
static inline void trbe_enable(void) static inline void trbe_init_el3(void)
{
}
static inline void trbe_init_el2_unused(void)
{ {
} }
#endif /* ENABLE_TRBE_FOR_NS */ #endif /* ENABLE_TRBE_FOR_NS */

View file

@ -8,9 +8,13 @@
#define TRF_H #define TRF_H
#if ENABLE_TRF_FOR_NS #if ENABLE_TRF_FOR_NS
void trf_enable(void); void trf_init_el3(void);
void trf_init_el2_unused(void);
#else #else
static inline void trf_enable(void) static inline void trf_init_el3(void)
{
}
static inline void trf_init_el2_unused(void)
{ {
} }
#endif /* ENABLE_TRF_FOR_NS */ #endif /* ENABLE_TRF_FOR_NS */

View file

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2016-2023, ARM Limited and Contributors. All rights reserved. * Copyright (c) 2016-2023, Arm Limited and Contributors. All rights reserved.
* *
* SPDX-License-Identifier: BSD-3-Clause * SPDX-License-Identifier: BSD-3-Clause
*/ */
@ -142,19 +142,19 @@ static void enable_extensions_nonsecure(bool el2_unused)
} }
if (is_feat_sys_reg_trace_supported()) { if (is_feat_sys_reg_trace_supported()) {
sys_reg_trace_enable(); sys_reg_trace_init_el3();
} }
if (is_feat_trf_supported()) { if (is_feat_trf_supported()) {
trf_enable(); trf_init_el3();
} }
/* /*
* Also applies to PMU < v3. The PMU is only disabled for EL3 and Secure * Also applies to PMU < v3. The PMU is only disabled for EL3 and Secure
* state execution. This does not affect lower NS ELs. * state execution. This does not affect lower NS ELs.
*/ */
pmuv3_disable_el3(); pmuv3_init_el3();
#endif #endif /* IMAGE_BL32 */
} }
/******************************************************************************* /*******************************************************************************

View file

@ -505,43 +505,10 @@ void cm_setup_context(cpu_context_t *ctx, const entry_point_info_t *ep)
static void manage_extensions_nonsecure_mixed(bool el2_unused, cpu_context_t *ctx) static void manage_extensions_nonsecure_mixed(bool el2_unused, cpu_context_t *ctx)
{ {
#if IMAGE_BL31 #if IMAGE_BL31
if (is_feat_spe_supported()) {
spe_enable(el2_unused);
}
if (is_feat_amu_supported()) { if (is_feat_amu_supported()) {
amu_enable(el2_unused, ctx); amu_enable(el2_unused, ctx);
} }
#endif /* IMAGE_BL31 */
/* Enable SVE and FPU/SIMD */
if (is_feat_sve_supported()) {
sve_enable(ctx);
}
if (is_feat_sme_supported()) {
sme_enable(ctx);
}
if (is_feat_mpam_supported()) {
mpam_enable(el2_unused);
}
if (is_feat_trbe_supported()) {
trbe_enable();
}
if (is_feat_brbe_supported()) {
brbe_enable();
}
if (is_feat_sys_reg_trace_supported()) {
sys_reg_trace_enable(ctx);
}
if (is_feat_trf_supported()) {
trf_enable();
}
#endif
} }
/******************************************************************************* /*******************************************************************************
@ -552,7 +519,31 @@ static void manage_extensions_nonsecure_mixed(bool el2_unused, cpu_context_t *ct
#if IMAGE_BL31 #if IMAGE_BL31
void cm_manage_extensions_el3(void) void cm_manage_extensions_el3(void)
{ {
pmuv3_disable_el3(); if (is_feat_spe_supported()) {
spe_init_el3();
}
if (is_feat_sme_supported()) {
sme_init_el3();
}
if (is_feat_mpam_supported()) {
mpam_init_el3();
}
if (is_feat_trbe_supported()) {
trbe_init_el3();
}
if (is_feat_brbe_supported()) {
brbe_init_el3();
}
if (is_feat_trf_supported()) {
trf_init_el3();
}
pmuv3_init_el3();
} }
#endif /* IMAGE_BL31 */ #endif /* IMAGE_BL31 */
@ -562,6 +553,19 @@ void cm_manage_extensions_el3(void)
static void manage_extensions_nonsecure(cpu_context_t *ctx) static void manage_extensions_nonsecure(cpu_context_t *ctx)
{ {
#if IMAGE_BL31 #if IMAGE_BL31
/* Enable SVE and FPU/SIMD */
if (is_feat_sve_supported()) {
sve_enable(ctx);
}
if (is_feat_sme_supported()) {
sme_enable(ctx);
}
if (is_feat_sys_reg_trace_supported()) {
sys_reg_trace_enable(ctx);
}
pmuv3_enable(ctx); pmuv3_enable(ctx);
#endif /* IMAGE_BL31 */ #endif /* IMAGE_BL31 */
} }
@ -573,7 +577,35 @@ static void manage_extensions_nonsecure(cpu_context_t *ctx)
static void manage_extensions_nonsecure_el2_unused(void) static void manage_extensions_nonsecure_el2_unused(void)
{ {
#if IMAGE_BL31 #if IMAGE_BL31
if (is_feat_spe_supported()) {
spe_init_el2_unused();
}
if (is_feat_mpam_supported()) {
mpam_init_el2_unused();
}
if (is_feat_trbe_supported()) {
trbe_init_el2_unused();
}
if (is_feat_sys_reg_trace_supported()) {
sys_reg_trace_init_el2_unused();
}
if (is_feat_trf_supported()) {
trf_init_el2_unused();
}
pmuv3_init_el2_unused(); pmuv3_init_el2_unused();
if (is_feat_sve_supported()) {
sve_init_el2_unused();
}
if (is_feat_sme_supported()) {
sme_init_el2_unused();
}
#endif /* IMAGE_BL31 */ #endif /* IMAGE_BL31 */
} }
@ -606,6 +638,7 @@ static void manage_extensions_secure(cpu_context_t *ctx)
* Enable SME, SVE, FPU/SIMD in secure context, secure manager * Enable SME, SVE, FPU/SIMD in secure context, secure manager
* must ensure SME, SVE, and FPU/SIMD context properly managed. * must ensure SME, SVE, and FPU/SIMD context properly managed.
*/ */
sme_init_el3();
sme_enable(ctx); sme_enable(ctx);
} else { } else {
/* /*
@ -719,24 +752,8 @@ void cm_prepare_el3_exit(uint32_t security_state)
* Initialise CPTR_EL2 setting all fields rather than * Initialise CPTR_EL2 setting all fields rather than
* relying on the hw. All fields have architecturally * relying on the hw. All fields have architecturally
* UNKNOWN reset values. * UNKNOWN reset values.
*
* CPTR_EL2.TCPAC: Set to zero so that Non-secure EL1
* accesses to the CPACR_EL1 or CPACR from both
* Execution states do not trap to EL2.
*
* CPTR_EL2.TTA: Set to zero so that Non-secure System
* register accesses to the trace registers from both
* Execution states do not trap to EL2.
* If PE trace unit System registers are not implemented
* then this bit is reserved, and must be set to zero.
*
* CPTR_EL2.TFP: Set to zero so that Non-secure accesses
* to SIMD and floating-point functionality from both
* Execution states do not trap to EL2.
*/ */
write_cptr_el2(CPTR_EL2_RESET_VAL & write_cptr_el2(CPTR_EL2_RESET_VAL);
~(CPTR_EL2_TCPAC_BIT | CPTR_EL2_TTA_BIT
| CPTR_EL2_TFP_BIT));
/* /*
* Initialise CNTHCTL_EL2. All fields are * Initialise CNTHCTL_EL2. All fields are
@ -787,16 +804,6 @@ void cm_prepare_el3_exit(uint32_t security_state)
* relying on hw. Some fields are architecturally * relying on hw. Some fields are architecturally
* UNKNOWN on reset. * UNKNOWN on reset.
* *
* MDCR_EL2.TTRF: Set to zero so that access to Trace
* Filter Control register TRFCR_EL1 at EL1 is not
* trapped to EL2. This bit is RES0 in versions of
* the architecture earlier than ARMv8.4.
*
* MDCR_EL2.TPMS: Set to zero so that accesses to
* Statistical Profiling control registers from EL1
* do not trap to EL2. This bit is RES0 when SPE is
* not implemented.
*
* MDCR_EL2.TDRA: Set to zero so that Non-secure EL0 and * MDCR_EL2.TDRA: Set to zero so that Non-secure EL0 and
* EL1 System register accesses to the Debug ROM * EL1 System register accesses to the Debug ROM
* registers are not trapped to EL2. * registers are not trapped to EL2.
@ -810,16 +817,10 @@ void cm_prepare_el3_exit(uint32_t security_state)
* *
* MDCR_EL2.TDE: Set to zero so that debug exceptions * MDCR_EL2.TDE: Set to zero so that debug exceptions
* are not routed to EL2. * are not routed to EL2.
*
* MDCR_EL2.E2TB: Set to zero so that the trace Buffer
* owning exception level is NS-EL1 and, tracing is
* prohibited at NS-EL2. These bits are RES0 when
* FEAT_TRBE is not implemented.
*/ */
mdcr_el2 = ((MDCR_EL2_RESET_VAL) & ~(MDCR_EL2_TTRF | mdcr_el2 = ((MDCR_EL2_RESET_VAL) &
MDCR_EL2_TDRA_BIT | MDCR_EL2_TDOSA_BIT | ~(MDCR_EL2_TDRA_BIT | MDCR_EL2_TDOSA_BIT |
MDCR_EL2_TDA_BIT | MDCR_EL2_TDE_BIT | MDCR_EL2_TDA_BIT | MDCR_EL2_TDE_BIT));
MDCR_EL2_E2TB(MDCR_EL2_E2TB_EL1)));
write_mdcr_el2(mdcr_el2); write_mdcr_el2(mdcr_el2);

View file

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2022, Arm Limited. All rights reserved. * Copyright (c) 2022-2023, Arm Limited. All rights reserved.
* *
* SPDX-License-Identifier: BSD-3-Clause * SPDX-License-Identifier: BSD-3-Clause
*/ */
@ -7,8 +7,9 @@
#include <arch.h> #include <arch.h>
#include <arch_features.h> #include <arch_features.h>
#include <arch_helpers.h> #include <arch_helpers.h>
#include <lib/extensions/brbe.h>
void brbe_enable(void) void brbe_init_el3(void)
{ {
uint64_t val; uint64_t val;

View file

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved. * Copyright (c) 2018-2023, Arm Limited and Contributors. All rights reserved.
* *
* SPDX-License-Identifier: BSD-3-Clause * SPDX-License-Identifier: BSD-3-Clause
*/ */
@ -11,7 +11,7 @@
#include <arch_helpers.h> #include <arch_helpers.h>
#include <lib/extensions/mpam.h> #include <lib/extensions/mpam.h>
void mpam_enable(bool el2_unused) void mpam_init_el3(void)
{ {
/* /*
* Enable MPAM, and disable trapping to EL3 when lower ELs access their * Enable MPAM, and disable trapping to EL3 when lower ELs access their
@ -19,15 +19,18 @@ void mpam_enable(bool el2_unused)
*/ */
write_mpam3_el3(MPAM3_EL3_MPAMEN_BIT); write_mpam3_el3(MPAM3_EL3_MPAMEN_BIT);
/* }
* If EL2 is implemented but unused, disable trapping to EL2 when lower
* ELs access their own MPAM registers. /*
*/ * If EL2 is implemented but unused, disable trapping to EL2 when lower ELs
if (el2_unused) { * access their own MPAM registers.
write_mpam2_el2(0ULL); */
void mpam_init_el2_unused(void)
if ((read_mpamidr_el1() & MPAMIDR_HAS_HCR_BIT) != 0U) { {
write_mpamhcr_el2(0ULL); write_mpam2_el2(0ULL);
}
} if ((read_mpamidr_el1() & MPAMIDR_HAS_HCR_BIT) != 0U) {
write_mpamhcr_el2(0ULL);
}
} }

View file

@ -29,7 +29,7 @@ static u_register_t mtpmu_disable_el3(u_register_t sdcr)
* Applies to all PMU versions. Name is PMUv3 for compatibility with aarch64 and * Applies to all PMU versions. Name is PMUv3 for compatibility with aarch64 and
* to not clash with platforms which reuse the PMU name * to not clash with platforms which reuse the PMU name
*/ */
void pmuv3_disable_el3(void) void pmuv3_init_el3(void)
{ {
u_register_t sdcr = read_sdcr(); u_register_t sdcr = read_sdcr();

View file

@ -48,7 +48,7 @@ static u_register_t mtpmu_disable_el3(u_register_t mdcr_el3)
return mdcr_el3; return mdcr_el3;
} }
void pmuv3_disable_el3(void) void pmuv3_init_el3(void)
{ {
u_register_t mdcr_el3 = read_mdcr_el3(); u_register_t mdcr_el3 = read_mdcr_el3();

View file

@ -17,7 +17,6 @@
void sme_enable(cpu_context_t *context) void sme_enable(cpu_context_t *context)
{ {
u_register_t reg; u_register_t reg;
u_register_t cptr_el3;
el3_state_t *state; el3_state_t *state;
/* Get the context state. */ /* Get the context state. */
@ -32,9 +31,14 @@ void sme_enable(cpu_context_t *context)
reg = read_ctx_reg(state, CTX_SCR_EL3); reg = read_ctx_reg(state, CTX_SCR_EL3);
reg |= SCR_ENTP2_BIT; reg |= SCR_ENTP2_BIT;
write_ctx_reg(state, CTX_SCR_EL3, reg); write_ctx_reg(state, CTX_SCR_EL3, reg);
}
/* Set CPTR_EL3.ESM bit so we can write SMCR_EL3 without trapping. */ void sme_init_el3(void)
cptr_el3 = read_cptr_el3(); {
u_register_t cptr_el3 = read_cptr_el3();
u_register_t smcr_el3;
/* Set CPTR_EL3.ESM bit so we can access SMCR_EL3 without trapping. */
write_cptr_el3(cptr_el3 | ESM_BIT); write_cptr_el3(cptr_el3 | ESM_BIT);
isb(); isb();
@ -43,11 +47,10 @@ void sme_enable(cpu_context_t *context)
* to be the least restrictive, then lower ELs can restrict as needed * to be the least restrictive, then lower ELs can restrict as needed
* using SMCR_EL2 and SMCR_EL1. * using SMCR_EL2 and SMCR_EL1.
*/ */
reg = SMCR_ELX_LEN_MAX; smcr_el3 = SMCR_ELX_LEN_MAX;
if (read_feat_sme_fa64_id_field() != 0U) { if (read_feat_sme_fa64_id_field() != 0U) {
VERBOSE("[SME] FA64 enabled\n"); VERBOSE("[SME] FA64 enabled\n");
reg |= SMCR_ELX_FA64_BIT; smcr_el3 |= SMCR_ELX_FA64_BIT;
} }
/* /*
@ -58,15 +61,24 @@ void sme_enable(cpu_context_t *context)
*/ */
if (is_feat_sme2_supported()) { if (is_feat_sme2_supported()) {
VERBOSE("SME2 enabled\n"); VERBOSE("SME2 enabled\n");
reg |= SMCR_ELX_EZT0_BIT; smcr_el3 |= SMCR_ELX_EZT0_BIT;
} }
write_smcr_el3(reg); write_smcr_el3(smcr_el3);
/* Reset CPTR_EL3 value. */ /* Reset CPTR_EL3 value. */
write_cptr_el3(cptr_el3); write_cptr_el3(cptr_el3);
isb(); isb();
} }
void sme_init_el2_unused(void)
{
/*
* CPTR_EL2.TCPAC: Set to zero so that Non-secure EL1 accesses to the
* CPACR_EL1 or CPACR from both Execution states do not trap to EL2.
*/
write_cptr_el2(read_cptr_el2() & ~CPTR_EL2_TCPAC_BIT);
}
void sme_disable(cpu_context_t *context) void sme_disable(cpu_context_t *context)
{ {
u_register_t reg; u_register_t reg;

View file

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2017-2022, Arm Limited and Contributors. All rights reserved. * Copyright (c) 2017-2023, Arm Limited and Contributors. All rights reserved.
* *
* SPDX-License-Identifier: BSD-3-Clause * SPDX-License-Identifier: BSD-3-Clause
*/ */
@ -21,25 +21,10 @@ static inline void psb_csync(void)
__asm__ volatile("hint #17"); __asm__ volatile("hint #17");
} }
void spe_enable(bool el2_unused) void spe_init_el3(void)
{ {
uint64_t v; uint64_t v;
if (el2_unused) {
/*
* MDCR_EL2.TPMS (ARM v8.2): Do not trap statistical
* profiling controls to EL2.
*
* MDCR_EL2.E2PB (ARM v8.2): SPE enabled in Non-secure
* state. Accesses to profiling buffer controls at
* Non-secure EL1 are not trapped to EL2.
*/
v = read_mdcr_el2();
v &= ~MDCR_EL2_TPMS;
v |= MDCR_EL2_E2PB(MDCR_EL2_E2PB_EL1);
write_mdcr_el2(v);
}
/* /*
* MDCR_EL2.NSPB (ARM v8.2): SPE enabled in Non-secure state * MDCR_EL2.NSPB (ARM v8.2): SPE enabled in Non-secure state
* and disabled in secure state. Accesses to SPE registers at * and disabled in secure state. Accesses to SPE registers at
@ -55,6 +40,24 @@ void spe_enable(bool el2_unused)
write_mdcr_el3(v); write_mdcr_el3(v);
} }
void spe_init_el2_unused(void)
{
uint64_t v;
/*
* MDCR_EL2.TPMS (ARM v8.2): Do not trap statistical
* profiling controls to EL2.
*
* MDCR_EL2.E2PB (ARM v8.2): SPE enabled in Non-secure
* state. Accesses to profiling buffer controls at
* Non-secure EL1 are not trapped to EL2.
*/
v = read_mdcr_el2();
v &= ~MDCR_EL2_TPMS;
v |= MDCR_EL2_E2PB(MDCR_EL2_E2PB_EL1);
write_mdcr_el2(v);
}
void spe_disable(void) void spe_disable(void)
{ {
uint64_t v; uint64_t v;

View file

@ -37,6 +37,16 @@ void sve_enable(cpu_context_t *context)
(ZCR_EL3_LEN_MASK & CONVERT_SVE_LENGTH(SVE_VECTOR_LEN))); (ZCR_EL3_LEN_MASK & CONVERT_SVE_LENGTH(SVE_VECTOR_LEN)));
} }
void sve_init_el2_unused(void)
{
/*
* CPTR_EL2.TFP: Set to zero so that Non-secure accesses to Advanced
* SIMD and floating-point functionality from both Execution states do
* not trap to EL2.
*/
write_cptr_el2(read_cptr_el2() & ~CPTR_EL2_TFP_BIT);
}
void sve_disable(cpu_context_t *context) void sve_disable(cpu_context_t *context)
{ {
u_register_t reg; u_register_t reg;

View file

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2021, Arm Limited. All rights reserved. * Copyright (c) 2021-2023, Arm Limited. All rights reserved.
* *
* SPDX-License-Identifier: BSD-3-Clause * SPDX-License-Identifier: BSD-3-Clause
*/ */
@ -10,7 +10,7 @@
#include <arch_helpers.h> #include <arch_helpers.h>
#include <lib/extensions/sys_reg_trace.h> #include <lib/extensions/sys_reg_trace.h>
void sys_reg_trace_enable(void) void sys_reg_trace_init_el3(void)
{ {
uint32_t val; uint32_t val;

View file

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2021, Arm Limited. All rights reserved. * Copyright (c) 2021-2023, Arm Limited. All rights reserved.
* *
* SPDX-License-Identifier: BSD-3-Clause * SPDX-License-Identifier: BSD-3-Clause
*/ */
@ -24,3 +24,14 @@ void sys_reg_trace_enable(cpu_context_t *ctx)
val &= ~TTA_BIT; val &= ~TTA_BIT;
write_ctx_reg(get_el3state_ctx(ctx), CTX_CPTR_EL3, val); write_ctx_reg(get_el3state_ctx(ctx), CTX_CPTR_EL3, val);
} }
void sys_reg_trace_init_el2_unused(void)
{
/*
* CPTR_EL2.TTA: Set to zero so that Non-secure System register accesses
* to the trace registers from both Execution states do not trap to
* EL2. If PE trace unit System registers are not implemented then this
* bit is reserved, and must be set to zero.
*/
write_cptr_el2(read_cptr_el2() & ~CPTR_EL2_TTA_BIT);
}

View file

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2021-2022, Arm Limited. All rights reserved. * Copyright (c) 2021-2023, Arm Limited. All rights reserved.
* *
* SPDX-License-Identifier: BSD-3-Clause * SPDX-License-Identifier: BSD-3-Clause
*/ */
@ -19,9 +19,9 @@ static void tsb_csync(void)
__asm__ volatile("hint #18"); __asm__ volatile("hint #18");
} }
void trbe_enable(void) void trbe_init_el3(void)
{ {
uint64_t val; u_register_t val;
/* /*
* MDCR_EL3.NSTB = 0b11 * MDCR_EL3.NSTB = 0b11
@ -34,6 +34,17 @@ void trbe_enable(void)
write_mdcr_el3(val); write_mdcr_el3(val);
} }
void trbe_init_el2_unused(void)
{
/*
* MDCR_EL2.E2TB: Set to zero so that the trace Buffer
* owning exception level is NS-EL1 and, tracing is
* prohibited at NS-EL2. These bits are RES0 when
* FEAT_TRBE is not implemented.
*/
write_mdcr_el2(read_mdcr_el2() & ~MDCR_EL2_E2TB(MDCR_EL2_E2TB_EL1));
}
static void *trbe_drain_trace_buffers_hook(const void *arg __unused) static void *trbe_drain_trace_buffers_hook(const void *arg __unused)
{ {
if (is_feat_trbe_supported()) { if (is_feat_trbe_supported()) {

View file

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2021, Arm Limited. All rights reserved. * Copyright (c) 2021-2023, Arm Limited. All rights reserved.
* *
* SPDX-License-Identifier: BSD-3-Clause * SPDX-License-Identifier: BSD-3-Clause
*/ */
@ -10,7 +10,7 @@
#include <arch_helpers.h> #include <arch_helpers.h>
#include <lib/extensions/trf.h> #include <lib/extensions/trf.h>
void trf_enable(void) void trf_init_el3(void)
{ {
uint32_t val; uint32_t val;

View file

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2021, Arm Limited. All rights reserved. * Copyright (c) 2021-2023, Arm Limited. All rights reserved.
* *
* SPDX-License-Identifier: BSD-3-Clause * SPDX-License-Identifier: BSD-3-Clause
*/ */
@ -9,9 +9,9 @@
#include <arch_helpers.h> #include <arch_helpers.h>
#include <lib/extensions/trf.h> #include <lib/extensions/trf.h>
void trf_enable(void) void trf_init_el3(void)
{ {
uint64_t val; u_register_t val;
/* /*
* MDCR_EL3.TTRF = b0 * MDCR_EL3.TTRF = b0
@ -22,3 +22,15 @@ void trf_enable(void)
val &= ~MDCR_TTRF_BIT; val &= ~MDCR_TTRF_BIT;
write_mdcr_el3(val); write_mdcr_el3(val);
} }
void trf_init_el2_unused(void)
{
/*
* MDCR_EL2.TTRF: Set to zero so that access to Trace
* Filter Control register TRFCR_EL1 at EL1 is not
* trapped to EL2. This bit is RES0 in versions of
* the architecture earlier than ARMv8.4.
*
*/
write_mdcr_el2(read_mdcr_el2() & ~MDCR_EL2_TTRF);
}