diff --git a/Makefile b/Makefile
index 6f080b441..45b0b21a7 100644
--- a/Makefile
+++ b/Makefile
@@ -959,6 +959,10 @@ ifeq (${ARCH},aarch32)
        ifneq (${ENABLE_FEAT_FPMR},0)
                $(error "ENABLE_FEAT_FPMR cannot be used with ARCH=aarch32")
        endif
+
+       ifeq (${ARCH_FEATURE_AVAILABILITY},1)
+               $(error "ARCH_FEATURE_AVAILABILITY cannot be used with ARCH=aarch32")
+       endif
 endif #(ARCH=aarch32)
 
 ifneq (${ENABLE_FEAT_FPMR},0)
@@ -1207,6 +1211,7 @@ $(eval $(call assert_booleans,\
    PROGRAMMABLE_RESET_ADDRESS \
    PSCI_EXTENDED_STATE_ID \
    PSCI_OS_INIT_MODE \
+   ARCH_FEATURE_AVAILABILITY \
    RESET_TO_BL31 \
    SAVE_KEYS \
    SEPARATE_CODE_AND_RODATA \
@@ -1388,6 +1393,7 @@ $(eval $(call add_defines,\
    PROGRAMMABLE_RESET_ADDRESS \
    PSCI_EXTENDED_STATE_ID \
    PSCI_OS_INIT_MODE \
+   ARCH_FEATURE_AVAILABILITY \
    RESET_TO_BL31 \
    RME_GPT_BITLOCK_BLOCK \
    RME_GPT_MAX_BLOCK \
diff --git a/docs/getting_started/build-options.rst b/docs/getting_started/build-options.rst
index d2d2eb537..a8184e643 100644
--- a/docs/getting_started/build-options.rst
+++ b/docs/getting_started/build-options.rst
@@ -858,6 +858,11 @@ Common build options
 - ``PSCI_OS_INIT_MODE``: Boolean flag to enable support for optional PSCI
   OS-initiated mode. This option defaults to 0.
 
+- ``ARCH_FEATURE_AVAILABILITY``: Boolean flag to enable support for the
+  optional ``SMCCC_ARCH_FEATURE_AVAILABILITY`` call. This option implicitly
+  interacts with ``IMPDEF_SYSREG_TRAP`` and software emulation. This option
+  defaults to 0.
+
 - ``ENABLE_FEAT_RAS``: Boolean flag to enable Armv8.2 RAS features. RAS features
   are an optional extension for pre-Armv8.2 CPUs, but are mandatory for Armv8.2
   or later CPUs. This flag can take the values 0 or 1. The default value is 0.
diff --git a/docs/porting-guide.rst b/docs/porting-guide.rst
index 5cb20fd9e..0f37368ac 100644
--- a/docs/porting-guide.rst
+++ b/docs/porting-guide.rst
@@ -3553,7 +3553,10 @@ Function : plat_handle_impdef_trap
 This function is invoked by BL31's exception handler when there is a synchronous
 system register trap caused by access to the implementation defined registers.
 It allows platforms enabling ``IMPDEF_SYSREG_TRAP`` to emulate those system
-registers choosing to program bits of their choice.
+registers, choosing to program bits of their choice. If used in combination
+with ``ARCH_FEATURE_AVAILABILITY``, the macros
+``{SCR,MDCR,CPTR}_PLAT_{FEATS,IGNORED,FLIPPED}`` should be defined for the
+call to report correct results.
 
 The first parameter (``uint64_t esr_el3``) contains the content of the ESR_EL3
 syndrome register, which encodes the instruction that was trapped.
diff --git a/include/services/arm_arch_svc.h b/include/services/arm_arch_svc.h
index 645b388fe..c2b1f41a1 100644
--- a/include/services/arm_arch_svc.h
+++ b/include/services/arm_arch_svc.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2018-2024, Arm Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -13,8 +13,310 @@
 #define SMCCC_ARCH_WORKAROUND_1 U(0x80008000)
 #define SMCCC_ARCH_WORKAROUND_2 U(0x80007FFF)
 #define SMCCC_ARCH_WORKAROUND_3 U(0x80003FFF)
+#define SMCCC_ARCH_FEATURE_AVAILABILITY U(0x80000003)
 
 #define SMCCC_GET_SOC_VERSION U(0)
 #define SMCCC_GET_SOC_REVISION U(1)
 
+#ifndef __ASSEMBLER__
+#if ARCH_FEATURE_AVAILABILITY
+#include <lib/cassert.h>
+
+#if ENABLE_FEAT_FGT2
+#define SCR_FEAT_FGT2 SCR_FGTEN2_BIT
+#else
+#define SCR_FEAT_FGT2 (0)
+#endif
+
+#if ENABLE_FEAT_FPMR
+#define SCR_FEAT_FPMR SCR_EnFPM_BIT
+#else
+#define SCR_FEAT_FPMR (0)
+#endif
+
+#if ENABLE_FEAT_D128
+#define SCR_FEAT_D128 SCR_D128En_BIT
+#else
+#define SCR_FEAT_D128 (0)
+#endif
+
+#if ENABLE_FEAT_S1PIE
+#define SCR_FEAT_S1PIE SCR_PIEN_BIT
+#else
+#define SCR_FEAT_S1PIE (0)
+#endif
+
+#if ENABLE_FEAT_SCTLR2
+#define SCR_FEAT_SCTLR2 SCR_SCTLR2En_BIT
+#else
+#define SCR_FEAT_SCTLR2 (0)
+#endif
+
+#if ENABLE_FEAT_TCR2
+#define SCR_FEAT_TCR2 SCR_TCR2EN_BIT
+#else
+#define SCR_FEAT_TCR2 (0)
+#endif
+
+#if ENABLE_FEAT_THE
+#define SCR_FEAT_THE SCR_RCWMASKEn_BIT
+#else
+#define SCR_FEAT_THE (0)
+#endif
+
+#if ENABLE_SME_FOR_NS
+#define SCR_FEAT_SME SCR_ENTP2_BIT
+#else
+#define SCR_FEAT_SME (0)
+#endif
+
+#if ENABLE_FEAT_GCS
+#define SCR_FEAT_GCS SCR_GCSEn_BIT
+#else
+#define SCR_FEAT_GCS (0)
+#endif
+
+#if ENABLE_FEAT_HCX
+#define SCR_FEAT_HCX SCR_HXEn_BIT
+#else
+#define SCR_FEAT_HCX (0)
+#endif
+
+#if ENABLE_FEAT_LS64_ACCDATA
+#define SCR_FEAT_LS64_ACCDATA (SCR_ADEn_BIT | SCR_EnAS0_BIT)
+#else
+#define SCR_FEAT_LS64_ACCDATA (0)
+#endif
+
+#if ENABLE_FEAT_AMUv1p1
+#define SCR_FEAT_AMUv1p1 SCR_AMVOFFEN_BIT
+#else
+#define SCR_FEAT_AMUv1p1 (0)
+#endif
+
+#if ENABLE_FEAT_ECV
+#define SCR_FEAT_ECV SCR_ECVEN_BIT
+#else
+#define SCR_FEAT_ECV (0)
+#endif
+
+#if ENABLE_FEAT_FGT
+#define SCR_FEAT_FGT SCR_FGTEN_BIT
+#else
+#define SCR_FEAT_FGT (0)
+#endif
+
+#if ENABLE_FEAT_MTE2
+#define SCR_FEAT_MTE2 SCR_ATA_BIT
+#else
+#define SCR_FEAT_MTE2 (0)
+#endif
+
+#if ENABLE_FEAT_CSV2_2
+#define SCR_FEAT_CSV2_2 SCR_EnSCXT_BIT
+#else
+#define SCR_FEAT_CSV2_2 (0)
+#endif
+
+#if ENABLE_FEAT_RAS
+#define SCR_FEAT_RAS SCR_TERR_BIT
+#else
+#define SCR_FEAT_RAS (0)
+#endif
+
+#ifndef SCR_PLAT_FEATS
+#define SCR_PLAT_FEATS (0)
+#endif
+#ifndef SCR_PLAT_FLIPPED
+#define SCR_PLAT_FLIPPED (0)
+#endif
+#ifndef SCR_PLAT_IGNORED
+#define SCR_PLAT_IGNORED (0)
+#endif
+
+#ifndef CPTR_PLAT_FEATS
+#define CPTR_PLAT_FEATS (0)
+#endif
+#ifndef CPTR_PLAT_FLIPPED
+#define CPTR_PLAT_FLIPPED (0)
+#endif
+
+#ifndef MDCR_PLAT_FEATS
+#define MDCR_PLAT_FEATS (0)
+#endif
+#ifndef MDCR_PLAT_FLIPPED
+#define MDCR_PLAT_FLIPPED (0)
+#endif
+#ifndef MDCR_PLAT_IGNORED
+#define MDCR_PLAT_IGNORED (0)
+#endif
+
+/*
+ * XYZ_EL3_FEATS - lists all bits that are relevant for feature enablement. It
+ * is a constant list based on what features are expected. This relies on the
+ * fact that if the feature is in any way disabled, then the relevant bit will
+ * not be written by context management.
+ *
+ * XYZ_EL3_FLIPPED - bits with an active 0, rather than the usual active 1. The
+ * spec always uses active 1 to mean that the feature will not trap.
+ *
+ * XYZ_EL3_IGNORED - list of all bits that are not relevant for feature
+ * enablement and should not be reported to lower ELs
+ */
+#define SCR_EL3_FEATS ( \
+	SCR_FEAT_FGT2 | \
+	SCR_FEAT_FPMR | \
+	SCR_FEAT_D128 | \
+	SCR_FEAT_S1PIE | \
+	SCR_FEAT_SCTLR2 | \
+	SCR_FEAT_TCR2 | \
+	SCR_FEAT_THE | \
+	SCR_FEAT_SME | \
+	SCR_FEAT_GCS | \
+	SCR_FEAT_HCX | \
+	SCR_FEAT_LS64_ACCDATA | \
+	SCR_FEAT_AMUv1p1 | \
+	SCR_FEAT_ECV | \
+	SCR_FEAT_FGT | \
+	SCR_FEAT_MTE2 | \
+	SCR_FEAT_CSV2_2 | \
+	SCR_APK_BIT | /* FEAT_Pauth */ \
+	SCR_FEAT_RAS | \
+	SCR_PLAT_FEATS)
+#define SCR_EL3_FLIPPED ( \
+	SCR_FEAT_RAS | \
+	SCR_PLAT_FLIPPED)
+#define SCR_EL3_IGNORED ( \
+	SCR_API_BIT | \
+	SCR_RW_BIT | \
+	SCR_SIF_BIT | \
+	SCR_HCE_BIT | \
+	SCR_FIQ_BIT | \
+	SCR_IRQ_BIT | \
+	SCR_NS_BIT | \
+	SCR_RES1_BITS | \
+	SCR_PLAT_IGNORED)
+CASSERT((SCR_EL3_FEATS & SCR_EL3_IGNORED) == 0, scr_feat_is_ignored);
+CASSERT((SCR_EL3_FLIPPED & SCR_EL3_FEATS) == SCR_EL3_FLIPPED, scr_flipped_not_a_feat);
+
+#if ENABLE_SYS_REG_TRACE_FOR_NS
+#define CPTR_SYS_REG_TRACE (TCPAC_BIT | TTA_BIT)
+#else
+#define CPTR_SYS_REG_TRACE (0)
+#endif
+
+#if ENABLE_FEAT_AMU
+#define CPTR_FEAT_AMU TAM_BIT
+#else
+#define CPTR_FEAT_AMU (0)
+#endif
+
+#if ENABLE_SME_FOR_NS
+#define CPTR_FEAT_SME ESM_BIT
+#else
+#define CPTR_FEAT_SME (0)
+#endif
+
+#if ENABLE_SVE_FOR_NS
+#define CPTR_FEAT_SVE CPTR_EZ_BIT
+#else
+#define CPTR_FEAT_SVE (0)
+#endif
+
+#define CPTR_EL3_FEATS ( \
+	CPTR_SYS_REG_TRACE | \
+	CPTR_FEAT_AMU | \
+	CPTR_FEAT_SME | \
+	TFP_BIT | \
+	CPTR_FEAT_SVE | \
+	CPTR_PLAT_FEATS)
+#define CPTR_EL3_FLIPPED ( \
+	CPTR_SYS_REG_TRACE | \
+	CPTR_FEAT_AMU | \
+	TFP_BIT | \
+	CPTR_PLAT_FLIPPED)
+CASSERT((CPTR_EL3_FLIPPED & CPTR_EL3_FEATS) == CPTR_EL3_FLIPPED, cptr_flipped_not_a_feat);
+
+/*
+ * Some feature enables are expressed with more than one bit in order to cater
+ * for multi-world enablement. In those cases (BRB, TRB, SPE) only the last bit
+ * is used and reported. This (ab)uses the convenient fact that the last bit
+ * always means "enabled for this world" when context switched correctly.
+ * The per-world values have been adjusted such that this is always true.
+ */
+#if ENABLE_BRBE_FOR_NS
+#define MDCR_FEAT_BRBE MDCR_SBRBE(1UL)
+#else
+#define MDCR_FEAT_BRBE (0)
+#endif
+
+#if ENABLE_FEAT_FGT
+#define MDCR_FEAT_FGT MDCR_TDCC_BIT
+#else
+#define MDCR_FEAT_FGT (0)
+#endif
+
+#if ENABLE_TRBE_FOR_NS
+#define MDCR_FEAT_TRBE MDCR_NSTB(1UL)
+#else
+#define MDCR_FEAT_TRBE (0)
+#endif
+
+#if ENABLE_TRF_FOR_NS
+#define MDCR_FEAT_TRF MDCR_TTRF_BIT
+#else
+#define MDCR_FEAT_TRF (0)
+#endif
+
+#if ENABLE_SPE_FOR_NS
+#define MDCR_FEAT_SPE MDCR_NSPB(1UL)
+#else
+#define MDCR_FEAT_SPE (0)
+#endif
+
+#define MDCR_EL3_FEATS ( \
+	MDCR_FEAT_BRBE | \
+	MDCR_FEAT_FGT | \
+	MDCR_FEAT_TRBE | \
+	MDCR_FEAT_TRF | \
+	MDCR_FEAT_SPE | \
+	MDCR_TDOSA_BIT | \
+	MDCR_TDA_BIT | \
+	MDCR_TPM_BIT | /* FEAT_PMUv3 */ \
+	MDCR_PLAT_FEATS)
+#define MDCR_EL3_FLIPPED ( \
+	MDCR_FEAT_FGT | \
+	MDCR_FEAT_TRF | \
+	MDCR_TDOSA_BIT | \
+	MDCR_TDA_BIT | \
+	MDCR_TPM_BIT | \
+	MDCR_PLAT_FLIPPED)
+#define MDCR_EL3_IGNORED ( \
+	MDCR_EBWE_BIT | \
+	MDCR_EnPMSN_BIT | \
+	MDCR_SBRBE(2UL) | \
+	MDCR_MTPME_BIT | \
+	MDCR_NSTBE_BIT | \
+	MDCR_NSTB(2UL) | \
+	MDCR_SDD_BIT | \
+	MDCR_SPD32(3UL) | \
+	MDCR_NSPB(2UL) | \
+	MDCR_NSPBE_BIT | \
+	MDCR_PLAT_IGNORED)
+CASSERT((MDCR_EL3_FEATS & MDCR_EL3_IGNORED) == 0, mdcr_feat_is_ignored);
+CASSERT((MDCR_EL3_FLIPPED & MDCR_EL3_FEATS) == MDCR_EL3_FLIPPED, mdcr_flipped_not_a_feat);
+
+#define MPAM3_EL3_FEATS (MPAM3_EL3_TRAPLOWER_BIT)
+#define MPAM3_EL3_FLIPPED (MPAM3_EL3_TRAPLOWER_BIT)
+#define MPAM3_EL3_IGNORED (MPAM3_EL3_MPAMEN_BIT)
+CASSERT((MPAM3_EL3_FEATS & MPAM3_EL3_IGNORED) == 0, mpam3_feat_is_ignored);
+CASSERT((MPAM3_EL3_FLIPPED & MPAM3_EL3_FEATS) == MPAM3_EL3_FLIPPED, mpam3_flipped_not_a_feat);
+
+/* The hex representations of these registers' S3 encodings */
+#define SCR_EL3_OPCODE U(0x1E1100)
+#define CPTR_EL3_OPCODE U(0x1E1140)
+#define MDCR_EL3_OPCODE U(0x1E1320)
+#define MPAM3_EL3_OPCODE U(0x1EA500)
+
+#endif /* ARCH_FEATURE_AVAILABILITY */
+#endif /* __ASSEMBLER__ */
 #endif /* ARM_ARCH_SVC_H */
diff --git a/make_helpers/defaults.mk b/make_helpers/defaults.mk
index 8a0975bcb..4985c0c5a 100644
--- a/make_helpers/defaults.mk
+++ b/make_helpers/defaults.mk
@@ -210,6 +210,9 @@ PSCI_EXTENDED_STATE_ID := 0
 # Enable PSCI OS-initiated mode support
 PSCI_OS_INIT_MODE := 0
 
+# SMCCC_ARCH_FEATURE_AVAILABILITY support
+ARCH_FEATURE_AVAILABILITY := 0
+
 # By default, BL1 acts as the reset handler, not BL31
 RESET_TO_BL31 := 0
 
diff --git a/services/arm_arch_svc/arm_arch_svc_setup.c b/services/arm_arch_svc/arm_arch_svc_setup.c
index 545616469..6acd1b60f 100644
--- a/services/arm_arch_svc/arm_arch_svc_setup.c
+++ b/services/arm_arch_svc/arm_arch_svc_setup.c
@@ -14,6 +14,9 @@
 #include
 #include
 #include
+#include
+#include
+#include
 
 static int32_t smccc_version(void)
 {
@@ -90,6 +93,12 @@
 	}
 	return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
 #endif
+
+#if ARCH_FEATURE_AVAILABILITY
+	case SMCCC_ARCH_FEATURE_AVAILABILITY:
+		return SMC_ARCH_CALL_SUCCESS;
+#endif /* ARCH_FEATURE_AVAILABILITY */
+
 #endif /* __aarch64__ */
 
 	/* Fallthrough */
@@ -112,6 +121,91 @@ static int32_t smccc_arch_id(u_register_t arg1)
 	return SMC_ARCH_CALL_INVAL_PARAM;
 }
 
+/*
+ * Reads a system register, sanitises its value, and returns a bitmask
+ * representing which features in that sysreg have been enabled by firmware.
+ * The bitmask is a 1:1 mapping to the register's fields.
+ */
+#if ARCH_FEATURE_AVAILABILITY
+static uintptr_t smccc_arch_feature_availability(u_register_t reg,
+						 void *handle,
+						 u_register_t flags)
+{
+	cpu_context_t *caller_context;
+	per_world_context_t *caller_per_world_context;
+	el3_state_t *state;
+	u_register_t bitmask, check;
+
+	/* check the caller's security state */
+	if (is_caller_secure(flags)) {
+		caller_context = cm_get_context(SECURE);
+		caller_per_world_context = &per_world_context[CPU_CONTEXT_SECURE];
+	} else if (is_caller_non_secure(flags)) {
+		caller_context = cm_get_context(NON_SECURE);
+		caller_per_world_context = &per_world_context[CPU_CONTEXT_NS];
+	} else {
+#if ENABLE_RME
+		caller_context = cm_get_context(REALM);
+		caller_per_world_context = &per_world_context[CPU_CONTEXT_REALM];
+#else /* !ENABLE_RME */
+		assert(0); /* shouldn't be possible */
+#endif /* ENABLE_RME */
+	}
+
+	state = get_el3state_ctx(caller_context);
+
+	switch (reg) {
+	case SCR_EL3_OPCODE:
+		bitmask = read_ctx_reg(state, CTX_SCR_EL3);
+		bitmask &= ~SCR_EL3_IGNORED;
+		check = bitmask & ~SCR_EL3_FEATS;
+		bitmask &= SCR_EL3_FEATS;
+		bitmask ^= SCR_EL3_FLIPPED;
+		/* will only report 0 if neither is implemented */
+		if (is_feat_rng_trap_supported() || is_feat_rng_present())
+			bitmask |= SCR_TRNDR_BIT;
+		break;
+	case CPTR_EL3_OPCODE:
+		bitmask = caller_per_world_context->ctx_cptr_el3;
+		check = bitmask & ~CPTR_EL3_FEATS;
+		bitmask &= CPTR_EL3_FEATS;
+		bitmask ^= CPTR_EL3_FLIPPED;
+		break;
+	case MDCR_EL3_OPCODE:
+		bitmask = read_ctx_reg(state, CTX_MDCR_EL3);
+		bitmask &= ~MDCR_EL3_IGNORED;
+		check = bitmask & ~MDCR_EL3_FEATS;
+		bitmask &= MDCR_EL3_FEATS;
+		bitmask ^= MDCR_EL3_FLIPPED;
+		break;
+#if ENABLE_FEAT_MPAM
+	case MPAM3_EL3_OPCODE:
+		bitmask = caller_per_world_context->ctx_mpam3_el3;
+		bitmask &= ~MPAM3_EL3_IGNORED;
+		check = bitmask & ~MPAM3_EL3_FEATS;
+		bitmask &= MPAM3_EL3_FEATS;
+		bitmask ^= MPAM3_EL3_FLIPPED;
+		break;
+#endif /* ENABLE_FEAT_MPAM */
+	default:
+		SMC_RET2(handle, SMC_INVALID_PARAM, ULL(0));
+	}
+
+	/*
+	 * Failing this check means that the requested register has a bit set
+	 * that hasn't been declared as a known feature bit or an ignore bit.
+	 * This is likely to happen when support for a new feature is added
+	 * but the bitmask macros are not updated.
+	 */
+	if (ENABLE_ASSERTIONS && check != 0) {
+		ERROR("Unexpected bits 0x%lx were set in register 0x%lx!\n",
+		      check, reg);
+		assert(0);
+	}
+
+	SMC_RET2(handle, SMC_ARCH_CALL_SUCCESS, bitmask);
+}
+#endif /* ARCH_FEATURE_AVAILABILITY */
+
 /*
  * Top-level Arm Architectural Service SMC handler.
  */
@@ -161,6 +255,11 @@ static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
 		SMC_RET0(handle);
 #endif
 #endif /* __aarch64__ */
+#if ARCH_FEATURE_AVAILABILITY
+	/* the return is 64-bit so only reply to SMC64 requests */
+	case SMCCC_ARCH_FEATURE_AVAILABILITY | (SMC_64 << FUNCID_CC_SHIFT):
+		return smccc_arch_feature_availability(x1, handle, flags);
+#endif /* ARCH_FEATURE_AVAILABILITY */
 	default:
 		WARN("Unimplemented Arm Architecture Service Call: 0x%x \n",
 			smc_fid);
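
Two illustrative sketches follow. Neither is part of the patch above; both are assumptions built on the definitions the patch adds.

First, a platform using IMPDEF_SYSREG_TRAP could hook the reporting masks from its platform_def.h, as the porting-guide change describes. The bit names and positions here are invented for illustration; a real platform must use the bits its firmware actually programs:

/*
 * Hypothetical platform_def.h excerpt. PLAT_SCR_MY_FEAT_BIT is an enable
 * the platform manages itself; PLAT_SCR_MY_CFG_BIT is set for configuration
 * and is not a feature enable. Positions 60/59 are made up.
 */
#define PLAT_SCR_MY_FEAT_BIT	(ULL(1) << 60)
#define PLAT_SCR_MY_CFG_BIT	(ULL(1) << 59)

/* Report the enable bit; hide the config bit from lower ELs */
#define SCR_PLAT_FEATS		PLAT_SCR_MY_FEAT_BIT
#define SCR_PLAT_IGNORED	PLAT_SCR_MY_CFG_BIT

Second, a lower-EL caller sketch, assuming Linux's SMCCC 1.1 helper arm_smccc_1_1_smc() from <linux/arm-smccc.h> as the conduit. The constants mirror the values defined in arm_arch_svc.h above (SCR_EL3.HXEn is bit 38); fw_enabled_hcx() is a hypothetical name:

#include <linux/arm-smccc.h>
#include <linux/types.h>

#define SMCCC_ARCH_FEATURE_AVAILABILITY_SMC64	0xC0000003UL	/* 0x80000003 | SMC64 */
#define SCR_EL3_OPCODE				0x1E1100UL
#define SCR_HXEn_BIT				(1ULL << 38)	/* FEAT_HCX enable */

static bool fw_enabled_hcx(void)
{
	struct arm_smccc_res res;

	/* x1 selects which EL3 register to report on */
	arm_smccc_1_1_smc(SMCCC_ARCH_FEATURE_AVAILABILITY_SMC64,
			  SCR_EL3_OPCODE, &res);

	/* on success a0 is SMC_ARCH_CALL_SUCCESS (0), a1 is the bitmask */
	return (res.a0 == 0) && ((res.a1 & SCR_HXEn_BIT) != 0);
}

Because the handler only matches the SMC64 function ID (bit 30 set), an SMC32 request for this call falls through to the unimplemented-call path.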