mirror of
https://github.com/ARM-software/arm-trusted-firmware.git
synced 2025-04-15 00:54:22 +00:00

SMCCC_ARCH_FEATURE_AVAILABILITY [1] is a call to query firmware about the features it is aware of and enables. This is useful when a feature is not enabled at EL3, e.g. due to an older FW image, but it is present in hardware. In those cases, the EL1 ID registers do not reflect the usable feature set and this call should provide the necessary information to remedy that. The call itself is very lightweight - effectively a sanitised read of the relevant system register. Bits that are not relevant to feature enablement are masked out and active-low bits are converted to active high. The implementation is also very simple. All relevant, irrelevant, and inverted bits are combined into bitmasks at build time. Then at runtime the masks are unconditionally applied to produce the right result. This assumes that context managers will make sure that disabled features do not have their bits set and the registers are context switched if any fields in them make enablement ambiguous. Features that are not yet supported in TF-A have not been added. On debug builds, calling this function will fail an assert if any bits that are not expected are set. In combination with CI this should allow for this feature to stay up to date as new architectural features are added. If a call for MPAM3_EL3 is made when MPAM is not enabled, the call will return INVALID_PARAM, while if it is FEAT_STATE_CHECK, it will return zero. This should be fairly consistent with feature detection. The bitmask is meant to be interpreted as the logical AND of the relevant ID registers. It would be permissible for this to return 1 while the ID returns 0. Despite this, this implementation takes steps not to. In the general case, the two should match exactly. Finally, it is not entirely clear whether this call responds to SMC32 requests. However, it will not, as the return values are all 64 bits.
[1]: https://developer.arm.com/documentation/den0028/galp1/?lang=en Co-developed-by: Charlie Bareham <charlie.bareham@arm.com> Signed-off-by: Boyan Karatotev <boyan.karatotev@arm.com> Change-Id: I1a74e7d0b3459b1396961b8fa27f84e3f0ad6a6f
322 lines
7.5 KiB
C
322 lines
7.5 KiB
C
/*
|
|
* Copyright (c) 2018-2024, Arm Limited and Contributors. All rights reserved.
|
|
*
|
|
* SPDX-License-Identifier: BSD-3-Clause
|
|
*/
|
|
|
|
#ifndef ARM_ARCH_SVC_H
#define ARM_ARCH_SVC_H

/*
 * Function identifiers for the SMCCC Arm Architecture Service range,
 * as defined by the SMC Calling Convention (Arm DEN 0028).
 */
#define SMCCC_VERSION U(0x80000000)
#define SMCCC_ARCH_FEATURES U(0x80000001)
#define SMCCC_ARCH_SOC_ID U(0x80000002)
#define SMCCC_ARCH_WORKAROUND_1 U(0x80008000)
#define SMCCC_ARCH_WORKAROUND_2 U(0x80007FFF)
#define SMCCC_ARCH_WORKAROUND_3 U(0x80003FFF)
#define SMCCC_ARCH_FEATURE_AVAILABILITY U(0x80000003)

/* Argument values for SMCCC_ARCH_SOC_ID: select SoC version vs. revision. */
#define SMCCC_GET_SOC_VERSION U(0)
#define SMCCC_GET_SOC_REVISION U(1)
|
#ifndef __ASSEMBLER__
#if ARCH_FEATURE_AVAILABILITY
#include <lib/cassert.h>

/*
 * Per-feature SCR_EL3 contribution for SMCCC_ARCH_FEATURE_AVAILABILITY.
 * Each SCR_FEAT_xxx expands to the SCR_EL3 enable bit(s) when the feature
 * is compiled in, and to 0 otherwise, so the bitmasks below are fully
 * resolved at build time.
 */
#if ENABLE_FEAT_FGT2
#define SCR_FEAT_FGT2 SCR_FGTEN2_BIT
#else
#define SCR_FEAT_FGT2 (0)
#endif
|
|
|
/*
 * FEAT_FPMR: SCR_EL3.EnFPM gates lower-EL access to FPMR.
 * NOTE: the disabled case must expand to (0), not empty — an empty
 * expansion inside the SCR_EL3_FEATS OR-chain would produce "| |"
 * and fail to compile whenever ENABLE_FEAT_FPMR is 0.
 */
#if ENABLE_FEAT_FPMR
#define SCR_FEAT_FPMR SCR_EnFPM_BIT
#else
#define SCR_FEAT_FPMR (0)
#endif
|
|
|
|
/*
 * Remaining per-feature SCR_EL3 enable bits. Same pattern throughout:
 * the macro is the feature's SCR_EL3 enable bit(s) when the feature is
 * built in, otherwise 0 so it vanishes from the composed masks.
 */
#if ENABLE_FEAT_D128
#define SCR_FEAT_D128 SCR_D128En_BIT
#else
#define SCR_FEAT_D128 (0)
#endif

#if ENABLE_FEAT_S1PIE
#define SCR_FEAT_S1PIE SCR_PIEN_BIT
#else
#define SCR_FEAT_S1PIE (0)
#endif

#if ENABLE_FEAT_SCTLR2
#define SCR_FEAT_SCTLR2 SCR_SCTLR2En_BIT
#else
#define SCR_FEAT_SCTLR2 (0)
#endif

#if ENABLE_FEAT_TCR2
#define SCR_FEAT_TCR2 SCR_TCR2EN_BIT
#else
#define SCR_FEAT_TCR2 (0)
#endif

#if ENABLE_FEAT_THE
#define SCR_FEAT_THE SCR_RCWMASKEn_BIT
#else
#define SCR_FEAT_THE (0)
#endif

/* SME enablement at EL3 is via SCR_EL3.EnTP2 (TPIDR2_EL0 access). */
#if ENABLE_SME_FOR_NS
#define SCR_FEAT_SME SCR_ENTP2_BIT
#else
#define SCR_FEAT_SME (0)
#endif

#if ENABLE_FEAT_GCS
#define SCR_FEAT_GCS SCR_GCSEn_BIT
#else
#define SCR_FEAT_GCS (0)
#endif

#if ENABLE_FEAT_HCX
#define SCR_FEAT_HCX SCR_HXEn_BIT
#else
#define SCR_FEAT_HCX (0)
#endif

/* LS64_ACCDATA needs two bits: ACCDATA_EL1 access and ST64BV0 enable. */
#if ENABLE_FEAT_LS64_ACCDATA
#define SCR_FEAT_LS64_ACCDATA (SCR_ADEn_BIT | SCR_EnAS0_BIT)
#else
#define SCR_FEAT_LS64_ACCDATA (0)
#endif

#if ENABLE_FEAT_AMUv1p1
#define SCR_FEAT_AMUv1p1 SCR_AMVOFFEN_BIT
#else
#define SCR_FEAT_AMUv1p1 (0)
#endif

#if ENABLE_FEAT_ECV
#define SCR_FEAT_ECV SCR_ECVEN_BIT
#else
#define SCR_FEAT_ECV (0)
#endif

#if ENABLE_FEAT_FGT
#define SCR_FEAT_FGT SCR_FGTEN_BIT
#else
#define SCR_FEAT_FGT (0)
#endif

#if ENABLE_FEAT_MTE2
#define SCR_FEAT_MTE2 SCR_ATA_BIT
#else
#define SCR_FEAT_MTE2 (0)
#endif

#if ENABLE_FEAT_CSV2_2
#define SCR_FEAT_CSV2_2 SCR_EnSCXT_BIT
#else
#define SCR_FEAT_CSV2_2 (0)
#endif

/* SCR_EL3.TERR is a trap bit (active low for "enabled") — see FLIPPED. */
#if ENABLE_FEAT_RAS
#define SCR_FEAT_RAS SCR_TERR_BIT
#else
#define SCR_FEAT_RAS (0)
#endif
|
|
|
|
/*
 * Hooks for platforms to contribute additional bits to the FEATS /
 * FLIPPED / IGNORED masks below. Each defaults to 0 when the platform
 * does not provide an override.
 */
#ifndef SCR_PLAT_FEATS
#define SCR_PLAT_FEATS (0)
#endif
#ifndef SCR_PLAT_FLIPPED
#define SCR_PLAT_FLIPPED (0)
#endif
#ifndef SCR_PLAT_IGNORED
#define SCR_PLAT_IGNORED (0)
#endif

#ifndef CPTR_PLAT_FEATS
#define CPTR_PLAT_FEATS (0)
#endif
#ifndef CPTR_PLAT_FLIPPED
#define CPTR_PLAT_FLIPPED (0)
#endif

#ifndef MDCR_PLAT_FEATS
#define MDCR_PLAT_FEATS (0)
#endif
#ifndef MDCR_PLAT_FLIPPED
#define MDCR_PLAT_FLIPPED (0)
#endif
#ifndef MDCR_PLAT_IGNORED
#define MDCR_PLAT_IGNORED (0)
#endif
|
/*
 * XYZ_EL3_FEATS - list all bits that are relevant for feature enablement. It's
 * a constant list based on what features are expected. This relies on the fact
 * that if the feature is in any way disabled, then the relevant bit will not be
 * written by context management.
 *
 * XYZ_EL3_FLIPPED - bits with an active 0, rather than the usual active 1. The
 * spec always uses active 1 to mean that the feature will not trap.
 *
 * XYZ_EL3_IGNORED - list of all bits that are not relevant for feature
 * enablement and should not be reported to lower ELs
 */
#define SCR_EL3_FEATS ( \
	SCR_FEAT_FGT2 | \
	SCR_FEAT_FPMR | \
	SCR_FEAT_D128 | \
	SCR_FEAT_S1PIE | \
	SCR_FEAT_SCTLR2 | \
	SCR_FEAT_TCR2 | \
	SCR_FEAT_THE | \
	SCR_FEAT_SME | \
	SCR_FEAT_GCS | \
	SCR_FEAT_HCX | \
	SCR_FEAT_LS64_ACCDATA | \
	SCR_FEAT_AMUv1p1 | \
	SCR_FEAT_ECV | \
	SCR_FEAT_FGT | \
	SCR_FEAT_MTE2 | \
	SCR_FEAT_CSV2_2 | \
	SCR_APK_BIT | /* FEAT_Pauth */ \
	SCR_FEAT_RAS | \
	SCR_PLAT_FEATS)
/* TERR is a trap-enable bit, so RAS reads as active low. */
#define SCR_EL3_FLIPPED ( \
	SCR_FEAT_RAS | \
	SCR_PLAT_FLIPPED)
/* Routing, world-switch, and RES1 bits — not feature enables. */
#define SCR_EL3_IGNORED ( \
	SCR_API_BIT | \
	SCR_RW_BIT | \
	SCR_SIF_BIT | \
	SCR_HCE_BIT | \
	SCR_FIQ_BIT | \
	SCR_IRQ_BIT | \
	SCR_NS_BIT | \
	SCR_RES1_BITS | \
	SCR_PLAT_IGNORED)
/* Build-time sanity: the three masks must partition consistently. */
CASSERT((SCR_EL3_FEATS & SCR_EL3_IGNORED) == 0, scr_feat_is_ignored);
CASSERT((SCR_EL3_FLIPPED & SCR_EL3_FEATS) == SCR_EL3_FLIPPED, scr_flipped_not_a_feat);
|
|
|
|
/*
 * Per-feature CPTR_EL3 contributions. Note that several CPTR bits are
 * traps (active low for "enabled") — see CPTR_EL3_FLIPPED below.
 */
#if ENABLE_SYS_REG_TRACE_FOR_NS
#define CPTR_SYS_REG_TRACE (TCPAC_BIT | TTA_BIT)
#else
#define CPTR_SYS_REG_TRACE (0)
#endif

#if ENABLE_FEAT_AMU
#define CPTR_FEAT_AMU TAM_BIT
#else
#define CPTR_FEAT_AMU (0)
#endif

#if ENABLE_SME_FOR_NS
#define CPTR_FEAT_SME ESM_BIT
#else
#define CPTR_FEAT_SME (0)
#endif

#if ENABLE_SVE_FOR_NS
#define CPTR_FEAT_SVE CPTR_EZ_BIT
#else
#define CPTR_FEAT_SVE (0)
#endif

/* TFP (FP/SIMD trap) is always relevant, hence unconditional. */
#define CPTR_EL3_FEATS ( \
	CPTR_SYS_REG_TRACE | \
	CPTR_FEAT_AMU | \
	CPTR_FEAT_SME | \
	TFP_BIT | \
	CPTR_FEAT_SVE | \
	CPTR_PLAT_FEATS)
#define CPTR_EL3_FLIPPED ( \
	CPTR_SYS_REG_TRACE | \
	CPTR_FEAT_AMU | \
	TFP_BIT | \
	CPTR_PLAT_FLIPPED)
CASSERT((CPTR_EL3_FLIPPED & CPTR_EL3_FEATS) == CPTR_EL3_FLIPPED, cptr_flipped_not_a_feat);
|
|
|
|
/*
 * Some features enables are expressed with more than 1 bit in order to cater
 * for multi world enablement. In those cases (BRB, TRB, SPE) only the last bit
 * is used and reported. This (ab)uses the convenient fact that the last bit
 * always means "enabled for this world" when context switched correctly.
 * The per-world values have been adjusted such that this is always true.
 */
#if ENABLE_BRBE_FOR_NS
#define MDCR_FEAT_BRBE MDCR_SBRBE(1UL)
#else
#define MDCR_FEAT_BRBE (0)
#endif

#if ENABLE_FEAT_FGT
#define MDCR_FEAT_FGT MDCR_TDCC_BIT
#else
#define MDCR_FEAT_FGT (0)
#endif

#if ENABLE_TRBE_FOR_NS
#define MDCR_FEAT_TRBE MDCR_NSTB(1UL)
#else
#define MDCR_FEAT_TRBE (0)
#endif

#if ENABLE_TRF_FOR_NS
#define MDCR_FEAT_TRF MDCR_TTRF_BIT
#else
#define MDCR_FEAT_TRF (0)
#endif

#if ENABLE_SPE_FOR_NS
#define MDCR_FEAT_SPE MDCR_NSPB(1UL)
#else
#define MDCR_FEAT_SPE (0)
#endif

#define MDCR_EL3_FEATS ( \
	MDCR_FEAT_BRBE | \
	MDCR_FEAT_FGT | \
	MDCR_FEAT_TRBE | \
	MDCR_FEAT_TRF | \
	MDCR_FEAT_SPE | \
	MDCR_TDOSA_BIT | \
	MDCR_TDA_BIT | \
	MDCR_TPM_BIT | /* FEAT_PMUv3 */ \
	MDCR_PLAT_FEATS)
/* These MDCR bits are traps, so their enabled state reads as 0. */
#define MDCR_EL3_FLIPPED ( \
	MDCR_FEAT_FGT | \
	MDCR_FEAT_TRF | \
	MDCR_TDOSA_BIT | \
	MDCR_TDA_BIT | \
	MDCR_TPM_BIT | \
	MDCR_PLAT_FLIPPED)
/*
 * The upper bit of each 2-bit field (SBRBE/NSTB/NSPB etc.) selects the
 * world, not the enable, so it is ignored for availability reporting.
 */
#define MDCR_EL3_IGNORED ( \
	MDCR_EBWE_BIT | \
	MDCR_EnPMSN_BIT | \
	MDCR_SBRBE(2UL) | \
	MDCR_MTPME_BIT | \
	MDCR_NSTBE_BIT | \
	MDCR_NSTB(2UL) | \
	MDCR_SDD_BIT | \
	MDCR_SPD32(3UL) | \
	MDCR_NSPB(2UL) | \
	MDCR_NSPBE_BIT | \
	MDCR_PLAT_IGNORED)
CASSERT((MDCR_EL3_FEATS & MDCR_EL3_IGNORED) == 0, mdcr_feat_is_ignored);
CASSERT((MDCR_EL3_FLIPPED & MDCR_EL3_FEATS) == MDCR_EL3_FLIPPED, mdcr_flipped_not_a_feat);
|
|
|
|
/*
 * MPAM3_EL3: TRAPLOWER is a trap bit (active low); MPAMEN itself is not
 * reported — callers query availability only when MPAM is enabled.
 */
#define MPAM3_EL3_FEATS (MPAM3_EL3_TRAPLOWER_BIT)
#define MPAM3_EL3_FLIPPED (MPAM3_EL3_TRAPLOWER_BIT)
#define MPAM3_EL3_IGNORED (MPAM3_EL3_MPAMEN_BIT)
CASSERT((MPAM3_EL3_FEATS & MPAM3_EL3_IGNORED) == 0, mpam3_feat_is_ignored);
CASSERT((MPAM3_EL3_FLIPPED & MPAM3_EL3_FEATS) == MPAM3_EL3_FLIPPED, mpam3_flipped_not_a_feat);
|
|
|
|
/*
 * The hex representations of these registers' S3 encoding, used to match
 * the register argument of the SMCCC_ARCH_FEATURE_AVAILABILITY call.
 * NOTE(review): values presumed to match the Arm ARM system-register
 * encodings for SCR_EL3/CPTR_EL3/MDCR_EL3/MPAM3_EL3 — verify on update.
 */
#define SCR_EL3_OPCODE U(0x1E1100)
#define CPTR_EL3_OPCODE U(0x1E1140)
#define MDCR_EL3_OPCODE U(0x1E1320)
#define MPAM3_EL3_OPCODE U(0x1EA500)

#endif /* ARCH_FEATURE_AVAILABILITY */
#endif /* __ASSEMBLER__ */
#endif /* ARM_ARCH_SVC_H */
|