Merge changes from topic "ar/smccc_arch_wa_4" into integration

* changes:
  fix(security): apply SMCCC_ARCH_WORKAROUND_4 to affected cpus
  fix(security): add support in cpu_ops for CVE-2024-7881
  fix(security): add CVE-2024-7881 mitigation to Cortex-X3
  fix(security): add CVE-2024-7881 mitigation to Neoverse-V3
  fix(security): add CVE-2024-7881 mitigation to Neoverse-V2
  fix(security): add CVE-2024-7881 mitigation to Cortex-X925
  fix(security): add CVE-2024-7881 mitigation to Cortex-X4
  fix(security): enable WORKAROUND_CVE_2024_7881 build option
This commit is contained in:
Bipin Ravi 2025-01-31 17:10:57 +01:00 committed by TrustedFirmware Code Review
commit 1f2c58b109
17 changed files with 201 additions and 26 deletions

View file

@ -38,6 +38,10 @@ vulnerability workarounds should be applied at runtime.
in EL3 FW. This build option should be set to 1 if the target platform contains
at least 1 CPU that requires this mitigation. Defaults to 1.
- ``WORKAROUND_CVE_2024_7881``: Enables mitigation for `CVE-2024-7881`_.
This build option should be set to 1 if the target platform contains at
least 1 CPU that requires this mitigation. Defaults to 1.
.. _arm_cpu_macros_errata_workarounds:
CPU Errata Workarounds
@ -1055,7 +1059,7 @@ GIC Errata Workarounds
--------------
*Copyright (c) 2014-2024, Arm Limited and Contributors. All rights reserved.*
*Copyright (c) 2014-2025, Arm Limited and Contributors. All rights reserved.*
.. _CVE-2017-5715: http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2017-5715
.. _CVE-2018-3639: http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-3639

View file

@ -1,5 +1,5 @@
/*
* Copyright (c) 2022-2024, Arm Limited. All rights reserved.
* Copyright (c) 2022-2025, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -36,6 +36,11 @@
#define CORTEX_X4_CPUACTLR5_EL1 S3_0_C15_C8_0
#define CORTEX_X4_CPUACTLR5_EL1_BIT_14 (ULL(1) << 14)
/*******************************************************************************
* CPU Auxiliary control register 6 specific definitions
******************************************************************************/
#define CORTEX_X4_CPUACTLR6_EL1 S3_0_C15_C8_1
#ifndef __ASSEMBLER__
#if ERRATA_X4_2726228
long check_erratum_cortex_x4_2726228(long cpu_rev);

View file

@ -1,5 +1,5 @@
/*
* Copyright (c) 2023-2024, Arm Limited. All rights reserved.
* Copyright (c) 2023-2025, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -21,4 +21,9 @@
#define CORTEX_X925_CPUPWRCTLR_EL1 S3_0_C15_C2_7
#define CORTEX_X925_CPUPWRCTLR_EL1_CORE_PWRDN_BIT U(1)
/*******************************************************************************
* CPU Auxiliary control register 6 specific definitions
******************************************************************************/
#define CORTEX_X925_CPUACTLR6_EL1 S3_0_C15_C8_1
#endif /* CORTEX_X925_H */

View file

@ -63,6 +63,10 @@
* This is a placeholder for future per CPU operations. Currently,
* some CPUs use this entry to set a test function to determine if
* the workaround for CVE-2022-23960 needs to be applied or not.
* _extra4:
* This is a placeholder for future per CPU operations. Currently,
* some CPUs use this entry to set a test function to determine if
* the workaround for CVE-2024-7881 needs to be applied or not.
* _e_handler:
* This is a placeholder for future per CPU exception handlers.
* _power_down_ops:
@ -75,7 +79,8 @@
* used to handle power down at subsequent levels
*/
.macro declare_cpu_ops_base _name:req, _midr:req, _resetfunc:req, \
_extra1:req, _extra2:req, _extra3:req, _e_handler:req, _power_down_ops:vararg
_extra1:req, _extra2:req, _extra3:req, _extra4:req, \
_e_handler:req, _power_down_ops:vararg
.section .cpu_ops, "a"
.align 3
.type cpu_ops_\_name, %object
@ -86,6 +91,7 @@
.quad \_extra1
.quad \_extra2
.quad \_extra3
.quad \_extra4
.quad \_e_handler
#ifdef IMAGE_BL31
/* Insert list of functions */
@ -148,21 +154,28 @@
.macro declare_cpu_ops _name:req, _midr:req, _resetfunc:req, \
_power_down_ops:vararg
declare_cpu_ops_base \_name, \_midr, \_resetfunc, 0, 0, 0, 0, \
declare_cpu_ops_base \_name, \_midr, \_resetfunc, 0, 0, 0, 0, 0, \
\_power_down_ops
.endm
.macro declare_cpu_ops_eh _name:req, _midr:req, _resetfunc:req, \
_e_handler:req, _power_down_ops:vararg
declare_cpu_ops_base \_name, \_midr, \_resetfunc, \
0, 0, 0, \_e_handler, \_power_down_ops
0, 0, 0, 0, \_e_handler, \_power_down_ops
.endm
.macro declare_cpu_ops_wa _name:req, _midr:req, \
_resetfunc:req, _extra1:req, _extra2:req, \
_extra3:req, _power_down_ops:vararg
declare_cpu_ops_base \_name, \_midr, \_resetfunc, \
\_extra1, \_extra2, \_extra3, 0, \_power_down_ops
\_extra1, \_extra2, \_extra3, 0, 0, \_power_down_ops
.endm
.macro declare_cpu_ops_wa_4 _name:req, _midr:req, \
_resetfunc:req, _extra1:req, _extra2:req, \
_extra3:req, _extra4:req, _power_down_ops:vararg
declare_cpu_ops_base \_name, \_midr, \_resetfunc, \
\_extra1, \_extra2, \_extra3, \_extra4, 0, \_power_down_ops
.endm
/*

View file

@ -1,5 +1,5 @@
/*
* Copyright (c) 2021-2023, Arm Limited. All rights reserved.
* Copyright (c) 2021-2025, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -58,4 +58,9 @@
#define NEOVERSE_V2_CPUACTLR5_EL1_BIT_56 (ULL(1) << 56)
#define NEOVERSE_V2_CPUACTLR5_EL1_BIT_55 (ULL(1) << 55)
/*******************************************************************************
* CPU Auxiliary control register 6 specific definitions
******************************************************************************/
#define NEOVERSE_V2_CPUACTLR6_EL1 S3_0_C15_C8_1
#endif /* NEOVERSE_V2_H */

View file

@ -1,5 +1,5 @@
/*
* Copyright (c) 2022-2024, Arm Limited. All rights reserved.
* Copyright (c) 2022-2025, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -22,7 +22,12 @@
/*******************************************************************************
* CPU Power Control register specific definitions
******************************************************************************/
#define NEOVERSE_V3_CPUPWRCTLR_EL1 S3_0_C15_C2_7
#define NEOVERSE_V3_CPUPWRCTLR_EL1 S3_0_C15_C2_7
#define NEOVERSE_V3_CPUPWRCTLR_EL1_CORE_PWRDN_BIT U(1)
/*******************************************************************************
* CPU Auxiliary control register 6 specific definitions
******************************************************************************/
#define NEOVERSE_V3_CPUACTLR6_EL1 S3_0_C15_C8_1
#endif /* NEOVERSE_V3_H */

View file

@ -1,5 +1,5 @@
/*
* Copyright (c) 2023-2024, Arm Limited and Contributors. All rights reserved.
* Copyright (c) 2023-2025, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -28,6 +28,7 @@
#define CPU_NO_EXTRA1_FUNC 0
#define CPU_NO_EXTRA2_FUNC 0
#define CPU_NO_EXTRA3_FUNC 0
#define CPU_NO_EXTRA4_FUNC 0
#endif /* __aarch64__ */
@ -45,6 +46,7 @@
#define CPU_EXTRA1_FUNC_SIZE CPU_WORD_SIZE
#define CPU_EXTRA2_FUNC_SIZE CPU_WORD_SIZE
#define CPU_EXTRA3_FUNC_SIZE CPU_WORD_SIZE
#define CPU_EXTRA4_FUNC_SIZE CPU_WORD_SIZE
#define CPU_E_HANDLER_FUNC_SIZE CPU_WORD_SIZE
/* The power down core and cluster is needed only in BL31 and BL32 */
#if defined(IMAGE_BL31) || defined(IMAGE_BL32)
@ -89,7 +91,8 @@
#define CPU_EXTRA1_FUNC CPU_RESET_FUNC + CPU_RESET_FUNC_SIZE
#define CPU_EXTRA2_FUNC CPU_EXTRA1_FUNC + CPU_EXTRA1_FUNC_SIZE
#define CPU_EXTRA3_FUNC CPU_EXTRA2_FUNC + CPU_EXTRA2_FUNC_SIZE
#define CPU_E_HANDLER_FUNC CPU_EXTRA3_FUNC + CPU_EXTRA3_FUNC_SIZE
#define CPU_EXTRA4_FUNC CPU_EXTRA3_FUNC + CPU_EXTRA3_FUNC_SIZE
#define CPU_E_HANDLER_FUNC CPU_EXTRA4_FUNC + CPU_EXTRA4_FUNC_SIZE
#define CPU_PWR_DWN_OPS CPU_E_HANDLER_FUNC + CPU_E_HANDLER_FUNC_SIZE
#else
#define CPU_PWR_DWN_OPS CPU_RESET_FUNC + CPU_RESET_FUNC_SIZE
@ -119,6 +122,7 @@ struct cpu_ops {
void (*extra1_func)(void);
void (*extra2_func)(void);
void (*extra3_func)(void);
void (*extra4_func)(void);
void (*e_handler_func)(long es);
#endif /* __aarch64__ */
#if (defined(IMAGE_BL31) || defined(IMAGE_BL32)) && CPU_MAX_PWR_DWN_OPS

View file

@ -1,5 +1,5 @@
/*
* Copyright (c) 2017-2024, Arm Limited and Contributors. All rights reserved.
* Copyright (c) 2017-2025, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -48,6 +48,8 @@ static inline bool errata_a75_764081_applies(void)
unsigned int check_if_affected_core(void);
#endif
int check_wa_cve_2024_7881(void);
/*
* NOTE that this structure will be different on AArch32 and AArch64. The
* uintptr_t will reflect the change and the alignment will be correct in both.

View file

@ -1,5 +1,5 @@
/*
* Copyright (c) 2018-2024, Arm Limited and Contributors. All rights reserved.
* Copyright (c) 2018-2025, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -14,6 +14,7 @@
#define SMCCC_ARCH_WORKAROUND_2 U(0x80007FFF)
#define SMCCC_ARCH_WORKAROUND_3 U(0x80003FFF)
#define SMCCC_ARCH_FEATURE_AVAILABILITY U(0x80000003)
#define SMCCC_ARCH_WORKAROUND_4 U(0x80000004)
#define SMCCC_GET_SOC_VERSION U(0)
#define SMCCC_GET_SOC_REVISION U(1)

View file

@ -1,5 +1,5 @@
/*
* Copyright (c) 2021-2024, Arm Limited. All rights reserved.
* Copyright (c) 2021-2025, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -111,6 +111,17 @@ workaround_reset_end cortex_x3, CVE(2022, 23960)
check_erratum_chosen cortex_x3, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
workaround_reset_start cortex_x3, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
/* ---------------------------------
* Sets BIT41 of CPUACTLR6_EL1 which
* disables L1 Data cache prefetcher
* ---------------------------------
*/
sysreg_bit_set CORTEX_X3_CPUACTLR6_EL1, BIT(41)
workaround_reset_end cortex_x3, CVE(2024, 7881)
check_erratum_chosen cortex_x3, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
cpu_reset_func_start cortex_x3
/* Disable speculative loads */
msr SSBS, xzr
@ -151,6 +162,10 @@ func cortex_x3_cpu_reg_dump
ret
endfunc cortex_x3_cpu_reg_dump
declare_cpu_ops cortex_x3, CORTEX_X3_MIDR, \
declare_cpu_ops_wa_4 cortex_x3, CORTEX_X3_MIDR, \
cortex_x3_reset_func, \
CPU_NO_EXTRA1_FUNC, \
CPU_NO_EXTRA2_FUNC, \
CPU_NO_EXTRA3_FUNC, \
check_erratum_cortex_x3_7881, \
cortex_x3_core_pwr_dwn

View file

@ -1,5 +1,5 @@
/*
* Copyright (c) 2022-2024, Arm Limited. All rights reserved.
* Copyright (c) 2022-2025, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -108,6 +108,17 @@ workaround_reset_end cortex_x4, CVE(2022, 23960)
check_erratum_chosen cortex_x4, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
workaround_reset_start cortex_x4, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
/* ---------------------------------
* Sets BIT41 of CPUACTLR6_EL1 which
* disables L1 Data cache prefetcher
* ---------------------------------
*/
sysreg_bit_set CORTEX_X4_CPUACTLR6_EL1, BIT(41)
workaround_reset_end cortex_x4, CVE(2024, 7881)
check_erratum_chosen cortex_x4, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
cpu_reset_func_start cortex_x4
/* Disable speculative loads */
msr SSBS, xzr
@ -149,6 +160,10 @@ func cortex_x4_cpu_reg_dump
ret
endfunc cortex_x4_cpu_reg_dump
declare_cpu_ops cortex_x4, CORTEX_X4_MIDR, \
declare_cpu_ops_wa_4 cortex_x4, CORTEX_X4_MIDR, \
cortex_x4_reset_func, \
CPU_NO_EXTRA1_FUNC, \
CPU_NO_EXTRA2_FUNC, \
CPU_NO_EXTRA3_FUNC, \
check_erratum_cortex_x4_7881, \
cortex_x4_core_pwr_dwn

View file

@ -1,5 +1,5 @@
/*
* Copyright (c) 2023-2024, Arm Limited. All rights reserved.
* Copyright (c) 2023-2025, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -28,6 +28,17 @@ workaround_reset_end cortex_x925, CVE(2024, 5660)
check_erratum_ls cortex_x925, CVE(2024, 5660), CPU_REV(0, 1)
workaround_reset_start cortex_x925, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
/* ---------------------------------
* Sets BIT41 of CPUACTLR6_EL1 which
* disables L1 Data cache prefetcher
* ---------------------------------
*/
sysreg_bit_set CORTEX_X925_CPUACTLR6_EL1, BIT(41)
workaround_reset_end cortex_x925, CVE(2024, 7881)
check_erratum_chosen cortex_x925, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
cpu_reset_func_start cortex_x925
/* Disable speculative loads */
msr SSBS, xzr
@ -66,6 +77,10 @@ func cortex_x925_cpu_reg_dump
ret
endfunc cortex_x925_cpu_reg_dump
declare_cpu_ops cortex_x925, CORTEX_X925_MIDR, \
declare_cpu_ops_wa_4 cortex_x925, CORTEX_X925_MIDR, \
cortex_x925_reset_func, \
CPU_NO_EXTRA1_FUNC, \
CPU_NO_EXTRA2_FUNC, \
CPU_NO_EXTRA3_FUNC, \
check_erratum_cortex_x925_7881, \
cortex_x925_core_pwr_dwn

View file

@ -326,6 +326,43 @@ func check_wa_cve_2017_5715
ret
endfunc check_wa_cve_2017_5715
/*
* int check_wa_cve_2024_7881(void);
*
* This function returns:
* - ERRATA_APPLIES when firmware mitigation is required.
* - ERRATA_NOT_APPLIES when firmware mitigation is _not_ required.
* - ERRATA_MISSING when firmware mitigation would be required but
* is not compiled in.
*
* NOTE: Must be called only after cpu_ops have been initialized
* in per-CPU data.
*/
.globl check_wa_cve_2024_7881
func check_wa_cve_2024_7881
mrs x0, tpidr_el3
#if ENABLE_ASSERTIONS
cmp x0, #0
ASM_ASSERT(ne)
#endif
ldr x0, [x0, #CPU_DATA_CPU_OPS_PTR]
#if ENABLE_ASSERTIONS
cmp x0, #0
ASM_ASSERT(ne)
#endif
ldr x0, [x0, #CPU_EXTRA4_FUNC]
/*
* If the reserved function pointer is NULL, this CPU
* is unaffected by CVE-2024-7881 so bail out.
*/
cmp x0, #CPU_NO_EXTRA4_FUNC
beq 1f
br x0
1:
mov x0, #ERRATA_NOT_APPLIES
ret
endfunc check_wa_cve_2024_7881
/*
* void *wa_cve_2018_3639_get_disable_ptr(void);
*

View file

@ -1,5 +1,5 @@
/*
* Copyright (c) 2021-2024, Arm Limited. All rights reserved.
* Copyright (c) 2021-2025, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -97,6 +97,17 @@ check_erratum_chosen neoverse_v2, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
wa_cve_2022_23960_bhb_vector_table NEOVERSE_V2_BHB_LOOP_COUNT, neoverse_v2
#endif /* WORKAROUND_CVE_2022_23960 */
workaround_reset_start neoverse_v2, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
/* ---------------------------------
* Sets BIT41 of CPUACTLR6_EL1 which
* disables L1 Data cache prefetcher
* ---------------------------------
*/
sysreg_bit_set NEOVERSE_V2_CPUACTLR6_EL1, BIT(41)
workaround_reset_end neoverse_v2, CVE(2024, 7881)
check_erratum_chosen neoverse_v2, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
/* ----------------------------------------------------
* HW will do the cache maintenance while powering down
* ----------------------------------------------------
@ -142,6 +153,10 @@ func neoverse_v2_cpu_reg_dump
ret
endfunc neoverse_v2_cpu_reg_dump
declare_cpu_ops neoverse_v2, NEOVERSE_V2_MIDR, \
declare_cpu_ops_wa_4 neoverse_v2, NEOVERSE_V2_MIDR, \
neoverse_v2_reset_func, \
CPU_NO_EXTRA1_FUNC, \
CPU_NO_EXTRA2_FUNC, \
CPU_NO_EXTRA3_FUNC, \
check_erratum_neoverse_v2_7881, \
neoverse_v2_core_pwr_dwn

View file

@ -1,5 +1,5 @@
/*
* Copyright (c) 2022-2024, Arm Limited. All rights reserved.
* Copyright (c) 2022-2025, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -46,6 +46,17 @@ workaround_reset_end neoverse_v3, CVE(2022,23960)
check_erratum_chosen neoverse_v3, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
workaround_reset_start neoverse_v3, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
/* ---------------------------------
* Sets BIT41 of CPUACTLR6_EL1 which
* disables L1 Data cache prefetcher
* ---------------------------------
*/
sysreg_bit_set NEOVERSE_V3_CPUACTLR6_EL1, BIT(41)
workaround_reset_end neoverse_v3, CVE(2024, 7881)
check_erratum_chosen neoverse_v3, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
/* ---------------------------------------------
* HW will do the cache maintenance while powering down
* ---------------------------------------------
@ -90,6 +101,10 @@ declare_cpu_ops neoverse_v3, NEOVERSE_V3_VNAE_MIDR, \
neoverse_v3_reset_func, \
neoverse_v3_core_pwr_dwn
declare_cpu_ops neoverse_v3, NEOVERSE_V3_MIDR, \
declare_cpu_ops_wa_4 neoverse_v3, NEOVERSE_V3_MIDR, \
neoverse_v3_reset_func, \
CPU_NO_EXTRA1_FUNC, \
CPU_NO_EXTRA2_FUNC, \
CPU_NO_EXTRA3_FUNC, \
check_erratum_neoverse_v3_7881, \
neoverse_v3_core_pwr_dwn

View file

@ -1,5 +1,5 @@
#
# Copyright (c) 2014-2024, Arm Limited and Contributors. All rights reserved.
# Copyright (c) 2014-2025, Arm Limited and Contributors. All rights reserved.
# Copyright (c) 2020-2022, NVIDIA Corporation. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
@ -32,6 +32,8 @@ CPU_FLAG_LIST += WORKAROUND_CVE_2018_3639
CPU_FLAG_LIST += DYNAMIC_WORKAROUND_CVE_2018_3639
WORKAROUND_CVE_2022_23960 ?=1
CPU_FLAG_LIST += WORKAROUND_CVE_2022_23960
WORKAROUND_CVE_2024_7881 ?=1
CPU_FLAG_LIST += WORKAROUND_CVE_2024_7881
# Flag to disable Hardware page aggregation(HPA).
# This flag is enabled by default.

View file

@ -1,5 +1,5 @@
/*
* Copyright (c) 2018-2024, Arm Limited and Contributors. All rights reserved.
* Copyright (c) 2018-2025, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -99,6 +99,14 @@ static int32_t smccc_arch_features(u_register_t arg1)
return SMC_ARCH_CALL_SUCCESS;
#endif /* ARCH_FEATURE_AVAILABILITY */
#if WORKAROUND_CVE_2024_7881
case SMCCC_ARCH_WORKAROUND_4:
if (check_wa_cve_2024_7881() != ERRATA_APPLIES) {
return SMC_ARCH_CALL_NOT_SUPPORTED;
}
return 0;
#endif /* WORKAROUND_CVE_2024_7881 */
#endif /* __aarch64__ */
/* Fallthrough */
@ -254,6 +262,15 @@ static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
*/
SMC_RET0(handle);
#endif
#if WORKAROUND_CVE_2024_7881
case SMCCC_ARCH_WORKAROUND_4:
/*
* The workaround has already been applied on affected PEs
* during cold boot. This function has no effect whether PE is
* affected or not.
*/
SMC_RET0(handle);
#endif /* WORKAROUND_CVE_2024_7881 */
#endif /* __aarch64__ */
#if ARCH_FEATURE_AVAILABILITY
/* return is 64 bit so only reply on SMC64 requests */