From fdb9166b9494402eb2da7e0b004c121b322725e0 Mon Sep 17 00:00:00 2001 From: Madhukar Pappireddy Date: Wed, 16 Mar 2022 14:20:48 -0500 Subject: [PATCH 1/3] fix(fvp): disable reclaiming init code by default In anticipation of Spectre BHB workaround mitigation patches, we disable the RECLAIM_INIT_CODE for FVP platform. Since the spectre BHB mitigation workarounds inevitably increase the size of the various segments due to additional instructions and/or macros, these segments cannot be fit in the existing memory layout designated for BL31 image. The issue is specifically seen in complex build configs for FVP platform. One such config has TBB with Dual CoT and test secure payload dispatcher(TSPD) enabled. Even a small increase in individual segment size in order of few bytes might lead to build fails due to alignment requirements(PAGE_ALIGN to 4KB). This is needed to workaround the following build failures observed across multiple build configs: aarch64-none-elf-ld.bfd: BL31 init has exceeded progbits limit. aarch64-none-elf-ld.bfd: /work/workspace/workspace/tf-worker_ws_2/trusted_firmware/build/fvp/debug/bl31/bl31.elf section coherent_ram will not fit in region RAM aarch64-none-elf-ld.bfd: BL31 image has exceeded its limit. aarch64-none-elf-ld.bfd: region RAM overflowed by 4096 bytes Change-Id: Idfab539e9a40f4346ee11eea1e618c97e93e19a1 Signed-off-by: Madhukar Pappireddy --- plat/arm/board/fvp/platform.mk | 8 -------- 1 file changed, 8 deletions(-) diff --git a/plat/arm/board/fvp/platform.mk b/plat/arm/board/fvp/platform.mk index a24a2e50c..acac88645 100644 --- a/plat/arm/board/fvp/platform.mk +++ b/plat/arm/board/fvp/platform.mk @@ -308,14 +308,6 @@ ENABLE_AMU := 1 # Enable dynamic mitigation support by default DYNAMIC_WORKAROUND_CVE_2018_3639 := 1 -# Enable reclaiming of BL31 initialisation code for secondary cores -# stacks for FVP. However, don't enable reclaiming for clang. 
-ifneq (${RESET_TO_BL31},1) -ifeq ($(findstring clang,$(notdir $(CC))),) -RECLAIM_INIT_CODE := 1 -endif -endif - ifeq (${ENABLE_AMU},1) BL31_SOURCES += lib/cpus/aarch64/cpuamu.c \ lib/cpus/aarch64/cpuamu_helpers.S From be9121fd311ff48c94f3d90fe7efcf84586119e4 Mon Sep 17 00:00:00 2001 From: Bipin Ravi Date: Tue, 15 Feb 2022 23:24:51 -0600 Subject: [PATCH 2/3] fix(security): workaround for CVE-2022-23960 for Cortex-A57, Cortex-A72 Implements mitigation for Cortex-A72 CPU versions that support the CSV2 feature(from r1p0). It also applies the mitigation for Cortex-A57 CPU. Signed-off-by: Bipin Ravi Change-Id: I7cfcf06537710f144f6e849992612033ddd79d33 --- include/lib/cpus/aarch64/cortex_a72.h | 5 +++- lib/cpus/aarch64/cortex_a57.S | 17 ++++++++++++- lib/cpus/aarch64/cortex_a72.S | 36 ++++++++++++++++++++++++--- 3 files changed, 53 insertions(+), 5 deletions(-) diff --git a/include/lib/cpus/aarch64/cortex_a72.h b/include/lib/cpus/aarch64/cortex_a72.h index 28b440e19..17776458b 100644 --- a/include/lib/cpus/aarch64/cortex_a72.h +++ b/include/lib/cpus/aarch64/cortex_a72.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2015-2019, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2015-2022, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -12,6 +12,9 @@ /* Cortex-A72 midr for revision 0 */ #define CORTEX_A72_MIDR U(0x410FD080) +/* Cortex-A72 loop count for CVE-2022-23960 mitigation */ +#define CORTEX_A72_BHB_LOOP_COUNT U(8) + /******************************************************************************* * CPU Extended Control register specific definitions. 
******************************************************************************/ diff --git a/lib/cpus/aarch64/cortex_a57.S b/lib/cpus/aarch64/cortex_a57.S index 8ef0f922a..4120f119e 100644 --- a/lib/cpus/aarch64/cortex_a57.S +++ b/lib/cpus/aarch64/cortex_a57.S @@ -470,7 +470,12 @@ func cortex_a57_reset_func bl errata_a57_859972_wa #endif -#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715 +#if IMAGE_BL31 && ( WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960 ) + /* --------------------------------------------------------------- + * Override vector table & enable existing workaround if either of + * the build flags are enabled + * --------------------------------------------------------------- + */ adr x0, wa_cve_2017_5715_mmu_vbar msr vbar_el3, x0 /* isb will be performed before returning from this function */ @@ -506,6 +511,15 @@ func cortex_a57_reset_func ret x19 endfunc cortex_a57_reset_func +func check_errata_cve_2022_23960 +#if WORKAROUND_CVE_2022_23960 + mov x0, #ERRATA_APPLIES +#else + mov x0, #ERRATA_MISSING +#endif + ret +endfunc check_errata_cve_2022_23960 + /* ---------------------------------------------------- * The CPU Ops core power down function for Cortex-A57. * ---------------------------------------------------- @@ -630,6 +644,7 @@ func cortex_a57_errata_report report_errata ERRATA_A57_1319537, cortex_a57, 1319537 report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715 report_errata WORKAROUND_CVE_2018_3639, cortex_a57, cve_2018_3639 + report_errata WORKAROUND_CVE_2022_23960, cortex_a57, cve_2022_23960 ldp x8, x30, [sp], #16 ret diff --git a/lib/cpus/aarch64/cortex_a72.S b/lib/cpus/aarch64/cortex_a72.S index aff6072a0..3dc44805e 100644 --- a/lib/cpus/aarch64/cortex_a72.S +++ b/lib/cpus/aarch64/cortex_a72.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2015-2020, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2015-2022, ARM Limited and Contributors. All rights reserved. 
* * SPDX-License-Identifier: BSD-3-Clause */ @@ -9,6 +9,11 @@ #include #include #include +#include "wa_cve_2022_23960_bhb_vector.S" + +#if WORKAROUND_CVE_2022_23960 + wa_cve_2022_23960_bhb_vector_table CORTEX_A72_BHB_LOOP_COUNT, cortex_a72 +#endif /* WORKAROUND_CVE_2022_23960 */ /* --------------------------------------------- * Disable L1 data cache and unified L2 cache @@ -133,6 +138,15 @@ func check_errata_1319367 ret endfunc check_errata_1319367 +func check_errata_cve_2022_23960 +#if WORKAROUND_CVE_2022_23960 + mov x0, #ERRATA_APPLIES +#else + mov x0, #ERRATA_MISSING +#endif + ret +endfunc check_errata_cve_2022_23960 + /* ------------------------------------------------- * The CPU Ops reset function for Cortex-A72. * ------------------------------------------------- @@ -147,13 +161,28 @@ func cortex_a72_reset_func bl errata_a72_859971_wa #endif -#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715 +#if IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960) cpu_check_csv2 x0, 1f adr x0, wa_cve_2017_5715_mmu_vbar msr vbar_el3, x0 /* isb will be performed before returning from this function */ + + /* Skip CVE_2022_23960 mitigation if cve_2017_5715 mitigation applied */ + b 2f 1: -#endif +#if WORKAROUND_CVE_2022_23960 + /* + * The Cortex-A72 generic vectors are overridden to apply the + * mitigation on exception entry from lower ELs for revisions >= r1p0 + * which has CSV2 implemented. 
 + */ + adr x0, wa_cve_vbar_cortex_a72 + msr vbar_el3, x0 + + /* isb will be performed before returning from this function */ +#endif /* WORKAROUND_CVE_2022_23960 */ +2: +#endif /* IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960) */ #if WORKAROUND_CVE_2018_3639 mrs x0, CORTEX_A72_CPUACTLR_EL1 @@ -299,6 +328,7 @@ func cortex_a72_errata_report report_errata ERRATA_A72_1319367, cortex_a72, 1319367 report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715 report_errata WORKAROUND_CVE_2018_3639, cortex_a72, cve_2018_3639 + report_errata WORKAROUND_CVE_2022_23960, cortex_a72, cve_2022_23960 ldp x8, x30, [sp], #16 ret From 9b2510b69de26cc7f571731b415f6dec82669b6c Mon Sep 17 00:00:00 2001 From: Bipin Ravi Date: Wed, 23 Feb 2022 23:45:50 -0600 Subject: [PATCH 3/3] fix(security): apply SMCCC_ARCH_WORKAROUND_3 to A73/A75/A72/A57 This patch applies CVE-2022-23960 workarounds for Cortex-A75, Cortex-A73, Cortex-A72 & Cortex-A57. This patch also implements the new SMCCC_ARCH_WORKAROUND_3 and enables necessary discovery hooks for Cortex-A72, Cortex-A57, Cortex-A73 and Cortex-A75 to enable discovery of this SMC via SMC_FEATURES. SMCCC_ARCH_WORKAROUND_3 is implemented for A57/A72 because some revisions are affected by both CVE-2022-23960 and CVE-2017-5715 and this allows callers to replace SMCCC_ARCH_WORKAROUND_1 calls with SMCCC_ARCH_WORKAROUND_3. For details of SMCCC_ARCH_WORKAROUND_3, please refer to the SMCCC v1.4 specification. 
Signed-off-by: Bipin Ravi Signed-off-by: John Powell Change-Id: Ifa6d9c7baa6764924638efe3c70468f98d60ed7c --- include/lib/cpus/aarch64/cpu_macros.S | 24 ++++++++----- include/lib/cpus/wa_cve_2022_23960.h | 12 +++++++ include/services/arm_arch_svc.h | 3 +- lib/cpus/aarch64/cortex_a57.S | 8 ++++- lib/cpus/aarch64/cortex_a72.S | 10 ++++++ lib/cpus/aarch64/cortex_a73.S | 38 ++++++++++++++++++-- lib/cpus/aarch64/cortex_a75.S | 36 +++++++++++++++++-- lib/cpus/aarch64/cortex_a76.S | 1 + lib/cpus/aarch64/cpu_helpers.S | 42 ++++++++++++++++++++-- lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S | 21 ++++++++--- lib/cpus/aarch64/wa_cve_2017_5715_mmu.S | 9 +++-- plat/st/stm32mp1/platform.mk | 1 + services/arm_arch_svc/arm_arch_svc_setup.c | 28 +++++++++++++-- 13 files changed, 206 insertions(+), 27 deletions(-) create mode 100644 include/lib/cpus/wa_cve_2022_23960.h diff --git a/include/lib/cpus/aarch64/cpu_macros.S b/include/lib/cpus/aarch64/cpu_macros.S index 92891ce38..92e65ae90 100644 --- a/include/lib/cpus/aarch64/cpu_macros.S +++ b/include/lib/cpus/aarch64/cpu_macros.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2014-2019, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2014-2022, ARM Limited and Contributors. All rights reserved. 
* * SPDX-License-Identifier: BSD-3-Clause */ @@ -21,6 +21,7 @@ #define CPU_NO_EXTRA1_FUNC 0 #define CPU_NO_EXTRA2_FUNC 0 +#define CPU_NO_EXTRA3_FUNC 0 /* Word size for 64-bit CPUs */ #define CPU_WORD_SIZE 8 @@ -39,6 +40,7 @@ .equ CPU_MIDR_SIZE, CPU_WORD_SIZE .equ CPU_EXTRA1_FUNC_SIZE, CPU_WORD_SIZE .equ CPU_EXTRA2_FUNC_SIZE, CPU_WORD_SIZE + .equ CPU_EXTRA3_FUNC_SIZE, CPU_WORD_SIZE .equ CPU_E_HANDLER_FUNC_SIZE, CPU_WORD_SIZE .equ CPU_RESET_FUNC_SIZE, CPU_WORD_SIZE .equ CPU_PWR_DWN_OPS_SIZE, CPU_WORD_SIZE * CPU_MAX_PWR_DWN_OPS @@ -80,7 +82,8 @@ .equ CPU_RESET_FUNC, CPU_MIDR + CPU_MIDR_SIZE .equ CPU_EXTRA1_FUNC, CPU_RESET_FUNC + CPU_RESET_FUNC_SIZE .equ CPU_EXTRA2_FUNC, CPU_EXTRA1_FUNC + CPU_EXTRA1_FUNC_SIZE - .equ CPU_E_HANDLER_FUNC, CPU_EXTRA2_FUNC + CPU_EXTRA2_FUNC_SIZE + .equ CPU_EXTRA3_FUNC, CPU_EXTRA2_FUNC + CPU_EXTRA2_FUNC_SIZE + .equ CPU_E_HANDLER_FUNC, CPU_EXTRA3_FUNC + CPU_EXTRA3_FUNC_SIZE .equ CPU_PWR_DWN_OPS, CPU_E_HANDLER_FUNC + CPU_E_HANDLER_FUNC_SIZE .equ CPU_ERRATA_FUNC, CPU_PWR_DWN_OPS + CPU_PWR_DWN_OPS_SIZE .equ CPU_ERRATA_LOCK, CPU_ERRATA_FUNC + CPU_ERRATA_FUNC_SIZE @@ -134,9 +137,13 @@ * some CPUs use this entry to set a test function to determine if * the workaround for CVE-2017-5715 needs to be applied or not. * _extra2: - * This is a placeholder for future per CPU operations. Currently + * This is a placeholder for future per CPU operations. Currently * some CPUs use this entry to set a function to disable the * workaround for CVE-2018-3639. + * _extra3: + * This is a placeholder for future per CPU operations. Currently, + * some CPUs use this entry to set a test function to determine if + * the workaround for CVE-2022-23960 needs to be applied or not. * _e_handler: * This is a placeholder for future per CPU exception handlers. 
* _power_down_ops: @@ -149,7 +156,7 @@ * used to handle power down at subsequent levels */ .macro declare_cpu_ops_base _name:req, _midr:req, _resetfunc:req, \ - _extra1:req, _extra2:req, _e_handler:req, _power_down_ops:vararg + _extra1:req, _extra2:req, _extra3:req, _e_handler:req, _power_down_ops:vararg .section cpu_ops, "a" .align 3 .type cpu_ops_\_name, %object @@ -159,6 +166,7 @@ #endif .quad \_extra1 .quad \_extra2 + .quad \_extra3 .quad \_e_handler #ifdef IMAGE_BL31 /* Insert list of functions */ @@ -204,21 +212,21 @@ .macro declare_cpu_ops _name:req, _midr:req, _resetfunc:req, \ _power_down_ops:vararg - declare_cpu_ops_base \_name, \_midr, \_resetfunc, 0, 0, 0, \ + declare_cpu_ops_base \_name, \_midr, \_resetfunc, 0, 0, 0, 0, \ \_power_down_ops .endm .macro declare_cpu_ops_eh _name:req, _midr:req, _resetfunc:req, \ _e_handler:req, _power_down_ops:vararg declare_cpu_ops_base \_name, \_midr, \_resetfunc, \ - 0, 0, \_e_handler, \_power_down_ops + 0, 0, 0, \_e_handler, \_power_down_ops .endm .macro declare_cpu_ops_wa _name:req, _midr:req, \ _resetfunc:req, _extra1:req, _extra2:req, \ - _power_down_ops:vararg + _extra3:req, _power_down_ops:vararg declare_cpu_ops_base \_name, \_midr, \_resetfunc, \ - \_extra1, \_extra2, 0, \_power_down_ops + \_extra1, \_extra2, \_extra3, 0, \_power_down_ops .endm #if REPORT_ERRATA diff --git a/include/lib/cpus/wa_cve_2022_23960.h b/include/lib/cpus/wa_cve_2022_23960.h new file mode 100644 index 000000000..35b3fd8c6 --- /dev/null +++ b/include/lib/cpus/wa_cve_2022_23960.h @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2022, ARM Limited and Contributors. All rights reserved. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + */ + +#ifndef WA_CVE_2022_23960_H +#define WA_CVE_2022_23960_H + +int check_smccc_arch_wa3_applies(void); + +#endif /* WA_CVE_2022_23960_H */ diff --git a/include/services/arm_arch_svc.h b/include/services/arm_arch_svc.h index 5bbd8bb6c..645b388fe 100644 --- a/include/services/arm_arch_svc.h +++ b/include/services/arm_arch_svc.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2018-2020, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -12,6 +12,7 @@ #define SMCCC_ARCH_SOC_ID U(0x80000002) #define SMCCC_ARCH_WORKAROUND_1 U(0x80008000) #define SMCCC_ARCH_WORKAROUND_2 U(0x80007FFF) +#define SMCCC_ARCH_WORKAROUND_3 U(0x80003FFF) #define SMCCC_GET_SOC_VERSION U(0) #define SMCCC_GET_SOC_REVISION U(1) diff --git a/lib/cpus/aarch64/cortex_a57.S b/lib/cpus/aarch64/cortex_a57.S index 4120f119e..3766ec769 100644 --- a/lib/cpus/aarch64/cortex_a57.S +++ b/lib/cpus/aarch64/cortex_a57.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2014-2020, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2014-2022, ARM Limited and Contributors. All rights reserved. * Copyright (c) 2020, NVIDIA Corporation. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause @@ -520,6 +520,11 @@ func check_errata_cve_2022_23960 ret endfunc check_errata_cve_2022_23960 +func check_smccc_arch_workaround_3 + mov x0, #ERRATA_APPLIES + ret +endfunc check_smccc_arch_workaround_3 + /* ---------------------------------------------------- * The CPU Ops core power down function for Cortex-A57. 
* ---------------------------------------------------- @@ -676,5 +681,6 @@ declare_cpu_ops_wa cortex_a57, CORTEX_A57_MIDR, \ cortex_a57_reset_func, \ check_errata_cve_2017_5715, \ CPU_NO_EXTRA2_FUNC, \ + check_smccc_arch_workaround_3, \ cortex_a57_core_pwr_dwn, \ cortex_a57_cluster_pwr_dwn diff --git a/lib/cpus/aarch64/cortex_a72.S b/lib/cpus/aarch64/cortex_a72.S index 3dc44805e..de2d36ea4 100644 --- a/lib/cpus/aarch64/cortex_a72.S +++ b/lib/cpus/aarch64/cortex_a72.S @@ -147,6 +147,15 @@ func check_errata_cve_2022_23960 ret endfunc check_errata_cve_2022_23960 +func check_smccc_arch_workaround_3 + cpu_check_csv2 x0, 1f + mov x0, #ERRATA_APPLIES + ret +1: + mov x0, #ERRATA_NOT_APPLIES + ret +endfunc check_smccc_arch_workaround_3 + /* ------------------------------------------------- * The CPU Ops reset function for Cortex-A72. * ------------------------------------------------- @@ -360,5 +369,6 @@ declare_cpu_ops_wa cortex_a72, CORTEX_A72_MIDR, \ cortex_a72_reset_func, \ check_errata_cve_2017_5715, \ CPU_NO_EXTRA2_FUNC, \ + check_smccc_arch_workaround_3, \ cortex_a72_core_pwr_dwn, \ cortex_a72_cluster_pwr_dwn diff --git a/lib/cpus/aarch64/cortex_a73.S b/lib/cpus/aarch64/cortex_a73.S index 5c8a8871d..edcd1f51c 100644 --- a/lib/cpus/aarch64/cortex_a73.S +++ b/lib/cpus/aarch64/cortex_a73.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016-2019, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2016-2022, ARM Limited and Contributors. All rights reserved. 
* * SPDX-License-Identifier: BSD-3-Clause */ @@ -111,13 +111,21 @@ func cortex_a73_reset_func bl errata_a73_855423_wa #endif -#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715 +#if IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960) cpu_check_csv2 x0, 1f adr x0, wa_cve_2017_5715_bpiall_vbar msr vbar_el3, x0 - /* isb will be performed before returning from this function */ + isb + /* Skip installing vector table again for CVE_2022_23960 */ + b 2f 1: +#if WORKAROUND_CVE_2022_23960 + adr x0, wa_cve_2017_5715_bpiall_vbar + msr vbar_el3, x0 + isb #endif +2: +#endif /* IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960) */ #if WORKAROUND_CVE_2018_3639 mrs x0, CORTEX_A73_IMP_DEF_REG1 @@ -221,6 +229,28 @@ func check_errata_cve_2018_3639 ret endfunc check_errata_cve_2018_3639 +func check_errata_cve_2022_23960 +#if WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960 + cpu_check_csv2 x0, 1f + mov x0, #ERRATA_APPLIES + ret + 1: +# if WORKAROUND_CVE_2022_23960 + mov x0, #ERRATA_APPLIES +# else + mov x0, #ERRATA_MISSING +# endif /* WORKAROUND_CVE_2022_23960 */ + ret +#endif /* WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960 */ + mov x0, #ERRATA_MISSING + ret +endfunc check_errata_cve_2022_23960 + +func check_smccc_arch_workaround_3 + mov x0, #ERRATA_APPLIES + ret +endfunc check_smccc_arch_workaround_3 + #if REPORT_ERRATA /* * Errata printing function for Cortex A75. Must follow AAPCS. 
@@ -239,6 +269,7 @@ func cortex_a73_errata_report report_errata ERRATA_A73_855423, cortex_a73, 855423 report_errata WORKAROUND_CVE_2017_5715, cortex_a73, cve_2017_5715 report_errata WORKAROUND_CVE_2018_3639, cortex_a73, cve_2018_3639 + report_errata WORKAROUND_CVE_2022_23960, cortex_a73, cve_2022_23960 ldp x8, x30, [sp], #16 ret @@ -269,5 +300,6 @@ declare_cpu_ops_wa cortex_a73, CORTEX_A73_MIDR, \ cortex_a73_reset_func, \ check_errata_cve_2017_5715, \ CPU_NO_EXTRA2_FUNC, \ + check_smccc_arch_workaround_3, \ cortex_a73_core_pwr_dwn, \ cortex_a73_cluster_pwr_dwn diff --git a/lib/cpus/aarch64/cortex_a75.S b/lib/cpus/aarch64/cortex_a75.S index 657457ee1..d561be45e 100644 --- a/lib/cpus/aarch64/cortex_a75.S +++ b/lib/cpus/aarch64/cortex_a75.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2017-2022, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -90,13 +90,21 @@ func cortex_a75_reset_func bl errata_a75_790748_wa #endif -#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715 +#if IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960) cpu_check_csv2 x0, 1f adr x0, wa_cve_2017_5715_bpiall_vbar msr vbar_el3, x0 isb + /* Skip installing vector table again for CVE_2022_23960 */ + b 2f 1: +#if WORKAROUND_CVE_2022_23960 + adr x0, wa_cve_2017_5715_bpiall_vbar + msr vbar_el3, x0 + isb #endif +2: +#endif /* IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960) */ #if WORKAROUND_CVE_2018_3639 mrs x0, CORTEX_A75_CPUACTLR_EL1 @@ -161,6 +169,28 @@ func check_errata_cve_2018_3639 ret endfunc check_errata_cve_2018_3639 +func check_errata_cve_2022_23960 +#if WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960 + cpu_check_csv2 x0, 1f + mov x0, #ERRATA_APPLIES + ret +1: +# if WORKAROUND_CVE_2022_23960 + mov x0, #ERRATA_APPLIES +# else + mov x0, #ERRATA_MISSING +# endif /* WORKAROUND_CVE_2022_23960 */ + ret +#endif /* WORKAROUND_CVE_2017_5715 || 
WORKAROUND_CVE_2022_23960 */ + mov x0, #ERRATA_MISSING + ret +endfunc check_errata_cve_2022_23960 + +func check_smccc_arch_workaround_3 + mov x0, #ERRATA_APPLIES + ret +endfunc check_smccc_arch_workaround_3 + /* --------------------------------------------- * HW will do the cache maintenance while powering down * --------------------------------------------- @@ -197,6 +227,7 @@ func cortex_a75_errata_report report_errata WORKAROUND_CVE_2018_3639, cortex_a75, cve_2018_3639 report_errata ERRATA_DSU_798953, cortex_a75, dsu_798953 report_errata ERRATA_DSU_936184, cortex_a75, dsu_936184 + report_errata WORKAROUND_CVE_2022_23960, cortex_a75, cve_2022_23960 ldp x8, x30, [sp], #16 ret @@ -226,4 +257,5 @@ declare_cpu_ops_wa cortex_a75, CORTEX_A75_MIDR, \ cortex_a75_reset_func, \ check_errata_cve_2017_5715, \ CPU_NO_EXTRA2_FUNC, \ + check_smccc_arch_workaround_3, \ cortex_a75_core_pwr_dwn diff --git a/lib/cpus/aarch64/cortex_a76.S b/lib/cpus/aarch64/cortex_a76.S index 4f7f4bb9a..4d0e9f982 100644 --- a/lib/cpus/aarch64/cortex_a76.S +++ b/lib/cpus/aarch64/cortex_a76.S @@ -685,4 +685,5 @@ declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \ cortex_a76_reset_func, \ CPU_NO_EXTRA1_FUNC, \ cortex_a76_disable_wa_cve_2018_3639, \ + CPU_NO_EXTRA3_FUNC, \ cortex_a76_core_pwr_dwn diff --git a/lib/cpus/aarch64/cpu_helpers.S b/lib/cpus/aarch64/cpu_helpers.S index bd8f85f6d..238562732 100644 --- a/lib/cpus/aarch64/cpu_helpers.S +++ b/lib/cpus/aarch64/cpu_helpers.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2014-2021, Arm Limited and Contributors. All rights reserved. + * Copyright (c) 2014-2022, Arm Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -381,7 +381,7 @@ func check_wa_cve_2017_5715 * If the reserved function pointer is NULL, this CPU * is unaffected by CVE-2017-5715 so bail out. 
*/ - cmp x0, #0 + cmp x0, #CPU_NO_EXTRA1_FUNC beq 1f br x0 1: @@ -416,3 +416,41 @@ func wa_cve_2018_3639_get_disable_ptr ldr x0, [x0, #CPU_EXTRA2_FUNC] ret endfunc wa_cve_2018_3639_get_disable_ptr + +/* + * int check_smccc_arch_wa3_applies(void); + * + * This function checks whether SMCCC_ARCH_WORKAROUND_3 is enabled to mitigate + * CVE-2022-23960 for this CPU. It returns: + * - ERRATA_APPLIES when SMCCC_ARCH_WORKAROUND_3 can be invoked to mitigate + * the CVE. + * - ERRATA_NOT_APPLIES when SMCCC_ARCH_WORKAROUND_3 should not be invoked to + * mitigate the CVE. + * + * NOTE: Must be called only after cpu_ops have been initialized + * in per-CPU data. + */ + .globl check_smccc_arch_wa3_applies +func check_smccc_arch_wa3_applies + mrs x0, tpidr_el3 +#if ENABLE_ASSERTIONS + cmp x0, #0 + ASM_ASSERT(ne) +#endif + ldr x0, [x0, #CPU_DATA_CPU_OPS_PTR] +#if ENABLE_ASSERTIONS + cmp x0, #0 + ASM_ASSERT(ne) +#endif + ldr x0, [x0, #CPU_EXTRA3_FUNC] + /* + * If the reserved function pointer is NULL, this CPU + * is unaffected by CVE-2022-23960 so bail out. + */ + cmp x0, #CPU_NO_EXTRA3_FUNC + beq 1f + br x0 +1: + mov x0, #ERRATA_NOT_APPLIES + ret +endfunc check_smccc_arch_wa3_applies diff --git a/lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S b/lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S index c9a954496..022281876 100644 --- a/lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S +++ b/lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2017-2022, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -308,22 +308,25 @@ vector_entry bpiall_ret_sync_exception_aarch32 /* * Check if SMC is coming from A64 state on #0 - * with W0 = SMCCC_ARCH_WORKAROUND_1 + * with W0 = SMCCC_ARCH_WORKAROUND_1 or W0 = SMCCC_ARCH_WORKAROUND_3 * * This sequence evaluates as: - * (W0==SMCCC_ARCH_WORKAROUND_1) ? 
(ESR_EL3==SMC#0) : (NE) + * (W0==SMCCC_ARCH_WORKAROUND_1) || (W0==SMCCC_ARCH_WORKAROUND_3) ? + * (ESR_EL3==SMC#0) : (NE) * allowing use of a single branch operation */ orr w2, wzr, #SMCCC_ARCH_WORKAROUND_1 cmp w0, w2 + orr w2, wzr, #SMCCC_ARCH_WORKAROUND_3 + ccmp w0, w2, #4, ne mov_imm w2, ESR_EL3_A64_SMC0 ccmp w3, w2, #0, eq /* Static predictor will predict a fall through */ bne 1f eret 1: - ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2] - b sync_exception_aarch64 + /* restore x2 and x3 and continue sync exception handling */ + b bpiall_ret_sync_exception_aarch32_tail end_vector_entry bpiall_ret_sync_exception_aarch32 vector_entry bpiall_ret_irq_aarch32 @@ -355,3 +358,11 @@ end_vector_entry bpiall_ret_fiq_aarch32 vector_entry bpiall_ret_serror_aarch32 b report_unhandled_exception end_vector_entry bpiall_ret_serror_aarch32 + + /* + * Part of bpiall_ret_sync_exception_aarch32 to save vector space + */ +func bpiall_ret_sync_exception_aarch32_tail + ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2] + b sync_exception_aarch64 +endfunc bpiall_ret_sync_exception_aarch32_tail diff --git a/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S b/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S index 5134ee3f1..ed0a54986 100644 --- a/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S +++ b/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2017-2022, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -34,15 +34,18 @@ vector_base wa_cve_2017_5715_mmu_vbar /* * Ensure SMC is coming from A64/A32 state on #0 - * with W0 = SMCCC_ARCH_WORKAROUND_1 + * with W0 = SMCCC_ARCH_WORKAROUND_1 or W0 = SMCCC_ARCH_WORKAROUND_3 * * This sequence evaluates as: - * (W0==SMCCC_ARCH_WORKAROUND_1) ? (ESR_EL3==SMC#0) : (NE) + * (W0==SMCCC_ARCH_WORKAROUND_1) || (W0==SMCCC_ARCH_WORKAROUND_3) ? 
+ * (ESR_EL3==SMC#0) : (NE) * allowing use of a single branch operation */ .if \_is_sync_exception orr w1, wzr, #SMCCC_ARCH_WORKAROUND_1 cmp w0, w1 + orr w1, wzr, #SMCCC_ARCH_WORKAROUND_3 + ccmp w0, w1, #4, ne mrs x0, esr_el3 mov_imm w1, \_esr_el3_val ccmp w0, w1, #0, eq diff --git a/plat/st/stm32mp1/platform.mk b/plat/st/stm32mp1/platform.mk index 0f579a40a..a4c40c4c4 100644 --- a/plat/st/stm32mp1/platform.mk +++ b/plat/st/stm32mp1/platform.mk @@ -38,6 +38,7 @@ TF_CFLAGS += -Wformat-signedness # Not needed for Cortex-A7 WORKAROUND_CVE_2017_5715:= 0 +WORKAROUND_CVE_2022_23960:= 0 ifeq (${PSA_FWU_SUPPORT},1) ifneq (${STM32MP_USE_STM32IMAGE},1) diff --git a/services/arm_arch_svc/arm_arch_svc_setup.c b/services/arm_arch_svc/arm_arch_svc_setup.c index 5523a1c38..46ccd9e74 100644 --- a/services/arm_arch_svc/arm_arch_svc_setup.c +++ b/services/arm_arch_svc/arm_arch_svc_setup.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2018-2021, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -9,6 +9,7 @@ #include #include #include +#include #include #include #include @@ -74,6 +75,20 @@ static int32_t smccc_arch_features(u_register_t arg1) } #endif +#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715) + case SMCCC_ARCH_WORKAROUND_3: + /* + * SMCCC_ARCH_WORKAROUND_3 should also take into account + * CVE-2017-5715 since this SMC can be used instead of + * SMCCC_ARCH_WORKAROUND_1. + */ + if ((check_smccc_arch_wa3_applies() == ERRATA_NOT_APPLIES) && + (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)) { + return 1; + } + return 0; /* ERRATA_APPLIES || ERRATA_MISSING */ +#endif + /* Fallthrough */ default: @@ -117,7 +132,7 @@ static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid, case SMCCC_ARCH_WORKAROUND_1: /* * The workaround has already been applied on affected PEs - * during entry to EL3. On unaffected PEs, this function + * during entry to EL3. 
On unaffected PEs, this function * has no effect. */ SMC_RET0(handle); @@ -131,6 +146,15 @@ static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid, * has no effect. */ SMC_RET0(handle); +#endif +#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715) + case SMCCC_ARCH_WORKAROUND_3: + /* + * The workaround has already been applied on affected PEs + * during entry to EL3. On unaffected PEs, this function + * has no effect. + */ + SMC_RET0(handle); #endif default: WARN("Unimplemented Arm Architecture Service Call: 0x%x \n",