fix(security): apply SMCCC_ARCH_WORKAROUND_3 to A73/A75/A72/A57
This patch applies CVE-2022-23960 workarounds for Cortex-A75, Cortex-A73, Cortex-A72 and Cortex-A57. It also implements the new SMCCC_ARCH_WORKAROUND_3 and enables the necessary discovery hooks for Cortex-A72, Cortex-A57, Cortex-A73 and Cortex-A75 so that the SMC can be discovered via SMCCC_ARCH_FEATURES. SMCCC_ARCH_WORKAROUND_3 is implemented for A57/A72 because some revisions are affected by both CVE-2022-23960 and CVE-2017-5715, which allows callers to replace SMCCC_ARCH_WORKAROUND_1 calls with SMCCC_ARCH_WORKAROUND_3. For details of SMCCC_ARCH_WORKAROUND_3, please refer to the SMCCC v1.4 specification.

Signed-off-by: Bipin Ravi <bipin.ravi@arm.com>
Signed-off-by: John Powell <john.powell@arm.com>
Change-Id: Ifa6d9c7baa6764924638efe3c70468f98d60ed7c
parent be9121fd31
commit 9b2510b69d

13 changed files with 206 additions and 27 deletions
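As a caller-side illustration of the discovery flow described in the commit message (not part of this patch), the sketch below probes SMCCC_ARCH_FEATURES for SMCCC_ARCH_WORKAROUND_3 and falls back to SMCCC_ARCH_WORKAROUND_1. The smc_call() helper and the select_/apply_ function names are hypothetical; the function ID values match the definitions added in this patch.

/*
 * Caller-side sketch: pick the widest Spectre workaround the firmware
 * advertises, then invoke it at the points the OS already mitigates
 * branch-target injection. smc_call() is a hypothetical helper that
 * issues an SMC32 with the given function ID/argument and returns w0.
 */
#include <stdint.h>

#define SMCCC_ARCH_FEATURES		0x80000001U
#define SMCCC_ARCH_WORKAROUND_1		0x80008000U
#define SMCCC_ARCH_WORKAROUND_3		0x80003FFFU

extern int32_t smc_call(uint32_t fid, uint32_t arg0);	/* hypothetical */

static uint32_t spectre_wa_fid;

void select_spectre_workaround(void)
{
	/* >= 0 means the SMC is implemented (0: required, 1: not required). */
	if (smc_call(SMCCC_ARCH_FEATURES, SMCCC_ARCH_WORKAROUND_3) >= 0)
		spectre_wa_fid = SMCCC_ARCH_WORKAROUND_3;	/* CVE-2022-23960 (and CVE-2017-5715) */
	else if (smc_call(SMCCC_ARCH_FEATURES, SMCCC_ARCH_WORKAROUND_1) >= 0)
		spectre_wa_fid = SMCCC_ARCH_WORKAROUND_1;	/* CVE-2017-5715 only */
}

void apply_spectre_workaround(void)
{
	if (spectre_wa_fid != 0U)
		(void)smc_call(spectre_wa_fid, 0U);
}

A caller that already issues SMCCC_ARCH_WORKAROUND_1 can simply substitute SMCCC_ARCH_WORKAROUND_3 where the firmware advertises it, which is why the patch wires the new ID into the SMCCC_ARCH_FEATURES handler below.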
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2014-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2014-2022, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -21,6 +21,7 @@

 #define CPU_NO_EXTRA1_FUNC 0
 #define CPU_NO_EXTRA2_FUNC 0
+#define CPU_NO_EXTRA3_FUNC 0

 /* Word size for 64-bit CPUs */
 #define CPU_WORD_SIZE 8
@@ -39,6 +40,7 @@
 .equ CPU_MIDR_SIZE, CPU_WORD_SIZE
 .equ CPU_EXTRA1_FUNC_SIZE, CPU_WORD_SIZE
 .equ CPU_EXTRA2_FUNC_SIZE, CPU_WORD_SIZE
+.equ CPU_EXTRA3_FUNC_SIZE, CPU_WORD_SIZE
 .equ CPU_E_HANDLER_FUNC_SIZE, CPU_WORD_SIZE
 .equ CPU_RESET_FUNC_SIZE, CPU_WORD_SIZE
 .equ CPU_PWR_DWN_OPS_SIZE, CPU_WORD_SIZE * CPU_MAX_PWR_DWN_OPS
@@ -80,7 +82,8 @@
 .equ CPU_RESET_FUNC, CPU_MIDR + CPU_MIDR_SIZE
 .equ CPU_EXTRA1_FUNC, CPU_RESET_FUNC + CPU_RESET_FUNC_SIZE
 .equ CPU_EXTRA2_FUNC, CPU_EXTRA1_FUNC + CPU_EXTRA1_FUNC_SIZE
-.equ CPU_E_HANDLER_FUNC, CPU_EXTRA2_FUNC + CPU_EXTRA2_FUNC_SIZE
+.equ CPU_EXTRA3_FUNC, CPU_EXTRA2_FUNC + CPU_EXTRA2_FUNC_SIZE
+.equ CPU_E_HANDLER_FUNC, CPU_EXTRA3_FUNC + CPU_EXTRA3_FUNC_SIZE
 .equ CPU_PWR_DWN_OPS, CPU_E_HANDLER_FUNC + CPU_E_HANDLER_FUNC_SIZE
 .equ CPU_ERRATA_FUNC, CPU_PWR_DWN_OPS + CPU_PWR_DWN_OPS_SIZE
 .equ CPU_ERRATA_LOCK, CPU_ERRATA_FUNC + CPU_ERRATA_FUNC_SIZE
@@ -134,9 +137,13 @@
 * some CPUs use this entry to set a test function to determine if
 * the workaround for CVE-2017-5715 needs to be applied or not.
 * _extra2:
 * This is a placeholder for future per CPU operations. Currently
 * some CPUs use this entry to set a function to disable the
 * workaround for CVE-2018-3639.
+ * _extra3:
+ * This is a placeholder for future per CPU operations. Currently,
+ * some CPUs use this entry to set a test function to determine if
+ * the workaround for CVE-2022-23960 needs to be applied or not.
 * _e_handler:
 * This is a placeholder for future per CPU exception handlers.
 * _power_down_ops:
@@ -149,7 +156,7 @@
 * used to handle power down at subsequent levels
 */
 .macro declare_cpu_ops_base _name:req, _midr:req, _resetfunc:req, \
-	_extra1:req, _extra2:req, _e_handler:req, _power_down_ops:vararg
+	_extra1:req, _extra2:req, _extra3:req, _e_handler:req, _power_down_ops:vararg
	.section cpu_ops, "a"
	.align 3
	.type cpu_ops_\_name, %object
@@ -159,6 +166,7 @@
 #endif
	.quad \_extra1
	.quad \_extra2
+	.quad \_extra3
	.quad \_e_handler
 #ifdef IMAGE_BL31
	/* Insert list of functions */
@@ -204,21 +212,21 @@

 .macro declare_cpu_ops _name:req, _midr:req, _resetfunc:req, \
	_power_down_ops:vararg
-	declare_cpu_ops_base \_name, \_midr, \_resetfunc, 0, 0, 0, \
+	declare_cpu_ops_base \_name, \_midr, \_resetfunc, 0, 0, 0, 0, \
		\_power_down_ops
 .endm

 .macro declare_cpu_ops_eh _name:req, _midr:req, _resetfunc:req, \
	_e_handler:req, _power_down_ops:vararg
	declare_cpu_ops_base \_name, \_midr, \_resetfunc, \
-		0, 0, \_e_handler, \_power_down_ops
+		0, 0, 0, \_e_handler, \_power_down_ops
 .endm

 .macro declare_cpu_ops_wa _name:req, _midr:req, \
	_resetfunc:req, _extra1:req, _extra2:req, \
-	_power_down_ops:vararg
+	_extra3:req, _power_down_ops:vararg
	declare_cpu_ops_base \_name, \_midr, \_resetfunc, \
-		\_extra1, \_extra2, 0, \_power_down_ops
+		\_extra1, \_extra2, \_extra3, 0, \_power_down_ops
 .endm

 #if REPORT_ERRATA
include/lib/cpus/wa_cve_2022_23960.h (new file, 12 lines)

@@ -0,0 +1,12 @@
+/*
+ * Copyright (c) 2022, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#ifndef WA_CVE_2022_23960_H
+#define WA_CVE_2022_23960_H
+
+int check_smccc_arch_wa3_applies(void);
+
+#endif /* WA_CVE_2022_23960_H */
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2018-2020, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -12,6 +12,7 @@
 #define SMCCC_ARCH_SOC_ID U(0x80000002)
 #define SMCCC_ARCH_WORKAROUND_1 U(0x80008000)
 #define SMCCC_ARCH_WORKAROUND_2 U(0x80007FFF)
+#define SMCCC_ARCH_WORKAROUND_3 U(0x80003FFF)

 #define SMCCC_GET_SOC_VERSION U(0)
 #define SMCCC_GET_SOC_REVISION U(1)
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2014-2020, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2014-2022, ARM Limited and Contributors. All rights reserved.
 * Copyright (c) 2020, NVIDIA Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
@@ -520,6 +520,11 @@ func check_errata_cve_2022_23960
	ret
 endfunc check_errata_cve_2022_23960

+func check_smccc_arch_workaround_3
+	mov	x0, #ERRATA_APPLIES
+	ret
+endfunc check_smccc_arch_workaround_3
+
 /* ----------------------------------------------------
 * The CPU Ops core power down function for Cortex-A57.
 * ----------------------------------------------------
@@ -676,5 +681,6 @@ declare_cpu_ops_wa cortex_a57, CORTEX_A57_MIDR, \
	cortex_a57_reset_func, \
	check_errata_cve_2017_5715, \
	CPU_NO_EXTRA2_FUNC, \
+	check_smccc_arch_workaround_3, \
	cortex_a57_core_pwr_dwn, \
	cortex_a57_cluster_pwr_dwn
@@ -147,6 +147,15 @@ func check_errata_cve_2022_23960
	ret
 endfunc check_errata_cve_2022_23960

+func check_smccc_arch_workaround_3
+	cpu_check_csv2	x0, 1f
+	mov	x0, #ERRATA_APPLIES
+	ret
+1:
+	mov	x0, #ERRATA_NOT_APPLIES
+	ret
+endfunc check_smccc_arch_workaround_3
+
 /* -------------------------------------------------
 * The CPU Ops reset function for Cortex-A72.
 * -------------------------------------------------
@@ -360,5 +369,6 @@ declare_cpu_ops_wa cortex_a72, CORTEX_A72_MIDR, \
	cortex_a72_reset_func, \
	check_errata_cve_2017_5715, \
	CPU_NO_EXTRA2_FUNC, \
+	check_smccc_arch_workaround_3, \
	cortex_a72_core_pwr_dwn, \
	cortex_a72_cluster_pwr_dwn
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2022, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -111,13 +111,21 @@ func cortex_a73_reset_func
	bl	errata_a73_855423_wa
 #endif

-#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
+#if IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960)
	cpu_check_csv2	x0, 1f
	adr	x0, wa_cve_2017_5715_bpiall_vbar
	msr	vbar_el3, x0
-	/* isb will be performed before returning from this function */
+	isb
+	/* Skip installing vector table again for CVE_2022_23960 */
+	b	2f
 1:
+#if WORKAROUND_CVE_2022_23960
+	adr	x0, wa_cve_2017_5715_bpiall_vbar
+	msr	vbar_el3, x0
+	isb
 #endif
+2:
+#endif /* IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960) */

 #if WORKAROUND_CVE_2018_3639
	mrs	x0, CORTEX_A73_IMP_DEF_REG1
@@ -221,6 +229,28 @@ func check_errata_cve_2018_3639
	ret
 endfunc check_errata_cve_2018_3639

+func check_errata_cve_2022_23960
+#if WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960
+	cpu_check_csv2	x0, 1f
+	mov	x0, #ERRATA_APPLIES
+	ret
+1:
+# if WORKAROUND_CVE_2022_23960
+	mov	x0, #ERRATA_APPLIES
+# else
+	mov	x0, #ERRATA_MISSING
+# endif /* WORKAROUND_CVE_2022_23960 */
+	ret
+#endif /* WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960 */
+	mov	x0, #ERRATA_MISSING
+	ret
+endfunc check_errata_cve_2022_23960
+
+func check_smccc_arch_workaround_3
+	mov	x0, #ERRATA_APPLIES
+	ret
+endfunc check_smccc_arch_workaround_3
+
 #if REPORT_ERRATA
 /*
 * Errata printing function for Cortex A75. Must follow AAPCS.
@@ -239,6 +269,7 @@ func cortex_a73_errata_report
	report_errata ERRATA_A73_855423, cortex_a73, 855423
	report_errata WORKAROUND_CVE_2017_5715, cortex_a73, cve_2017_5715
	report_errata WORKAROUND_CVE_2018_3639, cortex_a73, cve_2018_3639
+	report_errata WORKAROUND_CVE_2022_23960, cortex_a73, cve_2022_23960

	ldp	x8, x30, [sp], #16
	ret
@@ -269,5 +300,6 @@ declare_cpu_ops_wa cortex_a73, CORTEX_A73_MIDR, \
	cortex_a73_reset_func, \
	check_errata_cve_2017_5715, \
	CPU_NO_EXTRA2_FUNC, \
+	check_smccc_arch_workaround_3, \
	cortex_a73_core_pwr_dwn, \
	cortex_a73_cluster_pwr_dwn
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2022, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -90,13 +90,21 @@ func cortex_a75_reset_func
	bl	errata_a75_790748_wa
 #endif

-#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
+#if IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960)
	cpu_check_csv2	x0, 1f
	adr	x0, wa_cve_2017_5715_bpiall_vbar
	msr	vbar_el3, x0
	isb
+	/* Skip installing vector table again for CVE_2022_23960 */
+	b	2f
 1:
+#if WORKAROUND_CVE_2022_23960
+	adr	x0, wa_cve_2017_5715_bpiall_vbar
+	msr	vbar_el3, x0
+	isb
 #endif
+2:
+#endif /* IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960) */

 #if WORKAROUND_CVE_2018_3639
	mrs	x0, CORTEX_A75_CPUACTLR_EL1
@@ -161,6 +169,28 @@ func check_errata_cve_2018_3639
	ret
 endfunc check_errata_cve_2018_3639

+func check_errata_cve_2022_23960
+#if WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960
+	cpu_check_csv2	x0, 1f
+	mov	x0, #ERRATA_APPLIES
+	ret
+1:
+# if WORKAROUND_CVE_2022_23960
+	mov	x0, #ERRATA_APPLIES
+# else
+	mov	x0, #ERRATA_MISSING
+# endif /* WORKAROUND_CVE_2022_23960 */
+	ret
+#endif /* WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960 */
+	mov	x0, #ERRATA_MISSING
+	ret
+endfunc check_errata_cve_2022_23960
+
+func check_smccc_arch_workaround_3
+	mov	x0, #ERRATA_APPLIES
+	ret
+endfunc check_smccc_arch_workaround_3
+
 /* ---------------------------------------------
 * HW will do the cache maintenance while powering down
 * ---------------------------------------------
@@ -197,6 +227,7 @@ func cortex_a75_errata_report
	report_errata WORKAROUND_CVE_2018_3639, cortex_a75, cve_2018_3639
	report_errata ERRATA_DSU_798953, cortex_a75, dsu_798953
	report_errata ERRATA_DSU_936184, cortex_a75, dsu_936184
+	report_errata WORKAROUND_CVE_2022_23960, cortex_a75, cve_2022_23960

	ldp	x8, x30, [sp], #16
	ret
@@ -226,4 +257,5 @@ declare_cpu_ops_wa cortex_a75, CORTEX_A75_MIDR, \
	cortex_a75_reset_func, \
	check_errata_cve_2017_5715, \
	CPU_NO_EXTRA2_FUNC, \
+	check_smccc_arch_workaround_3, \
	cortex_a75_core_pwr_dwn
@@ -685,4 +685,5 @@ declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
+	CPU_NO_EXTRA3_FUNC, \
	cortex_a76_core_pwr_dwn
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2014-2021, Arm Limited and Contributors. All rights reserved.
+ * Copyright (c) 2014-2022, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -381,7 +381,7 @@ func check_wa_cve_2017_5715
	 * If the reserved function pointer is NULL, this CPU
	 * is unaffected by CVE-2017-5715 so bail out.
	 */
-	cmp	x0, #0
+	cmp	x0, #CPU_NO_EXTRA1_FUNC
	beq	1f
	br	x0
 1:
@@ -416,3 +416,41 @@ func wa_cve_2018_3639_get_disable_ptr
	ldr	x0, [x0, #CPU_EXTRA2_FUNC]
	ret
 endfunc wa_cve_2018_3639_get_disable_ptr
+
+/*
+ * int check_smccc_arch_wa3_applies(void);
+ *
+ * This function checks whether SMCCC_ARCH_WORKAROUND_3 is enabled to mitigate
+ * CVE-2022-23960 for this CPU. It returns:
+ *  - ERRATA_APPLIES when SMCCC_ARCH_WORKAROUND_3 can be invoked to mitigate
+ *    the CVE.
+ *  - ERRATA_NOT_APPLIES when SMCCC_ARCH_WORKAROUND_3 should not be invoked to
+ *    mitigate the CVE.
+ *
+ * NOTE: Must be called only after cpu_ops have been initialized
+ *       in per-CPU data.
+ */
+	.globl	check_smccc_arch_wa3_applies
+func check_smccc_arch_wa3_applies
+	mrs	x0, tpidr_el3
+#if ENABLE_ASSERTIONS
+	cmp	x0, #0
+	ASM_ASSERT(ne)
+#endif
+	ldr	x0, [x0, #CPU_DATA_CPU_OPS_PTR]
+#if ENABLE_ASSERTIONS
+	cmp	x0, #0
+	ASM_ASSERT(ne)
+#endif
+	ldr	x0, [x0, #CPU_EXTRA3_FUNC]
+	/*
+	 * If the reserved function pointer is NULL, this CPU
+	 * is unaffected by CVE-2022-23960 so bail out.
+	 */
+	cmp	x0, #CPU_NO_EXTRA3_FUNC
+	beq	1f
+	br	x0
+1:
+	mov	x0, #ERRATA_NOT_APPLIES
+	ret
+endfunc check_smccc_arch_wa3_applies
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2022, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -308,22 +308,25 @@ vector_entry bpiall_ret_sync_exception_aarch32

	/*
	 * Check if SMC is coming from A64 state on #0
-	 * with W0 = SMCCC_ARCH_WORKAROUND_1
+	 * with W0 = SMCCC_ARCH_WORKAROUND_1 or W0 = SMCCC_ARCH_WORKAROUND_3
	 *
	 * This sequence evaluates as:
-	 * (W0==SMCCC_ARCH_WORKAROUND_1) ? (ESR_EL3==SMC#0) : (NE)
+	 * (W0==SMCCC_ARCH_WORKAROUND_1) || (W0==SMCCC_ARCH_WORKAROUND_3) ?
+	 * (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation
	 */
	orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_1
	cmp	w0, w2
+	orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_3
+	ccmp	w0, w2, #4, ne
	mov_imm	w2, ESR_EL3_A64_SMC0
	ccmp	w3, w2, #0, eq
	/* Static predictor will predict a fall through */
	bne	1f
	eret
 1:
-	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
-	b	sync_exception_aarch64
+	/* restore x2 and x3 and continue sync exception handling */
+	b	bpiall_ret_sync_exception_aarch32_tail
 end_vector_entry bpiall_ret_sync_exception_aarch32

 vector_entry bpiall_ret_irq_aarch32
@@ -355,3 +358,11 @@ end_vector_entry bpiall_ret_fiq_aarch32
 vector_entry bpiall_ret_serror_aarch32
	b	report_unhandled_exception
 end_vector_entry bpiall_ret_serror_aarch32
+
+/*
+ * Part of bpiall_ret_sync_exception_aarch32 to save vector space
+ */
+func bpiall_ret_sync_exception_aarch32_tail
+	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
+	b	sync_exception_aarch64
+endfunc bpiall_ret_sync_exception_aarch32_tail
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2022, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -34,15 +34,18 @@ vector_base wa_cve_2017_5715_mmu_vbar

	/*
	 * Ensure SMC is coming from A64/A32 state on #0
-	 * with W0 = SMCCC_ARCH_WORKAROUND_1
+	 * with W0 = SMCCC_ARCH_WORKAROUND_1 or W0 = SMCCC_ARCH_WORKAROUND_3
	 *
	 * This sequence evaluates as:
-	 * (W0==SMCCC_ARCH_WORKAROUND_1) ? (ESR_EL3==SMC#0) : (NE)
+	 * (W0==SMCCC_ARCH_WORKAROUND_1) || (W0==SMCCC_ARCH_WORKAROUND_3) ?
+	 * (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation
	 */
	.if \_is_sync_exception
	orr	w1, wzr, #SMCCC_ARCH_WORKAROUND_1
	cmp	w0, w1
+	orr	w1, wzr, #SMCCC_ARCH_WORKAROUND_3
+	ccmp	w0, w1, #4, ne
	mrs	x0, esr_el3
	mov_imm	w1, \_esr_el3_val
	ccmp	w0, w1, #0, eq
@@ -38,6 +38,7 @@ TF_CFLAGS += -Wformat-signedness

 # Not needed for Cortex-A7
 WORKAROUND_CVE_2017_5715:= 0
+WORKAROUND_CVE_2022_23960:= 0

 ifeq (${PSA_FWU_SUPPORT},1)
 ifneq (${STM32MP_USE_STM32IMAGE},1)
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2018-2021, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -9,6 +9,7 @@
 #include <lib/cpus/errata_report.h>
 #include <lib/cpus/wa_cve_2017_5715.h>
 #include <lib/cpus/wa_cve_2018_3639.h>
+#include <lib/cpus/wa_cve_2022_23960.h>
 #include <lib/smccc.h>
 #include <services/arm_arch_svc.h>
 #include <smccc_helpers.h>
@@ -74,6 +75,20 @@ static int32_t smccc_arch_features(u_register_t arg1)
	}
 #endif

+#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715)
+	case SMCCC_ARCH_WORKAROUND_3:
+		/*
+		 * SMCCC_ARCH_WORKAROUND_3 should also take into account
+		 * CVE-2017-5715 since this SMC can be used instead of
+		 * SMCCC_ARCH_WORKAROUND_1.
+		 */
+		if ((check_smccc_arch_wa3_applies() == ERRATA_NOT_APPLIES) &&
+		    (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)) {
+			return 1;
+		}
+		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
+#endif
+
	/* Fallthrough */

	default:
@@ -117,7 +132,7 @@ static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
	case SMCCC_ARCH_WORKAROUND_1:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
@@ -131,6 +146,15 @@ static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
		 * has no effect.
		 */
		SMC_RET0(handle);
+#endif
+#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715)
+	case SMCCC_ARCH_WORKAROUND_3:
+		/*
+		 * The workaround has already been applied on affected PEs
+		 * during entry to EL3. On unaffected PEs, this function
+		 * has no effect.
+		 */
+		SMC_RET0(handle);
 #endif
	default:
		WARN("Unimplemented Arm Architecture Service Call: 0x%x \n",