Merge "fix(mpam): run-time checks for mpam save/restore routines" into integration

This commit is contained in:
Manish Pandey 2023-02-10 10:20:07 +01:00 committed by TrustedFirmware Code Review
commit d69a0bf22d
2 changed files with 181 additions and 29 deletions

View file

@ -1,5 +1,5 @@
/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 * Copyright (c) 2020-2022, NVIDIA Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
@ -1063,13 +1063,17 @@
#define PMBLIMITR_EL1		S3_0_C9_C10_0

/*******************************************************************************
 * Definitions for system register interface, shifts and masks for MPAM
 ******************************************************************************/
#define MPAMIDR_EL1		S3_0_C10_C4_4
#define MPAM2_EL2		S3_4_C10_C5_0
#define MPAMHCR_EL2		S3_4_C10_C4_0
#define MPAM3_EL3		S3_6_C10_C5_0

/* MPAMIDR_EL1.HAS_HCR, bit [17]: MPAMHCR_EL2/MPAMVPM*_EL2 are implemented */
#define MPAMIDR_EL1_HAS_HCR_SHIFT	ULL(0x11)
/* MPAMIDR_EL1.VPMR_MAX, bits [20:18]: highest implemented MPAMVPM<n>_EL2 */
#define MPAMIDR_EL1_VPMR_MAX_SHIFT	ULL(0x12)
#define MPAMIDR_EL1_VPMR_MAX_WIDTH	ULL(0x3)
/* Architectural upper bound of VPMR_MAX (3-bit field) */
#define MPAMIDR_EL1_VPMR_MAX_POSSIBLE	ULL(0x7)

/*******************************************************************************
 * Definitions for system register interface to AMU for FEAT_AMUv1
 ******************************************************************************/

View file

@ -1,5 +1,5 @@
/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
/* -----------------------------------------------------------------------
 * void el2_sysregs_context_save_mpam(el2_sysregs_t *ctx)
 * In:    x0 = pointer to the EL2 system-register context frame
 * Clobb: x9-x16, flags
 *
 * Save the MPAM EL2 context. Registers beyond MPAM2_EL2 exist only when
 * MPAMIDR_EL1.HAS_HCR == 1, and only MPAMVPM1_EL2..MPAMVPM<VPMR_MAX>_EL2
 * are implemented, so both conditions are probed at run time.
 * -----------------------------------------------------------------------
 */
func el2_sysregs_context_save_mpam
	mrs	x10, MPAM2_EL2
	str	x10, [x0, #CTX_MPAM2_EL2]

	mrs	x10, MPAMIDR_EL1

	/*
	 * The context registers that we intend to save would be part of the
	 * PE's system register frame only if MPAMIDR_EL1.HAS_HCR == 1.
	 */
	tbz	w10, #MPAMIDR_EL1_HAS_HCR_SHIFT, 3f

	/*
	 * MPAMHCR_EL2, MPAMVPMV_EL2 and MPAMVPM0_EL2 would be present in the
	 * system register frame if MPAMIDR_EL1.HAS_HCR == 1. Proceed to save
	 * the context of these registers.
	 */
	mrs	x11, MPAMHCR_EL2
	mrs	x12, MPAMVPM0_EL2
	stp	x11, x12, [x0, #CTX_MPAMHCR_EL2]

	mrs	x13, MPAMVPMV_EL2
	str	x13, [x0, #CTX_MPAMVPMV_EL2]

	/*
	 * MPAMIDR_EL1.VPMR_MAX has to be probed to obtain the maximum supported
	 * VPMR value. Proceed to save the context of registers from
	 * MPAMVPM1_EL2 to MPAMVPM<x>_EL2 where x is VPMR_MAX. From MPAM spec,
	 * VPMR_MAX should not be zero if HAS_HCR == 1.
	 */
	ubfx	x10, x10, #MPAMIDR_EL1_VPMR_MAX_SHIFT, \
		#MPAMIDR_EL1_VPMR_MAX_WIDTH

	/*
	 * Once VPMR_MAX has been identified, calculate the offset relative to
	 * PC to jump to so that relevant context can be saved. The offset is
	 * calculated as (VPMR_POSSIBLE_MAX - VPMR_MAX) * (instruction size for
	 * saving one VPM register) + (absolute address of label "1").
	 */
	mov	w11, #MPAMIDR_EL1_VPMR_MAX_POSSIBLE
	sub	w10, w11, w10

	/* Calculate the size of one block of MPAMVPM*_EL2 save */
	adr	x11, 1f
	adr	x12, 2f
	sub	x12, x12, x11

	madd	x10, x10, x12, x11
	br	x10

	/*
	 * The branch above would land properly on one of the blocks following
	 * label "1". The blocks are ordered MPAMVPM7 down to MPAMVPM1 so that
	 * landing at the block for VPMR_MAX falls through all lower-numbered
	 * registers. Make sure that the order of save is retained.
	 */
1:
#if ENABLE_BTI
	bti	j		/* indirect-branch landing pad for "br x10" */
#endif
	mrs	x10, MPAMVPM7_EL2
	str	x10, [x0, #CTX_MPAMVPM7_EL2]

2:
#if ENABLE_BTI
	bti	j
#endif
	mrs	x11, MPAMVPM6_EL2
	str	x11, [x0, #CTX_MPAMVPM6_EL2]

#if ENABLE_BTI
	bti	j
#endif
	mrs	x12, MPAMVPM5_EL2
	str	x12, [x0, #CTX_MPAMVPM5_EL2]

#if ENABLE_BTI
	bti	j
#endif
	mrs	x13, MPAMVPM4_EL2
	str	x13, [x0, #CTX_MPAMVPM4_EL2]

#if ENABLE_BTI
	bti	j
#endif
	mrs	x14, MPAMVPM3_EL2
	str	x14, [x0, #CTX_MPAMVPM3_EL2]

#if ENABLE_BTI
	bti	j
#endif
	mrs	x15, MPAMVPM2_EL2
	str	x15, [x0, #CTX_MPAMVPM2_EL2]

#if ENABLE_BTI
	bti	j
#endif
	mrs	x16, MPAMVPM1_EL2
	str	x16, [x0, #CTX_MPAMVPM1_EL2]

3:	ret
endfunc el2_sysregs_context_save_mpam
/* -----------------------------------------------------------------------
 * void el2_sysregs_context_restore_mpam(el2_sysregs_t *ctx)
 * In:    x0 = pointer to the EL2 system-register context frame
 * Clobb: x9-x16, flags
 *
 * Restore the MPAM EL2 context saved by el2_sysregs_context_save_mpam,
 * applying the same MPAMIDR_EL1.HAS_HCR / VPMR_MAX run-time checks so
 * that only implemented registers are written.
 * -----------------------------------------------------------------------
 */
func el2_sysregs_context_restore_mpam
	ldr	x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10

	mrs	x10, MPAMIDR_EL1

	/*
	 * The context registers that we intend to restore would be part of the
	 * PE's system register frame only if MPAMIDR_EL1.HAS_HCR == 1.
	 */
	tbz	w10, #MPAMIDR_EL1_HAS_HCR_SHIFT, 3f

	/*
	 * MPAMHCR_EL2, MPAMVPMV_EL2 and MPAMVPM0_EL2 would be present in the
	 * system register frame if MPAMIDR_EL1.HAS_HCR == 1. Proceed to restore
	 * the context of these registers.
	 */
	ldp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
	msr	MPAMHCR_EL2, x11
	msr	MPAMVPM0_EL2, x12

	ldr	x13, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x13

	/*
	 * MPAMIDR_EL1.VPMR_MAX has to be probed to obtain the maximum supported
	 * VPMR value. Proceed to restore the context of registers from
	 * MPAMVPM1_EL2 to MPAMVPM<x>_EL2 where x is VPMR_MAX. From MPAM spec,
	 * VPMR_MAX should not be zero if HAS_HCR == 1.
	 */
	ubfx	x10, x10, #MPAMIDR_EL1_VPMR_MAX_SHIFT, \
		#MPAMIDR_EL1_VPMR_MAX_WIDTH

	/*
	 * Once VPMR_MAX has been identified, calculate the offset relative to
	 * PC to jump to so that relevant context can be restored. The offset is
	 * calculated as (VPMR_POSSIBLE_MAX - VPMR_MAX) * (instruction size for
	 * restoring one VPM register) + (absolute address of label "1").
	 */
	mov	w11, #MPAMIDR_EL1_VPMR_MAX_POSSIBLE
	sub	w10, w11, w10

	/* Calculate the size of one block of MPAMVPM*_EL2 restore */
	adr	x11, 1f
	adr	x12, 2f
	sub	x12, x12, x11

	madd	x10, x10, x12, x11
	br	x10

	/*
	 * The branch above would land properly on one of the blocks following
	 * label "1". The blocks are ordered MPAMVPM7 down to MPAMVPM1 so that
	 * landing at the block for VPMR_MAX falls through all lower-numbered
	 * registers. Make sure that the order of restore is retained.
	 */
1:
#if ENABLE_BTI
	bti	j		/* indirect-branch landing pad for "br x10" */
#endif
	ldr	x10, [x0, #CTX_MPAMVPM7_EL2]
	msr	MPAMVPM7_EL2, x10

2:
#if ENABLE_BTI
	bti	j
#endif
	ldr	x11, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x11

#if ENABLE_BTI
	bti	j
#endif
	ldr	x12, [x0, #CTX_MPAMVPM5_EL2]
	msr	MPAMVPM5_EL2, x12

#if ENABLE_BTI
	bti	j
#endif
	ldr	x13, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x13

#if ENABLE_BTI
	bti	j
#endif
	ldr	x14, [x0, #CTX_MPAMVPM3_EL2]
	msr	MPAMVPM3_EL2, x14

#if ENABLE_BTI
	bti	j
#endif
	ldr	x15, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x15

#if ENABLE_BTI
	bti	j
#endif
	ldr	x16, [x0, #CTX_MPAMVPM1_EL2]
	msr	MPAMVPM1_EL2, x16

3:	ret
endfunc el2_sysregs_context_restore_mpam
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */