mirror of
https://github.com/ARM-software/arm-trusted-firmware.git
synced 2025-04-19 11:04:20 +00:00
Fix the CAS spinlock implementation
Make the spinlock implementation use ARMv8.1-LSE CAS instruction based on a platform build option. The CAS-based implementation used to be unconditionally selected for all ARMv8.1+ platforms. The previous CAS spinlock implementation had a bug wherein the spin_unlock() implementation had an `sev` after `stlr` which is not sufficient. A dsb is needed to ensure that the stlr completes prior to the sev. Having a dsb is heavyweight and a better solution would be to use load exclusive semantics to monitor the lock and wake up from wfe when a store happens to the lock. The patch implements the same. Change-Id: I5283ce4a889376e4cc01d1b9d09afa8229a2e522 Signed-off-by: Soby Mathew <soby.mathew@arm.com> Signed-off-by: Olivier Deprez <olivier.deprez@arm.com>
This commit is contained in:
parent
ace23683be
commit
c97cba4ea4
5 changed files with 43 additions and 37 deletions
11
Makefile
11
Makefile
|
@ -141,6 +141,15 @@ else
|
||||||
$(error Unknown BRANCH_PROTECTION value ${BRANCH_PROTECTION})
|
$(error Unknown BRANCH_PROTECTION value ${BRANCH_PROTECTION})
|
||||||
endif
|
endif
|
||||||
|
|
||||||
|
# USE_SPINLOCK_CAS requires AArch64 build
|
||||||
|
ifeq (${USE_SPINLOCK_CAS},1)
|
||||||
|
ifneq (${ARCH},aarch64)
|
||||||
|
$(error USE_SPINLOCK_CAS requires AArch64)
|
||||||
|
else
|
||||||
|
$(info USE_SPINLOCK_CAS is an experimental feature)
|
||||||
|
endif
|
||||||
|
endif
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
# Toolchain
|
# Toolchain
|
||||||
################################################################################
|
################################################################################
|
||||||
|
@ -690,6 +699,7 @@ $(eval $(call assert_boolean,WARMBOOT_ENABLE_DCACHE_EARLY))
|
||||||
$(eval $(call assert_boolean,BL2_AT_EL3))
|
$(eval $(call assert_boolean,BL2_AT_EL3))
|
||||||
$(eval $(call assert_boolean,BL2_IN_XIP_MEM))
|
$(eval $(call assert_boolean,BL2_IN_XIP_MEM))
|
||||||
$(eval $(call assert_boolean,BL2_INV_DCACHE))
|
$(eval $(call assert_boolean,BL2_INV_DCACHE))
|
||||||
|
$(eval $(call assert_boolean,USE_SPINLOCK_CAS))
|
||||||
|
|
||||||
$(eval $(call assert_numeric,ARM_ARCH_MAJOR))
|
$(eval $(call assert_numeric,ARM_ARCH_MAJOR))
|
||||||
$(eval $(call assert_numeric,ARM_ARCH_MINOR))
|
$(eval $(call assert_numeric,ARM_ARCH_MINOR))
|
||||||
|
@ -755,6 +765,7 @@ $(eval $(call add_define,WARMBOOT_ENABLE_DCACHE_EARLY))
|
||||||
$(eval $(call add_define,BL2_AT_EL3))
|
$(eval $(call add_define,BL2_AT_EL3))
|
||||||
$(eval $(call add_define,BL2_IN_XIP_MEM))
|
$(eval $(call add_define,BL2_IN_XIP_MEM))
|
||||||
$(eval $(call add_define,BL2_INV_DCACHE))
|
$(eval $(call add_define,BL2_INV_DCACHE))
|
||||||
|
$(eval $(call add_define,USE_SPINLOCK_CAS))
|
||||||
|
|
||||||
ifeq (${SANITIZE_UB},trap)
|
ifeq (${SANITIZE_UB},trap)
|
||||||
$(eval $(call add_define,MONITOR_TRAPS))
|
$(eval $(call add_define,MONITOR_TRAPS))
|
||||||
|
|
|
@ -2540,8 +2540,11 @@ Armv8.1-A
|
||||||
This Architecture Extension is targeted when ``ARM_ARCH_MAJOR`` >= 8, or when
|
This Architecture Extension is targeted when ``ARM_ARCH_MAJOR`` >= 8, or when
|
||||||
``ARM_ARCH_MAJOR`` == 8 and ``ARM_ARCH_MINOR`` >= 1.
|
``ARM_ARCH_MAJOR`` == 8 and ``ARM_ARCH_MINOR`` >= 1.
|
||||||
|
|
||||||
- The Compare and Swap instruction is used to implement spinlocks. Otherwise,
|
- By default, a load-/store-exclusive instruction pair is used to implement
|
||||||
the load-/store-exclusive instruction pair is used.
|
spinlocks. The ``USE_SPINLOCK_CAS`` build option when set to 1 selects the
|
||||||
|
spinlock implementation using the ARMv8.1-LSE Compare and Swap instruction.
|
||||||
|
Notice this instruction is only available in AArch64 execution state, so
|
||||||
|
the option is only available to AArch64 builds.
|
||||||
|
|
||||||
Armv8.2-A
|
Armv8.2-A
|
||||||
~~~~~~~~~
|
~~~~~~~~~
|
||||||
|
|
|
@ -820,6 +820,10 @@ Common build options
|
||||||
reduces SRAM usage. Refer to `Library at ROM`_ for further details. Default
|
reduces SRAM usage. Refer to `Library at ROM`_ for further details. Default
|
||||||
is 0.
|
is 0.
|
||||||
|
|
||||||
|
- ``USE_SPINLOCK_CAS``: Setting this build flag to 1 selects the spinlock
|
||||||
|
implementation variant using the ARMv8.1-LSE compare-and-swap instruction.
|
||||||
|
Notice this option is experimental and only available to AArch64 builds.
|
||||||
|
|
||||||
- ``V``: Verbose build. If assigned anything other than 0, the build commands
|
- ``V``: Verbose build. If assigned anything other than 0, the build commands
|
||||||
are printed. Default is 0.
|
are printed. Default is 0.
|
||||||
|
|
||||||
|
|
|
@ -9,56 +9,38 @@
|
||||||
.globl spin_lock
|
.globl spin_lock
|
||||||
.globl spin_unlock
|
.globl spin_unlock
|
||||||
|
|
||||||
#if ARM_ARCH_AT_LEAST(8, 1)
|
#if USE_SPINLOCK_CAS
|
||||||
|
#if !ARM_ARCH_AT_LEAST(8, 1)
|
||||||
|
#error USE_SPINLOCK_CAS option requires at least an ARMv8.1 platform
|
||||||
|
#endif
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* When compiled for ARMv8.1 or later, choose spin locks based on Compare and
|
* When compiled for ARMv8.1 or later, choose spin locks based on Compare and
|
||||||
* Swap instruction.
|
* Swap instruction.
|
||||||
*/
|
*/
|
||||||
# define USE_CAS 1
|
|
||||||
|
|
||||||
/*
|
|
||||||
* Lock contenders using CAS, upon failing to acquire the lock, wait with the
|
|
||||||
* monitor in open state. Therefore, a normal store upon unlocking won't
|
|
||||||
* generate an SEV. Use explicit SEV instruction with CAS unlock.
|
|
||||||
*/
|
|
||||||
# define COND_SEV() sev
|
|
||||||
|
|
||||||
#else
|
|
||||||
|
|
||||||
# define USE_CAS 0
|
|
||||||
|
|
||||||
/*
|
|
||||||
* Lock contenders using exclusive pairs, upon failing to acquire the lock, wait
|
|
||||||
* with the monitor in exclusive state. A normal store upon unlocking will
|
|
||||||
* implicitly generate an event; so, no explicit SEV with unlock is required.
|
|
||||||
*/
|
|
||||||
# define COND_SEV()
|
|
||||||
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#if USE_CAS
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Acquire lock using Compare and Swap instruction.
|
* Acquire lock using Compare and Swap instruction.
|
||||||
*
|
*
|
||||||
* Compare for 0 with acquire semantics, and swap 1. Wait until CAS returns
|
* Compare for 0 with acquire semantics, and swap 1. If failed to acquire, use
|
||||||
* 0.
|
* load exclusive semantics to monitor the address and enter WFE.
|
||||||
*
|
*
|
||||||
* void spin_lock(spinlock_t *lock);
|
* void spin_lock(spinlock_t *lock);
|
||||||
*/
|
*/
|
||||||
func spin_lock
|
func spin_lock
|
||||||
mov w2, #1
|
mov w2, #1
|
||||||
sevl
|
1: mov w1, wzr
|
||||||
1:
|
2: casa w1, w2, [x0]
|
||||||
|
cbz w1, 3f
|
||||||
|
ldxr w1, [x0]
|
||||||
|
cbz w1, 2b
|
||||||
wfe
|
wfe
|
||||||
mov w1, wzr
|
b 1b
|
||||||
casa w1, w2, [x0]
|
3:
|
||||||
cbnz w1, 1b
|
|
||||||
ret
|
ret
|
||||||
endfunc spin_lock
|
endfunc spin_lock
|
||||||
|
|
||||||
#else /* !USE_CAS */
|
#else /* !USE_SPINLOCK_CAS */
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Acquire lock using load-/store-exclusive instruction pair.
|
* Acquire lock using load-/store-exclusive instruction pair.
|
||||||
|
@ -76,17 +58,18 @@ l2: ldaxr w1, [x0]
|
||||||
ret
|
ret
|
||||||
endfunc spin_lock
|
endfunc spin_lock
|
||||||
|
|
||||||
#endif /* USE_CAS */
|
#endif /* USE_SPINLOCK_CAS */
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Release lock previously acquired by spin_lock.
|
* Release lock previously acquired by spin_lock.
|
||||||
*
|
*
|
||||||
* Unconditionally write 0, and conditionally generate an event.
|
* Use store-release to unconditionally clear the spinlock variable.
|
||||||
|
* Store operation generates an event to all cores waiting in WFE
|
||||||
|
* when address is monitored by the global monitor.
|
||||||
*
|
*
|
||||||
* void spin_unlock(spinlock_t *lock);
|
* void spin_unlock(spinlock_t *lock);
|
||||||
*/
|
*/
|
||||||
func spin_unlock
|
func spin_unlock
|
||||||
stlr wzr, [x0]
|
stlr wzr, [x0]
|
||||||
COND_SEV()
|
|
||||||
ret
|
ret
|
||||||
endfunc spin_unlock
|
endfunc spin_unlock
|
||||||
|
|
|
@ -234,3 +234,8 @@ else
|
||||||
endif
|
endif
|
||||||
|
|
||||||
SANITIZE_UB := off
|
SANITIZE_UB := off
|
||||||
|
|
||||||
|
# For ARMv8.1 (AArch64) platforms, enabling this option selects the spinlock
|
||||||
|
# implementation variant using the ARMv8.1-LSE compare-and-swap instruction.
|
||||||
|
# Default: disabled
|
||||||
|
USE_SPINLOCK_CAS := 0
|
||||||
|
|
Loading…
Add table
Reference in a new issue