Merge pull request #1460 from robertovargas-arm/clang
Make TF compatible with Clang assembler and linker
Commit: 9a93d8ccff
28 changed files with 369 additions and 318 deletions
Makefile (21 lines changed)

@@ -85,7 +85,13 @@ $(eval $(call add_define,DEBUG))
 ifneq (${DEBUG}, 0)
     BUILD_TYPE := debug
     TF_CFLAGS += -g
+
+    ifneq ($(findstring clang,$(notdir $(CC))),)
+         ASFLAGS += -g
+    else
     ASFLAGS += -g -Wa,--gdwarf-2
+    endif
+
     # Use LOG_LEVEL_INFO by default for debug builds
     LOG_LEVEL := 40
 else
@@ -119,7 +125,7 @@ CC := ${CROSS_COMPILE}gcc
 CPP := ${CROSS_COMPILE}cpp
 AS := ${CROSS_COMPILE}gcc
 AR := ${CROSS_COMPILE}ar
-LD := ${CROSS_COMPILE}ld
+LINKER := ${CROSS_COMPILE}ld
 OC := ${CROSS_COMPILE}objcopy
 OD := ${CROSS_COMPILE}objdump
 NM := ${CROSS_COMPILE}nm
@@ -128,8 +134,8 @@ DTC := dtc

 # Use ${LD}.bfd instead if it exists (as absolute path or together with $PATH).
 ifneq ($(strip $(wildcard ${LD}.bfd) \
-    $(foreach dir,$(subst :, ,${PATH}),$(wildcard ${dir}/${LD}.bfd))),)
-LD := ${LD}.bfd
+    $(foreach dir,$(subst :, ,${PATH}),$(wildcard ${dir}/${LINKER}.bfd))),)
+LINKER := ${LINKER}.bfd
 endif

 ifeq (${ARM_ARCH_MAJOR},7)
@@ -143,12 +149,21 @@ endif
 ifeq ($(notdir $(CC)),armclang)
 TF_CFLAGS_aarch32 = -target arm-arm-none-eabi $(march32-directive)
 TF_CFLAGS_aarch64 = -target aarch64-arm-none-eabi -march=armv8-a
+LD = $(LINKER)
+AS = $(CC) -c -x assembler-with-cpp $(TF_CFLAGS_$(ARCH))
+CPP = $(CC) -E $(TF_CFLAGS_$(ARCH))
+PP = $(CC) -E $(TF_CFLAGS_$(ARCH))
 else ifneq ($(findstring clang,$(notdir $(CC))),)
 TF_CFLAGS_aarch32 = $(target32-directive)
 TF_CFLAGS_aarch64 = -target aarch64-elf
+LD = $(LINKER)
+AS = $(CC) -c -x assembler-with-cpp $(TF_CFLAGS_$(ARCH))
+CPP = $(CC) -E
+PP = $(CC) -E
 else
 TF_CFLAGS_aarch32 = $(march32-directive)
 TF_CFLAGS_aarch64 = -march=armv8-a
+LD = $(LINKER)
 endif

 TF_CFLAGS_aarch32 += -mno-unaligned-access
@@ -26,25 +26,25 @@ vector_entry SynchronousExceptionSP0
     mov x0, #SYNC_EXCEPTION_SP_EL0
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SynchronousExceptionSP0
+end_vector_entry SynchronousExceptionSP0

 vector_entry IrqSP0
     mov x0, #IRQ_SP_EL0
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size IrqSP0
+end_vector_entry IrqSP0

 vector_entry FiqSP0
     mov x0, #FIQ_SP_EL0
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size FiqSP0
+end_vector_entry FiqSP0

 vector_entry SErrorSP0
     mov x0, #SERROR_SP_EL0
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SErrorSP0
+end_vector_entry SErrorSP0

 /* -----------------------------------------------------
  * Current EL with SPx: 0x200 - 0x400
@@ -54,25 +54,25 @@ vector_entry SynchronousExceptionSPx
     mov x0, #SYNC_EXCEPTION_SP_ELX
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SynchronousExceptionSPx
+end_vector_entry SynchronousExceptionSPx

 vector_entry IrqSPx
     mov x0, #IRQ_SP_ELX
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size IrqSPx
+end_vector_entry IrqSPx

 vector_entry FiqSPx
     mov x0, #FIQ_SP_ELX
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size FiqSPx
+end_vector_entry FiqSPx

 vector_entry SErrorSPx
     mov x0, #SERROR_SP_ELX
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SErrorSPx
+end_vector_entry SErrorSPx

 /* -----------------------------------------------------
  * Lower EL using AArch64 : 0x400 - 0x600
@@ -91,25 +91,25 @@ vector_entry SynchronousExceptionA64
     b.ne unexpected_sync_exception

     b smc_handler64
-    check_vector_size SynchronousExceptionA64
+end_vector_entry SynchronousExceptionA64

 vector_entry IrqA64
     mov x0, #IRQ_AARCH64
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size IrqA64
+end_vector_entry IrqA64

 vector_entry FiqA64
     mov x0, #FIQ_AARCH64
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size FiqA64
+end_vector_entry FiqA64

 vector_entry SErrorA64
     mov x0, #SERROR_AARCH64
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SErrorA64
+end_vector_entry SErrorA64

 /* -----------------------------------------------------
  * Lower EL using AArch32 : 0x600 - 0x800
@@ -119,25 +119,25 @@ vector_entry SynchronousExceptionA32
     mov x0, #SYNC_EXCEPTION_AARCH32
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SynchronousExceptionA32
+end_vector_entry SynchronousExceptionA32

 vector_entry IrqA32
     mov x0, #IRQ_AARCH32
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size IrqA32
+end_vector_entry IrqA32

 vector_entry FiqA32
     mov x0, #FIQ_AARCH32
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size FiqA32
+end_vector_entry FiqA32

 vector_entry SErrorA32
     mov x0, #SERROR_AARCH32
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SErrorA32
+end_vector_entry SErrorA32


 func smc_handler64
bl1/bl1.ld.S (13 lines changed)

@@ -28,10 +28,19 @@ SECTIONS
     *bl1_entrypoint.o(.text*)
     *(.text*)
     *(.vectors)
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __TEXT_END__ = .;
 } >ROM

+    /* .ARM.extab and .ARM.exidx are only added because Clang need them */
+    .ARM.extab . : {
+        *(.ARM.extab* .gnu.linkonce.armextab.*)
+    } >ROM
+
+    .ARM.exidx . : {
+        *(.ARM.exidx* .gnu.linkonce.armexidx.*)
+    } >ROM
+
 .rodata . : {
     __RODATA_START__ = .;
     *(.rodata*)
@@ -152,7 +161,7 @@ SECTIONS
     * as device memory. No other unexpected data must creep in.
     * Ensure the rest of the current memory page is unused.
     */
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __COHERENT_RAM_END__ = .;
 } >RAM
 #endif
@@ -26,25 +26,25 @@ vector_entry SynchronousExceptionSP0
     mov x0, #SYNC_EXCEPTION_SP_EL0
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SynchronousExceptionSP0
+end_vector_entry SynchronousExceptionSP0

 vector_entry IrqSP0
     mov x0, #IRQ_SP_EL0
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size IrqSP0
+end_vector_entry IrqSP0

 vector_entry FiqSP0
     mov x0, #FIQ_SP_EL0
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size FiqSP0
+end_vector_entry FiqSP0

 vector_entry SErrorSP0
     mov x0, #SERROR_SP_EL0
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SErrorSP0
+end_vector_entry SErrorSP0

 /* -----------------------------------------------------
  * Current EL with SPx: 0x200 - 0x400
@@ -54,25 +54,25 @@ vector_entry SynchronousExceptionSPx
     mov x0, #SYNC_EXCEPTION_SP_ELX
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SynchronousExceptionSPx
+end_vector_entry SynchronousExceptionSPx

 vector_entry IrqSPx
     mov x0, #IRQ_SP_ELX
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size IrqSPx
+end_vector_entry IrqSPx

 vector_entry FiqSPx
     mov x0, #FIQ_SP_ELX
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size FiqSPx
+end_vector_entry FiqSPx

 vector_entry SErrorSPx
     mov x0, #SERROR_SP_ELX
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SErrorSPx
+end_vector_entry SErrorSPx

 /* -----------------------------------------------------
  * Lower EL using AArch64 : 0x400 - 0x600
@@ -82,25 +82,25 @@ vector_entry SynchronousExceptionA64
     mov x0, #SYNC_EXCEPTION_AARCH64
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SynchronousExceptionA64
+end_vector_entry SynchronousExceptionA64

 vector_entry IrqA64
     mov x0, #IRQ_AARCH64
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size IrqA64
+end_vector_entry IrqA64

 vector_entry FiqA64
     mov x0, #FIQ_AARCH64
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size FiqA64
+end_vector_entry FiqA64

 vector_entry SErrorA64
     mov x0, #SERROR_AARCH64
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SErrorA64
+end_vector_entry SErrorA64

 /* -----------------------------------------------------
  * Lower EL using AArch32 : 0x600 - 0x800
@@ -110,22 +110,22 @@ vector_entry SynchronousExceptionA32
     mov x0, #SYNC_EXCEPTION_AARCH32
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SynchronousExceptionA32
+end_vector_entry SynchronousExceptionA32

 vector_entry IrqA32
     mov x0, #IRQ_AARCH32
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size IrqA32
+end_vector_entry IrqA32

 vector_entry FiqA32
     mov x0, #FIQ_AARCH32
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size FiqA32
+end_vector_entry FiqA32

 vector_entry SErrorA32
     mov x0, #SERROR_AARCH32
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SErrorA32
+end_vector_entry SErrorA32
bl2/bl2.ld.S (17 lines changed)

@@ -28,10 +28,19 @@ SECTIONS
     *bl2_entrypoint.o(.text*)
     *(.text*)
     *(.vectors)
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __TEXT_END__ = .;
 } >RAM

+    /* .ARM.extab and .ARM.exidx are only added because Clang need them */
+    .ARM.extab . : {
+        *(.ARM.extab* .gnu.linkonce.armextab.*)
+    } >RAM
+
+    .ARM.exidx . : {
+        *(.ARM.exidx* .gnu.linkonce.armexidx.*)
+    } >RAM
+
 .rodata . : {
     __RODATA_START__ = .;
     *(.rodata*)
@@ -42,7 +51,7 @@ SECTIONS
     KEEP(*(.img_parser_lib_descs))
     __PARSER_LIB_DESCS_END__ = .;

-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __RODATA_END__ = .;
 } >RAM
 #else
@@ -65,7 +74,7 @@ SECTIONS
     * read-only, executable. No RW data from the next section must
     * creep in. Ensure the rest of the current memory page is unused.
     */
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __RO_END__ = .;
 } >RAM
 #endif
@@ -131,7 +140,7 @@ SECTIONS
     * as device memory. No other unexpected data must creep in.
     * Ensure the rest of the current memory page is unused.
     */
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __COHERENT_RAM_END__ = .;
 } >RAM
 #endif
@@ -42,7 +42,7 @@ SECTIONS
     __TEXT_RESIDENT_END__ = .;
     *(.text*)
     *(.vectors)
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __TEXT_END__ = .;
 #if BL2_IN_XIP_MEM
 } >ROM
@@ -69,7 +69,7 @@ SECTIONS
     KEEP(*(cpu_ops))
     __CPU_OPS_END__ = .;

-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __RODATA_END__ = .;
 #if BL2_IN_XIP_MEM
 } >ROM
@@ -111,7 +111,7 @@ SECTIONS
     * read-only, executable. No RW data from the next section must
     * creep in. Ensure the rest of the current memory page is unused.
     */
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);

     __RO_END__ = .;
 #if BL2_IN_XIP_MEM
@@ -195,7 +195,7 @@ SECTIONS
     * as device memory. No other unexpected data must creep in.
     * Ensure the rest of the current memory page is unused.
     */
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __COHERENT_RAM_END__ = .;
 } >RAM
 #endif
@@ -28,14 +28,23 @@ SECTIONS
     *bl2u_entrypoint.o(.text*)
     *(.text*)
     *(.vectors)
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __TEXT_END__ = .;
 } >RAM

+    /* .ARM.extab and .ARM.exidx are only added because Clang need them */
+    .ARM.extab . : {
+        *(.ARM.extab* .gnu.linkonce.armextab.*)
+    } >RAM
+
+    .ARM.exidx . : {
+        *(.ARM.exidx* .gnu.linkonce.armexidx.*)
+    } >RAM
+
 .rodata . : {
     __RODATA_START__ = .;
     *(.rodata*)
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __RODATA_END__ = .;
 } >RAM
 #else
@@ -52,7 +61,7 @@ SECTIONS
     * read-only, executable. No RW data from the next section must
     * creep in. Ensure the rest of the current memory page is unused.
     */
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __RO_END__ = .;
 } >RAM
 #endif
@@ -118,7 +127,7 @@ SECTIONS
     * as device memory. No other unexpected data must creep in.
     * Ensure the rest of the current memory page is unused.
     */
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __COHERENT_RAM_END__ = .;
 } >RAM
 #endif
@@ -233,7 +233,7 @@ vector_base runtime_exceptions
 vector_entry sync_exception_sp_el0
     /* We don't expect any synchronous exceptions from EL3 */
     b report_unhandled_exception
-    check_vector_size sync_exception_sp_el0
+end_vector_entry sync_exception_sp_el0

 vector_entry irq_sp_el0
     /*
@@ -241,17 +241,17 @@ vector_entry irq_sp_el0
     * error. Loop infinitely.
     */
     b report_unhandled_interrupt
-    check_vector_size irq_sp_el0
+end_vector_entry irq_sp_el0


 vector_entry fiq_sp_el0
     b report_unhandled_interrupt
-    check_vector_size fiq_sp_el0
+end_vector_entry fiq_sp_el0


 vector_entry serror_sp_el0
     b report_unhandled_exception
-    check_vector_size serror_sp_el0
+end_vector_entry serror_sp_el0

 /* ---------------------------------------------------------------------
  * Current EL with SP_ELx: 0x200 - 0x400
@@ -265,19 +265,19 @@ vector_entry sync_exception_sp_elx
     * corrupted.
     */
     b report_unhandled_exception
-    check_vector_size sync_exception_sp_elx
+end_vector_entry sync_exception_sp_elx

 vector_entry irq_sp_elx
     b report_unhandled_interrupt
-    check_vector_size irq_sp_elx
+end_vector_entry irq_sp_elx

 vector_entry fiq_sp_elx
     b report_unhandled_interrupt
-    check_vector_size fiq_sp_elx
+end_vector_entry fiq_sp_elx

 vector_entry serror_sp_elx
     b report_unhandled_exception
-    check_vector_size serror_sp_elx
+end_vector_entry serror_sp_elx

 /* ---------------------------------------------------------------------
  * Lower EL using AArch64 : 0x400 - 0x600
@@ -292,17 +292,17 @@ vector_entry sync_exception_aarch64
     */
     check_and_unmask_ea
     handle_sync_exception
-    check_vector_size sync_exception_aarch64
+end_vector_entry sync_exception_aarch64

 vector_entry irq_aarch64
     check_and_unmask_ea
     handle_interrupt_exception irq_aarch64
-    check_vector_size irq_aarch64
+end_vector_entry irq_aarch64

 vector_entry fiq_aarch64
     check_and_unmask_ea
     handle_interrupt_exception fiq_aarch64
-    check_vector_size fiq_aarch64
+end_vector_entry fiq_aarch64

 vector_entry serror_aarch64
     msr daifclr, #DAIF_ABT_BIT
@@ -313,7 +313,7 @@ vector_entry serror_aarch64
     */
     str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
     handle_ea #ERROR_EA_ASYNC
-    check_vector_size serror_aarch64
+end_vector_entry serror_aarch64

 /* ---------------------------------------------------------------------
  * Lower EL using AArch32 : 0x600 - 0x800
@@ -328,17 +328,17 @@ vector_entry sync_exception_aarch32
     */
     check_and_unmask_ea
     handle_sync_exception
-    check_vector_size sync_exception_aarch32
+end_vector_entry sync_exception_aarch32

 vector_entry irq_aarch32
     check_and_unmask_ea
     handle_interrupt_exception irq_aarch32
-    check_vector_size irq_aarch32
+end_vector_entry irq_aarch32

 vector_entry fiq_aarch32
     check_and_unmask_ea
     handle_interrupt_exception fiq_aarch32
-    check_vector_size fiq_aarch32
+end_vector_entry fiq_aarch32

 vector_entry serror_aarch32
     msr daifclr, #DAIF_ABT_BIT
@@ -349,7 +349,7 @@ vector_entry serror_aarch32
     */
     str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
     handle_ea #ERROR_EA_ASYNC
-    check_vector_size serror_aarch32
+end_vector_entry serror_aarch32


 /* ---------------------------------------------------------------------
@@ -32,7 +32,7 @@ SECTIONS
     *bl31_entrypoint.o(.text*)
     *(.text*)
     *(.vectors)
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __TEXT_END__ = .;
 } >RAM

@@ -67,7 +67,7 @@ SECTIONS
     . = ALIGN(8);
 #include <pubsub_events.h>

-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __RODATA_END__ = .;
 } >RAM
 #else
@@ -111,7 +111,7 @@ SECTIONS
     * executable. No RW data from the next section must creep in.
     * Ensure the rest of the current memory page is unused.
     */
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __RO_END__ = .;
 } >RAM
 #endif
@@ -131,7 +131,7 @@ SECTIONS
 spm_shim_exceptions : ALIGN(PAGE_SIZE) {
     __SPM_SHIM_EXCEPTIONS_START__ = .;
     *(.spm_shim_exceptions)
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __SPM_SHIM_EXCEPTIONS_END__ = .;
 } >RAM
 #endif
@@ -246,7 +246,7 @@ SECTIONS
     * as device memory. No other unexpected data must creep in.
     * Ensure the rest of the current memory page is unused.
     */
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __COHERENT_RAM_END__ = .;
 } >RAM
 #endif
@@ -28,10 +28,19 @@ SECTIONS
     *entrypoint.o(.text*)
     *(.text*)
     *(.vectors)
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __TEXT_END__ = .;
 } >RAM

+    /* .ARM.extab and .ARM.exidx are only added because Clang need them */
+    .ARM.extab . : {
+        *(.ARM.extab* .gnu.linkonce.armextab.*)
+    } >RAM
+
+    .ARM.exidx . : {
+        *(.ARM.exidx* .gnu.linkonce.armexidx.*)
+    } >RAM
+
 .rodata . : {
     __RODATA_START__ = .;
     *(.rodata*)
@@ -55,7 +64,7 @@ SECTIONS
     . = ALIGN(8);
 #include <pubsub_events.h>

-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __RODATA_END__ = .;
 } >RAM
 #else
@@ -92,7 +101,7 @@ SECTIONS
     * read-only, executable. No RW data from the next section must
     * creep in. Ensure the rest of the current memory block is unused.
     */
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __RO_END__ = .;
 } >RAM
 #endif
@@ -207,7 +216,7 @@ SECTIONS
     * as device memory. No other unexpected data must creep in.
     * Ensure the rest of the current memory page is unused.
     */
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __COHERENT_RAM_END__ = .;
 } >RAM
@@ -82,19 +82,19 @@ vector_base tsp_exceptions
     */
 vector_entry sync_exception_sp_el0
     b plat_panic_handler
-    check_vector_size sync_exception_sp_el0
+end_vector_entry sync_exception_sp_el0

 vector_entry irq_sp_el0
     b plat_panic_handler
-    check_vector_size irq_sp_el0
+end_vector_entry irq_sp_el0

 vector_entry fiq_sp_el0
     b plat_panic_handler
-    check_vector_size fiq_sp_el0
+end_vector_entry fiq_sp_el0

 vector_entry serror_sp_el0
     b plat_panic_handler
-    check_vector_size serror_sp_el0
+end_vector_entry serror_sp_el0


 /* -----------------------------------------------------
@@ -104,19 +104,19 @@ vector_entry serror_sp_el0
     */
 vector_entry sync_exception_sp_elx
     b plat_panic_handler
-    check_vector_size sync_exception_sp_elx
+end_vector_entry sync_exception_sp_elx

 vector_entry irq_sp_elx
     handle_tsp_interrupt irq_sp_elx
-    check_vector_size irq_sp_elx
+end_vector_entry irq_sp_elx

 vector_entry fiq_sp_elx
     handle_tsp_interrupt fiq_sp_elx
-    check_vector_size fiq_sp_elx
+end_vector_entry fiq_sp_elx

 vector_entry serror_sp_elx
     b plat_panic_handler
-    check_vector_size serror_sp_elx
+end_vector_entry serror_sp_elx


 /* -----------------------------------------------------
@@ -126,19 +126,19 @@ vector_entry serror_sp_elx
     */
 vector_entry sync_exception_aarch64
     b plat_panic_handler
-    check_vector_size sync_exception_aarch64
+end_vector_entry sync_exception_aarch64

 vector_entry irq_aarch64
     b plat_panic_handler
-    check_vector_size irq_aarch64
+end_vector_entry irq_aarch64

 vector_entry fiq_aarch64
     b plat_panic_handler
-    check_vector_size fiq_aarch64
+end_vector_entry fiq_aarch64

 vector_entry serror_aarch64
     b plat_panic_handler
-    check_vector_size serror_aarch64
+end_vector_entry serror_aarch64


 /* -----------------------------------------------------
@@ -148,16 +148,16 @@ vector_entry serror_aarch64
     */
 vector_entry sync_exception_aarch32
     b plat_panic_handler
-    check_vector_size sync_exception_aarch32
+end_vector_entry sync_exception_aarch32

 vector_entry irq_aarch32
     b plat_panic_handler
-    check_vector_size irq_aarch32
+end_vector_entry irq_aarch32

 vector_entry fiq_aarch32
     b plat_panic_handler
-    check_vector_size fiq_aarch32
+end_vector_entry fiq_aarch32

 vector_entry serror_aarch32
     b plat_panic_handler
-    check_vector_size serror_aarch32
+end_vector_entry serror_aarch32
@@ -29,14 +29,14 @@ SECTIONS
     *tsp_entrypoint.o(.text*)
     *(.text*)
     *(.vectors)
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __TEXT_END__ = .;
 } >RAM

 .rodata . : {
     __RODATA_START__ = .;
     *(.rodata*)
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __RODATA_END__ = .;
 } >RAM
 #else
@@ -52,7 +52,7 @@ SECTIONS
     * read-only, executable. No RW data from the next section must
     * creep in. Ensure the rest of the current memory page is unused.
     */
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __RO_END__ = .;
 } >RAM
 #endif
@@ -117,7 +117,7 @@ SECTIONS
     * as device memory. No other unexpected data must creep in.
     * Ensure the rest of the current memory page is unused.
     */
-    . = NEXT(PAGE_SIZE);
+    . = ALIGN(PAGE_SIZE);
     __COHERENT_RAM_END__ = .;
 } >RAM
 #endif
@@ -24,25 +24,25 @@ vector_entry SynchronousExceptionSP0
     mov x0, #SYNC_EXCEPTION_SP_EL0
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SynchronousExceptionSP0
+end_vector_entry SynchronousExceptionSP0

 vector_entry IrqSP0
     mov x0, #IRQ_SP_EL0
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size IrqSP0
+end_vector_entry IrqSP0

 vector_entry FiqSP0
     mov x0, #FIQ_SP_EL0
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size FiqSP0
+end_vector_entry FiqSP0

 vector_entry SErrorSP0
     mov x0, #SERROR_SP_EL0
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SErrorSP0
+end_vector_entry SErrorSP0

 /* -----------------------------------------------------
  * Current EL with SPx: 0x200 - 0x400
@@ -52,25 +52,25 @@ vector_entry SynchronousExceptionSPx
     mov x0, #SYNC_EXCEPTION_SP_ELX
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SynchronousExceptionSPx
+end_vector_entry SynchronousExceptionSPx

 vector_entry IrqSPx
     mov x0, #IRQ_SP_ELX
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size IrqSPx
+end_vector_entry IrqSPx

 vector_entry FiqSPx
     mov x0, #FIQ_SP_ELX
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size FiqSPx
+end_vector_entry FiqSPx

 vector_entry SErrorSPx
     mov x0, #SERROR_SP_ELX
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SErrorSPx
+end_vector_entry SErrorSPx

 /* -----------------------------------------------------
  * Lower EL using AArch64 : 0x400 - 0x600
@@ -80,25 +80,25 @@ vector_entry SynchronousExceptionA64
     mov x0, #SYNC_EXCEPTION_AARCH64
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SynchronousExceptionA64
+end_vector_entry SynchronousExceptionA64

 vector_entry IrqA64
     mov x0, #IRQ_AARCH64
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size IrqA64
+end_vector_entry IrqA64

 vector_entry FiqA64
     mov x0, #FIQ_AARCH64
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size FiqA64
+end_vector_entry FiqA64

 vector_entry SErrorA64
     mov x0, #SERROR_AARCH64
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SErrorA64
+end_vector_entry SErrorA64

 /* -----------------------------------------------------
  * Lower EL using AArch32 : 0x600 - 0x800
@@ -108,22 +108,22 @@ vector_entry SynchronousExceptionA32
     mov x0, #SYNC_EXCEPTION_AARCH32
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SynchronousExceptionA32
+end_vector_entry SynchronousExceptionA32

 vector_entry IrqA32
     mov x0, #IRQ_AARCH32
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size IrqA32
+end_vector_entry IrqA32

 vector_entry FiqA32
     mov x0, #FIQ_AARCH32
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size FiqA32
+end_vector_entry FiqA32

 vector_entry SErrorA32
     mov x0, #SERROR_AARCH32
     bl plat_report_exception
     no_ret plat_panic_handler
-    check_vector_size SErrorA32
+end_vector_entry SErrorA32
@@ -62,8 +62,8 @@ given Linaro Release. Also, these `Linaro instructions`_ provide further
 guidance and a script, which can be used to download Linaro deliverables
 automatically.

-Optionally, TF-A can be built using clang or Arm Compiler 6.
-See instructions below on how to switch the default compiler.
+Optionally, TF-A can be built using clang version 4.0 or newer or Arm
+Compiler 6. See instructions below on how to switch the default compiler.

 In addition, the following optional packages and tools may be needed:

@@ -103,10 +103,14 @@ Building TF-A

       export CROSS_COMPILE=<path-to-aarch32-gcc>/bin/arm-linux-gnueabihf-

-   It is possible to build TF-A using clang or Arm Compiler 6. To do so
-   ``CC`` needs to point to the clang or armclang binary. Only the compiler
-   is switched; the assembler and linker need to be provided by the GNU
-   toolchain, thus ``CROSS_COMPILE`` should be set as described above.
+   It is possible to build TF-A using Clang or Arm Compiler 6. To do so
+   ``CC`` needs to point to the clang or armclang binary, which will
+   also select the clang or armclang assembler. Be aware that the
+   GNU linker is used by default. In case of being needed the linker
+   can be overriden using the ``LD`` variable. Clang linker version 6 is
+   known to work with TF-A.
+
+   In both cases ``CROSS_COMPILE`` should be set as described above.

    Arm Compiler 6 will be selected when the base name of the path assigned
    to ``CC`` matches the string 'armclang'.
@@ -83,11 +83,20 @@
     .section \section_name, "ax"
     .align 7, 0
     .type \label, %function
-    .func \label
     .cfi_startproc
     \label:
     .endm

+    /*
+     * Add the bytes until fill the full exception vector, whose size is always
+     * 32 instructions. If there are more than 32 instructions in the
+     * exception vector then an error is emitted.
+     */
+    .macro end_vector_entry label
+    .cfi_endproc
+    .fill \label + (32 * 4) - .
+    .endm
+
     /*
      * This macro verifies that the given vector doesn't exceed the
      * architectural limit of 32 instructions. This is meant to be placed
@@ -95,11 +104,10 @@
      * vector entry as the parameter
      */
     .macro check_vector_size since
-    .endfunc
-    .cfi_endproc
-    .if (. - \since) > (32 * 4)
-      .error "Vector exceeds 32 instructions"
-    .endif
+#if ERROR_DEPRECATED
+    .error "check_vector_size must not be used. Use end_vector_entry instead"
+#endif
+    end_vector_entry \since
     .endm

 #if ENABLE_PLAT_COMPAT
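
The new end_vector_entry macro closes an exception vector opened with vector_entry and pads the slot up to its fixed size of 32 instructions with .fill, instead of the .func/.endfunc bookkeeping that Clang's integrated assembler rejects. A minimal sketch of the resulting usage pattern, assuming the TF-A macro headers are included (the handler name here is illustrative, not from this patch):

    vector_entry demo_sync_exception_sp_el0
        mov x0, #SYNC_EXCEPTION_SP_EL0      /* report the exception type */
        bl  plat_report_exception
        no_ret plat_panic_handler           /* never returns */
    end_vector_entry demo_sync_exception_sp_el0  /* pads to 32 instructions, errors if exceeded */

check_vector_size is kept only as a deprecated wrapper: it forwards to end_vector_entry and raises an assembler error when ERROR_DEPRECATED is set.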
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -31,7 +31,6 @@
     .cfi_sections .debug_frame
     .section .text.asm.\_name, "ax"
     .type \_name, %function
-    .func \_name
     /*
      * .cfi_startproc and .cfi_endproc are needed to output entries in
      * .debug_frame
@@ -45,7 +44,6 @@
  * This macro is used to mark the end of a function.
  */
 .macro endfunc _name
-    .endfunc
     .cfi_endproc
     .size \_name, . - \_name
 .endm
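
With .func/.endfunc dropped, the func/endfunc macros delimit functions purely through .type/.size plus the CFI directives, which both GNU as and Clang's integrated assembler accept. A hedged sketch of how a routine is declared with these macros (the function name is hypothetical):

    func demo_helper        /* emits .type demo_helper, %function and .cfi_startproc */
        mov x0, xzr
        ret
    endfunc demo_helper     /* emits .cfi_endproc and .size demo_helper, . - demo_helper */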
@@ -35,38 +35,47 @@
 # define REPORT_ERRATA 0
 #endif


+    .equ CPU_MIDR_SIZE, CPU_WORD_SIZE
+    .equ CPU_RESET_FUNC_SIZE, CPU_WORD_SIZE
+    .equ CPU_PWR_DWN_OPS_SIZE, CPU_WORD_SIZE * CPU_MAX_PWR_DWN_OPS
+    .equ CPU_ERRATA_FUNC_SIZE, CPU_WORD_SIZE
+    .equ CPU_ERRATA_LOCK_SIZE, CPU_WORD_SIZE
+    .equ CPU_ERRATA_PRINTED_SIZE, CPU_WORD_SIZE
+
+#ifndef IMAGE_AT_EL3
+    .equ CPU_RESET_FUNC_SIZE, 0
+#endif
+
+/* The power down core and cluster is needed only in BL32 */
+#ifndef IMAGE_BL32
+    .equ CPU_PWR_DWN_OPS_SIZE, 0
+#endif
+
+/* Fields required to print errata status */
+#if !REPORT_ERRATA
+    .equ CPU_ERRATA_FUNC_SIZE, 0
+#endif
+
+/* Only BL32 requires mutual exclusion and printed flag. */
+#if !(REPORT_ERRATA && defined(IMAGE_BL32))
+    .equ CPU_ERRATA_LOCK_SIZE, 0
+    .equ CPU_ERRATA_PRINTED_SIZE, 0
+#endif
+
+
 /*
  * Define the offsets to the fields in cpu_ops structure.
+ * Every offset is defined based on the offset and size of the previous
+ * field.
  */
-    .struct 0
-CPU_MIDR: /* cpu_ops midr */
-    .space 4
-/* Reset fn is needed during reset */
-#if defined(IMAGE_AT_EL3)
-CPU_RESET_FUNC: /* cpu_ops reset_func */
-    .space 4
-#endif
-#ifdef IMAGE_BL32 /* The power down core and cluster is needed only in BL32 */
-CPU_PWR_DWN_OPS: /* cpu_ops power down functions */
-    .space (4 * CPU_MAX_PWR_DWN_OPS)
-#endif
-
-/*
- * Fields required to print errata status. Only in BL32 that the printing
- * require mutual exclusion and printed flag.
- */
-#if REPORT_ERRATA
-CPU_ERRATA_FUNC: /* CPU errata status printing function */
-    .space 4
-#if defined(IMAGE_BL32)
-CPU_ERRATA_LOCK:
-    .space 4
-CPU_ERRATA_PRINTED:
-    .space 4
-#endif
-#endif
-
-CPU_OPS_SIZE = .
+    .equ CPU_MIDR, 0
+    .equ CPU_RESET_FUNC, CPU_MIDR + CPU_MIDR_SIZE
+    .equ CPU_PWR_DWN_OPS, CPU_RESET_FUNC + CPU_RESET_FUNC_SIZE
+    .equ CPU_ERRATA_FUNC, CPU_PWR_DWN_OPS + CPU_PWR_DWN_OPS_SIZE
+    .equ CPU_ERRATA_LOCK, CPU_ERRATA_FUNC + CPU_ERRATA_FUNC_SIZE
+    .equ CPU_ERRATA_PRINTED, CPU_ERRATA_LOCK + CPU_ERRATA_LOCK_SIZE
+    .equ CPU_OPS_SIZE, CPU_ERRATA_PRINTED + CPU_ERRATA_PRINTED_SIZE

 /*
  * Write given expressions as words
@@ -128,21 +137,8 @@ CPU_OPS_SIZE = .
     .word \_resetfunc
 #endif
 #ifdef IMAGE_BL32
-1:
     /* Insert list of functions */
     fill_constants CPU_MAX_PWR_DWN_OPS, \_power_down_ops
-2:
-    /*
-     * Error if no or more than CPU_MAX_PWR_DWN_OPS were specified in the
-     * list
-     */
-    .ifeq 2b - 1b
-      .error "At least one power down function must be specified"
-    .else
-      .iflt 2b - 1b - (CPU_MAX_PWR_DWN_OPS * CPU_WORD_SIZE)
-        .error "More than CPU_MAX_PWR_DWN_OPS functions specified"
-      .endif
-    .endif
 #endif

 #if REPORT_ERRATA
@@ -38,46 +38,56 @@
 # define REPORT_ERRATA 0
 #endif


+    .equ CPU_MIDR_SIZE, CPU_WORD_SIZE
+    .equ CPU_EXTRA1_FUNC_SIZE, CPU_WORD_SIZE
+    .equ CPU_EXTRA2_FUNC_SIZE, CPU_WORD_SIZE
+    .equ CPU_RESET_FUNC_SIZE, CPU_WORD_SIZE
+    .equ CPU_PWR_DWN_OPS_SIZE, CPU_WORD_SIZE * CPU_MAX_PWR_DWN_OPS
+    .equ CPU_ERRATA_FUNC_SIZE, CPU_WORD_SIZE
+    .equ CPU_ERRATA_LOCK_SIZE, CPU_WORD_SIZE
+    .equ CPU_ERRATA_PRINTED_SIZE, CPU_WORD_SIZE
+    .equ CPU_REG_DUMP_SIZE, CPU_WORD_SIZE
+
+#ifndef IMAGE_AT_EL3
+    .equ CPU_RESET_FUNC_SIZE, 0
+#endif
+
+/* The power down core and cluster is needed only in BL31 */
+#ifndef IMAGE_BL31
+    .equ CPU_PWR_DWN_OPS_SIZE, 0
+#endif
+
+/* Fields required to print errata status. */
+#if !REPORT_ERRATA
+    .equ CPU_ERRATA_FUNC_SIZE, 0
+#endif
+
+/* Only BL31 requieres mutual exclusion and printed flag. */
+#if !(REPORT_ERRATA && defined(IMAGE_BL31))
+    .equ CPU_ERRATA_LOCK_SIZE, 0
+    .equ CPU_ERRATA_PRINTED_SIZE, 0
+#endif
+
+#if !defined(IMAGE_BL31) || !CRASH_REPORTING
+    .equ CPU_REG_DUMP_SIZE, 0
+#endif
+
 /*
  * Define the offsets to the fields in cpu_ops structure.
+ * Every offset is defined based in the offset and size of the previous
+ * field.
  */
-    .struct 0
-CPU_MIDR: /* cpu_ops midr */
-    .space 8
-/* Reset fn is needed in BL at reset vector */
-#if defined(IMAGE_AT_EL3)
-CPU_RESET_FUNC: /* cpu_ops reset_func */
-    .space 8
-#endif
-CPU_EXTRA1_FUNC:
-    .space 8
-CPU_EXTRA2_FUNC:
-    .space 8
-#ifdef IMAGE_BL31 /* The power down core and cluster is needed only in BL31 */
-CPU_PWR_DWN_OPS: /* cpu_ops power down functions */
-    .space (8 * CPU_MAX_PWR_DWN_OPS)
-#endif
-
-/*
- * Fields required to print errata status. Only in BL31 that the printing
- * require mutual exclusion and printed flag.
- */
-#if REPORT_ERRATA
-CPU_ERRATA_FUNC:
-    .space 8
-#if defined(IMAGE_BL31)
-CPU_ERRATA_LOCK:
-    .space 8
-CPU_ERRATA_PRINTED:
-    .space 8
-#endif
-#endif
-
-#if defined(IMAGE_BL31) && CRASH_REPORTING
-CPU_REG_DUMP: /* cpu specific register dump for crash reporting */
-    .space 8
-#endif
-CPU_OPS_SIZE = .
+    .equ CPU_MIDR, 0
+    .equ CPU_RESET_FUNC, CPU_MIDR + CPU_MIDR_SIZE
+    .equ CPU_EXTRA1_FUNC, CPU_RESET_FUNC + CPU_RESET_FUNC_SIZE
+    .equ CPU_EXTRA2_FUNC, CPU_EXTRA1_FUNC + CPU_EXTRA1_FUNC_SIZE
+    .equ CPU_PWR_DWN_OPS, CPU_EXTRA2_FUNC + CPU_EXTRA2_FUNC_SIZE
+    .equ CPU_ERRATA_FUNC, CPU_PWR_DWN_OPS + CPU_PWR_DWN_OPS_SIZE
+    .equ CPU_ERRATA_LOCK, CPU_ERRATA_FUNC + CPU_ERRATA_FUNC_SIZE
+    .equ CPU_ERRATA_PRINTED, CPU_ERRATA_LOCK + CPU_ERRATA_LOCK_SIZE
+    .equ CPU_REG_DUMP, CPU_ERRATA_PRINTED + CPU_ERRATA_PRINTED_SIZE
+    .equ CPU_OPS_SIZE, CPU_REG_DUMP + CPU_REG_DUMP_SIZE

 /*
  * Write given expressions as quad words
@@ -149,21 +159,8 @@ CPU_OPS_SIZE = .
     .quad \_extra1
     .quad \_extra2
 #ifdef IMAGE_BL31
-1:
     /* Insert list of functions */
     fill_constants CPU_MAX_PWR_DWN_OPS, \_power_down_ops
-2:
-    /*
-     * Error if no or more than CPU_MAX_PWR_DWN_OPS were specified in the
-     * list
-     */
-    .ifeq 2b - 1b
-      .error "At least one power down function must be specified"
-    .else
-      .iflt 2b - 1b - (CPU_MAX_PWR_DWN_OPS * CPU_WORD_SIZE)
-        .error "More than CPU_MAX_PWR_DWN_OPS functions specified"
-      .endif
-    .endif
 #endif

 #if REPORT_ERRATA
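
Expressing the cpu_ops layout as chained .equ constants (each offset defined from the previous offset plus its size, with unused field sizes forced to 0) keeps the same field offsets as the old .struct/.space form while avoiding section-switching directives that Clang's integrated assembler handles differently. A sketch of how such an offset might be consumed, assuming x0 already holds a pointer to a cpu_ops entry (illustrative only, not code from this patch):

        ldr x1, [x0, #CPU_RESET_FUNC]   /* load the reset handler pointer from cpu_ops */
        blr x1                          /* call it */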
@@ -22,8 +22,7 @@ icc_regs:

 /* Registers common to both GICv2 and GICv3 */
 gicd_pend_reg:
-    .asciz "gicd_ispendr regs (Offsets 0x200 - 0x278)\n" \
-        " Offset:\t\t\tvalue\n"
+    .asciz "gicd_ispendr regs (Offsets 0x200 - 0x278)\n Offset:\t\t\tvalue\n"
 newline:
     .asciz "\n"
 spacer:
@ -1,5 +1,5 @@
|
||||||
/*
|
/*
|
||||||
* Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
|
* Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
|
||||||
*
|
*
|
||||||
* SPDX-License-Identifier: BSD-3-Clause
|
* SPDX-License-Identifier: BSD-3-Clause
|
||||||
*/
|
*/
|
||||||
|
@@ -22,7 +22,7 @@ SECTIONS
*(arm_el3_tzc_dram)
__EL3_SEC_DRAM_UNALIGNED_END__ = .;

-. = NEXT(PAGE_SIZE);
+. = ALIGN(PAGE_SIZE);
__EL3_SEC_DRAM_END__ = .;
} >EL3_SEC_DRAM
}
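NEXT(PAGE_SIZE) becomes ALIGN(PAGE_SIZE) here and in the two linker-script hunks further down. GNU ld documents NEXT(exp) as equivalent to ALIGN(exp) unless MEMORY defines discontinuous regions, so for these scripts the substitution should be behaviour-preserving; the likely motivation is that the LLVM linker used alongside Clang did not implement NEXT at the time of this change. A hypothetical fragment with the portable spelling, names invented:

/* Hypothetical linker-script fragment (section and symbol names invented). */
PAGE_SIZE = 0x1000;
SECTIONS
{
    .demo_ro : {
        *(.rodata*)
        . = ALIGN(PAGE_SIZE);  /* was ". = NEXT(PAGE_SIZE);" in the GNU-ld-only spelling */
        __DEMO_RO_END__ = .;
    }
}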
@@ -107,19 +107,19 @@ vector_base cortex_a76_wa_cve_2018_3639_a76_vbar
 */
vector_entry cortex_a76_sync_exception_sp_el0
b sync_exception_sp_el0
-check_vector_size cortex_a76_sync_exception_sp_el0
+end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
b irq_sp_el0
-check_vector_size cortex_a76_irq_sp_el0
+end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
b fiq_sp_el0
-check_vector_size cortex_a76_fiq_sp_el0
+end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
b serror_sp_el0
-check_vector_size cortex_a76_serror_sp_el0
+end_vector_entry cortex_a76_serror_sp_el0

/* ---------------------------------------------------------------------
 * Current EL with SP_ELx: 0x200 - 0x400
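This hunk and the many that follow make one mechanical substitution: the check_vector_size <label> invocation that closed every vector_entry becomes end_vector_entry <label>. Both macros live in asm_macros.S, outside this diff. The old macro only asserted that an entry stayed within the architectural 32-instruction (128-byte) slot; a plausible reading of the rename, in line with the Clang goal of this pull request, is that the replacement enforces the limit by padding the entry up to the slot boundary with arithmetic both assemblers accept, rather than with an .if/.error check. The macros below are an illustrative stand-in under that assumption, not the definitions from the tree:

/* Sketch only: hypothetical stand-ins for the real macros in asm_macros.S. */
.macro demo_check_vector_size since	/* old style: assert, no padding */
.if (. - \since) > (32 * 4)
.error "Maximum possible size for a vector entry exceeded."
.endif
.endm

.macro demo_end_vector_entry since	/* new style: pad to the 128-byte slot */
.fill \since + (32 * 4) - .
.endm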
@@ -127,19 +127,19 @@ vector_entry cortex_a76_serror_sp_el0
 */
vector_entry cortex_a76_sync_exception_sp_elx
b sync_exception_sp_elx
-check_vector_size cortex_a76_sync_exception_sp_elx
+end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
b irq_sp_elx
-check_vector_size cortex_a76_irq_sp_elx
+end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
b fiq_sp_elx
-check_vector_size cortex_a76_fiq_sp_elx
+end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
b serror_sp_elx
-check_vector_size cortex_a76_serror_sp_elx
+end_vector_entry cortex_a76_serror_sp_elx

/* ---------------------------------------------------------------------
 * Lower EL using AArch64 : 0x400 - 0x600
@@ -148,22 +148,22 @@ vector_entry cortex_a76_serror_sp_elx
vector_entry cortex_a76_sync_exception_aarch64
apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
b sync_exception_aarch64
-check_vector_size cortex_a76_sync_exception_aarch64
+end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
b irq_aarch64
-check_vector_size cortex_a76_irq_aarch64
+end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
b fiq_aarch64
-check_vector_size cortex_a76_fiq_aarch64
+end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
b serror_aarch64
-check_vector_size cortex_a76_serror_aarch64
+end_vector_entry cortex_a76_serror_aarch64

/* ---------------------------------------------------------------------
 * Lower EL using AArch32 : 0x600 - 0x800
@@ -172,22 +172,22 @@ vector_entry cortex_a76_serror_aarch64
vector_entry cortex_a76_sync_exception_aarch32
apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
b sync_exception_aarch32
-check_vector_size cortex_a76_sync_exception_aarch32
+end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
b irq_aarch32
-check_vector_size cortex_a76_irq_aarch32
+end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
b fiq_aarch32
-check_vector_size cortex_a76_fiq_aarch32
+end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
b serror_aarch32
-check_vector_size cortex_a76_serror_aarch32
+end_vector_entry cortex_a76_serror_aarch32

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
@@ -55,19 +55,19 @@ vector_base workaround_bpflush_runtime_exceptions
 */
vector_entry workaround_bpflush_sync_exception_sp_el0
b sync_exception_sp_el0
-check_vector_size workaround_bpflush_sync_exception_sp_el0
+end_vector_entry workaround_bpflush_sync_exception_sp_el0

vector_entry workaround_bpflush_irq_sp_el0
b irq_sp_el0
-check_vector_size workaround_bpflush_irq_sp_el0
+end_vector_entry workaround_bpflush_irq_sp_el0

vector_entry workaround_bpflush_fiq_sp_el0
b fiq_sp_el0
-check_vector_size workaround_bpflush_fiq_sp_el0
+end_vector_entry workaround_bpflush_fiq_sp_el0

vector_entry workaround_bpflush_serror_sp_el0
b serror_sp_el0
-check_vector_size workaround_bpflush_serror_sp_el0
+end_vector_entry workaround_bpflush_serror_sp_el0

/* ---------------------------------------------------------------------
 * Current EL with SP_ELx: 0x200 - 0x400
@@ -75,19 +75,19 @@ vector_entry workaround_bpflush_serror_sp_el0
 */
vector_entry workaround_bpflush_sync_exception_sp_elx
b sync_exception_sp_elx
-check_vector_size workaround_bpflush_sync_exception_sp_elx
+end_vector_entry workaround_bpflush_sync_exception_sp_elx

vector_entry workaround_bpflush_irq_sp_elx
b irq_sp_elx
-check_vector_size workaround_bpflush_irq_sp_elx
+end_vector_entry workaround_bpflush_irq_sp_elx

vector_entry workaround_bpflush_fiq_sp_elx
b fiq_sp_elx
-check_vector_size workaround_bpflush_fiq_sp_elx
+end_vector_entry workaround_bpflush_fiq_sp_elx

vector_entry workaround_bpflush_serror_sp_elx
b serror_sp_elx
-check_vector_size workaround_bpflush_serror_sp_elx
+end_vector_entry workaround_bpflush_serror_sp_elx

/* ---------------------------------------------------------------------
 * Lower EL using AArch64 : 0x400 - 0x600
@@ -96,22 +96,22 @@ vector_entry workaround_bpflush_serror_sp_elx
vector_entry workaround_bpflush_sync_exception_aarch64
apply_workaround
b sync_exception_aarch64
-check_vector_size workaround_bpflush_sync_exception_aarch64
+end_vector_entry workaround_bpflush_sync_exception_aarch64

vector_entry workaround_bpflush_irq_aarch64
apply_workaround
b irq_aarch64
-check_vector_size workaround_bpflush_irq_aarch64
+end_vector_entry workaround_bpflush_irq_aarch64

vector_entry workaround_bpflush_fiq_aarch64
apply_workaround
b fiq_aarch64
-check_vector_size workaround_bpflush_fiq_aarch64
+end_vector_entry workaround_bpflush_fiq_aarch64

vector_entry workaround_bpflush_serror_aarch64
apply_workaround
b serror_aarch64
-check_vector_size workaround_bpflush_serror_aarch64
+end_vector_entry workaround_bpflush_serror_aarch64

/* ---------------------------------------------------------------------
 * Lower EL using AArch32 : 0x600 - 0x800
@@ -120,22 +120,22 @@ vector_entry workaround_bpflush_serror_aarch64
vector_entry workaround_bpflush_sync_exception_aarch32
apply_workaround
b sync_exception_aarch32
-check_vector_size workaround_bpflush_sync_exception_aarch32
+end_vector_entry workaround_bpflush_sync_exception_aarch32

vector_entry workaround_bpflush_irq_aarch32
apply_workaround
b irq_aarch32
-check_vector_size workaround_bpflush_irq_aarch32
+end_vector_entry workaround_bpflush_irq_aarch32

vector_entry workaround_bpflush_fiq_aarch32
apply_workaround
b fiq_aarch32
-check_vector_size workaround_bpflush_fiq_aarch32
+end_vector_entry workaround_bpflush_fiq_aarch32

vector_entry workaround_bpflush_serror_aarch32
apply_workaround
b serror_aarch32
-check_vector_size workaround_bpflush_serror_aarch32
+end_vector_entry workaround_bpflush_serror_aarch32

.global denver_disable_dco
@@ -114,19 +114,19 @@ aarch32_stub:
.word EMIT_BPIALL
.word EMIT_SMC

-check_vector_size bpiall_sync_exception_sp_el0
+end_vector_entry bpiall_sync_exception_sp_el0

vector_entry bpiall_irq_sp_el0
b irq_sp_el0
-check_vector_size bpiall_irq_sp_el0
+end_vector_entry bpiall_irq_sp_el0

vector_entry bpiall_fiq_sp_el0
b fiq_sp_el0
-check_vector_size bpiall_fiq_sp_el0
+end_vector_entry bpiall_fiq_sp_el0

vector_entry bpiall_serror_sp_el0
b serror_sp_el0
-check_vector_size bpiall_serror_sp_el0
+end_vector_entry bpiall_serror_sp_el0

/* ---------------------------------------------------------------------
 * Current EL with SP_ELx: 0x200 - 0x400
@@ -134,19 +134,19 @@ vector_entry bpiall_serror_sp_el0
 */
vector_entry bpiall_sync_exception_sp_elx
b sync_exception_sp_elx
-check_vector_size bpiall_sync_exception_sp_elx
+end_vector_entry bpiall_sync_exception_sp_elx

vector_entry bpiall_irq_sp_elx
b irq_sp_elx
-check_vector_size bpiall_irq_sp_elx
+end_vector_entry bpiall_irq_sp_elx

vector_entry bpiall_fiq_sp_elx
b fiq_sp_elx
-check_vector_size bpiall_fiq_sp_elx
+end_vector_entry bpiall_fiq_sp_elx

vector_entry bpiall_serror_sp_elx
b serror_sp_elx
-check_vector_size bpiall_serror_sp_elx
+end_vector_entry bpiall_serror_sp_elx

/* ---------------------------------------------------------------------
 * Lower EL using AArch64 : 0x400 - 0x600
@@ -154,19 +154,19 @@ vector_entry bpiall_serror_sp_elx
 */
vector_entry bpiall_sync_exception_aarch64
apply_cve_2017_5715_wa 1
-check_vector_size bpiall_sync_exception_aarch64
+end_vector_entry bpiall_sync_exception_aarch64

vector_entry bpiall_irq_aarch64
apply_cve_2017_5715_wa 2
-check_vector_size bpiall_irq_aarch64
+end_vector_entry bpiall_irq_aarch64

vector_entry bpiall_fiq_aarch64
apply_cve_2017_5715_wa 4
-check_vector_size bpiall_fiq_aarch64
+end_vector_entry bpiall_fiq_aarch64

vector_entry bpiall_serror_aarch64
apply_cve_2017_5715_wa 8
-check_vector_size bpiall_serror_aarch64
+end_vector_entry bpiall_serror_aarch64

/* ---------------------------------------------------------------------
 * Lower EL using AArch32 : 0x600 - 0x800
@@ -174,19 +174,19 @@ vector_entry bpiall_serror_aarch64
 */
vector_entry bpiall_sync_exception_aarch32
apply_cve_2017_5715_wa 1
-check_vector_size bpiall_sync_exception_aarch32
+end_vector_entry bpiall_sync_exception_aarch32

vector_entry bpiall_irq_aarch32
apply_cve_2017_5715_wa 2
-check_vector_size bpiall_irq_aarch32
+end_vector_entry bpiall_irq_aarch32

vector_entry bpiall_fiq_aarch32
apply_cve_2017_5715_wa 4
-check_vector_size bpiall_fiq_aarch32
+end_vector_entry bpiall_fiq_aarch32

vector_entry bpiall_serror_aarch32
apply_cve_2017_5715_wa 8
-check_vector_size bpiall_serror_aarch32
+end_vector_entry bpiall_serror_aarch32

/* ---------------------------------------------------------------------
 * This vector table is used while the workaround is executing. It
@@ -203,19 +203,19 @@ vector_base wa_cve_2017_5715_bpiall_ret_vbar
 */
vector_entry bpiall_ret_sync_exception_sp_el0
b report_unhandled_exception
-check_vector_size bpiall_ret_sync_exception_sp_el0
+end_vector_entry bpiall_ret_sync_exception_sp_el0

vector_entry bpiall_ret_irq_sp_el0
b report_unhandled_interrupt
-check_vector_size bpiall_ret_irq_sp_el0
+end_vector_entry bpiall_ret_irq_sp_el0

vector_entry bpiall_ret_fiq_sp_el0
b report_unhandled_interrupt
-check_vector_size bpiall_ret_fiq_sp_el0
+end_vector_entry bpiall_ret_fiq_sp_el0

vector_entry bpiall_ret_serror_sp_el0
b report_unhandled_exception
-check_vector_size bpiall_ret_serror_sp_el0
+end_vector_entry bpiall_ret_serror_sp_el0

/* ---------------------------------------------------------------------
 * Current EL with SP_ELx: 0x200 - 0x400 (UNUSED)
@@ -223,19 +223,19 @@ vector_entry bpiall_ret_serror_sp_el0
 */
vector_entry bpiall_ret_sync_exception_sp_elx
b report_unhandled_exception
-check_vector_size bpiall_ret_sync_exception_sp_elx
+end_vector_entry bpiall_ret_sync_exception_sp_elx

vector_entry bpiall_ret_irq_sp_elx
b report_unhandled_interrupt
-check_vector_size bpiall_ret_irq_sp_elx
+end_vector_entry bpiall_ret_irq_sp_elx

vector_entry bpiall_ret_fiq_sp_elx
b report_unhandled_interrupt
-check_vector_size bpiall_ret_fiq_sp_elx
+end_vector_entry bpiall_ret_fiq_sp_elx

vector_entry bpiall_ret_serror_sp_elx
b report_unhandled_exception
-check_vector_size bpiall_ret_serror_sp_elx
+end_vector_entry bpiall_ret_serror_sp_elx

/* ---------------------------------------------------------------------
 * Lower EL using AArch64 : 0x400 - 0x600 (UNUSED)
@@ -243,19 +243,19 @@ vector_entry bpiall_ret_serror_sp_elx
 */
vector_entry bpiall_ret_sync_exception_aarch64
b report_unhandled_exception
-check_vector_size bpiall_ret_sync_exception_aarch64
+end_vector_entry bpiall_ret_sync_exception_aarch64

vector_entry bpiall_ret_irq_aarch64
b report_unhandled_interrupt
-check_vector_size bpiall_ret_irq_aarch64
+end_vector_entry bpiall_ret_irq_aarch64

vector_entry bpiall_ret_fiq_aarch64
b report_unhandled_interrupt
-check_vector_size bpiall_ret_fiq_aarch64
+end_vector_entry bpiall_ret_fiq_aarch64

vector_entry bpiall_ret_serror_aarch64
b report_unhandled_exception
-check_vector_size bpiall_ret_serror_aarch64
+end_vector_entry bpiall_ret_serror_aarch64

/* ---------------------------------------------------------------------
 * Lower EL using AArch32 : 0x600 - 0x800
@@ -324,7 +324,7 @@ vector_entry bpiall_ret_sync_exception_aarch32
1:
ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
b sync_exception_aarch64
-check_vector_size bpiall_ret_sync_exception_aarch32
+end_vector_entry bpiall_ret_sync_exception_aarch32

vector_entry bpiall_ret_irq_aarch32
b report_unhandled_interrupt
@@ -346,12 +346,12 @@ bpiall_ret_fiq:
bpiall_ret_serror:
ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
b serror_aarch64
-check_vector_size bpiall_ret_irq_aarch32
+end_vector_entry bpiall_ret_irq_aarch32

vector_entry bpiall_ret_fiq_aarch32
b report_unhandled_interrupt
-check_vector_size bpiall_ret_fiq_aarch32
+end_vector_entry bpiall_ret_fiq_aarch32

vector_entry bpiall_ret_serror_aarch32
b report_unhandled_exception
-check_vector_size bpiall_ret_serror_aarch32
+end_vector_entry bpiall_ret_serror_aarch32
@@ -66,19 +66,19 @@ vector_base wa_cve_2017_5715_mmu_vbar
 */
vector_entry mmu_sync_exception_sp_el0
b sync_exception_sp_el0
-check_vector_size mmu_sync_exception_sp_el0
+end_vector_entry mmu_sync_exception_sp_el0

vector_entry mmu_irq_sp_el0
b irq_sp_el0
-check_vector_size mmu_irq_sp_el0
+end_vector_entry mmu_irq_sp_el0

vector_entry mmu_fiq_sp_el0
b fiq_sp_el0
-check_vector_size mmu_fiq_sp_el0
+end_vector_entry mmu_fiq_sp_el0

vector_entry mmu_serror_sp_el0
b serror_sp_el0
-check_vector_size mmu_serror_sp_el0
+end_vector_entry mmu_serror_sp_el0

/* ---------------------------------------------------------------------
 * Current EL with SP_ELx: 0x200 - 0x400
@@ -86,19 +86,19 @@ vector_entry mmu_serror_sp_el0
 */
vector_entry mmu_sync_exception_sp_elx
b sync_exception_sp_elx
-check_vector_size mmu_sync_exception_sp_elx
+end_vector_entry mmu_sync_exception_sp_elx

vector_entry mmu_irq_sp_elx
b irq_sp_elx
-check_vector_size mmu_irq_sp_elx
+end_vector_entry mmu_irq_sp_elx

vector_entry mmu_fiq_sp_elx
b fiq_sp_elx
-check_vector_size mmu_fiq_sp_elx
+end_vector_entry mmu_fiq_sp_elx

vector_entry mmu_serror_sp_elx
b serror_sp_elx
-check_vector_size mmu_serror_sp_elx
+end_vector_entry mmu_serror_sp_elx

/* ---------------------------------------------------------------------
 * Lower EL using AArch64 : 0x400 - 0x600
@@ -107,22 +107,22 @@ vector_entry mmu_serror_sp_elx
vector_entry mmu_sync_exception_aarch64
apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
b sync_exception_aarch64
-check_vector_size mmu_sync_exception_aarch64
+end_vector_entry mmu_sync_exception_aarch64

vector_entry mmu_irq_aarch64
apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
b irq_aarch64
-check_vector_size mmu_irq_aarch64
+end_vector_entry mmu_irq_aarch64

vector_entry mmu_fiq_aarch64
apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
b fiq_aarch64
-check_vector_size mmu_fiq_aarch64
+end_vector_entry mmu_fiq_aarch64

vector_entry mmu_serror_aarch64
apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
b serror_aarch64
-check_vector_size mmu_serror_aarch64
+end_vector_entry mmu_serror_aarch64

/* ---------------------------------------------------------------------
 * Lower EL using AArch32 : 0x600 - 0x800
@@ -131,19 +131,19 @@ vector_entry mmu_serror_aarch64
vector_entry mmu_sync_exception_aarch32
apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
b sync_exception_aarch32
-check_vector_size mmu_sync_exception_aarch32
+end_vector_entry mmu_sync_exception_aarch32

vector_entry mmu_irq_aarch32
apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
b irq_aarch32
-check_vector_size mmu_irq_aarch32
+end_vector_entry mmu_irq_aarch32

vector_entry mmu_fiq_aarch32
apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
b fiq_aarch32
-check_vector_size mmu_fiq_aarch32
+end_vector_entry mmu_fiq_aarch32

vector_entry mmu_serror_aarch32
apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
b serror_aarch32
-check_vector_size mmu_serror_aarch32
+end_vector_entry mmu_serror_aarch32
@@ -59,7 +59,7 @@ SECTIONS
 * executable. No RW data from the next section must creep in.
 * Ensure the rest of the current memory page is unused.
 */
-. = NEXT(PAGE_SIZE);
+. = ALIGN(PAGE_SIZE);
__RO_END__ = .;
} >RAM
@@ -161,7 +161,7 @@ SECTIONS
 * as device memory. No other unexpected data must creep in.
 * Ensure the rest of the current memory page is unused.
 */
-. = NEXT(PAGE_SIZE);
+. = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM2
#endif
@@ -12,7 +12,6 @@
.macro pmusram_entry_func _name
.section .pmusram.entry, "ax"
.type \_name, %function
-.func \_name
.cfi_startproc
\_name:
.endm
@@ -15,7 +15,6 @@
.cfi_sections .debug_frame
.section .sram.text, "ax"
.type \_name, %function
-.func \_name
.cfi_startproc
\_name:
.endm
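Both of the entry-point macros above drop the .func \_name directive (any matching .endfunc sits outside these hunks). .func and .endfunc are GNU-as-only directives that only influence stabs debug output and are reportedly rejected by Clang's integrated assembler, which fits the goal of this pull request; the .type and .cfi_startproc directives that remain carry the information that actually matters for ELF symbols and DWARF unwinding. A hypothetical sketch of the resulting portable shape, with invented names:

/* Hypothetical sketch (not the macro from the tree): a portable entry-point
 * macro that relies on .type/.cfi_* instead of .func/.endfunc so it assembles
 * under both GNU as and Clang's integrated assembler. */
.macro demo_entry_func _name
.section .demo.entry, "ax"
.type \_name, %function
.cfi_startproc
\_name:
.endm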
@@ -23,19 +23,19 @@ vector_base spm_shim_exceptions_ptr, .spm_shim_exceptions
 */
vector_entry SynchronousExceptionSP0, .spm_shim_exceptions
b .
-check_vector_size SynchronousExceptionSP0
+end_vector_entry SynchronousExceptionSP0

vector_entry IrqSP0, .spm_shim_exceptions
b .
-check_vector_size IrqSP0
+end_vector_entry IrqSP0

vector_entry FiqSP0, .spm_shim_exceptions
b .
-check_vector_size FiqSP0
+end_vector_entry FiqSP0

vector_entry SErrorSP0, .spm_shim_exceptions
b .
-check_vector_size SErrorSP0
+end_vector_entry SErrorSP0

/* -----------------------------------------------------
 * Current EL with SPx: 0x200 - 0x400
@@ -43,19 +43,19 @@ vector_entry SErrorSP0, .spm_shim_exceptions
 */
vector_entry SynchronousExceptionSPx, .spm_shim_exceptions
b .
-check_vector_size SynchronousExceptionSPx
+end_vector_entry SynchronousExceptionSPx

vector_entry IrqSPx, .spm_shim_exceptions
b .
-check_vector_size IrqSPx
+end_vector_entry IrqSPx

vector_entry FiqSPx, .spm_shim_exceptions
b .
-check_vector_size FiqSPx
+end_vector_entry FiqSPx

vector_entry SErrorSPx, .spm_shim_exceptions
b .
-check_vector_size SErrorSPx
+end_vector_entry SErrorSPx

/* -----------------------------------------------------
 * Lower EL using AArch64 : 0x400 - 0x600. No exceptions
@@ -93,19 +93,19 @@ do_smc:
handle_sys_trap:
panic:
b panic
-check_vector_size SynchronousExceptionA64
+end_vector_entry SynchronousExceptionA64

vector_entry IrqA64, .spm_shim_exceptions
b .
-check_vector_size IrqA64
+end_vector_entry IrqA64

vector_entry FiqA64, .spm_shim_exceptions
b .
-check_vector_size FiqA64
+end_vector_entry FiqA64

vector_entry SErrorA64, .spm_shim_exceptions
b .
-check_vector_size SErrorA64
+end_vector_entry SErrorA64

/* -----------------------------------------------------
 * Lower EL using AArch32 : 0x600 - 0x800
@@ -113,16 +113,16 @@ vector_entry SErrorA64, .spm_shim_exceptions
 */
vector_entry SynchronousExceptionA32, .spm_shim_exceptions
b .
-check_vector_size SynchronousExceptionA32
+end_vector_entry SynchronousExceptionA32

vector_entry IrqA32, .spm_shim_exceptions
b .
-check_vector_size IrqA32
+end_vector_entry IrqA32

vector_entry FiqA32, .spm_shim_exceptions
b .
-check_vector_size FiqA32
+end_vector_entry FiqA32

vector_entry SErrorA32, .spm_shim_exceptions
b .
-check_vector_size SErrorA32
+end_vector_entry SErrorA32