Mirror of https://github.com/ARM-software/arm-trusted-firmware.git (synced 2025-04-16 17:44:19 +00:00)
AMU: Add configuration helpers for aarch64
Add some AMU helper functions to allow configuring, reading and writing
of the Group 0 and Group 1 counters. Documentation for these helpers
will come in a separate patch.

Change-Id: I656e070d2dae830c22414f694aa655341d4e2c40
Signed-off-by: Dimitris Papastamos <dimitris.papastamos@arm.com>
commit 0767d50e69
parent 59902b7c4c
8 changed files with 469 additions and 43 deletions
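For orientation, the sketch below shows how EL3 code might drive the new public helpers once this patch is applied, assuming the platform defines at least one group 1 counter. The event number 0x11 and the wrapper function are illustrative only; they are not part of this commit.

/* Illustrative sketch only -- not part of this patch. */
#include <amu.h>
#include <stdint.h>

static uint64_t sample_group1_counter0(void)
{
        /* Program group 1 counter 0 with a hypothetical event number. */
        amu_group1_set_evtype(0, 0x11);

        /* Read back the running count for that counter. */
        return amu_group1_cnt_read(0);
}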
@@ -51,7 +51,8 @@ BL31_SOURCES += lib/extensions/spe/spe.c
 endif
 
 ifeq (${ENABLE_AMU},1)
-BL31_SOURCES += lib/extensions/amu/aarch64/amu.c
+BL31_SOURCES += lib/extensions/amu/aarch64/amu.c \
+                lib/extensions/amu/aarch64/amu_helpers.S
 endif
 
 ifeq (${ENABLE_SVE_FOR_NS},1)
@@ -656,4 +656,45 @@
 #define AMEVTYPER02_EL0 S3_3_C13_C6_2
 #define AMEVTYPER03_EL0 S3_3_C13_C6_3
 
+/* Activity Monitor Group 1 Event Counter Registers */
+#define AMEVCNTR10_EL0 S3_3_C13_C12_0
+#define AMEVCNTR11_EL0 S3_3_C13_C12_1
+#define AMEVCNTR12_EL0 S3_3_C13_C12_2
+#define AMEVCNTR13_EL0 S3_3_C13_C12_3
+#define AMEVCNTR14_EL0 S3_3_C13_C12_4
+#define AMEVCNTR15_EL0 S3_3_C13_C12_5
+#define AMEVCNTR16_EL0 S3_3_C13_C12_6
+#define AMEVCNTR17_EL0 S3_3_C13_C12_7
+#define AMEVCNTR18_EL0 S3_3_C13_C13_0
+#define AMEVCNTR19_EL0 S3_3_C13_C13_1
+#define AMEVCNTR1A_EL0 S3_3_C13_C13_2
+#define AMEVCNTR1B_EL0 S3_3_C13_C13_3
+#define AMEVCNTR1C_EL0 S3_3_C13_C13_4
+#define AMEVCNTR1D_EL0 S3_3_C13_C13_5
+#define AMEVCNTR1E_EL0 S3_3_C13_C13_6
+#define AMEVCNTR1F_EL0 S3_3_C13_C13_7
+
+/* Activity Monitor Group 1 Event Type Registers */
+#define AMEVTYPER10_EL0 S3_3_C13_C14_0
+#define AMEVTYPER11_EL0 S3_3_C13_C14_1
+#define AMEVTYPER12_EL0 S3_3_C13_C14_2
+#define AMEVTYPER13_EL0 S3_3_C13_C14_3
+#define AMEVTYPER14_EL0 S3_3_C13_C14_4
+#define AMEVTYPER15_EL0 S3_3_C13_C14_5
+#define AMEVTYPER16_EL0 S3_3_C13_C14_6
+#define AMEVTYPER17_EL0 S3_3_C13_C14_7
+#define AMEVTYPER18_EL0 S3_3_C13_C15_0
+#define AMEVTYPER19_EL0 S3_3_C13_C15_1
+#define AMEVTYPER1A_EL0 S3_3_C13_C15_2
+#define AMEVTYPER1B_EL0 S3_3_C13_C15_3
+#define AMEVTYPER1C_EL0 S3_3_C13_C15_4
+#define AMEVTYPER1D_EL0 S3_3_C13_C15_5
+#define AMEVTYPER1E_EL0 S3_3_C13_C15_6
+#define AMEVTYPER1F_EL0 S3_3_C13_C15_7
+
+/* AMCGCR_EL0 definitions */
+#define AMCGCR_EL0_CG1NC_SHIFT U(8)
+#define AMCGCR_EL0_CG1NC_LENGTH U(8)
+#define AMCGCR_EL0_CG1NC_MASK U(0xff)
+
 #endif /* __ARCH_H__ */
@@ -322,6 +322,7 @@ DEFINE_RENAME_SYSREG_WRITE_FUNC(icc_eoir0_el1, ICC_EOIR0_EL1)
 DEFINE_RENAME_SYSREG_WRITE_FUNC(icc_eoir1_el1, ICC_EOIR1_EL1)
 DEFINE_RENAME_SYSREG_WRITE_FUNC(icc_sgi0r_el1, ICC_SGI0R_EL1)
 
+DEFINE_RENAME_SYSREG_RW_FUNCS(amcgcr_el0, AMCGCR_EL0)
 DEFINE_RENAME_SYSREG_RW_FUNCS(amcntenclr0_el0, AMCNTENCLR0_EL0)
 DEFINE_RENAME_SYSREG_RW_FUNCS(amcntenset0_el0, AMCNTENSET0_EL0)
 DEFINE_RENAME_SYSREG_RW_FUNCS(amcntenclr1_el0, AMCNTENCLR1_EL0)
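The AMCGCR_EL0 accessor and the CG1NC field definitions added above make it possible to discover how many group 1 counters the CPU implements. A minimal sketch, assuming the helper name below (it is illustrative, not part of this patch):

/* Illustrative sketch only -- not part of this patch. */
static inline unsigned int amu_group1_num_counters(void)
{
        /* AMCGCR_EL0.CG1NC holds the number of implemented group 1 counters. */
        return (unsigned int)((read_amcgcr_el0() >> AMCGCR_EL0_CG1NC_SHIFT) &
                              AMCGCR_EL0_CG1NC_MASK);
}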
@@ -10,6 +10,7 @@
 #include <sys/cdefs.h> /* for CASSERT() */
 #include <cassert.h>
 #include <platform_def.h>
+#include <stdint.h>
 
 /* All group 0 counters */
 #define AMU_GROUP0_COUNTERS_MASK 0xf
@@ -29,6 +30,16 @@
 CASSERT(AMU_GROUP1_COUNTERS_MASK <= 0xffff, invalid_amu_group1_counters_mask);
 CASSERT(AMU_GROUP1_NR_COUNTERS <= 16, invalid_amu_group1_nr_counters);
 
+int amu_supported(void);
 void amu_enable(int el2_unused);
 
+/* Group 0 configuration helpers */
+uint64_t amu_group0_cnt_read(int idx);
+void amu_group0_cnt_write(int idx, uint64_t val);
+
+/* Group 1 configuration helpers */
+uint64_t amu_group1_cnt_read(int idx);
+void amu_group1_cnt_write(int idx, uint64_t val);
+void amu_group1_set_evtype(int idx, unsigned int val);
+
 #endif /* __AMU_H__ */
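The public interface above is intended to be driven from the EL3 context management code before exiting to a lower exception level. A hedged sketch of such a call site follows; the wrapper name is illustrative, not part of this patch.

/* Illustrative sketch only -- not part of this patch. */
#include <amu.h>

static void enable_extensions_nonsecure_sketch(int el2_unused)
{
        /* el2_unused is non-zero when no software runs at EL2, in which
         * case the EL2 trap bit can be cleared as well. */
#if ENABLE_AMU
        amu_enable(el2_unused);
#endif
}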
include/lib/extensions/amu_private.h (new file, 19 lines)
@@ -0,0 +1,19 @@
/*
 * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef __AMU_PRIVATE_H__
#define __AMU_PRIVATE_H__

#include <stdint.h>

uint64_t amu_group0_cnt_read_internal(int idx);
void amu_group0_cnt_write_internal(int idx, uint64_t);

uint64_t amu_group1_cnt_read_internal(int idx);
void amu_group1_cnt_write_internal(int idx, uint64_t);
void amu_group1_set_evtype_internal(int idx, unsigned int val);

#endif /* __AMU_PRIVATE_H__ */
@@ -7,26 +7,30 @@
 #include <amu.h>
 #include <arch.h>
 #include <arch_helpers.h>
+#include <debug.h>
 
 void amu_enable(int el2_unused)
 {
         uint64_t features;
 
         features = read_id_pfr0() >> ID_PFR0_AMU_SHIFT;
-        if ((features & ID_PFR0_AMU_MASK) == 1) {
-                if (el2_unused) {
-                        uint64_t v;
-
-                        /*
-                         * Non-secure access from EL0 or EL1 to the Activity Monitor
-                         * registers do not trap to EL2.
-                         */
-                        v = read_hcptr();
-                        v &= ~TAM_BIT;
-                        write_hcptr(v);
-                }
-
-                /* Enable group 0 counters */
-                write_amcntenset0(AMU_GROUP0_COUNTERS_MASK);
-        }
+        if ((features & ID_PFR0_AMU_MASK) != 1) {
+                WARN("Cannot enable AMU - not supported\n");
+                return;
+        }
+
+        if (el2_unused) {
+                uint64_t v;
+
+                /*
+                 * Non-secure access from EL0 or EL1 to the Activity Monitor
+                 * registers do not trap to EL2.
+                 */
+                v = read_hcptr();
+                v &= ~TAM_BIT;
+                write_hcptr(v);
+        }
+
+        /* Enable group 0 counters */
+        write_amcntenset0(AMU_GROUP0_COUNTERS_MASK);
 }
@@ -5,38 +5,106 @@
  */
 
 #include <amu.h>
+#include <amu_private.h>
 #include <arch.h>
 #include <arch_helpers.h>
+#include <assert.h>
+#include <debug.h>
 
-void amu_enable(int el2_unused)
+#define AMU_GROUP0_NR_COUNTERS 4
+
+int amu_supported(void)
 {
         uint64_t features;
 
         features = read_id_aa64pfr0_el1() >> ID_AA64PFR0_AMU_SHIFT;
-        if ((features & ID_AA64PFR0_AMU_MASK) == 1) {
-                uint64_t v;
-
-                if (el2_unused) {
-                        /*
-                         * CPTR_EL2.TAM: Set to zero so any accesses to
-                         * the Activity Monitor registers do not trap to EL2.
-                         */
-                        v = read_cptr_el2();
-                        v &= ~CPTR_EL2_TAM_BIT;
-                        write_cptr_el2(v);
-                }
-
-                /*
-                 * CPTR_EL3.TAM: Set to zero so that any accesses to
-                 * the Activity Monitor registers do not trap to EL3.
-                 */
-                v = read_cptr_el3();
-                v &= ~TAM_BIT;
-                write_cptr_el3(v);
-
-                /* Enable group 0 counters */
-                write_amcntenset0_el0(AMU_GROUP0_COUNTERS_MASK);
-                /* Enable group 1 counters */
-                write_amcntenset1_el0(AMU_GROUP1_COUNTERS_MASK);
-        }
+        return (features & ID_AA64PFR0_AMU_MASK) == 1;
 }
+
+/*
+ * Enable counters. This function is meant to be invoked
+ * by the context management library before exiting from EL3.
+ */
+void amu_enable(int el2_unused)
+{
+        uint64_t v;
+
+        if (!amu_supported()) {
+                WARN("Cannot enable AMU - not supported\n");
+                return;
+        }
+
+        if (el2_unused) {
+                /*
+                 * CPTR_EL2.TAM: Set to zero so any accesses to
+                 * the Activity Monitor registers do not trap to EL2.
+                 */
+                v = read_cptr_el2();
+                v &= ~CPTR_EL2_TAM_BIT;
+                write_cptr_el2(v);
+        }
+
+        /*
+         * CPTR_EL3.TAM: Set to zero so that any accesses to
+         * the Activity Monitor registers do not trap to EL3.
+         */
+        v = read_cptr_el3();
+        v &= ~TAM_BIT;
+        write_cptr_el3(v);
+
+        /* Enable group 0 counters */
+        write_amcntenset0_el0(AMU_GROUP0_COUNTERS_MASK);
+        /* Enable group 1 counters */
+        write_amcntenset1_el0(AMU_GROUP1_COUNTERS_MASK);
+}
+
+/* Read the group 0 counter identified by the given `idx`. */
+uint64_t amu_group0_cnt_read(int idx)
+{
+        assert(amu_supported());
+        assert(idx >= 0 && idx < AMU_GROUP0_NR_COUNTERS);
+
+        return amu_group0_cnt_read_internal(idx);
+}
+
+/* Write the group 0 counter identified by the given `idx` with `val`. */
+void amu_group0_cnt_write(int idx, uint64_t val)
+{
+        assert(amu_supported());
+        assert(idx >= 0 && idx < AMU_GROUP0_NR_COUNTERS);
+
+        amu_group0_cnt_write_internal(idx, val);
+        isb();
+}
+
+/* Read the group 1 counter identified by the given `idx`. */
+uint64_t amu_group1_cnt_read(int idx)
+{
+        assert(amu_supported());
+        assert(idx >= 0 && idx < AMU_GROUP1_NR_COUNTERS);
+
+        return amu_group1_cnt_read_internal(idx);
+}
+
+/* Write the group 1 counter identified by the given `idx` with `val`. */
+void amu_group1_cnt_write(int idx, uint64_t val)
+{
+        assert(amu_supported());
+        assert(idx >= 0 && idx < AMU_GROUP1_NR_COUNTERS);
+
+        amu_group1_cnt_write_internal(idx, val);
+        isb();
+}
+
+/*
+ * Program the event type register for the given `idx` with
+ * the event number `val`.
+ */
+void amu_group1_set_evtype(int idx, unsigned int val)
+{
+        assert(amu_supported());
+        assert (idx >= 0 && idx < AMU_GROUP1_NR_COUNTERS);
+
+        amu_group1_set_evtype_internal(idx, val);
+        isb();
+}
lib/extensions/amu/aarch64/amu_helpers.S (new file, 281 lines)
@@ -0,0 +1,281 @@
/*
 * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <assert_macros.S>
#include <asm_macros.S>

        .globl  amu_group0_cnt_read_internal
        .globl  amu_group0_cnt_write_internal
        .globl  amu_group1_cnt_read_internal
        .globl  amu_group1_cnt_write_internal
        .globl  amu_group1_set_evtype_internal

/*
 * uint64_t amu_group0_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `x0`.
 */
func amu_group0_cnt_read_internal
#if ENABLE_ASSERTIONS
        /*
         * It can be dangerous to call this function with an
         * out of bounds index. Ensure `idx` is valid.
         */
        mov     x1, x0
        lsr     x1, x1, #2
        cmp     x1, #0
        ASM_ASSERT(eq)
#endif

        /*
         * Given `idx` calculate address of mrs/ret instruction pair
         * in the table below.
         */
        adr     x1, 1f
        lsl     x0, x0, #3      /* each mrs/ret sequence is 8 bytes */
        add     x1, x1, x0
        br      x1

1:
        mrs     x0, AMEVCNTR00_EL0      /* index 0 */
        ret
        mrs     x0, AMEVCNTR01_EL0      /* index 1 */
        ret
        mrs     x0, AMEVCNTR02_EL0      /* index 2 */
        ret
        mrs     x0, AMEVCNTR03_EL0      /* index 3 */
        ret
endfunc amu_group0_cnt_read_internal

/*
 * void amu_group0_cnt_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU counter.
 */
func amu_group0_cnt_write_internal
#if ENABLE_ASSERTIONS
        /*
         * It can be dangerous to call this function with an
         * out of bounds index. Ensure `idx` is valid.
         */
        mov     x2, x0
        lsr     x2, x2, #2
        cmp     x2, #0
        ASM_ASSERT(eq)
#endif

        /*
         * Given `idx` calculate address of mrs/ret instruction pair
         * in the table below.
         */
        adr     x2, 1f
        lsl     x0, x0, #3      /* each msr/ret sequence is 8 bytes */
        add     x2, x2, x0
        br      x2

1:
        msr     AMEVCNTR00_EL0, x1      /* index 0 */
        ret
        msr     AMEVCNTR01_EL0, x1      /* index 1 */
        ret
        msr     AMEVCNTR02_EL0, x1      /* index 2 */
        ret
        msr     AMEVCNTR03_EL0, x1      /* index 3 */
        ret
endfunc amu_group0_cnt_write_internal

/*
 * uint64_t amu_group1_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `x0`.
 */
func amu_group1_cnt_read_internal
#if ENABLE_ASSERTIONS
        /*
         * It can be dangerous to call this function with an
         * out of bounds index. Ensure `idx` is valid.
         */
        mov     x1, x0
        lsr     x1, x1, #4
        cmp     x1, #0
        ASM_ASSERT(eq)
#endif

        /*
         * Given `idx` calculate address of mrs/ret instruction pair
         * in the table below.
         */
        adr     x1, 1f
        lsl     x0, x0, #3      /* each mrs/ret sequence is 8 bytes */
        add     x1, x1, x0
        br      x1

1:
        mrs     x0, AMEVCNTR10_EL0      /* index 0 */
        ret
        mrs     x0, AMEVCNTR11_EL0      /* index 1 */
        ret
        mrs     x0, AMEVCNTR12_EL0      /* index 2 */
        ret
        mrs     x0, AMEVCNTR13_EL0      /* index 3 */
        ret
        mrs     x0, AMEVCNTR14_EL0      /* index 4 */
        ret
        mrs     x0, AMEVCNTR15_EL0      /* index 5 */
        ret
        mrs     x0, AMEVCNTR16_EL0      /* index 6 */
        ret
        mrs     x0, AMEVCNTR17_EL0      /* index 7 */
        ret
        mrs     x0, AMEVCNTR18_EL0      /* index 8 */
        ret
        mrs     x0, AMEVCNTR19_EL0      /* index 9 */
        ret
        mrs     x0, AMEVCNTR1A_EL0      /* index 10 */
        ret
        mrs     x0, AMEVCNTR1B_EL0      /* index 11 */
        ret
        mrs     x0, AMEVCNTR1C_EL0      /* index 12 */
        ret
        mrs     x0, AMEVCNTR1D_EL0      /* index 13 */
        ret
        mrs     x0, AMEVCNTR1E_EL0      /* index 14 */
        ret
        mrs     x0, AMEVCNTR1F_EL0      /* index 15 */
        ret
endfunc amu_group1_cnt_read_internal

/*
 * void amu_group1_cnt_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU counter.
 */
func amu_group1_cnt_write_internal
#if ENABLE_ASSERTIONS
        /*
         * It can be dangerous to call this function with an
         * out of bounds index. Ensure `idx` is valid.
         */
        mov     x2, x0
        lsr     x2, x2, #4
        cmp     x2, #0
        ASM_ASSERT(eq)
#endif

        /*
         * Given `idx` calculate address of mrs/ret instruction pair
         * in the table below.
         */
        adr     x2, 1f
        lsl     x0, x0, #3      /* each msr/ret sequence is 8 bytes */
        add     x2, x2, x0
        br      x2

1:
        msr     AMEVCNTR10_EL0, x1      /* index 0 */
        ret
        msr     AMEVCNTR11_EL0, x1      /* index 1 */
        ret
        msr     AMEVCNTR12_EL0, x1      /* index 2 */
        ret
        msr     AMEVCNTR13_EL0, x1      /* index 3 */
        ret
        msr     AMEVCNTR14_EL0, x1      /* index 4 */
        ret
        msr     AMEVCNTR15_EL0, x1      /* index 5 */
        ret
        msr     AMEVCNTR16_EL0, x1      /* index 6 */
        ret
        msr     AMEVCNTR17_EL0, x1      /* index 7 */
        ret
        msr     AMEVCNTR18_EL0, x1      /* index 8 */
        ret
        msr     AMEVCNTR19_EL0, x1      /* index 9 */
        ret
        msr     AMEVCNTR1A_EL0, x1      /* index 10 */
        ret
        msr     AMEVCNTR1B_EL0, x1      /* index 11 */
        ret
        msr     AMEVCNTR1C_EL0, x1      /* index 12 */
        ret
        msr     AMEVCNTR1D_EL0, x1      /* index 13 */
        ret
        msr     AMEVCNTR1E_EL0, x1      /* index 14 */
        ret
        msr     AMEVCNTR1F_EL0, x1      /* index 15 */
        ret
endfunc amu_group1_cnt_write_internal

/*
 * void amu_group1_set_evtype_internal(int idx, unsigned int val);
 *
 * Program the AMU event type register indexed by `idx`
 * with the value `val`.
 */
func amu_group1_set_evtype_internal
#if ENABLE_ASSERTIONS
        /*
         * It can be dangerous to call this function with an
         * out of bounds index. Ensure `idx` is valid.
         */
        mov     x2, x0
        lsr     x2, x2, #4
        cmp     x2, #0
        ASM_ASSERT(eq)

        /* val should be between [0, 65535] */
        mov     x2, x1
        lsr     x2, x2, #16
        cmp     x2, #0
        ASM_ASSERT(eq)
#endif

        /*
         * Given `idx` calculate address of msr/ret instruction pair
         * in the table below.
         */
        adr     x2, 1f
        lsl     x0, x0, #3      /* each msr/ret sequence is 8 bytes */
        add     x2, x2, x0
        br      x2

1:
        msr     AMEVTYPER10_EL0, x1     /* index 0 */
        ret
        msr     AMEVTYPER11_EL0, x1     /* index 1 */
        ret
        msr     AMEVTYPER12_EL0, x1     /* index 2 */
        ret
        msr     AMEVTYPER13_EL0, x1     /* index 3 */
        ret
        msr     AMEVTYPER14_EL0, x1     /* index 4 */
        ret
        msr     AMEVTYPER15_EL0, x1     /* index 5 */
        ret
        msr     AMEVTYPER16_EL0, x1     /* index 6 */
        ret
        msr     AMEVTYPER17_EL0, x1     /* index 7 */
        ret
        msr     AMEVTYPER18_EL0, x1     /* index 8 */
        ret
        msr     AMEVTYPER19_EL0, x1     /* index 9 */
        ret
        msr     AMEVTYPER1A_EL0, x1     /* index 10 */
        ret
        msr     AMEVTYPER1B_EL0, x1     /* index 11 */
        ret
        msr     AMEVTYPER1C_EL0, x1     /* index 12 */
        ret
        msr     AMEVTYPER1D_EL0, x1     /* index 13 */
        ret
        msr     AMEVTYPER1E_EL0, x1     /* index 14 */
        ret
        msr     AMEVTYPER1F_EL0, x1     /* index 15 */
        ret
endfunc amu_group1_set_evtype_internal
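A note on the pattern used throughout amu_helpers.S: the mrs and msr instructions encode the target system register as an immediate, so the register cannot be chosen by a run-time value. Each helper therefore validates `idx` and then branches into a table of fixed mrs/ret (or msr/ret) pairs, using `idx * 8` as the byte offset. In C terms a group 0 read behaves roughly like the sketch below; the per-register accessors named here are hypothetical and only stand in for the individual mrs instructions.

/* Illustrative sketch only -- not part of this patch. */
static uint64_t amu_group0_cnt_read_sketch(int idx)
{
        /* Each case reads one fixed register, mirroring one mrs/ret
         * pair in the assembly jump table. */
        switch (idx) {
        case 0:
                return read_amevcntr00_el0();   /* hypothetical accessor */
        case 1:
                return read_amevcntr01_el0();   /* hypothetical accessor */
        case 2:
                return read_amevcntr02_el0();   /* hypothetical accessor */
        default:
                return read_amevcntr03_el0();   /* hypothetical accessor */
        }
}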