refactor(amu): detect architected counters at runtime

This change removes the `AMU_GROUP0_COUNTERS_MASK` and
`AMU_GROUP0_NR_COUNTERS` preprocessor definitions, instead retrieving
the number of group 0 counters dynamically through `AMCGCR_EL0.CG0NC`.

Change-Id: I70e39c30fbd5df89b214276fac79cc8758a89f72
Signed-off-by: Chris Kay <chris.kay@arm.com>
Author: Chris Kay <chris.kay@arm.com>
Date:   2021-05-25 12:33:18 +01:00
commit 81e2ff1f36
parent 1fd685a74d

5 changed files with 39 additions and 23 deletions
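For reference, here is a minimal, host-runnable C sketch (not part of the patch) of the arithmetic the new code relies on: `AMCGCR_EL0.CG0NC` reports how many architected (group 0) counters the CPU implements, and `(1 << CG0NC) - 1` produces the matching counter enable mask. The `fake_amcgcr_el0` value below is fabricated purely for illustration; the firmware reads the real register through its system-register accessor helpers.

/*
 * Illustrative only: mimics the CG0NC field extraction and mask
 * computation from this patch, using a made-up AMCGCR_EL0 value.
 */
#include <assert.h>
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define AMCGCR_EL0_CG0NC_SHIFT  UINT64_C(0)
#define AMCGCR_EL0_CG0NC_MASK   UINT64_C(0xff)

/* Hypothetical AMCGCR_EL0 value: CG1NC = 5 auxiliary, CG0NC = 4 architected counters. */
static const uint64_t fake_amcgcr_el0 = (UINT64_C(5) << 8) | UINT64_C(4);

static uint64_t read_amcgcr_el0_cg0nc(void)
{
        /* Extract the 8-bit CG0NC field, as the new accessor in the patch does. */
        return (fake_amcgcr_el0 >> AMCGCR_EL0_CG0NC_SHIFT) & AMCGCR_EL0_CG0NC_MASK;
}

int main(void)
{
        uint64_t cg0nc = read_amcgcr_el0_cg0nc();
        uint64_t mask = (UINT64_C(1) << cg0nc) - 1U; /* bits [cg0nc-1:0] set */

        assert(cg0nc == 4U);
        printf("CG0NC = %" PRIu64 ", group 0 enable mask = 0x%" PRIx64 "\n",
               cg0nc, mask);
        return 0;
}

With CG0NC = 4 this prints a mask of 0xf, which is exactly the value the old `AMU_GROUP0_COUNTERS_MASK` hard-coded; on a CPU implementing a different number of architected counters, the mask now follows the hardware.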


@@ -755,6 +755,8 @@
 #define AMCFGR_N_MASK           U(0xff)
 
 /* AMCGCR definitions */
+#define AMCGCR_CG0NC_SHIFT      U(0)
+#define AMCGCR_CG0NC_MASK       U(0xff)
 #define AMCGCR_CG1NC_SHIFT      U(8)
 #define AMCGCR_CG1NC_MASK       U(0xff)


@@ -1069,6 +1069,8 @@
 #define AMCFGR_EL0_N_MASK               U(0xff)
 
 /* AMCGCR_EL0 definitions */
+#define AMCGCR_EL0_CG0NC_SHIFT          U(0)
+#define AMCGCR_EL0_CG0NC_MASK           U(0xff)
 #define AMCGCR_EL0_CG1NC_SHIFT          U(8)
 #define AMCGCR_EL0_CG1NC_MASK           U(0xff)


@@ -15,9 +15,7 @@
 #include <platform_def.h>
 
-/* All group 0 counters */
-#define AMU_GROUP0_COUNTERS_MASK        U(0xf)
-#define AMU_GROUP0_NR_COUNTERS          U(4)
+#define AMU_GROUP0_MAX_COUNTERS         U(16)
 
 #if ENABLE_AMU_AUXILIARY_COUNTERS
 #define AMU_GROUP1_COUNTERS_MASK        U(0)

@@ -63,10 +61,10 @@ CASSERT(AMU_GROUP1_COUNTERS_MASK <= 0xffff, invalid_amu_group1_counters_mask);
 #endif
 
 struct amu_ctx {
-        uint64_t group0_cnts[AMU_GROUP0_NR_COUNTERS];
+        uint64_t group0_cnts[AMU_GROUP0_MAX_COUNTERS];
 #if __aarch64__
         /* Architected event counter 1 does not have an offset register. */
-        uint64_t group0_voffsets[AMU_GROUP0_NR_COUNTERS-1];
+        uint64_t group0_voffsets[AMU_GROUP0_MAX_COUNTERS-1];
 #endif
 
 #if ENABLE_AMU_AUXILIARY_COUNTERS

@@ -43,6 +43,12 @@ static inline __unused uint32_t read_amcfgr_ncg(void)
                AMCFGR_NCG_MASK;
 }
 
+static inline __unused uint32_t read_amcgcr_cg0nc(void)
+{
+        return (read_amcgcr() >> AMCGCR_CG0NC_SHIFT) &
+                AMCGCR_CG0NC_MASK;
+}
+
 static inline __unused uint32_t read_amcgcr_cg1nc(void)
 {
         return (read_amcgcr() >> AMCGCR_CG1NC_SHIFT) &

@@ -163,7 +169,7 @@ void amu_enable(bool el2_unused)
         }
 
         /* Enable group 0 counters */
-        write_amcntenset0_px(AMU_GROUP0_COUNTERS_MASK);
+        write_amcntenset0_px((UINT32_C(1) << read_amcgcr_cg0nc()) - 1U);
 
 #if ENABLE_AMU_AUXILIARY_COUNTERS
         if (AMU_GROUP1_NR_COUNTERS > 0U) {

@@ -196,7 +202,7 @@ void amu_enable(bool el2_unused)
 static uint64_t amu_group0_cnt_read(unsigned int idx)
 {
         assert(amu_supported());
-        assert(idx < AMU_GROUP0_NR_COUNTERS);
+        assert(idx < read_amcgcr_cg0nc());
 
         return amu_group0_cnt_read_internal(idx);
 }

@@ -205,7 +211,7 @@ static uint64_t amu_group0_cnt_read(unsigned int idx)
 static void amu_group0_cnt_write(unsigned int idx, uint64_t val)
 {
         assert(amu_supported());
-        assert(idx < AMU_GROUP0_NR_COUNTERS);
+        assert(idx < read_amcgcr_cg0nc());
 
         amu_group0_cnt_write_internal(idx, val);
         isb();

@@ -252,7 +258,8 @@ static void *amu_context_save(const void *arg)
 #endif
 
         /* Assert that group 0/1 counter configuration is what we expect */
-        assert(read_amcntenset0_px() == AMU_GROUP0_COUNTERS_MASK);
+        assert(read_amcntenset0_px() ==
+               ((UINT32_C(1) << read_amcgcr_cg0nc()) - 1U));
 
 #if ENABLE_AMU_AUXILIARY_COUNTERS
         if (AMU_GROUP1_NR_COUNTERS > 0U) {

@@ -263,7 +270,7 @@ static void *amu_context_save(const void *arg)
          * Disable group 0/1 counters to avoid other observers like SCP sampling
          * counter values from the future via the memory mapped view.
          */
-        write_amcntenclr0_px(AMU_GROUP0_COUNTERS_MASK);
+        write_amcntenclr0_px((UINT32_C(1) << read_amcgcr_cg0nc()) - 1U);
 
 #if ENABLE_AMU_AUXILIARY_COUNTERS
         if (AMU_GROUP1_NR_COUNTERS > 0U) {

@@ -274,7 +281,7 @@ static void *amu_context_save(const void *arg)
         isb();
 
         /* Save all group 0 counters */
-        for (i = 0U; i < AMU_GROUP0_NR_COUNTERS; i++) {
+        for (i = 0U; i < read_amcgcr_cg0nc(); i++) {
                 ctx->group0_cnts[i] = amu_group0_cnt_read(i);
         }

@@ -319,12 +326,12 @@ static void *amu_context_restore(const void *arg)
 #endif
 
         /* Restore all group 0 counters */
-        for (i = 0U; i < AMU_GROUP0_NR_COUNTERS; i++) {
+        for (i = 0U; i < read_amcgcr_cg0nc(); i++) {
                 amu_group0_cnt_write(i, ctx->group0_cnts[i]);
         }
 
         /* Restore group 0 counter configuration */
-        write_amcntenset0_px(AMU_GROUP0_COUNTERS_MASK);
+        write_amcntenset0_px((UINT32_C(1) << read_amcgcr_cg0nc()) - 1U);
 
 #if ENABLE_AMU_AUXILIARY_COUNTERS
         if (AMU_GROUP1_NR_COUNTERS > 0U) {


@@ -66,6 +66,12 @@ static inline __unused uint64_t read_amcfgr_el0_ncg(void)
                AMCFGR_EL0_NCG_MASK;
 }
 
+static inline uint64_t read_amcgcr_el0_cg0nc(void)
+{
+        return (read_amcgcr_el0() >> AMCGCR_EL0_CG0NC_SHIFT) &
+                AMCGCR_EL0_CG0NC_MASK;
+}
+
 static inline __unused uint64_t read_amcg1idr_el0_voff(void)
 {
         return (read_amcg1idr_el0() >> AMCG1IDR_VOFF_SHIFT) &

@@ -197,7 +203,7 @@ void amu_enable(bool el2_unused, cpu_context_t *ctx)
         write_cptr_el3_tam(ctx, 0U);
 
         /* Enable group 0 counters */
-        write_amcntenset0_el0_px(AMU_GROUP0_COUNTERS_MASK);
+        write_amcntenset0_el0_px((UINT64_C(1) << read_amcgcr_el0_cg0nc()) - 1U);
 
 #if ENABLE_AMU_AUXILIARY_COUNTERS
         if (AMU_GROUP1_NR_COUNTERS > 0U) {

@@ -235,7 +241,7 @@ void amu_enable(bool el2_unused, cpu_context_t *ctx)
 static uint64_t amu_group0_cnt_read(unsigned int idx)
 {
         assert(amu_supported());
-        assert(idx < AMU_GROUP0_NR_COUNTERS);
+        assert(idx < read_amcgcr_el0_cg0nc());
 
         return amu_group0_cnt_read_internal(idx);
 }

@@ -244,7 +250,7 @@ static uint64_t amu_group0_cnt_read(unsigned int idx)
 static void amu_group0_cnt_write(unsigned int idx, uint64_t val)
 {
         assert(amu_supported());
-        assert(idx < AMU_GROUP0_NR_COUNTERS);
+        assert(idx < read_amcgcr_el0_cg0nc());
 
         amu_group0_cnt_write_internal(idx, val);
         isb();

@@ -259,7 +265,7 @@ static void amu_group0_cnt_write(unsigned int idx, uint64_t val)
 static uint64_t amu_group0_voffset_read(unsigned int idx)
 {
         assert(amu_v1p1_supported());
-        assert(idx < AMU_GROUP0_NR_COUNTERS);
+        assert(idx < read_amcgcr_el0_cg0nc());
         assert(idx != 1U);
 
         return amu_group0_voffset_read_internal(idx);

@@ -274,7 +280,7 @@ static uint64_t amu_group0_voffset_read(unsigned int idx)
 static void amu_group0_voffset_write(unsigned int idx, uint64_t val)
 {
         assert(amu_v1p1_supported());
-        assert(idx < AMU_GROUP0_NR_COUNTERS);
+        assert(idx < read_amcgcr_el0_cg0nc());
         assert(idx != 1U);
 
         amu_group0_voffset_write_internal(idx, val);

@@ -353,7 +359,8 @@ static void *amu_context_save(const void *arg)
 #endif
 
         /* Assert that group 0/1 counter configuration is what we expect */
-        assert(read_amcntenset0_el0_px() == AMU_GROUP0_COUNTERS_MASK);
+        assert(read_amcntenset0_el0_px() ==
+               ((UINT64_C(1) << read_amcgcr_el0_cg0nc()) - 1U));
 
 #if ENABLE_AMU_AUXILIARY_COUNTERS
         if (AMU_GROUP1_NR_COUNTERS > 0U) {

@@ -365,7 +372,7 @@ static void *amu_context_save(const void *arg)
          * Disable group 0/1 counters to avoid other observers like SCP sampling
          * counter values from the future via the memory mapped view.
          */
-        write_amcntenclr0_el0_px(AMU_GROUP0_COUNTERS_MASK);
+        write_amcntenclr0_el0_px((UINT64_C(1) << read_amcgcr_el0_cg0nc()) - 1U);
 
 #if ENABLE_AMU_AUXILIARY_COUNTERS
         if (AMU_GROUP1_NR_COUNTERS > 0U) {

@@ -376,7 +383,7 @@ static void *amu_context_save(const void *arg)
         isb();
 
         /* Save all group 0 counters */
-        for (i = 0U; i < AMU_GROUP0_NR_COUNTERS; i++) {
+        for (i = 0U; i < read_amcgcr_el0_cg0nc(); i++) {
                 ctx->group0_cnts[i] = amu_group0_cnt_read(i);
         }

@@ -442,7 +449,7 @@ static void *amu_context_restore(const void *arg)
 #endif
 
         /* Restore all group 0 counters */
-        for (i = 0U; i < AMU_GROUP0_NR_COUNTERS; i++) {
+        for (i = 0U; i < read_amcgcr_el0_cg0nc(); i++) {
                 amu_group0_cnt_write(i, ctx->group0_cnts[i]);
         }

@@ -455,7 +462,7 @@ static void *amu_context_restore(const void *arg)
         }
 
         /* Restore group 0 counter configuration */
-        write_amcntenset0_el0_px(AMU_GROUP0_COUNTERS_MASK);
+        write_amcntenset0_el0_px((UINT64_C(1) << read_amcgcr_el0_cg0nc()) - 1U);
 
 #if ENABLE_AMU_AUXILIARY_COUNTERS
         if (AMU_GROUP1_NR_COUNTERS > 0U) {