perf(amu): greatly simplify AMU context management

The current code is incredibly resilient to updates to the spec and has
worked quite well so far. However, recent implementations expose a
weakness: it is rather slow. A large part of it is written in assembly,
making it opaque to the compiler and preventing optimisation. The
future-proofing requires reading registers that are effectively
`volatile`, which makes things even harder for the compiler and also
adds lots of implicit barriers, making it hard for the microarchitecture
to optimise as well.

We can make a few assumptions, checked by a few well-placed asserts, and
remove a lot of this burden. For a start, at the moment there are 4
group 0 counters with static assignments. Contexting them is a trivial
affair that doesn't need a loop. Similarly, there can only be up to 16
group 1 counters. Contexting them is a bit harder, but we can make do
with a single branch into a fall-through switch (sketched below).
If/when either of these changes, we have a pair of asserts and the
feature detection mechanism to guard us against pretending that we
support something we don't.

We can drop contexting of the offset registers. They are fully
accessible by EL2, so it is EL2's responsibility to preserve them across
powerdown.

Another small thing we can do is pass core_pos into the hook. The caller
already knows which core we're running on, so we don't need to call this
non-trivial function again.
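
A sketch of what the hook ends up looking like; the name is illustrative
and may not match the patch exactly:

  /*
   * The caller already holds the core position (typically obtained once
   * via plat_my_core_pos()), so the hook takes it as a parameter instead
   * of recomputing it.
   */
  void amu_context_save(unsigned int core_pos);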

Finally, knowing this, we don't really need the auxiliary AMUs to be
described by the device tree. Linux doesn't care about them at the
moment, and any information EL3 needs can be neatly placed in a simple
array (sketched below).
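
A sketch of what such an array could look like, assuming per-core
storage sized by the standard PLATFORM_CORE_COUNT macro; the names and
the bitmask layout are illustrative, not taken from the patch:

  #include <stdint.h>

  #include <platform_def.h>   /* PLATFORM_CORE_COUNT */

  /* Auxiliary (group 1) counters EL3 should enable and context, per core. */
  static uint16_t amu_aux_enables[PLATFORM_CORE_COUNT];

  void amu_aux_set_enables(unsigned int core_pos, uint16_t mask)
  {
          amu_aux_enables[core_pos] = mask;
  }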

All of this, combined with lifting the actual saving out of assembly,
reduces the instruction count for saving the context from 180 to 40,
with far fewer branches. The code is also much shorter and easier to
read.

Also propagate the changes to aarch32 so that the two implementations
don't diverge too much.

Change-Id: Ib62e6e9ba5be7fb9fb8965c8eee148d5598a5361
Signed-off-by: Boyan Karatotev <boyan.karatotev@arm.com>
diff --git a/include/arch/aarch32/arch.h b/include/arch/aarch32/arch.h
index d2591dd..41be1a1 100644
--- a/include/arch/aarch32/arch.h
+++ b/include/arch/aarch32/arch.h
@@ -761,7 +761,7 @@
 
 /* AMCNTENSET0 definitions */
 #define AMCNTENSET0_Pn_SHIFT	U(0)
-#define AMCNTENSET0_Pn_MASK	U(0xffff)
+#define AMCNTENSET0_Pn_MASK	U(0xf)
 
 /* AMCNTENSET1 definitions */
 #define AMCNTENSET1_Pn_SHIFT	U(0)
@@ -769,7 +769,7 @@
 
 /* AMCNTENCLR0 definitions */
 #define AMCNTENCLR0_Pn_SHIFT	U(0)
-#define AMCNTENCLR0_Pn_MASK	U(0xffff)
+#define AMCNTENCLR0_Pn_MASK	U(0xf)
 
 /* AMCNTENCLR1 definitions */
 #define AMCNTENCLR1_Pn_SHIFT	U(0)
diff --git a/include/arch/aarch32/arch_features.h b/include/arch/aarch32/arch_features.h
index e347240..e80faf2 100644
--- a/include/arch/aarch32/arch_features.h
+++ b/include/arch/aarch32/arch_features.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2019-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2019-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -85,6 +85,10 @@
 CREATE_FEATURE_FUNCS(feat_amu, id_pfr0, ID_PFR0_AMU_SHIFT,
 		    ID_PFR0_AMU_MASK, ID_PFR0_AMU_V1, ENABLE_FEAT_AMU)
 
+/* Auxiliary counters for FEAT_AMU */
+CREATE_FEATURE_FUNCS(feat_amu_aux, amcfgr, AMCFGR_NCG_SHIFT,
+		    AMCFGR_NCG_MASK, 1U, ENABLE_AMU_AUXILIARY_COUNTERS)
+
 /* FEAT_AMUV1P1: AMU Extension v1.1 */
 CREATE_FEATURE_FUNCS(feat_amuv1p1, id_pfr0, ID_PFR0_AMU_SHIFT,
 		    ID_PFR0_AMU_MASK, ID_PFR0_AMU_V1P1, ENABLE_FEAT_AMUv1p1)
diff --git a/include/arch/aarch32/arch_helpers.h b/include/arch/aarch32/arch_helpers.h
index adc96ae..4f80e3a 100644
--- a/include/arch/aarch32/arch_helpers.h
+++ b/include/arch/aarch32/arch_helpers.h
@@ -324,6 +324,23 @@
 DEFINE_COPROCR_RW_FUNCS_64(amevcntr02, AMEVCNTR02)
 DEFINE_COPROCR_RW_FUNCS_64(amevcntr03, AMEVCNTR03)
 
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr10, AMEVCNTR10);
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr11, AMEVCNTR11);
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr12, AMEVCNTR12);
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr13, AMEVCNTR13);
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr14, AMEVCNTR14);
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr15, AMEVCNTR15);
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr16, AMEVCNTR16);
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr17, AMEVCNTR17);
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr18, AMEVCNTR18);
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr19, AMEVCNTR19);
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr1a, AMEVCNTR1A);
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr1b, AMEVCNTR1B);
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr1c, AMEVCNTR1C);
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr1d, AMEVCNTR1D);
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr1e, AMEVCNTR1E);
+DEFINE_COPROCR_RW_FUNCS_64(amevcntr1f, AMEVCNTR1F);
+
 /*
  * TLBI operation prototypes
  */
diff --git a/include/arch/aarch64/arch_features.h b/include/arch/aarch64/arch_features.h
index 1d0a2e0..a580213 100644
--- a/include/arch/aarch64/arch_features.h
+++ b/include/arch/aarch64/arch_features.h
@@ -311,6 +311,10 @@
 CREATE_FEATURE_FUNCS(feat_amu, id_aa64pfr0_el1, ID_AA64PFR0_AMU_SHIFT,
 		     ID_AA64PFR0_AMU_MASK, 1U, ENABLE_FEAT_AMU)
 
+/* Auxiliary counters for FEAT_AMU */
+CREATE_FEATURE_FUNCS(feat_amu_aux, amcfgr_el0, AMCFGR_EL0_NCG_SHIFT,
+		     AMCFGR_EL0_NCG_MASK, 1U, ENABLE_AMU_AUXILIARY_COUNTERS)
+
 /* FEAT_AMUV1P1: AMU Extension v1.1 */
 CREATE_FEATURE_FUNCS(feat_amuv1p1, id_aa64pfr0_el1, ID_AA64PFR0_AMU_SHIFT,
 		     ID_AA64PFR0_AMU_MASK, ID_AA64PFR0_AMU_V1P1, ENABLE_FEAT_AMUv1p1)
diff --git a/include/arch/aarch64/arch_helpers.h b/include/arch/aarch64/arch_helpers.h
index abe379d..f85da97 100644
--- a/include/arch/aarch64/arch_helpers.h
+++ b/include/arch/aarch64/arch_helpers.h
@@ -564,6 +564,27 @@
 DEFINE_RENAME_SYSREG_RW_FUNCS(amcntenset0_el0, AMCNTENSET0_EL0)
 DEFINE_RENAME_SYSREG_RW_FUNCS(amcntenclr1_el0, AMCNTENCLR1_EL0)
 DEFINE_RENAME_SYSREG_RW_FUNCS(amcntenset1_el0, AMCNTENSET1_EL0)
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr00_el0, AMEVCNTR00_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr01_el0, AMEVCNTR01_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr02_el0, AMEVCNTR02_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr03_el0, AMEVCNTR03_EL0);
+
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr10_el0, AMEVCNTR10_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr11_el0, AMEVCNTR11_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr12_el0, AMEVCNTR12_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr13_el0, AMEVCNTR13_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr14_el0, AMEVCNTR14_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr15_el0, AMEVCNTR15_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr16_el0, AMEVCNTR16_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr17_el0, AMEVCNTR17_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr18_el0, AMEVCNTR18_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr19_el0, AMEVCNTR19_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr1a_el0, AMEVCNTR1A_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr1b_el0, AMEVCNTR1B_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr1c_el0, AMEVCNTR1C_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr1d_el0, AMEVCNTR1D_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr1e_el0, AMEVCNTR1E_EL0);
+DEFINE_RENAME_SYSREG_RW_FUNCS(amevcntr1f_el0, AMEVCNTR1F_EL0);
 
 DEFINE_RENAME_SYSREG_RW_FUNCS(pmblimitr_el1, PMBLIMITR_EL1)