refactor(cpufeat): force inlining of feature detection helpers

Force inlining of the feature detection helpers so that context
save/restore operations are flattened into sequences of ID register
reads and conditional system register reads/writes. This replaces the
current behaviour where, at the -Os optimization level, the feature
detection helpers are emitted out of line and reached through
branch-link/ret sequences.

Signed-off-by: Olivier Deprez <olivier.deprez@arm.com>
Change-Id: I2633442fb0e69e4a4ed13467e65846fb66d214f6
diff --git a/include/arch/aarch32/arch_features.h b/include/arch/aarch32/arch_features.h
index 8e39529..abe34a4 100644
--- a/include/arch/aarch32/arch_features.h
+++ b/include/arch/aarch32/arch_features.h
@@ -16,6 +16,7 @@
 	((unsigned int)(((reg) >> (feat)) & mask))
 
 #define CREATE_FEATURE_SUPPORTED(name, read_func, guard)			\
+__attribute__((always_inline))							\
 static inline bool is_ ## name ## _supported(void)				\
 {										\
 	if ((guard) == FEAT_STATE_DISABLED) {					\
@@ -28,6 +29,7 @@
 }
 
 #define CREATE_FEATURE_PRESENT(name, idreg, idfield, mask, idval)		\
+__attribute__((always_inline))							\
 static inline bool is_ ## name ## _present(void)				\
 {										\
 	return (ISOLATE_FIELD(read_ ## idreg(), idfield, mask) >= idval) 	\
@@ -68,6 +70,7 @@
  */
 
 /* GENTIMER */
+__attribute__((always_inline))
 static inline bool is_armv7_gentimer_present(void)
 {
 	return ISOLATE_FIELD(read_id_pfr1(), ID_PFR1_GENTIMER_SHIFT,
@@ -111,6 +114,7 @@
 		      ID_DFR0_PERFMON_MASK, 3U)
 
 /* FEAT_MTPMU */
+__attribute__((always_inline))
 static inline bool is_feat_mtpmu_present(void)
 {
 	unsigned int mtpmu = ISOLATE_FIELD(read_id_dfr1(), ID_DFR1_MTPMU_SHIFT,
@@ -124,39 +128,71 @@
  * code. In fact, EL2 context switching is only needed for AArch64 (since
  * there is no secure AArch32 EL2), so just disable these features here.
  */
+__attribute__((always_inline))
 static inline bool is_feat_twed_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_ecv_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_ecv_v2_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_csv2_2_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_csv2_3_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_ras_supported(void) { return false; }
 
 /* The following features are supported in AArch64 only. */
+__attribute__((always_inline))
 static inline bool is_feat_vhe_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_sel2_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_fgt_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_tcr2_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_spe_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_rng_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_gcs_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_mte2_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_mpam_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_hcx_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_sve_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_brbe_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_trbe_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_nv2_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_sme_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_sme2_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_s2poe_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_s1poe_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_sxpoe_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_s2pie_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_s1pie_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_sxpie_supported(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_uao_present(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_nmi_present(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_ebep_present(void) { return false; }
+__attribute__((always_inline))
 static inline bool is_feat_sebep_present(void) { return false; }
 
 #endif /* ARCH_FEATURES_H */
diff --git a/include/arch/aarch64/arch_features.h b/include/arch/aarch64/arch_features.h
index ddc1c80..1270be3 100644
--- a/include/arch/aarch64/arch_features.h
+++ b/include/arch/aarch64/arch_features.h
@@ -16,6 +16,7 @@
 	((unsigned int)(((reg) >> (feat)) & mask))
 
 #define CREATE_FEATURE_SUPPORTED(name, read_func, guard)			\
+__attribute__((always_inline))							\
 static inline bool is_ ## name ## _supported(void)				\
 {										\
 	if ((guard) == FEAT_STATE_DISABLED) {					\
@@ -28,6 +29,7 @@
 }
 
 #define CREATE_FEATURE_PRESENT(name, idreg, idfield, mask, idval)		\
+__attribute__((always_inline))							\
 static inline bool is_ ## name ## _present(void)				\
 {										\
 	return (ISOLATE_FIELD(read_ ## idreg(), idfield, mask) >= idval) 	\
@@ -132,6 +134,7 @@
  * +----------------------------+
  */
 
+__attribute__((always_inline))
 static inline bool is_armv7_gentimer_present(void)
 {
 	/* The Generic Timer is always present in an ARMv8-A implementation */
@@ -160,6 +163,7 @@
 			(ID_AA64ISAR2_APA3_MASK << ID_AA64ISAR2_APA3_SHIFT)), 1U)
 
 /* PAUTH */
+__attribute__((always_inline))
 static inline bool is_armv8_3_pauth_present(void)
 {
 	uint64_t mask_id_aa64isar1 =
@@ -238,6 +242,7 @@
 CREATE_FEATURE_FUNCS(feat_s1poe, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S1POE_SHIFT,
 		     ID_AA64MMFR3_EL1_S1POE_MASK, 1U, ENABLE_FEAT_S1POE)
 
+__attribute__((always_inline))
 static inline bool is_feat_sxpoe_supported(void)
 {
 	return is_feat_s1poe_supported() || is_feat_s2poe_supported();
@@ -251,6 +256,7 @@
 CREATE_FEATURE_FUNCS(feat_s1pie, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S1PIE_SHIFT,
 		     ID_AA64MMFR3_EL1_S1PIE_MASK, 1U, ENABLE_FEAT_S1PIE)
 
+__attribute__((always_inline))
 static inline bool is_feat_sxpie_supported(void)
 {
 	return is_feat_s1pie_supported() || is_feat_s2pie_supported();
@@ -277,6 +283,7 @@
  * 0x11: v1.1 Armv8.4 or later
  *
  */
+__attribute__((always_inline))
 static inline bool is_feat_mpam_present(void)
 {
 	unsigned int ret = (unsigned int)((((read_id_aa64pfr0_el1() >>
@@ -375,6 +382,7 @@
  * Function to get hardware granularity support
  ******************************************************************************/
 
+__attribute__((always_inline))
 static inline bool is_feat_tgran4K_present(void)
 {
 	unsigned int tgranx = ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
@@ -385,6 +393,7 @@
 CREATE_FEATURE_PRESENT(feat_tgran16K, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_TGRAN16_SHIFT,
 		       ID_AA64MMFR0_EL1_TGRAN16_MASK, TGRAN16_IMPLEMENTED)
 
+__attribute__((always_inline))
 static inline bool is_feat_tgran64K_present(void)
 {
 	unsigned int tgranx = ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
@@ -397,6 +406,7 @@
 		      ID_AA64DFR0_PMUVER_MASK, 1U)
 
 /* FEAT_MTPMU */
+__attribute__((always_inline))
 static inline bool is_feat_mtpmu_present(void)
 {
 	unsigned int mtpmu = ISOLATE_FIELD(read_id_aa64dfr0_el1(), ID_AA64DFR0_MTPMU_SHIFT,