Merge "feat(allwinner): use reset through scpi for warm/soft reset" into integration
diff --git a/lib/cpus/aarch64/cortex_a520.S b/lib/cpus/aarch64/cortex_a520.S
index 5bbe862..6c2f33e 100644
--- a/lib/cpus/aarch64/cortex_a520.S
+++ b/lib/cpus/aarch64/cortex_a520.S
@@ -30,28 +30,17 @@
 	 * Enable CPU power down bit in power control register
 	 * ---------------------------------------------------
 	 */
-	mrs	x0, CORTEX_A520_CPUPWRCTLR_EL1
-	orr	x0, x0, #CORTEX_A520_CPUPWRCTLR_EL1_CORE_PWRDN_BIT
-	msr	CORTEX_A520_CPUPWRCTLR_EL1, x0
+	sysreg_bit_set CORTEX_A520_CPUPWRCTLR_EL1, CORTEX_A520_CPUPWRCTLR_EL1_CORE_PWRDN_BIT
 	isb
 	ret
 endfunc cortex_a520_core_pwr_dwn
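
The sysreg_bit_set helper above replaces the open-coded read-modify-write. A minimal sketch of what it expands to, assuming it clobbers x0 exactly like the removed sequence:

	mrs	x0, CORTEX_A520_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A520_CPUPWRCTLR_EL1_CORE_PWRDN_BIT	/* set the core power-down bit */
	msr	CORTEX_A520_CPUPWRCTLR_EL1, x0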
 
-	/*
-	 * Errata printing function for Cortex A520. Must follow AAPCS.
-	 */
-#if REPORT_ERRATA
-func cortex_a520_errata_report
-	ret
-endfunc cortex_a520_errata_report
-#endif
+errata_report_shim cortex_a520
 
-func cortex_a520_reset_func
+cpu_reset_func_start cortex_a520
 	/* Disable speculative loads */
 	msr	SSBS, xzr
-	isb
-	ret
-endfunc cortex_a520_reset_func
+cpu_reset_func_end cortex_a520
 
 	/* ---------------------------------------------
 	 * This function provides Cortex A520 specific
diff --git a/lib/cpus/aarch64/cortex_a72.S b/lib/cpus/aarch64/cortex_a72.S
index de2d36e..997f261 100644
--- a/lib/cpus/aarch64/cortex_a72.S
+++ b/lib/cpus/aarch64/cortex_a72.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2015-2022, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2015-2023, Arm Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -47,9 +47,7 @@
 	 * ---------------------------------------------
 	 */
 func cortex_a72_disable_hw_prefetcher
-	mrs	x0, CORTEX_A72_CPUACTLR_EL1
-	orr	x0, x0, #CORTEX_A72_CPUACTLR_EL1_DISABLE_L1_DCACHE_HW_PFTCH
-	msr	CORTEX_A72_CPUACTLR_EL1, x0
+	sysreg_bit_set CORTEX_A72_CPUACTLR_EL1, CORTEX_A72_CPUACTLR_EL1_DISABLE_L1_DCACHE_HW_PFTCH
 	isb
 	dsb	ish
 	ret
@@ -60,9 +58,7 @@
 	 * ---------------------------------------------
 	 */
 func cortex_a72_disable_smp
-	mrs	x0, CORTEX_A72_ECTLR_EL1
-	bic	x0, x0, #CORTEX_A72_ECTLR_SMP_BIT
-	msr	CORTEX_A72_ECTLR_EL1, x0
+	sysreg_bit_clear CORTEX_A72_ECTLR_EL1, CORTEX_A72_ECTLR_SMP_BIT
 	ret
 endfunc cortex_a72_disable_smp
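
sysreg_bit_clear is the bic counterpart of sysreg_bit_set; a sketch of its expansion for the SMP bit, mirroring the sequence it replaces:

	mrs	x0, CORTEX_A72_ECTLR_EL1
	bic	x0, x0, #CORTEX_A72_ECTLR_SMP_BIT	/* take the core out of coherency */
	msr	CORTEX_A72_ECTLR_EL1, x0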
 
@@ -78,139 +74,95 @@
 	ret
 endfunc cortex_a72_disable_ext_debug
 
-	/* --------------------------------------------------
-	 * Errata Workaround for Cortex A72 Errata #859971.
-	 * This applies only to revision <= r0p3 of Cortex A72.
-	 * Inputs:
-	 * x0: variant[4:7] and revision[0:3] of current cpu.
-	 * Shall clobber:
-	 * --------------------------------------------------
-	 */
-func errata_a72_859971_wa
-	mov	x17,x30
-	bl	check_errata_859971
-	cbz	x0, 1f
-	mrs	x1, CORTEX_A72_CPUACTLR_EL1
-	orr	x1, x1, #CORTEX_A72_CPUACTLR_EL1_DIS_INSTR_PREFETCH
-	msr	CORTEX_A72_CPUACTLR_EL1, x1
-1:
-	ret	x17
-endfunc errata_a72_859971_wa
-
-func check_errata_859971
-	mov	x1, #0x03
-	b	cpu_rev_var_ls
-endfunc check_errata_859971
-
-func check_errata_cve_2017_5715
+func check_smccc_arch_workaround_3
 	cpu_check_csv2	x0, 1f
-#if WORKAROUND_CVE_2017_5715
 	mov	x0, #ERRATA_APPLIES
-#else
-	mov	x0, #ERRATA_MISSING
-#endif
 	ret
 1:
 	mov	x0, #ERRATA_NOT_APPLIES
 	ret
-endfunc check_errata_cve_2017_5715
+endfunc check_smccc_arch_workaround_3
 
-func check_errata_cve_2018_3639
-#if WORKAROUND_CVE_2018_3639
-	mov	x0, #ERRATA_APPLIES
-#else
-	mov	x0, #ERRATA_MISSING
-#endif
-	ret
-endfunc check_errata_cve_2018_3639
+workaround_reset_start cortex_a72, ERRATUM(859971), ERRATA_A72_859971
+	sysreg_bit_set CORTEX_A72_CPUACTLR_EL1, CORTEX_A72_CPUACTLR_EL1_DIS_INSTR_PREFETCH
+workaround_reset_end cortex_a72, ERRATUM(859971)
 
-	/* --------------------------------------------------
-	 * Errata workaround for Cortex A72 Errata #1319367.
-	 * This applies to all revisions of Cortex A72.
-	 * --------------------------------------------------
-	 */
-func check_errata_1319367
-#if ERRATA_A72_1319367
-	mov	x0, #ERRATA_APPLIES
-#else
-	mov	x0, #ERRATA_MISSING
+check_erratum_ls cortex_a72, ERRATUM(859971), CPU_REV(0, 3)
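
workaround_reset_start/end register erratum 859971 in the per-CPU errata list and let the framework apply the body at reset, while check_erratum_ls generates the "revision less than or equal" check. A sketch of the hand-written equivalent they replace, taken from the code removed above (errata_a72_859971_wa and cpu_rev_var_ls are the pre-existing names):

func errata_a72_859971_wa
	mov	x17, x30
	bl	check_errata_859971
	cbz	x0, 1f
	sysreg_bit_set CORTEX_A72_CPUACTLR_EL1, CORTEX_A72_CPUACTLR_EL1_DIS_INSTR_PREFETCH
1:
	ret	x17
endfunc errata_a72_859971_wa

func check_errata_859971
	mov	x1, #0x03		/* CPU_REV(0, 3): applies to revisions <= r0p3 */
	b	cpu_rev_var_ls
endfunc check_errata_859971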
+
+/* Due to the nature of the erratum, it is applied unconditionally when chosen */
+check_erratum_chosen cortex_a72, ERRATUM(1319367), ERRATA_A72_1319367
+/* The erratum workaround is interleaved with generic code */
+add_erratum_entry cortex_a72, ERRATUM(1319367), ERRATA_A72_1319367, NO_APPLY_AT_RESET
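
check_erratum_chosen emits a check that reports the erratum purely from the build option, since the 1319367 workaround lives in generic code rather than in this file; add_erratum_entry with NO_APPLY_AT_RESET still records it so it appears in the errata report. A sketch of the equivalent hand-written check, mirroring the removed check_errata_1319367:

func check_errata_1319367
#if ERRATA_A72_1319367
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_1319367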
+
+workaround_reset_start cortex_a72, CVE(2017, 5715), WORKAROUND_CVE_2017_5715
+#if IMAGE_BL31
+	override_vector_table wa_cve_2017_5715_mmu_vbar
 #endif
-	ret
-endfunc check_errata_1319367
+workaround_reset_end cortex_a72, CVE(2017, 5715)
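
override_vector_table wraps the VBAR_EL3 switch that the old reset function open-coded. A minimal sketch of the expansion, assuming it only loads the table address and programs VBAR_EL3, with the isb left to the reset function epilogue:

	adr	x0, wa_cve_2017_5715_mmu_vbar
	msr	vbar_el3, x0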
 
-func check_errata_cve_2022_23960
-#if WORKAROUND_CVE_2022_23960
+check_erratum_custom_start cortex_a72, CVE(2017, 5715)
+	cpu_check_csv2	x0, 1f
+#if WORKAROUND_CVE_2017_5715
 	mov	x0, #ERRATA_APPLIES
 #else
 	mov	x0, #ERRATA_MISSING
 #endif
 	ret
-endfunc check_errata_cve_2022_23960
-
-func check_smccc_arch_workaround_3
-	cpu_check_csv2	x0, 1f
-	mov	x0, #ERRATA_APPLIES
-	ret
 1:
 	mov	x0, #ERRATA_NOT_APPLIES
 	ret
-endfunc check_smccc_arch_workaround_3
-
-	/* -------------------------------------------------
-	 * The CPU Ops reset function for Cortex-A72.
-	 * -------------------------------------------------
-	 */
-func cortex_a72_reset_func
-	mov	x19, x30
-	bl	cpu_get_rev_var
-	mov	x18, x0
-
-#if ERRATA_A72_859971
-	mov	x0, x18
-	bl	errata_a72_859971_wa
-#endif
+check_erratum_custom_end cortex_a72, CVE(2017, 5715)
 
-#if IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960)
-	cpu_check_csv2	x0, 1f
-	adr	x0, wa_cve_2017_5715_mmu_vbar
-	msr	vbar_el3, x0
-	/* isb will be performed before returning from this function */
+workaround_reset_start cortex_a72, CVE(2018, 3639), WORKAROUND_CVE_2018_3639
+	sysreg_bit_set CORTEX_A72_CPUACTLR_EL1, CORTEX_A72_CPUACTLR_EL1_DIS_LOAD_PASS_STORE
+	isb
+	dsb	sy
+workaround_reset_end cortex_a72, CVE(2018, 3639)
+check_erratum_chosen cortex_a72, CVE(2018, 3639), WORKAROUND_CVE_2018_3639
 
-	/* Skip CVE_2022_23960 mitigation if cve_2017_5715 mitigation applied */
-	b	2f
-1:
-#if WORKAROUND_CVE_2022_23960
+workaround_reset_start cortex_a72, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
+#if IMAGE_BL31
+	/* Skip installing vector table again if already done for CVE(2017, 5715) */
 	/*
 	 * The Cortex-A72 generic vectors are overridden to apply the
-         * mitigation on exception entry from lower ELs for revisions >= r1p0
+	 * mitigation on exception entry from lower ELs for revisions >= r1p0
 	 * which has CSV2 implemented.
 	 */
 	adr	x0, wa_cve_vbar_cortex_a72
+	mrs	x1, vbar_el3
+	cmp	x0, x1
+	b.eq	1f
 	msr	vbar_el3, x0
+1:
+#endif /* IMAGE_BL31 */
+workaround_reset_end cortex_a72, CVE(2022, 23960)
 
-	/* isb will be performed before returning from this function */
+check_erratum_custom_start cortex_a72, CVE(2022, 23960)
+#if WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960
+	cpu_check_csv2	x0, 1f
+	mov	x0, #ERRATA_APPLIES
+	ret
+1:
+#if WORKAROUND_CVE_2022_23960
+	mov	x0, #ERRATA_APPLIES
+#else
+	mov	x0, #ERRATA_MISSING
 #endif /* WORKAROUND_CVE_2022_23960 */
-2:
-#endif /* IMAGE_BL31 &&  (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960) */
+	ret
+#endif /* WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960 */
+	mov	x0, #ERRATA_MISSING
+	ret
+check_erratum_custom_end cortex_a72, CVE(2022, 23960)
 
-#if WORKAROUND_CVE_2018_3639
-	mrs	x0, CORTEX_A72_CPUACTLR_EL1
-	orr	x0, x0, #CORTEX_A72_CPUACTLR_EL1_DIS_LOAD_PASS_STORE
-	msr	CORTEX_A72_CPUACTLR_EL1, x0
-	isb
-	dsb	sy
-#endif
+cpu_reset_func_start cortex_a72
 
 	/* ---------------------------------------------
 	 * Enable the SMP bit.
 	 * ---------------------------------------------
 	 */
-	mrs	x0, CORTEX_A72_ECTLR_EL1
-	orr	x0, x0, #CORTEX_A72_ECTLR_SMP_BIT
-	msr	CORTEX_A72_ECTLR_EL1, x0
-	isb
-	ret x19
-endfunc cortex_a72_reset_func
+	sysreg_bit_set CORTEX_A72_ECTLR_EL1, CORTEX_A72_ECTLR_SMP_BIT
+
+cpu_reset_func_end cortex_a72
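
cpu_reset_func_start/end generate the reset function boilerplate: save the return address, fetch the revision/variant for the erratum checks, apply the reset-time workarounds registered with workaround_reset_start above, and finish with the isb/ret that used to be written by hand. A sketch of the removed skeleton this replaces (register choice as in the old code):

func cortex_a72_reset_func
	mov	x19, x30
	bl	cpu_get_rev_var
	mov	x18, x0
	/* ... reset-time erratum workarounds applied here ... */
	sysreg_bit_set CORTEX_A72_ECTLR_EL1, CORTEX_A72_ECTLR_SMP_BIT
	isb
	ret	x19
endfunc cortex_a72_reset_func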
 
 	/* ----------------------------------------------------
 	 * The CPU Ops core power down function for Cortex-A72.
@@ -319,30 +271,7 @@
 	b	cortex_a72_disable_ext_debug
 endfunc cortex_a72_cluster_pwr_dwn
 
-#if REPORT_ERRATA
-/*
- * Errata printing function for Cortex A72. Must follow AAPCS.
- */
-func cortex_a72_errata_report
-	stp	x8, x30, [sp, #-16]!
-
-	bl	cpu_get_rev_var
-	mov	x8, x0
-
-	/*
-	 * Report all errata. The revision-variant information is passed to
-	 * checking functions of each errata.
-	 */
-	report_errata ERRATA_A72_859971, cortex_a72, 859971
-	report_errata ERRATA_A72_1319367, cortex_a72, 1319367
-	report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715
-	report_errata WORKAROUND_CVE_2018_3639, cortex_a72, cve_2018_3639
-	report_errata WORKAROUND_CVE_2022_23960, cortex_a72, cve_2022_23960
-
-	ldp	x8, x30, [sp], #16
-	ret
-endfunc cortex_a72_errata_report
-#endif
+errata_report_shim cortex_a72
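
errata_report_shim keeps the per-CPU <cpu>_errata_report symbol that REPORT_ERRATA builds expect, but the errata themselves now come from the list entries emitted by workaround_reset_start/add_erratum_entry rather than hand-maintained report_errata lines. A sketch of what the shim is assumed to generate (the name of the framework's common reporter is an assumption here):

#if REPORT_ERRATA
func cortex_a72_errata_report
	b	generic_errata_report	/* assumed: framework-provided common reporter */
endfunc cortex_a72_errata_report
#endif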
 
 	/* ---------------------------------------------
 	 * This function provides cortex_a72 specific
@@ -367,7 +296,7 @@
 
 declare_cpu_ops_wa cortex_a72, CORTEX_A72_MIDR, \
 	cortex_a72_reset_func, \
-	check_errata_cve_2017_5715, \
+	check_erratum_cortex_a72_5715, \
 	CPU_NO_EXTRA2_FUNC, \
 	check_smccc_arch_workaround_3, \
 	cortex_a72_core_pwr_dwn, \
diff --git a/lib/cpus/aarch64/cortex_a720.S b/lib/cpus/aarch64/cortex_a720.S
index 529ab50..4b28fdb 100644
--- a/lib/cpus/aarch64/cortex_a720.S
+++ b/lib/cpus/aarch64/cortex_a720.S
@@ -26,31 +26,22 @@
         wa_cve_2022_23960_bhb_vector_table CORTEX_A720_BHB_LOOP_COUNT, cortex_a720
 #endif /* WORKAROUND_CVE_2022_23960 */
 
-func check_errata_cve_2022_23960
-#if WORKAROUND_CVE_2022_23960
-	mov	x0, #ERRATA_APPLIES
-#else
-	mov	x0, #ERRATA_MISSING
-#endif
-	ret
-endfunc check_errata_cve_2022_23960
-
-func cortex_a720_reset_func
-	/* Disable speculative loads */
-	msr	SSBS, xzr
-
-#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
+workaround_reset_start cortex_a720, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
+#if IMAGE_BL31
 	/*
 	 * The Cortex A720 generic vectors are overridden to apply errata
 	 * mitigation on exception entry from lower ELs.
 	 */
-	adr	x0, wa_cve_vbar_cortex_a720
-	msr	vbar_el3, x0
-#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
+	override_vector_table wa_cve_vbar_cortex_a720
+#endif /* IMAGE_BL31 */
+workaround_reset_end cortex_a720, CVE(2022, 23960)
 
-	isb
-	ret
-endfunc cortex_a720_reset_func
+check_erratum_chosen cortex_a720, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
+
+cpu_reset_func_start cortex_a720
+	/* Disable speculative loads */
+	msr	SSBS, xzr
+cpu_reset_func_end cortex_a720
 
 	/* ----------------------------------------------------
 	 * HW will do the cache maintenance while powering down
@@ -61,33 +52,13 @@
 	 * Enable CPU power down bit in power control register
 	 * ---------------------------------------------------
 	 */
-	mrs	x0, CORTEX_A720_CPUPWRCTLR_EL1
-	orr	x0, x0, #CORTEX_A720_CPUPWRCTLR_EL1_CORE_PWRDN_BIT
-	msr	CORTEX_A720_CPUPWRCTLR_EL1, x0
+	sysreg_bit_set CORTEX_A720_CPUPWRCTLR_EL1, CORTEX_A720_CPUPWRCTLR_EL1_CORE_PWRDN_BIT
+
 	isb
 	ret
 endfunc cortex_a720_core_pwr_dwn
 
-#if REPORT_ERRATA
-/*
- * Errata printing function for Cortex A720. Must follow AAPCS.
- */
-func cortex_a720_errata_report
-	stp	x8, x30, [sp, #-16]!
-
-	bl	cpu_get_rev_var
-	mov	x8, x0
-
-	/*
-	 * Report all errata. The revision-variant information is passed to
-	 * checking functions of each errata.
-	 */
-	report_errata WORKAROUND_CVE_2022_23960, cortex_a720, cve_2022_23960
-
-	ldp	x8, x30, [sp], #16
-	ret
-endfunc cortex_a720_errata_report
-#endif
+errata_report_shim cortex_a720
 
 	/* ---------------------------------------------
 	 * This function provides Cortex A720-specific
diff --git a/lib/cpus/aarch64/cortex_blackhawk.S b/lib/cpus/aarch64/cortex_blackhawk.S
index 8dac4e9..b7b7a2d 100644
--- a/lib/cpus/aarch64/cortex_blackhawk.S
+++ b/lib/cpus/aarch64/cortex_blackhawk.S
@@ -21,12 +21,10 @@
 #error "Cortex blackhawk supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
 #endif
 
-func cortex_blackhawk_reset_func
+cpu_reset_func_start cortex_blackhawk
 	/* Disable speculative loads */
 	msr	SSBS, xzr
-	isb
-	ret
-endfunc cortex_blackhawk_reset_func
+cpu_reset_func_end cortex_blackhawk
 
 	/* ----------------------------------------------------
 	 * HW will do the cache maintenance while powering down
@@ -37,21 +35,12 @@
 	 * Enable CPU power down bit in power control register
 	 * ---------------------------------------------------
 	 */
-	mrs	x0, CORTEX_BLACKHAWK_CPUPWRCTLR_EL1
-	orr	x0, x0, #CORTEX_BLACKHAWK_CPUPWRCTLR_EL1_CORE_PWRDN_BIT
-	msr	CORTEX_BLACKHAWK_CPUPWRCTLR_EL1, x0
+	sysreg_bit_set CORTEX_BLACKHAWK_CPUPWRCTLR_EL1, CORTEX_BLACKHAWK_CPUPWRCTLR_EL1_CORE_PWRDN_BIT
 	isb
 	ret
 endfunc cortex_blackhawk_core_pwr_dwn
 
-#if REPORT_ERRATA
-/*
- * Errata printing function for Cortex Blackhawk. Must follow AAPCS.
- */
-func cortex_blackhawk_errata_report
-	ret
-endfunc cortex_blackhawk_errata_report
-#endif
+errata_report_shim cortex_blackhawk
 
 	/* ---------------------------------------------
 	 * This function provides Cortex Blackhawk specific
diff --git a/lib/cpus/aarch64/cortex_chaberton.S b/lib/cpus/aarch64/cortex_chaberton.S
index 2c47bd3..596fe4a 100644
--- a/lib/cpus/aarch64/cortex_chaberton.S
+++ b/lib/cpus/aarch64/cortex_chaberton.S
@@ -21,12 +21,10 @@
 #error "Cortex Chaberton supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
 #endif
 
-func cortex_chaberton_reset_func
+cpu_reset_func_start cortex_chaberton
 	/* Disable speculative loads */
 	msr	SSBS, xzr
-	isb
-	ret
-endfunc cortex_chaberton_reset_func
+cpu_reset_func_end cortex_chaberton
 
 	/* ----------------------------------------------------
 	 * HW will do the cache maintenance while powering down
@@ -37,21 +35,12 @@
 	 * Enable CPU power down bit in power control register
 	 * ---------------------------------------------------
 	 */
-	mrs	x0, CORTEX_CHABERTON_CPUPWRCTLR_EL1
-	orr	x0, x0, #CORTEX_CHABERTON_CPUPWRCTLR_EL1_CORE_PWRDN_BIT
-	msr	CORTEX_CHABERTON_CPUPWRCTLR_EL1, x0
+	sysreg_bit_set CORTEX_CHABERTON_CPUPWRCTLR_EL1, CORTEX_CHABERTON_CPUPWRCTLR_EL1_CORE_PWRDN_BIT
 	isb
 	ret
 endfunc cortex_chaberton_core_pwr_dwn
 
-#if REPORT_ERRATA
-/*
- * Errata printing function for Cortex Chaberton. Must follow AAPCS.
- */
-func cortex_chaberton_errata_report
-	ret
-endfunc cortex_chaberton_errata_report
-#endif
+errata_report_shim cortex_chaberton
 
 	/* ---------------------------------------------
 	 * This function provides Cortex Chaberton specific
diff --git a/lib/cpus/aarch64/cortex_x4.S b/lib/cpus/aarch64/cortex_x4.S
index db87008..7619f9c 100644
--- a/lib/cpus/aarch64/cortex_x4.S
+++ b/lib/cpus/aarch64/cortex_x4.S
@@ -26,31 +26,22 @@
         wa_cve_2022_23960_bhb_vector_table CORTEX_X4_BHB_LOOP_COUNT, cortex_x4
 #endif /* WORKAROUND_CVE_2022_23960 */
 
-func check_errata_cve_2022_23960
-#if WORKAROUND_CVE_2022_23960
-	mov	x0, #ERRATA_APPLIES
-#else
-	mov	x0, #ERRATA_MISSING
-#endif
-	ret
-endfunc check_errata_cve_2022_23960
-
-func cortex_x4_reset_func
-	/* Disable speculative loads */
-	msr	SSBS, xzr
-
-#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
+workaround_reset_start cortex_x4, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
+#if IMAGE_BL31
 	/*
 	 * The Cortex X4 generic vectors are overridden to apply errata
 	 * mitigation on exception entry from lower ELs.
 	 */
-	adr	x0, wa_cve_vbar_cortex_x4
-	msr	vbar_el3, x0
-#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
+	override_vector_table wa_cve_vbar_cortex_x4
+#endif /* IMAGE_BL31 */
+workaround_reset_end cortex_x4, CVE(2022, 23960)
 
-	isb
-	ret
-endfunc cortex_x4_reset_func
+check_erratum_chosen cortex_x4, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
+
+cpu_reset_func_start cortex_x4
+	/* Disable speculative loads */
+	msr	SSBS, xzr
+cpu_reset_func_end cortex_x4
 
 	/* ----------------------------------------------------
 	 * HW will do the cache maintenance while powering down
@@ -61,33 +52,12 @@
 	 * Enable CPU power down bit in power control register
 	 * ---------------------------------------------------
 	 */
-	mrs	x0, CORTEX_X4_CPUPWRCTLR_EL1
-	orr	x0, x0, #CORTEX_X4_CPUPWRCTLR_EL1_CORE_PWRDN_BIT
-	msr	CORTEX_X4_CPUPWRCTLR_EL1, x0
+	sysreg_bit_set CORTEX_X4_CPUPWRCTLR_EL1, CORTEX_X4_CPUPWRCTLR_EL1_CORE_PWRDN_BIT
 	isb
 	ret
 endfunc cortex_x4_core_pwr_dwn
 
-#if REPORT_ERRATA
-/*
- * Errata printing function for Cortex X4. Must follow AAPCS.
- */
-func cortex_x4_errata_report
-	stp	x8, x30, [sp, #-16]!
-
-	bl	cpu_get_rev_var
-	mov	x8, x0
-
-	/*
-	 * Report all errata. The revision-variant information is passed to
-	 * checking functions of each errata.
-	 */
-	report_errata WORKAROUND_CVE_2022_23960, cortex_x4, cve_2022_23960
-
-	ldp	x8, x30, [sp], #16
-	ret
-endfunc cortex_x4_errata_report
-#endif
+errata_report_shim cortex_x4
 
 	/* ---------------------------------------------
 	 * This function provides Cortex X4-specific