Merge changes from topic "spectre_bhb" into integration

* changes:
  fix(security): apply SMCCC_ARCH_WORKAROUND_3 to A73/A75/A72/A57
  fix(security): workaround for CVE-2022-23960 for Cortex-A57, Cortex-A72
  fix(fvp): disable reclaiming init code by default
diff --git a/include/lib/cpus/aarch64/cortex_a72.h b/include/lib/cpus/aarch64/cortex_a72.h
index 28b440e..1777645 100644
--- a/include/lib/cpus/aarch64/cortex_a72.h
+++ b/include/lib/cpus/aarch64/cortex_a72.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2015-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2015-2022, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -12,6 +12,9 @@
 /* Cortex-A72 midr for revision 0 */
 #define CORTEX_A72_MIDR 				U(0x410FD080)
 
+/* Cortex-A72 loop count for CVE-2022-23960 mitigation */
+#define CORTEX_A72_BHB_LOOP_COUNT			U(8)
+
 /*******************************************************************************
  * CPU Extended Control register specific definitions.
  ******************************************************************************/
diff --git a/include/lib/cpus/aarch64/cpu_macros.S b/include/lib/cpus/aarch64/cpu_macros.S
index 92891ce..92e65ae 100644
--- a/include/lib/cpus/aarch64/cpu_macros.S
+++ b/include/lib/cpus/aarch64/cpu_macros.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2014-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2014-2022, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -21,6 +21,7 @@
 
 #define CPU_NO_EXTRA1_FUNC		0
 #define CPU_NO_EXTRA2_FUNC		0
+#define CPU_NO_EXTRA3_FUNC		0
 
 /* Word size for 64-bit CPUs */
 #define CPU_WORD_SIZE			8
@@ -39,6 +40,7 @@
 	.equ	CPU_MIDR_SIZE, CPU_WORD_SIZE
 	.equ	CPU_EXTRA1_FUNC_SIZE, CPU_WORD_SIZE
 	.equ	CPU_EXTRA2_FUNC_SIZE, CPU_WORD_SIZE
+	.equ	CPU_EXTRA3_FUNC_SIZE, CPU_WORD_SIZE
 	.equ	CPU_E_HANDLER_FUNC_SIZE, CPU_WORD_SIZE
 	.equ	CPU_RESET_FUNC_SIZE, CPU_WORD_SIZE
 	.equ	CPU_PWR_DWN_OPS_SIZE, CPU_WORD_SIZE * CPU_MAX_PWR_DWN_OPS
@@ -80,7 +82,8 @@
 	.equ	CPU_RESET_FUNC, CPU_MIDR + CPU_MIDR_SIZE
 	.equ	CPU_EXTRA1_FUNC, CPU_RESET_FUNC + CPU_RESET_FUNC_SIZE
 	.equ	CPU_EXTRA2_FUNC, CPU_EXTRA1_FUNC + CPU_EXTRA1_FUNC_SIZE
-	.equ	CPU_E_HANDLER_FUNC, CPU_EXTRA2_FUNC + CPU_EXTRA2_FUNC_SIZE
+	.equ	CPU_EXTRA3_FUNC, CPU_EXTRA2_FUNC + CPU_EXTRA2_FUNC_SIZE
+	.equ	CPU_E_HANDLER_FUNC, CPU_EXTRA3_FUNC + CPU_EXTRA3_FUNC_SIZE
 	.equ	CPU_PWR_DWN_OPS, CPU_E_HANDLER_FUNC + CPU_E_HANDLER_FUNC_SIZE
 	.equ	CPU_ERRATA_FUNC, CPU_PWR_DWN_OPS + CPU_PWR_DWN_OPS_SIZE
 	.equ	CPU_ERRATA_LOCK, CPU_ERRATA_FUNC + CPU_ERRATA_FUNC_SIZE
@@ -134,9 +137,13 @@
 	 *	some CPUs use this entry to set a test function to determine if
 	 *	the workaround for CVE-2017-5715 needs to be applied or not.
 	 * _extra2:
-	 *	This is a placeholder for future per CPU operations.  Currently
+	 *	This is a placeholder for future per CPU operations. Currently
 	 *	some CPUs use this entry to set a function to disable the
 	 *	workaround for CVE-2018-3639.
+	 * _extra3:
+	 *	This is a placeholder for future per CPU operations. Currently,
+	 *	some CPUs use this entry to set a test function to determine if
+	 *	the workaround for CVE-2022-23960 needs to be applied or not.
 	 * _e_handler:
 	 *	This is a placeholder for future per CPU exception handlers.
 	 * _power_down_ops:
@@ -149,7 +156,7 @@
 	 *	used to handle power down at subsequent levels
 	 */
 	.macro declare_cpu_ops_base _name:req, _midr:req, _resetfunc:req, \
-		_extra1:req, _extra2:req, _e_handler:req, _power_down_ops:vararg
+		_extra1:req, _extra2:req, _extra3:req, _e_handler:req, _power_down_ops:vararg
 	.section cpu_ops, "a"
 	.align 3
 	.type cpu_ops_\_name, %object
@@ -159,6 +166,7 @@
 #endif
 	.quad \_extra1
 	.quad \_extra2
+	.quad \_extra3
 	.quad \_e_handler
 #ifdef IMAGE_BL31
 	/* Insert list of functions */
@@ -204,21 +212,21 @@
 
 	.macro declare_cpu_ops _name:req, _midr:req, _resetfunc:req, \
 		_power_down_ops:vararg
-		declare_cpu_ops_base \_name, \_midr, \_resetfunc, 0, 0, 0, \
+		declare_cpu_ops_base \_name, \_midr, \_resetfunc, 0, 0, 0, 0, \
 			\_power_down_ops
 	.endm
 
 	.macro declare_cpu_ops_eh _name:req, _midr:req, _resetfunc:req, \
 		_e_handler:req, _power_down_ops:vararg
 		declare_cpu_ops_base \_name, \_midr, \_resetfunc, \
-			0, 0, \_e_handler, \_power_down_ops
+			0, 0, 0, \_e_handler, \_power_down_ops
 	.endm
 
 	.macro declare_cpu_ops_wa _name:req, _midr:req, \
 		_resetfunc:req, _extra1:req, _extra2:req, \
-		_power_down_ops:vararg
+		_extra3:req, _power_down_ops:vararg
 		declare_cpu_ops_base \_name, \_midr, \_resetfunc, \
-			\_extra1, \_extra2, 0, \_power_down_ops
+			\_extra1, \_extra2, \_extra3, 0, \_power_down_ops
 	.endm
 
 #if REPORT_ERRATA
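
The new _extra3 slot simply appends one more function pointer to the per-CPU
operations descriptor laid out by the .equ offsets above. As a reading aid,
here is a rough C mirror of that layout; it is a sketch only, the real
structure exists purely as these assembly offsets, the field names are
illustrative, and the exact set of fields varies with IMAGE_BL31 and
REPORT_ERRATA.

    struct cpu_ops_sketch {
        unsigned long midr;           /* CPU_MIDR */
        void (*reset_func)(void);     /* CPU_RESET_FUNC, reset images only */
        int (*extra1)(void);          /* CVE-2017-5715 check */
        int (*extra2)(void);          /* CVE-2018-3639 disable hook */
        int (*extra3)(void);          /* new: CVE-2022-23960 check */
        void (*e_handler)(long);      /* per-CPU exception handler */
        void (*pwr_dwn_ops[2])(void); /* up to CPU_MAX_PWR_DWN_OPS, BL31 */
        /* errata reporting fields follow when REPORT_ERRATA=1 */
    };
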
diff --git a/include/lib/cpus/wa_cve_2022_23960.h b/include/lib/cpus/wa_cve_2022_23960.h
new file mode 100644
index 0000000..35b3fd8
--- /dev/null
+++ b/include/lib/cpus/wa_cve_2022_23960.h
@@ -0,0 +1,12 @@
+/*
+ * Copyright (c) 2022, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#ifndef WA_CVE_2022_23960_H
+#define WA_CVE_2022_23960_H
+
+int check_smccc_arch_wa3_applies(void);
+
+#endif /* WA_CVE_2022_23960_H */
diff --git a/include/services/arm_arch_svc.h b/include/services/arm_arch_svc.h
index 5bbd8bb..645b388 100644
--- a/include/services/arm_arch_svc.h
+++ b/include/services/arm_arch_svc.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2018-2020, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -12,6 +12,7 @@
 #define SMCCC_ARCH_SOC_ID		U(0x80000002)
 #define SMCCC_ARCH_WORKAROUND_1		U(0x80008000)
 #define SMCCC_ARCH_WORKAROUND_2		U(0x80007FFF)
+#define SMCCC_ARCH_WORKAROUND_3		U(0x80003FFF)
 
 #define SMCCC_GET_SOC_VERSION		U(0)
 #define SMCCC_GET_SOC_REVISION		U(1)
diff --git a/lib/cpus/aarch64/cortex_a57.S b/lib/cpus/aarch64/cortex_a57.S
index 8ef0f92..3766ec7 100644
--- a/lib/cpus/aarch64/cortex_a57.S
+++ b/lib/cpus/aarch64/cortex_a57.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2014-2020, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2014-2022, ARM Limited and Contributors. All rights reserved.
  * Copyright (c) 2020, NVIDIA Corporation. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
@@ -470,7 +470,12 @@
 	bl	errata_a57_859972_wa
 #endif
 
-#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
+#if IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960)
+	/* ---------------------------------------------------------------
+	 * Override vector table & enable existing workaround if either of
+	 * the build flags is enabled
+	 * ---------------------------------------------------------------
+	 */
 	adr	x0, wa_cve_2017_5715_mmu_vbar
 	msr	vbar_el3, x0
 	/* isb will be performed before returning from this function */
@@ -506,6 +511,20 @@
 	ret	x19
 endfunc cortex_a57_reset_func
 
+func check_errata_cve_2022_23960
+#if WORKAROUND_CVE_2022_23960
+	mov	x0, #ERRATA_APPLIES
+#else
+	mov	x0, #ERRATA_MISSING
+#endif
+	ret
+endfunc check_errata_cve_2022_23960
+
+func check_smccc_arch_workaround_3
+	mov	x0, #ERRATA_APPLIES
+	ret
+endfunc check_smccc_arch_workaround_3
+
 	/* ----------------------------------------------------
 	 * The CPU Ops core power down function for Cortex-A57.
 	 * ----------------------------------------------------
@@ -630,6 +649,7 @@
 	report_errata ERRATA_A57_1319537, cortex_a57, 1319537
 	report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715
 	report_errata WORKAROUND_CVE_2018_3639, cortex_a57, cve_2018_3639
+	report_errata WORKAROUND_CVE_2022_23960, cortex_a57, cve_2022_23960
 
 	ldp	x8, x30, [sp], #16
 	ret
@@ -661,5 +681,6 @@
 	cortex_a57_reset_func, \
 	check_errata_cve_2017_5715, \
 	CPU_NO_EXTRA2_FUNC, \
+	check_smccc_arch_workaround_3, \
 	cortex_a57_core_pwr_dwn, \
 	cortex_a57_cluster_pwr_dwn
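
Unlike the Cortex-A72 change below, the A57 reset sequence above installs the
CVE-2017-5715 vectors without a CSV2 check, so there is no A57 configuration
on which invoking the SMC is unnecessary; check_smccc_arch_workaround_3 can
therefore return ERRATA_APPLIES unconditionally. In C terms the hook reduces
to the sketch below.

    /* C rendering (sketch) of the A57 hook above */
    static int a57_check_smccc_arch_workaround_3(void)
    {
        /*
         * The CVE-2017-5715 vectors are always installed and also
         * service SMCCC_ARCH_WORKAROUND_3 on entry to EL3.
         */
        return ERRATA_APPLIES;
    }
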
diff --git a/lib/cpus/aarch64/cortex_a72.S b/lib/cpus/aarch64/cortex_a72.S
index aff6072..de2d36e 100644
--- a/lib/cpus/aarch64/cortex_a72.S
+++ b/lib/cpus/aarch64/cortex_a72.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2015-2020, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2015-2022, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -9,6 +9,11 @@
 #include <cortex_a72.h>
 #include <cpu_macros.S>
 #include <plat_macros.S>
+#include "wa_cve_2022_23960_bhb_vector.S"
+
+#if WORKAROUND_CVE_2022_23960
+	wa_cve_2022_23960_bhb_vector_table CORTEX_A72_BHB_LOOP_COUNT, cortex_a72
+#endif /* WORKAROUND_CVE_2022_23960 */
 
 	/* ---------------------------------------------
 	 * Disable L1 data cache and unified L2 cache
@@ -133,6 +138,24 @@
 	ret
 endfunc check_errata_1319367
 
+func check_errata_cve_2022_23960
+#if WORKAROUND_CVE_2022_23960
+	mov	x0, #ERRATA_APPLIES
+#else
+	mov	x0, #ERRATA_MISSING
+#endif
+	ret
+endfunc check_errata_cve_2022_23960
+
+func check_smccc_arch_workaround_3
+	cpu_check_csv2	x0, 1f
+	mov	x0, #ERRATA_APPLIES
+	ret
+1:
+	mov	x0, #ERRATA_NOT_APPLIES
+	ret
+endfunc check_smccc_arch_workaround_3
+
 	/* -------------------------------------------------
 	 * The CPU Ops reset function for Cortex-A72.
 	 * -------------------------------------------------
@@ -147,13 +170,28 @@
 	bl	errata_a72_859971_wa
 #endif
 
-#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
+#if IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960)
 	cpu_check_csv2	x0, 1f
 	adr	x0, wa_cve_2017_5715_mmu_vbar
 	msr	vbar_el3, x0
 	/* isb will be performed before returning from this function */
+
+	/* Skip the CVE-2022-23960 mitigation if CVE-2017-5715 is mitigated */
+	b	2f
 1:
-#endif
+#if WORKAROUND_CVE_2022_23960
+	/*
+	 * The Cortex-A72 generic vectors are overridden to apply the
+	 * mitigation on exception entry from lower ELs for revisions >= r1p0,
+	 * which have CSV2 implemented.
+	 */
+	adr	x0, wa_cve_vbar_cortex_a72
+	msr	vbar_el3, x0
+
+	/* isb will be performed before returning from this function */
+#endif /* WORKAROUND_CVE_2022_23960 */
+2:
+#endif /* IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960) */
 
 #if WORKAROUND_CVE_2018_3639
 	mrs	x0, CORTEX_A72_CPUACTLR_EL1
@@ -299,6 +337,7 @@
 	report_errata ERRATA_A72_1319367, cortex_a72, 1319367
 	report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715
 	report_errata WORKAROUND_CVE_2018_3639, cortex_a72, cve_2018_3639
+	report_errata WORKAROUND_CVE_2022_23960, cortex_a72, cve_2022_23960
 
 	ldp	x8, x30, [sp], #16
 	ret
@@ -330,5 +369,6 @@
 	cortex_a72_reset_func, \
 	check_errata_cve_2017_5715, \
 	CPU_NO_EXTRA2_FUNC, \
+	check_smccc_arch_workaround_3, \
 	cortex_a72_core_pwr_dwn, \
 	cortex_a72_cluster_pwr_dwn
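
The reset-time branching above is easier to follow in C. In the sketch below,
cpu_implements_csv2() and write_vbar_el3() are hypothetical helpers standing
in for the cpu_check_csv2 macro and the adr/msr vbar_el3 pair.

    if (!cpu_implements_csv2()) {
        /* Pre-r1p0: the CVE-2017-5715 vectors also cover CVE-2022-23960 */
        write_vbar_el3(wa_cve_2017_5715_mmu_vbar);
    } else {
    #if WORKAROUND_CVE_2022_23960
        /* r1p0+ (CSV2 implemented): install the BHB loop vectors */
        write_vbar_el3(wa_cve_vbar_cortex_a72);
    #endif
    }
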
diff --git a/lib/cpus/aarch64/cortex_a73.S b/lib/cpus/aarch64/cortex_a73.S
index 5c8a887..edcd1f5 100644
--- a/lib/cpus/aarch64/cortex_a73.S
+++ b/lib/cpus/aarch64/cortex_a73.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2022, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -111,13 +111,21 @@
 	bl	errata_a73_855423_wa
 #endif
 
-#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
+#if IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960)
 	cpu_check_csv2	x0, 1f
 	adr	x0, wa_cve_2017_5715_bpiall_vbar
 	msr	vbar_el3, x0
-	/* isb will be performed before returning from this function */
+	isb
+	/* Skip installing vector table again for CVE-2022-23960 */
+	b	2f
 1:
+#if WORKAROUND_CVE_2022_23960
+	adr	x0, wa_cve_2017_5715_bpiall_vbar
+	msr	vbar_el3, x0
+	isb
 #endif
+2:
+#endif /* IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960) */
 
 #if WORKAROUND_CVE_2018_3639
 	mrs	x0, CORTEX_A73_IMP_DEF_REG1
@@ -221,6 +229,28 @@
 	ret
 endfunc check_errata_cve_2018_3639
 
+func check_errata_cve_2022_23960
+#if WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960
+	cpu_check_csv2	x0, 1f
+	mov	x0, #ERRATA_APPLIES
+	ret
+1:
+# if WORKAROUND_CVE_2022_23960
+	mov	x0, #ERRATA_APPLIES
+# else
+	mov	x0, #ERRATA_MISSING
+# endif /* WORKAROUND_CVE_2022_23960 */
+	ret
+#endif /* WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960 */
+	mov	x0, #ERRATA_MISSING
+	ret
+endfunc check_errata_cve_2022_23960
+
+func check_smccc_arch_workaround_3
+	mov	x0, #ERRATA_APPLIES
+	ret
+endfunc check_smccc_arch_workaround_3
+
 #if REPORT_ERRATA
 /*
  * Errata printing function for Cortex A73. Must follow AAPCS.
@@ -239,6 +269,7 @@
 	report_errata ERRATA_A73_855423, cortex_a73, 855423
 	report_errata WORKAROUND_CVE_2017_5715, cortex_a73, cve_2017_5715
 	report_errata WORKAROUND_CVE_2018_3639, cortex_a73, cve_2018_3639
+	report_errata WORKAROUND_CVE_2022_23960, cortex_a73, cve_2022_23960
 
 	ldp	x8, x30, [sp], #16
 	ret
@@ -269,5 +300,6 @@
 	cortex_a73_reset_func, \
 	check_errata_cve_2017_5715, \
 	CPU_NO_EXTRA2_FUNC, \
+	check_smccc_arch_workaround_3, \
 	cortex_a73_core_pwr_dwn, \
 	cortex_a73_cluster_pwr_dwn
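
The check_errata_cve_2022_23960 logic above is compact but branchy, and
cortex_a75.S below uses the same shape. A C rendering is sketched here, again
with the hypothetical cpu_implements_csv2() standing in for cpu_check_csv2.

    static int check_errata_cve_2022_23960(void)
    {
    #if WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960
        if (!cpu_implements_csv2()) {
            /* BPIALL vectors are installed; they mitigate BHB as well */
            return ERRATA_APPLIES;
        }
    #if WORKAROUND_CVE_2022_23960
        return ERRATA_APPLIES;  /* CSV2 parts reuse the same vectors */
    #else
        return ERRATA_MISSING;  /* mitigation not compiled in */
    #endif
    #else
        return ERRATA_MISSING;
    #endif
    }
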
diff --git a/lib/cpus/aarch64/cortex_a75.S b/lib/cpus/aarch64/cortex_a75.S
index 657457e..d561be4 100644
--- a/lib/cpus/aarch64/cortex_a75.S
+++ b/lib/cpus/aarch64/cortex_a75.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2022, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -90,13 +90,21 @@
 	bl	errata_a75_790748_wa
 #endif
 
-#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
+#if IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960)
 	cpu_check_csv2	x0, 1f
 	adr	x0, wa_cve_2017_5715_bpiall_vbar
 	msr	vbar_el3, x0
 	isb
+	/* Skip installing vector table again for CVE-2022-23960 */
+	b	2f
 1:
+#if WORKAROUND_CVE_2022_23960
+	adr	x0, wa_cve_2017_5715_bpiall_vbar
+	msr	vbar_el3, x0
+	isb
 #endif
+2:
+#endif /* IMAGE_BL31 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960) */
 
 #if WORKAROUND_CVE_2018_3639
 	mrs	x0, CORTEX_A75_CPUACTLR_EL1
@@ -161,6 +169,28 @@
 	ret
 endfunc check_errata_cve_2018_3639
 
+func check_errata_cve_2022_23960
+#if WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960
+	cpu_check_csv2	x0, 1f
+	mov	x0, #ERRATA_APPLIES
+	ret
+1:
+# if WORKAROUND_CVE_2022_23960
+	mov	x0, #ERRATA_APPLIES
+# else
+	mov	x0, #ERRATA_MISSING
+# endif /* WORKAROUND_CVE_2022_23960 */
+	ret
+#endif /* WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960 */
+	mov	x0, #ERRATA_MISSING
+	ret
+endfunc check_errata_cve_2022_23960
+
+func check_smccc_arch_workaround_3
+	mov	x0, #ERRATA_APPLIES
+	ret
+endfunc check_smccc_arch_workaround_3
+
 	/* ---------------------------------------------
 	 * HW will do the cache maintenance while powering down
 	 * ---------------------------------------------
@@ -197,6 +227,7 @@
 	report_errata WORKAROUND_CVE_2018_3639, cortex_a75, cve_2018_3639
 	report_errata ERRATA_DSU_798953, cortex_a75, dsu_798953
 	report_errata ERRATA_DSU_936184, cortex_a75, dsu_936184
+	report_errata WORKAROUND_CVE_2022_23960, cortex_a75, cve_2022_23960
 
 	ldp	x8, x30, [sp], #16
 	ret
@@ -226,4 +257,5 @@
 	cortex_a75_reset_func, \
 	check_errata_cve_2017_5715, \
 	CPU_NO_EXTRA2_FUNC, \
+	check_smccc_arch_workaround_3, \
 	cortex_a75_core_pwr_dwn
diff --git a/lib/cpus/aarch64/cortex_a76.S b/lib/cpus/aarch64/cortex_a76.S
index 114d0f5..50bd8cd 100644
--- a/lib/cpus/aarch64/cortex_a76.S
+++ b/lib/cpus/aarch64/cortex_a76.S
@@ -808,4 +808,5 @@
 	cortex_a76_reset_func, \
 	CPU_NO_EXTRA1_FUNC, \
 	cortex_a76_disable_wa_cve_2018_3639, \
+	CPU_NO_EXTRA3_FUNC, \
 	cortex_a76_core_pwr_dwn
diff --git a/lib/cpus/aarch64/cpu_helpers.S b/lib/cpus/aarch64/cpu_helpers.S
index bd8f85f..2385627 100644
--- a/lib/cpus/aarch64/cpu_helpers.S
+++ b/lib/cpus/aarch64/cpu_helpers.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2014-2021, Arm Limited and Contributors. All rights reserved.
+ * Copyright (c) 2014-2022, Arm Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -381,7 +381,7 @@
 	 * If the reserved function pointer is NULL, this CPU
 	 * is unaffected by CVE-2017-5715 so bail out.
 	 */
-	cmp	x0, #0
+	cmp	x0, #CPU_NO_EXTRA1_FUNC
 	beq	1f
 	br	x0
 1:
@@ -416,3 +416,41 @@
 	ldr	x0, [x0, #CPU_EXTRA2_FUNC]
 	ret
 endfunc wa_cve_2018_3639_get_disable_ptr
+
+/*
+ * int check_smccc_arch_wa3_applies(void);
+ *
+ * This function checks whether SMCCC_ARCH_WORKAROUND_3 is enabled to mitigate
+ * CVE-2022-23960 for this CPU. It returns:
+ *  - ERRATA_APPLIES when SMCCC_ARCH_WORKAROUND_3 can be invoked to mitigate
+ *    the CVE.
+ *  - ERRATA_NOT_APPLIES when SMCCC_ARCH_WORKAROUND_3 should not be invoked to
+ *    mitigate the CVE.
+ *
+ * NOTE: Must be called only after cpu_ops have been initialized
+ *       in per-CPU data.
+ */
+	.globl	check_smccc_arch_wa3_applies
+func check_smccc_arch_wa3_applies
+	mrs	x0, tpidr_el3
+#if ENABLE_ASSERTIONS
+	cmp	x0, #0
+	ASM_ASSERT(ne)
+#endif
+	ldr	x0, [x0, #CPU_DATA_CPU_OPS_PTR]
+#if ENABLE_ASSERTIONS
+	cmp	x0, #0
+	ASM_ASSERT(ne)
+#endif
+	ldr	x0, [x0, #CPU_EXTRA3_FUNC]
+	/*
+	 * If the reserved function pointer is NULL, this CPU
+	 * is unaffected by CVE-2022-23960 so bail out.
+	 */
+	cmp	x0, #CPU_NO_EXTRA3_FUNC
+	beq	1f
+	br	x0
+1:
+	mov	x0, #ERRATA_NOT_APPLIES
+	ret
+endfunc check_smccc_arch_wa3_applies
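
In C terms the helper above performs the dispatch sketched below, where the
hypothetical get_cpu_ops() stands in for the tpidr_el3 and
CPU_DATA_CPU_OPS_PTR loads and extra3 for the CPU_EXTRA3_FUNC slot.

    int check_smccc_arch_wa3_applies(void)
    {
        int (*fn)(void) = get_cpu_ops()->extra3;

        /* A NULL slot (CPU_NO_EXTRA3_FUNC) means this CPU is unaffected */
        return (fn != NULL) ? fn() : ERRATA_NOT_APPLIES;
    }
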
diff --git a/lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S b/lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S
index c9a9544..0222818 100644
--- a/lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S
+++ b/lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2022, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -308,22 +308,25 @@
 
 	/*
 	 * Check if SMC is coming from A64 state on #0
-	 * with W0 = SMCCC_ARCH_WORKAROUND_1
+	 * with W0 = SMCCC_ARCH_WORKAROUND_1 or W0 = SMCCC_ARCH_WORKAROUND_3
 	 *
 	 * This sequence evaluates as:
-	 *    (W0==SMCCC_ARCH_WORKAROUND_1) ? (ESR_EL3==SMC#0) : (NE)
+	 *    (W0==SMCCC_ARCH_WORKAROUND_1) || (W0==SMCCC_ARCH_WORKAROUND_3) ?
+	 *    (ESR_EL3==SMC#0) : (NE)
 	 * allowing use of a single branch operation
 	 */
 	orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_1
 	cmp	w0, w2
+	orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_3
+	ccmp	w0, w2, #4, ne
 	mov_imm	w2, ESR_EL3_A64_SMC0
 	ccmp	w3, w2, #0, eq
 	/* Static predictor will predict a fall through */
 	bne	1f
 	eret
 1:
-	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
-	b	sync_exception_aarch64
+	/* Restore x2 and x3 and continue sync exception handling */
+	b	bpiall_ret_sync_exception_aarch32_tail
 end_vector_entry bpiall_ret_sync_exception_aarch32
 
 vector_entry bpiall_ret_irq_aarch32
@@ -355,3 +358,12 @@
 vector_entry bpiall_ret_serror_aarch32
 	b	report_unhandled_exception
 end_vector_entry bpiall_ret_serror_aarch32
+
+	/*
+	 * Tail of bpiall_ret_sync_exception_aarch32, split out to keep the
+	 * vector entry within its 32-instruction (0x80-byte) slot
+	 */
+func bpiall_ret_sync_exception_aarch32_tail
+	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
+	b	sync_exception_aarch64
+endfunc bpiall_ret_sync_exception_aarch32_tail
diff --git a/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S b/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
index 5134ee3..ed0a549 100644
--- a/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
+++ b/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2022, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -34,15 +34,18 @@
 
 	/*
 	 * Ensure SMC is coming from A64/A32 state on #0
-	 * with W0 = SMCCC_ARCH_WORKAROUND_1
+	 * with W0 = SMCCC_ARCH_WORKAROUND_1 or W0 = SMCCC_ARCH_WORKAROUND_3
 	 *
 	 * This sequence evaluates as:
-	 *    (W0==SMCCC_ARCH_WORKAROUND_1) ? (ESR_EL3==SMC#0) : (NE)
+	 *    (W0==SMCCC_ARCH_WORKAROUND_1) || (W0==SMCCC_ARCH_WORKAROUND_3) ?
+	 *    (ESR_EL3==SMC#0) : (NE)
 	 * allowing use of a single branch operation
 	 */
 	.if \_is_sync_exception
 		orr	w1, wzr, #SMCCC_ARCH_WORKAROUND_1
 		cmp	w0, w1
+		orr	w1, wzr, #SMCCC_ARCH_WORKAROUND_3
+		ccmp	w0, w1, #4, ne
 		mrs	x0, esr_el3
 		mov_imm	w1, \_esr_el3_val
 		ccmp	w0, w1, #0, eq
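
Both vector files rely on the same two tricks. First, the workaround function
IDs are valid AArch64 bitmask immediates, so each can be materialized with a
single orr against wzr. Second, ccmp with nzcv=#4 forces the Z flag (EQ) when
the first compare already matched, turning the cmp/ccmp pair into a branchless
logical OR. The added chain is equivalent to the C predicate sketched below.

    #include <stdbool.h>

    /* C equivalent (sketch) of the cmp/ccmp/ccmp chain */
    static bool wa_smc_fast_path(unsigned int w0, unsigned long esr_el3,
                                 unsigned long expected_smc0_esr)
    {
        return ((w0 == SMCCC_ARCH_WORKAROUND_1) ||
                (w0 == SMCCC_ARCH_WORKAROUND_3)) &&
               (esr_el3 == expected_smc0_esr);
    }
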
diff --git a/plat/arm/board/fvp/platform.mk b/plat/arm/board/fvp/platform.mk
index a24a2e5..acac886 100644
--- a/plat/arm/board/fvp/platform.mk
+++ b/plat/arm/board/fvp/platform.mk
@@ -308,14 +308,6 @@
 # Enable dynamic mitigation support by default
 DYNAMIC_WORKAROUND_CVE_2018_3639	:=	1
 
-# Enable reclaiming of BL31 initialisation code for secondary cores
-# stacks for FVP. However, don't enable reclaiming for clang.
-ifneq (${RESET_TO_BL31},1)
-ifeq ($(findstring clang,$(notdir $(CC))),)
-RECLAIM_INIT_CODE	:=	1
-endif
-endif
-
 ifeq (${ENABLE_AMU},1)
 BL31_SOURCES		+=	lib/cpus/aarch64/cpuamu.c		\
 				lib/cpus/aarch64/cpuamu_helpers.S
diff --git a/plat/st/stm32mp1/platform.mk b/plat/st/stm32mp1/platform.mk
index 0f579a4..a4c40c4 100644
--- a/plat/st/stm32mp1/platform.mk
+++ b/plat/st/stm32mp1/platform.mk
@@ -38,6 +38,7 @@
 
 # Not needed for Cortex-A7
 WORKAROUND_CVE_2017_5715:=	0
+WORKAROUND_CVE_2022_23960:=	0
 
 ifeq (${PSA_FWU_SUPPORT},1)
 ifneq (${STM32MP_USE_STM32IMAGE},1)
diff --git a/services/arm_arch_svc/arm_arch_svc_setup.c b/services/arm_arch_svc/arm_arch_svc_setup.c
index 5523a1c..46ccd9e 100644
--- a/services/arm_arch_svc/arm_arch_svc_setup.c
+++ b/services/arm_arch_svc/arm_arch_svc_setup.c
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2018-2021, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -9,6 +9,7 @@
 #include <lib/cpus/errata_report.h>
 #include <lib/cpus/wa_cve_2017_5715.h>
 #include <lib/cpus/wa_cve_2018_3639.h>
+#include <lib/cpus/wa_cve_2022_23960.h>
 #include <lib/smccc.h>
 #include <services/arm_arch_svc.h>
 #include <smccc_helpers.h>
@@ -74,6 +75,20 @@
 	}
 #endif
 
+#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715)
+	case SMCCC_ARCH_WORKAROUND_3:
+		/*
+		 * SMCCC_ARCH_WORKAROUND_3 should also take into account
+		 * CVE-2017-5715 since this SMC can be used instead of
+		 * SMCCC_ARCH_WORKAROUND_1.
+		 */
+		if ((check_smccc_arch_wa3_applies() == ERRATA_NOT_APPLIES) &&
+		    (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)) {
+			return 1;
+		}
+		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
+#endif
+
 	/* Fallthrough */
 
 	default:
@@ -117,7 +132,7 @@
 	case SMCCC_ARCH_WORKAROUND_1:
 		/*
 		 * The workaround has already been applied on affected PEs
-		 * during entry to EL3.  On unaffected PEs, this function
+		 * during entry to EL3. On unaffected PEs, this function
 		 * has no effect.
 		 */
 		SMC_RET0(handle);
@@ -132,6 +147,15 @@
 		 */
 		SMC_RET0(handle);
 #endif
+#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715)
+	case SMCCC_ARCH_WORKAROUND_3:
+		/*
+		 * The workaround has already been applied on affected PEs
+		 * during entry to EL3. On unaffected PEs, this function
+		 * has no effect.
+		 */
+		SMC_RET0(handle);
+#endif
 	default:
 		WARN("Unimplemented Arm Architecture Service Call: 0x%x \n",
 			smc_fid);
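
For reference, a lower-EL caller is expected to use the new call as sketched
below. The smc32() helper is hypothetical, standing in for whatever SMC
trampoline the OS or hypervisor provides; the return-value convention matches
the SMCCC_ARCH_FEATURES handling above (negative for NOT_SUPPORTED, 1 when the
mitigation is unnecessary on this PE, 0 when the workaround should be used).

    #define SMCCC_ARCH_FEATURES      0x80000001U
    #define SMCCC_ARCH_WORKAROUND_3  0x80003FFFU

    extern long smc32(unsigned int fid, unsigned int arg0);

    void mitigate_spectre_bhb(void)
    {
        long ret = smc32(SMCCC_ARCH_FEATURES, SMCCC_ARCH_WORKAROUND_3);

        if (ret != 0)
            return; /* not supported, or not required on this PE */

        /* ret == 0: invoke at exception boundaries; returns no state */
        smc32(SMCCC_ARCH_WORKAROUND_3, 0U);
    }
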