Merge pull request #1305 from dp-arm/dp/smccc

Implement support for v1.2 of firmware interfaces spec (ARM DEN 0070A)
diff --git a/bl31/bl31.mk b/bl31/bl31.mk
index 886d301..0e47ddf 100644
--- a/bl31/bl31.mk
+++ b/bl31/bl31.mk
@@ -61,8 +61,8 @@
 endif
 
 ifeq (${WORKAROUND_CVE_2017_5715},1)
-BL31_SOURCES		+=	lib/cpus/aarch64/workaround_cve_2017_5715_mmu.S		\
-				lib/cpus/aarch64/workaround_cve_2017_5715_bpiall.S
+BL31_SOURCES		+=	lib/cpus/aarch64/workaround_cve_2017_5715_bpiall.S	\
+				lib/cpus/aarch64/workaround_cve_2017_5715_mmu.S
 endif
 
 BL31_LINKERFILE		:=	bl31/bl31.ld.S
diff --git a/include/lib/cpus/aarch64/cpu_macros.S b/include/lib/cpus/aarch64/cpu_macros.S
index ccf5306..8f0a74f 100644
--- a/include/lib/cpus/aarch64/cpu_macros.S
+++ b/include/lib/cpus/aarch64/cpu_macros.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2014-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2014-2018, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -46,6 +46,8 @@
 CPU_RESET_FUNC: /* cpu_ops reset_func */
 	.space  8
 #endif
+CPU_EXTRA1_FUNC: /* cpu_ops extra1 function */
+	.space	8
 #ifdef IMAGE_BL31 /* The power down core and cluster is needed only in BL31 */
 CPU_PWR_DWN_OPS: /* cpu_ops power down functions */
 	.space  (8 * CPU_MAX_PWR_DWN_OPS)
@@ -113,6 +115,10 @@
 	 * _resetfunc:
 	 *	Reset function for the CPU. If there's no CPU reset function,
 	 *	specify CPU_NO_RESET_FUNC
+	 * _extra1:
+	 *	This is a placeholder for future per-CPU operations. Currently,
+	 *	some CPUs use this entry to set a test function that determines
+	 *	whether the workaround for CVE-2017-5715 needs to be applied.
 	 * _power_down_ops:
 	 *	Comma-separated list of functions to perform power-down
 	 *	operations on the CPU. At least one, and up to
@@ -122,8 +128,8 @@
 	 *	CPU_MAX_PWR_DWN_OPS functions, the last specified one will be
 	 *	used to handle power down at subsequent levels
 	 */
-	.macro declare_cpu_ops _name:req, _midr:req, _resetfunc:req, \
-		_power_down_ops:vararg
+	.macro declare_cpu_ops_base _name:req, _midr:req, _resetfunc:req, \
+		_extra1:req, _power_down_ops:vararg
 	.section cpu_ops, "a"
 	.align 3
 	.type cpu_ops_\_name, %object
@@ -131,6 +137,7 @@
 #if defined(IMAGE_AT_EL3)
 	.quad \_resetfunc
 #endif
+	.quad \_extra1
 #ifdef IMAGE_BL31
 1:
 	/* Insert list of functions */
@@ -187,6 +194,18 @@
 #endif
 	.endm
 
+	.macro declare_cpu_ops _name:req, _midr:req, _resetfunc:req, \
+		_power_down_ops:vararg
+		declare_cpu_ops_base \_name, \_midr, \_resetfunc, 0, \
+			\_power_down_ops
+	.endm
+
+	.macro declare_cpu_ops_workaround_cve_2017_5715 _name:req, _midr:req, \
+		_resetfunc:req, _extra1:req, _power_down_ops:vararg
+		declare_cpu_ops_base \_name, \_midr, \_resetfunc, \
+			\_extra1, \_power_down_ops
+	.endm
+
 #if REPORT_ERRATA
 	/*
 	 * Print status of a CPU errata
@@ -229,3 +248,18 @@
 #endif
 
 #endif /* __CPU_MACROS_S__ */
+
+	/*
+	 * This macro is used on some CPUs to detect whether they are
+	 * vulnerable to CVE-2017-5715; it branches to _label if not affected.
+	 */
+	.macro	cpu_check_csv2 _reg _label
+	mrs	\_reg, id_aa64pfr0_el1
+	ubfx	\_reg, \_reg, #ID_AA64PFR0_CSV2_SHIFT, #ID_AA64PFR0_CSV2_LENGTH
+	/*
+	 * If the field equals 1, then branch targets trained in one
+	 * context cannot affect speculative execution in a different context.
+	 */
+	cmp	\_reg, #1
+	beq	\_label
+	.endm
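
For reference, the new cpu_check_csv2 macro boils down to a bitfield test on ID_AA64PFR0_EL1.CSV2. Below is a minimal C sketch of the equivalent check; the function name cpu_is_csv2_safe is illustrative and not part of this patch, and the shift/width constants are assumed to mirror ID_AA64PFR0_CSV2_SHIFT/ID_AA64PFR0_CSV2_LENGTH (bits [59:56] of ID_AA64PFR0_EL1).

#include <stdint.h>

#define CSV2_SHIFT	56	/* assumed value of ID_AA64PFR0_CSV2_SHIFT */
#define CSV2_LENGTH	4	/* assumed value of ID_AA64PFR0_CSV2_LENGTH */

/*
 * Returns 1 when CSV2 == 1, i.e. branch targets trained in one context
 * cannot affect speculative execution in another, so the CVE-2017-5715
 * workaround is unnecessary. Reading ID_AA64PFR0_EL1 itself is left to
 * the caller (the assembly macro does it with mrs).
 */
static inline int cpu_is_csv2_safe(uint64_t id_aa64pfr0)
{
	uint64_t csv2 = (id_aa64pfr0 >> CSV2_SHIFT) &
			((1ULL << CSV2_LENGTH) - 1ULL);

	return csv2 == 1;
}
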
diff --git a/include/lib/cpus/workaround_cve_2017_5715.h b/include/lib/cpus/workaround_cve_2017_5715.h
new file mode 100644
index 0000000..e837a67
--- /dev/null
+++ b/include/lib/cpus/workaround_cve_2017_5715.h
@@ -0,0 +1,12 @@
+/*
+ * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#ifndef __WORKAROUND_CVE_2017_5715_H__
+#define __WORKAROUND_CVE_2017_5715_H__
+
+int check_workaround_cve_2017_5715(void);
+
+#endif /* __WORKAROUND_CVE_2017_5715_H__ */
diff --git a/lib/cpus/aarch64/cortex_a57.S b/lib/cpus/aarch64/cortex_a57.S
index c82ebfc..4d072e1 100644
--- a/lib/cpus/aarch64/cortex_a57.S
+++ b/lib/cpus/aarch64/cortex_a57.S
@@ -555,8 +555,8 @@
 	ret
 endfunc cortex_a57_cpu_reg_dump
 
-
-declare_cpu_ops cortex_a57, CORTEX_A57_MIDR, \
+declare_cpu_ops_workaround_cve_2017_5715 cortex_a57, CORTEX_A57_MIDR, \
 	cortex_a57_reset_func, \
+	check_errata_cve_2017_5715, \
 	cortex_a57_core_pwr_dwn, \
 	cortex_a57_cluster_pwr_dwn
diff --git a/lib/cpus/aarch64/cortex_a72.S b/lib/cpus/aarch64/cortex_a72.S
index 9633aa8..29fa77b 100644
--- a/lib/cpus/aarch64/cortex_a72.S
+++ b/lib/cpus/aarch64/cortex_a72.S
@@ -98,12 +98,16 @@
 endfunc check_errata_859971
 
 func check_errata_cve_2017_5715
+	cpu_check_csv2	x0, 1f
 #if WORKAROUND_CVE_2017_5715
 	mov	x0, #ERRATA_APPLIES
 #else
 	mov	x0, #ERRATA_MISSING
 #endif
 	ret
+1:
+	mov	x0, #ERRATA_NOT_APPLIES
+	ret
 endfunc check_errata_cve_2017_5715
 
 	/* -------------------------------------------------
@@ -121,8 +125,10 @@
 #endif
 
 #if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
+	cpu_check_csv2	x0, 1f
 	adr	x0, workaround_mmu_runtime_exceptions
 	msr	vbar_el3, x0
+1:
 #endif
 
 	/* ---------------------------------------------
@@ -286,8 +292,8 @@
 	ret
 endfunc cortex_a72_cpu_reg_dump
 
-
-declare_cpu_ops cortex_a72, CORTEX_A72_MIDR, \
+declare_cpu_ops_workaround_cve_2017_5715 cortex_a72, CORTEX_A72_MIDR, \
 	cortex_a72_reset_func, \
+	check_errata_cve_2017_5715, \
 	cortex_a72_core_pwr_dwn, \
 	cortex_a72_cluster_pwr_dwn
diff --git a/lib/cpus/aarch64/cortex_a73.S b/lib/cpus/aarch64/cortex_a73.S
index 11680a0..0a961ea 100644
--- a/lib/cpus/aarch64/cortex_a73.S
+++ b/lib/cpus/aarch64/cortex_a73.S
@@ -37,8 +37,10 @@
 
 func cortex_a73_reset_func
 #if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
+	cpu_check_csv2	x0, 1f
 	adr	x0, workaround_bpiall_vbar0_runtime_exceptions
 	msr	vbar_el3, x0
+1:
 #endif
 
 	/* ---------------------------------------------
@@ -115,12 +117,16 @@
 endfunc cortex_a73_cluster_pwr_dwn
 
 func check_errata_cve_2017_5715
+	cpu_check_csv2	x0, 1f
 #if WORKAROUND_CVE_2017_5715
 	mov	x0, #ERRATA_APPLIES
 #else
 	mov	x0, #ERRATA_MISSING
 #endif
 	ret
+1:
+	mov	x0, #ERRATA_NOT_APPLIES
+	ret
 endfunc check_errata_cve_2017_5715
 
 #if REPORT_ERRATA
@@ -164,7 +170,8 @@
 	ret
 endfunc cortex_a73_cpu_reg_dump
 
-declare_cpu_ops cortex_a73, CORTEX_A73_MIDR, \
+declare_cpu_ops_workaround_cve_2017_5715 cortex_a73, CORTEX_A73_MIDR, \
 	cortex_a73_reset_func, \
+	check_errata_cve_2017_5715, \
 	cortex_a73_core_pwr_dwn, \
 	cortex_a73_cluster_pwr_dwn
diff --git a/lib/cpus/aarch64/cortex_a75.S b/lib/cpus/aarch64/cortex_a75.S
index 12ea304..288f5af 100644
--- a/lib/cpus/aarch64/cortex_a75.S
+++ b/lib/cpus/aarch64/cortex_a75.S
@@ -12,15 +12,7 @@
 
 func cortex_a75_reset_func
 #if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
-	mrs	x0, id_aa64pfr0_el1
-	ubfx	x0, x0, #ID_AA64PFR0_CSV2_SHIFT, #ID_AA64PFR0_CSV2_LENGTH
-	/*
-	 * If the field equals to 1 then branch targets trained in one
-	 * context cannot affect speculative execution in a different context.
-	 */
-	cmp	x0, #1
-	beq	1f
-
+	cpu_check_csv2	x0, 1f
 	adr	x0, workaround_bpiall_vbar0_runtime_exceptions
 	msr	vbar_el3, x0
 1:
@@ -53,15 +45,7 @@
 endfunc cortex_a75_reset_func
 
 func check_errata_cve_2017_5715
-	mrs	x0, id_aa64pfr0_el1
-	ubfx	x0, x0, #ID_AA64PFR0_CSV2_SHIFT, #ID_AA64PFR0_CSV2_LENGTH
-	/*
-	 * If the field equals to 1 then branch targets trained in one
-	 * context cannot affect speculative execution in a different context.
-	 */
-	cmp	x0, #1
-	beq	1f
-
+	cpu_check_csv2	x0, 1f
 #if WORKAROUND_CVE_2017_5715
 	mov	x0, #ERRATA_APPLIES
 #else
@@ -129,6 +113,7 @@
 	ret
 endfunc cortex_a75_cpu_reg_dump
 
-declare_cpu_ops cortex_a75, CORTEX_A75_MIDR, \
+declare_cpu_ops_workaround_cve_2017_5715 cortex_a75, CORTEX_A75_MIDR, \
 	cortex_a75_reset_func, \
+	check_errata_cve_2017_5715, \
 	cortex_a75_core_pwr_dwn
diff --git a/lib/cpus/aarch64/cpu_helpers.S b/lib/cpus/aarch64/cpu_helpers.S
index ae1c3c2..5a9226d 100644
--- a/lib/cpus/aarch64/cpu_helpers.S
+++ b/lib/cpus/aarch64/cpu_helpers.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2014-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2014-2018, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -7,9 +7,7 @@
 #include <arch.h>
 #include <asm_macros.S>
 #include <assert_macros.S>
-#if defined(IMAGE_BL31) || (defined(IMAGE_BL2) && BL2_AT_EL3)
 #include <cpu_data.h>
-#endif
 #include <cpu_macros.S>
 #include <debug.h>
 #include <errata_report.h>
@@ -281,3 +279,36 @@
 	br	x1
 endfunc print_errata_status
 #endif
+
+/*
+ * int check_workaround_cve_2017_5715(void);
+ *
+ * This function returns:
+ *  - ERRATA_APPLIES when firmware mitigation is required.
+ *  - ERRATA_NOT_APPLIES when firmware mitigation is _not_ required.
+ *  - ERRATA_MISSING when firmware mitigation would be required but
+ *    is not compiled in.
+ *
+ * NOTE: Must be called only after cpu_ops have been initialized
+ *       in per-CPU data.
+ */
+	.globl	check_workaround_cve_2017_5715
+func check_workaround_cve_2017_5715
+	mrs	x0, tpidr_el3
+#if ENABLE_ASSERTIONS
+	cmp	x0, #0
+	ASM_ASSERT(ne)
+#endif
+	ldr	x0, [x0, #CPU_DATA_CPU_OPS_PTR]
+	ldr	x0, [x0, #CPU_EXTRA1_FUNC]
+	/*
+	 * If the reserved function pointer is NULL, this CPU
+	 * is unaffected by CVE-2017-5715, so bail out.
+	 */
+	cmp	x0, #0
+	beq	1f
+	br	x0
+1:
+	mov	x0, #ERRATA_NOT_APPLIES
+	ret
+endfunc check_workaround_cve_2017_5715
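
For reference, check_workaround_cve_2017_5715 is a simple dispatch through the new CPU_EXTRA1_FUNC slot. A pseudo-C sketch of that logic follows; the struct layout, names, and the ERRATA_NOT_APPLIES value are illustrative only, since the real lookup goes through tpidr_el3, per-CPU data and the CPU_EXTRA1_FUNC offset, and the real constants live in errata_report.h.

#include <stddef.h>

#define ERRATA_NOT_APPLIES	0	/* placeholder for this sketch only */

/* Hypothetical mirror of the cpu_ops extra1 slot added by this patch. */
typedef struct cpu_ops_sketch {
	int (*extra1)(void);	/* CVE-2017-5715 test hook, NULL if unused */
} cpu_ops_sketch_t;

static int check_workaround_cve_2017_5715_sketch(const cpu_ops_sketch_t *ops)
{
	/* CPUs that never install a hook are treated as unaffected. */
	if (ops->extra1 == NULL)
		return ERRATA_NOT_APPLIES;

	/* Otherwise the per-CPU hook returns APPLIES/NOT_APPLIES/MISSING. */
	return ops->extra1();
}
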
diff --git a/services/arm_arch_svc/arm_arch_svc_setup.c b/services/arm_arch_svc/arm_arch_svc_setup.c
index a809c42..f75a737 100644
--- a/services/arm_arch_svc/arm_arch_svc_setup.c
+++ b/services/arm_arch_svc/arm_arch_svc_setup.c
@@ -6,9 +6,11 @@
 
 #include <arm_arch_svc.h>
 #include <debug.h>
+#include <errata_report.h>
 #include <runtime_svc.h>
 #include <smcc.h>
 #include <smcc_helpers.h>
+#include <workaround_cve_2017_5715.h>
 
 static int32_t smccc_version(void)
 {
@@ -17,14 +19,19 @@
 
 static int32_t smccc_arch_features(u_register_t arg)
 {
+	int ret;
+
 	switch (arg) {
 	case SMCCC_VERSION:
 	case SMCCC_ARCH_FEATURES:
 		return SMC_OK;
-#if WORKAROUND_CVE_2017_5715
 	case SMCCC_ARCH_WORKAROUND_1:
-		return SMC_OK;
-#endif
+		ret = check_workaround_cve_2017_5715();
+		if (ret == ERRATA_APPLIES)
+			return 0;
+		else if (ret == ERRATA_NOT_APPLIES)
+			return 1;
+		return -1; /* ERRATA_MISSING */
 	default:
 		return SMC_UNK;
 	}
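
With this change, SMCCC_ARCH_FEATURES(SMCCC_ARCH_WORKAROUND_1) returns 0 when the firmware workaround is present and required, 1 when the CPU is unaffected, and a negative value when the mitigation would be needed but is not compiled in (ERRATA_MISSING). The sketch below shows how a lower-EL caller might classify that result; issuing the SMC itself is platform specific and not shown, and the enum and function names are illustrative rather than part of the patch.

enum wa1_state {
	WA1_REQUIRED,		/* 0: firmware workaround present and needed */
	WA1_NOT_REQUIRED,	/* 1: CPU unaffected, no call needed */
	WA1_UNAVAILABLE,	/* negative: firmware mitigation not available */
};

static enum wa1_state classify_workaround_1(long smc_ret)
{
	if (smc_ret == 0)
		return WA1_REQUIRED;
	if (smc_ret == 1)
		return WA1_NOT_REQUIRED;
	return WA1_UNAVAILABLE;
}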