sp_min: Implement workaround for CVE-2017-5715

This patch introduces two workarounds for ARMv7 systems, one based on
`BPIALL` and one based on `ICIALLU`.  The workarounds need to be
applied prior to executing any branch instruction in secure world.
This is achieved using a custom vector table where each entry is an
`add sp, sp, #1` instruction.
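
As a rough sketch (the label and comments are illustrative, not the
literal TF-A source), assuming the standard ARM vector layout:

    wa_bpiall_vbar_sketch:
        add     sp, sp, #1      /* Reset */
        add     sp, sp, #1      /* Undefined instruction */
        add     sp, sp, #1      /* SVC / SMC */
        add     sp, sp, #1      /* Prefetch abort */
        add     sp, sp, #1      /* Data abort */
        add     sp, sp, #1      /* Reserved */
        add     sp, sp, #1      /* IRQ */
        add     sp, sp, #1      /* FIQ */
        /* Each entry falls through the ones below it, so with an
         * 8-byte aligned SP the bottom three bits of SP identify the
         * entry: FIQ=1, IRQ=2, ..., Undef=7, Reset=8 (i.e. 0). */
        mcr     p15, 0, r0, c7, c5, 6   /* BPIALL; source value ignored */
        isb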

On entry to monitor mode, once the sequence of `add` instructions has
executed, the workaround code invalidates the branch target buffer
(BTB).  The bottom bits of `SP` are then used to decode the exception
entry type and to restore the original `SP`.  This relies on the
monitor stack pointer staying 8-byte aligned, which is why `smc_ctx_t`
is padded to a multiple of 8 bytes below.
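
The decode must not clobber any general purpose register, since no
context has been saved yet at this point.  A sketch of one case
(encodings follow the table above; the handler name is hypothetical)
using only flag-setting instructions:

        /* An SVC/SMC entry leaves SP & 7 == 6 in the sketch above. */
        tst     sp, #1
        bne     not_smc         /* odd: Undef, Prefetch abort, Reserved, FIQ */
        tst     sp, #2
        beq     not_smc         /* bit 1 clear: Reset or Data abort */
        tst     sp, #4
        beq     not_smc         /* bit 2 clear: IRQ */
        sub     sp, sp, #6      /* SMC: undo the adds, restoring SP */
        b       handle_smc_sketch
    not_smc:
        /* decode the remaining entry types similarly */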

A side effect of this change is that the exception vectors are
installed before the CPU-specific reset function.  This is now
consistent with how it is done on AArch64.

Note that on AArch32 systems the exception vectors are typically
tightly integrated with the secure payload (e.g. the Trusted OS), so
these workarounds will need to be ported to each secure payload that
requires them.

A later patch will switch the AArch32 per-CPU vbar to the
corresponding workaround vector table according to the CPU type,
along the lines of the sketch below.
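
As a minimal sketch (reusing the hypothetical table symbol from above;
`stcopr` is the same helper used in the diff):

        ldr     r0, =wa_bpiall_vbar_sketch
        stcopr  r0, MVBAR       /* mcr p15, 0, r0, c12, c0, 1 */
        isb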

Change-Id: I5786872497d359e496ebe0757e8017fa98f753fa
Signed-off-by: Dimitris Papastamos <dimitris.papastamos@arm.com>
diff --git a/include/common/aarch32/el3_common_macros.S b/include/common/aarch32/el3_common_macros.S
index 59e99f8..74fb582 100644
--- a/include/common/aarch32/el3_common_macros.S
+++ b/include/common/aarch32/el3_common_macros.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -14,7 +14,7 @@
 	/*
 	 * Helper macro to initialise EL3 registers we care about.
 	 */
-	.macro el3_arch_init_common _exception_vectors
+	.macro el3_arch_init_common
 	/* ---------------------------------------------------------------------
 	 * SCTLR has already been initialised - read current value before
 	 * modifying.
@@ -34,15 +34,6 @@
 	isb
 
 	/* ---------------------------------------------------------------------
-	 * Set the exception vectors (VBAR/MVBAR).
-	 * ---------------------------------------------------------------------
-	 */
-	ldr	r0, =\_exception_vectors
-	stcopr	r0, VBAR
-	stcopr	r0, MVBAR
-	isb
-
-	/* ---------------------------------------------------------------------
 	 * Initialise SCR, setting all fields rather than relying on the hw.
 	 *
 	 * SCR.SIF: Enabled so that Secure state instruction fetches from
@@ -211,6 +202,15 @@
 	.endif /* _warm_boot_mailbox */
 
 	/* ---------------------------------------------------------------------
+	 * Set the exception vectors (VBAR/MVBAR).
+	 * ---------------------------------------------------------------------
+	 */
+	ldr	r0, =\_exception_vectors
+	stcopr	r0, VBAR
+	stcopr	r0, MVBAR
+	isb
+
+	/* ---------------------------------------------------------------------
 	 * It is a cold boot.
 	 * Perform any processor specific actions upon reset e.g. cache, TLB
 	 * invalidations etc.
@@ -218,7 +218,7 @@
 	 */
 	bl	reset_handler
 
-	el3_arch_init_common \_exception_vectors
+	el3_arch_init_common
 
 	.if \_secondary_cold_boot
 		/* -------------------------------------------------------------
diff --git a/include/lib/aarch32/arch.h b/include/lib/aarch32/arch.h
index 4d2a5fc..134d534 100644
--- a/include/lib/aarch32/arch.h
+++ b/include/lib/aarch32/arch.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -426,6 +426,8 @@
 #define TLBIMVAA	p15, 0, c8, c7, 3
 #define TLBIMVAAIS	p15, 0, c8, c3, 3
 #define BPIALLIS	p15, 0, c7, c1, 6
+#define BPIALL		p15, 0, c7, c5, 6
+#define ICIALLU		p15, 0, c7, c5, 0
 #define HSCTLR		p15, 4, c1, c0, 0
 #define HCR		p15, 4, c1, c1, 0
 #define HCPTR		p15, 4, c1, c1, 2
diff --git a/include/lib/aarch32/smcc_helpers.h b/include/lib/aarch32/smcc_helpers.h
index 53f1aa4..ed3b722 100644
--- a/include/lib/aarch32/smcc_helpers.h
+++ b/include/lib/aarch32/smcc_helpers.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -22,7 +22,7 @@
 #define SMC_CTX_LR_MON		0x80
 #define SMC_CTX_SCR		0x84
 #define SMC_CTX_PMCR		0x88
-#define SMC_CTX_SIZE		0x8C
+#define SMC_CTX_SIZE		0x90
 
 #ifndef __ASSEMBLY__
 #include <cassert.h>
@@ -75,7 +75,13 @@
 	u_register_t lr_mon;
 	u_register_t scr;
 	u_register_t pmcr;
-} smc_ctx_t;
+	/*
+	 * The workaround for CVE-2017-5715 requires storing information in
+	 * the bottom 3 bits of the stack pointer.  Add a padding field to
+	 * force the size of the struct to be a multiple of 8.
+	 */
+	u_register_t pad;
+} smc_ctx_t __aligned(8);
 
 /*
  * Compile time assertions related to the 'smc_context' structure to
@@ -99,6 +105,7 @@
 CASSERT(SMC_CTX_SPSR_MON == __builtin_offsetof(smc_ctx_t, spsr_mon), \
 	assert_smc_ctx_spsr_mon_offset_mismatch);
 
+CASSERT((sizeof(smc_ctx_t) & 0x7) == 0, assert_smc_ctx_not_aligned);
 CASSERT(SMC_CTX_SIZE == sizeof(smc_ctx_t), assert_smc_ctx_size_mismatch);
 
 /* Convenience macros to return from SMC handler */