Preserve x19-x29 across world switch for exception handling

Previously, in the BL3-1 exception handlers, X19-X29 were not saved
and restored on every SMC/trap into EL3. Instead, these registers
were 'saved as needed' as a side effect of the A64 ABI used by the C
compiler.

That approach failed when world switching, but the problem was not
visible with the TSP/TSPD code because the TSP is 64-bit, did not
clobber these registers when running, and did not support pre-emption
by normal world interrupts. Scenarios outside the TSP/TSPD case showed
that the values in these registers can leak through a world switch,
which broke the normal and trusted world assumptions that these
registers are preserved.

The ideal solution would save and restore these registers only when a
world switch actually occurs, but that implementation is more complex.
So this patch always saves and restores these registers on entry to
and exit from EL3.

Fixes ARM-software/tf-issues#141

Change-Id: I9a727167bbc594454e81cf78a97ca899dfb11c27
diff --git a/include/bl31/cm_macros.S b/include/bl31/cm_macros.S
index d264956..e82f3a3 100644
--- a/include/bl31/cm_macros.S
+++ b/include/bl31/cm_macros.S
@@ -27,31 +27,9 @@
  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
  * POSSIBILITY OF SUCH DAMAGE.
  */
-
 #include <arch.h>
 #include <context.h>
 
-
-	/* ---------------------------------------------
-	 * Zero out the callee saved register to prevent
-	 * leakage of secure state into the normal world
-	 * during the first ERET after a cold/warm boot.
-	 * ---------------------------------------------
-	 */
-	.macro	zero_callee_saved_regs
-	mov	x19, xzr
-	mov	x20, xzr
-	mov	x21, xzr
-	mov	x22, xzr
-	mov	x23, xzr
-	mov	x24, xzr
-	mov	x25, xzr
-	mov	x26, xzr
-	mov	x27, xzr
-	mov	x28, xzr
-	mov	x29, xzr
-	.endm
-
 	.macro	switch_to_exception_stack reg1 reg2
 	mov     \reg1 , sp
 	ldr	\reg2, [\reg1, #CTX_EL3STATE_OFFSET + CTX_EXCEPTION_SP]
@@ -64,7 +42,7 @@
 	 * -----------------------------------------------------
 	 */
 	.macro	handle_sync_exception
-	stp	x30, xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 	mrs	x30, esr_el3
 	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
 
@@ -83,7 +61,7 @@
 	 * not expect any such exceptions.
 	 * -----------------------------------------------------
 	 */
-	bl	save_scratch_registers
+	bl	save_gp_registers
 	switch_to_exception_stack x0 x1
 
 	/* Save the core_context pointer for handled faults */
@@ -92,8 +70,8 @@
 	ldp	x0, xzr, [sp], #0x10
 
 	mov	sp, x0
-	bl	restore_scratch_registers
-	ldp	x30, xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+	bl	restore_gp_registers
+	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 	eret
 	.endm
 
@@ -103,8 +81,8 @@
 	 * -----------------------------------------------------
 	 */
 	.macro	handle_async_exception type
-	stp	x30, xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
-	bl	save_scratch_registers
+	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+	bl	save_gp_registers
 	switch_to_exception_stack x0 x1
 
 	/* Save the core_context pointer */
@@ -114,7 +92,7 @@
 	ldp	x0, xzr, [sp], #0x10
 
 	mov	sp, x0
-	bl	restore_scratch_registers
-	ldp	x30, xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+	bl	restore_gp_registers
+	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 	.endm