armv8: aarch64: Fix the warning about x1-x3 being nonzero

When booting a 64-bit kernel, a warning is printed that x1-x3 are nonzero
in violation of the boot protocol, which requires those registers to be
zero at kernel entry. To fix this, a fourth input argument is added to
armv8_switch_to_el2 and armv8_switch_to_el1, and the caller sets it to the
required value (zero in this case).

Signed-off-by: Alison Wang <alison.wang@nxp.com>
Reviewed-by: Alexander Graf <agraf@suse.de>
Tested-by: Ryan Harkin <ryan.harkin@linaro.org>
Tested-by: Michal Simek <michal.simek@xilinx.com>
Reviewed-by: York Sun <york.sun@nxp.com>
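
For reference, the arm64 boot protocol (Documentation/arm64/booting.txt in
the kernel tree) requires that at kernel entry x0 holds the physical address
of the device tree blob and x1-x3 are zero; the kernel warns when they are
not. The switch helpers previously kept their own operands in x3-x5,
clobbering part of that range, so the operands move to x4-x6 and x0-x3 are
passed through to the kernel untouched. A 64-bit handoff then looks roughly
like this (a sketch assembled from the call sites below; fdt_addr and
kernel_entry are illustrative labels, not symbols from this patch):

	ldr	x0, =fdt_addr		/* x0: device tree blob for the kernel */
	mov	x1, xzr			/* x1-x3: reserved by the boot protocol, */
	mov	x2, xzr			/* must be zero -- x3 is the new */
	mov	x3, xzr			/* "input argument 4" */
	ldr	x4, =kernel_entry	/* x4: address to jump to */
	ldr	x5, =ES_TO_AARCH64	/* x5: target execution state */
	bl	armv8_switch_to_el2
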
diff --git a/arch/arm/cpu/armv8/fsl-layerscape/lowlevel.S b/arch/arm/cpu/armv8/fsl-layerscape/lowlevel.S
index 62efa90..a2185f2 100644
--- a/arch/arm/cpu/armv8/fsl-layerscape/lowlevel.S
+++ b/arch/arm/cpu/armv8/fsl-layerscape/lowlevel.S
@@ -486,29 +486,29 @@
 	b.eq	1f
 
 #ifdef CONFIG_ARMV8_SWITCH_TO_EL1
-	adr	x3, secondary_switch_to_el1
-	ldr	x4, =ES_TO_AARCH64
+	adr	x4, secondary_switch_to_el1
+	ldr	x5, =ES_TO_AARCH64
 #else
-	ldr	x3, [x11]
-	ldr	x4, =ES_TO_AARCH32
+	ldr	x4, [x11]
+	ldr	x5, =ES_TO_AARCH32
 #endif
 	bl	secondary_switch_to_el2
 
 1:
 #ifdef CONFIG_ARMV8_SWITCH_TO_EL1
-	adr	x3, secondary_switch_to_el1
+	adr	x4, secondary_switch_to_el1
 #else
-	ldr	x3, [x11]
+	ldr	x4, [x11]
 #endif
-	ldr	x4, =ES_TO_AARCH64
+	ldr	x5, =ES_TO_AARCH64
 	bl	secondary_switch_to_el2
 
 ENDPROC(secondary_boot_func)
 
 ENTRY(secondary_switch_to_el2)
-	switch_el x5, 1f, 0f, 0f
+	switch_el x6, 1f, 0f, 0f
 0:	ret
-1:	armv8_switch_to_el2_m x3, x4, x5
+1:	armv8_switch_to_el2_m x4, x5, x6
 ENDPROC(secondary_switch_to_el2)
 
 ENTRY(secondary_switch_to_el1)
@@ -522,22 +522,22 @@
 	/* physical address of this cpus spin table element */
 	add	x11, x1, x0
 
-	ldr	x3, [x11]
+	ldr	x4, [x11]
 
 	ldr	x5, [x11, #24]
 	ldr	x6, =IH_ARCH_DEFAULT
 	cmp	x6, x5
 	b.eq	2f
 
-	ldr	x4, =ES_TO_AARCH32
+	ldr	x5, =ES_TO_AARCH32
 	bl	switch_to_el1
 
-2:	ldr	x4, =ES_TO_AARCH64
+2:	ldr	x5, =ES_TO_AARCH64
 
 switch_to_el1:
-	switch_el x5, 0f, 1f, 0f
+	switch_el x6, 0f, 1f, 0f
 0:	ret
-1:	armv8_switch_to_el1_m x3, x4, x5
+1:	armv8_switch_to_el1_m x4, x5, x6
 ENDPROC(secondary_switch_to_el1)
 
 	/* Ensure that the literals used by the secondary boot code are
diff --git a/arch/arm/cpu/armv8/sec_firmware_asm.S b/arch/arm/cpu/armv8/sec_firmware_asm.S
index 903195d..5ed3677 100644
--- a/arch/arm/cpu/armv8/sec_firmware_asm.S
+++ b/arch/arm/cpu/armv8/sec_firmware_asm.S
@@ -57,7 +57,8 @@
  * x0: argument, zero
  * x1: machine nr
  * x2: fdt address
- * x3: kernel entry point
+ * x3: input argument
+ * x4: kernel entry point
  * @param outputs for secure firmware:
  * x0: function id
  * x1: kernel entry point
@@ -65,10 +66,9 @@
  * x3: fdt address
 */
 ENTRY(armv8_el2_to_aarch32)
-	mov	x0, x3
 	mov	x3, x2
 	mov	x2, x1
-	mov	x1, x0
+	mov	x1, x4
 	ldr	x0, =0xc000ff04
 	smc	#0
 	ret
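
With the extra argument, the kernel entry point arrives in x4 and no longer
has to be staged through x0, which is why the old "mov x0, x3" round-trip
disappears. After this hunk the routine reads as below; the moves are ordered
so that each register is consumed before it is overwritten:

	mov	x3, x2		/* x3: fdt address (read x2 before clobbering it) */
	mov	x2, x1		/* x2: machine nr */
	mov	x1, x4		/* x1: kernel entry point */
	ldr	x0, =0xc000ff04	/* x0: secure firmware function id */
	smc	#0		/* trap to secure firmware */
	ret
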
diff --git a/arch/arm/cpu/armv8/start.S b/arch/arm/cpu/armv8/start.S
index 5308702..368e3dc 100644
--- a/arch/arm/cpu/armv8/start.S
+++ b/arch/arm/cpu/armv8/start.S
@@ -262,14 +262,14 @@
 	/*
 	 * All slaves will enter EL2 and optionally EL1.
 	 */
-	adr	x3, lowlevel_in_el2
-	ldr	x4, =ES_TO_AARCH64
+	adr	x4, lowlevel_in_el2
+	ldr	x5, =ES_TO_AARCH64
 	bl	armv8_switch_to_el2
 
 lowlevel_in_el2:
 #ifdef CONFIG_ARMV8_SWITCH_TO_EL1
-	adr	x3, lowlevel_in_el1
-	ldr	x4, =ES_TO_AARCH64
+	adr	x4, lowlevel_in_el1
+	ldr	x5, =ES_TO_AARCH64
 	bl	armv8_switch_to_el1
 
 lowlevel_in_el1:
diff --git a/arch/arm/cpu/armv8/transition.S b/arch/arm/cpu/armv8/transition.S
index adb9f35..ca07465 100644
--- a/arch/arm/cpu/armv8/transition.S
+++ b/arch/arm/cpu/armv8/transition.S
@@ -11,9 +11,9 @@
 #include <asm/macro.h>
 
 ENTRY(armv8_switch_to_el2)
-	switch_el x5, 1f, 0f, 0f
+	switch_el x6, 1f, 0f, 0f
 0:
-	cmp x4, #ES_TO_AARCH64
+	cmp x5, #ES_TO_AARCH64
 	b.eq 2f
 	/*
 	 * When loading 32-bit kernel, it will jump
@@ -22,23 +22,23 @@
 	bl armv8_el2_to_aarch32
 2:
 	/*
-	 * x3 is kernel entry point or switch_to_el1
+	 * x4 is kernel entry point or switch_to_el1
 	 * if CONFIG_ARMV8_SWITCH_TO_EL1 is defined.
          * When running in EL2 now, jump to the
-	 * address saved in x3.
+	 * address saved in x4.
 	 */
-	br x3
-1:	armv8_switch_to_el2_m x3, x4, x5
+	br x4
+1:	armv8_switch_to_el2_m x4, x5, x6
 ENDPROC(armv8_switch_to_el2)
 
 ENTRY(armv8_switch_to_el1)
-	switch_el x5, 0f, 1f, 0f
+	switch_el x6, 0f, 1f, 0f
 0:
-	/* x3 is kernel entry point. When running in EL1
-	 * now, jump to the address saved in x3.
+	/* x4 is kernel entry point. When running in EL1
+	 * now, jump to the address saved in x4.
 	 */
-	br x3
-1:	armv8_switch_to_el1_m x3, x4, x5
+	br x4
+1:	armv8_switch_to_el1_m x4, x5, x6
 ENDPROC(armv8_switch_to_el1)
 
 WEAK(armv8_el2_to_aarch32)
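
Finally, note that switch_el uses its first operand purely as a scratch
register to inspect CurrentEL, which is why it shifts from x5 to x6 in step
with the other operands (x5 now carries the execution-state flag). A sketch
of the macro's shape, assuming the definition in arch/arm/include/asm/macro.h:

	/* Read CurrentEL into the scratch register and dispatch to the
	 * label that matches the current exception level.
	 */
	.macro	switch_el, xreg, el3_label, el2_label, el1_label
		mrs	\xreg, CurrentEL
		cmp	\xreg, 0xc		/* EL3 */
		b.eq	\el3_label
		cmp	\xreg, 0x8		/* EL2 */
		b.eq	\el2_label
		cmp	\xreg, 0x4		/* EL1 */
		b.eq	\el1_label
	.endm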