Simplify management of SCTLR_EL3 and SCTLR_EL1
This patch reworks the manner in which the M, A, C, SA, I, WXN & EE bits of
SCTLR_EL3 & SCTLR_EL1 are managed. The EE bit is cleared immediately after reset
in EL3. The I, A and SA bits are set next in EL3 and immediately upon entry into
S-EL1. These bits are no longer managed in the blX_arch_setup() functions and no
longer need to be saved and restored. The M, WXN and optionally the C bit are
set in the enable_mmu_elX() function, during both the warm and cold boot paths.
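
For context, a minimal sketch of the enable_mmu_el3() behaviour described
above (illustrative only, not the exact patch; the flags parameter and the
DISABLE_DCACHE flag are assumptions for this sketch):

	void enable_mmu_el3(unsigned int flags)
	{
		unsigned long sctlr = read_sctlr_el3();

		/* M and WXN are always set when the MMU is enabled. */
		sctlr |= SCTLR_M_BIT | SCTLR_WXN_BIT;

		/* The C bit is set only if data caching is requested. */
		if ((flags & DISABLE_DCACHE) == 0)
			sctlr |= SCTLR_C_BIT;

		write_sctlr_el3(sctlr);
		isb();
	}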
Fixes ARM-software/tf-issues#226
Change-Id: Ie894d1a07b8697c116960d858cd138c50bc7a069
diff --git a/bl1/aarch64/bl1_arch_setup.c b/bl1/aarch64/bl1_arch_setup.c
index cf69ac7..eeaa24a 100644
--- a/bl1/aarch64/bl1_arch_setup.c
+++ b/bl1/aarch64/bl1_arch_setup.c
@@ -37,20 +37,11 @@
******************************************************************************/
void bl1_arch_setup(void)
{
- unsigned long tmp_reg = 0;
-
- /* Enable alignment checks */
- tmp_reg = read_sctlr_el3();
- tmp_reg |= (SCTLR_A_BIT | SCTLR_SA_BIT);
- write_sctlr_el3(tmp_reg);
- isb();
-
/*
* Set the next EL to be AArch64, route external abort and SError
* interrupts to EL3
*/
- tmp_reg = SCR_RES1_BITS | SCR_RW_BIT | SCR_EA_BIT;
- write_scr(tmp_reg);
+ write_scr_el3(SCR_RES1_BITS | SCR_RW_BIT | SCR_EA_BIT);
/*
* Enable SError and Debug exceptions
diff --git a/bl1/aarch64/bl1_entrypoint.S b/bl1/aarch64/bl1_entrypoint.S
index ac6d913..dd7d78f 100644
--- a/bl1/aarch64/bl1_entrypoint.S
+++ b/bl1/aarch64/bl1_entrypoint.S
@@ -44,7 +44,7 @@
func bl1_entrypoint
/* ---------------------------------------------
* Set the CPU endianness before doing anything
- * that might involve memory reads or writes
+ * that might involve memory reads or writes.
* ---------------------------------------------
*/
mrs x0, sctlr_el3
@@ -59,12 +59,14 @@
*/
bl cpu_reset_handler
- /* -------------------------------
- * Enable the instruction cache.
- * -------------------------------
+ /* ---------------------------------------------
+ * Enable the instruction cache, stack pointer
+ * and data access alignment checks
+ * ---------------------------------------------
*/
+ mov x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
mrs x0, sctlr_el3
- orr x0, x0, #SCTLR_I_BIT
+ orr x0, x0, x1
msr sctlr_el3, x0
isb
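
For reference, the SCTLR bit positions relied on above follow the ARMv8-A
SCTLR_ELx layout; a sketch of the definitions as conventionally found in the
tree's arch.h (names taken from the diff, values from the architecture):

	#define SCTLR_M_BIT	(1 << 0)	/* MMU enable */
	#define SCTLR_A_BIT	(1 << 1)	/* Alignment check enable */
	#define SCTLR_C_BIT	(1 << 2)	/* Data cache enable */
	#define SCTLR_SA_BIT	(1 << 3)	/* Stack alignment check enable */
	#define SCTLR_I_BIT	(1 << 12)	/* Instruction cache enable */
	#define SCTLR_WXN_BIT	(1 << 19)	/* Write implies Execute-never */
	#define SCTLR_EE_BIT	(1 << 25)	/* Exception endianness */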