ARMv7: introduce Cortex-A9

As the Cortex-A9 needs program flow prediction to be enabled manually,
do not reset SCTLR[Z] at entry. The platform should enable it only once
the MMU is enabled.
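
For reference only, a minimal sketch of such a platform hook (assuming
the generic SCTLR accessor and the ldcopr/stcopr helpers already used
in the code below; the bit 11 mask is written out rather than assuming
an SCTLR_Z_BIT define), to be run once the MMU has been enabled:

	ldcopr	r0, SCTLR
	orr	r0, r0, #(1 << 11)	/* SCTLR.Z: program flow prediction */
	stcopr	r0, SCTLR
	isb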

Change-Id: I34e1ee2da73221903f7767f23bc6fc10ad01e3de
Signed-off-by: Etienne Carriere <etienne.carriere@linaro.org>
diff --git a/include/lib/cpus/aarch32/cortex_a9.h b/include/lib/cpus/aarch32/cortex_a9.h
new file mode 100644
index 0000000..be85f9b
--- /dev/null
+++ b/include/lib/cpus/aarch32/cortex_a9.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#ifndef __CORTEX_A9_H__
+#define __CORTEX_A9_H__
+
+/*******************************************************************************
+ * Cortex-A9 midr with version/revision set to 0
+ ******************************************************************************/
+#define CORTEX_A9_MIDR			0x410FC090
+
+/*******************************************************************************
+ * CPU Auxiliary Control register specific definitions.
+ ******************************************************************************/
+#define CORTEX_A9_ACTLR_SMP_BIT		(1 << 6)
+#define CORTEX_A9_ACTLR_FLZW_BIT	(1 << 3)
+
+/*******************************************************************************
+ * CPU Power Control Register
+ ******************************************************************************/
+#define PCR		p15, 0, c15, c0, 0
+
+#ifndef __ASSEMBLY__
+#include <arch_helpers.h>
+DEFINE_COPROCR_RW_FUNCS(pcr, PCR)
+#endif
+
+#endif /* __CORTEX_A9_H__ */
diff --git a/lib/cpus/aarch32/cortex_a9.S b/lib/cpus/aarch32/cortex_a9.S
new file mode 100644
index 0000000..4f30f84
--- /dev/null
+++ b/lib/cpus/aarch32/cortex_a9.S
@@ -0,0 +1,78 @@
+/*
+ * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <arch.h>
+#include <asm_macros.S>
+#include <assert_macros.S>
+#include <cortex_a9.h>
+#include <cpu_macros.S>
+
+	.macro assert_cache_enabled
+#if ENABLE_ASSERTIONS
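+		/* Check that the data cache is disabled (SCTLR.C cleared) */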
+		ldcopr	r0, SCTLR
+		tst	r0, #SCTLR_C_BIT
+		ASM_ASSERT(eq)
+#endif
+	.endm
+
+func cortex_a9_disable_smp
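+	/* Take this core out of inter-core coherency: clear ACTLR.SMP */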
+	ldcopr	r0, ACTLR
+	bic	r0, #CORTEX_A9_ACTLR_SMP_BIT
+	stcopr	r0, ACTLR
+	isb
+	dsb	sy
+	bx	lr
+endfunc cortex_a9_disable_smp
+
+func cortex_a9_enable_smp
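+	/* Bring this core into inter-core coherency: set ACTLR.SMP */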
+	ldcopr	r0, ACTLR
+	orr	r0, #CORTEX_A9_ACTLR_SMP_BIT
+	stcopr	r0, ACTLR
+	isb
+	bx	lr
+endfunc cortex_a9_enable_smp
+
+func cortex_a9_reset_func
+	b	cortex_a9_enable_smp
+endfunc cortex_a9_reset_func
+
+func cortex_a9_core_pwr_dwn
+	push	{r12, lr}
+
+	assert_cache_enabled
+
+	/* Flush L1 cache */
+	mov	r0, #DC_OP_CISW
+	bl	dcsw_op_level1
+
+	/* Exit cluster coherency */
+	pop	{r12, lr}
+	b	cortex_a9_disable_smp
+endfunc cortex_a9_core_pwr_dwn
+
+func cortex_a9_cluster_pwr_dwn
+	push	{r12, lr}
+
+	assert_cache_enabled
+
+	/* Flush L1 caches */
+	mov	r0, #DC_OP_CISW
+	bl	dcsw_op_level1
+
+	bl	plat_disable_acp
+
+	/* Exit cluster coherency */
+	pop	{r12, lr}
+	b	cortex_a9_disable_smp
+endfunc cortex_a9_cluster_pwr_dwn
+
+declare_cpu_ops cortex_a9, CORTEX_A9_MIDR, \
+	cortex_a9_reset_func, \
+	cortex_a9_core_pwr_dwn, \
+	cortex_a9_cluster_pwr_dwn