/*
 * Copyright 2004, 2007, 2008 Freescale Semiconductor.
 * Srikanth Srinivasan <srikanth.srinivaan@freescale.com>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */
#include <config.h>
#include <mpc86xx.h>

#include <ppc_asm.tmpl>
#include <ppc_defs.h>

#include <asm/cache.h>
#include <asm/mmu.h>

/* If this is a multi-cpu system then we need to handle the
 * 2nd cpu.  The assumption is that the 2nd cpu is being
 * held in boot holdoff mode until the 1st cpu releases it
 * from Linux.  We do some basic cpu init here and then hand
 * the core off to the Linux reset vector.
 * Sri: Much of this initialization is not required.  Linux
 * rewrites the BATs and the SPRs, and also enables the L1 caches.
 *
 * Core 0 must copy this page to a 1MB-aligned region and set BPTR
 * to point to it.
 */
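/* For illustration only: core 0's side of this hand-off looks roughly
 * like the C sketch below.  The helper names are assumptions, not the
 * actual U-Boot functions.
 *
 *	extern char __secondary_start_page[];
 *	void *bootpg = alloc_boot_page();		// hypothetical: 1MB-aligned buffer
 *	memcpy(bootpg, __secondary_start_page, 4096);	// copy this whole page
 *	set_bptr(bootpg);				// hypothetical: point BPTR at the copy
 *
 * Once BPTR points at the copy and the core is released from holdoff,
 * its reset vector fetch lands at offset 0x100 of this page.
 */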
	.align 12
.globl __secondary_start_page
__secondary_start_page:
	.space 0x100	/* space over to reset vector loc */
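	/* Identify this core: MSSCR0[ID] distinguishes core 0 from
	 * core 1; record the result in PIR.
	 */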
	mfspr	r0, MSSCR0
	andi.	r0, r0, 0x0020
	rlwinm	r0,r0,27,31,31
	mtspr	PIR, r0

	/* Invalidate BATs */
	li	r0, 0
	mtspr	IBAT0U, r0
	mtspr	IBAT1U, r0
	mtspr	IBAT2U, r0
	mtspr	IBAT3U, r0
	mtspr	IBAT4U, r0
	mtspr	IBAT5U, r0
	mtspr	IBAT6U, r0
	mtspr	IBAT7U, r0
	isync
	mtspr	DBAT0U, r0
	mtspr	DBAT1U, r0
	mtspr	DBAT2U, r0
	mtspr	DBAT3U, r0
	mtspr	DBAT4U, r0
	mtspr	DBAT5U, r0
	mtspr	DBAT6U, r0
	mtspr	DBAT7U, r0
	isync
	sync

	/* enable extended addressing */
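	/* HIGH_BAT_EN makes IBAT/DBAT4-7 usable; XAEN and XBSEN enable
	 * extended (36-bit) addressing and the extended BAT block sizes.
	 */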
	mfspr	r0, HID0
	lis	r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@h
	ori	r0, r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@l
	mtspr	HID0, r0
	sync
	isync

#ifdef CONFIG_SYS_L2
	/* init the L2 cache */
	addis	r3, r0, L2_INIT@h
	ori	r3, r3, L2_INIT@l
	sync
	mtspr	l2cr, r3
#ifdef CONFIG_ALTIVEC
	dssall
#endif
	/* invalidate the L2 cache */
	mfspr	r3, l2cr
	rlwinm.	r3, r3, 0, 0, 0
	beq	1f

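	/* L2 is currently enabled: disable it before issuing the
	 * global invalidate.
	 */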
	mfspr	r3, l2cr
	rlwinm	r3, r3, 0, 1, 31

#ifdef CONFIG_ALTIVEC
	dssall
#endif
	sync
	mtspr	l2cr, r3
	sync
1:	mfspr	r3, l2cr
	oris	r3, r3, L2CR_L2I@h
	mtspr	l2cr, r3

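	/* Spin until the global invalidate completes and L2CR[L2I]
	 * reads back as zero.
	 */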
invl2:
	mfspr	r3, l2cr
	andis.	r3, r3, L2CR_L2I@h
	bne	invl2
	sync
#endif

	/* enable and invalidate the data cache */
	mfspr	r3, HID0
	li	r5, HID0_DCFI|HID0_DLOCK
	andc	r3, r3, r5
	mtspr	HID0, r3		/* no invalidate, unlock */
	ori	r3, r3, HID0_DCE
	ori	r5, r3, HID0_DCFI
	mtspr	HID0, r5		/* enable + invalidate */
	mtspr	HID0, r3		/* enable */
	sync
#ifdef CONFIG_SYS_L2
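	/* The data cache is up; now enable the L2 cache */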
	sync
	lis	r3, L2_ENABLE@h
	ori	r3, r3, L2_ENABLE@l
	mtspr	l2cr, r3
	isync
	sync
#endif

	/* enable and invalidate the instruction cache */
	mfspr	r3, HID0
	li	r5, HID0_ICFI|HID0_ILOCK
	andc	r3, r3, r5
	ori	r3, r3, HID0_ICE
	ori	r5, r3, HID0_ICFI
	mtspr	HID0, r5
	mtspr	HID0, r3
	isync
	sync

	/* Set TBEN (time base enable) in HID0 */
	mfspr	r4, HID0
	oris	r4, r4, 0x0400
	mtspr	HID0, r4
	sync
	isync

	/* Set MCP|SYNCBE|ABE in HID1 */
	mfspr	r4, HID1
	oris	r4, r4, 0x8000
	ori	r4, r4, 0x0C00
	mtspr	HID1, r4
	sync
	isync

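	/* Hand this core off to the Linux reset vector */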
	lis	r3, CONFIG_LINUX_RESET_VEC@h
	ori	r3, r3, CONFIG_LINUX_RESET_VEC@l
	mtlr	r3
	blr

	/* Never Returns, Running in Linux Now */