/*
 * Copyright 2004, 2007, 2008 Freescale Semiconductor.
 * Srikanth Srinivasan <srikanth.srinivaan@freescale.com>
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <config.h>
#include <mpc86xx.h>
#include <version.h>

#include <ppc_asm.tmpl>
#include <ppc_defs.h>

#include <asm/cache.h>
#include <asm/mmu.h>

/* If this is a multi-cpu system then we need to handle the
 * 2nd cpu.  The assumption is that the 2nd cpu is being
 * held in boot holdoff mode until the 1st cpu unlocks it
 * from Linux.  We'll do some basic cpu init and then pass
 * it to the Linux Reset Vector.
 * Sri: Much of this initialization is not required.  Linux
 * rewrites the bats, and the sprs and also enables the L1 cache.
 *
 * Core 0 must copy this to a 1M aligned region and set BPTR
 * to point to it.
 */
#if (CONFIG_NUM_CPUS > 1)
	.align 12
	.globl __secondary_start_page
__secondary_start_page:
	.space 0x100			/* space over to reset vector loc */

	/* Publish this core's id in PIR: MSSCR0 bit 0x20 distinguishes
	 * the cores; shift it down to the low bit so PIR reads 0 or 1.
	 */
	mfspr	r0, MSSCR0
	andi.	r0, r0, 0x0020		/* isolate the core-id bit */
	rlwinm	r0, r0, 27, 31, 31	/* move it to bit 31 -> 0 or 1 */
	mtspr	PIR, r0

	/* Invalidate BATs: clearing every upper BAT register marks all
	 * eight I/D BAT translations invalid before the MMU is touched.
	 */
	li	r0, 0
	mtspr	IBAT0U, r0
	mtspr	IBAT1U, r0
	mtspr	IBAT2U, r0
	mtspr	IBAT3U, r0
	mtspr	IBAT4U, r0
	mtspr	IBAT5U, r0
	mtspr	IBAT6U, r0
	mtspr	IBAT7U, r0
	isync
	mtspr	DBAT0U, r0
	mtspr	DBAT1U, r0
	mtspr	DBAT2U, r0
	mtspr	DBAT3U, r0
	mtspr	DBAT4U, r0
	mtspr	DBAT5U, r0
	mtspr	DBAT6U, r0
	mtspr	DBAT7U, r0
	isync
	sync

	/* Enable extended addressing: high BATs, extended block size,
	 * and extended addressing mode.
	 * NOTE(review): the mfspr below is dead code -- its result is
	 * immediately overwritten by the lis, so HID0 ends up holding
	 * ONLY these three bits instead of being read-modify-written.
	 * Kept as-is to preserve existing boot behavior; confirm intent
	 * before converting to a true read-modify-write.
	 */
	mfspr	r0, HID0
	lis	r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@h
	ori	r0, r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@l
	mtspr	HID0, r0
	sync
	isync

#ifdef CONFIG_SYS_L2
	/* init the L2 cache */
	addis	r3, r0, L2_INIT@h
	ori	r3, r3, L2_INIT@l
	sync
	mtspr	l2cr, r3
#ifdef CONFIG_ALTIVEC
	dssall				/* stop all data streams first */
#endif
	/* Invalidate the L2 cache.  If L2CR[L2E] (bit 0) is already set,
	 * disable the L2 before invalidating it.
	 */
	mfspr	r3, l2cr
	rlwinm.	r3, r3, 0, 0, 0		/* test L2E (msb) */
	beq	1f			/* not enabled -> invalidate directly */

	mfspr	r3, l2cr
	rlwinm	r3, r3, 0, 1, 31	/* clear L2E */

#ifdef CONFIG_ALTIVEC
	dssall
#endif
	sync
	mtspr	l2cr, r3
	sync
1:	mfspr	r3, l2cr
	oris	r3, r3, L2CR_L2I@h	/* request global invalidate */
	mtspr	l2cr, r3

invl2:
	/* Hardware clears L2I when the invalidation completes; poll it. */
	mfspr	r3, l2cr
	andis.	r3, r3, L2CR_L2I@h
	bne	invl2
	sync
#endif

	/* Enable and invalidate the data cache: first clear any stale
	 * invalidate/lock bits, then pulse DCFI while DCE is set.
	 */
	mfspr	r3, HID0
	li	r5, HID0_DCFI|HID0_DLOCK
	andc	r3, r3, r5
	mtspr	HID0, r3		/* no invalidate, unlock */
	ori	r3, r3, HID0_DCE
	ori	r5, r3, HID0_DCFI
	mtspr	HID0, r5		/* enable + invalidate */
	mtspr	HID0, r3		/* enable */
	sync
#ifdef CONFIG_SYS_L2
	/* FIX(review): this guard was the stale pre-rename "CFG_L2",
	 * which is never defined after the CFG_ -> CONFIG_SYS_ rename,
	 * leaving the L2 enable dead even when the matching init block
	 * above (CONFIG_SYS_L2) ran.  Renamed for consistency.
	 */
	sync
	lis	r3, L2_ENABLE@h
	ori	r3, r3, L2_ENABLE@l
	mtspr	l2cr, r3
	isync
	sync
#endif

	/* Enable and invalidate the instruction cache (same pattern as
	 * the data cache above: unlock, then pulse ICFI with ICE set).
	 */
	mfspr	r3, HID0
	li	r5, HID0_ICFI|HID0_ILOCK
	andc	r3, r3, r5
	ori	r3, r3, HID0_ICE
	ori	r5, r3, HID0_ICFI
	mtspr	HID0, r5
	mtspr	HID0, r3
	isync
	sync

	/* TBEN in HID0: 0x0400 shifted high = time-base enable bit */
	mfspr	r4, HID0
	oris	r4, r4, 0x0400
	mtspr	HID0, r4
	sync
	isync

	/* MCP|SYNCBE|ABE in HID1: machine-check pin enable plus
	 * sync/address broadcast enables (0x80000000 | 0x0C00).
	 */
	mfspr	r4, HID1
	oris	r4, r4, 0x8000
	ori	r4, r4, 0x0C00
	mtspr	HID1, r4
	sync
	isync

	/* Jump to the kernel's reset vector via LR; this never returns. */
	lis	r3, CONFIG_LINUX_RESET_VEC@h
	ori	r3, r3, CONFIG_LINUX_RESET_VEC@l
	mtlr	r3
	blr

	/* Never Returns, Running in Linux Now */
#endif