/*
 * Copyright 2004, 2007, 2008 Freescale Semiconductor.
 * Srikanth Srinivasan <srikanth.srinivaan@freescale.com>
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <config.h>
#include <mpc86xx.h>
#include <version.h>

#include <ppc_asm.tmpl>
#include <ppc_defs.h>

#include <asm/cache.h>
#include <asm/mmu.h>

/* If this is a multi-cpu system then we need to handle the
 * 2nd cpu.  The assumption is that the 2nd cpu is being
 * held in boot holdoff mode until the 1st cpu unlocks it
 * from Linux.  We'll do some basic cpu init and then pass
 * it to the Linux reset vector.
 * Sri: Much of this initialization is not required, since Linux
 * rewrites the BATs and SPRs and also enables the L1 cache.
 *
 * Core 0 must copy this page to a 1M-aligned region and set BPTR
 * to point to it.
 */
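/*
 * For illustration, core 0 would relocate this page and program BPTR
 * roughly as follows (a sketch only, not the exact U-Boot code;
 * "bootpg" and "gur" are placeholder names):
 *
 *	memcpy((void *)bootpg, (void *)__secondary_start_page, 4096);
 *	out_be32(&gur->bptr, 0x80000000 | (bootpg >> 12));
 *
 * Here 0x80000000 is assumed to be the BPTR enable bit and the low
 * field holds the boot page's physical address shifted right by 12.
 */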
	.align 12
.globl __secondary_start_page
__secondary_start_page:
	.space 0x100	/* space over to reset vector loc */
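	/*
	 * MSSCR0[ID] identifies which core we are running on (0 or 1);
	 * move it down to bit 31 and record it in PIR.
	 */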
	mfspr	r0, MSSCR0
	andi.	r0, r0, 0x0020
	rlwinm	r0, r0, 27, 31, 31
	mtspr	PIR, r0

	/* Invalidate BATs */
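	/* Writing 0 to the upper BATs clears their valid (Vs/Vp) bits. */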
	li	r0, 0
	mtspr	IBAT0U, r0
	mtspr	IBAT1U, r0
	mtspr	IBAT2U, r0
	mtspr	IBAT3U, r0
	mtspr	IBAT4U, r0
	mtspr	IBAT5U, r0
	mtspr	IBAT6U, r0
	mtspr	IBAT7U, r0
	isync
	mtspr	DBAT0U, r0
	mtspr	DBAT1U, r0
	mtspr	DBAT2U, r0
	mtspr	DBAT3U, r0
	mtspr	DBAT4U, r0
	mtspr	DBAT5U, r0
	mtspr	DBAT6U, r0
	mtspr	DBAT7U, r0
	isync
	sync

	/* enable extended addressing */
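	/*
	 * HID0[HIGH_BAT_EN] makes IBAT4-7/DBAT4-7 available, HID0[XBSEN]
	 * enables the extended BAT block sizes, and HID0[XAEN] enables
	 * 36-bit extended addressing.
	 */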
	mfspr	r0, HID0
	oris	r0, r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@h
	ori	r0, r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@l
	mtspr	HID0, r0
	sync
	isync

#ifdef CONFIG_SYS_L2
	/* init the L2 cache */
	lis	r3, L2_INIT@h
	ori	r3, r3, L2_INIT@l
	sync
	mtspr	l2cr, r3
#ifdef CONFIG_ALTIVEC
	dssall
#endif
	/* invalidate the L2 cache */
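	/*
	 * If L2CR[L2E] is set, the L2 must be disabled before the global
	 * invalidate, so clear it first.  Then set L2CR[L2I] and poll
	 * until the hardware clears it again.
	 */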
	mfspr	r3, l2cr
	rlwinm.	r3, r3, 0, 0, 0
	beq	1f

	mfspr	r3, l2cr
	rlwinm	r3, r3, 0, 1, 31

#ifdef CONFIG_ALTIVEC
	dssall
#endif
	sync
	mtspr	l2cr, r3
	sync
1:	mfspr	r3, l2cr
	oris	r3, r3, L2CR_L2I@h
	mtspr	l2cr, r3

invl2:
	mfspr	r3, l2cr
	andis.	r3, r3, L2CR_L2I@h
	bne	invl2
	sync
#endif

	/* enable and invalidate the data cache */
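	/*
	 * Clear HID0[DLOCK] and HID0[DCFI], then set HID0[DCE] together
	 * with DCFI to flash-invalidate the data cache, and finally clear
	 * DCFI again, leaving the cache enabled.
	 */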
	mfspr	r3, HID0
	li	r5, HID0_DCFI|HID0_DLOCK
	andc	r3, r3, r5
	mtspr	HID0, r3		/* no invalidate, unlock */
	ori	r3, r3, HID0_DCE
	ori	r5, r3, HID0_DCFI
	mtspr	HID0, r5		/* enable + invalidate */
	mtspr	HID0, r3		/* enable */
	sync
#ifdef CONFIG_SYS_L2
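	/* now that it has been invalidated, enable the L2 cache */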
	sync
	lis	r3, L2_ENABLE@h
	ori	r3, r3, L2_ENABLE@l
	mtspr	l2cr, r3
	isync
	sync
#endif

	/* enable and invalidate the instruction cache */
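	/*
	 * Same sequence as for the data cache, using HID0[ICE],
	 * HID0[ICFI] and HID0[ILOCK].
	 */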
	mfspr	r3, HID0
	li	r5, HID0_ICFI|HID0_ILOCK
	andc	r3, r3, r5
	ori	r3, r3, HID0_ICE
	ori	r5, r3, HID0_ICFI
	mtspr	HID0, r5
	mtspr	HID0, r3
	isync
	sync

	/* TBEN in HID0 */
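	/* 0x0400 in the upper half word is HID0[TBEN]: enable the time base */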
	mfspr	r4, HID0
	oris	r4, r4, 0x0400
	mtspr	HID0, r4
	sync
	isync

	/* MCP|SYNCBE|ABE in HID1 */
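	/*
	 * Enable the machine check pin (MCP), sync/eieio broadcast
	 * (SYNCBE) and address broadcast (ABE).
	 */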
	mfspr	r4, HID1
	oris	r4, r4, 0x8000
	ori	r4, r4, 0x0C00
	mtspr	HID1, r4
	sync
	isync

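	/* Load the Linux reset vector address and branch to it */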
	lis	r3, CONFIG_LINUX_RESET_VEC@h
	ori	r3, r3, CONFIG_LINUX_RESET_VEC@l
	mtlr	r3
	blr

	/* Never returns; running in Linux now */