1 /*
2  * arch/powerpc/cpu/ppc4xx/44x_spd_ddr2.c
3  * This SPD SDRAM detection code supports AMCC PPC44x cpu's with a
4  * DDR2 controller (non Denali Core). Those currently are:
5  *
6  * 405:		405EX(r)
7  * 440/460:	440SP/440SPe/460EX/460GT
8  *
9  * Copyright (c) 2008 Nuovation System Designs, LLC
10  *   Grant Erickson <gerickson@nuovations.com>
11 
12  * (C) Copyright 2007-2009
13  * Stefan Roese, DENX Software Engineering, sr@denx.de.
14  *
15  * COPYRIGHT   AMCC   CORPORATION 2004
16  *
17  * See file CREDITS for list of people who contributed to this
18  * project.
19  *
20  * This program is free software; you can redistribute it and/or
21  * modify it under the terms of the GNU General Public License as
22  * published by the Free Software Foundation; either version 2 of
23  * the License, or (at your option) any later version.
24  *
25  * This program is distributed in the hope that it will be useful,
26  * but WITHOUT ANY WARRANTY; without even the implied warranty of
27  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.	 See the
28  * GNU General Public License for more details.
29  *
30  * You should have received a copy of the GNU General Public License
31  * along with this program; if not, write to the Free Software
32  * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
33  * MA 02111-1307 USA
34  *
35  */
36 
37 /* define DEBUG for debugging output (obviously ;-)) */
38 #if 0
39 #define DEBUG
40 #endif
41 
42 #include <common.h>
43 #include <command.h>
44 #include <asm/ppc4xx.h>
45 #include <i2c.h>
46 #include <asm/io.h>
47 #include <asm/processor.h>
48 #include <asm/mmu.h>
49 #include <asm/cache.h>
50 
51 #include "ecc.h"
52 
53 #define PPC4xx_IBM_DDR2_DUMP_REGISTER(mnemonic)				\
54 	do {								\
55 		u32 data;						\
56 		mfsdram(SDRAM_##mnemonic, data);			\
57 		printf("%20s[%02x] = 0x%08X\n",				\
58 		       "SDRAM_" #mnemonic, SDRAM_##mnemonic, data);	\
59 	} while (0)
60 
61 #define PPC4xx_IBM_DDR2_DUMP_MQ_REGISTER(mnemonic)			\
62 	do {								\
63 		u32 data;						\
64 		data = mfdcr(SDRAM_##mnemonic);				\
65 		printf("%20s[%02x] = 0x%08X\n",				\
66 		       "SDRAM_" #mnemonic, SDRAM_##mnemonic, data);	\
67 	} while (0)
68 
69 #if !defined(CONFIG_NAND_U_BOOT) || defined(CONFIG_NAND_SPL)
update_rdcc(void)70 static void update_rdcc(void)
71 {
72 	u32 val;
73 
74 	/*
75 	 * Complete RDSS configuration as mentioned on page 7 of the AMCC
76 	 * PowerPC440SP/SPe DDR2 application note:
77 	 * "DDR1/DDR2 Initialization Sequence and Dynamic Tuning"
78 	 *
79 	 * Or item #10 "10. Complete RDSS configuration" in chapter
80 	 * "22.2.9 SDRAM Initialization" of AMCC PPC460EX/EXr/GT users
81 	 * manual.
82 	 */
83 	mfsdram(SDRAM_RTSR, val);
84 	if ((val & SDRAM_RTSR_TRK1SM_MASK) == SDRAM_RTSR_TRK1SM_ATPLS1) {
85 		mfsdram(SDRAM_RDCC, val);
86 		if ((val & SDRAM_RDCC_RDSS_MASK) != SDRAM_RDCC_RDSS_T4) {
87 			val += 0x40000000;
88 			mtsdram(SDRAM_RDCC, val);
89 		}
90 	}
91 }
92 #endif
93 
94 #if defined(CONFIG_440)
95 /*
96  * This DDR2 setup code can dynamically setup the TLB entries for the DDR2
97  * memory region. Right now the cache should still be disabled in U-Boot
98  * because of the EMAC driver, that need its buffer descriptor to be located
99  * in non cached memory.
100  *
101  * If at some time this restriction doesn't apply anymore, just define
102  * CONFIG_4xx_DCACHE in the board config file and this code should setup
103  * everything correctly.
104  */
105 #ifdef CONFIG_4xx_DCACHE
106 /* enable caching on SDRAM */
107 #define MY_TLB_WORD2_I_ENABLE		0
108 #else
109 /* disable caching on SDRAM */
110 #define MY_TLB_WORD2_I_ENABLE		TLB_WORD2_I_ENABLE
111 #endif /* CONFIG_4xx_DCACHE */
112 
113 void dcbz_area(u32 start_address, u32 num_bytes);
114 #endif /* CONFIG_440 */
115 
116 #define MAXRANKS	4
117 #define MAXBXCF		4
118 
119 #define MULDIV64(m1, m2, d)	(u32)(((u64)(m1) * (u64)(m2)) / (u64)(d))
120 
121 #if !defined(CONFIG_NAND_SPL)
122 /*-----------------------------------------------------------------------------+
123  * sdram_memsize
124  *-----------------------------------------------------------------------------*/
sdram_memsize(void)125 phys_size_t sdram_memsize(void)
126 {
127 	phys_size_t mem_size;
128 	unsigned long mcopt2;
129 	unsigned long mcstat;
130 	unsigned long mb0cf;
131 	unsigned long sdsz;
132 	unsigned long i;
133 
134 	mem_size = 0;
135 
136 	mfsdram(SDRAM_MCOPT2, mcopt2);
137 	mfsdram(SDRAM_MCSTAT, mcstat);
138 
139 	/* DDR controller must be enabled and not in self-refresh. */
140 	/* Otherwise memsize is zero. */
141 	if (((mcopt2 & SDRAM_MCOPT2_DCEN_MASK) == SDRAM_MCOPT2_DCEN_ENABLE)
142 	    && ((mcopt2 & SDRAM_MCOPT2_SREN_MASK) == SDRAM_MCOPT2_SREN_EXIT)
143 	    && ((mcstat & (SDRAM_MCSTAT_MIC_MASK | SDRAM_MCSTAT_SRMS_MASK))
144 		== (SDRAM_MCSTAT_MIC_COMP | SDRAM_MCSTAT_SRMS_NOT_SF))) {
145 		for (i = 0; i < MAXBXCF; i++) {
146 			mfsdram(SDRAM_MB0CF + (i << 2), mb0cf);
147 			/* Banks enabled */
148 			if ((mb0cf & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
149 #if defined(CONFIG_440)
150 				sdsz = mfdcr_any(SDRAM_R0BAS + i) & SDRAM_RXBAS_SDSZ_MASK;
151 #else
152 				sdsz = mb0cf & SDRAM_RXBAS_SDSZ_MASK;
153 #endif
154 				switch(sdsz) {
155 				case SDRAM_RXBAS_SDSZ_8:
156 					mem_size+=8;
157 					break;
158 				case SDRAM_RXBAS_SDSZ_16:
159 					mem_size+=16;
160 					break;
161 				case SDRAM_RXBAS_SDSZ_32:
162 					mem_size+=32;
163 					break;
164 				case SDRAM_RXBAS_SDSZ_64:
165 					mem_size+=64;
166 					break;
167 				case SDRAM_RXBAS_SDSZ_128:
168 					mem_size+=128;
169 					break;
170 				case SDRAM_RXBAS_SDSZ_256:
171 					mem_size+=256;
172 					break;
173 				case SDRAM_RXBAS_SDSZ_512:
174 					mem_size+=512;
175 					break;
176 				case SDRAM_RXBAS_SDSZ_1024:
177 					mem_size+=1024;
178 					break;
179 				case SDRAM_RXBAS_SDSZ_2048:
180 					mem_size+=2048;
181 					break;
182 				case SDRAM_RXBAS_SDSZ_4096:
183 					mem_size+=4096;
184 					break;
185 				default:
186 					printf("WARNING: Unsupported bank size (SDSZ=0x%lx)!\n"
187 					       , sdsz);
188 					mem_size=0;
189 					break;
190 				}
191 			}
192 		}
193 	}
194 
195 	return mem_size << 20;
196 }
197 
198 /*-----------------------------------------------------------------------------+
199  * is_ecc_enabled
200  *-----------------------------------------------------------------------------*/
is_ecc_enabled(void)201 static unsigned long is_ecc_enabled(void)
202 {
203 	unsigned long val;
204 
205 	mfsdram(SDRAM_MCOPT1, val);
206 
207 	return SDRAM_MCOPT1_MCHK_CHK_DECODE(val);
208 }
209 
210 /*-----------------------------------------------------------------------------+
211  * board_add_ram_info
212  *-----------------------------------------------------------------------------*/
board_add_ram_info(int use_default)213 void board_add_ram_info(int use_default)
214 {
215 	PPC4xx_SYS_INFO board_cfg;
216 	u32 val;
217 
218 	if (is_ecc_enabled())
219 		puts(" (ECC");
220 	else
221 		puts(" (ECC not");
222 
223 	get_sys_info(&board_cfg);
224 
225 #if defined(CONFIG_405EX)
226 	val = board_cfg.freqPLB;
227 #else
228 	mfsdr(SDR0_DDR0, val);
229 	val = MULDIV64((board_cfg.freqPLB), SDR0_DDR0_DDRM_DECODE(val), 1);
230 #endif
231 	printf(" enabled, %d MHz", (val * 2) / 1000000);
232 
233 	mfsdram(SDRAM_MMODE, val);
234 	val = (val & SDRAM_MMODE_DCL_MASK) >> 4;
235 	printf(", CL%d)", val);
236 }
237 #endif /* !CONFIG_NAND_SPL */
238 
239 #if defined(CONFIG_SPD_EEPROM)
240 
241 /*-----------------------------------------------------------------------------+
242  * Defines
243  *-----------------------------------------------------------------------------*/
244 #ifndef	TRUE
245 #define TRUE		1
246 #endif
247 #ifndef FALSE
248 #define FALSE		0
249 #endif
250 
251 #define SDRAM_DDR1	1
252 #define SDRAM_DDR2	2
253 #define SDRAM_NONE	0
254 
255 #define MAXDIMMS	2
256 #define MAX_SPD_BYTES	256   /* Max number of bytes on the DIMM's SPD EEPROM */
257 
258 #define ONE_BILLION	1000000000
259 
260 #define CMD_NOP		(7 << 19)
261 #define CMD_PRECHARGE	(2 << 19)
262 #define CMD_REFRESH	(1 << 19)
263 #define CMD_EMR		(0 << 19)
264 #define CMD_READ	(5 << 19)
265 #define CMD_WRITE	(4 << 19)
266 
267 #define SELECT_MR	(0 << 16)
268 #define SELECT_EMR	(1 << 16)
269 #define SELECT_EMR2	(2 << 16)
270 #define SELECT_EMR3	(3 << 16)
271 
272 /* MR */
273 #define DLL_RESET	0x00000100
274 
275 #define WRITE_RECOV_2	(1 << 9)
276 #define WRITE_RECOV_3	(2 << 9)
277 #define WRITE_RECOV_4	(3 << 9)
278 #define WRITE_RECOV_5	(4 << 9)
279 #define WRITE_RECOV_6	(5 << 9)
280 
281 #define BURST_LEN_4	0x00000002
282 
283 /* EMR */
284 #define ODT_0_OHM	0x00000000
285 #define ODT_50_OHM	0x00000044
286 #define ODT_75_OHM	0x00000004
287 #define ODT_150_OHM	0x00000040
288 
289 #define ODS_FULL	0x00000000
290 #define ODS_REDUCED	0x00000002
291 #define OCD_CALIB_DEF	0x00000380
292 
293 /* defines for ODT (On Die Termination) of the 440SP(e) DDR2 controller */
294 #define ODT_EB0R	(0x80000000 >> 8)
295 #define ODT_EB0W	(0x80000000 >> 7)
296 #define CALC_ODT_R(n)	(ODT_EB0R << (n << 1))
297 #define CALC_ODT_W(n)	(ODT_EB0W << (n << 1))
298 #define CALC_ODT_RW(n)	(CALC_ODT_R(n) | CALC_ODT_W(n))
299 
300 /* Defines for the Read Cycle Delay test */
301 #define NUMMEMTESTS	8
302 #define NUMMEMWORDS	8
303 #define NUMLOOPS	64		/* memory test loops */
304 
305 /*
306  * Newer PPC's like 440SPe, 460EX/GT can be equipped with more than 2GB of SDRAM.
307  * To support such configurations, we "only" map the first 2GB via the TLB's. We
308  * need some free virtual address space for the remaining peripherals like, SoC
309  * devices, FLASH etc.
310  *
311  * Note that ECC is currently not supported on configurations with more than 2GB
312  * SDRAM. This is because we only map the first 2GB on such systems, and therefore
313  * the ECC parity byte of the remaining area can't be written.
314  */
315 
316 /*
317  * Board-specific Platform code can reimplement spd_ddr_init_hang () if needed
318  */
__spd_ddr_init_hang(void)319 void __spd_ddr_init_hang (void)
320 {
321 	hang ();
322 }
323 void spd_ddr_init_hang (void) __attribute__((weak, alias("__spd_ddr_init_hang")));
324 
325 /*
326  * To provide an interface for board specific config values in this common
327  * DDR setup code, we implement he "weak" default functions here. They return
328  * the default value back to the caller.
329  *
330  * Please see include/configs/yucca.h for an example fora board specific
331  * implementation.
332  */
__ddr_wrdtr(u32 default_val)333 u32 __ddr_wrdtr(u32 default_val)
334 {
335 	return default_val;
336 }
337 u32 ddr_wrdtr(u32) __attribute__((weak, alias("__ddr_wrdtr")));
338 
__ddr_clktr(u32 default_val)339 u32 __ddr_clktr(u32 default_val)
340 {
341 	return default_val;
342 }
343 u32 ddr_clktr(u32) __attribute__((weak, alias("__ddr_clktr")));
344 
345 
346 /* Private Structure Definitions */
347 
348 /* enum only to ease code for cas latency setting */
349 typedef enum ddr_cas_id {
350 	DDR_CAS_2      = 20,
351 	DDR_CAS_2_5    = 25,
352 	DDR_CAS_3      = 30,
353 	DDR_CAS_4      = 40,
354 	DDR_CAS_5      = 50
355 } ddr_cas_id_t;
356 
357 /*-----------------------------------------------------------------------------+
358  * Prototypes
359  *-----------------------------------------------------------------------------*/
360 static void get_spd_info(unsigned long *dimm_populated,
361 			 unsigned char *iic0_dimm_addr,
362 			 unsigned long num_dimm_banks);
363 static void check_mem_type(unsigned long *dimm_populated,
364 			   unsigned char *iic0_dimm_addr,
365 			   unsigned long num_dimm_banks);
366 static void check_frequency(unsigned long *dimm_populated,
367 			    unsigned char *iic0_dimm_addr,
368 			    unsigned long num_dimm_banks);
369 static void check_rank_number(unsigned long *dimm_populated,
370 			      unsigned char *iic0_dimm_addr,
371 			      unsigned long num_dimm_banks);
372 static void check_voltage_type(unsigned long *dimm_populated,
373 			       unsigned char *iic0_dimm_addr,
374 			       unsigned long num_dimm_banks);
375 static void program_memory_queue(unsigned long *dimm_populated,
376 				 unsigned char *iic0_dimm_addr,
377 				 unsigned long num_dimm_banks);
378 static void program_codt(unsigned long *dimm_populated,
379 			 unsigned char *iic0_dimm_addr,
380 			 unsigned long num_dimm_banks);
381 static void program_mode(unsigned long *dimm_populated,
382 			 unsigned char *iic0_dimm_addr,
383 			 unsigned long num_dimm_banks,
384 			 ddr_cas_id_t *selected_cas,
385 			 int *write_recovery);
386 static void program_tr(unsigned long *dimm_populated,
387 		       unsigned char *iic0_dimm_addr,
388 		       unsigned long num_dimm_banks);
389 static void program_rtr(unsigned long *dimm_populated,
390 			unsigned char *iic0_dimm_addr,
391 			unsigned long num_dimm_banks);
392 static void program_bxcf(unsigned long *dimm_populated,
393 			 unsigned char *iic0_dimm_addr,
394 			 unsigned long num_dimm_banks);
395 static void program_copt1(unsigned long *dimm_populated,
396 			  unsigned char *iic0_dimm_addr,
397 			  unsigned long num_dimm_banks);
398 static void program_initplr(unsigned long *dimm_populated,
399 			    unsigned char *iic0_dimm_addr,
400 			    unsigned long num_dimm_banks,
401 			    ddr_cas_id_t selected_cas,
402 			    int write_recovery);
403 #ifdef CONFIG_DDR_ECC
404 static void program_ecc(unsigned long *dimm_populated,
405 			unsigned char *iic0_dimm_addr,
406 			unsigned long num_dimm_banks,
407 			unsigned long tlb_word2_i_value);
408 #endif
409 #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
410 static void program_DQS_calibration(unsigned long *dimm_populated,
411 				unsigned char *iic0_dimm_addr,
412 				unsigned long num_dimm_banks);
413 #ifdef HARD_CODED_DQS /* calibration test with hardvalues */
414 static void	test(void);
415 #else
416 static void	DQS_calibration_process(void);
417 #endif
418 #endif
419 
spd_read(uchar chip,uint addr)420 static unsigned char spd_read(uchar chip, uint addr)
421 {
422 	unsigned char data[2];
423 
424 	if (i2c_probe(chip) == 0)
425 		if (i2c_read(chip, addr, 1, data, 1) == 0)
426 			return data[0];
427 
428 	return 0;
429 }
430 
431 /*-----------------------------------------------------------------------------+
432  * initdram.  Initializes the 440SP Memory Queue and DDR SDRAM controller.
433  * Note: This routine runs from flash with a stack set up in the chip's
434  * sram space.  It is important that the routine does not require .sbss, .bss or
435  * .data sections.  It also cannot call routines that require these sections.
436  *-----------------------------------------------------------------------------*/
437 /*-----------------------------------------------------------------------------
438  * Function:	 initdram
439  * Description:  Configures SDRAM memory banks for DDR operation.
440  *		 Auto Memory Configuration option reads the DDR SDRAM EEPROMs
441  *		 via the IIC bus and then configures the DDR SDRAM memory
442  *		 banks appropriately. If Auto Memory Configuration is
443  *		 not used, it is assumed that no DIMM is plugged
444  *-----------------------------------------------------------------------------*/
initdram(int board_type)445 phys_size_t initdram(int board_type)
446 {
447 	unsigned char iic0_dimm_addr[] = SPD_EEPROM_ADDRESS;
448 	unsigned char spd0[MAX_SPD_BYTES];
449 	unsigned char spd1[MAX_SPD_BYTES];
450 	unsigned char *dimm_spd[MAXDIMMS];
451 	unsigned long dimm_populated[MAXDIMMS] = {SDRAM_NONE, SDRAM_NONE};
452 	unsigned long num_dimm_banks;		/* on board dimm banks */
453 	unsigned long val;
454 	ddr_cas_id_t selected_cas = DDR_CAS_5;	/* preset to silence compiler */
455 	int write_recovery;
456 	phys_size_t dram_size = 0;
457 
458 	num_dimm_banks = sizeof(iic0_dimm_addr);
459 
460 	/*------------------------------------------------------------------
461 	 * Set up an array of SPD matrixes.
462 	 *-----------------------------------------------------------------*/
463 	dimm_spd[0] = spd0;
464 	dimm_spd[1] = spd1;
465 
466 	/*------------------------------------------------------------------
467 	 * Reset the DDR-SDRAM controller.
468 	 *-----------------------------------------------------------------*/
469 	mtsdr(SDR0_SRST, SDR0_SRST0_DMC);
470 	mtsdr(SDR0_SRST, 0x00000000);
471 
472 	/*
473 	 * Make sure I2C controller is initialized
474 	 * before continuing.
475 	 */
476 
477 	/* switch to correct I2C bus */
478 	I2C_SET_BUS(CONFIG_SYS_SPD_BUS_NUM);
479 	i2c_init(CONFIG_SYS_I2C_SPEED, CONFIG_SYS_I2C_SLAVE);
480 
481 	/*------------------------------------------------------------------
482 	 * Clear out the serial presence detect buffers.
483 	 * Perform IIC reads from the dimm.  Fill in the spds.
484 	 * Check to see if the dimm slots are populated
485 	 *-----------------------------------------------------------------*/
486 	get_spd_info(dimm_populated, iic0_dimm_addr, num_dimm_banks);
487 
488 	/*------------------------------------------------------------------
489 	 * Check the memory type for the dimms plugged.
490 	 *-----------------------------------------------------------------*/
491 	check_mem_type(dimm_populated, iic0_dimm_addr, num_dimm_banks);
492 
493 	/*------------------------------------------------------------------
494 	 * Check the frequency supported for the dimms plugged.
495 	 *-----------------------------------------------------------------*/
496 	check_frequency(dimm_populated, iic0_dimm_addr, num_dimm_banks);
497 
498 	/*------------------------------------------------------------------
499 	 * Check the total rank number.
500 	 *-----------------------------------------------------------------*/
501 	check_rank_number(dimm_populated, iic0_dimm_addr, num_dimm_banks);
502 
503 	/*------------------------------------------------------------------
504 	 * Check the voltage type for the dimms plugged.
505 	 *-----------------------------------------------------------------*/
506 	check_voltage_type(dimm_populated, iic0_dimm_addr, num_dimm_banks);
507 
508 	/*------------------------------------------------------------------
509 	 * Program SDRAM controller options 2 register
510 	 * Except Enabling of the memory controller.
511 	 *-----------------------------------------------------------------*/
512 	mfsdram(SDRAM_MCOPT2, val);
513 	mtsdram(SDRAM_MCOPT2,
514 		(val &
515 		 ~(SDRAM_MCOPT2_SREN_MASK | SDRAM_MCOPT2_PMEN_MASK |
516 		   SDRAM_MCOPT2_IPTR_MASK | SDRAM_MCOPT2_XSRP_MASK |
517 		   SDRAM_MCOPT2_ISIE_MASK))
518 		| (SDRAM_MCOPT2_SREN_ENTER | SDRAM_MCOPT2_PMEN_DISABLE |
519 		   SDRAM_MCOPT2_IPTR_IDLE | SDRAM_MCOPT2_XSRP_ALLOW |
520 		   SDRAM_MCOPT2_ISIE_ENABLE));
521 
522 	/*------------------------------------------------------------------
523 	 * Program SDRAM controller options 1 register
524 	 * Note: Does not enable the memory controller.
525 	 *-----------------------------------------------------------------*/
526 	program_copt1(dimm_populated, iic0_dimm_addr, num_dimm_banks);
527 
528 	/*------------------------------------------------------------------
529 	 * Set the SDRAM Controller On Die Termination Register
530 	 *-----------------------------------------------------------------*/
531 	program_codt(dimm_populated, iic0_dimm_addr, num_dimm_banks);
532 
533 	/*------------------------------------------------------------------
534 	 * Program SDRAM refresh register.
535 	 *-----------------------------------------------------------------*/
536 	program_rtr(dimm_populated, iic0_dimm_addr, num_dimm_banks);
537 
538 	/*------------------------------------------------------------------
539 	 * Program SDRAM mode register.
540 	 *-----------------------------------------------------------------*/
541 	program_mode(dimm_populated, iic0_dimm_addr, num_dimm_banks,
542 		     &selected_cas, &write_recovery);
543 
544 	/*------------------------------------------------------------------
545 	 * Set the SDRAM Write Data/DM/DQS Clock Timing Reg
546 	 *-----------------------------------------------------------------*/
547 	mfsdram(SDRAM_WRDTR, val);
548 	mtsdram(SDRAM_WRDTR, (val & ~(SDRAM_WRDTR_LLWP_MASK | SDRAM_WRDTR_WTR_MASK)) |
549 		ddr_wrdtr(SDRAM_WRDTR_LLWP_1_CYC | SDRAM_WRDTR_WTR_90_DEG_ADV));
550 
551 	/*------------------------------------------------------------------
552 	 * Set the SDRAM Clock Timing Register
553 	 *-----------------------------------------------------------------*/
554 	mfsdram(SDRAM_CLKTR, val);
555 	mtsdram(SDRAM_CLKTR, (val & ~SDRAM_CLKTR_CLKP_MASK) |
556 		ddr_clktr(SDRAM_CLKTR_CLKP_0_DEG));
557 
558 	/*------------------------------------------------------------------
559 	 * Program the BxCF registers.
560 	 *-----------------------------------------------------------------*/
561 	program_bxcf(dimm_populated, iic0_dimm_addr, num_dimm_banks);
562 
563 	/*------------------------------------------------------------------
564 	 * Program SDRAM timing registers.
565 	 *-----------------------------------------------------------------*/
566 	program_tr(dimm_populated, iic0_dimm_addr, num_dimm_banks);
567 
568 	/*------------------------------------------------------------------
569 	 * Set the Extended Mode register
570 	 *-----------------------------------------------------------------*/
571 	mfsdram(SDRAM_MEMODE, val);
572 	mtsdram(SDRAM_MEMODE,
573 		(val & ~(SDRAM_MEMODE_DIC_MASK  | SDRAM_MEMODE_DLL_MASK |
574 			 SDRAM_MEMODE_RTT_MASK | SDRAM_MEMODE_DQS_MASK)) |
575 		(SDRAM_MEMODE_DIC_NORMAL | SDRAM_MEMODE_DLL_ENABLE
576 		 | SDRAM_MEMODE_RTT_150OHM | SDRAM_MEMODE_DQS_ENABLE));
577 
578 	/*------------------------------------------------------------------
579 	 * Program Initialization preload registers.
580 	 *-----------------------------------------------------------------*/
581 	program_initplr(dimm_populated, iic0_dimm_addr, num_dimm_banks,
582 			selected_cas, write_recovery);
583 
584 	/*------------------------------------------------------------------
585 	 * Delay to ensure 200usec have elapsed since reset.
586 	 *-----------------------------------------------------------------*/
587 	udelay(400);
588 
589 	/*------------------------------------------------------------------
590 	 * Set the memory queue core base addr.
591 	 *-----------------------------------------------------------------*/
592 	program_memory_queue(dimm_populated, iic0_dimm_addr, num_dimm_banks);
593 
594 	/*------------------------------------------------------------------
595 	 * Program SDRAM controller options 2 register
596 	 * Enable the memory controller.
597 	 *-----------------------------------------------------------------*/
598 	mfsdram(SDRAM_MCOPT2, val);
599 	mtsdram(SDRAM_MCOPT2,
600 		(val & ~(SDRAM_MCOPT2_SREN_MASK | SDRAM_MCOPT2_DCEN_MASK |
601 			 SDRAM_MCOPT2_IPTR_MASK | SDRAM_MCOPT2_ISIE_MASK)) |
602 			 SDRAM_MCOPT2_IPTR_EXECUTE);
603 
604 	/*------------------------------------------------------------------
605 	 * Wait for IPTR_EXECUTE init sequence to complete.
606 	 *-----------------------------------------------------------------*/
607 	do {
608 		mfsdram(SDRAM_MCSTAT, val);
609 	} while ((val & SDRAM_MCSTAT_MIC_MASK) == SDRAM_MCSTAT_MIC_NOTCOMP);
610 
611 	/* enable the controller only after init sequence completes */
612 	mfsdram(SDRAM_MCOPT2, val);
613 	mtsdram(SDRAM_MCOPT2, (val | SDRAM_MCOPT2_DCEN_ENABLE));
614 
615 	/* Make sure delay-line calibration is done before proceeding */
616 	do {
617 		mfsdram(SDRAM_DLCR, val);
618 	} while (!(val & SDRAM_DLCR_DLCS_COMPLETE));
619 
620 	/* get installed memory size */
621 	dram_size = sdram_memsize();
622 
623 	/*
624 	 * Limit size to 2GB
625 	 */
626 	if (dram_size > CONFIG_MAX_MEM_MAPPED)
627 		dram_size = CONFIG_MAX_MEM_MAPPED;
628 
629 	/* and program tlb entries for this size (dynamic) */
630 
631 	/*
632 	 * Program TLB entries with caches enabled, for best performace
633 	 * while auto-calibrating and ECC generation
634 	 */
635 	program_tlb(0, 0, dram_size, 0);
636 
637 	/*------------------------------------------------------------------
638 	 * DQS calibration.
639 	 *-----------------------------------------------------------------*/
640 #if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
641 	DQS_autocalibration();
642 #else
643 	program_DQS_calibration(dimm_populated, iic0_dimm_addr, num_dimm_banks);
644 #endif
645 	/*
646 	 * Now complete RDSS configuration as mentioned on page 7 of the AMCC
647 	 * PowerPC440SP/SPe DDR2 application note:
648 	 * "DDR1/DDR2 Initialization Sequence and Dynamic Tuning"
649 	 */
650 	update_rdcc();
651 
652 #ifdef CONFIG_DDR_ECC
653 	/*------------------------------------------------------------------
654 	 * If ecc is enabled, initialize the parity bits.
655 	 *-----------------------------------------------------------------*/
656 	program_ecc(dimm_populated, iic0_dimm_addr, num_dimm_banks, 0);
657 #endif
658 
659 	/*
660 	 * Now after initialization (auto-calibration and ECC generation)
661 	 * remove the TLB entries with caches enabled and program again with
662 	 * desired cache functionality
663 	 */
664 	remove_tlb(0, dram_size);
665 	program_tlb(0, 0, dram_size, MY_TLB_WORD2_I_ENABLE);
666 
667 	ppc4xx_ibm_ddr2_register_dump();
668 
669 	/*
670 	 * Clear potential errors resulting from auto-calibration.
671 	 * If not done, then we could get an interrupt later on when
672 	 * exceptions are enabled.
673 	 */
674 	set_mcsr(get_mcsr());
675 
676 	return sdram_memsize();
677 }
678 
get_spd_info(unsigned long * dimm_populated,unsigned char * iic0_dimm_addr,unsigned long num_dimm_banks)679 static void get_spd_info(unsigned long *dimm_populated,
680 			 unsigned char *iic0_dimm_addr,
681 			 unsigned long num_dimm_banks)
682 {
683 	unsigned long dimm_num;
684 	unsigned long dimm_found;
685 	unsigned char num_of_bytes;
686 	unsigned char total_size;
687 
688 	dimm_found = FALSE;
689 	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
690 		num_of_bytes = 0;
691 		total_size = 0;
692 
693 		num_of_bytes = spd_read(iic0_dimm_addr[dimm_num], 0);
694 		debug("\nspd_read(0x%x) returned %d\n",
695 		      iic0_dimm_addr[dimm_num], num_of_bytes);
696 		total_size = spd_read(iic0_dimm_addr[dimm_num], 1);
697 		debug("spd_read(0x%x) returned %d\n",
698 		      iic0_dimm_addr[dimm_num], total_size);
699 
700 		if ((num_of_bytes != 0) && (total_size != 0)) {
701 			dimm_populated[dimm_num] = TRUE;
702 			dimm_found = TRUE;
703 			debug("DIMM slot %lu: populated\n", dimm_num);
704 		} else {
705 			dimm_populated[dimm_num] = FALSE;
706 			debug("DIMM slot %lu: Not populated\n", dimm_num);
707 		}
708 	}
709 
710 	if (dimm_found == FALSE) {
711 		printf("ERROR - No memory installed. Install a DDR-SDRAM DIMM.\n\n");
712 		spd_ddr_init_hang ();
713 	}
714 }
715 
716 
717 /*------------------------------------------------------------------
718  * For the memory DIMMs installed, this routine verifies that they
719  * really are DDR specific DIMMs.
720  *-----------------------------------------------------------------*/
check_mem_type(unsigned long * dimm_populated,unsigned char * iic0_dimm_addr,unsigned long num_dimm_banks)721 static void check_mem_type(unsigned long *dimm_populated,
722 			   unsigned char *iic0_dimm_addr,
723 			   unsigned long num_dimm_banks)
724 {
725 	unsigned long dimm_num;
726 	unsigned long dimm_type;
727 
728 	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
729 		if (dimm_populated[dimm_num] == TRUE) {
730 			dimm_type = spd_read(iic0_dimm_addr[dimm_num], 2);
731 			switch (dimm_type) {
732 			case 1:
733 				printf("ERROR: Standard Fast Page Mode DRAM DIMM detected in "
734 				       "slot %d.\n", (unsigned int)dimm_num);
735 				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
736 				printf("Replace the DIMM module with a supported DIMM.\n\n");
737 				spd_ddr_init_hang ();
738 				break;
739 			case 2:
740 				printf("ERROR: EDO DIMM detected in slot %d.\n",
741 				       (unsigned int)dimm_num);
742 				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
743 				printf("Replace the DIMM module with a supported DIMM.\n\n");
744 				spd_ddr_init_hang ();
745 				break;
746 			case 3:
747 				printf("ERROR: Pipelined Nibble DIMM detected in slot %d.\n",
748 				       (unsigned int)dimm_num);
749 				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
750 				printf("Replace the DIMM module with a supported DIMM.\n\n");
751 				spd_ddr_init_hang ();
752 				break;
753 			case 4:
754 				printf("ERROR: SDRAM DIMM detected in slot %d.\n",
755 				       (unsigned int)dimm_num);
756 				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
757 				printf("Replace the DIMM module with a supported DIMM.\n\n");
758 				spd_ddr_init_hang ();
759 				break;
760 			case 5:
761 				printf("ERROR: Multiplexed ROM DIMM detected in slot %d.\n",
762 				       (unsigned int)dimm_num);
763 				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
764 				printf("Replace the DIMM module with a supported DIMM.\n\n");
765 				spd_ddr_init_hang ();
766 				break;
767 			case 6:
768 				printf("ERROR: SGRAM DIMM detected in slot %d.\n",
769 				       (unsigned int)dimm_num);
770 				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
771 				printf("Replace the DIMM module with a supported DIMM.\n\n");
772 				spd_ddr_init_hang ();
773 				break;
774 			case 7:
775 				debug("DIMM slot %lu: DDR1 SDRAM detected\n", dimm_num);
776 				dimm_populated[dimm_num] = SDRAM_DDR1;
777 				break;
778 			case 8:
779 				debug("DIMM slot %lu: DDR2 SDRAM detected\n", dimm_num);
780 				dimm_populated[dimm_num] = SDRAM_DDR2;
781 				break;
782 			default:
783 				printf("ERROR: Unknown DIMM detected in slot %d.\n",
784 				       (unsigned int)dimm_num);
785 				printf("Only DDR1 and DDR2 SDRAM DIMMs are supported.\n");
786 				printf("Replace the DIMM module with a supported DIMM.\n\n");
787 				spd_ddr_init_hang ();
788 				break;
789 			}
790 		}
791 	}
792 	for (dimm_num = 1; dimm_num < num_dimm_banks; dimm_num++) {
793 		if ((dimm_populated[dimm_num-1] != SDRAM_NONE)
794 		    && (dimm_populated[dimm_num]   != SDRAM_NONE)
795 		    && (dimm_populated[dimm_num-1] != dimm_populated[dimm_num])) {
796 			printf("ERROR: DIMM's DDR1 and DDR2 type can not be mixed.\n");
797 			spd_ddr_init_hang ();
798 		}
799 	}
800 }
801 
802 /*------------------------------------------------------------------
803  * For the memory DIMMs installed, this routine verifies that
804  * frequency previously calculated is supported.
805  *-----------------------------------------------------------------*/
check_frequency(unsigned long * dimm_populated,unsigned char * iic0_dimm_addr,unsigned long num_dimm_banks)806 static void check_frequency(unsigned long *dimm_populated,
807 			    unsigned char *iic0_dimm_addr,
808 			    unsigned long num_dimm_banks)
809 {
810 	unsigned long dimm_num;
811 	unsigned long tcyc_reg;
812 	unsigned long cycle_time;
813 	unsigned long calc_cycle_time;
814 	unsigned long sdram_freq;
815 	unsigned long sdr_ddrpll;
816 	PPC4xx_SYS_INFO board_cfg;
817 
818 	/*------------------------------------------------------------------
819 	 * Get the board configuration info.
820 	 *-----------------------------------------------------------------*/
821 	get_sys_info(&board_cfg);
822 
823 	mfsdr(SDR0_DDR0, sdr_ddrpll);
824 	sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
825 
826 	/*
827 	 * calc_cycle_time is calculated from DDR frequency set by board/chip
828 	 * and is expressed in multiple of 10 picoseconds
829 	 * to match the way DIMM cycle time is calculated below.
830 	 */
831 	calc_cycle_time = MULDIV64(ONE_BILLION, 100, sdram_freq);
832 
833 	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
834 		if (dimm_populated[dimm_num] != SDRAM_NONE) {
835 			tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 9);
836 			/*
837 			 * Byte 9, Cycle time for CAS Latency=X, is split into two nibbles:
838 			 * the higher order nibble (bits 4-7) designates the cycle time
839 			 * to a granularity of 1ns;
840 			 * the value presented by the lower order nibble (bits 0-3)
841 			 * has a granularity of .1ns and is added to the value designated
842 			 * by the higher nibble. In addition, four lines of the lower order
843 			 * nibble are assigned to support +.25,+.33, +.66 and +.75.
844 			 */
845 			 /* Convert from hex to decimal */
846 			if ((tcyc_reg & 0x0F) == 0x0D)
847 				cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 75;
848 			else if ((tcyc_reg & 0x0F) == 0x0C)
849 				cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 66;
850 			else if ((tcyc_reg & 0x0F) == 0x0B)
851 				cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 33;
852 			else if ((tcyc_reg & 0x0F) == 0x0A)
853 				cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 25;
854 			else
855 				cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) +
856 					((tcyc_reg & 0x0F)*10);
857 			debug("cycle_time=%lu [10 picoseconds]\n", cycle_time);
858 
859 			if  (cycle_time > (calc_cycle_time + 10)) {
860 				/*
861 				 * the provided sdram cycle_time is too small
862 				 * for the available DIMM cycle_time.
863 				 * The additionnal 100ps is here to accept a small incertainty.
864 				 */
865 				printf("ERROR: DRAM DIMM detected with cycle_time %d ps in "
866 				       "slot %d \n while calculated cycle time is %d ps.\n",
867 				       (unsigned int)(cycle_time*10),
868 				       (unsigned int)dimm_num,
869 				       (unsigned int)(calc_cycle_time*10));
870 				printf("Replace the DIMM, or change DDR frequency via "
871 				       "strapping bits.\n\n");
872 				spd_ddr_init_hang ();
873 			}
874 		}
875 	}
876 }
877 
878 /*------------------------------------------------------------------
879  * For the memory DIMMs installed, this routine verifies two
880  * ranks/banks maximum are availables.
881  *-----------------------------------------------------------------*/
check_rank_number(unsigned long * dimm_populated,unsigned char * iic0_dimm_addr,unsigned long num_dimm_banks)882 static void check_rank_number(unsigned long *dimm_populated,
883 			      unsigned char *iic0_dimm_addr,
884 			      unsigned long num_dimm_banks)
885 {
886 	unsigned long dimm_num;
887 	unsigned long dimm_rank;
888 	unsigned long total_rank = 0;
889 
890 	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
891 		if (dimm_populated[dimm_num] != SDRAM_NONE) {
892 			dimm_rank = spd_read(iic0_dimm_addr[dimm_num], 5);
893 			if (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
894 				dimm_rank = (dimm_rank & 0x0F) +1;
895 			else
896 				dimm_rank = dimm_rank & 0x0F;
897 
898 
899 			if (dimm_rank > MAXRANKS) {
900 				printf("ERROR: DRAM DIMM detected with %lu ranks in "
901 				       "slot %lu is not supported.\n", dimm_rank, dimm_num);
902 				printf("Only %d ranks are supported for all DIMM.\n", MAXRANKS);
903 				printf("Replace the DIMM module with a supported DIMM.\n\n");
904 				spd_ddr_init_hang ();
905 			} else
906 				total_rank += dimm_rank;
907 		}
908 		if (total_rank > MAXRANKS) {
909 			printf("ERROR: DRAM DIMM detected with a total of %d ranks "
910 			       "for all slots.\n", (unsigned int)total_rank);
911 			printf("Only %d ranks are supported for all DIMM.\n", MAXRANKS);
912 			printf("Remove one of the DIMM modules.\n\n");
913 			spd_ddr_init_hang ();
914 		}
915 	}
916 }
917 
918 /*------------------------------------------------------------------
919  * only support 2.5V modules.
920  * This routine verifies this.
921  *-----------------------------------------------------------------*/
check_voltage_type(unsigned long * dimm_populated,unsigned char * iic0_dimm_addr,unsigned long num_dimm_banks)922 static void check_voltage_type(unsigned long *dimm_populated,
923 			       unsigned char *iic0_dimm_addr,
924 			       unsigned long num_dimm_banks)
925 {
926 	unsigned long dimm_num;
927 	unsigned long voltage_type;
928 
929 	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
930 		if (dimm_populated[dimm_num] != SDRAM_NONE) {
931 			voltage_type = spd_read(iic0_dimm_addr[dimm_num], 8);
932 			switch (voltage_type) {
933 			case 0x00:
934 				printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
935 				printf("This DIMM is 5.0 Volt/TTL.\n");
936 				printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
937 				       (unsigned int)dimm_num);
938 				spd_ddr_init_hang ();
939 				break;
940 			case 0x01:
941 				printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
942 				printf("This DIMM is LVTTL.\n");
943 				printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
944 				       (unsigned int)dimm_num);
945 				spd_ddr_init_hang ();
946 				break;
947 			case 0x02:
948 				printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
949 				printf("This DIMM is 1.5 Volt.\n");
950 				printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
951 				       (unsigned int)dimm_num);
952 				spd_ddr_init_hang ();
953 				break;
954 			case 0x03:
955 				printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
956 				printf("This DIMM is 3.3 Volt/TTL.\n");
957 				printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
958 				       (unsigned int)dimm_num);
959 				spd_ddr_init_hang ();
960 				break;
961 			case 0x04:
962 				/* 2.5 Voltage only for DDR1 */
963 				break;
964 			case 0x05:
965 				/* 1.8 Voltage only for DDR2 */
966 				break;
967 			default:
968 				printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
969 				printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
970 				       (unsigned int)dimm_num);
971 				spd_ddr_init_hang ();
972 				break;
973 			}
974 		}
975 	}
976 }
977 
978 /*-----------------------------------------------------------------------------+
979  * program_copt1.
980  *-----------------------------------------------------------------------------*/
program_copt1(unsigned long * dimm_populated,unsigned char * iic0_dimm_addr,unsigned long num_dimm_banks)981 static void program_copt1(unsigned long *dimm_populated,
982 			  unsigned char *iic0_dimm_addr,
983 			  unsigned long num_dimm_banks)
984 {
985 	unsigned long dimm_num;
986 	unsigned long mcopt1;
987 	unsigned long ecc_enabled;
988 	unsigned long ecc = 0;
989 	unsigned long data_width = 0;
990 	unsigned long dimm_32bit;
991 	unsigned long dimm_64bit;
992 	unsigned long registered = 0;
993 	unsigned long attribute = 0;
994 	unsigned long buf0, buf1; /* TODO: code to be changed for IOP1.6 to support 4 DIMMs */
995 	unsigned long bankcount;
996 	unsigned long ddrtype;
997 	unsigned long val;
998 
999 #ifdef CONFIG_DDR_ECC
1000 	ecc_enabled = TRUE;
1001 #else
1002 	ecc_enabled = FALSE;
1003 #endif
1004 	dimm_32bit = FALSE;
1005 	dimm_64bit = FALSE;
1006 	buf0 = FALSE;
1007 	buf1 = FALSE;
1008 
1009 	/*------------------------------------------------------------------
1010 	 * Set memory controller options reg 1, SDRAM_MCOPT1.
1011 	 *-----------------------------------------------------------------*/
1012 	mfsdram(SDRAM_MCOPT1, val);
1013 	mcopt1 = val & ~(SDRAM_MCOPT1_MCHK_MASK | SDRAM_MCOPT1_RDEN_MASK |
1014 			 SDRAM_MCOPT1_PMU_MASK  | SDRAM_MCOPT1_DMWD_MASK |
1015 			 SDRAM_MCOPT1_UIOS_MASK | SDRAM_MCOPT1_BCNT_MASK |
1016 			 SDRAM_MCOPT1_DDR_TYPE_MASK | SDRAM_MCOPT1_RWOO_MASK |
1017 			 SDRAM_MCOPT1_WOOO_MASK | SDRAM_MCOPT1_DCOO_MASK |
1018 			 SDRAM_MCOPT1_DREF_MASK);
1019 
1020 	mcopt1 |= SDRAM_MCOPT1_QDEP;
1021 	mcopt1 |= SDRAM_MCOPT1_PMU_OPEN;
1022 	mcopt1 |= SDRAM_MCOPT1_RWOO_DISABLED;
1023 	mcopt1 |= SDRAM_MCOPT1_WOOO_DISABLED;
1024 	mcopt1 |= SDRAM_MCOPT1_DCOO_DISABLED;
1025 	mcopt1 |= SDRAM_MCOPT1_DREF_NORMAL;
1026 
1027 	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1028 		if (dimm_populated[dimm_num] != SDRAM_NONE) {
1029 			/* test ecc support */
1030 			ecc = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 11);
1031 			if (ecc != 0x02) /* ecc not supported */
1032 				ecc_enabled = FALSE;
1033 
1034 			/* test bank count */
1035 			bankcount = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 17);
1036 			if (bankcount == 0x04) /* bank count = 4 */
1037 				mcopt1 |= SDRAM_MCOPT1_4_BANKS;
1038 			else /* bank count = 8 */
1039 				mcopt1 |= SDRAM_MCOPT1_8_BANKS;
1040 
1041 			/* test DDR type */
1042 			ddrtype = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2);
1043 			/* test for buffered/unbuffered, registered, differential clocks */
1044 			registered = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 20);
1045 			attribute = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 21);
1046 
1047 			/* TODO: code to be changed for IOP1.6 to support 4 DIMMs */
1048 			if (dimm_num == 0) {
1049 				if (dimm_populated[dimm_num] == SDRAM_DDR1) /* DDR1 type */
1050 					mcopt1 |= SDRAM_MCOPT1_DDR1_TYPE;
1051 				if (dimm_populated[dimm_num] == SDRAM_DDR2) /* DDR2 type */
1052 					mcopt1 |= SDRAM_MCOPT1_DDR2_TYPE;
1053 				if (registered == 1) { /* DDR2 always buffered */
1054 					/* TODO: what about above  comments ? */
1055 					mcopt1 |= SDRAM_MCOPT1_RDEN;
1056 					buf0 = TRUE;
1057 				} else {
1058 					/* TODO: the mask 0x02 doesn't match Samsung def for byte 21. */
1059 					if ((attribute & 0x02) == 0x00) {
1060 						/* buffered not supported */
1061 						buf0 = FALSE;
1062 					} else {
1063 						mcopt1 |= SDRAM_MCOPT1_RDEN;
1064 						buf0 = TRUE;
1065 					}
1066 				}
1067 			}
1068 			else if (dimm_num == 1) {
1069 				if (dimm_populated[dimm_num] == SDRAM_DDR1) /* DDR1 type */
1070 					mcopt1 |= SDRAM_MCOPT1_DDR1_TYPE;
1071 				if (dimm_populated[dimm_num] == SDRAM_DDR2) /* DDR2 type */
1072 					mcopt1 |= SDRAM_MCOPT1_DDR2_TYPE;
1073 				if (registered == 1) {
1074 					/* DDR2 always buffered */
1075 					mcopt1 |= SDRAM_MCOPT1_RDEN;
1076 					buf1 = TRUE;
1077 				} else {
1078 					if ((attribute & 0x02) == 0x00) {
1079 						/* buffered not supported */
1080 						buf1 = FALSE;
1081 					} else {
1082 						mcopt1 |= SDRAM_MCOPT1_RDEN;
1083 						buf1 = TRUE;
1084 					}
1085 				}
1086 			}
1087 
1088 			/* Note that for DDR2 the byte 7 is reserved, but OK to keep code as is. */
1089 			data_width = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 6) +
1090 				(((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 7)) << 8);
1091 
1092 			switch (data_width) {
1093 			case 72:
1094 			case 64:
1095 				dimm_64bit = TRUE;
1096 				break;
1097 			case 40:
1098 			case 32:
1099 				dimm_32bit = TRUE;
1100 				break;
1101 			default:
1102 				printf("WARNING: Detected a DIMM with a data width of %lu bits.\n",
1103 				       data_width);
1104 				printf("Only DIMMs with 32 or 64 bit DDR-SDRAM widths are supported.\n");
1105 				break;
1106 			}
1107 		}
1108 	}
1109 
1110 	/* verify matching properties */
1111 	if ((dimm_populated[0] != SDRAM_NONE) && (dimm_populated[1] != SDRAM_NONE)) {
1112 		if (buf0 != buf1) {
1113 			printf("ERROR: DIMM's buffered/unbuffered, registered, clocking don't match.\n");
1114 			spd_ddr_init_hang ();
1115 		}
1116 	}
1117 
1118 	if ((dimm_64bit == TRUE) && (dimm_32bit == TRUE)) {
1119 		printf("ERROR: Cannot mix 32 bit and 64 bit DDR-SDRAM DIMMs together.\n");
1120 		spd_ddr_init_hang ();
1121 	}
1122 	else if ((dimm_64bit == TRUE) && (dimm_32bit == FALSE)) {
1123 		mcopt1 |= SDRAM_MCOPT1_DMWD_64;
1124 	} else if ((dimm_64bit == FALSE) && (dimm_32bit == TRUE)) {
1125 		mcopt1 |= SDRAM_MCOPT1_DMWD_32;
1126 	} else {
1127 		printf("ERROR: Please install only 32 or 64 bit DDR-SDRAM DIMMs.\n\n");
1128 		spd_ddr_init_hang ();
1129 	}
1130 
1131 	if (ecc_enabled == TRUE)
1132 		mcopt1 |= SDRAM_MCOPT1_MCHK_GEN;
1133 	else
1134 		mcopt1 |= SDRAM_MCOPT1_MCHK_NON;
1135 
1136 	mtsdram(SDRAM_MCOPT1, mcopt1);
1137 }
1138 
1139 /*-----------------------------------------------------------------------------+
1140  * program_codt.
1141  *-----------------------------------------------------------------------------*/
program_codt(unsigned long * dimm_populated,unsigned char * iic0_dimm_addr,unsigned long num_dimm_banks)1142 static void program_codt(unsigned long *dimm_populated,
1143 			 unsigned char *iic0_dimm_addr,
1144 			 unsigned long num_dimm_banks)
1145 {
1146 	unsigned long codt;
1147 	unsigned long modt0 = 0;
1148 	unsigned long modt1 = 0;
1149 	unsigned long modt2 = 0;
1150 	unsigned long modt3 = 0;
1151 	unsigned char dimm_num;
1152 	unsigned char dimm_rank;
1153 	unsigned char total_rank = 0;
1154 	unsigned char total_dimm = 0;
1155 	unsigned char dimm_type = 0;
1156 	unsigned char firstSlot = 0;
1157 
1158 	/*------------------------------------------------------------------
1159 	 * Set the SDRAM Controller On Die Termination Register
1160 	 *-----------------------------------------------------------------*/
1161 	mfsdram(SDRAM_CODT, codt);
1162 	codt &= ~(SDRAM_CODT_DQS_SINGLE_END | SDRAM_CODT_CKSE_SINGLE_END);
1163 	codt |= SDRAM_CODT_IO_NMODE;
1164 
1165 	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1166 		if (dimm_populated[dimm_num] != SDRAM_NONE) {
1167 			dimm_rank = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 5);
1168 			if (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08) {
1169 				dimm_rank = (dimm_rank & 0x0F) + 1;
1170 				dimm_type = SDRAM_DDR2;
1171 			} else {
1172 				dimm_rank = dimm_rank & 0x0F;
1173 				dimm_type = SDRAM_DDR1;
1174 			}
1175 
1176 			total_rank += dimm_rank;
1177 			total_dimm++;
1178 			if ((dimm_num == 0) && (total_dimm == 1))
1179 				firstSlot = TRUE;
1180 			else
1181 				firstSlot = FALSE;
1182 		}
1183 	}
1184 	if (dimm_type == SDRAM_DDR2) {
1185 		codt |= SDRAM_CODT_DQS_1_8_V_DDR2;
1186 		if ((total_dimm == 1) && (firstSlot == TRUE)) {
1187 			if (total_rank == 1) {	/* PUUU */
1188 				codt |= CALC_ODT_R(0);
1189 				modt0 = CALC_ODT_W(0);
1190 				modt1 = 0x00000000;
1191 				modt2 = 0x00000000;
1192 				modt3 = 0x00000000;
1193 			}
1194 			if (total_rank == 2) {	/* PPUU */
1195 				codt |= CALC_ODT_R(0) | CALC_ODT_R(1);
1196 				modt0 = CALC_ODT_W(0) | CALC_ODT_W(1);
1197 				modt1 = 0x00000000;
1198 				modt2 = 0x00000000;
1199 				modt3 = 0x00000000;
1200 			}
1201 		} else if ((total_dimm == 1) && (firstSlot != TRUE)) {
1202 			if (total_rank == 1) {	/* UUPU */
1203 				codt |= CALC_ODT_R(2);
1204 				modt0 = 0x00000000;
1205 				modt1 = 0x00000000;
1206 				modt2 = CALC_ODT_W(2);
1207 				modt3 = 0x00000000;
1208 			}
1209 			if (total_rank == 2) {	/* UUPP */
1210 				codt |= CALC_ODT_R(2) | CALC_ODT_R(3);
1211 				modt0 = 0x00000000;
1212 				modt1 = 0x00000000;
1213 				modt2 = CALC_ODT_W(2) | CALC_ODT_W(3);
1214 				modt3 = 0x00000000;
1215 			}
1216 		}
1217 		if (total_dimm == 2) {
1218 			if (total_rank == 2) {	/* PUPU */
1219 				codt |= CALC_ODT_R(0) | CALC_ODT_R(2);
1220 				modt0 = CALC_ODT_RW(2);
1221 				modt1 = 0x00000000;
1222 				modt2 = CALC_ODT_RW(0);
1223 				modt3 = 0x00000000;
1224 			}
1225 			if (total_rank == 4) {	/* PPPP */
1226 				codt |= CALC_ODT_R(0) | CALC_ODT_R(1) |
1227 					CALC_ODT_R(2) | CALC_ODT_R(3);
1228 				modt0 = CALC_ODT_RW(2) | CALC_ODT_RW(3);
1229 				modt1 = 0x00000000;
1230 				modt2 = CALC_ODT_RW(0) | CALC_ODT_RW(1);
1231 				modt3 = 0x00000000;
1232 			}
1233 		}
1234 	} else {
1235 		codt |= SDRAM_CODT_DQS_2_5_V_DDR1;
1236 		modt0 = 0x00000000;
1237 		modt1 = 0x00000000;
1238 		modt2 = 0x00000000;
1239 		modt3 = 0x00000000;
1240 
1241 		if (total_dimm == 1) {
1242 			if (total_rank == 1)
1243 				codt |= 0x00800000;
1244 			if (total_rank == 2)
1245 				codt |= 0x02800000;
1246 		}
1247 		if (total_dimm == 2) {
1248 			if (total_rank == 2)
1249 				codt |= 0x08800000;
1250 			if (total_rank == 4)
1251 				codt |= 0x2a800000;
1252 		}
1253 	}
1254 
1255 	debug("nb of dimm %d\n", total_dimm);
1256 	debug("nb of rank %d\n", total_rank);
1257 	if (total_dimm == 1)
1258 		debug("dimm in slot %d\n", firstSlot);
1259 
1260 	mtsdram(SDRAM_CODT, codt);
1261 	mtsdram(SDRAM_MODT0, modt0);
1262 	mtsdram(SDRAM_MODT1, modt1);
1263 	mtsdram(SDRAM_MODT2, modt2);
1264 	mtsdram(SDRAM_MODT3, modt3);
1265 }
1266 
1267 /*-----------------------------------------------------------------------------+
1268  * program_initplr.
1269  *-----------------------------------------------------------------------------*/
program_initplr(unsigned long * dimm_populated,unsigned char * iic0_dimm_addr,unsigned long num_dimm_banks,ddr_cas_id_t selected_cas,int write_recovery)1270 static void program_initplr(unsigned long *dimm_populated,
1271 			    unsigned char *iic0_dimm_addr,
1272 			    unsigned long num_dimm_banks,
1273 			    ddr_cas_id_t selected_cas,
1274 			    int write_recovery)
1275 {
1276 	u32 cas = 0;
1277 	u32 odt = 0;
1278 	u32 ods = 0;
1279 	u32 mr;
1280 	u32 wr;
1281 	u32 emr;
1282 	u32 emr2;
1283 	u32 emr3;
1284 	int dimm_num;
1285 	int total_dimm = 0;
1286 
1287 	/******************************************************
1288 	 ** Assumption: if more than one DIMM, all DIMMs are the same
1289 	 **		as already checked in check_memory_type
1290 	 ******************************************************/
1291 
1292 	if ((dimm_populated[0] == SDRAM_DDR1) || (dimm_populated[1] == SDRAM_DDR1)) {
1293 		mtsdram(SDRAM_INITPLR0, 0x81B80000);
1294 		mtsdram(SDRAM_INITPLR1, 0x81900400);
1295 		mtsdram(SDRAM_INITPLR2, 0x81810000);
1296 		mtsdram(SDRAM_INITPLR3, 0xff800162);
1297 		mtsdram(SDRAM_INITPLR4, 0x81900400);
1298 		mtsdram(SDRAM_INITPLR5, 0x86080000);
1299 		mtsdram(SDRAM_INITPLR6, 0x86080000);
1300 		mtsdram(SDRAM_INITPLR7, 0x81000062);
1301 	} else if ((dimm_populated[0] == SDRAM_DDR2) || (dimm_populated[1] == SDRAM_DDR2)) {
1302 		switch (selected_cas) {
1303 		case DDR_CAS_3:
1304 			cas = 3 << 4;
1305 			break;
1306 		case DDR_CAS_4:
1307 			cas = 4 << 4;
1308 			break;
1309 		case DDR_CAS_5:
1310 			cas = 5 << 4;
1311 			break;
1312 		default:
1313 			printf("ERROR: ucode error on selected_cas value %d", selected_cas);
1314 			spd_ddr_init_hang ();
1315 			break;
1316 		}
1317 
1318 #if 0
1319 		/*
1320 		 * ToDo - Still a problem with the write recovery:
1321 		 * On the Corsair CM2X512-5400C4 module, setting write recovery
1322 		 * in the INITPLR reg to the value calculated in program_mode()
1323 		 * results in not correctly working DDR2 memory (crash after
1324 		 * relocation).
1325 		 *
1326 		 * So for now, set the write recovery to 3. This seems to work
1327 		 * on the Corair module too.
1328 		 *
1329 		 * 2007-03-01, sr
1330 		 */
1331 		switch (write_recovery) {
1332 		case 3:
1333 			wr = WRITE_RECOV_3;
1334 			break;
1335 		case 4:
1336 			wr = WRITE_RECOV_4;
1337 			break;
1338 		case 5:
1339 			wr = WRITE_RECOV_5;
1340 			break;
1341 		case 6:
1342 			wr = WRITE_RECOV_6;
1343 			break;
1344 		default:
1345 			printf("ERROR: write recovery not support (%d)", write_recovery);
1346 			spd_ddr_init_hang ();
1347 			break;
1348 		}
1349 #else
1350 		wr = WRITE_RECOV_3; /* test-only, see description above */
1351 #endif
1352 
1353 		for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++)
1354 			if (dimm_populated[dimm_num] != SDRAM_NONE)
1355 				total_dimm++;
1356 		if (total_dimm == 1) {
1357 			odt = ODT_150_OHM;
1358 			ods = ODS_FULL;
1359 		} else if (total_dimm == 2) {
1360 			odt = ODT_75_OHM;
1361 			ods = ODS_REDUCED;
1362 		} else {
1363 			printf("ERROR: Unsupported number of DIMM's (%d)", total_dimm);
1364 			spd_ddr_init_hang ();
1365 		}
1366 
1367 		mr = CMD_EMR | SELECT_MR | BURST_LEN_4 | wr | cas;
1368 		emr = CMD_EMR | SELECT_EMR | odt | ods;
1369 		emr2 = CMD_EMR | SELECT_EMR2;
1370 		emr3 = CMD_EMR | SELECT_EMR3;
1371 		/* NOP - Wait 106 MemClk cycles */
1372 		mtsdram(SDRAM_INITPLR0, SDRAM_INITPLR_ENABLE | CMD_NOP |
1373 					SDRAM_INITPLR_IMWT_ENCODE(106));
1374 		udelay(1000);
1375 		/* precharge 4 MemClk cycles */
1376 		mtsdram(SDRAM_INITPLR1, SDRAM_INITPLR_ENABLE | CMD_PRECHARGE |
1377 					SDRAM_INITPLR_IMWT_ENCODE(4));
1378 		/* EMR2 - Wait tMRD (2 MemClk cycles) */
1379 		mtsdram(SDRAM_INITPLR2, SDRAM_INITPLR_ENABLE | emr2 |
1380 					SDRAM_INITPLR_IMWT_ENCODE(2));
1381 		/* EMR3 - Wait tMRD (2 MemClk cycles) */
1382 		mtsdram(SDRAM_INITPLR3, SDRAM_INITPLR_ENABLE | emr3 |
1383 					SDRAM_INITPLR_IMWT_ENCODE(2));
1384 		/* EMR DLL ENABLE - Wait tMRD (2 MemClk cycles) */
1385 		mtsdram(SDRAM_INITPLR4, SDRAM_INITPLR_ENABLE | emr |
1386 					SDRAM_INITPLR_IMWT_ENCODE(2));
1387 		/* MR w/ DLL reset - 200 cycle wait for DLL reset */
1388 		mtsdram(SDRAM_INITPLR5, SDRAM_INITPLR_ENABLE | mr | DLL_RESET |
1389 					SDRAM_INITPLR_IMWT_ENCODE(200));
1390 		udelay(1000);
1391 		/* precharge 4 MemClk cycles */
1392 		mtsdram(SDRAM_INITPLR6, SDRAM_INITPLR_ENABLE | CMD_PRECHARGE |
1393 					SDRAM_INITPLR_IMWT_ENCODE(4));
1394 		/* Refresh 25 MemClk cycles */
1395 		mtsdram(SDRAM_INITPLR7, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1396 					SDRAM_INITPLR_IMWT_ENCODE(25));
1397 		/* Refresh 25 MemClk cycles */
1398 		mtsdram(SDRAM_INITPLR8, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1399 					SDRAM_INITPLR_IMWT_ENCODE(25));
1400 		/* Refresh 25 MemClk cycles */
1401 		mtsdram(SDRAM_INITPLR9, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1402 					SDRAM_INITPLR_IMWT_ENCODE(25));
1403 		/* Refresh 25 MemClk cycles */
1404 		mtsdram(SDRAM_INITPLR10, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1405 					 SDRAM_INITPLR_IMWT_ENCODE(25));
1406 		/* MR w/o DLL reset - Wait tMRD (2 MemClk cycles) */
1407 		mtsdram(SDRAM_INITPLR11, SDRAM_INITPLR_ENABLE | mr |
1408 					 SDRAM_INITPLR_IMWT_ENCODE(2));
1409 		/* EMR OCD Default - Wait tMRD (2 MemClk cycles) */
1410 		mtsdram(SDRAM_INITPLR12, SDRAM_INITPLR_ENABLE | OCD_CALIB_DEF |
1411 					 SDRAM_INITPLR_IMWT_ENCODE(2) | emr);
1412 		/* EMR OCD Exit */
1413 		mtsdram(SDRAM_INITPLR13, SDRAM_INITPLR_ENABLE | emr |
1414 					 SDRAM_INITPLR_IMWT_ENCODE(2));
1415 	} else {
1416 		printf("ERROR: ucode error as unknown DDR type in program_initplr");
1417 		spd_ddr_init_hang ();
1418 	}
1419 }
1420 
1421 /*------------------------------------------------------------------
1422  * This routine programs the SDRAM_MMODE register.
1423  * the selected_cas is an output parameter, that will be passed
1424  * by caller to call the above program_initplr( )
1425  *-----------------------------------------------------------------*/
program_mode(unsigned long * dimm_populated,unsigned char * iic0_dimm_addr,unsigned long num_dimm_banks,ddr_cas_id_t * selected_cas,int * write_recovery)1426 static void program_mode(unsigned long *dimm_populated,
1427 			 unsigned char *iic0_dimm_addr,
1428 			 unsigned long num_dimm_banks,
1429 			 ddr_cas_id_t *selected_cas,
1430 			 int *write_recovery)
1431 {
1432 	unsigned long dimm_num;
1433 	unsigned long sdram_ddr1;
1434 	unsigned long t_wr_ns;
1435 	unsigned long t_wr_clk;
1436 	unsigned long cas_bit;
1437 	unsigned long cas_index;
1438 	unsigned long sdram_freq;
1439 	unsigned long ddr_check;
1440 	unsigned long mmode;
1441 	unsigned long tcyc_reg;
1442 	unsigned long cycle_2_0_clk;
1443 	unsigned long cycle_2_5_clk;
1444 	unsigned long cycle_3_0_clk;
1445 	unsigned long cycle_4_0_clk;
1446 	unsigned long cycle_5_0_clk;
1447 	unsigned long max_2_0_tcyc_ns_x_100;
1448 	unsigned long max_2_5_tcyc_ns_x_100;
1449 	unsigned long max_3_0_tcyc_ns_x_100;
1450 	unsigned long max_4_0_tcyc_ns_x_100;
1451 	unsigned long max_5_0_tcyc_ns_x_100;
1452 	unsigned long cycle_time_ns_x_100[3];
1453 	PPC4xx_SYS_INFO board_cfg;
1454 	unsigned char cas_2_0_available;
1455 	unsigned char cas_2_5_available;
1456 	unsigned char cas_3_0_available;
1457 	unsigned char cas_4_0_available;
1458 	unsigned char cas_5_0_available;
1459 	unsigned long sdr_ddrpll;
1460 
1461 	/*------------------------------------------------------------------
1462 	 * Get the board configuration info.
1463 	 *-----------------------------------------------------------------*/
1464 	get_sys_info(&board_cfg);
1465 
1466 	mfsdr(SDR0_DDR0, sdr_ddrpll);
1467 	sdram_freq = MULDIV64((board_cfg.freqPLB), SDR0_DDR0_DDRM_DECODE(sdr_ddrpll), 1);
1468 	debug("sdram_freq=%lu\n", sdram_freq);
1469 
1470 	/*------------------------------------------------------------------
1471 	 * Handle the timing.  We need to find the worst case timing of all
1472 	 * the dimm modules installed.
1473 	 *-----------------------------------------------------------------*/
1474 	t_wr_ns = 0;
1475 	cas_2_0_available = TRUE;
1476 	cas_2_5_available = TRUE;
1477 	cas_3_0_available = TRUE;
1478 	cas_4_0_available = TRUE;
1479 	cas_5_0_available = TRUE;
1480 	max_2_0_tcyc_ns_x_100 = 10;
1481 	max_2_5_tcyc_ns_x_100 = 10;
1482 	max_3_0_tcyc_ns_x_100 = 10;
1483 	max_4_0_tcyc_ns_x_100 = 10;
1484 	max_5_0_tcyc_ns_x_100 = 10;
1485 	sdram_ddr1 = TRUE;
1486 
1487 	/* loop through all the DIMM slots on the board */
1488 	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1489 		/* If a dimm is installed in a particular slot ... */
1490 		if (dimm_populated[dimm_num] != SDRAM_NONE) {
1491 			if (dimm_populated[dimm_num] == SDRAM_DDR1)
1492 				sdram_ddr1 = TRUE;
1493 			else
1494 				sdram_ddr1 = FALSE;
1495 
1496 			/* t_wr_ns = max(t_wr_ns, (unsigned long)dimm_spd[dimm_num][36] >> 2); */ /*  not used in this loop. */
1497 			cas_bit = spd_read(iic0_dimm_addr[dimm_num], 18);
1498 			debug("cas_bit[SPD byte 18]=%02lx\n", cas_bit);
1499 
1500 			/* For a particular DIMM, grab the three CAS values it supports */
1501 			for (cas_index = 0; cas_index < 3; cas_index++) {
1502 				switch (cas_index) {
1503 				case 0:
1504 					tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 9);
1505 					break;
1506 				case 1:
1507 					tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 23);
1508 					break;
1509 				default:
1510 					tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 25);
1511 					break;
1512 				}
1513 
1514 				if ((tcyc_reg & 0x0F) >= 10) {
1515 					if ((tcyc_reg & 0x0F) == 0x0D) {
1516 						/* Convert from hex to decimal */
1517 						cycle_time_ns_x_100[cas_index] =
1518 							(((tcyc_reg & 0xF0) >> 4) * 100) + 75;
1519 					} else {
1520 						printf("ERROR: SPD reported Tcyc is incorrect for DIMM "
1521 						       "in slot %d\n", (unsigned int)dimm_num);
1522 						spd_ddr_init_hang ();
1523 					}
1524 				} else {
1525 					/* Convert from hex to decimal */
1526 					cycle_time_ns_x_100[cas_index] =
1527 						(((tcyc_reg & 0xF0) >> 4) * 100) +
1528 						((tcyc_reg & 0x0F)*10);
1529 				}
1530 				debug("cas_index=%lu: cycle_time_ns_x_100=%lu\n", cas_index,
1531 				      cycle_time_ns_x_100[cas_index]);
1532 			}
1533 
1534 			/* The rest of this routine determines if CAS 2.0, 2.5, 3.0, 4.0 and 5.0 are */
1535 			/* supported for a particular DIMM. */
1536 			cas_index = 0;
1537 
1538 			if (sdram_ddr1) {
1539 				/*
1540 				 * DDR devices use the following bitmask for CAS latency:
1541 				 *  Bit   7    6    5    4    3    2    1    0
1542 				 *       TBD  4.0  3.5  3.0  2.5  2.0  1.5  1.0
1543 				 */
1544 				if (((cas_bit & 0x40) == 0x40) && (cas_index < 3) &&
1545 				    (cycle_time_ns_x_100[cas_index] != 0)) {
1546 					max_4_0_tcyc_ns_x_100 = max(max_4_0_tcyc_ns_x_100,
1547 								    cycle_time_ns_x_100[cas_index]);
1548 					cas_index++;
1549 				} else {
1550 					if (cas_index != 0)
1551 						cas_index++;
1552 					cas_4_0_available = FALSE;
1553 				}
1554 
1555 				if (((cas_bit & 0x10) == 0x10) && (cas_index < 3) &&
1556 				    (cycle_time_ns_x_100[cas_index] != 0)) {
1557 					max_3_0_tcyc_ns_x_100 = max(max_3_0_tcyc_ns_x_100,
1558 								    cycle_time_ns_x_100[cas_index]);
1559 					cas_index++;
1560 				} else {
1561 					if (cas_index != 0)
1562 						cas_index++;
1563 					cas_3_0_available = FALSE;
1564 				}
1565 
1566 				if (((cas_bit & 0x08) == 0x08) && (cas_index < 3) &&
1567 				    (cycle_time_ns_x_100[cas_index] != 0)) {
1568 					max_2_5_tcyc_ns_x_100 = max(max_2_5_tcyc_ns_x_100,
1569 								    cycle_time_ns_x_100[cas_index]);
1570 					cas_index++;
1571 				} else {
1572 					if (cas_index != 0)
1573 						cas_index++;
1574 					cas_2_5_available = FALSE;
1575 				}
1576 
1577 				if (((cas_bit & 0x04) == 0x04) && (cas_index < 3) &&
1578 				    (cycle_time_ns_x_100[cas_index] != 0)) {
1579 					max_2_0_tcyc_ns_x_100 = max(max_2_0_tcyc_ns_x_100,
1580 								    cycle_time_ns_x_100[cas_index]);
1581 					cas_index++;
1582 				} else {
1583 					if (cas_index != 0)
1584 						cas_index++;
1585 					cas_2_0_available = FALSE;
1586 				}
1587 			} else {
1588 				/*
1589 				 * DDR2 devices use the following bitmask for CAS latency:
1590 				 *  Bit   7    6    5    4    3    2    1    0
1591 				 *       TBD  6.0  5.0  4.0  3.0  2.0  TBD  TBD
1592 				 */
1593 				if (((cas_bit & 0x20) == 0x20) && (cas_index < 3) &&
1594 				    (cycle_time_ns_x_100[cas_index] != 0)) {
1595 					max_5_0_tcyc_ns_x_100 = max(max_5_0_tcyc_ns_x_100,
1596 								    cycle_time_ns_x_100[cas_index]);
1597 					cas_index++;
1598 				} else {
1599 					if (cas_index != 0)
1600 						cas_index++;
1601 					cas_5_0_available = FALSE;
1602 				}
1603 
1604 				if (((cas_bit & 0x10) == 0x10) && (cas_index < 3) &&
1605 				    (cycle_time_ns_x_100[cas_index] != 0)) {
1606 					max_4_0_tcyc_ns_x_100 = max(max_4_0_tcyc_ns_x_100,
1607 								    cycle_time_ns_x_100[cas_index]);
1608 					cas_index++;
1609 				} else {
1610 					if (cas_index != 0)
1611 						cas_index++;
1612 					cas_4_0_available = FALSE;
1613 				}
1614 
1615 				if (((cas_bit & 0x08) == 0x08) && (cas_index < 3) &&
1616 				    (cycle_time_ns_x_100[cas_index] != 0)) {
1617 					max_3_0_tcyc_ns_x_100 = max(max_3_0_tcyc_ns_x_100,
1618 								    cycle_time_ns_x_100[cas_index]);
1619 					cas_index++;
1620 				} else {
1621 					if (cas_index != 0)
1622 						cas_index++;
1623 					cas_3_0_available = FALSE;
1624 				}
1625 			}
1626 		}
1627 	}
1628 
1629 	/*------------------------------------------------------------------
1630 	 * Set the SDRAM mode, SDRAM_MMODE
1631 	 *-----------------------------------------------------------------*/
1632 	mfsdram(SDRAM_MMODE, mmode);
1633 	mmode = mmode & ~(SDRAM_MMODE_WR_MASK | SDRAM_MMODE_DCL_MASK);
1634 
1635 	/* add 10 here because of rounding problems */
1636 	cycle_2_0_clk = MULDIV64(ONE_BILLION, 100, max_2_0_tcyc_ns_x_100) + 10;
1637 	cycle_2_5_clk = MULDIV64(ONE_BILLION, 100, max_2_5_tcyc_ns_x_100) + 10;
1638 	cycle_3_0_clk = MULDIV64(ONE_BILLION, 100, max_3_0_tcyc_ns_x_100) + 10;
1639 	cycle_4_0_clk = MULDIV64(ONE_BILLION, 100, max_4_0_tcyc_ns_x_100) + 10;
1640 	cycle_5_0_clk = MULDIV64(ONE_BILLION, 100, max_5_0_tcyc_ns_x_100) + 10;
1641 	debug("cycle_3_0_clk=%lu\n", cycle_3_0_clk);
1642 	debug("cycle_4_0_clk=%lu\n", cycle_4_0_clk);
1643 	debug("cycle_5_0_clk=%lu\n", cycle_5_0_clk);
1644 
1645 	if (sdram_ddr1 == TRUE) { /* DDR1 */
1646 		if ((cas_2_0_available == TRUE) && (sdram_freq <= cycle_2_0_clk)) {
1647 			mmode |= SDRAM_MMODE_DCL_DDR1_2_0_CLK;
1648 			*selected_cas = DDR_CAS_2;
1649 		} else if ((cas_2_5_available == TRUE) && (sdram_freq <= cycle_2_5_clk)) {
1650 			mmode |= SDRAM_MMODE_DCL_DDR1_2_5_CLK;
1651 			*selected_cas = DDR_CAS_2_5;
1652 		} else if ((cas_3_0_available == TRUE) && (sdram_freq <= cycle_3_0_clk)) {
1653 			mmode |= SDRAM_MMODE_DCL_DDR1_3_0_CLK;
1654 			*selected_cas = DDR_CAS_3;
1655 		} else {
1656 			printf("ERROR: Cannot find a supported CAS latency with the installed DIMMs.\n");
1657 			printf("Only DIMMs DDR1 with CAS latencies of 2.0, 2.5, and 3.0 are supported.\n");
1658 			printf("Make sure the PLB speed is within the supported range of the DIMMs.\n\n");
1659 			spd_ddr_init_hang ();
1660 		}
1661 	} else { /* DDR2 */
1662 		debug("cas_3_0_available=%d\n", cas_3_0_available);
1663 		debug("cas_4_0_available=%d\n", cas_4_0_available);
1664 		debug("cas_5_0_available=%d\n", cas_5_0_available);
1665 		if ((cas_3_0_available == TRUE) && (sdram_freq <= cycle_3_0_clk)) {
1666 			mmode |= SDRAM_MMODE_DCL_DDR2_3_0_CLK;
1667 			*selected_cas = DDR_CAS_3;
1668 		} else if ((cas_4_0_available == TRUE) && (sdram_freq <= cycle_4_0_clk)) {
1669 			mmode |= SDRAM_MMODE_DCL_DDR2_4_0_CLK;
1670 			*selected_cas = DDR_CAS_4;
1671 		} else if ((cas_5_0_available == TRUE) && (sdram_freq <= cycle_5_0_clk)) {
1672 			mmode |= SDRAM_MMODE_DCL_DDR2_5_0_CLK;
1673 			*selected_cas = DDR_CAS_5;
1674 		} else {
1675 			printf("ERROR: Cannot find a supported CAS latency with the installed DIMMs.\n");
1676 			printf("Only DIMMs DDR2 with CAS latencies of 3.0, 4.0, and 5.0 are supported.\n");
1677 			printf("Make sure the PLB speed is within the supported range of the DIMMs.\n");
1678 			printf("cas3=%d cas4=%d cas5=%d\n",
1679 			       cas_3_0_available, cas_4_0_available, cas_5_0_available);
1680 			printf("sdram_freq=%lu cycle3=%lu cycle4=%lu cycle5=%lu\n\n",
1681 			       sdram_freq, cycle_3_0_clk, cycle_4_0_clk, cycle_5_0_clk);
1682 			spd_ddr_init_hang ();
1683 		}
1684 	}
1685 
1686 	if (sdram_ddr1 == TRUE)
1687 		mmode |= SDRAM_MMODE_WR_DDR1;
1688 	else {
1689 
1690 		/* loop through all the DIMM slots on the board */
1691 		for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1692 			/* If a dimm is installed in a particular slot ... */
1693 			if (dimm_populated[dimm_num] != SDRAM_NONE)
1694 				t_wr_ns = max(t_wr_ns,
1695 					      spd_read(iic0_dimm_addr[dimm_num], 36) >> 2);
1696 		}
1697 
1698 		/*
1699 		 * convert from nanoseconds to ddr clocks
1700 		 * round up if necessary
1701 		 */
1702 		t_wr_clk = MULDIV64(sdram_freq, t_wr_ns, ONE_BILLION);
1703 		ddr_check = MULDIV64(ONE_BILLION, t_wr_clk, t_wr_ns);
1704 		if (sdram_freq != ddr_check)
1705 			t_wr_clk++;
1706 
1707 		switch (t_wr_clk) {
1708 		case 0:
1709 		case 1:
1710 		case 2:
1711 		case 3:
1712 			mmode |= SDRAM_MMODE_WR_DDR2_3_CYC;
1713 			break;
1714 		case 4:
1715 			mmode |= SDRAM_MMODE_WR_DDR2_4_CYC;
1716 			break;
1717 		case 5:
1718 			mmode |= SDRAM_MMODE_WR_DDR2_5_CYC;
1719 			break;
1720 		default:
1721 			mmode |= SDRAM_MMODE_WR_DDR2_6_CYC;
1722 			break;
1723 		}
1724 		*write_recovery = t_wr_clk;
1725 	}
1726 
1727 	debug("CAS latency = %d\n", *selected_cas);
1728 	debug("Write recovery = %d\n", *write_recovery);
1729 
1730 	mtsdram(SDRAM_MMODE, mmode);
1731 }
1732 
1733 /*-----------------------------------------------------------------------------+
1734  * program_rtr.
1735  *-----------------------------------------------------------------------------*/
program_rtr(unsigned long * dimm_populated,unsigned char * iic0_dimm_addr,unsigned long num_dimm_banks)1736 static void program_rtr(unsigned long *dimm_populated,
1737 			unsigned char *iic0_dimm_addr,
1738 			unsigned long num_dimm_banks)
1739 {
1740 	PPC4xx_SYS_INFO board_cfg;
1741 	unsigned long max_refresh_rate;
1742 	unsigned long dimm_num;
1743 	unsigned long refresh_rate_type;
1744 	unsigned long refresh_rate;
1745 	unsigned long rint;
1746 	unsigned long sdram_freq;
1747 	unsigned long sdr_ddrpll;
1748 	unsigned long val;
1749 
1750 	/*------------------------------------------------------------------
1751 	 * Get the board configuration info.
1752 	 *-----------------------------------------------------------------*/
1753 	get_sys_info(&board_cfg);
1754 
1755 	/*------------------------------------------------------------------
1756 	 * Set the SDRAM Refresh Timing Register, SDRAM_RTR
1757 	 *-----------------------------------------------------------------*/
1758 	mfsdr(SDR0_DDR0, sdr_ddrpll);
1759 	sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
1760 
1761 	max_refresh_rate = 0;
1762 	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1763 		if (dimm_populated[dimm_num] != SDRAM_NONE) {
1764 
1765 			refresh_rate_type = spd_read(iic0_dimm_addr[dimm_num], 12);
1766 			refresh_rate_type &= 0x7F;
1767 			switch (refresh_rate_type) {
1768 			case 0:
1769 				refresh_rate =  15625;
1770 				break;
1771 			case 1:
1772 				refresh_rate =   3906;
1773 				break;
1774 			case 2:
1775 				refresh_rate =   7812;
1776 				break;
1777 			case 3:
1778 				refresh_rate =  31250;
1779 				break;
1780 			case 4:
1781 				refresh_rate =  62500;
1782 				break;
1783 			case 5:
1784 				refresh_rate = 125000;
1785 				break;
1786 			default:
1787 				refresh_rate = 0;
1788 				printf("ERROR: DIMM %d unsupported refresh rate/type.\n",
1789 				       (unsigned int)dimm_num);
1790 				printf("Replace the DIMM module with a supported DIMM.\n\n");
1791 				spd_ddr_init_hang ();
1792 				break;
1793 			}
1794 
1795 			max_refresh_rate = max(max_refresh_rate, refresh_rate);
1796 		}
1797 	}
1798 
1799 	rint = MULDIV64(sdram_freq, max_refresh_rate, ONE_BILLION);
1800 	mfsdram(SDRAM_RTR, val);
1801 	mtsdram(SDRAM_RTR, (val & ~SDRAM_RTR_RINT_MASK) |
1802 		(SDRAM_RTR_RINT_ENCODE(rint)));
1803 }
1804 
1805 /*------------------------------------------------------------------
1806  * This routine programs the SDRAM_TRx registers.
1807  *-----------------------------------------------------------------*/
program_tr(unsigned long * dimm_populated,unsigned char * iic0_dimm_addr,unsigned long num_dimm_banks)1808 static void program_tr(unsigned long *dimm_populated,
1809 		       unsigned char *iic0_dimm_addr,
1810 		       unsigned long num_dimm_banks)
1811 {
1812 	unsigned long dimm_num;
1813 	unsigned long sdram_ddr1;
1814 	unsigned long t_rp_ns;
1815 	unsigned long t_rcd_ns;
1816 	unsigned long t_rrd_ns;
1817 	unsigned long t_ras_ns;
1818 	unsigned long t_rc_ns;
1819 	unsigned long t_rfc_ns;
1820 	unsigned long t_wpc_ns;
1821 	unsigned long t_wtr_ns;
1822 	unsigned long t_rpc_ns;
1823 	unsigned long t_rp_clk;
1824 	unsigned long t_rcd_clk;
1825 	unsigned long t_rrd_clk;
1826 	unsigned long t_ras_clk;
1827 	unsigned long t_rc_clk;
1828 	unsigned long t_rfc_clk;
1829 	unsigned long t_wpc_clk;
1830 	unsigned long t_wtr_clk;
1831 	unsigned long t_rpc_clk;
1832 	unsigned long sdtr1, sdtr2, sdtr3;
1833 	unsigned long ddr_check;
1834 	unsigned long sdram_freq;
1835 	unsigned long sdr_ddrpll;
1836 
1837 	PPC4xx_SYS_INFO board_cfg;
1838 
1839 #ifdef CONFIG_SAM460EX
1840 	int ddr2_boost = 0;
1841 	char s[32] = { 0 };
1842 
1843 	getenv_r("ddr2_boost", s, 32);
1844 	if (strcmp(s,"1") == 0) ddr2_boost = 1;
1845 #endif
1846 	/*------------------------------------------------------------------
1847 	 * Get the board configuration info.
1848 	 *-----------------------------------------------------------------*/
1849 	get_sys_info(&board_cfg);
1850 
1851 	mfsdr(SDR0_DDR0, sdr_ddrpll);
1852 	sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
1853 
1854 	/*------------------------------------------------------------------
1855 	 * Handle the timing.  We need to find the worst case timing of all
1856 	 * the dimm modules installed.
1857 	 *-----------------------------------------------------------------*/
1858 	t_rp_ns = 0;
1859 	t_rrd_ns = 0;
1860 	t_rcd_ns = 0;
1861 	t_ras_ns = 0;
1862 	t_rc_ns = 0;
1863 	t_rfc_ns = 0;
1864 	t_wpc_ns = 0;
1865 	t_wtr_ns = 0;
1866 	t_rpc_ns = 0;
1867 	sdram_ddr1 = TRUE;
1868 
1869 	/* loop through all the DIMM slots on the board */
1870 	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1871 		/* If a dimm is installed in a particular slot ... */
1872 		if (dimm_populated[dimm_num] != SDRAM_NONE) {
1873 			if (dimm_populated[dimm_num] == SDRAM_DDR2)
1874 				sdram_ddr1 = TRUE;
1875 			else
1876 				sdram_ddr1 = FALSE;
1877 
1878 			t_rcd_ns = max(t_rcd_ns, spd_read(iic0_dimm_addr[dimm_num], 29) >> 2);
1879 			t_rrd_ns = max(t_rrd_ns, spd_read(iic0_dimm_addr[dimm_num], 28) >> 2);
1880 			t_rp_ns  = max(t_rp_ns,  spd_read(iic0_dimm_addr[dimm_num], 27) >> 2);
1881 			t_ras_ns = max(t_ras_ns, spd_read(iic0_dimm_addr[dimm_num], 30));
1882 			t_rc_ns  = max(t_rc_ns,  spd_read(iic0_dimm_addr[dimm_num], 41));
1883 			t_rfc_ns = max(t_rfc_ns, spd_read(iic0_dimm_addr[dimm_num], 42));
1884 		}
1885 	}
1886 
1887 	/*------------------------------------------------------------------
1888 	 * Set the SDRAM Timing Reg 1, SDRAM_TR1
1889 	 *-----------------------------------------------------------------*/
1890 	mfsdram(SDRAM_SDTR1, sdtr1);
1891 	sdtr1 &= ~(SDRAM_SDTR1_LDOF_MASK | SDRAM_SDTR1_RTW_MASK |
1892 		   SDRAM_SDTR1_WTWO_MASK | SDRAM_SDTR1_RTRO_MASK);
1893 
1894 	/* default values */
1895 	if (ddr2_boost)
1896 		sdtr1 |= SDRAM_SDTR1_LDOF_1_CLK;
1897 	else
1898 		sdtr1 |= SDRAM_SDTR1_LDOF_2_CLK;
1899 	sdtr1 |= SDRAM_SDTR1_RTW_2_CLK;
1900 
1901 	/* normal operations */
1902 	sdtr1 |= SDRAM_SDTR1_WTWO_0_CLK;
1903 	sdtr1 |= SDRAM_SDTR1_RTRO_1_CLK;
1904 
1905 	mtsdram(SDRAM_SDTR1, sdtr1);
1906 
1907 	/*------------------------------------------------------------------
1908 	 * Set the SDRAM Timing Reg 2, SDRAM_TR2
1909 	 *-----------------------------------------------------------------*/
1910 	mfsdram(SDRAM_SDTR2, sdtr2);
1911 	sdtr2 &= ~(SDRAM_SDTR2_RCD_MASK  | SDRAM_SDTR2_WTR_MASK |
1912 		   SDRAM_SDTR2_XSNR_MASK | SDRAM_SDTR2_WPC_MASK |
1913 		   SDRAM_SDTR2_RPC_MASK  | SDRAM_SDTR2_RP_MASK  |
1914 		   SDRAM_SDTR2_RRD_MASK);
1915 
1916 	/*
1917 	 * convert t_rcd from nanoseconds to ddr clocks
1918 	 * round up if necessary
1919 	 */
1920 	t_rcd_clk = MULDIV64(sdram_freq, t_rcd_ns, ONE_BILLION);
1921 	ddr_check = MULDIV64(ONE_BILLION, t_rcd_clk, t_rcd_ns);
1922 	if (sdram_freq != ddr_check)
1923 		t_rcd_clk++;
1924 
1925 	switch (t_rcd_clk) {
1926 	case 0:
1927 	case 1:
1928 		sdtr2 |= SDRAM_SDTR2_RCD_1_CLK;
1929 		break;
1930 	case 2:
1931 		sdtr2 |= SDRAM_SDTR2_RCD_2_CLK;
1932 		break;
1933 	case 3:
1934 		sdtr2 |= SDRAM_SDTR2_RCD_3_CLK;
1935 		break;
1936 	case 4:
1937 		sdtr2 |= SDRAM_SDTR2_RCD_4_CLK;
1938 		break;
1939 	default:
1940 		sdtr2 |= SDRAM_SDTR2_RCD_5_CLK;
1941 		break;
1942 	}
1943 
1944 	if (sdram_ddr1 == TRUE) { /* DDR1 */
1945 		if (sdram_freq < 200000000) {
1946 			sdtr2 |= SDRAM_SDTR2_WTR_1_CLK;
1947 			sdtr2 |= SDRAM_SDTR2_WPC_2_CLK;
1948 			sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
1949 		} else {
1950 			sdtr2 |= SDRAM_SDTR2_WTR_2_CLK;
1951 			sdtr2 |= SDRAM_SDTR2_WPC_3_CLK;
1952 			sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
1953 		}
1954 	} else { /* DDR2 */
1955 		/* loop through all the DIMM slots on the board */
1956 		for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1957 			/* If a dimm is installed in a particular slot ... */
1958 			if (dimm_populated[dimm_num] != SDRAM_NONE) {
1959 				t_wpc_ns = max(t_wtr_ns, spd_read(iic0_dimm_addr[dimm_num], 36) >> 2);
1960 				t_wtr_ns = max(t_wtr_ns, spd_read(iic0_dimm_addr[dimm_num], 37) >> 2);
1961 				t_rpc_ns = max(t_rpc_ns, spd_read(iic0_dimm_addr[dimm_num], 38) >> 2);
1962 			}
1963 		}
1964 
1965 		/*
1966 		 * convert from nanoseconds to ddr clocks
1967 		 * round up if necessary
1968 		 */
1969 		t_wpc_clk = MULDIV64(sdram_freq, t_wpc_ns, ONE_BILLION);
1970 		ddr_check = MULDIV64(ONE_BILLION, t_wpc_clk, t_wpc_ns);
1971 		if (sdram_freq != ddr_check)
1972 			t_wpc_clk++;
1973 
1974 		switch (t_wpc_clk) {
1975 		case 0:
1976 		case 1:
1977 		case 2:
1978 			sdtr2 |= SDRAM_SDTR2_WPC_2_CLK;
1979 			break;
1980 		case 3:
1981 			sdtr2 |= SDRAM_SDTR2_WPC_3_CLK;
1982 			break;
1983 		case 4:
1984 			sdtr2 |= SDRAM_SDTR2_WPC_4_CLK;
1985 			break;
1986 		case 5:
1987 			sdtr2 |= SDRAM_SDTR2_WPC_5_CLK;
1988 			break;
1989 		default:
1990 			sdtr2 |= SDRAM_SDTR2_WPC_6_CLK;
1991 			break;
1992 		}
1993 
1994 		/*
1995 		 * convert from nanoseconds to ddr clocks
1996 		 * round up if necessary
1997 		 */
1998 		t_wtr_clk = MULDIV64(sdram_freq, t_wtr_ns, ONE_BILLION);
1999 		ddr_check = MULDIV64(ONE_BILLION, t_wtr_clk, t_wtr_ns);
2000 		if (sdram_freq != ddr_check)
2001 			t_wtr_clk++;
2002 
2003 		switch (t_wtr_clk) {
2004 		case 0:
2005 		case 1:
2006 			sdtr2 |= SDRAM_SDTR2_WTR_1_CLK;
2007 			break;
2008 		case 2:
2009 			sdtr2 |= SDRAM_SDTR2_WTR_2_CLK;
2010 			break;
2011 		case 3:
2012 			sdtr2 |= SDRAM_SDTR2_WTR_3_CLK;
2013 			break;
2014 		default:
2015 			sdtr2 |= SDRAM_SDTR2_WTR_4_CLK;
2016 			break;
2017 		}
2018 
2019 		/*
2020 		 * convert from nanoseconds to ddr clocks
2021 		 * round up if necessary
2022 		 */
2023 		t_rpc_clk = MULDIV64(sdram_freq, t_rpc_ns, ONE_BILLION);
2024 		ddr_check = MULDIV64(ONE_BILLION, t_rpc_clk, t_rpc_ns);
2025 		if (sdram_freq != ddr_check)
2026 			t_rpc_clk++;
2027 
2028 		switch (t_rpc_clk) {
2029 		case 0:
2030 		case 1:
2031 		case 2:
2032 			sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
2033 			break;
2034 		case 3:
2035 			sdtr2 |= SDRAM_SDTR2_RPC_3_CLK;
2036 			break;
2037 		default:
2038 			sdtr2 |= SDRAM_SDTR2_RPC_4_CLK;
2039 			break;
2040 		}
2041 	}
2042 
2043 	/* default value */
2044 	sdtr2 |= SDRAM_SDTR2_XSNR_16_CLK;
2045 
2046 	/*
2047 	 * convert t_rrd from nanoseconds to ddr clocks
2048 	 * round up if necessary
2049 	 */
2050 	t_rrd_clk = MULDIV64(sdram_freq, t_rrd_ns, ONE_BILLION);
2051 	ddr_check = MULDIV64(ONE_BILLION, t_rrd_clk, t_rrd_ns);
2052 	if (sdram_freq != ddr_check)
2053 		t_rrd_clk++;
2054 
2055 	if (t_rrd_clk == 3)
2056 		sdtr2 |= SDRAM_SDTR2_RRD_3_CLK;
2057 	else
2058 		sdtr2 |= SDRAM_SDTR2_RRD_2_CLK;
2059 
2060 	/*
2061 	 * convert t_rp from nanoseconds to ddr clocks
2062 	 * round up if necessary
2063 	 */
2064 	t_rp_clk = MULDIV64(sdram_freq, t_rp_ns, ONE_BILLION);
2065 	ddr_check = MULDIV64(ONE_BILLION, t_rp_clk, t_rp_ns);
2066 	if (sdram_freq != ddr_check)
2067 		t_rp_clk++;
2068 
2069 	switch (t_rp_clk) {
2070 	case 0:
2071 	case 1:
2072 	case 2:
2073 	case 3:
2074 		sdtr2 |= SDRAM_SDTR2_RP_3_CLK;
2075 		break;
2076 	case 4:
2077 		sdtr2 |= SDRAM_SDTR2_RP_4_CLK;
2078 		break;
2079 	case 5:
2080 		sdtr2 |= SDRAM_SDTR2_RP_5_CLK;
2081 		break;
2082 	case 6:
2083 		sdtr2 |= SDRAM_SDTR2_RP_6_CLK;
2084 		break;
2085 	default:
2086 		sdtr2 |= SDRAM_SDTR2_RP_7_CLK;
2087 		break;
2088 	}
2089 
2090 	mtsdram(SDRAM_SDTR2, sdtr2);
2091 
2092 	/*------------------------------------------------------------------
2093 	 * Set the SDRAM Timing Reg 3, SDRAM_TR3
2094 	 *-----------------------------------------------------------------*/
2095 	mfsdram(SDRAM_SDTR3, sdtr3);
2096 	sdtr3 &= ~(SDRAM_SDTR3_RAS_MASK  | SDRAM_SDTR3_RC_MASK |
2097 		   SDRAM_SDTR3_XCS_MASK | SDRAM_SDTR3_RFC_MASK);
2098 
2099 	/*
2100 	 * convert t_ras from nanoseconds to ddr clocks
2101 	 * round up if necessary
2102 	 */
2103 	t_ras_clk = MULDIV64(sdram_freq, t_ras_ns, ONE_BILLION);
2104 	ddr_check = MULDIV64(ONE_BILLION, t_ras_clk, t_ras_ns);
2105 	if (sdram_freq != ddr_check)
2106 		t_ras_clk++;
2107 
2108 	sdtr3 |= SDRAM_SDTR3_RAS_ENCODE(t_ras_clk);
2109 
2110 	/*
2111 	 * convert t_rc from nanoseconds to ddr clocks
2112 	 * round up if necessary
2113 	 */
2114 	t_rc_clk = MULDIV64(sdram_freq, t_rc_ns, ONE_BILLION);
2115 	ddr_check = MULDIV64(ONE_BILLION, t_rc_clk, t_rc_ns);
2116 	if (sdram_freq != ddr_check)
2117 		t_rc_clk++;
2118 
2119 	sdtr3 |= SDRAM_SDTR3_RC_ENCODE(t_rc_clk);
2120 
2121 	/* default xcs value */
2122 	sdtr3 |= SDRAM_SDTR3_XCS;
2123 
2124 	/*
2125 	 * convert t_rfc from nanoseconds to ddr clocks
2126 	 * round up if necessary
2127 	 */
2128 	t_rfc_clk = MULDIV64(sdram_freq, t_rfc_ns, ONE_BILLION);
2129 	ddr_check = MULDIV64(ONE_BILLION, t_rfc_clk, t_rfc_ns);
2130 	if (sdram_freq != ddr_check)
2131 		t_rfc_clk++;
2132 
2133 	sdtr3 |= SDRAM_SDTR3_RFC_ENCODE(t_rfc_clk);
2134 
2135 	mtsdram(SDRAM_SDTR3, sdtr3);
2136 }
2137 
2138 /*-----------------------------------------------------------------------------+
2139  * program_bxcf.
2140  *-----------------------------------------------------------------------------*/
program_bxcf(unsigned long * dimm_populated,unsigned char * iic0_dimm_addr,unsigned long num_dimm_banks)2141 static void program_bxcf(unsigned long *dimm_populated,
2142 			 unsigned char *iic0_dimm_addr,
2143 			 unsigned long num_dimm_banks)
2144 {
2145 	unsigned long dimm_num;
2146 	unsigned long num_col_addr;
2147 	unsigned long num_ranks;
2148 	unsigned long num_banks;
2149 	unsigned long mode;
2150 	unsigned long ind_rank;
2151 	unsigned long ind;
2152 	unsigned long ind_bank;
2153 	unsigned long bank_0_populated;
2154 
2155 	/*------------------------------------------------------------------
2156 	 * Set the BxCF regs.  First, wipe out the bank config registers.
2157 	 *-----------------------------------------------------------------*/
2158 	mtsdram(SDRAM_MB0CF, 0x00000000);
2159 	mtsdram(SDRAM_MB1CF, 0x00000000);
2160 	mtsdram(SDRAM_MB2CF, 0x00000000);
2161 	mtsdram(SDRAM_MB3CF, 0x00000000);
2162 
2163 	mode = SDRAM_BXCF_M_BE_ENABLE;
2164 
2165 	bank_0_populated = 0;
2166 
2167 	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
2168 		if (dimm_populated[dimm_num] != SDRAM_NONE) {
2169 			num_col_addr = spd_read(iic0_dimm_addr[dimm_num], 4);
2170 			num_ranks = spd_read(iic0_dimm_addr[dimm_num], 5);
2171 			if ((spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
2172 				num_ranks = (num_ranks & 0x0F) +1;
2173 			else
2174 				num_ranks = num_ranks & 0x0F;
2175 
2176 			num_banks = spd_read(iic0_dimm_addr[dimm_num], 17);
2177 
2178 			for (ind_bank = 0; ind_bank < 2; ind_bank++) {
2179 				if (num_banks == 4)
2180 					ind = 0;
2181 				else
2182 					ind = 5 << 8;
2183 				switch (num_col_addr) {
2184 				case 0x08:
2185 					mode |= (SDRAM_BXCF_M_AM_0 + ind);
2186 					break;
2187 				case 0x09:
2188 					mode |= (SDRAM_BXCF_M_AM_1 + ind);
2189 					break;
2190 				case 0x0A:
2191 					mode |= (SDRAM_BXCF_M_AM_2 + ind);
2192 					break;
2193 				case 0x0B:
2194 					mode |= (SDRAM_BXCF_M_AM_3 + ind);
2195 					break;
2196 				case 0x0C:
2197 					mode |= (SDRAM_BXCF_M_AM_4 + ind);
2198 					break;
2199 				default:
2200 					printf("DDR-SDRAM: DIMM %d BxCF configuration.\n",
2201 					       (unsigned int)dimm_num);
2202 					printf("ERROR: Unsupported value for number of "
2203 					       "column addresses: %d.\n", (unsigned int)num_col_addr);
2204 					printf("Replace the DIMM module with a supported DIMM.\n\n");
2205 					spd_ddr_init_hang ();
2206 				}
2207 			}
2208 
2209 			if ((dimm_populated[dimm_num] != SDRAM_NONE)&& (dimm_num ==1))
2210 				bank_0_populated = 1;
2211 
2212 			for (ind_rank = 0; ind_rank < num_ranks; ind_rank++) {
2213 				mtsdram(SDRAM_MB0CF +
2214 					((dimm_num + bank_0_populated + ind_rank) << 2),
2215 					mode);
2216 			}
2217 		}
2218 	}
2219 }
2220 
2221 /*------------------------------------------------------------------
2222  * program memory queue.
2223  *-----------------------------------------------------------------*/
program_memory_queue(unsigned long * dimm_populated,unsigned char * iic0_dimm_addr,unsigned long num_dimm_banks)2224 static void program_memory_queue(unsigned long *dimm_populated,
2225 				 unsigned char *iic0_dimm_addr,
2226 				 unsigned long num_dimm_banks)
2227 {
2228 	unsigned long dimm_num;
2229 	phys_size_t rank_base_addr;
2230 	unsigned long rank_reg;
2231 	phys_size_t rank_size_bytes;
2232 	unsigned long rank_size_id;
2233 	unsigned long num_ranks;
2234 	unsigned long baseadd_size;
2235 	unsigned long i;
2236 	unsigned long bank_0_populated = 0;
2237 	phys_size_t total_size = 0;
2238 
2239 	/*------------------------------------------------------------------
2240 	 * Reset the rank_base_address.
2241 	 *-----------------------------------------------------------------*/
2242 	rank_reg   = SDRAM_R0BAS;
2243 
2244 	rank_base_addr = 0x00000000;
2245 
2246 	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
2247 		if (dimm_populated[dimm_num] != SDRAM_NONE) {
2248 			num_ranks = spd_read(iic0_dimm_addr[dimm_num], 5);
2249 			if ((spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
2250 				num_ranks = (num_ranks & 0x0F) + 1;
2251 			else
2252 				num_ranks = num_ranks & 0x0F;
2253 
2254 			rank_size_id = spd_read(iic0_dimm_addr[dimm_num], 31);
2255 
2256 			/*------------------------------------------------------------------
2257 			 * Set the sizes
2258 			 *-----------------------------------------------------------------*/
2259 			baseadd_size = 0;
2260 			switch (rank_size_id) {
2261 			case 0x01:
2262 				baseadd_size |= SDRAM_RXBAS_SDSZ_1024;
2263 				total_size = 1024;
2264 				break;
2265 			case 0x02:
2266 				baseadd_size |= SDRAM_RXBAS_SDSZ_2048;
2267 				total_size = 2048;
2268 				break;
2269 			case 0x04:
2270 				baseadd_size |= SDRAM_RXBAS_SDSZ_4096;
2271 				total_size = 4096;
2272 				break;
2273 			case 0x08:
2274 				baseadd_size |= SDRAM_RXBAS_SDSZ_32;
2275 				total_size = 32;
2276 				break;
2277 			case 0x10:
2278 				baseadd_size |= SDRAM_RXBAS_SDSZ_64;
2279 				total_size = 64;
2280 				break;
2281 			case 0x20:
2282 				baseadd_size |= SDRAM_RXBAS_SDSZ_128;
2283 				total_size = 128;
2284 				break;
2285 			case 0x40:
2286 				baseadd_size |= SDRAM_RXBAS_SDSZ_256;
2287 				total_size = 256;
2288 				break;
2289 			case 0x80:
2290 				baseadd_size |= SDRAM_RXBAS_SDSZ_512;
2291 				total_size = 512;
2292 				break;
2293 			default:
2294 				printf("DDR-SDRAM: DIMM %d memory queue configuration.\n",
2295 				       (unsigned int)dimm_num);
2296 				printf("ERROR: Unsupported value for the banksize: %d.\n",
2297 				       (unsigned int)rank_size_id);
2298 				printf("Replace the DIMM module with a supported DIMM.\n\n");
2299 				spd_ddr_init_hang ();
2300 			}
2301 			rank_size_bytes = total_size << 20;
2302 
2303 			if ((dimm_populated[dimm_num] != SDRAM_NONE) && (dimm_num == 1))
2304 				bank_0_populated = 1;
2305 
2306 			for (i = 0; i < num_ranks; i++)	{
2307 				mtdcr_any(rank_reg+i+dimm_num+bank_0_populated,
2308 					  (SDRAM_RXBAS_SDBA_ENCODE(rank_base_addr) |
2309 					   baseadd_size));
2310 				rank_base_addr += rank_size_bytes;
2311 			}
2312 		}
2313 	}
2314 
2315 #if defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
2316     defined(CONFIG_460EX) || defined(CONFIG_460GT) || \
2317     defined(CONFIG_460SX)
2318 	/*
2319 	 * Enable high bandwidth access
2320 	 * This is currently not used, but with this setup
2321 	 * it is possible to use it later on in e.g. the Linux
2322 	 * EMAC driver for performance gain.
2323 	 */
2324 	mtdcr(SDRAM_PLBADDULL, 0x00000000); /* MQ0_BAUL */
2325 	mtdcr(SDRAM_PLBADDUHB, 0x00000008); /* MQ0_BAUH */
2326 
2327 	/*
2328 	 * Set optimal value for Memory Queue HB/LL Configuration registers
2329 	 */
2330 	mtdcr(SDRAM_CONF1HB, (mfdcr(SDRAM_CONF1HB) & ~SDRAM_CONF1HB_MASK) |
2331 	      SDRAM_CONF1HB_AAFR | SDRAM_CONF1HB_RPEN | SDRAM_CONF1HB_RFTE |
2332 	      SDRAM_CONF1HB_RPLM | SDRAM_CONF1HB_WRCL);
2333 	mtdcr(SDRAM_CONF1LL, (mfdcr(SDRAM_CONF1LL) & ~SDRAM_CONF1LL_MASK) |
2334 	      SDRAM_CONF1LL_AAFR | SDRAM_CONF1LL_RPEN | SDRAM_CONF1LL_RFTE |
2335 	      SDRAM_CONF1LL_RPLM);
2336 	mtdcr(SDRAM_CONFPATHB, mfdcr(SDRAM_CONFPATHB) | SDRAM_CONFPATHB_TPEN);
2337 #endif
2338 }
2339 
2340 #ifdef CONFIG_DDR_ECC
2341 /*-----------------------------------------------------------------------------+
2342  * program_ecc.
2343  *-----------------------------------------------------------------------------*/
program_ecc(unsigned long * dimm_populated,unsigned char * iic0_dimm_addr,unsigned long num_dimm_banks,unsigned long tlb_word2_i_value)2344 static void program_ecc(unsigned long *dimm_populated,
2345 			unsigned char *iic0_dimm_addr,
2346 			unsigned long num_dimm_banks,
2347 			unsigned long tlb_word2_i_value)
2348 {
2349 	unsigned long dimm_num;
2350 	unsigned long ecc;
2351 
2352 	ecc = 0;
2353 	/* loop through all the DIMM slots on the board */
2354 	for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
2355 		/* If a dimm is installed in a particular slot ... */
2356 		if (dimm_populated[dimm_num] != SDRAM_NONE)
2357 			ecc = max(ecc, spd_read(iic0_dimm_addr[dimm_num], 11));
2358 	}
2359 	if (ecc == 0)
2360 		return;
2361 
2362 	do_program_ecc(tlb_word2_i_value);
2363 }
2364 #endif
2365 
2366 #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
2367 /*-----------------------------------------------------------------------------+
2368  * program_DQS_calibration.
2369  *-----------------------------------------------------------------------------*/
program_DQS_calibration(unsigned long * dimm_populated,unsigned char * iic0_dimm_addr,unsigned long num_dimm_banks)2370 static void program_DQS_calibration(unsigned long *dimm_populated,
2371 				    unsigned char *iic0_dimm_addr,
2372 				    unsigned long num_dimm_banks)
2373 {
2374 	unsigned long val;
2375 
2376 #ifdef HARD_CODED_DQS /* calibration test with hardvalues */
2377 	mtsdram(SDRAM_RQDC, 0x80000037);
2378 	mtsdram(SDRAM_RDCC, 0x40000000);
2379 	mtsdram(SDRAM_RFDC, 0x000001DF);
2380 
2381 	test();
2382 #else
2383 	/*------------------------------------------------------------------
2384 	 * Program RDCC register
2385 	 * Read sample cycle auto-update enable
2386 	 *-----------------------------------------------------------------*/
2387 
2388 	mfsdram(SDRAM_RDCC, val);
2389 	mtsdram(SDRAM_RDCC,
2390 		(val & ~(SDRAM_RDCC_RDSS_MASK | SDRAM_RDCC_RSAE_MASK))
2391 		| SDRAM_RDCC_RSAE_ENABLE);
2392 
2393 	/*------------------------------------------------------------------
2394 	 * Program RQDC register
2395 	 * Internal DQS delay mechanism enable
2396 	 *-----------------------------------------------------------------*/
2397 	mtsdram(SDRAM_RQDC, (SDRAM_RQDC_RQDE_ENABLE|SDRAM_RQDC_RQFD_ENCODE(0x38)));
2398 
2399 	/*------------------------------------------------------------------
2400 	 * Program RFDC register
2401 	 * Set Feedback Fractional Oversample
2402 	 * Auto-detect read sample cycle enable
2403 	 * Set RFOS to 1/4 of memclk cycle (0x3f)
2404 	 *-----------------------------------------------------------------*/
2405 	mfsdram(SDRAM_RFDC, val);
2406 	mtsdram(SDRAM_RFDC,
2407 		(val & ~(SDRAM_RFDC_ARSE_MASK | SDRAM_RFDC_RFOS_MASK |
2408 			 SDRAM_RFDC_RFFD_MASK))
2409 		| (SDRAM_RFDC_ARSE_ENABLE | SDRAM_RFDC_RFOS_ENCODE(0x5F) | // was 0x3f
2410 		   SDRAM_RFDC_RFFD_ENCODE(0)));
2411 
2412 	DQS_calibration_process();
2413 #endif
2414 }
2415 
short_mem_test(void)2416 static int short_mem_test(void)
2417 {
2418 	u32 *membase;
2419 	u32 bxcr_num;
2420 	u32 bxcf;
2421 	int i;
2422 	int j;
2423 	phys_size_t base_addr;
2424 	u32 test[NUMMEMTESTS][NUMMEMWORDS] = {
2425 		{0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF,
2426 		 0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF},
2427 		{0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000,
2428 		 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000},
2429 		{0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555,
2430 		 0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555},
2431 		{0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA,
2432 		 0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA},
2433 		{0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A,
2434 		 0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A},
2435 		{0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5,
2436 		 0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5},
2437 		{0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA,
2438 		 0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA},
2439 		{0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55,
2440 		 0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55} };
2441 	int l;
2442 
2443 	for (bxcr_num = 0; bxcr_num < MAXBXCF; bxcr_num++) {
2444 		mfsdram(SDRAM_MB0CF + (bxcr_num << 2), bxcf);
2445 
2446 		/* Banks enabled */
2447 		if ((bxcf & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
2448 			/* Bank is enabled */
2449 
2450 			/*
2451 			 * Only run test on accessable memory (below 2GB)
2452 			 */
2453 			base_addr = SDRAM_RXBAS_SDBA_DECODE(mfdcr_any(SDRAM_R0BAS+bxcr_num));
2454 			if (base_addr >= CONFIG_MAX_MEM_MAPPED)
2455 				continue;
2456 
2457 			/*------------------------------------------------------------------
2458 			 * Run the short memory test.
2459 			 *-----------------------------------------------------------------*/
2460 			membase = (u32 *)(u32)base_addr;
2461 
2462 			for (i = 0; i < NUMMEMTESTS; i++) {
2463 				for (j = 0; j < NUMMEMWORDS; j++) {
2464 					membase[j] = test[i][j];
2465 					ppcDcbf((u32)&(membase[j]));
2466 				}
2467 				sync();
2468 				for (l=0; l<NUMLOOPS; l++) {
2469 					for (j = 0; j < NUMMEMWORDS; j++) {
2470 						if (membase[j] != test[i][j]) {
2471 							ppcDcbf((u32)&(membase[j]));
2472 							return 0;
2473 						}
2474 						ppcDcbf((u32)&(membase[j]));
2475 					}
2476 					sync();
2477 				}
2478 			}
2479 		}	/* if bank enabled */
2480 	}		/* for bxcf_num */
2481 
2482 	return 1;
2483 }
2484 
2485 #ifndef HARD_CODED_DQS
2486 /*-----------------------------------------------------------------------------+
2487  * DQS_calibration_process.
2488  *-----------------------------------------------------------------------------*/
DQS_calibration_process(void)2489 static void DQS_calibration_process(void)
2490 {
2491 	unsigned long rfdc_reg;
2492 	unsigned long rffd;
2493 	unsigned long val;
2494 	long rffd_average;
2495 	long max_start;
2496 	long min_end;
2497 	unsigned long begin_rqfd[MAXRANKS];
2498 	unsigned long begin_rffd[MAXRANKS];
2499 	unsigned long end_rqfd[MAXRANKS];
2500 	unsigned long end_rffd[MAXRANKS];
2501 	char window_found;
2502 	unsigned long dlycal;
2503 	unsigned long dly_val;
2504 	unsigned long max_pass_length;
2505 	unsigned long current_pass_length;
2506 	unsigned long current_fail_length;
2507 	unsigned long current_start;
2508 	long max_end;
2509 	unsigned char fail_found;
2510 	unsigned char pass_found;
2511 #if !defined(CONFIG_DDR_RQDC_FIXED)
2512 	u32 rqdc_reg;
2513 	u32 rqfd;
2514 	u32 rqfd_start;
2515 	u32 rqfd_average;
2516 	int loopi = 0;
2517 	char str[] = "Auto calibration -";
2518 	char slash[] = "\\|/-\\|/-";
2519 
2520 	/*------------------------------------------------------------------
2521 	 * Test to determine the best read clock delay tuning bits.
2522 	 *
2523 	 * Before the DDR controller can be used, the read clock delay needs to be
2524 	 * set.  This is SDRAM_RQDC[RQFD] and SDRAM_RFDC[RFFD].
2525 	 * This value cannot be hardcoded into the program because it changes
2526 	 * depending on the board's setup and environment.
2527 	 * To do this, all delay values are tested to see if they
2528 	 * work or not.  By doing this, you get groups of fails with groups of
2529 	 * passing values.  The idea is to find the start and end of a passing
2530 	 * window and take the center of it to use as the read clock delay.
2531 	 *
2532 	 * A failure has to be seen first so that when we hit a pass, we know
2533 	 * that it is truely the start of the window.  If we get passing values
2534 	 * to start off with, we don't know if we are at the start of the window.
2535 	 *
2536 	 * The code assumes that a failure will always be found.
2537 	 * If a failure is not found, there is no easy way to get the middle
2538 	 * of the passing window.  I guess we can pretty much pick any value
2539 	 * but some values will be better than others.  Since the lowest speed
2540 	 * we can clock the DDR interface at is 200 MHz (2x 100 MHz PLB speed),
2541 	 * from experimentation it is safe to say you will always have a failure.
2542 	 *-----------------------------------------------------------------*/
2543 
2544 	/* first fix RQDC[RQFD] to an average of 80 degre phase shift to find RFDC[RFFD] */
2545 	rqfd_start = 64; /* test-only: don't know if this is the _best_ start value */
2546 
2547 	puts(str);
2548 
2549 calibration_loop:
2550 	mfsdram(SDRAM_RQDC, rqdc_reg);
2551 	mtsdram(SDRAM_RQDC, (rqdc_reg & ~SDRAM_RQDC_RQFD_MASK) |
2552 		SDRAM_RQDC_RQFD_ENCODE(rqfd_start));
2553 #else /* CONFIG_DDR_RQDC_FIXED */
2554 	/*
2555 	 * On Katmai the complete auto-calibration somehow doesn't seem to
2556 	 * produce the best results, meaning optimal values for RQFD/RFFD.
2557 	 * This was discovered by GDA using a high bandwidth scope,
2558 	 * analyzing the DDR2 signals. GDA provided a fixed value for RQFD,
2559 	 * so now on Katmai "only" RFFD is auto-calibrated.
2560 	 */
2561 	mtsdram(SDRAM_RQDC, CONFIG_DDR_RQDC_FIXED);
2562 #endif /* CONFIG_DDR_RQDC_FIXED */
2563 
2564 	max_start = 0;
2565 	min_end = 0;
2566 	begin_rqfd[0] = 0;
2567 	begin_rffd[0] = 0;
2568 	begin_rqfd[1] = 0;
2569 	begin_rffd[1] = 0;
2570 	end_rqfd[0] = 0;
2571 	end_rffd[0] = 0;
2572 	end_rqfd[1] = 0;
2573 	end_rffd[1] = 0;
2574 	window_found = FALSE;
2575 
2576 	max_pass_length = 0;
2577 	max_start = 0;
2578 	max_end = 0;
2579 	current_pass_length = 0;
2580 	current_fail_length = 0;
2581 	current_start = 0;
2582 	window_found = FALSE;
2583 	fail_found = FALSE;
2584 	pass_found = FALSE;
2585 
2586 	/*
2587 	 * get the delay line calibration register value
2588 	 */
2589 	mfsdram(SDRAM_DLCR, dlycal);
2590 	dly_val = SDRAM_DLYCAL_DLCV_DECODE(dlycal) << 2;
2591 
2592 	for (rffd = 0; rffd <= SDRAM_RFDC_RFFD_MAX; rffd++) {
2593 		mfsdram(SDRAM_RFDC, rfdc_reg);
2594 		rfdc_reg &= ~(SDRAM_RFDC_RFFD_MASK);
2595 
2596 		/*------------------------------------------------------------------
2597 		 * Set the timing reg for the test.
2598 		 *-----------------------------------------------------------------*/
2599 		mtsdram(SDRAM_RFDC, rfdc_reg | SDRAM_RFDC_RFFD_ENCODE(rffd));
2600 
2601 		/*------------------------------------------------------------------
2602 		 * See if the rffd value passed.
2603 		 *-----------------------------------------------------------------*/
2604 		if (short_mem_test()) {
2605 			if (fail_found == TRUE) {
2606 				pass_found = TRUE;
2607 				if (current_pass_length == 0)
2608 					current_start = rffd;
2609 
2610 				current_fail_length = 0;
2611 				current_pass_length++;
2612 
2613 				if (current_pass_length > max_pass_length) {
2614 					max_pass_length = current_pass_length;
2615 					max_start = current_start;
2616 					max_end = rffd;
2617 				}
2618 			}
2619 		} else {
2620 			current_pass_length = 0;
2621 			current_fail_length++;
2622 
2623 			if (current_fail_length >= (dly_val >> 2)) {
2624 				if (fail_found == FALSE) {
2625 					fail_found = TRUE;
2626 				} else if (pass_found == TRUE) {
2627 					window_found = TRUE;
2628 					break;
2629 				}
2630 			}
2631 		}
2632 	}		/* for rffd */
2633 
2634 	/*------------------------------------------------------------------
2635 	 * Set the average RFFD value
2636 	 *-----------------------------------------------------------------*/
2637 	rffd_average = ((max_start + max_end) >> 1);
2638 
2639 	if (rffd_average < 0)
2640 		rffd_average = 0;
2641 
2642 	if (rffd_average > SDRAM_RFDC_RFFD_MAX)
2643 		rffd_average = SDRAM_RFDC_RFFD_MAX;
2644 	/* now fix RFDC[RFFD] found and find RQDC[RQFD] */
2645 	mtsdram(SDRAM_RFDC, rfdc_reg | SDRAM_RFDC_RFFD_ENCODE(rffd_average));
2646 
2647 #if !defined(CONFIG_DDR_RQDC_FIXED)
2648 	max_pass_length = 0;
2649 	max_start = 0;
2650 	max_end = 0;
2651 	current_pass_length = 0;
2652 	current_fail_length = 0;
2653 	current_start = 0;
2654 	window_found = FALSE;
2655 	fail_found = FALSE;
2656 	pass_found = FALSE;
2657 
2658 	for (rqfd = 0; rqfd <= SDRAM_RQDC_RQFD_MAX; rqfd++) {
2659 		mfsdram(SDRAM_RQDC, rqdc_reg);
2660 		rqdc_reg &= ~(SDRAM_RQDC_RQFD_MASK);
2661 
2662 		/*------------------------------------------------------------------
2663 		 * Set the timing reg for the test.
2664 		 *-----------------------------------------------------------------*/
2665 		mtsdram(SDRAM_RQDC, rqdc_reg | SDRAM_RQDC_RQFD_ENCODE(rqfd));
2666 
2667 		/*------------------------------------------------------------------
2668 		 * See if the rffd value passed.
2669 		 *-----------------------------------------------------------------*/
2670 		if (short_mem_test()) {
2671 			if (fail_found == TRUE) {
2672 				pass_found = TRUE;
2673 				if (current_pass_length == 0)
2674 					current_start = rqfd;
2675 
2676 				current_fail_length = 0;
2677 				current_pass_length++;
2678 
2679 				if (current_pass_length > max_pass_length) {
2680 					max_pass_length = current_pass_length;
2681 					max_start = current_start;
2682 					max_end = rqfd;
2683 				}
2684 			}
2685 		} else {
2686 			current_pass_length = 0;
2687 			current_fail_length++;
2688 
2689 			if (fail_found == FALSE) {
2690 				fail_found = TRUE;
2691 			} else if (pass_found == TRUE) {
2692 				window_found = TRUE;
2693 				break;
2694 			}
2695 		}
2696 	}
2697 
2698 	rqfd_average = ((max_start + max_end) >> 1);
2699 
2700 	/*------------------------------------------------------------------
2701 	 * Make sure we found the valid read passing window.  Halt if not
2702 	 *-----------------------------------------------------------------*/
2703 	if (window_found == FALSE) {
2704 		if (rqfd_start < SDRAM_RQDC_RQFD_MAX) {
2705 			putc('\b');
2706 			putc(slash[loopi++ % 8]);
2707 
2708 			/* try again from with a different RQFD start value */
2709 			rqfd_start++;
2710 			goto calibration_loop;
2711 		}
2712 
2713 		printf("\nERROR: Cannot determine a common read delay for the "
2714 		       "DIMM(s) installed.\n");
2715 		debug("%s[%d] ERROR : \n", __FUNCTION__,__LINE__);
2716 		ppc4xx_ibm_ddr2_register_dump();
2717 		spd_ddr_init_hang ();
2718 	}
2719 
2720 	if (rqfd_average < 0)
2721 		rqfd_average = 0;
2722 
2723 	if (rqfd_average > SDRAM_RQDC_RQFD_MAX)
2724 		rqfd_average = SDRAM_RQDC_RQFD_MAX;
2725 
2726 	mtsdram(SDRAM_RQDC,
2727 		(rqdc_reg & ~SDRAM_RQDC_RQFD_MASK) |
2728 		SDRAM_RQDC_RQFD_ENCODE(rqfd_average));
2729 
2730 	blank_string(strlen(str));
2731 #endif /* CONFIG_DDR_RQDC_FIXED */
2732 
2733 	mfsdram(SDRAM_DLCR, val);
2734 	debug("%s[%d] DLCR: 0x%08lX\n", __FUNCTION__, __LINE__, val);
2735 	mfsdram(SDRAM_RQDC, val);
2736 	debug("%s[%d] RQDC: 0x%08lX\n", __FUNCTION__, __LINE__, val);
2737 	mfsdram(SDRAM_RFDC, val);
2738 	debug("%s[%d] RFDC: 0x%08lX\n", __FUNCTION__, __LINE__, val);
2739 	mfsdram(SDRAM_RDCC, val);
2740 	debug("%s[%d] RDCC: 0x%08lX\n", __FUNCTION__, __LINE__, val);
2741 }
2742 #else /* calibration test with hardvalues */
2743 /*-----------------------------------------------------------------------------+
2744  * DQS_calibration_process.
2745  *-----------------------------------------------------------------------------*/
test(void)2746 static void test(void)
2747 {
2748 	unsigned long dimm_num;
2749 	unsigned long ecc_temp;
2750 	unsigned long i, j;
2751 	unsigned long *membase;
2752 	unsigned long bxcf[MAXRANKS];
2753 	unsigned long val;
2754 	char window_found;
2755 	char begin_found[MAXDIMMS];
2756 	char end_found[MAXDIMMS];
2757 	char search_end[MAXDIMMS];
2758 	unsigned long test[NUMMEMTESTS][NUMMEMWORDS] = {
2759 		{0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF,
2760 		 0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF},
2761 		{0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000,
2762 		 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000},
2763 		{0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555,
2764 		 0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555},
2765 		{0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA,
2766 		 0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA},
2767 		{0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A,
2768 		 0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A},
2769 		{0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5,
2770 		 0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5},
2771 		{0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA,
2772 		 0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA},
2773 		{0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55,
2774 		 0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55} };
2775 
2776 	/*------------------------------------------------------------------
2777 	 * Test to determine the best read clock delay tuning bits.
2778 	 *
2779 	 * Before the DDR controller can be used, the read clock delay needs to be
2780 	 * set.  This is SDRAM_RQDC[RQFD] and SDRAM_RFDC[RFFD].
2781 	 * This value cannot be hardcoded into the program because it changes
2782 	 * depending on the board's setup and environment.
2783 	 * To do this, all delay values are tested to see if they
2784 	 * work or not.  By doing this, you get groups of fails with groups of
2785 	 * passing values.  The idea is to find the start and end of a passing
2786 	 * window and take the center of it to use as the read clock delay.
2787 	 *
2788 	 * A failure has to be seen first so that when we hit a pass, we know
2789 	 * that it is truely the start of the window.  If we get passing values
2790 	 * to start off with, we don't know if we are at the start of the window.
2791 	 *
2792 	 * The code assumes that a failure will always be found.
2793 	 * If a failure is not found, there is no easy way to get the middle
2794 	 * of the passing window.  I guess we can pretty much pick any value
2795 	 * but some values will be better than others.  Since the lowest speed
2796 	 * we can clock the DDR interface at is 200 MHz (2x 100 MHz PLB speed),
2797 	 * from experimentation it is safe to say you will always have a failure.
2798 	 *-----------------------------------------------------------------*/
2799 	mfsdram(SDRAM_MCOPT1, ecc_temp);
2800 	ecc_temp &= SDRAM_MCOPT1_MCHK_MASK;
2801 	mfsdram(SDRAM_MCOPT1, val);
2802 	mtsdram(SDRAM_MCOPT1, (val & ~SDRAM_MCOPT1_MCHK_MASK) |
2803 		SDRAM_MCOPT1_MCHK_NON);
2804 
2805 	window_found = FALSE;
2806 	begin_found[0] = FALSE;
2807 	end_found[0] = FALSE;
2808 	search_end[0] = FALSE;
2809 	begin_found[1] = FALSE;
2810 	end_found[1] = FALSE;
2811 	search_end[1] = FALSE;
2812 
2813 	for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
2814 		mfsdram(SDRAM_MB0CF + (bxcr_num << 2), bxcf[bxcr_num]);
2815 
2816 		/* Banks enabled */
2817 		if ((bxcf[dimm_num] & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
2818 
2819 			/* Bank is enabled */
2820 			membase =
2821 				(unsigned long*)(SDRAM_RXBAS_SDBA_DECODE(mfdcr_any(SDRAM_R0BAS+dimm_num)));
2822 
2823 			/*------------------------------------------------------------------
2824 			 * Run the short memory test.
2825 			 *-----------------------------------------------------------------*/
2826 			for (i = 0; i < NUMMEMTESTS; i++) {
2827 				for (j = 0; j < NUMMEMWORDS; j++) {
2828 					membase[j] = test[i][j];
2829 					ppcDcbf((u32)&(membase[j]));
2830 				}
2831 				sync();
2832 				for (j = 0; j < NUMMEMWORDS; j++) {
2833 					if (membase[j] != test[i][j]) {
2834 						ppcDcbf((u32)&(membase[j]));
2835 						break;
2836 					}
2837 					ppcDcbf((u32)&(membase[j]));
2838 				}
2839 				sync();
2840 				if (j < NUMMEMWORDS)
2841 					break;
2842 			}
2843 
2844 			/*------------------------------------------------------------------
2845 			 * See if the rffd value passed.
2846 			 *-----------------------------------------------------------------*/
2847 			if (i < NUMMEMTESTS) {
2848 				if ((end_found[dimm_num] == FALSE) &&
2849 				    (search_end[dimm_num] == TRUE)) {
2850 					end_found[dimm_num] = TRUE;
2851 				}
2852 				if ((end_found[0] == TRUE) &&
2853 				    (end_found[1] == TRUE))
2854 					break;
2855 			} else {
2856 				if (begin_found[dimm_num] == FALSE) {
2857 					begin_found[dimm_num] = TRUE;
2858 					search_end[dimm_num] = TRUE;
2859 				}
2860 			}
2861 		} else {
2862 			begin_found[dimm_num] = TRUE;
2863 			end_found[dimm_num] = TRUE;
2864 		}
2865 	}
2866 
2867 	if ((begin_found[0] == TRUE) && (begin_found[1] == TRUE))
2868 		window_found = TRUE;
2869 
2870 	/*------------------------------------------------------------------
2871 	 * Make sure we found the valid read passing window.  Halt if not
2872 	 *-----------------------------------------------------------------*/
2873 	if (window_found == FALSE) {
2874 		printf("ERROR: Cannot determine a common read delay for the "
2875 		       "DIMM(s) installed.\n");
2876 		spd_ddr_init_hang ();
2877 	}
2878 
2879 	/*------------------------------------------------------------------
2880 	 * Restore the ECC variable to what it originally was
2881 	 *-----------------------------------------------------------------*/
2882 	mtsdram(SDRAM_MCOPT1,
2883 		(ppcMfdcr_sdram(SDRAM_MCOPT1) & ~SDRAM_MCOPT1_MCHK_MASK)
2884 		| ecc_temp);
2885 }
2886 #endif /* !HARD_CODED_DQS */
2887 #endif /* !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION) */
2888 
2889 #else /* CONFIG_SPD_EEPROM */
2890 
2891 /*-----------------------------------------------------------------------------
2892  * Function:	initdram
2893  * Description: Configures the PPC4xx IBM DDR1/DDR2 SDRAM memory controller.
2894  * 		The configuration is performed using static, compile-
2895  *		time parameters.
2896  * 		Configures the PPC405EX(r) and PPC460EX/GT
2897  *---------------------------------------------------------------------------*/
initdram(int board_type)2898 phys_size_t initdram(int board_type)
2899 {
2900 	/*
2901 	 * Only run this SDRAM init code once. For NAND booting
2902 	 * targets like Kilauea, we call initdram() early from the
2903 	 * 4k NAND booting image (CONFIG_NAND_SPL) from nand_boot().
2904 	 * Later on the NAND U-Boot image runs (CONFIG_NAND_U_BOOT)
2905 	 * which calls initdram() again. This time the controller
2906 	 * mustn't be reconfigured again since we're already running
2907 	 * from SDRAM.
2908 	 */
2909 #if !defined(CONFIG_NAND_U_BOOT) || defined(CONFIG_NAND_SPL)
2910 	unsigned long val;
2911 
2912 #if defined(CONFIG_440)
2913 	mtdcr(SDRAM_R0BAS,	CONFIG_SYS_SDRAM_R0BAS);
2914 	mtdcr(SDRAM_R1BAS,	CONFIG_SYS_SDRAM_R1BAS);
2915 	mtdcr(SDRAM_R2BAS,	CONFIG_SYS_SDRAM_R2BAS);
2916 	mtdcr(SDRAM_R3BAS,	CONFIG_SYS_SDRAM_R3BAS);
2917 	mtdcr(SDRAM_PLBADDULL,	CONFIG_SYS_SDRAM_PLBADDULL);	/* MQ0_BAUL */
2918 	mtdcr(SDRAM_PLBADDUHB,	CONFIG_SYS_SDRAM_PLBADDUHB);	/* MQ0_BAUH */
2919 	mtdcr(SDRAM_CONF1LL,	CONFIG_SYS_SDRAM_CONF1LL);
2920 	mtdcr(SDRAM_CONF1HB,	CONFIG_SYS_SDRAM_CONF1HB);
2921 	mtdcr(SDRAM_CONFPATHB,	CONFIG_SYS_SDRAM_CONFPATHB);
2922 #endif
2923 
2924 	/* Set Memory Bank Configuration Registers */
2925 
2926 	mtsdram(SDRAM_MB0CF, CONFIG_SYS_SDRAM0_MB0CF);
2927 	mtsdram(SDRAM_MB1CF, CONFIG_SYS_SDRAM0_MB1CF);
2928 	mtsdram(SDRAM_MB2CF, CONFIG_SYS_SDRAM0_MB2CF);
2929 	mtsdram(SDRAM_MB3CF, CONFIG_SYS_SDRAM0_MB3CF);
2930 
2931 	/* Set Memory Clock Timing Register */
2932 
2933 	mtsdram(SDRAM_CLKTR, CONFIG_SYS_SDRAM0_CLKTR);
2934 
2935 	/* Set Refresh Time Register */
2936 
2937 	mtsdram(SDRAM_RTR, CONFIG_SYS_SDRAM0_RTR);
2938 
2939 	/* Set SDRAM Timing Registers */
2940 
2941 	mtsdram(SDRAM_SDTR1, CONFIG_SYS_SDRAM0_SDTR1);
2942 	mtsdram(SDRAM_SDTR2, CONFIG_SYS_SDRAM0_SDTR2);
2943 	mtsdram(SDRAM_SDTR3, CONFIG_SYS_SDRAM0_SDTR3);
2944 
2945 	/* Set Mode and Extended Mode Registers */
2946 
2947 	mtsdram(SDRAM_MMODE, CONFIG_SYS_SDRAM0_MMODE);
2948 	mtsdram(SDRAM_MEMODE, CONFIG_SYS_SDRAM0_MEMODE);
2949 
2950 	/* Set Memory Controller Options 1 Register */
2951 
2952 	mtsdram(SDRAM_MCOPT1, CONFIG_SYS_SDRAM0_MCOPT1);
2953 
2954 	/* Set Manual Initialization Control Registers */
2955 
2956 	mtsdram(SDRAM_INITPLR0, CONFIG_SYS_SDRAM0_INITPLR0);
2957 	mtsdram(SDRAM_INITPLR1, CONFIG_SYS_SDRAM0_INITPLR1);
2958 	mtsdram(SDRAM_INITPLR2, CONFIG_SYS_SDRAM0_INITPLR2);
2959 	mtsdram(SDRAM_INITPLR3, CONFIG_SYS_SDRAM0_INITPLR3);
2960 	mtsdram(SDRAM_INITPLR4, CONFIG_SYS_SDRAM0_INITPLR4);
2961 	mtsdram(SDRAM_INITPLR5, CONFIG_SYS_SDRAM0_INITPLR5);
2962 	mtsdram(SDRAM_INITPLR6, CONFIG_SYS_SDRAM0_INITPLR6);
2963 	mtsdram(SDRAM_INITPLR7, CONFIG_SYS_SDRAM0_INITPLR7);
2964 	mtsdram(SDRAM_INITPLR8, CONFIG_SYS_SDRAM0_INITPLR8);
2965 	mtsdram(SDRAM_INITPLR9, CONFIG_SYS_SDRAM0_INITPLR9);
2966 	mtsdram(SDRAM_INITPLR10, CONFIG_SYS_SDRAM0_INITPLR10);
2967 	mtsdram(SDRAM_INITPLR11, CONFIG_SYS_SDRAM0_INITPLR11);
2968 	mtsdram(SDRAM_INITPLR12, CONFIG_SYS_SDRAM0_INITPLR12);
2969 	mtsdram(SDRAM_INITPLR13, CONFIG_SYS_SDRAM0_INITPLR13);
2970 	mtsdram(SDRAM_INITPLR14, CONFIG_SYS_SDRAM0_INITPLR14);
2971 	mtsdram(SDRAM_INITPLR15, CONFIG_SYS_SDRAM0_INITPLR15);
2972 
2973 	/* Set On-Die Termination Registers */
2974 
2975 	mtsdram(SDRAM_CODT, CONFIG_SYS_SDRAM0_CODT);
2976 	mtsdram(SDRAM_MODT0, CONFIG_SYS_SDRAM0_MODT0);
2977 	mtsdram(SDRAM_MODT1, CONFIG_SYS_SDRAM0_MODT1);
2978 
2979 	/* Set Write Timing Register */
2980 
2981 	mtsdram(SDRAM_WRDTR, CONFIG_SYS_SDRAM0_WRDTR);
2982 
2983 	/*
2984 	 * Start Initialization by SDRAM0_MCOPT2[SREN] = 0 and
2985 	 * SDRAM0_MCOPT2[IPTR] = 1
2986 	 */
2987 
2988 	mtsdram(SDRAM_MCOPT2, (SDRAM_MCOPT2_SREN_EXIT |
2989 			       SDRAM_MCOPT2_IPTR_EXECUTE));
2990 
2991 	/*
2992 	 * Poll SDRAM0_MCSTAT[MIC] for assertion to indicate the
2993 	 * completion of initialization.
2994 	 */
2995 
2996 	do {
2997 		mfsdram(SDRAM_MCSTAT, val);
2998 	} while ((val & SDRAM_MCSTAT_MIC_MASK) != SDRAM_MCSTAT_MIC_COMP);
2999 
3000 	/* Set Delay Control Registers */
3001 
3002 	mtsdram(SDRAM_DLCR, CONFIG_SYS_SDRAM0_DLCR);
3003 
3004 #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
3005 	mtsdram(SDRAM_RDCC, CONFIG_SYS_SDRAM0_RDCC);
3006 	mtsdram(SDRAM_RQDC, CONFIG_SYS_SDRAM0_RQDC);
3007 	mtsdram(SDRAM_RFDC, CONFIG_SYS_SDRAM0_RFDC);
3008 #endif /* !CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
3009 
3010 	/*
3011 	 * Enable Controller by SDRAM0_MCOPT2[DCEN] = 1:
3012 	 */
3013 
3014 	mfsdram(SDRAM_MCOPT2, val);
3015 	mtsdram(SDRAM_MCOPT2, val | SDRAM_MCOPT2_DCEN_ENABLE);
3016 
3017 #if defined(CONFIG_440)
3018 	/*
3019 	 * Program TLB entries with caches enabled, for best performace
3020 	 * while auto-calibrating and ECC generation
3021 	 */
3022 	program_tlb(0, 0, (CONFIG_SYS_MBYTES_SDRAM << 20), 0);
3023 #endif
3024 
3025 #if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
3026 #if !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL)
3027 	/*------------------------------------------------------------------
3028 	 | DQS calibration.
3029 	 +-----------------------------------------------------------------*/
3030 	DQS_autocalibration();
3031 #endif /* !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL) */
3032 #endif /* CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
3033 
3034 	/*
3035 	 * Now complete RDSS configuration as mentioned on page 7 of the AMCC
3036 	 * PowerPC440SP/SPe DDR2 application note:
3037 	 * "DDR1/DDR2 Initialization Sequence and Dynamic Tuning"
3038 	 */
3039 	update_rdcc();
3040 
3041 #if defined(CONFIG_DDR_ECC)
3042 	do_program_ecc(0);
3043 #endif /* defined(CONFIG_DDR_ECC) */
3044 
3045 #if defined(CONFIG_440)
3046 	/*
3047 	 * Now after initialization (auto-calibration and ECC generation)
3048 	 * remove the TLB entries with caches enabled and program again with
3049 	 * desired cache functionality
3050 	 */
3051 	remove_tlb(0, (CONFIG_SYS_MBYTES_SDRAM << 20));
3052 	program_tlb(0, 0, (CONFIG_SYS_MBYTES_SDRAM << 20), MY_TLB_WORD2_I_ENABLE);
3053 #endif
3054 
3055 	ppc4xx_ibm_ddr2_register_dump();
3056 
3057 #if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
3058 	/*
3059 	 * Clear potential errors resulting from auto-calibration.
3060 	 * If not done, then we could get an interrupt later on when
3061 	 * exceptions are enabled.
3062 	 */
3063 	set_mcsr(get_mcsr());
3064 #endif /* CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
3065 
3066 #endif /* !defined(CONFIG_NAND_U_BOOT) || defined(CONFIG_NAND_SPL) */
3067 
3068 	return (CONFIG_SYS_MBYTES_SDRAM << 20);
3069 }
3070 #endif /* CONFIG_SPD_EEPROM */
3071 
3072 #if !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL)
3073 #if defined(CONFIG_440)
mfdcr_any(u32 dcr)3074 u32 mfdcr_any(u32 dcr)
3075 {
3076 	u32 val;
3077 
3078 	switch (dcr) {
3079 	case SDRAM_R0BAS + 0:
3080 		val = mfdcr(SDRAM_R0BAS + 0);
3081 		break;
3082 	case SDRAM_R0BAS + 1:
3083 		val = mfdcr(SDRAM_R0BAS + 1);
3084 		break;
3085 	case SDRAM_R0BAS + 2:
3086 		val = mfdcr(SDRAM_R0BAS + 2);
3087 		break;
3088 	case SDRAM_R0BAS + 3:
3089 		val = mfdcr(SDRAM_R0BAS + 3);
3090 		break;
3091 	default:
3092 		printf("DCR %d not defined in case statement!!!\n", dcr);
3093 		val = 0; /* just to satisfy the compiler */
3094 	}
3095 
3096 	return val;
3097 }
3098 
mtdcr_any(u32 dcr,u32 val)3099 void mtdcr_any(u32 dcr, u32 val)
3100 {
3101 	switch (dcr) {
3102 	case SDRAM_R0BAS + 0:
3103 		mtdcr(SDRAM_R0BAS + 0, val);
3104 		break;
3105 	case SDRAM_R0BAS + 1:
3106 		mtdcr(SDRAM_R0BAS + 1, val);
3107 		break;
3108 	case SDRAM_R0BAS + 2:
3109 		mtdcr(SDRAM_R0BAS + 2, val);
3110 		break;
3111 	case SDRAM_R0BAS + 3:
3112 		mtdcr(SDRAM_R0BAS + 3, val);
3113 		break;
3114 	default:
3115 		printf("DCR %d not defined in case statement!!!\n", dcr);
3116 	}
3117 }
3118 #endif /* defined(CONFIG_440) */
3119 #endif /* !defined(CONFIG_NAND_U_BOOT) &&  !defined(CONFIG_NAND_SPL) */
3120 
ppc4xx_ibm_ddr2_register_dump(void)3121 inline void ppc4xx_ibm_ddr2_register_dump(void)
3122 {
3123 #if defined(DEBUG)
3124 	printf("\nPPC4xx IBM DDR2 Register Dump:\n");
3125 
3126 #if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
3127      defined(CONFIG_460EX) || defined(CONFIG_460GT))
3128 	PPC4xx_IBM_DDR2_DUMP_MQ_REGISTER(R0BAS);
3129 	PPC4xx_IBM_DDR2_DUMP_MQ_REGISTER(R1BAS);
3130 	PPC4xx_IBM_DDR2_DUMP_MQ_REGISTER(R2BAS);
3131 	PPC4xx_IBM_DDR2_DUMP_MQ_REGISTER(R3BAS);
3132 #endif /* (defined(CONFIG_440SP) || ... */
3133 #if defined(CONFIG_405EX)
3134 	PPC4xx_IBM_DDR2_DUMP_REGISTER(BESR);
3135 	PPC4xx_IBM_DDR2_DUMP_REGISTER(BEARL);
3136 	PPC4xx_IBM_DDR2_DUMP_REGISTER(BEARH);
3137 	PPC4xx_IBM_DDR2_DUMP_REGISTER(WMIRQ);
3138 	PPC4xx_IBM_DDR2_DUMP_REGISTER(PLBOPT);
3139 	PPC4xx_IBM_DDR2_DUMP_REGISTER(PUABA);
3140 #endif /* defined(CONFIG_405EX) */
3141 	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB0CF);
3142 	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB1CF);
3143 	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB2CF);
3144 	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB3CF);
3145 	PPC4xx_IBM_DDR2_DUMP_REGISTER(MCSTAT);
3146 	PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT1);
3147 	PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT2);
3148 	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT0);
3149 	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT1);
3150 	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT2);
3151 	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT3);
3152 	PPC4xx_IBM_DDR2_DUMP_REGISTER(CODT);
3153 #if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) ||	\
3154      defined(CONFIG_460EX) || defined(CONFIG_460GT))
3155 	PPC4xx_IBM_DDR2_DUMP_REGISTER(VVPR);
3156 	PPC4xx_IBM_DDR2_DUMP_REGISTER(OPARS);
3157 	/*
3158 	 * OPART is only used as a trigger register.
3159 	 *
3160 	 * No data is contained in this register, and reading or writing
3161 	 * to is can cause bad things to happen (hangs). Just skip it and
3162 	 * report "N/A".
3163 	 */
3164 	printf("%20s = N/A\n", "SDRAM_OPART");
3165 #endif /* defined(CONFIG_440SP) || ... */
3166 	PPC4xx_IBM_DDR2_DUMP_REGISTER(RTR);
3167 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR0);
3168 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR1);
3169 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR2);
3170 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR3);
3171 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR4);
3172 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR5);
3173 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR6);
3174 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR7);
3175 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR8);
3176 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR9);
3177 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR10);
3178 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR11);
3179 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR12);
3180 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR13);
3181 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR14);
3182 	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR15);
3183 	PPC4xx_IBM_DDR2_DUMP_REGISTER(RQDC);
3184 	PPC4xx_IBM_DDR2_DUMP_REGISTER(RFDC);
3185 	PPC4xx_IBM_DDR2_DUMP_REGISTER(RDCC);
3186 	PPC4xx_IBM_DDR2_DUMP_REGISTER(DLCR);
3187 	PPC4xx_IBM_DDR2_DUMP_REGISTER(CLKTR);
3188 	PPC4xx_IBM_DDR2_DUMP_REGISTER(WRDTR);
3189 	PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR1);
3190 	PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR2);
3191 	PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR3);
3192 	PPC4xx_IBM_DDR2_DUMP_REGISTER(MMODE);
3193 	PPC4xx_IBM_DDR2_DUMP_REGISTER(MEMODE);
3194 	PPC4xx_IBM_DDR2_DUMP_REGISTER(ECCES);
3195 #if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
3196      defined(CONFIG_460EX) || defined(CONFIG_460GT))
3197 	PPC4xx_IBM_DDR2_DUMP_REGISTER(CID);
3198 #endif /* defined(CONFIG_440SP) || ... */
3199 	PPC4xx_IBM_DDR2_DUMP_REGISTER(RID);
3200 	PPC4xx_IBM_DDR2_DUMP_REGISTER(FCSR);
3201 	PPC4xx_IBM_DDR2_DUMP_REGISTER(RTSR);
3202 #endif /* defined(DEBUG) */
3203 }
3204