// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2006-2007 Freescale Semiconductor, Inc.
 *
 * (C) Copyright 2006
 * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 *
 * Copyright (C) 2004-2006 Freescale Semiconductor, Inc.
 * (C) Copyright 2003 Motorola Inc.
 * Xianghua Xiao (X.Xiao@motorola.com)
 */

#ifndef CONFIG_MPC83XX_SDRAM

#include <common.h>
#include <asm/processor.h>
#include <asm/io.h>
#include <i2c.h>
#include <spd.h>
#include <asm/mmu.h>
#include <spd_sdram.h>

DECLARE_GLOBAL_DATA_PTR;

void board_add_ram_info(int use_default)
{
	volatile immap_t *immap = (immap_t *) CONFIG_SYS_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	char buf[32];

	printf(" (DDR%d", ((ddr->sdram_cfg & SDRAM_CFG_SDRAM_TYPE_MASK)
			   >> SDRAM_CFG_SDRAM_TYPE_SHIFT) - 1);

#if defined(CONFIG_MPC8308) || defined(CONFIG_MPC831x)
	if ((ddr->sdram_cfg & SDRAM_CFG_DBW_MASK) == SDRAM_CFG_DBW_16)
		puts(", 16-bit");
	else if ((ddr->sdram_cfg & SDRAM_CFG_DBW_MASK) == SDRAM_CFG_DBW_32)
		puts(", 32-bit");
	else
		puts(", unknown width");
#else
	if (ddr->sdram_cfg & SDRAM_CFG_32_BE)
		puts(", 32-bit");
	else
		puts(", 64-bit");
#endif

	if (ddr->sdram_cfg & SDRAM_CFG_ECC_EN)
		puts(", ECC on");
	else
		puts(", ECC off");

	printf(", %s MHz)", strmhz(buf, gd->mem_clk));

#if defined(CONFIG_SYS_LB_SDRAM) && defined(CONFIG_SYS_LBC_SDRAM_SIZE)
	puts("\nSDRAM: ");
	print_size (CONFIG_SYS_LBC_SDRAM_SIZE * 1024 * 1024, " (local bus)");
#endif
}

#ifdef CONFIG_SPD_EEPROM
#ifndef CONFIG_SYS_READ_SPD
#define CONFIG_SYS_READ_SPD	i2c_read
#endif
#ifndef SPD_EEPROM_OFFSET
#define SPD_EEPROM_OFFSET	0
#endif
#ifndef SPD_EEPROM_ADDR_LEN
#define SPD_EEPROM_ADDR_LEN	1
#endif

/*
 * Convert picoseconds into clock cycles (rounding up if needed).
 */
int
picos_to_clk(int picos)
{
	unsigned int mem_bus_clk;
	int clks;

	mem_bus_clk = gd->mem_clk >> 1;
	clks = picos / (1000000000 / (mem_bus_clk / 1000));
	if (picos % (1000000000 / (mem_bus_clk / 1000)) != 0)
		clks++;

	return clks;
}
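
/*
 * Worked example (illustrative, assuming gd->mem_clk == 266000000, i.e. a
 * 266 MHz data rate): mem_bus_clk is 133 MHz, so one clock is
 * 1000000000 / 133000 = 7518 ps.  picos_to_clk(15000) then computes
 * 15000 / 7518 = 1 with a non-zero remainder, which rounds up to 2 clocks.
 */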

unsigned int banksize(unsigned char row_dens)
{
	return ((row_dens >> 2) | ((row_dens & 3) << 6)) << 24;
}
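
/*
 * Illustration: the rotate-right-by-2 moves the smallest-density bit of the
 * SPD bank-density byte down to bit 0, and the << 24 then scales the result
 * to bytes in 16 MiB units.  For example, row_dens == 0x40 gives
 * ((0x40 >> 2) | 0) << 24 = 0x10000000, i.e. a 256 MiB bank (assuming the
 * usual JEDEC DDR SPD byte 31 encoding).
 */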

int read_spd(uint addr)
{
	return ((int) addr);
}

#undef SPD_DEBUG
#ifdef SPD_DEBUG
static void spd_debug(spd_eeprom_t *spd)
{
	printf ("\nDIMM type:       %-18.18s\n", spd->mpart);
	printf ("SPD size:        %d\n", spd->info_size);
	printf ("EEPROM size:     %d\n", 1 << spd->chip_size);
	printf ("Memory type:     %d\n", spd->mem_type);
	printf ("Row addr:        %d\n", spd->nrow_addr);
	printf ("Column addr:     %d\n", spd->ncol_addr);
	printf ("# of rows:       %d\n", spd->nrows);
	printf ("Row density:     %d\n", spd->row_dens);
	printf ("# of banks:      %d\n", spd->nbanks);
	printf ("Data width:      %d\n",
			256 * spd->dataw_msb + spd->dataw_lsb);
	printf ("Chip width:      %d\n", spd->primw);
	printf ("Refresh rate:    %02X\n", spd->refresh);
	printf ("CAS latencies:   %02X\n", spd->cas_lat);
	printf ("Write latencies: %02X\n", spd->write_lat);
	printf ("tRP:             %d\n", spd->trp);
	printf ("tRCD:            %d\n", spd->trcd);
	printf ("\n");
}
#endif /* SPD_DEBUG */
long int spd_sdram()
{
	volatile immap_t *immap = (immap_t *)CONFIG_SYS_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	volatile law83xx_t *ecm = &immap->sysconf.ddrlaw[0];
	spd_eeprom_t spd;
	unsigned int n_ranks;
	unsigned int odt_rd_cfg, odt_wr_cfg;
	unsigned char twr_clk, twtr_clk;
	unsigned int sdram_type;
	unsigned int memsize;
	unsigned int law_size;
	unsigned char caslat, caslat_ctrl;
	unsigned int trfc, trfc_clk, trfc_low;
	unsigned int trcd_clk, trtp_clk;
	unsigned char cke_min_clk;
	unsigned char add_lat, wr_lat;
	unsigned char wr_data_delay;
	unsigned char four_act;
	unsigned char cpo;
	unsigned char burstlen;
	unsigned char odt_cfg, mode_odt_enable;
	unsigned int max_bus_clk;
	unsigned int max_data_rate, effective_data_rate;
	unsigned int ddrc_clk;
	unsigned int refresh_clk;
	unsigned int sdram_cfg;
	unsigned int ddrc_ecc_enable;
	unsigned int pvr = get_pvr();

	/*
	 * First disable the memory controller (could be enabled
	 * by the debugger)
	 */
	clrsetbits_be32(&ddr->sdram_cfg, SDRAM_CFG_MEM_EN, 0);
	sync();
	isync();

	/* Read SPD parameters with I2C */
	CONFIG_SYS_READ_SPD(SPD_EEPROM_ADDRESS, SPD_EEPROM_OFFSET,
		SPD_EEPROM_ADDR_LEN, (uchar *) &spd, sizeof(spd));
#ifdef SPD_DEBUG
	spd_debug(&spd);
#endif
	/* Check the memory type */
	if (spd.mem_type != SPD_MEMTYPE_DDR && spd.mem_type != SPD_MEMTYPE_DDR2) {
		debug("DDR: Module mem type is %02X\n", spd.mem_type);
		return 0;
	}

	/* Check the number of physical banks */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		n_ranks = spd.nrows;
	} else {
		n_ranks = (spd.nrows & 0x7) + 1;
	}

	if (n_ranks > 2) {
		printf("DDR: The number of physical banks is %02X\n", n_ranks);
		return 0;
	}

	/* Check that the number of rows is within the DDRC's supported range */
	if (spd.nrow_addr < 12 || spd.nrow_addr > 15) {
		printf("DDR: Row number is out of range of DDRC, row=%02X\n",
							 spd.nrow_addr);
		return 0;
	}

	/* Check that the number of columns is within the DDRC's supported range */
	if (spd.ncol_addr < 8 || spd.ncol_addr > 11) {
		printf("DDR: Column number is out of range of DDRC, col=%02X\n",
							 spd.ncol_addr);
		return 0;
	}

#ifdef CONFIG_SYS_DDRCDR_VALUE
	/*
	 * Adjust DDR II IO voltage biasing.  It just makes it work.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		immap->sysconf.ddrcdr = CONFIG_SYS_DDRCDR_VALUE;
	}
	udelay(50000);
#endif

	/*
	 * ODT configuration recommendation from DDR Controller Chapter.
	 */
	odt_rd_cfg = 0;			/* Never assert ODT */
	odt_wr_cfg = 0;			/* Never assert ODT */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		odt_wr_cfg = 1;		/* Assert ODT on writes to CSn */
	}

	/* Setup DDR chip select register */
#ifdef CONFIG_SYS_83XX_DDR_USES_CS0
	ddr->csbnds[0].csbnds = (banksize(spd.row_dens) >> 24) - 1;
	ddr->cs_config[0] = ( 1 << 31
			    | (odt_rd_cfg << 20)
			    | (odt_wr_cfg << 16)
			    | ((spd.nbanks == 8 ? 1 : 0) << 14)
			    | ((spd.nrow_addr - 12) << 8)
			    | (spd.ncol_addr - 8) );
	debug("\n");
	debug("cs0_bnds = 0x%08x\n", ddr->csbnds[0].csbnds);
	debug("cs0_config = 0x%08x\n", ddr->cs_config[0]);

	if (n_ranks == 2) {
		ddr->csbnds[1].csbnds = ( (banksize(spd.row_dens) >> 8)
				  | ((banksize(spd.row_dens) >> 23) - 1) );
		ddr->cs_config[1] = ( 1 << 31
				    | (odt_rd_cfg << 20)
				    | (odt_wr_cfg << 16)
				    | ((spd.nbanks == 8 ? 1 : 0) << 14)
				    | ((spd.nrow_addr - 12) << 8)
				    | (spd.ncol_addr - 8) );
		debug("cs1_bnds = 0x%08x\n", ddr->csbnds[1].csbnds);
		debug("cs1_config = 0x%08x\n", ddr->cs_config[1]);
	}

#else
	ddr->csbnds[2].csbnds = (banksize(spd.row_dens) >> 24) - 1;
	ddr->cs_config[2] = ( 1 << 31
			    | (odt_rd_cfg << 20)
			    | (odt_wr_cfg << 16)
			    | ((spd.nbanks == 8 ? 1 : 0) << 14)
			    | ((spd.nrow_addr - 12) << 8)
			    | (spd.ncol_addr - 8) );
	debug("\n");
	debug("cs2_bnds = 0x%08x\n", ddr->csbnds[2].csbnds);
	debug("cs2_config = 0x%08x\n", ddr->cs_config[2]);

	if (n_ranks == 2) {
		ddr->csbnds[3].csbnds = ( (banksize(spd.row_dens) >> 8)
				  | ((banksize(spd.row_dens) >> 23) - 1) );
		ddr->cs_config[3] = ( 1 << 31
				    | (odt_rd_cfg << 20)
				    | (odt_wr_cfg << 16)
				    | ((spd.nbanks == 8 ? 1 : 0) << 14)
				    | ((spd.nrow_addr - 12) << 8)
				    | (spd.ncol_addr - 8) );
		debug("cs3_bnds = 0x%08x\n", ddr->csbnds[3].csbnds);
		debug("cs3_config = 0x%08x\n", ddr->cs_config[3]);
	}
#endif

	/*
	 * Figure out memory size in Megabytes.
	 */
	memsize = n_ranks * banksize(spd.row_dens) / 0x100000;

	/*
	 * First supported LAW size is 16M, at LAWAR_SIZE_16M == 23.
	 */
	law_size = 19 + __ilog2(memsize);

	/*
	 * Set up LAWBAR for all of DDR.
	 */
	ecm->bar = CONFIG_SYS_DDR_SDRAM_BASE & 0xfffff000;
	ecm->ar  = (LAWAR_EN | LAWAR_TRGT_IF_DDR | (LAWAR_SIZE & law_size));
	debug("DDR:bar=0x%08x\n", ecm->bar);
	debug("DDR:ar=0x%08x\n", ecm->ar);
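
	/*
	 * Example (illustrative): a single-rank 256 MB module gives
	 * memsize = 256 and __ilog2(256) = 8, so law_size = 27.  That is
	 * consistent with the 16M == 23 anchor above, since every doubling
	 * of memsize adds one to the encoded window size.
	 */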

	/*
	 * Find the largest CAS by locating the highest 1 bit
	 * in the spd.cas_lat field.  Translate it to a DDR
	 * controller field value:
	 *
	 *	CAS Lat	DDR I	DDR II	Ctrl
	 *	Clocks	SPD Bit	SPD Bit	Value
	 *	-------	-------	-------	-----
	 *	1.0	0		0001
	 *	1.5	1		0010
	 *	2.0	2	2	0011
	 *	2.5	3		0100
	 *	3.0	4	3	0101
	 *	3.5	5		0110
	 *	4.0	6	4	0111
	 *	4.5			1000
	 *	5.0		5	1001
	 */
	caslat = __ilog2(spd.cas_lat);
	if ((spd.mem_type == SPD_MEMTYPE_DDR)
	    && (caslat > 6)) {
		printf("DDR I: Invalid SPD CAS Latency: 0x%x.\n", spd.cas_lat);
		return 0;
	} else if (spd.mem_type == SPD_MEMTYPE_DDR2
		   && (caslat < 2 || caslat > 5)) {
		printf("DDR II: Invalid SPD CAS Latency: 0x%x.\n",
		       spd.cas_lat);
		return 0;
	}
	debug("DDR: caslat SPD bit is %d\n", caslat);
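
	/*
	 * Example (illustrative): a DDR2 module advertising CL3/CL4/CL5 has
	 * spd.cas_lat == 0x38, so __ilog2() returns 5 (CL 5.0 per the table
	 * above), which passes the 2..5 range check for DDR II.
	 */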

	max_bus_clk = 1000 * 10 / (((spd.clk_cycle & 0xF0) >> 4) * 10
			+ (spd.clk_cycle & 0x0f));
	max_data_rate = max_bus_clk * 2;

	debug("DDR:Module maximum data rate is: %d MHz\n", max_data_rate);
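
	/*
	 * Example (illustrative): spd.clk_cycle == 0x50 encodes a minimum
	 * cycle time of 5.0 ns (upper nibble = whole ns, lower nibble =
	 * tenths), so max_bus_clk = 10000 / 50 = 200 MHz and
	 * max_data_rate = 400, i.e. a DDR-400 part.
	 */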

	ddrc_clk = gd->mem_clk / 1000000;
	effective_data_rate = 0;

	if (max_data_rate >= 460) { /* it is DDR2-800, 667, 533 */
		if (spd.cas_lat & 0x08)
			caslat = 3;
		else
			caslat = 4;
		if (ddrc_clk <= 460 && ddrc_clk > 350)
			effective_data_rate = 400;
		else if (ddrc_clk <= 350 && ddrc_clk > 280)
			effective_data_rate = 333;
		else if (ddrc_clk <= 280 && ddrc_clk > 230)
			effective_data_rate = 266;
		else
			effective_data_rate = 200;
	} else if (max_data_rate >= 390 && max_data_rate < 460) { /* it is DDR 400 */
		if (ddrc_clk <= 460 && ddrc_clk > 350) {
			/* DDR controller clk at 350~460 */
			effective_data_rate = 400; /* 5ns */
			caslat = caslat;
		} else if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			effective_data_rate = 333; /* 6ns */
			if (spd.clk_cycle2 == 0x60)
				caslat = caslat - 1;
			else
				caslat = caslat;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266; /* 7.5ns */
			if (spd.clk_cycle3 == 0x75)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0x75)
				caslat = caslat - 1;
			else
				caslat = caslat;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			if (spd.clk_cycle3 == 0xa0)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
			else
				caslat = caslat;
		}
	} else if (max_data_rate >= 323) { /* it is DDR 333 */
		if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			effective_data_rate = 333; /* 6ns */
			caslat = caslat;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266; /* 7.5ns */
			if (spd.clk_cycle2 == 0x75)
				caslat = caslat - 1;
			else
				caslat = caslat;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			if (spd.clk_cycle3 == 0xa0)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
			else
				caslat = caslat;
		}
	} else if (max_data_rate >= 256) { /* it is DDR 266 */
		if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			printf("DDR: DDR controller freq is more than "
				"max data rate of the module\n");
			return 0;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266; /* 7.5ns */
			caslat = caslat;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
		}
	} else if (max_data_rate >= 190) { /* it is DDR 200 */
		if (ddrc_clk <= 350 && ddrc_clk > 230) {
			/* DDR controller clk at 230~350 */
			printf("DDR: DDR controller freq is more than "
				"max data rate of the module\n");
			return 0;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			caslat = caslat;
		}
	}

	debug("DDR:Effective data rate is: %dMHz\n", effective_data_rate);
	debug("DDR:The MSB 1 of CAS Latency is: %d\n", caslat);

	/*
	 * Errata DDR6 workaround: enable the input 2 cycles earlier.
	 * Applies to MPC834x Rev 1.0/1.1 and MPC8360 Rev 1.1/1.2.
	 */
	if (PVR_MAJ(pvr) <= 1 && spd.mem_type == SPD_MEMTYPE_DDR) {
		if (caslat == 2)
			ddr->debug_reg = 0x201c0000; /* CL=2 */
		else if (caslat == 3)
			ddr->debug_reg = 0x202c0000; /* CL=2.5 */
		else if (caslat == 4)
			ddr->debug_reg = 0x202c0000; /* CL=3.0 */

		__asm__ __volatile__ ("sync");

		debug("Errata DDR6 (debug_reg=0x%08x)\n", ddr->debug_reg);
	}

	/*
	 * Convert caslat clocks to DDR controller value.
	 * Force caslat_ctrl to be DDR Controller field-sized.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		caslat_ctrl = (caslat + 1) & 0x07;
	} else {
		caslat_ctrl = (2 * caslat - 1) & 0x0f;
	}

	debug("DDR: effective data rate is %d MHz\n", effective_data_rate);
	debug("DDR: caslat SPD bit is %d, controller field is 0x%x\n",
	      caslat, caslat_ctrl);
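
	/*
	 * Example (illustrative): for DDR2 at CL4, caslat_ctrl =
	 * 2 * 4 - 1 = 7 (0111); for DDR I with the CL2.5 SPD bit set
	 * (caslat == 3), caslat_ctrl = 3 + 1 = 4 (0100).  Both match the
	 * "Ctrl Value" column of the table above.
	 */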

	/*
	 * Timing Config 0.
	 * Avoid writing for DDR I.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		unsigned char taxpd_clk = 8;		/* By the book. */
		unsigned char tmrd_clk = 2;		/* By the book. */
		unsigned char act_pd_exit = 2;		/* Empirical? */
		unsigned char pre_pd_exit = 6;		/* Empirical? */

		ddr->timing_cfg_0 = (0
			| ((act_pd_exit & 0x7) << 20)	/* ACT_PD_EXIT */
			| ((pre_pd_exit & 0x7) << 16)	/* PRE_PD_EXIT */
			| ((taxpd_clk & 0xf) << 8)	/* ODT_PD_EXIT */
			| ((tmrd_clk & 0xf) << 0)	/* MRS_CYC */
			);
		debug("DDR: timing_cfg_0 = 0x%08x\n", ddr->timing_cfg_0);
	}

	/*
	 * For DDR I, WRREC(Twr) and WRTORD(Twtr) are not in the SPD,
	 * so use conservative values.
	 * For DDR II, they are bytes 36 and 37, in quarter nanoseconds.
	 */

	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		twr_clk = 3;	/* Clocks */
		twtr_clk = 1;	/* Clocks */
	} else {
		twr_clk = picos_to_clk(spd.twr * 250);
		twtr_clk = picos_to_clk(spd.twtr * 250);
		if (twtr_clk < 2)
			twtr_clk = 2;
	}
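
	/*
	 * Example (illustrative): a DDR2 part with tWR = 15 ns has
	 * spd.twr == 60 (quarter-ns units), so twr_clk =
	 * picos_to_clk(15000), which is 2 clocks at the 266 MHz data rate
	 * assumed in the picos_to_clk() example above.
	 */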

	/*
	 * Calculate Trfc, in picos.
	 * DDR I:  Byte 42 straight up in ns.
	 * DDR II: Byte 40 and 42 swizzled some, in ns.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		trfc = spd.trfc * 1000;		/* up to ps */
	} else {
		unsigned int byte40_table_ps[8] = {
			0,
			250,
			330,
			500,
			660,
			750,
			0,
			0
		};

		trfc = (((spd.trctrfc_ext & 0x1) * 256) + spd.trfc) * 1000
			+ byte40_table_ps[(spd.trctrfc_ext >> 1) & 0x7];
	}
	trfc_clk = picos_to_clk(trfc);
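
	/*
	 * Example (illustrative): with spd.trfc == 127 and bit 0 of
	 * spd.trctrfc_ext set, trfc = (256 + 127) * 1000 = 383000 ps,
	 * plus any sub-ns amount selected by bits 3:1 via the table above.
	 */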

	/*
	 * Trcd, Byte 29, from quarter nanos to ps and clocks.
	 */
	trcd_clk = picos_to_clk(spd.trcd * 250) & 0x7;

	/*
	 * Convert trfc_clk to DDR controller fields.  DDR I should
	 * fit in the REFREC field (16-19) of TIMING_CFG_1, but the
	 * 83xx controller has an extended REFREC field of three bits.
	 * The controller automatically adds 8 clocks to this value,
	 * so preadjust it down 8 first before splitting it up.
	 */
	trfc_low = (trfc_clk - 8) & 0xf;

	ddr->timing_cfg_1 =
	    (((picos_to_clk(spd.trp * 250) & 0x07) << 28) |	/* PRETOACT */
	     ((picos_to_clk(spd.tras * 1000) & 0x0f) << 24) |	/* ACTTOPRE */
	     (trcd_clk << 20) |					/* ACTTORW */
	     (caslat_ctrl << 16) |				/* CASLAT */
	     (trfc_low << 12) |					/* REFREC */
	     ((twr_clk & 0x07) << 8) |				/* WRREC */
	     ((picos_to_clk(spd.trrd * 250) & 0x07) << 4) |	/* ACTTOACT */
	     ((twtr_clk & 0x07) << 0)				/* WRTORD */
	    );

	/*
	 * Additive Latency
	 * For DDR I, 0.
	 * For DDR II, with ODT enabled, use "a value" less than ACTTORW,
	 * which comes from Trcd, and also note that:
	 *	add_lat + caslat must be >= 4
	 */
	add_lat = 0;
	if (spd.mem_type == SPD_MEMTYPE_DDR2
	    && (odt_wr_cfg || odt_rd_cfg)
	    && (caslat < 4)) {
		add_lat = 4 - caslat;
		if ((add_lat + caslat) < 4) {
			add_lat = 0;
		}
	}
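
	/*
	 * Example (illustrative): a DDR2 module running at CL3 with ODT
	 * asserted on writes gets add_lat = 4 - 3 = 1, so that
	 * add_lat + caslat meets the >= 4 requirement noted above.
	 */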

	/*
	 * Write Data Delay
	 * Historically 0x2 == 4/8 clock delay.
	 * Empirically, 0x3 == 6/8 clock delay is suggested for DDR I 266.
	 */
	wr_data_delay = 2;
#ifdef CONFIG_SYS_DDR_WRITE_DATA_DELAY
	wr_data_delay = CONFIG_SYS_DDR_WRITE_DATA_DELAY;
#endif

	/*
	 * Write Latency
	 * Read to Precharge
	 * Minimum CKE Pulse Width.
	 * Four Activate Window
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		/*
		 * This is a lie.  It should really be 1, but if it is
		 * set to 1, bits overlap into the old controller's
		 * otherwise unused ACSM field.  If we leave it 0, then
		 * the HW will magically treat it as 1 for DDR 1.  Oh Yea.
		 */
		wr_lat = 0;

		trtp_clk = 2;		/* By the book. */
		cke_min_clk = 1;	/* By the book. */
		four_act = 1;		/* By the book. */

	} else {
		wr_lat = caslat - 1;

		/* Convert SPD value from quarter nanos to picos. */
		trtp_clk = picos_to_clk(spd.trtp * 250);
		if (trtp_clk < 2)
			trtp_clk = 2;
		trtp_clk += add_lat;

		cke_min_clk = 3;	/* By the book. */
		four_act = picos_to_clk(37500);	/* By the book. 1k pages? */
	}

	/*
	 * Empirically set ~MCAS-to-preamble override for DDR 2.
	 * Your mileage will vary.
	 */
	cpo = 0;
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
#ifdef CONFIG_SYS_DDR_CPO
		cpo = CONFIG_SYS_DDR_CPO;
#else
		if (effective_data_rate == 266) {
			cpo = 0x4;		/* READ_LAT + 1/2 */
		} else if (effective_data_rate == 333) {
			cpo = 0x6;		/* READ_LAT + 1 */
		} else if (effective_data_rate == 400) {
			cpo = 0x7;		/* READ_LAT + 5/4 */
		} else {
			/* Automatic calibration */
			cpo = 0x1f;
		}
#endif
	}

	ddr->timing_cfg_2 = (0
		| ((add_lat & 0x7) << 28)		/* ADD_LAT */
		| ((cpo & 0x1f) << 23)			/* CPO */
		| ((wr_lat & 0x7) << 19)		/* WR_LAT */
		| ((trtp_clk & 0x7) << 13)		/* RD_TO_PRE */
		| ((wr_data_delay & 0x7) << 10)		/* WR_DATA_DELAY */
		| ((cke_min_clk & 0x7) << 6)		/* CKE_PLS */
		| ((four_act & 0x1f) << 0)		/* FOUR_ACT */
		);

	debug("DDR:timing_cfg_1=0x%08x\n", ddr->timing_cfg_1);
	debug("DDR:timing_cfg_2=0x%08x\n", ddr->timing_cfg_2);

	/* Check DIMM data bus width */
	if (spd.dataw_lsb < 64) {
		if (spd.mem_type == SPD_MEMTYPE_DDR)
			burstlen = 0x03; /* 32 bit data bus, burst len is 8 */
		else
			burstlen = 0x02; /* 32 bit data bus, burst len is 4 */
		debug("\n   DDR DIMM: data bus width is 32 bit");
	} else {
		burstlen = 0x02; /* Others act as 64 bit bus, burst len is 4 */
		debug("\n   DDR DIMM: data bus width is 64 bit");
	}

	/* Is this an ECC DDR chip? */
	if (spd.config == 0x02)
		debug(" with ECC\n");
	else
		debug(" without ECC\n");

	/*
	 * Burst length is always 4 for a 64-bit data bus and 8 for a 32-bit
	 * data bus; burst type is sequential.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		switch (caslat) {
		case 1:
			ddr->sdram_mode = 0x50 | burstlen; /* CL=1.5 */
			break;
		case 2:
			ddr->sdram_mode = 0x20 | burstlen; /* CL=2.0 */
			break;
		case 3:
			ddr->sdram_mode = 0x60 | burstlen; /* CL=2.5 */
			break;
		case 4:
			ddr->sdram_mode = 0x30 | burstlen; /* CL=3.0 */
			break;
		default:
			printf("DDR: only CL 1.5, 2.0, 2.5 and 3.0 are supported\n");
			return 0;
		}
	} else {
		mode_odt_enable = 0x0;                  /* Default disabled */
		if (odt_wr_cfg || odt_rd_cfg) {
			/*
			 * Bits 6 and 2 in Extended MRS(1)
			 * Bit 2 == 0x04 == 75 Ohm, with 2 DIMM modules.
			 * Bit 6 == 0x40 == 150 Ohm, with 1 DIMM module.
			 */
			mode_odt_enable = 0x40;         /* 150 Ohm */
		}

		ddr->sdram_mode =
			(0
			 | (1 << (16 + 10))             /* DQS Differential disable */
#ifdef CONFIG_SYS_DDR_MODE_WEAK
			 | (1 << (16 + 1))		/* weak driver (~60%) */
#endif
			 | (add_lat << (16 + 3))        /* Additive Latency in EMRS1 */
			 | (mode_odt_enable << 16)      /* ODT Enable in EMRS1 */
			 | ((twr_clk - 1) << 9)         /* Write Recovery Autopre */
			 | (caslat << 4)                /* caslat */
			 | (burstlen << 0)              /* Burst length */
			);
	}
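
	/*
	 * Example (illustrative): a DDR2 module at CL4, burst length 4
	 * (burstlen == 0x2), tWR of 2 clocks, no additive latency, ODT
	 * asserted on writes (mode_odt_enable == 0x40) and the weak-driver
	 * option not set assembles to
	 * (1 << 26) | (0x40 << 16) | (1 << 9) | (4 << 4) | 0x2 = 0x04400242.
	 */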
	debug("DDR:sdram_mode=0x%08x\n", ddr->sdram_mode);

	/*
	 * Clear EMRS2 and EMRS3.
	 */
	ddr->sdram_mode2 = 0;
	debug("DDR: sdram_mode2 = 0x%08x\n", ddr->sdram_mode2);

	switch (spd.refresh) {
	case 0x00:
	case 0x80:
		refresh_clk = picos_to_clk(15625000);
		break;
	case 0x01:
	case 0x81:
		refresh_clk = picos_to_clk(3900000);
		break;
	case 0x02:
	case 0x82:
		refresh_clk = picos_to_clk(7800000);
		break;
	case 0x03:
	case 0x83:
		refresh_clk = picos_to_clk(31300000);
		break;
	case 0x04:
	case 0x84:
		refresh_clk = picos_to_clk(62500000);
		break;
	case 0x05:
	case 0x85:
		refresh_clk = picos_to_clk(125000000);
		break;
	default:
		refresh_clk = 0x512;
		break;
	}
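
	/*
	 * Example (illustrative): spd.refresh == 0x82 selects a 7.8 us
	 * refresh period; at the 7518 ps clock assumed in the earlier
	 * examples, picos_to_clk(7800000) works out to 1038 clocks.
	 */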

	/*
	 * Set BSTOPRE to 0x100 for page mode.
	 * If auto-precharge is used, set BSTOPRE = 0.
	 */
	ddr->sdram_interval = ((refresh_clk & 0x3fff) << 16) | 0x100;
	debug("DDR:sdram_interval=0x%08x\n", ddr->sdram_interval);

	/*
	 * SDRAM Cfg 2
	 */
	odt_cfg = 0;
#ifndef CONFIG_NEVER_ASSERT_ODT_TO_CPU
	if (odt_rd_cfg | odt_wr_cfg) {
		odt_cfg = 0x2;		/* ODT to IOs during reads */
	}
#endif
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		ddr->sdram_cfg2 = (0
			    | (0 << 26)		/* True DQS */
			    | (odt_cfg << 21)	/* ODT only read */
			    | (1 << 12)		/* 1 refresh at a time */
			    );

		debug("DDR: sdram_cfg2  = 0x%08x\n", ddr->sdram_cfg2);
	}

#ifdef CONFIG_SYS_DDR_SDRAM_CLK_CNTL	/* Optional platform specific value */
	ddr->sdram_clk_cntl = CONFIG_SYS_DDR_SDRAM_CLK_CNTL;
#endif
	debug("DDR:sdram_clk_cntl=0x%08x\n", ddr->sdram_clk_cntl);

	asm("sync;isync");

	udelay(600);

	/*
	 * Figure out the settings for the sdram_cfg register. Build up
	 * the value in 'sdram_cfg' before writing since the write into
	 * the register will actually enable the memory controller, and all
	 * settings must be done before enabling.
	 *
	 * sdram_cfg[0]   = 1 (ddr sdram logic enable)
	 * sdram_cfg[1]   = 1 (self-refresh-enable)
	 * sdram_cfg[5:7] = (SDRAM type = DDR SDRAM)
	 *			010 DDR 1 SDRAM
	 *			011 DDR 2 SDRAM
	 * sdram_cfg[12]  = 0 (32_BE = 0, 64 bit bus mode)
	 * sdram_cfg[13]  = 0 (8_BE = 0, 4-beat bursts)
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR)
		sdram_type = SDRAM_CFG_SDRAM_TYPE_DDR1;
	else
		sdram_type = SDRAM_CFG_SDRAM_TYPE_DDR2;

	sdram_cfg = (0
		     | SDRAM_CFG_MEM_EN		/* DDR enable */
		     | SDRAM_CFG_SREN		/* Self refresh */
		     | sdram_type		/* SDRAM type */
		     );

	/* sdram_cfg[3] = RD_EN - registered DIMM enable */
	if (spd.mod_attr & 0x02)
		sdram_cfg |= SDRAM_CFG_RD_EN;

	/* The DIMM is 32 bits wide */
	if (spd.dataw_lsb < 64) {
		if (spd.mem_type == SPD_MEMTYPE_DDR)
			sdram_cfg |= SDRAM_CFG_32_BE | SDRAM_CFG_8_BE;
		if (spd.mem_type == SPD_MEMTYPE_DDR2)
			sdram_cfg |= SDRAM_CFG_32_BE;
	}

	ddrc_ecc_enable = 0;

#if defined(CONFIG_DDR_ECC)
	/* Enable ECC with sdram_cfg[2] */
	if (spd.config == 0x02) {
		sdram_cfg |= 0x20000000;
		ddrc_ecc_enable = 1;
		/* disable error detection */
		ddr->err_disable = ~ECC_ERROR_ENABLE;
		/*
		 * Set the single-bit error threshold to the maximum value
		 * and reset the counter to zero.
		 */
		ddr->err_sbe = (255 << ECC_ERROR_MAN_SBET_SHIFT) |
				(0 << ECC_ERROR_MAN_SBEC_SHIFT);
	}

	debug("DDR:err_disable=0x%08x\n", ddr->err_disable);
	debug("DDR:err_sbe=0x%08x\n", ddr->err_sbe);
#endif
	debug("   DDRC ECC mode: %s\n", ddrc_ecc_enable ? "ON" : "OFF");

#if defined(CONFIG_DDR_2T_TIMING)
	/*
	 * Enable 2T timing by setting sdram_cfg[16].
	 */
	sdram_cfg |= SDRAM_CFG_2T_EN;
#endif
	/* Enable controller, and GO! */
	ddr->sdram_cfg = sdram_cfg;
	asm("sync;isync");
	udelay(500);

	debug("DDR:sdram_cfg=0x%08x\n", ddr->sdram_cfg);
	return memsize; /* in MBytes */
}
#endif /* CONFIG_SPD_EEPROM */

#if defined(CONFIG_DDR_ECC) && !defined(CONFIG_ECC_INIT_VIA_DDRCONTROLLER)
/*
 * Use the timebase counter; get_timer() is not available
 * at this point of initialization yet.
 */
static __inline__ unsigned long get_tbms (void)
{
	unsigned long tbl;
	unsigned long tbu1, tbu2;
	unsigned long ms;
	unsigned long long tmp;

	ulong tbclk = get_tbclk();

	/* get the timebase ticks */
	do {
		asm volatile ("mftbu %0" : "=r" (tbu1) :);
		asm volatile ("mftb %0" : "=r" (tbl) :);
		asm volatile ("mftbu %0" : "=r" (tbu2) :);
	} while (tbu1 != tbu2);

	/* convert ticks to ms */
	tmp = (unsigned long long)(tbu1);
	tmp = (tmp << 32);
	tmp += (unsigned long long)(tbl);
	ms = tmp / (tbclk / 1000);

	return ms;
}

/*
 * Initialize all of memory for ECC, then enable errors.
 */
void ddr_enable_ecc(unsigned int dram_size)
{
	volatile immap_t *immap = (immap_t *)CONFIG_SYS_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	unsigned long t_start, t_end;
	register u64 *p;
	register uint size;
	unsigned int pattern[2];

	icache_enable();
	t_start = get_tbms();
	pattern[0] = 0xdeadbeef;
	pattern[1] = 0xdeadbeef;

#if defined(CONFIG_DDR_ECC_INIT_VIA_DMA)
	dma_meminit(pattern[0], dram_size);
#else
	debug("ddr init: CPU FP write method\n");
	size = dram_size;
	for (p = 0; p < (u64 *)(size); p++) {
		ppcDWstore((u32 *)p, pattern);
	}
	__asm__ __volatile__ ("sync");
#endif

	t_end = get_tbms();
	icache_disable();

	debug("\nREADY!!\n");
	debug("ddr init duration: %ld ms\n", t_end - t_start);

	/* Clear All ECC Errors */
	if ((ddr->err_detect & ECC_ERROR_DETECT_MME) == ECC_ERROR_DETECT_MME)
		ddr->err_detect |= ECC_ERROR_DETECT_MME;
	if ((ddr->err_detect & ECC_ERROR_DETECT_MBE) == ECC_ERROR_DETECT_MBE)
		ddr->err_detect |= ECC_ERROR_DETECT_MBE;
	if ((ddr->err_detect & ECC_ERROR_DETECT_SBE) == ECC_ERROR_DETECT_SBE)
		ddr->err_detect |= ECC_ERROR_DETECT_SBE;
	if ((ddr->err_detect & ECC_ERROR_DETECT_MSE) == ECC_ERROR_DETECT_MSE)
		ddr->err_detect |= ECC_ERROR_DETECT_MSE;

	/* Disable ECC-Interrupts */
	ddr->err_int_en &= ECC_ERR_INT_DISABLE;

	/* Enable errors for ECC */
	ddr->err_disable &= ECC_ERROR_ENABLE;

	__asm__ __volatile__ ("sync");
	__asm__ __volatile__ ("isync");
}
#endif	/* CONFIG_DDR_ECC */

#endif /* !CONFIG_MPC83XX_SDRAM */