1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * sunxi DRAM controller initialization
4  * (C) Copyright 2012 Henrik Nordstrom <henrik@henriknordstrom.net>
5  * (C) Copyright 2013 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
6  *
7  * Based on sun4i Linux kernel sources mach-sunxi/pm/standby/dram*.c
8  * and earlier U-Boot Allwinner A10 SPL work
9  *
10  * (C) Copyright 2007-2012
11  * Allwinner Technology Co., Ltd. <www.allwinnertech.com>
12  * Berg Xing <bergxing@allwinnertech.com>
13  * Tom Cubie <tangliang@allwinnertech.com>
14  */
15 
16 /*
17  * Unfortunately the only documentation we have on the sun7i DRAM
18  * controller is Allwinner boot0 + boot1 code, and that code uses
19  * magic numbers & shifts with no explanations. Hence this code is
20  * rather undocumented and full of magic.
21  */
22 
23 #include <common.h>
24 #include <init.h>
25 #include <asm/io.h>
26 #include <asm/arch/clock.h>
27 #include <asm/arch/dram.h>
28 #include <asm/arch/timer.h>
29 #include <asm/arch/sys_proto.h>
30 #include <linux/delay.h>
31 
/*
 * Chip version/revision fields of the timer block's CPU_CFG register.
 * Used on sun4i to detect revision A silicon, which needs a mirrored
 * DRAM reset sequence (see mctl_ddr3_reset()).
 */
#define CPU_CFG_CHIP_VER(n) ((n) << 6)
#define CPU_CFG_CHIP_VER_MASK CPU_CFG_CHIP_VER(0x3)
#define CPU_CFG_CHIP_REV_A 0x0
#define CPU_CFG_CHIP_REV_C1 0x1
#define CPU_CFG_CHIP_REV_C2 0x2
#define CPU_CFG_CHIP_REV_B 0x3
38 
/*
 * Wait up to 1s for mask to be clear in given reg.
 *
 * Thin wrapper around mctl_await_completion() with an expected value
 * of 0 (all bits in 'mask' deasserted).
 */
static inline void await_bits_clear(u32 *reg, u32 mask)
{
	mctl_await_completion(reg, mask, 0);
}
46 
/*
 * Wait up to 1s for mask to be set in given reg.
 *
 * Thin wrapper around mctl_await_completion() with an expected value
 * of 'mask' (all bits in 'mask' asserted).
 */
static inline void await_bits_set(u32 *reg, u32 mask)
{
	mctl_await_completion(reg, mask, mask);
}
54 
/*
 * This performs the external DRAM reset by driving the RESET pin low and
 * then high again. According to the DDR3 spec, the RESET pin needs to be
 * kept low for at least 200 us.
 */
static void mctl_ddr3_reset(void)
{
	struct sunxi_dram_reg *dram =
			(struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

#ifdef CONFIG_MACH_SUN4I
	struct sunxi_timer_reg *timer =
			(struct sunxi_timer_reg *)SUNXI_TIMER_BASE;
	u32 reg_val;

	/* read back the chip revision from the timer CPU_CFG register */
	writel(0, &timer->cpu_cfg);
	reg_val = readl(&timer->cpu_cfg);

	/*
	 * On sun4i the reset pulse sequence depends on the silicon
	 * revision: non-rev-A parts use set-then-clear, rev A uses the
	 * mirrored clear-then-set sequence below — presumably the
	 * DRAM_MCR_RESET bit polarity differs on rev A (sequence taken
	 * as-is from the Allwinner sources).
	 */
	if ((reg_val & CPU_CFG_CHIP_VER_MASK) !=
	    CPU_CFG_CHIP_VER(CPU_CFG_CHIP_REV_A)) {
		setbits_le32(&dram->mcr, DRAM_MCR_RESET);
		udelay(200);
		clrbits_le32(&dram->mcr, DRAM_MCR_RESET);
	} else
#endif
	{
		clrbits_le32(&dram->mcr, DRAM_MCR_RESET);
		udelay(200);
		setbits_le32(&dram->mcr, DRAM_MCR_RESET);
	}
	/* After the RESET pin is de-asserted, the DDR3 spec requires to wait
	 * for additional 500 us before driving the CKE pin (Clock Enable)
	 * high. The duration of this delay can be configured in the SDR_IDCR
	 * (Initialization Delay Configuration Register) and applied
	 * automatically by the DRAM controller during the DDR3 initialization
	 * step. But SDR_IDCR has limited range on sun4i/sun5i hardware and
	 * can't provide sufficient delay at DRAM clock frequencies higher than
	 * 524 MHz (while Allwinner A13 supports DRAM clock frequency up to
	 * 533 MHz according to the datasheet). Additionally, there is no
	 * official documentation for the SDR_IDCR register anywhere, and
	 * there is always a chance that we are interpreting it wrong.
	 * Better be safe than sorry, so add an explicit delay here. */
	udelay(500);
}
99 
/*
 * Configure the DRAM pad mode bits in SDR_MCR. The set value (MODE_EN
 * plus the 0xffc magic) comes from the undocumented Allwinner boot0
 * code — presumably it controls pad drive strength; exact meaning is
 * unknown.
 */
static void mctl_set_drive(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

#ifdef CONFIG_MACH_SUN7I
	/* sun7i additionally clears bits [29:28] (purpose unknown) */
	clrsetbits_le32(&dram->mcr, DRAM_MCR_MODE_NORM(0x3) | (0x3 << 28),
#else
	clrsetbits_le32(&dram->mcr, DRAM_MCR_MODE_NORM(0x3),
#endif
			DRAM_MCR_MODE_EN(0x3) |
			0xffc);
}
112 
/*
 * Turn the ITM off. Note that the single clrsetbits also clears any
 * pending DRAM_CCR_INIT trigger while setting ITM_OFF.
 */
static void mctl_itm_disable(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	clrsetbits_le32(&dram->ccr, DRAM_CCR_INIT, DRAM_CCR_ITM_OFF);
}
119 
/* Turn the ITM back on by clearing the ITM_OFF bit in SDR_CCR. */
static void mctl_itm_enable(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	clrbits_le32(&dram->ccr, DRAM_CCR_ITM_OFF);
}
126 
/* Reset the ITM by pulsing it off and back on, with settling delays. */
static void mctl_itm_reset(void)
{
	mctl_itm_disable();
	udelay(1); /* ITM reset needs a bit of delay */
	mctl_itm_enable();
	udelay(1);
}
134 
/*
 * Configure and start the command lane DLL (dllcr[0]).
 *
 * Bits [21:16] of 'phase' are written into the 6-bit phase field at
 * bit offset 6 of DLLCR0. The DLL is then taken through a
 * disable -> reset -> enable sequence with settling delays (sequence
 * and delays follow the Allwinner boot0 code).
 */
static void mctl_enable_dll0(u32 phase)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	/* program the DLL phase select field */
	clrsetbits_le32(&dram->dllcr[0], 0x3f << 6,
			((phase >> 16) & 0x3f) << 6);
	/* hold the DLL in reset (NRESET low) and disable it */
	clrsetbits_le32(&dram->dllcr[0], DRAM_DLLCR_NRESET, DRAM_DLLCR_DISABLE);
	udelay(2);

	/* keep in reset, re-enable */
	clrbits_le32(&dram->dllcr[0], DRAM_DLLCR_NRESET | DRAM_DLLCR_DISABLE);
	udelay(22);

	/* release the reset and let the DLL settle */
	clrsetbits_le32(&dram->dllcr[0], DRAM_DLLCR_DISABLE, DRAM_DLLCR_NRESET);
	udelay(22);
}
150 
151 /* Get the number of DDR byte lanes */
mctl_get_number_of_lanes(void)152 static u32 mctl_get_number_of_lanes(void)
153 {
154 	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
155 	if ((readl(&dram->dcr) & DRAM_DCR_BUS_WIDTH_MASK) ==
156 				DRAM_DCR_BUS_WIDTH(DRAM_DCR_BUS_WIDTH_32BIT))
157 		return 4;
158 	else
159 		return 2;
160 }
161 
/*
 * Note: This differs from pm/standby in that it checks the bus width
 *
 * Configure and start the per-byte-lane DLLs (dllcr[1..n], where n is
 * the number of byte lanes). Each lane consumes one nibble of 'phase',
 * lane 1 taking the lowest nibble. All lanes go through the same
 * disable -> reset -> enable sequence as the command lane DLL.
 */
static void mctl_enable_dllx(u32 phase)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 i, number_of_lanes;

	number_of_lanes = mctl_get_number_of_lanes();

	/* program each lane's 4-bit phase field and disable the DLL */
	for (i = 1; i <= number_of_lanes; i++) {
		clrsetbits_le32(&dram->dllcr[i], 0xf << 14,
				(phase & 0xf) << 14);
		clrsetbits_le32(&dram->dllcr[i], DRAM_DLLCR_NRESET,
				DRAM_DLLCR_DISABLE);
		phase >>= 4;
	}
	udelay(2);

	/* keep in reset, re-enable */
	for (i = 1; i <= number_of_lanes; i++)
		clrbits_le32(&dram->dllcr[i], DRAM_DLLCR_NRESET |
			     DRAM_DLLCR_DISABLE);
	udelay(22);

	/* release the resets and let the DLLs settle */
	for (i = 1; i <= number_of_lanes; i++)
		clrsetbits_le32(&dram->dllcr[i], DRAM_DLLCR_DISABLE,
				DRAM_DLLCR_NRESET);
	udelay(22);
}
191 
/*
 * Host port configuration values, written verbatim into the 32 HPCR
 * registers by mctl_configure_hostport(). The tables come from the
 * Allwinner boot0 code and differ per SoC generation; the individual
 * bit meanings are undocumented.
 */
static u32 hpcr_value[32] = {
#ifdef CONFIG_MACH_SUN5I
	0, 0, 0, 0,
	0, 0, 0, 0,
	0, 0, 0, 0,
	0, 0, 0, 0,
	0x1031, 0x1031, 0x0735, 0x1035,
	0x1035, 0x0731, 0x1031, 0,
	0x0301, 0x0301, 0x0301, 0x0301,
	0x0301, 0x0301, 0x0301, 0
#endif
#ifdef CONFIG_MACH_SUN4I
	0x0301, 0x0301, 0x0301, 0x0301,
	0x0301, 0x0301, 0, 0,
	0, 0, 0, 0,
	0, 0, 0, 0,
	0x1031, 0x1031, 0x0735, 0x5031,
	0x1035, 0x0731, 0x1031, 0x0735,
	0x1035, 0x1031, 0x0731, 0x1035,
	0x1031, 0x0301, 0x0301, 0x0731
#endif
#ifdef CONFIG_MACH_SUN7I
	0x0301, 0x0301, 0x0301, 0x0301,
	0x0301, 0x0301, 0x0301, 0x0301,
	0, 0, 0, 0,
	0, 0, 0, 0,
	0x1031, 0x1031, 0x0735, 0x1035,
	0x1035, 0x0731, 0x1031, 0x0735,
	0x1035, 0x1031, 0x0731, 0x1035,
	0x0001, 0x1031, 0, 0x1031
	/* last row differs from boot0 source table
	 * 0x1031, 0x0301, 0x0301, 0x0731
	 * but boot0 code skips #28 and #30, and sets #29 and #31 to the
	 * value from #28 entry (0x1031)
	 */
#endif
};
229 
mctl_configure_hostport(void)230 static void mctl_configure_hostport(void)
231 {
232 	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
233 	u32 i;
234 
235 	for (i = 0; i < 32; i++)
236 		writel(hpcr_value[i], &dram->hpcr[i]);
237 }
238 
/*
 * Configure the DRAM related clocks: PLL5 (the DRAM PLL), the MBUS
 * clock (sourced from PLL6 or PLL5P, whichever gives the better rate
 * with a divider <= 16), and the DRAM controller AHB/DLL clock gates.
 *
 * @clk:      DRAM clock rate in MHz; a few rates that need non-trivial
 *            PLL factors are special-cased, everything else must be a
 *            multiple of 24 MHz
 * @mbus_clk: requested MBUS clock rate in MHz (0 selects 300 MHz)
 */
static void mctl_setup_dram_clock(u32 clk, u32 mbus_clk)
{
	u32 reg_val;
	struct sunxi_ccm_reg *ccm = (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
	u32 pll5p_clk, pll6x_clk;
	u32 pll5p_div, pll6x_div;
	u32 pll5p_rate, pll6x_rate;

	/* setup DRAM PLL */
	reg_val = readl(&ccm->pll5_cfg);
	reg_val &= ~CCM_PLL5_CTRL_M_MASK;		/* set M to 0 (x1) */
	reg_val &= ~CCM_PLL5_CTRL_K_MASK;		/* set K to 0 (x1) */
	reg_val &= ~CCM_PLL5_CTRL_N_MASK;		/* set N to 0 (x0) */
	reg_val &= ~CCM_PLL5_CTRL_P_MASK;		/* set P to 0 (x1) */
#ifdef CONFIG_OLD_SUNXI_KERNEL_COMPAT
	/* Old kernels are hardcoded to P=1 (divide by 2) */
	reg_val |= CCM_PLL5_CTRL_P(1);
#endif
	/* PLL5 output is 24 MHz * N * K / M (per the factor tables below) */
	if (clk >= 540 && clk < 552) {
		/* dram = 540MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(15));
	} else if (clk >= 512 && clk < 528) {
		/* dram = 512MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(3));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(4));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(16));
	} else if (clk >= 496 && clk < 504) {
		/* dram = 496MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(3));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(2));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(31));
	} else if (clk >= 468 && clk < 480) {
		/* dram = 468MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(13));
	} else if (clk >= 396 && clk < 408) {
		/* dram = 396MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(11));
	} else {
		/* any other frequency that is a multiple of 24 */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(2));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(clk / 24));
	}
	reg_val &= ~CCM_PLL5_CTRL_VCO_GAIN;		/* PLL VCO Gain off */
	reg_val |= CCM_PLL5_CTRL_EN;			/* PLL On */
	writel(reg_val, &ccm->pll5_cfg);
	/* wait for the PLL to lock / settle */
	udelay(5500);

	/* enable the DDR clock output of PLL5 */
	setbits_le32(&ccm->pll5_cfg, CCM_PLL5_CTRL_DDR_CLK);

#if defined(CONFIG_MACH_SUN4I) || defined(CONFIG_MACH_SUN7I)
	/* reset GPS */
	clrbits_le32(&ccm->gps_clk_cfg, CCM_GPS_CTRL_RESET | CCM_GPS_CTRL_GATE);
	setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_GPS);
	udelay(1);
	clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_GPS);
#endif

	/* setup MBUS clock */
	if (!mbus_clk)
		mbus_clk = 300;

	/* PLL5P and PLL6 are the potential clock sources for MBUS */
	pll6x_clk = clock_get_pll6() / 1000000;
#ifdef CONFIG_MACH_SUN7I
	pll6x_clk *= 2; /* sun7i uses PLL6*2, sun5i uses just PLL6 */
#endif
	pll5p_clk = clock_get_pll5p() / 1000000;
	/* smallest divider that does not exceed the requested rate */
	pll6x_div = DIV_ROUND_UP(pll6x_clk, mbus_clk);
	pll5p_div = DIV_ROUND_UP(pll5p_clk, mbus_clk);
	pll6x_rate = pll6x_clk / pll6x_div;
	pll5p_rate = pll5p_clk / pll5p_div;

	if (pll6x_div <= 16 && pll6x_rate > pll5p_rate) {
		/* use PLL6 as the MBUS clock source */
		reg_val = CCM_MBUS_CTRL_GATE |
			  CCM_MBUS_CTRL_CLK_SRC(CCM_MBUS_CTRL_CLK_SRC_PLL6) |
			  CCM_MBUS_CTRL_N(CCM_MBUS_CTRL_N_X(1)) |
			  CCM_MBUS_CTRL_M(CCM_MBUS_CTRL_M_X(pll6x_div));
	} else if (pll5p_div <= 16) {
		/* use PLL5P as the MBUS clock source */
		reg_val = CCM_MBUS_CTRL_GATE |
			  CCM_MBUS_CTRL_CLK_SRC(CCM_MBUS_CTRL_CLK_SRC_PLL5) |
			  CCM_MBUS_CTRL_N(CCM_MBUS_CTRL_N_X(1)) |
			  CCM_MBUS_CTRL_M(CCM_MBUS_CTRL_M_X(pll5p_div));
	} else {
		panic("Bad mbus_clk\n");
	}
	writel(reg_val, &ccm->mbus_clk_cfg);

	/*
	 * open DRAMC AHB & DLL register clock
	 * close it first
	 */
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
	clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM | CCM_AHB_GATE_DLL);
#else
	clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM);
#endif
	udelay(22);

	/* then open it */
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
	setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM | CCM_AHB_GATE_DLL);
#else
	setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM);
#endif
	udelay(22);
}
354 
/*
 * The data from rslrX and rdgrX registers (X=rank) is stored
 * in a single 32-bit value using the following format:
 *   bits [31:26] - DQS gating system latency for byte lane 3
 *   bits [25:24] - DQS gating phase select for byte lane 3
 *   bits [23:18] - DQS gating system latency for byte lane 2
 *   bits [17:16] - DQS gating phase select for byte lane 2
 *   bits [15:10] - DQS gating system latency for byte lane 1
 *   bits [ 9:8 ] - DQS gating phase select for byte lane 1
 *   bits [ 7:2 ] - DQS gating system latency for byte lane 0
 *   bits [ 1:0 ] - DQS gating phase select for byte lane 0
 *
 * Note that only the low 3 bits of each 6-bit system latency field are
 * actually consumed here, since the per-lane RSLR field is 3 bits wide.
 */
static void mctl_set_dqs_gating_delay(int rank, u32 dqs_gating_delay)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 lane, number_of_lanes = mctl_get_number_of_lanes();
	/* rank0 gating system latency (3 bits per lane: cycles) */
	u32 slr = readl(rank == 0 ? &dram->rslr0 : &dram->rslr1);
	/* rank0 gating phase select (2 bits per lane: 90, 180, 270, 360) */
	u32 dgr = readl(rank == 0 ? &dram->rdgr0 : &dram->rdgr1);
	/* rewrite the per-lane fields from the packed argument */
	for (lane = 0; lane < number_of_lanes; lane++) {
		u32 tmp = dqs_gating_delay >> (lane * 8);
		slr &= ~(7 << (lane * 3));
		slr |= ((tmp >> 2) & 7) << (lane * 3);
		dgr &= ~(3 << (lane * 2));
		dgr |= (tmp & 3) << (lane * 2);
	}
	writel(slr, rank == 0 ? &dram->rslr0 : &dram->rslr1);
	writel(dgr, rank == 0 ? &dram->rdgr0 : &dram->rdgr1);
}
385 
dramc_scan_readpipe(void)386 static int dramc_scan_readpipe(void)
387 {
388 	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
389 	u32 reg_val;
390 
391 	/* data training trigger */
392 	clrbits_le32(&dram->csr, DRAM_CSR_FAILED);
393 	setbits_le32(&dram->ccr, DRAM_CCR_DATA_TRAINING);
394 
395 	/* check whether data training process has completed */
396 	await_bits_clear(&dram->ccr, DRAM_CCR_DATA_TRAINING);
397 
398 	/* check data training result */
399 	reg_val = readl(&dram->csr);
400 	if (reg_val & DRAM_CSR_FAILED)
401 		return -1;
402 
403 	return 0;
404 }
405 
/*
 * Gate the DRAM clock output on or off. The enable bit lives in the
 * DRAM controller's SDR_MCR register on sun5i/sun7i, but in the CCM
 * DRAM clock gating register on sun4i.
 */
static void dramc_clock_output_en(u32 on)
{
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	if (on)
		setbits_le32(&dram->mcr, DRAM_MCR_DCLK_OUT);
	else
		clrbits_le32(&dram->mcr, DRAM_MCR_DCLK_OUT);
#endif
#ifdef CONFIG_MACH_SUN4I
	struct sunxi_ccm_reg *ccm = (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
	if (on)
		setbits_le32(&ccm->dram_clk_gate, CCM_DRAM_CTRL_DCLK_OUT);
	else
		clrbits_le32(&ccm->dram_clk_gate, CCM_DRAM_CTRL_DCLK_OUT);
#endif
}
424 
/* tRFC in nanoseconds for different densities (from the DDR3 spec),
 * indexed by the DRAM_DCR_CHIP_DENSITY_* density code */
static const u16 tRFC_DDR3_table[6] = {
	/* 256Mb    512Mb    1Gb      2Gb      4Gb      8Gb */
	   90,      90,      110,     160,     300,     350
};
430 
/*
 * Program the refresh timing register (SDR_DRR).
 *
 * @clk:     DRAM clock rate in MHz
 * @density: chip density code (DRAM_DCR_CHIP_DENSITY_*), used as an
 *           index into tRFC_DDR3_table
 */
static void dramc_set_autorefresh_cycle(u32 clk, u32 density)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 tRFC, tREFI;

	/* refresh cycle time: tRFC in ns converted to clocks, rounded up */
	tRFC = (tRFC_DDR3_table[density] * clk + 999) / 1000;
	/* average refresh interval: 7987/1024 us (just under 7.8 us) */
	tREFI = (7987 * clk) >> 10;	/* <= 7.8us */

	writel(DRAM_DRR_TREFI(tREFI) | DRAM_DRR_TRFC(tRFC), &dram->drr);
}
441 
442 /* Calculate the value for A11, A10, A9 bits in MR0 (write recovery) */
ddr3_write_recovery(u32 clk)443 static u32 ddr3_write_recovery(u32 clk)
444 {
445 	u32 twr_ns = 15; /* DDR3 spec says that it is 15ns for all speed bins */
446 	u32 twr_ck = (twr_ns * clk + 999) / 1000;
447 	if (twr_ck < 5)
448 		return 1;
449 	else if (twr_ck <= 8)
450 		return twr_ck - 4;
451 	else if (twr_ck <= 10)
452 		return 5;
453 	else
454 		return 6;
455 }
456 
/*
 * If the dram->ppwrsctl (SDR_DPCR) register has the lowest bit set to 1, this
 * means that DRAM is currently in self-refresh mode and retaining the old
 * data. Since we have no idea what to do in this situation yet, just set this
 * register to 0 and initialize DRAM in the same way as on any normal reboot
 * (discarding whatever was stored there).
 *
 * Note: on sun7i hardware, the highest 16 bits need to be set to 0x1651 magic
 * value for this write operation to have any effect. On sun5i hadware this
 * magic value is not necessary. And on sun4i hardware the writes to this
 * register seem to have no effect at all.
 */
static void mctl_disable_power_save(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	/* 0x1651 key in the high 16 bits, all control bits cleared */
	writel(0x16510000, &dram->ppwrsctl);
}
474 
/*
 * After the DRAM is powered up or reset, the DDR3 spec requires to wait at
 * least 500 us before driving the CKE pin (Clock Enable) high. The dram->idct
 * (SDR_IDCR) register appears to configure this delay, which gets applied
 * right at the time when the DRAM initialization is activated in the
 * 'mctl_ddr3_initialize' function.
 */
static void mctl_set_cke_delay(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	/* The CKE delay is represented in DRAM clock cycles, multiplied by N
	 * (where N=2 for sun4i/sun5i and N=3 for sun7i). Here it is set to
	 * the maximum possible value 0x1ffff, just like in the Allwinner's
	 * boot0 bootloader. The resulting delay value is somewhere between
	 * ~0.4 ms (sun5i with 648 MHz DRAM clock speed) and ~1.1 ms (sun7i
	 * with 360 MHz DRAM clock speed). */
	setbits_le32(&dram->idcr, 0x1ffff);
}
494 
/*
 * This triggers the DRAM initialization. It performs sending the mode registers
 * to the DRAM among other things. Very likely the ZQCL command is also getting
 * executed (to do the initial impedance calibration on the DRAM side of the
 * wire). The memory controller and the PHY must be already configured before
 * calling this function.
 */
static void mctl_ddr3_initialize(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	/* the INIT bit self-clears once initialization has completed */
	setbits_le32(&dram->ccr, DRAM_CCR_INIT);
	await_bits_clear(&dram->ccr, DRAM_CCR_INIT);
}
508 
/*
 * Perform impedance calibration on the DRAM controller side of the wire.
 *
 * @zq:     bits [7:0] = ZPROG value for calibration against the external
 *          resistor (passed to DRAM_ZQCR0_IMP_DIV); bits [27:8] =
 *          optional manually supplied ZDATA impedance data — when
 *          non-zero it is used directly instead of running calibration
 * @odt_en: whether to enable on-die termination in SDR_IOCR
 */
static void mctl_set_impedance(u32 zq, bool odt_en)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 reg_val;
	u32 zprog = zq & 0xFF, zdata = (zq >> 8) & 0xFFFFF;

#ifndef CONFIG_MACH_SUN7I
	/* Appears that some kind of automatically initiated default
	 * ZQ calibration is already in progress at this point on sun4i/sun5i
	 * hardware, but not on sun7i. So it is reasonable to wait for its
	 * completion before doing anything else. */
	await_bits_set(&dram->zqsr, DRAM_ZQSR_ZDONE);
#endif

	/* ZQ calibration is not really useful unless ODT is enabled */
	if (!odt_en)
		return;

#ifdef CONFIG_MACH_SUN7I
	/* Enabling ODT in SDR_IOCR on sun7i hardware results in a deadlock
	 * unless bit 24 is set in SDR_ZQCR1. Not much is known about the
	 * SDR_ZQCR1 register, but there are hints indicating that it might
	 * be related to periodic impedance re-calibration. This particular
	 * magic value is borrowed from the Allwinner boot0 bootloader, and
	 * using it helps to avoid troubles */
	writel((1 << 24) | (1 << 1), &dram->zqcr1);
#endif

	/* Needed at least for sun5i, because it does not self clear there */
	clrbits_le32(&dram->zqcr0, DRAM_ZQCR0_ZCAL);

	if (zdata) {
		/* Set the user supplied impedance data */
		reg_val = DRAM_ZQCR0_ZDEN | zdata;
		writel(reg_val, &dram->zqcr0);
		/* no need to wait, this takes effect immediately */
	} else {
		/* Do the calibration using the external resistor */
		reg_val = DRAM_ZQCR0_ZCAL | DRAM_ZQCR0_IMP_DIV(zprog);
		writel(reg_val, &dram->zqcr0);
		/* Wait for the new impedance configuration to settle */
		await_bits_set(&dram->zqsr, DRAM_ZQSR_ZDONE);
	}

	/* Needed at least for sun5i, because it does not self clear there */
	clrbits_le32(&dram->zqcr0, DRAM_ZQCR0_ZCAL);

	/* Set I/O configure register */
	writel(DRAM_IOCR_ODT_EN, &dram->iocr);
}
562 
/*
 * Bring up the DRAM controller and the external DRAM chips using the
 * given parameter set: clocks, pad configuration, controller geometry,
 * impedance, reset, DLLs, timing/mode registers, DQS gate training and
 * finally the host ports.
 *
 * Returns the usable DRAM size in bytes (as probed by get_ram_size()),
 * or 0 on failure (unsupported configuration or failed DQS training).
 */
static unsigned long dramc_init_helper(struct dram_para *para)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 reg_val;
	u32 density;
	int ret_val;

	/*
	 * only single rank DDR3 is supported by this code even though the
	 * hardware can theoretically support DDR2 and up to two ranks
	 */
	if (para->type != DRAM_MEMORY_TYPE_DDR3 || para->rank_num != 1)
		return 0;

	/* setup DRAM relative clock */
	mctl_setup_dram_clock(para->clock, para->mbus_clock);

	/* Disable any pad power save control */
	mctl_disable_power_save();

	mctl_set_drive();

	/* dram clock off */
	dramc_clock_output_en(0);

#ifdef CONFIG_MACH_SUN4I
	/* select dram controller 1 */
	writel(DRAM_CSEL_MAGIC, &dram->csel);
#endif

	mctl_itm_disable();
	mctl_enable_dll0(para->tpr3);

	/* configure external DRAM */
	reg_val = DRAM_DCR_TYPE_DDR3;
	reg_val |= DRAM_DCR_IO_WIDTH(para->io_width >> 3);

	/* map the per-chip density in Mbit to the DCR density code */
	if (para->density == 256)
		density = DRAM_DCR_CHIP_DENSITY_256M;
	else if (para->density == 512)
		density = DRAM_DCR_CHIP_DENSITY_512M;
	else if (para->density == 1024)
		density = DRAM_DCR_CHIP_DENSITY_1024M;
	else if (para->density == 2048)
		density = DRAM_DCR_CHIP_DENSITY_2048M;
	else if (para->density == 4096)
		density = DRAM_DCR_CHIP_DENSITY_4096M;
	else if (para->density == 8192)
		density = DRAM_DCR_CHIP_DENSITY_8192M;
	else
		density = DRAM_DCR_CHIP_DENSITY_256M;

	reg_val |= DRAM_DCR_CHIP_DENSITY(density);
	reg_val |= DRAM_DCR_BUS_WIDTH((para->bus_width >> 3) - 1);
	reg_val |= DRAM_DCR_RANK_SEL(para->rank_num - 1);
	reg_val |= DRAM_DCR_CMD_RANK_ALL;
	reg_val |= DRAM_DCR_MODE(DRAM_DCR_MODE_INTERLEAVE);
	writel(reg_val, &dram->dcr);

	dramc_clock_output_en(1);

	mctl_set_impedance(para->zq, para->odt_en);

	mctl_set_cke_delay();

	mctl_ddr3_reset();

	udelay(1);

	/* wait for any pending controller-initiated init to finish */
	await_bits_clear(&dram->ccr, DRAM_CCR_INIT);

	mctl_enable_dllx(para->tpr3);

	/* set refresh period */
	dramc_set_autorefresh_cycle(para->clock, density);

	/* set timing parameters */
	writel(para->tpr0, &dram->tpr0);
	writel(para->tpr1, &dram->tpr1);
	writel(para->tpr2, &dram->tpr2);

	/* assemble the DDR3 mode register (MR) value */
	reg_val = DRAM_MR_BURST_LENGTH(0x0);
#if (defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I))
	reg_val |= DRAM_MR_POWER_DOWN;
#endif
	reg_val |= DRAM_MR_CAS_LAT(para->cas - 4);
	reg_val |= DRAM_MR_WRITE_RECOVERY(ddr3_write_recovery(para->clock));
	writel(reg_val, &dram->mr);

	writel(para->emr1, &dram->emr);
	writel(para->emr2, &dram->emr2);
	writel(para->emr3, &dram->emr3);

	/* disable drift compensation and set passive DQS window mode */
	clrsetbits_le32(&dram->ccr, DRAM_CCR_DQS_DRIFT_COMP, DRAM_CCR_DQS_GATE);

#ifdef CONFIG_MACH_SUN7I
	/* Command rate timing mode 2T & 1T */
	if (para->tpr4 & 0x1)
		setbits_le32(&dram->ccr, DRAM_CCR_COMMAND_RATE_1T);
#endif
	/* initialize external DRAM */
	mctl_ddr3_initialize();

	/* scan read pipe value */
	mctl_itm_enable();

	/* Hardware DQS gate training */
	ret_val = dramc_scan_readpipe();

	if (ret_val < 0)
		return 0;

	/* allow to override the DQS training results with a custom delay */
	if (para->dqs_gating_delay)
		mctl_set_dqs_gating_delay(0, para->dqs_gating_delay);

	/* set the DQS gating window type */
	if (para->active_windowing)
		clrbits_le32(&dram->ccr, DRAM_CCR_DQS_GATE);
	else
		setbits_le32(&dram->ccr, DRAM_CCR_DQS_GATE);

	mctl_itm_reset();

	/* configure all host port */
	mctl_configure_hostport();

	return get_ram_size((long *)PHYS_SDRAM_0, PHYS_SDRAM_0_SIZE);
}
693 
/*
 * Initialize the DRAM. If the parameter set fully specifies the
 * geometry (io_width, bus_width, density), it is used as-is; otherwise
 * the bus width and chip density are autodetected by probing with the
 * maximum supported values and re-initializing with what was found.
 *
 * Returns the detected DRAM size in bytes, or 0 on failure.
 */
unsigned long dramc_init(struct dram_para *para)
{
	unsigned long dram_size, actual_density;

	/* If the dram configuration is not provided, bail out */
	if (!para)
		return 0;

	/* if everything is known, then autodetection is not necessary */
	if (para->io_width && para->bus_width && para->density)
		return dramc_init_helper(para);

	/* try to autodetect the DRAM bus width and density */
	para->io_width  = 16;
	para->bus_width = 32;
#if defined(CONFIG_MACH_SUN4I) || defined(CONFIG_MACH_SUN5I)
	/* only A0-A14 address lines on A10/A13, limiting max density to 4096 */
	para->density = 4096;
#else
	/* all A0-A15 address lines on A20, which allow density 8192 */
	para->density = 8192;
#endif

	dram_size = dramc_init_helper(para);
	if (!dram_size) {
		/* if 32-bit bus width failed, try 16-bit bus width instead */
		para->bus_width = 16;
		dram_size = dramc_init_helper(para);
		if (!dram_size) {
			/* if 16-bit bus width also failed, then bail out */
			return dram_size;
		}
	}

	/* check if we need to adjust the density */
	/* per-chip Mbit = (bytes * 8 / 2^20) / (bus_width / io_width chips),
	 * which simplifies to (bytes >> 17) * io_width / bus_width */
	actual_density = (dram_size >> 17) * para->io_width / para->bus_width;

	if (actual_density != para->density) {
		/* update the density and re-initialize DRAM again */
		para->density = actual_density;
		dram_size = dramc_init_helper(para);
	}

	return dram_size;
}
739