// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2018 Rockchip Electronics Co., Ltd.
 */

#include <common.h>
#include <debug_uart.h>
#include <dm.h>
#include <init.h>
#include <log.h>
#include <ram.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/cru_px30.h>
#include <asm/arch-rockchip/grf_px30.h>
#include <asm/arch-rockchip/hardware.h>
#include <asm/arch-rockchip/sdram.h>
#include <asm/arch-rockchip/sdram_px30.h>
#include <linux/delay.h>

struct dram_info {
#ifdef CONFIG_TPL_BUILD
	struct ddr_pctl_regs *pctl;
	struct ddr_phy_regs *phy;
	struct px30_cru *cru;
	struct msch_regs *msch;
	struct px30_ddr_grf_regs *ddr_grf;
	struct px30_grf *grf;
#endif
	struct ram_info info;
	struct px30_pmugrf *pmugrf;
};

#ifdef CONFIG_TPL_BUILD

u8 ddr_cfg_2_rbc[] = {
	/*
	 * [6:4] max row: 13+n
	 * [3]  bank(0:4bank,1:8bank)
	 * [2:0]    col(10+n)
	 */
	((5 << 4) | (1 << 3) | 0), /* 0 */
	((5 << 4) | (1 << 3) | 1), /* 1 */
	((4 << 4) | (1 << 3) | 2), /* 2 */
	((3 << 4) | (1 << 3) | 3), /* 3 */
	((2 << 4) | (1 << 3) | 4), /* 4 */
	((5 << 4) | (0 << 3) | 2), /* 5 */
	((4 << 4) | (1 << 3) | 2), /* 6 */
	/*((0<<3)|3),*/	 /* 12 for ddr4 */
	/*((1<<3)|1),*/  /* 13 B,C exchange for rkvdec */
};
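
/*
 * Example decode of ddr_cfg_2_rbc[0] = (5 << 4) | (1 << 3) | 0, using the
 * field layout documented above: max row = 13 + 5 = 18, 8 banks,
 * col = 10 + 0 = 10.
 */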

/*
 * For DDR4: if ddrconfig computes to 7, the upctl addrmap should be set
 * to 7 but the NoC ddrconf should be set to 1 for better efficiency.
 * noc ddrconf, upctl addrmap
 * 1  7
 * 2  8
 * 3  9
 * 12 10
 * 5  11
 */
u8 d4_rbc_2_d3_rbc[] = {
	1, /* 7 */
	2, /* 8 */
	3, /* 9 */
	12, /* 10 */
	5, /* 11 */
};
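
/*
 * Example of the mapping above: a DDR4 setup with die_bw != 0 and bw = 2
 * yields an upctl ddrconfig of 12 - 2 = 10 in calculate_ddrconfig(), which
 * d4_rbc_2_d3_rbc[10 - 7] translates to NoC ddrconf 12 (the "12 10" row).
 */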

/*
 * Row bits above the cs (rank) bit must be disabled by setting them to 0xf.
 * The rank addrmap is calculated from the real capacity.
 */
u32 addrmap[][8] = {
	/* map0 map1,   map2,       map3,       map4,      map5
	 * map6,        map7,       map8
	 * -------------------------------------------------------
	 * bk2-0       col 5-2     col 9-6    col 11-10   row 11-0
	 * row 15-12   row 17-16   bg1,0
	 * -------------------------------------------------------
	 * 4,3,2       5-2         9-6                    6
	 *                         3,2
	 */
	{0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000505, 0x3f3f}, /* 0 */
	{0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x06060606, 0x3f3f}, /* 1 */
	{0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 2 */
	{0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 3 */
	{0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000606, 0x3f3f}, /* 5 */
	{0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 6 */
	{0x003f0808, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000606, 0x0600}, /* 7 */
	{0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 8 */
	{0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 9 */
	{0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 10 */
	{0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000606, 0x3f00}, /* 11 */
	/* for DDR4, entry 12 maps to 10; for DDR3, entry 12 is unused */
	{0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 10 */
	{0x00070706, 0x00000000, 0x1f010000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000606, 0x3f3f}, /* 13 */
};

#define PMUGRF_BASE_ADDR		0xFF010000
#define CRU_BASE_ADDR			0xFF2B0000
#define GRF_BASE_ADDR			0xFF140000
#define DDRC_BASE_ADDR			0xFF600000
#define DDR_PHY_BASE_ADDR		0xFF2A0000
#define SERVER_MSCH0_BASE_ADDR		0xFF530000
#define DDR_GRF_BASE_ADDR		0xff630000

struct dram_info dram_info;

struct px30_sdram_params sdram_configs[] = {
#if defined(CONFIG_RAM_PX30_DDR4)
#include	"sdram-px30-ddr4-detect-333.inc"
#elif defined(CONFIG_RAM_PX30_LPDDR2)
#include	"sdram-px30-lpddr2-detect-333.inc"
#elif defined(CONFIG_RAM_PX30_LPDDR3)
#include	"sdram-px30-lpddr3-detect-333.inc"
#else
#include	"sdram-px30-ddr3-detect-333.inc"
#endif
};

struct ddr_phy_skew skew = {
#include	"sdram-px30-ddr_skew.inc"
};

static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(upctl2_srstn_req(ctl_srstn) | upctl2_psrstn_req(ctl_psrstn) |
	       upctl2_asrstn_req(ctl_srstn),
	       &dram->cru->softrst_con[1]);
	writel(ddrphy_srstn_req(phy_srstn) | ddrphy_psrstn_req(phy_psrstn),
	       &dram->cru->softrst_con[2]);
}

static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;

	refdiv = 1;
	if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else if (mhz <= 600) {
		postdiv1 = 4;
		postdiv2 = 1;
	} else if (mhz <= 800) {
		postdiv1 = 3;
		postdiv2 = 1;
	} else if (mhz <= 1600) {
		postdiv1 = 2;
		postdiv2 = 1;
	} else {
		postdiv1 = 1;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
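	/*
	 * With a 24 MHz reference this divider setup gives
	 *   Fout = 24 MHz * fbdiv / (refdiv * postdiv1 * postdiv2).
	 * E.g. hz = 666 MHz (333 MHz DDR, doubled for the PHY):
	 * postdiv1 = 3, postdiv2 = 1, fbdiv = 666 * 3 / 24 = 83,
	 * so the DPLL actually locks at 24 * 83 / 3 = 664 MHz.
	 */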

	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}

static void rkclk_configure_ddr(struct dram_info *dram,
				struct px30_sdram_params *sdram_params)
{
	/* the Inno DDR PHY needs twice the DDR frequency */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHz * 2);
}

/*
 * Return the ddrconfig value:
 *       (-1), if no matching ddrconfig is found
 *       other, the ddrconfig value
 * Only cs0_row >= cs1_row is supported.
 */
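/*
 * Worked example for the non-DDR4 branch: with bk = 3 (8 banks), col = 10
 * and bw = 2, tmp = ((3 - 2) << 3) | (10 + 2 - 10) = 0xa, which matches
 * ddr_cfg_2_rbc[2] & 0xf, so the function returns ddrconfig 2.
 */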
static unsigned int calculate_ddrconfig(struct px30_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 bw, die_bw, col, bank;
	u32 i, tmp;
	u32 ddrconf = -1;

	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	bank = cap_info->bk;

	if (sdram_params->base.dramtype == DDR4) {
		if (die_bw == 0)
			ddrconf = 7 + bw;
		else
			ddrconf = 12 - bw;
		ddrconf = d4_rbc_2_d3_rbc[ddrconf - 7];
	} else {
		tmp = ((bank - 2) << 3) | (col + bw - 10);
		for (i = 0; i < 7; i++)
			if ((ddr_cfg_2_rbc[i] & 0xf) == tmp) {
				ddrconf = i;
				break;
			}
		if (i > 6)
			printascii("calculate ddrconfig error\n");
	}

	return ddrconf;
}

/*
 * Calculate the controller DRAM address map and write it to the registers.
 * sdram_params->ch.ddrconfig must hold the correct value before this
 * function is called.
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct px30_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 cs_pst, bg, max_row, ddrconf;
	u32 i;

	if (sdram_params->base.dramtype == DDR4)
		/*
		 * DDR4 with x8 DRAMs: BG = 2 (4 bank groups),
		 * with x16 DRAMs: BG = 1 (2 bank groups)
		 */
		bg = (cap_info->dbw == 0) ? 2 : 1;
	else
		bg = 0;

	cs_pst = cap_info->bw + cap_info->col +
		bg + cap_info->bk + cap_info->cs0_row;
	if (cs_pst >= 32 || cap_info->rank == 1)
		writel(0x1f, pctl_base + DDR_PCTL2_ADDRMAP0);
	else
		writel(cs_pst - 8, pctl_base + DDR_PCTL2_ADDRMAP0);

	ddrconf = cap_info->ddrconfig;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (d4_rbc_2_d3_rbc[i] == ddrconf) {
				ddrconf = 7 + i;
				break;
			}
		}
	}

	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP1),
			  &addrmap[ddrconf][0], 8 * 4);
	max_row = cs_pst - 1 - 8 - (addrmap[ddrconf][5] & 0xf);

	if (max_row < 12)
		printascii("set addrmap fail\n");
	/* rows above the rank bit must be disabled by setting them to 0xf */
	for (i = 17; i > max_row; i--)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			((i - 12) * 8 / 32) * 4,
			0xf << ((i - 12) * 8 % 32),
			0xf << ((i - 12) * 8 % 32));
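
	/*
	 * Example of the arithmetic above: for i = 16, (16 - 12) * 8 = 32,
	 * so the 0xf is written to bits [3:0] of ADDRMAP7; rows 12-15 sit
	 * in ADDRMAP6 and rows 16-17 in ADDRMAP7, one byte per row bit.
	 */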

	if ((sdram_params->base.dramtype == LPDDR3 ||
	     sdram_params->base.dramtype == LPDDR2) &&
		 cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw != 0x2)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
}

/*
 * rank = 1: cs0
 * rank = 2: cs1
 */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num)
{
	void __iomem *ddr_grf_base = dram->ddr_grf;

	pctl_read_mr(dram->pctl, rank, mr_num);

	return (readl(ddr_grf_base + DDR_GRF_STATUS(0)) & 0xff);
}

#define MIN(a, b)	(((a) > (b)) ? (b) : (a))
#define MAX(a, b)	(((a) > (b)) ? (a) : (b))
static u32 check_rd_gate(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	u32 max_val = 0;
	u32 min_val = 0xff;
	u32 gate[4];
	u32 i, bw;

	bw = (readl(PHY_REG(phy_base, 0x0)) >> 4) & 0xf;
	switch (bw) {
	case 0x1:
		bw = 1;
		break;
	case 0x3:
		bw = 2;
		break;
	case 0xf:
	default:
		bw = 4;
		break;
	}

	for (i = 0; i < bw; i++) {
		gate[i] = readl(PHY_REG(phy_base, 0xfb + i));
		max_val = MAX(max_val, gate[i]);
		min_val = MIN(min_val, gate[i]);
	}

	if (max_val > 0x80 || min_val < 0x20)
		return -1;
	else
		return 0;
}

static int data_training(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 dis_auto_zq = 0;
	u32 pwrctl;
	u32 ret;

	/* disable auto low-power */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	ret = phy_data_training(dram->phy, cs, dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore auto low-power */
	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	return ret;
}

static void dram_set_bw(struct dram_info *dram, u32 bw)
{
	phy_dram_set_bw(dram->phy, bw);
}

static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig | (ddrconfig << 8), &dram->msch->deviceconf);
	rk_clrsetreg(&dram->grf->soc_noc_con[1], 0x3 << 14, 0 << 14);
}

static void sdram_msch_config(struct msch_regs *msch,
			      struct sdram_msch_timings *noc_timings,
			      struct sdram_cap_info *cap_info,
			      struct sdram_base_params *base)
{
	u64 cs_cap[2];

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, base->dramtype);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, base->dramtype);
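	/*
	 * devicesize holds each rank's capacity in units of 64 MiB:
	 * cs0 in bits [7:0], cs1 in bits [15:8]; e.g. a 1 GiB rank
	 * encodes as 1024 / 64 = 16.
	 */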
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&msch->devicesize);

	writel(noc_timings->ddrtiminga0.d32,
	       &msch->ddrtiminga0);
	writel(noc_timings->ddrtimingb0.d32,
	       &msch->ddrtimingb0);
	writel(noc_timings->ddrtimingc0.d32,
	       &msch->ddrtimingc0);
	writel(noc_timings->devtodev0.d32,
	       &msch->devtodev0);
	writel(noc_timings->ddrmode.d32, &msch->ddrmode);
	writel(noc_timings->ddr4timing.d32,
	       &msch->ddr4timing);
	writel(noc_timings->agingx0, &msch->agingx0);
	writel(noc_timings->agingx0, &msch->aging0);
	writel(noc_timings->agingx0, &msch->aging1);
	writel(noc_timings->agingx0, &msch->aging2);
	writel(noc_timings->agingx0, &msch->aging3);
}

static void dram_all_config(struct dram_info *dram,
			    struct px30_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	sdram_msch_config(dram->msch, &sdram_params->ch.noc_timings, cap_info,
			  &sdram_params->base);
}

static void enable_low_power(struct dram_info *dram,
			     struct px30_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	void __iomem *ddr_grf_base = dram->ddr_grf;
	u32 grf_lp_con;

	/*
	 * bit0: grf_upctl_axi_cg_en = 1, enable upctl2 axi clk auto gating
	 * bit1: grf_upctl_apb_cg_en = 1, ungate axi/core clk for apb access
	 * bit2: grf_upctl_core_cg_en = 1, enable upctl2 core clk auto gating
	 * bit3: grf_selfref_type2_en = 0, disable core clk gating for type2 sr
	 * bit4: grf_upctl_syscreq_cg_en = 1,
	 *       ungate core clk when c_sysreq is asserted
	 * bit8-11: grf_auto_sr_dly = 6
	 */
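	/*
	 * 0x1f1f0617: the high half-word is the Rockchip GRF write-enable
	 * mask (0x1f1f), the value written is (6 << 8) | 0x17, i.e. the
	 * bit settings listed above with auto_sr_dly = 6.
	 */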
	writel(0x1f1f0617, &dram->ddr_grf->ddr_grf_con[1]);

	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* enable lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, ddr_grf_base + DDR_GRF_LP_CON);

	/* turn off the digital module clock when entering power-down */
	setbits_le32(PHY_REG(phy_base, 7), 1 << 7);

	/* enable sr, pd */
	if (PD_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (SR_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}

/*
 * pre_init: 0: pre-init for DRAM capacity detection
 *           1: the correct capacity (except the cs1 row) has been detected,
 *              so reinit
 *           2: after reinit, detect cs1_row; if cs1_row differs from
 *              cs0_row and the cs bit sits in the middle of the ddrconf
 *              map, the DRAM must be reinited and then the correct
 *              ddrconf set
 */
static int sdram_init_(struct dram_info *dram,
		       struct px30_sdram_params *sdram_params, u32 pre_init)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;

	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);
	/*
	 * de-assert the DDR PHY psrstn to configure the PLL;
	 * if the PHY PLL is used, psrstn must be de-asserted
	 * before the PLL is configured
	 */
	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	rkclk_configure_ddr(dram, sdram_params);

	/* release the PHY srst to provide a clock to the controller */
	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	udelay(10);
	phy_soft_reset(dram->phy);
	/* release the controller presetn, then configure the ctl registers */
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs, SR_IDLE, PD_IDLE);
	cap_info->ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);
	phy_cfg(dram->phy, &sdram_params->phy_regs, sdram_params->skew,
		&sdram_params->base, cap_info->bw);

	/* enable dfi_init_start to init the phy after ctl srstn deassert */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);
	/* wait for dfi_init_done and dram init complete */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3)
		pctl_write_mr(dram->pctl, 3, 11, 3, LPDDR3);

	/* do ddr gate training */
redo_cs0_training:
	if (data_training(dram, 0, sdram_params->base.dramtype) != 0) {
		if (pre_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}
	if (check_rd_gate(dram)) {
		printascii("re training cs0");
		goto redo_cs0_training;
	}

	if (sdram_params->base.dramtype == LPDDR3) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x3)
			return -1;
	} else if (sdram_params->base.dramtype == LPDDR2) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x0)
			return -1;
	}
	/* for px30: with 2 CS, both chip selects must be trained */
	if (pre_init != 0 && cap_info->rank == 2) {
redo_cs1_training:
		if (data_training(dram, 1, sdram_params->base.dramtype) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
		if (check_rd_gate(dram)) {
			printascii("re training cs1");
			goto redo_cs1_training;
		}
	}

	if (sdram_params->base.dramtype == DDR4)
		pctl_write_vrefdq(dram->pctl, 0x3, 5670,
				  sdram_params->base.dramtype);

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}

static int dram_detect_cap(struct dram_info *dram,
			   struct px30_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;

	/*
	 * for ddr3: ddrconf = 3
	 * for ddr4: ddrconf = 12
	 * for lpddr3: ddrconf = 3
	 * default bw = 1
	 */
	u32 bk, bktmp;
	u32 col, coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;
	u32 dram_type = sdram_params->base.dramtype;

	if (dram_type != DDR4) {
		/* detect col and bk for ddr3/lpddr3 */
		coltmp = 12;
		bktmp = 3;
		if (dram_type == LPDDR2)
			rowtmp = 15;
		else
			rowtmp = 16;

		if (sdram_detect_col(cap_info, coltmp) != 0)
			goto cap_err;
		sdram_detect_bank(cap_info, coltmp, bktmp);
		sdram_detect_dbw(cap_info, dram_type);
	} else {
		/* detect bg for ddr4 */
		coltmp = 10;
		bktmp = 4;
		rowtmp = 17;

		col = 10;
		bk = 2;
		cap_info->col = col;
		cap_info->bk = bk;
		sdram_detect_bg(cap_info, coltmp);
	}

	/* detect row */
	if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
		goto cap_err;

	/* detect row_3_4 */
	sdram_detect_row_3_4(cap_info, coltmp, bktmp);

	/* bw and cs detect using data training */
	if (data_training(dram, 1, dram_type) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	dram_set_bw(dram, 2);
	if (data_training(dram, 0, dram_type) == 0)
		bw = 2;
	else
		bw = 1;
	cap_info->bw = bw;

	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}

/* return: 0 = success, other = fail */
static int sdram_init_detect(struct dram_info *dram,
			     struct px30_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 ret;
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;

	if (sdram_init_(dram, sdram_params, 0) != 0)
		return -1;

	if (dram_detect_cap(dram, sdram_params, 0) != 0)
		return -1;

	/* modify bw, cs related timing */
	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
				   sdram_params->base.dramtype);
	/* reinit sdram by real dram cap */
	ret = sdram_init_(dram, sdram_params, 1);
	if (ret != 0)
		goto out;

	/* redetect cs1 row */
	sdram_detect_cs1_row(cap_info, sdram_params->base.dramtype);
	if (cap_info->cs1_row) {
		sys_reg = readl(&dram->pmugrf->os_reg[2]);
		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
				    sys_reg, sys_reg3, 0);
		writel(sys_reg, &dram->pmugrf->os_reg[2]);
		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	}

	ret = sdram_detect_high_row(cap_info);

out:
	return ret;
}

struct px30_sdram_params
		*get_default_sdram_config(void)
{
	sdram_configs[0].skew = &skew;

	return &sdram_configs[0];
}

/* return: 0 = success, other = fail */
int sdram_init(void)
{
	struct px30_sdram_params *sdram_params;
	int ret = 0;

	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)DDRC_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH0_BASE_ADDR;
	dram_info.ddr_grf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMUGRF_BASE_ADDR;

	sdram_params = get_default_sdram_config();
	ret = sdram_init_detect(&dram_info, sdram_params);

	if (ret)
		goto error;

	sdram_print_ddr_info(&sdram_params->ch.cap_info, &sdram_params->base);

	printascii("out\n");
	return ret;
error:
	return (-1);
}
#else

static int px30_dmc_probe(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);

	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
	debug("%s: grf=%p\n", __func__, priv->pmugrf);
	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	priv->info.size =
		rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg[2]);

	return 0;
}

static int px30_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}

static struct ram_ops px30_dmc_ops = {
	.get_info = px30_dmc_get_info,
};

static const struct udevice_id px30_dmc_ids[] = {
	{ .compatible = "rockchip,px30-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_px30) = {
	.name = "rockchip_px30_dmc",
	.id = UCLASS_RAM,
	.of_match = px30_dmc_ids,
	.ops = &px30_dmc_ops,
	.probe = px30_dmc_probe,
	.priv_auto	= sizeof(struct dram_info),
};
#endif /* CONFIG_TPL_BUILD */