// SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
/*
 * (C) Copyright 2017 Rockchip Electronics Co., Ltd.
 */
#include <common.h>
#include <clk.h>
#include <debug_uart.h>
#include <dm.h>
#include <dt-structs.h>
#include <init.h>
#include <log.h>
#include <ram.h>
#include <regmap.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/cru_rk3328.h>
#include <asm/arch-rockchip/grf_rk3328.h>
#include <asm/arch-rockchip/sdram.h>
#include <asm/arch-rockchip/sdram_rk3328.h>
#include <asm/arch-rockchip/uart.h>
#include <linux/delay.h>

struct dram_info {
#ifdef CONFIG_TPL_BUILD
	struct ddr_pctl_regs *pctl;
	struct ddr_phy_regs *phy;
	struct clk ddr_clk;
	struct rk3328_cru *cru;
	struct msch_regs *msch;
	struct rk3328_ddr_grf_regs *ddr_grf;
#endif
	struct ram_info info;
	struct rk3328_grf_regs *grf;
};

#ifdef CONFIG_TPL_BUILD

struct rk3328_sdram_channel sdram_ch;

struct rockchip_dmc_plat {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3328_dmc dtplat;
#else
	struct rk3328_sdram_params sdram_params;
#endif
	struct regmap *map;
};

#if CONFIG_IS_ENABLED(OF_PLATDATA)
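/*
 * Convert the of-platdata "reg" entries (address/size pairs) into a
 * regmap so the driver can look up each register range by index.
 */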
static int conv_of_plat(struct udevice *dev)
{
	struct rockchip_dmc_plat *plat = dev_get_plat(dev);
	struct dtd_rockchip_rk3328_dmc *dtplat = &plat->dtplat;
	int ret;

	ret = regmap_init_mem_plat(dev, dtplat->reg,
				   ARRAY_SIZE(dtplat->reg) / 2, &plat->map);
	if (ret)
		return ret;

	return 0;
}
#endif

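/*
 * Assert (1) or release (0) the DDR controller and PHY soft resets via
 * the CRU softrst_con registers.
 */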
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(ddrctrl_srstn_req(ctl_srstn) | ddrctrl_psrstn_req(ctl_psrstn) |
		ddrphy_srstn_req(phy_srstn) | ddrphy_psrstn_req(phy_psrstn),
		&dram->cru->softrst_con[5]);
	writel(ddrctrl_asrstn_req(ctl_srstn), &dram->cru->softrst_con[9]);
}

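/*
 * Program the DPLL for the requested DDR clock rate: pick post-dividers
 * for the target band, derive fbdiv from the 24 MHz reference, switch
 * the PLL to slow mode while reconfiguring, wait for lock (up to 1 ms),
 * then switch back to normal mode.
 */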
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHZ;

	refdiv = 1;
	if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else if (mhz <= 600) {
		postdiv1 = 4;
		postdiv2 = 1;
	} else if (mhz <= 800) {
		postdiv1 = 3;
		postdiv2 = 1;
	} else if (mhz <= 1600) {
		postdiv1 = 2;
		postdiv2 = 1;
	} else {
		postdiv1 = 1;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	writel(((0x1 << 4) << 16) | (0 << 4), &dram->cru->mode_con);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->dpll_con[0]);
	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->dpll_con[1]);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->dpll_con[1])))
			break;
		delay--;
	}

	writel(((0x1 << 4) << 16) | (1 << 4), &dram->cru->mode_con);
}

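/*
 * Select the DPLL as the DDR clock source (PHY reg 0xef bit 7) and
 * program it to twice the requested DRAM frequency, as the Inno DDR
 * PHY expects.
 */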
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rk3328_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;

	/* choose DPLL for ddr clk source */
	clrbits_le32(PHY_REG(phy_base, 0xef), 1 << 7);

	/* for inno ddr phy need 2*freq */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ * 2);
}

/*
 * Work out the ddrconfig value for the detected DRAM geometry.
 * Returns (u32)-1 if no matching ddrconfig is found, otherwise the
 * ddrconfig value. Only layouts with cs0_row >= cs1_row are supported.
 */
static u32 calculate_ddrconfig(struct rk3328_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;

	if (sdram_params->base.dramtype == DDR4) {
		/* when DDR_TEST, CS always at MSB position for easy test */
		if (cs == 2 && row == cs1_row) {
			/* include 2cs cap both 2^n or both (2^n - 2^(n-2)) */
			tmp = ((row - 13) << 3) | (1 << 2) | (bw & 0x2) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0x7) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0x7)) &&
				    ((tmp & 0x3c) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x3c))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 6) | ((row - 13) << 3) | (bw & 0x2) | die_bw;
		for (i = 10; i < 17; i++) {
			if (((tmp & 0x7) == (ddr4_cfg_2_rbc[i - 10] & 0x7)) &&
			    ((tmp & 0x3c) <= (ddr4_cfg_2_rbc[i - 10] & 0x3c)) &&
			    ((tmp & 0x40) <= (ddr4_cfg_2_rbc[i - 10] & 0x40))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		if (bank == 2) {
			ddrconf = 8;
			goto out;
		}

		/* when DDR_TEST, CS always at MSB position for easy test */
		if (cs == 2 && row == cs1_row) {
			/* include 2cs cap both 2^n or both (2^n - 2^(n-2)) */
			for (i = 5; i < 8; i++) {
				if ((bw + col - 11) == (ddr_cfg_2_rbc[i] &
							0x3)) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((row - 13) << 4) | (1 << 2) | ((bw + col - 11) << 0);
		for (i = 0; i < 5; i++)
			if (((tmp & 0xf) == (ddr_cfg_2_rbc[i] & 0xf)) &&
			    ((tmp & 0x30) <= (ddr_cfg_2_rbc[i] & 0x30))) {
				ddrconf = i;
				goto out;
			}
	}

out:
	if (ddrconf > 20)
		printf("calculate ddrconfig error\n");

	return ddrconf;
}

/*
 * Calculate the controller DRAM address map and write it to the
 * ADDRMAP registers. cap_info->ddrconfig must hold a valid value
 * before this function is called.
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct rk3328_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;

	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[cap_info->ddrconfig][0], 9 * 4);
	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}

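/*
 * Run PHY data (gate) training on the given chip select with auto
 * low-power and ZQCS/auto-refresh temporarily disabled, then restore
 * the previous power-control settings.
 */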
static int data_training(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 dis_auto_zq = 0;
	u32 pwrctl;
	u32 ret;

	/* disable auto low-power */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	ret = phy_data_training(dram->phy, cs, dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore auto low-power */
	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	return ret;
}

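/*
 * Take the largest value read back from PHY registers 0xfb-0xfe
 * (presumably the per-lane gate-training results), scale and clamp it
 * to 0x1f, and program it as the RX deskew setting in regs 0x6e/0x6f.
 */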
static void rx_deskew_switch_adjust(struct dram_info *dram)
{
	u32 i, deskew_val;
	u32 gate_val = 0;
	void __iomem *phy_base = dram->phy;

	for (i = 0; i < 4; i++)
		gate_val = MAX(readl(PHY_REG(phy_base, 0xfb + i)), gate_val);

	deskew_val = (gate_val >> 3) + 1;
	deskew_val = (deskew_val > 0x1f) ? 0x1f : deskew_val;
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xc, (deskew_val & 0x3) << 2);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x7 << 4,
			(deskew_val & 0x1c) << 2);
}

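/* Program the TX deskew setting (low two bits of PHY reg 0x6e) to 1 */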
static void tx_deskew_switch_adjust(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0x3, 1);
}

static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig, &dram->msch->ddrconf);
}

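/*
 * Program the memory scheduler (MSCH) timing registers; the same aging
 * value is written to all six aging registers.
 */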
static void sdram_msch_config(struct msch_regs *msch,
			      struct sdram_msch_timings *noc_timings)
{
	writel(noc_timings->ddrtiming.d32, &msch->ddrtiming);

	writel(noc_timings->ddrmode.d32, &msch->ddrmode);
	writel(noc_timings->readlatency, &msch->readlatency);

	writel(noc_timings->activate.d32, &msch->activate);
	writel(noc_timings->devtodev.d32, &msch->devtodev);
	writel(noc_timings->ddr4timing.d32, &msch->ddr4_timing);
	writel(noc_timings->agingx0, &msch->aging0);
	writel(noc_timings->agingx0, &msch->aging1);
	writel(noc_timings->agingx0, &msch->aging2);
	writel(noc_timings->agingx0, &msch->aging3);
	writel(noc_timings->agingx0, &msch->aging4);
	writel(noc_timings->agingx0, &msch->aging5);
}

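/*
 * Apply the final DRAM configuration: set ddrconfig in the scheduler,
 * encode the detected geometry into GRF os_reg[2]/os_reg[3] so later
 * boot stages can read back the DRAM size, and program the NoC timings.
 */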
static void dram_all_config(struct dram_info *dram,
			    struct rk3328_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->grf->os_reg[2]);
	writel(sys_reg3, &dram->grf->os_reg[3]);

	sdram_msch_config(dram->msch, &sdram_ch.noc_timings);
}

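/*
 * Enable automatic clock gating in the DDR GRF and, depending on the
 * PD_IDLE/SR_IDLE settings, automatic power-down and self-refresh in
 * the controller's PWRCTL register.
 */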
static void enable_low_power(struct dram_info *dram,
			     struct rk3328_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;

	/* enable upctl2 axi clock auto gating */
	writel(0x00800000, &dram->ddr_grf->ddr_grf_con[0]);
	writel(0x20012001, &dram->ddr_grf->ddr_grf_con[2]);
	/* enable upctl2 core clock auto gating */
	writel(0x001e001a, &dram->ddr_grf->ddr_grf_con[2]);
	/* enable sr, pd */
	if (PD_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (SR_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}

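/*
 * Bring up the DRAM controller and PHY: sequence the soft resets,
 * configure the DPLL, program the controller and PHY registers, run
 * gate training, and apply the final address map, scheduler and
 * low-power configuration. When pre_init is non-zero the RX/TX deskew
 * is also adjusted from the training results.
 */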
static int sdram_init(struct dram_info *dram,
		      struct rk3328_sdram_params *sdram_params, u32 pre_init)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;

	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);
	/*
	 * De-assert the DDR PHY psrstn before configuring the PLL;
	 * when the PHY PLL is used, psrstn must be released before
	 * the PLL is configured.
	 */
	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	rkclk_configure_ddr(dram, sdram_params);

	/* release phy srst to provide clk to ctrl */
	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	udelay(10);
	phy_soft_reset(dram->phy);
	/* release ctrl presetn, and config ctl registers */
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs, SR_IDLE, PD_IDLE);
	cap_info->ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);
	phy_cfg(dram->phy, &sdram_params->phy_regs, &sdram_params->skew,
		&sdram_params->base, cap_info->bw);

	/* enable dfi_init_start to init phy after ctl srstn deassert */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
	rkclk_ddr_reset(dram, 0, 0, 0, 0);
	/* wait for dfi_init_done and dram init complete */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	/* do ddr gate training */
	if (data_training(dram, 0, sdram_params->base.dramtype) != 0) {
		printf("data training error\n");
		return -1;
	}

	if (sdram_params->base.dramtype == DDR4)
		pctl_write_vrefdq(dram->pctl, 0x3, 5670,
				  sdram_params->base.dramtype);

	if (pre_init != 0) {
		rx_deskew_switch_adjust(dram);
		tx_deskew_switch_adjust(dram);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}

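/*
 * Detect the DRAM geometry (column, bank, row, bank group, rank) using
 * the generic sdram_detect_* helpers plus a data-training pass on CS1;
 * fills in cap_info and returns 0, or -1 on failure.
 */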
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rk3328_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;

	/*
	 * for ddr3: ddrconf = 3
	 * for ddr4: ddrconf = 12
	 * for lpddr3: ddrconf = 3
	 * default bw = 1
	 */
	u32 bk, bktmp;
	u32 col, coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;
	u32 dram_type = sdram_params->base.dramtype;

	if (dram_type != DDR4) {
		/* detect col and bk for ddr3/lpddr3 */
		coltmp = 12;
		bktmp = 3;
		rowtmp = 16;

		if (sdram_detect_col(cap_info, coltmp) != 0)
			goto cap_err;
		sdram_detect_bank(cap_info, coltmp, bktmp);
		sdram_detect_dbw(cap_info, dram_type);
	} else {
		/* detect bg for ddr4 */
		coltmp = 10;
		bktmp = 4;
		rowtmp = 17;

		col = 10;
		bk = 2;
		cap_info->col = col;
		cap_info->bk = bk;
		sdram_detect_bg(cap_info, coltmp);
	}

	/* detect row */
	if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
		goto cap_err;

	/* detect row_3_4 */
	sdram_detect_row_3_4(cap_info, coltmp, bktmp);

	/* bw and cs detect using data training */
	if (data_training(dram, 1, dram_type) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	bw = 2;
	cap_info->bw = bw;

	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}

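/*
 * Full init-and-detect flow: initialise the controller with the
 * defaults from the device tree, detect the real capacity, patch the
 * bandwidth/CS dependent timings, re-initialise with the detected
 * parameters and finally record the CS1 row count in the GRF os_regs.
 */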
static int sdram_init_detect(struct dram_info *dram,
			     struct rk3328_sdram_params *sdram_params)
{
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;

	debug("Starting SDRAM initialization...\n");

	memcpy(&sdram_ch, &sdram_params->ch,
	       sizeof(struct rk3328_sdram_channel));

	sdram_init(dram, sdram_params, 0);
	dram_detect_cap(dram, sdram_params, 0);

	/* modify bw, cs related timing */
	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
				   sdram_params->base.dramtype);

	if (cap_info->bw == 2)
		sdram_ch.noc_timings.ddrtiming.b.bwratio = 0;
	else
		sdram_ch.noc_timings.ddrtiming.b.bwratio = 1;

	/* reinit sdram by real dram cap */
	sdram_init(dram, sdram_params, 1);

	/* redetect cs1 row */
	sdram_detect_cs1_row(cap_info, sdram_params->base.dramtype);
	if (cap_info->cs1_row) {
		sys_reg = readl(&dram->grf->os_reg[2]);
		sys_reg3 = readl(&dram->grf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
				    sys_reg, sys_reg3, 0);
		writel(sys_reg, &dram->grf->os_reg[2]);
		writel(sys_reg3, &dram->grf->os_reg[3]);
	}

	sdram_print_ddr_info(&sdram_params->ch.cap_info, &sdram_params->base);

	return 0;
}

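/*
 * TPL entry point for the DMC: fetch the register ranges (PHY, PCTL,
 * GRF, CRU, MSCH, DDR GRF) from the regmap and run the init/detect
 * sequence.
 */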
static int rk3328_dmc_init(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);
	struct rockchip_dmc_plat *plat = dev_get_plat(dev);
	int ret;

#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3328_sdram_params *params = &plat->sdram_params;
#else
	struct dtd_rockchip_rk3328_dmc *dtplat = &plat->dtplat;
	struct rk3328_sdram_params *params =
					(void *)dtplat->rockchip_sdram_params;

	ret = conv_of_plat(dev);
	if (ret)
		return ret;
#endif
	priv->phy = regmap_get_range(plat->map, 0);
	priv->pctl = regmap_get_range(plat->map, 1);
	priv->grf = regmap_get_range(plat->map, 2);
	priv->cru = regmap_get_range(plat->map, 3);
	priv->msch = regmap_get_range(plat->map, 4);
	priv->ddr_grf = regmap_get_range(plat->map, 5);

	debug("%s phy %p pctrl %p grf %p cru %p msch %p ddr_grf %p\n",
	      __func__, priv->phy, priv->pctl, priv->grf, priv->cru,
	      priv->msch, priv->ddr_grf);
	ret = sdram_init_detect(priv, params);
	if (ret < 0) {
		printf("%s: DRAM init failed: %d\n", __func__, ret);
		return ret;
	}

	return 0;
}

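/*
 * Read the "rockchip,sdram-params" property into the platform data and
 * initialise the regmap (skipped when OF_PLATDATA is enabled; see
 * conv_of_plat() for that case).
 */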
static int rk3328_dmc_of_to_plat(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rockchip_dmc_plat *plat = dev_get_plat(dev);
	int ret;

	ret = dev_read_u32_array(dev, "rockchip,sdram-params",
				 (u32 *)&plat->sdram_params,
				 sizeof(plat->sdram_params) / sizeof(u32));
	if (ret) {
		printf("%s: Cannot read rockchip,sdram-params %d\n",
		       __func__, ret);
		return ret;
	}
	ret = regmap_init_mem(dev, &plat->map);
	if (ret)
		printf("%s: regmap failed %d\n", __func__, ret);
#endif
	return 0;
}

#endif

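/*
 * Probe: in TPL, run the full DRAM init; in later stages, only read
 * back the size that TPL encoded into GRF os_reg[2].
 */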
static int rk3328_dmc_probe(struct udevice *dev)
{
#ifdef CONFIG_TPL_BUILD
	if (rk3328_dmc_init(dev))
		return 0;
#else
	struct dram_info *priv = dev_get_priv(dev);

	priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
	debug("%s: grf=%p\n", __func__, priv->grf);
	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	priv->info.size = rockchip_sdram_size(
				(phys_addr_t)&priv->grf->os_reg[2]);
#endif
	return 0;
}

static int rk3328_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}

static struct ram_ops rk3328_dmc_ops = {
	.get_info = rk3328_dmc_get_info,
};

static const struct udevice_id rk3328_dmc_ids[] = {
	{ .compatible = "rockchip,rk3328-dmc" },
	{ }
};

U_BOOT_DRIVER(rockchip_rk3328_dmc) = {
	.name = "rockchip_rk3328_dmc",
	.id = UCLASS_RAM,
	.of_match = rk3328_dmc_ids,
	.ops = &rk3328_dmc_ops,
#ifdef CONFIG_TPL_BUILD
	.of_to_plat = rk3328_dmc_of_to_plat,
#endif
	.probe = rk3328_dmc_probe,
	.priv_auto	= sizeof(struct dram_info),
#ifdef CONFIG_TPL_BUILD
	.plat_auto	= sizeof(struct rockchip_dmc_plat),
#endif
};