/*
 * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <platform_def.h>

#include <arch_helpers.h>
#include <common/debug.h>

#include <dram.h>
#include <plat_private.h>
#include <pmu.h>
#include <pmu_bits.h>
#include <pmu_regs.h>
#include <rk3399_def.h>
#include <secure.h>
#include <soc.h>
#include <suspend.h>

#define PMUGRF_OS_REG0			0x300
#define PMUGRF_OS_REG1			0x304
#define PMUGRF_OS_REG2			0x308
#define PMUGRF_OS_REG3			0x30c

#define CRU_SFTRST_DDR_CTRL(ch, n)	((0x1 << (8 + 16 + (ch) * 4)) | \
					 ((n) << (8 + (ch) * 4)))
#define CRU_SFTRST_DDR_PHY(ch, n)	((0x1 << (9 + 16 + (ch) * 4)) | \
					 ((n) << (9 + (ch) * 4)))

#define FBDIV_ENC(n)			((n) << 16)
#define FBDIV_DEC(n)			(((n) >> 16) & 0xfff)
#define POSTDIV2_ENC(n)			((n) << 12)
#define POSTDIV2_DEC(n)			(((n) >> 12) & 0x7)
#define POSTDIV1_ENC(n)			((n) << 8)
#define POSTDIV1_DEC(n)			(((n) >> 8) & 0x7)
#define REFDIV_ENC(n)			(n)
#define REFDIV_DEC(n)			((n) & 0x3f)

/* PMU CRU */
#define PMUCRU_RSTNHOLD_CON0		0x120
#define PMUCRU_RSTNHOLD_CON1		0x124

#define PRESET_GPIO0_HOLD(n)		(((n) << 7) | WMSK_BIT(7))
#define PRESET_GPIO1_HOLD(n)		(((n) << 8) | WMSK_BIT(8))

#define SYS_COUNTER_FREQ_IN_MHZ		(SYS_COUNTER_FREQ_IN_TICKS / 1000000)

__pmusramdata uint32_t dpll_data[PLL_CON_COUNT];
__pmusramdata uint32_t cru_clksel_con6;

/*
 * Copy @num registers from @src to @dst
 */
static __pmusramfunc void sram_regcpy(uintptr_t dst, uintptr_t src,
		uint32_t num)
{
	while (num--) {
		mmio_write_32(dst, mmio_read_32(src));
		dst += sizeof(uint32_t);
		src += sizeof(uint32_t);
	}
}

/*
 * Copy @num registers from @src to @dst
 * This is intentionally a copy of the sram_regcpy function. PMUSRAM functions
 * cannot be called from code running in DRAM.
 */
static void dram_regcpy(uintptr_t dst, uintptr_t src, uint32_t num)
{
	while (num--) {
		mmio_write_32(dst, mmio_read_32(src));
		dst += sizeof(uint32_t);
		src += sizeof(uint32_t);
	}
}

static __pmusramfunc uint32_t sram_get_timer_value(void)
{
	/*
	 * Generic delay timer implementation expects the timer to be a down
	 * counter. We apply bitwise NOT operator to the tick values returned
	 * by read_cntpct_el0() to simulate the down counter.
	 */
	return (uint32_t)(~read_cntpct_el0());
}

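/*
 * Busy-wait for roughly @usec microseconds using the architectural counter.
 * Runs from PMUSRAM and must not call any DRAM-resident code.
 */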
static __pmusramfunc void sram_udelay(uint32_t usec)
{
	uint32_t start, cnt, delta, total_ticks;

	/* counter is decreasing */
	start = sram_get_timer_value();
	total_ticks = usec * SYS_COUNTER_FREQ_IN_MHZ;
	do {
		cnt = sram_get_timer_value();
		if (cnt > start) {
			delta = UINT32_MAX - cnt;
			delta += start;
		} else
			delta = start - cnt;
	} while (delta <= total_ticks);
}

static __pmusramfunc void configure_sgrf(void)
{
	/*
	 * SGRF_DDR_RGN_DPLL_CLK and SGRF_DDR_RGN_RTC_CLK:
	 * IC ECO bug, need to set this register.
	 *
	 * SGRF_DDR_RGN_BYPS:
	 * After the PD_CENTER suspend/resume, the DDR region
	 * related registers in the SGRF will be reset, so we
	 * need to re-initialize them.
	 */
	mmio_write_32(SGRF_BASE + SGRF_DDRRGN_CON0_16(16),
		      SGRF_DDR_RGN_DPLL_CLK |
		      SGRF_DDR_RGN_RTC_CLK |
		      SGRF_DDR_RGN_BYPS);
}

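/*
 * Drive the CRU soft resets for the DDR controller (@ctl) and DDR PHY
 * (@phy) of @channel: 1 asserts the reset, 0 releases it.
 */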
static __pmusramfunc void rkclk_ddr_reset(uint32_t channel, uint32_t ctl,
		uint32_t phy)
{
	channel &= 0x1;
	ctl &= 0x1;
	phy &= 0x1;
	mmio_write_32(CRU_BASE + CRU_SOFTRST_CON(4),
		      CRU_SFTRST_DDR_CTRL(channel, ctl) |
		      CRU_SFTRST_DDR_PHY(channel, phy));
}

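/*
 * Pulse the DDR resets for channel @ch: assert both controller and PHY
 * resets, release the PHY reset, then release the controller reset, with a
 * short delay after each step.
 */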
static __pmusramfunc void phy_pctrl_reset(uint32_t ch)
{
	rkclk_ddr_reset(ch, 1, 1);
	sram_udelay(10);
	rkclk_ddr_reset(ch, 1, 0);
	sram_udelay(10);
	rkclk_ddr_reset(ch, 0, 0);
	sram_udelay(10);
}

static __pmusramfunc void set_cs_training_index(uint32_t ch, uint32_t rank)
{
	uint32_t byte;

	/* PHY_8/136/264/392 phy_per_cs_training_index_X 1bit offset_24 */
	for (byte = 0; byte < 4; byte++)
		mmio_clrsetbits_32(PHY_REG(ch, 8 + (128 * byte)), 0x1 << 24,
				   rank << 24);
}

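/*
 * Point the per-CS training index at @rank, but only when per-CS training
 * is enabled in PHY_84.
 */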
static __pmusramfunc void select_per_cs_training_index(uint32_t ch,
		uint32_t rank)
{
	/* PHY_84 PHY_PER_CS_TRAINING_EN_0 1bit offset_16 */
	if ((mmio_read_32(PHY_REG(ch, 84)) >> 16) & 1)
		set_cs_training_index(ch, rank);
}

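/*
 * Enable the per-CS training multicast for each byte lane, override the
 * write-leveling value in PHY_63/191/319/447 with 0x200, then request a
 * controller update (CTL_200 ctrlupd_req) so the override takes effect.
 */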
static __pmusramfunc void override_write_leveling_value(uint32_t ch)
{
	uint32_t byte;

	for (byte = 0; byte < 4; byte++) {
		/*
		 * PHY_8/136/264/392
		 * phy_per_cs_training_multicast_en_X 1bit offset_16
		 */
		mmio_clrsetbits_32(PHY_REG(ch, 8 + (128 * byte)), 0x1 << 16,
				   1 << 16);
		mmio_clrsetbits_32(PHY_REG(ch, 63 + (128 * byte)),
				   0xffffu << 16,
				   0x200 << 16);
	}

	/* CTL_200 ctrlupd_req 1bit offset_8 */
	mmio_clrsetbits_32(CTL_REG(ch, 200), 0x1 << 8, 0x1 << 8);
}

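/*
 * Run the PI training sequences selected by @training_flag (CA training,
 * write leveling, read gate training, read leveling, write DQ leveling) on
 * channel @ch. PI_FULL_TRAINING expands to the set supported by the DRAM
 * type. Returns 0 on success, -1 if the PI or a PHY observer reports an
 * error.
 */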
static __pmusramfunc int data_training(uint32_t ch,
		struct rk3399_sdram_params *sdram_params,
		uint32_t training_flag)
{
	uint32_t obs_0, obs_1, obs_2, obs_3, obs_err = 0;
	uint32_t rank = sdram_params->ch[ch].rank;
	uint32_t rank_mask;
	uint32_t i, tmp;

	if (sdram_params->dramtype == LPDDR4)
		rank_mask = (rank == 1) ? 0x5 : 0xf;
	else
		rank_mask = (rank == 1) ? 0x1 : 0x3;

	/* PHY_927 PHY_PAD_DQS_DRIVE  RPULL offset_22 */
	mmio_setbits_32(PHY_REG(ch, 927), (1 << 22));

	if (training_flag == PI_FULL_TRAINING) {
		if (sdram_params->dramtype == LPDDR4) {
			training_flag = PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING |
					PI_WDQ_LEVELING;
		} else if (sdram_params->dramtype == LPDDR3) {
			training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING;
		} else if (sdram_params->dramtype == DDR3) {
			training_flag = PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING;
		}
	}

	/* CA training (LPDDR4, LPDDR3 support) */
	if ((training_flag & PI_CA_TRAINING) == PI_CA_TRAINING) {
		for (i = 0; i < 4; i++) {
			if (!(rank_mask & (1 << i)))
				continue;

			select_per_cs_training_index(ch, i);
			/* PI_100 PI_CALVL_EN:RW:8:2 */
			mmio_clrsetbits_32(PI_REG(ch, 100), 0x3 << 8, 0x2 << 8);

			/* PI_92 PI_CALVL_REQ:WR:16:1,PI_CALVL_CS:RW:24:2 */
			mmio_clrsetbits_32(PI_REG(ch, 92),
					   (0x1 << 16) | (0x3 << 24),
					   (0x1 << 16) | (i << 24));
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * check the status obs
				 * PHY_532/660/788 phy_adr_calvl_obs1_:0:32
				 */
				obs_0 = mmio_read_32(PHY_REG(ch, 532));
				obs_1 = mmio_read_32(PHY_REG(ch, 660));
				obs_2 = mmio_read_32(PHY_REG(ch, 788));
				if (((obs_0 >> 30) & 0x3) ||
				    ((obs_1 >> 30) & 0x3) ||
				    ((obs_2 >> 30) & 0x3))
					obs_err = 1;
				if ((((tmp >> 11) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 5) & 0x1) == 0x0) &&
				    (obs_err == 0))
					break;
				else if ((((tmp >> 5) & 0x1) == 0x1) ||
					 (obs_err == 1))
					return -1;
			}
			/* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 100), 0x3 << 8);
	}

	/* write leveling (LPDDR4, LPDDR3, DDR3 support) */
	if ((training_flag & PI_WRITE_LEVELING) == PI_WRITE_LEVELING) {
		for (i = 0; i < rank; i++) {
			select_per_cs_training_index(ch, i);
			/* PI_60 PI_WRLVL_EN:RW:8:2 */
			mmio_clrsetbits_32(PI_REG(ch, 60), 0x3 << 8, 0x2 << 8);
			/* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 59),
					   (0x1 << 8) | (0x3 << 16),
					   (0x1 << 8) | (i << 16));

			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * check the status obs; on error, leveling
				 * may never complete
				 * PHY_40/168/296/424
				 * phy_wrlvl_status_obs_X:0:13
				 */
				obs_0 = mmio_read_32(PHY_REG(ch, 40));
				obs_1 = mmio_read_32(PHY_REG(ch, 168));
				obs_2 = mmio_read_32(PHY_REG(ch, 296));
				obs_3 = mmio_read_32(PHY_REG(ch, 424));
				if (((obs_0 >> 12) & 0x1) ||
				    ((obs_1 >> 12) & 0x1) ||
				    ((obs_2 >> 12) & 0x1) ||
				    ((obs_3 >> 12) & 0x1))
					obs_err = 1;
				if ((((tmp >> 10) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 4) & 0x1) == 0x0) &&
				    (obs_err == 0))
					break;
				else if ((((tmp >> 4) & 0x1) == 0x1) ||
					 (obs_err == 1))
					return -1;
			}

			/* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		override_write_leveling_value(ch);
		mmio_clrbits_32(PI_REG(ch, 60), 0x3 << 8);
	}

	/* read gate training (LPDDR4, LPDDR3, DDR3 support) */
	if ((training_flag & PI_READ_GATE_TRAINING) == PI_READ_GATE_TRAINING) {
		for (i = 0; i < rank; i++) {
			select_per_cs_training_index(ch, i);
			/* PI_80 PI_RDLVL_GATE_EN:RW:24:2 */
			mmio_clrsetbits_32(PI_REG(ch, 80), 0x3 << 24,
					   0x2 << 24);
			/*
			 * PI_74 PI_RDLVL_GATE_REQ:WR:16:1
			 * PI_RDLVL_CS:RW:24:2
			 */
			mmio_clrsetbits_32(PI_REG(ch, 74),
					   (0x1 << 16) | (0x3 << 24),
					   (0x1 << 16) | (i << 24));

			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * check the status obs
				 * PHY_43/171/299/427
				 *     PHY_GTLVL_STATUS_OBS_x:16:8
				 */
				obs_0 = mmio_read_32(PHY_REG(ch, 43));
				obs_1 = mmio_read_32(PHY_REG(ch, 171));
				obs_2 = mmio_read_32(PHY_REG(ch, 299));
				obs_3 = mmio_read_32(PHY_REG(ch, 427));
				if (((obs_0 >> (16 + 6)) & 0x3) ||
				    ((obs_1 >> (16 + 6)) & 0x3) ||
				    ((obs_2 >> (16 + 6)) & 0x3) ||
				    ((obs_3 >> (16 + 6)) & 0x3))
					obs_err = 1;
				if ((((tmp >> 9) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 3) & 0x1) == 0x0) &&
				    (obs_err == 0))
					break;
				else if ((((tmp >> 3) & 0x1) == 0x1) ||
					 (obs_err == 1))
					return -1;
			}
			/* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 80), 0x3 << 24);
	}

	/* read leveling (LPDDR4, LPDDR3, DDR3 support) */
	if ((training_flag & PI_READ_LEVELING) == PI_READ_LEVELING) {
		for (i = 0; i < rank; i++) {
			select_per_cs_training_index(ch, i);
			/* PI_80 PI_RDLVL_EN:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 80), 0x3 << 16,
					   0x2 << 16);
			/* PI_74 PI_RDLVL_REQ:WR:8:1,PI_RDLVL_CS:RW:24:2 */
			mmio_clrsetbits_32(PI_REG(ch, 74),
					   (0x1 << 8) | (0x3 << 24),
					   (0x1 << 8) | (i << 24));
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * make sure the status obs does not report
				 * an error bit
				 * PHY_46/174/302/430
				 *     phy_rdlvl_status_obs_X:16:8
				 */
				if ((((tmp >> 8) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 2) & 0x1) == 0x0))
					break;
				else if (((tmp >> 2) & 0x1) == 0x1)
					return -1;
			}
			/* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 80), 0x3 << 16);
	}

	/* wdq leveling (LPDDR4 support) */
	if ((training_flag & PI_WDQ_LEVELING) == PI_WDQ_LEVELING) {
		for (i = 0; i < 4; i++) {
			if (!(rank_mask & (1 << i)))
				continue;

			select_per_cs_training_index(ch, i);
			/*
			 * disable PI_WDQLVL_VREF_EN before wdq leveling?
			 * PI_181 PI_WDQLVL_VREF_EN:RW:8:1
			 */
			mmio_clrbits_32(PI_REG(ch, 181), 0x1 << 8);
			/* PI_124 PI_WDQLVL_EN:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 124), 0x3 << 16,
					   0x2 << 16);
			/* PI_121 PI_WDQLVL_REQ:WR:8:1,PI_WDQLVL_CS:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 121),
					   (0x1 << 8) | (0x3 << 16),
					   (0x1 << 8) | (i << 16));
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;
				if ((((tmp >> 12) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 6) & 0x1) == 0x0))
					break;
				else if (((tmp >> 6) & 0x1) == 0x1)
					return -1;
			}
			/* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 124), 0x3 << 16);
	}

	/* PHY_927 PHY_PAD_DQS_DRIVE  RPULL offset_22 */
	mmio_clrbits_32(PHY_REG(ch, 927), (1 << 22));

	return 0;
}

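/*
 * Program the NoC memory scheduler (MSCH) for @channel: write the
 * ddrconfig selector and the CS0/CS1 device sizes derived from the channel
 * geometry (row/col/bank/bus width, with the 3/4 row option applied).
 */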
static __pmusramfunc void set_ddrconfig(
		struct rk3399_sdram_params *sdram_params,
		unsigned char channel, uint32_t ddrconfig)
{
	/* only need to set ddrconfig */
	struct rk3399_sdram_channel *ch = &sdram_params->ch[channel];
	unsigned int cs0_cap = 0;
	unsigned int cs1_cap = 0;

	cs0_cap = (1 << (ch->cs0_row + ch->col + ch->bk + ch->bw - 20));
	if (ch->rank > 1)
		cs1_cap = cs0_cap >> (ch->cs0_row - ch->cs1_row);
	if (ch->row_3_4) {
		cs0_cap = cs0_cap * 3 / 4;
		cs1_cap = cs1_cap * 3 / 4;
	}

	mmio_write_32(MSCH_BASE(channel) + MSCH_DEVICECONF,
		      ddrconfig | (ddrconfig << 6));
	mmio_write_32(MSCH_BASE(channel) + MSCH_DEVICESIZE,
		      ((cs0_cap / 32) & 0xff) | (((cs1_cap / 32) & 0xff) << 8));
}

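/*
 * Apply the saved NoC timings and the channel stride for every populated
 * channel, disable the rank 1 memory clock on single-rank channels, and
 * restore the reboot-hold and global reset configuration.
 */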
static __pmusramfunc void dram_all_config(
		struct rk3399_sdram_params *sdram_params)
{
	unsigned int i;

	for (i = 0; i < 2; i++) {
		struct rk3399_sdram_channel *info = &sdram_params->ch[i];
		struct rk3399_msch_timings *noc = &info->noc_timings;

		if (sdram_params->ch[i].col == 0)
			continue;

		mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGA0,
			      noc->ddrtiminga0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGB0,
			      noc->ddrtimingb0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGC0,
			      noc->ddrtimingc0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DEVTODEV0,
			      noc->devtodev0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DDRMODE, noc->ddrmode.d32);

		/* rank 1 memory clock disable (dfi_dram_clk_disable = 1) */
		if (sdram_params->ch[i].rank == 1)
			mmio_setbits_32(CTL_REG(i, 276), 1 << 17);
	}

	DDR_STRIDE(sdram_params->stride);

	/* reboot hold register set */
	mmio_write_32(PMUCRU_BASE + CRU_PMU_RSTHOLD_CON(1),
		      CRU_PMU_SGRF_RST_RLS |
		      PRESET_GPIO0_HOLD(1) |
		      PRESET_GPIO1_HOLD(1));
	mmio_clrsetbits_32(CRU_BASE + CRU_GLB_RST_CON, 0x3, 0x3);
}

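/*
 * Reload the saved controller (CTL), PI and PHY settings for channel @ch,
 * start the PI and the controller, and poll the PHY status registers for
 * lock (skipped when the PLL is bypassed) before restoring the remaining
 * PHY registers.
 */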
static __pmusramfunc void pctl_cfg(uint32_t ch,
		struct rk3399_sdram_params *sdram_params)
{
	const uint32_t *params_ctl = sdram_params->pctl_regs.denali_ctl;
	const uint32_t *params_pi = sdram_params->pi_regs.denali_pi;
	const struct rk3399_ddr_publ_regs *phy_regs = &sdram_params->phy_regs;
	uint32_t tmp, tmp1, tmp2, i;

	/*
	 * Workaround controller bug:
	 * Do not program DRAM_CLASS until NO_PHY_IND_TRAIN_INT is programmed
	 */
	sram_regcpy(CTL_REG(ch, 1), (uintptr_t)&params_ctl[1],
		    CTL_REG_NUM - 1);
	mmio_write_32(CTL_REG(ch, 0), params_ctl[0]);
	sram_regcpy(PI_REG(ch, 0), (uintptr_t)&params_pi[0],
		    PI_REG_NUM);

	sram_regcpy(PHY_REG(ch, 910), (uintptr_t)&phy_regs->phy896[910 - 896],
		    3);

	mmio_clrsetbits_32(CTL_REG(ch, 68), PWRUP_SREFRESH_EXIT,
				PWRUP_SREFRESH_EXIT);

	/* PHY_DLL_RST_EN */
	mmio_clrsetbits_32(PHY_REG(ch, 957), 0x3 << 24, 1 << 24);
	dmbst();

	mmio_setbits_32(PI_REG(ch, 0), START);
	mmio_setbits_32(CTL_REG(ch, 0), START);

	/* wait for lock */
	while (1) {
		tmp = mmio_read_32(PHY_REG(ch, 920));
		tmp1 = mmio_read_32(PHY_REG(ch, 921));
		tmp2 = mmio_read_32(PHY_REG(ch, 922));
		if ((((tmp >> 16) & 0x1) == 0x1) &&
		     (((tmp1 >> 16) & 0x1) == 0x1) &&
		     (((tmp1 >> 0) & 0x1) == 0x1) &&
		     (((tmp2 >> 0) & 0x1) == 0x1))
			break;
		/* if the PLL is bypassed, there is no need to wait for lock */
		if (mmio_read_32(PHY_REG(ch, 911)) & 0x1)
			break;
	}

	sram_regcpy(PHY_REG(ch, 896), (uintptr_t)&phy_regs->phy896[0], 63);

	for (i = 0; i < 4; i++)
		sram_regcpy(PHY_REG(ch, 128 * i),
			    (uintptr_t)&phy_regs->phy0[0], 91);

	for (i = 0; i < 3; i++)
		sram_regcpy(PHY_REG(ch, 512 + 128 * i),
				(uintptr_t)&phy_regs->phy512[i][0], 38);
}

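/*
 * Tell the CIC to switch the DRAM to the other frequency index, then re-run
 * full training on every channel. Returns -1 if training fails.
 */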
static __pmusramfunc int dram_switch_to_next_index(
		struct rk3399_sdram_params *sdram_params)
{
	uint32_t ch, ch_count;
	uint32_t fn = ((mmio_read_32(CTL_REG(0, 111)) >> 16) + 1) & 0x1;

	mmio_write_32(CIC_BASE + CIC_CTRL0,
		      (((0x3 << 4) | (1 << 2) | 1) << 16) |
		      (fn << 4) | (1 << 2) | 1);
	while (!(mmio_read_32(CIC_BASE + CIC_STATUS0) & (1 << 2)))
		;

	mmio_write_32(CIC_BASE + CIC_CTRL0, 0x20002);
	while (!(mmio_read_32(CIC_BASE + CIC_STATUS0) & (1 << 0)))
		;

	ch_count = sdram_params->num_channels;

	/* LPDDR4 f2 can't do training; all training would fail */
	for (ch = 0; ch < ch_count; ch++) {
		mmio_clrsetbits_32(PHY_REG(ch, 896), (0x3 << 8) | 1,
				   fn << 8);

		/* fail if data_training fails */
		if (data_training(ch, sdram_params, PI_FULL_TRAINING))
			return -1;
	}

	return 0;
}

/*
 * Needs to be done for both channels at once in case of a shared reset signal
 * between channels.
 */
static __pmusramfunc int pctl_start(uint32_t channel_mask,
		struct rk3399_sdram_params *sdram_params)
{
	uint32_t count;
	uint32_t byte;

	mmio_setbits_32(CTL_REG(0, 68), PWRUP_SREFRESH_EXIT);
	mmio_setbits_32(CTL_REG(1, 68), PWRUP_SREFRESH_EXIT);

	/* need to release the IO retention before the controller START */
	if (channel_mask & (1 << 0))
		mmio_setbits_32(PMU_BASE + PMU_PWRMODE_CON, (1 << 19));
	if (channel_mask & (1 << 1))
		mmio_setbits_32(PMU_BASE + PMU_PWRMODE_CON, (1 << 23));

	/* PHY_DLL_RST_EN */
	if (channel_mask & (1 << 0))
		mmio_clrsetbits_32(PHY_REG(0, 957), 0x3 << 24,
				   0x2 << 24);
	if (channel_mask & (1 << 1))
		mmio_clrsetbits_32(PHY_REG(1, 957), 0x3 << 24,
				   0x2 << 24);

	/* check ERROR bit */
	if (channel_mask & (1 << 0)) {
		count = 0;
		while (!(mmio_read_32(CTL_REG(0, 203)) & (1 << 3))) {
			/* CKE is still low; poll every 100us, up to 10ms */
			if (count > 100)
				return -1;

			sram_udelay(100);
			count++;
		}

		mmio_clrbits_32(CTL_REG(0, 68), PWRUP_SREFRESH_EXIT);

		/* Restore the PHY_RX_CAL_DQS value */
		for (byte = 0; byte < 4; byte++)
			mmio_clrsetbits_32(PHY_REG(0, 57 + 128 * byte),
					   0xfff << 16,
					   sdram_params->rx_cal_dqs[0][byte]);
	}
	if (channel_mask & (1 << 1)) {
		count = 0;
		while (!(mmio_read_32(CTL_REG(1, 203)) & (1 << 3))) {
			/* CKE is still low; poll every 100us, up to 10ms */
			if (count > 100)
				return -1;

			sram_udelay(100);
			count++;
		}

		mmio_clrbits_32(CTL_REG(1, 68), PWRUP_SREFRESH_EXIT);

		/* Restore the PHY_RX_CAL_DQS value */
		for (byte = 0; byte < 4; byte++)
			mmio_clrsetbits_32(PHY_REG(1, 57 + 128 * byte),
					   0xfff << 16,
					   sdram_params->rx_cal_dqs[1][byte]);
	}

	return 0;
}

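/*
 * Reprogram PLL @pll_id from the saved CRU_PLL_CON values in @src: drop the
 * PLL into slow mode, restore CON0-CON5, then wait for the lock bit in CON2.
 */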
__pmusramfunc static void pmusram_restore_pll(int pll_id, uint32_t *src)
{
	mmio_write_32((CRU_BASE + CRU_PLL_CON(pll_id, 3)), PLL_SLOW_MODE);

	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 0), src[0] | REG_SOC_WMSK);
	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 1), src[1] | REG_SOC_WMSK);
	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 2), src[2]);
	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 4), src[4] | REG_SOC_WMSK);
	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 5), src[5] | REG_SOC_WMSK);

	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 3), src[3] | REG_SOC_WMSK);

	while ((mmio_read_32(CRU_BASE + CRU_PLL_CON(pll_id, 2)) &
		(1U << 31)) == 0x0)
		;
}

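/*
 * Enable WDT0 for the resume path: route it to the first global reset, set
 * an ~8 second timeout, kick it once, then ungate it and de-idle the
 * PD_ALIVE bus so it can actually run.
 */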
__pmusramfunc static void pmusram_enable_watchdog(void)
{
	/* Make the watchdog use the first global reset. */
	mmio_write_32(CRU_BASE + CRU_GLB_RST_CON, 1 << 1);

	/*
	 * This gives the system ~8 seconds before reset. The pclk for the
	 * watchdog is 4MHz on reset. The value of 0x9 in WDT_TORR means that
	 * the watchdog will wait for 0x1ffffff cycles before resetting.
	 */
	mmio_write_32(WDT0_BASE + 4, 0x9);

	/* Enable the watchdog */
	mmio_setbits_32(WDT0_BASE, 0x1);

	/* Kick the watchdog by writing the restart magic value to WDT_CRR. */
	mmio_write_32(WDT0_BASE + 0xc, 0x76);

	secure_watchdog_ungate();

	/* The watchdog is in PD_ALIVE, so de-idle that bus. */
	mmio_clrbits_32(PMU_BASE + PMU_BUS_CLR, PMU_CLR_ALIVE);
}

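/*
 * Called on the suspend path: save the DPLL configuration and DDR clock
 * selection to PMUSRAM and snapshot the CTL, PI and PHY registers into
 * sdram_config so that dmc_resume() can restore them.
 */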
void dmc_suspend(void)
{
	struct rk3399_sdram_params *sdram_params = &sdram_config;
	struct rk3399_ddr_publ_regs *phy_regs;
	uint32_t *params_ctl;
	uint32_t *params_pi;
	uint32_t refdiv, postdiv2, postdiv1, fbdiv;
	uint32_t ch, byte, i;

	phy_regs = &sdram_params->phy_regs;
	params_ctl = sdram_params->pctl_regs.denali_ctl;
	params_pi = sdram_params->pi_regs.denali_pi;

	/* save the DPLL and DDR clock selection register values to PMUSRAM */
	cru_clksel_con6 = mmio_read_32(CRU_BASE + CRU_CLKSEL_CON6);
	for (i = 0; i < PLL_CON_COUNT; i++)
		dpll_data[i] = mmio_read_32(CRU_BASE + CRU_PLL_CON(DPLL_ID, i));

	fbdiv = dpll_data[0] & 0xfff;
	postdiv2 = POSTDIV2_DEC(dpll_data[1]);
	postdiv1 = POSTDIV1_DEC(dpll_data[1]);
	refdiv = REFDIV_DEC(dpll_data[1]);

	sdram_params->ddr_freq = ((fbdiv * 24) /
				(refdiv * postdiv1 * postdiv2)) * MHz;

	INFO("sdram_params->ddr_freq = %u\n", sdram_params->ddr_freq);
	sdram_params->odt = (((mmio_read_32(PHY_REG(0, 5)) >> 16) &
			       0x7) != 0) ? 1 : 0;

	/* copy the CTL, PI and PHY registers */
	dram_regcpy((uintptr_t)&params_ctl[0], CTL_REG(0, 0), CTL_REG_NUM);

	/* mask DENALI_CTL_00_DATA.START; only copy here, trigger later */
	params_ctl[0] &= ~(0x1 << 0);

	dram_regcpy((uintptr_t)&params_pi[0], PI_REG(0, 0),
		    PI_REG_NUM);

	/* mask DENALI_PI_00_DATA.START; only copy here, trigger later */
	params_pi[0] &= ~(0x1 << 0);

	dram_regcpy((uintptr_t)&phy_regs->phy0[0],
			    PHY_REG(0, 0), 91);

	for (i = 0; i < 3; i++)
		dram_regcpy((uintptr_t)&phy_regs->phy512[i][0],
			    PHY_REG(0, 512 + 128 * i), 38);

	dram_regcpy((uintptr_t)&phy_regs->phy896[0], PHY_REG(0, 896), 63);

	for (ch = 0; ch < sdram_params->num_channels; ch++) {
		for (byte = 0; byte < 4; byte++)
			sdram_params->rx_cal_dqs[ch][byte] = (0xfff << 16) &
				mmio_read_32(PHY_REG(ch, 57 + byte * 128));
	}

	/* set DENALI_PHY_957_DATA.PHY_DLL_RST_EN = 0x1 */
	phy_regs->phy896[957 - 896] &= ~(0x3 << 24);
	phy_regs->phy896[957 - 896] |= 1 << 24;
	phy_regs->phy896[0] |= 1;
	phy_regs->phy896[0] &= ~(0x3 << 8);
}

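/*
 * Bring the DRAM back up after system suspend. Runs entirely from PMUSRAM
 * and retries the controller bring-up and training until every populated
 * channel trains successfully.
 */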
__pmusramfunc void dmc_resume(void)
{
	struct rk3399_sdram_params *sdram_params = &sdram_config;
	uint32_t channel_mask = 0;
	uint32_t channel;

	pmusram_enable_watchdog();
	pmu_sgrf_rst_hld_release();
	restore_pmu_rsthold();
	sram_secure_timer_init();

	/*
	 * The DDR clock was switched to the ABPLL on suspend; switch it
	 * back to the DPLL here.
	 */
	mmio_write_32(CRU_BASE + CRU_CLKSEL_CON6,
			cru_clksel_con6 | REG_SOC_WMSK);
	pmusram_restore_pll(DPLL_ID, dpll_data);

	configure_sgrf();

retry:
	for (channel = 0; channel < sdram_params->num_channels; channel++) {
		phy_pctrl_reset(channel);
		pctl_cfg(channel, sdram_params);
	}

	for (channel = 0; channel < 2; channel++) {
		if (sdram_params->ch[channel].col)
			channel_mask |= 1 << channel;
	}

	if (pctl_start(channel_mask, sdram_params) < 0)
		goto retry;

	for (channel = 0; channel < sdram_params->num_channels; channel++) {
		/* LPDDR2/LPDDR3 must wait for DAI to complete, max 10us */
		if (sdram_params->dramtype == LPDDR3)
			sram_udelay(10);

		/* If training fails, retry from the start. */
		if (data_training(channel, sdram_params, PI_FULL_TRAINING))
			goto retry;

		set_ddrconfig(sdram_params, channel,
			      sdram_params->ch[channel].ddrconfig);
	}

	dram_all_config(sdram_params);

	/* Switch to index 1 and prepare for DDR frequency switch. */
	dram_switch_to_next_index(sdram_params);
}