1 /*
2  * Copyright (c) 2019, Intel Corporation. All rights reserved.
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
7 #include <errno.h>
8 #include <lib/mmio.h>
9 #include <lib/utils.h>
10 #include <common/debug.h>
11 #include <drivers/delay_timer.h>
12 #include <platform_def.h>
13 
14 #include "agilex_memory_controller.h"
15 
16 #define ALT_CCU_NOC_DI_SET_MSK		0x10
17 
18 #define DDR_READ_LATENCY_DELAY		40
19 #define MAX_MEM_CAL_RETRY		3
20 #define PRE_CALIBRATION_DELAY		1
21 #define POST_CALIBRATION_DELAY		1
22 #define TIMEOUT_EMIF_CALIBRATION	1000
23 #define CLEAR_EMIF_DELAY		1000
24 #define CLEAR_EMIF_TIMEOUT		1000
25 
26 #define DDR_CONFIG(A, B, C, R)	(((A) << 24) | ((B) << 16) | ((C) << 8) | (R))
27 #define DDR_CONFIG_ELEMENTS	(sizeof(ddr_config)/sizeof(uint32_t))
28 
29 /* tWR = Min. 15ns constant, see JEDEC standard eg. DDR4 is JESD79-4.pdf */
30 #define tWR_IN_NS 15
31 
32 void configure_hmc_adaptor_regs(void);
33 void configure_ddr_sched_ctrl_regs(void);
34 
/* The following are the supported configurations */
uint32_t ddr_config[] = {
	/* DDR_CONFIG(Address order,Bank,Column,Row) */
	/* List for DDR3 or LPDDR3 (pinout order > chip, row, bank, column) */
	DDR_CONFIG(0, 3, 10, 12),
	DDR_CONFIG(0, 3,  9, 13),
	DDR_CONFIG(0, 3, 10, 13),
	DDR_CONFIG(0, 3,  9, 14),
	DDR_CONFIG(0, 3, 10, 14),
	DDR_CONFIG(0, 3, 10, 15),
	DDR_CONFIG(0, 3, 11, 14),
	DDR_CONFIG(0, 3, 11, 15),
	DDR_CONFIG(0, 3, 10, 16),
	DDR_CONFIG(0, 3, 11, 16),
	DDR_CONFIG(0, 3, 12, 15),	/* 0xa */
	/* List for DDR4 only (pinout order > chip, bank, row, column) */
	DDR_CONFIG(1, 3, 10, 14),
	DDR_CONFIG(1, 4, 10, 14),
	DDR_CONFIG(1, 3, 10, 15),
	DDR_CONFIG(1, 4, 10, 15),
	DDR_CONFIG(1, 3, 10, 16),
	DDR_CONFIG(1, 4, 10, 16),
	DDR_CONFIG(1, 3, 10, 17),
	DDR_CONFIG(1, 4, 10, 17),
	/*
	 * The index of the matching entry is written to the DDR scheduler's
	 * DDRCONF register by configure_ddr_sched_ctrl_regs(); entry 0 is
	 * effectively reserved because match_ddr_conf() returns 0 for
	 * "no match" as well.
	 */
};
60 
match_ddr_conf(uint32_t ddr_conf)61 static int match_ddr_conf(uint32_t ddr_conf)
62 {
63 	int i;
64 
65 	for (i = 0; i < DDR_CONFIG_ELEMENTS; i++) {
66 		if (ddr_conf == ddr_config[i])
67 			return i;
68 	}
69 	return 0;
70 }
71 
check_hmc_clk(void)72 static int check_hmc_clk(void)
73 {
74 	unsigned long timeout = 0;
75 	uint32_t hmc_clk;
76 
77 	do {
78 		hmc_clk = mmio_read_32(AGX_SYSMGR_CORE_HMC_CLK);
79 		if (hmc_clk & AGX_SYSMGR_CORE_HMC_CLK_STATUS)
80 			break;
81 		udelay(1);
82 	} while (++timeout < 1000);
83 	if (timeout >= 1000)
84 		return -ETIMEDOUT;
85 
86 	return 0;
87 }
88 
clear_emif(void)89 static int clear_emif(void)
90 {
91 	uint32_t data;
92 	unsigned long timeout;
93 
94 	mmio_write_32(AGX_MPFE_HMC_ADP_RSTHANDSHAKECTRL, 0);
95 
96 	timeout = 0;
97 	do {
98 		data = mmio_read_32(AGX_MPFE_HMC_ADP_RSTHANDSHAKESTAT);
99 		if ((data & AGX_MPFE_HMC_ADP_RSTHANDSHAKESTAT_SEQ2CORE) == 0)
100 			break;
101 		udelay(CLEAR_EMIF_DELAY);
102 	} while (++timeout < CLEAR_EMIF_TIMEOUT);
103 	if (timeout >= CLEAR_EMIF_TIMEOUT)
104 		return -ETIMEDOUT;
105 
106 	return 0;
107 }
108 
mem_calibration(void)109 static int mem_calibration(void)
110 {
111 	int status;
112 	uint32_t data;
113 	unsigned long timeout;
114 	unsigned long retry = 0;
115 
116 	udelay(PRE_CALIBRATION_DELAY);
117 
118 	do {
119 		if (retry != 0)
120 			INFO("DDR: Retrying DRAM calibration\n");
121 
122 		timeout = 0;
123 		do {
124 			data = mmio_read_32(AGX_MPFE_HMC_ADP_DDRCALSTAT);
125 			if (AGX_MPFE_HMC_ADP_DDRCALSTAT_CAL(data) == 1)
126 				break;
127 			udelay(500);
128 		} while (++timeout < TIMEOUT_EMIF_CALIBRATION);
129 
130 		if (AGX_MPFE_HMC_ADP_DDRCALSTAT_CAL(data) == 0) {
131 			status = clear_emif();
132 			if (status)
133 				ERROR("Failed to clear Emif\n");
134 		} else {
135 			break;
136 		}
137 	} while (++retry < MAX_MEM_CAL_RETRY);
138 
139 	if (AGX_MPFE_HMC_ADP_DDRCALSTAT_CAL(data) == 0) {
140 		ERROR("DDR: DRAM calibration failed.\n");
141 		status = -EIO;
142 	} else {
143 		INFO("DDR: DRAM calibration success.\n");
144 		status = 0;
145 	}
146 
147 	udelay(POST_CALIBRATION_DELAY);
148 
149 	return status;
150 }
151 
/*
 * Bring up the hard memory controller: verify its clock is running,
 * calibrate DRAM, then program the HMC adaptor registers.
 *
 * Returns 0 on success or the negative error code from the failing step.
 */
int init_hard_memory_controller(void)
{
	int ret;

	ret = check_hmc_clk();
	if (ret != 0) {
		ERROR("DDR: Error, HMC clock not running\n");
		return ret;
	}

	ret = mem_calibration();
	if (ret != 0) {
		ERROR("DDR: Memory Calibration Failed\n");
		return ret;
	}

	configure_hmc_adaptor_regs();

	return 0;
}
172 
/*
 * Program the HPS NOC DDR scheduler from the IOHMC's calibrated timing
 * registers.
 *
 * Reads the DRAM geometry and timing values out of the IOHMC
 * (CTRLCFG*, DRAMADDRW, DRAMTIMING0, CALTIMING0-4/9) and derives the
 * scheduler's DDRCONF, DDRTIMING, DDRMODE, READLATENCY, ACTIVATE and
 * DEVTODEV register values.  Register write order follows the hardware
 * bring-up sequence and should not be rearranged.
 */
void configure_ddr_sched_ctrl_regs(void)
{
	uint32_t data, dram_addr_order, ddr_conf, bank, row, col,
		rd_to_miss, wr_to_miss, burst_len, burst_len_ddr_clk,
		burst_len_sched_clk, act_to_act, rd_to_wr, wr_to_rd, bw_ratio,
		t_rtp, t_rp, t_rcd, rd_latency, tw_rin_clk_cycles,
		bw_ratio_extended, auto_precharge = 0, act_to_act_bank, faw,
		faw_bank, bus_rd_to_rd, bus_rd_to_wr, bus_wr_to_rd;

	INFO("Init HPS NOC's DDR Scheduler.\n");

	/* Address ordering (DDR3/LPDDR3 vs DDR4 pinout, see ddr_config[]). */
	data = mmio_read_32(AGX_MPFE_IOHMC_CTRLCFG1);
	dram_addr_order = AGX_MPFE_IOHMC_CTRLCFG1_CFG_ADDR_ORDER(data);

	/* DRAM geometry: column/row widths and combined bank+bank-group. */
	data = mmio_read_32(AGX_MPFE_IOHMC_DRAMADDRW);

	col  = IOHMC_DRAMADDRW_COL_ADDR_WIDTH(data);
	row  = IOHMC_DRAMADDRW_ROW_ADDR_WIDTH(data);
	bank = IOHMC_DRAMADDRW_BANK_ADDR_WIDTH(data) +
		IOHMC_DRAMADDRW_BANK_GRP_ADDR_WIDTH(data);

	/*
	 * Look up the geometry in the supported-configuration table; the
	 * table index is what the scheduler's DDRCONF field expects.
	 */
	ddr_conf = match_ddr_conf(DDR_CONFIG(dram_addr_order, bank, col, row));

	if (ddr_conf) {
		mmio_clrsetbits_32(
			AGX_MPFE_DDR_MAIN_SCHED_DDRCONF,
			AGX_MPFE_DDR_MAIN_SCHED_DDRCONF_SET_MSK,
			AGX_MPFE_DDR_MAIN_SCHED_DDRCONF_SET(ddr_conf));
	} else {
		ERROR("DDR: Cannot find predefined ddrConf configuration.\n");
	}

	/* Mirror the IOHMC address-width word into the HMC adaptor. */
	mmio_write_32(AGX_MPFE_HMC_ADP(ADP_DRAMADDRWIDTH), data);

	data = mmio_read_32(AGX_MPFE_IOHMC_DRAMTIMING0);
	rd_latency = AGX_MPFE_IOHMC_REG_DRAMTIMING0_CFG_TCL(data);

	/* Calibrated timings (in memory-clock cycles) from CALTIMING0-4. */
	data = mmio_read_32(AGX_MPFE_IOHMC_CALTIMING0);
	act_to_act = ACT_TO_ACT(data);
	t_rcd = ACT_TO_RDWR(data);
	act_to_act_bank = ACT_TO_ACT_DIFF_BANK(data);

	data = mmio_read_32(AGX_MPFE_IOHMC_CALTIMING1);
	rd_to_wr = RD_TO_WR(data);
	bus_rd_to_rd = RD_TO_RD_DIFF_CHIP(data);
	bus_rd_to_wr = RD_TO_WR_DIFF_CHIP(data);

	data = mmio_read_32(AGX_MPFE_IOHMC_CALTIMING2);
	t_rtp = RD_TO_PCH(data);

	data = mmio_read_32(AGX_MPFE_IOHMC_CALTIMING3);
	wr_to_rd = CALTIMING3_WR_TO_RD(data);
	bus_wr_to_rd = CALTIMING3_WR_TO_RD_DIFF_CHIP(data);

	data = mmio_read_32(AGX_MPFE_IOHMC_CALTIMING4);
	t_rp = PCH_TO_VALID(data);

	data = mmio_read_32(AGX_MPFE_HMC_ADP(HMC_ADP_DDRIOCTRL));
	bw_ratio = ((HMC_ADP_DDRIOCTRL_IO_SIZE(data) == 0) ? 0 : 1);

	data = mmio_read_32(AGX_MPFE_IOHMC_CTRLCFG0);
	burst_len = HMC_ADP_DDRIOCTRL_CTRL_BURST_LENGTH(data);
	/* Scheduler clock runs at a quarter of the DDR data rate. */
	burst_len_ddr_clk = burst_len / 2;
	burst_len_sched_clk = ((burst_len/2) / 2);

	data = mmio_read_32(AGX_MPFE_IOHMC_CTRLCFG0);
	switch (AGX_MPFE_IOHMC_REG_CTRLCFG0_CFG_MEM_TYPE(data)) {
	case 1:
		/* DDR4 - 1333MHz */
		/* 20 (19.995) clock cycles = 15ns */
		/* Calculate with rounding */
		tw_rin_clk_cycles = (((tWR_IN_NS * 1333) % 1000) >= 500) ?
			((tWR_IN_NS * 1333) / 1000) + 1 :
			((tWR_IN_NS * 1333) / 1000);
		break;
	default:
		/* Others - 1066MHz or slower */
		/* 16 (15.990) clock cycles = 15ns */
		/* Calculate with rounding */
		tw_rin_clk_cycles = (((tWR_IN_NS * 1066) % 1000) >= 500) ?
			((tWR_IN_NS * 1066) / 1000) + 1 :
			((tWR_IN_NS * 1066) / 1000);
		break;
	}

	/* Scheduler miss penalties derived from the calibrated timings. */
	rd_to_miss = t_rtp + t_rp + t_rcd - burst_len_sched_clk;
	wr_to_miss = ((rd_latency + burst_len_ddr_clk + 2 + tw_rin_clk_cycles)
			/ 2) - rd_to_wr + t_rp + t_rcd;

	mmio_write_32(AGX_MPFE_DDR_MAIN_SCHED_DDRTIMING,
		bw_ratio << DDRTIMING_BWRATIO_OFST |
		wr_to_rd << DDRTIMING_WRTORD_OFST|
		rd_to_wr << DDRTIMING_RDTOWR_OFST |
		burst_len_sched_clk << DDRTIMING_BURSTLEN_OFST |
		wr_to_miss << DDRTIMING_WRTOMISS_OFST |
		rd_to_miss << DDRTIMING_RDTOMISS_OFST |
		act_to_act << DDRTIMING_ACTTOACT_OFST);

	data = mmio_read_32(AGX_MPFE_HMC_ADP(HMC_ADP_DDRIOCTRL));
	bw_ratio_extended = ((ADP_DDRIOCTRL_IO_SIZE(data) == 0) ? 1 : 0);

	mmio_write_32(AGX_MPFE_DDR_MAIN_SCHED_DDRMODE,
		bw_ratio_extended << DDRMODE_BWRATIOEXTENDED_OFST |
		auto_precharge << DDRMODE_AUTOPRECHARGE_OFST);

	mmio_write_32(AGX_MPFE_DDR_MAIN_SCHED_READLATENCY,
		(rd_latency / 2) + DDR_READ_LATENCY_DELAY);

	/* Four-activate-window (tFAW) in scheduler terms. */
	data = mmio_read_32(AGX_MPFE_IOHMC_CALTIMING9);
	faw = AGX_MPFE_IOHMC_CALTIMING9_ACT_TO_ACT(data);

	faw_bank = 1; // always 1 because we always have 4 bank DDR.

	mmio_write_32(AGX_MPFE_DDR_MAIN_SCHED_ACTIVATE,
		faw_bank << AGX_MPFE_DDR_MAIN_SCHED_ACTIVATE_FAWBANK_OFST |
		faw << AGX_MPFE_DDR_MAIN_SCHED_ACTIVATE_FAW_OFST |
		act_to_act_bank << AGX_MPFE_DDR_MAIN_SCHED_ACTIVATE_RRD_OFST);

	/* Chip-to-chip bus turnaround timings. */
	mmio_write_32(AGX_MPFE_DDR_MAIN_SCHED_DEVTODEV,
		((bus_rd_to_rd
			<< AGX_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSRDTORD_OFST)
			& AGX_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSRDTORD_MSK) |
		((bus_rd_to_wr
			<< AGX_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSRDTOWR_OFST)
			& AGX_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSRDTOWR_MSK) |
		((bus_wr_to_rd
			<< AGX_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSWRTORD_OFST)
			& AGX_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSWRTORD_MSK));

}
303 
get_physical_dram_size(void)304 unsigned long get_physical_dram_size(void)
305 {
306 	uint32_t data;
307 	unsigned long ram_addr_width, ram_ext_if_io_width;
308 
309 	data = mmio_read_32(AGX_MPFE_HMC_ADP_DDRIOCTRL);
310 	switch (AGX_MPFE_HMC_ADP_DDRIOCTRL_IO_SIZE(data)) {
311 	case 0:
312 		ram_ext_if_io_width = 16;
313 		break;
314 	case 1:
315 		ram_ext_if_io_width = 32;
316 		break;
317 	case 2:
318 		ram_ext_if_io_width = 64;
319 		break;
320 	default:
321 		ram_ext_if_io_width = 0;
322 		break;
323 	}
324 
325 	data = mmio_read_32(AGX_MPFE_IOHMC_REG_DRAMADDRW);
326 	ram_addr_width = IOHMC_DRAMADDRW_CFG_COL_ADDR_WIDTH(data) +
327 		IOHMC_DRAMADDRW_CFG_ROW_ADDR_WIDTH(data) +
328 		IOHMC_DRAMADDRW_CFG_BANK_ADDR_WIDTH(data) +
329 		IOHMC_DRAMADDRW_CFG_BANK_GROUP_ADDR_WIDTH(data) +
330 		IOHMC_DRAMADDRW_CFG_CS_ADDR_WIDTH(data);
331 
332 	return (1 << ram_addr_width) * (ram_ext_if_io_width / 8);
333 }
334 
335 
336 
/*
 * Program the HMC adaptor: DDR data-rate/IO width, address widths,
 * non-secure DDR firewall windows and, when enabled in the IOHMC
 * configuration, the ECC control sequence followed by a full-memory
 * scrub.
 *
 * The three ECCCTRL writes form a fixed reset-then-enable sequence and
 * must stay in this order.
 */
void configure_hmc_adaptor_regs(void)
{
	uint32_t data;
	uint32_t dram_io_width;

	/* Configure DDR data rate */
	dram_io_width = AGX_MPFE_IOHMC_NIOSRESERVE0_NIOS_RESERVE0(
		mmio_read_32(AGX_MPFE_IOHMC_REG_NIOSRESERVE0_OFST));
	/* IO width is encoded in bits [7:5] of the NIOS reserve field. */
	dram_io_width = (dram_io_width & 0xFF) >> 5;

	data = mmio_read_32(AGX_MPFE_IOHMC_CTRLCFG3);

	/* Merge in bit 2 of CTRLCFG3 (data-rate select). */
	dram_io_width |= (data & 0x4);

	mmio_write_32(AGX_MPFE_HMC_ADP_DDRIOCTRL, dram_io_width);

	/* Copy dram addr width from IOHMC to HMC ADP */
	data = mmio_read_32(AGX_MPFE_IOHMC_DRAMADDRW);
	mmio_write_32(AGX_MPFE_HMC_ADP(ADP_DRAMADDRWIDTH), data);

	/* Enable nonsecure access to DDR */
	/*
	 * NOTE(review): get_physical_dram_size() returns unsigned long but
	 * is stored in a uint32_t here; sizes >= 4GB would truncate.
	 * Confirm against platform DDR limits.
	 */
	data = get_physical_dram_size();

	if (data < AGX_DDR_SIZE)
		data = AGX_DDR_SIZE;

	/* Firewall limit registers take the last valid address (size - 1). */
	mmio_write_32(AGX_NOC_FW_DDR_SCR_MPUREGION0ADDR_LIMIT, data - 1);
	mmio_write_32(AGX_NOC_FW_DDR_SCR_MPUREGION0ADDR_LIMITEXT, 0x1f);

	mmio_write_32(AGX_NOC_FW_DDR_SCR_NONMPUREGION0ADDR_LIMIT, data - 1);

	/* Enable MPU region 0 (bit 0) and non-MPU region 0 (bit 8). */
	mmio_write_32(AGX_SOC_NOC_FW_DDR_SCR_ENABLESET, BIT(0) | BIT(8));

	/* ECC enablement */
	data = mmio_read_32(AGX_MPFE_IOHMC_REG_CTRLCFG1);
	if (data & (1 << AGX_IOHMC_CTRLCFG1_ENABLE_ECC_OFST)) {
		/* Step 1: reset ECC counters with ECC disabled. */
		mmio_clrsetbits_32(AGX_MPFE_HMC_ADP_ECCCTRL1,
			AGX_MPFE_HMC_ADP_ECCCTRL1_AUTOWB_CNT_RST_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL1_CNT_RST_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL1_ECC_EN_SET_MSK,
			AGX_MPFE_HMC_ADP_ECCCTRL1_AUTOWB_CNT_RST_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL1_CNT_RST_SET_MSK);

		/* Step 2: enable read-modify-write and auto write-back. */
		mmio_clrsetbits_32(AGX_MPFE_HMC_ADP_ECCCTRL2,
			AGX_MPFE_HMC_ADP_ECCCTRL2_OVRW_RB_ECC_EN_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL2_RMW_EN_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL2_AUTOWB_EN_SET_MSK,
			AGX_MPFE_HMC_ADP_ECCCTRL2_RMW_EN_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL2_AUTOWB_EN_SET_MSK);

		/* Step 3: release counter resets and turn ECC on. */
		mmio_clrsetbits_32(AGX_MPFE_HMC_ADP_ECCCTRL1,
			AGX_MPFE_HMC_ADP_ECCCTRL1_AUTOWB_CNT_RST_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL1_CNT_RST_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL1_ECC_EN_SET_MSK,
			AGX_MPFE_HMC_ADP_ECCCTRL1_ECC_EN_SET_MSK);
		INFO("Scrubbing ECC\n");

		/* ECC Scrubbing */
		zeromem(DRAM_BASE, DRAM_SIZE);
	} else {
		INFO("ECC is disabled.\n");
	}
}
400