/*
 * Copyright (c) 2019, Intel Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <arch_helpers.h>
#include <errno.h>
#include <lib/mmio.h>
#include <lib/utils.h>
#include <common/debug.h>
#include <drivers/delay_timer.h>
#include <platform_def.h>
#include <string.h>

#include "s10_memory_controller.h"
#include "socfpga_reset_manager.h"

#define ALT_CCU_NOC_DI_SET_MSK 0x10

#define DDR_READ_LATENCY_DELAY 40
#define MAX_MEM_CAL_RETRY		3
#define PRE_CALIBRATION_DELAY		1
#define POST_CALIBRATION_DELAY		1
#define TIMEOUT_EMIF_CALIBRATION	1000
#define CLEAR_EMIF_DELAY		1000
#define CLEAR_EMIF_TIMEOUT		1000

#define DDR_CONFIG(A, B, C, R)	(((A) << 24) | ((B) << 16) | ((C) << 8) | (R))
#define DDR_CONFIG_ELEMENTS	(sizeof(ddr_config)/sizeof(uint32_t))

/* tWR = min. 15 ns constant; see the JEDEC standard, e.g. JESD79-4 for DDR4 */
#define tWR_IN_NS 15

void configure_hmc_adaptor_regs(void);
void configure_ddr_sched_ctrl_regs(void);

/* The following are the supported configurations */
uint32_t ddr_config[] = {
	/* DDR_CONFIG(Address order,Bank,Column,Row) */
	/* List for DDR3 or LPDDR3 (pinout order > chip, row, bank, column) */
	DDR_CONFIG(0, 3, 10, 12),
	DDR_CONFIG(0, 3,  9, 13),
	DDR_CONFIG(0, 3, 10, 13),
	DDR_CONFIG(0, 3,  9, 14),
	DDR_CONFIG(0, 3, 10, 14),
	DDR_CONFIG(0, 3, 10, 15),
	DDR_CONFIG(0, 3, 11, 14),
	DDR_CONFIG(0, 3, 11, 15),
	DDR_CONFIG(0, 3, 10, 16),
	DDR_CONFIG(0, 3, 11, 16),
	DDR_CONFIG(0, 3, 12, 15),	/* 0xa */
	/* List for DDR4 only (pinout order > chip, bank, row, column) */
	DDR_CONFIG(1, 3, 10, 14),
	DDR_CONFIG(1, 4, 10, 14),
	DDR_CONFIG(1, 3, 10, 15),
	DDR_CONFIG(1, 4, 10, 15),
	DDR_CONFIG(1, 3, 10, 16),
	DDR_CONFIG(1, 4, 10, 16),
	DDR_CONFIG(1, 3, 10, 17),
	DDR_CONFIG(1, 4, 10, 17),
};

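/*
 * Look up a packed DDR_CONFIG() value in the table above and return its
 * index. Note that 0 is returned both for the first entry and for "no
 * match", so the caller cannot tell the first DDR3 entry from a failure.
 */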
static int match_ddr_conf(uint32_t ddr_conf)
{
	int i;

	for (i = 0; i < DDR_CONFIG_ELEMENTS; i++) {
		if (ddr_conf == ddr_config[i])
			return i;
	}
	return 0;
}

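/* Poll the HMC clock status bit; give up after roughly 1000 x 1 us. */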
static int check_hmc_clk(void)
{
	unsigned long timeout = 0;
	uint32_t hmc_clk;

	do {
		hmc_clk = mmio_read_32(S10_SYSMGR_CORE_HMC_CLK);
		if (hmc_clk & S10_SYSMGR_CORE_HMC_CLK_STATUS)
			break;
		udelay(1);
	} while (++timeout < 1000);
	if (timeout >= 1000)
		return -ETIMEDOUT;

	return 0;
}

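/*
 * Request an EMIF clear via the reset handshake control register and wait
 * for the SEQ2CORE acknowledge bits to drop, so a failed calibration can be
 * retried.
 */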
static int clear_emif(void)
{
	uint32_t data;
	unsigned long timeout;

	mmio_write_32(S10_MPFE_HMC_ADP_RSTHANDSHAKECTRL, 0);

	timeout = 0;
	do {
		data = mmio_read_32(S10_MPFE_HMC_ADP_RSTHANDSHAKESTAT);
		if ((data & S10_MPFE_HMC_ADP_RSTHANDSHAKESTAT_SEQ2CORE) == 0)
			break;
		udelay(CLEAR_EMIF_DELAY);
	} while (++timeout < CLEAR_EMIF_TIMEOUT);
	if (timeout >= CLEAR_EMIF_TIMEOUT)
		return -ETIMEDOUT;

	return 0;
}

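/*
 * Wait for the hard memory controller to report DRAM calibration success.
 * Each attempt polls DDRCALSTAT for up to TIMEOUT_EMIF_CALIBRATION x 500 us;
 * on failure the EMIF is cleared and calibration is retried, up to
 * MAX_MEM_CAL_RETRY attempts in total.
 */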
static int mem_calibration(void)
{
	int status = 0;
	uint32_t data;
	unsigned long timeout;
	unsigned long retry = 0;

	udelay(PRE_CALIBRATION_DELAY);

	do {
		if (retry != 0)
			INFO("DDR: Retrying DRAM calibration\n");

		timeout = 0;
		do {
			data = mmio_read_32(S10_MPFE_HMC_ADP_DDRCALSTAT);
			if (S10_MPFE_HMC_ADP_DDRCALSTAT_CAL(data) == 1)
				break;
			udelay(500);
		} while (++timeout < TIMEOUT_EMIF_CALIBRATION);

		if (S10_MPFE_HMC_ADP_DDRCALSTAT_CAL(data) == 0) {
			status = clear_emif();
			if (status)
				ERROR("Failed to clear Emif\n");
		} else {
			break;
		}
	} while (++retry < MAX_MEM_CAL_RETRY);

	if (S10_MPFE_HMC_ADP_DDRCALSTAT_CAL(data) == 0) {
		ERROR("DDR: DRAM calibration failed.\n");
		status = -EIO;
	} else {
		INFO("DDR: DRAM calibration success.\n");
		status = 0;
	}

	udelay(POST_CALIBRATION_DELAY);

	return status;
}

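/*
 * Bring up the hard memory controller: open CCU and DDR firewall access,
 * verify the HMC clock is running, release the DDR scheduler bridge from
 * reset, run DRAM calibration, then program the HMC adaptor and DDR
 * scheduler registers.
 */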
int init_hard_memory_controller(void)
{
	int status;

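	/*
	 * Clear the DI bits in the CCU routing registers for the CPU0 and
	 * IOM master ports covering the DDR/memory windows (register
	 * definitions come from the platform headers).
	 */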
	mmio_clrbits_32(S10_CCU_CPU0_MPRT_DDR, S10_CCU_NOC_DI_SET_MSK);
	mmio_clrbits_32(S10_CCU_CPU0_MPRT_MEM0, S10_CCU_NOC_DI_SET_MSK);
	mmio_clrbits_32(S10_CCU_CPU0_MPRT_MEM1A, S10_CCU_NOC_DI_SET_MSK);
	mmio_clrbits_32(S10_CCU_CPU0_MPRT_MEM1B, S10_CCU_NOC_DI_SET_MSK);
	mmio_clrbits_32(S10_CCU_CPU0_MPRT_MEM1C, S10_CCU_NOC_DI_SET_MSK);
	mmio_clrbits_32(S10_CCU_CPU0_MPRT_MEM1D, S10_CCU_NOC_DI_SET_MSK);
	mmio_clrbits_32(S10_CCU_CPU0_MPRT_MEM1E, S10_CCU_NOC_DI_SET_MSK);

	mmio_clrbits_32(S10_CCU_IOM_MPRT_MEM0, S10_CCU_NOC_DI_SET_MSK);
	mmio_clrbits_32(S10_CCU_IOM_MPRT_MEM1A, S10_CCU_NOC_DI_SET_MSK);
	mmio_clrbits_32(S10_CCU_IOM_MPRT_MEM1B, S10_CCU_NOC_DI_SET_MSK);
	mmio_clrbits_32(S10_CCU_IOM_MPRT_MEM1C, S10_CCU_NOC_DI_SET_MSK);
	mmio_clrbits_32(S10_CCU_IOM_MPRT_MEM1D, S10_CCU_NOC_DI_SET_MSK);
	mmio_clrbits_32(S10_CCU_IOM_MPRT_MEM1E, S10_CCU_NOC_DI_SET_MSK);

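	/*
	 * Program the DDR firewall (SCR): set the address limits of MPU and
	 * non-MPU region 0 and enable both regions.
	 */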
	mmio_write_32(S10_NOC_FW_DDR_SCR_MPUREGION0ADDR_LIMIT, 0xFFFF0000);
	mmio_write_32(S10_NOC_FW_DDR_SCR_MPUREGION0ADDR_LIMITEXT, 0x1F);

	mmio_write_32(S10_NOC_FW_DDR_SCR_NONMPUREGION0ADDR_LIMIT, 0xFFFF0000);
	mmio_write_32(S10_NOC_FW_DDR_SCR_NONMPUREGION0ADDR_LIMITEXT, 0x1F);
	mmio_write_32(S10_SOC_NOC_FW_DDR_SCR_ENABLE, BIT(0) | BIT(8));

	status = check_hmc_clk();
	if (status) {
		ERROR("DDR: Error, HMC clock not running\n");
		return status;
	}

	mmio_clrbits_32(SOCFPGA_RSTMGR(BRGMODRST), RSTMGR_FIELD(BRG, DDRSCH));

	status = mem_calibration();
	if (status) {
		ERROR("DDR: Memory Calibration Failed\n");
		return status;
	}

	configure_hmc_adaptor_regs();
	configure_ddr_sched_ctrl_regs();

	return 0;
}

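/*
 * Derive the DDR scheduler configuration and timing registers from the
 * address widths and timing values reported by the IOHMC.
 */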
void configure_ddr_sched_ctrl_regs(void)
{
	uint32_t data, dram_addr_order, ddr_conf, bank, row, col,
		rd_to_miss, wr_to_miss, burst_len, burst_len_ddr_clk,
		burst_len_sched_clk, act_to_act, rd_to_wr, wr_to_rd, bw_ratio,
		t_rtp, t_rp, t_rcd, rd_latency, tw_rin_clk_cycles,
		bw_ratio_extended, auto_precharge = 0, act_to_act_bank, faw,
		faw_bank, bus_rd_to_rd, bus_rd_to_wr, bus_wr_to_rd;

	INFO("Init HPS NOC's DDR Scheduler.\n");

	data = mmio_read_32(S10_MPFE_IOHMC_CTRLCFG1);
	dram_addr_order = S10_MPFE_IOHMC_CTRLCFG1_CFG_ADDR_ORDER(data);

	data = mmio_read_32(S10_MPFE_IOHMC_DRAMADDRW);

	col  = IOHMC_DRAMADDRW_COL_ADDR_WIDTH(data);
	row  = IOHMC_DRAMADDRW_ROW_ADDR_WIDTH(data);
	bank = IOHMC_DRAMADDRW_BANK_ADDR_WIDTH(data) +
		IOHMC_DRAMADDRW_BANK_GRP_ADDR_WIDTH(data);

	ddr_conf = match_ddr_conf(DDR_CONFIG(dram_addr_order, bank, col, row));

	if (ddr_conf) {
		mmio_clrsetbits_32(
			S10_MPFE_DDR_MAIN_SCHED_DDRCONF,
			S10_MPFE_DDR_MAIN_SCHED_DDRCONF_SET_MSK,
			S10_MPFE_DDR_MAIN_SCHED_DDRCONF_SET(ddr_conf));
	} else {
		ERROR("DDR: Cannot find predefined ddrConf configuration.\n");
	}

	mmio_write_32(S10_MPFE_HMC_ADP(ADP_DRAMADDRWIDTH), data);

	data = mmio_read_32(S10_MPFE_IOHMC_DRAMTIMING0);
	rd_latency = S10_MPFE_IOHMC_REG_DRAMTIMING0_CFG_TCL(data);

	data = mmio_read_32(S10_MPFE_IOHMC_CALTIMING0);
	act_to_act = ACT_TO_ACT(data);
	t_rcd = ACT_TO_RDWR(data);
	act_to_act_bank = ACT_TO_ACT_DIFF_BANK(data);

	data = mmio_read_32(S10_MPFE_IOHMC_CALTIMING1);
	rd_to_wr = RD_TO_WR(data);
	bus_rd_to_rd = RD_TO_RD_DIFF_CHIP(data);
	bus_rd_to_wr = RD_TO_WR_DIFF_CHIP(data);

	data = mmio_read_32(S10_MPFE_IOHMC_CALTIMING2);
	t_rtp = RD_TO_PCH(data);

	data = mmio_read_32(S10_MPFE_IOHMC_CALTIMING3);
	wr_to_rd = CALTIMING3_WR_TO_RD(data);
	bus_wr_to_rd = CALTIMING3_WR_TO_RD_DIFF_CHIP(data);

	data = mmio_read_32(S10_MPFE_IOHMC_CALTIMING4);
	t_rp = PCH_TO_VALID(data);

	data = mmio_read_32(S10_MPFE_HMC_ADP(HMC_ADP_DDRIOCTRL));
	bw_ratio = ((HMC_ADP_DDRIOCTRL_IO_SIZE(data) == 0) ? 0 : 1);

	data = mmio_read_32(S10_MPFE_IOHMC_CTRLCFG0);
	burst_len = HMC_ADP_DDRIOCTRL_CTRL_BURST_LENGTH(data);
	burst_len_ddr_clk = burst_len / 2;
	burst_len_sched_clk = ((burst_len/2) / 2);

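	/*
	 * Convert tWR (15 ns) to memory clock cycles with rounding:
	 * cycles = round(tWR_IN_NS * f_MHz / 1000), using the nominal
	 * frequency for the detected memory type.
	 */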
	data = mmio_read_32(S10_MPFE_IOHMC_CTRLCFG0);
	switch (S10_MPFE_IOHMC_REG_CTRLCFG0_CFG_MEM_TYPE(data)) {
	case 1:
		/* DDR4 - 1333MHz */
		/* 20 (19.995) clock cycles = 15ns */
		/* Calculate with rounding */
		tw_rin_clk_cycles = (((tWR_IN_NS * 1333) % 1000) >= 500) ?
			((tWR_IN_NS * 1333) / 1000) + 1 :
			((tWR_IN_NS * 1333) / 1000);
		break;
	default:
		/* Others - 1066MHz or slower */
		/* 16 (15.990) clock cycles = 15ns */
		/* Calculate with rounding */
		tw_rin_clk_cycles = (((tWR_IN_NS * 1066) % 1000) >= 500) ?
			((tWR_IN_NS * 1066) / 1000) + 1 :
			((tWR_IN_NS * 1066) / 1000);
		break;
	}

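	/*
	 * Scheduler page-miss penalties: a read miss costs tRTP + tRP + tRCD
	 * minus the scheduler-clock burst length; a write miss additionally
	 * accounts for the read latency, the DDR-clock burst length and tWR.
	 */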
	rd_to_miss = t_rtp + t_rp + t_rcd - burst_len_sched_clk;
	wr_to_miss = ((rd_latency + burst_len_ddr_clk + 2 + tw_rin_clk_cycles)
			/ 2) - rd_to_wr + t_rp + t_rcd;

	mmio_write_32(S10_MPFE_DDR_MAIN_SCHED_DDRTIMING,
		bw_ratio << DDRTIMING_BWRATIO_OFST |
		wr_to_rd << DDRTIMING_WRTORD_OFST |
		rd_to_wr << DDRTIMING_RDTOWR_OFST |
		burst_len_sched_clk << DDRTIMING_BURSTLEN_OFST |
		wr_to_miss << DDRTIMING_WRTOMISS_OFST |
		rd_to_miss << DDRTIMING_RDTOMISS_OFST |
		act_to_act << DDRTIMING_ACTTOACT_OFST);

	data = mmio_read_32(S10_MPFE_HMC_ADP(HMC_ADP_DDRIOCTRL));
	bw_ratio_extended = ((ADP_DDRIOCTRL_IO_SIZE(data) == 0) ? 1 : 0);

	mmio_write_32(S10_MPFE_DDR_MAIN_SCHED_DDRMODE,
		bw_ratio_extended << DDRMODE_BWRATIOEXTENDED_OFST |
		auto_precharge << DDRMODE_AUTOPRECHARGE_OFST);

	mmio_write_32(S10_MPFE_DDR_MAIN_SCHED_READLATENCY,
		(rd_latency / 2) + DDR_READ_LATENCY_DELAY);

	data = mmio_read_32(S10_MPFE_IOHMC_CALTIMING9);
	faw = S10_MPFE_IOHMC_CALTIMING9_ACT_TO_ACT(data);

	faw_bank = 1;	/* Always 1 because we always have 4 bank DDR. */

	mmio_write_32(S10_MPFE_DDR_MAIN_SCHED_ACTIVATE,
		faw_bank << S10_MPFE_DDR_MAIN_SCHED_ACTIVATE_FAWBANK_OFST |
		faw << S10_MPFE_DDR_MAIN_SCHED_ACTIVATE_FAW_OFST |
		act_to_act_bank << S10_MPFE_DDR_MAIN_SCHED_ACTIVATE_RRD_OFST);

	mmio_write_32(S10_MPFE_DDR_MAIN_SCHED_DEVTODEV,
		((bus_rd_to_rd
			<< S10_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSRDTORD_OFST)
			& S10_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSRDTORD_MSK) |
		((bus_rd_to_wr
			<< S10_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSRDTOWR_OFST)
			& S10_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSRDTOWR_MSK) |
		((bus_wr_to_rd
			<< S10_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSWRTORD_OFST)
			& S10_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSWRTORD_MSK));
}

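/*
 * Compute the physical DRAM size in bytes from the total DRAM address width
 * (column + row + bank + bank group + chip select) and the external
 * interface width reported by the HMC adaptor.
 */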
unsigned long get_physical_dram_size(void)
{
	uint32_t data;
	unsigned long ram_addr_width, ram_ext_if_io_width;

	data = mmio_read_32(S10_MPFE_HMC_ADP_DDRIOCTRL);
	switch (S10_MPFE_HMC_ADP_DDRIOCTRL_IO_SIZE(data)) {
	case 0:
		ram_ext_if_io_width = 16;
		break;
	case 1:
		ram_ext_if_io_width = 32;
		break;
	case 2:
		ram_ext_if_io_width = 64;
		break;
	default:
		ram_ext_if_io_width = 0;
		break;
	}

	data = mmio_read_32(S10_MPFE_IOHMC_REG_DRAMADDRW);
	ram_addr_width = IOHMC_DRAMADDRW_CFG_COL_ADDR_WIDTH(data) +
		IOHMC_DRAMADDRW_CFG_ROW_ADDR_WIDTH(data) +
		IOHMC_DRAMADDRW_CFG_BANK_ADDR_WIDTH(data) +
		IOHMC_DRAMADDRW_CFG_BANK_GROUP_ADDR_WIDTH(data) +
		IOHMC_DRAMADDRW_CFG_CS_ADDR_WIDTH(data);

	return (1 << ram_addr_width) * (ram_ext_if_io_width / 8);
}

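/*
 * Program the HMC adaptor: mirror the DRAM I/O width into DDRIOCTRL and
 * enable the HPS-to-EMIF interface; ECC handling follows below.
 */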
void configure_hmc_adaptor_regs(void)
{
	uint32_t data;
	uint32_t dram_io_width;

	dram_io_width = S10_MPFE_IOHMC_NIOSRESERVE0_NIOS_RESERVE0(
		mmio_read_32(S10_MPFE_IOHMC_REG_NIOSRESERVE0_OFST));

	dram_io_width = (dram_io_width & 0xFF) >> 5;

	mmio_clrsetbits_32(S10_MPFE_HMC_ADP_DDRIOCTRL,
		S10_MPFE_HMC_ADP_DDRIOCTRL_IO_SIZE_MSK,
		dram_io_width << S10_MPFE_HMC_ADP_DDRIOCTRL_IO_SIZE_OFST);

	mmio_write_32(S10_MPFE_HMC_ADP_HPSINTFCSEL,
		S10_MPFE_HMC_ADP_HPSINTFCSEL_ENABLE);

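	/*
	 * If the IOHMC reports ECC enabled, bring ECC up in order: reset the
	 * ECC counters, enable read-modify-write and auto write-back, set
	 * ECC_EN, then scrub DRAM by zero-filling it.
	 */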
	data = mmio_read_32(S10_MPFE_IOHMC_REG_CTRLCFG1);
	if (data & (1 << S10_IOHMC_CTRLCFG1_ENABLE_ECC_OFST)) {
		mmio_clrsetbits_32(S10_MPFE_HMC_ADP_ECCCTRL1,
			S10_MPFE_HMC_ADP_ECCCTRL1_AUTOWB_CNT_RST_SET_MSK |
			S10_MPFE_HMC_ADP_ECCCTRL1_CNT_RST_SET_MSK |
			S10_MPFE_HMC_ADP_ECCCTRL1_ECC_EN_SET_MSK,
			S10_MPFE_HMC_ADP_ECCCTRL1_AUTOWB_CNT_RST_SET_MSK |
			S10_MPFE_HMC_ADP_ECCCTRL1_CNT_RST_SET_MSK);

		mmio_clrsetbits_32(S10_MPFE_HMC_ADP_ECCCTRL2,
			S10_MPFE_HMC_ADP_ECCCTRL2_OVRW_RB_ECC_EN_SET_MSK |
			S10_MPFE_HMC_ADP_ECCCTRL2_RMW_EN_SET_MSK |
			S10_MPFE_HMC_ADP_ECCCTRL2_AUTOWB_EN_SET_MSK,
			S10_MPFE_HMC_ADP_ECCCTRL2_RMW_EN_SET_MSK |
			S10_MPFE_HMC_ADP_ECCCTRL2_AUTOWB_EN_SET_MSK);

		mmio_clrsetbits_32(S10_MPFE_HMC_ADP_ECCCTRL1,
			S10_MPFE_HMC_ADP_ECCCTRL1_AUTOWB_CNT_RST_SET_MSK |
			S10_MPFE_HMC_ADP_ECCCTRL1_CNT_RST_SET_MSK |
			S10_MPFE_HMC_ADP_ECCCTRL1_ECC_EN_SET_MSK,
			S10_MPFE_HMC_ADP_ECCCTRL1_ECC_EN_SET_MSK);
		INFO("Scrubbing ECC\n");

		/* ECC Scrubbing */
		zeromem(DRAM_BASE, DRAM_SIZE);
	} else {
		INFO("ECC is disabled.\n");
	}
}