xref: /openbsd/sys/dev/pci/drm/i915/soc/intel_dram.c (revision f005ef32)
// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 */

#include <linux/string_helpers.h>

#include "i915_drv.h"
#include "i915_reg.h"
#include "intel_dram.h"
#include "intel_mchbar_regs.h"
#include "intel_pcode.h"
#include "vlv_sideband.h"

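/*
 * Decoded properties of a single DIMM: size in Gb (total for the whole
 * DIMM), device width in bits and number of ranks.
 */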
struct dram_dimm_info {
	u16 size;
	u8 width, ranks;
};

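/*
 * Per-channel information covering both DIMM slots ('L' and 'S'), the
 * effective rank count of the channel and whether it uses 16 Gb DRAM
 * devices (which drives the level 0 watermark adjustment below).
 */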
struct dram_channel_info {
	struct dram_dimm_info dimm_l, dimm_s;
	u8 ranks;
	bool is_16gb_dimm;
};

#define DRAM_TYPE_STR(type) [INTEL_DRAM_ ## type] = #type

static const char *intel_dram_type_str(enum intel_dram_type type)
{
	static const char * const str[] = {
		DRAM_TYPE_STR(UNKNOWN),
		DRAM_TYPE_STR(DDR3),
		DRAM_TYPE_STR(DDR4),
		DRAM_TYPE_STR(LPDDR3),
		DRAM_TYPE_STR(LPDDR4),
	};

	if (type >= ARRAY_SIZE(str))
		type = INTEL_DRAM_UNKNOWN;

	return str[type];
}

#undef DRAM_TYPE_STR

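/*
 * Pineview: derive the FSB and memory clocks (in MHz) from CLKCFG and
 * flag DDR3 memory based on CSHRDDR3CTL.
 */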
static void pnv_detect_mem_freq(struct drm_i915_private *dev_priv)
{
	u32 tmp;

	tmp = intel_uncore_read(&dev_priv->uncore, CLKCFG);

	switch (tmp & CLKCFG_FSB_MASK) {
	case CLKCFG_FSB_533:
		dev_priv->fsb_freq = 533; /* 133*4 */
		break;
	case CLKCFG_FSB_800:
		dev_priv->fsb_freq = 800; /* 200*4 */
		break;
	case CLKCFG_FSB_667:
		dev_priv->fsb_freq = 667; /* 167*4 */
		break;
	case CLKCFG_FSB_400:
		dev_priv->fsb_freq = 400; /* 100*4 */
		break;
	}

	switch (tmp & CLKCFG_MEM_MASK) {
	case CLKCFG_MEM_533:
		dev_priv->mem_freq = 533;
		break;
	case CLKCFG_MEM_667:
		dev_priv->mem_freq = 667;
		break;
	case CLKCFG_MEM_800:
		dev_priv->mem_freq = 800;
		break;
	}

	/* detect pineview DDR3 setting */
	tmp = intel_uncore_read(&dev_priv->uncore, CSHRDDR3CTL);
	dev_priv->is_ddr3 = (tmp & CSHRDDR3CTL_DDR3) ? 1 : 0;
}

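/*
 * Ironlake: decode the memory (DDRMPLL1) and FSB (CSIPLL0) PLL settings
 * into MHz; unknown encodings fall back to 0.
 */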
static void ilk_detect_mem_freq(struct drm_i915_private *dev_priv)
{
	u16 ddrpll, csipll;

	ddrpll = intel_uncore_read16(&dev_priv->uncore, DDRMPLL1);
	switch (ddrpll & 0xff) {
	case 0xc:
		dev_priv->mem_freq = 800;
		break;
	case 0x10:
		dev_priv->mem_freq = 1066;
		break;
	case 0x14:
		dev_priv->mem_freq = 1333;
		break;
	case 0x18:
		dev_priv->mem_freq = 1600;
		break;
	default:
		drm_dbg(&dev_priv->drm, "unknown memory frequency 0x%02x\n",
			ddrpll & 0xff);
		dev_priv->mem_freq = 0;
		break;
	}

	csipll = intel_uncore_read16(&dev_priv->uncore, CSIPLL0);
	switch (csipll & 0x3ff) {
	case 0x00c:
		dev_priv->fsb_freq = 3200;
		break;
	case 0x00e:
		dev_priv->fsb_freq = 3733;
		break;
	case 0x010:
		dev_priv->fsb_freq = 4266;
		break;
	case 0x012:
		dev_priv->fsb_freq = 4800;
		break;
	case 0x014:
		dev_priv->fsb_freq = 5333;
		break;
	case 0x016:
		dev_priv->fsb_freq = 5866;
		break;
	case 0x018:
		dev_priv->fsb_freq = 6400;
		break;
	default:
		drm_dbg(&dev_priv->drm, "unknown fsb frequency 0x%04x\n",
			csipll & 0x3ff);
		dev_priv->fsb_freq = 0;
		break;
	}
}

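/*
 * Cherryview: the memory frequency is read from the CCK fuse register
 * over the IOSF sideband.
 */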
static void chv_detect_mem_freq(struct drm_i915_private *i915)
{
	u32 val;

	vlv_iosf_sb_get(i915, BIT(VLV_IOSF_SB_CCK));
	val = vlv_cck_read(i915, CCK_FUSE_REG);
	vlv_iosf_sb_put(i915, BIT(VLV_IOSF_SB_CCK));

	switch ((val >> 2) & 0x7) {
	case 3:
		i915->mem_freq = 2000;
		break;
	default:
		i915->mem_freq = 1600;
		break;
	}
}

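/*
 * Valleyview: the memory frequency is derived from bits 7:6 of the
 * Punit GPU frequency status register.
 */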
static void vlv_detect_mem_freq(struct drm_i915_private *i915)
{
	u32 val;

	vlv_iosf_sb_get(i915, BIT(VLV_IOSF_SB_PUNIT));
	val = vlv_punit_read(i915, PUNIT_REG_GPU_FREQ_STS);
	vlv_iosf_sb_put(i915, BIT(VLV_IOSF_SB_PUNIT));

	switch ((val >> 6) & 3) {
	case 0:
	case 1:
		i915->mem_freq = 800;
		break;
	case 2:
		i915->mem_freq = 1066;
		break;
	case 3:
		i915->mem_freq = 1333;
		break;
	}
}

static void detect_mem_freq(struct drm_i915_private *i915)
{
	if (IS_PINEVIEW(i915))
		pnv_detect_mem_freq(i915);
	else if (GRAPHICS_VER(i915) == 5)
		ilk_detect_mem_freq(i915);
	else if (IS_CHERRYVIEW(i915))
		chv_detect_mem_freq(i915);
	else if (IS_VALLEYVIEW(i915))
		vlv_detect_mem_freq(i915);

	if (i915->mem_freq)
		drm_dbg(&i915->drm, "DDR speed: %d MHz\n", i915->mem_freq);
}

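/*
 * Number of DRAM devices making up the DIMM, assuming each rank is
 * 64 bits wide and built from devices of the given width.
 */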
static int intel_dimm_num_devices(const struct dram_dimm_info *dimm)
{
	return dimm->ranks * 64 / (dimm->width ?: 1);
}

/* Returns total Gb for the whole DIMM */
static int skl_get_dimm_size(u16 val)
{
	return (val & SKL_DRAM_SIZE_MASK) * 8;
}

static int skl_get_dimm_width(u16 val)
{
	if (skl_get_dimm_size(val) == 0)
		return 0;

	switch (val & SKL_DRAM_WIDTH_MASK) {
	case SKL_DRAM_WIDTH_X8:
	case SKL_DRAM_WIDTH_X16:
	case SKL_DRAM_WIDTH_X32:
		val = (val & SKL_DRAM_WIDTH_MASK) >> SKL_DRAM_WIDTH_SHIFT;
		return 8 << val;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int skl_get_dimm_ranks(u16 val)
{
	if (skl_get_dimm_size(val) == 0)
		return 0;

	val = (val & SKL_DRAM_RANK_MASK) >> SKL_DRAM_RANK_SHIFT;

	return val + 1;
}

/* Returns total Gb for the whole DIMM */
static int icl_get_dimm_size(u16 val)
{
	return (val & ICL_DRAM_SIZE_MASK) * 8 / 2;
}

static int icl_get_dimm_width(u16 val)
{
	if (icl_get_dimm_size(val) == 0)
		return 0;

	switch (val & ICL_DRAM_WIDTH_MASK) {
	case ICL_DRAM_WIDTH_X8:
	case ICL_DRAM_WIDTH_X16:
	case ICL_DRAM_WIDTH_X32:
		val = (val & ICL_DRAM_WIDTH_MASK) >> ICL_DRAM_WIDTH_SHIFT;
		return 8 << val;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int icl_get_dimm_ranks(u16 val)
{
	if (icl_get_dimm_size(val) == 0)
		return 0;

	val = (val & ICL_DRAM_RANK_MASK) >> ICL_DRAM_RANK_SHIFT;

	return val + 1;
}

static bool
skl_is_16gb_dimm(const struct dram_dimm_info *dimm)
{
	/* Convert total Gb to Gb per DRAM device */
	return dimm->size / (intel_dimm_num_devices(dimm) ?: 1) == 16;
}

static void
skl_dram_get_dimm_info(struct drm_i915_private *i915,
		       struct dram_dimm_info *dimm,
		       int channel, char dimm_name, u16 val)
{
	if (GRAPHICS_VER(i915) >= 11) {
		dimm->size = icl_get_dimm_size(val);
		dimm->width = icl_get_dimm_width(val);
		dimm->ranks = icl_get_dimm_ranks(val);
	} else {
		dimm->size = skl_get_dimm_size(val);
		dimm->width = skl_get_dimm_width(val);
		dimm->ranks = skl_get_dimm_ranks(val);
	}

	drm_dbg_kms(&i915->drm,
		    "CH%u DIMM %c size: %u Gb, width: X%u, ranks: %u, 16Gb DIMMs: %s\n",
		    channel, dimm_name, dimm->size, dimm->width, dimm->ranks,
		    str_yes_no(skl_is_16gb_dimm(dimm)));
}

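/*
 * Decode both DIMM slots of a channel. The channel is reported as dual
 * rank if either DIMM is dual rank, or if both slots are populated with
 * single rank DIMMs.
 */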
static int
skl_dram_get_channel_info(struct drm_i915_private *i915,
			  struct dram_channel_info *ch,
			  int channel, u32 val)
{
	skl_dram_get_dimm_info(i915, &ch->dimm_l,
			       channel, 'L', val & 0xffff);
	skl_dram_get_dimm_info(i915, &ch->dimm_s,
			       channel, 'S', val >> 16);

	if (ch->dimm_l.size == 0 && ch->dimm_s.size == 0) {
		drm_dbg_kms(&i915->drm, "CH%u not populated\n", channel);
		return -EINVAL;
	}

	if (ch->dimm_l.ranks == 2 || ch->dimm_s.ranks == 2)
		ch->ranks = 2;
	else if (ch->dimm_l.ranks == 1 && ch->dimm_s.ranks == 1)
		ch->ranks = 2;
	else
		ch->ranks = 1;

	ch->is_16gb_dimm = skl_is_16gb_dimm(&ch->dimm_l) ||
		skl_is_16gb_dimm(&ch->dimm_s);

	drm_dbg_kms(&i915->drm, "CH%u ranks: %u, 16Gb DIMMs: %s\n",
		    channel, ch->ranks, str_yes_no(ch->is_16gb_dimm));

	return 0;
}

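/*
 * The memory configuration is considered symmetric when the two
 * channels are identical and, within a channel, either the S slot is
 * empty or both DIMMs match.
 */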
static bool
intel_is_dram_symmetric(const struct dram_channel_info *ch0,
			const struct dram_channel_info *ch1)
{
	return !memcmp(ch0, ch1, sizeof(*ch0)) &&
		(ch0->dimm_s.size == 0 ||
		 !memcmp(&ch0->dimm_l, &ch0->dimm_s, sizeof(ch0->dimm_l)));
}

static int
skl_dram_get_channels_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	struct dram_channel_info ch0 = {}, ch1 = {};
	u32 val;
	int ret;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH0_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch0, 0, val);
	if (ret == 0)
		dram_info->num_channels++;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH1_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch1, 1, val);
	if (ret == 0)
		dram_info->num_channels++;

	if (dram_info->num_channels == 0) {
		drm_info(&i915->drm, "Number of memory channels is zero\n");
		return -EINVAL;
	}

	if (ch0.ranks == 0 && ch1.ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory rank information\n");
		return -EINVAL;
	}

	dram_info->wm_lv_0_adjust_needed = ch0.is_16gb_dimm || ch1.is_16gb_dimm;

	dram_info->symmetric_memory = intel_is_dram_symmetric(&ch0, &ch1);

	drm_dbg_kms(&i915->drm, "Memory configuration is symmetric? %s\n",
		    str_yes_no(dram_info->symmetric_memory));

	return 0;
}

static enum intel_dram_type
skl_get_dram_type(struct drm_i915_private *i915)
{
	u32 val;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_INTER_CHANNEL_0_0_0_MCHBAR_MCMAIN);

	switch (val & SKL_DRAM_DDR_TYPE_MASK) {
	case SKL_DRAM_DDR_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case SKL_DRAM_DDR_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case SKL_DRAM_DDR_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case SKL_DRAM_DDR_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}

static int
skl_get_dram_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	int ret;

	dram_info->type = skl_get_dram_type(i915);
	drm_dbg_kms(&i915->drm, "DRAM type: %s\n",
		    intel_dram_type_str(dram_info->type));

	ret = skl_dram_get_channels_info(i915);
	if (ret)
		return ret;

	return 0;
}

/* Returns Gb per DRAM device */
static int bxt_get_dimm_size(u32 val)
{
	switch (val & BXT_DRAM_SIZE_MASK) {
	case BXT_DRAM_SIZE_4GBIT:
		return 4;
	case BXT_DRAM_SIZE_6GBIT:
		return 6;
	case BXT_DRAM_SIZE_8GBIT:
		return 8;
	case BXT_DRAM_SIZE_12GBIT:
		return 12;
	case BXT_DRAM_SIZE_16GBIT:
		return 16;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int bxt_get_dimm_width(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return 0;

	val = (val & BXT_DRAM_WIDTH_MASK) >> BXT_DRAM_WIDTH_SHIFT;

	return 8 << val;
}

static int bxt_get_dimm_ranks(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return 0;

	switch (val & BXT_DRAM_RANK_MASK) {
	case BXT_DRAM_RANK_SINGLE:
		return 1;
	case BXT_DRAM_RANK_DUAL:
		return 2;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static enum intel_dram_type bxt_get_dimm_type(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return INTEL_DRAM_UNKNOWN;

	switch (val & BXT_DRAM_TYPE_MASK) {
	case BXT_DRAM_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case BXT_DRAM_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case BXT_DRAM_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case BXT_DRAM_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}

static void bxt_get_dimm_info(struct dram_dimm_info *dimm, u32 val)
{
	dimm->width = bxt_get_dimm_width(val);
	dimm->ranks = bxt_get_dimm_ranks(val);

	/*
	 * Size in register is Gb per DRAM device. Convert to total
	 * Gb to match the way we report this for non-LP platforms.
	 */
	dimm->size = bxt_get_dimm_size(val) * intel_dimm_num_devices(dimm);
}

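/*
 * Broxton/Geminilake: walk the per-channel DUNIT DRP0 registers; an
 * all-ones readout is treated as an unpopulated channel and skipped.
 */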
static int bxt_get_dram_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	u32 val;
	u8 valid_ranks = 0;
	int i;

	/*
	 * Now read each DUNIT8/9/10/11 to check the rank of each DIMM.
	 */
	for (i = BXT_D_CR_DRP0_DUNIT_START; i <= BXT_D_CR_DRP0_DUNIT_END; i++) {
		struct dram_dimm_info dimm;
		enum intel_dram_type type;

		val = intel_uncore_read(&i915->uncore, BXT_D_CR_DRP0_DUNIT(i));
		if (val == 0xFFFFFFFF)
			continue;

		dram_info->num_channels++;

		bxt_get_dimm_info(&dimm, val);
		type = bxt_get_dimm_type(val);

		drm_WARN_ON(&i915->drm, type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != type);

		drm_dbg_kms(&i915->drm,
			    "CH%u DIMM size: %u Gb, width: X%u, ranks: %u, type: %s\n",
			    i - BXT_D_CR_DRP0_DUNIT_START,
			    dimm.size, dimm.width, dimm.ranks,
			    intel_dram_type_str(type));

		if (valid_ranks == 0)
			valid_ranks = dimm.ranks;

		if (type != INTEL_DRAM_UNKNOWN)
			dram_info->type = type;
	}

	if (dram_info->type == INTEL_DRAM_UNKNOWN || valid_ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory information\n");
		return -EINVAL;
	}

	return 0;
}

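/*
 * Query the pcode MEM_SS global info: bits 3:0 encode the DRAM type
 * (with a different encoding on gen12), bits 7:4 the number of
 * channels, bits 11:8 the QGV points and bits 13:12 the PSF GV points.
 */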
static int icl_pcode_read_mem_global_info(struct drm_i915_private *dev_priv)
{
	struct dram_info *dram_info = &dev_priv->dram_info;
	u32 val = 0;
	int ret;

	ret = snb_pcode_read(&dev_priv->uncore, ICL_PCODE_MEM_SUBSYSYSTEM_INFO |
			     ICL_PCODE_MEM_SS_READ_GLOBAL_INFO, &val, NULL);
	if (ret)
		return ret;

	if (GRAPHICS_VER(dev_priv) == 12) {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR5;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR5;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		case 4:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 5:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	} else {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	}

	dram_info->num_channels = (val & 0xf0) >> 4;
	dram_info->num_qgv_points = (val & 0xf00) >> 8;
	dram_info->num_psf_gv_points = (val & 0x3000) >> 12;

	return 0;
}

static int gen11_get_dram_info(struct drm_i915_private *i915)
{
	int ret = skl_get_dram_info(i915);

	if (ret)
		return ret;

	return icl_pcode_read_mem_global_info(i915);
}

static int gen12_get_dram_info(struct drm_i915_private *i915)
{
	i915->dram_info.wm_lv_0_adjust_needed = false;

	return icl_pcode_read_mem_global_info(i915);
}

static int xelpdp_get_dram_info(struct drm_i915_private *i915)
{
	u32 val = intel_uncore_read(&i915->uncore, MTL_MEM_SS_INFO_GLOBAL);
	struct dram_info *dram_info = &i915->dram_info;

	switch (REG_FIELD_GET(MTL_DDR_TYPE_MASK, val)) {
	case 0:
		dram_info->type = INTEL_DRAM_DDR4;
		break;
	case 1:
		dram_info->type = INTEL_DRAM_DDR5;
		break;
	case 2:
		dram_info->type = INTEL_DRAM_LPDDR5;
		break;
	case 3:
		dram_info->type = INTEL_DRAM_LPDDR4;
		break;
	case 4:
		dram_info->type = INTEL_DRAM_DDR3;
		break;
	case 5:
		dram_info->type = INTEL_DRAM_LPDDR3;
		break;
	default:
		MISSING_CASE(val);
		return -EINVAL;
	}

	dram_info->num_channels = REG_FIELD_GET(MTL_N_OF_POPULATED_CH_MASK, val);
	dram_info->num_qgv_points = REG_FIELD_GET(MTL_N_OF_ENABLED_QGV_POINTS_MASK, val);
	/* PSF GV points not supported in D14+ */

	return 0;
}

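/*
 * Main entry point: always detect the legacy FSB/memory clocks, then
 * fill in struct dram_info for gen9+ platforms with a display (DG2 is
 * skipped).
 */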
void intel_dram_detect(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	int ret;

	detect_mem_freq(i915);

	if (GRAPHICS_VER(i915) < 9 || IS_DG2(i915) || !HAS_DISPLAY(i915))
		return;

	/*
	 * Assume level 0 watermark latency adjustment is needed until proven
	 * otherwise; this w/a is not needed on bxt/glk.
	 */
	dram_info->wm_lv_0_adjust_needed = !IS_GEN9_LP(i915);

	if (DISPLAY_VER(i915) >= 14)
		ret = xelpdp_get_dram_info(i915);
	else if (GRAPHICS_VER(i915) >= 12)
		ret = gen12_get_dram_info(i915);
	else if (GRAPHICS_VER(i915) >= 11)
		ret = gen11_get_dram_info(i915);
	else if (IS_GEN9_LP(i915))
		ret = bxt_get_dram_info(i915);
	else
		ret = skl_get_dram_info(i915);
	if (ret)
		return;

	drm_dbg_kms(&i915->drm, "DRAM channels: %u\n", dram_info->num_channels);

	drm_dbg_kms(&i915->drm, "Watermark level 0 adjustment needed: %s\n",
		    str_yes_no(dram_info->wm_lv_0_adjust_needed));
}

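/*
 * eDRAM size in MB as reported by HSW_EDRAM_CAP on gen9+:
 * banks * ways * sets, with the ways/sets fields decoded via the
 * lookup tables below.
 */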
static u32 gen9_edram_size_mb(struct drm_i915_private *i915, u32 cap)
{
	static const u8 ways[8] = { 4, 8, 12, 16, 16, 16, 16, 16 };
	static const u8 sets[4] = { 1, 1, 2, 2 };

	return EDRAM_NUM_BANKS(cap) *
		ways[EDRAM_WAYS_IDX(cap)] *
		sets[EDRAM_SETS_IDX(cap)];
}

void intel_dram_edram_detect(struct drm_i915_private *i915)
{
	u32 edram_cap = 0;

	if (!(IS_HASWELL(i915) || IS_BROADWELL(i915) || GRAPHICS_VER(i915) >= 9))
		return;

	edram_cap = intel_uncore_read_fw(&i915->uncore, HSW_EDRAM_CAP);

	/* NB: We can't write IDICR yet because we don't have gt funcs set up */

	if (!(edram_cap & EDRAM_ENABLED))
		return;

	/*
	 * The needed capability bits for size calculation are not there with
	 * pre-gen9, so always return 128MB.
	 */
	if (GRAPHICS_VER(i915) < 9)
		i915->edram_size_mb = 128;
	else
		i915->edram_size_mb = gen9_edram_size_mb(i915, edram_cap);

	drm_info(&i915->drm, "Found %uMB of eDRAM\n", i915->edram_size_mb);
}