xref: /openbsd/sys/dev/pci/drm/radeon/rv770_dpm.c (revision 5ca02815)
1 /*
2  * Copyright 2011 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 
25 #include "radeon.h"
26 #include "radeon_asic.h"
27 #include "rv770.h"
28 #include "rv770d.h"
29 #include "r600_dpm.h"
30 #include "rv770_dpm.h"
31 #include "cypress_dpm.h"
32 #include "atom.h"
33 #include "evergreen.h"
34 #include <linux/seq_file.h>
35 
36 #define MC_CG_ARB_FREQ_F0           0x0a
37 #define MC_CG_ARB_FREQ_F1           0x0b
38 #define MC_CG_ARB_FREQ_F2           0x0c
39 #define MC_CG_ARB_FREQ_F3           0x0d
40 
41 #define MC_CG_SEQ_DRAMCONF_S0       0x05
42 #define MC_CG_SEQ_DRAMCONF_S1       0x06
43 
44 #define PCIE_BUS_CLK                10000
45 #define TCLK                        (PCIE_BUS_CLK / 10)
46 
47 #define SMC_RAM_END 0xC000
48 
rv770_get_ps(struct radeon_ps * rps)49 struct rv7xx_ps *rv770_get_ps(struct radeon_ps *rps)
50 {
51 	struct rv7xx_ps *ps = rps->ps_priv;
52 
53 	return ps;
54 }
55 
rv770_get_pi(struct radeon_device * rdev)56 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev)
57 {
58 	struct rv7xx_power_info *pi = rdev->pm.dpm.priv;
59 
60 	return pi;
61 }
62 
evergreen_get_pi(struct radeon_device * rdev)63 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev)
64 {
65 	struct evergreen_power_info *pi = rdev->pm.dpm.priv;
66 
67 	return pi;
68 }
69 
/*
 * Enable/disable hardware-controlled dynamic PCIe gen2 speed switching.
 * On disable, the gen2 strap is only cleared when the board did not boot
 * in gen2 mode.  The register is written back only if the link partner
 * has ever sent or advertised gen2 — presumably to avoid touching the
 * link speed control on a gen1-only link (TODO confirm against BIF docs).
 */
static void rv770_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
					       bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 tmp;

	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (enable) {
		/* hand speed switching to the HW voltage interface */
		tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
		tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
		tmp |= LC_GEN2_EN_STRAP;
	} else {
		if (!pi->boot_in_gen2) {
			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
			tmp &= ~LC_GEN2_EN_STRAP;
		}
	}
	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);

}
92 
rv770_enable_l0s(struct radeon_device * rdev)93 static void rv770_enable_l0s(struct radeon_device *rdev)
94 {
95 	u32 tmp;
96 
97 	tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L0S_INACTIVITY_MASK;
98 	tmp |= LC_L0S_INACTIVITY(3);
99 	WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
100 }
101 
rv770_enable_l1(struct radeon_device * rdev)102 static void rv770_enable_l1(struct radeon_device *rdev)
103 {
104 	u32 tmp;
105 
106 	tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL);
107 	tmp &= ~LC_L1_INACTIVITY_MASK;
108 	tmp |= LC_L1_INACTIVITY(4);
109 	tmp &= ~LC_PMI_TO_L1_DIS;
110 	tmp &= ~LC_ASPM_TO_L1_DIS;
111 	WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
112 }
113 
/*
 * Allow the PCIe PLL to power down while the link is in L1/L2/L3:
 * raise the L1 inactivity timer to 8, then configure the PLL power
 * controls in the PCIE indirect register space.
 */
static void rv770_enable_pll_sleep_in_l1(struct radeon_device *rdev)
{
	u32 tmp;

	tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L1_INACTIVITY_MASK;
	tmp |= LC_L1_INACTIVITY(8);
	WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);

	/* NOTE, this is a PCIE indirect reg, not PCIE PORT */
	tmp = RREG32_PCIE(PCIE_P_CNTL);
	tmp |= P_PLL_PWRDN_IN_L1L23;
	tmp &= ~P_PLL_BUF_PDNB;
	tmp &= ~P_PLL_PDNB;
	tmp |= P_ALLOW_PRX_FRONTEND_SHUTOFF;
	WREG32_PCIE(PCIE_P_CNTL, tmp);
}
130 
/*
 * Enable/disable dynamic gfx engine clock gating.  On disable, the
 * engine clock is pulsed forced-on and then released; the trailing
 * register read presumably acts as a posting read to flush the
 * writes — TODO confirm against the register programming guide.
 */
static void rv770_gfx_clock_gating_enable(struct radeon_device *rdev,
					  bool enable)
{
	if (enable)
		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
	else {
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
		RREG32(GB_TILING_CONFIG);
	}
}
143 
/*
 * Enable/disable medium-grain clock gating.  RV770 proper uses its own
 * CGTT_LOCAL_0 default; the other rv7xx family members share a common
 * one.  Disabling writes all-ones masks to turn gating off everywhere.
 */
static void rv770_mg_clock_gating_enable(struct radeon_device *rdev,
					 bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (enable) {
		u32 mgcg_cgtt_local0;

		if (rdev->family == CHIP_RV770)
			mgcg_cgtt_local0 = RV770_MGCGTTLOCAL0_DFLT;
		else
			mgcg_cgtt_local0 = RV7XX_MGCGTTLOCAL0_DFLT;

		WREG32(CG_CGTT_LOCAL_0, mgcg_cgtt_local0);
		WREG32(CG_CGTT_LOCAL_1, (RV770_MGCGTTLOCAL1_DFLT & 0xFFFFCFFF));

		/* shader-complex gating only when the board supports it */
		if (pi->mgcgtssm)
			WREG32(CGTS_SM_CTRL_REG, RV770_MGCGCGTSSMCTRL_DFLT);
	} else {
		WREG32(CG_CGTT_LOCAL_0, 0xFFFFFFFF);
		WREG32(CG_CGTT_LOCAL_1, 0xFFFFCFFF);
	}
}
167 
rv770_restore_cgcg(struct radeon_device * rdev)168 void rv770_restore_cgcg(struct radeon_device *rdev)
169 {
170 	bool dpm_en = false, cg_en = false;
171 
172 	if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
173 		dpm_en = true;
174 	if (RREG32(SCLK_PWRMGT_CNTL) & DYN_GFX_CLK_OFF_EN)
175 		cg_en = true;
176 
177 	if (dpm_en && !cg_en)
178 		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
179 }
180 
/*
 * Start dynamic power management: un-gate the SCLK and MPLL power
 * management blocks first, then set the global enable bit.
 */
static void rv770_start_dpm(struct radeon_device *rdev)
{
	WREG32_P(SCLK_PWRMGT_CNTL, 0, ~SCLK_PWRMGT_OFF);

	WREG32_P(MCLK_PWRMGT_CNTL, 0, ~MPLL_PWRMGT_OFF);

	WREG32_P(GENERAL_PWRMGT, GLOBAL_PWRMGT_EN, ~GLOBAL_PWRMGT_EN);
}
189 
/*
 * Stop dynamic power management: ask the SMC to drop to the low state
 * (best effort — failure is only logged), then clear the global enable
 * and switch off SCLK/MPLL power management.
 */
void rv770_stop_dpm(struct radeon_device *rdev)
{
	PPSMC_Result result;

	result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_TwoLevelsDisabled);

	if (result != PPSMC_Result_OK)
		DRM_DEBUG("Could not force DPM to low.\n");

	WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);

	WREG32_P(SCLK_PWRMGT_CNTL, SCLK_PWRMGT_OFF, ~SCLK_PWRMGT_OFF);

	WREG32_P(MCLK_PWRMGT_CNTL, MPLL_PWRMGT_OFF, ~MPLL_PWRMGT_OFF);
}
205 
rv770_dpm_enabled(struct radeon_device * rdev)206 bool rv770_dpm_enabled(struct radeon_device *rdev)
207 {
208 	if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
209 		return true;
210 	else
211 		return false;
212 }
213 
/*
 * Enable/disable thermal protection.  The hardware bit is a *disable*
 * flag, so it is cleared to enable protection and set to disable it.
 */
void rv770_enable_thermal_protection(struct radeon_device *rdev,
				     bool enable)
{
	WREG32_P(GENERAL_PWRMGT, enable ? 0 : THERMAL_PROTECTION_DIS,
		 ~THERMAL_PROTECTION_DIS);
}
222 
/* Enable static (ACPI) power management mode. */
void rv770_enable_acpi_pm(struct radeon_device *rdev)
{
	WREG32_P(GENERAL_PWRMGT, STATIC_PM_EN, ~STATIC_PM_EN);
}
227 
rv770_get_seq_value(struct radeon_device * rdev,struct rv7xx_pl * pl)228 u8 rv770_get_seq_value(struct radeon_device *rdev,
229 		       struct rv7xx_pl *pl)
230 {
231 	return (pl->flags & ATOM_PPLIB_R600_FLAGS_LOWPOWER) ?
232 		MC_CG_SEQ_DRAMCONF_S0 : MC_CG_SEQ_DRAMCONF_S1;
233 }
234 
#if 0
/*
 * Read back a driver "soft register" from SMC SRAM.  Currently unused
 * (compiled out); kept as the counterpart of
 * rv770_write_smc_soft_register() below.
 */
int rv770_read_smc_soft_register(struct radeon_device *rdev,
				 u16 reg_offset, u32 *value)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	return rv770_read_smc_sram_dword(rdev,
					 pi->soft_regs_start + reg_offset,
					 value, pi->sram_end);
}
#endif
246 
/*
 * Write a driver "soft register" into SMC SRAM at the soft-register
 * base plus the given offset.
 */
int rv770_write_smc_soft_register(struct radeon_device *rdev,
				  u16 reg_offset, u32 value)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 addr = pi->soft_regs_start + reg_offset;

	return rv770_write_smc_sram_dword(rdev, addr, value, pi->sram_end);
}
256 
/*
 * Fill in the per-level aT (transition timing) values of an SMC
 * software state.  l[]/r[] are left/right hysteresis percentages
 * derived from the sclk ratios between adjacent levels; each aT packs
 * both, scaled by the (p)bsp switching period.  The fixed-point
 * arithmetic mirrors the original AMD reference code — presumably the
 * ratios keep level transitions symmetric; TODO confirm against the
 * r600 DPM documentation.
 */
int rv770_populate_smc_t(struct radeon_device *rdev,
			 struct radeon_ps *radeon_state,
			 RV770_SMC_SWSTATE *smc_state)
{
	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int i;
	int a_n;
	int a_d;
	u8 l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
	u8 r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
	u32 a_t;

	l[0] = 0;
	r[2] = 100;

	/* low <-> medium transition ratio */
	a_n = (int)state->medium.sclk * pi->lmp +
		(int)state->low.sclk * (R600_AH_DFLT - pi->rlp);
	a_d = (int)state->low.sclk * (100 - (int)pi->rlp) +
		(int)state->medium.sclk * pi->lmp;

	l[1] = (u8)(pi->lmp - (int)pi->lmp * a_n / a_d);
	r[0] = (u8)(pi->rlp + (100 - (int)pi->rlp) * a_n / a_d);

	/* medium <-> high transition ratio */
	a_n = (int)state->high.sclk * pi->lhp + (int)state->medium.sclk *
		(R600_AH_DFLT - pi->rmp);
	a_d = (int)state->medium.sclk * (100 - (int)pi->rmp) +
		(int)state->high.sclk * pi->lhp;

	l[2] = (u8)(pi->lhp - (int)pi->lhp * a_n / a_d);
	r[1] = (u8)(pi->rmp + (100 - (int)pi->rmp) * a_n / a_d);

	/* all but the last level are scaled by the default period (bsp) */
	for (i = 0; i < (RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1); i++) {
		a_t = CG_R(r[i] * pi->bsp / 200) | CG_L(l[i] * pi->bsp / 200);
		smc_state->levels[i].aT = cpu_to_be32(a_t);
	}

	/* the last level uses the performance period (pbsp) */
	a_t = CG_R(r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200) |
		CG_L(l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200);

	smc_state->levels[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1].aT =
		cpu_to_be32(a_t);

	return 0;
}
302 
/*
 * Fill in the per-level bSP (switching period) values: every level gets
 * the default period (dsp) except the last, which gets the performance
 * period (psp).
 */
int rv770_populate_smc_sp(struct radeon_device *rdev,
			  struct radeon_ps *radeon_state,
			  RV770_SMC_SWSTATE *smc_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int lvl;

	for (lvl = 0; lvl < RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE; lvl++) {
		if (lvl == RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1)
			smc_state->levels[lvl].bSP = cpu_to_be32(pi->psp);
		else
			smc_state->levels[lvl].bSP = cpu_to_be32(pi->dsp);
	}

	return 0;
}
318 
/*
 * Split the MPLL feedback divider for a target memory clock into its
 * integer (clkf) and 1/8th fractional (clkfrac) parts.  The yclk runs
 * at 4x the memory clock for GDDR5 and 2x otherwise.
 */
static void rv770_calculate_fractional_mpll_feedback_divider(u32 memory_clock,
							     u32 reference_clock,
							     bool gddr5,
							     struct atom_clock_dividers *dividers,
							     u32 *clkf,
							     u32 *clkfrac)
{
	u32 fyclk = (memory_clock * (gddr5 ? 8 : 4)) / 2;
	u32 fb8;

	fb8 = (8 * fyclk * dividers->ref_div * dividers->post_div) /
		reference_clock;

	*clkf = fb8 >> 3;
	*clkfrac = fb8 & 7;
}
343 
/*
 * Encode a yclk post divider (must be a power of two, 1..16) into its
 * register field value (log2).  Returns -EINVAL for any other divider.
 */
static int rv770_encode_yclk_post_div(u32 postdiv, u32 *encoded_postdiv)
{
	switch (postdiv) {
	case 1:
		*encoded_postdiv = 0;
		return 0;
	case 2:
		*encoded_postdiv = 1;
		return 0;
	case 4:
		*encoded_postdiv = 2;
		return 0;
	case 8:
		*encoded_postdiv = 3;
		return 0;
	case 16:
		*encoded_postdiv = 4;
		return 0;
	default:
		return -EINVAL;
	}
}
371 
/*
 * Map an MPLL feedback divider (clkf) to the matching IBIAS register
 * value, using the highest clkf threshold that still covers it.
 */
u32 rv770_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf)
{
	static const struct {
		u32 max_clkf;
		u32 ibias;
	} map[] = {
		{ 0x10, 0x4B },
		{ 0x19, 0x5B },
		{ 0x21, 0x2B },
		{ 0x27, 0x6C },
		{ 0x31, 0x9D },
	};
	unsigned int i;

	for (i = 0; i < sizeof(map) / sizeof(map[0]); i++) {
		if (clkf <= map[i].max_clkf)
			return map[i].ibias;
	}
	return 0xC6;
}
386 
/*
 * Compute the MPLL register programming for a target memory clock and
 * store the big-endian values into the SMC mclk table entry.  The AD
 * (address) PLL is always programmed; the DQ (data) PLL is programmed
 * separately only for GDDR5 boards.
 *
 * Returns 0 on success, or a negative errno if the atom divider lookup
 * fails or yields an unsupported ref/post divider.
 */
static int rv770_populate_mclk_value(struct radeon_device *rdev,
				     u32 engine_clock, u32 memory_clock,
				     RV7XX_SMC_MCLK_VALUE *mclk)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	/* maps ref_div 1..5 to the hardware CLKR encoding */
	u8 encoded_reference_dividers[] = { 0, 16, 17, 20, 21 };
	u32 mpll_ad_func_cntl =
		pi->clk_regs.rv770.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 =
		pi->clk_regs.rv770.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl =
		pi->clk_regs.rv770.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 =
		pi->clk_regs.rv770.mpll_dq_func_cntl_2;
	u32 mclk_pwrmgt_cntl =
		pi->clk_regs.rv770.mclk_pwrmgt_cntl;
	u32 dll_cntl = pi->clk_regs.rv770.dll_cntl;
	struct atom_clock_dividers dividers;
	u32 reference_clock = rdev->clock.mpll.reference_freq;
	u32 clkf, clkfrac;
	u32 postdiv_yclk;
	u32 ibias;
	int ret;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
					     memory_clock, false, &dividers);
	if (ret)
		return ret;

	/* only ref_div 1..5 has an encoding in the table above */
	if ((dividers.ref_div < 1) || (dividers.ref_div > 5))
		return -EINVAL;

	rv770_calculate_fractional_mpll_feedback_divider(memory_clock, reference_clock,
							 pi->mem_gddr5,
							 &dividers, &clkf, &clkfrac);

	ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
	if (ret)
		return ret;

	ibias = rv770_map_clkf_to_ibias(rdev, clkf);

	/* program the AD PLL: ref/post/feedback dividers and bias */
	mpll_ad_func_cntl &= ~(CLKR_MASK |
			       YCLK_POST_DIV_MASK |
			       CLKF_MASK |
			       CLKFRAC_MASK |
			       IBIAS_MASK);
	mpll_ad_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
	mpll_ad_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
	mpll_ad_func_cntl |= CLKF(clkf);
	mpll_ad_func_cntl |= CLKFRAC(clkfrac);
	mpll_ad_func_cntl |= IBIAS(ibias);

	if (dividers.vco_mode)
		mpll_ad_func_cntl_2 |= VCO_MODE;
	else
		mpll_ad_func_cntl_2 &= ~VCO_MODE;

	/* GDDR5 also needs the DQ PLL programmed the same way */
	if (pi->mem_gddr5) {
		rv770_calculate_fractional_mpll_feedback_divider(memory_clock,
								 reference_clock,
								 pi->mem_gddr5,
								 &dividers, &clkf, &clkfrac);

		ibias = rv770_map_clkf_to_ibias(rdev, clkf);

		ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
		if (ret)
			return ret;

		mpll_dq_func_cntl &= ~(CLKR_MASK |
				       YCLK_POST_DIV_MASK |
				       CLKF_MASK |
				       CLKFRAC_MASK |
				       IBIAS_MASK);
		mpll_dq_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
		mpll_dq_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
		mpll_dq_func_cntl |= CLKF(clkf);
		mpll_dq_func_cntl |= CLKFRAC(clkfrac);
		mpll_dq_func_cntl |= IBIAS(ibias);

		if (dividers.vco_mode)
			mpll_dq_func_cntl_2 |= VCO_MODE;
		else
			mpll_dq_func_cntl_2 &= ~VCO_MODE;
	}

	/* SMC tables are big-endian */
	mclk->mclk770.mclk_value = cpu_to_be32(memory_clock);
	mclk->mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	mclk->mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	mclk->mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	mclk->mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
	mclk->mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	mclk->mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);

	return 0;
}
484 
/*
 * Compute the SPLL register programming for a target engine clock
 * (dividers, 14.2 fixed-point feedback divider, optional spread
 * spectrum) and store the big-endian values into the SMC sclk entry.
 *
 * Returns 0 on success, or a negative errno from the atom divider
 * lookup.
 */
static int rv770_populate_sclk_value(struct radeon_device *rdev,
				     u32 engine_clock,
				     RV770_SMC_SCLK_VALUE *sclk)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct atom_clock_dividers dividers;
	u32 spll_func_cntl =
		pi->clk_regs.rv770.cg_spll_func_cntl;
	u32 spll_func_cntl_2 =
		pi->clk_regs.rv770.cg_spll_func_cntl_2;
	u32 spll_func_cntl_3 =
		pi->clk_regs.rv770.cg_spll_func_cntl_3;
	u32 cg_spll_spread_spectrum =
		pi->clk_regs.rv770.cg_spll_spread_spectrum;
	u32 cg_spll_spread_spectrum_2 =
		pi->clk_regs.rv770.cg_spll_spread_spectrum_2;
	u64 tmp;
	u32 reference_clock = rdev->clock.spll.reference_freq;
	u32 reference_divider, post_divider;
	u32 fbdiv;
	int ret;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					     engine_clock, false, &dividers);
	if (ret)
		return ret;

	reference_divider = 1 + dividers.ref_div;

	/* post divider is hi nibble + lo nibble + 2 when enabled */
	if (dividers.enable_post_div)
		post_divider = (0x0f & (dividers.post_div >> 4)) + (0x0f & dividers.post_div) + 2;
	else
		post_divider = 1;

	/* fbdiv = engine_clock * refdiv * postdiv / refclk, x16384 (14.2 fp) */
	tmp = (u64) engine_clock * reference_divider * post_divider * 16384;
	do_div(tmp, reference_clock);
	fbdiv = (u32) tmp;

	if (dividers.enable_post_div)
		spll_func_cntl |= SPLL_DIVEN;
	else
		spll_func_cntl &= ~SPLL_DIVEN;
	spll_func_cntl &= ~(SPLL_HILEN_MASK | SPLL_LOLEN_MASK | SPLL_REF_DIV_MASK);
	spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
	spll_func_cntl |= SPLL_HILEN((dividers.post_div >> 4) & 0xf);
	spll_func_cntl |= SPLL_LOLEN(dividers.post_div & 0xf);

	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
	spll_func_cntl_2 |= SCLK_MUX_SEL(2);

	spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
	spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
	spll_func_cntl_3 |= SPLL_DITHEN;

	/* engine spread spectrum, if enabled and the vco freq has SS info */
	if (pi->sclk_ss) {
		struct radeon_atom_ss ss;
		u32 vco_freq = engine_clock * post_divider;

		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
						     ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
			u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
			u32 clk_v = ss.percentage * fbdiv / (clk_s * 10000);

			cg_spll_spread_spectrum &= ~CLKS_MASK;
			cg_spll_spread_spectrum |= CLKS(clk_s);
			cg_spll_spread_spectrum |= SSEN;

			cg_spll_spread_spectrum_2 &= ~CLKV_MASK;
			cg_spll_spread_spectrum_2 |= CLKV(clk_v);
		}
	}

	/* SMC tables are big-endian */
	sclk->sclk_value = cpu_to_be32(engine_clock);
	sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
	sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
	sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
	sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(cg_spll_spread_spectrum);
	sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(cg_spll_spread_spectrum_2);

	return 0;
}
566 
/*
 * Translate a VDDC voltage (mV) into an SMC voltage-table entry.  With
 * voltage control disabled the entry is zeroed.  Otherwise the first
 * table entry with vddc >= the request is used; if none qualifies,
 * -EINVAL is returned.
 */
int rv770_populate_vddc_value(struct radeon_device *rdev, u16 vddc,
			      RV770_SMC_VOLTAGE_VALUE *voltage)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int i;

	if (!pi->voltage_control) {
		voltage->index = 0;
		voltage->value = 0;
		return 0;
	}

	for (i = 0; i < pi->valid_vddc_entries; i++) {
		if (vddc > pi->vddc_table[i].vddc)
			continue;
		voltage->index = pi->vddc_table[i].vddc_index;
		voltage->value = cpu_to_be16(vddc);
		return 0;
	}

	return -EINVAL;
}
592 
/*
 * Pick the MVDD level for a memory clock: the low level only when MVDD
 * control is available and mclk is at or below the split frequency;
 * the high level in every other case (including control disabled).
 */
int rv770_populate_mvdd_value(struct radeon_device *rdev, u32 mclk,
			      RV770_SMC_VOLTAGE_VALUE *voltage)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (pi->mvdd_control && mclk <= pi->mvdd_split_frequency) {
		voltage->index = MVDD_LOW_INDEX;
		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
	} else {
		voltage->index = MVDD_HIGH_INDEX;
		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
	}

	return 0;
}
614 
/*
 * Convert one driver performance level into its SMC representation:
 * PCIe gen2 flags, watermark, sclk/mclk PLL programming (dispatched by
 * chip family) and VDDC/MVDD voltage entries.
 *
 * Returns 0 on success or the first failing sub-step's errno.
 */
static int rv770_convert_power_level_to_smc(struct radeon_device *rdev,
					    struct rv7xx_pl *pl,
					    RV770_SMC_HW_PERFORMANCE_LEVEL *level,
					    u8 watermark_level)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int ret;

	/* gen2 only if both the board and this level support it */
	level->gen2PCIE = pi->pcie_gen2 ?
		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
	level->gen2XSP  = (pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0;
	level->backbias = (pl->flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? 1 : 0;
	level->displayWatermark = watermark_level;

	/* sclk PLL programming differs per chip family */
	if (rdev->family == CHIP_RV740)
		ret = rv740_populate_sclk_value(rdev, pl->sclk,
						&level->sclk);
	else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		ret = rv730_populate_sclk_value(rdev, pl->sclk,
						&level->sclk);
	else
		ret = rv770_populate_sclk_value(rdev, pl->sclk,
						&level->sclk);
	if (ret)
		return ret;

	if (rdev->family == CHIP_RV740) {
		if (pi->mem_gddr5) {
			/* strobe mode below the threshold; 0x10 marks it enabled */
			if (pl->mclk <= pi->mclk_strobe_mode_threshold)
				level->strobeMode =
					rv740_get_mclk_frequency_ratio(pl->mclk) | 0x10;
			else
				level->strobeMode = 0;

			/* error detection/correction above the EDC threshold */
			if (pl->mclk > pi->mclk_edc_enable_threshold)
				level->mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
			else
				level->mcFlags =  0;
		}
		ret = rv740_populate_mclk_value(rdev, pl->sclk,
						pl->mclk, &level->mclk);
	} else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		ret = rv730_populate_mclk_value(rdev, pl->sclk,
						pl->mclk, &level->mclk);
	else
		ret = rv770_populate_mclk_value(rdev, pl->sclk,
						pl->mclk, &level->mclk);
	if (ret)
		return ret;

	ret = rv770_populate_vddc_value(rdev, pl->vddc,
					&level->vddc);
	if (ret)
		return ret;

	ret = rv770_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);

	return ret;
}
674 
/*
 * Convert a full driver power state (low/medium/high levels) into an
 * SMC software state: per-level conversion, arbiter/sequencer indices,
 * and the bSP/aT switching parameters.
 *
 * Returns 0 on success or the first failing sub-step's errno.
 */
static int rv770_convert_power_state_to_smc(struct radeon_device *rdev,
					    struct radeon_ps *radeon_state,
					    RV770_SMC_SWSTATE *smc_state)
{
	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
	int ret;

	/* mark the state usable on DC (battery) unless explicitly disallowed */
	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;

	ret = rv770_convert_power_level_to_smc(rdev,
					       &state->low,
					       &smc_state->levels[0],
					       PPSMC_DISPLAY_WATERMARK_LOW);
	if (ret)
		return ret;

	ret = rv770_convert_power_level_to_smc(rdev,
					       &state->medium,
					       &smc_state->levels[1],
					       PPSMC_DISPLAY_WATERMARK_LOW);
	if (ret)
		return ret;

	ret = rv770_convert_power_level_to_smc(rdev,
					       &state->high,
					       &smc_state->levels[2],
					       PPSMC_DISPLAY_WATERMARK_HIGH);
	if (ret)
		return ret;

	/* one MC arbiter register set per level (F1..F3) */
	smc_state->levels[0].arbValue = MC_CG_ARB_FREQ_F1;
	smc_state->levels[1].arbValue = MC_CG_ARB_FREQ_F2;
	smc_state->levels[2].arbValue = MC_CG_ARB_FREQ_F3;

	smc_state->levels[0].seqValue = rv770_get_seq_value(rdev,
							    &state->low);
	smc_state->levels[1].seqValue = rv770_get_seq_value(rdev,
							    &state->medium);
	smc_state->levels[2].seqValue = rv770_get_seq_value(rdev,
							    &state->high);

	rv770_populate_smc_sp(rdev, radeon_state, smc_state);

	return rv770_populate_smc_t(rdev, radeon_state, smc_state);

}
722 
/*
 * Derive the MC arbiter refresh rate for a given engine clock from the
 * DRAM geometry (row count from MC_ARB_RAMCFG) and the refresh-rate
 * field in MC_SEQ_MISC0.
 */
u32 rv770_calculate_memory_refresh_rate(struct radeon_device *rdev,
					u32 engine_clock)
{
	u32 dram_rows;
	u32 dram_refresh_rate;
	u32 mc_arb_rfsh_rate;
	u32 tmp;

	/* rows = 2^(field + 10) */
	tmp = (RREG32(MC_ARB_RAMCFG) & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
	dram_rows = 1 << (tmp + 10);
	/* refresh rate = 2^(field + 3) */
	tmp = RREG32(MC_SEQ_MISC0) & 3;
	dram_refresh_rate = 1 << (tmp + 3);
	mc_arb_rfsh_rate = ((engine_clock * 10) * dram_refresh_rate / dram_rows - 32) / 64;

	return mc_arb_rfsh_rate;
}
739 
/*
 * Program the MC arbiter SQM ratios and refresh rates for all four
 * power-mode slots of a state.  The reference "high clock" is capped
 * at low.sclk * 255/64 — presumably to bound the STATE ratios to their
 * register field width (TODO confirm).
 */
static void rv770_program_memory_timing_parameters(struct radeon_device *rdev,
						   struct radeon_ps *radeon_state)
{
	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 sqm_ratio;
	u32 arb_refresh_rate;
	u32 high_clock;

	if (state->high.sclk < (state->low.sclk * 0xFF / 0x40))
		high_clock = state->high.sclk;
	else
		high_clock = (state->low.sclk * 0xFF / 0x40);

	radeon_atom_set_engine_dram_timings(rdev, high_clock,
					    state->high.mclk);

	/* 64 * high/state ratio per power-mode slot */
	sqm_ratio =
		STATE0(64 * high_clock / pi->boot_sclk) |
		STATE1(64 * high_clock / state->low.sclk) |
		STATE2(64 * high_clock / state->medium.sclk) |
		STATE3(64 * high_clock / state->high.sclk);
	WREG32(MC_ARB_SQM_RATIO, sqm_ratio);

	arb_refresh_rate =
		POWERMODE0(rv770_calculate_memory_refresh_rate(rdev, pi->boot_sclk)) |
		POWERMODE1(rv770_calculate_memory_refresh_rate(rdev, state->low.sclk)) |
		POWERMODE2(rv770_calculate_memory_refresh_rate(rdev, state->medium.sclk)) |
		POWERMODE3(rv770_calculate_memory_refresh_rate(rdev, state->high.sclk));
	WREG32(MC_ARB_RFSH_RATE, arb_refresh_rate);
}
771 
/*
 * Enable/disable the backbias pad.  Disabling also clears
 * BACKBIAS_VALUE, hence the wider mask in the else branch.
 */
void rv770_enable_backbias(struct radeon_device *rdev,
			   bool enable)
{
	if (enable)
		WREG32_P(GENERAL_PWRMGT, BACKBIAS_PAD_EN, ~BACKBIAS_PAD_EN);
	else
		WREG32_P(GENERAL_PWRMGT, 0, ~(BACKBIAS_VALUE | BACKBIAS_PAD_EN));
}
780 
/*
 * Enable/disable spread spectrum on the engine and (RV740 only) memory
 * PLLs, honoring the per-board sclk_ss/mclk_ss capability flags.
 * Disable unconditionally clears every SS enable bit.
 */
static void rv770_enable_spread_spectrum(struct radeon_device *rdev,
					 bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (enable) {
		if (pi->sclk_ss)
			WREG32_P(GENERAL_PWRMGT, DYN_SPREAD_SPECTRUM_EN, ~DYN_SPREAD_SPECTRUM_EN);

		if (pi->mclk_ss) {
			/* only RV740 has dedicated mclk SS control */
			if (rdev->family == CHIP_RV740)
				rv740_enable_mclk_spread_spectrum(rdev, true);
		}
	} else {
		WREG32_P(CG_SPLL_SPREAD_SPECTRUM, 0, ~SSEN);

		WREG32_P(GENERAL_PWRMGT, 0, ~DYN_SPREAD_SPECTRUM_EN);

		WREG32_P(CG_MPLL_SPREAD_SPECTRUM, 0, ~SSEN);

		if (rdev->family == CHIP_RV740)
			rv740_enable_mclk_spread_spectrum(rdev, false);
	}
}
805 
/*
 * Program the MPLL lock/reset times — only needed on RV770 proper with
 * non-GDDR5 memory.
 */
static void rv770_program_mpll_timing_parameters(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if ((rdev->family == CHIP_RV770) && !pi->mem_gddr5) {
		WREG32(MPLL_TIME,
		       (MPLL_LOCK_TIME(R600_MPLLLOCKTIME_DFLT * pi->ref_div) |
			MPLL_RESET_TIME(R600_MPLLRESETTIME_DFLT)));
	}
}
816 
/*
 * Compute the default (bsp/bsu) and performance (pbsp/pbsu) switching
 * period u/p pairs from the ASI intervals and xclk, pack them into
 * dsp/psp, and program the default pair into CG_BSP.
 */
void rv770_setup_bsp(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 xclk = radeon_get_xclk(rdev);

	r600_calculate_u_and_p(pi->asi,
			       xclk,
			       16,
			       &pi->bsp,
			       &pi->bsu);

	r600_calculate_u_and_p(pi->pasi,
			       xclk,
			       16,
			       &pi->pbsp,
			       &pi->pbsu);

	pi->dsp = BSP(pi->bsp) | BSU(pi->bsu);
	pi->psp = BSP(pi->pbsp) | BSU(pi->pbsu);

	WREG32(CG_BSP, pi->dsp);

}
840 
/* Program the GICST field of CG_GIT with the r600 default. */
void rv770_program_git(struct radeon_device *rdev)
{
	WREG32_P(CG_GIT, CG_GICST(R600_GICST_DFLT), ~CG_GICST_MASK);
}
845 
/*
 * Program the thermal throttling tables (UTC/DTC per throttling class)
 * and the FIR trend-direction mode.  td is currently hard-wired to the
 * r600 default, so the AUTO/UP/DOWN branches select the matching
 * force/trend bits for that default.
 */
void rv770_program_tp(struct radeon_device *rdev)
{
	int i;
	enum r600_td td = R600_TD_DFLT;

	for (i = 0; i < R600_PM_NUMBER_OF_TC; i++)
		WREG32(CG_FFCT_0 + (i * 4), (UTC_0(r600_utc[i]) | DTC_0(r600_dtc[i])));

	if (td == R600_TD_AUTO)
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_FORCE_TREND_SEL);
	else
		WREG32_P(SCLK_PWRMGT_CNTL, FIR_FORCE_TREND_SEL, ~FIR_FORCE_TREND_SEL);
	if (td == R600_TD_UP)
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_TREND_MODE);
	if (td == R600_TD_DOWN)
		WREG32_P(SCLK_PWRMGT_CNTL, FIR_TREND_MODE, ~FIR_TREND_MODE);
}
863 
/* Program the thermal protection period register with the r600 default. */
void rv770_program_tpp(struct radeon_device *rdev)
{
	WREG32(CG_TPC, R600_TPC_DFLT);
}
868 
/* Program the static state period (SST/SSTU) with the r600 defaults. */
void rv770_program_sstp(struct radeon_device *rdev)
{
	WREG32(CG_SSP, (SSTU(R600_SSTU_DFLT) | SST(R600_SST_DFLT)));
}
873 
/* Enable SPLL divider synchronization for engine clock changes. */
void rv770_program_engine_speed_parameters(struct radeon_device *rdev)
{
	WREG32_P(SPLL_CNTL_MODE, SPLL_DIV_SYNC, ~SPLL_DIV_SYNC);
}
878 
rv770_enable_display_gap(struct radeon_device * rdev)879 static void rv770_enable_display_gap(struct radeon_device *rdev)
880 {
881 	u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
882 
883 	tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
884 	tmp |= (DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE) |
885 		DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE));
886 	WREG32(CG_DISPLAY_GAP_CNTL, tmp);
887 }
888 
/* Program the voltage-change timing register with the board's vrc value. */
void rv770_program_vc(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	WREG32(CG_FTV, pi->vrc);
}
895 
/* Clear the voltage-change timing register. */
void rv770_clear_vc(struct radeon_device *rdev)
{
	WREG32(CG_FTV, 0);
}
900 
rv770_upload_firmware(struct radeon_device * rdev)901 int rv770_upload_firmware(struct radeon_device *rdev)
902 {
903 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
904 	int ret;
905 
906 	rv770_reset_smc(rdev);
907 	rv770_stop_smc_clock(rdev);
908 
909 	ret = rv770_load_smc_ucode(rdev, pi->sram_end);
910 	if (ret)
911 		return ret;
912 
913 	return 0;
914 }
915 
/* Build the ACPI (lowest power) state in the SMC state table.
 *
 * Starts from a copy of the initial state, then overrides the voltage and
 * forces the memory and engine PLLs into a reset/bypass configuration with
 * both mclk and sclk values zeroed.  All three performance levels of the
 * ACPI state are identical.
 */
static int rv770_populate_smc_acpi_state(struct radeon_device *rdev,
					 RV770_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	u32 mpll_ad_func_cntl =
		pi->clk_regs.rv770.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 =
		pi->clk_regs.rv770.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl =
		pi->clk_regs.rv770.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 =
		pi->clk_regs.rv770.mpll_dq_func_cntl_2;
	u32 spll_func_cntl =
		pi->clk_regs.rv770.cg_spll_func_cntl;
	u32 spll_func_cntl_2 =
		pi->clk_regs.rv770.cg_spll_func_cntl_2;
	u32 spll_func_cntl_3 =
		pi->clk_regs.rv770.cg_spll_func_cntl_3;
	u32 mclk_pwrmgt_cntl;
	u32 dll_cntl;

	table->ACPIState = table->initialState;

	/* ACPI state is an AC state; clear the DC flag inherited above. */
	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;

	if (pi->acpi_vddc) {
		rv770_populate_vddc_value(rdev, pi->acpi_vddc,
					  &table->ACPIState.levels[0].vddc);
		if (pi->pcie_gen2) {
			if (pi->acpi_pcie_gen2)
				table->ACPIState.levels[0].gen2PCIE = 1;
			else
				table->ACPIState.levels[0].gen2PCIE = 0;
		} else
			table->ACPIState.levels[0].gen2PCIE = 0;
		if (pi->acpi_pcie_gen2)
			table->ACPIState.levels[0].gen2XSP = 1;
		else
			table->ACPIState.levels[0].gen2XSP = 0;
	} else {
		/* No dedicated ACPI voltage; fall back to the table minimum. */
		rv770_populate_vddc_value(rdev, pi->min_vddc_in_table,
					  &table->ACPIState.levels[0].vddc);
		table->ACPIState.levels[0].gen2PCIE = 0;
	}


	mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;

	mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;

	/* Hold all memory DLL clocks in reset for the ACPI state. */
	mclk_pwrmgt_cntl = (MRDCKA0_RESET |
			    MRDCKA1_RESET |
			    MRDCKB0_RESET |
			    MRDCKB1_RESET |
			    MRDCKC0_RESET |
			    MRDCKC1_RESET |
			    MRDCKD0_RESET |
			    MRDCKD1_RESET);

	dll_cntl = 0xff000000;

	spll_func_cntl |= SPLL_RESET | SPLL_SLEEP | SPLL_BYPASS_EN;

	/* NOTE(review): mux select 4 presumably routes sclk to a bypass
	 * source while the SPLL is in reset -- confirm against register spec.
	 */
	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
	spll_func_cntl_2 |= SCLK_MUX_SEL(4);

	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);

	table->ACPIState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	table->ACPIState.levels[0].mclk.mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);

	table->ACPIState.levels[0].mclk.mclk770.mclk_value = 0;

	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);

	table->ACPIState.levels[0].sclk.sclk_value = 0;

	rv770_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);

	/* All three levels of the ACPI state are the same. */
	table->ACPIState.levels[1] = table->ACPIState.levels[0];
	table->ACPIState.levels[2] = table->ACPIState.levels[0];

	return 0;
}
1006 
/* Pick the boot-time MVDD level by comparing the S0 SMIO setting against
 * the low-MVDD SMIO pattern; fill @voltage accordingly.  Always returns 0.
 */
int rv770_populate_initial_mvdd_value(struct radeon_device *rdev,
				      RV770_SMC_VOLTAGE_VALUE *voltage)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	bool boot_is_low;

	boot_is_low = (pi->s0_vid_lower_smio_cntl & pi->mvdd_mask_low) ==
		      (pi->mvdd_low_smio[MVDD_LOW_INDEX] & pi->mvdd_mask_low);

	voltage->index = boot_is_low ? MVDD_LOW_INDEX : MVDD_HIGH_INDEX;
	voltage->value = cpu_to_be16(boot_is_low ? MVDD_LOW_VALUE : MVDD_HIGH_VALUE);

	return 0;
}
1023 
/* Fill the SMC initial (boot) state from the cached clock registers and
 * the boot power state's low level.  All three performance levels are
 * copies of level 0, and the state is flagged as valid on DC power.
 */
static int rv770_populate_smc_initial_state(struct radeon_device *rdev,
					    struct radeon_ps *radeon_state,
					    RV770_SMC_STATETABLE *table)
{
	struct rv7xx_ps *initial_state = rv770_get_ps(radeon_state);
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 a_t;

	/* Memory PLL / DLL registers, captured at init by
	 * rv770_read_clock_registers(); SMC expects big-endian.
	 */
	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl);
	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl_2);
	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl);
	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl_2);
	table->initialState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.mclk_pwrmgt_cntl);
	table->initialState.levels[0].mclk.mclk770.vDLL_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.dll_cntl);

	table->initialState.levels[0].mclk.mclk770.vMPLL_SS =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ss1);
	table->initialState.levels[0].mclk.mclk770.vMPLL_SS2 =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ss2);

	table->initialState.levels[0].mclk.mclk770.mclk_value =
		cpu_to_be32(initial_state->low.mclk);

	/* Engine PLL registers. */
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_2);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_3);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum_2);

	table->initialState.levels[0].sclk.sclk_value =
		cpu_to_be32(initial_state->low.sclk);

	/* Boot state uses the F0 arbitration set. */
	table->initialState.levels[0].arbValue = MC_CG_ARB_FREQ_F0;

	table->initialState.levels[0].seqValue =
		rv770_get_seq_value(rdev, &initial_state->low);

	rv770_populate_vddc_value(rdev,
				  initial_state->low.vddc,
				  &table->initialState.levels[0].vddc);
	rv770_populate_initial_mvdd_value(rdev,
					  &table->initialState.levels[0].mvdd);

	a_t = CG_R(0xffff) | CG_L(0);
	table->initialState.levels[0].aT = cpu_to_be32(a_t);

	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);

	if (pi->boot_in_gen2)
		table->initialState.levels[0].gen2PCIE = 1;
	else
		table->initialState.levels[0].gen2PCIE = 0;
	if (initial_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
		table->initialState.levels[0].gen2XSP = 1;
	else
		table->initialState.levels[0].gen2XSP = 0;

	/* RV740 with GDDR5 additionally needs strobe-mode and EDC setup
	 * based on the boot mclk vs. the configured thresholds.
	 */
	if (rdev->family == CHIP_RV740) {
		if (pi->mem_gddr5) {
			if (initial_state->low.mclk <= pi->mclk_strobe_mode_threshold)
				table->initialState.levels[0].strobeMode =
					rv740_get_mclk_frequency_ratio(initial_state->low.mclk) | 0x10;
			else
				table->initialState.levels[0].strobeMode = 0;

			if (initial_state->low.mclk >= pi->mclk_edc_enable_threshold)
				table->initialState.levels[0].mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
			else
				table->initialState.levels[0].mcFlags =  0;
		}
	}

	table->initialState.levels[1] = table->initialState.levels[0];
	table->initialState.levels[2] = table->initialState.levels[0];

	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;

	return 0;
}
1114 
/* Export the constructed VDDC step table (SMIO pin patterns and mask)
 * into the SMC state table, and record the index of the highest VDDC
 * entry usable by the power-play table.  Always returns 0.
 */
static int rv770_populate_smc_vddc_table(struct radeon_device *rdev,
					 RV770_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int i;

	for (i = 0; i < pi->valid_vddc_entries; i++) {
		table->highSMIO[pi->vddc_table[i].vddc_index] =
			pi->vddc_table[i].high_smio;
		table->lowSMIO[pi->vddc_table[i].vddc_index] =
			cpu_to_be32(pi->vddc_table[i].low_smio);
	}

	table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDC] = 0;
	table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDC] =
		cpu_to_be32(pi->vddc_mask_low);

	/* Advance to the first entry at or above max_vddc_in_table.
	 * NOTE(review): if every entry is below max_vddc_in_table the loop
	 * ends with i == valid_vddc_entries and the read below is one past
	 * the last populated entry -- confirm the tables guarantee a match.
	 */
	for (i = 0;
	     ((i < pi->valid_vddc_entries) &&
	      (pi->max_vddc_in_table >
	       pi->vddc_table[i].vddc));
	     i++);

	table->maxVDDCIndexInPPTable =
		pi->vddc_table[i].vddc_index;

	return 0;
}
1143 
/* If MVDD control is enabled, merge the MVDD SMIO pin patterns and mask
 * into the SMC state table.  Always returns 0.
 */
static int rv770_populate_smc_mvdd_table(struct radeon_device *rdev,
					 RV770_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (pi->mvdd_control) {
		/* OR into lowSMIO: the VDDC table may share these slots. */
		table->lowSMIO[MVDD_HIGH_INDEX] |=
			cpu_to_be32(pi->mvdd_low_smio[MVDD_HIGH_INDEX]);
		table->lowSMIO[MVDD_LOW_INDEX] |=
			cpu_to_be32(pi->mvdd_low_smio[MVDD_LOW_INDEX]);

		table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_MVDD] = 0;
		table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_MVDD] =
			cpu_to_be32(pi->mvdd_mask_low);
	}

	return 0;
}
1162 
/* Build the complete SMC state table (voltage tables, thermal/system
 * flags, initial and ACPI states, dispatched per ASIC family) and upload
 * it to SMC SRAM.  Returns 0 on success or a negative error code.
 */
static int rv770_init_smc_table(struct radeon_device *rdev,
				struct radeon_ps *radeon_boot_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
	RV770_SMC_STATETABLE *table = &pi->smc_statetable;
	int ret;

	memset(table, 0, sizeof(RV770_SMC_STATETABLE));

	pi->boot_sclk = boot_state->low.sclk;

	rv770_populate_smc_vddc_table(rdev, table);
	rv770_populate_smc_mvdd_table(rdev, table);

	/* Map the board's thermal sensor type to the SMC protection mode. */
	switch (rdev->pm.int_thermal_type) {
	case THERMAL_TYPE_RV770:
	case THERMAL_TYPE_ADT7473_WITH_INTERNAL:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
		break;
	case THERMAL_TYPE_NONE:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
		break;
	case THERMAL_TYPE_EXTERNAL_GPIO:
	default:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
		break;
	}

	/* AC/DC transition behavior, from the power-play platform caps. */
	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC) {
		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;

		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_DONT_WAIT_FOR_VBLANK_ON_ALERT)
			table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_DONT_WAIT_FOR_VBLANK;

		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_GOTO_BOOT_ON_ALERT)
			table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_ACTION_GOTOINITIALSTATE;
	}

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;

	if (pi->mem_gddr5)
		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;

	/* RV730/RV710 have their own initial-state layout. */
	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		ret = rv730_populate_smc_initial_state(rdev, radeon_boot_state, table);
	else
		ret = rv770_populate_smc_initial_state(rdev, radeon_boot_state, table);
	if (ret)
		return ret;

	/* ACPI state layout also differs per family. */
	if (rdev->family == CHIP_RV740)
		ret = rv740_populate_smc_acpi_state(rdev, table);
	else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		ret = rv730_populate_smc_acpi_state(rdev, table);
	else
		ret = rv770_populate_smc_acpi_state(rdev, table);
	if (ret)
		return ret;

	table->driverState = table->initialState;

	return rv770_copy_bytes_to_smc(rdev,
				       pi->state_table_start,
				       (const u8 *)table,
				       sizeof(RV770_SMC_STATETABLE),
				       pi->sram_end);
}
1232 
rv770_construct_vddc_table(struct radeon_device * rdev)1233 static int rv770_construct_vddc_table(struct radeon_device *rdev)
1234 {
1235 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1236 	u16 min, max, step;
1237 	u32 steps = 0;
1238 	u8 vddc_index = 0;
1239 	u32 i;
1240 
1241 	radeon_atom_get_min_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &min);
1242 	radeon_atom_get_max_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &max);
1243 	radeon_atom_get_voltage_step(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &step);
1244 
1245 	steps = (max - min) / step + 1;
1246 
1247 	if (steps > MAX_NO_VREG_STEPS)
1248 		return -EINVAL;
1249 
1250 	for (i = 0; i < steps; i++) {
1251 		u32 gpio_pins, gpio_mask;
1252 
1253 		pi->vddc_table[i].vddc = (u16)(min + i * step);
1254 		radeon_atom_get_voltage_gpio_settings(rdev,
1255 						      pi->vddc_table[i].vddc,
1256 						      SET_VOLTAGE_TYPE_ASIC_VDDC,
1257 						      &gpio_pins, &gpio_mask);
1258 		pi->vddc_table[i].low_smio = gpio_pins & gpio_mask;
1259 		pi->vddc_table[i].high_smio = 0;
1260 		pi->vddc_mask_low = gpio_mask;
1261 		if (i > 0) {
1262 			if ((pi->vddc_table[i].low_smio !=
1263 			     pi->vddc_table[i - 1].low_smio ) ||
1264 			     (pi->vddc_table[i].high_smio !=
1265 			      pi->vddc_table[i - 1].high_smio))
1266 				vddc_index++;
1267 		}
1268 		pi->vddc_table[i].vddc_index = vddc_index;
1269 	}
1270 
1271 	pi->valid_vddc_entries = (u8)steps;
1272 
1273 	return 0;
1274 }
1275 
rv770_get_mclk_split_point(struct atom_memory_info * memory_info)1276 static u32 rv770_get_mclk_split_point(struct atom_memory_info *memory_info)
1277 {
1278 	if (memory_info->mem_type == MEM_TYPE_GDDR3)
1279 		return 30000;
1280 
1281 	return 0;
1282 }
1283 
/* Query the GPIO (SMIO) pin patterns used to select the high and low
 * MVDD levels and cache them in the power info.  Always returns 0.
 */
static int rv770_get_mvdd_pin_configuration(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 gpio_pins, gpio_mask;

	radeon_atom_get_voltage_gpio_settings(rdev,
					      MVDD_HIGH_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
					      &gpio_pins, &gpio_mask);
	pi->mvdd_mask_low = gpio_mask;
	pi->mvdd_low_smio[MVDD_HIGH_INDEX] =
		gpio_pins & gpio_mask;

	radeon_atom_get_voltage_gpio_settings(rdev,
					      MVDD_LOW_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
					      &gpio_pins, &gpio_mask);
	pi->mvdd_low_smio[MVDD_LOW_INDEX] =
		gpio_pins & gpio_mask;

	return 0;
}
1304 
/* Memory module index is stored by the VBIOS in bits 23:16 of
 * BIOS_SCRATCH_4.
 */
u8 rv770_get_memory_module_index(struct radeon_device *rdev)
{
	return (u8) ((RREG32(BIOS_SCRATCH_4) >> 16) & 0xff);
}
1309 
/* Determine whether MVDD control is usable: look up the memory module
 * info, derive the split frequency, and fetch the SMIO pin config.
 * Disables mvdd_control (returning 0) when info is unavailable or no
 * split point exists for this memory type.
 */
static int rv770_get_mvdd_configuration(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u8 memory_module_index;
	struct atom_memory_info memory_info;

	memory_module_index = rv770_get_memory_module_index(rdev);

	if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info)) {
		pi->mvdd_control = false;
		return 0;
	}

	pi->mvdd_split_frequency =
		rv770_get_mclk_split_point(&memory_info);

	if (pi->mvdd_split_frequency == 0) {
		pi->mvdd_control = false;
		return 0;
	}

	return rv770_get_mvdd_pin_configuration(rdev);
}
1333 
/* Toggle the VOLT_PWRMGT_EN bit of GENERAL_PWRMGT. */
void rv770_enable_voltage_control(struct radeon_device *rdev,
				  bool enable)
{
	WREG32_P(GENERAL_PWRMGT, enable ? VOLT_PWRMGT_EN : 0, ~VOLT_PWRMGT_EN);
}
1342 
/* Program display-gap behavior for mclk changes based on which CRTCs
 * will be active: wait for vblank on an active display, otherwise ignore.
 * Only CRTC0 and CRTC1 are considered here.
 */
static void rv770_program_display_gap(struct radeon_device *rdev)
{
	u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);

	tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
	if (rdev->pm.dpm.new_active_crtcs & 1) {
		tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
		tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
	} else if (rdev->pm.dpm.new_active_crtcs & 2) {
		tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
		tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
	} else {
		tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
		tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
	}
	WREG32(CG_DISPLAY_GAP_CNTL, tmp);
}
1360 
/* Enable/disable dynamic PCIe gen2 switching: program the BIF side,
 * then toggle ENABLE_GEN2PCIE in GENERAL_PWRMGT to match.
 */
static void rv770_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
					   bool enable)
{
	rv770_enable_bif_dynamic_pcie_gen2(rdev, enable);

	WREG32_P(GENERAL_PWRMGT, enable ? ENABLE_GEN2PCIE : 0, ~ENABLE_GEN2PCIE);
}
1371 
r7xx_program_memory_timing_parameters(struct radeon_device * rdev,struct radeon_ps * radeon_new_state)1372 static void r7xx_program_memory_timing_parameters(struct radeon_device *rdev,
1373 						  struct radeon_ps *radeon_new_state)
1374 {
1375 	if ((rdev->family == CHIP_RV730) ||
1376 	    (rdev->family == CHIP_RV710) ||
1377 	    (rdev->family == CHIP_RV740))
1378 		rv730_program_memory_timing_parameters(rdev, radeon_new_state);
1379 	else
1380 		rv770_program_memory_timing_parameters(rdev, radeon_new_state);
1381 }
1382 
/* Convert the requested power state to SMC format and upload it into the
 * driverState slot of the SMC state table.  Returns 0 or a negative errno.
 */
static int rv770_upload_sw_state(struct radeon_device *rdev,
				 struct radeon_ps *radeon_new_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	/* driverState lives inside the state table in SMC SRAM. */
	u16 address = pi->state_table_start +
		offsetof(RV770_SMC_STATETABLE, driverState);
	RV770_SMC_SWSTATE state = { 0 };
	int ret;

	ret = rv770_convert_power_state_to_smc(rdev, radeon_new_state, &state);
	if (ret)
		return ret;

	return rv770_copy_bytes_to_smc(rdev, address, (const u8 *)&state,
				       sizeof(RV770_SMC_SWSTATE),
				       pi->sram_end);
}
1400 
/* Ask the SMC to halt and wait until it reports inactive.
 * Returns 0 on success, -EINVAL on either failure.
 */
int rv770_halt_smc(struct radeon_device *rdev)
{
	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Halt) != PPSMC_Result_OK)
		return -EINVAL;

	if (rv770_wait_for_smc_inactive(rdev) != PPSMC_Result_OK)
		return -EINVAL;

	return 0;
}
1411 
rv770_resume_smc(struct radeon_device * rdev)1412 int rv770_resume_smc(struct radeon_device *rdev)
1413 {
1414 	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Resume) != PPSMC_Result_OK)
1415 		return -EINVAL;
1416 	return 0;
1417 }
1418 
/* Tell the SMC to switch to the software (driver) state.  Failure is only
 * logged; the function always returns 0.
 */
int rv770_set_sw_state(struct radeon_device *rdev)
{
	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToSwState) != PPSMC_Result_OK)
		DRM_DEBUG("rv770_set_sw_state failed\n");
	return 0;
}
1425 
rv770_set_boot_state(struct radeon_device * rdev)1426 int rv770_set_boot_state(struct radeon_device *rdev)
1427 {
1428 	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToInitialState) != PPSMC_Result_OK)
1429 		return -EINVAL;
1430 	return 0;
1431 }
1432 
/* Update UVD clocks before the engine clock change, but only when the
 * engine clock is going DOWN (new high sclk below current); otherwise the
 * update is deferred to the "after" hook.  No-op if vclk/dclk are unchanged.
 */
void rv770_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
					      struct radeon_ps *new_ps,
					      struct radeon_ps *old_ps)
{
	struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
	struct rv7xx_ps *current_state = rv770_get_ps(old_ps);

	if ((new_ps->vclk == old_ps->vclk) &&
	    (new_ps->dclk == old_ps->dclk))
		return;

	if (new_state->high.sclk >= current_state->high.sclk)
		return;

	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
}
1449 
/* Update UVD clocks after the engine clock change, but only when the
 * engine clock went UP (new high sclk at or above current); the "before"
 * hook handles the downward case.  No-op if vclk/dclk are unchanged.
 */
void rv770_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
					     struct radeon_ps *new_ps,
					     struct radeon_ps *old_ps)
{
	struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
	struct rv7xx_ps *current_state = rv770_get_ps(old_ps);

	if ((new_ps->vclk == old_ps->vclk) &&
	    (new_ps->dclk == old_ps->dclk))
		return;

	if (new_state->high.sclk < current_state->high.sclk)
		return;

	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
}
1466 
/* Before a state switch, clear any forced level and restrict the SMC to
 * the lowest performance level.  Returns 0 or -EINVAL.
 */
int rv770_restrict_performance_levels_before_switch(struct radeon_device *rdev)
{
	if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_NoForcedLevel)) != PPSMC_Result_OK)
		return -EINVAL;

	if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_TwoLevelsDisabled)) != PPSMC_Result_OK)
		return -EINVAL;

	return 0;
}
1477 
rv770_dpm_force_performance_level(struct radeon_device * rdev,enum radeon_dpm_forced_level level)1478 int rv770_dpm_force_performance_level(struct radeon_device *rdev,
1479 				      enum radeon_dpm_forced_level level)
1480 {
1481 	PPSMC_Msg msg;
1482 
1483 	if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
1484 		if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_ZeroLevelsDisabled) != PPSMC_Result_OK)
1485 			return -EINVAL;
1486 		msg = PPSMC_MSG_ForceHigh;
1487 	} else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
1488 		if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1489 			return -EINVAL;
1490 		msg = (PPSMC_Msg)(PPSMC_MSG_TwoLevelsDisabled);
1491 	} else {
1492 		if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1493 			return -EINVAL;
1494 		msg = (PPSMC_Msg)(PPSMC_MSG_ZeroLevelsDisabled);
1495 	}
1496 
1497 	if (rv770_send_msg_to_smc(rdev, msg) != PPSMC_Result_OK)
1498 		return -EINVAL;
1499 
1500 	rdev->pm.dpm.forced_level = level;
1501 
1502 	return 0;
1503 }
1504 
/* Release the SMC from reset and start its clock. */
void r7xx_start_smc(struct radeon_device *rdev)
{
	rv770_start_smc(rdev);
	rv770_start_smc_clock(rdev);
}
1510 
1511 
/* Put the SMC back into reset and stop its clock; inverse of r7xx_start_smc(). */
void r7xx_stop_smc(struct radeon_device *rdev)
{
	rv770_reset_smc(rdev);
	rv770_stop_smc_clock(rdev);
}
1517 
/* Capture the current SPLL/MPLL/DLL register values into pi->clk_regs so
 * later state-table population can reuse the boot configuration.
 */
static void rv770_read_clock_registers(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	pi->clk_regs.rv770.cg_spll_func_cntl =
		RREG32(CG_SPLL_FUNC_CNTL);
	pi->clk_regs.rv770.cg_spll_func_cntl_2 =
		RREG32(CG_SPLL_FUNC_CNTL_2);
	pi->clk_regs.rv770.cg_spll_func_cntl_3 =
		RREG32(CG_SPLL_FUNC_CNTL_3);
	pi->clk_regs.rv770.cg_spll_spread_spectrum =
		RREG32(CG_SPLL_SPREAD_SPECTRUM);
	pi->clk_regs.rv770.cg_spll_spread_spectrum_2 =
		RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
	pi->clk_regs.rv770.mpll_ad_func_cntl =
		RREG32(MPLL_AD_FUNC_CNTL);
	pi->clk_regs.rv770.mpll_ad_func_cntl_2 =
		RREG32(MPLL_AD_FUNC_CNTL_2);
	pi->clk_regs.rv770.mpll_dq_func_cntl =
		RREG32(MPLL_DQ_FUNC_CNTL);
	pi->clk_regs.rv770.mpll_dq_func_cntl_2 =
		RREG32(MPLL_DQ_FUNC_CNTL_2);
	pi->clk_regs.rv770.mclk_pwrmgt_cntl =
		RREG32(MCLK_PWRMGT_CNTL);
	pi->clk_regs.rv770.dll_cntl = RREG32(DLL_CNTL);
}
1544 
r7xx_read_clock_registers(struct radeon_device * rdev)1545 static void r7xx_read_clock_registers(struct radeon_device *rdev)
1546 {
1547 	if (rdev->family == CHIP_RV740)
1548 		rv740_read_clock_registers(rdev);
1549 	else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1550 		rv730_read_clock_registers(rdev);
1551 	else
1552 		rv770_read_clock_registers(rdev);
1553 }
1554 
/* Cache the boot-time S0 voltage SMIO setting for later comparison
 * (see rv770_populate_initial_mvdd_value / rv770_reset_smio_status).
 */
void rv770_read_voltage_smio_registers(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	pi->s0_vid_lower_smio_cntl =
		RREG32(S0_VID_LOWER_SMIO_CNTL);
}
1562 
/* Copy the currently selected SMIO voltage setting back into slot S0 and
 * point the hardware at slot 0.  If slot 0 is already selected there is
 * nothing to do; unknown indices fall back to the cached boot value.
 */
void rv770_reset_smio_status(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 sw_smio_index, vid_smio_cntl;

	sw_smio_index =
		(RREG32(GENERAL_PWRMGT) & SW_SMIO_INDEX_MASK) >> SW_SMIO_INDEX_SHIFT;
	switch (sw_smio_index) {
	case 3:
		vid_smio_cntl = RREG32(S3_VID_LOWER_SMIO_CNTL);
		break;
	case 2:
		vid_smio_cntl = RREG32(S2_VID_LOWER_SMIO_CNTL);
		break;
	case 1:
		vid_smio_cntl = RREG32(S1_VID_LOWER_SMIO_CNTL);
		break;
	case 0:
		/* Already on S0; nothing to restore. */
		return;
	default:
		vid_smio_cntl = pi->s0_vid_lower_smio_cntl;
		break;
	}

	WREG32(S0_VID_LOWER_SMIO_CNTL, vid_smio_cntl);
	WREG32_P(GENERAL_PWRMGT, SW_SMIO_INDEX(0), ~SW_SMIO_INDEX_MASK);
}
1590 
rv770_get_memory_type(struct radeon_device * rdev)1591 void rv770_get_memory_type(struct radeon_device *rdev)
1592 {
1593 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1594 	u32 tmp;
1595 
1596 	tmp = RREG32(MC_SEQ_MISC0);
1597 
1598 	if (((tmp & MC_SEQ_MISC0_GDDR5_MASK) >> MC_SEQ_MISC0_GDDR5_SHIFT) ==
1599 	    MC_SEQ_MISC0_GDDR5_VALUE)
1600 		pi->mem_gddr5 = true;
1601 	else
1602 		pi->mem_gddr5 = false;
1603 
1604 }
1605 
rv770_get_pcie_gen2_status(struct radeon_device * rdev)1606 void rv770_get_pcie_gen2_status(struct radeon_device *rdev)
1607 {
1608 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1609 	u32 tmp;
1610 
1611 	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
1612 
1613 	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
1614 	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
1615 		pi->pcie_gen2 = true;
1616 	else
1617 		pi->pcie_gen2 = false;
1618 
1619 	if (pi->pcie_gen2) {
1620 		if (tmp & LC_CURRENT_DATA_RATE)
1621 			pi->boot_in_gen2 = true;
1622 		else
1623 			pi->boot_in_gen2 = false;
1624 	} else
1625 		pi->boot_in_gen2 = false;
1626 }
1627 
#if 0
/* Currently unused: request the SMC's minimum-power (ULP) state.
 * When gfx clock gating is active, the gfx clock is briefly forced on
 * (with a dummy register read to flush the writes) before the message.
 */
static int rv770_enter_ulp_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (pi->gfx_clock_gating) {
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
		/* Read back to make sure the forced-on toggle has landed. */
		RREG32(GB_TILING_CONFIG);
	}

	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
		 ~HOST_SMC_MSG_MASK);

	udelay(7000);

	return 0;
}

/* Currently unused: leave ULP state and poll (up to usec_timeout ms) for
 * the SMC's acknowledgement before re-enabling dynamic gfx clock gating.
 */
static int rv770_exit_ulp_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int i;

	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_ResumeFromMinimumPower),
		 ~HOST_SMC_MSG_MASK);

	udelay(7000);

	for (i = 0; i < rdev->usec_timeout; i++) {
		if (((RREG32(SMC_MSG) & HOST_SMC_RESP_MASK) >> HOST_SMC_RESP_SHIFT) == 1)
			break;
		udelay(1000);
	}

	if (pi->gfx_clock_gating)
		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);

	return 0;
}
#endif
1670 
/* Determine the mclk ODT (on-die termination) threshold.  Only RV730 and
 * RV710 with DDR2/DDR3 memory use a non-zero threshold (30000); everything
 * else leaves it at 0 (disabled).
 */
static void rv770_get_mclk_odt_threshold(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u8 memory_module_index;
	struct atom_memory_info memory_info;

	pi->mclk_odt_threshold = 0;

	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) {
		memory_module_index = rv770_get_memory_module_index(rdev);

		if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info))
			return;

		if (memory_info.mem_type == MEM_TYPE_DDR2 ||
		    memory_info.mem_type == MEM_TYPE_DDR3)
			pi->mclk_odt_threshold = 30000;
	}
}
1690 
rv770_get_max_vddc(struct radeon_device * rdev)1691 void rv770_get_max_vddc(struct radeon_device *rdev)
1692 {
1693 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1694 	u16 vddc;
1695 
1696 	if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc))
1697 		pi->max_vddc = 0;
1698 	else
1699 		pi->max_vddc = vddc;
1700 }
1701 
/* Convert the voltage/backbias/ACPI/vblank response times into reference
 * clock ticks and program them into the SMC soft registers.
 */
void rv770_program_response_times(struct radeon_device *rdev)
{
	u32 voltage_response_time, backbias_response_time;
	u32 acpi_delay_time, vbi_time_out;
	u32 vddc_dly, bb_dly, acpi_dly, vbi_dly;
	u32 reference_clock;

	voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
	backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;

	/* Fall back to sane defaults when the power table provides none. */
	if (voltage_response_time == 0)
		voltage_response_time = 1000;

	if (backbias_response_time == 0)
		backbias_response_time = 1000;

	acpi_delay_time = 15000;
	vbi_time_out = 100000;

	reference_clock = radeon_get_xclk(rdev);

	/* Scale times to SMC delay units (time * refclk / 1600). */
	vddc_dly = (voltage_response_time  * reference_clock) / 1600;
	bb_dly = (backbias_response_time * reference_clock) / 1600;
	acpi_dly = (acpi_delay_time * reference_clock) / 1600;
	vbi_dly = (vbi_time_out * reference_clock) / 1600;

	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_delay_vreg, vddc_dly);
	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_delay_acpi, acpi_dly);
	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
#if 0
	/* XXX look up hw revision */
	if (WEKIVA_A21)
		rv770_write_smc_soft_register(rdev,
					      RV770_SMC_SOFT_REGISTER_baby_step_timer,
					      0x10);
#endif
}
1744 
/* Reprogram DC ODT before the state switch, but only when ODT is being
 * turned OFF (current uses DC, new does not); enabling is handled by the
 * "after" hook.  Only RV730/RV710 actually program anything.
 */
static void rv770_program_dcodt_before_state_switch(struct radeon_device *rdev,
						    struct radeon_ps *radeon_new_state,
						    struct radeon_ps *radeon_current_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
	struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
	bool current_use_dc = false;
	bool new_use_dc = false;

	if (pi->mclk_odt_threshold == 0)
		return;

	if (current_state->high.mclk <= pi->mclk_odt_threshold)
		current_use_dc = true;

	if (new_state->high.mclk <= pi->mclk_odt_threshold)
		new_use_dc = true;

	if (current_use_dc == new_use_dc)
		return;

	/* Enabling DC ODT is deferred to the after-switch hook. */
	if (!current_use_dc && new_use_dc)
		return;

	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		rv730_program_dcodt(rdev, new_use_dc);
}
1773 
/* Reprogram DC ODT after the state switch, but only when ODT is being
 * turned ON (new uses DC, current did not); disabling is handled by the
 * "before" hook.  Only RV730/RV710 actually program anything.
 */
static void rv770_program_dcodt_after_state_switch(struct radeon_device *rdev,
						   struct radeon_ps *radeon_new_state,
						   struct radeon_ps *radeon_current_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
	struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
	bool current_use_dc = false;
	bool new_use_dc = false;

	if (pi->mclk_odt_threshold == 0)
		return;

	if (current_state->high.mclk <= pi->mclk_odt_threshold)
		current_use_dc = true;

	if (new_state->high.mclk <= pi->mclk_odt_threshold)
		new_use_dc = true;

	if (current_use_dc == new_use_dc)
		return;

	/* Disabling DC ODT was already done by the before-switch hook. */
	if (current_use_dc && !new_use_dc)
		return;

	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		rv730_program_dcodt(rdev, new_use_dc);
}
1802 
rv770_retrieve_odt_values(struct radeon_device * rdev)1803 static void rv770_retrieve_odt_values(struct radeon_device *rdev)
1804 {
1805 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1806 
1807 	if (pi->mclk_odt_threshold == 0)
1808 		return;
1809 
1810 	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1811 		rv730_get_odt_values(rdev);
1812 }
1813 
static void rv770_set_dpm_event_sources(struct radeon_device *rdev, u32 sources)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	bool enable_thermal_prot = false;
	/* initialized defensively; only consumed when a source is selected */
	enum radeon_dpm_event_src event_src = RADEON_DPM_EVENT_SRC_DIGITAL;

	/* map the active throttle-source mask onto a DPM event source */
	switch (sources) {
	case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL):
		enable_thermal_prot = true;
		event_src = RADEON_DPM_EVENT_SRC_DIGITAL;
		break;
	case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL):
		enable_thermal_prot = true;
		event_src = RADEON_DPM_EVENT_SRC_EXTERNAL;
		break;
	case ((1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL) |
	      (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL)):
		enable_thermal_prot = true;
		event_src = RADEON_DPM_EVENT_SRC_DIGIAL_OR_EXTERNAL;
		break;
	default:
		/* no (or unrecognized) sources: thermal protection stays off */
		enable_thermal_prot = false;
		break;
	}

	if (enable_thermal_prot) {
		WREG32_P(CG_THERMAL_CTRL, DPM_EVENT_SRC(event_src), ~DPM_EVENT_SRC_MASK);
		if (pi->thermal_protection)
			WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
	} else {
		WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
	}
}
1850 
void rv770_enable_auto_throttle_source(struct radeon_device *rdev,
				       enum radeon_dpm_auto_throttle_src source,
				       bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 mask = 1 << source;

	/* only touch the hardware when the active-source mask actually changes */
	if (enable) {
		if (pi->active_auto_throttle_sources & mask)
			return;
		pi->active_auto_throttle_sources |= mask;
	} else {
		if (!(pi->active_auto_throttle_sources & mask))
			return;
		pi->active_auto_throttle_sources &= ~mask;
	}
	rv770_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
}
1869 
rv770_set_thermal_temperature_range(struct radeon_device * rdev,int min_temp,int max_temp)1870 static int rv770_set_thermal_temperature_range(struct radeon_device *rdev,
1871 					       int min_temp, int max_temp)
1872 {
1873 	int low_temp = 0 * 1000;
1874 	int high_temp = 255 * 1000;
1875 
1876 	if (low_temp < min_temp)
1877 		low_temp = min_temp;
1878 	if (high_temp > max_temp)
1879 		high_temp = max_temp;
1880 	if (high_temp < low_temp) {
1881 		DRM_ERROR("invalid thermal range: %d - %d\n", low_temp, high_temp);
1882 		return -EINVAL;
1883 	}
1884 
1885 	WREG32_P(CG_THERMAL_INT, DIG_THERM_INTH(high_temp / 1000), ~DIG_THERM_INTH_MASK);
1886 	WREG32_P(CG_THERMAL_INT, DIG_THERM_INTL(low_temp / 1000), ~DIG_THERM_INTL_MASK);
1887 	WREG32_P(CG_THERMAL_CTRL, DIG_THERM_DPM(high_temp / 1000), ~DIG_THERM_DPM_MASK);
1888 
1889 	rdev->pm.dpm.thermal.min_temp = low_temp;
1890 	rdev->pm.dpm.thermal.max_temp = high_temp;
1891 
1892 	return 0;
1893 }
1894 
/*
 * Bring up dynamic power management: configure voltage control, clock
 * parameters and clock gating, upload the SMC firmware and the boot-state
 * table, then start the DPM engine and enable thermal auto-throttling.
 * The ordering of these steps is deliberate; do not reorder.
 * Returns 0 on success or a negative error code on failure.
 */
int rv770_dpm_enable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
	int ret;

	if (pi->gfx_clock_gating)
		rv770_restore_cgcg(rdev);

	/* refuse to enable twice */
	if (rv770_dpm_enabled(rdev))
		return -EINVAL;

	/* voltage infrastructure must be up before any state programming */
	if (pi->voltage_control) {
		rv770_enable_voltage_control(rdev, true);
		ret = rv770_construct_vddc_table(rdev);
		if (ret) {
			DRM_ERROR("rv770_construct_vddc_table failed\n");
			return ret;
		}
	}

	if (pi->dcodt)
		rv770_retrieve_odt_values(rdev);

	if (pi->mvdd_control) {
		ret = rv770_get_mvdd_configuration(rdev);
		if (ret) {
			DRM_ERROR("rv770_get_mvdd_configuration failed\n");
			return ret;
		}
	}

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
		rv770_enable_backbias(rdev, true);

	rv770_enable_spread_spectrum(rdev, true);

	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, true);

	/* program the various timing/bsp/tp/display parameters */
	rv770_program_mpll_timing_parameters(rdev);
	rv770_setup_bsp(rdev);
	rv770_program_git(rdev);
	rv770_program_tp(rdev);
	rv770_program_tpp(rdev);
	rv770_program_sstp(rdev);
	rv770_program_engine_speed_parameters(rdev);
	rv770_enable_display_gap(rdev);
	rv770_program_vc(rdev);

	if (pi->dynamic_pcie_gen2)
		rv770_enable_dynamic_pcie_gen2(rdev, true);

	/* load the SMC firmware and the boot state, then start the SMC */
	ret = rv770_upload_firmware(rdev);
	if (ret) {
		DRM_ERROR("rv770_upload_firmware failed\n");
		return ret;
	}
	ret = rv770_init_smc_table(rdev, boot_ps);
	if (ret) {
		DRM_ERROR("rv770_init_smc_table failed\n");
		return ret;
	}

	rv770_program_response_times(rdev);
	r7xx_start_smc(rdev);

	/* RV730/RV710 use a family-specific DPM start sequence */
	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		rv730_start_dpm(rdev);
	else
		rv770_start_dpm(rdev);

	if (pi->gfx_clock_gating)
		rv770_gfx_clock_gating_enable(rdev, true);

	if (pi->mg_clock_gating)
		rv770_mg_clock_gating_enable(rdev, true);

	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);

	return 0;
}
1977 
/*
 * Late DPM enable step, run once interrupts are installed: program the
 * thermal trip points, unmask the driver-side thermal interrupt, and ask
 * the SMC to deliver thermal events.  An SMC-side failure is logged but
 * treated as non-fatal.
 */
int rv770_dpm_late_enable(struct radeon_device *rdev)
{
	int ret;

	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		PPSMC_Result result;

		ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX);
		if (ret)
			return ret;
		rdev->irq.dpm_thermal = true;
		radeon_irq_set(rdev);
		result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);

		/* not fatal: DPM still works without thermal interrupts */
		if (result != PPSMC_Result_OK)
			DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
	}

	return 0;
}
1999 
/*
 * Tear down dynamic power management: undo the features enabled in
 * rv770_dpm_enable() (roughly in reverse order), mask the thermal
 * interrupt, stop the DPM engine and the SMC.  A no-op if DPM is not
 * currently enabled.
 */
void rv770_dpm_disable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (!rv770_dpm_enabled(rdev))
		return;

	rv770_clear_vc(rdev);

	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, false);

	rv770_enable_spread_spectrum(rdev, false);

	if (pi->dynamic_pcie_gen2)
		rv770_enable_dynamic_pcie_gen2(rdev, false);

	/* mask the thermal interrupt we enabled in late_enable */
	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		rdev->irq.dpm_thermal = false;
		radeon_irq_set(rdev);
	}

	if (pi->gfx_clock_gating)
		rv770_gfx_clock_gating_enable(rdev, false);

	if (pi->mg_clock_gating)
		rv770_mg_clock_gating_enable(rdev, false);

	/* RV730/RV710 use a family-specific DPM stop sequence */
	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		rv730_stop_dpm(rdev);
	else
		rv770_stop_dpm(rdev);

	r7xx_stop_smc(rdev);
	rv770_reset_smio_status(rdev);
}
2037 
/*
 * Switch the hardware from the current power state to the requested one.
 * The sequence (restrict levels, halt SMC, upload new state, program
 * memory timings/dcodt, resume SMC, commit) is order-sensitive; UVD
 * clocks are adjusted around the engine-clock change.
 * Returns 0 on success or a negative error code.
 */
int rv770_dpm_set_power_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
	struct radeon_ps *old_ps = rdev->pm.dpm.current_ps;
	int ret;

	ret = rv770_restrict_performance_levels_before_switch(rdev);
	if (ret) {
		DRM_ERROR("rv770_restrict_performance_levels_before_switch failed\n");
		return ret;
	}
	rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
	/* SMC must be halted while the new state table is uploaded */
	ret = rv770_halt_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_halt_smc failed\n");
		return ret;
	}
	ret = rv770_upload_sw_state(rdev, new_ps);
	if (ret) {
		DRM_ERROR("rv770_upload_sw_state failed\n");
		return ret;
	}
	r7xx_program_memory_timing_parameters(rdev, new_ps);
	if (pi->dcodt)
		rv770_program_dcodt_before_state_switch(rdev, new_ps, old_ps);
	ret = rv770_resume_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_resume_smc failed\n");
		return ret;
	}
	ret = rv770_set_sw_state(rdev);
	if (ret) {
		DRM_ERROR("rv770_set_sw_state failed\n");
		return ret;
	}
	if (pi->dcodt)
		rv770_program_dcodt_after_state_switch(rdev, new_ps, old_ps);
	rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);

	return 0;
}
2080 
#if 0
/* Currently unused: force the ASIC back to the boot power state,
 * reprogramming dcodt around the switch when enabled. */
void rv770_dpm_reset_asic(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;

	rv770_restrict_performance_levels_before_switch(rdev);
	if (pi->dcodt)
		rv770_program_dcodt_before_state_switch(rdev, boot_ps, boot_ps);
	rv770_set_boot_state(rdev);
	if (pi->dcodt)
		rv770_program_dcodt_after_state_switch(rdev, boot_ps, boot_ps);
}
#endif
2095 
/*
 * One-time ASIC setup for DPM: cache clock/voltage register state,
 * determine memory type and PCIe gen2 status, enable ACPI power
 * management, and (when permitted by radeon_aspm) the platform's
 * ASPM capabilities.
 */
void rv770_dpm_setup_asic(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	r7xx_read_clock_registers(rdev);
	rv770_read_voltage_smio_registers(rdev);
	rv770_get_memory_type(rdev);
	if (pi->dcodt)
		rv770_get_mclk_odt_threshold(rdev);
	rv770_get_pcie_gen2_status(rdev);

	rv770_enable_acpi_pm(rdev);

	/* only enable ASPM features the platform advertises */
	if (radeon_aspm != 0) {
		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L0s)
			rv770_enable_l0s(rdev);
		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L1)
			rv770_enable_l1(rdev);
		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_TURNOFFPLL_ASPML1)
			rv770_enable_pll_sleep_in_l1(rdev);
	}
}
2118 
/* Reprogram the display gap parameters after a display configuration change. */
void rv770_dpm_display_configuration_changed(struct radeon_device *rdev)
{
	rv770_program_display_gap(rdev);
}
2123 
/* Overlays for the version-dependent ATOM PowerPlay table in the video BIOS. */
union power_info {
	struct _ATOM_POWERPLAY_INFO info;
	struct _ATOM_POWERPLAY_INFO_V2 info_2;
	struct _ATOM_POWERPLAY_INFO_V3 info_3;
	struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
	struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
	struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
};

/* Per-level clock info; which member applies depends on the ASIC family. */
union pplib_clock_info {
	struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
	struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
	struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
	struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
};

/* Power state entry; two table revisions share the same array. */
union pplib_power_state {
	struct _ATOM_PPLIB_STATE v1;
	struct _ATOM_PPLIB_STATE_V2 v2;
};
2144 
/*
 * Fill in the non-clock portion of a radeon_ps from the BIOS table entry:
 * caps, classification flags and (for newer table revisions) UVD clocks.
 * Also records the boot and UVD states on the device as they are found.
 */
static void rv7xx_parse_pplib_non_clock_info(struct radeon_device *rdev,
					     struct radeon_ps *rps,
					     struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
					     u8 table_rev)
{
	rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
	rps->class = le16_to_cpu(non_clock_info->usClassification);
	rps->class2 = le16_to_cpu(non_clock_info->usClassification2);

	/* UVD clocks only exist in table revisions newer than VER1 */
	if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
		rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
		rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
	} else {
		rps->vclk = 0;
		rps->dclk = 0;
	}

	/* a UVD state with zero clocks gets sane defaults */
	if (r600_is_uvd_state(rps->class, rps->class2)) {
		if ((rps->vclk == 0) || (rps->dclk == 0)) {
			rps->vclk = RV770_DEFAULT_VCLK_FREQ;
			rps->dclk = RV770_DEFAULT_DCLK_FREQ;
		}
	}

	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
		rdev->pm.dpm.boot_ps = rps;
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
		rdev->pm.dpm.uvd_ps = rps;
}
2174 
/*
 * Decode one BIOS clock-info entry into the low/medium/high performance
 * level (selected by index) of the given state, then apply the various
 * fixups: vddc sentinel patching, ACPI/ULV bookkeeping, boot-state
 * defaults, and tracking of min/max vddc and the max AC clocks.
 */
static void rv7xx_parse_pplib_clock_info(struct radeon_device *rdev,
					 struct radeon_ps *rps, int index,
					 union pplib_clock_info *clock_info)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct rv7xx_ps *ps = rv770_get_ps(rps);
	u32 sclk, mclk;
	struct rv7xx_pl *pl;

	/* index 0/1/2 maps to the low/medium/high level */
	switch (index) {
	case 0:
		pl = &ps->low;
		break;
	case 1:
		pl = &ps->medium;
		break;
	case 2:
	default:
		pl = &ps->high;
		break;
	}

	/* evergreen-layout entries additionally carry vddci and flags */
	if (rdev->family >= CHIP_CEDAR) {
		sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
		sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
		mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
		mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;

		pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
		pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
		pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
	} else {
		sclk = le16_to_cpu(clock_info->r600.usEngineClockLow);
		sclk |= clock_info->r600.ucEngineClockHigh << 16;
		mclk = le16_to_cpu(clock_info->r600.usMemoryClockLow);
		mclk |= clock_info->r600.ucMemoryClockHigh << 16;

		pl->vddc = le16_to_cpu(clock_info->r600.usVDDC);
		pl->flags = le32_to_cpu(clock_info->r600.ulFlags);
	}

	pl->mclk = mclk;
	pl->sclk = sclk;

	/* patch up vddc if necessary (0xff01 is replaced by the board max) */
	if (pl->vddc == 0xff01) {
		if (pi->max_vddc)
			pl->vddc = pi->max_vddc;
	}

	/* remember the ACPI-state voltage/pcie settings */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
		pi->acpi_vddc = pl->vddc;
		if (rdev->family >= CHIP_CEDAR)
			eg_pi->acpi_vddci = pl->vddci;
		if (ps->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
			pi->acpi_pcie_gen2 = true;
		else
			pi->acpi_pcie_gen2 = false;
	}

	/* ULV support is only recorded on BARTS and newer */
	if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
		if (rdev->family >= CHIP_BARTS) {
			eg_pi->ulv.supported = true;
			eg_pi->ulv.pl = pl;
		}
	}

	if (pi->min_vddc_in_table > pl->vddc)
		pi->min_vddc_in_table = pl->vddc;

	if (pi->max_vddc_in_table < pl->vddc)
		pi->max_vddc_in_table = pl->vddc;

	/* patch up boot state */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
		u16 vddc, vddci, mvdd;
		radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
		pl->mclk = rdev->clock.default_mclk;
		pl->sclk = rdev->clock.default_sclk;
		pl->vddc = vddc;
		pl->vddci = vddci;
	}

	/* performance states define the maximum clocks/voltages on AC */
	if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
	    ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
	}
}
2267 
rv7xx_parse_power_table(struct radeon_device * rdev)2268 int rv7xx_parse_power_table(struct radeon_device *rdev)
2269 {
2270 	struct radeon_mode_info *mode_info = &rdev->mode_info;
2271 	struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
2272 	union pplib_power_state *power_state;
2273 	int i, j;
2274 	union pplib_clock_info *clock_info;
2275 	union power_info *power_info;
2276 	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
2277 	u16 data_offset;
2278 	u8 frev, crev;
2279 	struct rv7xx_ps *ps;
2280 
2281 	if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
2282 				   &frev, &crev, &data_offset))
2283 		return -EINVAL;
2284 	power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
2285 
2286 	rdev->pm.dpm.ps = kcalloc(power_info->pplib.ucNumStates,
2287 				  sizeof(struct radeon_ps),
2288 				  GFP_KERNEL);
2289 	if (!rdev->pm.dpm.ps)
2290 		return -ENOMEM;
2291 
2292 	for (i = 0; i < power_info->pplib.ucNumStates; i++) {
2293 		power_state = (union pplib_power_state *)
2294 			(mode_info->atom_context->bios + data_offset +
2295 			 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
2296 			 i * power_info->pplib.ucStateEntrySize);
2297 		non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
2298 			(mode_info->atom_context->bios + data_offset +
2299 			 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
2300 			 (power_state->v1.ucNonClockStateIndex *
2301 			  power_info->pplib.ucNonClockSize));
2302 		if (power_info->pplib.ucStateEntrySize - 1) {
2303 			u8 *idx;
2304 			ps = kzalloc(sizeof(struct rv7xx_ps), GFP_KERNEL);
2305 			if (ps == NULL) {
2306 				kfree(rdev->pm.dpm.ps);
2307 				return -ENOMEM;
2308 			}
2309 			rdev->pm.dpm.ps[i].ps_priv = ps;
2310 			rv7xx_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
2311 							 non_clock_info,
2312 							 power_info->pplib.ucNonClockSize);
2313 			idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
2314 			for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
2315 				clock_info = (union pplib_clock_info *)
2316 					(mode_info->atom_context->bios + data_offset +
2317 					 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
2318 					 (idx[j] * power_info->pplib.ucClockInfoSize));
2319 				rv7xx_parse_pplib_clock_info(rdev,
2320 							     &rdev->pm.dpm.ps[i], j,
2321 							     clock_info);
2322 			}
2323 		}
2324 	}
2325 	rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
2326 	return 0;
2327 }
2328 
rv770_get_engine_memory_ss(struct radeon_device * rdev)2329 void rv770_get_engine_memory_ss(struct radeon_device *rdev)
2330 {
2331 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2332 	struct radeon_atom_ss ss;
2333 
2334 	pi->sclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss,
2335 						       ASIC_INTERNAL_ENGINE_SS, 0);
2336 	pi->mclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss,
2337 						       ASIC_INTERNAL_MEMORY_SS, 0);
2338 
2339 	if (pi->sclk_ss || pi->mclk_ss)
2340 		pi->dynamic_ss = true;
2341 	else
2342 		pi->dynamic_ss = false;
2343 }
2344 
/*
 * Allocate and populate the rv7xx_power_info private data: parse the
 * BIOS power table, probe voltage/SS capabilities, and fill in the
 * driver defaults and thresholds used by the rest of the DPM code.
 * Returns 0 on success or a negative error code.
 */
int rv770_dpm_init(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi;
	struct atom_clock_dividers dividers;
	int ret;

	pi = kzalloc(sizeof(struct rv7xx_power_info), GFP_KERNEL);
	if (pi == NULL)
		return -ENOMEM;
	/* owned by the device from here on; freed in rv770_dpm_fini() */
	rdev->pm.dpm.priv = pi;

	rv770_get_max_vddc(rdev);

	pi->acpi_vddc = 0;
	pi->min_vddc_in_table = 0;
	pi->max_vddc_in_table = 0;

	ret = r600_get_platform_caps(rdev);
	if (ret)
		return ret;

	ret = rv7xx_parse_power_table(rdev);
	if (ret)
		return ret;

	if (rdev->pm.dpm.voltage_response_time == 0)
		rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
	if (rdev->pm.dpm.backbias_response_time == 0)
		rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					     0, false, &dividers);
	/* NOTE(review): the sense of this check looks inverted (the BIOS
	 * dividers are used on *failure*), but it matches upstream radeon
	 * code — confirm against upstream before changing. */
	if (ret)
		pi->ref_div = dividers.ref_div + 1;
	else
		pi->ref_div = R600_REFERENCEDIVIDER_DFLT;

	/* mclk thresholds in 10 kHz units */
	pi->mclk_strobe_mode_threshold = 30000;
	pi->mclk_edc_enable_threshold = 30000;

	pi->rlp = RV770_RLP_DFLT;
	pi->rmp = RV770_RMP_DFLT;
	pi->lhp = RV770_LHP_DFLT;
	pi->lmp = RV770_LMP_DFLT;

	/* voltage control is only possible when the board uses GPIO voltage */
	pi->voltage_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);

	pi->mvdd_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);

	rv770_get_engine_memory_ss(rdev);

	pi->asi = RV770_ASI_DFLT;
	pi->pasi = RV770_HASI_DFLT;
	pi->vrc = RV770_VRC_DFLT;

	pi->power_gating = false;

	pi->gfx_clock_gating = true;

	pi->mg_clock_gating = true;
	pi->mgcgtssm = true;

	pi->dynamic_pcie_gen2 = true;

	/* thermal protection needs a working internal sensor */
	if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
		pi->thermal_protection = true;
	else
		pi->thermal_protection = false;

	pi->display_gap = true;

	/* dcodt is only used on mobility parts */
	if (rdev->flags & RADEON_IS_MOBILITY)
		pi->dcodt = true;
	else
		pi->dcodt = false;

	pi->ulps = true;

	pi->mclk_stutter_mode_threshold = 0;

	pi->sram_end = SMC_RAM_END;
	pi->state_table_start = RV770_SMC_TABLE_ADDRESS;
	pi->soft_regs_start = RV770_SMC_SOFT_REGISTERS_START;

	return 0;
}
2433 
/*
 * Dump a power state (class/caps, UVD clocks, and the three performance
 * levels) to the kernel log.  Evergreen and newer also print vddci.
 */
void rv770_dpm_print_power_state(struct radeon_device *rdev,
				 struct radeon_ps *rps)
{
	struct rv7xx_ps *ps = rv770_get_ps(rps);
	struct rv7xx_pl *pl;

	r600_dpm_print_class_info(rps->class, rps->class2);
	r600_dpm_print_cap_info(rps->caps);
	printk("\tuvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
	if (rdev->family >= CHIP_CEDAR) {
		pl = &ps->low;
		printk("\t\tpower level 0    sclk: %u mclk: %u vddc: %u vddci: %u\n",
		       pl->sclk, pl->mclk, pl->vddc, pl->vddci);
		pl = &ps->medium;
		printk("\t\tpower level 1    sclk: %u mclk: %u vddc: %u vddci: %u\n",
		       pl->sclk, pl->mclk, pl->vddc, pl->vddci);
		pl = &ps->high;
		printk("\t\tpower level 2    sclk: %u mclk: %u vddc: %u vddci: %u\n",
		       pl->sclk, pl->mclk, pl->vddc, pl->vddci);
	} else {
		pl = &ps->low;
		printk("\t\tpower level 0    sclk: %u mclk: %u vddc: %u\n",
		       pl->sclk, pl->mclk, pl->vddc);
		pl = &ps->medium;
		printk("\t\tpower level 1    sclk: %u mclk: %u vddc: %u\n",
		       pl->sclk, pl->mclk, pl->vddc);
		pl = &ps->high;
		printk("\t\tpower level 2    sclk: %u mclk: %u vddc: %u\n",
		       pl->sclk, pl->mclk, pl->vddc);
	}
	r600_dpm_print_ps_status(rdev, rps);
}
2466 
/*
 * Print the currently selected performance level (read back from the
 * TARGET_AND_CURRENT_PROFILE_INDEX register) to a debugfs seq_file.
 */
void rv770_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
						       struct seq_file *m)
{
	struct radeon_ps *rps = rdev->pm.dpm.current_ps;
	struct rv7xx_ps *ps = rv770_get_ps(rps);
	struct rv7xx_pl *pl;
	u32 current_index =
		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
		CURRENT_PROFILE_INDEX_SHIFT;

	/* only indices 0..2 (low/medium/high) are valid */
	if (current_index > 2) {
		seq_printf(m, "invalid dpm profile %d\n", current_index);
	} else {
		if (current_index == 0)
			pl = &ps->low;
		else if (current_index == 1)
			pl = &ps->medium;
		else /* current_index == 2 */
			pl = &ps->high;
		seq_printf(m, "uvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
		if (rdev->family >= CHIP_CEDAR) {
			seq_printf(m, "power level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
				   current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
		} else {
			seq_printf(m, "power level %d    sclk: %u mclk: %u vddc: %u\n",
				   current_index, pl->sclk, pl->mclk, pl->vddc);
		}
	}
}
2496 
rv770_dpm_get_current_sclk(struct radeon_device * rdev)2497 u32 rv770_dpm_get_current_sclk(struct radeon_device *rdev)
2498 {
2499 	struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2500 	struct rv7xx_ps *ps = rv770_get_ps(rps);
2501 	struct rv7xx_pl *pl;
2502 	u32 current_index =
2503 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2504 		CURRENT_PROFILE_INDEX_SHIFT;
2505 
2506 	if (current_index > 2) {
2507 		return 0;
2508 	} else {
2509 		if (current_index == 0)
2510 			pl = &ps->low;
2511 		else if (current_index == 1)
2512 			pl = &ps->medium;
2513 		else /* current_index == 2 */
2514 			pl = &ps->high;
2515 		return  pl->sclk;
2516 	}
2517 }
2518 
rv770_dpm_get_current_mclk(struct radeon_device * rdev)2519 u32 rv770_dpm_get_current_mclk(struct radeon_device *rdev)
2520 {
2521 	struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2522 	struct rv7xx_ps *ps = rv770_get_ps(rps);
2523 	struct rv7xx_pl *pl;
2524 	u32 current_index =
2525 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2526 		CURRENT_PROFILE_INDEX_SHIFT;
2527 
2528 	if (current_index > 2) {
2529 		return 0;
2530 	} else {
2531 		if (current_index == 0)
2532 			pl = &ps->low;
2533 		else if (current_index == 1)
2534 			pl = &ps->medium;
2535 		else /* current_index == 2 */
2536 			pl = &ps->high;
2537 		return  pl->mclk;
2538 	}
2539 }
2540 
rv770_dpm_fini(struct radeon_device * rdev)2541 void rv770_dpm_fini(struct radeon_device *rdev)
2542 {
2543 	int i;
2544 
2545 	for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
2546 		kfree(rdev->pm.dpm.ps[i].ps_priv);
2547 	}
2548 	kfree(rdev->pm.dpm.ps);
2549 	kfree(rdev->pm.dpm.priv);
2550 }
2551 
rv770_dpm_get_sclk(struct radeon_device * rdev,bool low)2552 u32 rv770_dpm_get_sclk(struct radeon_device *rdev, bool low)
2553 {
2554 	struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
2555 
2556 	if (low)
2557 		return requested_state->low.sclk;
2558 	else
2559 		return requested_state->high.sclk;
2560 }
2561 
rv770_dpm_get_mclk(struct radeon_device * rdev,bool low)2562 u32 rv770_dpm_get_mclk(struct radeon_device *rdev, bool low)
2563 {
2564 	struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
2565 
2566 	if (low)
2567 		return requested_state->low.mclk;
2568 	else
2569 		return requested_state->high.mclk;
2570 }
2571 
rv770_dpm_vblank_too_short(struct radeon_device * rdev)2572 bool rv770_dpm_vblank_too_short(struct radeon_device *rdev)
2573 {
2574 	u32 vblank_time = r600_dpm_get_vblank_time(rdev);
2575 	u32 switch_limit = 200; /* 300 */
2576 
2577 	/* RV770 */
2578 	/* mclk switching doesn't seem to work reliably on desktop RV770s */
2579 	if ((rdev->family == CHIP_RV770) &&
2580 	    !(rdev->flags & RADEON_IS_MOBILITY))
2581 		switch_limit = 0xffffffff; /* disable mclk switching */
2582 
2583 	if (vblank_time < switch_limit)
2584 		return true;
2585 	else
2586 		return false;
2587 
2588 }
2589