xref: /dragonfly/sys/dev/drm/radeon/rv770_dpm.c (revision d78d3a22)
1 /*
2  * Copyright 2011 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 
25 #include <drm/drmP.h>
26 #include "radeon.h"
27 #include "radeon_asic.h"
28 #include "rv770d.h"
29 #include "r600_dpm.h"
30 #include "rv770_dpm.h"
31 #include "cypress_dpm.h"
32 #include "atom.h"
33 #include <linux/seq_file.h>
34 
35 #define MC_CG_ARB_FREQ_F0           0x0a
36 #define MC_CG_ARB_FREQ_F1           0x0b
37 #define MC_CG_ARB_FREQ_F2           0x0c
38 #define MC_CG_ARB_FREQ_F3           0x0d
39 
40 #define MC_CG_SEQ_DRAMCONF_S0       0x05
41 #define MC_CG_SEQ_DRAMCONF_S1       0x06
42 
43 #define PCIE_BUS_CLK                10000
44 #define TCLK                        (PCIE_BUS_CLK / 10)
45 
46 #define SMC_RAM_END 0xC000
47 
48 struct rv7xx_ps *rv770_get_ps(struct radeon_ps *rps);
49 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
50 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
51 
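/*
 * Small accessors for the driver-private state hung off the generic
 * radeon structures: the rv7xx power state lives in radeon_ps.ps_priv,
 * and the rv7xx/evergreen power info lives in rdev->pm.dpm.priv.
 */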
52 struct rv7xx_ps *rv770_get_ps(struct radeon_ps *rps)
53 {
54 	struct rv7xx_ps *ps = rps->ps_priv;
55 
56 	return ps;
57 }
58 
59 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev)
60 {
61 	struct rv7xx_power_info *pi = rdev->pm.dpm.priv;
62 
63 	return pi;
64 }
65 
66 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev)
67 {
68 	struct evergreen_power_info *pi = rdev->pm.dpm.priv;
69 
70 	return pi;
71 }
72 
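/*
 * Toggle hardware-controlled PCIe gen2 switching in the BIF.  The new
 * LC_SPEED_CNTL value is only committed when the other side of the link
 * has ever sent, or advertises support for, gen2; when disabling, the
 * gen2 strap is left untouched if the board booted in gen2 mode.
 */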
73 static void rv770_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
74 					       bool enable)
75 {
76 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
77 	u32 tmp;
78 
79 	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
80 	if (enable) {
81 		tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
82 		tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
83 		tmp |= LC_GEN2_EN_STRAP;
84 	} else {
85 		if (!pi->boot_in_gen2) {
86 			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
87 			tmp &= ~LC_GEN2_EN_STRAP;
88 		}
89 	}
90 	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
91 	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
92 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
93 
94 }
95 
96 static void rv770_enable_l0s(struct radeon_device *rdev)
97 {
98 	u32 tmp;
99 
100 	tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L0S_INACTIVITY_MASK;
101 	tmp |= LC_L0S_INACTIVITY(3);
102 	WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
103 }
104 
105 static void rv770_enable_l1(struct radeon_device *rdev)
106 {
107 	u32 tmp;
108 
109 	tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL);
110 	tmp &= ~LC_L1_INACTIVITY_MASK;
111 	tmp |= LC_L1_INACTIVITY(4);
112 	tmp &= ~LC_PMI_TO_L1_DIS;
113 	tmp &= ~LC_ASPM_TO_L1_DIS;
114 	WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
115 }
116 
117 static void rv770_enable_pll_sleep_in_l1(struct radeon_device *rdev)
118 {
119 	u32 tmp;
120 
121 	tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L1_INACTIVITY_MASK;
122 	tmp |= LC_L1_INACTIVITY(8);
123 	WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
124 
125 	/* NOTE, this is a PCIE indirect reg, not PCIE PORT */
126 	tmp = RREG32_PCIE(PCIE_P_CNTL);
127 	tmp |= P_PLL_PWRDN_IN_L1L23;
128 	tmp &= ~P_PLL_BUF_PDNB;
129 	tmp &= ~P_PLL_PDNB;
130 	tmp |= P_ALLOW_PRX_FRONTEND_SHUTOFF;
131 	WREG32_PCIE(PCIE_P_CNTL, tmp);
132 }
133 
134 static void rv770_gfx_clock_gating_enable(struct radeon_device *rdev,
135 					  bool enable)
136 {
137 	if (enable)
138 		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
139 	else {
140 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
141 		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
142 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
143 		RREG32(GB_TILING_CONFIG);
144 	}
145 }
146 
147 static void rv770_mg_clock_gating_enable(struct radeon_device *rdev,
148 					 bool enable)
149 {
150 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
151 
152 	if (enable) {
153 		u32 mgcg_cgtt_local0;
154 
155 		if (rdev->family == CHIP_RV770)
156 			mgcg_cgtt_local0 = RV770_MGCGTTLOCAL0_DFLT;
157 		else
158 			mgcg_cgtt_local0 = RV7XX_MGCGTTLOCAL0_DFLT;
159 
160 		WREG32(CG_CGTT_LOCAL_0, mgcg_cgtt_local0);
161 		WREG32(CG_CGTT_LOCAL_1, (RV770_MGCGTTLOCAL1_DFLT & 0xFFFFCFFF));
162 
163 		if (pi->mgcgtssm)
164 			WREG32(CGTS_SM_CTRL_REG, RV770_MGCGCGTSSMCTRL_DFLT);
165 	} else {
166 		WREG32(CG_CGTT_LOCAL_0, 0xFFFFFFFF);
167 		WREG32(CG_CGTT_LOCAL_1, 0xFFFFCFFF);
168 	}
169 }
170 
171 void rv770_restore_cgcg(struct radeon_device *rdev)
172 {
173 	bool dpm_en = false, cg_en = false;
174 
175 	if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
176 		dpm_en = true;
177 	if (RREG32(SCLK_PWRMGT_CNTL) & DYN_GFX_CLK_OFF_EN)
178 		cg_en = true;
179 
180 	if (dpm_en && !cg_en)
181 		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
182 }
183 
184 static void rv770_start_dpm(struct radeon_device *rdev)
185 {
186 	WREG32_P(SCLK_PWRMGT_CNTL, 0, ~SCLK_PWRMGT_OFF);
187 
188 	WREG32_P(MCLK_PWRMGT_CNTL, 0, ~MPLL_PWRMGT_OFF);
189 
190 	WREG32_P(GENERAL_PWRMGT, GLOBAL_PWRMGT_EN, ~GLOBAL_PWRMGT_EN);
191 }
192 
193 void rv770_stop_dpm(struct radeon_device *rdev)
194 {
195 	PPSMC_Result result;
196 
197 	result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_TwoLevelsDisabled);
198 
199 	if (result != PPSMC_Result_OK)
200 		DRM_DEBUG("Could not force DPM to low.\n");
201 
202 	WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
203 
204 	WREG32_P(SCLK_PWRMGT_CNTL, SCLK_PWRMGT_OFF, ~SCLK_PWRMGT_OFF);
205 
206 	WREG32_P(MCLK_PWRMGT_CNTL, MPLL_PWRMGT_OFF, ~MPLL_PWRMGT_OFF);
207 }
208 
209 bool rv770_dpm_enabled(struct radeon_device *rdev)
210 {
211 	if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
212 		return true;
213 	else
214 		return false;
215 }
216 
217 void rv770_enable_thermal_protection(struct radeon_device *rdev,
218 				     bool enable)
219 {
220 	if (enable)
221 		WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
222 	else
223 		WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
224 }
225 
226 void rv770_enable_acpi_pm(struct radeon_device *rdev)
227 {
228 	WREG32_P(GENERAL_PWRMGT, STATIC_PM_EN, ~STATIC_PM_EN);
229 }
230 
231 u8 rv770_get_seq_value(struct radeon_device *rdev,
232 		       struct rv7xx_pl *pl)
233 {
234 	return (pl->flags & ATOM_PPLIB_R600_FLAGS_LOWPOWER) ?
235 		MC_CG_SEQ_DRAMCONF_S0 : MC_CG_SEQ_DRAMCONF_S1;
236 }
237 
238 #if 0
239 int rv770_read_smc_soft_register(struct radeon_device *rdev,
240 				 u16 reg_offset, u32 *value)
241 {
242 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
243 
244 	return rv770_read_smc_sram_dword(rdev,
245 					 pi->soft_regs_start + reg_offset,
246 					 value, pi->sram_end);
247 }
248 #endif
249 
250 int rv770_write_smc_soft_register(struct radeon_device *rdev,
251 				  u16 reg_offset, u32 value)
252 {
253 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
254 
255 	return rv770_write_smc_sram_dword(rdev,
256 					  pi->soft_regs_start + reg_offset,
257 					  value, pi->sram_end);
258 }
259 
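/*
 * Fill in the per-level aT fields of the SMC software state.  The l[]/r[]
 * arrays are the left/right transition percentages between the low, medium
 * and high levels, derived from the lmp/rlp and lhp/rmp parameters and the
 * ratios of the neighbouring sclks, then scaled by bsp (pbsp for the top
 * level) into the CG_R/CG_L encoding.
 */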
260 int rv770_populate_smc_t(struct radeon_device *rdev,
261 			 struct radeon_ps *radeon_state,
262 			 RV770_SMC_SWSTATE *smc_state)
263 {
264 	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
265 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
266 	int i;
267 	int a_n;
268 	int a_d;
269 	u8 l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
270 	u8 r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
271 	u32 a_t;
272 
273 	l[0] = 0;
274 	r[2] = 100;
275 
276 	a_n = (int)state->medium.sclk * pi->lmp +
277 		(int)state->low.sclk * (R600_AH_DFLT - pi->rlp);
278 	a_d = (int)state->low.sclk * (100 - (int)pi->rlp) +
279 		(int)state->medium.sclk * pi->lmp;
280 
281 	l[1] = (u8)(pi->lmp - (int)pi->lmp * a_n / a_d);
282 	r[0] = (u8)(pi->rlp + (100 - (int)pi->rlp) * a_n / a_d);
283 
284 	a_n = (int)state->high.sclk * pi->lhp + (int)state->medium.sclk *
285 		(R600_AH_DFLT - pi->rmp);
286 	a_d = (int)state->medium.sclk * (100 - (int)pi->rmp) +
287 		(int)state->high.sclk * pi->lhp;
288 
289 	l[2] = (u8)(pi->lhp - (int)pi->lhp * a_n / a_d);
290 	r[1] = (u8)(pi->rmp + (100 - (int)pi->rmp) * a_n / a_d);
291 
292 	for (i = 0; i < (RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1); i++) {
293 		a_t = CG_R(r[i] * pi->bsp / 200) | CG_L(l[i] * pi->bsp / 200);
294 		smc_state->levels[i].aT = cpu_to_be32(a_t);
295 	}
296 
297 	a_t = CG_R(r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200) |
298 		CG_L(l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200);
299 
300 	smc_state->levels[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1].aT =
301 		cpu_to_be32(a_t);
302 
303 	return 0;
304 }
305 
306 int rv770_populate_smc_sp(struct radeon_device *rdev,
307 			  struct radeon_ps *radeon_state,
308 			  RV770_SMC_SWSTATE *smc_state)
309 {
310 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
311 	int i;
312 
313 	for (i = 0; i < (RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1); i++)
314 		smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
315 
316 	smc_state->levels[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1].bSP =
317 		cpu_to_be32(pi->psp);
318 
319 	return 0;
320 }
321 
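/*
 * Compute the MPLL feedback divider for a target memory clock as an
 * integer part (clkf) plus a fractional part in 1/8th steps (clkfrac).
 * fyclk is the target yclk: GDDR5 uses 4x the memory clock, other memory
 * types 2x (the *8/2 and *4/2 factors below).
 */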
322 static void rv770_calculate_fractional_mpll_feedback_divider(u32 memory_clock,
323 							     u32 reference_clock,
324 							     bool gddr5,
325 							     struct atom_clock_dividers *dividers,
326 							     u32 *clkf,
327 							     u32 *clkfrac)
328 {
329 	u32 post_divider, reference_divider, feedback_divider8;
330 	u32 fyclk;
331 
332 	if (gddr5)
333 		fyclk = (memory_clock * 8) / 2;
334 	else
335 		fyclk = (memory_clock * 4) / 2;
336 
337 	post_divider = dividers->post_div;
338 	reference_divider = dividers->ref_div;
339 
340 	feedback_divider8 =
341 		(8 * fyclk * reference_divider * post_divider) / reference_clock;
342 
343 	*clkf = feedback_divider8 / 8;
344 	*clkfrac = feedback_divider8 % 8;
345 }
346 
347 static int rv770_encode_yclk_post_div(u32 postdiv, u32 *encoded_postdiv)
348 {
349 	int ret = 0;
350 
351 	switch (postdiv) {
352 	case 1:
353 		*encoded_postdiv = 0;
354 		break;
355 	case 2:
356 		*encoded_postdiv = 1;
357 		break;
358 	case 4:
359 		*encoded_postdiv = 2;
360 		break;
361 	case 8:
362 		*encoded_postdiv = 3;
363 		break;
364 	case 16:
365 		*encoded_postdiv = 4;
366 		break;
367 	default:
368 		ret = -EINVAL;
369 		break;
370 	}
371 
372 	return ret;
373 }
374 
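/*
 * Map the MPLL feedback divider (clkf) to the IBIAS field value used in
 * MPLL_AD/DQ_FUNC_CNTL.  The ranges are hard-coded; presumably they select
 * an analog bias setting appropriate for the resulting VCO frequency.
 */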
375 u32 rv770_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf)
376 {
377 	if (clkf <= 0x10)
378 		return 0x4B;
379 	if (clkf <= 0x19)
380 		return 0x5B;
381 	if (clkf <= 0x21)
382 		return 0x2B;
383 	if (clkf <= 0x27)
384 		return 0x6C;
385 	if (clkf <= 0x31)
386 		return 0x9D;
387 	return 0xC6;
388 }
389 
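/*
 * Build the SMC MCLK register block for an RV770-class part: query the
 * ATOM dividers for the target memory clock, translate them into the
 * MPLL AD (and, for GDDR5, DQ) CLKR/CLKF/CLKFRAC/YCLK_POST_DIV/IBIAS
 * fields, and byte-swap everything into the RV7XX_SMC_MCLK_VALUE layout.
 */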
390 static int rv770_populate_mclk_value(struct radeon_device *rdev,
391 				     u32 engine_clock, u32 memory_clock,
392 				     RV7XX_SMC_MCLK_VALUE *mclk)
393 {
394 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
395 	u8 encoded_reference_dividers[] = { 0, 16, 17, 20, 21 };
396 	u32 mpll_ad_func_cntl =
397 		pi->clk_regs.rv770.mpll_ad_func_cntl;
398 	u32 mpll_ad_func_cntl_2 =
399 		pi->clk_regs.rv770.mpll_ad_func_cntl_2;
400 	u32 mpll_dq_func_cntl =
401 		pi->clk_regs.rv770.mpll_dq_func_cntl;
402 	u32 mpll_dq_func_cntl_2 =
403 		pi->clk_regs.rv770.mpll_dq_func_cntl_2;
404 	u32 mclk_pwrmgt_cntl =
405 		pi->clk_regs.rv770.mclk_pwrmgt_cntl;
406 	u32 dll_cntl = pi->clk_regs.rv770.dll_cntl;
407 	struct atom_clock_dividers dividers;
408 	u32 reference_clock = rdev->clock.mpll.reference_freq;
409 	u32 clkf, clkfrac;
410 	u32 postdiv_yclk;
411 	u32 ibias;
412 	int ret;
413 
414 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
415 					     memory_clock, false, &dividers);
416 	if (ret)
417 		return ret;
418 
419 	if ((dividers.ref_div < 1) || (dividers.ref_div > 5))
420 		return -EINVAL;
421 
422 	rv770_calculate_fractional_mpll_feedback_divider(memory_clock, reference_clock,
423 							 pi->mem_gddr5,
424 							 &dividers, &clkf, &clkfrac);
425 
426 	ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
427 	if (ret)
428 		return ret;
429 
430 	ibias = rv770_map_clkf_to_ibias(rdev, clkf);
431 
432 	mpll_ad_func_cntl &= ~(CLKR_MASK |
433 			       YCLK_POST_DIV_MASK |
434 			       CLKF_MASK |
435 			       CLKFRAC_MASK |
436 			       IBIAS_MASK);
437 	mpll_ad_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
438 	mpll_ad_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
439 	mpll_ad_func_cntl |= CLKF(clkf);
440 	mpll_ad_func_cntl |= CLKFRAC(clkfrac);
441 	mpll_ad_func_cntl |= IBIAS(ibias);
442 
443 	if (dividers.vco_mode)
444 		mpll_ad_func_cntl_2 |= VCO_MODE;
445 	else
446 		mpll_ad_func_cntl_2 &= ~VCO_MODE;
447 
448 	if (pi->mem_gddr5) {
449 		rv770_calculate_fractional_mpll_feedback_divider(memory_clock,
450 								 reference_clock,
451 								 pi->mem_gddr5,
452 								 &dividers, &clkf, &clkfrac);
453 
454 		ibias = rv770_map_clkf_to_ibias(rdev, clkf);
455 
456 		ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
457 		if (ret)
458 			return ret;
459 
460 		mpll_dq_func_cntl &= ~(CLKR_MASK |
461 				       YCLK_POST_DIV_MASK |
462 				       CLKF_MASK |
463 				       CLKFRAC_MASK |
464 				       IBIAS_MASK);
465 		mpll_dq_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
466 		mpll_dq_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
467 		mpll_dq_func_cntl |= CLKF(clkf);
468 		mpll_dq_func_cntl |= CLKFRAC(clkfrac);
469 		mpll_dq_func_cntl |= IBIAS(ibias);
470 
471 		if (dividers.vco_mode)
472 			mpll_dq_func_cntl_2 |= VCO_MODE;
473 		else
474 			mpll_dq_func_cntl_2 &= ~VCO_MODE;
475 	}
476 
477 	mclk->mclk770.mclk_value = cpu_to_be32(memory_clock);
478 	mclk->mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
479 	mclk->mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
480 	mclk->mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
481 	mclk->mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
482 	mclk->mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
483 	mclk->mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
484 
485 	return 0;
486 }
487 
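/*
 * Build the SMC SCLK register block: derive the SPLL reference and post
 * dividers from the ATOM dividers, compute the feedback divider scaled by
 * 16384 (dithering is enabled), and program the CLKS/CLKV spread-spectrum
 * parameters when engine SS is enabled and ATOM reports SS info for the
 * resulting VCO frequency.
 */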
488 static int rv770_populate_sclk_value(struct radeon_device *rdev,
489 				     u32 engine_clock,
490 				     RV770_SMC_SCLK_VALUE *sclk)
491 {
492 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
493 	struct atom_clock_dividers dividers;
494 	u32 spll_func_cntl =
495 		pi->clk_regs.rv770.cg_spll_func_cntl;
496 	u32 spll_func_cntl_2 =
497 		pi->clk_regs.rv770.cg_spll_func_cntl_2;
498 	u32 spll_func_cntl_3 =
499 		pi->clk_regs.rv770.cg_spll_func_cntl_3;
500 	u32 cg_spll_spread_spectrum =
501 		pi->clk_regs.rv770.cg_spll_spread_spectrum;
502 	u32 cg_spll_spread_spectrum_2 =
503 		pi->clk_regs.rv770.cg_spll_spread_spectrum_2;
504 	u64 tmp;
505 	u32 reference_clock = rdev->clock.spll.reference_freq;
506 	u32 reference_divider, post_divider;
507 	u32 fbdiv;
508 	int ret;
509 
510 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
511 					     engine_clock, false, &dividers);
512 	if (ret)
513 		return ret;
514 
515 	reference_divider = 1 + dividers.ref_div;
516 
517 	if (dividers.enable_post_div)
518 		post_divider = (0x0f & (dividers.post_div >> 4)) + (0x0f & dividers.post_div) + 2;
519 	else
520 		post_divider = 1;
521 
522 	tmp = (u64) engine_clock * reference_divider * post_divider * 16384;
523 	do_div(tmp, reference_clock);
524 	fbdiv = (u32) tmp;
525 
526 	if (dividers.enable_post_div)
527 		spll_func_cntl |= SPLL_DIVEN;
528 	else
529 		spll_func_cntl &= ~SPLL_DIVEN;
530 	spll_func_cntl &= ~(SPLL_HILEN_MASK | SPLL_LOLEN_MASK | SPLL_REF_DIV_MASK);
531 	spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
532 	spll_func_cntl |= SPLL_HILEN((dividers.post_div >> 4) & 0xf);
533 	spll_func_cntl |= SPLL_LOLEN(dividers.post_div & 0xf);
534 
535 	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
536 	spll_func_cntl_2 |= SCLK_MUX_SEL(2);
537 
538 	spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
539 	spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
540 	spll_func_cntl_3 |= SPLL_DITHEN;
541 
542 	if (pi->sclk_ss) {
543 		struct radeon_atom_ss ss;
544 		u32 vco_freq = engine_clock * post_divider;
545 
546 		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
547 						     ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
548 			u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
549 			u32 clk_v = ss.percentage * fbdiv / (clk_s * 10000);
550 
551 			cg_spll_spread_spectrum &= ~CLKS_MASK;
552 			cg_spll_spread_spectrum |= CLKS(clk_s);
553 			cg_spll_spread_spectrum |= SSEN;
554 
555 			cg_spll_spread_spectrum_2 &= ~CLKV_MASK;
556 			cg_spll_spread_spectrum_2 |= CLKV(clk_v);
557 		}
558 	}
559 
560 	sclk->sclk_value = cpu_to_be32(engine_clock);
561 	sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
562 	sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
563 	sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
564 	sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(cg_spll_spread_spectrum);
565 	sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(cg_spll_spread_spectrum_2);
566 
567 	return 0;
568 }
569 
570 int rv770_populate_vddc_value(struct radeon_device *rdev, u16 vddc,
571 			      RV770_SMC_VOLTAGE_VALUE *voltage)
572 {
573 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
574 	int i;
575 
576 	if (!pi->voltage_control) {
577 		voltage->index = 0;
578 		voltage->value = 0;
579 		return 0;
580 	}
581 
582 	for (i = 0; i < pi->valid_vddc_entries; i++) {
583 		if (vddc <= pi->vddc_table[i].vddc) {
584 			voltage->index = pi->vddc_table[i].vddc_index;
585 			voltage->value = cpu_to_be16(vddc);
586 			break;
587 		}
588 	}
589 
590 	if (i == pi->valid_vddc_entries)
591 		return -EINVAL;
592 
593 	return 0;
594 }
595 
596 int rv770_populate_mvdd_value(struct radeon_device *rdev, u32 mclk,
597 			      RV770_SMC_VOLTAGE_VALUE *voltage)
598 {
599 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
600 
601 	if (!pi->mvdd_control) {
602 		voltage->index = MVDD_HIGH_INDEX;
603 		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
604 		return 0;
605 	}
606 
607 	if (mclk <= pi->mvdd_split_frequency) {
608 		voltage->index = MVDD_LOW_INDEX;
609 		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
610 	} else {
611 		voltage->index = MVDD_HIGH_INDEX;
612 		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
613 	}
614 
615 	return 0;
616 }
617 
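/*
 * Translate one rv7xx performance level into its SMC representation:
 * PCIe gen2 and back-bias flags, display watermark, the family-specific
 * sclk and mclk register blocks (RV740 additionally sets strobe mode and
 * EDC flags for GDDR5), and the VDDC/MVDD voltage entries.
 */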
618 static int rv770_convert_power_level_to_smc(struct radeon_device *rdev,
619 					    struct rv7xx_pl *pl,
620 					    RV770_SMC_HW_PERFORMANCE_LEVEL *level,
621 					    u8 watermark_level)
622 {
623 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
624 	int ret;
625 
626 	level->gen2PCIE = pi->pcie_gen2 ?
627 		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
628 	level->gen2XSP  = (pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0;
629 	level->backbias = (pl->flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? 1 : 0;
630 	level->displayWatermark = watermark_level;
631 
632 	if (rdev->family == CHIP_RV740)
633 		ret = rv740_populate_sclk_value(rdev, pl->sclk,
634 						&level->sclk);
635 	else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
636 		ret = rv730_populate_sclk_value(rdev, pl->sclk,
637 						&level->sclk);
638 	else
639 		ret = rv770_populate_sclk_value(rdev, pl->sclk,
640 						&level->sclk);
641 	if (ret)
642 		return ret;
643 
644 	if (rdev->family == CHIP_RV740) {
645 		if (pi->mem_gddr5) {
646 			if (pl->mclk <= pi->mclk_strobe_mode_threshold)
647 				level->strobeMode =
648 					rv740_get_mclk_frequency_ratio(pl->mclk) | 0x10;
649 			else
650 				level->strobeMode = 0;
651 
652 			if (pl->mclk > pi->mclk_edc_enable_threshold)
653 				level->mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
654 			else
655 				level->mcFlags =  0;
656 		}
657 		ret = rv740_populate_mclk_value(rdev, pl->sclk,
658 						pl->mclk, &level->mclk);
659 	} else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
660 		ret = rv730_populate_mclk_value(rdev, pl->sclk,
661 						pl->mclk, &level->mclk);
662 	else
663 		ret = rv770_populate_mclk_value(rdev, pl->sclk,
664 						pl->mclk, &level->mclk);
665 	if (ret)
666 		return ret;
667 
668 	ret = rv770_populate_vddc_value(rdev, pl->vddc,
669 					&level->vddc);
670 	if (ret)
671 		return ret;
672 
673 	ret = rv770_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
674 
675 	return ret;
676 }
677 
678 static int rv770_convert_power_state_to_smc(struct radeon_device *rdev,
679 					    struct radeon_ps *radeon_state,
680 					    RV770_SMC_SWSTATE *smc_state)
681 {
682 	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
683 	int ret;
684 
685 	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
686 		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
687 
688 	ret = rv770_convert_power_level_to_smc(rdev,
689 					       &state->low,
690 					       &smc_state->levels[0],
691 					       PPSMC_DISPLAY_WATERMARK_LOW);
692 	if (ret)
693 		return ret;
694 
695 	ret = rv770_convert_power_level_to_smc(rdev,
696 					       &state->medium,
697 					       &smc_state->levels[1],
698 					       PPSMC_DISPLAY_WATERMARK_LOW);
699 	if (ret)
700 		return ret;
701 
702 	ret = rv770_convert_power_level_to_smc(rdev,
703 					       &state->high,
704 					       &smc_state->levels[2],
705 					       PPSMC_DISPLAY_WATERMARK_HIGH);
706 	if (ret)
707 		return ret;
708 
709 	smc_state->levels[0].arbValue = MC_CG_ARB_FREQ_F1;
710 	smc_state->levels[1].arbValue = MC_CG_ARB_FREQ_F2;
711 	smc_state->levels[2].arbValue = MC_CG_ARB_FREQ_F3;
712 
713 	smc_state->levels[0].seqValue = rv770_get_seq_value(rdev,
714 							    &state->low);
715 	smc_state->levels[1].seqValue = rv770_get_seq_value(rdev,
716 							    &state->medium);
717 	smc_state->levels[2].seqValue = rv770_get_seq_value(rdev,
718 							    &state->high);
719 
720 	rv770_populate_smc_sp(rdev, radeon_state, smc_state);
721 
722 	return rv770_populate_smc_t(rdev, radeon_state, smc_state);
723 
724 }
725 
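/*
 * Derive the MC arbiter refresh rate for a given engine clock from the
 * DRAM geometry: the row count comes from MC_ARB_RAMCFG and the refresh
 * interval from MC_SEQ_MISC0, giving
 *   rate = (engine_clock * 10 * refresh / rows - 32) / 64.
 */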
726 u32 rv770_calculate_memory_refresh_rate(struct radeon_device *rdev,
727 					u32 engine_clock)
728 {
729 	u32 dram_rows;
730 	u32 dram_refresh_rate;
731 	u32 mc_arb_rfsh_rate;
732 	u32 tmp;
733 
734 	tmp = (RREG32(MC_ARB_RAMCFG) & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
735 	dram_rows = 1 << (tmp + 10);
736 	tmp = RREG32(MC_SEQ_MISC0) & 3;
737 	dram_refresh_rate = 1 << (tmp + 3);
738 	mc_arb_rfsh_rate = ((engine_clock * 10) * dram_refresh_rate / dram_rows - 32) / 64;
739 
740 	return mc_arb_rfsh_rate;
741 }
742 
743 static void rv770_program_memory_timing_parameters(struct radeon_device *rdev,
744 						   struct radeon_ps *radeon_state)
745 {
746 	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
747 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
748 	u32 sqm_ratio;
749 	u32 arb_refresh_rate;
750 	u32 high_clock;
751 
752 	if (state->high.sclk < (state->low.sclk * 0xFF / 0x40))
753 		high_clock = state->high.sclk;
754 	else
755 		high_clock = (state->low.sclk * 0xFF / 0x40);
756 
757 	radeon_atom_set_engine_dram_timings(rdev, high_clock,
758 					    state->high.mclk);
759 
760 	sqm_ratio =
761 		STATE0(64 * high_clock / pi->boot_sclk) |
762 		STATE1(64 * high_clock / state->low.sclk) |
763 		STATE2(64 * high_clock / state->medium.sclk) |
764 		STATE3(64 * high_clock / state->high.sclk);
765 	WREG32(MC_ARB_SQM_RATIO, sqm_ratio);
766 
767 	arb_refresh_rate =
768 		POWERMODE0(rv770_calculate_memory_refresh_rate(rdev, pi->boot_sclk)) |
769 		POWERMODE1(rv770_calculate_memory_refresh_rate(rdev, state->low.sclk)) |
770 		POWERMODE2(rv770_calculate_memory_refresh_rate(rdev, state->medium.sclk)) |
771 		POWERMODE3(rv770_calculate_memory_refresh_rate(rdev, state->high.sclk));
772 	WREG32(MC_ARB_RFSH_RATE, arb_refresh_rate);
773 }
774 
775 void rv770_enable_backbias(struct radeon_device *rdev,
776 			   bool enable)
777 {
778 	if (enable)
779 		WREG32_P(GENERAL_PWRMGT, BACKBIAS_PAD_EN, ~BACKBIAS_PAD_EN);
780 	else
781 		WREG32_P(GENERAL_PWRMGT, 0, ~(BACKBIAS_VALUE | BACKBIAS_PAD_EN));
782 }
783 
784 static void rv770_enable_spread_spectrum(struct radeon_device *rdev,
785 					 bool enable)
786 {
787 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
788 
789 	if (enable) {
790 		if (pi->sclk_ss)
791 			WREG32_P(GENERAL_PWRMGT, DYN_SPREAD_SPECTRUM_EN, ~DYN_SPREAD_SPECTRUM_EN);
792 
793 		if (pi->mclk_ss) {
794 			if (rdev->family == CHIP_RV740)
795 				rv740_enable_mclk_spread_spectrum(rdev, true);
796 		}
797 	} else {
798 		WREG32_P(CG_SPLL_SPREAD_SPECTRUM, 0, ~SSEN);
799 
800 		WREG32_P(GENERAL_PWRMGT, 0, ~DYN_SPREAD_SPECTRUM_EN);
801 
802 		WREG32_P(CG_MPLL_SPREAD_SPECTRUM, 0, ~SSEN);
803 
804 		if (rdev->family == CHIP_RV740)
805 			rv740_enable_mclk_spread_spectrum(rdev, false);
806 	}
807 }
808 
809 static void rv770_program_mpll_timing_parameters(struct radeon_device *rdev)
810 {
811 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
812 
813 	if ((rdev->family == CHIP_RV770) && !pi->mem_gddr5) {
814 		WREG32(MPLL_TIME,
815 		       (MPLL_LOCK_TIME(R600_MPLLLOCKTIME_DFLT * pi->ref_div) |
816 			MPLL_RESET_TIME(R600_MPLLRESETTIME_DFLT)));
817 	}
818 }
819 
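/*
 * Compute the bsp/bsu and pbsp/pbsu pairs from the asi/pasi intervals and
 * the crystal clock via r600_calculate_u_and_p(), cache the packed dsp/psp
 * values for later SMC state setup, and program dsp into CG_BSP.
 */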
820 void rv770_setup_bsp(struct radeon_device *rdev)
821 {
822 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
823 	u32 xclk = radeon_get_xclk(rdev);
824 
825 	r600_calculate_u_and_p(pi->asi,
826 			       xclk,
827 			       16,
828 			       &pi->bsp,
829 			       &pi->bsu);
830 
831 	r600_calculate_u_and_p(pi->pasi,
832 			       xclk,
833 			       16,
834 			       &pi->pbsp,
835 			       &pi->pbsu);
836 
837 	pi->dsp = BSP(pi->bsp) | BSU(pi->bsu);
838 	pi->psp = BSP(pi->pbsp) | BSU(pi->pbsu);
839 
840 	WREG32(CG_BSP, pi->dsp);
841 
842 }
843 
844 void rv770_program_git(struct radeon_device *rdev)
845 {
846 	WREG32_P(CG_GIT, CG_GICST(R600_GICST_DFLT), ~CG_GICST_MASK);
847 }
848 
849 void rv770_program_tp(struct radeon_device *rdev)
850 {
851 	int i;
852 	enum r600_td td = R600_TD_DFLT;
853 
854 	for (i = 0; i < R600_PM_NUMBER_OF_TC; i++)
855 		WREG32(CG_FFCT_0 + (i * 4), (UTC_0(r600_utc[i]) | DTC_0(r600_dtc[i])));
856 
857 	if (td == R600_TD_AUTO)
858 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_FORCE_TREND_SEL);
859 	else
860 		WREG32_P(SCLK_PWRMGT_CNTL, FIR_FORCE_TREND_SEL, ~FIR_FORCE_TREND_SEL);
861 	if (td == R600_TD_UP)
862 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_TREND_MODE);
863 	if (td == R600_TD_DOWN)
864 		WREG32_P(SCLK_PWRMGT_CNTL, FIR_TREND_MODE, ~FIR_TREND_MODE);
865 }
866 
867 void rv770_program_tpp(struct radeon_device *rdev)
868 {
869 	WREG32(CG_TPC, R600_TPC_DFLT);
870 }
871 
872 void rv770_program_sstp(struct radeon_device *rdev)
873 {
874 	WREG32(CG_SSP, (SSTU(R600_SSTU_DFLT) | SST(R600_SST_DFLT)));
875 }
876 
877 void rv770_program_engine_speed_parameters(struct radeon_device *rdev)
878 {
879 	WREG32_P(SPLL_CNTL_MODE, SPLL_DIV_SYNC, ~SPLL_DIV_SYNC);
880 }
881 
882 static void rv770_enable_display_gap(struct radeon_device *rdev)
883 {
884 	u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
885 
886 	tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
887 	tmp |= (DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE) |
888 		DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE));
889 	WREG32(CG_DISPLAY_GAP_CNTL, tmp);
890 }
891 
892 void rv770_program_vc(struct radeon_device *rdev)
893 {
894 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
895 
896 	WREG32(CG_FTV, pi->vrc);
897 }
898 
899 void rv770_clear_vc(struct radeon_device *rdev)
900 {
901 	WREG32(CG_FTV, 0);
902 }
903 
904 int rv770_upload_firmware(struct radeon_device *rdev)
905 {
906 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
907 	int ret;
908 
909 	rv770_reset_smc(rdev);
910 	rv770_stop_smc_clock(rdev);
911 
912 	ret = rv770_load_smc_ucode(rdev, pi->sram_end);
913 	if (ret)
914 		return ret;
915 
916 	return 0;
917 }
918 
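/*
 * Build the ACPI (low-power) state from the initial state: use the ACPI
 * VDDC level when one is defined (otherwise the minimum table VDDC), put
 * the MPLL AD/DQ and SPLL into reset/bypass, hold the memory DLLs in
 * reset, and duplicate level 0 across all three performance levels.
 */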
919 static int rv770_populate_smc_acpi_state(struct radeon_device *rdev,
920 					 RV770_SMC_STATETABLE *table)
921 {
922 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
923 
924 	u32 mpll_ad_func_cntl =
925 		pi->clk_regs.rv770.mpll_ad_func_cntl;
926 	u32 mpll_ad_func_cntl_2 =
927 		pi->clk_regs.rv770.mpll_ad_func_cntl_2;
928 	u32 mpll_dq_func_cntl =
929 		pi->clk_regs.rv770.mpll_dq_func_cntl;
930 	u32 mpll_dq_func_cntl_2 =
931 		pi->clk_regs.rv770.mpll_dq_func_cntl_2;
932 	u32 spll_func_cntl =
933 		pi->clk_regs.rv770.cg_spll_func_cntl;
934 	u32 spll_func_cntl_2 =
935 		pi->clk_regs.rv770.cg_spll_func_cntl_2;
936 	u32 spll_func_cntl_3 =
937 		pi->clk_regs.rv770.cg_spll_func_cntl_3;
938 	u32 mclk_pwrmgt_cntl;
939 	u32 dll_cntl;
940 
941 	table->ACPIState = table->initialState;
942 
943 	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
944 
945 	if (pi->acpi_vddc) {
946 		rv770_populate_vddc_value(rdev, pi->acpi_vddc,
947 					  &table->ACPIState.levels[0].vddc);
948 		if (pi->pcie_gen2) {
949 			if (pi->acpi_pcie_gen2)
950 				table->ACPIState.levels[0].gen2PCIE = 1;
951 			else
952 				table->ACPIState.levels[0].gen2PCIE = 0;
953 		} else
954 			table->ACPIState.levels[0].gen2PCIE = 0;
955 		if (pi->acpi_pcie_gen2)
956 			table->ACPIState.levels[0].gen2XSP = 1;
957 		else
958 			table->ACPIState.levels[0].gen2XSP = 0;
959 	} else {
960 		rv770_populate_vddc_value(rdev, pi->min_vddc_in_table,
961 					  &table->ACPIState.levels[0].vddc);
962 		table->ACPIState.levels[0].gen2PCIE = 0;
963 	}
964 
965 
966 	mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
967 
968 	mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
969 
970 	mclk_pwrmgt_cntl = (MRDCKA0_RESET |
971 			    MRDCKA1_RESET |
972 			    MRDCKB0_RESET |
973 			    MRDCKB1_RESET |
974 			    MRDCKC0_RESET |
975 			    MRDCKC1_RESET |
976 			    MRDCKD0_RESET |
977 			    MRDCKD1_RESET);
978 
979 	dll_cntl = 0xff000000;
980 
981 	spll_func_cntl |= SPLL_RESET | SPLL_SLEEP | SPLL_BYPASS_EN;
982 
983 	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
984 	spll_func_cntl_2 |= SCLK_MUX_SEL(4);
985 
986 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
987 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
988 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
989 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
990 
991 	table->ACPIState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
992 	table->ACPIState.levels[0].mclk.mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
993 
994 	table->ACPIState.levels[0].mclk.mclk770.mclk_value = 0;
995 
996 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
997 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
998 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
999 
1000 	table->ACPIState.levels[0].sclk.sclk_value = 0;
1001 
1002 	rv770_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
1003 
1004 	table->ACPIState.levels[1] = table->ACPIState.levels[0];
1005 	table->ACPIState.levels[2] = table->ACPIState.levels[0];
1006 
1007 	return 0;
1008 }
1009 
1010 int rv770_populate_initial_mvdd_value(struct radeon_device *rdev,
1011 				      RV770_SMC_VOLTAGE_VALUE *voltage)
1012 {
1013 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1014 
1015 	if ((pi->s0_vid_lower_smio_cntl & pi->mvdd_mask_low) ==
1016 	     (pi->mvdd_low_smio[MVDD_LOW_INDEX] & pi->mvdd_mask_low) ) {
1017 		voltage->index = MVDD_LOW_INDEX;
1018 		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1019 	} else {
1020 		voltage->index = MVDD_HIGH_INDEX;
1021 		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1022 	}
1023 
1024 	return 0;
1025 }
1026 
1027 static int rv770_populate_smc_initial_state(struct radeon_device *rdev,
1028 					    struct radeon_ps *radeon_state,
1029 					    RV770_SMC_STATETABLE *table)
1030 {
1031 	struct rv7xx_ps *initial_state = rv770_get_ps(radeon_state);
1032 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1033 	u32 a_t;
1034 
1035 	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
1036 		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl);
1037 	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
1038 		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl_2);
1039 	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
1040 		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl);
1041 	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
1042 		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl_2);
1043 	table->initialState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
1044 		cpu_to_be32(pi->clk_regs.rv770.mclk_pwrmgt_cntl);
1045 	table->initialState.levels[0].mclk.mclk770.vDLL_CNTL =
1046 		cpu_to_be32(pi->clk_regs.rv770.dll_cntl);
1047 
1048 	table->initialState.levels[0].mclk.mclk770.vMPLL_SS =
1049 		cpu_to_be32(pi->clk_regs.rv770.mpll_ss1);
1050 	table->initialState.levels[0].mclk.mclk770.vMPLL_SS2 =
1051 		cpu_to_be32(pi->clk_regs.rv770.mpll_ss2);
1052 
1053 	table->initialState.levels[0].mclk.mclk770.mclk_value =
1054 		cpu_to_be32(initial_state->low.mclk);
1055 
1056 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1057 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl);
1058 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1059 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_2);
1060 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1061 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_3);
1062 	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1063 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum);
1064 	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1065 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum_2);
1066 
1067 	table->initialState.levels[0].sclk.sclk_value =
1068 		cpu_to_be32(initial_state->low.sclk);
1069 
1070 	table->initialState.levels[0].arbValue = MC_CG_ARB_FREQ_F0;
1071 
1072 	table->initialState.levels[0].seqValue =
1073 		rv770_get_seq_value(rdev, &initial_state->low);
1074 
1075 	rv770_populate_vddc_value(rdev,
1076 				  initial_state->low.vddc,
1077 				  &table->initialState.levels[0].vddc);
1078 	rv770_populate_initial_mvdd_value(rdev,
1079 					  &table->initialState.levels[0].mvdd);
1080 
1081 	a_t = CG_R(0xffff) | CG_L(0);
1082 	table->initialState.levels[0].aT = cpu_to_be32(a_t);
1083 
1084 	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1085 
1086 	if (pi->boot_in_gen2)
1087 		table->initialState.levels[0].gen2PCIE = 1;
1088 	else
1089 		table->initialState.levels[0].gen2PCIE = 0;
1090 	if (initial_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
1091 		table->initialState.levels[0].gen2XSP = 1;
1092 	else
1093 		table->initialState.levels[0].gen2XSP = 0;
1094 
1095 	if (rdev->family == CHIP_RV740) {
1096 		if (pi->mem_gddr5) {
1097 			if (initial_state->low.mclk <= pi->mclk_strobe_mode_threshold)
1098 				table->initialState.levels[0].strobeMode =
1099 					rv740_get_mclk_frequency_ratio(initial_state->low.mclk) | 0x10;
1100 			else
1101 				table->initialState.levels[0].strobeMode = 0;
1102 
1103 			if (initial_state->low.mclk >= pi->mclk_edc_enable_threshold)
1104 				table->initialState.levels[0].mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
1105 			else
1106 				table->initialState.levels[0].mcFlags =  0;
1107 		}
1108 	}
1109 
1110 	table->initialState.levels[1] = table->initialState.levels[0];
1111 	table->initialState.levels[2] = table->initialState.levels[0];
1112 
1113 	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
1114 
1115 	return 0;
1116 }
1117 
1118 static int rv770_populate_smc_vddc_table(struct radeon_device *rdev,
1119 					 RV770_SMC_STATETABLE *table)
1120 {
1121 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1122 	int i;
1123 
1124 	for (i = 0; i < pi->valid_vddc_entries; i++) {
1125 		table->highSMIO[pi->vddc_table[i].vddc_index] =
1126 			pi->vddc_table[i].high_smio;
1127 		table->lowSMIO[pi->vddc_table[i].vddc_index] =
1128 			cpu_to_be32(pi->vddc_table[i].low_smio);
1129 	}
1130 
1131 	table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDC] = 0;
1132 	table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDC] =
1133 		cpu_to_be32(pi->vddc_mask_low);
1134 
1135 	for (i = 0;
1136 	     ((i < pi->valid_vddc_entries) &&
1137 	      (pi->max_vddc_in_table >
1138 	       pi->vddc_table[i].vddc));
1139 	     i++);
1140 
1141 	table->maxVDDCIndexInPPTable =
1142 		pi->vddc_table[i].vddc_index;
1143 
1144 	return 0;
1145 }
1146 
1147 static int rv770_populate_smc_mvdd_table(struct radeon_device *rdev,
1148 					 RV770_SMC_STATETABLE *table)
1149 {
1150 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1151 
1152 	if (pi->mvdd_control) {
1153 		table->lowSMIO[MVDD_HIGH_INDEX] |=
1154 			cpu_to_be32(pi->mvdd_low_smio[MVDD_HIGH_INDEX]);
1155 		table->lowSMIO[MVDD_LOW_INDEX] |=
1156 			cpu_to_be32(pi->mvdd_low_smio[MVDD_LOW_INDEX]);
1157 
1158 		table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_MVDD] = 0;
1159 		table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_MVDD] =
1160 			cpu_to_be32(pi->mvdd_mask_low);
1161 	}
1162 
1163 	return 0;
1164 }
1165 
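/*
 * Populate the full SMC state table (VDDC/MVDD voltage tables, thermal
 * protection type, platform flags, and the family-specific initial and
 * ACPI states) and upload it to SMC SRAM at state_table_start.
 */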
1166 static int rv770_init_smc_table(struct radeon_device *rdev,
1167 				struct radeon_ps *radeon_boot_state)
1168 {
1169 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1170 	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1171 	RV770_SMC_STATETABLE *table = &pi->smc_statetable;
1172 	int ret;
1173 
1174 	memset(table, 0, sizeof(RV770_SMC_STATETABLE));
1175 
1176 	pi->boot_sclk = boot_state->low.sclk;
1177 
1178 	rv770_populate_smc_vddc_table(rdev, table);
1179 	rv770_populate_smc_mvdd_table(rdev, table);
1180 
1181 	switch (rdev->pm.int_thermal_type) {
1182 	case THERMAL_TYPE_RV770:
1183 	case THERMAL_TYPE_ADT7473_WITH_INTERNAL:
1184 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1185 		break;
1186 	case THERMAL_TYPE_NONE:
1187 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1188 		break;
1189 	case THERMAL_TYPE_EXTERNAL_GPIO:
1190 	default:
1191 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
1192 		break;
1193 	}
1194 
1195 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC) {
1196 		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1197 
1198 		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_DONT_WAIT_FOR_VBLANK_ON_ALERT)
1199 			table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_DONT_WAIT_FOR_VBLANK;
1200 
1201 		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_GOTO_BOOT_ON_ALERT)
1202 			table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_ACTION_GOTOINITIALSTATE;
1203 	}
1204 
1205 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1206 		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1207 
1208 	if (pi->mem_gddr5)
1209 		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1210 
1211 	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1212 		ret = rv730_populate_smc_initial_state(rdev, radeon_boot_state, table);
1213 	else
1214 		ret = rv770_populate_smc_initial_state(rdev, radeon_boot_state, table);
1215 	if (ret)
1216 		return ret;
1217 
1218 	if (rdev->family == CHIP_RV740)
1219 		ret = rv740_populate_smc_acpi_state(rdev, table);
1220 	else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1221 		ret = rv730_populate_smc_acpi_state(rdev, table);
1222 	else
1223 		ret = rv770_populate_smc_acpi_state(rdev, table);
1224 	if (ret)
1225 		return ret;
1226 
1227 	table->driverState = table->initialState;
1228 
1229 	return rv770_copy_bytes_to_smc(rdev,
1230 				       pi->state_table_start,
1231 				       (const u8 *)table,
1232 				       sizeof(RV770_SMC_STATETABLE),
1233 				       pi->sram_end);
1234 }
1235 
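/*
 * Build the VDDC table from the ATOM min/max/step voltage range: one entry
 * per step with its SMIO (GPIO) pin pattern, with consecutive steps that
 * share the same pin pattern collapsed onto a single vddc_index.
 */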
1236 static int rv770_construct_vddc_table(struct radeon_device *rdev)
1237 {
1238 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1239 	u16 min, max, step;
1240 	u32 steps = 0;
1241 	u8 vddc_index = 0;
1242 	u32 i;
1243 
1244 	radeon_atom_get_min_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &min);
1245 	radeon_atom_get_max_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &max);
1246 	radeon_atom_get_voltage_step(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &step);
1247 
1248 	steps = (max - min) / step + 1;
1249 
1250 	if (steps > MAX_NO_VREG_STEPS)
1251 		return -EINVAL;
1252 
1253 	for (i = 0; i < steps; i++) {
1254 		u32 gpio_pins, gpio_mask;
1255 
1256 		pi->vddc_table[i].vddc = (u16)(min + i * step);
1257 		radeon_atom_get_voltage_gpio_settings(rdev,
1258 						      pi->vddc_table[i].vddc,
1259 						      SET_VOLTAGE_TYPE_ASIC_VDDC,
1260 						      &gpio_pins, &gpio_mask);
1261 		pi->vddc_table[i].low_smio = gpio_pins & gpio_mask;
1262 		pi->vddc_table[i].high_smio = 0;
1263 		pi->vddc_mask_low = gpio_mask;
1264 		if (i > 0) {
1265 			if ((pi->vddc_table[i].low_smio !=
1266 			     pi->vddc_table[i - 1].low_smio ) ||
1267 			     (pi->vddc_table[i].high_smio !=
1268 			      pi->vddc_table[i - 1].high_smio))
1269 				vddc_index++;
1270 		}
1271 		pi->vddc_table[i].vddc_index = vddc_index;
1272 	}
1273 
1274 	pi->valid_vddc_entries = (u8)steps;
1275 
1276 	return 0;
1277 }
1278 
1279 static u32 rv770_get_mclk_split_point(struct atom_memory_info *memory_info)
1280 {
1281 	if (memory_info->mem_type == MEM_TYPE_GDDR3)
1282 		return 30000;
1283 
1284 	return 0;
1285 }
1286 
1287 static int rv770_get_mvdd_pin_configuration(struct radeon_device *rdev)
1288 {
1289 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1290 	u32 gpio_pins, gpio_mask;
1291 
1292 	radeon_atom_get_voltage_gpio_settings(rdev,
1293 					      MVDD_HIGH_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
1294 					      &gpio_pins, &gpio_mask);
1295 	pi->mvdd_mask_low = gpio_mask;
1296 	pi->mvdd_low_smio[MVDD_HIGH_INDEX] =
1297 		gpio_pins & gpio_mask;
1298 
1299 	radeon_atom_get_voltage_gpio_settings(rdev,
1300 					      MVDD_LOW_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
1301 					      &gpio_pins, &gpio_mask);
1302 	pi->mvdd_low_smio[MVDD_LOW_INDEX] =
1303 		gpio_pins & gpio_mask;
1304 
1305 	return 0;
1306 }
1307 
1308 u8 rv770_get_memory_module_index(struct radeon_device *rdev)
1309 {
1310 	return (u8) ((RREG32(BIOS_SCRATCH_4) >> 16) & 0xff);
1311 }
1312 
1313 static int rv770_get_mvdd_configuration(struct radeon_device *rdev)
1314 {
1315 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1316 	u8 memory_module_index;
1317 	struct atom_memory_info memory_info;
1318 
1319 	memory_module_index = rv770_get_memory_module_index(rdev);
1320 
1321 	if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info)) {
1322 		pi->mvdd_control = false;
1323 		return 0;
1324 	}
1325 
1326 	pi->mvdd_split_frequency =
1327 		rv770_get_mclk_split_point(&memory_info);
1328 
1329 	if (pi->mvdd_split_frequency == 0) {
1330 		pi->mvdd_control = false;
1331 		return 0;
1332 	}
1333 
1334 	return rv770_get_mvdd_pin_configuration(rdev);
1335 }
1336 
1337 void rv770_enable_voltage_control(struct radeon_device *rdev,
1338 				  bool enable)
1339 {
1340 	if (enable)
1341 		WREG32_P(GENERAL_PWRMGT, VOLT_PWRMGT_EN, ~VOLT_PWRMGT_EN);
1342 	else
1343 		WREG32_P(GENERAL_PWRMGT, 0, ~VOLT_PWRMGT_EN);
1344 }
1345 
1346 static void rv770_program_display_gap(struct radeon_device *rdev)
1347 {
1348 	u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
1349 
1350 	tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
1351 	if (rdev->pm.dpm.new_active_crtcs & 1) {
1352 		tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
1353 		tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1354 	} else if (rdev->pm.dpm.new_active_crtcs & 2) {
1355 		tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1356 		tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
1357 	} else {
1358 		tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1359 		tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1360 	}
1361 	WREG32(CG_DISPLAY_GAP_CNTL, tmp);
1362 }
1363 
1364 static void rv770_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
1365 					   bool enable)
1366 {
1367 	rv770_enable_bif_dynamic_pcie_gen2(rdev, enable);
1368 
1369 	if (enable)
1370 		WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
1371 	else
1372 		WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
1373 }
1374 
1375 static void r7xx_program_memory_timing_parameters(struct radeon_device *rdev,
1376 						  struct radeon_ps *radeon_new_state)
1377 {
1378 	if ((rdev->family == CHIP_RV730) ||
1379 	    (rdev->family == CHIP_RV710) ||
1380 	    (rdev->family == CHIP_RV740))
1381 		rv730_program_memory_timing_parameters(rdev, radeon_new_state);
1382 	else
1383 		rv770_program_memory_timing_parameters(rdev, radeon_new_state);
1384 }
1385 
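/*
 * Convert the requested power state into an RV770_SMC_SWSTATE and copy it
 * into the driverState slot of the SMC state table in SRAM.
 */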
1386 static int rv770_upload_sw_state(struct radeon_device *rdev,
1387 				 struct radeon_ps *radeon_new_state)
1388 {
1389 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1390 	u16 address = pi->state_table_start +
1391 		offsetof(RV770_SMC_STATETABLE, driverState);
1392 	RV770_SMC_SWSTATE state = { 0 };
1393 	int ret;
1394 
1395 	ret = rv770_convert_power_state_to_smc(rdev, radeon_new_state, &state);
1396 	if (ret)
1397 		return ret;
1398 
1399 	return rv770_copy_bytes_to_smc(rdev, address, (const u8 *)&state,
1400 				       sizeof(RV770_SMC_SWSTATE),
1401 				       pi->sram_end);
1402 }
1403 
1404 int rv770_halt_smc(struct radeon_device *rdev)
1405 {
1406 	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Halt) != PPSMC_Result_OK)
1407 		return -EINVAL;
1408 
1409 	if (rv770_wait_for_smc_inactive(rdev) != PPSMC_Result_OK)
1410 		return -EINVAL;
1411 
1412 	return 0;
1413 }
1414 
1415 int rv770_resume_smc(struct radeon_device *rdev)
1416 {
1417 	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Resume) != PPSMC_Result_OK)
1418 		return -EINVAL;
1419 	return 0;
1420 }
1421 
1422 int rv770_set_sw_state(struct radeon_device *rdev)
1423 {
1424 	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToSwState) != PPSMC_Result_OK)
1425 		DRM_DEBUG("rv770_set_sw_state failed\n");
1426 	return 0;
1427 }
1428 
1429 int rv770_set_boot_state(struct radeon_device *rdev)
1430 {
1431 	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToInitialState) != PPSMC_Result_OK)
1432 		return -EINVAL;
1433 	return 0;
1434 }
1435 
1436 void rv770_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
1437 					      struct radeon_ps *new_ps,
1438 					      struct radeon_ps *old_ps)
1439 {
1440 	struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
1441 	struct rv7xx_ps *current_state = rv770_get_ps(old_ps);
1442 
1443 	if ((new_ps->vclk == old_ps->vclk) &&
1444 	    (new_ps->dclk == old_ps->dclk))
1445 		return;
1446 
1447 	if (new_state->high.sclk >= current_state->high.sclk)
1448 		return;
1449 
1450 	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
1451 }
1452 
1453 void rv770_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
1454 					     struct radeon_ps *new_ps,
1455 					     struct radeon_ps *old_ps)
1456 {
1457 	struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
1458 	struct rv7xx_ps *current_state = rv770_get_ps(old_ps);
1459 
1460 	if ((new_ps->vclk == old_ps->vclk) &&
1461 	    (new_ps->dclk == old_ps->dclk))
1462 		return;
1463 
1464 	if (new_state->high.sclk < current_state->high.sclk)
1465 		return;
1466 
1467 	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
1468 }
1469 
1470 int rv770_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1471 {
1472 	if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_NoForcedLevel)) != PPSMC_Result_OK)
1473 		return -EINVAL;
1474 
1475 	if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_TwoLevelsDisabled)) != PPSMC_Result_OK)
1476 		return -EINVAL;
1477 
1478 	return 0;
1479 }
1480 
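/*
 * Force a DPM performance level via SMC messages: "high" re-enables all
 * levels (ZeroLevelsDisabled) and then forces the top one, "low" clears
 * any forced level and sends TwoLevelsDisabled, and the auto case clears
 * the forced level and re-enables all levels.
 */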
1481 int rv770_dpm_force_performance_level(struct radeon_device *rdev,
1482 				      enum radeon_dpm_forced_level level)
1483 {
1484 	PPSMC_Msg msg;
1485 
1486 	if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
1487 		if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_ZeroLevelsDisabled) != PPSMC_Result_OK)
1488 			return -EINVAL;
1489 		msg = PPSMC_MSG_ForceHigh;
1490 	} else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
1491 		if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1492 			return -EINVAL;
1493 		msg = (PPSMC_Msg)(PPSMC_MSG_TwoLevelsDisabled);
1494 	} else {
1495 		if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1496 			return -EINVAL;
1497 		msg = (PPSMC_Msg)(PPSMC_MSG_ZeroLevelsDisabled);
1498 	}
1499 
1500 	if (rv770_send_msg_to_smc(rdev, msg) != PPSMC_Result_OK)
1501 		return -EINVAL;
1502 
1503 	rdev->pm.dpm.forced_level = level;
1504 
1505 	return 0;
1506 }
1507 
1508 void r7xx_start_smc(struct radeon_device *rdev)
1509 {
1510 	rv770_start_smc(rdev);
1511 	rv770_start_smc_clock(rdev);
1512 }
1513 
1514 
1515 void r7xx_stop_smc(struct radeon_device *rdev)
1516 {
1517 	rv770_reset_smc(rdev);
1518 	rv770_stop_smc_clock(rdev);
1519 }
1520 
1521 static void rv770_read_clock_registers(struct radeon_device *rdev)
1522 {
1523 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1524 
1525 	pi->clk_regs.rv770.cg_spll_func_cntl =
1526 		RREG32(CG_SPLL_FUNC_CNTL);
1527 	pi->clk_regs.rv770.cg_spll_func_cntl_2 =
1528 		RREG32(CG_SPLL_FUNC_CNTL_2);
1529 	pi->clk_regs.rv770.cg_spll_func_cntl_3 =
1530 		RREG32(CG_SPLL_FUNC_CNTL_3);
1531 	pi->clk_regs.rv770.cg_spll_spread_spectrum =
1532 		RREG32(CG_SPLL_SPREAD_SPECTRUM);
1533 	pi->clk_regs.rv770.cg_spll_spread_spectrum_2 =
1534 		RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
1535 	pi->clk_regs.rv770.mpll_ad_func_cntl =
1536 		RREG32(MPLL_AD_FUNC_CNTL);
1537 	pi->clk_regs.rv770.mpll_ad_func_cntl_2 =
1538 		RREG32(MPLL_AD_FUNC_CNTL_2);
1539 	pi->clk_regs.rv770.mpll_dq_func_cntl =
1540 		RREG32(MPLL_DQ_FUNC_CNTL);
1541 	pi->clk_regs.rv770.mpll_dq_func_cntl_2 =
1542 		RREG32(MPLL_DQ_FUNC_CNTL_2);
1543 	pi->clk_regs.rv770.mclk_pwrmgt_cntl =
1544 		RREG32(MCLK_PWRMGT_CNTL);
1545 	pi->clk_regs.rv770.dll_cntl = RREG32(DLL_CNTL);
1546 }
1547 
1548 static void r7xx_read_clock_registers(struct radeon_device *rdev)
1549 {
1550 	if (rdev->family == CHIP_RV740)
1551 		rv740_read_clock_registers(rdev);
1552 	else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1553 		rv730_read_clock_registers(rdev);
1554 	else
1555 		rv770_read_clock_registers(rdev);
1556 }
1557 
1558 void rv770_read_voltage_smio_registers(struct radeon_device *rdev)
1559 {
1560 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1561 
1562 	pi->s0_vid_lower_smio_cntl =
1563 		RREG32(S0_VID_LOWER_SMIO_CNTL);
1564 }
1565 
1566 void rv770_reset_smio_status(struct radeon_device *rdev)
1567 {
1568 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1569 	u32 sw_smio_index, vid_smio_cntl;
1570 
1571 	sw_smio_index =
1572 		(RREG32(GENERAL_PWRMGT) & SW_SMIO_INDEX_MASK) >> SW_SMIO_INDEX_SHIFT;
1573 	switch (sw_smio_index) {
1574 	case 3:
1575 		vid_smio_cntl = RREG32(S3_VID_LOWER_SMIO_CNTL);
1576 		break;
1577 	case 2:
1578 		vid_smio_cntl = RREG32(S2_VID_LOWER_SMIO_CNTL);
1579 		break;
1580 	case 1:
1581 		vid_smio_cntl = RREG32(S1_VID_LOWER_SMIO_CNTL);
1582 		break;
1583 	case 0:
1584 		return;
1585 	default:
1586 		vid_smio_cntl = pi->s0_vid_lower_smio_cntl;
1587 		break;
1588 	}
1589 
1590 	WREG32(S0_VID_LOWER_SMIO_CNTL, vid_smio_cntl);
1591 	WREG32_P(GENERAL_PWRMGT, SW_SMIO_INDEX(0), ~SW_SMIO_INDEX_MASK);
1592 }
1593 
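/* Detect whether the board uses GDDR5 memory from MC_SEQ_MISC0. */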
1594 void rv770_get_memory_type(struct radeon_device *rdev)
1595 {
1596 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1597 	u32 tmp;
1598 
1599 	tmp = RREG32(MC_SEQ_MISC0);
1600 
1601 	if (((tmp & MC_SEQ_MISC0_GDDR5_MASK) >> MC_SEQ_MISC0_GDDR5_SHIFT) ==
1602 	    MC_SEQ_MISC0_GDDR5_VALUE)
1603 		pi->mem_gddr5 = true;
1604 	else
1605 		pi->mem_gddr5 = false;
1606 
1607 }
1608 
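/* Record whether the PCIe link partner supports gen2 and whether the
 * board booted at the gen2 data rate.
 */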
1609 void rv770_get_pcie_gen2_status(struct radeon_device *rdev)
1610 {
1611 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1612 	u32 tmp;
1613 
1614 	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
1615 
1616 	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
1617 	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
1618 		pi->pcie_gen2 = true;
1619 	else
1620 		pi->pcie_gen2 = false;
1621 
1622 	if (pi->pcie_gen2) {
1623 		if (tmp & LC_CURRENT_DATA_RATE)
1624 			pi->boot_in_gen2 = true;
1625 		else
1626 			pi->boot_in_gen2 = false;
1627 	} else
1628 		pi->boot_in_gen2 = false;
1629 }
1630 
1631 #if 0
1632 static int rv770_enter_ulp_state(struct radeon_device *rdev)
1633 {
1634 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1635 
1636 	if (pi->gfx_clock_gating) {
1637 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
1638 		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
1639 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
1640 		RREG32(GB_TILING_CONFIG);
1641 	}
1642 
1643 	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
1644 		 ~HOST_SMC_MSG_MASK);
1645 
1646 	udelay(7000);
1647 
1648 	return 0;
1649 }
1650 
1651 static int rv770_exit_ulp_state(struct radeon_device *rdev)
1652 {
1653 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1654 	int i;
1655 
1656 	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_ResumeFromMinimumPower),
1657 		 ~HOST_SMC_MSG_MASK);
1658 
1659 	udelay(7000);
1660 
1661 	for (i = 0; i < rdev->usec_timeout; i++) {
1662 		if (((RREG32(SMC_MSG) & HOST_SMC_RESP_MASK) >> HOST_SMC_RESP_SHIFT) == 1)
1663 			break;
1664 		udelay(1000);
1665 	}
1666 
1667 	if (pi->gfx_clock_gating)
1668 		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
1669 
1670 	return 0;
1671 }
1672 #endif
1673 
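/* RV730/RV710 boards with DDR2 or DDR3 memory use dynamic on-die
 * termination below this memory clock threshold; other parts leave the
 * threshold at 0 (disabled).
 */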
1674 static void rv770_get_mclk_odt_threshold(struct radeon_device *rdev)
1675 {
1676 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1677 	u8 memory_module_index;
1678 	struct atom_memory_info memory_info;
1679 
1680 	pi->mclk_odt_threshold = 0;
1681 
1682 	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) {
1683 		memory_module_index = rv770_get_memory_module_index(rdev);
1684 
1685 		if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info))
1686 			return;
1687 
1688 		if (memory_info.mem_type == MEM_TYPE_DDR2 ||
1689 		    memory_info.mem_type == MEM_TYPE_DDR3)
1690 			pi->mclk_odt_threshold = 30000;
1691 	}
1692 }
1693 
1694 void rv770_get_max_vddc(struct radeon_device *rdev)
1695 {
1696 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1697 	u16 vddc;
1698 
1699 	if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc))
1700 		pi->max_vddc = 0;
1701 	else
1702 		pi->max_vddc = vddc;
1703 }
1704 
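/* Scale the voltage, backbias, ACPI and VBI timeout values by the
 * reference clock and write them to SMC soft registers so the firmware
 * knows how long to wait during state transitions.
 */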
1705 void rv770_program_response_times(struct radeon_device *rdev)
1706 {
1707 	u32 voltage_response_time, backbias_response_time;
1708 	u32 acpi_delay_time, vbi_time_out;
1709 	u32 vddc_dly, bb_dly, acpi_dly, vbi_dly;
1710 	u32 reference_clock;
1711 
1712 	voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
1713 	backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
1714 
1715 	if (voltage_response_time == 0)
1716 		voltage_response_time = 1000;
1717 
1718 	if (backbias_response_time == 0)
1719 		backbias_response_time = 1000;
1720 
1721 	acpi_delay_time = 15000;
1722 	vbi_time_out = 100000;
1723 
1724 	reference_clock = radeon_get_xclk(rdev);
1725 
1726 	vddc_dly = (voltage_response_time  * reference_clock) / 1600;
1727 	bb_dly = (backbias_response_time * reference_clock) / 1600;
1728 	acpi_dly = (acpi_delay_time * reference_clock) / 1600;
1729 	vbi_dly = (vbi_time_out * reference_clock) / 1600;
1730 
1731 	rv770_write_smc_soft_register(rdev,
1732 				      RV770_SMC_SOFT_REGISTER_delay_vreg, vddc_dly);
1733 	rv770_write_smc_soft_register(rdev,
1734 				      RV770_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
1735 	rv770_write_smc_soft_register(rdev,
1736 				      RV770_SMC_SOFT_REGISTER_delay_acpi, acpi_dly);
1737 	rv770_write_smc_soft_register(rdev,
1738 				      RV770_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
1739 #if 0
1740 	/* XXX look up hw revision */
1741 	if (WEKIVA_A21)
1742 		rv770_write_smc_soft_register(rdev,
1743 					      RV770_SMC_SOFT_REGISTER_baby_step_timer,
1744 					      0x10);
1745 #endif
1746 }
1747 
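/* Program dynamic ODT before the state switch only when the new state
 * leaves the low-mclk (DC ODT) range; enabling DC ODT is deferred to
 * rv770_program_dcodt_after_state_switch() below.
 */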
1748 static void rv770_program_dcodt_before_state_switch(struct radeon_device *rdev,
1749 						    struct radeon_ps *radeon_new_state,
1750 						    struct radeon_ps *radeon_current_state)
1751 {
1752 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1753 	struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
1754 	struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
1755 	bool current_use_dc = false;
1756 	bool new_use_dc = false;
1757 
1758 	if (pi->mclk_odt_threshold == 0)
1759 		return;
1760 
1761 	if (current_state->high.mclk <= pi->mclk_odt_threshold)
1762 		current_use_dc = true;
1763 
1764 	if (new_state->high.mclk <= pi->mclk_odt_threshold)
1765 		new_use_dc = true;
1766 
1767 	if (current_use_dc == new_use_dc)
1768 		return;
1769 
1770 	if (!current_use_dc && new_use_dc)
1771 		return;
1772 
1773 	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1774 		rv730_program_dcodt(rdev, new_use_dc);
1775 }
1776 
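/* Counterpart of the function above: enable DC ODT after the switch when
 * the new state drops below the mclk ODT threshold.
 */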
1777 static void rv770_program_dcodt_after_state_switch(struct radeon_device *rdev,
1778 						   struct radeon_ps *radeon_new_state,
1779 						   struct radeon_ps *radeon_current_state)
1780 {
1781 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1782 	struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
1783 	struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
1784 	bool current_use_dc = false;
1785 	bool new_use_dc = false;
1786 
1787 	if (pi->mclk_odt_threshold == 0)
1788 		return;
1789 
1790 	if (current_state->high.mclk <= pi->mclk_odt_threshold)
1791 		current_use_dc = true;
1792 
1793 	if (new_state->high.mclk <= pi->mclk_odt_threshold)
1794 		new_use_dc = true;
1795 
1796 	if (current_use_dc == new_use_dc)
1797 		return;
1798 
1799 	if (current_use_dc && !new_use_dc)
1800 		return;
1801 
1802 	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1803 		rv730_program_dcodt(rdev, new_use_dc);
1804 }
1805 
1806 static void rv770_retrieve_odt_values(struct radeon_device *rdev)
1807 {
1808 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1809 
1810 	if (pi->mclk_odt_threshold == 0)
1811 		return;
1812 
1813 	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1814 		rv730_get_odt_values(rdev);
1815 }
1816 
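/* Translate the mask of active auto-throttle sources into a DPM event
 * source selection and enable or disable thermal protection to match.
 */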
1817 static void rv770_set_dpm_event_sources(struct radeon_device *rdev, u32 sources)
1818 {
1819 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1820 	bool want_thermal_protection;
1821 	enum radeon_dpm_event_src dpm_event_src;
1822 
1823 	switch (sources) {
1824 	case 0:
1825 	default:
1826 		want_thermal_protection = false;
1827 		break;
1828 	case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL):
1829 		want_thermal_protection = true;
1830 		dpm_event_src = RADEON_DPM_EVENT_SRC_DIGITAL;
1831 		break;
1832 
1833 	case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL):
1834 		want_thermal_protection = true;
1835 		dpm_event_src = RADEON_DPM_EVENT_SRC_EXTERNAL;
1836 		break;
1837 
1838 	case ((1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL) |
1839 	      (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL)):
1840 		want_thermal_protection = true;
1841 		dpm_event_src = RADEON_DPM_EVENT_SRC_DIGIAL_OR_EXTERNAL;
1842 		break;
1843 	}
1844 
1845 	if (want_thermal_protection) {
1846 		WREG32_P(CG_THERMAL_CTRL, DPM_EVENT_SRC(dpm_event_src), ~DPM_EVENT_SRC_MASK);
1847 		if (pi->thermal_protection)
1848 			WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
1849 	} else {
1850 		WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
1851 	}
1852 }
1853 
1854 void rv770_enable_auto_throttle_source(struct radeon_device *rdev,
1855 				       enum radeon_dpm_auto_throttle_src source,
1856 				       bool enable)
1857 {
1858 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1859 
1860 	if (enable) {
1861 		if (!(pi->active_auto_throttle_sources & (1 << source))) {
1862 			pi->active_auto_throttle_sources |= 1 << source;
1863 			rv770_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
1864 		}
1865 	} else {
1866 		if (pi->active_auto_throttle_sources & (1 << source)) {
1867 			pi->active_auto_throttle_sources &= ~(1 << source);
1868 			rv770_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
1869 		}
1870 	}
1871 }
1872 
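/* Clamp the requested range to 0..255 degrees C and program the thermal
 * interrupt and DPM thresholds (the registers take whole degrees, while
 * rdev->pm.dpm.thermal stores millidegrees).
 */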
1873 static int rv770_set_thermal_temperature_range(struct radeon_device *rdev,
1874 					       int min_temp, int max_temp)
1875 {
1876 	int low_temp = 0 * 1000;
1877 	int high_temp = 255 * 1000;
1878 
1879 	if (low_temp < min_temp)
1880 		low_temp = min_temp;
1881 	if (high_temp > max_temp)
1882 		high_temp = max_temp;
1883 	if (high_temp < low_temp) {
1884 		DRM_ERROR("invalid thermal range: %d - %d\n", low_temp, high_temp);
1885 		return -EINVAL;
1886 	}
1887 
1888 	WREG32_P(CG_THERMAL_INT, DIG_THERM_INTH(high_temp / 1000), ~DIG_THERM_INTH_MASK);
1889 	WREG32_P(CG_THERMAL_INT, DIG_THERM_INTL(low_temp / 1000), ~DIG_THERM_INTL_MASK);
1890 	WREG32_P(CG_THERMAL_CTRL, DIG_THERM_DPM(high_temp / 1000), ~DIG_THERM_DPM_MASK);
1891 
1892 	rdev->pm.dpm.thermal.min_temp = low_temp;
1893 	rdev->pm.dpm.thermal.max_temp = high_temp;
1894 
1895 	return 0;
1896 }
1897 
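/* Full DPM bring-up: build the VDDC/MVDD tables, program clock, spread
 * spectrum and PCIe parameters, upload the SMC firmware and state table,
 * then start the SMC and hardware DPM with clock gating and thermal
 * throttling enabled.
 */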
1898 int rv770_dpm_enable(struct radeon_device *rdev)
1899 {
1900 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1901 	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
1902 	int ret;
1903 
1904 	if (pi->gfx_clock_gating)
1905 		rv770_restore_cgcg(rdev);
1906 
1907 	if (rv770_dpm_enabled(rdev))
1908 		return -EINVAL;
1909 
1910 	if (pi->voltage_control) {
1911 		rv770_enable_voltage_control(rdev, true);
1912 		ret = rv770_construct_vddc_table(rdev);
1913 		if (ret) {
1914 			DRM_ERROR("rv770_construct_vddc_table failed\n");
1915 			return ret;
1916 		}
1917 	}
1918 
1919 	if (pi->dcodt)
1920 		rv770_retrieve_odt_values(rdev);
1921 
1922 	if (pi->mvdd_control) {
1923 		ret = rv770_get_mvdd_configuration(rdev);
1924 		if (ret) {
1925 			DRM_ERROR("rv770_get_mvdd_configuration failed\n");
1926 			return ret;
1927 		}
1928 	}
1929 
1930 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
1931 		rv770_enable_backbias(rdev, true);
1932 
1933 	rv770_enable_spread_spectrum(rdev, true);
1934 
1935 	if (pi->thermal_protection)
1936 		rv770_enable_thermal_protection(rdev, true);
1937 
1938 	rv770_program_mpll_timing_parameters(rdev);
1939 	rv770_setup_bsp(rdev);
1940 	rv770_program_git(rdev);
1941 	rv770_program_tp(rdev);
1942 	rv770_program_tpp(rdev);
1943 	rv770_program_sstp(rdev);
1944 	rv770_program_engine_speed_parameters(rdev);
1945 	rv770_enable_display_gap(rdev);
1946 	rv770_program_vc(rdev);
1947 
1948 	if (pi->dynamic_pcie_gen2)
1949 		rv770_enable_dynamic_pcie_gen2(rdev, true);
1950 
1951 	ret = rv770_upload_firmware(rdev);
1952 	if (ret) {
1953 		DRM_ERROR("rv770_upload_firmware failed\n");
1954 		return ret;
1955 	}
1956 	ret = rv770_init_smc_table(rdev, boot_ps);
1957 	if (ret) {
1958 		DRM_ERROR("rv770_init_smc_table failed\n");
1959 		return ret;
1960 	}
1961 
1962 	rv770_program_response_times(rdev);
1963 	r7xx_start_smc(rdev);
1964 
1965 	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1966 		rv730_start_dpm(rdev);
1967 	else
1968 		rv770_start_dpm(rdev);
1969 
1970 	if (pi->gfx_clock_gating)
1971 		rv770_gfx_clock_gating_enable(rdev, true);
1972 
1973 	if (pi->mg_clock_gating)
1974 		rv770_mg_clock_gating_enable(rdev, true);
1975 
1976 	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
1977 
1978 	return 0;
1979 }
1980 
1981 int rv770_dpm_late_enable(struct radeon_device *rdev)
1982 {
1983 	int ret;
1984 
1985 	if (rdev->irq.installed &&
1986 	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
1987 		PPSMC_Result result;
1988 
1989 		ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX);
1990 		if (ret)
1991 			return ret;
1992 		rdev->irq.dpm_thermal = true;
1993 		radeon_irq_set(rdev);
1994 		result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
1995 
1996 		if (result != PPSMC_Result_OK)
1997 			DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
1998 	}
1999 
2000 	return 0;
2001 }
2002 
2003 void rv770_dpm_disable(struct radeon_device *rdev)
2004 {
2005 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2006 
2007 	if (!rv770_dpm_enabled(rdev))
2008 		return;
2009 
2010 	rv770_clear_vc(rdev);
2011 
2012 	if (pi->thermal_protection)
2013 		rv770_enable_thermal_protection(rdev, false);
2014 
2015 	rv770_enable_spread_spectrum(rdev, false);
2016 
2017 	if (pi->dynamic_pcie_gen2)
2018 		rv770_enable_dynamic_pcie_gen2(rdev, false);
2019 
2020 	if (rdev->irq.installed &&
2021 	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
2022 		rdev->irq.dpm_thermal = false;
2023 		radeon_irq_set(rdev);
2024 	}
2025 
2026 	if (pi->gfx_clock_gating)
2027 		rv770_gfx_clock_gating_enable(rdev, false);
2028 
2029 	if (pi->mg_clock_gating)
2030 		rv770_mg_clock_gating_enable(rdev, false);
2031 
2032 	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
2033 		rv730_stop_dpm(rdev);
2034 	else
2035 		rv770_stop_dpm(rdev);
2036 
2037 	r7xx_stop_smc(rdev);
2038 	rv770_reset_smio_status(rdev);
2039 }
2040 
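/* Switch to the requested power state: halt the SMC, upload the new
 * software state and memory timing parameters, resume the SMC and commit
 * the state, handling UVD clocks and DC ODT around the transition.
 */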
2041 int rv770_dpm_set_power_state(struct radeon_device *rdev)
2042 {
2043 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2044 	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
2045 	struct radeon_ps *old_ps = rdev->pm.dpm.current_ps;
2046 	int ret;
2047 
2048 	ret = rv770_restrict_performance_levels_before_switch(rdev);
2049 	if (ret) {
2050 		DRM_ERROR("rv770_restrict_performance_levels_before_switch failed\n");
2051 		return ret;
2052 	}
2053 	rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
2054 	ret = rv770_halt_smc(rdev);
2055 	if (ret) {
2056 		DRM_ERROR("rv770_halt_smc failed\n");
2057 		return ret;
2058 	}
2059 	ret = rv770_upload_sw_state(rdev, new_ps);
2060 	if (ret) {
2061 		DRM_ERROR("rv770_upload_sw_state failed\n");
2062 		return ret;
2063 	}
2064 	r7xx_program_memory_timing_parameters(rdev, new_ps);
2065 	if (pi->dcodt)
2066 		rv770_program_dcodt_before_state_switch(rdev, new_ps, old_ps);
2067 	ret = rv770_resume_smc(rdev);
2068 	if (ret) {
2069 		DRM_ERROR("rv770_resume_smc failed\n");
2070 		return ret;
2071 	}
2072 	ret = rv770_set_sw_state(rdev);
2073 	if (ret) {
2074 		DRM_ERROR("rv770_set_sw_state failed\n");
2075 		return ret;
2076 	}
2077 	if (pi->dcodt)
2078 		rv770_program_dcodt_after_state_switch(rdev, new_ps, old_ps);
2079 	rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
2080 
2081 	return 0;
2082 }
2083 
2084 #if 0
2085 void rv770_dpm_reset_asic(struct radeon_device *rdev)
2086 {
2087 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2088 	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
2089 
2090 	rv770_restrict_performance_levels_before_switch(rdev);
2091 	if (pi->dcodt)
2092 		rv770_program_dcodt_before_state_switch(rdev, boot_ps, boot_ps);
2093 	rv770_set_boot_state(rdev);
2094 	if (pi->dcodt)
2095 		rv770_program_dcodt_after_state_switch(rdev, boot_ps, boot_ps);
2096 }
2097 #endif
2098 
2099 void rv770_dpm_setup_asic(struct radeon_device *rdev)
2100 {
2101 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2102 
2103 	r7xx_read_clock_registers(rdev);
2104 	rv770_read_voltage_smio_registers(rdev);
2105 	rv770_get_memory_type(rdev);
2106 	if (pi->dcodt)
2107 		rv770_get_mclk_odt_threshold(rdev);
2108 	rv770_get_pcie_gen2_status(rdev);
2109 
2110 	rv770_enable_acpi_pm(rdev);
2111 
2112 	if (radeon_aspm != 0) {
2113 		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L0s)
2114 			rv770_enable_l0s(rdev);
2115 		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L1)
2116 			rv770_enable_l1(rdev);
2117 		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_TURNOFFPLL_ASPML1)
2118 			rv770_enable_pll_sleep_in_l1(rdev);
2119 	}
2120 }
2121 
2122 void rv770_dpm_display_configuration_changed(struct radeon_device *rdev)
2123 {
2124 	rv770_program_display_gap(rdev);
2125 }
2126 
2127 union power_info {
2128 	struct _ATOM_POWERPLAY_INFO info;
2129 	struct _ATOM_POWERPLAY_INFO_V2 info_2;
2130 	struct _ATOM_POWERPLAY_INFO_V3 info_3;
2131 	struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
2132 	struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
2133 	struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
2134 };
2135 
2136 union pplib_clock_info {
2137 	struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
2138 	struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
2139 	struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
2140 	struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
2141 };
2142 
2143 union pplib_power_state {
2144 	struct _ATOM_PPLIB_STATE v1;
2145 	struct _ATOM_PPLIB_STATE_V2 v2;
2146 };
2147 
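/* Fill in the generic (non-clock) fields of a radeon_ps from the ATOM
 * PPLib entry and remember the boot and UVD states.
 */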
2148 static void rv7xx_parse_pplib_non_clock_info(struct radeon_device *rdev,
2149 					     struct radeon_ps *rps,
2150 					     struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
2151 					     u8 table_rev)
2152 {
2153 	rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
2154 	rps->class = le16_to_cpu(non_clock_info->usClassification);
2155 	rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
2156 
2157 	if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
2158 		rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
2159 		rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
2160 	} else {
2161 		rps->vclk = 0;
2162 		rps->dclk = 0;
2163 	}
2164 
2165 	if (r600_is_uvd_state(rps->class, rps->class2)) {
2166 		if ((rps->vclk == 0) || (rps->dclk == 0)) {
2167 			rps->vclk = RV770_DEFAULT_VCLK_FREQ;
2168 			rps->dclk = RV770_DEFAULT_DCLK_FREQ;
2169 		}
2170 	}
2171 
2172 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
2173 		rdev->pm.dpm.boot_ps = rps;
2174 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
2175 		rdev->pm.dpm.uvd_ps = rps;
2176 }
2177 
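/* Decode one performance level (low/medium/high) of a power state from
 * the ATOM PPLib clock info, patching up VDDC, the boot state and the
 * reported AC maximums along the way.
 */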
2178 static void rv7xx_parse_pplib_clock_info(struct radeon_device *rdev,
2179 					 struct radeon_ps *rps, int index,
2180 					 union pplib_clock_info *clock_info)
2181 {
2182 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2183 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2184 	struct rv7xx_ps *ps = rv770_get_ps(rps);
2185 	u32 sclk, mclk;
2186 	struct rv7xx_pl *pl;
2187 
2188 	switch (index) {
2189 	case 0:
2190 		pl = &ps->low;
2191 		break;
2192 	case 1:
2193 		pl = &ps->medium;
2194 		break;
2195 	case 2:
2196 	default:
2197 		pl = &ps->high;
2198 		break;
2199 	}
2200 
2201 	if (rdev->family >= CHIP_CEDAR) {
2202 		sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
2203 		sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
2204 		mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
2205 		mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;
2206 
2207 		pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
2208 		pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
2209 		pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
2210 	} else {
2211 		sclk = le16_to_cpu(clock_info->r600.usEngineClockLow);
2212 		sclk |= clock_info->r600.ucEngineClockHigh << 16;
2213 		mclk = le16_to_cpu(clock_info->r600.usMemoryClockLow);
2214 		mclk |= clock_info->r600.ucMemoryClockHigh << 16;
2215 
2216 		pl->vddc = le16_to_cpu(clock_info->r600.usVDDC);
2217 		pl->flags = le32_to_cpu(clock_info->r600.ulFlags);
2218 	}
2219 
2220 	pl->mclk = mclk;
2221 	pl->sclk = sclk;
2222 
2223 	/* patch up vddc if necessary */
2224 	if (pl->vddc == 0xff01) {
2225 		if (pi->max_vddc)
2226 			pl->vddc = pi->max_vddc;
2227 	}
2228 
2229 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
2230 		pi->acpi_vddc = pl->vddc;
2231 		if (rdev->family >= CHIP_CEDAR)
2232 			eg_pi->acpi_vddci = pl->vddci;
2233 		if (ps->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
2234 			pi->acpi_pcie_gen2 = true;
2235 		else
2236 			pi->acpi_pcie_gen2 = false;
2237 	}
2238 
2239 	if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
2240 		if (rdev->family >= CHIP_BARTS) {
2241 			eg_pi->ulv.supported = true;
2242 			eg_pi->ulv.pl = pl;
2243 		}
2244 	}
2245 
2246 	if (pi->min_vddc_in_table > pl->vddc)
2247 		pi->min_vddc_in_table = pl->vddc;
2248 
2249 	if (pi->max_vddc_in_table < pl->vddc)
2250 		pi->max_vddc_in_table = pl->vddc;
2251 
2252 	/* patch up boot state */
2253 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
2254 		u16 vddc, vddci, mvdd;
2255 		radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
2256 		pl->mclk = rdev->clock.default_mclk;
2257 		pl->sclk = rdev->clock.default_sclk;
2258 		pl->vddc = vddc;
2259 		pl->vddci = vddci;
2260 	}
2261 
2262 	if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
2263 	    ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
2264 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
2265 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
2266 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
2267 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
2268 	}
2269 }
2270 
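/* Walk the ATOM PowerPlay table and build rdev->pm.dpm.ps from its state
 * and clock info arrays.
 */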
2271 int rv7xx_parse_power_table(struct radeon_device *rdev)
2272 {
2273 	struct radeon_mode_info *mode_info = &rdev->mode_info;
2274 	struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
2275 	union pplib_power_state *power_state;
2276 	int i, j;
2277 	union pplib_clock_info *clock_info;
2278 	union power_info *power_info;
2279 	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
2280 	u16 data_offset;
2281 	u8 frev, crev;
2282 	struct rv7xx_ps *ps;
2283 
2284 	if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
2285 				   &frev, &crev, &data_offset))
2286 		return -EINVAL;
2287 	power_info = (union power_info *)((uint8_t*)mode_info->atom_context->bios + data_offset);
2288 
2289 	rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
2290 				  power_info->pplib.ucNumStates, GFP_KERNEL);
2291 	if (!rdev->pm.dpm.ps)
2292 		return -ENOMEM;
2293 
2294 	for (i = 0; i < power_info->pplib.ucNumStates; i++) {
2295 		power_state = (union pplib_power_state *)
2296 			((uint8_t*)mode_info->atom_context->bios + data_offset +
2297 			 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
2298 			 i * power_info->pplib.ucStateEntrySize);
2299 		non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
2300 			((uint8_t*)mode_info->atom_context->bios + data_offset +
2301 			 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
2302 			 (power_state->v1.ucNonClockStateIndex *
2303 			  power_info->pplib.ucNonClockSize));
2304 		if (power_info->pplib.ucStateEntrySize - 1) {
2305 			u8 *idx;
2306 			ps = kzalloc(sizeof(struct rv7xx_ps), GFP_KERNEL);
2307 			if (ps == NULL) {
2308 				kfree(rdev->pm.dpm.ps);
2309 				return -ENOMEM;
2310 			}
2311 			rdev->pm.dpm.ps[i].ps_priv = ps;
2312 			rv7xx_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
2313 							 non_clock_info,
2314 							 power_info->pplib.ucNonClockSize);
2315 			idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
2316 			for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
2317 				clock_info = (union pplib_clock_info *)
2318 					((uint8_t*)mode_info->atom_context->bios + data_offset +
2319 					 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
2320 					 (idx[j] * power_info->pplib.ucClockInfoSize));
2321 				rv7xx_parse_pplib_clock_info(rdev,
2322 							     &rdev->pm.dpm.ps[i], j,
2323 							     clock_info);
2324 			}
2325 		}
2326 	}
2327 	rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
2328 	return 0;
2329 }
2330 
2331 void rv770_get_engine_memory_ss(struct radeon_device *rdev)
2332 {
2333 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2334 	struct radeon_atom_ss ss;
2335 
2336 	pi->sclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss,
2337 						       ASIC_INTERNAL_ENGINE_SS, 0);
2338 	pi->mclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss,
2339 						       ASIC_INTERNAL_MEMORY_SS, 0);
2340 
2341 	if (pi->sclk_ss || pi->mclk_ss)
2342 		pi->dynamic_ss = true;
2343 	else
2344 		pi->dynamic_ss = false;
2345 }
2346 
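/* Allocate and populate the rv7xx_power_info structure: parse the power
 * table, query voltage control and spread spectrum support, and fill in
 * the default DPM tuning parameters.
 */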
2347 int rv770_dpm_init(struct radeon_device *rdev)
2348 {
2349 	struct rv7xx_power_info *pi;
2350 	struct atom_clock_dividers dividers;
2351 	int ret;
2352 
2353 	pi = kzalloc(sizeof(struct rv7xx_power_info), GFP_KERNEL);
2354 	if (pi == NULL)
2355 		return -ENOMEM;
2356 	rdev->pm.dpm.priv = pi;
2357 
2358 	rv770_get_max_vddc(rdev);
2359 
2360 	pi->acpi_vddc = 0;
2361 	pi->min_vddc_in_table = 0;
2362 	pi->max_vddc_in_table = 0;
2363 
2364 	ret = r600_get_platform_caps(rdev);
2365 	if (ret)
2366 		return ret;
2367 
2368 	ret = rv7xx_parse_power_table(rdev);
2369 	if (ret)
2370 		return ret;
2371 
2372 	if (rdev->pm.dpm.voltage_response_time == 0)
2373 		rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
2374 	if (rdev->pm.dpm.backbias_response_time == 0)
2375 		rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
2376 
2377 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2378 					     0, false, &dividers);
2379 	if (ret)
2380 		pi->ref_div = dividers.ref_div + 1;
2381 	else
2382 		pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
2383 
2384 	pi->mclk_strobe_mode_threshold = 30000;
2385 	pi->mclk_edc_enable_threshold = 30000;
2386 
2387 	pi->rlp = RV770_RLP_DFLT;
2388 	pi->rmp = RV770_RMP_DFLT;
2389 	pi->lhp = RV770_LHP_DFLT;
2390 	pi->lmp = RV770_LMP_DFLT;
2391 
2392 	pi->voltage_control =
2393 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
2394 
2395 	pi->mvdd_control =
2396 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
2397 
2398 	rv770_get_engine_memory_ss(rdev);
2399 
2400 	pi->asi = RV770_ASI_DFLT;
2401 	pi->pasi = RV770_HASI_DFLT;
2402 	pi->vrc = RV770_VRC_DFLT;
2403 
2404 	pi->power_gating = false;
2405 
2406 	pi->gfx_clock_gating = true;
2407 
2408 	pi->mg_clock_gating = true;
2409 	pi->mgcgtssm = true;
2410 
2411 	pi->dynamic_pcie_gen2 = true;
2412 
2413 	if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
2414 		pi->thermal_protection = true;
2415 	else
2416 		pi->thermal_protection = false;
2417 
2418 	pi->display_gap = true;
2419 
2420 	if (rdev->flags & RADEON_IS_MOBILITY)
2421 		pi->dcodt = true;
2422 	else
2423 		pi->dcodt = false;
2424 
2425 	pi->ulps = true;
2426 
2427 	pi->mclk_stutter_mode_threshold = 0;
2428 
2429 	pi->sram_end = SMC_RAM_END;
2430 	pi->state_table_start = RV770_SMC_TABLE_ADDRESS;
2431 	pi->soft_regs_start = RV770_SMC_SOFT_REGISTERS_START;
2432 
2433 	return 0;
2434 }
2435 
2436 void rv770_dpm_print_power_state(struct radeon_device *rdev,
2437 				 struct radeon_ps *rps)
2438 {
2439 	struct rv7xx_ps *ps = rv770_get_ps(rps);
2440 	struct rv7xx_pl *pl;
2441 
2442 	r600_dpm_print_class_info(rps->class, rps->class2);
2443 	r600_dpm_print_cap_info(rps->caps);
2444 	printk("\tuvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
2445 	if (rdev->family >= CHIP_CEDAR) {
2446 		pl = &ps->low;
2447 		printk("\t\tpower level 0    sclk: %u mclk: %u vddc: %u vddci: %u\n",
2448 		       pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2449 		pl = &ps->medium;
2450 		printk("\t\tpower level 1    sclk: %u mclk: %u vddc: %u vddci: %u\n",
2451 		       pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2452 		pl = &ps->high;
2453 		printk("\t\tpower level 2    sclk: %u mclk: %u vddc: %u vddci: %u\n",
2454 		       pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2455 	} else {
2456 		pl = &ps->low;
2457 		printk("\t\tpower level 0    sclk: %u mclk: %u vddc: %u\n",
2458 		       pl->sclk, pl->mclk, pl->vddc);
2459 		pl = &ps->medium;
2460 		printk("\t\tpower level 1    sclk: %u mclk: %u vddc: %u\n",
2461 		       pl->sclk, pl->mclk, pl->vddc);
2462 		pl = &ps->high;
2463 		printk("\t\tpower level 2    sclk: %u mclk: %u vddc: %u\n",
2464 		       pl->sclk, pl->mclk, pl->vddc);
2465 	}
2466 	r600_dpm_print_ps_status(rdev, rps);
2467 }
2468 
2469 void rv770_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
2470 						       struct seq_file *m)
2471 {
2472 	struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2473 	struct rv7xx_ps *ps = rv770_get_ps(rps);
2474 	struct rv7xx_pl *pl;
2475 	u32 current_index =
2476 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2477 		CURRENT_PROFILE_INDEX_SHIFT;
2478 
2479 	if (current_index > 2) {
2480 		seq_printf(m, "invalid dpm profile %d\n", current_index);
2481 	} else {
2482 		if (current_index == 0)
2483 			pl = &ps->low;
2484 		else if (current_index == 1)
2485 			pl = &ps->medium;
2486 		else /* current_index == 2 */
2487 			pl = &ps->high;
2488 		seq_printf(m, "uvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
2489 		if (rdev->family >= CHIP_CEDAR) {
2490 			seq_printf(m, "power level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
2491 				   current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2492 		} else {
2493 			seq_printf(m, "power level %d    sclk: %u mclk: %u vddc: %u\n",
2494 				   current_index, pl->sclk, pl->mclk, pl->vddc);
2495 		}
2496 	}
2497 }
2498 
2499 u32 rv770_dpm_get_current_sclk(struct radeon_device *rdev)
2500 {
2501 	struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2502 	struct rv7xx_ps *ps = rv770_get_ps(rps);
2503 	struct rv7xx_pl *pl;
2504 	u32 current_index =
2505 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2506 		CURRENT_PROFILE_INDEX_SHIFT;
2507 
2508 	if (current_index > 2) {
2509 		return 0;
2510 	} else {
2511 		if (current_index == 0)
2512 			pl = &ps->low;
2513 		else if (current_index == 1)
2514 			pl = &ps->medium;
2515 		else /* current_index == 2 */
2516 			pl = &ps->high;
2517 		return pl->sclk;
2518 	}
2519 }
2520 
2521 u32 rv770_dpm_get_current_mclk(struct radeon_device *rdev)
2522 {
2523 	struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2524 	struct rv7xx_ps *ps = rv770_get_ps(rps);
2525 	struct rv7xx_pl *pl;
2526 	u32 current_index =
2527 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2528 		CURRENT_PROFILE_INDEX_SHIFT;
2529 
2530 	if (current_index > 2) {
2531 		return 0;
2532 	} else {
2533 		if (current_index == 0)
2534 			pl = &ps->low;
2535 		else if (current_index == 1)
2536 			pl = &ps->medium;
2537 		else /* current_index == 2 */
2538 			pl = &ps->high;
2539 		return pl->mclk;
2540 	}
2541 }
2542 
2543 void rv770_dpm_fini(struct radeon_device *rdev)
2544 {
2545 	int i;
2546 
2547 	for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
2548 		kfree(rdev->pm.dpm.ps[i].ps_priv);
2549 	}
2550 	kfree(rdev->pm.dpm.ps);
2551 	kfree(rdev->pm.dpm.priv);
2552 }
2553 
2554 u32 rv770_dpm_get_sclk(struct radeon_device *rdev, bool low)
2555 {
2556 	struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
2557 
2558 	if (low)
2559 		return requested_state->low.sclk;
2560 	else
2561 		return requested_state->high.sclk;
2562 }
2563 
2564 u32 rv770_dpm_get_mclk(struct radeon_device *rdev, bool low)
2565 {
2566 	struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
2567 
2568 	if (low)
2569 		return requested_state->low.mclk;
2570 	else
2571 		return requested_state->high.mclk;
2572 }
2573 
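/* Report whether the vblank period is too short to hide an mclk switch;
 * desktop RV770 boards disable mclk switching entirely.
 */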
2574 bool rv770_dpm_vblank_too_short(struct radeon_device *rdev)
2575 {
2576 	u32 vblank_time = r600_dpm_get_vblank_time(rdev);
2577 	u32 switch_limit = 200; /* 300 */
2578 
2579 	/* RV770 */
2580 	/* mclk switching doesn't seem to work reliably on desktop RV770s */
2581 	if ((rdev->family == CHIP_RV770) &&
2582 	    !(rdev->flags & RADEON_IS_MOBILITY))
2583 		switch_limit = 0xffffffff; /* disable mclk switching */
2584 
2585 	if (vblank_time < switch_limit)
2586 		return true;
2587 	else
2588 		return false;
2589 
2590 }
2591