xref: /dragonfly/sys/dev/drm/radeon/cypress_dpm.c (revision d78d3a22)
1 /*
2  * Copyright 2011 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 
25 #include <drm/drmP.h>
26 #include "radeon.h"
27 #include "evergreend.h"
28 #include "r600_dpm.h"
29 #include "cypress_dpm.h"
30 #include "atom.h"
31 #include "radeon_asic.h"
32 
33 #define SMC_RAM_END 0x8000
34 
35 #define MC_CG_ARB_FREQ_F0           0x0a
36 #define MC_CG_ARB_FREQ_F1           0x0b
37 #define MC_CG_ARB_FREQ_F2           0x0c
38 #define MC_CG_ARB_FREQ_F3           0x0d
39 
40 #define MC_CG_SEQ_DRAMCONF_S0       0x05
41 #define MC_CG_SEQ_DRAMCONF_S1       0x06
42 #define MC_CG_SEQ_YCLK_SUSPEND      0x04
43 #define MC_CG_SEQ_YCLK_RESUME       0x0a
44 
45 struct rv7xx_ps *rv770_get_ps(struct radeon_ps *rps);
46 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
47 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
48 
49 static void cypress_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
50 						 bool enable)
51 {
52 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
53 	u32 tmp, bif;
54 
55 	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
56 	if (enable) {
57 		if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
58 		    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
59 			if (!pi->boot_in_gen2) {
60 				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
61 				bif |= CG_CLIENT_REQ(0xd);
62 				WREG32(CG_BIF_REQ_AND_RSP, bif);
63 
64 				tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
65 				tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
66 				tmp |= LC_GEN2_EN_STRAP;
67 
68 				tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
69 				WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
70 				udelay(10);
71 				tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
72 				WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
73 			}
74 		}
75 	} else {
76 		if (!pi->boot_in_gen2) {
77 			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
78 			tmp &= ~LC_GEN2_EN_STRAP;
79 		}
80 		if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
81 		    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
82 			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
83 	}
84 }
85 
86 static void cypress_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
87 					     bool enable)
88 {
89 	cypress_enable_bif_dynamic_pcie_gen2(rdev, enable);
90 
91 	if (enable)
92 		WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
93 	else
94 		WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
95 }
96 
97 #if 0
98 static int cypress_enter_ulp_state(struct radeon_device *rdev)
99 {
100 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
101 
102 	if (pi->gfx_clock_gating) {
103 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
104 		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
105 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
106 
107 		RREG32(GB_ADDR_CONFIG);
108 	}
109 
110 	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
111 		 ~HOST_SMC_MSG_MASK);
112 
113 	udelay(7000);
114 
115 	return 0;
116 }
117 #endif
118 
119 static void cypress_gfx_clock_gating_enable(struct radeon_device *rdev,
120 					    bool enable)
121 {
122 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
123 
124 	if (enable) {
125 		if (eg_pi->light_sleep) {
126 			WREG32(GRBM_GFX_INDEX, 0xC0000000);
127 
128 			WREG32_CG(CG_CGLS_TILE_0, 0xFFFFFFFF);
129 			WREG32_CG(CG_CGLS_TILE_1, 0xFFFFFFFF);
130 			WREG32_CG(CG_CGLS_TILE_2, 0xFFFFFFFF);
131 			WREG32_CG(CG_CGLS_TILE_3, 0xFFFFFFFF);
132 			WREG32_CG(CG_CGLS_TILE_4, 0xFFFFFFFF);
133 			WREG32_CG(CG_CGLS_TILE_5, 0xFFFFFFFF);
134 			WREG32_CG(CG_CGLS_TILE_6, 0xFFFFFFFF);
135 			WREG32_CG(CG_CGLS_TILE_7, 0xFFFFFFFF);
136 			WREG32_CG(CG_CGLS_TILE_8, 0xFFFFFFFF);
137 			WREG32_CG(CG_CGLS_TILE_9, 0xFFFFFFFF);
138 			WREG32_CG(CG_CGLS_TILE_10, 0xFFFFFFFF);
139 			WREG32_CG(CG_CGLS_TILE_11, 0xFFFFFFFF);
140 
141 			WREG32_P(SCLK_PWRMGT_CNTL, DYN_LIGHT_SLEEP_EN, ~DYN_LIGHT_SLEEP_EN);
142 		}
143 		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
144 	} else {
145 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
146 		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
147 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
148 		RREG32(GB_ADDR_CONFIG);
149 
150 		if (eg_pi->light_sleep) {
151 			WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_LIGHT_SLEEP_EN);
152 
153 			WREG32(GRBM_GFX_INDEX, 0xC0000000);
154 
155 			WREG32_CG(CG_CGLS_TILE_0, 0);
156 			WREG32_CG(CG_CGLS_TILE_1, 0);
157 			WREG32_CG(CG_CGLS_TILE_2, 0);
158 			WREG32_CG(CG_CGLS_TILE_3, 0);
159 			WREG32_CG(CG_CGLS_TILE_4, 0);
160 			WREG32_CG(CG_CGLS_TILE_5, 0);
161 			WREG32_CG(CG_CGLS_TILE_6, 0);
162 			WREG32_CG(CG_CGLS_TILE_7, 0);
163 			WREG32_CG(CG_CGLS_TILE_8, 0);
164 			WREG32_CG(CG_CGLS_TILE_9, 0);
165 			WREG32_CG(CG_CGLS_TILE_10, 0);
166 			WREG32_CG(CG_CGLS_TILE_11, 0);
167 		}
168 	}
169 }
170 
171 static void cypress_mg_clock_gating_enable(struct radeon_device *rdev,
172 					   bool enable)
173 {
174 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
175 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
176 
177 	if (enable) {
178 		u32 cgts_sm_ctrl_reg;
179 
180 		if (rdev->family == CHIP_CEDAR)
181 			cgts_sm_ctrl_reg = CEDAR_MGCGCGTSSMCTRL_DFLT;
182 		else if (rdev->family == CHIP_REDWOOD)
183 			cgts_sm_ctrl_reg = REDWOOD_MGCGCGTSSMCTRL_DFLT;
184 		else
185 			cgts_sm_ctrl_reg = CYPRESS_MGCGCGTSSMCTRL_DFLT;
186 
187 		WREG32(GRBM_GFX_INDEX, 0xC0000000);
188 
189 		WREG32_CG(CG_CGTT_LOCAL_0, CYPRESS_MGCGTTLOCAL0_DFLT);
190 		WREG32_CG(CG_CGTT_LOCAL_1, CYPRESS_MGCGTTLOCAL1_DFLT & 0xFFFFCFFF);
191 		WREG32_CG(CG_CGTT_LOCAL_2, CYPRESS_MGCGTTLOCAL2_DFLT);
192 		WREG32_CG(CG_CGTT_LOCAL_3, CYPRESS_MGCGTTLOCAL3_DFLT);
193 
194 		if (pi->mgcgtssm)
195 			WREG32(CGTS_SM_CTRL_REG, cgts_sm_ctrl_reg);
196 
197 		if (eg_pi->mcls) {
198 			WREG32_P(MC_CITF_MISC_RD_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
199 			WREG32_P(MC_CITF_MISC_WR_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
200 			WREG32_P(MC_CITF_MISC_VM_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
201 			WREG32_P(MC_HUB_MISC_HUB_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
202 			WREG32_P(MC_HUB_MISC_VM_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
203 			WREG32_P(MC_HUB_MISC_SIP_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
204 			WREG32_P(MC_XPB_CLK_GAT, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
205 			WREG32_P(VM_L2_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
206 		}
207 	} else {
208 		WREG32(GRBM_GFX_INDEX, 0xC0000000);
209 
210 		WREG32_CG(CG_CGTT_LOCAL_0, 0xFFFFFFFF);
211 		WREG32_CG(CG_CGTT_LOCAL_1, 0xFFFFFFFF);
212 		WREG32_CG(CG_CGTT_LOCAL_2, 0xFFFFFFFF);
213 		WREG32_CG(CG_CGTT_LOCAL_3, 0xFFFFFFFF);
214 
215 		if (pi->mgcgtssm)
216 			WREG32(CGTS_SM_CTRL_REG, 0x81f44bc0);
217 	}
218 }
219 
220 void cypress_enable_spread_spectrum(struct radeon_device *rdev,
221 				    bool enable)
222 {
223 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
224 
225 	if (enable) {
226 		if (pi->sclk_ss)
227 			WREG32_P(GENERAL_PWRMGT, DYN_SPREAD_SPECTRUM_EN, ~DYN_SPREAD_SPECTRUM_EN);
228 
229 		if (pi->mclk_ss)
230 			WREG32_P(MPLL_CNTL_MODE, SS_SSEN, ~SS_SSEN);
231 	} else {
232 		WREG32_P(CG_SPLL_SPREAD_SPECTRUM, 0, ~SSEN);
233 		WREG32_P(GENERAL_PWRMGT, 0, ~DYN_SPREAD_SPECTRUM_EN);
234 		WREG32_P(MPLL_CNTL_MODE, 0, ~SS_SSEN);
235 		WREG32_P(MPLL_CNTL_MODE, 0, ~SS_DSMODE_EN);
236 	}
237 }
238 
239 void cypress_start_dpm(struct radeon_device *rdev)
240 {
241 	WREG32_P(GENERAL_PWRMGT, GLOBAL_PWRMGT_EN, ~GLOBAL_PWRMGT_EN);
242 }
243 
244 void cypress_enable_sclk_control(struct radeon_device *rdev,
245 				 bool enable)
246 {
247 	if (enable)
248 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~SCLK_PWRMGT_OFF);
249 	else
250 		WREG32_P(SCLK_PWRMGT_CNTL, SCLK_PWRMGT_OFF, ~SCLK_PWRMGT_OFF);
251 }
252 
253 void cypress_enable_mclk_control(struct radeon_device *rdev,
254 				 bool enable)
255 {
256 	if (enable)
257 		WREG32_P(MCLK_PWRMGT_CNTL, 0, ~MPLL_PWRMGT_OFF);
258 	else
259 		WREG32_P(MCLK_PWRMGT_CNTL, MPLL_PWRMGT_OFF, ~MPLL_PWRMGT_OFF);
260 }
261 
262 int cypress_notify_smc_display_change(struct radeon_device *rdev,
263 				      bool has_display)
264 {
265 	PPSMC_Msg msg = has_display ?
266 		(PPSMC_Msg)PPSMC_MSG_HasDisplay : (PPSMC_Msg)PPSMC_MSG_NoDisplay;
267 
268 	if (rv770_send_msg_to_smc(rdev, msg) != PPSMC_Result_OK)
269 		return -EINVAL;
270 
271 	return 0;
272 }
273 
274 void cypress_program_response_times(struct radeon_device *rdev)
275 {
276 	u32 reference_clock;
277 	u32 mclk_switch_limit;
278 
279 	reference_clock = radeon_get_xclk(rdev);
280 	mclk_switch_limit = (460 * reference_clock) / 100;
281 
282 	rv770_write_smc_soft_register(rdev,
283 				      RV770_SMC_SOFT_REGISTER_mclk_switch_lim,
284 				      mclk_switch_limit);
285 
286 	rv770_write_smc_soft_register(rdev,
287 				      RV770_SMC_SOFT_REGISTER_mvdd_chg_time, 1);
288 
289 	rv770_write_smc_soft_register(rdev,
290 				      RV770_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
291 
292 	rv770_program_response_times(rdev);
293 
294 	if (ASIC_IS_LOMBOK(rdev))
295 		rv770_write_smc_soft_register(rdev,
296 					      RV770_SMC_SOFT_REGISTER_is_asic_lombok, 1);
297 
298 }
299 
300 static int cypress_pcie_performance_request(struct radeon_device *rdev,
301 					    u8 perf_req, bool advertise)
302 {
303 #if defined(CONFIG_ACPI)
304 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
305 #endif
306 	u32 tmp;
307 
308 	udelay(10);
309 	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
310 	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) && (tmp & LC_CURRENT_DATA_RATE))
311 		return 0;
312 
313 #if defined(CONFIG_ACPI)
314 	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
315 	    (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
316 		eg_pi->pcie_performance_request_registered = true;
317 		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
318 	} else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
319 		   eg_pi->pcie_performance_request_registered) {
320 		eg_pi->pcie_performance_request_registered = false;
321 		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
322 	}
323 #endif
324 
325 	return 0;
326 }
327 
328 void cypress_advertise_gen2_capability(struct radeon_device *rdev)
329 {
330 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
331 	u32 tmp;
332 
333 #if defined(CONFIG_ACPI)
334 	radeon_acpi_pcie_notify_device_ready(rdev);
335 #endif
336 
337 	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
338 
339 	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
340 	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
341 		pi->pcie_gen2 = true;
342 	else
343 		pi->pcie_gen2 = false;
344 
345 	if (!pi->pcie_gen2)
346 		cypress_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
347 
348 }
349 
350 static enum radeon_pcie_gen cypress_get_maximum_link_speed(struct radeon_ps *radeon_state)
351 {
352 	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
353 
354 	if (state->high.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
355 		return 1;
356 	return 0;
357 }
358 
359 void cypress_notify_link_speed_change_after_state_change(struct radeon_device *rdev,
360 							 struct radeon_ps *radeon_new_state,
361 							 struct radeon_ps *radeon_current_state)
362 {
363 	enum radeon_pcie_gen pcie_link_speed_target =
364 		cypress_get_maximum_link_speed(radeon_new_state);
365 	enum radeon_pcie_gen pcie_link_speed_current =
366 		cypress_get_maximum_link_speed(radeon_current_state);
367 	u8 request;
368 
369 	if (pcie_link_speed_target < pcie_link_speed_current) {
370 		if (pcie_link_speed_target == RADEON_PCIE_GEN1)
371 			request = PCIE_PERF_REQ_PECI_GEN1;
372 		else if (pcie_link_speed_target == RADEON_PCIE_GEN2)
373 			request = PCIE_PERF_REQ_PECI_GEN2;
374 		else
375 			request = PCIE_PERF_REQ_PECI_GEN3;
376 
377 		cypress_pcie_performance_request(rdev, request, false);
378 	}
379 }
380 
381 void cypress_notify_link_speed_change_before_state_change(struct radeon_device *rdev,
382 							  struct radeon_ps *radeon_new_state,
383 							  struct radeon_ps *radeon_current_state)
384 {
385 	enum radeon_pcie_gen pcie_link_speed_target =
386 		cypress_get_maximum_link_speed(radeon_new_state);
387 	enum radeon_pcie_gen pcie_link_speed_current =
388 		cypress_get_maximum_link_speed(radeon_current_state);
389 	u8 request;
390 
391 	if (pcie_link_speed_target > pcie_link_speed_current) {
392 		if (pcie_link_speed_target == RADEON_PCIE_GEN1)
393 			request = PCIE_PERF_REQ_PECI_GEN1;
394 		else if (pcie_link_speed_target == RADEON_PCIE_GEN2)
395 			request = PCIE_PERF_REQ_PECI_GEN2;
396 		else
397 			request = PCIE_PERF_REQ_PECI_GEN3;
398 
399 		cypress_pcie_performance_request(rdev, request, false);
400 	}
401 }
402 
403 static int cypress_populate_voltage_value(struct radeon_device *rdev,
404 					  struct atom_voltage_table *table,
405 					  u16 value, RV770_SMC_VOLTAGE_VALUE *voltage)
406 {
407 	unsigned int i;
408 
409 	for (i = 0; i < table->count; i++) {
410 		if (value <= table->entries[i].value) {
411 			voltage->index = (u8)i;
412 			voltage->value = cpu_to_be16(table->entries[i].value);
413 			break;
414 		}
415 	}
416 
417 	if (i == table->count)
418 		return -EINVAL;
419 
420 	return 0;
421 }
422 
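/*
 * Strobe mode is used for GDDR5 when the memory clock is at or below
 * pi->mclk_strobe_mode_threshold; the returned value packs the memory
 * clock frequency-ratio index together with SMC_STROBE_ENABLE.
 */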
423 u8 cypress_get_strobe_mode_settings(struct radeon_device *rdev, u32 mclk)
424 {
425 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
426 	u8 result = 0;
427 	bool strobe_mode = false;
428 
429 	if (pi->mem_gddr5) {
430 		if (mclk <= pi->mclk_strobe_mode_threshold)
431 			strobe_mode = true;
432 		result = cypress_get_mclk_frequency_ratio(rdev, mclk, strobe_mode);
433 
434 		if (strobe_mode)
435 			result |= SMC_STROBE_ENABLE;
436 	}
437 
438 	return result;
439 }
440 
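/*
 * Map the MPLL feedback divider (clkf) to an IBIAS setting.  Clock values
 * are in 10 kHz units, so ref_clk == 10000 is the 100 MHz reference case
 * and the VCO thresholds below are in the same units.
 */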
441 u32 cypress_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf)
442 {
443 	u32 ref_clk = rdev->clock.mpll.reference_freq;
444 	u32 vco = clkf * ref_clk;
445 
446 	/* 100 MHz ref clk */
447 	if (ref_clk == 10000) {
448 		if (vco > 500000)
449 			return 0xC6;
450 		if (vco > 400000)
451 			return 0x9D;
452 		if (vco > 330000)
453 			return 0x6C;
454 		if (vco > 250000)
455 			return 0x2B;
456 		if (vco > 160000)
457 			return 0x5B;
458 		if (vco > 120000)
459 			return 0x0A;
460 		return 0x4B;
461 	}
462 
463 	/* 27 MHz ref clk */
464 	if (vco > 250000)
465 		return 0x8B;
466 	if (vco > 200000)
467 		return 0xCC;
468 	if (vco > 150000)
469 		return 0x9B;
470 	return 0x6B;
471 }
472 
473 static int cypress_populate_mclk_value(struct radeon_device *rdev,
474 				       u32 engine_clock, u32 memory_clock,
475 				       RV7XX_SMC_MCLK_VALUE *mclk,
476 				       bool strobe_mode, bool dll_state_on)
477 {
478 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
479 
480 	u32 mpll_ad_func_cntl =
481 		pi->clk_regs.rv770.mpll_ad_func_cntl;
482 	u32 mpll_ad_func_cntl_2 =
483 		pi->clk_regs.rv770.mpll_ad_func_cntl_2;
484 	u32 mpll_dq_func_cntl =
485 		pi->clk_regs.rv770.mpll_dq_func_cntl;
486 	u32 mpll_dq_func_cntl_2 =
487 		pi->clk_regs.rv770.mpll_dq_func_cntl_2;
488 	u32 mclk_pwrmgt_cntl =
489 		pi->clk_regs.rv770.mclk_pwrmgt_cntl;
490 	u32 dll_cntl =
491 		pi->clk_regs.rv770.dll_cntl;
492 	u32 mpll_ss1 = pi->clk_regs.rv770.mpll_ss1;
493 	u32 mpll_ss2 = pi->clk_regs.rv770.mpll_ss2;
494 	struct atom_clock_dividers dividers;
495 	u32 ibias;
496 	u32 dll_speed;
497 	int ret;
498 	u32 mc_seq_misc7;
499 
500 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
501 					     memory_clock, strobe_mode, &dividers);
502 	if (ret)
503 		return ret;
504 
505 	if (!strobe_mode) {
506 		mc_seq_misc7 = RREG32(MC_SEQ_MISC7);
507 
508 		if (mc_seq_misc7 & 0x8000000)
509 			dividers.post_div = 1;
510 	}
511 
512 	ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);
513 
514 	mpll_ad_func_cntl &= ~(CLKR_MASK |
515 			       YCLK_POST_DIV_MASK |
516 			       CLKF_MASK |
517 			       CLKFRAC_MASK |
518 			       IBIAS_MASK);
519 	mpll_ad_func_cntl |= CLKR(dividers.ref_div);
520 	mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
521 	mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
522 	mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
523 	mpll_ad_func_cntl |= IBIAS(ibias);
524 
525 	if (dividers.vco_mode)
526 		mpll_ad_func_cntl_2 |= VCO_MODE;
527 	else
528 		mpll_ad_func_cntl_2 &= ~VCO_MODE;
529 
530 	if (pi->mem_gddr5) {
531 		mpll_dq_func_cntl &= ~(CLKR_MASK |
532 				       YCLK_POST_DIV_MASK |
533 				       CLKF_MASK |
534 				       CLKFRAC_MASK |
535 				       IBIAS_MASK);
536 		mpll_dq_func_cntl |= CLKR(dividers.ref_div);
537 		mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
538 		mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
539 		mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
540 		mpll_dq_func_cntl |= IBIAS(ibias);
541 
542 		if (strobe_mode)
543 			mpll_dq_func_cntl &= ~PDNB;
544 		else
545 			mpll_dq_func_cntl |= PDNB;
546 
547 		if (dividers.vco_mode)
548 			mpll_dq_func_cntl_2 |= VCO_MODE;
549 		else
550 			mpll_dq_func_cntl_2 &= ~VCO_MODE;
551 	}
552 
553 	if (pi->mclk_ss) {
554 		struct radeon_atom_ss ss;
555 		u32 vco_freq = memory_clock * dividers.post_div;
556 
557 		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
558 						     ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
559 			u32 reference_clock = rdev->clock.mpll.reference_freq;
560 			u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
561 			u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
562 			u32 clk_v = ss.percentage *
563 				(0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);
564 
565 			mpll_ss1 &= ~CLKV_MASK;
566 			mpll_ss1 |= CLKV(clk_v);
567 
568 			mpll_ss2 &= ~CLKS_MASK;
569 			mpll_ss2 |= CLKS(clk_s);
570 		}
571 	}
572 
573 	dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
574 					memory_clock);
575 
576 	mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
577 	mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
578 	if (dll_state_on)
579 		mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
580 				     MRDCKA1_PDNB |
581 				     MRDCKB0_PDNB |
582 				     MRDCKB1_PDNB |
583 				     MRDCKC0_PDNB |
584 				     MRDCKC1_PDNB |
585 				     MRDCKD0_PDNB |
586 				     MRDCKD1_PDNB);
587 	else
588 		mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
589 				      MRDCKA1_PDNB |
590 				      MRDCKB0_PDNB |
591 				      MRDCKB1_PDNB |
592 				      MRDCKC0_PDNB |
593 				      MRDCKC1_PDNB |
594 				      MRDCKD0_PDNB |
595 				      MRDCKD1_PDNB);
596 
597 	mclk->mclk770.mclk_value = cpu_to_be32(memory_clock);
598 	mclk->mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
599 	mclk->mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
600 	mclk->mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
601 	mclk->mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
602 	mclk->mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
603 	mclk->mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
604 	mclk->mclk770.vMPLL_SS = cpu_to_be32(mpll_ss1);
605 	mclk->mclk770.vMPLL_SS2 = cpu_to_be32(mpll_ss2);
606 
607 	return 0;
608 }
609 
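/*
 * Derive the 4-bit MC parameter index (0x0-0xf) from the memory clock;
 * the clock ranges differ between strobe and non-strobe mode and between
 * BARTS-and-newer and older families.
 */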
610 u8 cypress_get_mclk_frequency_ratio(struct radeon_device *rdev,
611 				    u32 memory_clock, bool strobe_mode)
612 {
613 	u8 mc_para_index;
614 
615 	if (rdev->family >= CHIP_BARTS) {
616 		if (strobe_mode) {
617 			if (memory_clock < 10000)
618 				mc_para_index = 0x00;
619 			else if (memory_clock > 47500)
620 				mc_para_index = 0x0f;
621 			else
622 				mc_para_index = (u8)((memory_clock - 10000) / 2500);
623 		} else {
624 			if (memory_clock < 65000)
625 				mc_para_index = 0x00;
626 			else if (memory_clock > 135000)
627 				mc_para_index = 0x0f;
628 			else
629 				mc_para_index = (u8)((memory_clock - 60000) / 5000);
630 		}
631 	} else {
632 		if (strobe_mode) {
633 			if (memory_clock < 10000)
634 				mc_para_index = 0x00;
635 			else if (memory_clock > 47500)
636 				mc_para_index = 0x0f;
637 			else
638 				mc_para_index = (u8)((memory_clock - 10000) / 2500);
639 		} else {
640 			if (memory_clock < 40000)
641 				mc_para_index = 0x00;
642 			else if (memory_clock > 115000)
643 				mc_para_index = 0x0f;
644 			else
645 				mc_para_index = (u8)((memory_clock - 40000) / 5000);
646 		}
647 	}
648 	return mc_para_index;
649 }
650 
651 static int cypress_populate_mvdd_value(struct radeon_device *rdev,
652 				       u32 mclk,
653 				       RV770_SMC_VOLTAGE_VALUE *voltage)
654 {
655 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
656 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
657 
658 	if (!pi->mvdd_control) {
659 		voltage->index = eg_pi->mvdd_high_index;
660 		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
661 		return 0;
662 	}
663 
664 	if (mclk <= pi->mvdd_split_frequency) {
665 		voltage->index = eg_pi->mvdd_low_index;
666 		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
667 	} else {
668 		voltage->index = eg_pi->mvdd_high_index;
669 		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
670 	}
671 
672 	return 0;
673 }
674 
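/*
 * Fill one SMC hardware performance level: PCIe gen2 flags, display
 * watermark, sclk/mclk PLL settings, memory controller flags (stutter,
 * EDC, strobe) and the VDDC/VDDCI/MVDD voltage entries.
 */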
675 int cypress_convert_power_level_to_smc(struct radeon_device *rdev,
676 				       struct rv7xx_pl *pl,
677 				       RV770_SMC_HW_PERFORMANCE_LEVEL *level,
678 				       u8 watermark_level)
679 {
680 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
681 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
682 	int ret;
683 	bool dll_state_on;
684 
685 	level->gen2PCIE = pi->pcie_gen2 ?
686 		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
687 	level->gen2XSP  = (pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0;
688 	level->backbias = (pl->flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? 1 : 0;
689 	level->displayWatermark = watermark_level;
690 
691 	ret = rv740_populate_sclk_value(rdev, pl->sclk, &level->sclk);
692 	if (ret)
693 		return ret;
694 
695 	level->mcFlags =  0;
696 	if (pi->mclk_stutter_mode_threshold &&
697 	    (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
698 	    !eg_pi->uvd_enabled) {
699 		level->mcFlags |= SMC_MC_STUTTER_EN;
700 		if (eg_pi->sclk_deep_sleep)
701 			level->stateFlags |= PPSMC_STATEFLAG_AUTO_PULSE_SKIP;
702 		else
703 			level->stateFlags &= ~PPSMC_STATEFLAG_AUTO_PULSE_SKIP;
704 	}
705 
706 	if (pi->mem_gddr5) {
707 		if (pl->mclk > pi->mclk_edc_enable_threshold)
708 			level->mcFlags |= SMC_MC_EDC_RD_FLAG;
709 
710 		if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
711 			level->mcFlags |= SMC_MC_EDC_WR_FLAG;
712 
713 		level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);
714 
715 		if (level->strobeMode & SMC_STROBE_ENABLE) {
716 			if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
717 			    ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
718 				dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
719 			else
720 				dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
721 		} else
722 			dll_state_on = eg_pi->dll_default_on;
723 
724 		ret = cypress_populate_mclk_value(rdev,
725 						  pl->sclk,
726 						  pl->mclk,
727 						  &level->mclk,
728 						  (level->strobeMode & SMC_STROBE_ENABLE) != 0,
729 						  dll_state_on);
730 	} else {
731 		ret = cypress_populate_mclk_value(rdev,
732 						  pl->sclk,
733 						  pl->mclk,
734 						  &level->mclk,
735 						  true,
736 						  true);
737 	}
738 	if (ret)
739 		return ret;
740 
741 	ret = cypress_populate_voltage_value(rdev,
742 					     &eg_pi->vddc_voltage_table,
743 					     pl->vddc,
744 					     &level->vddc);
745 	if (ret)
746 		return ret;
747 
748 	if (eg_pi->vddci_control) {
749 		ret = cypress_populate_voltage_value(rdev,
750 						     &eg_pi->vddci_voltage_table,
751 						     pl->vddci,
752 						     &level->vddci);
753 		if (ret)
754 			return ret;
755 	}
756 
757 	ret = cypress_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
758 
759 	return ret;
760 }
761 
762 static int cypress_convert_power_state_to_smc(struct radeon_device *rdev,
763 					      struct radeon_ps *radeon_state,
764 					      RV770_SMC_SWSTATE *smc_state)
765 {
766 	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
767 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
768 	int ret;
769 
770 	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
771 		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
772 
773 	ret = cypress_convert_power_level_to_smc(rdev,
774 						 &state->low,
775 						 &smc_state->levels[0],
776 						 PPSMC_DISPLAY_WATERMARK_LOW);
777 	if (ret)
778 		return ret;
779 
780 	ret = cypress_convert_power_level_to_smc(rdev,
781 						 &state->medium,
782 						 &smc_state->levels[1],
783 						 PPSMC_DISPLAY_WATERMARK_LOW);
784 	if (ret)
785 		return ret;
786 
787 	ret = cypress_convert_power_level_to_smc(rdev,
788 						 &state->high,
789 						 &smc_state->levels[2],
790 						 PPSMC_DISPLAY_WATERMARK_HIGH);
791 	if (ret)
792 		return ret;
793 
794 	smc_state->levels[0].arbValue = MC_CG_ARB_FREQ_F1;
795 	smc_state->levels[1].arbValue = MC_CG_ARB_FREQ_F2;
796 	smc_state->levels[2].arbValue = MC_CG_ARB_FREQ_F3;
797 
798 	if (eg_pi->dynamic_ac_timing) {
799 		smc_state->levels[0].ACIndex = 2;
800 		smc_state->levels[1].ACIndex = 3;
801 		smc_state->levels[2].ACIndex = 4;
802 	} else {
803 		smc_state->levels[0].ACIndex = 0;
804 		smc_state->levels[1].ACIndex = 0;
805 		smc_state->levels[2].ACIndex = 0;
806 	}
807 
808 	rv770_populate_smc_sp(rdev, radeon_state, smc_state);
809 
810 	return rv770_populate_smc_t(rdev, radeon_state, smc_state);
811 }
812 
813 static void cypress_convert_mc_registers(struct evergreen_mc_reg_entry *entry,
814 					 SMC_Evergreen_MCRegisterSet *data,
815 					 u32 num_entries, u32 valid_flag)
816 {
817 	u32 i, j;
818 
819 	for (i = 0, j = 0; j < num_entries; j++) {
820 		if (valid_flag & (1 << j)) {
821 			data->value[i] = cpu_to_be32(entry->mc_data[j]);
822 			i++;
823 		}
824 	}
825 }
826 
827 static void cypress_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
828 						      struct rv7xx_pl *pl,
829 						      SMC_Evergreen_MCRegisterSet *mc_reg_table_data)
830 {
831 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
832 	u32 i = 0;
833 
834 	for (i = 0; i < eg_pi->mc_reg_table.num_entries; i++) {
835 		if (pl->mclk <=
836 		    eg_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
837 			break;
838 	}
839 
840 	if ((i == eg_pi->mc_reg_table.num_entries) && (i > 0))
841 		--i;
842 
843 	cypress_convert_mc_registers(&eg_pi->mc_reg_table.mc_reg_table_entry[i],
844 				     mc_reg_table_data,
845 				     eg_pi->mc_reg_table.last,
846 				     eg_pi->mc_reg_table.valid_flag);
847 }
848 
849 static void cypress_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
850 						struct radeon_ps *radeon_state,
851 						SMC_Evergreen_MCRegisters *mc_reg_table)
852 {
853 	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
854 
855 	cypress_convert_mc_reg_table_entry_to_smc(rdev,
856 						  &state->low,
857 						  &mc_reg_table->data[2]);
858 	cypress_convert_mc_reg_table_entry_to_smc(rdev,
859 						  &state->medium,
860 						  &mc_reg_table->data[3]);
861 	cypress_convert_mc_reg_table_entry_to_smc(rdev,
862 						  &state->high,
863 						  &mc_reg_table->data[4]);
864 }
865 
866 int cypress_upload_sw_state(struct radeon_device *rdev,
867 			    struct radeon_ps *radeon_new_state)
868 {
869 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
870 	u16 address = pi->state_table_start +
871 		offsetof(RV770_SMC_STATETABLE, driverState);
872 	RV770_SMC_SWSTATE state = { 0 };
873 	int ret;
874 
875 	ret = cypress_convert_power_state_to_smc(rdev, radeon_new_state, &state);
876 	if (ret)
877 		return ret;
878 
879 	return rv770_copy_bytes_to_smc(rdev, address, (u8 *)&state,
880 				    sizeof(RV770_SMC_SWSTATE),
881 				    pi->sram_end);
882 }
883 
884 int cypress_upload_mc_reg_table(struct radeon_device *rdev,
885 				struct radeon_ps *radeon_new_state)
886 {
887 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
888 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
889 	SMC_Evergreen_MCRegisters mc_reg_table = { 0 };
890 	u16 address;
891 
892 	cypress_convert_mc_reg_table_to_smc(rdev, radeon_new_state, &mc_reg_table);
893 
894 	address = eg_pi->mc_reg_table_start +
895 		(u16)offsetof(SMC_Evergreen_MCRegisters, data[2]);
896 
897 	return rv770_copy_bytes_to_smc(rdev, address,
898 				       (u8 *)&mc_reg_table.data[2],
899 				       sizeof(SMC_Evergreen_MCRegisterSet) * 3,
900 				       pi->sram_end);
901 }
902 
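/*
 * Compute the MC arbiter burst time from the engine/memory clock ratio;
 * the result is clamped to a maximum of 18.
 */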
903 u32 cypress_calculate_burst_time(struct radeon_device *rdev,
904 				 u32 engine_clock, u32 memory_clock)
905 {
906 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
907 	u32 multiplier = pi->mem_gddr5 ? 1 : 2;
908 	u32 result = (4 * multiplier * engine_clock) / (memory_clock / 2);
909 	u32 burst_time;
910 
911 	if (result <= 4)
912 		burst_time = 0;
913 	else if (result < 8)
914 		burst_time = result - 4;
915 	else {
916 		burst_time = result / 2;
917 		if (burst_time > 18)
918 			burst_time = 18;
919 	}
920 
921 	return burst_time;
922 }
923 
924 void cypress_program_memory_timing_parameters(struct radeon_device *rdev,
925 					      struct radeon_ps *radeon_new_state)
926 {
927 	struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
928 	u32 mc_arb_burst_time = RREG32(MC_ARB_BURST_TIME);
929 
930 	mc_arb_burst_time &= ~(STATE1_MASK | STATE2_MASK | STATE3_MASK);
931 
932 	mc_arb_burst_time |= STATE1(cypress_calculate_burst_time(rdev,
933 								 new_state->low.sclk,
934 								 new_state->low.mclk));
935 	mc_arb_burst_time |= STATE2(cypress_calculate_burst_time(rdev,
936 								 new_state->medium.sclk,
937 								 new_state->medium.mclk));
938 	mc_arb_burst_time |= STATE3(cypress_calculate_burst_time(rdev,
939 								 new_state->high.sclk,
940 								 new_state->high.mclk));
941 
942 	rv730_program_memory_timing_parameters(rdev, radeon_new_state);
943 
944 	WREG32(MC_ARB_BURST_TIME, mc_arb_burst_time);
945 }
946 
947 static void cypress_populate_mc_reg_addresses(struct radeon_device *rdev,
948 					      SMC_Evergreen_MCRegisters *mc_reg_table)
949 {
950 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
951 	u32 i, j;
952 
953 	for (i = 0, j = 0; j < eg_pi->mc_reg_table.last; j++) {
954 		if (eg_pi->mc_reg_table.valid_flag & (1 << j)) {
955 			mc_reg_table->address[i].s0 =
956 				cpu_to_be16(eg_pi->mc_reg_table.mc_reg_address[j].s0);
957 			mc_reg_table->address[i].s1 =
958 				cpu_to_be16(eg_pi->mc_reg_table.mc_reg_address[j].s1);
959 			i++;
960 		}
961 	}
962 
963 	mc_reg_table->last = (u8)i;
964 }
965 
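/*
 * Build the list of MC sequencer register pairs tracked for dynamic AC
 * timing: s0 is the shadow (_LP) copy, s1 the active register.  Entries
 * without an _LP variant use the same register for both.
 */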
966 static void cypress_set_mc_reg_address_table(struct radeon_device *rdev)
967 {
968 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
969 	u32 i = 0;
970 
971 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RAS_TIMING_LP >> 2;
972 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RAS_TIMING >> 2;
973 	i++;
974 
975 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_CAS_TIMING_LP >> 2;
976 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_CAS_TIMING >> 2;
977 	i++;
978 
979 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC_TIMING_LP >> 2;
980 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC_TIMING >> 2;
981 	i++;
982 
983 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC_TIMING2_LP >> 2;
984 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC_TIMING2 >> 2;
985 	i++;
986 
987 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RD_CTL_D0_LP >> 2;
988 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RD_CTL_D0 >> 2;
989 	i++;
990 
991 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RD_CTL_D1_LP >> 2;
992 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RD_CTL_D1 >> 2;
993 	i++;
994 
995 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_WR_CTL_D0_LP >> 2;
996 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_WR_CTL_D0 >> 2;
997 	i++;
998 
999 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_WR_CTL_D1_LP >> 2;
1000 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_WR_CTL_D1 >> 2;
1001 	i++;
1002 
1003 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
1004 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_PMG_CMD_EMRS >> 2;
1005 	i++;
1006 
1007 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
1008 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_PMG_CMD_MRS >> 2;
1009 	i++;
1010 
1011 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
1012 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_PMG_CMD_MRS1 >> 2;
1013 	i++;
1014 
1015 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC1 >> 2;
1016 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC1 >> 2;
1017 	i++;
1018 
1019 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RESERVE_M >> 2;
1020 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RESERVE_M >> 2;
1021 	i++;
1022 
1023 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC3 >> 2;
1024 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC3 >> 2;
1025 	i++;
1026 
1027 	eg_pi->mc_reg_table.last = (u8)i;
1028 }
1029 
1030 static void cypress_retrieve_ac_timing_for_one_entry(struct radeon_device *rdev,
1031 						     struct evergreen_mc_reg_entry *entry)
1032 {
1033 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1034 	u32 i;
1035 
1036 	for (i = 0; i < eg_pi->mc_reg_table.last; i++)
1037 		entry->mc_data[i] =
1038 			RREG32(eg_pi->mc_reg_table.mc_reg_address[i].s1 << 2);
1039 
1040 }
1041 
1042 static void cypress_retrieve_ac_timing_for_all_ranges(struct radeon_device *rdev,
1043 						      struct atom_memory_clock_range_table *range_table)
1044 {
1045 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1046 	u32 i, j;
1047 
1048 	for (i = 0; i < range_table->num_entries; i++) {
1049 		eg_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max =
1050 			range_table->mclk[i];
1051 		radeon_atom_set_ac_timing(rdev, range_table->mclk[i]);
1052 		cypress_retrieve_ac_timing_for_one_entry(rdev,
1053 							 &eg_pi->mc_reg_table.mc_reg_table_entry[i]);
1054 	}
1055 
1056 	eg_pi->mc_reg_table.num_entries = range_table->num_entries;
1057 	eg_pi->mc_reg_table.valid_flag = 0;
1058 
1059 	for (i = 0; i < eg_pi->mc_reg_table.last; i++) {
1060 		for (j = 1; j < range_table->num_entries; j++) {
1061 			if (eg_pi->mc_reg_table.mc_reg_table_entry[j-1].mc_data[i] !=
1062 			    eg_pi->mc_reg_table.mc_reg_table_entry[j].mc_data[i]) {
1063 				eg_pi->mc_reg_table.valid_flag |= (1 << i);
1064 				break;
1065 			}
1066 		}
1067 	}
1068 }
1069 
1070 static int cypress_initialize_mc_reg_table(struct radeon_device *rdev)
1071 {
1072 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1073 	u8 module_index = rv770_get_memory_module_index(rdev);
1074 	struct atom_memory_clock_range_table range_table = { 0 };
1075 	int ret;
1076 
1077 	ret = radeon_atom_get_mclk_range_table(rdev,
1078 					       pi->mem_gddr5,
1079 					       module_index, &range_table);
1080 	if (ret)
1081 		return ret;
1082 
1083 	cypress_retrieve_ac_timing_for_all_ranges(rdev, &range_table);
1084 
1085 	return 0;
1086 }
1087 
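/*
 * Poll each memory channel until the MC sequencer reports the expected
 * CG_SEQ_RESP value.  Cypress/Hemlock have four channels, Cedar one,
 * everything else two.
 */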
1088 static void cypress_wait_for_mc_sequencer(struct radeon_device *rdev, u8 value)
1089 {
1090 	u32 i, j;
1091 	u32 channels = 2;
1092 
1093 	if ((rdev->family == CHIP_CYPRESS) ||
1094 	    (rdev->family == CHIP_HEMLOCK))
1095 		channels = 4;
1096 	else if (rdev->family == CHIP_CEDAR)
1097 		channels = 1;
1098 
1099 	for (i = 0; i < channels; i++) {
1100 		if ((rdev->family == CHIP_CYPRESS) ||
1101 		    (rdev->family == CHIP_HEMLOCK)) {
1102 			WREG32_P(MC_CONFIG_MCD, MC_RD_ENABLE_MCD(i), ~MC_RD_ENABLE_MCD_MASK);
1103 			WREG32_P(MC_CG_CONFIG_MCD, MC_RD_ENABLE_MCD(i), ~MC_RD_ENABLE_MCD_MASK);
1104 		} else {
1105 			WREG32_P(MC_CONFIG, MC_RD_ENABLE(i), ~MC_RD_ENABLE_MASK);
1106 			WREG32_P(MC_CG_CONFIG, MC_RD_ENABLE(i), ~MC_RD_ENABLE_MASK);
1107 		}
1108 		for (j = 0; j < rdev->usec_timeout; j++) {
1109 			if (((RREG32(MC_SEQ_CG) & CG_SEQ_RESP_MASK) >> CG_SEQ_RESP_SHIFT) == value)
1110 				break;
1111 			udelay(1);
1112 		}
1113 	}
1114 }
1115 
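/*
 * Switch the memory controller to the S1 DRAM configuration: program the
 * boot mclk AC timing, suspend YCLK, request DRAMCONF_S1 with the proper
 * strobe setting, wait for PMG_PWRSTATE to assert, then resume YCLK.
 */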
1116 static void cypress_force_mc_use_s1(struct radeon_device *rdev,
1117 				    struct radeon_ps *radeon_boot_state)
1118 {
1119 	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1120 	u32 strobe_mode;
1121 	u32 mc_seq_cg;
1122 	int i;
1123 
1124 	if (RREG32(MC_SEQ_STATUS_M) & PMG_PWRSTATE)
1125 		return;
1126 
1127 	radeon_atom_set_ac_timing(rdev, boot_state->low.mclk);
1128 	radeon_mc_wait_for_idle(rdev);
1129 
1130 	if ((rdev->family == CHIP_CYPRESS) ||
1131 	    (rdev->family == CHIP_HEMLOCK)) {
1132 		WREG32(MC_CONFIG_MCD, 0xf);
1133 		WREG32(MC_CG_CONFIG_MCD, 0xf);
1134 	} else {
1135 		WREG32(MC_CONFIG, 0xf);
1136 		WREG32(MC_CG_CONFIG, 0xf);
1137 	}
1138 
1139 	for (i = 0; i < rdev->num_crtc; i++)
1140 		radeon_wait_for_vblank(rdev, i);
1141 
1142 	WREG32(MC_SEQ_CG, MC_CG_SEQ_YCLK_SUSPEND);
1143 	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_SUSPEND);
1144 
1145 	strobe_mode = cypress_get_strobe_mode_settings(rdev,
1146 						       boot_state->low.mclk);
1147 
1148 	mc_seq_cg = CG_SEQ_REQ(MC_CG_SEQ_DRAMCONF_S1);
1149 	mc_seq_cg |= SEQ_CG_RESP(strobe_mode);
1150 	WREG32(MC_SEQ_CG, mc_seq_cg);
1151 
1152 	for (i = 0; i < rdev->usec_timeout; i++) {
1153 		if (RREG32(MC_SEQ_STATUS_M) & PMG_PWRSTATE)
1154 			break;
1155 		udelay(1);
1156 	}
1157 
1158 	mc_seq_cg &= ~CG_SEQ_REQ_MASK;
1159 	mc_seq_cg |= CG_SEQ_REQ(MC_CG_SEQ_YCLK_RESUME);
1160 	WREG32(MC_SEQ_CG, mc_seq_cg);
1161 
1162 	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_RESUME);
1163 }
1164 
1165 static void cypress_copy_ac_timing_from_s1_to_s0(struct radeon_device *rdev)
1166 {
1167 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1168 	u32 value;
1169 	u32 i;
1170 
1171 	for (i = 0; i < eg_pi->mc_reg_table.last; i++) {
1172 		value = RREG32(eg_pi->mc_reg_table.mc_reg_address[i].s1 << 2);
1173 		WREG32(eg_pi->mc_reg_table.mc_reg_address[i].s0 << 2, value);
1174 	}
1175 }
1176 
1177 static void cypress_force_mc_use_s0(struct radeon_device *rdev,
1178 				    struct radeon_ps *radeon_boot_state)
1179 {
1180 	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1181 	u32 strobe_mode;
1182 	u32 mc_seq_cg;
1183 	int i;
1184 
1185 	cypress_copy_ac_timing_from_s1_to_s0(rdev);
1186 	radeon_mc_wait_for_idle(rdev);
1187 
1188 	if ((rdev->family == CHIP_CYPRESS) ||
1189 	    (rdev->family == CHIP_HEMLOCK)) {
1190 		WREG32(MC_CONFIG_MCD, 0xf);
1191 		WREG32(MC_CG_CONFIG_MCD, 0xf);
1192 	} else {
1193 		WREG32(MC_CONFIG, 0xf);
1194 		WREG32(MC_CG_CONFIG, 0xf);
1195 	}
1196 
1197 	for (i = 0; i < rdev->num_crtc; i++)
1198 		radeon_wait_for_vblank(rdev, i);
1199 
1200 	WREG32(MC_SEQ_CG, MC_CG_SEQ_YCLK_SUSPEND);
1201 	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_SUSPEND);
1202 
1203 	strobe_mode = cypress_get_strobe_mode_settings(rdev,
1204 						       boot_state->low.mclk);
1205 
1206 	mc_seq_cg = CG_SEQ_REQ(MC_CG_SEQ_DRAMCONF_S0);
1207 	mc_seq_cg |= SEQ_CG_RESP(strobe_mode);
1208 	WREG32(MC_SEQ_CG, mc_seq_cg);
1209 
1210 	for (i = 0; i < rdev->usec_timeout; i++) {
1211 		if (!(RREG32(MC_SEQ_STATUS_M) & PMG_PWRSTATE))
1212 			break;
1213 		udelay(1);
1214 	}
1215 
1216 	mc_seq_cg &= ~CG_SEQ_REQ_MASK;
1217 	mc_seq_cg |= CG_SEQ_REQ(MC_CG_SEQ_YCLK_RESUME);
1218 	WREG32(MC_SEQ_CG, mc_seq_cg);
1219 
1220 	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_RESUME);
1221 }
1222 
1223 static int cypress_populate_initial_mvdd_value(struct radeon_device *rdev,
1224 					       RV770_SMC_VOLTAGE_VALUE *voltage)
1225 {
1226 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1227 
1228 	voltage->index = eg_pi->mvdd_high_index;
1229 	voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1230 
1231 	return 0;
1232 }
1233 
1234 int cypress_populate_smc_initial_state(struct radeon_device *rdev,
1235 				       struct radeon_ps *radeon_initial_state,
1236 				       RV770_SMC_STATETABLE *table)
1237 {
1238 	struct rv7xx_ps *initial_state = rv770_get_ps(radeon_initial_state);
1239 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1240 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1241 	u32 a_t;
1242 
1243 	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
1244 		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl);
1245 	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
1246 		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl_2);
1247 	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
1248 		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl);
1249 	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
1250 		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl_2);
1251 	table->initialState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
1252 		cpu_to_be32(pi->clk_regs.rv770.mclk_pwrmgt_cntl);
1253 	table->initialState.levels[0].mclk.mclk770.vDLL_CNTL =
1254 		cpu_to_be32(pi->clk_regs.rv770.dll_cntl);
1255 
1256 	table->initialState.levels[0].mclk.mclk770.vMPLL_SS =
1257 		cpu_to_be32(pi->clk_regs.rv770.mpll_ss1);
1258 	table->initialState.levels[0].mclk.mclk770.vMPLL_SS2 =
1259 		cpu_to_be32(pi->clk_regs.rv770.mpll_ss2);
1260 
1261 	table->initialState.levels[0].mclk.mclk770.mclk_value =
1262 		cpu_to_be32(initial_state->low.mclk);
1263 
1264 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1265 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl);
1266 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1267 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_2);
1268 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1269 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_3);
1270 	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1271 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum);
1272 	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1273 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum_2);
1274 
1275 	table->initialState.levels[0].sclk.sclk_value =
1276 		cpu_to_be32(initial_state->low.sclk);
1277 
1278 	table->initialState.levels[0].arbValue = MC_CG_ARB_FREQ_F0;
1279 
1280 	table->initialState.levels[0].ACIndex = 0;
1281 
1282 	cypress_populate_voltage_value(rdev,
1283 				       &eg_pi->vddc_voltage_table,
1284 				       initial_state->low.vddc,
1285 				       &table->initialState.levels[0].vddc);
1286 
1287 	if (eg_pi->vddci_control)
1288 		cypress_populate_voltage_value(rdev,
1289 					       &eg_pi->vddci_voltage_table,
1290 					       initial_state->low.vddci,
1291 					       &table->initialState.levels[0].vddci);
1292 
1293 	cypress_populate_initial_mvdd_value(rdev,
1294 					    &table->initialState.levels[0].mvdd);
1295 
1296 	a_t = CG_R(0xffff) | CG_L(0);
1297 	table->initialState.levels[0].aT = cpu_to_be32(a_t);
1298 
1299 	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1300 
1301 
1302 	if (pi->boot_in_gen2)
1303 		table->initialState.levels[0].gen2PCIE = 1;
1304 	else
1305 		table->initialState.levels[0].gen2PCIE = 0;
1306 	if (initial_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
1307 		table->initialState.levels[0].gen2XSP = 1;
1308 	else
1309 		table->initialState.levels[0].gen2XSP = 0;
1310 
1311 	if (pi->mem_gddr5) {
1312 		table->initialState.levels[0].strobeMode =
1313 			cypress_get_strobe_mode_settings(rdev,
1314 							 initial_state->low.mclk);
1315 
1316 		if (initial_state->low.mclk > pi->mclk_edc_enable_threshold)
1317 			table->initialState.levels[0].mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
1318 		else
1319 			table->initialState.levels[0].mcFlags =  0;
1320 	}
1321 
1322 	table->initialState.levels[1] = table->initialState.levels[0];
1323 	table->initialState.levels[2] = table->initialState.levels[0];
1324 
1325 	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
1326 
1327 	return 0;
1328 }
1329 
1330 int cypress_populate_smc_acpi_state(struct radeon_device *rdev,
1331 				    RV770_SMC_STATETABLE *table)
1332 {
1333 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1334 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1335 	u32 mpll_ad_func_cntl =
1336 		pi->clk_regs.rv770.mpll_ad_func_cntl;
1337 	u32 mpll_ad_func_cntl_2 =
1338 		pi->clk_regs.rv770.mpll_ad_func_cntl_2;
1339 	u32 mpll_dq_func_cntl =
1340 		pi->clk_regs.rv770.mpll_dq_func_cntl;
1341 	u32 mpll_dq_func_cntl_2 =
1342 		pi->clk_regs.rv770.mpll_dq_func_cntl_2;
1343 	u32 spll_func_cntl =
1344 		pi->clk_regs.rv770.cg_spll_func_cntl;
1345 	u32 spll_func_cntl_2 =
1346 		pi->clk_regs.rv770.cg_spll_func_cntl_2;
1347 	u32 spll_func_cntl_3 =
1348 		pi->clk_regs.rv770.cg_spll_func_cntl_3;
1349 	u32 mclk_pwrmgt_cntl =
1350 		pi->clk_regs.rv770.mclk_pwrmgt_cntl;
1351 	u32 dll_cntl =
1352 		pi->clk_regs.rv770.dll_cntl;
1353 
1354 	table->ACPIState = table->initialState;
1355 
1356 	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
1357 
1358 	if (pi->acpi_vddc) {
1359 		cypress_populate_voltage_value(rdev,
1360 					       &eg_pi->vddc_voltage_table,
1361 					       pi->acpi_vddc,
1362 					       &table->ACPIState.levels[0].vddc);
1363 		if (pi->pcie_gen2) {
1364 			if (pi->acpi_pcie_gen2)
1365 				table->ACPIState.levels[0].gen2PCIE = 1;
1366 			else
1367 				table->ACPIState.levels[0].gen2PCIE = 0;
1368 		} else
1369 			table->ACPIState.levels[0].gen2PCIE = 0;
1370 		if (pi->acpi_pcie_gen2)
1371 			table->ACPIState.levels[0].gen2XSP = 1;
1372 		else
1373 			table->ACPIState.levels[0].gen2XSP = 0;
1374 	} else {
1375 		cypress_populate_voltage_value(rdev,
1376 					       &eg_pi->vddc_voltage_table,
1377 					       pi->min_vddc_in_table,
1378 					       &table->ACPIState.levels[0].vddc);
1379 		table->ACPIState.levels[0].gen2PCIE = 0;
1380 	}
1381 
1382 	if (eg_pi->acpi_vddci) {
1383 		if (eg_pi->vddci_control) {
1384 			cypress_populate_voltage_value(rdev,
1385 						       &eg_pi->vddci_voltage_table,
1386 						       eg_pi->acpi_vddci,
1387 						       &table->ACPIState.levels[0].vddci);
1388 		}
1389 	}
1390 
1391 	mpll_ad_func_cntl &= ~PDNB;
1392 
1393 	mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
1394 
1395 	if (pi->mem_gddr5)
1396 		mpll_dq_func_cntl &= ~PDNB;
1397 	mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;
1398 
1399 	mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
1400 			     MRDCKA1_RESET |
1401 			     MRDCKB0_RESET |
1402 			     MRDCKB1_RESET |
1403 			     MRDCKC0_RESET |
1404 			     MRDCKC1_RESET |
1405 			     MRDCKD0_RESET |
1406 			     MRDCKD1_RESET);
1407 
1408 	mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
1409 			      MRDCKA1_PDNB |
1410 			      MRDCKB0_PDNB |
1411 			      MRDCKB1_PDNB |
1412 			      MRDCKC0_PDNB |
1413 			      MRDCKC1_PDNB |
1414 			      MRDCKD0_PDNB |
1415 			      MRDCKD1_PDNB);
1416 
1417 	dll_cntl |= (MRDCKA0_BYPASS |
1418 		     MRDCKA1_BYPASS |
1419 		     MRDCKB0_BYPASS |
1420 		     MRDCKB1_BYPASS |
1421 		     MRDCKC0_BYPASS |
1422 		     MRDCKC1_BYPASS |
1423 		     MRDCKD0_BYPASS |
1424 		     MRDCKD1_BYPASS);
1425 
1426 	/* evergreen only */
1427 	if (rdev->family <= CHIP_HEMLOCK)
1428 		spll_func_cntl |= SPLL_RESET | SPLL_SLEEP | SPLL_BYPASS_EN;
1429 
1430 	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
1431 	spll_func_cntl_2 |= SCLK_MUX_SEL(4);
1432 
1433 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
1434 		cpu_to_be32(mpll_ad_func_cntl);
1435 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
1436 		cpu_to_be32(mpll_ad_func_cntl_2);
1437 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
1438 		cpu_to_be32(mpll_dq_func_cntl);
1439 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
1440 		cpu_to_be32(mpll_dq_func_cntl_2);
1441 	table->ACPIState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
1442 		cpu_to_be32(mclk_pwrmgt_cntl);
1443 	table->ACPIState.levels[0].mclk.mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
1444 
1445 	table->ACPIState.levels[0].mclk.mclk770.mclk_value = 0;
1446 
1447 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1448 		cpu_to_be32(spll_func_cntl);
1449 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1450 		cpu_to_be32(spll_func_cntl_2);
1451 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1452 		cpu_to_be32(spll_func_cntl_3);
1453 
1454 	table->ACPIState.levels[0].sclk.sclk_value = 0;
1455 
1456 	cypress_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
1457 
1458 	if (eg_pi->dynamic_ac_timing)
1459 		table->ACPIState.levels[0].ACIndex = 1;
1460 
1461 	table->ACPIState.levels[1] = table->ACPIState.levels[0];
1462 	table->ACPIState.levels[2] = table->ACPIState.levels[0];
1463 
1464 	return 0;
1465 }
1466 
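/*
 * The SMC state table only has room for MAX_NO_VREG_STEPS voltage
 * entries; if ATOM reports more, drop the leading (lowest-voltage)
 * entries and keep the highest ones.
 */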
1467 static void cypress_trim_voltage_table_to_fit_state_table(struct radeon_device *rdev,
1468 							  struct atom_voltage_table *voltage_table)
1469 {
1470 	unsigned int i, diff;
1471 
1472 	if (voltage_table->count <= MAX_NO_VREG_STEPS)
1473 		return;
1474 
1475 	diff = voltage_table->count - MAX_NO_VREG_STEPS;
1476 
1477 	for (i = 0; i < MAX_NO_VREG_STEPS; i++)
1478 		voltage_table->entries[i] = voltage_table->entries[i + diff];
1479 
1480 	voltage_table->count = MAX_NO_VREG_STEPS;
1481 }
1482 
1483 int cypress_construct_voltage_tables(struct radeon_device *rdev)
1484 {
1485 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1486 	int ret;
1487 
1488 	ret = radeon_atom_get_voltage_table(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0,
1489 					    &eg_pi->vddc_voltage_table);
1490 	if (ret)
1491 		return ret;
1492 
1493 	if (eg_pi->vddc_voltage_table.count > MAX_NO_VREG_STEPS)
1494 		cypress_trim_voltage_table_to_fit_state_table(rdev,
1495 							      &eg_pi->vddc_voltage_table);
1496 
1497 	if (eg_pi->vddci_control) {
1498 		ret = radeon_atom_get_voltage_table(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0,
1499 						    &eg_pi->vddci_voltage_table);
1500 		if (ret)
1501 			return ret;
1502 
1503 		if (eg_pi->vddci_voltage_table.count > MAX_NO_VREG_STEPS)
1504 			cypress_trim_voltage_table_to_fit_state_table(rdev,
1505 								      &eg_pi->vddci_voltage_table);
1506 	}
1507 
1508 	return 0;
1509 }
1510 
1511 static void cypress_populate_smc_voltage_table(struct radeon_device *rdev,
1512 					       struct atom_voltage_table *voltage_table,
1513 					       RV770_SMC_STATETABLE *table)
1514 {
1515 	unsigned int i;
1516 
1517 	for (i = 0; i < voltage_table->count; i++) {
1518 		table->highSMIO[i] = 0;
1519 		table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
1520 	}
1521 }
1522 
1523 int cypress_populate_smc_voltage_tables(struct radeon_device *rdev,
1524 					RV770_SMC_STATETABLE *table)
1525 {
1526 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1527 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1528 	unsigned char i;
1529 
1530 	if (eg_pi->vddc_voltage_table.count) {
1531 		cypress_populate_smc_voltage_table(rdev,
1532 						   &eg_pi->vddc_voltage_table,
1533 						   table);
1534 
1535 		table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDC] = 0;
1536 		table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDC] =
1537 			cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1538 
1539 		for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
1540 			if (pi->max_vddc_in_table <=
1541 			    eg_pi->vddc_voltage_table.entries[i].value) {
1542 				table->maxVDDCIndexInPPTable = i;
1543 				break;
1544 			}
1545 		}
1546 	}
1547 
1548 	if (eg_pi->vddci_voltage_table.count) {
1549 		cypress_populate_smc_voltage_table(rdev,
1550 						   &eg_pi->vddci_voltage_table,
1551 						   table);
1552 
1553 		table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDCI] = 0;
1554 		table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDCI] =
1555 			cpu_to_be32(eg_pi->vddci_voltage_table.mask_low);
1556 	}
1557 
1558 	return 0;
1559 }
1560 
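/*
 * MVDD split point: 30000 (300 MHz in 10 kHz units) for GDDR3/DDR3,
 * otherwise 0, which makes the caller disable MVDD control.
 */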
1561 static u32 cypress_get_mclk_split_point(struct atom_memory_info *memory_info)
1562 {
1563 	if ((memory_info->mem_type == MEM_TYPE_GDDR3) ||
1564 	    (memory_info->mem_type == MEM_TYPE_DDR3))
1565 		return 30000;
1566 
1567 	return 0;
1568 }
1569 
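/*
 * Derive the MVDD high/low indices from the BACKBIAS pad state and look
 * up the memory module to find the MCLK split frequency.  MVDD control
 * is disabled whenever one of the prerequisites is missing.
 */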
int cypress_get_mvdd_configuration(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u8 module_index;
	struct atom_memory_info memory_info;
	u32 tmp = RREG32(GENERAL_PWRMGT);

	if (!(tmp & BACKBIAS_PAD_EN)) {
		eg_pi->mvdd_high_index = 0;
		eg_pi->mvdd_low_index = 1;
		pi->mvdd_control = false;
		return 0;
	}

	if (tmp & BACKBIAS_VALUE)
		eg_pi->mvdd_high_index = 1;
	else
		eg_pi->mvdd_high_index = 0;

	eg_pi->mvdd_low_index =
		(eg_pi->mvdd_high_index == 0) ? 1 : 0;

	module_index = rv770_get_memory_module_index(rdev);

	if (radeon_atom_get_memory_info(rdev, module_index, &memory_info)) {
		pi->mvdd_control = false;
		return 0;
	}

	pi->mvdd_split_frequency =
		cypress_get_mclk_split_point(&memory_info);

	if (pi->mvdd_split_frequency == 0) {
		pi->mvdd_control = false;
		return 0;
	}

	return 0;
}

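/*
 * Build the initial SMC state table (voltage tables, thermal protection
 * type, platform flags, boot and ACPI states) and upload it to SMC SRAM.
 */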
static int cypress_init_smc_table(struct radeon_device *rdev,
				  struct radeon_ps *radeon_boot_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	RV770_SMC_STATETABLE *table = &pi->smc_statetable;
	int ret;

	memset(table, 0, sizeof(RV770_SMC_STATETABLE));

	cypress_populate_smc_voltage_tables(rdev, table);

	switch (rdev->pm.int_thermal_type) {
	case THERMAL_TYPE_EVERGREEN:
	case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
		break;
	case THERMAL_TYPE_NONE:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
		break;
	default:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
		break;
	}

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
		table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;

	if (pi->mem_gddr5)
		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;

	ret = cypress_populate_smc_initial_state(rdev, radeon_boot_state, table);
	if (ret)
		return ret;

	ret = cypress_populate_smc_acpi_state(rdev, table);
	if (ret)
		return ret;

	table->driverState = table->initialState;

	return rv770_copy_bytes_to_smc(rdev,
				       pi->state_table_start,
				       (u8 *)table, sizeof(RV770_SMC_STATETABLE),
				       pi->sram_end);
}

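/*
 * Convert the boot state and the driver's MC register table into the
 * SMC_Evergreen_MCRegisters layout and upload it to SMC SRAM.
 */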
int cypress_populate_mc_reg_table(struct radeon_device *rdev,
				  struct radeon_ps *radeon_boot_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
	SMC_Evergreen_MCRegisters mc_reg_table = { 0 };

	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_seq_index, 1);

	cypress_populate_mc_reg_addresses(rdev, &mc_reg_table);

	cypress_convert_mc_reg_table_entry_to_smc(rdev,
						  &boot_state->low,
						  &mc_reg_table.data[0]);

	cypress_convert_mc_registers(&eg_pi->mc_reg_table.mc_reg_table_entry[0],
				     &mc_reg_table.data[1], eg_pi->mc_reg_table.last,
				     eg_pi->mc_reg_table.valid_flag);

	cypress_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, &mc_reg_table);

	return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
				       (u8 *)&mc_reg_table, sizeof(SMC_Evergreen_MCRegisters),
				       pi->sram_end);
}

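/*
 * Read the SMC firmware header to locate the state table, the soft
 * registers and the MC register table in SMC SRAM.
 */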
int cypress_get_table_locations(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 tmp;
	int ret;

	ret = rv770_read_smc_sram_dword(rdev,
					EVERGREEN_SMC_FIRMWARE_HEADER_LOCATION +
					EVERGREEN_SMC_FIRMWARE_HEADER_stateTable,
					&tmp, pi->sram_end);
	if (ret)
		return ret;

	pi->state_table_start = (u16)tmp;

	ret = rv770_read_smc_sram_dword(rdev,
					EVERGREEN_SMC_FIRMWARE_HEADER_LOCATION +
					EVERGREEN_SMC_FIRMWARE_HEADER_softRegisters,
					&tmp, pi->sram_end);
	if (ret)
		return ret;

	pi->soft_regs_start = (u16)tmp;

	ret = rv770_read_smc_sram_dword(rdev,
					EVERGREEN_SMC_FIRMWARE_HEADER_LOCATION +
					EVERGREEN_SMC_FIRMWARE_HEADER_mcRegisterTable,
					&tmp, pi->sram_end);
	if (ret)
		return ret;

	eg_pi->mc_reg_table_start = (u16)tmp;

	return 0;
}

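/*
 * Ignore the display gap for normal operation but restrict memory clock
 * changes (MCHG) to the DISP1 vblank.
 */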
void cypress_enable_display_gap(struct radeon_device *rdev)
{
	u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);

	tmp &= ~(DISP1_GAP_MASK | DISP2_GAP_MASK);
	tmp |= (DISP1_GAP(R600_PM_DISPLAY_GAP_IGNORE) |
		DISP2_GAP(R600_PM_DISPLAY_GAP_IGNORE));

	tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
	tmp |= (DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK) |
		DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE));
	WREG32(CG_DISPLAY_GAP_CNTL, tmp);
}

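/*
 * Reprogram the display gaps for the new set of active CRTCs, point
 * DCCG_DISP1_SLOW_SELECT at an active CRTC if needed, and tell the SMC
 * whether any display is active.
 */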
static void cypress_program_display_gap(struct radeon_device *rdev)
{
	u32 tmp, pipe;
	int i;

	tmp = RREG32(CG_DISPLAY_GAP_CNTL) & ~(DISP1_GAP_MASK | DISP2_GAP_MASK);
	if (rdev->pm.dpm.new_active_crtc_count > 0)
		tmp |= DISP1_GAP(R600_PM_DISPLAY_GAP_VBLANK_OR_WM);
	else
		tmp |= DISP1_GAP(R600_PM_DISPLAY_GAP_IGNORE);

	if (rdev->pm.dpm.new_active_crtc_count > 1)
		tmp |= DISP2_GAP(R600_PM_DISPLAY_GAP_VBLANK_OR_WM);
	else
		tmp |= DISP2_GAP(R600_PM_DISPLAY_GAP_IGNORE);

	WREG32(CG_DISPLAY_GAP_CNTL, tmp);

	tmp = RREG32(DCCG_DISP_SLOW_SELECT_REG);
	pipe = (tmp & DCCG_DISP1_SLOW_SELECT_MASK) >> DCCG_DISP1_SLOW_SELECT_SHIFT;

	if ((rdev->pm.dpm.new_active_crtc_count > 0) &&
	    (!(rdev->pm.dpm.new_active_crtcs & (1 << pipe)))) {
		/* find the first active crtc */
		for (i = 0; i < rdev->num_crtc; i++) {
			if (rdev->pm.dpm.new_active_crtcs & (1 << i))
				break;
		}
		if (i == rdev->num_crtc)
			pipe = 0;
		else
			pipe = i;

		tmp &= ~DCCG_DISP1_SLOW_SELECT_MASK;
		tmp |= DCCG_DISP1_SLOW_SELECT(pipe);
		WREG32(DCCG_DISP_SLOW_SELECT_REG, tmp);
	}

	cypress_notify_smc_display_change(rdev, rdev->pm.dpm.new_active_crtc_count > 0);
}

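/*
 * One-time setup before DPM enable: cache clock and voltage registers,
 * detect the memory type and PCIe gen2 status, advertise gen2 capability
 * when PCIe performance requests are supported, and enable ACPI power
 * management.
 */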
void cypress_dpm_setup_asic(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	rv740_read_clock_registers(rdev);
	rv770_read_voltage_smio_registers(rdev);
	rv770_get_max_vddc(rdev);
	rv770_get_memory_type(rdev);

	if (eg_pi->pcie_performance_request)
		eg_pi->pcie_performance_request_registered = false;

	if (eg_pi->pcie_performance_request)
		cypress_advertise_gen2_capability(rdev);

	rv770_get_pcie_gen2_status(rdev);

	rv770_enable_acpi_pm(rdev);
}

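/*
 * Bring up dynamic power management: build the voltage, MVDD and MC
 * register tables, program the DPM timing parameters, upload the SMC
 * firmware and state table, then start the SMC and enable sclk/mclk
 * control and clock gating.
 */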
int cypress_dpm_enable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
	int ret;

	if (pi->gfx_clock_gating)
		rv770_restore_cgcg(rdev);

	if (rv770_dpm_enabled(rdev))
		return -EINVAL;

	if (pi->voltage_control) {
		rv770_enable_voltage_control(rdev, true);
		ret = cypress_construct_voltage_tables(rdev);
		if (ret) {
			DRM_ERROR("cypress_construct_voltage_tables failed\n");
			return ret;
		}
	}

	if (pi->mvdd_control) {
		ret = cypress_get_mvdd_configuration(rdev);
		if (ret) {
			DRM_ERROR("cypress_get_mvdd_configuration failed\n");
			return ret;
		}
	}

	if (eg_pi->dynamic_ac_timing) {
		cypress_set_mc_reg_address_table(rdev);
		cypress_force_mc_use_s0(rdev, boot_ps);
		ret = cypress_initialize_mc_reg_table(rdev);
		if (ret)
			eg_pi->dynamic_ac_timing = false;
		cypress_force_mc_use_s1(rdev, boot_ps);
	}

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
		rv770_enable_backbias(rdev, true);

	if (pi->dynamic_ss)
		cypress_enable_spread_spectrum(rdev, true);

	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, true);

	rv770_setup_bsp(rdev);
	rv770_program_git(rdev);
	rv770_program_tp(rdev);
	rv770_program_tpp(rdev);
	rv770_program_sstp(rdev);
	rv770_program_engine_speed_parameters(rdev);
	cypress_enable_display_gap(rdev);
	rv770_program_vc(rdev);

	if (pi->dynamic_pcie_gen2)
		cypress_enable_dynamic_pcie_gen2(rdev, true);

	ret = rv770_upload_firmware(rdev);
	if (ret) {
		DRM_ERROR("rv770_upload_firmware failed\n");
		return ret;
	}

	ret = cypress_get_table_locations(rdev);
	if (ret) {
		DRM_ERROR("cypress_get_table_locations failed\n");
		return ret;
	}
	ret = cypress_init_smc_table(rdev, boot_ps);
	if (ret) {
		DRM_ERROR("cypress_init_smc_table failed\n");
		return ret;
	}
	if (eg_pi->dynamic_ac_timing) {
		ret = cypress_populate_mc_reg_table(rdev, boot_ps);
		if (ret) {
			DRM_ERROR("cypress_populate_mc_reg_table failed\n");
			return ret;
		}
	}

	cypress_program_response_times(rdev);

	r7xx_start_smc(rdev);

	ret = cypress_notify_smc_display_change(rdev, false);
	if (ret) {
		DRM_ERROR("cypress_notify_smc_display_change failed\n");
		return ret;
	}
	cypress_enable_sclk_control(rdev, true);

	if (eg_pi->memory_transition)
		cypress_enable_mclk_control(rdev, true);

	cypress_start_dpm(rdev);

	if (pi->gfx_clock_gating)
		cypress_gfx_clock_gating_enable(rdev, true);

	if (pi->mg_clock_gating)
		cypress_mg_clock_gating_enable(rdev, true);

	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);

	return 0;
}

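/*
 * Tear down DPM, roughly mirroring cypress_dpm_enable(): disable clock
 * gating, thermal protection and spread spectrum, stop DPM and the SMC,
 * and fall back to the boot MC register set.
 */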
void cypress_dpm_disable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;

	if (!rv770_dpm_enabled(rdev))
		return;

	rv770_clear_vc(rdev);

	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, false);

	if (pi->dynamic_pcie_gen2)
		cypress_enable_dynamic_pcie_gen2(rdev, false);

	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		rdev->irq.dpm_thermal = false;
		radeon_irq_set(rdev);
	}

	if (pi->gfx_clock_gating)
		cypress_gfx_clock_gating_enable(rdev, false);

	if (pi->mg_clock_gating)
		cypress_mg_clock_gating_enable(rdev, false);

	rv770_stop_dpm(rdev);
	r7xx_stop_smc(rdev);

	cypress_enable_spread_spectrum(rdev, false);

	if (eg_pi->dynamic_ac_timing)
		cypress_force_mc_use_s1(rdev, boot_ps);

	rv770_reset_smio_status(rdev);
}

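/*
 * Switch to the requested power state: halt the SMC, upload the new
 * software state (and MC register table when dynamic AC timing is
 * enabled), reprogram the memory timings, then resume the SMC and
 * commit the new state.
 */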
int cypress_dpm_set_power_state(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
	struct radeon_ps *old_ps = rdev->pm.dpm.current_ps;
	int ret;

	ret = rv770_restrict_performance_levels_before_switch(rdev);
	if (ret) {
		DRM_ERROR("rv770_restrict_performance_levels_before_switch failed\n");
		return ret;
	}
	if (eg_pi->pcie_performance_request)
		cypress_notify_link_speed_change_before_state_change(rdev, new_ps, old_ps);

	rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
	ret = rv770_halt_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_halt_smc failed\n");
		return ret;
	}
	ret = cypress_upload_sw_state(rdev, new_ps);
	if (ret) {
		DRM_ERROR("cypress_upload_sw_state failed\n");
		return ret;
	}
	if (eg_pi->dynamic_ac_timing) {
		ret = cypress_upload_mc_reg_table(rdev, new_ps);
		if (ret) {
			DRM_ERROR("cypress_upload_mc_reg_table failed\n");
			return ret;
		}
	}

	cypress_program_memory_timing_parameters(rdev, new_ps);

	ret = rv770_resume_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_resume_smc failed\n");
		return ret;
	}
	ret = rv770_set_sw_state(rdev);
	if (ret) {
		DRM_ERROR("rv770_set_sw_state failed\n");
		return ret;
	}
	rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);

	if (eg_pi->pcie_performance_request)
		cypress_notify_link_speed_change_after_state_change(rdev, new_ps, old_ps);

	return 0;
}

#if 0
void cypress_dpm_reset_asic(struct radeon_device *rdev)
{
	rv770_restrict_performance_levels_before_switch(rdev);
	rv770_set_boot_state(rdev);
}
#endif

void cypress_dpm_display_configuration_changed(struct radeon_device *rdev)
{
	cypress_program_display_gap(rdev);
}

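/*
 * Allocate the power-management context, parse the ATOM power table and
 * fill in the default DPM parameters and feature flags for this chip
 * family.
 */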
int cypress_dpm_init(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi;
	struct evergreen_power_info *eg_pi;
	struct atom_clock_dividers dividers;
	int ret;

	eg_pi = kzalloc(sizeof(struct evergreen_power_info), GFP_KERNEL);
	if (eg_pi == NULL)
		return -ENOMEM;
	rdev->pm.dpm.priv = eg_pi;
	pi = &eg_pi->rv7xx;

	rv770_get_max_vddc(rdev);

	eg_pi->ulv.supported = false;
	pi->acpi_vddc = 0;
	eg_pi->acpi_vddci = 0;
	pi->min_vddc_in_table = 0;
	pi->max_vddc_in_table = 0;

	ret = r600_get_platform_caps(rdev);
	if (ret)
		return ret;

	ret = rv7xx_parse_power_table(rdev);
	if (ret)
		return ret;

	if (rdev->pm.dpm.voltage_response_time == 0)
		rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
	if (rdev->pm.dpm.backbias_response_time == 0)
		rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					     0, false, &dividers);
	if (ret)
		pi->ref_div = dividers.ref_div + 1;
	else
		pi->ref_div = R600_REFERENCEDIVIDER_DFLT;

	pi->mclk_strobe_mode_threshold = 40000;
	pi->mclk_edc_enable_threshold = 40000;
	eg_pi->mclk_edc_wr_enable_threshold = 40000;

	pi->rlp = RV770_RLP_DFLT;
	pi->rmp = RV770_RMP_DFLT;
	pi->lhp = RV770_LHP_DFLT;
	pi->lmp = RV770_LMP_DFLT;

	pi->voltage_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);

	pi->mvdd_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);

	eg_pi->vddci_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);

	rv770_get_engine_memory_ss(rdev);

	pi->asi = RV770_ASI_DFLT;
	pi->pasi = CYPRESS_HASI_DFLT;
	pi->vrc = CYPRESS_VRC_DFLT;

	pi->power_gating = false;

	if ((rdev->family == CHIP_CYPRESS) ||
	    (rdev->family == CHIP_HEMLOCK))
		pi->gfx_clock_gating = false;
	else
		pi->gfx_clock_gating = true;

	pi->mg_clock_gating = true;
	pi->mgcgtssm = true;
	eg_pi->ls_clock_gating = false;
	eg_pi->sclk_deep_sleep = false;

	pi->dynamic_pcie_gen2 = true;

	if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
		pi->thermal_protection = true;
	else
		pi->thermal_protection = false;

	pi->display_gap = true;

	if (rdev->flags & RADEON_IS_MOBILITY)
		pi->dcodt = true;
	else
		pi->dcodt = false;

	pi->ulps = true;

	eg_pi->dynamic_ac_timing = true;
	eg_pi->abm = true;
	eg_pi->mcls = true;
	eg_pi->light_sleep = true;
	eg_pi->memory_transition = true;
#if defined(CONFIG_ACPI)
	eg_pi->pcie_performance_request =
		radeon_acpi_is_pcie_performance_request_supported(rdev);
#else
	eg_pi->pcie_performance_request = false;
#endif

	if ((rdev->family == CHIP_CYPRESS) ||
	    (rdev->family == CHIP_HEMLOCK) ||
	    (rdev->family == CHIP_JUNIPER))
		eg_pi->dll_default_on = true;
	else
		eg_pi->dll_default_on = false;

	eg_pi->sclk_deep_sleep = false;
	pi->mclk_stutter_mode_threshold = 0;

	pi->sram_end = SMC_RAM_END;

	return 0;
}

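/* Free the per-state private data and the power-management context. */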
void cypress_dpm_fini(struct radeon_device *rdev)
{
	int i;

	for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
		kfree(rdev->pm.dpm.ps[i].ps_priv);
	}
	kfree(rdev->pm.dpm.ps);
	kfree(rdev->pm.dpm.priv);
}

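/*
 * Report whether the current vblank is too short to hide an MCLK switch.
 * The 450 limit (presumably microseconds) only applies to GDDR5 boards.
 */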
bool cypress_dpm_vblank_too_short(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 vblank_time = r600_dpm_get_vblank_time(rdev);
	/* we never hit the non-gddr5 limit so disable it */
	u32 switch_limit = pi->mem_gddr5 ? 450 : 0;

	if (vblank_time < switch_limit)
		return true;
	else
		return false;
}