xref: /dragonfly/sys/dev/drm/radeon/cypress_dpm.c (revision 57e252bf)
1*57e252bfSMichael Neumann /*
2*57e252bfSMichael Neumann  * Copyright 2011 Advanced Micro Devices, Inc.
3*57e252bfSMichael Neumann  *
4*57e252bfSMichael Neumann  * Permission is hereby granted, free of charge, to any person obtaining a
5*57e252bfSMichael Neumann  * copy of this software and associated documentation files (the "Software"),
6*57e252bfSMichael Neumann  * to deal in the Software without restriction, including without limitation
7*57e252bfSMichael Neumann  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8*57e252bfSMichael Neumann  * and/or sell copies of the Software, and to permit persons to whom the
9*57e252bfSMichael Neumann  * Software is furnished to do so, subject to the following conditions:
10*57e252bfSMichael Neumann  *
11*57e252bfSMichael Neumann  * The above copyright notice and this permission notice shall be included in
12*57e252bfSMichael Neumann  * all copies or substantial portions of the Software.
13*57e252bfSMichael Neumann  *
14*57e252bfSMichael Neumann  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15*57e252bfSMichael Neumann  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16*57e252bfSMichael Neumann  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17*57e252bfSMichael Neumann  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18*57e252bfSMichael Neumann  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19*57e252bfSMichael Neumann  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20*57e252bfSMichael Neumann  * OTHER DEALINGS IN THE SOFTWARE.
21*57e252bfSMichael Neumann  *
22*57e252bfSMichael Neumann  * Authors: Alex Deucher
23*57e252bfSMichael Neumann  */
24*57e252bfSMichael Neumann 
25*57e252bfSMichael Neumann #include <drm/drmP.h>
26*57e252bfSMichael Neumann #include "radeon.h"
27*57e252bfSMichael Neumann #include "evergreend.h"
28*57e252bfSMichael Neumann #include "r600_dpm.h"
29*57e252bfSMichael Neumann #include "cypress_dpm.h"
30*57e252bfSMichael Neumann #include "atom.h"
31*57e252bfSMichael Neumann #include "radeon_asic.h"
32*57e252bfSMichael Neumann 
33*57e252bfSMichael Neumann #define SMC_RAM_END 0x8000
34*57e252bfSMichael Neumann 
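/*
 * Note: MC_CG_ARB_FREQ_F1..F3 are the memory-controller arbiter register
 * sets assigned to the low/medium/high performance levels in
 * cypress_convert_power_state_to_smc() below.  The MC_CG_SEQ_* values
 * appear to be MC sequencer commands for DRAM reconfiguration and
 * yclk suspend/resume.
 */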
35*57e252bfSMichael Neumann #define MC_CG_ARB_FREQ_F0           0x0a
36*57e252bfSMichael Neumann #define MC_CG_ARB_FREQ_F1           0x0b
37*57e252bfSMichael Neumann #define MC_CG_ARB_FREQ_F2           0x0c
38*57e252bfSMichael Neumann #define MC_CG_ARB_FREQ_F3           0x0d
39*57e252bfSMichael Neumann 
40*57e252bfSMichael Neumann #define MC_CG_SEQ_DRAMCONF_S0       0x05
41*57e252bfSMichael Neumann #define MC_CG_SEQ_DRAMCONF_S1       0x06
42*57e252bfSMichael Neumann #define MC_CG_SEQ_YCLK_SUSPEND      0x04
43*57e252bfSMichael Neumann #define MC_CG_SEQ_YCLK_RESUME       0x0a
44*57e252bfSMichael Neumann 
45*57e252bfSMichael Neumann struct rv7xx_ps *rv770_get_ps(struct radeon_ps *rps);
46*57e252bfSMichael Neumann struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
47*57e252bfSMichael Neumann struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
48*57e252bfSMichael Neumann void cypress_dpm_reset_asic(struct radeon_device *rdev);
49*57e252bfSMichael Neumann 
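/*
 * Configure the bus interface (BIF) for dynamic PCIe gen2 speed switching.
 * The gen2 strap and hardware voltage control are only touched when the
 * link partner has advertised (and ever sent) gen2 and the board did not
 * already boot in gen2 mode.
 */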
50*57e252bfSMichael Neumann static void cypress_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
51*57e252bfSMichael Neumann 						 bool enable)
52*57e252bfSMichael Neumann {
53*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
54*57e252bfSMichael Neumann 	u32 tmp, bif;
55*57e252bfSMichael Neumann 
56*57e252bfSMichael Neumann 	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
57*57e252bfSMichael Neumann 	if (enable) {
58*57e252bfSMichael Neumann 		if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
59*57e252bfSMichael Neumann 		    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
60*57e252bfSMichael Neumann 			if (!pi->boot_in_gen2) {
61*57e252bfSMichael Neumann 				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
62*57e252bfSMichael Neumann 				bif |= CG_CLIENT_REQ(0xd);
63*57e252bfSMichael Neumann 				WREG32(CG_BIF_REQ_AND_RSP, bif);
64*57e252bfSMichael Neumann 
65*57e252bfSMichael Neumann 				tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
66*57e252bfSMichael Neumann 				tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
67*57e252bfSMichael Neumann 				tmp |= LC_GEN2_EN_STRAP;
68*57e252bfSMichael Neumann 
69*57e252bfSMichael Neumann 				tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
70*57e252bfSMichael Neumann 				WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
71*57e252bfSMichael Neumann 				DRM_UDELAY(10);
72*57e252bfSMichael Neumann 				tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
73*57e252bfSMichael Neumann 				WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
74*57e252bfSMichael Neumann 			}
75*57e252bfSMichael Neumann 		}
76*57e252bfSMichael Neumann 	} else {
77*57e252bfSMichael Neumann 		if (!pi->boot_in_gen2) {
78*57e252bfSMichael Neumann 			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
79*57e252bfSMichael Neumann 			tmp &= ~LC_GEN2_EN_STRAP;
80*57e252bfSMichael Neumann 		}
81*57e252bfSMichael Neumann 		if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
82*57e252bfSMichael Neumann 		    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
83*57e252bfSMichael Neumann 			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
84*57e252bfSMichael Neumann 	}
85*57e252bfSMichael Neumann }
86*57e252bfSMichael Neumann 
87*57e252bfSMichael Neumann static void cypress_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
88*57e252bfSMichael Neumann 					     bool enable)
89*57e252bfSMichael Neumann {
90*57e252bfSMichael Neumann 	cypress_enable_bif_dynamic_pcie_gen2(rdev, enable);
91*57e252bfSMichael Neumann 
92*57e252bfSMichael Neumann 	if (enable)
93*57e252bfSMichael Neumann 		WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
94*57e252bfSMichael Neumann 	else
95*57e252bfSMichael Neumann 		WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
96*57e252bfSMichael Neumann }
97*57e252bfSMichael Neumann 
98*57e252bfSMichael Neumann #if 0
99*57e252bfSMichael Neumann static int cypress_enter_ulp_state(struct radeon_device *rdev)
100*57e252bfSMichael Neumann {
101*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
102*57e252bfSMichael Neumann 
103*57e252bfSMichael Neumann 	if (pi->gfx_clock_gating) {
104*57e252bfSMichael Neumann 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
105*57e252bfSMichael Neumann 		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
106*57e252bfSMichael Neumann 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
107*57e252bfSMichael Neumann 
108*57e252bfSMichael Neumann 		RREG32(GB_ADDR_CONFIG);
109*57e252bfSMichael Neumann 	}
110*57e252bfSMichael Neumann 
111*57e252bfSMichael Neumann 	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
112*57e252bfSMichael Neumann 		 ~HOST_SMC_MSG_MASK);
113*57e252bfSMichael Neumann 
114*57e252bfSMichael Neumann 	DRM_UDELAY(7000);
115*57e252bfSMichael Neumann 
116*57e252bfSMichael Neumann 	return 0;
117*57e252bfSMichael Neumann }
118*57e252bfSMichael Neumann #endif
119*57e252bfSMichael Neumann 
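/*
 * Enable/disable dynamic graphics clock gating.  When light sleep is
 * supported, the CG_CGLS_TILE_0..11 registers are written (0xFFFFFFFF to
 * arm light sleep, 0 to disarm) with GRBM_GFX_INDEX set to 0xC0000000,
 * presumably selecting broadcast writes to all shader engines/instances.
 * On disable, GFX_CLK_FORCE_ON is pulsed and GB_ADDR_CONFIG is read back,
 * presumably as a posting flush.
 */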
120*57e252bfSMichael Neumann static void cypress_gfx_clock_gating_enable(struct radeon_device *rdev,
121*57e252bfSMichael Neumann 					    bool enable)
122*57e252bfSMichael Neumann {
123*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
124*57e252bfSMichael Neumann 
125*57e252bfSMichael Neumann 	if (enable) {
126*57e252bfSMichael Neumann 		if (eg_pi->light_sleep) {
127*57e252bfSMichael Neumann 			WREG32(GRBM_GFX_INDEX, 0xC0000000);
128*57e252bfSMichael Neumann 
129*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_0, 0xFFFFFFFF);
130*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_1, 0xFFFFFFFF);
131*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_2, 0xFFFFFFFF);
132*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_3, 0xFFFFFFFF);
133*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_4, 0xFFFFFFFF);
134*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_5, 0xFFFFFFFF);
135*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_6, 0xFFFFFFFF);
136*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_7, 0xFFFFFFFF);
137*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_8, 0xFFFFFFFF);
138*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_9, 0xFFFFFFFF);
139*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_10, 0xFFFFFFFF);
140*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_11, 0xFFFFFFFF);
141*57e252bfSMichael Neumann 
142*57e252bfSMichael Neumann 			WREG32_P(SCLK_PWRMGT_CNTL, DYN_LIGHT_SLEEP_EN, ~DYN_LIGHT_SLEEP_EN);
143*57e252bfSMichael Neumann 		}
144*57e252bfSMichael Neumann 		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
145*57e252bfSMichael Neumann 	} else {
146*57e252bfSMichael Neumann 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
147*57e252bfSMichael Neumann 		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
148*57e252bfSMichael Neumann 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
149*57e252bfSMichael Neumann 		RREG32(GB_ADDR_CONFIG);
150*57e252bfSMichael Neumann 
151*57e252bfSMichael Neumann 		if (eg_pi->light_sleep) {
152*57e252bfSMichael Neumann 			WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_LIGHT_SLEEP_EN);
153*57e252bfSMichael Neumann 
154*57e252bfSMichael Neumann 			WREG32(GRBM_GFX_INDEX, 0xC0000000);
155*57e252bfSMichael Neumann 
156*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_0, 0);
157*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_1, 0);
158*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_2, 0);
159*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_3, 0);
160*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_4, 0);
161*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_5, 0);
162*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_6, 0);
163*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_7, 0);
164*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_8, 0);
165*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_9, 0);
166*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_10, 0);
167*57e252bfSMichael Neumann 			WREG32_CG(CG_CGLS_TILE_11, 0);
168*57e252bfSMichael Neumann 		}
169*57e252bfSMichael Neumann 	}
170*57e252bfSMichael Neumann }
171*57e252bfSMichael Neumann 
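/*
 * Enable/disable medium-grain clock gating.  The CGTS shader-module
 * default differs per family (Cedar/Redwood/Cypress), and memory light
 * sleep (mcls) additionally gates the MC CITF/HUB/XPB and VM L2 clients.
 */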
172*57e252bfSMichael Neumann static void cypress_mg_clock_gating_enable(struct radeon_device *rdev,
173*57e252bfSMichael Neumann 					   bool enable)
174*57e252bfSMichael Neumann {
175*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
176*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
177*57e252bfSMichael Neumann 
178*57e252bfSMichael Neumann 	if (enable) {
179*57e252bfSMichael Neumann 		u32 cgts_sm_ctrl_reg;
180*57e252bfSMichael Neumann 
181*57e252bfSMichael Neumann 		if (rdev->family == CHIP_CEDAR)
182*57e252bfSMichael Neumann 			cgts_sm_ctrl_reg = CEDAR_MGCGCGTSSMCTRL_DFLT;
183*57e252bfSMichael Neumann 		else if (rdev->family == CHIP_REDWOOD)
184*57e252bfSMichael Neumann 			cgts_sm_ctrl_reg = REDWOOD_MGCGCGTSSMCTRL_DFLT;
185*57e252bfSMichael Neumann 		else
186*57e252bfSMichael Neumann 			cgts_sm_ctrl_reg = CYPRESS_MGCGCGTSSMCTRL_DFLT;
187*57e252bfSMichael Neumann 
188*57e252bfSMichael Neumann 		WREG32(GRBM_GFX_INDEX, 0xC0000000);
189*57e252bfSMichael Neumann 
190*57e252bfSMichael Neumann 		WREG32_CG(CG_CGTT_LOCAL_0, CYPRESS_MGCGTTLOCAL0_DFLT);
191*57e252bfSMichael Neumann 		WREG32_CG(CG_CGTT_LOCAL_1, CYPRESS_MGCGTTLOCAL1_DFLT & 0xFFFFCFFF);
192*57e252bfSMichael Neumann 		WREG32_CG(CG_CGTT_LOCAL_2, CYPRESS_MGCGTTLOCAL2_DFLT);
193*57e252bfSMichael Neumann 		WREG32_CG(CG_CGTT_LOCAL_3, CYPRESS_MGCGTTLOCAL3_DFLT);
194*57e252bfSMichael Neumann 
195*57e252bfSMichael Neumann 		if (pi->mgcgtssm)
196*57e252bfSMichael Neumann 			WREG32(CGTS_SM_CTRL_REG, cgts_sm_ctrl_reg);
197*57e252bfSMichael Neumann 
198*57e252bfSMichael Neumann 		if (eg_pi->mcls) {
199*57e252bfSMichael Neumann 			WREG32_P(MC_CITF_MISC_RD_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
200*57e252bfSMichael Neumann 			WREG32_P(MC_CITF_MISC_WR_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
201*57e252bfSMichael Neumann 			WREG32_P(MC_CITF_MISC_VM_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
202*57e252bfSMichael Neumann 			WREG32_P(MC_HUB_MISC_HUB_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
203*57e252bfSMichael Neumann 			WREG32_P(MC_HUB_MISC_VM_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
204*57e252bfSMichael Neumann 			WREG32_P(MC_HUB_MISC_SIP_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
205*57e252bfSMichael Neumann 			WREG32_P(MC_XPB_CLK_GAT, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
206*57e252bfSMichael Neumann 			WREG32_P(VM_L2_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
207*57e252bfSMichael Neumann 		}
208*57e252bfSMichael Neumann 	} else {
209*57e252bfSMichael Neumann 		WREG32(GRBM_GFX_INDEX, 0xC0000000);
210*57e252bfSMichael Neumann 
211*57e252bfSMichael Neumann 		WREG32_CG(CG_CGTT_LOCAL_0, 0xFFFFFFFF);
212*57e252bfSMichael Neumann 		WREG32_CG(CG_CGTT_LOCAL_1, 0xFFFFFFFF);
213*57e252bfSMichael Neumann 		WREG32_CG(CG_CGTT_LOCAL_2, 0xFFFFFFFF);
214*57e252bfSMichael Neumann 		WREG32_CG(CG_CGTT_LOCAL_3, 0xFFFFFFFF);
215*57e252bfSMichael Neumann 
216*57e252bfSMichael Neumann 		if (pi->mgcgtssm)
217*57e252bfSMichael Neumann 			WREG32(CGTS_SM_CTRL_REG, 0x81f44bc0);
218*57e252bfSMichael Neumann 	}
219*57e252bfSMichael Neumann }
220*57e252bfSMichael Neumann 
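/*
 * Spread spectrum control: engine-clock SS is gated by
 * DYN_SPREAD_SPECTRUM_EN in GENERAL_PWRMGT, memory-clock SS by SS_SSEN in
 * MPLL_CNTL_MODE; the disable path also clears the SPLL SSEN and
 * SS_DSMODE_EN bits.
 */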
221*57e252bfSMichael Neumann void cypress_enable_spread_spectrum(struct radeon_device *rdev,
222*57e252bfSMichael Neumann 				    bool enable)
223*57e252bfSMichael Neumann {
224*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
225*57e252bfSMichael Neumann 
226*57e252bfSMichael Neumann 	if (enable) {
227*57e252bfSMichael Neumann 		if (pi->sclk_ss)
228*57e252bfSMichael Neumann 			WREG32_P(GENERAL_PWRMGT, DYN_SPREAD_SPECTRUM_EN, ~DYN_SPREAD_SPECTRUM_EN);
229*57e252bfSMichael Neumann 
230*57e252bfSMichael Neumann 		if (pi->mclk_ss)
231*57e252bfSMichael Neumann 			WREG32_P(MPLL_CNTL_MODE, SS_SSEN, ~SS_SSEN);
232*57e252bfSMichael Neumann 	} else {
233*57e252bfSMichael Neumann 		WREG32_P(CG_SPLL_SPREAD_SPECTRUM, 0, ~SSEN);
234*57e252bfSMichael Neumann 		WREG32_P(GENERAL_PWRMGT, 0, ~DYN_SPREAD_SPECTRUM_EN);
235*57e252bfSMichael Neumann 		WREG32_P(MPLL_CNTL_MODE, 0, ~SS_SSEN);
236*57e252bfSMichael Neumann 		WREG32_P(MPLL_CNTL_MODE, 0, ~SS_DSMODE_EN);
237*57e252bfSMichael Neumann 	}
238*57e252bfSMichael Neumann }
239*57e252bfSMichael Neumann 
240*57e252bfSMichael Neumann void cypress_start_dpm(struct radeon_device *rdev)
241*57e252bfSMichael Neumann {
242*57e252bfSMichael Neumann 	WREG32_P(GENERAL_PWRMGT, GLOBAL_PWRMGT_EN, ~GLOBAL_PWRMGT_EN);
243*57e252bfSMichael Neumann }
244*57e252bfSMichael Neumann 
245*57e252bfSMichael Neumann void cypress_enable_sclk_control(struct radeon_device *rdev,
246*57e252bfSMichael Neumann 				 bool enable)
247*57e252bfSMichael Neumann {
248*57e252bfSMichael Neumann 	if (enable)
249*57e252bfSMichael Neumann 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~SCLK_PWRMGT_OFF);
250*57e252bfSMichael Neumann 	else
251*57e252bfSMichael Neumann 		WREG32_P(SCLK_PWRMGT_CNTL, SCLK_PWRMGT_OFF, ~SCLK_PWRMGT_OFF);
252*57e252bfSMichael Neumann }
253*57e252bfSMichael Neumann 
254*57e252bfSMichael Neumann void cypress_enable_mclk_control(struct radeon_device *rdev,
255*57e252bfSMichael Neumann 				 bool enable)
256*57e252bfSMichael Neumann {
257*57e252bfSMichael Neumann 	if (enable)
258*57e252bfSMichael Neumann 		WREG32_P(MCLK_PWRMGT_CNTL, 0, ~MPLL_PWRMGT_OFF);
259*57e252bfSMichael Neumann 	else
260*57e252bfSMichael Neumann 		WREG32_P(MCLK_PWRMGT_CNTL, MPLL_PWRMGT_OFF, ~MPLL_PWRMGT_OFF);
261*57e252bfSMichael Neumann }
262*57e252bfSMichael Neumann 
263*57e252bfSMichael Neumann int cypress_notify_smc_display_change(struct radeon_device *rdev,
264*57e252bfSMichael Neumann 				      bool has_display)
265*57e252bfSMichael Neumann {
266*57e252bfSMichael Neumann 	PPSMC_Msg msg = has_display ?
267*57e252bfSMichael Neumann 		(PPSMC_Msg)PPSMC_MSG_HasDisplay : (PPSMC_Msg)PPSMC_MSG_NoDisplay;
268*57e252bfSMichael Neumann 
269*57e252bfSMichael Neumann 	if (rv770_send_msg_to_smc(rdev, msg) != PPSMC_Result_OK)
270*57e252bfSMichael Neumann 		return -EINVAL;
271*57e252bfSMichael Neumann 
272*57e252bfSMichael Neumann 	return 0;
273*57e252bfSMichael Neumann }
274*57e252bfSMichael Neumann 
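/*
 * Program SMC soft registers with response/latency parameters.  The
 * memory-clock switch limit works out to 4.6 * xclk
 * (460 * reference_clock / 100), presumably in the same 10 kHz units
 * radeon uses for clocks elsewhere.
 */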
275*57e252bfSMichael Neumann void cypress_program_response_times(struct radeon_device *rdev)
276*57e252bfSMichael Neumann {
277*57e252bfSMichael Neumann 	u32 reference_clock;
278*57e252bfSMichael Neumann 	u32 mclk_switch_limit;
279*57e252bfSMichael Neumann 
280*57e252bfSMichael Neumann 	reference_clock = radeon_get_xclk(rdev);
281*57e252bfSMichael Neumann 	mclk_switch_limit = (460 * reference_clock) / 100;
282*57e252bfSMichael Neumann 
283*57e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev,
284*57e252bfSMichael Neumann 				      RV770_SMC_SOFT_REGISTER_mclk_switch_lim,
285*57e252bfSMichael Neumann 				      mclk_switch_limit);
286*57e252bfSMichael Neumann 
287*57e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev,
288*57e252bfSMichael Neumann 				      RV770_SMC_SOFT_REGISTER_mvdd_chg_time, 1);
289*57e252bfSMichael Neumann 
290*57e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev,
291*57e252bfSMichael Neumann 				      RV770_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
292*57e252bfSMichael Neumann 
293*57e252bfSMichael Neumann 	rv770_program_response_times(rdev);
294*57e252bfSMichael Neumann 
295*57e252bfSMichael Neumann 	if (ASIC_IS_LOMBOK(rdev))
296*57e252bfSMichael Neumann 		rv770_write_smc_soft_register(rdev,
297*57e252bfSMichael Neumann 					      RV770_SMC_SOFT_REGISTER_is_asic_lombok, 1);
298*57e252bfSMichael Neumann 
299*57e252bfSMichael Neumann }
300*57e252bfSMichael Neumann 
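/*
 * Forward a PCIe performance (speed) request to the platform.  A GEN1
 * request bails out early when LC_CURRENT_DATA_RATE is set; with
 * CONFIG_ACPI, GEN1/GEN2 requests are registered with the platform and
 * REMOVE_REGISTRY unregisters a previous request.  Without ACPI this is
 * effectively a no-op.
 */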
301*57e252bfSMichael Neumann static int cypress_pcie_performance_request(struct radeon_device *rdev,
302*57e252bfSMichael Neumann 					    u8 perf_req, bool advertise)
303*57e252bfSMichael Neumann {
304*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
305*57e252bfSMichael Neumann 	u32 tmp;
306*57e252bfSMichael Neumann 
307*57e252bfSMichael Neumann 	DRM_UDELAY(10);
308*57e252bfSMichael Neumann 	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
309*57e252bfSMichael Neumann 	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) && (tmp & LC_CURRENT_DATA_RATE))
310*57e252bfSMichael Neumann 		return 0;
311*57e252bfSMichael Neumann 
312*57e252bfSMichael Neumann #if defined(CONFIG_ACPI)
313*57e252bfSMichael Neumann 	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
314*57e252bfSMichael Neumann 	    (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
315*57e252bfSMichael Neumann 		eg_pi->pcie_performance_request_registered = true;
316*57e252bfSMichael Neumann 		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
317*57e252bfSMichael Neumann 	} else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
318*57e252bfSMichael Neumann 		   eg_pi->pcie_performance_request_registered) {
319*57e252bfSMichael Neumann 		eg_pi->pcie_performance_request_registered = false;
320*57e252bfSMichael Neumann 		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
321*57e252bfSMichael Neumann 	}
322*57e252bfSMichael Neumann #endif
323*57e252bfSMichael Neumann 
324*57e252bfSMichael Neumann 	return 0;
325*57e252bfSMichael Neumann }
326*57e252bfSMichael Neumann 
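/*
 * Record whether the link partner has advertised and used gen2
 * (pi->pcie_gen2); if not, issue a PECI_GEN2 performance request with
 * advertise = true, presumably so the platform advertises our gen2
 * capability.
 */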
327*57e252bfSMichael Neumann void cypress_advertise_gen2_capability(struct radeon_device *rdev)
328*57e252bfSMichael Neumann {
329*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
330*57e252bfSMichael Neumann 	u32 tmp;
331*57e252bfSMichael Neumann 
332*57e252bfSMichael Neumann #if defined(CONFIG_ACPI)
333*57e252bfSMichael Neumann 	radeon_acpi_pcie_notify_device_ready(rdev);
334*57e252bfSMichael Neumann #endif
335*57e252bfSMichael Neumann 
336*57e252bfSMichael Neumann 	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
337*57e252bfSMichael Neumann 
338*57e252bfSMichael Neumann 	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
339*57e252bfSMichael Neumann 	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
340*57e252bfSMichael Neumann 		pi->pcie_gen2 = true;
341*57e252bfSMichael Neumann 	else
342*57e252bfSMichael Neumann 		pi->pcie_gen2 = false;
343*57e252bfSMichael Neumann 
344*57e252bfSMichael Neumann 	if (!pi->pcie_gen2)
345*57e252bfSMichael Neumann 		cypress_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
346*57e252bfSMichael Neumann 
347*57e252bfSMichael Neumann }
348*57e252bfSMichael Neumann 
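/*
 * Report the maximum PCIe speed a state asks for: 1 when the high level
 * sets ATOM_PPLIB_R600_FLAGS_PCIEGEN2, otherwise 0.  These presumably
 * correspond to RADEON_PCIE_GEN2/GEN1, which is how the callers below
 * compare the result.
 */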
349*57e252bfSMichael Neumann static enum radeon_pcie_gen cypress_get_maximum_link_speed(struct radeon_ps *radeon_state)
350*57e252bfSMichael Neumann {
351*57e252bfSMichael Neumann 	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
352*57e252bfSMichael Neumann 
353*57e252bfSMichael Neumann 	if (state->high.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
354*57e252bfSMichael Neumann 		return 1;
355*57e252bfSMichael Neumann 	return 0;
356*57e252bfSMichael Neumann }
357*57e252bfSMichael Neumann 
358*57e252bfSMichael Neumann void cypress_notify_link_speed_change_after_state_change(struct radeon_device *rdev,
359*57e252bfSMichael Neumann 							 struct radeon_ps *radeon_new_state,
360*57e252bfSMichael Neumann 							 struct radeon_ps *radeon_current_state)
361*57e252bfSMichael Neumann {
362*57e252bfSMichael Neumann 	enum radeon_pcie_gen pcie_link_speed_target =
363*57e252bfSMichael Neumann 		cypress_get_maximum_link_speed(radeon_new_state);
364*57e252bfSMichael Neumann 	enum radeon_pcie_gen pcie_link_speed_current =
365*57e252bfSMichael Neumann 		cypress_get_maximum_link_speed(radeon_current_state);
366*57e252bfSMichael Neumann 	u8 request;
367*57e252bfSMichael Neumann 
368*57e252bfSMichael Neumann 	if (pcie_link_speed_target < pcie_link_speed_current) {
369*57e252bfSMichael Neumann 		if (pcie_link_speed_target == RADEON_PCIE_GEN1)
370*57e252bfSMichael Neumann 			request = PCIE_PERF_REQ_PECI_GEN1;
371*57e252bfSMichael Neumann 		else if (pcie_link_speed_target == RADEON_PCIE_GEN2)
372*57e252bfSMichael Neumann 			request = PCIE_PERF_REQ_PECI_GEN2;
373*57e252bfSMichael Neumann 		else
374*57e252bfSMichael Neumann 			request = PCIE_PERF_REQ_PECI_GEN3;
375*57e252bfSMichael Neumann 
376*57e252bfSMichael Neumann 		cypress_pcie_performance_request(rdev, request, false);
377*57e252bfSMichael Neumann 	}
378*57e252bfSMichael Neumann }
379*57e252bfSMichael Neumann 
380*57e252bfSMichael Neumann void cypress_notify_link_speed_change_before_state_change(struct radeon_device *rdev,
381*57e252bfSMichael Neumann 							  struct radeon_ps *radeon_new_state,
382*57e252bfSMichael Neumann 							  struct radeon_ps *radeon_current_state)
383*57e252bfSMichael Neumann {
384*57e252bfSMichael Neumann 	enum radeon_pcie_gen pcie_link_speed_target =
385*57e252bfSMichael Neumann 		cypress_get_maximum_link_speed(radeon_new_state);
386*57e252bfSMichael Neumann 	enum radeon_pcie_gen pcie_link_speed_current =
387*57e252bfSMichael Neumann 		cypress_get_maximum_link_speed(radeon_current_state);
388*57e252bfSMichael Neumann 	u8 request;
389*57e252bfSMichael Neumann 
390*57e252bfSMichael Neumann 	if (pcie_link_speed_target > pcie_link_speed_current) {
391*57e252bfSMichael Neumann 		if (pcie_link_speed_target == RADEON_PCIE_GEN1)
392*57e252bfSMichael Neumann 			request = PCIE_PERF_REQ_PECI_GEN1;
393*57e252bfSMichael Neumann 		else if (pcie_link_speed_target == RADEON_PCIE_GEN2)
394*57e252bfSMichael Neumann 			request = PCIE_PERF_REQ_PECI_GEN2;
395*57e252bfSMichael Neumann 		else
396*57e252bfSMichael Neumann 			request = PCIE_PERF_REQ_PECI_GEN3;
397*57e252bfSMichael Neumann 
398*57e252bfSMichael Neumann 		cypress_pcie_performance_request(rdev, request, false);
399*57e252bfSMichael Neumann 	}
400*57e252bfSMichael Neumann }
401*57e252bfSMichael Neumann 
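/*
 * Translate a requested voltage (presumably in mV) into an SMC
 * voltage-table entry: the first table entry whose value is >= the
 * request is used, and the value is stored big-endian for the SMC.
 * Returns -EINVAL if no entry is large enough.
 */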
402*57e252bfSMichael Neumann static int cypress_populate_voltage_value(struct radeon_device *rdev,
403*57e252bfSMichael Neumann 					  struct atom_voltage_table *table,
404*57e252bfSMichael Neumann 					  u16 value, RV770_SMC_VOLTAGE_VALUE *voltage)
405*57e252bfSMichael Neumann {
406*57e252bfSMichael Neumann 	unsigned int i;
407*57e252bfSMichael Neumann 
408*57e252bfSMichael Neumann 	for (i = 0; i < table->count; i++) {
409*57e252bfSMichael Neumann 		if (value <= table->entries[i].value) {
410*57e252bfSMichael Neumann 			voltage->index = (u8)i;
411*57e252bfSMichael Neumann 			voltage->value = cpu_to_be16(table->entries[i].value);
412*57e252bfSMichael Neumann 			break;
413*57e252bfSMichael Neumann 		}
414*57e252bfSMichael Neumann 	}
415*57e252bfSMichael Neumann 
416*57e252bfSMichael Neumann 	if (i == table->count)
417*57e252bfSMichael Neumann 		return -EINVAL;
418*57e252bfSMichael Neumann 
419*57e252bfSMichael Neumann 	return 0;
420*57e252bfSMichael Neumann }
421*57e252bfSMichael Neumann 
422*57e252bfSMichael Neumann u8 cypress_get_strobe_mode_settings(struct radeon_device *rdev, u32 mclk)
423*57e252bfSMichael Neumann {
424*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
425*57e252bfSMichael Neumann 	u8 result = 0;
426*57e252bfSMichael Neumann 	bool strobe_mode = false;
427*57e252bfSMichael Neumann 
428*57e252bfSMichael Neumann 	if (pi->mem_gddr5) {
429*57e252bfSMichael Neumann 		if (mclk <= pi->mclk_strobe_mode_threshold)
430*57e252bfSMichael Neumann 			strobe_mode = true;
431*57e252bfSMichael Neumann 		result = cypress_get_mclk_frequency_ratio(rdev, mclk, strobe_mode);
432*57e252bfSMichael Neumann 
433*57e252bfSMichael Neumann 		if (strobe_mode)
434*57e252bfSMichael Neumann 			result |= SMC_STROBE_ENABLE;
435*57e252bfSMichael Neumann 	}
436*57e252bfSMichael Neumann 
437*57e252bfSMichael Neumann 	return result;
438*57e252bfSMichael Neumann }
439*57e252bfSMichael Neumann 
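/*
 * Map the MPLL feedback divider (CLKF) to an IBIAS value (presumably the
 * PLL bias current) based on the resulting VCO frequency.  reference_freq
 * appears to be in 10 kHz units, so ref_clk == 10000 is the 100 MHz case
 * below; the fallback table covers the 27 MHz reference.
 */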
440*57e252bfSMichael Neumann u32 cypress_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf)
441*57e252bfSMichael Neumann {
442*57e252bfSMichael Neumann 	u32 ref_clk = rdev->clock.mpll.reference_freq;
443*57e252bfSMichael Neumann 	u32 vco = clkf * ref_clk;
444*57e252bfSMichael Neumann 
445*57e252bfSMichael Neumann 	/* 100 MHz ref clk */
446*57e252bfSMichael Neumann 	if (ref_clk == 10000) {
447*57e252bfSMichael Neumann 		if (vco > 500000)
448*57e252bfSMichael Neumann 			return 0xC6;
449*57e252bfSMichael Neumann 		if (vco > 400000)
450*57e252bfSMichael Neumann 			return 0x9D;
451*57e252bfSMichael Neumann 		if (vco > 330000)
452*57e252bfSMichael Neumann 			return 0x6C;
453*57e252bfSMichael Neumann 		if (vco > 250000)
454*57e252bfSMichael Neumann 			return 0x2B;
455*57e252bfSMichael Neumann 		if (vco > 160000)
456*57e252bfSMichael Neumann 			return 0x5B;
457*57e252bfSMichael Neumann 		if (vco > 120000)
458*57e252bfSMichael Neumann 			return 0x0A;
459*57e252bfSMichael Neumann 		return 0x4B;
460*57e252bfSMichael Neumann 	}
461*57e252bfSMichael Neumann 
462*57e252bfSMichael Neumann 	/* 27 MHz ref clk */
463*57e252bfSMichael Neumann 	if (vco > 250000)
464*57e252bfSMichael Neumann 		return 0x8B;
465*57e252bfSMichael Neumann 	if (vco > 200000)
466*57e252bfSMichael Neumann 		return 0xCC;
467*57e252bfSMichael Neumann 	if (vco > 150000)
468*57e252bfSMichael Neumann 		return 0x9B;
469*57e252bfSMichael Neumann 	return 0x6B;
470*57e252bfSMichael Neumann }
471*57e252bfSMichael Neumann 
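/*
 * Build the RV7xx-layout MPLL/MCLK register set for one memory clock:
 * dividers come from the ATOM COMPUTE_MEMORY_PLL_PARAM table (in
 * non-strobe mode, bit 27 of MC_SEQ_MISC7 forces post_div to 1), spread
 * spectrum (CLKS/CLKV) is folded in when enabled, and the memory DLL
 * power-down bits follow dll_state_on.  All values are stored big-endian
 * for the SMC.
 */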
472*57e252bfSMichael Neumann static int cypress_populate_mclk_value(struct radeon_device *rdev,
473*57e252bfSMichael Neumann 				       u32 engine_clock, u32 memory_clock,
474*57e252bfSMichael Neumann 				       RV7XX_SMC_MCLK_VALUE *mclk,
475*57e252bfSMichael Neumann 				       bool strobe_mode, bool dll_state_on)
476*57e252bfSMichael Neumann {
477*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
478*57e252bfSMichael Neumann 
479*57e252bfSMichael Neumann 	u32 mpll_ad_func_cntl =
480*57e252bfSMichael Neumann 		pi->clk_regs.rv770.mpll_ad_func_cntl;
481*57e252bfSMichael Neumann 	u32 mpll_ad_func_cntl_2 =
482*57e252bfSMichael Neumann 		pi->clk_regs.rv770.mpll_ad_func_cntl_2;
483*57e252bfSMichael Neumann 	u32 mpll_dq_func_cntl =
484*57e252bfSMichael Neumann 		pi->clk_regs.rv770.mpll_dq_func_cntl;
485*57e252bfSMichael Neumann 	u32 mpll_dq_func_cntl_2 =
486*57e252bfSMichael Neumann 		pi->clk_regs.rv770.mpll_dq_func_cntl_2;
487*57e252bfSMichael Neumann 	u32 mclk_pwrmgt_cntl =
488*57e252bfSMichael Neumann 		pi->clk_regs.rv770.mclk_pwrmgt_cntl;
489*57e252bfSMichael Neumann 	u32 dll_cntl =
490*57e252bfSMichael Neumann 		pi->clk_regs.rv770.dll_cntl;
491*57e252bfSMichael Neumann 	u32 mpll_ss1 = pi->clk_regs.rv770.mpll_ss1;
492*57e252bfSMichael Neumann 	u32 mpll_ss2 = pi->clk_regs.rv770.mpll_ss2;
493*57e252bfSMichael Neumann 	struct atom_clock_dividers dividers;
494*57e252bfSMichael Neumann 	u32 ibias;
495*57e252bfSMichael Neumann 	u32 dll_speed;
496*57e252bfSMichael Neumann 	int ret;
497*57e252bfSMichael Neumann 	u32 mc_seq_misc7;
498*57e252bfSMichael Neumann 
499*57e252bfSMichael Neumann 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
500*57e252bfSMichael Neumann 					     memory_clock, strobe_mode, &dividers);
501*57e252bfSMichael Neumann 	if (ret)
502*57e252bfSMichael Neumann 		return ret;
503*57e252bfSMichael Neumann 
504*57e252bfSMichael Neumann 	if (!strobe_mode) {
505*57e252bfSMichael Neumann 		mc_seq_misc7 = RREG32(MC_SEQ_MISC7);
506*57e252bfSMichael Neumann 
507*57e252bfSMichael Neumann 		if (mc_seq_misc7 & 0x8000000)
508*57e252bfSMichael Neumann 			dividers.post_div = 1;
509*57e252bfSMichael Neumann 	}
510*57e252bfSMichael Neumann 
511*57e252bfSMichael Neumann 	ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);
512*57e252bfSMichael Neumann 
513*57e252bfSMichael Neumann 	mpll_ad_func_cntl &= ~(CLKR_MASK |
514*57e252bfSMichael Neumann 			       YCLK_POST_DIV_MASK |
515*57e252bfSMichael Neumann 			       CLKF_MASK |
516*57e252bfSMichael Neumann 			       CLKFRAC_MASK |
517*57e252bfSMichael Neumann 			       IBIAS_MASK);
518*57e252bfSMichael Neumann 	mpll_ad_func_cntl |= CLKR(dividers.ref_div);
519*57e252bfSMichael Neumann 	mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
520*57e252bfSMichael Neumann 	mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
521*57e252bfSMichael Neumann 	mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
522*57e252bfSMichael Neumann 	mpll_ad_func_cntl |= IBIAS(ibias);
523*57e252bfSMichael Neumann 
524*57e252bfSMichael Neumann 	if (dividers.vco_mode)
525*57e252bfSMichael Neumann 		mpll_ad_func_cntl_2 |= VCO_MODE;
526*57e252bfSMichael Neumann 	else
527*57e252bfSMichael Neumann 		mpll_ad_func_cntl_2 &= ~VCO_MODE;
528*57e252bfSMichael Neumann 
529*57e252bfSMichael Neumann 	if (pi->mem_gddr5) {
530*57e252bfSMichael Neumann 		mpll_dq_func_cntl &= ~(CLKR_MASK |
531*57e252bfSMichael Neumann 				       YCLK_POST_DIV_MASK |
532*57e252bfSMichael Neumann 				       CLKF_MASK |
533*57e252bfSMichael Neumann 				       CLKFRAC_MASK |
534*57e252bfSMichael Neumann 				       IBIAS_MASK);
535*57e252bfSMichael Neumann 		mpll_dq_func_cntl |= CLKR(dividers.ref_div);
536*57e252bfSMichael Neumann 		mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
537*57e252bfSMichael Neumann 		mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
538*57e252bfSMichael Neumann 		mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
539*57e252bfSMichael Neumann 		mpll_dq_func_cntl |= IBIAS(ibias);
540*57e252bfSMichael Neumann 
541*57e252bfSMichael Neumann 		if (strobe_mode)
542*57e252bfSMichael Neumann 			mpll_dq_func_cntl &= ~PDNB;
543*57e252bfSMichael Neumann 		else
544*57e252bfSMichael Neumann 			mpll_dq_func_cntl |= PDNB;
545*57e252bfSMichael Neumann 
546*57e252bfSMichael Neumann 		if (dividers.vco_mode)
547*57e252bfSMichael Neumann 			mpll_dq_func_cntl_2 |= VCO_MODE;
548*57e252bfSMichael Neumann 		else
549*57e252bfSMichael Neumann 			mpll_dq_func_cntl_2 &= ~VCO_MODE;
550*57e252bfSMichael Neumann 	}
551*57e252bfSMichael Neumann 
552*57e252bfSMichael Neumann 	if (pi->mclk_ss) {
553*57e252bfSMichael Neumann 		struct radeon_atom_ss ss;
554*57e252bfSMichael Neumann 		u32 vco_freq = memory_clock * dividers.post_div;
555*57e252bfSMichael Neumann 
556*57e252bfSMichael Neumann 		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
557*57e252bfSMichael Neumann 						     ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
558*57e252bfSMichael Neumann 			u32 reference_clock = rdev->clock.mpll.reference_freq;
559*57e252bfSMichael Neumann 			u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
560*57e252bfSMichael Neumann 			u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
561*57e252bfSMichael Neumann 			u32 clk_v = ss.percentage *
562*57e252bfSMichael Neumann 				(0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);
563*57e252bfSMichael Neumann 
564*57e252bfSMichael Neumann 			mpll_ss1 &= ~CLKV_MASK;
565*57e252bfSMichael Neumann 			mpll_ss1 |= CLKV(clk_v);
566*57e252bfSMichael Neumann 
567*57e252bfSMichael Neumann 			mpll_ss2 &= ~CLKS_MASK;
568*57e252bfSMichael Neumann 			mpll_ss2 |= CLKS(clk_s);
569*57e252bfSMichael Neumann 		}
570*57e252bfSMichael Neumann 	}
571*57e252bfSMichael Neumann 
572*57e252bfSMichael Neumann 	dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
573*57e252bfSMichael Neumann 					memory_clock);
574*57e252bfSMichael Neumann 
575*57e252bfSMichael Neumann 	mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
576*57e252bfSMichael Neumann 	mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
577*57e252bfSMichael Neumann 	if (dll_state_on)
578*57e252bfSMichael Neumann 		mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
579*57e252bfSMichael Neumann 				     MRDCKA1_PDNB |
580*57e252bfSMichael Neumann 				     MRDCKB0_PDNB |
581*57e252bfSMichael Neumann 				     MRDCKB1_PDNB |
582*57e252bfSMichael Neumann 				     MRDCKC0_PDNB |
583*57e252bfSMichael Neumann 				     MRDCKC1_PDNB |
584*57e252bfSMichael Neumann 				     MRDCKD0_PDNB |
585*57e252bfSMichael Neumann 				     MRDCKD1_PDNB);
586*57e252bfSMichael Neumann 	else
587*57e252bfSMichael Neumann 		mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
588*57e252bfSMichael Neumann 				      MRDCKA1_PDNB |
589*57e252bfSMichael Neumann 				      MRDCKB0_PDNB |
590*57e252bfSMichael Neumann 				      MRDCKB1_PDNB |
591*57e252bfSMichael Neumann 				      MRDCKC0_PDNB |
592*57e252bfSMichael Neumann 				      MRDCKC1_PDNB |
593*57e252bfSMichael Neumann 				      MRDCKD0_PDNB |
594*57e252bfSMichael Neumann 				      MRDCKD1_PDNB);
595*57e252bfSMichael Neumann 
596*57e252bfSMichael Neumann 	mclk->mclk770.mclk_value = cpu_to_be32(memory_clock);
597*57e252bfSMichael Neumann 	mclk->mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
598*57e252bfSMichael Neumann 	mclk->mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
599*57e252bfSMichael Neumann 	mclk->mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
600*57e252bfSMichael Neumann 	mclk->mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
601*57e252bfSMichael Neumann 	mclk->mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
602*57e252bfSMichael Neumann 	mclk->mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
603*57e252bfSMichael Neumann 	mclk->mclk770.vMPLL_SS = cpu_to_be32(mpll_ss1);
604*57e252bfSMichael Neumann 	mclk->mclk770.vMPLL_SS2 = cpu_to_be32(mpll_ss2);
605*57e252bfSMichael Neumann 
606*57e252bfSMichael Neumann 	return 0;
607*57e252bfSMichael Neumann }
608*57e252bfSMichael Neumann 
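/*
 * Derive the 4-bit MC parameter index from the memory clock: strobe mode
 * uses 2500-unit steps starting at 10000, non-strobe uses 5000-unit steps
 * with different break points on BARTS and newer parts.  Presumably 10 kHz
 * units, i.e. 25 MHz / 50 MHz steps.
 */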
609*57e252bfSMichael Neumann u8 cypress_get_mclk_frequency_ratio(struct radeon_device *rdev,
610*57e252bfSMichael Neumann 				    u32 memory_clock, bool strobe_mode)
611*57e252bfSMichael Neumann {
612*57e252bfSMichael Neumann 	u8 mc_para_index;
613*57e252bfSMichael Neumann 
614*57e252bfSMichael Neumann 	if (rdev->family >= CHIP_BARTS) {
615*57e252bfSMichael Neumann 		if (strobe_mode) {
616*57e252bfSMichael Neumann 			if (memory_clock < 10000)
617*57e252bfSMichael Neumann 				mc_para_index = 0x00;
618*57e252bfSMichael Neumann 			else if (memory_clock > 47500)
619*57e252bfSMichael Neumann 				mc_para_index = 0x0f;
620*57e252bfSMichael Neumann 			else
621*57e252bfSMichael Neumann 				mc_para_index = (u8)((memory_clock - 10000) / 2500);
622*57e252bfSMichael Neumann 		} else {
623*57e252bfSMichael Neumann 			if (memory_clock < 65000)
624*57e252bfSMichael Neumann 				mc_para_index = 0x00;
625*57e252bfSMichael Neumann 			else if (memory_clock > 135000)
626*57e252bfSMichael Neumann 				mc_para_index = 0x0f;
627*57e252bfSMichael Neumann 			else
628*57e252bfSMichael Neumann 				mc_para_index = (u8)((memory_clock - 60000) / 5000);
629*57e252bfSMichael Neumann 		}
630*57e252bfSMichael Neumann 	} else {
631*57e252bfSMichael Neumann 		if (strobe_mode) {
632*57e252bfSMichael Neumann 			if (memory_clock < 10000)
633*57e252bfSMichael Neumann 				mc_para_index = 0x00;
634*57e252bfSMichael Neumann 			else if (memory_clock > 47500)
635*57e252bfSMichael Neumann 				mc_para_index = 0x0f;
636*57e252bfSMichael Neumann 			else
637*57e252bfSMichael Neumann 				mc_para_index = (u8)((memory_clock - 10000) / 2500);
638*57e252bfSMichael Neumann 		} else {
639*57e252bfSMichael Neumann 			if (memory_clock < 40000)
640*57e252bfSMichael Neumann 				mc_para_index = 0x00;
641*57e252bfSMichael Neumann 			else if (memory_clock > 115000)
642*57e252bfSMichael Neumann 				mc_para_index = 0x0f;
643*57e252bfSMichael Neumann 			else
644*57e252bfSMichael Neumann 				mc_para_index = (u8)((memory_clock - 40000) / 5000);
645*57e252bfSMichael Neumann 		}
646*57e252bfSMichael Neumann 	}
647*57e252bfSMichael Neumann 	return mc_para_index;
648*57e252bfSMichael Neumann }
649*57e252bfSMichael Neumann 
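/*
 * Select the MVDD (memory voltage) level for a given memory clock: the
 * low level at or below the split frequency, the high level otherwise,
 * and always the high level when MVDD control is not available.
 */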
650*57e252bfSMichael Neumann static int cypress_populate_mvdd_value(struct radeon_device *rdev,
651*57e252bfSMichael Neumann 				       u32 mclk,
652*57e252bfSMichael Neumann 				       RV770_SMC_VOLTAGE_VALUE *voltage)
653*57e252bfSMichael Neumann {
654*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
655*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
656*57e252bfSMichael Neumann 
657*57e252bfSMichael Neumann 	if (!pi->mvdd_control) {
658*57e252bfSMichael Neumann 		voltage->index = eg_pi->mvdd_high_index;
659*57e252bfSMichael Neumann 		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
660*57e252bfSMichael Neumann 		return 0;
661*57e252bfSMichael Neumann 	}
662*57e252bfSMichael Neumann 
663*57e252bfSMichael Neumann 	if (mclk <= pi->mvdd_split_frequency) {
664*57e252bfSMichael Neumann 		voltage->index = eg_pi->mvdd_low_index;
665*57e252bfSMichael Neumann 		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
666*57e252bfSMichael Neumann 	} else {
667*57e252bfSMichael Neumann 		voltage->index = eg_pi->mvdd_high_index;
668*57e252bfSMichael Neumann 		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
669*57e252bfSMichael Neumann 	}
670*57e252bfSMichael Neumann 
671*57e252bfSMichael Neumann 	return 0;
672*57e252bfSMichael Neumann }
673*57e252bfSMichael Neumann 
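/*
 * Convert one driver performance level (rv7xx_pl) into the SMC hardware
 * performance level: PCIe gen2 flags, the sclk/mclk register sets, strobe
 * and EDC memory flags, stutter mode, and the VDDC/VDDCI/MVDD voltage
 * entries.
 */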
674*57e252bfSMichael Neumann int cypress_convert_power_level_to_smc(struct radeon_device *rdev,
675*57e252bfSMichael Neumann 				       struct rv7xx_pl *pl,
676*57e252bfSMichael Neumann 				       RV770_SMC_HW_PERFORMANCE_LEVEL *level,
677*57e252bfSMichael Neumann 				       u8 watermark_level)
678*57e252bfSMichael Neumann {
679*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
680*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
681*57e252bfSMichael Neumann 	int ret;
682*57e252bfSMichael Neumann 	bool dll_state_on;
683*57e252bfSMichael Neumann 
684*57e252bfSMichael Neumann 	level->gen2PCIE = pi->pcie_gen2 ?
685*57e252bfSMichael Neumann 		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
686*57e252bfSMichael Neumann 	level->gen2XSP  = (pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0;
687*57e252bfSMichael Neumann 	level->backbias = (pl->flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? 1 : 0;
688*57e252bfSMichael Neumann 	level->displayWatermark = watermark_level;
689*57e252bfSMichael Neumann 
690*57e252bfSMichael Neumann 	ret = rv740_populate_sclk_value(rdev, pl->sclk, &level->sclk);
691*57e252bfSMichael Neumann 	if (ret)
692*57e252bfSMichael Neumann 		return ret;
693*57e252bfSMichael Neumann 
694*57e252bfSMichael Neumann 	level->mcFlags = 0;
695*57e252bfSMichael Neumann 	if (pi->mclk_stutter_mode_threshold &&
696*57e252bfSMichael Neumann 	    (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
697*57e252bfSMichael Neumann 	    !eg_pi->uvd_enabled) {
698*57e252bfSMichael Neumann 		level->mcFlags |= SMC_MC_STUTTER_EN;
699*57e252bfSMichael Neumann 		if (eg_pi->sclk_deep_sleep)
700*57e252bfSMichael Neumann 			level->stateFlags |= PPSMC_STATEFLAG_AUTO_PULSE_SKIP;
701*57e252bfSMichael Neumann 		else
702*57e252bfSMichael Neumann 			level->stateFlags &= ~PPSMC_STATEFLAG_AUTO_PULSE_SKIP;
703*57e252bfSMichael Neumann 	}
704*57e252bfSMichael Neumann 
705*57e252bfSMichael Neumann 	if (pi->mem_gddr5) {
706*57e252bfSMichael Neumann 		if (pl->mclk > pi->mclk_edc_enable_threshold)
707*57e252bfSMichael Neumann 			level->mcFlags |= SMC_MC_EDC_RD_FLAG;
708*57e252bfSMichael Neumann 
709*57e252bfSMichael Neumann 		if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
710*57e252bfSMichael Neumann 			level->mcFlags |= SMC_MC_EDC_WR_FLAG;
711*57e252bfSMichael Neumann 
712*57e252bfSMichael Neumann 		level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);
713*57e252bfSMichael Neumann 
714*57e252bfSMichael Neumann 		if (level->strobeMode & SMC_STROBE_ENABLE) {
715*57e252bfSMichael Neumann 			if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
716*57e252bfSMichael Neumann 			    ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
717*57e252bfSMichael Neumann 				dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
718*57e252bfSMichael Neumann 			else
719*57e252bfSMichael Neumann 				dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
720*57e252bfSMichael Neumann 		} else
721*57e252bfSMichael Neumann 			dll_state_on = eg_pi->dll_default_on;
722*57e252bfSMichael Neumann 
723*57e252bfSMichael Neumann 		ret = cypress_populate_mclk_value(rdev,
724*57e252bfSMichael Neumann 						  pl->sclk,
725*57e252bfSMichael Neumann 						  pl->mclk,
726*57e252bfSMichael Neumann 						  &level->mclk,
727*57e252bfSMichael Neumann 						  (level->strobeMode & SMC_STROBE_ENABLE) != 0,
728*57e252bfSMichael Neumann 						  dll_state_on);
729*57e252bfSMichael Neumann 	} else {
730*57e252bfSMichael Neumann 		ret = cypress_populate_mclk_value(rdev,
731*57e252bfSMichael Neumann 						  pl->sclk,
732*57e252bfSMichael Neumann 						  pl->mclk,
733*57e252bfSMichael Neumann 						  &level->mclk,
734*57e252bfSMichael Neumann 						  true,
735*57e252bfSMichael Neumann 						  true);
736*57e252bfSMichael Neumann 	}
737*57e252bfSMichael Neumann 	if (ret)
738*57e252bfSMichael Neumann 		return ret;
739*57e252bfSMichael Neumann 
740*57e252bfSMichael Neumann 	ret = cypress_populate_voltage_value(rdev,
741*57e252bfSMichael Neumann 					     &eg_pi->vddc_voltage_table,
742*57e252bfSMichael Neumann 					     pl->vddc,
743*57e252bfSMichael Neumann 					     &level->vddc);
744*57e252bfSMichael Neumann 	if (ret)
745*57e252bfSMichael Neumann 		return ret;
746*57e252bfSMichael Neumann 
747*57e252bfSMichael Neumann 	if (eg_pi->vddci_control) {
748*57e252bfSMichael Neumann 		ret = cypress_populate_voltage_value(rdev,
749*57e252bfSMichael Neumann 						     &eg_pi->vddci_voltage_table,
750*57e252bfSMichael Neumann 						     pl->vddci,
751*57e252bfSMichael Neumann 						     &level->vddci);
752*57e252bfSMichael Neumann 		if (ret)
753*57e252bfSMichael Neumann 			return ret;
754*57e252bfSMichael Neumann 	}
755*57e252bfSMichael Neumann 
756*57e252bfSMichael Neumann 	ret = cypress_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
757*57e252bfSMichael Neumann 
758*57e252bfSMichael Neumann 	return ret;
759*57e252bfSMichael Neumann }
760*57e252bfSMichael Neumann 
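/*
 * Convert a full power state (low/medium/high levels) into the SMC
 * software state.  The DC flag is set unless the state disallows battery
 * operation; the levels map to arbiter sets F1..F3 and, with dynamic AC
 * timing, to MC register sets 2..4 as uploaded by
 * cypress_upload_mc_reg_table().
 */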
761*57e252bfSMichael Neumann static int cypress_convert_power_state_to_smc(struct radeon_device *rdev,
762*57e252bfSMichael Neumann 					      struct radeon_ps *radeon_state,
763*57e252bfSMichael Neumann 					      RV770_SMC_SWSTATE *smc_state)
764*57e252bfSMichael Neumann {
765*57e252bfSMichael Neumann 	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
766*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
767*57e252bfSMichael Neumann 	int ret;
768*57e252bfSMichael Neumann 
769*57e252bfSMichael Neumann 	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
770*57e252bfSMichael Neumann 		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
771*57e252bfSMichael Neumann 
772*57e252bfSMichael Neumann 	ret = cypress_convert_power_level_to_smc(rdev,
773*57e252bfSMichael Neumann 						 &state->low,
774*57e252bfSMichael Neumann 						 &smc_state->levels[0],
775*57e252bfSMichael Neumann 						 PPSMC_DISPLAY_WATERMARK_LOW);
776*57e252bfSMichael Neumann 	if (ret)
777*57e252bfSMichael Neumann 		return ret;
778*57e252bfSMichael Neumann 
779*57e252bfSMichael Neumann 	ret = cypress_convert_power_level_to_smc(rdev,
780*57e252bfSMichael Neumann 						 &state->medium,
781*57e252bfSMichael Neumann 						 &smc_state->levels[1],
782*57e252bfSMichael Neumann 						 PPSMC_DISPLAY_WATERMARK_LOW);
783*57e252bfSMichael Neumann 	if (ret)
784*57e252bfSMichael Neumann 		return ret;
785*57e252bfSMichael Neumann 
786*57e252bfSMichael Neumann 	ret = cypress_convert_power_level_to_smc(rdev,
787*57e252bfSMichael Neumann 						 &state->high,
788*57e252bfSMichael Neumann 						 &smc_state->levels[2],
789*57e252bfSMichael Neumann 						 PPSMC_DISPLAY_WATERMARK_HIGH);
790*57e252bfSMichael Neumann 	if (ret)
791*57e252bfSMichael Neumann 		return ret;
792*57e252bfSMichael Neumann 
793*57e252bfSMichael Neumann 	smc_state->levels[0].arbValue = MC_CG_ARB_FREQ_F1;
794*57e252bfSMichael Neumann 	smc_state->levels[1].arbValue = MC_CG_ARB_FREQ_F2;
795*57e252bfSMichael Neumann 	smc_state->levels[2].arbValue = MC_CG_ARB_FREQ_F3;
796*57e252bfSMichael Neumann 
797*57e252bfSMichael Neumann 	if (eg_pi->dynamic_ac_timing) {
798*57e252bfSMichael Neumann 		smc_state->levels[0].ACIndex = 2;
799*57e252bfSMichael Neumann 		smc_state->levels[1].ACIndex = 3;
800*57e252bfSMichael Neumann 		smc_state->levels[2].ACIndex = 4;
801*57e252bfSMichael Neumann 	} else {
802*57e252bfSMichael Neumann 		smc_state->levels[0].ACIndex = 0;
803*57e252bfSMichael Neumann 		smc_state->levels[1].ACIndex = 0;
804*57e252bfSMichael Neumann 		smc_state->levels[2].ACIndex = 0;
805*57e252bfSMichael Neumann 	}
806*57e252bfSMichael Neumann 
807*57e252bfSMichael Neumann 	rv770_populate_smc_sp(rdev, radeon_state, smc_state);
808*57e252bfSMichael Neumann 
809*57e252bfSMichael Neumann 	return rv770_populate_smc_t(rdev, radeon_state, smc_state);
810*57e252bfSMichael Neumann }
811*57e252bfSMichael Neumann 
812*57e252bfSMichael Neumann static void cypress_convert_mc_registers(struct evergreen_mc_reg_entry *entry,
813*57e252bfSMichael Neumann 					 SMC_Evergreen_MCRegisterSet *data,
814*57e252bfSMichael Neumann 					 u32 num_entries, u32 valid_flag)
815*57e252bfSMichael Neumann {
816*57e252bfSMichael Neumann 	u32 i, j;
817*57e252bfSMichael Neumann 
818*57e252bfSMichael Neumann 	for (i = 0, j = 0; j < num_entries; j++) {
819*57e252bfSMichael Neumann 		if (valid_flag & (1 << j)) {
820*57e252bfSMichael Neumann 			data->value[i] = cpu_to_be32(entry->mc_data[j]);
821*57e252bfSMichael Neumann 			i++;
822*57e252bfSMichael Neumann 		}
823*57e252bfSMichael Neumann 	}
824*57e252bfSMichael Neumann }
825*57e252bfSMichael Neumann 
826*57e252bfSMichael Neumann static void cypress_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
827*57e252bfSMichael Neumann 						      struct rv7xx_pl *pl,
828*57e252bfSMichael Neumann 						      SMC_Evergreen_MCRegisterSet *mc_reg_table_data)
829*57e252bfSMichael Neumann {
830*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
831*57e252bfSMichael Neumann 	u32 i = 0;
832*57e252bfSMichael Neumann 
833*57e252bfSMichael Neumann 	for (i = 0; i < eg_pi->mc_reg_table.num_entries; i++) {
834*57e252bfSMichael Neumann 		if (pl->mclk <=
835*57e252bfSMichael Neumann 		    eg_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
836*57e252bfSMichael Neumann 			break;
837*57e252bfSMichael Neumann 	}
838*57e252bfSMichael Neumann 
839*57e252bfSMichael Neumann 	if ((i == eg_pi->mc_reg_table.num_entries) && (i > 0))
840*57e252bfSMichael Neumann 		--i;
841*57e252bfSMichael Neumann 
842*57e252bfSMichael Neumann 	cypress_convert_mc_registers(&eg_pi->mc_reg_table.mc_reg_table_entry[i],
843*57e252bfSMichael Neumann 				     mc_reg_table_data,
844*57e252bfSMichael Neumann 				     eg_pi->mc_reg_table.last,
845*57e252bfSMichael Neumann 				     eg_pi->mc_reg_table.valid_flag);
846*57e252bfSMichael Neumann }
847*57e252bfSMichael Neumann 
848*57e252bfSMichael Neumann static void cypress_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
849*57e252bfSMichael Neumann 						struct radeon_ps *radeon_state,
850*57e252bfSMichael Neumann 						SMC_Evergreen_MCRegisters *mc_reg_table)
851*57e252bfSMichael Neumann {
852*57e252bfSMichael Neumann 	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
853*57e252bfSMichael Neumann 
854*57e252bfSMichael Neumann 	cypress_convert_mc_reg_table_entry_to_smc(rdev,
855*57e252bfSMichael Neumann 						  &state->low,
856*57e252bfSMichael Neumann 						  &mc_reg_table->data[2]);
857*57e252bfSMichael Neumann 	cypress_convert_mc_reg_table_entry_to_smc(rdev,
858*57e252bfSMichael Neumann 						  &state->medium,
859*57e252bfSMichael Neumann 						  &mc_reg_table->data[3]);
860*57e252bfSMichael Neumann 	cypress_convert_mc_reg_table_entry_to_smc(rdev,
861*57e252bfSMichael Neumann 						  &state->high,
862*57e252bfSMichael Neumann 						  &mc_reg_table->data[4]);
863*57e252bfSMichael Neumann }
864*57e252bfSMichael Neumann 
865*57e252bfSMichael Neumann int cypress_upload_sw_state(struct radeon_device *rdev,
866*57e252bfSMichael Neumann 			    struct radeon_ps *radeon_new_state)
867*57e252bfSMichael Neumann {
868*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
869*57e252bfSMichael Neumann 	u16 address = pi->state_table_start +
870*57e252bfSMichael Neumann 		offsetof(RV770_SMC_STATETABLE, driverState);
871*57e252bfSMichael Neumann 	RV770_SMC_SWSTATE state = { 0 };
872*57e252bfSMichael Neumann 	int ret;
873*57e252bfSMichael Neumann 
874*57e252bfSMichael Neumann 	ret = cypress_convert_power_state_to_smc(rdev, radeon_new_state, &state);
875*57e252bfSMichael Neumann 	if (ret)
876*57e252bfSMichael Neumann 		return ret;
877*57e252bfSMichael Neumann 
878*57e252bfSMichael Neumann 	return rv770_copy_bytes_to_smc(rdev, address, (u8 *)&state,
879*57e252bfSMichael Neumann 				    sizeof(RV770_SMC_SWSTATE),
880*57e252bfSMichael Neumann 				    pi->sram_end);
881*57e252bfSMichael Neumann }
882*57e252bfSMichael Neumann 
883*57e252bfSMichael Neumann int cypress_upload_mc_reg_table(struct radeon_device *rdev,
884*57e252bfSMichael Neumann 				struct radeon_ps *radeon_new_state)
885*57e252bfSMichael Neumann {
886*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
887*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
888*57e252bfSMichael Neumann 	SMC_Evergreen_MCRegisters mc_reg_table = { 0 };
889*57e252bfSMichael Neumann 	u16 address;
890*57e252bfSMichael Neumann 
891*57e252bfSMichael Neumann 	cypress_convert_mc_reg_table_to_smc(rdev, radeon_new_state, &mc_reg_table);
892*57e252bfSMichael Neumann 
893*57e252bfSMichael Neumann 	address = eg_pi->mc_reg_table_start +
894*57e252bfSMichael Neumann 		(u16)offsetof(SMC_Evergreen_MCRegisters, data[2]);
895*57e252bfSMichael Neumann 
896*57e252bfSMichael Neumann 	return rv770_copy_bytes_to_smc(rdev, address,
897*57e252bfSMichael Neumann 				       (u8 *)&mc_reg_table.data[2],
898*57e252bfSMichael Neumann 				       sizeof(SMC_Evergreen_MCRegisterSet) * 3,
899*57e252bfSMichael Neumann 				       pi->sram_end);
900*57e252bfSMichael Neumann }
901*57e252bfSMichael Neumann 
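/*
 * Compute the arbiter burst time for an engine/memory clock pair.  The
 * ratio is roughly 8 * multiplier * engine_clock / memory_clock, mapped
 * into the range [0, 18]; e.g. with GDDR5 (multiplier = 1), sclk = 600 MHz
 * and mclk = 1000 MHz give result = 4 and therefore burst_time = 0.
 */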
902*57e252bfSMichael Neumann u32 cypress_calculate_burst_time(struct radeon_device *rdev,
903*57e252bfSMichael Neumann 				 u32 engine_clock, u32 memory_clock)
904*57e252bfSMichael Neumann {
905*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
906*57e252bfSMichael Neumann 	u32 multiplier = pi->mem_gddr5 ? 1 : 2;
907*57e252bfSMichael Neumann 	u32 result = (4 * multiplier * engine_clock) / (memory_clock / 2);
908*57e252bfSMichael Neumann 	u32 burst_time;
909*57e252bfSMichael Neumann 
910*57e252bfSMichael Neumann 	if (result <= 4)
911*57e252bfSMichael Neumann 		burst_time = 0;
912*57e252bfSMichael Neumann 	else if (result < 8)
913*57e252bfSMichael Neumann 		burst_time = result - 4;
914*57e252bfSMichael Neumann 	else {
915*57e252bfSMichael Neumann 		burst_time = result / 2;
916*57e252bfSMichael Neumann 		if (burst_time > 18)
917*57e252bfSMichael Neumann 			burst_time = 18;
918*57e252bfSMichael Neumann 	}
919*57e252bfSMichael Neumann 
920*57e252bfSMichael Neumann 	return burst_time;
921*57e252bfSMichael Neumann }
922*57e252bfSMichael Neumann 
923*57e252bfSMichael Neumann void cypress_program_memory_timing_parameters(struct radeon_device *rdev,
924*57e252bfSMichael Neumann 					      struct radeon_ps *radeon_new_state)
925*57e252bfSMichael Neumann {
926*57e252bfSMichael Neumann 	struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
927*57e252bfSMichael Neumann 	u32 mc_arb_burst_time = RREG32(MC_ARB_BURST_TIME);
928*57e252bfSMichael Neumann 
929*57e252bfSMichael Neumann 	mc_arb_burst_time &= ~(STATE1_MASK | STATE2_MASK | STATE3_MASK);
930*57e252bfSMichael Neumann 
931*57e252bfSMichael Neumann 	mc_arb_burst_time |= STATE1(cypress_calculate_burst_time(rdev,
932*57e252bfSMichael Neumann 								 new_state->low.sclk,
933*57e252bfSMichael Neumann 								 new_state->low.mclk));
934*57e252bfSMichael Neumann 	mc_arb_burst_time |= STATE2(cypress_calculate_burst_time(rdev,
935*57e252bfSMichael Neumann 								 new_state->medium.sclk,
936*57e252bfSMichael Neumann 								 new_state->medium.mclk));
937*57e252bfSMichael Neumann 	mc_arb_burst_time |= STATE3(cypress_calculate_burst_time(rdev,
938*57e252bfSMichael Neumann 								 new_state->high.sclk,
939*57e252bfSMichael Neumann 								 new_state->high.mclk));
940*57e252bfSMichael Neumann 
941*57e252bfSMichael Neumann 	rv730_program_memory_timing_parameters(rdev, radeon_new_state);
942*57e252bfSMichael Neumann 
943*57e252bfSMichael Neumann 	WREG32(MC_ARB_BURST_TIME, mc_arb_burst_time);
944*57e252bfSMichael Neumann }
945*57e252bfSMichael Neumann 
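/*
 * Copy the valid MC register addresses (the _LP shadow in s0, the live
 * register in s1) into the SMC MC-register table header; only entries
 * whose bit is set in valid_flag are emitted, and 'last' is the count.
 */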
946*57e252bfSMichael Neumann static void cypress_populate_mc_reg_addresses(struct radeon_device *rdev,
947*57e252bfSMichael Neumann 					      SMC_Evergreen_MCRegisters *mc_reg_table)
948*57e252bfSMichael Neumann {
949*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
950*57e252bfSMichael Neumann 	u32 i, j;
951*57e252bfSMichael Neumann 
952*57e252bfSMichael Neumann 	for (i = 0, j = 0; j < eg_pi->mc_reg_table.last; j++) {
953*57e252bfSMichael Neumann 		if (eg_pi->mc_reg_table.valid_flag & (1 << j)) {
954*57e252bfSMichael Neumann 			mc_reg_table->address[i].s0 =
955*57e252bfSMichael Neumann 				cpu_to_be16(eg_pi->mc_reg_table.mc_reg_address[j].s0);
956*57e252bfSMichael Neumann 			mc_reg_table->address[i].s1 =
957*57e252bfSMichael Neumann 				cpu_to_be16(eg_pi->mc_reg_table.mc_reg_address[j].s1);
958*57e252bfSMichael Neumann 			i++;
959*57e252bfSMichael Neumann 		}
960*57e252bfSMichael Neumann 	}
961*57e252bfSMichael Neumann 
962*57e252bfSMichael Neumann 	mc_reg_table->last = (u8)i;
963*57e252bfSMichael Neumann }
964*57e252bfSMichael Neumann 
965*57e252bfSMichael Neumann static void cypress_set_mc_reg_address_table(struct radeon_device *rdev)
966*57e252bfSMichael Neumann {
967*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
968*57e252bfSMichael Neumann 	u32 i = 0;
969*57e252bfSMichael Neumann 
970*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RAS_TIMING_LP >> 2;
971*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RAS_TIMING >> 2;
972*57e252bfSMichael Neumann 	i++;
973*57e252bfSMichael Neumann 
974*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_CAS_TIMING_LP >> 2;
975*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_CAS_TIMING >> 2;
976*57e252bfSMichael Neumann 	i++;
977*57e252bfSMichael Neumann 
978*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC_TIMING_LP >> 2;
979*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC_TIMING >> 2;
980*57e252bfSMichael Neumann 	i++;
981*57e252bfSMichael Neumann 
982*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC_TIMING2_LP >> 2;
983*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC_TIMING2 >> 2;
984*57e252bfSMichael Neumann 	i++;
985*57e252bfSMichael Neumann 
986*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RD_CTL_D0_LP >> 2;
987*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RD_CTL_D0 >> 2;
988*57e252bfSMichael Neumann 	i++;
989*57e252bfSMichael Neumann 
990*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RD_CTL_D1_LP >> 2;
991*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RD_CTL_D1 >> 2;
992*57e252bfSMichael Neumann 	i++;
993*57e252bfSMichael Neumann 
994*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_WR_CTL_D0_LP >> 2;
995*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_WR_CTL_D0 >> 2;
996*57e252bfSMichael Neumann 	i++;
997*57e252bfSMichael Neumann 
998*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_WR_CTL_D1_LP >> 2;
999*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_WR_CTL_D1 >> 2;
1000*57e252bfSMichael Neumann 	i++;
1001*57e252bfSMichael Neumann 
1002*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
1003*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_PMG_CMD_EMRS >> 2;
1004*57e252bfSMichael Neumann 	i++;
1005*57e252bfSMichael Neumann 
1006*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
1007*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_PMG_CMD_MRS >> 2;
1008*57e252bfSMichael Neumann 	i++;
1009*57e252bfSMichael Neumann 
1010*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
1011*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_PMG_CMD_MRS1 >> 2;
1012*57e252bfSMichael Neumann 	i++;
1013*57e252bfSMichael Neumann 
1014*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC1 >> 2;
1015*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC1 >> 2;
1016*57e252bfSMichael Neumann 	i++;
1017*57e252bfSMichael Neumann 
1018*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RESERVE_M >> 2;
1019*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RESERVE_M >> 2;
1020*57e252bfSMichael Neumann 	i++;
1021*57e252bfSMichael Neumann 
1022*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC3 >> 2;
1023*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC3 >> 2;
1024*57e252bfSMichael Neumann 	i++;
1025*57e252bfSMichael Neumann 
1026*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.last = (u8)i;
1027*57e252bfSMichael Neumann }
1028*57e252bfSMichael Neumann 
1029*57e252bfSMichael Neumann static void cypress_retrieve_ac_timing_for_one_entry(struct radeon_device *rdev,
1030*57e252bfSMichael Neumann 						     struct evergreen_mc_reg_entry *entry)
1031*57e252bfSMichael Neumann {
1032*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1033*57e252bfSMichael Neumann 	u32 i;
1034*57e252bfSMichael Neumann 
1035*57e252bfSMichael Neumann 	for (i = 0; i < eg_pi->mc_reg_table.last; i++)
1036*57e252bfSMichael Neumann 		entry->mc_data[i] =
1037*57e252bfSMichael Neumann 			RREG32(eg_pi->mc_reg_table.mc_reg_address[i].s1 << 2);
1039*57e252bfSMichael Neumann }
1040*57e252bfSMichael Neumann 
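/*
 * For each memory clock range in the table, program that mclk's AC timing
 * via ATOM, snapshot the resulting MC register values, then mark in
 * valid_flag which registers actually differ between ranges.
 */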
1041*57e252bfSMichael Neumann static void cypress_retrieve_ac_timing_for_all_ranges(struct radeon_device *rdev,
1042*57e252bfSMichael Neumann 						      struct atom_memory_clock_range_table *range_table)
1043*57e252bfSMichael Neumann {
1044*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1045*57e252bfSMichael Neumann 	u32 i, j;
1046*57e252bfSMichael Neumann 
1047*57e252bfSMichael Neumann 	for (i = 0; i < range_table->num_entries; i++) {
1048*57e252bfSMichael Neumann 		eg_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max =
1049*57e252bfSMichael Neumann 			range_table->mclk[i];
1050*57e252bfSMichael Neumann 		radeon_atom_set_ac_timing(rdev, range_table->mclk[i]);
1051*57e252bfSMichael Neumann 		cypress_retrieve_ac_timing_for_one_entry(rdev,
1052*57e252bfSMichael Neumann 							 &eg_pi->mc_reg_table.mc_reg_table_entry[i]);
1053*57e252bfSMichael Neumann 	}
1054*57e252bfSMichael Neumann 
1055*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.num_entries = range_table->num_entries;
1056*57e252bfSMichael Neumann 	eg_pi->mc_reg_table.valid_flag = 0;
1057*57e252bfSMichael Neumann 
1058*57e252bfSMichael Neumann 	for (i = 0; i < eg_pi->mc_reg_table.last; i++) {
1059*57e252bfSMichael Neumann 		for (j = 1; j < range_table->num_entries; j++) {
1060*57e252bfSMichael Neumann 			if (eg_pi->mc_reg_table.mc_reg_table_entry[j-1].mc_data[i] !=
1061*57e252bfSMichael Neumann 			    eg_pi->mc_reg_table.mc_reg_table_entry[j].mc_data[i]) {
1062*57e252bfSMichael Neumann 				eg_pi->mc_reg_table.valid_flag |= (1 << i);
1063*57e252bfSMichael Neumann 				break;
1064*57e252bfSMichael Neumann 			}
1065*57e252bfSMichael Neumann 		}
1066*57e252bfSMichael Neumann 	}
1067*57e252bfSMichael Neumann }
1068*57e252bfSMichael Neumann 
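/*
 * Fetch the mclk range table for the installed memory module from ATOM and
 * use it to capture the per-range AC timing register values.
 */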
1069*57e252bfSMichael Neumann static int cypress_initialize_mc_reg_table(struct radeon_device *rdev)
1070*57e252bfSMichael Neumann {
1071*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1072*57e252bfSMichael Neumann 	u8 module_index = rv770_get_memory_module_index(rdev);
1073*57e252bfSMichael Neumann 	struct atom_memory_clock_range_table range_table = { 0 };
1074*57e252bfSMichael Neumann 	int ret;
1075*57e252bfSMichael Neumann 
1076*57e252bfSMichael Neumann 	ret = radeon_atom_get_mclk_range_table(rdev,
1077*57e252bfSMichael Neumann 					       pi->mem_gddr5,
1078*57e252bfSMichael Neumann 					       module_index, &range_table);
1079*57e252bfSMichael Neumann 	if (ret)
1080*57e252bfSMichael Neumann 		return ret;
1081*57e252bfSMichael Neumann 
1082*57e252bfSMichael Neumann 	cypress_retrieve_ac_timing_for_all_ranges(rdev, &range_table);
1083*57e252bfSMichael Neumann 
1084*57e252bfSMichael Neumann 	return 0;
1085*57e252bfSMichael Neumann }
1086*57e252bfSMichael Neumann 
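/*
 * Poll each memory channel until the MC sequencer reports the requested
 * value in CG_SEQ_RESP, or until rdev->usec_timeout expires for that
 * channel.  The channel count depends on the ASIC family.
 */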
1087*57e252bfSMichael Neumann static void cypress_wait_for_mc_sequencer(struct radeon_device *rdev, u8 value)
1088*57e252bfSMichael Neumann {
1089*57e252bfSMichael Neumann 	u32 i, j;
1090*57e252bfSMichael Neumann 	u32 channels = 2;
1091*57e252bfSMichael Neumann 
1092*57e252bfSMichael Neumann 	if ((rdev->family == CHIP_CYPRESS) ||
1093*57e252bfSMichael Neumann 	    (rdev->family == CHIP_HEMLOCK))
1094*57e252bfSMichael Neumann 		channels = 4;
1095*57e252bfSMichael Neumann 	else if (rdev->family == CHIP_CEDAR)
1096*57e252bfSMichael Neumann 		channels = 1;
1097*57e252bfSMichael Neumann 
1098*57e252bfSMichael Neumann 	for (i = 0; i < channels; i++) {
1099*57e252bfSMichael Neumann 		if ((rdev->family == CHIP_CYPRESS) ||
1100*57e252bfSMichael Neumann 		    (rdev->family == CHIP_HEMLOCK)) {
1101*57e252bfSMichael Neumann 			WREG32_P(MC_CONFIG_MCD, MC_RD_ENABLE_MCD(i), ~MC_RD_ENABLE_MCD_MASK);
1102*57e252bfSMichael Neumann 			WREG32_P(MC_CG_CONFIG_MCD, MC_RD_ENABLE_MCD(i), ~MC_RD_ENABLE_MCD_MASK);
1103*57e252bfSMichael Neumann 		} else {
1104*57e252bfSMichael Neumann 			WREG32_P(MC_CONFIG, MC_RD_ENABLE(i), ~MC_RD_ENABLE_MASK);
1105*57e252bfSMichael Neumann 			WREG32_P(MC_CG_CONFIG, MC_RD_ENABLE(i), ~MC_RD_ENABLE_MASK);
1106*57e252bfSMichael Neumann 		}
1107*57e252bfSMichael Neumann 		for (j = 0; j < rdev->usec_timeout; j++) {
1108*57e252bfSMichael Neumann 			if (((RREG32(MC_SEQ_CG) & CG_SEQ_RESP_MASK) >> CG_SEQ_RESP_SHIFT) == value)
1109*57e252bfSMichael Neumann 				break;
1110*57e252bfSMichael Neumann 			DRM_UDELAY(1);
1111*57e252bfSMichael Neumann 		}
1112*57e252bfSMichael Neumann 	}
1113*57e252bfSMichael Neumann }
1114*57e252bfSMichael Neumann 
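/*
 * Switch the memory controller to the S1 register set: program the boot
 * mclk's AC timing, suspend YCLK, request the S1 DRAM configuration, wait
 * for PMG_PWRSTATE to assert, then resume YCLK.
 */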
1115*57e252bfSMichael Neumann static void cypress_force_mc_use_s1(struct radeon_device *rdev,
1116*57e252bfSMichael Neumann 				    struct radeon_ps *radeon_boot_state)
1117*57e252bfSMichael Neumann {
1118*57e252bfSMichael Neumann 	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1119*57e252bfSMichael Neumann 	u32 strobe_mode;
1120*57e252bfSMichael Neumann 	u32 mc_seq_cg;
1121*57e252bfSMichael Neumann 	int i;
1122*57e252bfSMichael Neumann 
1123*57e252bfSMichael Neumann 	if (RREG32(MC_SEQ_STATUS_M) & PMG_PWRSTATE)
1124*57e252bfSMichael Neumann 		return;
1125*57e252bfSMichael Neumann 
1126*57e252bfSMichael Neumann 	radeon_atom_set_ac_timing(rdev, boot_state->low.mclk);
1127*57e252bfSMichael Neumann 	radeon_mc_wait_for_idle(rdev);
1128*57e252bfSMichael Neumann 
1129*57e252bfSMichael Neumann 	if ((rdev->family == CHIP_CYPRESS) ||
1130*57e252bfSMichael Neumann 	    (rdev->family == CHIP_HEMLOCK)) {
1131*57e252bfSMichael Neumann 		WREG32(MC_CONFIG_MCD, 0xf);
1132*57e252bfSMichael Neumann 		WREG32(MC_CG_CONFIG_MCD, 0xf);
1133*57e252bfSMichael Neumann 	} else {
1134*57e252bfSMichael Neumann 		WREG32(MC_CONFIG, 0xf);
1135*57e252bfSMichael Neumann 		WREG32(MC_CG_CONFIG, 0xf);
1136*57e252bfSMichael Neumann 	}
1137*57e252bfSMichael Neumann 
1138*57e252bfSMichael Neumann 	for (i = 0; i < rdev->num_crtc; i++)
1139*57e252bfSMichael Neumann 		radeon_wait_for_vblank(rdev, i);
1140*57e252bfSMichael Neumann 
1141*57e252bfSMichael Neumann 	WREG32(MC_SEQ_CG, MC_CG_SEQ_YCLK_SUSPEND);
1142*57e252bfSMichael Neumann 	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_SUSPEND);
1143*57e252bfSMichael Neumann 
1144*57e252bfSMichael Neumann 	strobe_mode = cypress_get_strobe_mode_settings(rdev,
1145*57e252bfSMichael Neumann 						       boot_state->low.mclk);
1146*57e252bfSMichael Neumann 
1147*57e252bfSMichael Neumann 	mc_seq_cg = CG_SEQ_REQ(MC_CG_SEQ_DRAMCONF_S1);
1148*57e252bfSMichael Neumann 	mc_seq_cg |= SEQ_CG_RESP(strobe_mode);
1149*57e252bfSMichael Neumann 	WREG32(MC_SEQ_CG, mc_seq_cg);
1150*57e252bfSMichael Neumann 
1151*57e252bfSMichael Neumann 	for (i = 0; i < rdev->usec_timeout; i++) {
1152*57e252bfSMichael Neumann 		if (RREG32(MC_SEQ_STATUS_M) & PMG_PWRSTATE)
1153*57e252bfSMichael Neumann 			break;
1154*57e252bfSMichael Neumann 		DRM_UDELAY(1);
1155*57e252bfSMichael Neumann 	}
1156*57e252bfSMichael Neumann 
1157*57e252bfSMichael Neumann 	mc_seq_cg &= ~CG_SEQ_REQ_MASK;
1158*57e252bfSMichael Neumann 	mc_seq_cg |= CG_SEQ_REQ(MC_CG_SEQ_YCLK_RESUME);
1159*57e252bfSMichael Neumann 	WREG32(MC_SEQ_CG, mc_seq_cg);
1160*57e252bfSMichael Neumann 
1161*57e252bfSMichael Neumann 	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_RESUME);
1162*57e252bfSMichael Neumann }
1163*57e252bfSMichael Neumann 
1164*57e252bfSMichael Neumann static void cypress_copy_ac_timing_from_s1_to_s0(struct radeon_device *rdev)
1165*57e252bfSMichael Neumann {
1166*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1167*57e252bfSMichael Neumann 	u32 value;
1168*57e252bfSMichael Neumann 	u32 i;
1169*57e252bfSMichael Neumann 
1170*57e252bfSMichael Neumann 	for (i = 0; i < eg_pi->mc_reg_table.last; i++) {
1171*57e252bfSMichael Neumann 		value = RREG32(eg_pi->mc_reg_table.mc_reg_address[i].s1 << 2);
1172*57e252bfSMichael Neumann 		WREG32(eg_pi->mc_reg_table.mc_reg_address[i].s0 << 2, value);
1173*57e252bfSMichael Neumann 	}
1174*57e252bfSMichael Neumann }
1175*57e252bfSMichael Neumann 
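/*
 * Switch the memory controller back to the S0 register set: copy the S1 AC
 * timing values into the S0 registers, suspend YCLK, request the S0 DRAM
 * configuration, wait for PMG_PWRSTATE to clear, then resume YCLK.
 */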
1176*57e252bfSMichael Neumann static void cypress_force_mc_use_s0(struct radeon_device *rdev,
1177*57e252bfSMichael Neumann 				    struct radeon_ps *radeon_boot_state)
1178*57e252bfSMichael Neumann {
1179*57e252bfSMichael Neumann 	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1180*57e252bfSMichael Neumann 	u32 strobe_mode;
1181*57e252bfSMichael Neumann 	u32 mc_seq_cg;
1182*57e252bfSMichael Neumann 	int i;
1183*57e252bfSMichael Neumann 
1184*57e252bfSMichael Neumann 	cypress_copy_ac_timing_from_s1_to_s0(rdev);
1185*57e252bfSMichael Neumann 	radeon_mc_wait_for_idle(rdev);
1186*57e252bfSMichael Neumann 
1187*57e252bfSMichael Neumann 	if ((rdev->family == CHIP_CYPRESS) ||
1188*57e252bfSMichael Neumann 	    (rdev->family == CHIP_HEMLOCK)) {
1189*57e252bfSMichael Neumann 		WREG32(MC_CONFIG_MCD, 0xf);
1190*57e252bfSMichael Neumann 		WREG32(MC_CG_CONFIG_MCD, 0xf);
1191*57e252bfSMichael Neumann 	} else {
1192*57e252bfSMichael Neumann 		WREG32(MC_CONFIG, 0xf);
1193*57e252bfSMichael Neumann 		WREG32(MC_CG_CONFIG, 0xf);
1194*57e252bfSMichael Neumann 	}
1195*57e252bfSMichael Neumann 
1196*57e252bfSMichael Neumann 	for (i = 0; i < rdev->num_crtc; i++)
1197*57e252bfSMichael Neumann 		radeon_wait_for_vblank(rdev, i);
1198*57e252bfSMichael Neumann 
1199*57e252bfSMichael Neumann 	WREG32(MC_SEQ_CG, MC_CG_SEQ_YCLK_SUSPEND);
1200*57e252bfSMichael Neumann 	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_SUSPEND);
1201*57e252bfSMichael Neumann 
1202*57e252bfSMichael Neumann 	strobe_mode = cypress_get_strobe_mode_settings(rdev,
1203*57e252bfSMichael Neumann 						       boot_state->low.mclk);
1204*57e252bfSMichael Neumann 
1205*57e252bfSMichael Neumann 	mc_seq_cg = CG_SEQ_REQ(MC_CG_SEQ_DRAMCONF_S0);
1206*57e252bfSMichael Neumann 	mc_seq_cg |= SEQ_CG_RESP(strobe_mode);
1207*57e252bfSMichael Neumann 	WREG32(MC_SEQ_CG, mc_seq_cg);
1208*57e252bfSMichael Neumann 
1209*57e252bfSMichael Neumann 	for (i = 0; i < rdev->usec_timeout; i++) {
1210*57e252bfSMichael Neumann 		if (!(RREG32(MC_SEQ_STATUS_M) & PMG_PWRSTATE))
1211*57e252bfSMichael Neumann 			break;
1212*57e252bfSMichael Neumann 		DRM_UDELAY(1);
1213*57e252bfSMichael Neumann 	}
1214*57e252bfSMichael Neumann 
1215*57e252bfSMichael Neumann 	mc_seq_cg &= ~CG_SEQ_REQ_MASK;
1216*57e252bfSMichael Neumann 	mc_seq_cg |= CG_SEQ_REQ(MC_CG_SEQ_YCLK_RESUME);
1217*57e252bfSMichael Neumann 	WREG32(MC_SEQ_CG, mc_seq_cg);
1218*57e252bfSMichael Neumann 
1219*57e252bfSMichael Neumann 	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_RESUME);
1220*57e252bfSMichael Neumann }
1221*57e252bfSMichael Neumann 
1222*57e252bfSMichael Neumann static int cypress_populate_initial_mvdd_value(struct radeon_device *rdev,
1223*57e252bfSMichael Neumann 					       RV770_SMC_VOLTAGE_VALUE *voltage)
1224*57e252bfSMichael Neumann {
1225*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1226*57e252bfSMichael Neumann 
1227*57e252bfSMichael Neumann 	voltage->index = eg_pi->mvdd_high_index;
1228*57e252bfSMichael Neumann 	voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1229*57e252bfSMichael Neumann 
1230*57e252bfSMichael Neumann 	return 0;
1231*57e252bfSMichael Neumann }
1232*57e252bfSMichael Neumann 
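/*
 * Fill level 0 of the SMC initial state from the boot state's low performance
 * level: MPLL/SPLL register images, clock values, arbitration index,
 * voltages, PCIe gen2 flags and GDDR5 strobe/EDC flags.  Levels 1 and 2 are
 * copies of level 0.
 */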
1233*57e252bfSMichael Neumann int cypress_populate_smc_initial_state(struct radeon_device *rdev,
1234*57e252bfSMichael Neumann 				       struct radeon_ps *radeon_initial_state,
1235*57e252bfSMichael Neumann 				       RV770_SMC_STATETABLE *table)
1236*57e252bfSMichael Neumann {
1237*57e252bfSMichael Neumann 	struct rv7xx_ps *initial_state = rv770_get_ps(radeon_initial_state);
1238*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1239*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1240*57e252bfSMichael Neumann 	u32 a_t;
1241*57e252bfSMichael Neumann 
1242*57e252bfSMichael Neumann 	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
1243*57e252bfSMichael Neumann 		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl);
1244*57e252bfSMichael Neumann 	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
1245*57e252bfSMichael Neumann 		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl_2);
1246*57e252bfSMichael Neumann 	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
1247*57e252bfSMichael Neumann 		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl);
1248*57e252bfSMichael Neumann 	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
1249*57e252bfSMichael Neumann 		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl_2);
1250*57e252bfSMichael Neumann 	table->initialState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
1251*57e252bfSMichael Neumann 		cpu_to_be32(pi->clk_regs.rv770.mclk_pwrmgt_cntl);
1252*57e252bfSMichael Neumann 	table->initialState.levels[0].mclk.mclk770.vDLL_CNTL =
1253*57e252bfSMichael Neumann 		cpu_to_be32(pi->clk_regs.rv770.dll_cntl);
1254*57e252bfSMichael Neumann 
1255*57e252bfSMichael Neumann 	table->initialState.levels[0].mclk.mclk770.vMPLL_SS =
1256*57e252bfSMichael Neumann 		cpu_to_be32(pi->clk_regs.rv770.mpll_ss1);
1257*57e252bfSMichael Neumann 	table->initialState.levels[0].mclk.mclk770.vMPLL_SS2 =
1258*57e252bfSMichael Neumann 		cpu_to_be32(pi->clk_regs.rv770.mpll_ss2);
1259*57e252bfSMichael Neumann 
1260*57e252bfSMichael Neumann 	table->initialState.levels[0].mclk.mclk770.mclk_value =
1261*57e252bfSMichael Neumann 		cpu_to_be32(initial_state->low.mclk);
1262*57e252bfSMichael Neumann 
1263*57e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1264*57e252bfSMichael Neumann 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl);
1265*57e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1266*57e252bfSMichael Neumann 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_2);
1267*57e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1268*57e252bfSMichael Neumann 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_3);
1269*57e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1270*57e252bfSMichael Neumann 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum);
1271*57e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1272*57e252bfSMichael Neumann 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum_2);
1273*57e252bfSMichael Neumann 
1274*57e252bfSMichael Neumann 	table->initialState.levels[0].sclk.sclk_value =
1275*57e252bfSMichael Neumann 		cpu_to_be32(initial_state->low.sclk);
1276*57e252bfSMichael Neumann 
1277*57e252bfSMichael Neumann 	table->initialState.levels[0].arbValue = MC_CG_ARB_FREQ_F0;
1278*57e252bfSMichael Neumann 
1279*57e252bfSMichael Neumann 	table->initialState.levels[0].ACIndex = 0;
1280*57e252bfSMichael Neumann 
1281*57e252bfSMichael Neumann 	cypress_populate_voltage_value(rdev,
1282*57e252bfSMichael Neumann 				       &eg_pi->vddc_voltage_table,
1283*57e252bfSMichael Neumann 				       initial_state->low.vddc,
1284*57e252bfSMichael Neumann 				       &table->initialState.levels[0].vddc);
1285*57e252bfSMichael Neumann 
1286*57e252bfSMichael Neumann 	if (eg_pi->vddci_control)
1287*57e252bfSMichael Neumann 		cypress_populate_voltage_value(rdev,
1288*57e252bfSMichael Neumann 					       &eg_pi->vddci_voltage_table,
1289*57e252bfSMichael Neumann 					       initial_state->low.vddci,
1290*57e252bfSMichael Neumann 					       &table->initialState.levels[0].vddci);
1291*57e252bfSMichael Neumann 
1292*57e252bfSMichael Neumann 	cypress_populate_initial_mvdd_value(rdev,
1293*57e252bfSMichael Neumann 					    &table->initialState.levels[0].mvdd);
1294*57e252bfSMichael Neumann 
1295*57e252bfSMichael Neumann 	a_t = CG_R(0xffff) | CG_L(0);
1296*57e252bfSMichael Neumann 	table->initialState.levels[0].aT = cpu_to_be32(a_t);
1297*57e252bfSMichael Neumann 
1298*57e252bfSMichael Neumann 	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1299*57e252bfSMichael Neumann 
1301*57e252bfSMichael Neumann 	if (pi->boot_in_gen2)
1302*57e252bfSMichael Neumann 		table->initialState.levels[0].gen2PCIE = 1;
1303*57e252bfSMichael Neumann 	else
1304*57e252bfSMichael Neumann 		table->initialState.levels[0].gen2PCIE = 0;
1305*57e252bfSMichael Neumann 	if (initial_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
1306*57e252bfSMichael Neumann 		table->initialState.levels[0].gen2XSP = 1;
1307*57e252bfSMichael Neumann 	else
1308*57e252bfSMichael Neumann 		table->initialState.levels[0].gen2XSP = 0;
1309*57e252bfSMichael Neumann 
1310*57e252bfSMichael Neumann 	if (pi->mem_gddr5) {
1311*57e252bfSMichael Neumann 		table->initialState.levels[0].strobeMode =
1312*57e252bfSMichael Neumann 			cypress_get_strobe_mode_settings(rdev,
1313*57e252bfSMichael Neumann 							 initial_state->low.mclk);
1314*57e252bfSMichael Neumann 
1315*57e252bfSMichael Neumann 		if (initial_state->low.mclk > pi->mclk_edc_enable_threshold)
1316*57e252bfSMichael Neumann 			table->initialState.levels[0].mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
1317*57e252bfSMichael Neumann 		else
1318*57e252bfSMichael Neumann 			table->initialState.levels[0].mcFlags = 0;
1319*57e252bfSMichael Neumann 	}
1320*57e252bfSMichael Neumann 
1321*57e252bfSMichael Neumann 	table->initialState.levels[1] = table->initialState.levels[0];
1322*57e252bfSMichael Neumann 	table->initialState.levels[2] = table->initialState.levels[0];
1323*57e252bfSMichael Neumann 
1324*57e252bfSMichael Neumann 	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
1325*57e252bfSMichael Neumann 
1326*57e252bfSMichael Neumann 	return 0;
1327*57e252bfSMichael Neumann }
1328*57e252bfSMichael Neumann 
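/*
 * Derive the SMC ACPI state from the initial state: select the ACPI (or
 * minimum) VDDC/VDDCI, power down and bypass the MPLL/DLL memory clock
 * paths, put the SPLL into reset/sleep/bypass on evergreen parts, and zero
 * the sclk/mclk values.
 */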
1329*57e252bfSMichael Neumann int cypress_populate_smc_acpi_state(struct radeon_device *rdev,
1330*57e252bfSMichael Neumann 				    RV770_SMC_STATETABLE *table)
1331*57e252bfSMichael Neumann {
1332*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1333*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1334*57e252bfSMichael Neumann 	u32 mpll_ad_func_cntl =
1335*57e252bfSMichael Neumann 		pi->clk_regs.rv770.mpll_ad_func_cntl;
1336*57e252bfSMichael Neumann 	u32 mpll_ad_func_cntl_2 =
1337*57e252bfSMichael Neumann 		pi->clk_regs.rv770.mpll_ad_func_cntl_2;
1338*57e252bfSMichael Neumann 	u32 mpll_dq_func_cntl =
1339*57e252bfSMichael Neumann 		pi->clk_regs.rv770.mpll_dq_func_cntl;
1340*57e252bfSMichael Neumann 	u32 mpll_dq_func_cntl_2 =
1341*57e252bfSMichael Neumann 		pi->clk_regs.rv770.mpll_dq_func_cntl_2;
1342*57e252bfSMichael Neumann 	u32 spll_func_cntl =
1343*57e252bfSMichael Neumann 		pi->clk_regs.rv770.cg_spll_func_cntl;
1344*57e252bfSMichael Neumann 	u32 spll_func_cntl_2 =
1345*57e252bfSMichael Neumann 		pi->clk_regs.rv770.cg_spll_func_cntl_2;
1346*57e252bfSMichael Neumann 	u32 spll_func_cntl_3 =
1347*57e252bfSMichael Neumann 		pi->clk_regs.rv770.cg_spll_func_cntl_3;
1348*57e252bfSMichael Neumann 	u32 mclk_pwrmgt_cntl =
1349*57e252bfSMichael Neumann 		pi->clk_regs.rv770.mclk_pwrmgt_cntl;
1350*57e252bfSMichael Neumann 	u32 dll_cntl =
1351*57e252bfSMichael Neumann 		pi->clk_regs.rv770.dll_cntl;
1352*57e252bfSMichael Neumann 
1353*57e252bfSMichael Neumann 	table->ACPIState = table->initialState;
1354*57e252bfSMichael Neumann 
1355*57e252bfSMichael Neumann 	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
1356*57e252bfSMichael Neumann 
1357*57e252bfSMichael Neumann 	if (pi->acpi_vddc) {
1358*57e252bfSMichael Neumann 		cypress_populate_voltage_value(rdev,
1359*57e252bfSMichael Neumann 					       &eg_pi->vddc_voltage_table,
1360*57e252bfSMichael Neumann 					       pi->acpi_vddc,
1361*57e252bfSMichael Neumann 					       &table->ACPIState.levels[0].vddc);
1362*57e252bfSMichael Neumann 		if (pi->pcie_gen2) {
1363*57e252bfSMichael Neumann 			if (pi->acpi_pcie_gen2)
1364*57e252bfSMichael Neumann 				table->ACPIState.levels[0].gen2PCIE = 1;
1365*57e252bfSMichael Neumann 			else
1366*57e252bfSMichael Neumann 				table->ACPIState.levels[0].gen2PCIE = 0;
1367*57e252bfSMichael Neumann 		} else
1368*57e252bfSMichael Neumann 			table->ACPIState.levels[0].gen2PCIE = 0;
1369*57e252bfSMichael Neumann 		if (pi->acpi_pcie_gen2)
1370*57e252bfSMichael Neumann 			table->ACPIState.levels[0].gen2XSP = 1;
1371*57e252bfSMichael Neumann 		else
1372*57e252bfSMichael Neumann 			table->ACPIState.levels[0].gen2XSP = 0;
1373*57e252bfSMichael Neumann 	} else {
1374*57e252bfSMichael Neumann 		cypress_populate_voltage_value(rdev,
1375*57e252bfSMichael Neumann 					       &eg_pi->vddc_voltage_table,
1376*57e252bfSMichael Neumann 					       pi->min_vddc_in_table,
1377*57e252bfSMichael Neumann 					       &table->ACPIState.levels[0].vddc);
1378*57e252bfSMichael Neumann 		table->ACPIState.levels[0].gen2PCIE = 0;
1379*57e252bfSMichael Neumann 	}
1380*57e252bfSMichael Neumann 
1381*57e252bfSMichael Neumann 	if (eg_pi->acpi_vddci) {
1382*57e252bfSMichael Neumann 		if (eg_pi->vddci_control) {
1383*57e252bfSMichael Neumann 			cypress_populate_voltage_value(rdev,
1384*57e252bfSMichael Neumann 						       &eg_pi->vddci_voltage_table,
1385*57e252bfSMichael Neumann 						       eg_pi->acpi_vddci,
1386*57e252bfSMichael Neumann 						       &table->ACPIState.levels[0].vddci);
1387*57e252bfSMichael Neumann 		}
1388*57e252bfSMichael Neumann 	}
1389*57e252bfSMichael Neumann 
1390*57e252bfSMichael Neumann 	mpll_ad_func_cntl &= ~PDNB;
1391*57e252bfSMichael Neumann 
1392*57e252bfSMichael Neumann 	mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
1393*57e252bfSMichael Neumann 
1394*57e252bfSMichael Neumann 	if (pi->mem_gddr5)
1395*57e252bfSMichael Neumann 		mpll_dq_func_cntl &= ~PDNB;
1396*57e252bfSMichael Neumann 	mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;
1397*57e252bfSMichael Neumann 
1398*57e252bfSMichael Neumann 	mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
1399*57e252bfSMichael Neumann 			     MRDCKA1_RESET |
1400*57e252bfSMichael Neumann 			     MRDCKB0_RESET |
1401*57e252bfSMichael Neumann 			     MRDCKB1_RESET |
1402*57e252bfSMichael Neumann 			     MRDCKC0_RESET |
1403*57e252bfSMichael Neumann 			     MRDCKC1_RESET |
1404*57e252bfSMichael Neumann 			     MRDCKD0_RESET |
1405*57e252bfSMichael Neumann 			     MRDCKD1_RESET);
1406*57e252bfSMichael Neumann 
1407*57e252bfSMichael Neumann 	mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
1408*57e252bfSMichael Neumann 			      MRDCKA1_PDNB |
1409*57e252bfSMichael Neumann 			      MRDCKB0_PDNB |
1410*57e252bfSMichael Neumann 			      MRDCKB1_PDNB |
1411*57e252bfSMichael Neumann 			      MRDCKC0_PDNB |
1412*57e252bfSMichael Neumann 			      MRDCKC1_PDNB |
1413*57e252bfSMichael Neumann 			      MRDCKD0_PDNB |
1414*57e252bfSMichael Neumann 			      MRDCKD1_PDNB);
1415*57e252bfSMichael Neumann 
1416*57e252bfSMichael Neumann 	dll_cntl |= (MRDCKA0_BYPASS |
1417*57e252bfSMichael Neumann 		     MRDCKA1_BYPASS |
1418*57e252bfSMichael Neumann 		     MRDCKB0_BYPASS |
1419*57e252bfSMichael Neumann 		     MRDCKB1_BYPASS |
1420*57e252bfSMichael Neumann 		     MRDCKC0_BYPASS |
1421*57e252bfSMichael Neumann 		     MRDCKC1_BYPASS |
1422*57e252bfSMichael Neumann 		     MRDCKD0_BYPASS |
1423*57e252bfSMichael Neumann 		     MRDCKD1_BYPASS);
1424*57e252bfSMichael Neumann 
1425*57e252bfSMichael Neumann 	/* evergreen only */
1426*57e252bfSMichael Neumann 	if (rdev->family <= CHIP_HEMLOCK)
1427*57e252bfSMichael Neumann 		spll_func_cntl |= SPLL_RESET | SPLL_SLEEP | SPLL_BYPASS_EN;
1428*57e252bfSMichael Neumann 
1429*57e252bfSMichael Neumann 	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
1430*57e252bfSMichael Neumann 	spll_func_cntl_2 |= SCLK_MUX_SEL(4);
1431*57e252bfSMichael Neumann 
1432*57e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
1433*57e252bfSMichael Neumann 		cpu_to_be32(mpll_ad_func_cntl);
1434*57e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
1435*57e252bfSMichael Neumann 		cpu_to_be32(mpll_ad_func_cntl_2);
1436*57e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
1437*57e252bfSMichael Neumann 		cpu_to_be32(mpll_dq_func_cntl);
1438*57e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
1439*57e252bfSMichael Neumann 		cpu_to_be32(mpll_dq_func_cntl_2);
1440*57e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
1441*57e252bfSMichael Neumann 		cpu_to_be32(mclk_pwrmgt_cntl);
1442*57e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
1443*57e252bfSMichael Neumann 
1444*57e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.mclk770.mclk_value = 0;
1445*57e252bfSMichael Neumann 
1446*57e252bfSMichael Neumann 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1447*57e252bfSMichael Neumann 		cpu_to_be32(spll_func_cntl);
1448*57e252bfSMichael Neumann 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1449*57e252bfSMichael Neumann 		cpu_to_be32(spll_func_cntl_2);
1450*57e252bfSMichael Neumann 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1451*57e252bfSMichael Neumann 		cpu_to_be32(spll_func_cntl_3);
1452*57e252bfSMichael Neumann 
1453*57e252bfSMichael Neumann 	table->ACPIState.levels[0].sclk.sclk_value = 0;
1454*57e252bfSMichael Neumann 
1455*57e252bfSMichael Neumann 	cypress_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
1456*57e252bfSMichael Neumann 
1457*57e252bfSMichael Neumann 	if (eg_pi->dynamic_ac_timing)
1458*57e252bfSMichael Neumann 		table->ACPIState.levels[0].ACIndex = 1;
1459*57e252bfSMichael Neumann 
1460*57e252bfSMichael Neumann 	table->ACPIState.levels[1] = table->ACPIState.levels[0];
1461*57e252bfSMichael Neumann 	table->ACPIState.levels[2] = table->ACPIState.levels[0];
1462*57e252bfSMichael Neumann 
1463*57e252bfSMichael Neumann 	return 0;
1464*57e252bfSMichael Neumann }
1465*57e252bfSMichael Neumann 
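/*
 * If the ATOM voltage table has more entries than the SMC state table can
 * hold, keep only the last MAX_NO_VREG_STEPS entries.
 */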
1466*57e252bfSMichael Neumann static void cypress_trim_voltage_table_to_fit_state_table(struct radeon_device *rdev,
1467*57e252bfSMichael Neumann 							  struct atom_voltage_table *voltage_table)
1468*57e252bfSMichael Neumann {
1469*57e252bfSMichael Neumann 	unsigned int i, diff;
1470*57e252bfSMichael Neumann 
1471*57e252bfSMichael Neumann 	if (voltage_table->count <= MAX_NO_VREG_STEPS)
1472*57e252bfSMichael Neumann 		return;
1473*57e252bfSMichael Neumann 
1474*57e252bfSMichael Neumann 	diff = voltage_table->count - MAX_NO_VREG_STEPS;
1475*57e252bfSMichael Neumann 
1476*57e252bfSMichael Neumann 	for (i = 0; i < MAX_NO_VREG_STEPS; i++)
1477*57e252bfSMichael Neumann 		voltage_table->entries[i] = voltage_table->entries[i + diff];
1478*57e252bfSMichael Neumann 
1479*57e252bfSMichael Neumann 	voltage_table->count = MAX_NO_VREG_STEPS;
1480*57e252bfSMichael Neumann }
1481*57e252bfSMichael Neumann 
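/*
 * Pull the VDDC (and, when VDDCI control is enabled, VDDCI) voltage tables
 * from ATOM and trim them to fit the SMC state table.
 */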
1482*57e252bfSMichael Neumann int cypress_construct_voltage_tables(struct radeon_device *rdev)
1483*57e252bfSMichael Neumann {
1484*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1485*57e252bfSMichael Neumann 	int ret;
1486*57e252bfSMichael Neumann 
1487*57e252bfSMichael Neumann 	ret = radeon_atom_get_voltage_table(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0,
1488*57e252bfSMichael Neumann 					    &eg_pi->vddc_voltage_table);
1489*57e252bfSMichael Neumann 	if (ret)
1490*57e252bfSMichael Neumann 		return ret;
1491*57e252bfSMichael Neumann 
1492*57e252bfSMichael Neumann 	if (eg_pi->vddc_voltage_table.count > MAX_NO_VREG_STEPS)
1493*57e252bfSMichael Neumann 		cypress_trim_voltage_table_to_fit_state_table(rdev,
1494*57e252bfSMichael Neumann 							      &eg_pi->vddc_voltage_table);
1495*57e252bfSMichael Neumann 
1496*57e252bfSMichael Neumann 	if (eg_pi->vddci_control) {
1497*57e252bfSMichael Neumann 		ret = radeon_atom_get_voltage_table(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0,
1498*57e252bfSMichael Neumann 						    &eg_pi->vddci_voltage_table);
1499*57e252bfSMichael Neumann 		if (ret)
1500*57e252bfSMichael Neumann 			return ret;
1501*57e252bfSMichael Neumann 
1502*57e252bfSMichael Neumann 		if (eg_pi->vddci_voltage_table.count > MAX_NO_VREG_STEPS)
1503*57e252bfSMichael Neumann 			cypress_trim_voltage_table_to_fit_state_table(rdev,
1504*57e252bfSMichael Neumann 								      &eg_pi->vddci_voltage_table);
1505*57e252bfSMichael Neumann 	}
1506*57e252bfSMichael Neumann 
1507*57e252bfSMichael Neumann 	return 0;
1508*57e252bfSMichael Neumann }
1509*57e252bfSMichael Neumann 
1510*57e252bfSMichael Neumann static void cypress_populate_smc_voltage_table(struct radeon_device *rdev,
1511*57e252bfSMichael Neumann 					       struct atom_voltage_table *voltage_table,
1512*57e252bfSMichael Neumann 					       RV770_SMC_STATETABLE *table)
1513*57e252bfSMichael Neumann {
1514*57e252bfSMichael Neumann 	unsigned int i;
1515*57e252bfSMichael Neumann 
1516*57e252bfSMichael Neumann 	for (i = 0; i < voltage_table->count; i++) {
1517*57e252bfSMichael Neumann 		table->highSMIO[i] = 0;
1518*57e252bfSMichael Neumann 		table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
1519*57e252bfSMichael Neumann 	}
1520*57e252bfSMichael Neumann }
1521*57e252bfSMichael Neumann 
1522*57e252bfSMichael Neumann int cypress_populate_smc_voltage_tables(struct radeon_device *rdev,
1523*57e252bfSMichael Neumann 					RV770_SMC_STATETABLE *table)
1524*57e252bfSMichael Neumann {
1525*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1526*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1527*57e252bfSMichael Neumann 	unsigned char i;
1528*57e252bfSMichael Neumann 
1529*57e252bfSMichael Neumann 	if (eg_pi->vddc_voltage_table.count) {
1530*57e252bfSMichael Neumann 		cypress_populate_smc_voltage_table(rdev,
1531*57e252bfSMichael Neumann 						   &eg_pi->vddc_voltage_table,
1532*57e252bfSMichael Neumann 						   table);
1533*57e252bfSMichael Neumann 
1534*57e252bfSMichael Neumann 		table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDC] = 0;
1535*57e252bfSMichael Neumann 		table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDC] =
1536*57e252bfSMichael Neumann 			cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1537*57e252bfSMichael Neumann 
1538*57e252bfSMichael Neumann 		for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
1539*57e252bfSMichael Neumann 			if (pi->max_vddc_in_table <=
1540*57e252bfSMichael Neumann 			    eg_pi->vddc_voltage_table.entries[i].value) {
1541*57e252bfSMichael Neumann 				table->maxVDDCIndexInPPTable = i;
1542*57e252bfSMichael Neumann 				break;
1543*57e252bfSMichael Neumann 			}
1544*57e252bfSMichael Neumann 		}
1545*57e252bfSMichael Neumann 	}
1546*57e252bfSMichael Neumann 
1547*57e252bfSMichael Neumann 	if (eg_pi->vddci_voltage_table.count) {
1548*57e252bfSMichael Neumann 		cypress_populate_smc_voltage_table(rdev,
1549*57e252bfSMichael Neumann 						   &eg_pi->vddci_voltage_table,
1550*57e252bfSMichael Neumann 						   table);
1551*57e252bfSMichael Neumann 
1552*57e252bfSMichael Neumann 		table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDCI] = 0;
1553*57e252bfSMichael Neumann 		table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDCI] =
1554*57e252bfSMichael Neumann 			cpu_to_be32(eg_pi->vddci_voltage_table.mask_low);
1555*57e252bfSMichael Neumann 	}
1556*57e252bfSMichael Neumann 
1557*57e252bfSMichael Neumann 	return 0;
1558*57e252bfSMichael Neumann }
1559*57e252bfSMichael Neumann 
1560*57e252bfSMichael Neumann static u32 cypress_get_mclk_split_point(struct atom_memory_info *memory_info)
1561*57e252bfSMichael Neumann {
1562*57e252bfSMichael Neumann 	if ((memory_info->mem_type == MEM_TYPE_GDDR3) ||
1563*57e252bfSMichael Neumann 	    (memory_info->mem_type == MEM_TYPE_DDR3))
1564*57e252bfSMichael Neumann 		return 30000;
1565*57e252bfSMichael Neumann 
1566*57e252bfSMichael Neumann 	return 0;
1567*57e252bfSMichael Neumann }
1568*57e252bfSMichael Neumann 
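/*
 * Determine the MVDD high/low SMIO indices from the BACKBIAS pad state and
 * look up the mclk split point for the installed memory module; MVDD control
 * is disabled if the pad is unused, the memory info is unavailable, or no
 * split point applies.
 */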
1569*57e252bfSMichael Neumann int cypress_get_mvdd_configuration(struct radeon_device *rdev)
1570*57e252bfSMichael Neumann {
1571*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1572*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1573*57e252bfSMichael Neumann 	u8 module_index;
1574*57e252bfSMichael Neumann 	struct atom_memory_info memory_info;
1575*57e252bfSMichael Neumann 	u32 tmp = RREG32(GENERAL_PWRMGT);
1576*57e252bfSMichael Neumann 
1577*57e252bfSMichael Neumann 	if (!(tmp & BACKBIAS_PAD_EN)) {
1578*57e252bfSMichael Neumann 		eg_pi->mvdd_high_index = 0;
1579*57e252bfSMichael Neumann 		eg_pi->mvdd_low_index = 1;
1580*57e252bfSMichael Neumann 		pi->mvdd_control = false;
1581*57e252bfSMichael Neumann 		return 0;
1582*57e252bfSMichael Neumann 	}
1583*57e252bfSMichael Neumann 
1584*57e252bfSMichael Neumann 	if (tmp & BACKBIAS_VALUE)
1585*57e252bfSMichael Neumann 		eg_pi->mvdd_high_index = 1;
1586*57e252bfSMichael Neumann 	else
1587*57e252bfSMichael Neumann 		eg_pi->mvdd_high_index = 0;
1588*57e252bfSMichael Neumann 
1589*57e252bfSMichael Neumann 	eg_pi->mvdd_low_index =
1590*57e252bfSMichael Neumann 		(eg_pi->mvdd_high_index == 0) ? 1 : 0;
1591*57e252bfSMichael Neumann 
1592*57e252bfSMichael Neumann 	module_index = rv770_get_memory_module_index(rdev);
1593*57e252bfSMichael Neumann 
1594*57e252bfSMichael Neumann 	if (radeon_atom_get_memory_info(rdev, module_index, &memory_info)) {
1595*57e252bfSMichael Neumann 		pi->mvdd_control = false;
1596*57e252bfSMichael Neumann 		return 0;
1597*57e252bfSMichael Neumann 	}
1598*57e252bfSMichael Neumann 
1599*57e252bfSMichael Neumann 	pi->mvdd_split_frequency =
1600*57e252bfSMichael Neumann 		cypress_get_mclk_split_point(&memory_info);
1601*57e252bfSMichael Neumann 
1602*57e252bfSMichael Neumann 	if (pi->mvdd_split_frequency == 0) {
1603*57e252bfSMichael Neumann 		pi->mvdd_control = false;
1604*57e252bfSMichael Neumann 		return 0;
1605*57e252bfSMichael Neumann 	}
1606*57e252bfSMichael Neumann 
1607*57e252bfSMichael Neumann 	return 0;
1608*57e252bfSMichael Neumann }
1609*57e252bfSMichael Neumann 
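/*
 * Build the SMC state table: voltage tables, thermal protection type,
 * platform flags, the initial and ACPI states, then upload the whole table
 * to SMC SRAM.
 */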
1610*57e252bfSMichael Neumann static int cypress_init_smc_table(struct radeon_device *rdev,
1611*57e252bfSMichael Neumann 				  struct radeon_ps *radeon_boot_state)
1612*57e252bfSMichael Neumann {
1613*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1614*57e252bfSMichael Neumann 	RV770_SMC_STATETABLE *table = &pi->smc_statetable;
1615*57e252bfSMichael Neumann 	int ret;
1616*57e252bfSMichael Neumann 
1617*57e252bfSMichael Neumann 	memset(table, 0, sizeof(RV770_SMC_STATETABLE));
1618*57e252bfSMichael Neumann 
1619*57e252bfSMichael Neumann 	cypress_populate_smc_voltage_tables(rdev, table);
1620*57e252bfSMichael Neumann 
1621*57e252bfSMichael Neumann 	switch (rdev->pm.int_thermal_type) {
1622*57e252bfSMichael Neumann 	case THERMAL_TYPE_EVERGREEN:
1623*57e252bfSMichael Neumann 	case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
1624*57e252bfSMichael Neumann 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1625*57e252bfSMichael Neumann 		break;
1626*57e252bfSMichael Neumann 	case THERMAL_TYPE_NONE:
1627*57e252bfSMichael Neumann 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1628*57e252bfSMichael Neumann 		break;
1629*57e252bfSMichael Neumann 	default:
1630*57e252bfSMichael Neumann 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
1631*57e252bfSMichael Neumann 		break;
1632*57e252bfSMichael Neumann 	}
1633*57e252bfSMichael Neumann 
1634*57e252bfSMichael Neumann 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
1635*57e252bfSMichael Neumann 		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1636*57e252bfSMichael Neumann 
1637*57e252bfSMichael Neumann 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
1638*57e252bfSMichael Neumann 		table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;
1639*57e252bfSMichael Neumann 
1640*57e252bfSMichael Neumann 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1641*57e252bfSMichael Neumann 		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1642*57e252bfSMichael Neumann 
1643*57e252bfSMichael Neumann 	if (pi->mem_gddr5)
1644*57e252bfSMichael Neumann 		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1645*57e252bfSMichael Neumann 
1646*57e252bfSMichael Neumann 	ret = cypress_populate_smc_initial_state(rdev, radeon_boot_state, table);
1647*57e252bfSMichael Neumann 	if (ret)
1648*57e252bfSMichael Neumann 		return ret;
1649*57e252bfSMichael Neumann 
1650*57e252bfSMichael Neumann 	ret = cypress_populate_smc_acpi_state(rdev, table);
1651*57e252bfSMichael Neumann 	if (ret)
1652*57e252bfSMichael Neumann 		return ret;
1653*57e252bfSMichael Neumann 
1654*57e252bfSMichael Neumann 	table->driverState = table->initialState;
1655*57e252bfSMichael Neumann 
1656*57e252bfSMichael Neumann 	return rv770_copy_bytes_to_smc(rdev,
1657*57e252bfSMichael Neumann 				       pi->state_table_start,
1658*57e252bfSMichael Neumann 				       (u8 *)table, sizeof(RV770_SMC_STATETABLE),
1659*57e252bfSMichael Neumann 				       pi->sram_end);
1660*57e252bfSMichael Neumann }
1661*57e252bfSMichael Neumann 
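/*
 * Build the SMC MC register table from the boot state and the captured
 * per-range AC timing entries, then upload it to SMC SRAM.
 */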
1662*57e252bfSMichael Neumann int cypress_populate_mc_reg_table(struct radeon_device *rdev,
1663*57e252bfSMichael Neumann 				  struct radeon_ps *radeon_boot_state)
1664*57e252bfSMichael Neumann {
1665*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1666*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1667*57e252bfSMichael Neumann 	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1668*57e252bfSMichael Neumann 	SMC_Evergreen_MCRegisters mc_reg_table = { 0 };
1669*57e252bfSMichael Neumann 
1670*57e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev,
1671*57e252bfSMichael Neumann 				      RV770_SMC_SOFT_REGISTER_seq_index, 1);
1672*57e252bfSMichael Neumann 
1673*57e252bfSMichael Neumann 	cypress_populate_mc_reg_addresses(rdev, &mc_reg_table);
1674*57e252bfSMichael Neumann 
1675*57e252bfSMichael Neumann 	cypress_convert_mc_reg_table_entry_to_smc(rdev,
1676*57e252bfSMichael Neumann 						  &boot_state->low,
1677*57e252bfSMichael Neumann 						  &mc_reg_table.data[0]);
1678*57e252bfSMichael Neumann 
1679*57e252bfSMichael Neumann 	cypress_convert_mc_registers(&eg_pi->mc_reg_table.mc_reg_table_entry[0],
1680*57e252bfSMichael Neumann 				     &mc_reg_table.data[1], eg_pi->mc_reg_table.last,
1681*57e252bfSMichael Neumann 				     eg_pi->mc_reg_table.valid_flag);
1682*57e252bfSMichael Neumann 
1683*57e252bfSMichael Neumann 	cypress_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, &mc_reg_table);
1684*57e252bfSMichael Neumann 
1685*57e252bfSMichael Neumann 	return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
1686*57e252bfSMichael Neumann 				       (u8 *)&mc_reg_table, sizeof(SMC_Evergreen_MCRegisters),
1687*57e252bfSMichael Neumann 				       pi->sram_end);
1688*57e252bfSMichael Neumann }
1689*57e252bfSMichael Neumann 
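/*
 * Read the state table, soft register and MC register table offsets from
 * the SMC firmware header in SRAM.
 */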
1690*57e252bfSMichael Neumann int cypress_get_table_locations(struct radeon_device *rdev)
1691*57e252bfSMichael Neumann {
1692*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1693*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1694*57e252bfSMichael Neumann 	u32 tmp;
1695*57e252bfSMichael Neumann 	int ret;
1696*57e252bfSMichael Neumann 
1697*57e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
1698*57e252bfSMichael Neumann 					EVERGREEN_SMC_FIRMWARE_HEADER_LOCATION +
1699*57e252bfSMichael Neumann 					EVERGREEN_SMC_FIRMWARE_HEADER_stateTable,
1700*57e252bfSMichael Neumann 					&tmp, pi->sram_end);
1701*57e252bfSMichael Neumann 	if (ret)
1702*57e252bfSMichael Neumann 		return ret;
1703*57e252bfSMichael Neumann 
1704*57e252bfSMichael Neumann 	pi->state_table_start = (u16)tmp;
1705*57e252bfSMichael Neumann 
1706*57e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
1707*57e252bfSMichael Neumann 					EVERGREEN_SMC_FIRMWARE_HEADER_LOCATION +
1708*57e252bfSMichael Neumann 					EVERGREEN_SMC_FIRMWARE_HEADER_softRegisters,
1709*57e252bfSMichael Neumann 					&tmp, pi->sram_end);
1710*57e252bfSMichael Neumann 	if (ret)
1711*57e252bfSMichael Neumann 		return ret;
1712*57e252bfSMichael Neumann 
1713*57e252bfSMichael Neumann 	pi->soft_regs_start = (u16)tmp;
1714*57e252bfSMichael Neumann 
1715*57e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
1716*57e252bfSMichael Neumann 					EVERGREEN_SMC_FIRMWARE_HEADER_LOCATION +
1717*57e252bfSMichael Neumann 					EVERGREEN_SMC_FIRMWARE_HEADER_mcRegisterTable,
1718*57e252bfSMichael Neumann 					&tmp, pi->sram_end);
1719*57e252bfSMichael Neumann 	if (ret)
1720*57e252bfSMichael Neumann 		return ret;
1721*57e252bfSMichael Neumann 
1722*57e252bfSMichael Neumann 	eg_pi->mc_reg_table_start = (u16)tmp;
1723*57e252bfSMichael Neumann 
1724*57e252bfSMichael Neumann 	return 0;
1725*57e252bfSMichael Neumann }
1726*57e252bfSMichael Neumann 
1727*57e252bfSMichael Neumann void cypress_enable_display_gap(struct radeon_device *rdev)
1728*57e252bfSMichael Neumann {
1729*57e252bfSMichael Neumann 	u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
1730*57e252bfSMichael Neumann 
1731*57e252bfSMichael Neumann 	tmp &= ~(DISP1_GAP_MASK | DISP2_GAP_MASK);
1732*57e252bfSMichael Neumann 	tmp |= (DISP1_GAP(R600_PM_DISPLAY_GAP_IGNORE) |
1733*57e252bfSMichael Neumann 		DISP2_GAP(R600_PM_DISPLAY_GAP_IGNORE));
1734*57e252bfSMichael Neumann 
1735*57e252bfSMichael Neumann 	tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
1736*57e252bfSMichael Neumann 	tmp |= (DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK) |
1737*57e252bfSMichael Neumann 		DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE));
1738*57e252bfSMichael Neumann 	WREG32(CG_DISPLAY_GAP_CNTL, tmp);
1739*57e252bfSMichael Neumann }
1740*57e252bfSMichael Neumann 
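/*
 * Program the display gap behaviour based on the number of active CRTCs,
 * repoint the DISP1 slow clock select at an active CRTC if necessary, and
 * notify the SMC of the display change.
 */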
1741*57e252bfSMichael Neumann static void cypress_program_display_gap(struct radeon_device *rdev)
1742*57e252bfSMichael Neumann {
1743*57e252bfSMichael Neumann 	u32 tmp, pipe;
1744*57e252bfSMichael Neumann 	int i;
1745*57e252bfSMichael Neumann 
1746*57e252bfSMichael Neumann 	tmp = RREG32(CG_DISPLAY_GAP_CNTL) & ~(DISP1_GAP_MASK | DISP2_GAP_MASK);
1747*57e252bfSMichael Neumann 	if (rdev->pm.dpm.new_active_crtc_count > 0)
1748*57e252bfSMichael Neumann 		tmp |= DISP1_GAP(R600_PM_DISPLAY_GAP_VBLANK_OR_WM);
1749*57e252bfSMichael Neumann 	else
1750*57e252bfSMichael Neumann 		tmp |= DISP1_GAP(R600_PM_DISPLAY_GAP_IGNORE);
1751*57e252bfSMichael Neumann 
1752*57e252bfSMichael Neumann 	if (rdev->pm.dpm.new_active_crtc_count > 1)
1753*57e252bfSMichael Neumann 		tmp |= DISP2_GAP(R600_PM_DISPLAY_GAP_VBLANK_OR_WM);
1754*57e252bfSMichael Neumann 	else
1755*57e252bfSMichael Neumann 		tmp |= DISP2_GAP(R600_PM_DISPLAY_GAP_IGNORE);
1756*57e252bfSMichael Neumann 
1757*57e252bfSMichael Neumann 	WREG32(CG_DISPLAY_GAP_CNTL, tmp);
1758*57e252bfSMichael Neumann 
1759*57e252bfSMichael Neumann 	tmp = RREG32(DCCG_DISP_SLOW_SELECT_REG);
1760*57e252bfSMichael Neumann 	pipe = (tmp & DCCG_DISP1_SLOW_SELECT_MASK) >> DCCG_DISP1_SLOW_SELECT_SHIFT;
1761*57e252bfSMichael Neumann 
1762*57e252bfSMichael Neumann 	if ((rdev->pm.dpm.new_active_crtc_count > 0) &&
1763*57e252bfSMichael Neumann 	    (!(rdev->pm.dpm.new_active_crtcs & (1 << pipe)))) {
1764*57e252bfSMichael Neumann 		/* find the first active crtc */
1765*57e252bfSMichael Neumann 		for (i = 0; i < rdev->num_crtc; i++) {
1766*57e252bfSMichael Neumann 			if (rdev->pm.dpm.new_active_crtcs & (1 << i))
1767*57e252bfSMichael Neumann 				break;
1768*57e252bfSMichael Neumann 		}
1769*57e252bfSMichael Neumann 		if (i == rdev->num_crtc)
1770*57e252bfSMichael Neumann 			pipe = 0;
1771*57e252bfSMichael Neumann 		else
1772*57e252bfSMichael Neumann 			pipe = i;
1773*57e252bfSMichael Neumann 
1774*57e252bfSMichael Neumann 		tmp &= ~DCCG_DISP1_SLOW_SELECT_MASK;
1775*57e252bfSMichael Neumann 		tmp |= DCCG_DISP1_SLOW_SELECT(pipe);
1776*57e252bfSMichael Neumann 		WREG32(DCCG_DISP_SLOW_SELECT_REG, tmp);
1777*57e252bfSMichael Neumann 	}
1778*57e252bfSMichael Neumann 
1779*57e252bfSMichael Neumann 	cypress_notify_smc_display_change(rdev, rdev->pm.dpm.new_active_crtc_count > 0);
1780*57e252bfSMichael Neumann }
1781*57e252bfSMichael Neumann 
1782*57e252bfSMichael Neumann void cypress_dpm_setup_asic(struct radeon_device *rdev)
1783*57e252bfSMichael Neumann {
1784*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1785*57e252bfSMichael Neumann 
1786*57e252bfSMichael Neumann 	rv740_read_clock_registers(rdev);
1787*57e252bfSMichael Neumann 	rv770_read_voltage_smio_registers(rdev);
1788*57e252bfSMichael Neumann 	rv770_get_max_vddc(rdev);
1789*57e252bfSMichael Neumann 	rv770_get_memory_type(rdev);
1790*57e252bfSMichael Neumann 
1791*57e252bfSMichael Neumann 	if (eg_pi->pcie_performance_request) {
1792*57e252bfSMichael Neumann 		eg_pi->pcie_performance_request_registered = false;
1793*57e252bfSMichael Neumann 		cypress_advertise_gen2_capability(rdev);
1794*57e252bfSMichael Neumann 	}
1796*57e252bfSMichael Neumann 
1797*57e252bfSMichael Neumann 	rv770_get_pcie_gen2_status(rdev);
1798*57e252bfSMichael Neumann 
1799*57e252bfSMichael Neumann 	rv770_enable_acpi_pm(rdev);
1800*57e252bfSMichael Neumann }
1801*57e252bfSMichael Neumann 
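/*
 * Bring up dynamic power management: construct the voltage and MC register
 * tables, program the static PM parameters, upload the firmware and SMC
 * tables, start the SMC and DPM, and enable clock gating and the thermal
 * interrupt where supported.
 */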
1802*57e252bfSMichael Neumann int cypress_dpm_enable(struct radeon_device *rdev)
1803*57e252bfSMichael Neumann {
1804*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1805*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1806*57e252bfSMichael Neumann 	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
1807*57e252bfSMichael Neumann 	int ret;
1808*57e252bfSMichael Neumann 
1809*57e252bfSMichael Neumann 	if (pi->gfx_clock_gating)
1810*57e252bfSMichael Neumann 		rv770_restore_cgcg(rdev);
1811*57e252bfSMichael Neumann 
1812*57e252bfSMichael Neumann 	if (rv770_dpm_enabled(rdev))
1813*57e252bfSMichael Neumann 		return -EINVAL;
1814*57e252bfSMichael Neumann 
1815*57e252bfSMichael Neumann 	if (pi->voltage_control) {
1816*57e252bfSMichael Neumann 		rv770_enable_voltage_control(rdev, true);
1817*57e252bfSMichael Neumann 		ret = cypress_construct_voltage_tables(rdev);
1818*57e252bfSMichael Neumann 		if (ret) {
1819*57e252bfSMichael Neumann 			DRM_ERROR("cypress_construct_voltage_tables failed\n");
1820*57e252bfSMichael Neumann 			return ret;
1821*57e252bfSMichael Neumann 		}
1822*57e252bfSMichael Neumann 	}
1823*57e252bfSMichael Neumann 
1824*57e252bfSMichael Neumann 	if (pi->mvdd_control) {
1825*57e252bfSMichael Neumann 		ret = cypress_get_mvdd_configuration(rdev);
1826*57e252bfSMichael Neumann 		if (ret) {
1827*57e252bfSMichael Neumann 			DRM_ERROR("cypress_get_mvdd_configuration failed\n");
1828*57e252bfSMichael Neumann 			return ret;
1829*57e252bfSMichael Neumann 		}
1830*57e252bfSMichael Neumann 	}
1831*57e252bfSMichael Neumann 
1832*57e252bfSMichael Neumann 	if (eg_pi->dynamic_ac_timing) {
1833*57e252bfSMichael Neumann 		cypress_set_mc_reg_address_table(rdev);
1834*57e252bfSMichael Neumann 		cypress_force_mc_use_s0(rdev, boot_ps);
1835*57e252bfSMichael Neumann 		ret = cypress_initialize_mc_reg_table(rdev);
1836*57e252bfSMichael Neumann 		if (ret)
1837*57e252bfSMichael Neumann 			eg_pi->dynamic_ac_timing = false;
1838*57e252bfSMichael Neumann 		cypress_force_mc_use_s1(rdev, boot_ps);
1839*57e252bfSMichael Neumann 	}
1840*57e252bfSMichael Neumann 
1841*57e252bfSMichael Neumann 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
1842*57e252bfSMichael Neumann 		rv770_enable_backbias(rdev, true);
1843*57e252bfSMichael Neumann 
1844*57e252bfSMichael Neumann 	if (pi->dynamic_ss)
1845*57e252bfSMichael Neumann 		cypress_enable_spread_spectrum(rdev, true);
1846*57e252bfSMichael Neumann 
1847*57e252bfSMichael Neumann 	if (pi->thermal_protection)
1848*57e252bfSMichael Neumann 		rv770_enable_thermal_protection(rdev, true);
1849*57e252bfSMichael Neumann 
1850*57e252bfSMichael Neumann 	rv770_setup_bsp(rdev);
1851*57e252bfSMichael Neumann 	rv770_program_git(rdev);
1852*57e252bfSMichael Neumann 	rv770_program_tp(rdev);
1853*57e252bfSMichael Neumann 	rv770_program_tpp(rdev);
1854*57e252bfSMichael Neumann 	rv770_program_sstp(rdev);
1855*57e252bfSMichael Neumann 	rv770_program_engine_speed_parameters(rdev);
1856*57e252bfSMichael Neumann 	cypress_enable_display_gap(rdev);
1857*57e252bfSMichael Neumann 	rv770_program_vc(rdev);
1858*57e252bfSMichael Neumann 
1859*57e252bfSMichael Neumann 	if (pi->dynamic_pcie_gen2)
1860*57e252bfSMichael Neumann 		cypress_enable_dynamic_pcie_gen2(rdev, true);
1861*57e252bfSMichael Neumann 
1862*57e252bfSMichael Neumann 	ret = rv770_upload_firmware(rdev);
1863*57e252bfSMichael Neumann 	if (ret) {
1864*57e252bfSMichael Neumann 		DRM_ERROR("rv770_upload_firmware failed\n");
1865*57e252bfSMichael Neumann 		return ret;
1866*57e252bfSMichael Neumann 	}
1867*57e252bfSMichael Neumann 
1868*57e252bfSMichael Neumann 	ret = cypress_get_table_locations(rdev);
1869*57e252bfSMichael Neumann 	if (ret) {
1870*57e252bfSMichael Neumann 		DRM_ERROR("cypress_get_table_locations failed\n");
1871*57e252bfSMichael Neumann 		return ret;
1872*57e252bfSMichael Neumann 	}
1873*57e252bfSMichael Neumann 	ret = cypress_init_smc_table(rdev, boot_ps);
1874*57e252bfSMichael Neumann 	if (ret) {
1875*57e252bfSMichael Neumann 		DRM_ERROR("cypress_init_smc_table failed\n");
1876*57e252bfSMichael Neumann 		return ret;
1877*57e252bfSMichael Neumann 	}
1878*57e252bfSMichael Neumann 	if (eg_pi->dynamic_ac_timing) {
1879*57e252bfSMichael Neumann 		ret = cypress_populate_mc_reg_table(rdev, boot_ps);
1880*57e252bfSMichael Neumann 		if (ret) {
1881*57e252bfSMichael Neumann 			DRM_ERROR("cypress_populate_mc_reg_table failed\n");
1882*57e252bfSMichael Neumann 			return ret;
1883*57e252bfSMichael Neumann 		}
1884*57e252bfSMichael Neumann 	}
1885*57e252bfSMichael Neumann 
1886*57e252bfSMichael Neumann 	cypress_program_response_times(rdev);
1887*57e252bfSMichael Neumann 
1888*57e252bfSMichael Neumann 	r7xx_start_smc(rdev);
1889*57e252bfSMichael Neumann 
1890*57e252bfSMichael Neumann 	ret = cypress_notify_smc_display_change(rdev, false);
1891*57e252bfSMichael Neumann 	if (ret) {
1892*57e252bfSMichael Neumann 		DRM_ERROR("cypress_notify_smc_display_change failed\n");
1893*57e252bfSMichael Neumann 		return ret;
1894*57e252bfSMichael Neumann 	}
1895*57e252bfSMichael Neumann 	cypress_enable_sclk_control(rdev, true);
1896*57e252bfSMichael Neumann 
1897*57e252bfSMichael Neumann 	if (eg_pi->memory_transition)
1898*57e252bfSMichael Neumann 		cypress_enable_mclk_control(rdev, true);
1899*57e252bfSMichael Neumann 
1900*57e252bfSMichael Neumann 	cypress_start_dpm(rdev);
1901*57e252bfSMichael Neumann 
1902*57e252bfSMichael Neumann 	if (pi->gfx_clock_gating)
1903*57e252bfSMichael Neumann 		cypress_gfx_clock_gating_enable(rdev, true);
1904*57e252bfSMichael Neumann 
1905*57e252bfSMichael Neumann 	if (pi->mg_clock_gating)
1906*57e252bfSMichael Neumann 		cypress_mg_clock_gating_enable(rdev, true);
1907*57e252bfSMichael Neumann 
1908*57e252bfSMichael Neumann 	if (rdev->irq.installed &&
1909*57e252bfSMichael Neumann 	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
1910*57e252bfSMichael Neumann 		PPSMC_Result result;
1911*57e252bfSMichael Neumann 
1912*57e252bfSMichael Neumann 		ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX);
1913*57e252bfSMichael Neumann 		if (ret)
1914*57e252bfSMichael Neumann 			return ret;
1915*57e252bfSMichael Neumann 		rdev->irq.dpm_thermal = true;
1916*57e252bfSMichael Neumann 		radeon_irq_set(rdev);
1917*57e252bfSMichael Neumann 		result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
1918*57e252bfSMichael Neumann 
1919*57e252bfSMichael Neumann 		if (result != PPSMC_Result_OK)
1920*57e252bfSMichael Neumann 			DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
1921*57e252bfSMichael Neumann 	}
1922*57e252bfSMichael Neumann 
1923*57e252bfSMichael Neumann 	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
1924*57e252bfSMichael Neumann 
1925*57e252bfSMichael Neumann 	return 0;
1926*57e252bfSMichael Neumann }
1927*57e252bfSMichael Neumann 
1928*57e252bfSMichael Neumann void cypress_dpm_disable(struct radeon_device *rdev)
1929*57e252bfSMichael Neumann {
1930*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1931*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1932*57e252bfSMichael Neumann 	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
1933*57e252bfSMichael Neumann 
1934*57e252bfSMichael Neumann 	if (!rv770_dpm_enabled(rdev))
1935*57e252bfSMichael Neumann 		return;
1936*57e252bfSMichael Neumann 
1937*57e252bfSMichael Neumann 	rv770_clear_vc(rdev);
1938*57e252bfSMichael Neumann 
1939*57e252bfSMichael Neumann 	if (pi->thermal_protection)
1940*57e252bfSMichael Neumann 		rv770_enable_thermal_protection(rdev, false);
1941*57e252bfSMichael Neumann 
1942*57e252bfSMichael Neumann 	if (pi->dynamic_pcie_gen2)
1943*57e252bfSMichael Neumann 		cypress_enable_dynamic_pcie_gen2(rdev, false);
1944*57e252bfSMichael Neumann 
1945*57e252bfSMichael Neumann 	if (rdev->irq.installed &&
1946*57e252bfSMichael Neumann 	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
1947*57e252bfSMichael Neumann 		rdev->irq.dpm_thermal = false;
1948*57e252bfSMichael Neumann 		radeon_irq_set(rdev);
1949*57e252bfSMichael Neumann 	}
1950*57e252bfSMichael Neumann 
1951*57e252bfSMichael Neumann 	if (pi->gfx_clock_gating)
1952*57e252bfSMichael Neumann 		cypress_gfx_clock_gating_enable(rdev, false);
1953*57e252bfSMichael Neumann 
1954*57e252bfSMichael Neumann 	if (pi->mg_clock_gating)
1955*57e252bfSMichael Neumann 		cypress_mg_clock_gating_enable(rdev, false);
1956*57e252bfSMichael Neumann 
1957*57e252bfSMichael Neumann 	rv770_stop_dpm(rdev);
1958*57e252bfSMichael Neumann 	r7xx_stop_smc(rdev);
1959*57e252bfSMichael Neumann 
1960*57e252bfSMichael Neumann 	cypress_enable_spread_spectrum(rdev, false);
1961*57e252bfSMichael Neumann 
1962*57e252bfSMichael Neumann 	if (eg_pi->dynamic_ac_timing)
1963*57e252bfSMichael Neumann 		cypress_force_mc_use_s1(rdev, boot_ps);
1964*57e252bfSMichael Neumann 
1965*57e252bfSMichael Neumann 	rv770_reset_smio_status(rdev);
1966*57e252bfSMichael Neumann }
1967*57e252bfSMichael Neumann 
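/*
 * Switch to the requested power state: restrict the performance levels and,
 * if supported, request a PCIe link speed change, halt the SMC, upload the
 * new software state (plus the matching MC register table when dynamic AC
 * timing is in use), reprogram the memory timing parameters, then resume
 * the SMC and commit the new state.  UVD clocks are adjusted around the
 * engine clock change, and the performance level is finally returned to
 * automatic selection.
 */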
1968*57e252bfSMichael Neumann int cypress_dpm_set_power_state(struct radeon_device *rdev)
1969*57e252bfSMichael Neumann {
1970*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1971*57e252bfSMichael Neumann 	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
1972*57e252bfSMichael Neumann 	struct radeon_ps *old_ps = rdev->pm.dpm.current_ps;
1973*57e252bfSMichael Neumann 	int ret;
1974*57e252bfSMichael Neumann 
1975*57e252bfSMichael Neumann 	ret = rv770_restrict_performance_levels_before_switch(rdev);
1976*57e252bfSMichael Neumann 	if (ret) {
1977*57e252bfSMichael Neumann 		DRM_ERROR("rv770_restrict_performance_levels_before_switch failed\n");
1978*57e252bfSMichael Neumann 		return ret;
1979*57e252bfSMichael Neumann 	}
1980*57e252bfSMichael Neumann 	if (eg_pi->pcie_performance_request)
1981*57e252bfSMichael Neumann 		cypress_notify_link_speed_change_before_state_change(rdev, new_ps, old_ps);
1982*57e252bfSMichael Neumann 
1983*57e252bfSMichael Neumann 	rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
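	/* keep the SMC halted while the new state and MC timings are uploaded */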
1984*57e252bfSMichael Neumann 	ret = rv770_halt_smc(rdev);
1985*57e252bfSMichael Neumann 	if (ret) {
1986*57e252bfSMichael Neumann 		DRM_ERROR("rv770_halt_smc failed\n");
1987*57e252bfSMichael Neumann 		return ret;
1988*57e252bfSMichael Neumann 	}
1989*57e252bfSMichael Neumann 	ret = cypress_upload_sw_state(rdev, new_ps);
1990*57e252bfSMichael Neumann 	if (ret) {
1991*57e252bfSMichael Neumann 		DRM_ERROR("cypress_upload_sw_state failed\n");
1992*57e252bfSMichael Neumann 		return ret;
1993*57e252bfSMichael Neumann 	}
1994*57e252bfSMichael Neumann 	if (eg_pi->dynamic_ac_timing) {
1995*57e252bfSMichael Neumann 		ret = cypress_upload_mc_reg_table(rdev, new_ps);
1996*57e252bfSMichael Neumann 		if (ret) {
1997*57e252bfSMichael Neumann 			DRM_ERROR("cypress_upload_mc_reg_table failed\n");
1998*57e252bfSMichael Neumann 			return ret;
1999*57e252bfSMichael Neumann 		}
2000*57e252bfSMichael Neumann 	}
2001*57e252bfSMichael Neumann 
2002*57e252bfSMichael Neumann 	cypress_program_memory_timing_parameters(rdev, new_ps);
2003*57e252bfSMichael Neumann 
2004*57e252bfSMichael Neumann 	ret = rv770_resume_smc(rdev);
2005*57e252bfSMichael Neumann 	if (ret) {
2006*57e252bfSMichael Neumann 		DRM_ERROR("rv770_resume_smc failed\n");
2007*57e252bfSMichael Neumann 		return ret;
2008*57e252bfSMichael Neumann 	}
2009*57e252bfSMichael Neumann 	ret = rv770_set_sw_state(rdev);
2010*57e252bfSMichael Neumann 	if (ret) {
2011*57e252bfSMichael Neumann 		DRM_ERROR("rv770_set_sw_state failed\n");
2012*57e252bfSMichael Neumann 		return ret;
2013*57e252bfSMichael Neumann 	}
2014*57e252bfSMichael Neumann 	rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
2015*57e252bfSMichael Neumann 
2016*57e252bfSMichael Neumann 	if (eg_pi->pcie_performance_request)
2017*57e252bfSMichael Neumann 		cypress_notify_link_speed_change_after_state_change(rdev, new_ps, old_ps);
2018*57e252bfSMichael Neumann 
2019*57e252bfSMichael Neumann 	ret = rv770_dpm_force_performance_level(rdev, RADEON_DPM_FORCED_LEVEL_AUTO);
2020*57e252bfSMichael Neumann 	if (ret) {
2021*57e252bfSMichael Neumann 		DRM_ERROR("rv770_dpm_force_performance_level failed\n");
2022*57e252bfSMichael Neumann 		return ret;
2023*57e252bfSMichael Neumann 	}
2024*57e252bfSMichael Neumann 
2025*57e252bfSMichael Neumann 	return 0;
2026*57e252bfSMichael Neumann }
2027*57e252bfSMichael Neumann 
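/* Restrict the available performance levels and return to the boot state. */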
2028*57e252bfSMichael Neumann void cypress_dpm_reset_asic(struct radeon_device *rdev)
2029*57e252bfSMichael Neumann {
2030*57e252bfSMichael Neumann 	rv770_restrict_performance_levels_before_switch(rdev);
2031*57e252bfSMichael Neumann 	rv770_set_boot_state(rdev);
2032*57e252bfSMichael Neumann }
2033*57e252bfSMichael Neumann 
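/* Reprogram the display gap parameters when the display configuration changes. */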
2034*57e252bfSMichael Neumann void cypress_dpm_display_configuration_changed(struct radeon_device *rdev)
2035*57e252bfSMichael Neumann {
2036*57e252bfSMichael Neumann 	cypress_program_display_gap(rdev);
2037*57e252bfSMichael Neumann }
2038*57e252bfSMichael Neumann 
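/*
 * One-time DPM setup: allocate the combined evergreen/rv7xx power-info
 * structure, read the maximum VDDC and the PowerPlay table from the VBIOS,
 * and fill in the per-chip defaults (clock gating, thermal protection,
 * dynamic PCIe gen2, AC timing, etc.) used by the enable and
 * set_power_state paths.
 */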
2039*57e252bfSMichael Neumann int cypress_dpm_init(struct radeon_device *rdev)
2040*57e252bfSMichael Neumann {
2041*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi;
2042*57e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi;
2043*57e252bfSMichael Neumann 	struct atom_clock_dividers dividers;
2044*57e252bfSMichael Neumann 	int ret;
2045*57e252bfSMichael Neumann 
2046*57e252bfSMichael Neumann 	eg_pi = kzalloc(sizeof(struct evergreen_power_info), GFP_KERNEL);
2047*57e252bfSMichael Neumann 	if (eg_pi == NULL)
2048*57e252bfSMichael Neumann 		return -ENOMEM;
2049*57e252bfSMichael Neumann 	rdev->pm.dpm.priv = eg_pi;
2050*57e252bfSMichael Neumann 	pi = &eg_pi->rv7xx;
2051*57e252bfSMichael Neumann 
2052*57e252bfSMichael Neumann 	rv770_get_max_vddc(rdev);
2053*57e252bfSMichael Neumann 
2054*57e252bfSMichael Neumann 	eg_pi->ulv.supported = false;
2055*57e252bfSMichael Neumann 	pi->acpi_vddc = 0;
2056*57e252bfSMichael Neumann 	eg_pi->acpi_vddci = 0;
2057*57e252bfSMichael Neumann 	pi->min_vddc_in_table = 0;
2058*57e252bfSMichael Neumann 	pi->max_vddc_in_table = 0;
2059*57e252bfSMichael Neumann 
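	/* build the radeon power-state array from the ATOM PowerPlay table */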
2060*57e252bfSMichael Neumann 	ret = rv7xx_parse_power_table(rdev);
2061*57e252bfSMichael Neumann 	if (ret)
2062*57e252bfSMichael Neumann 		return ret;
2063*57e252bfSMichael Neumann 
2064*57e252bfSMichael Neumann 	if (rdev->pm.dpm.voltage_response_time == 0)
2065*57e252bfSMichael Neumann 		rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
2066*57e252bfSMichael Neumann 	if (rdev->pm.dpm.backbias_response_time == 0)
2067*57e252bfSMichael Neumann 		rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
2068*57e252bfSMichael Neumann 
2069*57e252bfSMichael Neumann 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2070*57e252bfSMichael Neumann 					     0, false, &dividers);
2071*57e252bfSMichael Neumann 	if (ret)
2072*57e252bfSMichael Neumann 		pi->ref_div = dividers.ref_div + 1;
2073*57e252bfSMichael Neumann 	else
2074*57e252bfSMichael Neumann 		pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
2075*57e252bfSMichael Neumann 
2076*57e252bfSMichael Neumann 	pi->mclk_strobe_mode_threshold = 40000;
2077*57e252bfSMichael Neumann 	pi->mclk_edc_enable_threshold = 40000;
2078*57e252bfSMichael Neumann 	eg_pi->mclk_edc_wr_enable_threshold = 40000;
2079*57e252bfSMichael Neumann 
2080*57e252bfSMichael Neumann 	pi->rlp = RV770_RLP_DFLT;
2081*57e252bfSMichael Neumann 	pi->rmp = RV770_RMP_DFLT;
2082*57e252bfSMichael Neumann 	pi->lhp = RV770_LHP_DFLT;
2083*57e252bfSMichael Neumann 	pi->lmp = RV770_LMP_DFLT;
2084*57e252bfSMichael Neumann 
2085*57e252bfSMichael Neumann 	pi->voltage_control =
2086*57e252bfSMichael Neumann 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
2087*57e252bfSMichael Neumann 
2088*57e252bfSMichael Neumann 	pi->mvdd_control =
2089*57e252bfSMichael Neumann 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
2090*57e252bfSMichael Neumann 
2091*57e252bfSMichael Neumann 	eg_pi->vddci_control =
2092*57e252bfSMichael Neumann 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);
2093*57e252bfSMichael Neumann 
2094*57e252bfSMichael Neumann 	rv770_get_engine_memory_ss(rdev);
2095*57e252bfSMichael Neumann 
2096*57e252bfSMichael Neumann 	pi->asi = RV770_ASI_DFLT;
2097*57e252bfSMichael Neumann 	pi->pasi = CYPRESS_HASI_DFLT;
2098*57e252bfSMichael Neumann 	pi->vrc = CYPRESS_VRC_DFLT;
2099*57e252bfSMichael Neumann 
2100*57e252bfSMichael Neumann 	pi->power_gating = false;
2101*57e252bfSMichael Neumann 
2102*57e252bfSMichael Neumann 	if ((rdev->family == CHIP_CYPRESS) ||
2103*57e252bfSMichael Neumann 	    (rdev->family == CHIP_HEMLOCK))
2104*57e252bfSMichael Neumann 		pi->gfx_clock_gating = false;
2105*57e252bfSMichael Neumann 	else
2106*57e252bfSMichael Neumann 		pi->gfx_clock_gating = true;
2107*57e252bfSMichael Neumann 
2108*57e252bfSMichael Neumann 	pi->mg_clock_gating = true;
2109*57e252bfSMichael Neumann 	pi->mgcgtssm = true;
2110*57e252bfSMichael Neumann 	eg_pi->ls_clock_gating = false;
2111*57e252bfSMichael Neumann 	eg_pi->sclk_deep_sleep = false;
2112*57e252bfSMichael Neumann 
2113*57e252bfSMichael Neumann 	pi->dynamic_pcie_gen2 = true;
2114*57e252bfSMichael Neumann 
2115*57e252bfSMichael Neumann 	if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
2116*57e252bfSMichael Neumann 		pi->thermal_protection = true;
2117*57e252bfSMichael Neumann 	else
2118*57e252bfSMichael Neumann 		pi->thermal_protection = false;
2119*57e252bfSMichael Neumann 
2120*57e252bfSMichael Neumann 	pi->display_gap = true;
2121*57e252bfSMichael Neumann 
2122*57e252bfSMichael Neumann 	if (rdev->flags & RADEON_IS_MOBILITY)
2123*57e252bfSMichael Neumann 		pi->dcodt = true;
2124*57e252bfSMichael Neumann 	else
2125*57e252bfSMichael Neumann 		pi->dcodt = false;
2126*57e252bfSMichael Neumann 
2127*57e252bfSMichael Neumann 	pi->ulps = true;
2128*57e252bfSMichael Neumann 
2129*57e252bfSMichael Neumann 	eg_pi->dynamic_ac_timing = true;
2130*57e252bfSMichael Neumann 	eg_pi->abm = true;
2131*57e252bfSMichael Neumann 	eg_pi->mcls = true;
2132*57e252bfSMichael Neumann 	eg_pi->light_sleep = true;
2133*57e252bfSMichael Neumann 	eg_pi->memory_transition = true;
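	/*
	 * PCIe performance requests go through ACPI, so only enable them
	 * when ACPI support is compiled in and the platform advertises it.
	 */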
2134*57e252bfSMichael Neumann #if defined(CONFIG_ACPI)
2135*57e252bfSMichael Neumann 	eg_pi->pcie_performance_request =
2136*57e252bfSMichael Neumann 		radeon_acpi_is_pcie_performance_request_supported(rdev);
2137*57e252bfSMichael Neumann #else
2138*57e252bfSMichael Neumann 	eg_pi->pcie_performance_request = false;
2139*57e252bfSMichael Neumann #endif
2140*57e252bfSMichael Neumann 
2141*57e252bfSMichael Neumann 	if ((rdev->family == CHIP_CYPRESS) ||
2142*57e252bfSMichael Neumann 	    (rdev->family == CHIP_HEMLOCK) ||
2143*57e252bfSMichael Neumann 	    (rdev->family == CHIP_JUNIPER))
2144*57e252bfSMichael Neumann 		eg_pi->dll_default_on = true;
2145*57e252bfSMichael Neumann 	else
2146*57e252bfSMichael Neumann 		eg_pi->dll_default_on = false;
2147*57e252bfSMichael Neumann 
2148*57e252bfSMichael Neumann 	eg_pi->sclk_deep_sleep = false;
2149*57e252bfSMichael Neumann 	pi->mclk_stutter_mode_threshold = 0;
2150*57e252bfSMichael Neumann 
2151*57e252bfSMichael Neumann 	pi->sram_end = SMC_RAM_END;
2152*57e252bfSMichael Neumann 
2153*57e252bfSMichael Neumann 	return 0;
2154*57e252bfSMichael Neumann }
2155*57e252bfSMichael Neumann 
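/* Free the per-state private data, the power-state array, and the power info. */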
2156*57e252bfSMichael Neumann void cypress_dpm_fini(struct radeon_device *rdev)
2157*57e252bfSMichael Neumann {
2158*57e252bfSMichael Neumann 	int i;
2159*57e252bfSMichael Neumann 
2160*57e252bfSMichael Neumann 	for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
2161*57e252bfSMichael Neumann 		kfree(rdev->pm.dpm.ps[i].ps_priv);
2162*57e252bfSMichael Neumann 	}
2163*57e252bfSMichael Neumann 	kfree(rdev->pm.dpm.ps);
2164*57e252bfSMichael Neumann 	kfree(rdev->pm.dpm.priv);
2165*57e252bfSMichael Neumann }
2166*57e252bfSMichael Neumann 
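/*
 * Report the vblank period as "too short" when it falls below the memory
 * clock switch limit (450 for GDDR5, 300 otherwise, compared against the
 * vblank time returned by r600_dpm_get_vblank_time()), presumably so a
 * memory clock switch is only attempted when it can be hidden in the
 * blanking period.
 */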
2167*57e252bfSMichael Neumann bool cypress_dpm_vblank_too_short(struct radeon_device *rdev)
2168*57e252bfSMichael Neumann {
2169*57e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2170*57e252bfSMichael Neumann 	u32 vblank_time = r600_dpm_get_vblank_time(rdev);
2171*57e252bfSMichael Neumann 	u32 switch_limit = pi->mem_gddr5 ? 450 : 300;
2172*57e252bfSMichael Neumann 
2173*57e252bfSMichael Neumann 	if (vblank_time < switch_limit)
2174*57e252bfSMichael Neumann 		return true;
2175*57e252bfSMichael Neumann 	else
2176*57e252bfSMichael Neumann 		return false;
2178*57e252bfSMichael Neumann }
2179