157e252bfSMichael Neumann /*
257e252bfSMichael Neumann * Copyright 2011 Advanced Micro Devices, Inc.
357e252bfSMichael Neumann *
457e252bfSMichael Neumann * Permission is hereby granted, free of charge, to any person obtaining a
557e252bfSMichael Neumann * copy of this software and associated documentation files (the "Software"),
657e252bfSMichael Neumann * to deal in the Software without restriction, including without limitation
757e252bfSMichael Neumann * the rights to use, copy, modify, merge, publish, distribute, sublicense,
857e252bfSMichael Neumann * and/or sell copies of the Software, and to permit persons to whom the
957e252bfSMichael Neumann * Software is furnished to do so, subject to the following conditions:
1057e252bfSMichael Neumann *
1157e252bfSMichael Neumann * The above copyright notice and this permission notice shall be included in
1257e252bfSMichael Neumann * all copies or substantial portions of the Software.
1357e252bfSMichael Neumann *
1457e252bfSMichael Neumann * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
1557e252bfSMichael Neumann * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
1657e252bfSMichael Neumann * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
1757e252bfSMichael Neumann * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
1857e252bfSMichael Neumann * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
1957e252bfSMichael Neumann * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
2057e252bfSMichael Neumann * OTHER DEALINGS IN THE SOFTWARE.
2157e252bfSMichael Neumann *
2257e252bfSMichael Neumann * Authors: Alex Deucher
2357e252bfSMichael Neumann */
2457e252bfSMichael Neumann
25*3f2dd94aSFrançois Tigeot #include <drm/drmP.h>
2657e252bfSMichael Neumann #include "radeon.h"
2757e252bfSMichael Neumann #include "radeon_asic.h"
2857e252bfSMichael Neumann #include "rv6xxd.h"
2957e252bfSMichael Neumann #include "r600_dpm.h"
3057e252bfSMichael Neumann #include "rv6xx_dpm.h"
3157e252bfSMichael Neumann #include "atom.h"
3257e252bfSMichael Neumann #include <linux/seq_file.h>
3357e252bfSMichael Neumann
3457e252bfSMichael Neumann static u32 rv6xx_scale_count_given_unit(struct radeon_device *rdev,
3557e252bfSMichael Neumann u32 unscaled_count, u32 unit);
3657e252bfSMichael Neumann
rv6xx_get_ps(struct radeon_ps * rps)3757e252bfSMichael Neumann static struct rv6xx_ps *rv6xx_get_ps(struct radeon_ps *rps)
3857e252bfSMichael Neumann {
3957e252bfSMichael Neumann struct rv6xx_ps *ps = rps->ps_priv;
4057e252bfSMichael Neumann
4157e252bfSMichael Neumann return ps;
4257e252bfSMichael Neumann }
4357e252bfSMichael Neumann
rv6xx_get_pi(struct radeon_device * rdev)4457e252bfSMichael Neumann static struct rv6xx_power_info *rv6xx_get_pi(struct radeon_device *rdev)
4557e252bfSMichael Neumann {
4657e252bfSMichael Neumann struct rv6xx_power_info *pi = rdev->pm.dpm.priv;
4757e252bfSMichael Neumann
4857e252bfSMichael Neumann return pi;
4957e252bfSMichael Neumann }
5057e252bfSMichael Neumann
rv6xx_force_pcie_gen1(struct radeon_device * rdev)5157e252bfSMichael Neumann static void rv6xx_force_pcie_gen1(struct radeon_device *rdev)
5257e252bfSMichael Neumann {
5357e252bfSMichael Neumann u32 tmp;
5457e252bfSMichael Neumann int i;
5557e252bfSMichael Neumann
5657e252bfSMichael Neumann tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5757e252bfSMichael Neumann tmp &= LC_GEN2_EN;
5857e252bfSMichael Neumann WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
5957e252bfSMichael Neumann
6057e252bfSMichael Neumann tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
6157e252bfSMichael Neumann tmp |= LC_INITIATE_LINK_SPEED_CHANGE;
6257e252bfSMichael Neumann WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
6357e252bfSMichael Neumann
6457e252bfSMichael Neumann for (i = 0; i < rdev->usec_timeout; i++) {
6557e252bfSMichael Neumann if (!(RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL) & LC_CURRENT_DATA_RATE))
6657e252bfSMichael Neumann break;
67c4ef309bSzrj udelay(1);
6857e252bfSMichael Neumann }
6957e252bfSMichael Neumann
7057e252bfSMichael Neumann tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
7157e252bfSMichael Neumann tmp &= ~LC_INITIATE_LINK_SPEED_CHANGE;
7257e252bfSMichael Neumann WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
7357e252bfSMichael Neumann }
7457e252bfSMichael Neumann
/*
 * rv6xx_enable_pcie_gen2_support() - enable gen2 link speed if possible.
 * @rdev: radeon device
 *
 * Sets LC_GEN2_EN only when the link partner both supports gen2 and has
 * actually sent gen2 training sequences; otherwise the register is left
 * untouched.
 */
static void rv6xx_enable_pcie_gen2_support(struct radeon_device *rdev)
{
	u32 tmp;

	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);

	/* only write back if we are actually enabling gen2 */
	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
		tmp |= LC_GEN2_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
	}
}
8757e252bfSMichael Neumann
/*
 * rv6xx_enable_bif_dynamic_pcie_gen2() - toggle HW voltage-interface control.
 * @rdev:   radeon device
 * @enable: true selects control profile 1, false selects profile 0
 */
static void rv6xx_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
					       bool enable)
{
	u32 reg;

	reg = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	reg &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
	reg |= LC_HW_VOLTAGE_IF_CONTROL(enable ? 1 : 0);
	WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, reg);
}
10057e252bfSMichael Neumann
/*
 * rv6xx_enable_l0s() - enable PCIE L0s power state entry.
 * @rdev: radeon device
 *
 * Programs an L0s inactivity threshold of 3 into PCIE_LC_CNTL.
 */
static void rv6xx_enable_l0s(struct radeon_device *rdev)
{
	u32 tmp;

	tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L0S_INACTIVITY_MASK;
	tmp |= LC_L0S_INACTIVITY(3);
	WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
}
10957e252bfSMichael Neumann
/*
 * rv6xx_enable_l1() - enable PCIE L1 power state entry.
 * @rdev: radeon device
 *
 * Sets an L1 inactivity threshold of 4 and clears the bits that block
 * PMI- and ASPM-initiated transitions into L1.
 */
static void rv6xx_enable_l1(struct radeon_device *rdev)
{
	u32 tmp;

	tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	tmp &= ~LC_L1_INACTIVITY_MASK;
	tmp |= LC_L1_INACTIVITY(4);
	/* allow both PMI and ASPM to move the link into L1 */
	tmp &= ~LC_PMI_TO_L1_DIS;
	tmp &= ~LC_ASPM_TO_L1_DIS;
	WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
}
12157e252bfSMichael Neumann
/*
 * rv6xx_enable_pll_sleep_in_l1() - let the PCIE PLL power down in L1/L23.
 * @rdev: radeon device
 *
 * Raises the L1 inactivity threshold to 8, then configures PCIE_P_CNTL so
 * the PLL (and its buffers) power down while the link sits in L1/L23 and
 * the PRX frontend may be shut off.
 */
static void rv6xx_enable_pll_sleep_in_l1(struct radeon_device *rdev)
{
	u32 tmp;

	tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L1_INACTIVITY_MASK;
	tmp |= LC_L1_INACTIVITY(8);
	WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);

	/* NOTE, this is a PCIE indirect reg, not PCIE PORT */
	tmp = RREG32_PCIE(PCIE_P_CNTL);
	tmp |= P_PLL_PWRDN_IN_L1L23;
	tmp &= ~P_PLL_BUF_PDNB;
	tmp &= ~P_PLL_PDNB;
	tmp |= P_ALLOW_PRX_FRONTEND_SHUTOFF;
	WREG32_PCIE(PCIE_P_CNTL, tmp);
}
13857e252bfSMichael Neumann
/*
 * rv6xx_convert_clock_to_stepping() - express an engine clock as a
 * VCO frequency plus post divider.
 * @rdev:  radeon device
 * @clock: target engine clock
 * @step:  output stepping (vco_frequency, post_divider)
 *
 * Returns 0 on success or the error from the atom divider lookup.
 */
static int rv6xx_convert_clock_to_stepping(struct radeon_device *rdev,
					   u32 clock, struct rv6xx_sclk_stepping *step)
{
	int ret;
	struct atom_clock_dividers dividers;

	/* Fix: the dividers argument had been garbled ("÷rs", an HTML
	 * entity mangling of "&dividers") and would not compile.
	 */
	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					     clock, false, &dividers);
	if (ret)
		return ret;

	if (dividers.enable_post_div)
		/* post_div packs lo/hi halves in nibbles; total = 2 + lo + hi */
		step->post_divider = 2 + (dividers.post_div & 0xF) + (dividers.post_div >> 4);
	else
		step->post_divider = 1;

	step->vco_frequency = clock * step->post_divider;

	return 0;
}
15957e252bfSMichael Neumann
/*
 * rv6xx_output_stepping() - program one sclk stepping entry into hardware.
 * @rdev:       radeon device
 * @step_index: engine clock entry slot to program
 * @step:       stepping (VCO frequency + post divider) to program
 *
 * Enables the entry, encodes the post divider as hi/lo nibbles, derives the
 * feedback divider from the VCO frequency and SPLL reference, and sets the
 * step time scaled by the SPLL step unit.
 */
static void rv6xx_output_stepping(struct radeon_device *rdev,
				  u32 step_index, struct rv6xx_sclk_stepping *step)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
	u32 ref_clk = rdev->clock.spll.reference_freq;
	u32 fb_divider;
	/* step time in hardware units, rounded up */
	u32 spll_step_count = rv6xx_scale_count_given_unit(rdev,
							   R600_SPLLSTEPTIME_DFLT *
							   pi->spll_ref_div,
							   R600_SPLLSTEPUNIT_DFLT);

	r600_engine_clock_entry_enable(rdev, step_index, true);
	r600_engine_clock_entry_enable_pulse_skipping(rdev, step_index, false);

	if (step->post_divider == 1)
		r600_engine_clock_entry_enable_post_divider(rdev, step_index, false);
	else {
		/* split (post_divider - 2) into lo/hi halves, hi gets the remainder */
		u32 lo_len = (step->post_divider - 2) / 2;
		u32 hi_len = step->post_divider - 2 - lo_len;

		r600_engine_clock_entry_enable_post_divider(rdev, step_index, true);
		r600_engine_clock_entry_set_post_divider(rdev, step_index, (hi_len << 4) | lo_len);
	}

	/* fb_div = vco * ref_div / ref_clk, scaled down by fb_div_scale */
	fb_divider = ((step->vco_frequency * pi->spll_ref_div) / ref_clk) >>
		pi->fb_div_scale;

	r600_engine_clock_entry_set_reference_divider(rdev, step_index,
						      pi->spll_ref_div - 1);
	r600_engine_clock_entry_set_feedback_divider(rdev, step_index, fb_divider);
	r600_engine_clock_entry_set_step_time(rdev, step_index, spll_step_count);

}
19357e252bfSMichael Neumann
/*
 * rv6xx_next_vco_step() - take one percentage step in VCO frequency.
 * @rdev:           radeon device (unused)
 * @cur:            current stepping
 * @increasing_vco: direction of travel
 * @step_size:      step size in percent
 *
 * The post divider is carried over unchanged. The decreasing case is the
 * exact inverse of the increase, rounded up so repeated steps converge.
 */
static struct rv6xx_sclk_stepping rv6xx_next_vco_step(struct radeon_device *rdev,
						      struct rv6xx_sclk_stepping *cur,
						      bool increasing_vco, u32 step_size)
{
	struct rv6xx_sclk_stepping next = { 0 };

	next.post_divider = cur->post_divider;

	if (increasing_vco)
		next.vco_frequency = (cur->vco_frequency * (100 + step_size)) / 100;
	else
		next.vco_frequency = (cur->vco_frequency * 100 + 99 + step_size) / (100 + step_size);

	return next;
}
20957e252bfSMichael Neumann
rv6xx_can_step_post_div(struct radeon_device * rdev,struct rv6xx_sclk_stepping * cur,struct rv6xx_sclk_stepping * target)21057e252bfSMichael Neumann static bool rv6xx_can_step_post_div(struct radeon_device *rdev,
21157e252bfSMichael Neumann struct rv6xx_sclk_stepping *cur,
21257e252bfSMichael Neumann struct rv6xx_sclk_stepping *target)
21357e252bfSMichael Neumann {
21457e252bfSMichael Neumann return (cur->post_divider > target->post_divider) &&
21557e252bfSMichael Neumann ((cur->vco_frequency * target->post_divider) <=
21657e252bfSMichael Neumann (target->vco_frequency * (cur->post_divider - 1)));
21757e252bfSMichael Neumann }
21857e252bfSMichael Neumann
rv6xx_next_post_div_step(struct radeon_device * rdev,struct rv6xx_sclk_stepping * cur,struct rv6xx_sclk_stepping * target)21957e252bfSMichael Neumann static struct rv6xx_sclk_stepping rv6xx_next_post_div_step(struct radeon_device *rdev,
22057e252bfSMichael Neumann struct rv6xx_sclk_stepping *cur,
22157e252bfSMichael Neumann struct rv6xx_sclk_stepping *target)
22257e252bfSMichael Neumann {
22357e252bfSMichael Neumann struct rv6xx_sclk_stepping next = *cur;
22457e252bfSMichael Neumann
22557e252bfSMichael Neumann while (rv6xx_can_step_post_div(rdev, &next, target))
22657e252bfSMichael Neumann next.post_divider--;
22757e252bfSMichael Neumann
22857e252bfSMichael Neumann return next;
22957e252bfSMichael Neumann }
23057e252bfSMichael Neumann
/*
 * rv6xx_reached_stepping_target() - has @cur met or passed @target's VCO
 * frequency in the direction of travel?
 * @rdev:           radeon device (unused)
 * @cur:            current stepping
 * @target:         target stepping
 * @increasing_vco: direction of travel
 */
static bool rv6xx_reached_stepping_target(struct radeon_device *rdev,
					  struct rv6xx_sclk_stepping *cur,
					  struct rv6xx_sclk_stepping *target,
					  bool increasing_vco)
{
	if (increasing_vco)
		return cur->vco_frequency >= target->vco_frequency;

	return cur->vco_frequency <= target->vco_frequency;
}
23957e252bfSMichael Neumann
/*
 * rv6xx_generate_steps() - program a chain of sclk steps from @low to @high.
 * @rdev:        radeon device
 * @low:         starting engine clock
 * @high:        ending engine clock
 * @start_index: first hardware entry to fill
 * @end_index:   out: last hardware entry written
 *
 * Walks from the low clock to the high clock, preferring post-divider steps
 * when possible and otherwise stepping the VCO by R600_VCOSTEPPCT_DFLT
 * percent, writing each intermediate stepping into consecutive hardware
 * entries. Near the target a small "tiny" step (R600_ENDINGVCOSTEPPCT_DFLT)
 * and, if needed, a final VCO-only step are emitted before the target itself.
 */
static void rv6xx_generate_steps(struct radeon_device *rdev,
				 u32 low, u32 high,
				 u32 start_index, u8 *end_index)
{
	struct rv6xx_sclk_stepping cur;
	struct rv6xx_sclk_stepping target;
	bool increasing_vco;
	u32 step_index = start_index;

	rv6xx_convert_clock_to_stepping(rdev, low, &cur);
	rv6xx_convert_clock_to_stepping(rdev, high, &target);

	/* program the starting point first */
	rv6xx_output_stepping(rdev, step_index++, &cur);

	increasing_vco = (target.vco_frequency >= cur.vco_frequency);

	/* never step the post divider upward; jump straight to the larger one */
	if (target.post_divider > cur.post_divider)
		cur.post_divider = target.post_divider;

	while (1) {
		struct rv6xx_sclk_stepping next;

		/* prefer divider steps; fall back to percentage VCO steps */
		if (rv6xx_can_step_post_div(rdev, &cur, &target))
			next = rv6xx_next_post_div_step(rdev, &cur, &target);
		else
			next = rv6xx_next_vco_step(rdev, &cur, increasing_vco, R600_VCOSTEPPCT_DFLT);

		if (rv6xx_reached_stepping_target(rdev, &next, &target, increasing_vco)) {
			/* a small step just short of the target, stepped back
			 * from the target in the opposite direction */
			struct rv6xx_sclk_stepping tiny =
				rv6xx_next_vco_step(rdev, &target, !increasing_vco, R600_ENDINGVCOSTEPPCT_DFLT);
			tiny.post_divider = next.post_divider;

			/* only emit it if it actually lies beyond cur */
			if (!rv6xx_reached_stepping_target(rdev, &tiny, &cur, !increasing_vco))
				rv6xx_output_stepping(rdev, step_index++, &tiny);

			/* if both divider and VCO still differ, land the VCO
			 * first with the old divider before the final step */
			if ((next.post_divider != target.post_divider) &&
			    (next.vco_frequency != target.vco_frequency)) {
				struct rv6xx_sclk_stepping final_vco;

				final_vco.vco_frequency = target.vco_frequency;
				final_vco.post_divider = next.post_divider;

				rv6xx_output_stepping(rdev, step_index++, &final_vco);
			}

			rv6xx_output_stepping(rdev, step_index++, &target);
			break;
		} else
			rv6xx_output_stepping(rdev, step_index++, &next);

		cur = next;
	}

	*end_index = (u8)step_index - 1;

}
29657e252bfSMichael Neumann
/*
 * rv6xx_generate_single_step() - program one clock into one stepping entry.
 * @rdev:  radeon device
 * @clock: engine clock to program
 * @index: hardware entry to write
 */
static void rv6xx_generate_single_step(struct radeon_device *rdev,
				       u32 clock, u32 index)
{
	struct rv6xx_sclk_stepping stepping;

	/* convert to VCO/post-divider form, then write the entry */
	rv6xx_convert_clock_to_stepping(rdev, clock, &stepping);
	rv6xx_output_stepping(rdev, index, &stepping);
}
30557e252bfSMichael Neumann
/*
 * rv6xx_invalidate_intermediate_steps_range() - disable the entries strictly
 * between @start_index and @end_index (both endpoints stay enabled).
 * @rdev:        radeon device
 * @start_index: first entry of the range (kept)
 * @end_index:   last entry of the range (kept)
 */
static void rv6xx_invalidate_intermediate_steps_range(struct radeon_device *rdev,
						      u32 start_index, u32 end_index)
{
	u32 i = start_index + 1;

	while (i < end_index)
		r600_engine_clock_entry_enable(rdev, i++, false);
}
31457e252bfSMichael Neumann
/*
 * rv6xx_set_engine_spread_spectrum_clk_s() - set the CLKS field of the
 * engine spread spectrum register for stepping entry @index.
 * @rdev:  radeon device
 * @index: engine clock entry
 * @clk_s: spread spectrum step value
 */
static void rv6xx_set_engine_spread_spectrum_clk_s(struct radeon_device *rdev,
						   u32 index, u32 clk_s)
{
	WREG32_P(CG_SPLL_SPREAD_SPECTRUM_LOW + (index * 4),
		 CLKS(clk_s), ~CLKS_MASK);
}
32157e252bfSMichael Neumann
/*
 * rv6xx_set_engine_spread_spectrum_clk_v() - set the CLKV field of the
 * engine spread spectrum register for stepping entry @index.
 * @rdev:  radeon device
 * @index: engine clock entry
 * @clk_v: spread spectrum delta value
 */
static void rv6xx_set_engine_spread_spectrum_clk_v(struct radeon_device *rdev,
						   u32 index, u32 clk_v)
{
	WREG32_P(CG_SPLL_SPREAD_SPECTRUM_LOW + (index * 4),
		 CLKV(clk_v), ~CLKV_MASK);
}
32857e252bfSMichael Neumann
/*
 * rv6xx_enable_engine_spread_spectrum() - toggle spread spectrum on one
 * engine clock stepping entry.
 * @rdev:   radeon device
 * @index:  engine clock entry
 * @enable: set or clear the SSEN bit
 */
static void rv6xx_enable_engine_spread_spectrum(struct radeon_device *rdev,
						u32 index, bool enable)
{
	WREG32_P(CG_SPLL_SPREAD_SPECTRUM_LOW + (index * 4),
		 enable ? SSEN : 0, ~SSEN);
}
33957e252bfSMichael Neumann
/*
 * rv6xx_set_memory_spread_spectrum_clk_s() - set the CLKS field of the
 * memory PLL spread spectrum register.
 * @rdev:  radeon device
 * @clk_s: spread spectrum step value
 */
static void rv6xx_set_memory_spread_spectrum_clk_s(struct radeon_device *rdev,
						   u32 clk_s)
{
	WREG32_P(CG_MPLL_SPREAD_SPECTRUM, CLKS(clk_s), ~CLKS_MASK);
}
34557e252bfSMichael Neumann
/*
 * rv6xx_set_memory_spread_spectrum_clk_v() - set the CLKV field of the
 * memory PLL spread spectrum register.
 * @rdev:  radeon device
 * @clk_v: spread spectrum delta value
 */
static void rv6xx_set_memory_spread_spectrum_clk_v(struct radeon_device *rdev,
						   u32 clk_v)
{
	WREG32_P(CG_MPLL_SPREAD_SPECTRUM, CLKV(clk_v), ~CLKV_MASK);
}
35157e252bfSMichael Neumann
/*
 * rv6xx_enable_memory_spread_spectrum() - toggle spread spectrum on the
 * memory PLL.
 * @rdev:   radeon device
 * @enable: set or clear the SSEN bit
 */
static void rv6xx_enable_memory_spread_spectrum(struct radeon_device *rdev,
						bool enable)
{
	WREG32_P(CG_MPLL_SPREAD_SPECTRUM, enable ? SSEN : 0, ~SSEN);
}
36057e252bfSMichael Neumann
/*
 * rv6xx_enable_dynamic_spread_spectrum() - toggle dynamic spread spectrum
 * in the power management block.
 * @rdev:   radeon device
 * @enable: set or clear DYN_SPREAD_SPECTRUM_EN
 */
static void rv6xx_enable_dynamic_spread_spectrum(struct radeon_device *rdev,
						 bool enable)
{
	WREG32_P(GENERAL_PWRMGT, enable ? DYN_SPREAD_SPECTRUM_EN : 0,
		 ~DYN_SPREAD_SPECTRUM_EN);
}
36957e252bfSMichael Neumann
/*
 * rv6xx_memory_clock_entry_enable_post_divider() - toggle the MPLL post
 * divider for memory clock level @index.
 * @rdev:   radeon device
 * @index:  memory clock level
 * @enable: set or clear LEVEL0_MPLL_DIV_EN
 */
static void rv6xx_memory_clock_entry_enable_post_divider(struct radeon_device *rdev,
							 u32 index, bool enable)
{
	WREG32_P(MPLL_FREQ_LEVEL_0 + (index * 4),
		 enable ? LEVEL0_MPLL_DIV_EN : 0, ~LEVEL0_MPLL_DIV_EN);
}
37957e252bfSMichael Neumann
/*
 * rv6xx_memory_clock_entry_set_post_divider() - program the MPLL post
 * divider for memory clock level @index.
 * @rdev:    radeon device
 * @index:   memory clock level
 * @divider: post divider value
 */
static void rv6xx_memory_clock_entry_set_post_divider(struct radeon_device *rdev,
						      u32 index, u32 divider)
{
	WREG32_P(MPLL_FREQ_LEVEL_0 + (index * 4),
		 LEVEL0_MPLL_POST_DIV(divider), ~LEVEL0_MPLL_POST_DIV_MASK);
}
38657e252bfSMichael Neumann
/*
 * rv6xx_memory_clock_entry_set_feedback_divider() - program the MPLL
 * feedback divider for memory clock level @index.
 * @rdev:    radeon device
 * @index:   memory clock level
 * @divider: feedback divider value
 */
static void rv6xx_memory_clock_entry_set_feedback_divider(struct radeon_device *rdev,
							  u32 index, u32 divider)
{
	WREG32_P(MPLL_FREQ_LEVEL_0 + (index * 4), LEVEL0_MPLL_FB_DIV(divider),
		 ~LEVEL0_MPLL_FB_DIV_MASK);
}
39357e252bfSMichael Neumann
/*
 * rv6xx_memory_clock_entry_set_reference_divider() - program the MPLL
 * reference divider for memory clock level @index.
 * @rdev:    radeon device
 * @index:   memory clock level
 * @divider: reference divider value
 */
static void rv6xx_memory_clock_entry_set_reference_divider(struct radeon_device *rdev,
							   u32 index, u32 divider)
{
	WREG32_P(MPLL_FREQ_LEVEL_0 + (index * 4),
		 LEVEL0_MPLL_REF_DIV(divider), ~LEVEL0_MPLL_REF_DIV_MASK);
}
40057e252bfSMichael Neumann
/*
 * rv6xx_vid_response_set_brt() - program the BRT field of the VID
 * response-time register.
 * @rdev: radeon device
 * @rt:   response time value
 */
static void rv6xx_vid_response_set_brt(struct radeon_device *rdev, u32 rt)
{
	WREG32_P(VID_RT, BRT(rt), ~BRT_MASK);
}
40557e252bfSMichael Neumann
/*
 * rv6xx_enable_engine_feedback_and_reference_sync() - synchronize the SPLL
 * feedback and reference dividers by setting SPLL_DIV_SYNC.
 * @rdev: radeon device
 */
static void rv6xx_enable_engine_feedback_and_reference_sync(struct radeon_device *rdev)
{
	WREG32_P(SPLL_CNTL_MODE, SPLL_DIV_SYNC, ~SPLL_DIV_SYNC);
}
41057e252bfSMichael Neumann
/*
 * rv6xx_clocks_per_unit() - clocks represented by one count at @unit.
 * @unit: scaling unit exponent
 *
 * Each unit step quadruples the count, i.e. 4^unit.
 */
static u32 rv6xx_clocks_per_unit(u32 unit)
{
	return 1 << (2 * unit);
}
41757e252bfSMichael Neumann
/*
 * rv6xx_scale_count_given_unit() - convert a raw clock count into units.
 * @rdev:           radeon device (unused)
 * @unscaled_count: count in clocks
 * @unit:           scaling unit exponent
 *
 * Returns the count expressed in units of rv6xx_clocks_per_unit(unit),
 * rounded up so partial units still register.
 */
static u32 rv6xx_scale_count_given_unit(struct radeon_device *rdev,
					u32 unscaled_count, u32 unit)
{
	u32 per_unit = rv6xx_clocks_per_unit(unit);

	return (unscaled_count + per_unit - 1) / per_unit;
}
42557e252bfSMichael Neumann
/*
 * rv6xx_compute_count_for_delay() - express a microsecond delay as a
 * scaled reference-clock count.
 * @rdev:     radeon device
 * @delay_us: delay in microseconds
 * @unit:     scaling unit exponent
 */
static u32 rv6xx_compute_count_for_delay(struct radeon_device *rdev,
					 u32 delay_us, u32 unit)
{
	/* ref_clk / 100 converts the reference frequency to clocks per us
	 * (ref_clk is in 10 kHz units) — presumably; TODO confirm units */
	u32 clocks = delay_us * (rdev->clock.spll.reference_freq / 100);

	return rv6xx_scale_count_given_unit(rdev, clocks, unit);
}
43357e252bfSMichael Neumann
rv6xx_calculate_engine_speed_stepping_parameters(struct radeon_device * rdev,struct rv6xx_ps * state)43457e252bfSMichael Neumann static void rv6xx_calculate_engine_speed_stepping_parameters(struct radeon_device *rdev,
43557e252bfSMichael Neumann struct rv6xx_ps *state)
43657e252bfSMichael Neumann {
43757e252bfSMichael Neumann struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
43857e252bfSMichael Neumann
43957e252bfSMichael Neumann pi->hw.sclks[R600_POWER_LEVEL_LOW] =
44057e252bfSMichael Neumann state->low.sclk;
44157e252bfSMichael Neumann pi->hw.sclks[R600_POWER_LEVEL_MEDIUM] =
44257e252bfSMichael Neumann state->medium.sclk;
44357e252bfSMichael Neumann pi->hw.sclks[R600_POWER_LEVEL_HIGH] =
44457e252bfSMichael Neumann state->high.sclk;
44557e252bfSMichael Neumann
44657e252bfSMichael Neumann pi->hw.low_sclk_index = R600_POWER_LEVEL_LOW;
44757e252bfSMichael Neumann pi->hw.medium_sclk_index = R600_POWER_LEVEL_MEDIUM;
44857e252bfSMichael Neumann pi->hw.high_sclk_index = R600_POWER_LEVEL_HIGH;
44957e252bfSMichael Neumann }
45057e252bfSMichael Neumann
rv6xx_calculate_memory_clock_stepping_parameters(struct radeon_device * rdev,struct rv6xx_ps * state)45157e252bfSMichael Neumann static void rv6xx_calculate_memory_clock_stepping_parameters(struct radeon_device *rdev,
45257e252bfSMichael Neumann struct rv6xx_ps *state)
45357e252bfSMichael Neumann {
45457e252bfSMichael Neumann struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
45557e252bfSMichael Neumann
45657e252bfSMichael Neumann pi->hw.mclks[R600_POWER_LEVEL_CTXSW] =
45757e252bfSMichael Neumann state->high.mclk;
45857e252bfSMichael Neumann pi->hw.mclks[R600_POWER_LEVEL_HIGH] =
45957e252bfSMichael Neumann state->high.mclk;
46057e252bfSMichael Neumann pi->hw.mclks[R600_POWER_LEVEL_MEDIUM] =
46157e252bfSMichael Neumann state->medium.mclk;
46257e252bfSMichael Neumann pi->hw.mclks[R600_POWER_LEVEL_LOW] =
46357e252bfSMichael Neumann state->low.mclk;
46457e252bfSMichael Neumann
46557e252bfSMichael Neumann pi->hw.high_mclk_index = R600_POWER_LEVEL_HIGH;
46657e252bfSMichael Neumann
46757e252bfSMichael Neumann if (state->high.mclk == state->medium.mclk)
46857e252bfSMichael Neumann pi->hw.medium_mclk_index =
46957e252bfSMichael Neumann pi->hw.high_mclk_index;
47057e252bfSMichael Neumann else
47157e252bfSMichael Neumann pi->hw.medium_mclk_index = R600_POWER_LEVEL_MEDIUM;
47257e252bfSMichael Neumann
47357e252bfSMichael Neumann
47457e252bfSMichael Neumann if (state->medium.mclk == state->low.mclk)
47557e252bfSMichael Neumann pi->hw.low_mclk_index =
47657e252bfSMichael Neumann pi->hw.medium_mclk_index;
47757e252bfSMichael Neumann else
47857e252bfSMichael Neumann pi->hw.low_mclk_index = R600_POWER_LEVEL_LOW;
47957e252bfSMichael Neumann }
48057e252bfSMichael Neumann
rv6xx_calculate_voltage_stepping_parameters(struct radeon_device * rdev,struct rv6xx_ps * state)48157e252bfSMichael Neumann static void rv6xx_calculate_voltage_stepping_parameters(struct radeon_device *rdev,
48257e252bfSMichael Neumann struct rv6xx_ps *state)
48357e252bfSMichael Neumann {
48457e252bfSMichael Neumann struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
48557e252bfSMichael Neumann
48657e252bfSMichael Neumann pi->hw.vddc[R600_POWER_LEVEL_CTXSW] = state->high.vddc;
48757e252bfSMichael Neumann pi->hw.vddc[R600_POWER_LEVEL_HIGH] = state->high.vddc;
48857e252bfSMichael Neumann pi->hw.vddc[R600_POWER_LEVEL_MEDIUM] = state->medium.vddc;
48957e252bfSMichael Neumann pi->hw.vddc[R600_POWER_LEVEL_LOW] = state->low.vddc;
49057e252bfSMichael Neumann
49157e252bfSMichael Neumann pi->hw.backbias[R600_POWER_LEVEL_CTXSW] =
49257e252bfSMichael Neumann (state->high.flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? true : false;
49357e252bfSMichael Neumann pi->hw.backbias[R600_POWER_LEVEL_HIGH] =
49457e252bfSMichael Neumann (state->high.flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? true : false;
49557e252bfSMichael Neumann pi->hw.backbias[R600_POWER_LEVEL_MEDIUM] =
49657e252bfSMichael Neumann (state->medium.flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? true : false;
49757e252bfSMichael Neumann pi->hw.backbias[R600_POWER_LEVEL_LOW] =
49857e252bfSMichael Neumann (state->low.flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? true : false;
49957e252bfSMichael Neumann
50057e252bfSMichael Neumann pi->hw.pcie_gen2[R600_POWER_LEVEL_HIGH] =
50157e252bfSMichael Neumann (state->high.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? true : false;
50257e252bfSMichael Neumann pi->hw.pcie_gen2[R600_POWER_LEVEL_MEDIUM] =
50357e252bfSMichael Neumann (state->medium.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? true : false;
50457e252bfSMichael Neumann pi->hw.pcie_gen2[R600_POWER_LEVEL_LOW] =
50557e252bfSMichael Neumann (state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? true : false;
50657e252bfSMichael Neumann
50757e252bfSMichael Neumann pi->hw.high_vddc_index = R600_POWER_LEVEL_HIGH;
50857e252bfSMichael Neumann
50957e252bfSMichael Neumann if ((state->high.vddc == state->medium.vddc) &&
51057e252bfSMichael Neumann ((state->high.flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ==
51157e252bfSMichael Neumann (state->medium.flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE)))
51257e252bfSMichael Neumann pi->hw.medium_vddc_index =
51357e252bfSMichael Neumann pi->hw.high_vddc_index;
51457e252bfSMichael Neumann else
51557e252bfSMichael Neumann pi->hw.medium_vddc_index = R600_POWER_LEVEL_MEDIUM;
51657e252bfSMichael Neumann
51757e252bfSMichael Neumann if ((state->medium.vddc == state->low.vddc) &&
51857e252bfSMichael Neumann ((state->medium.flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ==
51957e252bfSMichael Neumann (state->low.flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE)))
52057e252bfSMichael Neumann pi->hw.low_vddc_index =
52157e252bfSMichael Neumann pi->hw.medium_vddc_index;
52257e252bfSMichael Neumann else
52357e252bfSMichael Neumann pi->hw.medium_vddc_index = R600_POWER_LEVEL_LOW;
52457e252bfSMichael Neumann }
52557e252bfSMichael Neumann
/*
 * Compute the PLL VCO frequency from the reference clock and the ATOM
 * clock dividers: ref * fb_div / (ref_div + 1), with the feedback
 * divider's LSB masked off and scaled by fb_divider_scale.
 */
static inline u32 rv6xx_calculate_vco_frequency(u32 ref_clock,
						struct atom_clock_dividers *dividers,
						u32 fb_divider_scale)
{
	u32 fb_div = (dividers->fb_div & ~1) << fb_divider_scale;

	return ref_clock * fb_div / (dividers->ref_div + 1);
}
53357e252bfSMichael Neumann
/*
 * Compute the CLK_V (spread spectrum step size) register value for the
 * given VCO frequency, reference frequency, SS modulation rate and SS
 * percentage.
 *
 * NOTE(review): the constants (4, 5375, 10, 4096) are fixed-point
 * scaling factors from the hardware programming spec; their derivation
 * is not documented here, so the arithmetic is kept verbatim.
 */
static inline u32 rv6xx_calculate_spread_spectrum_clk_v(u32 vco_freq, u32 ref_freq,
							u32 ss_rate, u32 ss_percent,
							u32 fb_divider_scale)
{
	/* effective feedback divider for this VCO/reference pair */
	u32 fb_divider = vco_freq / ref_freq;

	return (ss_percent * ss_rate * 4 * (fb_divider * fb_divider) /
		(5375 * ((vco_freq * 10) / (4096 >> fb_divider_scale))));
}
54357e252bfSMichael Neumann
/*
 * Compute the CLK_S (spread spectrum period) register value from the
 * SS modulation rate and the reference frequency.
 */
static inline u32 rv6xx_calculate_spread_spectrum_clk_s(u32 ss_rate, u32 ref_freq)
{
	u32 period = (ref_freq * 10) / (ss_rate * 2);

	return (period - 1) / 4;
}
54857e252bfSMichael Neumann
/*
 * Program engine (sclk) spread spectrum for one power level.
 *
 * SS is first disabled for @level and only re-enabled when all of the
 * following hold: @clock is non-zero, sclk SS is enabled in the power
 * info, the engine PLL dividers for @clock can be computed, and the
 * ATOM tables report internal-engine SS data for the resulting VCO
 * frequency.
 */
static void rv6xx_program_engine_spread_spectrum(struct radeon_device *rdev,
						 u32 clock, enum r600_power_level level)
{
	u32 ref_clk = rdev->clock.spll.reference_freq;
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
	struct atom_clock_dividers dividers;
	struct radeon_atom_ss ss;
	u32 vco_freq, clk_v, clk_s;

	/* start from a known state: SS off for this level */
	rv6xx_enable_engine_spread_spectrum(rdev, level, false);

	if (clock && pi->sclk_ss) {
		if (radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, clock, false, &dividers) == 0) {
			vco_freq = rv6xx_calculate_vco_frequency(ref_clk, &dividers,
								 pi->fb_div_scale);

			if (radeon_atombios_get_asic_ss_info(rdev, &ss,
							     ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
				/* ref_clk / (ref_div + 1) is the post-ref-divider clock */
				clk_v = rv6xx_calculate_spread_spectrum_clk_v(vco_freq,
									      (ref_clk / (dividers.ref_div + 1)),
									      ss.rate,
									      ss.percentage,
									      pi->fb_div_scale);

				clk_s = rv6xx_calculate_spread_spectrum_clk_s(ss.rate,
									      (ref_clk / (dividers.ref_div + 1)));

				/* program step/period, then enable SS last */
				rv6xx_set_engine_spread_spectrum_clk_v(rdev, level, clk_v);
				rv6xx_set_engine_spread_spectrum_clk_s(rdev, level, clk_s);
				rv6xx_enable_engine_spread_spectrum(rdev, level, true);
			}
		}
	}
}
58357e252bfSMichael Neumann
/*
 * Program engine spread spectrum for the HIGH and MEDIUM power levels.
 * The LOW level is handled separately by
 * rv6xx_program_sclk_spread_spectrum_parameters_lowest_entry().
 */
static void rv6xx_program_sclk_spread_spectrum_parameters_except_lowest_entry(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	rv6xx_program_engine_spread_spectrum(rdev,
					     pi->hw.sclks[R600_POWER_LEVEL_HIGH],
					     R600_POWER_LEVEL_HIGH);

	rv6xx_program_engine_spread_spectrum(rdev,
					     pi->hw.sclks[R600_POWER_LEVEL_MEDIUM],
					     R600_POWER_LEVEL_MEDIUM);

}
59757e252bfSMichael Neumann
/*
 * Program one memory clock stepping entry.
 *
 * Computes the memory PLL dividers for @clock via the ATOM tables and
 * writes the reference/feedback/post dividers and the post-divider
 * enable flag into stepping entry @entry.
 *
 * Returns 0 on success, -EINVAL if the dividers cannot be computed.
 */
static int rv6xx_program_mclk_stepping_entry(struct radeon_device *rdev,
					     u32 entry, u32 clock)
{
	struct atom_clock_dividers dividers;

	if (radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM, clock, false, &dividers))
		return -EINVAL;

	rv6xx_memory_clock_entry_set_reference_divider(rdev, entry, dividers.ref_div);
	rv6xx_memory_clock_entry_set_feedback_divider(rdev, entry, dividers.fb_div);
	rv6xx_memory_clock_entry_set_post_divider(rdev, entry, dividers.post_div);

	/* pass the flag straight through instead of an if/else on true/false */
	rv6xx_memory_clock_entry_enable_post_divider(rdev, entry,
						     dividers.enable_post_div ? true : false);

	return 0;
}
61857e252bfSMichael Neumann
/*
 * Program the memory clock stepping entries 1..N-1 (entry 0, the lowest,
 * is handled separately).  Entries with a zero mclk are skipped.
 */
static void rv6xx_program_mclk_stepping_parameters_except_lowest_entry(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
	int i;

	for (i = 1; i < R600_PM_NUMBER_OF_MCLKS; i++) {
		if (pi->hw.mclks[i])
			rv6xx_program_mclk_stepping_entry(rdev, i,
							  pi->hw.mclks[i]);
	}
}
63057e252bfSMichael Neumann
/*
 * Running-maximum helper: if the memory PLL dividers for
 * @requested_memory_clock yield a VCO frequency higher than *@vco_freq,
 * update *@dividers and *@vco_freq with the new winner.  Nothing is
 * changed when the dividers cannot be computed or the candidate does
 * not beat the current maximum.
 */
static void rv6xx_find_memory_clock_with_highest_vco(struct radeon_device *rdev,
						     u32 requested_memory_clock,
						     u32 ref_clk,
						     struct atom_clock_dividers *dividers,
						     u32 *vco_freq)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
	struct atom_clock_dividers req_dividers;
	u32 candidate;

	if (radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
					   requested_memory_clock, false, &req_dividers))
		return;

	candidate = rv6xx_calculate_vco_frequency(ref_clk, &req_dividers,
						  pi->fb_div_scale);
	if (candidate <= *vco_freq)
		return;

	*dividers = req_dividers;
	*vco_freq = candidate;
}
65257e252bfSMichael Neumann
/*
 * Program memory clock spread spectrum.
 *
 * SS is first disabled; when mclk SS is enabled, the high/medium/low
 * mclk entries are scanned for the one producing the highest VCO
 * frequency, and SS is programmed and re-enabled only if a VCO was
 * found and the ATOM tables report internal-memory SS data for it.
 */
static void rv6xx_program_mclk_spread_spectrum_parameters(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
	u32 ref_clk = rdev->clock.mpll.reference_freq;
	struct atom_clock_dividers dividers;
	struct radeon_atom_ss ss;
	u32 vco_freq = 0, clk_v, clk_s;

	/* start from a known state: memory SS off */
	rv6xx_enable_memory_spread_spectrum(rdev, false);

	if (pi->mclk_ss) {
		/* keep the dividers/vco_freq of whichever level wins */
		rv6xx_find_memory_clock_with_highest_vco(rdev,
							 pi->hw.mclks[pi->hw.high_mclk_index],
							 ref_clk,
							 &dividers,
							 &vco_freq);

		rv6xx_find_memory_clock_with_highest_vco(rdev,
							 pi->hw.mclks[pi->hw.medium_mclk_index],
							 ref_clk,
							 &dividers,
							 &vco_freq);

		rv6xx_find_memory_clock_with_highest_vco(rdev,
							 pi->hw.mclks[pi->hw.low_mclk_index],
							 ref_clk,
							 &dividers,
							 &vco_freq);

		if (vco_freq) {
			if (radeon_atombios_get_asic_ss_info(rdev, &ss,
							     ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
				clk_v = rv6xx_calculate_spread_spectrum_clk_v(vco_freq,
									      (ref_clk / (dividers.ref_div + 1)),
									      ss.rate,
									      ss.percentage,
									      pi->fb_div_scale);

				clk_s = rv6xx_calculate_spread_spectrum_clk_s(ss.rate,
									      (ref_clk / (dividers.ref_div + 1)));

				/* program step/period, then enable SS last */
				rv6xx_set_memory_spread_spectrum_clk_v(rdev, clk_v);
				rv6xx_set_memory_spread_spectrum_clk_s(rdev, clk_s);
				rv6xx_enable_memory_spread_spectrum(rdev, true);
			}
		}
	}
}
70157e252bfSMichael Neumann
/*
 * Program one voltage stepping entry.
 *
 * Looks up the GPIO pin settings for @voltage (VDDC) via the ATOM
 * tables and programs them into stepping entry @entry.
 *
 * Returns 0 on success or the error from the GPIO lookup.
 */
static int rv6xx_program_voltage_stepping_entry(struct radeon_device *rdev,
						u32 entry, u16 voltage)
{
	u32 mask, set_pins;
	int ret;

	ret = radeon_atom_get_voltage_gpio_settings(rdev, voltage,
						    SET_VOLTAGE_TYPE_ASIC_VDDC,
						    &set_pins, &mask);
	if (ret)
		return ret;

	r600_voltage_control_program_voltages(rdev, entry, set_pins);

	return 0;
}
71857e252bfSMichael Neumann
/*
 * Program the voltage stepping entries 1..N-1 (entry 0, the lowest,
 * is handled separately).
 */
static void rv6xx_program_voltage_stepping_parameters_except_lowest_entry(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
	int i;

	for (i = 1; i < R600_PM_NUMBER_OF_VOLTAGE_LEVELS; i++)
		rv6xx_program_voltage_stepping_entry(rdev, i,
						     pi->hw.vddc[i]);

}
72957e252bfSMichael Neumann
rv6xx_program_backbias_stepping_parameters_except_lowest_entry(struct radeon_device * rdev)73057e252bfSMichael Neumann static void rv6xx_program_backbias_stepping_parameters_except_lowest_entry(struct radeon_device *rdev)
73157e252bfSMichael Neumann {
73257e252bfSMichael Neumann struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
73357e252bfSMichael Neumann
73457e252bfSMichael Neumann if (pi->hw.backbias[1])
73557e252bfSMichael Neumann WREG32_P(VID_UPPER_GPIO_CNTL, MEDIUM_BACKBIAS_VALUE, ~MEDIUM_BACKBIAS_VALUE);
73657e252bfSMichael Neumann else
73757e252bfSMichael Neumann WREG32_P(VID_UPPER_GPIO_CNTL, 0, ~MEDIUM_BACKBIAS_VALUE);
73857e252bfSMichael Neumann
73957e252bfSMichael Neumann if (pi->hw.backbias[2])
74057e252bfSMichael Neumann WREG32_P(VID_UPPER_GPIO_CNTL, HIGH_BACKBIAS_VALUE, ~HIGH_BACKBIAS_VALUE);
74157e252bfSMichael Neumann else
74257e252bfSMichael Neumann WREG32_P(VID_UPPER_GPIO_CNTL, 0, ~HIGH_BACKBIAS_VALUE);
74357e252bfSMichael Neumann }
74457e252bfSMichael Neumann
/* Program engine spread spectrum for the LOW power level only. */
static void rv6xx_program_sclk_spread_spectrum_parameters_lowest_entry(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	rv6xx_program_engine_spread_spectrum(rdev,
					     pi->hw.sclks[R600_POWER_LEVEL_LOW],
					     R600_POWER_LEVEL_LOW);
}
75357e252bfSMichael Neumann
/* Program mclk stepping entry 0 (the lowest), if it has a clock set. */
static void rv6xx_program_mclk_stepping_parameters_lowest_entry(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	if (pi->hw.mclks[0])
		rv6xx_program_mclk_stepping_entry(rdev, 0,
						  pi->hw.mclks[0]);
}
76257e252bfSMichael Neumann
/* Program voltage stepping entry 0 (the lowest). */
static void rv6xx_program_voltage_stepping_parameters_lowest_entry(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	rv6xx_program_voltage_stepping_entry(rdev, 0,
					     pi->hw.vddc[0]);

}
77157e252bfSMichael Neumann
rv6xx_program_backbias_stepping_parameters_lowest_entry(struct radeon_device * rdev)77257e252bfSMichael Neumann static void rv6xx_program_backbias_stepping_parameters_lowest_entry(struct radeon_device *rdev)
77357e252bfSMichael Neumann {
77457e252bfSMichael Neumann struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
77557e252bfSMichael Neumann
77657e252bfSMichael Neumann if (pi->hw.backbias[0])
77757e252bfSMichael Neumann WREG32_P(VID_UPPER_GPIO_CNTL, LOW_BACKBIAS_VALUE, ~LOW_BACKBIAS_VALUE);
77857e252bfSMichael Neumann else
77957e252bfSMichael Neumann WREG32_P(VID_UPPER_GPIO_CNTL, 0, ~LOW_BACKBIAS_VALUE);
78057e252bfSMichael Neumann }
78157e252bfSMichael Neumann
/*
 * Derive the memory refresh rate value for @engine_clock from the DRAM
 * row count (RAMCFG) and the refresh interval field of MC_SEQ_RESERVE_M.
 */
static u32 calculate_memory_refresh_rate(struct radeon_device *rdev,
					 u32 engine_clock)
{
	u32 row_bits, dram_rows, dram_refresh_rate;

	row_bits = (RREG32(RAMCFG) & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
	dram_rows = 1 << (row_bits + 10);

	dram_refresh_rate = 1 << ((RREG32(MC_SEQ_RESERVE_M) & 0x3) + 3);

	return ((engine_clock * 10) * dram_refresh_rate / dram_rows - 32) / 64;
}
79457e252bfSMichael Neumann
/*
 * Program the engine/DRAM timing, SQM ratio and ARB refresh rate
 * registers for the three power levels.
 *
 * high_clock is clamped to low_sclk * 0xFF / 0x40 so that the
 * 64 * high_clock / sclk ratios below fit in the 8-bit STATEn fields.
 */
static void rv6xx_program_memory_timing_parameters(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
	u32 sqm_ratio;
	u32 arb_refresh_rate;
	u32 high_clock;

	if (pi->hw.sclks[R600_POWER_LEVEL_HIGH] <
	    (pi->hw.sclks[R600_POWER_LEVEL_LOW] * 0xFF / 0x40))
		high_clock = pi->hw.sclks[R600_POWER_LEVEL_HIGH];
	else
		high_clock =
			pi->hw.sclks[R600_POWER_LEVEL_LOW] * 0xFF / 0x40;

	radeon_atom_set_engine_dram_timings(rdev, high_clock, 0);

	/* states 2 and 3 both use the HIGH level sclk */
	sqm_ratio = (STATE0(64 * high_clock / pi->hw.sclks[R600_POWER_LEVEL_LOW]) |
		     STATE1(64 * high_clock / pi->hw.sclks[R600_POWER_LEVEL_MEDIUM]) |
		     STATE2(64 * high_clock / pi->hw.sclks[R600_POWER_LEVEL_HIGH]) |
		     STATE3(64 * high_clock / pi->hw.sclks[R600_POWER_LEVEL_HIGH]));
	WREG32(SQM_RATIO, sqm_ratio);

	/* one refresh rate per power mode; modes 2 and 3 share HIGH */
	arb_refresh_rate =
		(POWERMODE0(calculate_memory_refresh_rate(rdev,
							  pi->hw.sclks[R600_POWER_LEVEL_LOW])) |
		 POWERMODE1(calculate_memory_refresh_rate(rdev,
							  pi->hw.sclks[R600_POWER_LEVEL_MEDIUM])) |
		 POWERMODE2(calculate_memory_refresh_rate(rdev,
							  pi->hw.sclks[R600_POWER_LEVEL_HIGH])) |
		 POWERMODE3(calculate_memory_refresh_rate(rdev,
							  pi->hw.sclks[R600_POWER_LEVEL_HIGH])));
	WREG32(ARB_RFSH_RATE, arb_refresh_rate);
}
82857e252bfSMichael Neumann
/*
 * Program the memory PLL lock time (scaled by the mpll reference
 * divider) and the default reset time.
 */
static void rv6xx_program_mpll_timing_parameters(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	r600_set_mpll_lock_time(rdev, R600_MPLLLOCKTIME_DFLT *
				pi->mpll_ref_div);
	r600_set_mpll_reset_time(rdev, R600_MPLLRESETTIME_DFLT);
}
83757e252bfSMichael Neumann
/*
 * Compute and program the BSP/BSU (behavior sampling period/unit) from
 * the default activity sampling interval and the spll reference clock.
 * The computed values are cached in the power info for later use
 * (e.g. by rv6xx_program_at()).
 */
static void rv6xx_program_bsp(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
	u32 ref_clk = rdev->clock.spll.reference_freq;

	r600_calculate_u_and_p(R600_ASI_DFLT,
			       ref_clk, 16,
			       &pi->bsp,
			       &pi->bsu);

	r600_set_bsp(rdev, pi->bsu, pi->bsp);
}
85057e252bfSMichael Neumann
/*
 * Program the activity thresholds, scaling the percent-based lp/rp
 * values (computed by rv6xx_calculate_ap()) by the sampling period.
 */
static void rv6xx_program_at(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	r600_set_at(rdev,
		    (pi->hw.rp[0] * pi->bsp) / 200,
		    (pi->hw.rp[1] * pi->bsp) / 200,
		    (pi->hw.lp[2] * pi->bsp) / 200,
		    (pi->hw.lp[1] * pi->bsp) / 200);
}
86157e252bfSMichael Neumann
/* Program the default GIT (gui idle time) value. */
static void rv6xx_program_git(struct radeon_device *rdev)
{
	r600_set_git(rdev, R600_GICST_DFLT);
}
86657e252bfSMichael Neumann
/*
 * Program the default up/down temperature-based thermal coefficients
 * for every TC slot and select the default TD mode.
 */
static void rv6xx_program_tp(struct radeon_device *rdev)
{
	int i;

	for (i = 0; i < R600_PM_NUMBER_OF_TC; i++)
		r600_set_tc(rdev, i, r600_utc[i], r600_dtc[i]);

	r600_select_td(rdev, R600_TD_DFLT);
}
87657e252bfSMichael Neumann
/* Program the default voltage-change (VRC) setting. */
static void rv6xx_program_vc(struct radeon_device *rdev)
{
	r600_set_vrc(rdev, R600_VRC_DFLT);
}
88157e252bfSMichael Neumann
/* Clear the voltage-change (VRC) setting. */
static void rv6xx_clear_vc(struct radeon_device *rdev)
{
	r600_set_vrc(rdev, 0);
}
88657e252bfSMichael Neumann
/* Program the default TPU/TPC (thermal protection) values. */
static void rv6xx_program_tpp(struct radeon_device *rdev)
{
	r600_set_tpu(rdev, R600_TPU_DFLT);
	r600_set_tpc(rdev, R600_TPC_DFLT);
}
89257e252bfSMichael Neumann
/* Program the default SSTU/SST (state switch time) values. */
static void rv6xx_program_sstp(struct radeon_device *rdev)
{
	r600_set_sstu(rdev, R600_SSTU_DFLT);
	r600_set_sst(rdev, R600_SST_DFLT);
}
89857e252bfSMichael Neumann
/* Program the default FCTU/FCT (fetch clock timing) values. */
static void rv6xx_program_fcp(struct radeon_device *rdev)
{
	r600_set_fctu(rdev, R600_FCTU_DFLT);
	r600_set_fct(rdev, R600_FCT_DFLT);
}
90457e252bfSMichael Neumann
/* Program the default VDDC 3D out-of-range and CTXCGTT3D parameters. */
static void rv6xx_program_vddc3d_parameters(struct radeon_device *rdev)
{
	r600_set_vddc3d_oorsu(rdev, R600_VDDC3DOORSU_DFLT);
	r600_set_vddc3d_oorphc(rdev, R600_VDDC3DOORPHC_DFLT);
	r600_set_vddc3d_oorsdc(rdev, R600_VDDC3DOORSDC_DFLT);
	r600_set_ctxcgtt3d_rphc(rdev, R600_CTXCGTT3DRPHC_DFLT);
	r600_set_ctxcgtt3d_rsdc(rdev, R600_CTXCGTT3DRSDC_DFLT);
}
91357e252bfSMichael Neumann
/*
 * Program the voltage response unit/time (VRU/VRT) and the backbias
 * response time (BRT), converting the dpm response times (in delay
 * units) to hardware counts.
 */
static void rv6xx_program_voltage_timing_parameters(struct radeon_device *rdev)
{
	u32 rt;

	r600_vid_rt_set_vru(rdev, R600_VRU_DFLT);

	r600_vid_rt_set_vrt(rdev,
			    rv6xx_compute_count_for_delay(rdev,
							  rdev->pm.dpm.voltage_response_time,
							  R600_VRU_DFLT));

	rt = rv6xx_compute_count_for_delay(rdev,
					   rdev->pm.dpm.backbias_response_time,
					   R600_VRU_DFLT);

	/* BRT field is in units of 32 counts, rounded up */
	rv6xx_vid_response_set_brt(rdev, (rt + 0x1F) >> 5);
}
93157e252bfSMichael Neumann
/*
 * Program the default spll step unit and enable engine feedback and
 * reference clock synchronization.
 */
static void rv6xx_program_engine_speed_parameters(struct radeon_device *rdev)
{
	r600_vid_rt_set_ssu(rdev, R600_SPLLSTEPUNIT_DFLT);
	rv6xx_enable_engine_feedback_and_reference_sync(rdev);
}
93757e252bfSMichael Neumann
rv6xx_get_master_voltage_mask(struct radeon_device * rdev)93857e252bfSMichael Neumann static u64 rv6xx_get_master_voltage_mask(struct radeon_device *rdev)
93957e252bfSMichael Neumann {
94057e252bfSMichael Neumann struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
94157e252bfSMichael Neumann u64 master_mask = 0;
94257e252bfSMichael Neumann int i;
94357e252bfSMichael Neumann
94457e252bfSMichael Neumann for (i = 0; i < R600_PM_NUMBER_OF_VOLTAGE_LEVELS; i++) {
94557e252bfSMichael Neumann u32 tmp_mask, tmp_set_pins;
94657e252bfSMichael Neumann int ret;
94757e252bfSMichael Neumann
94857e252bfSMichael Neumann ret = radeon_atom_get_voltage_gpio_settings(rdev,
94957e252bfSMichael Neumann pi->hw.vddc[i],
95057e252bfSMichael Neumann SET_VOLTAGE_TYPE_ASIC_VDDC,
95157e252bfSMichael Neumann &tmp_set_pins, &tmp_mask);
95257e252bfSMichael Neumann
95357e252bfSMichael Neumann if (ret == 0)
95457e252bfSMichael Neumann master_mask |= tmp_mask;
95557e252bfSMichael Neumann }
95657e252bfSMichael Neumann
95757e252bfSMichael Neumann return master_mask;
95857e252bfSMichael Neumann }
95957e252bfSMichael Neumann
/* Enable the GPIO pins used by all VDDC stepping entries. */
static void rv6xx_program_voltage_gpio_pins(struct radeon_device *rdev)
{
	r600_voltage_control_enable_pins(rdev,
					 rv6xx_get_master_voltage_mask(rdev));
}
96557e252bfSMichael Neumann
/*
 * Enable or disable static (non-stepping) voltage control.
 *
 * Enabling sets VDDC to the new state's low-level voltage via ATOM;
 * disabling deactivates static control on all voltage GPIO pins so
 * the hardware stepping logic can drive them.
 */
static void rv6xx_enable_static_voltage_control(struct radeon_device *rdev,
						struct radeon_ps *new_ps,
						bool enable)
{
	struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);

	if (enable)
		radeon_atom_set_voltage(rdev,
					new_state->low.vddc,
					SET_VOLTAGE_TYPE_ASIC_VDDC);
	else
		r600_voltage_control_deactivate_static_control(rdev,
							       rv6xx_get_master_voltage_mask(rdev));
}
98057e252bfSMichael Neumann
/*
 * Enable or disable display-gap based memory reclocking.  When
 * enabling, the gap detection is configured (vblank-or-watermark for
 * both displays, ignore on mclk change, VBI timer) before the
 * USE_DISPLAY_GAP bit is set.
 */
static void rv6xx_enable_display_gap(struct radeon_device *rdev, bool enable)
{
	u32 tmp;

	if (!enable) {
		WREG32_P(MCLK_PWRMGT_CNTL, 0, ~USE_DISPLAY_GAP);
		return;
	}

	tmp = (DISP1_GAP(R600_PM_DISPLAY_GAP_VBLANK_OR_WM) |
	       DISP2_GAP(R600_PM_DISPLAY_GAP_VBLANK_OR_WM) |
	       DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE) |
	       DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE) |
	       VBI_TIMER_COUNT(0x3FFF) |
	       VBI_TIMER_UNIT(7));
	WREG32(CG_DISPLAY_GAP_CNTL, tmp);

	WREG32_P(MCLK_PWRMGT_CNTL, USE_DISPLAY_GAP, ~USE_DISPLAY_GAP);
}
99657e252bfSMichael Neumann
/* Set MEDIUM as the power level entered on state transitions. */
static void rv6xx_program_power_level_enter_state(struct radeon_device *rdev)
{
	r600_power_level_set_enter_index(rdev, R600_POWER_LEVEL_MEDIUM);
}
100157e252bfSMichael Neumann
/*
 * Adjust the left/right activity thresholds (*l, *r, in percent) for a
 * pair of adjacent power levels.
 *
 * @l_f/@h_f: low/high engine clock of the pair
 * @h:        hysteresis parameter
 * @d_l/@d_r: default left/right thresholds (percent)
 *
 * Outputs are left untouched when the denominator is zero.
 * NOTE(review): the derivation of a_n/a_d is undocumented in the
 * original driver; the arithmetic is kept verbatim.
 */
static void rv6xx_calculate_t(u32 l_f, u32 h_f, int h,
			      int d_l, int d_r, u8 *l, u8 *r)
{
	int a_n, a_d, h_r, l_r;

	h_r = d_l;
	l_r = 100 - d_r;

	a_n = (int)h_f * d_l + (int)l_f * (h - d_r);
	a_d = (int)l_f * l_r + (int)h_f * h_r;

	if (a_d != 0) {
		*l = d_l - h_r * a_n / a_d;
		*r = d_r + l_r * a_n / a_d;
	}
}
101857e252bfSMichael Neumann
/*
 * Compute the activity-percentage thresholds (lp/rp arrays) for the
 * low-medium and medium-high level pairs of @state; the endpoints are
 * pinned to 0 (lowest lp) and 100 (highest rp).
 */
static void rv6xx_calculate_ap(struct radeon_device *rdev,
			       struct rv6xx_ps *state)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	pi->hw.lp[0] = 0;
	pi->hw.rp[R600_PM_NUMBER_OF_ACTIVITY_LEVELS - 1]
		= 100;

	rv6xx_calculate_t(state->low.sclk,
			  state->medium.sclk,
			  R600_AH_DFLT,
			  R600_LMP_DFLT,
			  R600_RLP_DFLT,
			  &pi->hw.lp[1],
			  &pi->hw.rp[0]);

	rv6xx_calculate_t(state->medium.sclk,
			  state->high.sclk,
			  R600_AH_DFLT,
			  R600_LHP_DFLT,
			  R600_RMP_DFLT,
			  &pi->hw.lp[2],
			  &pi->hw.rp[1]);

}
104557e252bfSMichael Neumann
/*
 * Compute all stepping parameters (engine speed, memory clock,
 * voltage, activity thresholds) for the new power state and cache
 * them in the power info.
 */
static void rv6xx_calculate_stepping_parameters(struct radeon_device *rdev,
						struct radeon_ps *new_ps)
{
	struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);

	rv6xx_calculate_engine_speed_stepping_parameters(rdev, new_state);
	rv6xx_calculate_memory_clock_stepping_parameters(rdev, new_state);
	rv6xx_calculate_voltage_stepping_parameters(rdev, new_state);
	rv6xx_calculate_ap(rdev, new_state);
}
105657e252bfSMichael Neumann
/*
 * Program all stepping parameters for the non-lowest entries: mclk,
 * voltage (only with GPIO voltage control), backbias, sclk/mclk spread
 * spectrum and memory timing.
 */
static void rv6xx_program_stepping_parameters_except_lowest_entry(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	rv6xx_program_mclk_stepping_parameters_except_lowest_entry(rdev);
	if (pi->voltage_control)
		rv6xx_program_voltage_stepping_parameters_except_lowest_entry(rdev);
	rv6xx_program_backbias_stepping_parameters_except_lowest_entry(rdev);
	rv6xx_program_sclk_spread_spectrum_parameters_except_lowest_entry(rdev);
	rv6xx_program_mclk_spread_spectrum_parameters(rdev);
	rv6xx_program_memory_timing_parameters(rdev);
}
106957e252bfSMichael Neumann
/* Program the lowest entry of the stepping tables (mclk, optional
 * voltage, backbias and sclk spread spectrum). Counterpart of
 * rv6xx_program_stepping_parameters_except_lowest_entry(). */
static void rv6xx_program_stepping_parameters_lowest_entry(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	rv6xx_program_mclk_stepping_parameters_lowest_entry(rdev);
	if (pi->voltage_control)
		rv6xx_program_voltage_stepping_parameters_lowest_entry(rdev);
	rv6xx_program_backbias_stepping_parameters_lowest_entry(rdev);
	rv6xx_program_sclk_spread_spectrum_parameters_lowest_entry(rdev);
}
108057e252bfSMichael Neumann
/* Point the LOW power level at the precomputed low-state indices
 * (voltage, mclk, sclk) and set its watermark and PCIe gen2 setting. */
static void rv6xx_program_power_level_low(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	r600_power_level_set_voltage_index(rdev, R600_POWER_LEVEL_LOW,
					   pi->hw.low_vddc_index);
	r600_power_level_set_mem_clock_index(rdev, R600_POWER_LEVEL_LOW,
					     pi->hw.low_mclk_index);
	r600_power_level_set_eng_clock_index(rdev, R600_POWER_LEVEL_LOW,
					     pi->hw.low_sclk_index);
	r600_power_level_set_watermark_id(rdev, R600_POWER_LEVEL_LOW,
					  R600_DISPLAY_WATERMARK_LOW);
	r600_power_level_set_pcie_gen2(rdev, R600_POWER_LEVEL_LOW,
				       pi->hw.pcie_gen2[R600_POWER_LEVEL_LOW]);
}
109657e252bfSMichael Neumann
/* Force the LOW power level to stepping-table entry 0 (the lowest
 * voltage/mclk/sclk step) while keeping its watermark and the cached
 * PCIe gen2 setting. */
static void rv6xx_program_power_level_low_to_lowest_state(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	r600_power_level_set_voltage_index(rdev, R600_POWER_LEVEL_LOW, 0);
	r600_power_level_set_mem_clock_index(rdev, R600_POWER_LEVEL_LOW, 0);
	r600_power_level_set_eng_clock_index(rdev, R600_POWER_LEVEL_LOW, 0);

	r600_power_level_set_watermark_id(rdev, R600_POWER_LEVEL_LOW,
					  R600_DISPLAY_WATERMARK_LOW);

	r600_power_level_set_pcie_gen2(rdev, R600_POWER_LEVEL_LOW,
				       pi->hw.pcie_gen2[R600_POWER_LEVEL_LOW]);

}
111257e252bfSMichael Neumann
/* Point the MEDIUM power level at the precomputed medium-state indices
 * (voltage, mclk, sclk) and set its watermark and PCIe gen2 setting.
 * Note the watermark used is R600_DISPLAY_WATERMARK_LOW. */
static void rv6xx_program_power_level_medium(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	r600_power_level_set_voltage_index(rdev, R600_POWER_LEVEL_MEDIUM,
					   pi->hw.medium_vddc_index);
	r600_power_level_set_mem_clock_index(rdev, R600_POWER_LEVEL_MEDIUM,
					     pi->hw.medium_mclk_index);
	r600_power_level_set_eng_clock_index(rdev, R600_POWER_LEVEL_MEDIUM,
					     pi->hw.medium_sclk_index);
	r600_power_level_set_watermark_id(rdev, R600_POWER_LEVEL_MEDIUM,
					  R600_DISPLAY_WATERMARK_LOW);
	r600_power_level_set_pcie_gen2(rdev, R600_POWER_LEVEL_MEDIUM,
				       pi->hw.pcie_gen2[R600_POWER_LEVEL_MEDIUM]);
}
112857e252bfSMichael Neumann
/* Reconfigure the MEDIUM power level as a transition step: its memory
 * clock comes from the CTXSW stepping entry (loaded here with the low
 * state's mclk), its engine clock stays at the medium index, engine
 * spread spectrum is disabled, and the LOW level's PCIe gen2 setting
 * is reused. */
static void rv6xx_program_power_level_medium_for_transition(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	/* Stage the low state's mclk in the CTXSW stepping entry. */
	rv6xx_program_mclk_stepping_entry(rdev,
					  R600_POWER_LEVEL_CTXSW,
					  pi->hw.mclks[pi->hw.low_mclk_index]);

	r600_power_level_set_voltage_index(rdev, R600_POWER_LEVEL_MEDIUM, 1);

	r600_power_level_set_mem_clock_index(rdev, R600_POWER_LEVEL_MEDIUM,
					     R600_POWER_LEVEL_CTXSW);
	r600_power_level_set_eng_clock_index(rdev, R600_POWER_LEVEL_MEDIUM,
					     pi->hw.medium_sclk_index);

	r600_power_level_set_watermark_id(rdev, R600_POWER_LEVEL_MEDIUM,
					  R600_DISPLAY_WATERMARK_LOW);

	/* No engine spread spectrum while in the transition level. */
	rv6xx_enable_engine_spread_spectrum(rdev, R600_POWER_LEVEL_MEDIUM, false);

	r600_power_level_set_pcie_gen2(rdev, R600_POWER_LEVEL_MEDIUM,
				       pi->hw.pcie_gen2[R600_POWER_LEVEL_LOW]);
}
115257e252bfSMichael Neumann
/* Point the HIGH power level at the precomputed high-state indices
 * (voltage, mclk, sclk), use the HIGH display watermark, and apply the
 * cached PCIe gen2 setting. */
static void rv6xx_program_power_level_high(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	r600_power_level_set_voltage_index(rdev, R600_POWER_LEVEL_HIGH,
					   pi->hw.high_vddc_index);
	r600_power_level_set_mem_clock_index(rdev, R600_POWER_LEVEL_HIGH,
					     pi->hw.high_mclk_index);
	r600_power_level_set_eng_clock_index(rdev, R600_POWER_LEVEL_HIGH,
					     pi->hw.high_sclk_index);

	r600_power_level_set_watermark_id(rdev, R600_POWER_LEVEL_HIGH,
					  R600_DISPLAY_WATERMARK_HIGH);

	r600_power_level_set_pcie_gen2(rdev, R600_POWER_LEVEL_HIGH,
				       pi->hw.pcie_gen2[R600_POWER_LEVEL_HIGH]);
}
117057e252bfSMichael Neumann
/* Enable or disable backbias pad/DPM control in GENERAL_PWRMGT.
 * On disable, BACKBIAS_VALUE is cleared as well; on enable it is left
 * untouched. */
static void rv6xx_enable_backbias(struct radeon_device *rdev, bool enable)
{
	u32 set_bits = 0;
	u32 clear_mask = BACKBIAS_VALUE | BACKBIAS_PAD_EN | BACKBIAS_DPM_CNTL;

	if (enable) {
		set_bits = BACKBIAS_PAD_EN | BACKBIAS_DPM_CNTL;
		clear_mask = BACKBIAS_PAD_EN | BACKBIAS_DPM_CNTL;
	}

	WREG32_P(GENERAL_PWRMGT, set_bits, ~clear_mask);
}
118057e252bfSMichael Neumann
rv6xx_program_display_gap(struct radeon_device * rdev)118157e252bfSMichael Neumann static void rv6xx_program_display_gap(struct radeon_device *rdev)
118257e252bfSMichael Neumann {
118357e252bfSMichael Neumann u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
118457e252bfSMichael Neumann
118557e252bfSMichael Neumann tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
118657e252bfSMichael Neumann if (rdev->pm.dpm.new_active_crtcs & 1) {
118757e252bfSMichael Neumann tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
118857e252bfSMichael Neumann tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
118957e252bfSMichael Neumann } else if (rdev->pm.dpm.new_active_crtcs & 2) {
119057e252bfSMichael Neumann tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
119157e252bfSMichael Neumann tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
119257e252bfSMichael Neumann } else {
119357e252bfSMichael Neumann tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
119457e252bfSMichael Neumann tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
119557e252bfSMichael Neumann }
119657e252bfSMichael Neumann WREG32(CG_DISPLAY_GAP_CNTL, tmp);
119757e252bfSMichael Neumann }
119857e252bfSMichael Neumann
rv6xx_set_sw_voltage_to_safe(struct radeon_device * rdev,struct radeon_ps * new_ps,struct radeon_ps * old_ps)119957e252bfSMichael Neumann static void rv6xx_set_sw_voltage_to_safe(struct radeon_device *rdev,
120057e252bfSMichael Neumann struct radeon_ps *new_ps,
120157e252bfSMichael Neumann struct radeon_ps *old_ps)
120257e252bfSMichael Neumann {
120357e252bfSMichael Neumann struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
120457e252bfSMichael Neumann struct rv6xx_ps *old_state = rv6xx_get_ps(old_ps);
120557e252bfSMichael Neumann u16 safe_voltage;
120657e252bfSMichael Neumann
120757e252bfSMichael Neumann safe_voltage = (new_state->low.vddc >= old_state->low.vddc) ?
120857e252bfSMichael Neumann new_state->low.vddc : old_state->low.vddc;
120957e252bfSMichael Neumann
121057e252bfSMichael Neumann rv6xx_program_voltage_stepping_entry(rdev, R600_POWER_LEVEL_CTXSW,
121157e252bfSMichael Neumann safe_voltage);
121257e252bfSMichael Neumann
121357e252bfSMichael Neumann WREG32_P(GENERAL_PWRMGT, SW_GPIO_INDEX(R600_POWER_LEVEL_CTXSW),
121457e252bfSMichael Neumann ~SW_GPIO_INDEX_MASK);
121557e252bfSMichael Neumann }
121657e252bfSMichael Neumann
/* Program the CTXSW stepping entry with the old state's low voltage and
 * select it via the SW GPIO index field. */
static void rv6xx_set_sw_voltage_to_low(struct radeon_device *rdev,
					struct radeon_ps *old_ps)
{
	struct rv6xx_ps *old_state = rv6xx_get_ps(old_ps);

	rv6xx_program_voltage_stepping_entry(rdev, R600_POWER_LEVEL_CTXSW,
					     old_state->low.vddc);

	/* Point the SW voltage GPIO selector at the CTXSW entry. */
	WREG32_P(GENERAL_PWRMGT, SW_GPIO_INDEX(R600_POWER_LEVEL_CTXSW),
		 ~SW_GPIO_INDEX_MASK);
}
122857e252bfSMichael Neumann
rv6xx_set_safe_backbias(struct radeon_device * rdev,struct radeon_ps * new_ps,struct radeon_ps * old_ps)122957e252bfSMichael Neumann static void rv6xx_set_safe_backbias(struct radeon_device *rdev,
123057e252bfSMichael Neumann struct radeon_ps *new_ps,
123157e252bfSMichael Neumann struct radeon_ps *old_ps)
123257e252bfSMichael Neumann {
123357e252bfSMichael Neumann struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
123457e252bfSMichael Neumann struct rv6xx_ps *old_state = rv6xx_get_ps(old_ps);
123557e252bfSMichael Neumann
123657e252bfSMichael Neumann if ((new_state->low.flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) &&
123757e252bfSMichael Neumann (old_state->low.flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE))
123857e252bfSMichael Neumann WREG32_P(GENERAL_PWRMGT, BACKBIAS_VALUE, ~BACKBIAS_VALUE);
123957e252bfSMichael Neumann else
124057e252bfSMichael Neumann WREG32_P(GENERAL_PWRMGT, 0, ~BACKBIAS_VALUE);
124157e252bfSMichael Neumann }
124257e252bfSMichael Neumann
rv6xx_set_safe_pcie_gen2(struct radeon_device * rdev,struct radeon_ps * new_ps,struct radeon_ps * old_ps)124357e252bfSMichael Neumann static void rv6xx_set_safe_pcie_gen2(struct radeon_device *rdev,
124457e252bfSMichael Neumann struct radeon_ps *new_ps,
124557e252bfSMichael Neumann struct radeon_ps *old_ps)
124657e252bfSMichael Neumann {
124757e252bfSMichael Neumann struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
124857e252bfSMichael Neumann struct rv6xx_ps *old_state = rv6xx_get_ps(old_ps);
124957e252bfSMichael Neumann
125057e252bfSMichael Neumann if ((new_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) !=
125157e252bfSMichael Neumann (old_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2))
125257e252bfSMichael Neumann rv6xx_force_pcie_gen1(rdev);
125357e252bfSMichael Neumann }
125457e252bfSMichael Neumann
/* Toggle the VOLT_PWRMGT_EN bit in GENERAL_PWRMGT. */
static void rv6xx_enable_dynamic_voltage_control(struct radeon_device *rdev,
						 bool enable)
{
	u32 bits = enable ? VOLT_PWRMGT_EN : 0;

	WREG32_P(GENERAL_PWRMGT, bits, ~VOLT_PWRMGT_EN);
}
126357e252bfSMichael Neumann
/* Toggle the BACKBIAS_DPM_CNTL bit in GENERAL_PWRMGT. */
static void rv6xx_enable_dynamic_backbias_control(struct radeon_device *rdev,
						  bool enable)
{
	u32 bits = enable ? BACKBIAS_DPM_CNTL : 0;

	WREG32_P(GENERAL_PWRMGT, bits, ~BACKBIAS_DPM_CNTL);
}
127257e252bfSMichael Neumann
/* Step the SW-controlled (CTXSW) voltage from @initial_voltage to
 * @target_voltage one regulator step at a time, waiting for the
 * regulator to settle after each step.
 * Returns 0 on success, -EINVAL if the atom voltage queries fail. */
static int rv6xx_step_sw_voltage(struct radeon_device *rdev,
				 u16 initial_voltage,
				 u16 target_voltage)
{
	u16 current_voltage;
	u16 true_target_voltage;
	u16 voltage_step;
	int signed_voltage_step;

	/* Fetch the regulator step size and snap both endpoints to voltages
	 * the regulator can actually produce.
	 * NOTE: the "&curren" in "&current_voltage" had been corrupted into
	 * the HTML entity for '¤'; restored to the address-of expression. */
	if ((radeon_atom_get_voltage_step(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC,
					  &voltage_step)) ||
	    (radeon_atom_round_to_true_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC,
					       initial_voltage, &current_voltage)) ||
	    (radeon_atom_round_to_true_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC,
					       target_voltage, &true_target_voltage)))
		return -EINVAL;

	/* Walk down or up depending on where the target lies. */
	if (true_target_voltage < current_voltage)
		signed_voltage_step = -(int)voltage_step;
	else
		signed_voltage_step = voltage_step;

	while (current_voltage != true_target_voltage) {
		current_voltage += signed_voltage_step;
		rv6xx_program_voltage_stepping_entry(rdev, R600_POWER_LEVEL_CTXSW,
						     current_voltage);
		/* voltage_response_time is in us; round up to whole ms. */
		msleep((rdev->pm.dpm.voltage_response_time + 999) / 1000);
	}

	return 0;
}
130457e252bfSMichael Neumann
rv6xx_step_voltage_if_increasing(struct radeon_device * rdev,struct radeon_ps * new_ps,struct radeon_ps * old_ps)130557e252bfSMichael Neumann static int rv6xx_step_voltage_if_increasing(struct radeon_device *rdev,
130657e252bfSMichael Neumann struct radeon_ps *new_ps,
130757e252bfSMichael Neumann struct radeon_ps *old_ps)
130857e252bfSMichael Neumann {
130957e252bfSMichael Neumann struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
131057e252bfSMichael Neumann struct rv6xx_ps *old_state = rv6xx_get_ps(old_ps);
131157e252bfSMichael Neumann
131257e252bfSMichael Neumann if (new_state->low.vddc > old_state->low.vddc)
131357e252bfSMichael Neumann return rv6xx_step_sw_voltage(rdev,
131457e252bfSMichael Neumann old_state->low.vddc,
131557e252bfSMichael Neumann new_state->low.vddc);
131657e252bfSMichael Neumann
131757e252bfSMichael Neumann return 0;
131857e252bfSMichael Neumann }
131957e252bfSMichael Neumann
rv6xx_step_voltage_if_decreasing(struct radeon_device * rdev,struct radeon_ps * new_ps,struct radeon_ps * old_ps)132057e252bfSMichael Neumann static int rv6xx_step_voltage_if_decreasing(struct radeon_device *rdev,
132157e252bfSMichael Neumann struct radeon_ps *new_ps,
132257e252bfSMichael Neumann struct radeon_ps *old_ps)
132357e252bfSMichael Neumann {
132457e252bfSMichael Neumann struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
132557e252bfSMichael Neumann struct rv6xx_ps *old_state = rv6xx_get_ps(old_ps);
132657e252bfSMichael Neumann
132757e252bfSMichael Neumann if (new_state->low.vddc < old_state->low.vddc)
132857e252bfSMichael Neumann return rv6xx_step_sw_voltage(rdev,
132957e252bfSMichael Neumann old_state->low.vddc,
133057e252bfSMichael Neumann new_state->low.vddc);
133157e252bfSMichael Neumann else
133257e252bfSMichael Neumann return 0;
133357e252bfSMichael Neumann }
133457e252bfSMichael Neumann
rv6xx_enable_high(struct radeon_device * rdev)133557e252bfSMichael Neumann static void rv6xx_enable_high(struct radeon_device *rdev)
133657e252bfSMichael Neumann {
133757e252bfSMichael Neumann struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
133857e252bfSMichael Neumann
133957e252bfSMichael Neumann if ((pi->restricted_levels < 1) ||
134057e252bfSMichael Neumann (pi->restricted_levels == 3))
134157e252bfSMichael Neumann r600_power_level_enable(rdev, R600_POWER_LEVEL_HIGH, true);
134257e252bfSMichael Neumann }
134357e252bfSMichael Neumann
rv6xx_enable_medium(struct radeon_device * rdev)134457e252bfSMichael Neumann static void rv6xx_enable_medium(struct radeon_device *rdev)
134557e252bfSMichael Neumann {
134657e252bfSMichael Neumann struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
134757e252bfSMichael Neumann
134857e252bfSMichael Neumann if (pi->restricted_levels < 2)
134957e252bfSMichael Neumann r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, true);
135057e252bfSMichael Neumann }
135157e252bfSMichael Neumann
/* Route the DPM thermal event source according to the active throttle
 * @sources bitmask and enable/disable thermal protection accordingly. */
static void rv6xx_set_dpm_event_sources(struct radeon_device *rdev, u32 sources)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
	bool want_thermal_protection;
	/* Initialize defensively: the default case leaves the variable
	 * unset (it is not read on that path, but this silences
	 * -Wmaybe-uninitialized and guards future refactors). */
	enum radeon_dpm_event_src dpm_event_src = RADEON_DPM_EVENT_SRC_DIGITAL;

	switch (sources) {
	case 0:
	default:
		want_thermal_protection = false;
		break;
	case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL):
		want_thermal_protection = true;
		dpm_event_src = RADEON_DPM_EVENT_SRC_DIGITAL;
		break;

	case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL):
		want_thermal_protection = true;
		dpm_event_src = RADEON_DPM_EVENT_SRC_EXTERNAL;
		break;

	case ((1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL) |
	      (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL)):
		want_thermal_protection = true;
		dpm_event_src = RADEON_DPM_EVENT_SRC_DIGIAL_OR_EXTERNAL;
		break;
	}

	if (want_thermal_protection) {
		WREG32_P(CG_THERMAL_CTRL, DPM_EVENT_SRC(dpm_event_src), ~DPM_EVENT_SRC_MASK);
		/* Only clear the protection-disable bit when the platform
		 * actually supports thermal protection. */
		if (pi->thermal_protection)
			WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
	} else {
		WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
	}
}
138857e252bfSMichael Neumann
/* Add or remove @source from the active auto-throttle source bitmask and
 * reprogram the DPM event sources, but only when the bit actually
 * changes. */
static void rv6xx_enable_auto_throttle_source(struct radeon_device *rdev,
					      enum radeon_dpm_auto_throttle_src source,
					      bool enable)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
	u32 mask = 1 << source;
	bool active = (pi->active_auto_throttle_sources & mask) != 0;

	if (enable == active)
		return;

	if (enable)
		pi->active_auto_throttle_sources |= mask;
	else
		pi->active_auto_throttle_sources &= ~mask;

	rv6xx_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
}
140757e252bfSMichael Neumann
140857e252bfSMichael Neumann
/* Forward the thermal-protection toggle to the r600 helper, but only
 * when at least one auto-throttle source is active. */
static void rv6xx_enable_thermal_protection(struct radeon_device *rdev,
					    bool enable)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	if (!pi->active_auto_throttle_sources)
		return;

	r600_enable_thermal_protection(rdev, enable);
}
141757e252bfSMichael Neumann
/* Build the sclk stepping entries for the transition from the old
 * state's low sclk to the new state's low sclk, starting at table
 * index 0; the last used index is stored in pi->hw.medium_sclk_index. */
static void rv6xx_generate_transition_stepping(struct radeon_device *rdev,
					       struct radeon_ps *new_ps,
					       struct radeon_ps *old_ps)
{
	struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
	struct rv6xx_ps *old_state = rv6xx_get_ps(old_ps);
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	rv6xx_generate_steps(rdev,
			     old_state->low.sclk,
			     new_state->low.sclk,
			     0, &pi->hw.medium_sclk_index);
}
143157e252bfSMichael Neumann
/* Generate a single stepping entry at index 0 for the new state's low
 * sclk and record index 0 as the low sclk index. */
static void rv6xx_generate_low_step(struct radeon_device *rdev,
				    struct radeon_ps *new_ps)
{
	struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	pi->hw.low_sclk_index = 0;
	rv6xx_generate_single_step(rdev,
				   new_state->low.sclk,
				   0);
}
144357e252bfSMichael Neumann
/* Invalidate the stepping entries between index 0 and the medium sclk
 * index (exclusive range semantics are handled by the range helper). */
static void rv6xx_invalidate_intermediate_steps(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	rv6xx_invalidate_intermediate_steps_range(rdev, 0,
						  pi->hw.medium_sclk_index);
}
145157e252bfSMichael Neumann
/* Build the full sclk stepping table for @new_ps: low→medium steps
 * starting at index 0, then medium→high steps continuing from the
 * medium index. Updates pi->hw.{low,medium,high}_sclk_index. */
static void rv6xx_generate_stepping_table(struct radeon_device *rdev,
					  struct radeon_ps *new_ps)
{
	struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);

	pi->hw.low_sclk_index = 0;

	rv6xx_generate_steps(rdev,
			     new_state->low.sclk,
			     new_state->medium.sclk,
			     0,
			     &pi->hw.medium_sclk_index);
	rv6xx_generate_steps(rdev,
			     new_state->medium.sclk,
			     new_state->high.sclk,
			     pi->hw.medium_sclk_index,
			     &pi->hw.high_sclk_index);
}
147157e252bfSMichael Neumann
/* Enable dynamic spread spectrum, or disable every spread spectrum
 * source: per-level engine SS, dynamic SS and memory SS. */
static void rv6xx_enable_spread_spectrum(struct radeon_device *rdev,
					 bool enable)
{
	if (enable) {
		rv6xx_enable_dynamic_spread_spectrum(rdev, true);
		return;
	}

	rv6xx_enable_engine_spread_spectrum(rdev, R600_POWER_LEVEL_LOW, false);
	rv6xx_enable_engine_spread_spectrum(rdev, R600_POWER_LEVEL_MEDIUM, false);
	rv6xx_enable_engine_spread_spectrum(rdev, R600_POWER_LEVEL_HIGH, false);
	rv6xx_enable_dynamic_spread_spectrum(rdev, false);
	rv6xx_enable_memory_spread_spectrum(rdev, false);
}
148557e252bfSMichael Neumann
rv6xx_reset_lvtm_data_sync(struct radeon_device * rdev)148657e252bfSMichael Neumann static void rv6xx_reset_lvtm_data_sync(struct radeon_device *rdev)
148757e252bfSMichael Neumann {
148857e252bfSMichael Neumann if (ASIC_IS_DCE3(rdev))
148957e252bfSMichael Neumann WREG32_P(DCE3_LVTMA_DATA_SYNCHRONIZATION, LVTMA_PFREQCHG, ~LVTMA_PFREQCHG);
149057e252bfSMichael Neumann else
149157e252bfSMichael Neumann WREG32_P(LVTMA_DATA_SYNCHRONIZATION, LVTMA_PFREQCHG, ~LVTMA_PFREQCHG);
149257e252bfSMichael Neumann }
149357e252bfSMichael Neumann
/* Enable or disable dynamic PCIe gen2 switching. On disable, force
 * gen1 first unless the new state itself requests gen2. */
static void rv6xx_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
					   struct radeon_ps *new_ps,
					   bool enable)
{
	struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);

	if (enable) {
		rv6xx_enable_bif_dynamic_pcie_gen2(rdev, true);
		rv6xx_enable_pcie_gen2_support(rdev);
		r600_enable_dynamic_pcie_gen2(rdev, true);
	} else {
		/* Drop the link to gen1 only if the state doesn't need gen2. */
		if (!(new_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2))
			rv6xx_force_pcie_gen1(rdev);
		rv6xx_enable_bif_dynamic_pcie_gen2(rdev, false);
		r600_enable_dynamic_pcie_gen2(rdev, false);
	}
}
151157e252bfSMichael Neumann
rv6xx_set_uvd_clock_before_set_eng_clock(struct radeon_device * rdev,struct radeon_ps * new_ps,struct radeon_ps * old_ps)151257e252bfSMichael Neumann static void rv6xx_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
151357e252bfSMichael Neumann struct radeon_ps *new_ps,
151457e252bfSMichael Neumann struct radeon_ps *old_ps)
151557e252bfSMichael Neumann {
151657e252bfSMichael Neumann struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
151757e252bfSMichael Neumann struct rv6xx_ps *current_state = rv6xx_get_ps(old_ps);
151857e252bfSMichael Neumann
151957e252bfSMichael Neumann if ((new_ps->vclk == old_ps->vclk) &&
152057e252bfSMichael Neumann (new_ps->dclk == old_ps->dclk))
152157e252bfSMichael Neumann return;
152257e252bfSMichael Neumann
152357e252bfSMichael Neumann if (new_state->high.sclk >= current_state->high.sclk)
152457e252bfSMichael Neumann return;
152557e252bfSMichael Neumann
152657e252bfSMichael Neumann radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
152757e252bfSMichael Neumann }
152857e252bfSMichael Neumann
rv6xx_set_uvd_clock_after_set_eng_clock(struct radeon_device * rdev,struct radeon_ps * new_ps,struct radeon_ps * old_ps)152957e252bfSMichael Neumann static void rv6xx_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
153057e252bfSMichael Neumann struct radeon_ps *new_ps,
153157e252bfSMichael Neumann struct radeon_ps *old_ps)
153257e252bfSMichael Neumann {
153357e252bfSMichael Neumann struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
153457e252bfSMichael Neumann struct rv6xx_ps *current_state = rv6xx_get_ps(old_ps);
153557e252bfSMichael Neumann
153657e252bfSMichael Neumann if ((new_ps->vclk == old_ps->vclk) &&
153757e252bfSMichael Neumann (new_ps->dclk == old_ps->dclk))
153857e252bfSMichael Neumann return;
153957e252bfSMichael Neumann
154057e252bfSMichael Neumann if (new_state->high.sclk < current_state->high.sclk)
154157e252bfSMichael Neumann return;
154257e252bfSMichael Neumann
154357e252bfSMichael Neumann radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
154457e252bfSMichael Neumann }
154557e252bfSMichael Neumann
/*
 * rv6xx_dpm_enable - enable dynamic power management
 * @rdev: radeon device
 *
 * Programs the full DPM setup (backbias, spread spectrum, PLL timing,
 * stepping tables, the three power levels and thermal throttling) from
 * the boot power state, then starts the dynamic PM state machine.
 *
 * Returns 0 on success, -EINVAL if dynamic PM is already running.
 */
int rv6xx_dpm_enable(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;

	/* Refuse to enable twice. */
	if (r600_dynamicpm_enabled(rdev))
		return -EINVAL;

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
		rv6xx_enable_backbias(rdev, true);

	if (pi->dynamic_ss)
		rv6xx_enable_spread_spectrum(rdev, true);

	/* Program the static timing and voltage parameters. */
	rv6xx_program_mpll_timing_parameters(rdev);
	rv6xx_program_bsp(rdev);
	rv6xx_program_git(rdev);
	rv6xx_program_tp(rdev);
	rv6xx_program_tpp(rdev);
	rv6xx_program_sstp(rdev);
	rv6xx_program_fcp(rdev);
	rv6xx_program_vddc3d_parameters(rdev);
	rv6xx_program_voltage_timing_parameters(rdev);
	rv6xx_program_engine_speed_parameters(rdev);

	rv6xx_enable_display_gap(rdev, true);
	if (pi->display_gap == false)
		rv6xx_enable_display_gap(rdev, false);

	rv6xx_program_power_level_enter_state(rdev);

	/* Build the clock stepping tables from the boot state. */
	rv6xx_calculate_stepping_parameters(rdev, boot_ps);

	if (pi->voltage_control)
		rv6xx_program_voltage_gpio_pins(rdev);

	rv6xx_generate_stepping_table(rdev, boot_ps);

	rv6xx_program_stepping_parameters_except_lowest_entry(rdev);
	rv6xx_program_stepping_parameters_lowest_entry(rdev);

	/* Program and enable all three performance levels. */
	rv6xx_program_power_level_low(rdev);
	rv6xx_program_power_level_medium(rdev);
	rv6xx_program_power_level_high(rdev);
	rv6xx_program_vc(rdev);
	rv6xx_program_at(rdev);

	r600_power_level_enable(rdev, R600_POWER_LEVEL_LOW, true);
	r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, true);
	r600_power_level_enable(rdev, R600_POWER_LEVEL_HIGH, true);

	rv6xx_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);

	r600_start_dpm(rdev);

	/* Dynamic features are switched on only after DPM is started. */
	if (pi->voltage_control)
		rv6xx_enable_static_voltage_control(rdev, boot_ps, false);

	if (pi->dynamic_pcie_gen2)
		rv6xx_enable_dynamic_pcie_gen2(rdev, boot_ps, true);

	if (pi->gfx_clock_gating)
		r600_gfx_clockgating_enable(rdev, true);

	return 0;
}
161257e252bfSMichael Neumann
/*
 * rv6xx_dpm_disable - tear down dynamic power management
 * @rdev: radeon device
 *
 * Forces the hardware back to the low power level, disables the
 * medium/high levels and all dynamic features, then stops the DPM
 * state machine. No-op if dynamic PM is not currently enabled.
 */
void rv6xx_dpm_disable(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;

	if (!r600_dynamicpm_enabled(rdev))
		return;

	r600_power_level_enable(rdev, R600_POWER_LEVEL_LOW, true);
	r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, true);
	rv6xx_enable_display_gap(rdev, false);
	rv6xx_clear_vc(rdev);
	r600_set_at(rdev, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF);

	if (pi->thermal_protection)
		r600_enable_thermal_protection(rdev, false);

	/* Settle on the low level before turning the other levels off. */
	r600_wait_for_power_level(rdev, R600_POWER_LEVEL_LOW);
	r600_power_level_enable(rdev, R600_POWER_LEVEL_HIGH, false);
	r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, false);

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
		rv6xx_enable_backbias(rdev, false);

	rv6xx_enable_spread_spectrum(rdev, false);

	if (pi->voltage_control)
		rv6xx_enable_static_voltage_control(rdev, boot_ps, true);

	if (pi->dynamic_pcie_gen2)
		rv6xx_enable_dynamic_pcie_gen2(rdev, boot_ps, false);

	/* Turn off the internal thermal interrupt if it was in use. */
	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		rdev->irq.dpm_thermal = false;
		radeon_irq_set(rdev);
	}

	if (pi->gfx_clock_gating)
		r600_gfx_clockgating_enable(rdev, false);

	r600_stop_dpm(rdev);
}
165657e252bfSMichael Neumann
/*
 * rv6xx_dpm_set_power_state - transition to the requested power state
 * @rdev: radeon device
 *
 * Performs the full low -> transition -> target sequence: forces the
 * hardware to the low level, programs an intermediate "transition"
 * medium level, steps the voltage safely in both directions, then
 * reprograms all three performance levels from the new state and
 * re-enables the dynamic features.
 *
 * Returns 0 on success or a negative error code from voltage stepping.
 */
int rv6xx_dpm_set_power_state(struct radeon_device *rdev)
{
	struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
	struct radeon_ps *old_ps = rdev->pm.dpm.current_ps;
	int ret;

	pi->restricted_levels = 0;

	rv6xx_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);

	/* Force the hardware down to the low level. */
	rv6xx_clear_vc(rdev);
	r600_power_level_enable(rdev, R600_POWER_LEVEL_LOW, true);
	r600_set_at(rdev, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF);

	if (pi->thermal_protection)
		r600_enable_thermal_protection(rdev, false);

	r600_wait_for_power_level(rdev, R600_POWER_LEVEL_LOW);
	r600_power_level_enable(rdev, R600_POWER_LEVEL_HIGH, false);
	r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, false);

	/* Program a transition state into the medium level. */
	rv6xx_generate_transition_stepping(rdev, new_ps, old_ps);
	rv6xx_program_power_level_medium_for_transition(rdev);

	/* Move voltage, backbias and PCIe gen2 to values safe for both
	 * the old and the new state during the switch. */
	if (pi->voltage_control) {
		rv6xx_set_sw_voltage_to_safe(rdev, new_ps, old_ps);
		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
			rv6xx_set_sw_voltage_to_low(rdev, old_ps);
	}

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
		rv6xx_set_safe_backbias(rdev, new_ps, old_ps);

	if (pi->dynamic_pcie_gen2)
		rv6xx_set_safe_pcie_gen2(rdev, new_ps, old_ps);

	if (pi->voltage_control)
		rv6xx_enable_dynamic_voltage_control(rdev, false);

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
		rv6xx_enable_dynamic_backbias_control(rdev, false);

	if (pi->voltage_control) {
		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
			rv6xx_step_voltage_if_increasing(rdev, new_ps, old_ps);
		/* (+999)/1000 rounds up — presumably voltage_response_time
		 * is in microseconds and msleep wants ms; TODO confirm. */
		msleep((rdev->pm.dpm.voltage_response_time + 999) / 1000);
	}

	/* Switch to the transition (medium) level. */
	r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, true);
	r600_power_level_enable(rdev, R600_POWER_LEVEL_LOW, false);
	r600_wait_for_power_level_unequal(rdev, R600_POWER_LEVEL_LOW);

	/* Rebuild the low level from the new state. */
	rv6xx_generate_low_step(rdev, new_ps);
	rv6xx_invalidate_intermediate_steps(rdev);
	rv6xx_calculate_stepping_parameters(rdev, new_ps);
	rv6xx_program_stepping_parameters_lowest_entry(rdev);
	rv6xx_program_power_level_low_to_lowest_state(rdev);

	/* Move onto the new low level so medium/high can be reprogrammed. */
	r600_power_level_enable(rdev, R600_POWER_LEVEL_LOW, true);
	r600_wait_for_power_level(rdev, R600_POWER_LEVEL_LOW);
	r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, false);

	if (pi->voltage_control) {
		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC) {
			ret = rv6xx_step_voltage_if_decreasing(rdev, new_ps, old_ps);
			if (ret)
				return ret;
		}
		rv6xx_enable_dynamic_voltage_control(rdev, true);
	}

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
		rv6xx_enable_dynamic_backbias_control(rdev, true);

	if (pi->dynamic_pcie_gen2)
		rv6xx_enable_dynamic_pcie_gen2(rdev, new_ps, true);

	rv6xx_reset_lvtm_data_sync(rdev);

	/* Program all three levels from the new state and re-enable them. */
	rv6xx_generate_stepping_table(rdev, new_ps);
	rv6xx_program_stepping_parameters_except_lowest_entry(rdev);
	rv6xx_program_power_level_low(rdev);
	rv6xx_program_power_level_medium(rdev);
	rv6xx_program_power_level_high(rdev);
	rv6xx_enable_medium(rdev);
	rv6xx_enable_high(rdev);

	if (pi->thermal_protection)
		rv6xx_enable_thermal_protection(rdev, true);
	rv6xx_program_vc(rdev);
	rv6xx_program_at(rdev);

	rv6xx_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);

	return 0;
}
175457e252bfSMichael Neumann
rv6xx_setup_asic(struct radeon_device * rdev)175557e252bfSMichael Neumann void rv6xx_setup_asic(struct radeon_device *rdev)
175657e252bfSMichael Neumann {
175757e252bfSMichael Neumann r600_enable_acpi_pm(rdev);
175857e252bfSMichael Neumann
175957e252bfSMichael Neumann if (radeon_aspm != 0) {
176057e252bfSMichael Neumann if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L0s)
176157e252bfSMichael Neumann rv6xx_enable_l0s(rdev);
176257e252bfSMichael Neumann if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L1)
176357e252bfSMichael Neumann rv6xx_enable_l1(rdev);
176457e252bfSMichael Neumann if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_TURNOFFPLL_ASPML1)
176557e252bfSMichael Neumann rv6xx_enable_pll_sleep_in_l1(rdev);
176657e252bfSMichael Neumann }
176757e252bfSMichael Neumann }
176857e252bfSMichael Neumann
/* Reprogram the display gap when the display configuration changes. */
void rv6xx_dpm_display_configuration_changed(struct radeon_device *rdev)
{
	rv6xx_program_display_gap(rdev);
}
177357e252bfSMichael Neumann
/* Overlay of the PowerPlay info table revisions found in the VBIOS. */
union power_info {
	struct _ATOM_POWERPLAY_INFO info;
	struct _ATOM_POWERPLAY_INFO_V2 info_2;
	struct _ATOM_POWERPLAY_INFO_V3 info_3;
	struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
	struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
	struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
};
178257e252bfSMichael Neumann
/* Overlay of the per-ASIC-family pplib clock info layouts; rv6xx uses r600. */
union pplib_clock_info {
	struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
	struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
	struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
	struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
};
178957e252bfSMichael Neumann
/* Overlay of the v1/v2 pplib power state layouts. */
union pplib_power_state {
	struct _ATOM_PPLIB_STATE v1;
	struct _ATOM_PPLIB_STATE_V2 v2;
};
179457e252bfSMichael Neumann
rv6xx_parse_pplib_non_clock_info(struct radeon_device * rdev,struct radeon_ps * rps,struct _ATOM_PPLIB_NONCLOCK_INFO * non_clock_info)179557e252bfSMichael Neumann static void rv6xx_parse_pplib_non_clock_info(struct radeon_device *rdev,
179657e252bfSMichael Neumann struct radeon_ps *rps,
179757e252bfSMichael Neumann struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info)
179857e252bfSMichael Neumann {
179957e252bfSMichael Neumann rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
180057e252bfSMichael Neumann rps->class = le16_to_cpu(non_clock_info->usClassification);
180157e252bfSMichael Neumann rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
180257e252bfSMichael Neumann
180357e252bfSMichael Neumann if (r600_is_uvd_state(rps->class, rps->class2)) {
180457e252bfSMichael Neumann rps->vclk = RV6XX_DEFAULT_VCLK_FREQ;
180557e252bfSMichael Neumann rps->dclk = RV6XX_DEFAULT_DCLK_FREQ;
180657e252bfSMichael Neumann } else {
180757e252bfSMichael Neumann rps->vclk = 0;
180857e252bfSMichael Neumann rps->dclk = 0;
180957e252bfSMichael Neumann }
181057e252bfSMichael Neumann
181157e252bfSMichael Neumann if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
181257e252bfSMichael Neumann rdev->pm.dpm.boot_ps = rps;
181357e252bfSMichael Neumann if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
181457e252bfSMichael Neumann rdev->pm.dpm.uvd_ps = rps;
181557e252bfSMichael Neumann }
181657e252bfSMichael Neumann
rv6xx_parse_pplib_clock_info(struct radeon_device * rdev,struct radeon_ps * rps,int index,union pplib_clock_info * clock_info)181757e252bfSMichael Neumann static void rv6xx_parse_pplib_clock_info(struct radeon_device *rdev,
181857e252bfSMichael Neumann struct radeon_ps *rps, int index,
181957e252bfSMichael Neumann union pplib_clock_info *clock_info)
182057e252bfSMichael Neumann {
182157e252bfSMichael Neumann struct rv6xx_ps *ps = rv6xx_get_ps(rps);
182257e252bfSMichael Neumann u32 sclk, mclk;
182357e252bfSMichael Neumann u16 vddc;
182457e252bfSMichael Neumann struct rv6xx_pl *pl;
182557e252bfSMichael Neumann
182657e252bfSMichael Neumann switch (index) {
182757e252bfSMichael Neumann case 0:
182857e252bfSMichael Neumann pl = &ps->low;
182957e252bfSMichael Neumann break;
183057e252bfSMichael Neumann case 1:
183157e252bfSMichael Neumann pl = &ps->medium;
183257e252bfSMichael Neumann break;
183357e252bfSMichael Neumann case 2:
183457e252bfSMichael Neumann default:
183557e252bfSMichael Neumann pl = &ps->high;
183657e252bfSMichael Neumann break;
183757e252bfSMichael Neumann }
183857e252bfSMichael Neumann
183957e252bfSMichael Neumann sclk = le16_to_cpu(clock_info->r600.usEngineClockLow);
184057e252bfSMichael Neumann sclk |= clock_info->r600.ucEngineClockHigh << 16;
184157e252bfSMichael Neumann mclk = le16_to_cpu(clock_info->r600.usMemoryClockLow);
184257e252bfSMichael Neumann mclk |= clock_info->r600.ucMemoryClockHigh << 16;
184357e252bfSMichael Neumann
184457e252bfSMichael Neumann pl->mclk = mclk;
184557e252bfSMichael Neumann pl->sclk = sclk;
184657e252bfSMichael Neumann pl->vddc = le16_to_cpu(clock_info->r600.usVDDC);
184757e252bfSMichael Neumann pl->flags = le32_to_cpu(clock_info->r600.ulFlags);
184857e252bfSMichael Neumann
184957e252bfSMichael Neumann /* patch up vddc if necessary */
185057e252bfSMichael Neumann if (pl->vddc == 0xff01) {
185157e252bfSMichael Neumann if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc) == 0)
185257e252bfSMichael Neumann pl->vddc = vddc;
185357e252bfSMichael Neumann }
185457e252bfSMichael Neumann
185557e252bfSMichael Neumann /* fix up pcie gen2 */
185657e252bfSMichael Neumann if (pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) {
185757e252bfSMichael Neumann if ((rdev->family == CHIP_RV610) || (rdev->family == CHIP_RV630)) {
185857e252bfSMichael Neumann if (pl->vddc < 1100)
185957e252bfSMichael Neumann pl->flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2;
186057e252bfSMichael Neumann }
186157e252bfSMichael Neumann }
186257e252bfSMichael Neumann
186357e252bfSMichael Neumann /* patch up boot state */
186457e252bfSMichael Neumann if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
186557e252bfSMichael Neumann u16 vddc, vddci, mvdd;
186657e252bfSMichael Neumann radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
186757e252bfSMichael Neumann pl->mclk = rdev->clock.default_mclk;
186857e252bfSMichael Neumann pl->sclk = rdev->clock.default_sclk;
186957e252bfSMichael Neumann pl->vddc = vddc;
187057e252bfSMichael Neumann }
187157e252bfSMichael Neumann }
187257e252bfSMichael Neumann
rv6xx_parse_power_table(struct radeon_device * rdev)187357e252bfSMichael Neumann static int rv6xx_parse_power_table(struct radeon_device *rdev)
187457e252bfSMichael Neumann {
187557e252bfSMichael Neumann struct radeon_mode_info *mode_info = &rdev->mode_info;
187657e252bfSMichael Neumann struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
187757e252bfSMichael Neumann union pplib_power_state *power_state;
187857e252bfSMichael Neumann int i, j;
187957e252bfSMichael Neumann union pplib_clock_info *clock_info;
188057e252bfSMichael Neumann union power_info *power_info;
188157e252bfSMichael Neumann int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
188257e252bfSMichael Neumann u16 data_offset;
188357e252bfSMichael Neumann u8 frev, crev;
188457e252bfSMichael Neumann struct rv6xx_ps *ps;
188557e252bfSMichael Neumann
188657e252bfSMichael Neumann if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
188757e252bfSMichael Neumann &frev, &crev, &data_offset))
188857e252bfSMichael Neumann return -EINVAL;
1889c59a5c48SFrançois Tigeot power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
189057e252bfSMichael Neumann
189157e252bfSMichael Neumann rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
189257e252bfSMichael Neumann power_info->pplib.ucNumStates, GFP_KERNEL);
189357e252bfSMichael Neumann if (!rdev->pm.dpm.ps)
189457e252bfSMichael Neumann return -ENOMEM;
189557e252bfSMichael Neumann
189657e252bfSMichael Neumann for (i = 0; i < power_info->pplib.ucNumStates; i++) {
189757e252bfSMichael Neumann power_state = (union pplib_power_state *)
1898c59a5c48SFrançois Tigeot (mode_info->atom_context->bios + data_offset +
189957e252bfSMichael Neumann le16_to_cpu(power_info->pplib.usStateArrayOffset) +
190057e252bfSMichael Neumann i * power_info->pplib.ucStateEntrySize);
190157e252bfSMichael Neumann non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
1902c59a5c48SFrançois Tigeot (mode_info->atom_context->bios + data_offset +
190357e252bfSMichael Neumann le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
190457e252bfSMichael Neumann (power_state->v1.ucNonClockStateIndex *
190557e252bfSMichael Neumann power_info->pplib.ucNonClockSize));
190657e252bfSMichael Neumann if (power_info->pplib.ucStateEntrySize - 1) {
19074cd92098Szrj u8 *idx;
190857e252bfSMichael Neumann ps = kzalloc(sizeof(struct rv6xx_ps), GFP_KERNEL);
190957e252bfSMichael Neumann if (ps == NULL) {
191057e252bfSMichael Neumann kfree(rdev->pm.dpm.ps);
191157e252bfSMichael Neumann return -ENOMEM;
191257e252bfSMichael Neumann }
191357e252bfSMichael Neumann rdev->pm.dpm.ps[i].ps_priv = ps;
191457e252bfSMichael Neumann rv6xx_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
191557e252bfSMichael Neumann non_clock_info);
19164cd92098Szrj idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
191757e252bfSMichael Neumann for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
191857e252bfSMichael Neumann clock_info = (union pplib_clock_info *)
1919c59a5c48SFrançois Tigeot (mode_info->atom_context->bios + data_offset +
192057e252bfSMichael Neumann le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
19214cd92098Szrj (idx[j] * power_info->pplib.ucClockInfoSize));
192257e252bfSMichael Neumann rv6xx_parse_pplib_clock_info(rdev,
192357e252bfSMichael Neumann &rdev->pm.dpm.ps[i], j,
192457e252bfSMichael Neumann clock_info);
192557e252bfSMichael Neumann }
192657e252bfSMichael Neumann }
192757e252bfSMichael Neumann }
192857e252bfSMichael Neumann rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
192957e252bfSMichael Neumann return 0;
193057e252bfSMichael Neumann }
193157e252bfSMichael Neumann
rv6xx_dpm_init(struct radeon_device * rdev)193257e252bfSMichael Neumann int rv6xx_dpm_init(struct radeon_device *rdev)
193357e252bfSMichael Neumann {
193457e252bfSMichael Neumann struct radeon_atom_ss ss;
193557e252bfSMichael Neumann struct atom_clock_dividers dividers;
193657e252bfSMichael Neumann struct rv6xx_power_info *pi;
193757e252bfSMichael Neumann int ret;
193857e252bfSMichael Neumann
193957e252bfSMichael Neumann pi = kzalloc(sizeof(struct rv6xx_power_info), GFP_KERNEL);
194057e252bfSMichael Neumann if (pi == NULL)
194157e252bfSMichael Neumann return -ENOMEM;
194257e252bfSMichael Neumann rdev->pm.dpm.priv = pi;
194357e252bfSMichael Neumann
1944c6f73aabSFrançois Tigeot ret = r600_get_platform_caps(rdev);
1945c6f73aabSFrançois Tigeot if (ret)
1946c6f73aabSFrançois Tigeot return ret;
1947c6f73aabSFrançois Tigeot
194857e252bfSMichael Neumann ret = rv6xx_parse_power_table(rdev);
194957e252bfSMichael Neumann if (ret)
195057e252bfSMichael Neumann return ret;
195157e252bfSMichael Neumann
195257e252bfSMichael Neumann if (rdev->pm.dpm.voltage_response_time == 0)
195357e252bfSMichael Neumann rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
195457e252bfSMichael Neumann if (rdev->pm.dpm.backbias_response_time == 0)
195557e252bfSMichael Neumann rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
195657e252bfSMichael Neumann
195757e252bfSMichael Neumann ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
195857e252bfSMichael Neumann 0, false, ÷rs);
195957e252bfSMichael Neumann if (ret)
196057e252bfSMichael Neumann pi->spll_ref_div = dividers.ref_div + 1;
196157e252bfSMichael Neumann else
196257e252bfSMichael Neumann pi->spll_ref_div = R600_REFERENCEDIVIDER_DFLT;
196357e252bfSMichael Neumann
196457e252bfSMichael Neumann ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
196557e252bfSMichael Neumann 0, false, ÷rs);
196657e252bfSMichael Neumann if (ret)
196757e252bfSMichael Neumann pi->mpll_ref_div = dividers.ref_div + 1;
196857e252bfSMichael Neumann else
196957e252bfSMichael Neumann pi->mpll_ref_div = R600_REFERENCEDIVIDER_DFLT;
197057e252bfSMichael Neumann
197157e252bfSMichael Neumann if (rdev->family >= CHIP_RV670)
197257e252bfSMichael Neumann pi->fb_div_scale = 1;
197357e252bfSMichael Neumann else
197457e252bfSMichael Neumann pi->fb_div_scale = 0;
197557e252bfSMichael Neumann
197657e252bfSMichael Neumann pi->voltage_control =
197757e252bfSMichael Neumann radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
197857e252bfSMichael Neumann
197957e252bfSMichael Neumann pi->gfx_clock_gating = true;
198057e252bfSMichael Neumann
198157e252bfSMichael Neumann pi->sclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss,
198257e252bfSMichael Neumann ASIC_INTERNAL_ENGINE_SS, 0);
198357e252bfSMichael Neumann pi->mclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss,
198457e252bfSMichael Neumann ASIC_INTERNAL_MEMORY_SS, 0);
198557e252bfSMichael Neumann
198657e252bfSMichael Neumann /* Disable sclk ss, causes hangs on a lot of systems */
198757e252bfSMichael Neumann pi->sclk_ss = false;
198857e252bfSMichael Neumann
198957e252bfSMichael Neumann if (pi->sclk_ss || pi->mclk_ss)
199057e252bfSMichael Neumann pi->dynamic_ss = true;
199157e252bfSMichael Neumann else
199257e252bfSMichael Neumann pi->dynamic_ss = false;
199357e252bfSMichael Neumann
199457e252bfSMichael Neumann pi->dynamic_pcie_gen2 = true;
199557e252bfSMichael Neumann
199657e252bfSMichael Neumann if (pi->gfx_clock_gating &&
199757e252bfSMichael Neumann (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE))
199857e252bfSMichael Neumann pi->thermal_protection = true;
199957e252bfSMichael Neumann else
200057e252bfSMichael Neumann pi->thermal_protection = false;
200157e252bfSMichael Neumann
200257e252bfSMichael Neumann pi->display_gap = true;
200357e252bfSMichael Neumann
200457e252bfSMichael Neumann return 0;
200557e252bfSMichael Neumann }
200657e252bfSMichael Neumann
/*
 * rv6xx_dpm_print_power_state - dump a power state to the kernel log
 * @rdev: radeon device
 * @rps: power state to print
 *
 * Prints the class/caps info, the UVD clocks and all three performance
 * levels (low/medium/high) of the given state.
 */
void rv6xx_dpm_print_power_state(struct radeon_device *rdev,
				 struct radeon_ps *rps)
{
	struct rv6xx_ps *ps = rv6xx_get_ps(rps);
	struct rv6xx_pl *pl;

	r600_dpm_print_class_info(rps->class, rps->class2);
	r600_dpm_print_cap_info(rps->caps);
	printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
	pl = &ps->low;
	printk("\t\tpower level 0 sclk: %u mclk: %u vddc: %u\n",
	       pl->sclk, pl->mclk, pl->vddc);
	pl = &ps->medium;
	printk("\t\tpower level 1 sclk: %u mclk: %u vddc: %u\n",
	       pl->sclk, pl->mclk, pl->vddc);
	pl = &ps->high;
	printk("\t\tpower level 2 sclk: %u mclk: %u vddc: %u\n",
	       pl->sclk, pl->mclk, pl->vddc);
	r600_dpm_print_ps_status(rdev, rps);
}
202757e252bfSMichael Neumann
rv6xx_dpm_debugfs_print_current_performance_level(struct radeon_device * rdev,struct seq_file * m)202857e252bfSMichael Neumann void rv6xx_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
202957e252bfSMichael Neumann struct seq_file *m)
203057e252bfSMichael Neumann {
203157e252bfSMichael Neumann struct radeon_ps *rps = rdev->pm.dpm.current_ps;
203257e252bfSMichael Neumann struct rv6xx_ps *ps = rv6xx_get_ps(rps);
203357e252bfSMichael Neumann struct rv6xx_pl *pl;
203457e252bfSMichael Neumann u32 current_index =
203557e252bfSMichael Neumann (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
203657e252bfSMichael Neumann CURRENT_PROFILE_INDEX_SHIFT;
203757e252bfSMichael Neumann
203857e252bfSMichael Neumann if (current_index > 2) {
203957e252bfSMichael Neumann seq_printf(m, "invalid dpm profile %d\n", current_index);
204057e252bfSMichael Neumann } else {
204157e252bfSMichael Neumann if (current_index == 0)
204257e252bfSMichael Neumann pl = &ps->low;
204357e252bfSMichael Neumann else if (current_index == 1)
204457e252bfSMichael Neumann pl = &ps->medium;
204557e252bfSMichael Neumann else /* current_index == 2 */
204657e252bfSMichael Neumann pl = &ps->high;
204757e252bfSMichael Neumann seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
204857e252bfSMichael Neumann seq_printf(m, "power level %d sclk: %u mclk: %u vddc: %u\n",
204957e252bfSMichael Neumann current_index, pl->sclk, pl->mclk, pl->vddc);
205057e252bfSMichael Neumann }
205157e252bfSMichael Neumann }
205257e252bfSMichael Neumann
2053c59a5c48SFrançois Tigeot /* get the current sclk in 10 khz units */
rv6xx_dpm_get_current_sclk(struct radeon_device * rdev)2054c59a5c48SFrançois Tigeot u32 rv6xx_dpm_get_current_sclk(struct radeon_device *rdev)
2055c59a5c48SFrançois Tigeot {
2056c59a5c48SFrançois Tigeot struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2057c59a5c48SFrançois Tigeot struct rv6xx_ps *ps = rv6xx_get_ps(rps);
2058c59a5c48SFrançois Tigeot struct rv6xx_pl *pl;
2059c59a5c48SFrançois Tigeot u32 current_index =
2060c59a5c48SFrançois Tigeot (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2061c59a5c48SFrançois Tigeot CURRENT_PROFILE_INDEX_SHIFT;
2062c59a5c48SFrançois Tigeot
2063c59a5c48SFrançois Tigeot if (current_index > 2) {
2064c59a5c48SFrançois Tigeot return 0;
2065c59a5c48SFrançois Tigeot } else {
2066c59a5c48SFrançois Tigeot if (current_index == 0)
2067c59a5c48SFrançois Tigeot pl = &ps->low;
2068c59a5c48SFrançois Tigeot else if (current_index == 1)
2069c59a5c48SFrançois Tigeot pl = &ps->medium;
2070c59a5c48SFrançois Tigeot else /* current_index == 2 */
2071c59a5c48SFrançois Tigeot pl = &ps->high;
2072c59a5c48SFrançois Tigeot return pl->sclk;
2073c59a5c48SFrançois Tigeot }
2074c59a5c48SFrançois Tigeot }
2075c59a5c48SFrançois Tigeot
2076c59a5c48SFrançois Tigeot /* get the current mclk in 10 khz units */
rv6xx_dpm_get_current_mclk(struct radeon_device * rdev)2077c59a5c48SFrançois Tigeot u32 rv6xx_dpm_get_current_mclk(struct radeon_device *rdev)
2078c59a5c48SFrançois Tigeot {
2079c59a5c48SFrançois Tigeot struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2080c59a5c48SFrançois Tigeot struct rv6xx_ps *ps = rv6xx_get_ps(rps);
2081c59a5c48SFrançois Tigeot struct rv6xx_pl *pl;
2082c59a5c48SFrançois Tigeot u32 current_index =
2083c59a5c48SFrançois Tigeot (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2084c59a5c48SFrançois Tigeot CURRENT_PROFILE_INDEX_SHIFT;
2085c59a5c48SFrançois Tigeot
2086c59a5c48SFrançois Tigeot if (current_index > 2) {
2087c59a5c48SFrançois Tigeot return 0;
2088c59a5c48SFrançois Tigeot } else {
2089c59a5c48SFrançois Tigeot if (current_index == 0)
2090c59a5c48SFrançois Tigeot pl = &ps->low;
2091c59a5c48SFrançois Tigeot else if (current_index == 1)
2092c59a5c48SFrançois Tigeot pl = &ps->medium;
2093c59a5c48SFrançois Tigeot else /* current_index == 2 */
2094c59a5c48SFrançois Tigeot pl = &ps->high;
2095c59a5c48SFrançois Tigeot return pl->mclk;
2096c59a5c48SFrançois Tigeot }
2097c59a5c48SFrançois Tigeot }
2098c59a5c48SFrançois Tigeot
rv6xx_dpm_fini(struct radeon_device * rdev)209957e252bfSMichael Neumann void rv6xx_dpm_fini(struct radeon_device *rdev)
210057e252bfSMichael Neumann {
210157e252bfSMichael Neumann int i;
210257e252bfSMichael Neumann
210357e252bfSMichael Neumann for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
210457e252bfSMichael Neumann kfree(rdev->pm.dpm.ps[i].ps_priv);
210557e252bfSMichael Neumann }
210657e252bfSMichael Neumann kfree(rdev->pm.dpm.ps);
210757e252bfSMichael Neumann kfree(rdev->pm.dpm.priv);
210857e252bfSMichael Neumann }
210957e252bfSMichael Neumann
rv6xx_dpm_get_sclk(struct radeon_device * rdev,bool low)211057e252bfSMichael Neumann u32 rv6xx_dpm_get_sclk(struct radeon_device *rdev, bool low)
211157e252bfSMichael Neumann {
211257e252bfSMichael Neumann struct rv6xx_ps *requested_state = rv6xx_get_ps(rdev->pm.dpm.requested_ps);
211357e252bfSMichael Neumann
211457e252bfSMichael Neumann if (low)
211557e252bfSMichael Neumann return requested_state->low.sclk;
211657e252bfSMichael Neumann else
211757e252bfSMichael Neumann return requested_state->high.sclk;
211857e252bfSMichael Neumann }
211957e252bfSMichael Neumann
rv6xx_dpm_get_mclk(struct radeon_device * rdev,bool low)212057e252bfSMichael Neumann u32 rv6xx_dpm_get_mclk(struct radeon_device *rdev, bool low)
212157e252bfSMichael Neumann {
212257e252bfSMichael Neumann struct rv6xx_ps *requested_state = rv6xx_get_ps(rdev->pm.dpm.requested_ps);
212357e252bfSMichael Neumann
212457e252bfSMichael Neumann if (low)
212557e252bfSMichael Neumann return requested_state->low.mclk;
212657e252bfSMichael Neumann else
212757e252bfSMichael Neumann return requested_state->high.mclk;
212857e252bfSMichael Neumann }
212957e252bfSMichael Neumann
rv6xx_dpm_force_performance_level(struct radeon_device * rdev,enum radeon_dpm_forced_level level)213057e252bfSMichael Neumann int rv6xx_dpm_force_performance_level(struct radeon_device *rdev,
213157e252bfSMichael Neumann enum radeon_dpm_forced_level level)
213257e252bfSMichael Neumann {
213357e252bfSMichael Neumann struct rv6xx_power_info *pi = rv6xx_get_pi(rdev);
213457e252bfSMichael Neumann
213557e252bfSMichael Neumann if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
213657e252bfSMichael Neumann pi->restricted_levels = 3;
213757e252bfSMichael Neumann } else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
213857e252bfSMichael Neumann pi->restricted_levels = 2;
213957e252bfSMichael Neumann } else {
214057e252bfSMichael Neumann pi->restricted_levels = 0;
214157e252bfSMichael Neumann }
214257e252bfSMichael Neumann
214357e252bfSMichael Neumann rv6xx_clear_vc(rdev);
214457e252bfSMichael Neumann r600_power_level_enable(rdev, R600_POWER_LEVEL_LOW, true);
214557e252bfSMichael Neumann r600_set_at(rdev, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF);
214657e252bfSMichael Neumann r600_wait_for_power_level(rdev, R600_POWER_LEVEL_LOW);
214757e252bfSMichael Neumann r600_power_level_enable(rdev, R600_POWER_LEVEL_HIGH, false);
214857e252bfSMichael Neumann r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, false);
214957e252bfSMichael Neumann rv6xx_enable_medium(rdev);
215057e252bfSMichael Neumann rv6xx_enable_high(rdev);
215157e252bfSMichael Neumann if (pi->restricted_levels == 3)
215257e252bfSMichael Neumann r600_power_level_enable(rdev, R600_POWER_LEVEL_LOW, false);
215357e252bfSMichael Neumann rv6xx_program_vc(rdev);
215457e252bfSMichael Neumann rv6xx_program_at(rdev);
215557e252bfSMichael Neumann
215657e252bfSMichael Neumann rdev->pm.dpm.forced_level = level;
215757e252bfSMichael Neumann
215857e252bfSMichael Neumann return 0;
215957e252bfSMichael Neumann }
2160