
Searched refs: post_div (Results 1 – 25 of 29), sorted by relevance

/dragonfly/sys/dev/drm/amd/amdgpu/
amdgpu_pll.c
88 ref_div_max = min(128 / post_div, ref_div_max); in amdgpu_pll_get_fb_ref_div()
126 unsigned post_div_min, post_div_max, post_div; in amdgpu_pll_compute() local
154 post_div_min = pll->post_div; in amdgpu_pll_compute()
155 post_div_max = pll->post_div; in amdgpu_pll_compute()
199 for (post_div = post_div_min; post_div <= post_div_max; ++post_div) { in amdgpu_pll_compute()
204 (ref_div * post_div)); in amdgpu_pll_compute()
209 post_div_best = post_div; in amdgpu_pll_compute()
213 post_div = post_div_best; in amdgpu_pll_compute()
244 (ref_div * post_div * 10); in amdgpu_pll_compute()
246 *post_div_p = post_div; in amdgpu_pll_compute()
[all …]
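
For context, the amdgpu_pll.c hits above come from the divider search in amdgpu_pll_compute(): the driver walks post_div from post_div_min to post_div_max and picks fb_div/ref_div so that the PLL output lands on the requested clock. Below is a minimal sketch of the relation being optimized, assuming the usual ref * fb_div / (ref_div * post_div) model with one fractional decimal digit carried in fb_div (suggested by the "(ref_div * post_div * 10)" hit at line 244); the function and variable names are illustrative, not the driver's.

    /* Sketch only: PLL output for a given divider triple.
     * fb_div_x10 carries one fractional decimal digit (e.g. 810 == 81.0),
     * hence the extra factor of 10 in the denominator. */
    #include <stdio.h>

    static unsigned int pll_output_khz(unsigned int ref_khz, unsigned int fb_div_x10,
                                       unsigned int ref_div, unsigned int post_div)
    {
            return (unsigned int)(((unsigned long long)ref_khz * fb_div_x10) /
                                  ((unsigned long long)ref_div * post_div * 10));
    }

    int main(void)
    {
            /* Hypothetical numbers: 100 MHz reference, fb_div 81.0,
             * ref_div 3, post_div 10 -> 270000 kHz. */
            printf("%u kHz\n", pll_output_khz(100000, 810, 3, 10));
            return 0;
    }
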
atombios_crtc.c
585 u32 post_div, in amdgpu_atombios_crtc_program_pll() argument
612 args.v1.ucPostDiv = post_div; in amdgpu_atombios_crtc_program_pll()
622 args.v2.ucPostDiv = post_div; in amdgpu_atombios_crtc_program_pll()
632 args.v3.ucPostDiv = post_div; in amdgpu_atombios_crtc_program_pll()
649 args.v5.ucPostDiv = post_div; in amdgpu_atombios_crtc_program_pll()
679 args.v6.ucPostDiv = post_div; in amdgpu_atombios_crtc_program_pll()
826 u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0; in amdgpu_atombios_crtc_set_pll() local
852 pll->post_div = amdgpu_crtc->pll_post_div; in amdgpu_atombios_crtc_set_pll()
855 &fb_div, &frac_fb_div, &ref_div, &post_div); in amdgpu_atombios_crtc_set_pll()
862 ref_div, fb_div, frac_fb_div, post_div, in amdgpu_atombios_crtc_set_pll()
amdgpu_atombios.h
28 u32 post_div; member
68 u32 post_div; member
atombios_crtc.h
51 u32 post_div,
amdgpu_atombios.c
1022 dividers->post_div = args.v3.ucPostDiv; in amdgpu_atombios_get_clock_dividers()
1042 dividers->post_div = args.v5.ucPostDiv; in amdgpu_atombios_get_clock_dividers()
1060 dividers->post_divider = dividers->post_div = args.v4.ucPostDiv; in amdgpu_atombios_get_clock_dividers()
1074 dividers->post_div = args.v6_out.ucPllPostDiv; in amdgpu_atombios_get_clock_dividers()
1114 mpll_param->post_div = args.ucPostDiv; in amdgpu_atombios_get_memory_pll_dividers()
amdgpu_mode.h
197 uint32_t post_div; member
/dragonfly/sys/dev/drm/radeon/
radeon_clocks.c
55 if (post_div == 2) in radeon_legacy_get_engine_clock()
57 else if (post_div == 3) in radeon_legacy_get_engine_clock()
59 else if (post_div == 4) in radeon_legacy_get_engine_clock()
85 if (post_div == 2) in radeon_legacy_get_memory_clock()
87 else if (post_div == 3) in radeon_legacy_get_memory_clock()
89 else if (post_div == 4) in radeon_legacy_get_memory_clock()
360 *post_div = 8; in calc_eng_mem_clock()
363 *post_div = 4; in calc_eng_mem_clock()
366 *post_div = 2; in calc_eng_mem_clock()
369 *post_div = 1; in calc_eng_mem_clock()
[all …]
radeon_display.c
1037 for (post_div = post_div_min; post_div <= post_div_max; ++post_div) { in radeon_compute_pll_avivo()
1042 (ref_div * post_div)); in radeon_compute_pll_avivo()
1051 post_div = post_div_best; in radeon_compute_pll_avivo()
1084 *post_div_p = post_div; in radeon_compute_pll_avivo()
1124 uint32_t post_div; in radeon_compute_pll_legacy() local
1164 for (post_div = max_post_div; post_div >= min_post_div; --post_div) { in radeon_compute_pll_legacy()
1172 if ((post_div == 5) || in radeon_compute_pll_legacy()
1173 (post_div == 7) || in radeon_compute_pll_legacy()
1174 (post_div == 9) || in radeon_compute_pll_legacy()
1175 (post_div == 10) || in radeon_compute_pll_legacy()
[all …]
radeon_legacy_tv.c
869 int post_div; in get_post_div() local
871 case 1: post_div = 0; break; in get_post_div()
872 case 2: post_div = 1; break; in get_post_div()
873 case 3: post_div = 4; break; in get_post_div()
874 case 4: post_div = 2; break; in get_post_div()
875 case 6: post_div = 6; break; in get_post_div()
876 case 8: post_div = 3; break; in get_post_div()
877 case 12: post_div = 7; break; in get_post_div()
879 default: post_div = 5; break; in get_post_div()
881 return post_div; in get_post_div()
rv730_dpm.c
65 post_divider = ((dividers.post_div >> 4) & 0xf) + in rv730_populate_sclk_value()
66 (dividers.post_div & 0xf) + 2; in rv730_populate_sclk_value()
81 spll_func_cntl |= SPLL_HILEN((dividers.post_div >> 4) & 0xf); in rv730_populate_sclk_value()
82 spll_func_cntl |= SPLL_LOLEN(dividers.post_div & 0xf); in rv730_populate_sclk_value()
143 post_divider = ((dividers.post_div >> 4) & 0xf) + in rv730_populate_mclk_value()
144 (dividers.post_div & 0xf) + 2; in rv730_populate_mclk_value()
156 mpll_func_cntl |= MPLL_HILEN((dividers.post_div >> 4) & 0xf); in rv730_populate_mclk_value()
157 mpll_func_cntl |= MPLL_LOLEN(dividers.post_div & 0xf); in rv730_populate_mclk_value()
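
The rv730_dpm.c hits here (and the rv770_dpm.c and rv6xx_dpm.c ones further down) all decode the same packing: the post divider comes back as two nibbles (HILEN in bits 7:4, LOLEN in bits 3:0) and the effective divider is HILEN + LOLEN + 2. A self-contained sketch of that decode, with an illustrative helper name rather than anything from the driver:

    #include <stdio.h>

    /* Effective SPLL/MPLL post divider from the nibble-packed value, mirroring
     * "((dividers.post_div >> 4) & 0xf) + (dividers.post_div & 0xf) + 2". */
    static unsigned int decode_post_div(unsigned int packed)
    {
            return ((packed >> 4) & 0xf) + (packed & 0xf) + 2;
    }

    int main(void)
    {
            printf("%u\n", decode_post_div(0x22)); /* 2 + 2 + 2 = 6 */
            return 0;
    }
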
rv740_dpm.c
144 tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16384; in rv740_populate_sclk_value()
150 spll_func_cntl |= SPLL_PDIV_A(dividers.post_div); in rv740_populate_sclk_value()
161 u32 vco_freq = engine_clock * dividers.post_div; in rv740_populate_sclk_value()
218 mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div); in rv740_populate_mclk_value()
235 mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div); in rv740_populate_mclk_value()
248 u32 vco_freq = memory_clock * dividers.post_div; in rv740_populate_mclk_value()
radeon_uvd.c
922 unsigned post_div = vco_freq / target_freq; in radeon_uvd_calc_upll_post_div() local
925 if (post_div < pd_min) in radeon_uvd_calc_upll_post_div()
926 post_div = pd_min; in radeon_uvd_calc_upll_post_div()
929 if ((vco_freq / post_div) > target_freq) in radeon_uvd_calc_upll_post_div()
930 post_div += 1; in radeon_uvd_calc_upll_post_div()
933 if (post_div > pd_even && post_div % 2) in radeon_uvd_calc_upll_post_div()
934 post_div += 1; in radeon_uvd_calc_upll_post_div()
936 return post_div; in radeon_uvd_calc_upll_post_div()
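
Read together, the radeon_uvd.c lines above are a small policy for choosing the UPLL post divider: start from vco_freq / target_freq, never go below pd_min, round up if the result still overshoots the target, and keep the divider even once it exceeds pd_even. A standalone version for experimentation follows; the values in main are made up, not taken from any ASIC table.

    #include <stdio.h>

    static unsigned int calc_upll_post_div(unsigned int vco_freq, unsigned int target_freq,
                                           unsigned int pd_min, unsigned int pd_even)
    {
            unsigned int post_div = vco_freq / target_freq;

            /* Never go below the minimum allowed divider. */
            if (post_div < pd_min)
                    post_div = pd_min;

            /* If we still overshoot the target, round the divider up. */
            if ((vco_freq / post_div) > target_freq)
                    post_div += 1;

            /* Above pd_even, odd dividers are not allowed. */
            if (post_div > pd_even && post_div % 2)
                    post_div += 1;

            return post_div;
    }

    int main(void)
    {
            /* Hypothetical: 960 MHz VCO, 400 MHz target, pd_min 2, pd_even 4 -> 3 */
            printf("%u\n", calc_upll_post_div(960000, 400000, 2, 4));
            return 0;
    }
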
radeon_legacy_crtc.c
759 } *post_div, post_divs[] = { in radeon_set_pll() local
825 for (post_div = &post_divs[0]; post_div->divider; ++post_div) { in radeon_set_pll()
826 if (post_div->divider == post_divider) in radeon_set_pll()
830 if (!post_div->divider) in radeon_set_pll()
831 post_div = &post_divs[0]; in radeon_set_pll()
846 pll_fb_post_div = (feedback_div | (post_div->bitvalue << 16)); in radeon_set_pll()
rs780_dpm.c
88 r600_engine_clock_entry_set_post_divider(rdev, 0, dividers.post_div); in rs780_initialize_dpm_power_state()
454 (min_dividers.post_div != max_dividers.post_div) || in rs780_set_engine_clock_scaling()
456 (max_dividers.post_div != current_max_dividers.post_div)) in rs780_set_engine_clock_scaling()
988 u32 post_div = ((func_cntl & SPLL_SW_HILEN_MASK) >> SPLL_SW_HILEN_SHIFT) + 1 + in rs780_dpm_debugfs_print_current_performance_level() local
991 (post_div * ref_div); in rs780_dpm_debugfs_print_current_performance_level()
1010 u32 post_div = ((func_cntl & SPLL_SW_HILEN_MASK) >> SPLL_SW_HILEN_SHIFT) + 1 + in rs780_dpm_get_current_sclk() local
1013 (post_div * ref_div); in rs780_dpm_get_current_sclk()
atombios_crtc.c
829 u32 post_div, in atombios_crtc_program_pll() argument
856 args.v1.ucPostDiv = post_div; in atombios_crtc_program_pll()
866 args.v2.ucPostDiv = post_div; in atombios_crtc_program_pll()
876 args.v3.ucPostDiv = post_div; in atombios_crtc_program_pll()
893 args.v5.ucPostDiv = post_div; in atombios_crtc_program_pll()
922 args.v6.ucPostDiv = post_div; in atombios_crtc_program_pll()
1096 pll->post_div = radeon_crtc->pll_post_div; in atombios_crtc_set_pll()
1101 &fb_div, &frac_fb_div, &ref_div, &post_div); in atombios_crtc_set_pll()
1104 &fb_div, &frac_fb_div, &ref_div, &post_div); in atombios_crtc_set_pll()
1107 &fb_div, &frac_fb_div, &ref_div, &post_div); in atombios_crtc_set_pll()
[all …]
radeon_mode.h
172 uint32_t post_div; member
586 u32 post_div; member
626 u32 post_div; member
rv770_dpm.c
337 post_divider = dividers->post_div; in rv770_calculate_fractional_mpll_feedback_divider()
426 ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk); in rv770_populate_mclk_value()
456 ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk); in rv770_populate_mclk_value()
518 post_divider = (0x0f & (dividers.post_div >> 4)) + (0x0f & dividers.post_div) + 2; in rv770_populate_sclk_value()
532 spll_func_cntl |= SPLL_HILEN((dividers.post_div >> 4) & 0xf); in rv770_populate_sclk_value()
533 spll_func_cntl |= SPLL_LOLEN(dividers.post_div & 0xf); in rv770_populate_sclk_value()
kv_dpm.c
542 pi->graphics_level[index].SclkDid = (u8)dividers.post_div; in kv_set_divider_value()
845 pi->uvd_level[i].VclkDivider = (u8)dividers.post_div; in kv_populate_uvd_table()
851 pi->uvd_level[i].DclkDivider = (u8)dividers.post_div; in kv_populate_uvd_table()
913 pi->vce_level[i].Divider = (u8)dividers.post_div; in kv_populate_vce_table()
976 pi->samu_level[i].Divider = (u8)dividers.post_div; in kv_populate_samu_table()
1035 pi->acp_level[i].Divider = (u8)dividers.post_div; in kv_populate_acp_table()
cypress_dpm.c
509 dividers.post_div = 1; in cypress_populate_mclk_value()
520 mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div); in cypress_populate_mclk_value()
537 mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div); in cypress_populate_mclk_value()
555 u32 vco_freq = memory_clock * dividers.post_div; in cypress_populate_mclk_value()
ni_dpm.c
2023 tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16834; in ni_calculate_sclk_params()
2029 spll_func_cntl |= SPLL_PDIV_A(dividers.post_div); in ni_calculate_sclk_params()
2040 u32 vco_freq = engine_clock * dividers.post_div; in ni_calculate_sclk_params()
2190 dividers.post_div = 1; in ni_populate_mclk_value()
2201 mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div); in ni_populate_mclk_value()
2218 mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div); in ni_populate_mclk_value()
2236 u32 vco_freq = memory_clock * dividers.post_div; in ni_populate_mclk_value()
radeon_atombios.c
2840 dividers->post_div = args.v1.ucPostDiv; in radeon_atom_get_clock_dividers()
2854 dividers->post_div = args.v2.ucPostDiv; in radeon_atom_get_clock_dividers()
2869 dividers->post_div = args.v3.ucPostDiv; in radeon_atom_get_clock_dividers()
2889 dividers->post_div = args.v5.ucPostDiv; in radeon_atom_get_clock_dividers()
2908 dividers->post_divider = dividers->post_div = args.v4.ucPostDiv; in radeon_atom_get_clock_dividers()
2922 dividers->post_div = args.v6_out.ucPllPostDiv; in radeon_atom_get_clock_dividers()
2962 mpll_param->post_div = args.ucPostDiv; in radeon_atom_get_memory_pll_dividers()
trinity_dpm.c
387 value |= PDS_DIV(dividers.post_div); in trinity_gfx_powergating_initialize()
603 value |= CLK_DIVIDER(dividers.post_div); in trinity_set_divider_value()
613 value |= PD_SCLK_DIVIDER(dividers.post_div); in trinity_set_divider_value()
rv6xx_dpm.c
151 step->post_divider = 2 + (dividers.post_div & 0xF) + (dividers.post_div >> 4); in rv6xx_convert_clock_to_stepping()
609 rv6xx_memory_clock_entry_set_post_divider(rdev, entry, dividers.post_div); in rv6xx_program_mclk_stepping_entry()
sumo_dpm.c
562 sumo_set_divider_value(rdev, index, dividers.post_div); in sumo_program_power_level()
798 WREG32_P(CG_ACPI_CNTL, SCLK_ACPI_DIV(dividers.post_div), ~SCLK_ACPI_DIV_MASK); in sumo_program_acpi_power_level()
ci_dpm.c
2853 mpll_ad_func_cntl |= YCLK_POST_DIV(mpll_param.post_div); in ci_calculate_mclk_params()
2858 YCLK_POST_DIV(mpll_param.post_div); in ci_calculate_mclk_params()
2868 freq_nom = memory_clock * 4 * (1 << mpll_param.post_div); in ci_calculate_mclk_params()
2870 freq_nom = memory_clock * 2 * (1 << mpll_param.post_div); in ci_calculate_mclk_params()
3223 u32 vco_freq = engine_clock * dividers.post_div; in ci_calculate_sclk_params()
