/*
 * Copyright 2012-2021 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "dcn20_hubp.h"

#include "dm_services.h"
#include "dce_calcs.h"
#include "reg_helper.h"
#include "basics/conversion.h"

#define DC_LOGGER_INIT(logger)

#define REG(reg)\
	hubp2->hubp_regs->reg

#define CTX \
	hubp2->base.ctx

#undef FN
#define FN(reg_name, field_name) \
	hubp2->hubp_shift->field_name, hubp2->hubp_mask->field_name

void hubp2_set_vm_system_aperture_settings(struct hubp *hubp,
		struct vm_system_aperture_param *apt)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	PHYSICAL_ADDRESS_LOC mc_vm_apt_default;
	PHYSICAL_ADDRESS_LOC mc_vm_apt_low;
	PHYSICAL_ADDRESS_LOC mc_vm_apt_high;

	// The format of default addr is 48:12 of the 48 bit addr
	mc_vm_apt_default.quad_part = apt->sys_default.quad_part >> 12;

	// The format of high/low are 48:18 of the 48 bit addr
	mc_vm_apt_low.quad_part = apt->sys_low.quad_part >> 18;
	mc_vm_apt_high.quad_part = apt->sys_high.quad_part >> 18;

	REG_UPDATE_2(DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_MSB,
		DCN_VM_SYSTEM_APERTURE_DEFAULT_SYSTEM, 1, /* 1 = system physical memory */
		DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_MSB, mc_vm_apt_default.high_part);

	REG_SET(DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_LSB, 0,
			DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_LSB, mc_vm_apt_default.low_part);

	REG_SET(DCN_VM_SYSTEM_APERTURE_LOW_ADDR, 0,
			MC_VM_SYSTEM_APERTURE_LOW_ADDR, mc_vm_apt_low.quad_part);

	REG_SET(DCN_VM_SYSTEM_APERTURE_HIGH_ADDR, 0,
			MC_VM_SYSTEM_APERTURE_HIGH_ADDR, mc_vm_apt_high.quad_part);

	REG_SET_2(DCN_VM_MX_L1_TLB_CNTL, 0,
			ENABLE_L1_TLB, 1,
			SYSTEM_ACCESS_MODE, 0x3);
}

void hubp2_program_deadline(
		struct hubp *hubp,
		struct _vcs_dpi_display_dlg_regs_st *dlg_attr,
		struct _vcs_dpi_display_ttu_regs_st *ttu_attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	/* DLG - Per hubp */
	REG_SET_2(BLANK_OFFSET_0, 0,
		REFCYC_H_BLANK_END, dlg_attr->refcyc_h_blank_end,
		DLG_V_BLANK_END, dlg_attr->dlg_vblank_end);

	REG_SET(BLANK_OFFSET_1, 0,
		MIN_DST_Y_NEXT_START, dlg_attr->min_dst_y_next_start);

	REG_SET(DST_DIMENSIONS, 0,
		REFCYC_PER_HTOTAL, dlg_attr->refcyc_per_htotal);

	REG_SET_2(DST_AFTER_SCALER, 0,
		REFCYC_X_AFTER_SCALER, dlg_attr->refcyc_x_after_scaler,
		DST_Y_AFTER_SCALER, dlg_attr->dst_y_after_scaler);
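
	/*
	 * REF_FREQ_TO_PIX_FREQ holds the DML-computed reference-clock to
	 * pixel-clock ratio; as with the other DLG values in this function
	 * it is copied as-is from dlg_attr.
	 */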

	REG_SET(REF_FREQ_TO_PIX_FREQ, 0,
		REF_FREQ_TO_PIX_FREQ, dlg_attr->ref_freq_to_pix_freq);

	/* DLG - Per luma/chroma */
	REG_SET(VBLANK_PARAMETERS_1, 0,
		REFCYC_PER_PTE_GROUP_VBLANK_L, dlg_attr->refcyc_per_pte_group_vblank_l);

	if (REG(NOM_PARAMETERS_0))
		REG_SET(NOM_PARAMETERS_0, 0,
			DST_Y_PER_PTE_ROW_NOM_L, dlg_attr->dst_y_per_pte_row_nom_l);

	if (REG(NOM_PARAMETERS_1))
		REG_SET(NOM_PARAMETERS_1, 0,
			REFCYC_PER_PTE_GROUP_NOM_L, dlg_attr->refcyc_per_pte_group_nom_l);

	REG_SET(NOM_PARAMETERS_4, 0,
		DST_Y_PER_META_ROW_NOM_L, dlg_attr->dst_y_per_meta_row_nom_l);

	REG_SET(NOM_PARAMETERS_5, 0,
		REFCYC_PER_META_CHUNK_NOM_L, dlg_attr->refcyc_per_meta_chunk_nom_l);

	REG_SET_2(PER_LINE_DELIVERY, 0,
		REFCYC_PER_LINE_DELIVERY_L, dlg_attr->refcyc_per_line_delivery_l,
		REFCYC_PER_LINE_DELIVERY_C, dlg_attr->refcyc_per_line_delivery_c);

	REG_SET(VBLANK_PARAMETERS_2, 0,
		REFCYC_PER_PTE_GROUP_VBLANK_C, dlg_attr->refcyc_per_pte_group_vblank_c);

	if (REG(NOM_PARAMETERS_2))
		REG_SET(NOM_PARAMETERS_2, 0,
			DST_Y_PER_PTE_ROW_NOM_C, dlg_attr->dst_y_per_pte_row_nom_c);

	if (REG(NOM_PARAMETERS_3))
		REG_SET(NOM_PARAMETERS_3, 0,
			REFCYC_PER_PTE_GROUP_NOM_C, dlg_attr->refcyc_per_pte_group_nom_c);

	REG_SET(NOM_PARAMETERS_6, 0,
		DST_Y_PER_META_ROW_NOM_C, dlg_attr->dst_y_per_meta_row_nom_c);

	REG_SET(NOM_PARAMETERS_7, 0,
		REFCYC_PER_META_CHUNK_NOM_C, dlg_attr->refcyc_per_meta_chunk_nom_c);

	/* TTU - per hubp */
	REG_SET_2(DCN_TTU_QOS_WM, 0,
		QoS_LEVEL_LOW_WM, ttu_attr->qos_level_low_wm,
		QoS_LEVEL_HIGH_WM, ttu_attr->qos_level_high_wm);

	/* TTU - per luma/chroma */
	/* Assumed surf0 is luma and 1 is chroma */

	REG_SET_3(DCN_SURF0_TTU_CNTL0, 0,
		REFCYC_PER_REQ_DELIVERY, ttu_attr->refcyc_per_req_delivery_l,
		QoS_LEVEL_FIXED, ttu_attr->qos_level_fixed_l,
		QoS_RAMP_DISABLE, ttu_attr->qos_ramp_disable_l);

	REG_SET_3(DCN_SURF1_TTU_CNTL0, 0,
		REFCYC_PER_REQ_DELIVERY, ttu_attr->refcyc_per_req_delivery_c,
		QoS_LEVEL_FIXED, ttu_attr->qos_level_fixed_c,
		QoS_RAMP_DISABLE, ttu_attr->qos_ramp_disable_c);

	REG_SET_3(DCN_CUR0_TTU_CNTL0, 0,
		REFCYC_PER_REQ_DELIVERY, ttu_attr->refcyc_per_req_delivery_cur0,
		QoS_LEVEL_FIXED, ttu_attr->qos_level_fixed_cur0,
		QoS_RAMP_DISABLE, ttu_attr->qos_ramp_disable_cur0);

	REG_SET(FLIP_PARAMETERS_1, 0,
		REFCYC_PER_PTE_GROUP_FLIP_L, dlg_attr->refcyc_per_pte_group_flip_l);
}

void hubp2_vready_at_or_After_vsync(struct hubp *hubp,
		struct _vcs_dpi_display_pipe_dest_params_st *pipe_dest)
{
	uint32_t value = 0;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	/* disable_dlg_test_mode Set 9th bit to 1 to disable "dv" mode */
	REG_WRITE(HUBPREQ_DEBUG_DB, 1 << 8);
	/*
	 * if (VSTARTUP_START - (VREADY_OFFSET+VUPDATE_WIDTH+VUPDATE_OFFSET)/htotal)
	 *	<= OTG_V_BLANK_END
	 *	Set HUBP_VREADY_AT_OR_AFTER_VSYNC = 1
	 * else
	 *	Set HUBP_VREADY_AT_OR_AFTER_VSYNC = 0
	 */
	if (pipe_dest->htotal != 0) {
		if ((pipe_dest->vstartup_start - (pipe_dest->vready_offset + pipe_dest->vupdate_width
			+ pipe_dest->vupdate_offset) / pipe_dest->htotal) <= pipe_dest->vblank_end) {
			value = 1;
		} else
			value = 0;
	}

	REG_UPDATE(DCHUBP_CNTL, HUBP_VREADY_AT_OR_AFTER_VSYNC, value);
}

void hubp2_program_requestor(
		struct hubp *hubp,
		struct _vcs_dpi_display_rq_regs_st *rq_regs)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

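	/*
	 * Everything below comes straight from the DML-generated rq_regs:
	 * the shared expansion modes first, then the luma request sizes
	 * (rq_regs_l into DCHUBP_REQ_SIZE_CONFIG) and the chroma request
	 * sizes (rq_regs_c into DCHUBP_REQ_SIZE_CONFIG_C).
	 */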
	REG_UPDATE(HUBPRET_CONTROL,
			DET_BUF_PLANE1_BASE_ADDRESS, rq_regs->plane1_base_address);
	REG_SET_4(DCN_EXPANSION_MODE, 0,
			DRQ_EXPANSION_MODE, rq_regs->drq_expansion_mode,
			PRQ_EXPANSION_MODE, rq_regs->prq_expansion_mode,
			MRQ_EXPANSION_MODE, rq_regs->mrq_expansion_mode,
			CRQ_EXPANSION_MODE, rq_regs->crq_expansion_mode);
	REG_SET_8(DCHUBP_REQ_SIZE_CONFIG, 0,
		CHUNK_SIZE, rq_regs->rq_regs_l.chunk_size,
		MIN_CHUNK_SIZE, rq_regs->rq_regs_l.min_chunk_size,
		META_CHUNK_SIZE, rq_regs->rq_regs_l.meta_chunk_size,
		MIN_META_CHUNK_SIZE, rq_regs->rq_regs_l.min_meta_chunk_size,
		DPTE_GROUP_SIZE, rq_regs->rq_regs_l.dpte_group_size,
		MPTE_GROUP_SIZE, rq_regs->rq_regs_l.mpte_group_size,
		SWATH_HEIGHT, rq_regs->rq_regs_l.swath_height,
		PTE_ROW_HEIGHT_LINEAR, rq_regs->rq_regs_l.pte_row_height_linear);
	REG_SET_8(DCHUBP_REQ_SIZE_CONFIG_C, 0,
		CHUNK_SIZE_C, rq_regs->rq_regs_c.chunk_size,
		MIN_CHUNK_SIZE_C, rq_regs->rq_regs_c.min_chunk_size,
		META_CHUNK_SIZE_C, rq_regs->rq_regs_c.meta_chunk_size,
		MIN_META_CHUNK_SIZE_C, rq_regs->rq_regs_c.min_meta_chunk_size,
		DPTE_GROUP_SIZE_C, rq_regs->rq_regs_c.dpte_group_size,
		MPTE_GROUP_SIZE_C, rq_regs->rq_regs_c.mpte_group_size,
		SWATH_HEIGHT_C, rq_regs->rq_regs_c.swath_height,
		PTE_ROW_HEIGHT_LINEAR_C, rq_regs->rq_regs_c.pte_row_height_linear);
}

static void hubp2_setup(
		struct hubp *hubp,
		struct _vcs_dpi_display_dlg_regs_st *dlg_attr,
		struct _vcs_dpi_display_ttu_regs_st *ttu_attr,
		struct _vcs_dpi_display_rq_regs_st *rq_regs,
		struct _vcs_dpi_display_pipe_dest_params_st *pipe_dest)
{
	/* otg is locked when this func is called. Registers are double buffered.
	 * Disabling the requestors is not needed.
	 */

	hubp2_vready_at_or_After_vsync(hubp, pipe_dest);
	hubp2_program_requestor(hubp, rq_regs);
	hubp2_program_deadline(hubp, dlg_attr, ttu_attr);
}

void hubp2_setup_interdependent(
		struct hubp *hubp,
		struct _vcs_dpi_display_dlg_regs_st *dlg_attr,
		struct _vcs_dpi_display_ttu_regs_st *ttu_attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_SET_2(PREFETCH_SETTINGS, 0,
			DST_Y_PREFETCH, dlg_attr->dst_y_prefetch,
			VRATIO_PREFETCH, dlg_attr->vratio_prefetch);

	REG_SET(PREFETCH_SETTINGS_C, 0,
			VRATIO_PREFETCH_C, dlg_attr->vratio_prefetch_c);

	REG_SET_2(VBLANK_PARAMETERS_0, 0,
		DST_Y_PER_VM_VBLANK, dlg_attr->dst_y_per_vm_vblank,
		DST_Y_PER_ROW_VBLANK, dlg_attr->dst_y_per_row_vblank);

	REG_SET_2(FLIP_PARAMETERS_0, 0,
		DST_Y_PER_VM_FLIP, dlg_attr->dst_y_per_vm_flip,
		DST_Y_PER_ROW_FLIP, dlg_attr->dst_y_per_row_flip);

	REG_SET(VBLANK_PARAMETERS_3, 0,
		REFCYC_PER_META_CHUNK_VBLANK_L, dlg_attr->refcyc_per_meta_chunk_vblank_l);

	REG_SET(VBLANK_PARAMETERS_4, 0,
		REFCYC_PER_META_CHUNK_VBLANK_C, dlg_attr->refcyc_per_meta_chunk_vblank_c);

	REG_SET(FLIP_PARAMETERS_2, 0,
		REFCYC_PER_META_CHUNK_FLIP_L, dlg_attr->refcyc_per_meta_chunk_flip_l);

	REG_SET_2(PER_LINE_DELIVERY_PRE, 0,
		REFCYC_PER_LINE_DELIVERY_PRE_L, dlg_attr->refcyc_per_line_delivery_pre_l,
		REFCYC_PER_LINE_DELIVERY_PRE_C, dlg_attr->refcyc_per_line_delivery_pre_c);

	REG_SET(DCN_SURF0_TTU_CNTL1, 0,
		REFCYC_PER_REQ_DELIVERY_PRE,
		ttu_attr->refcyc_per_req_delivery_pre_l);
	REG_SET(DCN_SURF1_TTU_CNTL1, 0,
		REFCYC_PER_REQ_DELIVERY_PRE,
		ttu_attr->refcyc_per_req_delivery_pre_c);
	REG_SET(DCN_CUR0_TTU_CNTL1, 0,
		REFCYC_PER_REQ_DELIVERY_PRE,
		ttu_attr->refcyc_per_req_delivery_pre_cur0);
	REG_SET(DCN_CUR1_TTU_CNTL1, 0,
		REFCYC_PER_REQ_DELIVERY_PRE, ttu_attr->refcyc_per_req_delivery_pre_cur1);

	REG_SET_2(DCN_GLOBAL_TTU_CNTL, 0,
		MIN_TTU_VBLANK, ttu_attr->min_ttu_vblank,
		QoS_LEVEL_FLIP, ttu_attr->qos_level_flip);
}

/* In DCN2 (GFX10), the following GFX fields are deprecated. They can be set but they will not be used:
 *	NUM_BANKS
 *	NUM_SE
 *	NUM_RB_PER_SE
 *	RB_ALIGNED
 * Other things can be defaulted, since they never change:
 *	PIPE_ALIGNED = 0
 *	META_LINEAR = 0
 * In GFX10, only these apply:
 *	PIPE_INTERLEAVE
 *	NUM_PIPES
 *	MAX_COMPRESSED_FRAGS
 *	SW_MODE
 */
static void hubp2_program_tiling(
	struct dcn20_hubp *hubp2,
	const union dc_tiling_info *info,
	const enum surface_pixel_format pixel_format)
{
	REG_UPDATE_3(DCSURF_ADDR_CONFIG,
			NUM_PIPES, log_2(info->gfx9.num_pipes),
			PIPE_INTERLEAVE, info->gfx9.pipe_interleave,
			MAX_COMPRESSED_FRAGS, log_2(info->gfx9.max_compressed_frags));

	REG_UPDATE_4(DCSURF_TILING_CONFIG,
			SW_MODE, info->gfx9.swizzle,
			META_LINEAR, 0,
			RB_ALIGNED, 0,
			PIPE_ALIGNED, 0);
}

void hubp2_program_size(
	struct hubp *hubp,
	enum surface_pixel_format format,
	const struct plane_size *plane_size,
	struct dc_plane_dcc_param *dcc)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t pitch, meta_pitch, pitch_c, meta_pitch_c;
	bool use_pitch_c = false;

	/* Program data and meta surface pitch (calculation from addrlib)
	 * 444 or 420 luma
	 */
	use_pitch_c = format >= SURFACE_PIXEL_FORMAT_VIDEO_BEGIN
		&& format < SURFACE_PIXEL_FORMAT_SUBSAMPLE_END;
#if defined(CONFIG_DRM_AMD_DC_DCN3_0)
	use_pitch_c = use_pitch_c
		|| (format == SURFACE_PIXEL_FORMAT_GRPH_RGBE_ALPHA);
#endif
	if (use_pitch_c) {
		ASSERT(plane_size->chroma_pitch != 0);
		/* Chroma pitch zero can cause system hang! */

		pitch = plane_size->surface_pitch - 1;
		meta_pitch = dcc->meta_pitch - 1;
		pitch_c = plane_size->chroma_pitch - 1;
		meta_pitch_c = dcc->meta_pitch_c - 1;
	} else {
		pitch = plane_size->surface_pitch - 1;
		meta_pitch = dcc->meta_pitch - 1;
		pitch_c = 0;
		meta_pitch_c = 0;
	}

	if (!dcc->enable) {
		meta_pitch = 0;
		meta_pitch_c = 0;
	}

	REG_UPDATE_2(DCSURF_SURFACE_PITCH,
			PITCH, pitch, META_PITCH, meta_pitch);

	use_pitch_c = format >= SURFACE_PIXEL_FORMAT_VIDEO_BEGIN;
#if defined(CONFIG_DRM_AMD_DC_DCN3_0)
	use_pitch_c = use_pitch_c
		|| (format == SURFACE_PIXEL_FORMAT_GRPH_RGBE_ALPHA);
#endif
	if (use_pitch_c)
		REG_UPDATE_2(DCSURF_SURFACE_PITCH_C,
			PITCH_C, pitch_c, META_PITCH_C, meta_pitch_c);
}

void hubp2_program_rotation(
	struct hubp *hubp,
	enum dc_rotation_angle rotation,
	bool horizontal_mirror)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t mirror;

	if (horizontal_mirror)
		mirror = 1;
	else
		mirror = 0;

	/* Program rotation angle and horz mirror - no mirror */
	if (rotation == ROTATION_ANGLE_0)
		REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
				ROTATION_ANGLE, 0,
				H_MIRROR_EN, mirror);
	else if (rotation == ROTATION_ANGLE_90)
		REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
				ROTATION_ANGLE, 1,
				H_MIRROR_EN, mirror);
	else if (rotation == ROTATION_ANGLE_180)
		REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
				ROTATION_ANGLE, 2,
				H_MIRROR_EN, mirror);
	else if (rotation == ROTATION_ANGLE_270)
		REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
				ROTATION_ANGLE, 3,
				H_MIRROR_EN, mirror);
}

void hubp2_dcc_control(struct hubp *hubp, bool enable,
		enum hubp_ind_block_size independent_64b_blks)
{
	uint32_t dcc_en = enable ? 1 : 0;
	uint32_t dcc_ind_64b_blk = independent_64b_blks ?
			1 : 0;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_UPDATE_4(DCSURF_SURFACE_CONTROL,
		PRIMARY_SURFACE_DCC_EN, dcc_en,
		PRIMARY_SURFACE_DCC_IND_64B_BLK, dcc_ind_64b_blk,
		SECONDARY_SURFACE_DCC_EN, dcc_en,
		SECONDARY_SURFACE_DCC_IND_64B_BLK, dcc_ind_64b_blk);
}

void hubp2_program_pixel_format(
	struct hubp *hubp,
	enum surface_pixel_format format)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t red_bar = 3;
	uint32_t blue_bar = 2;

	/* swap for ABGR format */
	if (format == SURFACE_PIXEL_FORMAT_GRPH_ABGR8888
			|| format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010
			|| format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS
			|| format == SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F) {
		red_bar = 2;
		blue_bar = 3;
	}

	REG_UPDATE_2(HUBPRET_CONTROL,
			CROSSBAR_SRC_CB_B, blue_bar,
			CROSSBAR_SRC_CR_R, red_bar);

	/* Mapping is same as ipp programming (cnvc) */

	switch (format) {
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 1);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 3);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 8);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 10);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 22);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F: /* we use crossbar already */
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 24);
		break;

	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 65);
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 64);
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 67);
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 66);
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_AYCrCb8888:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 12);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_RGB111110_FIX:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 112);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_BGR101111_FIX:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 113);
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_ACrYCb2101010:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 114);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_RGB111110_FLOAT:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 118);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_BGR101111_FLOAT:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 119);
		break;
#if defined(CONFIG_DRM_AMD_DC_DCN3_0)
	case SURFACE_PIXEL_FORMAT_GRPH_RGBE:
		REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 116,
				ALPHA_PLANE_EN, 0);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_RGBE_ALPHA:
		REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 116,
				ALPHA_PLANE_EN, 1);
		break;
#endif
	default:
		BREAK_TO_DEBUGGER();
		break;
	}

	/* don't see the need to program the xbar in DCN 1.0 */
}

void hubp2_program_surface_config(
	struct hubp *hubp,
	enum surface_pixel_format format,
	union dc_tiling_info *tiling_info,
	struct plane_size *plane_size,
	enum dc_rotation_angle rotation,
	struct dc_plane_dcc_param *dcc,
	bool horizontal_mirror,
	unsigned int compat_level)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	hubp2_dcc_control(hubp, dcc->enable, dcc->independent_64b_blks);
	hubp2_program_tiling(hubp2, tiling_info, format);
	hubp2_program_size(hubp, format, plane_size, dcc);
	hubp2_program_rotation(hubp, rotation, horizontal_mirror);
	hubp2_program_pixel_format(hubp, format);
}

enum cursor_lines_per_chunk hubp2_get_lines_per_chunk(
	unsigned int cursor_width,
	enum dc_cursor_color_format cursor_mode)
{
	enum cursor_lines_per_chunk line_per_chunk = CURSOR_LINE_PER_CHUNK_16;

	if (cursor_mode == CURSOR_MODE_MONO)
		line_per_chunk = CURSOR_LINE_PER_CHUNK_16;
	else if (cursor_mode == CURSOR_MODE_COLOR_1BIT_AND ||
		 cursor_mode == CURSOR_MODE_COLOR_PRE_MULTIPLIED_ALPHA ||
		 cursor_mode == CURSOR_MODE_COLOR_UN_PRE_MULTIPLIED_ALPHA) {
		if (cursor_width >= 1 && cursor_width <= 32)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_16;
		else if (cursor_width >= 33 && cursor_width <= 64)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_8;
		else if (cursor_width >= 65 && cursor_width <= 128)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_4;
		else if (cursor_width >= 129 && cursor_width <= 256)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_2;
	} else if (cursor_mode == CURSOR_MODE_COLOR_64BIT_FP_PRE_MULTIPLIED ||
		   cursor_mode == CURSOR_MODE_COLOR_64BIT_FP_UN_PRE_MULTIPLIED) {
		if (cursor_width >= 1 && cursor_width <= 16)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_16;
		else if (cursor_width >= 17 && cursor_width <= 32)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_8;
		else if (cursor_width >= 33 && cursor_width <= 64)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_4;
		else if (cursor_width >= 65 && cursor_width <= 128)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_2;
		else if (cursor_width >= 129 && cursor_width <= 256)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_1;
	}

	return line_per_chunk;
}

void hubp2_cursor_set_attributes(
		struct hubp *hubp,
		const struct dc_cursor_attributes *attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	enum cursor_pitch hw_pitch = hubp1_get_cursor_pitch(attr->pitch);
	enum cursor_lines_per_chunk lpc = hubp2_get_lines_per_chunk(
			attr->width, attr->color_format);

	hubp->curs_attr = *attr;

	REG_UPDATE(CURSOR_SURFACE_ADDRESS_HIGH,
			CURSOR_SURFACE_ADDRESS_HIGH, attr->address.high_part);
	REG_UPDATE(CURSOR_SURFACE_ADDRESS,
			CURSOR_SURFACE_ADDRESS, attr->address.low_part);

	REG_UPDATE_2(CURSOR_SIZE,
			CURSOR_WIDTH, attr->width,
			CURSOR_HEIGHT, attr->height);

	REG_UPDATE_4(CURSOR_CONTROL,
			CURSOR_MODE, attr->color_format,
			CURSOR_2X_MAGNIFY, attr->attribute_flags.bits.ENABLE_MAGNIFICATION,
			CURSOR_PITCH, hw_pitch,
			CURSOR_LINES_PER_CHUNK, lpc);

	REG_SET_2(CURSOR_SETTINGS, 0,
			/* no shift of the cursor HDL schedule */
			CURSOR0_DST_Y_OFFSET, 0,
			/* used to shift the cursor chunk request deadline */
			CURSOR0_CHUNK_HDL_ADJUST, 3);
}

void hubp2_dmdata_set_attributes(
		struct hubp *hubp,
		const struct dc_dmdata_attributes *attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	if (attr->dmdata_mode == DMDATA_HW_MODE) {
		/* set to HW mode */
		REG_UPDATE(DMDATA_CNTL,
				DMDATA_MODE, 1);

		/* for DMDATA flip, need to use SURFACE_UPDATE_LOCK */
		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_UPDATE_LOCK, 1);

		/* toggle DMDATA_UPDATED and set repeat and size */
		REG_UPDATE(DMDATA_CNTL,
				DMDATA_UPDATED, 0);
		REG_UPDATE_3(DMDATA_CNTL,
				DMDATA_UPDATED, 1,
				DMDATA_REPEAT, attr->dmdata_repeat,
				DMDATA_SIZE, attr->dmdata_size);

		/* set DMDATA address */
		REG_WRITE(DMDATA_ADDRESS_LOW, attr->address.low_part);
		REG_UPDATE(DMDATA_ADDRESS_HIGH,
				DMDATA_ADDRESS_HIGH, attr->address.high_part);

		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_UPDATE_LOCK, 0);

	} else {
		/* set to SW mode before loading data */
		REG_SET(DMDATA_CNTL, 0,
				DMDATA_MODE, 0);
		/* toggle DMDATA_SW_UPDATED to start loading sequence */
		REG_UPDATE(DMDATA_SW_CNTL,
				DMDATA_SW_UPDATED, 0);
		REG_UPDATE_3(DMDATA_SW_CNTL,
				DMDATA_SW_UPDATED, 1,
				DMDATA_SW_REPEAT, attr->dmdata_repeat,
				DMDATA_SW_SIZE, attr->dmdata_size);
		/* load data into hubp dmdata buffer */
		hubp2_dmdata_load(hubp, attr->dmdata_size, attr->dmdata_sw_data);
	}

	/* Note that DL_DELTA must be programmed if we want to use TTU mode */
	REG_SET_3(DMDATA_QOS_CNTL, 0,
			DMDATA_QOS_MODE, attr->dmdata_qos_mode,
			DMDATA_QOS_LEVEL, attr->dmdata_qos_level,
			DMDATA_DL_DELTA, attr->dmdata_dl_delta);
}

void hubp2_dmdata_load(
		struct hubp *hubp,
		uint32_t dmdata_sw_size,
		const uint32_t *dmdata_sw_data)
{
	int i;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	/* load dmdata into HUBP buffer in SW mode */
	for (i = 0; i < dmdata_sw_size / 4; i++)
		REG_WRITE(DMDATA_SW_DATA, dmdata_sw_data[i]);
}

bool hubp2_dmdata_status_done(struct hubp *hubp)
{
	uint32_t status;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_GET(DMDATA_STATUS, DMDATA_DONE, &status);
	return (status == 1);
}

bool hubp2_program_surface_flip_and_addr(
	struct hubp *hubp,
	const struct dc_plane_address *address,
	bool flip_immediate)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	//program flip type
	REG_UPDATE(DCSURF_FLIP_CONTROL,
			SURFACE_FLIP_TYPE, flip_immediate);

	// Program VMID reg
	REG_UPDATE(VMID_SETTINGS_0,
			VMID, address->vmid);

	if (address->type == PLN_ADDR_TYPE_GRPH_STEREO) {
		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_MODE_FOR_STEREOSYNC, 0x1);
		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_IN_STEREOSYNC, 0x1);

	} else {
		// turn off stereo if not in stereo
		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_MODE_FOR_STEREOSYNC, 0x0);
		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_IN_STEREOSYNC, 0x0);
	}

	/* HW automatically latches the rest of the address registers on write to
	 * DCSURF_PRIMARY_SURFACE_ADDRESS if SURFACE_UPDATE_LOCK is not used
	 *
	 * program high first and then the low addr, order matters!
	 */
	switch (address->type) {
	case PLN_ADDR_TYPE_GRAPHICS:
		/* DCN1.0 does not support const color
		 * TODO: program DCHUBBUB_RET_PATH_DCC_CFGx_0/1
		 * base on address->grph.dcc_const_color
		 * x = 0, 2, 4, 6 for pipe 0, 1, 2, 3 for rgb and luma
		 * x = 1, 3, 5, 7 for pipe 0, 1, 2, 3 for chroma
		 */

		if (address->grph.addr.quad_part == 0)
			break;

		REG_UPDATE_2(DCSURF_SURFACE_CONTROL,
				PRIMARY_SURFACE_TMZ, address->tmz_surface,
				PRIMARY_META_SURFACE_TMZ, address->tmz_surface);

		if (address->grph.meta_addr.quad_part != 0) {
			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0,
					PRIMARY_META_SURFACE_ADDRESS_HIGH,
					address->grph.meta_addr.high_part);

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0,
					PRIMARY_META_SURFACE_ADDRESS,
					address->grph.meta_addr.low_part);
		}

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0,
				PRIMARY_SURFACE_ADDRESS_HIGH,
				address->grph.addr.high_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0,
				PRIMARY_SURFACE_ADDRESS,
				address->grph.addr.low_part);
		break;
	case PLN_ADDR_TYPE_VIDEO_PROGRESSIVE:
		if (address->video_progressive.luma_addr.quad_part == 0
				|| address->video_progressive.chroma_addr.quad_part == 0)
			break;

		REG_UPDATE_4(DCSURF_SURFACE_CONTROL,
				PRIMARY_SURFACE_TMZ, address->tmz_surface,
				PRIMARY_SURFACE_TMZ_C, address->tmz_surface,
				PRIMARY_META_SURFACE_TMZ, address->tmz_surface,
				PRIMARY_META_SURFACE_TMZ_C, address->tmz_surface);

		if (address->video_progressive.luma_meta_addr.quad_part != 0) {
			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH_C, 0,
					PRIMARY_META_SURFACE_ADDRESS_HIGH_C,
					address->video_progressive.chroma_meta_addr.high_part);

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_C, 0,
					PRIMARY_META_SURFACE_ADDRESS_C,
					address->video_progressive.chroma_meta_addr.low_part);

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0,
					PRIMARY_META_SURFACE_ADDRESS_HIGH,
					address->video_progressive.luma_meta_addr.high_part);

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0,
					PRIMARY_META_SURFACE_ADDRESS,
					address->video_progressive.luma_meta_addr.low_part);
		}

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C, 0,
				PRIMARY_SURFACE_ADDRESS_HIGH_C,
				address->video_progressive.chroma_addr.high_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_C, 0,
				PRIMARY_SURFACE_ADDRESS_C,
				address->video_progressive.chroma_addr.low_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0,
				PRIMARY_SURFACE_ADDRESS_HIGH,
				address->video_progressive.luma_addr.high_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0,
				PRIMARY_SURFACE_ADDRESS,
				address->video_progressive.luma_addr.low_part);
		break;
	case PLN_ADDR_TYPE_GRPH_STEREO:
		if (address->grph_stereo.left_addr.quad_part == 0)
			break;
		if (address->grph_stereo.right_addr.quad_part == 0)
			break;

		REG_UPDATE_8(DCSURF_SURFACE_CONTROL,
				PRIMARY_SURFACE_TMZ, address->tmz_surface,
				PRIMARY_SURFACE_TMZ_C, address->tmz_surface,
				PRIMARY_META_SURFACE_TMZ, address->tmz_surface,
				PRIMARY_META_SURFACE_TMZ_C, address->tmz_surface,
				SECONDARY_SURFACE_TMZ, address->tmz_surface,
				SECONDARY_SURFACE_TMZ_C, address->tmz_surface,
				SECONDARY_META_SURFACE_TMZ, address->tmz_surface,
				SECONDARY_META_SURFACE_TMZ_C, address->tmz_surface);

		if (address->grph_stereo.right_meta_addr.quad_part != 0) {

			REG_SET(DCSURF_SECONDARY_META_SURFACE_ADDRESS_HIGH, 0,
					SECONDARY_META_SURFACE_ADDRESS_HIGH,
					address->grph_stereo.right_meta_addr.high_part);

			REG_SET(DCSURF_SECONDARY_META_SURFACE_ADDRESS, 0,
					SECONDARY_META_SURFACE_ADDRESS,
					address->grph_stereo.right_meta_addr.low_part);
		}
		if (address->grph_stereo.left_meta_addr.quad_part != 0) {

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0,
					PRIMARY_META_SURFACE_ADDRESS_HIGH,
					address->grph_stereo.left_meta_addr.high_part);

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0,
					PRIMARY_META_SURFACE_ADDRESS,
					address->grph_stereo.left_meta_addr.low_part);
		}

		REG_SET(DCSURF_SECONDARY_SURFACE_ADDRESS_HIGH, 0,
				SECONDARY_SURFACE_ADDRESS_HIGH,
				address->grph_stereo.right_addr.high_part);

		REG_SET(DCSURF_SECONDARY_SURFACE_ADDRESS, 0,
				SECONDARY_SURFACE_ADDRESS,
				address->grph_stereo.right_addr.low_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0,
				PRIMARY_SURFACE_ADDRESS_HIGH,
				address->grph_stereo.left_addr.high_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0,
				PRIMARY_SURFACE_ADDRESS,
				address->grph_stereo.left_addr.low_part);
		break;
	default:
		BREAK_TO_DEBUGGER();
		break;
	}

	hubp->request_address = *address;

	return true;
}

void hubp2_enable_triplebuffer(
	struct hubp *hubp,
	bool enable)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t triple_buffer_en = 0;
	bool tri_buffer_en;

	REG_GET(DCSURF_FLIP_CONTROL2, SURFACE_TRIPLE_BUFFER_ENABLE, &triple_buffer_en);
	tri_buffer_en = (triple_buffer_en == 1);
	if (tri_buffer_en != enable) {
		REG_UPDATE(DCSURF_FLIP_CONTROL2,
			SURFACE_TRIPLE_BUFFER_ENABLE, enable ? DC_TRIPLEBUFFER_ENABLE : DC_TRIPLEBUFFER_DISABLE);
	}
}

bool hubp2_is_triplebuffer_enabled(
	struct hubp *hubp)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t triple_buffer_en = 0;

	REG_GET(DCSURF_FLIP_CONTROL2, SURFACE_TRIPLE_BUFFER_ENABLE, &triple_buffer_en);

	return (bool)triple_buffer_en;
}

void hubp2_set_flip_control_surface_gsl(struct hubp *hubp, bool enable)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_UPDATE(DCSURF_FLIP_CONTROL2, SURFACE_GSL_ENABLE, enable ? 1 : 0);
}

bool hubp2_is_flip_pending(struct hubp *hubp)
{
	uint32_t flip_pending = 0;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	struct dc_plane_address earliest_inuse_address;

	if (hubp && hubp->power_gated)
		return false;

	REG_GET(DCSURF_FLIP_CONTROL,
			SURFACE_FLIP_PENDING, &flip_pending);

	REG_GET(DCSURF_SURFACE_EARLIEST_INUSE,
			SURFACE_EARLIEST_INUSE_ADDRESS, &earliest_inuse_address.grph.addr.low_part);

	REG_GET(DCSURF_SURFACE_EARLIEST_INUSE_HIGH,
			SURFACE_EARLIEST_INUSE_ADDRESS_HIGH, &earliest_inuse_address.grph.addr.high_part);

	if (flip_pending)
		return true;

	if (earliest_inuse_address.grph.addr.quad_part != hubp->request_address.grph.addr.quad_part)
		return true;

	return false;
}

void hubp2_set_blank(struct hubp *hubp, bool blank)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t blank_en = blank ? 1 : 0;

	REG_UPDATE_2(DCHUBP_CNTL,
			HUBP_BLANK_EN, blank_en,
			HUBP_TTU_DISABLE, blank_en);

	if (blank) {
		uint32_t reg_val = REG_READ(DCHUBP_CNTL);

		if (reg_val) {
			/* init sequence workaround: in case HUBP is
			 * power gated, this wait would timeout.
			 *
			 * we just wrote reg_val to non-0, if it stays 0
			 * it means HUBP is gated
			 */
			REG_WAIT(DCHUBP_CNTL,
					HUBP_NO_OUTSTANDING_REQ, 1,
					1, 200);
		}

		hubp->mpcc_id = 0xf;
		hubp->opp_id = OPP_ID_INVALID;
	}
}

void hubp2_cursor_set_position(
		struct hubp *hubp,
		const struct dc_cursor_position *pos,
		const struct dc_cursor_mi_param *param)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	int src_x_offset = pos->x - pos->x_hotspot - param->viewport.x;
	int src_y_offset = pos->y - pos->y_hotspot - param->viewport.y;
	int x_hotspot = pos->x_hotspot;
	int y_hotspot = pos->y_hotspot;
	int cursor_height = (int)hubp->curs_attr.height;
	int cursor_width = (int)hubp->curs_attr.width;
	uint32_t dst_x_offset;
	uint32_t cur_en = pos->enable ? 1 : 0;

	/*
	 * Guard against cursor_set_position() from being called with invalid
	 * attributes
	 *
	 * TODO: Look at combining cursor_set_position() and
	 * cursor_set_attributes() into cursor_update()
	 */
	if (hubp->curs_attr.address.quad_part == 0)
		return;

	// Rotated cursor width/height and hotspot tweaks for offset calculation
	if (param->rotation == ROTATION_ANGLE_90 || param->rotation == ROTATION_ANGLE_270) {
		swap(cursor_height, cursor_width);
		if (param->rotation == ROTATION_ANGLE_90) {
			src_x_offset = pos->x - pos->y_hotspot - param->viewport.x;
			src_y_offset = pos->y - pos->x_hotspot - param->viewport.y;
		}
	} else if (param->rotation == ROTATION_ANGLE_180) {
		src_x_offset = pos->x - param->viewport.x;
		src_y_offset = pos->y - param->viewport.y;
	}

	if (param->mirror) {
		x_hotspot = param->viewport.width - x_hotspot;
		src_x_offset = param->viewport.x + param->viewport.width - src_x_offset;
	}

	dst_x_offset = (src_x_offset >= 0) ? src_x_offset : 0;
	dst_x_offset *= param->ref_clk_khz;
	dst_x_offset /= param->pixel_clk_khz;

	ASSERT(param->h_scale_ratio.value);

	if (param->h_scale_ratio.value)
		dst_x_offset = dc_fixpt_floor(dc_fixpt_div(
				dc_fixpt_from_int(dst_x_offset),
				param->h_scale_ratio));

	if (src_x_offset >= (int)param->viewport.width)
		cur_en = 0;  /* not visible beyond right edge*/

	if (src_x_offset + cursor_width <= 0)
		cur_en = 0;  /* not visible beyond left edge*/

	if (src_y_offset >= (int)param->viewport.height)
		cur_en = 0;  /* not visible beyond bottom edge*/

	if (src_y_offset + cursor_height <= 0)
		cur_en = 0;  /* not visible beyond top edge*/

	if (cur_en && REG_READ(CURSOR_SURFACE_ADDRESS) == 0)
		hubp->funcs->set_cursor_attributes(hubp, &hubp->curs_attr);

	REG_UPDATE(CURSOR_CONTROL,
			CURSOR_ENABLE, cur_en);

	REG_SET_2(CURSOR_POSITION, 0,
			CURSOR_X_POSITION, pos->x,
			CURSOR_Y_POSITION, pos->y);

	REG_SET_2(CURSOR_HOT_SPOT, 0,
			CURSOR_HOT_SPOT_X, x_hotspot,
			CURSOR_HOT_SPOT_Y, y_hotspot);

	REG_SET(CURSOR_DST_OFFSET, 0,
			CURSOR_DST_X_OFFSET, dst_x_offset);
	/* TODO Handle surface pixel formats other than 4:4:4 */
}

void hubp2_clk_cntl(struct hubp *hubp, bool enable)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t clk_enable = enable ?
			1 : 0;

	REG_UPDATE(HUBP_CLK_CNTL, HUBP_CLOCK_ENABLE, clk_enable);
}

void hubp2_vtg_sel(struct hubp *hubp, uint32_t otg_inst)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_UPDATE(DCHUBP_CNTL, HUBP_VTG_SEL, otg_inst);
}

void hubp2_clear_underflow(struct hubp *hubp)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_UPDATE(DCHUBP_CNTL, HUBP_UNDERFLOW_CLEAR, 1);
}

void hubp2_read_state_common(struct hubp *hubp)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	struct dcn_hubp_state *s = &hubp2->state;
	struct _vcs_dpi_display_dlg_regs_st *dlg_attr = &s->dlg_attr;
	struct _vcs_dpi_display_ttu_regs_st *ttu_attr = &s->ttu_attr;
	struct _vcs_dpi_display_rq_regs_st *rq_regs = &s->rq_regs;

	/* Requester */
	REG_GET(HUBPRET_CONTROL,
			DET_BUF_PLANE1_BASE_ADDRESS, &rq_regs->plane1_base_address);
	REG_GET_4(DCN_EXPANSION_MODE,
			DRQ_EXPANSION_MODE, &rq_regs->drq_expansion_mode,
			PRQ_EXPANSION_MODE, &rq_regs->prq_expansion_mode,
			MRQ_EXPANSION_MODE, &rq_regs->mrq_expansion_mode,
			CRQ_EXPANSION_MODE, &rq_regs->crq_expansion_mode);

	/* DLG - Per hubp */
	REG_GET_2(BLANK_OFFSET_0,
		REFCYC_H_BLANK_END, &dlg_attr->refcyc_h_blank_end,
		DLG_V_BLANK_END, &dlg_attr->dlg_vblank_end);

	REG_GET(BLANK_OFFSET_1,
		MIN_DST_Y_NEXT_START, &dlg_attr->min_dst_y_next_start);

	REG_GET(DST_DIMENSIONS,
		REFCYC_PER_HTOTAL, &dlg_attr->refcyc_per_htotal);

	REG_GET_2(DST_AFTER_SCALER,
		REFCYC_X_AFTER_SCALER, &dlg_attr->refcyc_x_after_scaler,
		DST_Y_AFTER_SCALER, &dlg_attr->dst_y_after_scaler);

	if (REG(PREFETCH_SETTINS))
		REG_GET_2(PREFETCH_SETTINS,
			DST_Y_PREFETCH, &dlg_attr->dst_y_prefetch,
			VRATIO_PREFETCH, &dlg_attr->vratio_prefetch);
	else
		REG_GET_2(PREFETCH_SETTINGS,
			DST_Y_PREFETCH, &dlg_attr->dst_y_prefetch,
			VRATIO_PREFETCH, &dlg_attr->vratio_prefetch);

	REG_GET_2(VBLANK_PARAMETERS_0,
		DST_Y_PER_VM_VBLANK, &dlg_attr->dst_y_per_vm_vblank,
		DST_Y_PER_ROW_VBLANK, &dlg_attr->dst_y_per_row_vblank);

	REG_GET(REF_FREQ_TO_PIX_FREQ,
		REF_FREQ_TO_PIX_FREQ, &dlg_attr->ref_freq_to_pix_freq);

	/* DLG - Per luma/chroma */
	REG_GET(VBLANK_PARAMETERS_1,
		REFCYC_PER_PTE_GROUP_VBLANK_L, &dlg_attr->refcyc_per_pte_group_vblank_l);

	REG_GET(VBLANK_PARAMETERS_3,
		REFCYC_PER_META_CHUNK_VBLANK_L, &dlg_attr->refcyc_per_meta_chunk_vblank_l);

	if (REG(NOM_PARAMETERS_0))
		REG_GET(NOM_PARAMETERS_0,
			DST_Y_PER_PTE_ROW_NOM_L, &dlg_attr->dst_y_per_pte_row_nom_l);

	if (REG(NOM_PARAMETERS_1))
		REG_GET(NOM_PARAMETERS_1,
			REFCYC_PER_PTE_GROUP_NOM_L, &dlg_attr->refcyc_per_pte_group_nom_l);

	REG_GET(NOM_PARAMETERS_4,
		DST_Y_PER_META_ROW_NOM_L, &dlg_attr->dst_y_per_meta_row_nom_l);

	REG_GET(NOM_PARAMETERS_5,
		REFCYC_PER_META_CHUNK_NOM_L, &dlg_attr->refcyc_per_meta_chunk_nom_l);

	REG_GET_2(PER_LINE_DELIVERY_PRE,
		REFCYC_PER_LINE_DELIVERY_PRE_L, &dlg_attr->refcyc_per_line_delivery_pre_l,
		REFCYC_PER_LINE_DELIVERY_PRE_C, &dlg_attr->refcyc_per_line_delivery_pre_c);

	REG_GET_2(PER_LINE_DELIVERY,
		REFCYC_PER_LINE_DELIVERY_L, &dlg_attr->refcyc_per_line_delivery_l,
		REFCYC_PER_LINE_DELIVERY_C, &dlg_attr->refcyc_per_line_delivery_c);

	if (REG(PREFETCH_SETTINS_C))
		REG_GET(PREFETCH_SETTINS_C,
			VRATIO_PREFETCH_C,
			&dlg_attr->vratio_prefetch_c);
	else
		REG_GET(PREFETCH_SETTINGS_C,
			VRATIO_PREFETCH_C, &dlg_attr->vratio_prefetch_c);

	REG_GET(VBLANK_PARAMETERS_2,
		REFCYC_PER_PTE_GROUP_VBLANK_C, &dlg_attr->refcyc_per_pte_group_vblank_c);

	REG_GET(VBLANK_PARAMETERS_4,
		REFCYC_PER_META_CHUNK_VBLANK_C, &dlg_attr->refcyc_per_meta_chunk_vblank_c);

	if (REG(NOM_PARAMETERS_2))
		REG_GET(NOM_PARAMETERS_2,
			DST_Y_PER_PTE_ROW_NOM_C, &dlg_attr->dst_y_per_pte_row_nom_c);

	if (REG(NOM_PARAMETERS_3))
		REG_GET(NOM_PARAMETERS_3,
			REFCYC_PER_PTE_GROUP_NOM_C, &dlg_attr->refcyc_per_pte_group_nom_c);

	REG_GET(NOM_PARAMETERS_6,
		DST_Y_PER_META_ROW_NOM_C, &dlg_attr->dst_y_per_meta_row_nom_c);

	REG_GET(NOM_PARAMETERS_7,
		REFCYC_PER_META_CHUNK_NOM_C, &dlg_attr->refcyc_per_meta_chunk_nom_c);

	/* TTU - per hubp */
	REG_GET_2(DCN_TTU_QOS_WM,
		QoS_LEVEL_LOW_WM, &ttu_attr->qos_level_low_wm,
		QoS_LEVEL_HIGH_WM, &ttu_attr->qos_level_high_wm);

	REG_GET_2(DCN_GLOBAL_TTU_CNTL,
		MIN_TTU_VBLANK, &ttu_attr->min_ttu_vblank,
		QoS_LEVEL_FLIP, &ttu_attr->qos_level_flip);

	/* TTU - per luma/chroma */
	/* Assumed surf0 is luma and 1 is chroma */

	REG_GET_3(DCN_SURF0_TTU_CNTL0,
		REFCYC_PER_REQ_DELIVERY, &ttu_attr->refcyc_per_req_delivery_l,
		QoS_LEVEL_FIXED, &ttu_attr->qos_level_fixed_l,
		QoS_RAMP_DISABLE, &ttu_attr->qos_ramp_disable_l);

	REG_GET(DCN_SURF0_TTU_CNTL1,
		REFCYC_PER_REQ_DELIVERY_PRE,
		&ttu_attr->refcyc_per_req_delivery_pre_l);

	REG_GET_3(DCN_SURF1_TTU_CNTL0,
		REFCYC_PER_REQ_DELIVERY, &ttu_attr->refcyc_per_req_delivery_c,
		QoS_LEVEL_FIXED, &ttu_attr->qos_level_fixed_c,
		QoS_RAMP_DISABLE, &ttu_attr->qos_ramp_disable_c);

	REG_GET(DCN_SURF1_TTU_CNTL1,
		REFCYC_PER_REQ_DELIVERY_PRE,
		&ttu_attr->refcyc_per_req_delivery_pre_c);

	/* Rest of hubp */
	REG_GET(DCSURF_SURFACE_CONFIG,
			SURFACE_PIXEL_FORMAT, &s->pixel_format);

	REG_GET(DCSURF_SURFACE_EARLIEST_INUSE_HIGH,
			SURFACE_EARLIEST_INUSE_ADDRESS_HIGH, &s->inuse_addr_hi);

	REG_GET(DCSURF_SURFACE_EARLIEST_INUSE,
			SURFACE_EARLIEST_INUSE_ADDRESS, &s->inuse_addr_lo);

	REG_GET_2(DCSURF_PRI_VIEWPORT_DIMENSION,
			PRI_VIEWPORT_WIDTH, &s->viewport_width,
			PRI_VIEWPORT_HEIGHT, &s->viewport_height);

	REG_GET_2(DCSURF_SURFACE_CONFIG,
			ROTATION_ANGLE, &s->rotation_angle,
			H_MIRROR_EN, &s->h_mirror_en);

	REG_GET(DCSURF_TILING_CONFIG,
			SW_MODE, &s->sw_mode);

	REG_GET(DCSURF_SURFACE_CONTROL,
			PRIMARY_SURFACE_DCC_EN, &s->dcc_en);

	REG_GET_3(DCHUBP_CNTL,
			HUBP_BLANK_EN, &s->blank_en,
			HUBP_TTU_DISABLE, &s->ttu_disable,
			HUBP_UNDERFLOW_STATUS, &s->underflow_status);

	REG_GET(HUBP_CLK_CNTL,
			HUBP_CLOCK_ENABLE, &s->clock_en);

	REG_GET(DCN_GLOBAL_TTU_CNTL,
			MIN_TTU_VBLANK, &s->min_ttu_vblank);

	REG_GET_2(DCN_TTU_QOS_WM,
			QoS_LEVEL_LOW_WM, &s->qos_level_low_wm,
			QoS_LEVEL_HIGH_WM, &s->qos_level_high_wm);
}

void hubp2_read_state(struct hubp *hubp)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	struct dcn_hubp_state *s = &hubp2->state;
	struct _vcs_dpi_display_rq_regs_st *rq_regs = &s->rq_regs;

	hubp2_read_state_common(hubp);

	REG_GET_8(DCHUBP_REQ_SIZE_CONFIG,
		CHUNK_SIZE, &rq_regs->rq_regs_l.chunk_size,
		MIN_CHUNK_SIZE, &rq_regs->rq_regs_l.min_chunk_size,
		META_CHUNK_SIZE, &rq_regs->rq_regs_l.meta_chunk_size,
		MIN_META_CHUNK_SIZE, &rq_regs->rq_regs_l.min_meta_chunk_size,
		DPTE_GROUP_SIZE, &rq_regs->rq_regs_l.dpte_group_size,
		MPTE_GROUP_SIZE, &rq_regs->rq_regs_l.mpte_group_size,
		SWATH_HEIGHT, &rq_regs->rq_regs_l.swath_height,
		PTE_ROW_HEIGHT_LINEAR, &rq_regs->rq_regs_l.pte_row_height_linear);

	REG_GET_8(DCHUBP_REQ_SIZE_CONFIG_C,
		CHUNK_SIZE_C, &rq_regs->rq_regs_c.chunk_size,
		MIN_CHUNK_SIZE_C, &rq_regs->rq_regs_c.min_chunk_size,
		META_CHUNK_SIZE_C, &rq_regs->rq_regs_c.meta_chunk_size,
		MIN_META_CHUNK_SIZE_C, &rq_regs->rq_regs_c.min_meta_chunk_size,
		DPTE_GROUP_SIZE_C, &rq_regs->rq_regs_c.dpte_group_size,
		MPTE_GROUP_SIZE_C, &rq_regs->rq_regs_c.mpte_group_size,
		SWATH_HEIGHT_C, &rq_regs->rq_regs_c.swath_height,
		PTE_ROW_HEIGHT_LINEAR_C, &rq_regs->rq_regs_c.pte_row_height_linear);
}

void hubp2_validate_dml_output(struct hubp *hubp,
		struct dc_context *ctx,
		struct _vcs_dpi_display_rq_regs_st *dml_rq_regs,
		struct _vcs_dpi_display_dlg_regs_st *dml_dlg_attr,
		struct _vcs_dpi_display_ttu_regs_st *dml_ttu_attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	struct _vcs_dpi_display_rq_regs_st rq_regs = {0};
	struct _vcs_dpi_display_dlg_regs_st dlg_attr = {0};
	struct _vcs_dpi_display_ttu_regs_st ttu_attr = {0};
	DC_LOGGER_INIT(ctx->logger);
	DC_LOG_DEBUG("DML Validation | Running Validation");

	/* Requestor Regs */
	REG_GET(HUBPRET_CONTROL,
		DET_BUF_PLANE1_BASE_ADDRESS, &rq_regs.plane1_base_address);
	REG_GET_4(DCN_EXPANSION_MODE,
		DRQ_EXPANSION_MODE, &rq_regs.drq_expansion_mode,
		PRQ_EXPANSION_MODE, &rq_regs.prq_expansion_mode,
		MRQ_EXPANSION_MODE, &rq_regs.mrq_expansion_mode,
		CRQ_EXPANSION_MODE, &rq_regs.crq_expansion_mode);
	REG_GET_8(DCHUBP_REQ_SIZE_CONFIG,
		CHUNK_SIZE, &rq_regs.rq_regs_l.chunk_size,
		MIN_CHUNK_SIZE, &rq_regs.rq_regs_l.min_chunk_size,
		META_CHUNK_SIZE, &rq_regs.rq_regs_l.meta_chunk_size,
		MIN_META_CHUNK_SIZE, &rq_regs.rq_regs_l.min_meta_chunk_size,
		DPTE_GROUP_SIZE, &rq_regs.rq_regs_l.dpte_group_size,
		MPTE_GROUP_SIZE, &rq_regs.rq_regs_l.mpte_group_size,
		SWATH_HEIGHT, &rq_regs.rq_regs_l.swath_height,
		PTE_ROW_HEIGHT_LINEAR, &rq_regs.rq_regs_l.pte_row_height_linear);
	REG_GET_8(DCHUBP_REQ_SIZE_CONFIG_C,
		CHUNK_SIZE_C, &rq_regs.rq_regs_c.chunk_size,
		MIN_CHUNK_SIZE_C, &rq_regs.rq_regs_c.min_chunk_size,
		META_CHUNK_SIZE_C, &rq_regs.rq_regs_c.meta_chunk_size,
		MIN_META_CHUNK_SIZE_C, &rq_regs.rq_regs_c.min_meta_chunk_size,
		DPTE_GROUP_SIZE_C, &rq_regs.rq_regs_c.dpte_group_size,
		MPTE_GROUP_SIZE_C, &rq_regs.rq_regs_c.mpte_group_size,
		SWATH_HEIGHT_C, &rq_regs.rq_regs_c.swath_height,
		PTE_ROW_HEIGHT_LINEAR_C, &rq_regs.rq_regs_c.pte_row_height_linear);

	if (rq_regs.plane1_base_address != dml_rq_regs->plane1_base_address)
		DC_LOG_DEBUG("DML Validation | HUBPRET_CONTROL:DET_BUF_PLANE1_BASE_ADDRESS - Expected: %u Actual: %u\n",
				dml_rq_regs->plane1_base_address, rq_regs.plane1_base_address);
	if (rq_regs.drq_expansion_mode != dml_rq_regs->drq_expansion_mode)
		DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:DRQ_EXPANSION_MODE - Expected: %u Actual: %u\n",
				dml_rq_regs->drq_expansion_mode, rq_regs.drq_expansion_mode);
	if (rq_regs.prq_expansion_mode != dml_rq_regs->prq_expansion_mode)
		DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:PRQ_EXPANSION_MODE - Expected: %u Actual: %u\n",
				dml_rq_regs->prq_expansion_mode, rq_regs.prq_expansion_mode);
	if (rq_regs.mrq_expansion_mode != dml_rq_regs->mrq_expansion_mode)
		DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:MRQ_EXPANSION_MODE - Expected: %u Actual: %u\n",
				dml_rq_regs->mrq_expansion_mode, rq_regs.mrq_expansion_mode);
	if (rq_regs.crq_expansion_mode != dml_rq_regs->crq_expansion_mode)
		DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:CRQ_EXPANSION_MODE - Expected: %u Actual: %u\n",
				dml_rq_regs->crq_expansion_mode, rq_regs.crq_expansion_mode);

	if (rq_regs.rq_regs_l.chunk_size != dml_rq_regs->rq_regs_l.chunk_size)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:CHUNK_SIZE - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_l.chunk_size, rq_regs.rq_regs_l.chunk_size);
	if (rq_regs.rq_regs_l.min_chunk_size != dml_rq_regs->rq_regs_l.min_chunk_size)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:MIN_CHUNK_SIZE - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_l.min_chunk_size, rq_regs.rq_regs_l.min_chunk_size);
	if (rq_regs.rq_regs_l.meta_chunk_size != dml_rq_regs->rq_regs_l.meta_chunk_size)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:META_CHUNK_SIZE - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_l.meta_chunk_size, rq_regs.rq_regs_l.meta_chunk_size);
	if (rq_regs.rq_regs_l.min_meta_chunk_size != dml_rq_regs->rq_regs_l.min_meta_chunk_size)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:MIN_META_CHUNK_SIZE - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_l.min_meta_chunk_size, rq_regs.rq_regs_l.min_meta_chunk_size);
	if (rq_regs.rq_regs_l.dpte_group_size != dml_rq_regs->rq_regs_l.dpte_group_size)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:DPTE_GROUP_SIZE - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_l.dpte_group_size, rq_regs.rq_regs_l.dpte_group_size);
	if (rq_regs.rq_regs_l.mpte_group_size != dml_rq_regs->rq_regs_l.mpte_group_size)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:MPTE_GROUP_SIZE - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_l.mpte_group_size, rq_regs.rq_regs_l.mpte_group_size);
	if (rq_regs.rq_regs_l.swath_height != dml_rq_regs->rq_regs_l.swath_height)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:SWATH_HEIGHT - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_l.swath_height, rq_regs.rq_regs_l.swath_height);
	if (rq_regs.rq_regs_l.pte_row_height_linear != dml_rq_regs->rq_regs_l.pte_row_height_linear)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:PTE_ROW_HEIGHT_LINEAR - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_l.pte_row_height_linear, rq_regs.rq_regs_l.pte_row_height_linear);

	if (rq_regs.rq_regs_c.chunk_size != dml_rq_regs->rq_regs_c.chunk_size)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:CHUNK_SIZE_C - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_c.chunk_size, rq_regs.rq_regs_c.chunk_size);
	if (rq_regs.rq_regs_c.min_chunk_size != dml_rq_regs->rq_regs_c.min_chunk_size)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:MIN_CHUNK_SIZE_C - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_c.min_chunk_size, rq_regs.rq_regs_c.min_chunk_size);
	if (rq_regs.rq_regs_c.meta_chunk_size != dml_rq_regs->rq_regs_c.meta_chunk_size)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:META_CHUNK_SIZE_C - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_c.meta_chunk_size,
				rq_regs.rq_regs_c.meta_chunk_size);
	if (rq_regs.rq_regs_c.min_meta_chunk_size != dml_rq_regs->rq_regs_c.min_meta_chunk_size)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:MIN_META_CHUNK_SIZE_C - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_c.min_meta_chunk_size, rq_regs.rq_regs_c.min_meta_chunk_size);
	if (rq_regs.rq_regs_c.dpte_group_size != dml_rq_regs->rq_regs_c.dpte_group_size)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:DPTE_GROUP_SIZE_C - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_c.dpte_group_size, rq_regs.rq_regs_c.dpte_group_size);
	if (rq_regs.rq_regs_c.mpte_group_size != dml_rq_regs->rq_regs_c.mpte_group_size)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:MPTE_GROUP_SIZE_C - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_c.mpte_group_size, rq_regs.rq_regs_c.mpte_group_size);
	if (rq_regs.rq_regs_c.swath_height != dml_rq_regs->rq_regs_c.swath_height)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:SWATH_HEIGHT_C - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_c.swath_height, rq_regs.rq_regs_c.swath_height);
	if (rq_regs.rq_regs_c.pte_row_height_linear != dml_rq_regs->rq_regs_c.pte_row_height_linear)
		DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:PTE_ROW_HEIGHT_LINEAR_C - Expected: %u Actual: %u\n",
				dml_rq_regs->rq_regs_c.pte_row_height_linear, rq_regs.rq_regs_c.pte_row_height_linear);

	/* DLG - Per hubp */
	REG_GET_2(BLANK_OFFSET_0,
		REFCYC_H_BLANK_END, &dlg_attr.refcyc_h_blank_end,
		DLG_V_BLANK_END, &dlg_attr.dlg_vblank_end);
	REG_GET(BLANK_OFFSET_1,
		MIN_DST_Y_NEXT_START, &dlg_attr.min_dst_y_next_start);
	REG_GET(DST_DIMENSIONS,
		REFCYC_PER_HTOTAL, &dlg_attr.refcyc_per_htotal);
	REG_GET_2(DST_AFTER_SCALER,
		REFCYC_X_AFTER_SCALER, &dlg_attr.refcyc_x_after_scaler,
		DST_Y_AFTER_SCALER, &dlg_attr.dst_y_after_scaler);
	REG_GET(REF_FREQ_TO_PIX_FREQ,
		REF_FREQ_TO_PIX_FREQ, &dlg_attr.ref_freq_to_pix_freq);

	if (dlg_attr.refcyc_h_blank_end != dml_dlg_attr->refcyc_h_blank_end)
		DC_LOG_DEBUG("DML Validation | BLANK_OFFSET_0:REFCYC_H_BLANK_END - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_h_blank_end, dlg_attr.refcyc_h_blank_end);
	if (dlg_attr.dlg_vblank_end != dml_dlg_attr->dlg_vblank_end)
		DC_LOG_DEBUG("DML Validation | BLANK_OFFSET_0:DLG_V_BLANK_END - Expected: %u Actual: %u\n",
				dml_dlg_attr->dlg_vblank_end, dlg_attr.dlg_vblank_end);
	if (dlg_attr.min_dst_y_next_start != dml_dlg_attr->min_dst_y_next_start)
		DC_LOG_DEBUG("DML Validation | BLANK_OFFSET_1:MIN_DST_Y_NEXT_START - Expected: %u Actual: %u\n",
				dml_dlg_attr->min_dst_y_next_start, dlg_attr.min_dst_y_next_start);
	if (dlg_attr.refcyc_per_htotal != dml_dlg_attr->refcyc_per_htotal)
		DC_LOG_DEBUG("DML Validation | DST_DIMENSIONS:REFCYC_PER_HTOTAL - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_per_htotal, dlg_attr.refcyc_per_htotal);
	if (dlg_attr.refcyc_x_after_scaler != dml_dlg_attr->refcyc_x_after_scaler)
		DC_LOG_DEBUG("DML Validation | DST_AFTER_SCALER:REFCYC_X_AFTER_SCALER - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_x_after_scaler, dlg_attr.refcyc_x_after_scaler);
	if (dlg_attr.dst_y_after_scaler != dml_dlg_attr->dst_y_after_scaler)
		DC_LOG_DEBUG("DML Validation | DST_AFTER_SCALER:DST_Y_AFTER_SCALER - Expected: %u Actual: %u\n",
				dml_dlg_attr->dst_y_after_scaler, dlg_attr.dst_y_after_scaler);
	if (dlg_attr.ref_freq_to_pix_freq !=
			dml_dlg_attr->ref_freq_to_pix_freq)
		DC_LOG_DEBUG("DML Validation | REF_FREQ_TO_PIX_FREQ:REF_FREQ_TO_PIX_FREQ - Expected: %u Actual: %u\n",
				dml_dlg_attr->ref_freq_to_pix_freq, dlg_attr.ref_freq_to_pix_freq);

	/* DLG - Per luma/chroma */
	REG_GET(VBLANK_PARAMETERS_1,
		REFCYC_PER_PTE_GROUP_VBLANK_L, &dlg_attr.refcyc_per_pte_group_vblank_l);
	if (REG(NOM_PARAMETERS_0))
		REG_GET(NOM_PARAMETERS_0,
			DST_Y_PER_PTE_ROW_NOM_L, &dlg_attr.dst_y_per_pte_row_nom_l);
	if (REG(NOM_PARAMETERS_1))
		REG_GET(NOM_PARAMETERS_1,
			REFCYC_PER_PTE_GROUP_NOM_L, &dlg_attr.refcyc_per_pte_group_nom_l);
	REG_GET(NOM_PARAMETERS_4,
		DST_Y_PER_META_ROW_NOM_L, &dlg_attr.dst_y_per_meta_row_nom_l);
	REG_GET(NOM_PARAMETERS_5,
		REFCYC_PER_META_CHUNK_NOM_L, &dlg_attr.refcyc_per_meta_chunk_nom_l);
	REG_GET_2(PER_LINE_DELIVERY,
		REFCYC_PER_LINE_DELIVERY_L, &dlg_attr.refcyc_per_line_delivery_l,
		REFCYC_PER_LINE_DELIVERY_C, &dlg_attr.refcyc_per_line_delivery_c);
	REG_GET_2(PER_LINE_DELIVERY_PRE,
		REFCYC_PER_LINE_DELIVERY_PRE_L, &dlg_attr.refcyc_per_line_delivery_pre_l,
		REFCYC_PER_LINE_DELIVERY_PRE_C, &dlg_attr.refcyc_per_line_delivery_pre_c);
	REG_GET(VBLANK_PARAMETERS_2,
		REFCYC_PER_PTE_GROUP_VBLANK_C, &dlg_attr.refcyc_per_pte_group_vblank_c);
	if (REG(NOM_PARAMETERS_2))
		REG_GET(NOM_PARAMETERS_2,
			DST_Y_PER_PTE_ROW_NOM_C, &dlg_attr.dst_y_per_pte_row_nom_c);
	if (REG(NOM_PARAMETERS_3))
		REG_GET(NOM_PARAMETERS_3,
			REFCYC_PER_PTE_GROUP_NOM_C, &dlg_attr.refcyc_per_pte_group_nom_c);
	REG_GET(NOM_PARAMETERS_6,
		DST_Y_PER_META_ROW_NOM_C, &dlg_attr.dst_y_per_meta_row_nom_c);
	REG_GET(NOM_PARAMETERS_7,
		REFCYC_PER_META_CHUNK_NOM_C, &dlg_attr.refcyc_per_meta_chunk_nom_c);
	REG_GET(VBLANK_PARAMETERS_3,
		REFCYC_PER_META_CHUNK_VBLANK_L, &dlg_attr.refcyc_per_meta_chunk_vblank_l);
	REG_GET(VBLANK_PARAMETERS_4,
		REFCYC_PER_META_CHUNK_VBLANK_C, &dlg_attr.refcyc_per_meta_chunk_vblank_c);

	if (dlg_attr.refcyc_per_pte_group_vblank_l != dml_dlg_attr->refcyc_per_pte_group_vblank_l)
		DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_1:REFCYC_PER_PTE_GROUP_VBLANK_L - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_per_pte_group_vblank_l, dlg_attr.refcyc_per_pte_group_vblank_l);
	if (dlg_attr.dst_y_per_pte_row_nom_l != dml_dlg_attr->dst_y_per_pte_row_nom_l)
		DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_0:DST_Y_PER_PTE_ROW_NOM_L - Expected: %u Actual: %u\n",
				dml_dlg_attr->dst_y_per_pte_row_nom_l, dlg_attr.dst_y_per_pte_row_nom_l);
	if (dlg_attr.refcyc_per_pte_group_nom_l != dml_dlg_attr->refcyc_per_pte_group_nom_l)
		DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_1:REFCYC_PER_PTE_GROUP_NOM_L - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_per_pte_group_nom_l, dlg_attr.refcyc_per_pte_group_nom_l);
	if (dlg_attr.dst_y_per_meta_row_nom_l != dml_dlg_attr->dst_y_per_meta_row_nom_l)
		DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_4:DST_Y_PER_META_ROW_NOM_L - Expected: %u Actual: %u\n",
				dml_dlg_attr->dst_y_per_meta_row_nom_l, dlg_attr.dst_y_per_meta_row_nom_l);
	if (dlg_attr.refcyc_per_meta_chunk_nom_l != dml_dlg_attr->refcyc_per_meta_chunk_nom_l)
		DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_5:REFCYC_PER_META_CHUNK_NOM_L - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_per_meta_chunk_nom_l, dlg_attr.refcyc_per_meta_chunk_nom_l);
	if (dlg_attr.refcyc_per_line_delivery_l != dml_dlg_attr->refcyc_per_line_delivery_l)
		DC_LOG_DEBUG("DML Validation | PER_LINE_DELIVERY:REFCYC_PER_LINE_DELIVERY_L - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_per_line_delivery_l, dlg_attr.refcyc_per_line_delivery_l);
	if (dlg_attr.refcyc_per_line_delivery_c != dml_dlg_attr->refcyc_per_line_delivery_c)
		DC_LOG_DEBUG("DML Validation | PER_LINE_DELIVERY:REFCYC_PER_LINE_DELIVERY_C - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_per_line_delivery_c, dlg_attr.refcyc_per_line_delivery_c);
	if (dlg_attr.refcyc_per_pte_group_vblank_c != dml_dlg_attr->refcyc_per_pte_group_vblank_c)
		DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_2:REFCYC_PER_PTE_GROUP_VBLANK_C - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_per_pte_group_vblank_c, dlg_attr.refcyc_per_pte_group_vblank_c);
	if (dlg_attr.dst_y_per_pte_row_nom_c != dml_dlg_attr->dst_y_per_pte_row_nom_c)
		DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_2:DST_Y_PER_PTE_ROW_NOM_C - Expected: %u Actual: %u\n",
				dml_dlg_attr->dst_y_per_pte_row_nom_c, dlg_attr.dst_y_per_pte_row_nom_c);
	if (dlg_attr.refcyc_per_pte_group_nom_c != dml_dlg_attr->refcyc_per_pte_group_nom_c)
		DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_3:REFCYC_PER_PTE_GROUP_NOM_C - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_per_pte_group_nom_c, dlg_attr.refcyc_per_pte_group_nom_c);
	if (dlg_attr.dst_y_per_meta_row_nom_c != dml_dlg_attr->dst_y_per_meta_row_nom_c)
		DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_6:DST_Y_PER_META_ROW_NOM_C - Expected: %u Actual: %u\n",
				dml_dlg_attr->dst_y_per_meta_row_nom_c, dlg_attr.dst_y_per_meta_row_nom_c);
	if (dlg_attr.refcyc_per_meta_chunk_nom_c != dml_dlg_attr->refcyc_per_meta_chunk_nom_c)
		DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_7:REFCYC_PER_META_CHUNK_NOM_C - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_per_meta_chunk_nom_c, dlg_attr.refcyc_per_meta_chunk_nom_c);
	if (dlg_attr.refcyc_per_line_delivery_pre_l != dml_dlg_attr->refcyc_per_line_delivery_pre_l)
		DC_LOG_DEBUG("DML Validation | PER_LINE_DELIVERY_PRE:REFCYC_PER_LINE_DELIVERY_PRE_L - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_per_line_delivery_pre_l, dlg_attr.refcyc_per_line_delivery_pre_l);
	if (dlg_attr.refcyc_per_line_delivery_pre_c != dml_dlg_attr->refcyc_per_line_delivery_pre_c)
		DC_LOG_DEBUG("DML Validation | PER_LINE_DELIVERY_PRE:REFCYC_PER_LINE_DELIVERY_PRE_C - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_per_line_delivery_pre_c, dlg_attr.refcyc_per_line_delivery_pre_c);
	if (dlg_attr.refcyc_per_meta_chunk_vblank_l != dml_dlg_attr->refcyc_per_meta_chunk_vblank_l)
		DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_3:REFCYC_PER_META_CHUNK_VBLANK_L - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_per_meta_chunk_vblank_l, dlg_attr.refcyc_per_meta_chunk_vblank_l);
	if (dlg_attr.refcyc_per_meta_chunk_vblank_c != dml_dlg_attr->refcyc_per_meta_chunk_vblank_c)
		DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_4:REFCYC_PER_META_CHUNK_VBLANK_C - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_per_meta_chunk_vblank_c, dlg_attr.refcyc_per_meta_chunk_vblank_c);

	/* TTU - per hubp */
	REG_GET_2(DCN_TTU_QOS_WM,
		QoS_LEVEL_LOW_WM, &ttu_attr.qos_level_low_wm,
		QoS_LEVEL_HIGH_WM, &ttu_attr.qos_level_high_wm);

	if (ttu_attr.qos_level_low_wm != dml_ttu_attr->qos_level_low_wm)
		DC_LOG_DEBUG("DML Validation | DCN_TTU_QOS_WM:QoS_LEVEL_LOW_WM - Expected: %u Actual: %u\n",
				dml_ttu_attr->qos_level_low_wm, ttu_attr.qos_level_low_wm);
	if (ttu_attr.qos_level_high_wm != dml_ttu_attr->qos_level_high_wm)
	if (ttu_attr.qos_level_high_wm != dml_ttu_attr->qos_level_high_wm)
		DC_LOG_DEBUG("DML Validation | DCN_TTU_QOS_WM:QoS_LEVEL_HIGH_WM - Expected: %u Actual: %u\n",
				dml_ttu_attr->qos_level_high_wm, ttu_attr.qos_level_high_wm);

	/* TTU - per luma/chroma */
	/* Assumed surf0 is luma and 1 is chroma */
	REG_GET_3(DCN_SURF0_TTU_CNTL0,
		REFCYC_PER_REQ_DELIVERY, &ttu_attr.refcyc_per_req_delivery_l,
		QoS_LEVEL_FIXED, &ttu_attr.qos_level_fixed_l,
		QoS_RAMP_DISABLE, &ttu_attr.qos_ramp_disable_l);
	REG_GET_3(DCN_SURF1_TTU_CNTL0,
		REFCYC_PER_REQ_DELIVERY, &ttu_attr.refcyc_per_req_delivery_c,
		QoS_LEVEL_FIXED, &ttu_attr.qos_level_fixed_c,
		QoS_RAMP_DISABLE, &ttu_attr.qos_ramp_disable_c);
	REG_GET_3(DCN_CUR0_TTU_CNTL0,
		REFCYC_PER_REQ_DELIVERY, &ttu_attr.refcyc_per_req_delivery_cur0,
		QoS_LEVEL_FIXED, &ttu_attr.qos_level_fixed_cur0,
		QoS_RAMP_DISABLE, &ttu_attr.qos_ramp_disable_cur0);
	REG_GET(FLIP_PARAMETERS_1,
		REFCYC_PER_PTE_GROUP_FLIP_L, &dlg_attr.refcyc_per_pte_group_flip_l);
	REG_GET(DCN_CUR0_TTU_CNTL1,
		REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_cur0);
	REG_GET(DCN_CUR1_TTU_CNTL1,
		REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_cur1);
	REG_GET(DCN_SURF0_TTU_CNTL1,
		REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_l);
	REG_GET(DCN_SURF1_TTU_CNTL1,
		REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_c);

	if (ttu_attr.refcyc_per_req_delivery_l != dml_ttu_attr->refcyc_per_req_delivery_l)
		DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL0:REFCYC_PER_REQ_DELIVERY - Expected: %u Actual: %u\n",
				dml_ttu_attr->refcyc_per_req_delivery_l, ttu_attr.refcyc_per_req_delivery_l);
	if (ttu_attr.qos_level_fixed_l != dml_ttu_attr->qos_level_fixed_l)
		DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL0:QoS_LEVEL_FIXED - Expected: %u Actual: %u\n",
				dml_ttu_attr->qos_level_fixed_l, ttu_attr.qos_level_fixed_l);
	if (ttu_attr.qos_ramp_disable_l != dml_ttu_attr->qos_ramp_disable_l)
		DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL0:QoS_RAMP_DISABLE - Expected: %u Actual: %u\n",
				dml_ttu_attr->qos_ramp_disable_l, ttu_attr.qos_ramp_disable_l);
	if (ttu_attr.refcyc_per_req_delivery_c != dml_ttu_attr->refcyc_per_req_delivery_c)
		DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL0:REFCYC_PER_REQ_DELIVERY - Expected: %u Actual: %u\n",
				dml_ttu_attr->refcyc_per_req_delivery_c, ttu_attr.refcyc_per_req_delivery_c);
	if (ttu_attr.qos_level_fixed_c != dml_ttu_attr->qos_level_fixed_c)
		DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL0:QoS_LEVEL_FIXED - Expected: %u Actual: %u\n",
				dml_ttu_attr->qos_level_fixed_c, ttu_attr.qos_level_fixed_c);
	if (ttu_attr.qos_ramp_disable_c != dml_ttu_attr->qos_ramp_disable_c)
		DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL0:QoS_RAMP_DISABLE - Expected: %u Actual: %u\n",
				dml_ttu_attr->qos_ramp_disable_c, ttu_attr.qos_ramp_disable_c);
	if (ttu_attr.refcyc_per_req_delivery_cur0 != dml_ttu_attr->refcyc_per_req_delivery_cur0)
		DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL0:REFCYC_PER_REQ_DELIVERY - Expected: %u Actual: %u\n",
				dml_ttu_attr->refcyc_per_req_delivery_cur0, ttu_attr.refcyc_per_req_delivery_cur0);
	if (ttu_attr.qos_level_fixed_cur0 != dml_ttu_attr->qos_level_fixed_cur0)
		DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL0:QoS_LEVEL_FIXED - Expected: %u Actual: %u\n",
				dml_ttu_attr->qos_level_fixed_cur0, ttu_attr.qos_level_fixed_cur0);
	if (ttu_attr.qos_ramp_disable_cur0 != dml_ttu_attr->qos_ramp_disable_cur0)
		DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL0:QoS_RAMP_DISABLE - Expected: %u Actual: %u\n",
				dml_ttu_attr->qos_ramp_disable_cur0, ttu_attr.qos_ramp_disable_cur0);
	if (dlg_attr.refcyc_per_pte_group_flip_l != dml_dlg_attr->refcyc_per_pte_group_flip_l)
		DC_LOG_DEBUG("DML Validation | FLIP_PARAMETERS_1:REFCYC_PER_PTE_GROUP_FLIP_L - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_per_pte_group_flip_l, dlg_attr.refcyc_per_pte_group_flip_l);
	if (ttu_attr.refcyc_per_req_delivery_pre_cur0 != dml_ttu_attr->refcyc_per_req_delivery_pre_cur0)
		DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n",
				dml_ttu_attr->refcyc_per_req_delivery_pre_cur0, ttu_attr.refcyc_per_req_delivery_pre_cur0);
	if (ttu_attr.refcyc_per_req_delivery_pre_cur1 != dml_ttu_attr->refcyc_per_req_delivery_pre_cur1)
		DC_LOG_DEBUG("DML Validation | DCN_CUR1_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n",
				dml_ttu_attr->refcyc_per_req_delivery_pre_cur1, ttu_attr.refcyc_per_req_delivery_pre_cur1);
	if (ttu_attr.refcyc_per_req_delivery_pre_l != dml_ttu_attr->refcyc_per_req_delivery_pre_l)
		DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n",
				dml_ttu_attr->refcyc_per_req_delivery_pre_l, ttu_attr.refcyc_per_req_delivery_pre_l);
	if (ttu_attr.refcyc_per_req_delivery_pre_c != dml_ttu_attr->refcyc_per_req_delivery_pre_c)
		DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n",
				dml_ttu_attr->refcyc_per_req_delivery_pre_c, ttu_attr.refcyc_per_req_delivery_pre_c);
}

/* DCN 2.0 HUBP function table: routes the generic hubp interface to the
 * hubp2_* implementations (hubp_init is shared with DCN 1.0 via hubp1_init).
 */
static struct hubp_funcs dcn20_hubp_funcs = {
	.hubp_enable_tripleBuffer = hubp2_enable_triplebuffer,
	.hubp_is_triplebuffer_enabled = hubp2_is_triplebuffer_enabled,
	.hubp_program_surface_flip_and_addr = hubp2_program_surface_flip_and_addr,
	.hubp_program_surface_config = hubp2_program_surface_config,
	.hubp_is_flip_pending = hubp2_is_flip_pending,
	.hubp_setup = hubp2_setup,
	.hubp_setup_interdependent = hubp2_setup_interdependent,
	.hubp_set_vm_system_aperture_settings = hubp2_set_vm_system_aperture_settings,
	.set_blank = hubp2_set_blank,
	.dcc_control = hubp2_dcc_control,
	.mem_program_viewport = min_set_viewport,
	.set_cursor_attributes = hubp2_cursor_set_attributes,
	.set_cursor_position = hubp2_cursor_set_position,
	.hubp_clk_cntl = hubp2_clk_cntl,
	.hubp_vtg_sel = hubp2_vtg_sel,
	.dmdata_set_attributes = hubp2_dmdata_set_attributes,
	.dmdata_load = hubp2_dmdata_load,
	.dmdata_status_done = hubp2_dmdata_status_done,
	.hubp_read_state = hubp2_read_state,
	.hubp_clear_underflow = hubp2_clear_underflow,
	.hubp_set_flip_control_surface_gsl = hubp2_set_flip_control_surface_gsl,
	.hubp_init = hubp1_init,
	.validate_dml_output = hubp2_validate_dml_output,
};


/* Construct a DCN 2.0 HUBP instance: hook up the function table and the
 * per-instance register, shift and mask tables.
 */
bool hubp2_construct(
	struct dcn20_hubp *hubp2,
	struct dc_context *ctx,
	uint32_t inst,
	const struct dcn_hubp2_registers *hubp_regs,
	const struct dcn_hubp2_shift *hubp_shift,
	const struct dcn_hubp2_mask *hubp_mask)
{
	hubp2->base.funcs = &dcn20_hubp_funcs;
	hubp2->base.ctx = ctx;
	hubp2->hubp_regs = hubp_regs;
	hubp2->hubp_shift = hubp_shift;
	hubp2->hubp_mask = hubp_mask;
	hubp2->base.inst = inst;
	hubp2->base.opp_id = OPP_ID_INVALID;
	hubp2->base.mpcc_id = 0xf;

	return true;
}