
Searched refs:uvd (Results 1 – 21 of 21) sorted by relevance

/netbsd/sys/external/bsd/drm2/dist/drm/amd/amdgpu/
amdgpu_uvd.c
208 adev->uvd.fw = NULL; in amdgpu_uvd_sw_init()
268 &adev->uvd.inst[j].gpu_addr, &adev->uvd.inst[j].cpu_addr); in amdgpu_uvd_sw_init()
277 adev->uvd.filp[i] = NULL; in amdgpu_uvd_sw_init()
286 adev->uvd.use_ctx_buf = adev->uvd.fw_version >= FW_1_65_10; in amdgpu_uvd_sw_init()
289 adev->uvd.use_ctx_buf = adev->uvd.fw_version >= FW_1_87_11; in amdgpu_uvd_sw_init()
292 adev->uvd.use_ctx_buf = adev->uvd.fw_version >= FW_1_87_12; in amdgpu_uvd_sw_init()
295 adev->uvd.use_ctx_buf = adev->uvd.fw_version >= FW_1_37_15; in amdgpu_uvd_sw_init()
325 release_firmware(adev->uvd.fw); in amdgpu_uvd_sw_fini()
425 memcpy_toio(adev->uvd.inst[i].cpu_addr, adev->uvd.fw->data + offset, in amdgpu_uvd_resume()
459 adev->uvd.filp[i] = NULL; in amdgpu_uvd_free_handles()
[all …]
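The hits above show amdgpu_uvd_sw_init() enabling the UVD context buffer only when the loaded firmware is new enough. A minimal sketch of that comparison pattern; the UVD_FW_VER macro and uvd_fw_at_least() helper below are hypothetical stand-ins, not the driver's real FW_1_65_10-style encoding, and the per-ASIC selection around these assignments is elided in the search excerpt.

#include <stdbool.h>
#include <stdint.h>

/* Hypothetical packing: major.minor.rev as one monotonically comparable
 * integer, so "fw_version >= threshold" checks like the ones above work. */
#define UVD_FW_VER(maj, min, rev) \
	(((uint32_t)(maj) << 24) | ((uint32_t)(min) << 16) | ((uint32_t)(rev) << 8))

static inline bool uvd_fw_at_least(uint32_t fw_version, uint32_t wanted)
{
	return fw_version >= wanted;
}

/* e.g. use_ctx_buf = uvd_fw_at_least(fw_version, UVD_FW_VER(1, 65, 10)); */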
amdgpu_uvd_v7_0.c
388 adev->uvd.harvest_config |= 1 << i; in uvd_v7_0_early_init()
396 adev->uvd.num_uvd_inst = 1; in uvd_v7_0_early_init()
400 adev->uvd.num_enc_rings = 1; in uvd_v7_0_early_init()
402 adev->uvd.num_enc_rings = 2; in uvd_v7_0_early_init()
458 ring = &adev->uvd.inst[j].ring; in uvd_v7_0_sw_init()
466 ring = &adev->uvd.inst[j].ring_enc[i]; in uvd_v7_0_sw_init()
544 ring = &adev->uvd.inst[j].ring; in uvd_v7_0_hw_init()
803 ring = &adev->uvd.inst[i].ring; in uvd_v7_0_sriov_start()
962 ring = &adev->uvd.inst[k].ring; in uvd_v7_0_start()
1852 adev->uvd.inst[i].ring.me = i; in uvd_v7_0_set_ring_funcs()
[all …]
amdgpu_uvd_v6_0.c
72 (!adev->uvd.fw_version || adev->uvd.fw_version >= FW_1_130_16)); in uvd_v6_0_enc_support()
370 adev->uvd.num_uvd_inst = 1; in uvd_v6_0_early_init()
379 adev->uvd.num_enc_rings = 2; in uvd_v6_0_early_init()
416 adev->uvd.inst->irq.num_types = 1; in uvd_v6_0_sw_init()
417 adev->uvd.num_enc_rings = 0; in uvd_v6_0_sw_init()
422 ring = &adev->uvd.inst->ring; in uvd_v6_0_sw_init()
434 ring = &adev->uvd.inst->ring_enc[i]; in uvd_v6_0_sw_init()
846 ring = &adev->uvd.inst->ring_enc[0]; in uvd_v6_0_start()
853 ring = &adev->uvd.inst->ring_enc[1]; in uvd_v6_0_start()
1631 adev->uvd.inst->irq.num_types = adev->uvd.num_enc_rings + 1; in uvd_v6_0_set_irq_funcs()
[all …]
amdgpu_uvd_v4_2.c
101 adev->uvd.num_uvd_inst = 1; in uvd_v4_2_early_init()
124 ring = &adev->uvd.inst->ring; in uvd_v4_2_sw_init()
163 struct amdgpu_ring *ring = &adev->uvd.inst->ring; in uvd_v4_2_hw_init()
218 struct amdgpu_ring *ring = &adev->uvd.inst->ring; in uvd_v4_2_hw_fini()
261 struct amdgpu_ring *ring = &adev->uvd.inst->ring; in uvd_v4_2_start()
569 addr = (adev->uvd.inst->gpu_addr >> 28) & 0xF; in uvd_v4_2_mc_resume()
573 addr = (adev->uvd.inst->gpu_addr >> 32) & 0xFF; in uvd_v4_2_mc_resume()
680 amdgpu_fence_process(&adev->uvd.inst->ring); in uvd_v4_2_process_interrupt()
769 adev->uvd.inst->ring.funcs = &uvd_v4_2_ring_funcs; in uvd_v4_2_set_ring_funcs()
779 adev->uvd.inst->irq.num_types = 1; in uvd_v4_2_set_irq_funcs()
[all …]
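Several hits (uvd_v4_2_mc_resume() here, and the radeon uvd_v*_resume() functions further down) split the UVD buffer's GPU address into small register fields: bits 28..31 and bits 32..39, presumably address-extension fields for a 40-bit address. A tiny worked example of that arithmetic, with an illustrative address value:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t gpu_addr = 0xAB12345000ULL;           /* illustrative 40-bit address */

	uint32_t ext_31_28 = (gpu_addr >> 28) & 0xF;   /* -> 0x1  */
	uint32_t ext_39_32 = (gpu_addr >> 32) & 0xFF;  /* -> 0xAB */

	printf("addr[31:28]=0x%X addr[39:32]=0x%02X\n", ext_31_28, ext_39_32);
	return 0;
}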
amdgpu_uvd_v5_0.c
99 adev->uvd.num_uvd_inst = 1; in uvd_v5_0_early_init()
122 ring = &adev->uvd.inst->ring; in uvd_v5_0_sw_init()
159 struct amdgpu_ring *ring = &adev->uvd.inst->ring; in uvd_v5_0_hw_init()
216 struct amdgpu_ring *ring = &adev->uvd.inst->ring; in uvd_v5_0_hw_fini()
265 lower_32_bits(adev->uvd.inst->gpu_addr)); in uvd_v5_0_mc_resume()
267 upper_32_bits(adev->uvd.inst->gpu_addr)); in uvd_v5_0_mc_resume()
299 struct amdgpu_ring *ring = &adev->uvd.inst->ring; in uvd_v5_0_start()
603 amdgpu_fence_process(&adev->uvd.inst->ring); in uvd_v5_0_process_interrupt()
878 adev->uvd.inst->ring.funcs = &uvd_v5_0_ring_funcs; in uvd_v5_0_set_ring_funcs()
888 adev->uvd.inst->irq.num_types = 1; in uvd_v5_0_set_irq_funcs()
[all …]
amdgpu_kms.c
226 fw_info->ver = adev->uvd.fw_version; in amdgpu_firmware_info()
358 for (i = 0; i < adev->uvd.num_uvd_inst; i++) { in amdgpu_hw_ip_info()
359 if (adev->uvd.harvest_config & (1 << i)) in amdgpu_hw_ip_info()
362 if (adev->uvd.inst[i].ring.sched.ready) in amdgpu_hw_ip_info()
378 for (i = 0; i < adev->uvd.num_uvd_inst; i++) { in amdgpu_hw_ip_info()
379 if (adev->uvd.harvest_config & (1 << i)) in amdgpu_hw_ip_info()
382 for (j = 0; j < adev->uvd.num_enc_rings; j++) in amdgpu_hw_ip_info()
383 if (adev->uvd.inst[i].ring_enc[j].sched.ready) in amdgpu_hw_ip_info()
392 if (adev->uvd.harvest_config & (1 << i)) in amdgpu_hw_ip_info()
404 if (adev->uvd.harvest_config & (1 << i)) in amdgpu_hw_ip_info()
[all …]
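The amdgpu_kms.c hits walk every UVD instance and skip those whose bit is set in harvest_config before reporting rings to userspace. A self-contained sketch of that loop shape, with illustrative values in place of real device state:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	unsigned num_uvd_inst = 2;
	uint8_t  harvest_config = 1 << 1;          /* pretend instance 1 is fused off */
	bool     ring_ready[2] = { true, true };
	unsigned usable = 0;

	for (unsigned i = 0; i < num_uvd_inst; i++) {
		if (harvest_config & (1u << i))
			continue;                  /* harvested instance: not exposed */
		if (ring_ready[i])
			usable++;
	}

	printf("usable UVD decode rings: %u\n", usable);   /* prints 1 */
	return 0;
}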
amdgpu_fence.c
413 index = ALIGN(adev->uvd.fw->size, 8); in amdgpu_fence_driver_start_ring()
414 ring->fence_drv.cpu_addr = adev->uvd.inst[ring->me].cpu_addr + index; in amdgpu_fence_driver_start_ring()
415 ring->fence_drv.gpu_addr = adev->uvd.inst[ring->me].gpu_addr + index; in amdgpu_fence_driver_start_ring()
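The amdgpu_fence.c hits (and the matching radeon_fence.c hits below) place the ring's fence slot inside the UVD buffer, immediately after the firmware image, aligned to 8 bytes. A small sketch of that offset arithmetic, with a hypothetical stand-in struct instead of the driver's adev->uvd.inst[...] fields:

#include <stddef.h>
#include <stdint.h>

/* Hypothetical stand-in for the per-instance fields used in the hits. */
struct uvd_inst_mem {
	uint8_t  *cpu_addr;   /* kernel mapping of the UVD buffer */
	uint64_t  gpu_addr;   /* GPU address of the same buffer */
};

/* Equivalent of ALIGN(x, 8): round up to the next 8-byte boundary. */
static inline size_t align8(size_t x)
{
	return (x + 7u) & ~(size_t)7u;
}

/* The fence slot lives just past the firmware image inside the UVD buffer. */
static void uvd_fence_slot(const struct uvd_inst_mem *inst, size_t fw_size,
                           uint8_t **fence_cpu, uint64_t *fence_gpu)
{
	size_t index = align8(fw_size);

	*fence_cpu = inst->cpu_addr + index;
	*fence_gpu = inst->gpu_addr + index;
}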
amdgpu_uvd.h
39 …(AMDGPU_GPU_PAGE_ALIGN(le32_to_cpu(((const struct common_firmware_header *)(adev)->uvd.fw->data)->…
amdgpu_ctx.c
102 sched = &adev->uvd.inst[0].ring.sched; in amdgpu_ctx_init_entity()
112 sched = &adev->uvd.inst[0].ring_enc[0].sched; in amdgpu_ctx_init_entity()
amdgpu_ucode.c
402 FW_VERSION_ATTR(uvd_fw_version, 0444, uvd.fw_version);
amdgpu.h
902 struct amdgpu_uvd uvd; member
amdgpu_pm.c
3188 adev->uvd.decode_image_width >= WIDTH_4K) { in amdgpu_dpm_enable_uvd()
/netbsd/sys/external/bsd/drm2/dist/drm/radeon/
radeon_uvd.c
196 NULL, &rdev->uvd.vcpu_bo); in radeon_uvd_init()
210 &rdev->uvd.gpu_addr); in radeon_uvd_init()
218 r = radeon_bo_kmap(rdev->uvd.vcpu_bo, &rdev->uvd.cpu_addr); in radeon_uvd_init()
228 rdev->uvd.filp[i] = NULL; in radeon_uvd_init()
229 rdev->uvd.img_size[i] = 0; in radeon_uvd_init()
239 if (rdev->uvd.vcpu_bo == NULL) in radeon_uvd_fini()
260 if (rdev->uvd.vcpu_bo == NULL) in radeon_uvd_suspend()
280 rdev->uvd.filp[i] = NULL; in radeon_uvd_suspend()
293 if (rdev->uvd.vcpu_bo == NULL) in radeon_uvd_resume()
301 ptr = rdev->uvd.cpu_addr; in radeon_uvd_resume()
[all …]
radeon_uvd_v4_2.c
51 if (rdev->uvd.fw_header_present) in uvd_v4_2_resume()
52 addr = (rdev->uvd.gpu_addr + 0x200) >> 3; in uvd_v4_2_resume()
54 addr = rdev->uvd.gpu_addr >> 3; in uvd_v4_2_resume()
67 (RADEON_UVD_SESSION_SIZE * rdev->uvd.max_handles)) >> 3; in uvd_v4_2_resume()
72 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v4_2_resume()
76 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v4_2_resume()
79 if (rdev->uvd.fw_header_present) in uvd_v4_2_resume()
80 WREG32(UVD_GP_SCRATCH4, rdev->uvd.max_handles); in uvd_v4_2_resume()
radeon_uvd_v2_2.c
118 addr = rdev->uvd.gpu_addr >> 3; in uvd_v2_2_resume()
130 (RADEON_UVD_SESSION_SIZE * rdev->uvd.max_handles)) >> 3; in uvd_v2_2_resume()
135 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v2_2_resume()
139 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v2_2_resume()
radeon_uvd_v1_0.c
126 addr = (rdev->uvd.gpu_addr >> 3) + 16; in uvd_v1_0_resume()
138 (RADEON_UVD_SESSION_SIZE * rdev->uvd.max_handles)) >> 3; in uvd_v1_0_resume()
143 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v1_0_resume()
147 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v1_0_resume()
150 WREG32(UVD_FW_START, *((uint32_t*)rdev->uvd.cpu_addr)); in uvd_v1_0_resume()
radeon_drv.c
312 MODULE_PARM_DESC(uvd, "uvd enable/disable uvd support (1 = enable, 0 = disable)");
313 module_param_named(uvd, radeon_uvd, int, 0444);
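The radeon_drv.c hits show how the uvd knob reaches the driver: an integer module parameter bound to radeon_uvd and exposed read-only. A minimal sketch of that declaration pattern in kernel-module context; the default value below is an assumption, since the excerpt does not show radeon_uvd's initializer.

#include <linux/module.h>
#include <linux/moduleparam.h>

static int radeon_uvd = 1;   /* assumed default: UVD enabled */

/* As in the hits above: describe the parameter and bind it to the variable
 * under the name "uvd", world-readable (0444) via sysfs. */
MODULE_PARM_DESC(uvd, "uvd enable/disable uvd support (1 = enable, 0 = disable)");
module_param_named(uvd, radeon_uvd, int, 0444);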
radeon_fence.c
902 rdev->fence_drv[ring].cpu_addr = (uint32_t *)((uint8_t *)rdev->uvd.cpu_addr + index); in radeon_fence_driver_start_ring()
903 rdev->fence_drv[ring].gpu_addr = rdev->uvd.gpu_addr + index; in radeon_fence_driver_start_ring()
radeon.h
2446 struct radeon_uvd uvd; member
/netbsd/sys/external/bsd/drm2/dist/drm/amd/powerplay/hwmgr/
smu10_hwmgr.h
114 uint32_t uvd : 1; member
smu8_hwmgr.h
137 uint32_t uvd : 1; member
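Both hwmgr headers declare uvd as a one-bit struct member, i.e. per-IP-block enable flags packed into a 32-bit bitfield. A small illustration of that layout; the struct name and sibling fields below are hypothetical, only the uvd : 1 member is taken from the hits.

#include <stdint.h>

/* Hypothetical container mirroring the one-bit flags in smu10_hwmgr.h /
 * smu8_hwmgr.h; only the uvd bit is taken from the search hits. */
struct ip_enable_flags {
	uint32_t uvd      : 1;   /* UVD (video decode) block enabled */
	uint32_t vce      : 1;   /* hypothetical sibling flag, for illustration */
	uint32_t reserved : 30;
};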