Searched refs:gpu_addr (Results 1 – 25 of 78) sorted by relevance

/dragonfly/sys/dev/drm/radeon/
r600_dma.c
149 WREG32(DMA_RB_BASE, ring->gpu_addr >> 8); in r600_dma_resume()
235 u64 gpu_addr; in r600_dma_ring_test() local
242 gpu_addr = rdev->wb.gpu_addr + index; in r600_dma_ring_test()
253 radeon_ring_write(ring, lower_32_bits(gpu_addr)); in r600_dma_ring_test()
289 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in r600_dma_fence_ring_emit()
316 u64 addr = semaphore->gpu_addr; in r600_dma_semaphore_ring_emit()
342 u64 gpu_addr; in r600_dma_ib_test() local
349 gpu_addr = rdev->wb.gpu_addr + index; in r600_dma_ib_test()
358 ib.ptr[1] = lower_32_bits(gpu_addr); in r600_dma_ib_test()
359 ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff; in r600_dma_ib_test()
[all …]
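
The r600_dma.c hits trace one pattern: the ring test carves a scratch dword out of the write-back page, derives its GPU address from rdev->wb.gpu_addr, asks the DMA engine to write a magic value there, then polls the CPU mapping of the same dword. A condensed sketch of that fragment as I read the surrounding r600_dma_ring_test(), with error handling trimmed and the per-ring choice of write-back offset elided (constants and helpers are the radeon driver's, not shown in these hits):

    u64 gpu_addr;
    unsigned index = R600_WB_DMA_RING_TEST_OFFSET;    /* scratch slot in the wb page */
    unsigned i;
    int r;

    gpu_addr = rdev->wb.gpu_addr + index;             /* GPU view of that dword */
    rdev->wb.wb[index / 4] = cpu_to_le32(0xCAFEDEAD); /* CPU view, pre-seeded */

    r = radeon_ring_lock(rdev, ring, 4);
    if (r)
        return r;
    radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1));
    radeon_ring_write(ring, lower_32_bits(gpu_addr));
    radeon_ring_write(ring, upper_32_bits(gpu_addr) & 0xff); /* DMA addresses are 40-bit */
    radeon_ring_write(ring, 0xDEADBEEF);
    radeon_ring_unlock_commit(rdev, ring, false);

    for (i = 0; i < rdev->usec_timeout; i++) {        /* wait for the engine's write */
        if (le32_to_cpu(rdev->wb.wb[index / 4]) == 0xDEADBEEF)
            break;
        DRM_UDELAY(1);
    }
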
cik_sdma.c
154 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr)); in cik_sdma_ring_ib_execute()
202 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cik_sdma_fence_ring_emit()
231 u64 addr = semaphore->gpu_addr; in cik_sdma_semaphore_ring_emit()
650 u64 gpu_addr; in cik_sdma_ring_test() local
657 gpu_addr = rdev->wb.gpu_addr + index; in cik_sdma_ring_test()
668 radeon_ring_write(ring, lower_32_bits(gpu_addr)); in cik_sdma_ring_test()
669 radeon_ring_write(ring, upper_32_bits(gpu_addr)); in cik_sdma_ring_test()
707 u64 gpu_addr; in cik_sdma_ib_test() local
714 gpu_addr = rdev->wb.gpu_addr + index; in cik_sdma_ib_test()
726 ib.ptr[1] = lower_32_bits(gpu_addr); in cik_sdma_ib_test()
[all …]
uvd_v4_2.c
47 addr = (rdev->uvd.gpu_addr + 0x200) >> 3; in uvd_v4_2_resume()
49 addr = rdev->uvd.gpu_addr >> 3; in uvd_v4_2_resume()
67 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v4_2_resume()
71 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v4_2_resume()
uvd_v2_2.c
43 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v2_2_fence_emit()
77 uint64_t addr = semaphore->gpu_addr; in uvd_v2_2_semaphore_emit()
113 addr = rdev->uvd.gpu_addr >> 3; in uvd_v2_2_resume()
130 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v2_2_resume()
134 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v2_2_resume()
uvd_v1_0.c
85 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v1_0_fence_emit()
121 addr = (rdev->uvd.gpu_addr >> 3) + 16; in uvd_v1_0_resume()
138 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v1_0_resume()
142 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v1_0_resume()
364 WREG32(UVD_LMI_EXT40_ADDR, upper_32_bits(ring->gpu_addr) | in uvd_v1_0_start()
374 WREG32(UVD_RBC_RB_BASE, ring->gpu_addr); in uvd_v1_0_start()
487 radeon_ring_write(ring, ib->gpu_addr); in uvd_v1_0_ib_execute()
radeon_semaphore.c
53 (*semaphore)->gpu_addr = radeon_sa_bo_gpu_addr((*semaphore)->sa_bo); in radeon_semaphore_create()
73 ring->last_semaphore_signal_addr = semaphore->gpu_addr; in radeon_semaphore_emit_signal()
92 ring->last_semaphore_wait_addr = semaphore->gpu_addr; in radeon_semaphore_emit_wait()
vce_v1_0.c
219 uint64_t addr = rdev->vce.gpu_addr; in vce_v1_0_resume()
301 WREG32(VCE_RB_BASE_LO, ring->gpu_addr); in vce_v1_0_start()
302 WREG32(VCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr)); in vce_v1_0_start()
308 WREG32(VCE_RB_BASE_LO2, ring->gpu_addr); in vce_v1_0_start()
309 WREG32(VCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr)); in vce_v1_0_start()
evergreen_dma.c
43 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in evergreen_dma_fence_ring_emit()
87 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in evergreen_dma_ring_ib_execute()
88 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in evergreen_dma_ring_ib_execute()
ni_dma.c
143 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in cayman_dma_ring_ib_execute()
144 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in cayman_dma_ring_ib_execute()
221 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF); in cayman_dma_resume()
223 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cayman_dma_resume()
228 WREG32(DMA_RB_BASE + reg_offset, ring->gpu_addr >> 8); in cayman_dma_resume()
radeon_object.h
135 extern int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr);
137 u64 max_offset, u64 *gpu_addr);
166 return sa_bo->manager->gpu_addr + sa_bo->soffset; in radeon_sa_bo_gpu_addr()
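
radeon_object.h is where most of these gpu_addr values originate: radeon_bo_pin() reports the GPU-visible address of a pinned buffer object, and radeon_sa_bo_gpu_addr() offsets the sub-allocator manager's base. A minimal sketch of the usual pin sequence built on the prototypes above, assuming an already created struct radeon_bo *bo (error handling abbreviated):

    u64 gpu_addr;
    int r;

    r = radeon_bo_reserve(bo, false);
    if (unlikely(r != 0))
        return r;
    /* Pinning locks the BO into the requested domain and reports where
     * the GPU sees it; engines then consume that address directly. */
    r = radeon_bo_pin(bo, RADEON_GEM_DOMAIN_VRAM, &gpu_addr);
    radeon_bo_unreserve(bo);
    if (r)
        return r;
    /* gpu_addr can now be programmed into base registers, e.g.
     * WREG32(DMA_RB_BASE, gpu_addr >> 8) as in r600_dma_resume(). */
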
radeon_fence.c
837 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + in radeon_fence_driver_start_ring()
844 rdev->fence_drv[ring].gpu_addr = rdev->uvd.gpu_addr + index; in radeon_fence_driver_start_ring()
857 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index; in radeon_fence_driver_start_ring()
862 ring, rdev->fence_drv[ring].gpu_addr, rdev->fence_drv[ring].cpu_addr); in radeon_fence_driver_start_ring()
882 rdev->fence_drv[ring].gpu_addr = 0; in radeon_fence_driver_init_ring()
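
The radeon_fence.c hits bind a CPU pointer and a GPU address to the same write-back dword, so the engine can write fence sequence numbers that the host then polls without register reads. A short sketch of that pairing for the common (non-UVD) case, with the offset computation simplified:

    /* Both addresses name the same 32-bit slot in the write-back page. */
    uint64_t index = R600_WB_EVENT_OFFSET + ring * 4;
    rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index / 4];
    rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index;
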
radeon_vce.c
158 &rdev->vce.gpu_addr); in radeon_vce_init()
360 dummy = ib.gpu_addr + 1024; in radeon_vce_get_create_msg()
427 dummy = ib.gpu_addr + 1024; in radeon_vce_get_destroy_msg()
700 uint64_t addr = semaphore->gpu_addr; in radeon_vce_semaphore_emit()
723 radeon_ring_write(ring, cpu_to_le32(ib->gpu_addr)); in radeon_vce_ib_execute()
724 radeon_ring_write(ring, cpu_to_le32(upper_32_bits(ib->gpu_addr))); in radeon_vce_ib_execute()
739 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in radeon_vce_fence_emit()
/dragonfly/sys/dev/drm/amd/amdgpu/
vce_v4_0.c
156 uint64_t addr = table->gpu_addr; in vce_v4_0_mmsch_start()
234 lower_32_bits(ring->gpu_addr)); in vce_v4_0_sriov_start()
236 upper_32_bits(ring->gpu_addr)); in vce_v4_0_sriov_start()
257 adev->vce.gpu_addr >> 8); in vce_v4_0_sriov_start()
260 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
264 adev->vce.gpu_addr >> 8); in vce_v4_0_sriov_start()
267 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
270 adev->vce.gpu_addr >> 8); in vce_v4_0_sriov_start()
273 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
623 (adev->vce.gpu_addr >> 8)); in vce_v4_0_mc_resume()
[all …]
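
The vce_v4_0.c hits program the VCPU cache base as a 40-bit LMI BAR: the register takes the address shifted right by 8 (so the BO must be 256-byte aligned), while the bits above 40 land in a separate 8-bit field. A small sketch of the split; bar_low and bar_high are hypothetical locals for illustration:

    uint64_t gpu_addr = adev->vce.gpu_addr;
    uint32_t bar_low  = lower_32_bits(gpu_addr >> 8);  /* address bits 39:8 */
    uint32_t bar_high = (gpu_addr >> 40) & 0xff;       /* address bits 47:40 */
    /* bar_low goes into the *_40BIT_BAR* register and bar_high into the
     * corresponding high-bits field, as the register writes above show. */
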
vcn_v1_0.c
290 lower_32_bits(adev->vcn.gpu_addr)); in vcn_v1_0_mc_resume()
292 upper_32_bits(adev->vcn.gpu_addr)); in vcn_v1_0_mc_resume()
301 lower_32_bits(adev->vcn.gpu_addr + offset)); in vcn_v1_0_mc_resume()
303 upper_32_bits(adev->vcn.gpu_addr + offset)); in vcn_v1_0_mc_resume()
747 (upper_32_bits(ring->gpu_addr) >> 2)); in vcn_v1_0_start()
751 lower_32_bits(ring->gpu_addr)); in vcn_v1_0_start()
753 upper_32_bits(ring->gpu_addr)); in vcn_v1_0_start()
1006 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr)); in vcn_v1_0_dec_ring_emit_ib()
1009 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in vcn_v1_0_dec_ring_emit_ib()
1502 val = lower_32_bits(ring->gpu_addr); in vcn_v1_0_jpeg_ring_set_patch_ring()
[all …]
uvd_v7_0.c
240 dummy = ib->gpu_addr + 1024; in uvd_v7_0_enc_get_create_msg()
303 dummy = ib->gpu_addr + 1024; in uvd_v7_0_enc_get_destroy_msg()
678 lower_32_bits(adev->uvd.inst[i].gpu_addr)); in uvd_v7_0_mc_resume()
680 upper_32_bits(adev->uvd.inst[i].gpu_addr)); in uvd_v7_0_mc_resume()
718 uint64_t addr = table->gpu_addr; in uvd_v7_0_mmsch_start()
817 lower_32_bits(adev->uvd.inst[i].gpu_addr)); in uvd_v7_0_sriov_start()
819 upper_32_bits(adev->uvd.inst[i].gpu_addr)); in uvd_v7_0_sriov_start()
1077 (upper_32_bits(ring->gpu_addr) >> 2)); in uvd_v7_0_start()
1081 lower_32_bits(ring->gpu_addr)); in uvd_v7_0_start()
1083 upper_32_bits(ring->gpu_addr)); in uvd_v7_0_start()
[all …]
sdma_v2_4.c
257 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v2_4_ring_emit_ib()
258 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v2_4_ring_emit_ib()
592 u64 gpu_addr; in sdma_v2_4_ring_test_ring() local
600 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v2_4_ring_test_ring()
613 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in sdma_v2_4_ring_test_ring()
614 amdgpu_ring_write(ring, upper_32_bits(gpu_addr)); in sdma_v2_4_ring_test_ring()
653 u64 gpu_addr; in sdma_v2_4_ring_test_ib() local
662 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v2_4_ring_test_ib()
674 ib.ptr[1] = lower_32_bits(gpu_addr); in sdma_v2_4_ring_test_ib()
675 ib.ptr[2] = upper_32_bits(gpu_addr); in sdma_v2_4_ring_test_ib()
[all …]
vce_v3_0.c
284 WREG32(mmVCE_RB_BASE_LO, ring->gpu_addr); in vce_v3_0_start()
285 WREG32(mmVCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr)); in vce_v3_0_start()
291 WREG32(mmVCE_RB_BASE_LO2, ring->gpu_addr); in vce_v3_0_start()
292 WREG32(mmVCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr)); in vce_v3_0_start()
298 WREG32(mmVCE_RB_BASE_LO3, ring->gpu_addr); in vce_v3_0_start()
299 WREG32(mmVCE_RB_BASE_HI3, upper_32_bits(ring->gpu_addr)); in vce_v3_0_start()
547 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR0, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
551 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
845 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr)); in vce_v3_0_ring_emit_ib()
846 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in vce_v3_0_ring_emit_ib()
[all …]
amdgpu_fence.c
153 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit()
200 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit_polling()
384 ring->fence_drv.gpu_addr = adev->wb.gpu_addr + (ring->fence_offs * 4); in amdgpu_fence_driver_start_ring()
389 ring->fence_drv.gpu_addr = adev->uvd.inst[ring->me].gpu_addr + index; in amdgpu_fence_driver_start_ring()
402 ring->fence_drv.gpu_addr, ring->fence_drv.cpu_addr); in amdgpu_fence_driver_start_ring()
427 ring->fence_drv.gpu_addr = 0; in amdgpu_fence_driver_init_ring()
amdgpu_virt.c
340 if (!amdgpu_sriov_vf(adev) || adev->virt.mm_table.gpu_addr) in amdgpu_virt_alloc_mm_table()
346 (u64 *)&adev->virt.mm_table.gpu_addr, in amdgpu_virt_alloc_mm_table()
355 adev->virt.mm_table.gpu_addr, in amdgpu_virt_alloc_mm_table()
367 if (!amdgpu_sriov_vf(adev) || !adev->virt.mm_table.gpu_addr) in amdgpu_virt_free_mm_table()
371 (u64 *)&adev->virt.mm_table.gpu_addr, in amdgpu_virt_free_mm_table()
373 adev->virt.mm_table.gpu_addr = 0; in amdgpu_virt_free_mm_table()
uvd_v6_0.c
232 dummy = ib->gpu_addr + 1024; in uvd_v6_0_enc_get_create_msg()
294 dummy = ib->gpu_addr + 1024; in uvd_v6_0_enc_get_destroy_msg()
601 lower_32_bits(adev->uvd.inst->gpu_addr)); in uvd_v6_0_mc_resume()
603 upper_32_bits(adev->uvd.inst->gpu_addr)); in uvd_v6_0_mc_resume()
843 lower_32_bits(ring->gpu_addr)); in uvd_v6_0_start()
845 upper_32_bits(ring->gpu_addr)); in uvd_v6_0_start()
859 WREG32(mmUVD_RB_BASE_LO, ring->gpu_addr); in uvd_v6_0_start()
866 WREG32(mmUVD_RB_BASE_LO2, ring->gpu_addr); in uvd_v6_0_start()
1021 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr)); in uvd_v6_0_ring_emit_ib()
1075 uint64_t addr = ring->fence_drv.gpu_addr; in uvd_v6_0_ring_emit_pipeline_sync()
[all …]
sdma_v3_0.c
433 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v3_0_ring_emit_ib()
711 wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4); in sdma_v3_0_gfx_resume()
865 u64 gpu_addr; in sdma_v3_0_ring_test_ring() local
873 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v3_0_ring_test_ring()
886 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in sdma_v3_0_ring_test_ring()
887 amdgpu_ring_write(ring, upper_32_bits(gpu_addr)); in sdma_v3_0_ring_test_ring()
926 u64 gpu_addr; in sdma_v3_0_ring_test_ib() local
935 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v3_0_ring_test_ib()
947 ib.ptr[1] = lower_32_bits(gpu_addr); in sdma_v3_0_ring_test_ib()
948 ib.ptr[2] = upper_32_bits(gpu_addr); in sdma_v3_0_ring_test_ib()
[all …]
sdma_v4_0.c
392 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v4_0_ring_emit_ib()
707 wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4); in sdma_v4_0_gfx_resume()
928 u64 gpu_addr; in sdma_v4_0_ring_test_ring() local
936 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v4_0_ring_test_ring()
949 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in sdma_v4_0_ring_test_ring()
950 amdgpu_ring_write(ring, upper_32_bits(gpu_addr)); in sdma_v4_0_ring_test_ring()
990 u64 gpu_addr; in sdma_v4_0_ring_test_ib() local
998 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v4_0_ring_test_ib()
1010 ib.ptr[1] = lower_32_bits(gpu_addr); in sdma_v4_0_ring_test_ib()
1011 ib.ptr[2] = upper_32_bits(gpu_addr); in sdma_v4_0_ring_test_ib()
[all …]
amdgpu_object.h
247 u64 *gpu_addr, void **cpu_addr);
251 u64 *gpu_addr, void **cpu_addr);
252 void amdgpu_bo_free_kernel(struct amdgpu_bo **bo, u64 *gpu_addr,
304 return sa_bo->manager->gpu_addr + sa_bo->soffset; in amdgpu_sa_bo_gpu_addr()
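
amdgpu_object.h collects the kernel-BO helpers that hand out gpu_addr/cpu_addr pairs; the amdgpu_virt.c hits above use exactly this pair for the SR-IOV mm table. A minimal usage sketch of amdgpu_bo_create_kernel()/amdgpu_bo_free_kernel(), assuming a one-page VRAM buffer:

    struct amdgpu_bo *bo = NULL;
    u64 gpu_addr = 0;
    void *cpu_ptr = NULL;
    int r;

    /* One call allocates, pins and kmaps a kernel-owned BO, returning both
     * the GPU address (for ring/register programming) and a CPU mapping
     * (for filling the buffer). */
    r = amdgpu_bo_create_kernel(adev, PAGE_SIZE, PAGE_SIZE,
                                AMDGPU_GEM_DOMAIN_VRAM,
                                &bo, &gpu_addr, &cpu_ptr);
    if (r)
        return r;

    /* ... fill via cpu_ptr, hand gpu_addr to the engine ... */

    amdgpu_bo_free_kernel(&bo, &gpu_addr, &cpu_ptr);   /* unpins and frees */
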
uvd_v5_0.c
262 lower_32_bits(adev->uvd.inst->gpu_addr)); in uvd_v5_0_mc_resume()
264 upper_32_bits(adev->uvd.inst->gpu_addr)); in uvd_v5_0_mc_resume()
409 WREG32(mmUVD_RBC_RB_RPTR_ADDR, (upper_32_bits(ring->gpu_addr) >> 2)); in uvd_v5_0_start()
413 lower_32_bits(ring->gpu_addr)); in uvd_v5_0_start()
415 upper_32_bits(ring->gpu_addr)); in uvd_v5_0_start()
542 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr)); in uvd_v5_0_ring_emit_ib()
544 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in uvd_v5_0_ring_emit_ib()
amdgpu_amdkfd.c
278 void **mem_obj, uint64_t *gpu_addr, in alloc_gtt_mem() argument
332 *gpu_addr = amdgpu_bo_gpu_offset(bo); in alloc_gtt_mem()
448 uint32_t vmid, uint64_t gpu_addr, in amdgpu_amdkfd_submit_ib() argument
481 ib->gpu_addr = gpu_addr; in amdgpu_amdkfd_submit_ib()
