/openbsd/sys/dev/pci/drm/amd/amdgpu/
amdgpu_gfx.c
    312  struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id];  in amdgpu_gfx_kiq_init_ring() local
    349  struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id];  in amdgpu_gfx_kiq_fini() local
    359  struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id];  in amdgpu_gfx_kiq_init() local
    385  struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id];  in amdgpu_gfx_mqd_sw_init() local
    476  struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id];  in amdgpu_gfx_mqd_sw_fini() local
    511  if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)  in amdgpu_gfx_disable_kcq()
    542  if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)  in amdgpu_gfx_disable_kgq()
    588  if (!kiq->pmf || !kiq->pmf->kiq_map_queues || !kiq->pmf->kiq_set_resources)  in amdgpu_gfx_enable_kcq()
    644  if (!kiq->pmf || !kiq->pmf->kiq_map_queues)  in amdgpu_gfx_enable_kgq()
    931  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in amdgpu_kiq_rreg() local
    [all …]
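The amdgpu_gfx.c hits above all follow one convention: resolve the per-partition KIQ from the adev->gfx.kiq[] array, then refuse to proceed unless the ASIC installed its PM4 packet callbacks. A minimal sketch of that guard, assuming the types from amdgpu_gfx.h (the error value is illustrative):

```c
/* Sketch of the guard used by amdgpu_gfx_disable_kcq()/enable_kcq():
 * look up the KIQ for one XCC partition and verify the ASIC installed
 * the PM4 packet callbacks before emitting anything on its ring. */
static int kiq_check_pmf(struct amdgpu_device *adev, int xcc_id)
{
        struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id];

        if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)
                return -EINVAL; /* no KIQ backend for this ASIC */
        return 0;
}
```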
amdgpu_amdkfd.c
    828  struct amdgpu_kiq *kiq = &adev->gfx.kiq[inst];  in amdgpu_amdkfd_unmap_hiq() local
    829  struct amdgpu_ring *kiq_ring = &kiq->ring;  in amdgpu_amdkfd_unmap_hiq()
    834  if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)  in amdgpu_amdkfd_unmap_hiq()
    851  spin_lock(&kiq->ring_lock);  in amdgpu_amdkfd_unmap_hiq()
    853  if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {  in amdgpu_amdkfd_unmap_hiq()
    854  spin_unlock(&kiq->ring_lock);  in amdgpu_amdkfd_unmap_hiq()
    859  kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES, 0, 0);  in amdgpu_amdkfd_unmap_hiq()
    864  spin_unlock(&kiq->ring_lock);  in amdgpu_amdkfd_unmap_hiq()
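Taken together, the amdgpu_amdkfd_unmap_hiq() hits trace the canonical KIQ submission sequence: verify the callback, take ring_lock, reserve unmap_queues_size dwords, emit UNMAP_QUEUES with the RESET_QUEUES action, submit, unlock. A condensed sketch under those assumptions; amdgpu_ring_commit() is our guess at the submit step the listing elides, and the error values are illustrative:

```c
/* Condensed from amdgpu_amdkfd_unmap_hiq(): serialize on the KIQ ring
 * lock, reserve space for one UNMAP_QUEUES packet, emit it, submit. */
static int unmap_one_queue(struct amdgpu_device *adev,
                           struct amdgpu_ring *ring, u32 inst)
{
        struct amdgpu_kiq *kiq = &adev->gfx.kiq[inst];
        struct amdgpu_ring *kiq_ring = &kiq->ring;

        if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)
                return -EINVAL;

        spin_lock(&kiq->ring_lock);
        if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {
                spin_unlock(&kiq->ring_lock);
                return -ENOMEM;
        }
        kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES, 0, 0);
        amdgpu_ring_commit(kiq_ring);   /* submit step assumed */
        spin_unlock(&kiq->ring_lock);
        return 0;
}
```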
mes_v11_0.c
    877  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in mes_v11_0_kiq_enable_queue() local
    881  if (!kiq->pmf || !kiq->pmf->kiq_map_queues)  in mes_v11_0_kiq_enable_queue()
    902  ring = &adev->gfx.kiq[0].ring;  in mes_v11_0_queue_init()
    969  mtx_init(&adev->gfx.kiq[0].ring_lock, IPL_TTY);  in mes_v11_0_kiq_ring_init()
    971  ring = &adev->gfx.kiq[0].ring;  in mes_v11_0_kiq_ring_init()
    997  ring = &adev->gfx.kiq[0].ring;  in mes_v11_0_mqd_sw_init()
    1086  &adev->gfx.kiq[0].ring.mqd_gpu_addr,  in mes_v11_0_sw_fini()
    1087  &adev->gfx.kiq[0].ring.mqd_ptr);  in mes_v11_0_sw_fini()
    1093  amdgpu_ring_fini(&adev->gfx.kiq[0].ring);  in mes_v11_0_sw_fini()
    1186  mes_v11_0_kiq_setting(&adev->gfx.kiq[0].ring);  in mes_v11_0_kiq_hw_init()
    [all …]
mes_v10_1.c
    803  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in mes_v10_1_kiq_enable_queue() local
    807  if (!kiq->pmf || !kiq->pmf->kiq_map_queues)  in mes_v10_1_kiq_enable_queue()
    816  kiq->pmf->kiq_map_queues(kiq_ring, &adev->mes.ring);  in mes_v10_1_kiq_enable_queue()
    863  mtx_init(&adev->gfx.kiq[0].ring_lock, IPL_TTY);  in mes_v10_1_kiq_ring_init()
    865  ring = &adev->gfx.kiq[0].ring;  in mes_v10_1_kiq_ring_init()
    891  ring = &adev->gfx.kiq[0].ring;  in mes_v10_1_mqd_sw_init()
    978  &adev->gfx.kiq[0].ring.mqd_gpu_addr,  in mes_v10_1_sw_fini()
    979  &adev->gfx.kiq[0].ring.mqd_ptr);  in mes_v10_1_sw_fini()
    985  amdgpu_ring_fini(&adev->gfx.kiq[0].ring);  in mes_v10_1_sw_fini()
    1041  mes_v10_1_kiq_setting(&adev->gfx.kiq[0].ring);  in mes_v10_1_kiq_hw_init()
    [all …]
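Both MES generations above enable their scheduler queue the same way: the ring lock comes from OpenBSD's mtx_init() spinlock shim at IPL_TTY, and the MES ring is handed to the KIQ's kiq_map_queues callback (visible verbatim in the v10_1 hit at 816). A sketch of that enable step; map_queues_size is assumed by analogy with the unmap_queues_size and invalidate_tlbs_size fields elsewhere in this listing:

```c
/* Sketch of mes_v1x_kiq_enable_queue(): map the MES scheduler ring
 * through KIQ instance 0. */
static int mes_kiq_enable_queue(struct amdgpu_device *adev)
{
        struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];
        struct amdgpu_ring *kiq_ring = &kiq->ring;
        int r;

        if (!kiq->pmf || !kiq->pmf->kiq_map_queues)
                return -EINVAL;

        r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size);
        if (r)
                return r;

        kiq->pmf->kiq_map_queues(kiq_ring, &adev->mes.ring);
        amdgpu_ring_commit(kiq_ring);   /* submit step assumed */
        return 0;
}
```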
gmc_v11_0.c
    297  if ((adev->gfx.kiq[0].ring.sched.ready || adev->mes.ring.sched.ready) &&  in gmc_v11_0_flush_gpu_tlb()
    335  struct amdgpu_ring *ring = &adev->gfx.kiq[0].ring;  in gmc_v11_0_flush_gpu_tlb_pasid()
    336  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in gmc_v11_0_flush_gpu_tlb_pasid() local
    339  spin_lock(&adev->gfx.kiq[0].ring_lock);  in gmc_v11_0_flush_gpu_tlb_pasid()
    341  amdgpu_ring_alloc(ring, kiq->pmf->invalidate_tlbs_size + 8);  in gmc_v11_0_flush_gpu_tlb_pasid()
    342  kiq->pmf->kiq_invalidate_tlbs(ring,  in gmc_v11_0_flush_gpu_tlb_pasid()
    347  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in gmc_v11_0_flush_gpu_tlb_pasid()
    352  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in gmc_v11_0_flush_gpu_tlb_pasid()
gmc_v10_0.c
    339  if (adev->gfx.kiq[0].ring.sched.ready && !adev->enable_mes &&  in gmc_v10_0_flush_gpu_tlb()
    425  struct amdgpu_ring *ring = &adev->gfx.kiq[0].ring;  in gmc_v10_0_flush_gpu_tlb_pasid()
    426  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in gmc_v10_0_flush_gpu_tlb_pasid() local
    429  spin_lock(&adev->gfx.kiq[0].ring_lock);  in gmc_v10_0_flush_gpu_tlb_pasid()
    431  amdgpu_ring_alloc(ring, kiq->pmf->invalidate_tlbs_size + 8);  in gmc_v10_0_flush_gpu_tlb_pasid()
    432  kiq->pmf->kiq_invalidate_tlbs(ring,  in gmc_v10_0_flush_gpu_tlb_pasid()
    437  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in gmc_v10_0_flush_gpu_tlb_pasid()
    442  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in gmc_v10_0_flush_gpu_tlb_pasid()
gmc_v9_0.c
    847  if (adev->gfx.kiq[0].ring.sched.ready &&  in gmc_v9_0_flush_gpu_tlb()
    956  struct amdgpu_ring *ring = &adev->gfx.kiq[inst].ring;  in gmc_v9_0_flush_gpu_tlb_pasid()
    957  struct amdgpu_kiq *kiq = &adev->gfx.kiq[inst];  in gmc_v9_0_flush_gpu_tlb_pasid() local
    972  unsigned int ndw = kiq->pmf->invalidate_tlbs_size + 8;  in gmc_v9_0_flush_gpu_tlb_pasid()
    975  ndw += kiq->pmf->invalidate_tlbs_size;  in gmc_v9_0_flush_gpu_tlb_pasid()
    977  spin_lock(&adev->gfx.kiq[inst].ring_lock);  in gmc_v9_0_flush_gpu_tlb_pasid()
    981  kiq->pmf->kiq_invalidate_tlbs(ring,  in gmc_v9_0_flush_gpu_tlb_pasid()
    987  kiq->pmf->kiq_invalidate_tlbs(ring,  in gmc_v9_0_flush_gpu_tlb_pasid()
    990  kiq->pmf->kiq_invalidate_tlbs(ring,  in gmc_v9_0_flush_gpu_tlb_pasid()
    995  spin_unlock(&adev->gfx.kiq[inst].ring_lock);  in gmc_v9_0_flush_gpu_tlb_pasid()
    [all …]
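The three gmc_v*_flush_gpu_tlb_pasid() variants share one shape: when the KIQ ring scheduler is ready, the PASID invalidation is emitted on the KIQ under ring_lock, sized at invalidate_tlbs_size plus 8 dwords of headroom; gmc_v9_0 grows ndw because some paths emit the packet more than once (the condition is truncated in the listing). A sketch of the common core; the callback arguments (pasid, flush_type, all_hub) are assumed since the listing truncates them, and the fence emit/wait that the extra 8 dwords cover is elided here:

```c
/* Sketch of the gmc_v10/v11 PASID flush path via the KIQ. */
static int flush_tlb_pasid_via_kiq(struct amdgpu_device *adev, u16 pasid,
                                   u32 flush_type, bool all_hub)
{
        struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];
        struct amdgpu_ring *ring = &kiq->ring;

        spin_lock(&kiq->ring_lock);
        if (amdgpu_ring_alloc(ring, kiq->pmf->invalidate_tlbs_size + 8)) {
                spin_unlock(&kiq->ring_lock);
                return -ENOMEM;
        }
        kiq->pmf->kiq_invalidate_tlbs(ring, pasid, flush_type, all_hub);
        /* ... polling fence emit/wait elided ... */
        amdgpu_ring_commit(ring);
        spin_unlock(&kiq->ring_lock);
        return 0;
}
```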
amdgpu_virt.c
    78  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in amdgpu_virt_kiq_reg_write_reg_wait() local
    79  struct amdgpu_ring *ring = &kiq->ring;  in amdgpu_virt_kiq_reg_write_reg_wait()
    90  spin_lock_irqsave(&kiq->ring_lock, flags);  in amdgpu_virt_kiq_reg_write_reg_wait()
    99  spin_unlock_irqrestore(&kiq->ring_lock, flags);  in amdgpu_virt_kiq_reg_write_reg_wait()
    121  spin_unlock_irqrestore(&kiq->ring_lock, flags);  in amdgpu_virt_kiq_reg_write_reg_wait()
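amdgpu_virt_kiq_reg_write_reg_wait() is the SR-IOV case: a virtual function cannot poke certain registers over MMIO, so a write-then-poll request is queued on the KIQ, and irqsave locking is used because callers may run in atomic context. A sketch; the kiq_reg_write_reg_wait callback name and the 32-dword reservation are assumptions, since the listing shows only the locking:

```c
/* Sketch of the SR-IOV register write-and-wait path via the KIQ. */
static void virt_reg_write_reg_wait(struct amdgpu_device *adev,
                                    u32 reg0, u32 reg1, u32 ref, u32 mask)
{
        struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];
        struct amdgpu_ring *ring = &kiq->ring;
        unsigned long flags;

        spin_lock_irqsave(&kiq->ring_lock, flags);
        if (amdgpu_ring_alloc(ring, 32))        /* headroom assumed */
                goto out_unlock;
        kiq->pmf->kiq_reg_write_reg_wait(ring, reg0, reg1, ref, mask);
        amdgpu_ring_commit(ring);
out_unlock:
        spin_unlock_irqrestore(&kiq->ring_lock, flags);
}
```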
gfx_v9_0.c
    2005  struct amdgpu_kiq *kiq;  in gfx_v9_0_sw_init() local
    2158  kiq = &adev->gfx.kiq[0];  in gfx_v9_0_sw_init()
    2159  r = amdgpu_gfx_kiq_init_ring(adev, &kiq->ring, &kiq->irq, 0);  in gfx_v9_0_sw_init()
    3549  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v9_0_kiq_init_queue()
    3574  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v9_0_kiq_init_queue()
    3624  ring = &adev->gfx.kiq[0].ring;  in gfx_v9_0_kiq_resume()
    3806  adev->gfx.kiq[0].ring.pipe,  in gfx_v9_0_hw_fini()
    3807  adev->gfx.kiq[0].ring.queue, 0, 0);  in gfx_v9_0_hw_fini()
    3929  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in gfx_v9_0_kiq_read_clock() local
    5465  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in gfx_v9_0_ring_preempt_ib() local
    [all …]
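The two mqd_backup hits inside gfx_v9_0_kiq_init_queue() are the halves of a backup/restore split that repeats in the gfx_v8/v10/v11 and gfx_v9_4_3 entries below: first-time init programs the MQD and snapshots it; a reset or resume path copies the snapshot back instead of reprogramming. A sketch of the shape, with the restore predicate and the ASIC's MQD size passed in rather than guessed:

```c
/* Sketch of the gfx_v*_kiq_init_queue() backup/restore split.  The
 * caller decides "restore" (GPU reset / resume in the real driver). */
static void kiq_init_queue_sketch(struct amdgpu_ring *ring, bool restore,
                                  size_t mqd_size)
{
        struct amdgpu_device *adev = ring->adev;
        struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];

        if (restore) {
                /* reset/resume: reload the saved MQD image */
                if (kiq->mqd_backup)
                        memcpy(ring->mqd_ptr, kiq->mqd_backup, mqd_size);
        } else {
                /* first init: program the MQD, then snapshot it */
                /* ... register programming elided ... */
                if (kiq->mqd_backup)
                        memcpy(kiq->mqd_backup, ring->mqd_ptr, mqd_size);
        }
}
```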
gfx_v9_4_3.c
    189  adev->gfx.kiq[i].pmf = &gfx_v9_4_3_kiq_pm4_funcs;  in gfx_v9_4_3_set_kiq_pm4_funcs()
    781  struct amdgpu_kiq *kiq;  in gfx_v9_4_3_sw_init() local
    850  kiq = &adev->gfx.kiq[xcc_id];  in gfx_v9_4_3_sw_init()
    851  r = amdgpu_gfx_kiq_init_ring(adev, &kiq->ring, &kiq->irq, xcc_id);  in gfx_v9_4_3_sw_init()
    1389  adev->gfx.kiq[xcc_id].ring.sched.ready = false;  in gfx_v9_4_3_xcc_cp_compute_enable()
    1676  ((adev->doorbell_index.kiq +  in gfx_v9_4_3_xcc_kiq_init_register()
    1768  if (adev->gfx.kiq[xcc_id].mqd_backup)  in gfx_v9_4_3_xcc_kiq_init_queue()
    1792  if (adev->gfx.kiq[xcc_id].mqd_backup)  in gfx_v9_4_3_xcc_kiq_init_queue()
    1863  ring = &adev->gfx.kiq[xcc_id].ring;  in gfx_v9_4_3_xcc_kiq_resume()
    1997  adev->gfx.kiq[xcc_id].ring.pipe,  in gfx_v9_4_3_xcc_fini()
    [all …]
gfx_v11_0.c
    1309  struct amdgpu_kiq *kiq;  in gfx_v11_0_sw_init() local
    1443  kiq = &adev->gfx.kiq[0];  in gfx_v11_0_sw_init()
    1444  r = amdgpu_gfx_kiq_init_ring(adev, &kiq->ring, &kiq->irq, 0);  in gfx_v11_0_sw_init()
    3948  (adev->doorbell_index.kiq * 2) << 2);  in gfx_v11_0_kiq_init_register()
    3987  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v11_0_kiq_init_queue()
    4010  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v11_0_kiq_init_queue()
    4051  ring = &adev->gfx.kiq[0].ring;  in gfx_v11_0_kiq_resume()
    5535  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in gfx_v11_0_ring_preempt_ib() local
    5536  struct amdgpu_ring *kiq_ring = &kiq->ring;  in gfx_v11_0_ring_preempt_ib()
    5539  if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)  in gfx_v11_0_ring_preempt_ib()
    [all …]
amdgpu_amdkfd_gfx_v10_3.c
    280  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in hiq_mqd_load_v10_3()
    295  spin_lock(&adev->gfx.kiq[0].ring_lock);  in hiq_mqd_load_v10_3()
    322  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in hiq_mqd_load_v10_3()
amdgpu_amdkfd_gfx_v11.c
    265  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in hiq_mqd_load_v11()
    280  spin_lock(&adev->gfx.kiq[0].ring_lock);  in hiq_mqd_load_v11()
    307  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in hiq_mqd_load_v11()
vega10_reg_init.c
    60  adev->doorbell_index.kiq = AMDGPU_DOORBELL64_KIQ;  in vega10_doorbell_index_init()
vega20_reg_init.c
    60  adev->doorbell_index.kiq = AMDGPU_VEGA20_DOORBELL_KIQ;  in vega20_doorbell_index_init()
amdgpu_doorbell.h
    52  uint32_t kiq;  member
amdgpu_amdkfd_gfx_v10.c
    294  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in kgd_hiq_mqd_load()
    309  spin_lock(&adev->gfx.kiq[0].ring_lock);  in kgd_hiq_mqd_load()
    336  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in kgd_hiq_mqd_load()
gfx_v10_0.c
    4486  struct amdgpu_kiq *kiq;  in gfx_v10_0_sw_init() local
    4530  &adev->gfx.kiq[0].irq);  in gfx_v10_0_sw_init()
    4615  kiq = &adev->gfx.kiq[0];  in gfx_v10_0_sw_init()
    4616  r = amdgpu_gfx_kiq_init_ring(adev, &kiq->ring, &kiq->irq, 0);  in gfx_v10_0_sw_init()
    6693  (adev->doorbell_index.kiq * 2) << 2);  in gfx_v10_0_kiq_init_register()
    6732  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v10_0_kiq_init_queue()
    6755  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v10_0_kiq_init_queue()
    6796  ring = &adev->gfx.kiq[0].ring;  in gfx_v10_0_kiq_resume()
    8534  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in gfx_v10_0_ring_preempt_ib() local
    8535  struct amdgpu_ring *kiq_ring = &kiq->ring;  in gfx_v10_0_ring_preempt_ib()
    [all …]
amdgpu_amdkfd_gfx_v9.c
    307  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[inst].ring;  in kgd_gfx_v9_hiq_mqd_load()
    322  spin_lock(&adev->gfx.kiq[inst].ring_lock);  in kgd_gfx_v9_hiq_mqd_load()
    349  spin_unlock(&adev->gfx.kiq[inst].ring_lock);  in kgd_gfx_v9_hiq_mqd_load()
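The four hiq_mqd_load variants above (v9, v10, v10_3, v11) all map KFD's HIQ by hand-building a MAP_QUEUES packet on the KIQ ring under ring_lock, rather than going through kiq->pmf. A structural sketch; the 7-dword reservation, the PACKET3_MAP_QUEUES opcode, and the payload layout are recalled from the upstream driver and not visible in this listing:

```c
/* Sketch of the kgd_*_hiq_mqd_load() shape: one hand-rolled
 * MAP_QUEUES packet on the KIQ ring. */
static int hiq_mqd_load_sketch(struct amdgpu_device *adev, u32 inst)
{
        struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[inst].ring;
        int r, i;

        spin_lock(&adev->gfx.kiq[inst].ring_lock);
        r = amdgpu_ring_alloc(kiq_ring, 7);     /* header + 6 payload dwords */
        if (r)
                goto out_unlock;

        amdgpu_ring_write(kiq_ring, PACKET3(PACKET3_MAP_QUEUES, 5));
        for (i = 0; i < 6; i++)
                amdgpu_ring_write(kiq_ring, 0); /* queue select, doorbell,
                                                 * MQD/WPTR addrs elided */
        amdgpu_ring_commit(kiq_ring);

out_unlock:
        spin_unlock(&adev->gfx.kiq[inst].ring_lock);
        return r;
}
```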
gfx_v8_0.c
    1904  struct amdgpu_kiq *kiq;  in gfx_v8_0_sw_init() local
    2025  kiq = &adev->gfx.kiq[0];  in gfx_v8_0_sw_init()
    2026  r = amdgpu_gfx_kiq_init_ring(adev, &kiq->ring, &kiq->irq, 0);  in gfx_v8_0_sw_init()
    2055  amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);  in gfx_v8_0_sw_fini()
    4296  adev->gfx.kiq[0].ring.sched.ready = false;  in gfx_v8_0_cp_compute_enable()
    4318  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in gfx_v8_0_kiq_kcq_enable()
    4604  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v8_0_kiq_init_queue()
    4628  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v8_0_kiq_init_queue()
    4679  ring = &adev->gfx.kiq[0].ring;  in gfx_v8_0_kiq_resume()
    4743  ring = &adev->gfx.kiq[0].ring;  in gfx_v8_0_cp_test_all_rings()
    [all …]
amdgpu_gfx.h
    356  struct amdgpu_kiq kiq[AMDGPU_MAX_GC_INSTANCES];  member
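This amdgpu_gfx.h member explains the indexing throughout the listing: there is one KIQ per graphics-core instance, so multi-partition parts (gfx_v9_4_3) index by xcc_id while single-instance parts hardcode kiq[0]. Per-instance setup walks the array, as in the set_kiq_pm4_funcs hit at gfx_v9_4_3.c:189; the loop bound here is an assumption (the real driver derives it from the XCC mask):

```c
/* Sketch: install the PM4 callbacks on every KIQ instance.  num_inst
 * stands in for the driver's partition count. */
static void set_kiq_pm4_funcs_sketch(struct amdgpu_device *adev, int num_inst)
{
        int i;

        for (i = 0; i < num_inst; i++)
                adev->gfx.kiq[i].pmf = &gfx_v9_4_3_kiq_pm4_funcs;
}
```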
aqua_vanjaram.c
    41  adev->doorbell_index.kiq = AMDGPU_DOORBELL_LAYOUT1_KIQ_START;  in aqua_vanjaram_doorbell_index_init()
soc21.c
    478  adev->doorbell_index.kiq = AMDGPU_NAVI10_DOORBELL_KIQ;  in soc21_init_doorbell_index()
nv.c
    562  adev->doorbell_index.kiq = AMDGPU_NAVI10_DOORBELL_KIQ;  in nv_init_doorbell_index()
vi.c
    2207  adev->doorbell_index.kiq = AMDGPU_DOORBELL_KIQ;  in legacy_doorbell_index_init()
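The *_doorbell_index_init() one-liners above (vega10, vega20, aqua_vanjaram, soc21, nv, vi) are where each ASIC family picks its KIQ doorbell slot; the gfx_v10_0.c:6693 and gfx_v11_0.c:3948 hits then turn that index into a byte offset. A sketch of that conversion; the qword-to-dword reading of the two shifts is our interpretation, not stated in the listing:

```c
/* From the "(adev->doorbell_index.kiq * 2) << 2" hits: the stored
 * index counts 64-bit doorbell slots, so *2 yields a dword index and
 * <<2 the byte offset programmed into the KIQ MQD. */
static u32 kiq_doorbell_byte_offset(struct amdgpu_device *adev)
{
        return (adev->doorbell_index.kiq * 2) << 2;
}
```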