
Searched refs:fences (Results 1 – 25 of 68) sorted by relevance


/netbsd/sys/external/bsd/drm2/dist/drm/amd/amdgpu/
amdgpu_ids.c
121 struct dma_fence *fence, **fences; in amdgpu_pasid_free_delayed() local
136 fence = fences[0]; in amdgpu_pasid_free_delayed()
137 kfree(fences); in amdgpu_pasid_free_delayed()
145 kfree(fences); in amdgpu_pasid_free_delayed()
215 struct dma_fence **fences; in amdgpu_vmid_grab_idle() local
223 if (!fences) in amdgpu_vmid_grab_idle()
230 if (!fences[i]) in amdgpu_vmid_grab_idle()
244 dma_fence_get(fences[j]); in amdgpu_vmid_grab_idle()
250 dma_fence_put(fences[j]); in amdgpu_vmid_grab_idle()
251 kfree(fences); in amdgpu_vmid_grab_idle()
[all …]
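
The matches above show a lifecycle the driver uses repeatedly: allocate an array of fence pointers, take a reference on each fence that is kept (dma_fence_get), and on every exit path drop the references (dma_fence_put) before freeing the array itself. Below is a minimal userspace sketch of that pattern; the struct and helpers are illustrative stand-ins, not the kernel's dma_fence API.

    #include <stdlib.h>

    struct fence { int refs; };     /* stand-in for struct dma_fence */

    static struct fence *fence_get(struct fence *f) { if (f) f->refs++; return f; }
    static void fence_put(struct fence *f) { if (f && --f->refs == 0) free(f); }

    /* drop every reference held by the array, then free the array itself;
     * mirrors the dma_fence_put()/kfree() pairs in amdgpu_vmid_grab_idle() */
    static void release_fences(struct fence **fences, size_t count)
    {
        for (size_t i = 0; i < count; i++)
            fence_put(fences[i]);
        free(fences);   /* the array holds references, not the fence objects */
    }
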
amdgpu_sync.c
57 hash_init(sync->fences); in amdgpu_sync_create()
142 hash_for_each_possible(sync->fences, e, node, f->context) { in amdgpu_sync_add_later()
182 hash_add(sync->fences, &e->node, f->context); in amdgpu_sync_fence()
286 hash_for_each_safe(sync->fences, i, tmp, e, node) { in amdgpu_sync_peek_fence()
328 hash_for_each_safe(sync->fences, i, tmp, e, node) { in amdgpu_sync_get_fence()
361 hash_for_each_safe(source->fences, i, tmp, e, node) { in amdgpu_sync_clone()
386 hash_for_each_safe(sync->fences, i, tmp, e, node) { in amdgpu_sync_wait()
412 hash_for_each_safe(sync->fences, i, tmp, e, node) { in amdgpu_sync_free()
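
amdgpu_sync tracks its fences in a small hashtable keyed by fence context (DECLARE_HASHTABLE(fences, 4), see the amdgpu_sync.h hit below), so it holds at most one fence per timeline, and a later fence on the same context replaces the stored one. A rough userspace model of that dedup follows; chained buckets stand in for the kernel's hashtable.h macros, and a plain seqno comparison stands in for dma_fence_is_later().

    #include <stdint.h>
    #include <stdlib.h>

    struct fence { uint64_t context, seqno; };

    #define SYNC_BITS    4                  /* matches DECLARE_HASHTABLE(fences, 4) */
    #define SYNC_BUCKETS (1u << SYNC_BITS)

    struct sync_entry { struct sync_entry *next; struct fence *fence; };
    struct sync { struct sync_entry *buckets[SYNC_BUCKETS]; };  /* zeroed = empty */

    /* keep at most one fence per context; a later fence on the same
     * timeline replaces the stored one, as in amdgpu_sync_add_later() */
    static int sync_add(struct sync *s, struct fence *f)
    {
        unsigned b = f->context & (SYNC_BUCKETS - 1);

        for (struct sync_entry *e = s->buckets[b]; e; e = e->next) {
            if (e->fence->context != f->context)
                continue;
            if (f->seqno > e->fence->seqno)  /* kernel uses dma_fence_is_later() */
                e->fence = f;
            return 0;
        }

        struct sync_entry *e = malloc(sizeof(*e));
        if (!e)
            return -1;
        e->fence = f;
        e->next = s->buckets[b];
        s->buckets[b] = e;
        return 0;
    }
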
amdgpu_sa.c
215 struct dma_fence **fences, in amdgpu_sa_bo_next_hole() argument
237 fences[i] = NULL; in amdgpu_sa_bo_next_hole()
246 fences[i] = sa_bo->fence; in amdgpu_sa_bo_next_hole()
287 struct dma_fence *fences[AMDGPU_SA_NUM_FENCE_LISTS]; in amdgpu_sa_bo_new() local
322 } while (amdgpu_sa_bo_next_hole(sa_manager, fences, tries)); in amdgpu_sa_bo_new()
325 if (fences[i]) in amdgpu_sa_bo_new()
326 fences[count++] = dma_fence_get(fences[i]); in amdgpu_sa_bo_new()
330 t = dma_fence_wait_any_timeout(fences, count, false, in amdgpu_sa_bo_new()
334 dma_fence_put(fences[i]); in amdgpu_sa_bo_new()
amdgpu_dma_buf.c
147 struct dma_fence **fences; in __dma_resv_make_exclusive() local
154 r = dma_resv_get_fences_rcu(obj, NULL, &count, &fences); in __dma_resv_make_exclusive()
161 dma_resv_add_excl_fence(obj, fences[0]); in __dma_resv_make_exclusive()
162 dma_fence_put(fences[0]); in __dma_resv_make_exclusive()
163 kfree(fences); in __dma_resv_make_exclusive()
167 array = dma_fence_array_create(count, fences, in __dma_resv_make_exclusive()
181 dma_fence_put(fences[count]); in __dma_resv_make_exclusive()
182 kfree(fences); in __dma_resv_make_exclusive()
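
__dma_resv_make_exclusive() also illustrates the ownership rule of dma_fence_array_create(): on success the array object takes over both the fence references and the allocated pointer array, so the caller frees them only on the failure paths (the dma_fence_put()/kfree() pair at lines 181-182 above). A toy model of that transfer, with deliberately simplified types:

    #include <stddef.h>
    #include <stdlib.h>

    struct fence { int refs; };
    struct fence_array { size_t count; struct fence **fences; };

    /* on success the array owns 'fences' (pointers and references);
     * on failure ownership stays with the caller, who must clean up */
    static struct fence_array *fence_array_create(size_t count, struct fence **fences)
    {
        struct fence_array *a = malloc(sizeof(*a));
        if (!a)
            return NULL;
        a->count = count;
        a->fences = fences;
        return a;
    }

When creation fails, the caller must put each fence and free the array itself, which is exactly what the matched error path does.
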
amdgpu_fence.c
166 ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask]; in amdgpu_fence_emit()
270 ptr = &drv->fences[last_seq]; in amdgpu_fence_process()
327 ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask]; in amdgpu_fence_wait_empty()
464 ring->fence_drv.fences = kcalloc(num_hw_submission * 2, sizeof(void *), in amdgpu_fence_driver_init_ring()
466 if (!ring->fence_drv.fences) in amdgpu_fence_driver_init_ring()
549 dma_fence_put(ring->fence_drv.fences[j]); in amdgpu_fence_driver_fini()
550 kfree(ring->fence_drv.fences); in amdgpu_fence_driver_fini()
551 ring->fence_drv.fences = NULL; in amdgpu_fence_driver_fini()
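
amdgpu_fence.c keeps in-flight fences in a flat array sized to a power of two (kcalloc(num_hw_submission * 2, ...)), so a 64-bit sequence number maps to a slot with a single AND against num_fences_mask. A self-contained version of that ring; the names are illustrative, and as in the driver the submission count must itself be a power of two:

    #include <stdint.h>
    #include <stdlib.h>

    struct fence_ring {
        void   **slots;   /* one slot per possibly-outstanding fence */
        uint32_t mask;    /* slot count - 1; count must be a power of two */
    };

    static int fence_ring_init(struct fence_ring *r, uint32_t num_hw_submission)
    {
        uint32_t n = num_hw_submission * 2;  /* doubling keeps it a power of two */

        r->slots = calloc(n, sizeof(*r->slots));
        if (!r->slots)
            return -1;
        r->mask = n - 1;
        return 0;
    }

    /* same indexing as &ring->fence_drv.fences[seq & num_fences_mask] */
    static void **fence_ring_slot(struct fence_ring *r, uint64_t seq)
    {
        return &r->slots[seq & r->mask];
    }
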
amdgpu_debugfs.c
1057 struct dma_fence **fences) in amdgpu_ib_preempt_fences_swap() argument
1073 ptr = &drv->fences[last_seq]; in amdgpu_ib_preempt_fences_swap()
1081 fences[last_seq] = fence; in amdgpu_ib_preempt_fences_swap()
1093 fence = fences[i]; in amdgpu_ib_preempt_signal_fences()
1131 ptr = &drv->fences[preempt_seq]; in amdgpu_ib_preempt_mark_partial_job()
1148 struct dma_fence **fences = NULL; in amdgpu_debugfs_ib_preempt() local
1164 fences = kcalloc(length, sizeof(void *), GFP_KERNEL); in amdgpu_debugfs_ib_preempt()
1165 if (!fences) in amdgpu_debugfs_ib_preempt()
1192 amdgpu_ib_preempt_fences_swap(ring, fences); in amdgpu_debugfs_ib_preempt()
1203 amdgpu_ib_preempt_signal_fences(fences, length); in amdgpu_debugfs_ib_preempt()
[all …]
amdgpu_jpeg.c
81 unsigned int fences = 0; in amdgpu_jpeg_idle_work_handler() local
88 fences += amdgpu_fence_count_emitted(&adev->jpeg.inst[i].ring_dec); in amdgpu_jpeg_idle_work_handler()
91 if (fences == 0) in amdgpu_jpeg_idle_work_handler()
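
This idle handler, and the VCN handlers below, share one heuristic: a delayed work item sums amdgpu_fence_count_emitted() over every ring and only power-gates the block when the total reaches zero. Reduced to its core, the decision looks like this (a sketch, not driver code):

    #include <stdbool.h>

    /* sum outstanding fences across all rings; power-gate only when
     * everything has drained, otherwise the work item is re-armed */
    static bool block_is_idle(const unsigned *emitted_per_ring, unsigned num_rings)
    {
        unsigned fences = 0;

        for (unsigned i = 0; i < num_rings; i++)
            fences += emitted_per_ring[i];
        return fences == 0;
    }
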
amdgpu_ctx.c
79 entity = kcalloc(1, offsetof(typeof(*entity), fences[amdgpu_sched_jobs]), in amdgpu_ctx_init_entity()
182 dma_fence_put(entity->fences[i]); in amdgpu_ctx_fini_entity()
473 other = centity->fences[idx]; in amdgpu_ctx_add_fence()
478 centity->fences[idx] = fence; in amdgpu_ctx_add_fence()
510 fence = dma_fence_get(centity->fences[seq & (amdgpu_sched_jobs - 1)]); in amdgpu_ctx_get_fence()
549 other = dma_fence_get(centity->fences[idx]); in amdgpu_ctx_wait_prev_fence()
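
amdgpu_ctx sizes each entity with a flexible array member: kcalloc(1, offsetof(typeof(*entity), fences[amdgpu_sched_jobs])) allocates the header and the per-job fence ring in one block, and lookups mask the sequence number with amdgpu_sched_jobs - 1. A compilable sketch of both tricks; note that GCC and Clang accept a non-constant array index inside offsetof, which is what the kernel relies on here:

    #include <stddef.h>
    #include <stdlib.h>

    struct fence;   /* opaque; only pointers are stored */

    struct entity {
        unsigned long sequence;
        struct fence *fences[];       /* flexible array member */
    };

    /* header plus num_jobs fence slots in a single zeroed allocation */
    static struct entity *entity_alloc(size_t num_jobs)
    {
        return calloc(1, offsetof(struct entity, fences[num_jobs]));
    }

    /* num_jobs must be a power of two for the mask to be valid */
    static struct fence **entity_slot(struct entity *e, size_t num_jobs,
        unsigned long seq)
    {
        return &e->fences[seq & (num_jobs - 1)];
    }
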
amdgpu_cs.c
1525 struct drm_amdgpu_fence *fences) in amdgpu_cs_wait_all_fences() argument
1535 fence = amdgpu_cs_get_fence(adev, filp, &fences[i]); in amdgpu_cs_wait_all_fences()
1570 struct drm_amdgpu_fence *fences) in amdgpu_cs_wait_any_fence() argument
1588 fence = amdgpu_cs_get_fence(adev, filp, &fences[i]); in amdgpu_cs_wait_any_fence()
1638 struct drm_amdgpu_fence *fences; in amdgpu_cs_wait_fences_ioctl() local
1644 if (fences == NULL) in amdgpu_cs_wait_fences_ioctl()
1647 fences_user = u64_to_user_ptr(wait->in.fences); in amdgpu_cs_wait_fences_ioctl()
1648 if (copy_from_user(fences, fences_user, in amdgpu_cs_wait_fences_ioctl()
1655 r = amdgpu_cs_wait_all_fences(adev, filp, wait, fences); in amdgpu_cs_wait_fences_ioctl()
1657 r = amdgpu_cs_wait_any_fence(adev, filp, wait, fences); in amdgpu_cs_wait_fences_ioctl()
[all …]
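
The wait-fences ioctl copies a caller-controlled array of descriptors in from userspace, relying on kcalloc() to reject a count * size multiplication that would overflow. Here is a userspace analog of the same defensive allocation using calloc(), which performs the identical overflow check; the descriptor layout is made up for illustration, not the real struct drm_amdgpu_fence:

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    struct fence_desc { uint32_t ctx_id; uint32_t ring; uint64_t seq_no; };

    /* 'count' comes from an untrusted caller: calloc() rejects a count * size
     * overflow, so a huge count fails cleanly instead of under-allocating;
     * the kernel pairs this with copy_from_user() on a u64_to_user_ptr() */
    static struct fence_desc *copy_descs(const struct fence_desc *src, uint32_t count)
    {
        struct fence_desc *descs = calloc(count, sizeof(*descs));

        if (!descs)
            return NULL;
        /* safe: if calloc succeeded, count * sizeof(*descs) fits in size_t */
        memcpy(descs, src, (size_t)count * sizeof(*descs));
        return descs;
    }
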
amdgpu_vcn.c
289 unsigned int fences = 0, fence[AMDGPU_MAX_VCN_INSTANCES] = {0}; in amdgpu_vcn_idle_work_handler() local
312 fences += fence[j]; in amdgpu_vcn_idle_work_handler()
315 if (fences == 0) { in amdgpu_vcn_idle_work_handler()
337 unsigned int fences = 0; in amdgpu_vcn_ring_begin_use() local
341 fences += amdgpu_fence_count_emitted(&adev->vcn.inst[ring->me].ring_enc[i]); in amdgpu_vcn_ring_begin_use()
343 if (fences) in amdgpu_vcn_ring_begin_use()
amdgpu_sync.h
40 DECLARE_HASHTABLE(fences, 4);
amdgpu_vcn_v1_0.c
1780 unsigned int fences = 0, i; in vcn_v1_0_idle_work_handler() local
1783 fences += amdgpu_fence_count_emitted(&adev->vcn.inst->ring_enc[i]); in vcn_v1_0_idle_work_handler()
1788 if (fences) in vcn_v1_0_idle_work_handler()
1801 fences += amdgpu_fence_count_emitted(&adev->jpeg.inst->ring_dec); in vcn_v1_0_idle_work_handler()
1802 fences += amdgpu_fence_count_emitted(&adev->vcn.inst->ring_dec); in vcn_v1_0_idle_work_handler()
1804 if (fences == 0) { in vcn_v1_0_idle_work_handler()
1832 unsigned int fences = 0, i; in vcn_v1_0_ring_begin_use() local
1835 fences += amdgpu_fence_count_emitted(&adev->vcn.inst->ring_enc[i]); in vcn_v1_0_ring_begin_use()
1837 if (fences) in vcn_v1_0_ring_begin_use()
amdgpu_ctx.h
39 struct dma_fence *fences[]; member
/netbsd/sys/external/bsd/drm2/linux/
linux_dma_fence_array.c
102 if (dma_fence_add_callback(A->fences[i], &C->dfac_cb, in dma_fence_array_enable_signaling()
104 error = A->fences[i]->error; in dma_fence_array_enable_signaling()
139 dma_fence_put(A->fences[i]); in dma_fence_array_release()
141 kfree(A->fences); in dma_fence_array_release()
155 dma_fence_array_create(int num_fences, struct dma_fence **fences, in dma_fence_array_create() argument
168 A->fences = fences; in dma_fence_array_create()
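
A dma_fence_array signals once all of its children have: enable_signaling installs one callback per child fence, and the release path puts each child and frees the pointer array the object took ownership of at create time. The signaling side reduces to a pending counter, sketched here without the locking and refcounting the real object needs:

    #include <stdbool.h>
    #include <stddef.h>

    /* toy composite fence: each child callback decrements 'pending';
     * the composite signals when the last child has signaled */
    struct fence_array {
        size_t pending;
        bool   signaled;
    };

    static void child_signaled_cb(struct fence_array *a)
    {
        if (--a->pending == 0)
            a->signaled = true;   /* kernel calls dma_fence_signal() here */
    }
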
linux_dma_fence.c
830 dma_fence_wait_any_timeout(struct dma_fence **fences, uint32_t nfences, in dma_fence_wait_any_timeout() argument
844 KASSERT(dma_fence_referenced_p(fences[i])); in dma_fence_wait_any_timeout()
845 if (dma_fence_is_signaled(fences[i])) { in dma_fence_wait_any_timeout()
876 KASSERT(dma_fence_referenced_p(fences[i])); in dma_fence_wait_any_timeout()
877 ret = dma_fence_add_callback(fences[i], &cb[i].fcb, in dma_fence_wait_any_timeout()
955 if (dma_fence_is_signaled(fences[i])) { in dma_fence_wait_any_timeout()
970 (void)dma_fence_remove_callback(fences[i], &cb[i].fcb); in dma_fence_wait_any_timeout()
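
The NetBSD implementation of dma_fence_wait_any_timeout() follows the shape visible in the hits: scan the array for an already-signaled fence, install a callback on each one, sleep, rescan on wakeup, and remove every callback before returning. Below is a compact userspace model with one shared lock and condition variable standing in for the per-fence callbacks (build with -lpthread); timeout handling is omitted:

    #include <pthread.h>
    #include <stdbool.h>
    #include <stddef.h>

    struct fence { bool signaled; };

    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
    static pthread_cond_t  cond = PTHREAD_COND_INITIALIZER;

    static void fence_signal(struct fence *f)
    {
        pthread_mutex_lock(&lock);
        f->signaled = true;
        pthread_cond_broadcast(&cond);   /* wake any waiter to rescan */
        pthread_mutex_unlock(&lock);
    }

    /* block until at least one fence signals; return its index */
    static size_t fence_wait_any(struct fence **fences, size_t n)
    {
        pthread_mutex_lock(&lock);
        for (;;) {
            for (size_t i = 0; i < n; i++) {
                if (fences[i]->signaled) {
                    pthread_mutex_unlock(&lock);
                    return i;
                }
            }
            pthread_cond_wait(&cond, &lock);
        }
    }
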
/netbsd/sys/external/bsd/drm2/dist/drm/i915/selftests/
i915_sw_fence.c
458 struct i915_sw_fence **fences; in test_chain() local
462 fences = kmalloc_array(nfences, sizeof(*fences), GFP_KERNEL); in test_chain()
463 if (!fences) in test_chain()
467 fences[i] = alloc_fence(); in test_chain()
468 if (!fences[i]) { in test_chain()
476 fences[i - 1], in test_chain()
483 i915_sw_fence_commit(fences[i]); in test_chain()
489 if (i915_sw_fence_done(fences[i])) { in test_chain()
495 i915_sw_fence_commit(fences[0]); in test_chain()
505 free_fence(fences[i]); in test_chain()
[all …]
/netbsd/sys/external/bsd/drm2/dist/drm/radeon/
radeon_sa.c
262 struct radeon_fence **fences, in radeon_sa_bo_next_hole() argument
292 fences[i] = sa_bo->fence; in radeon_sa_bo_next_hole()
331 struct radeon_fence *fences[RADEON_NUM_RINGS]; in radeon_sa_bo_new() local
354 fences[i] = NULL; in radeon_sa_bo_new()
372 } while (radeon_sa_bo_next_hole(sa_manager, fences, tries)); in radeon_sa_bo_new()
375 radeon_fence_ref(fences[i]); in radeon_sa_bo_new()
379 r = radeon_fence_wait_any(rdev, fences, false); in radeon_sa_bo_new()
381 radeon_fence_unref(&fences[i]); in radeon_sa_bo_new()
390 r = radeon_fence_wait_any(rdev, fences, false); in radeon_sa_bo_new()
392 radeon_fence_unref(&fences[i]); in radeon_sa_bo_new()
radeon_trace.h
38 __field(u32, fences)
44 __entry->fences = radeon_fence_count_emitted(
49 __entry->fences)
/netbsd/sys/external/bsd/drm2/dist/drm/i915/gem/
i915_gem_execbuffer.c
2475 kvfree(fences); in __free_fence_array()
2484 struct drm_syncobj **fences; in get_fence_array() local
2502 fences = kvmalloc_array(nfences, sizeof(*fences), in get_fence_array()
2504 if (!fences) in get_fence_array()
2534 return fences; in get_fence_array()
2543 struct drm_syncobj **fences) in put_fence_array() argument
2545 if (fences) in put_fence_array()
2789 if (fences) { in i915_gem_do_execbuffer()
2825 if (fences) in i915_gem_do_execbuffer()
3034 if (IS_ERR(fences)) { in i915_gem_execbuffer2_ioctl()
[all …]
/netbsd/sys/external/bsd/drm2/dist/drm/virtio/
virtgpu_fence.c
110 list_add_tail(&fence->node, &drv->fences); in virtio_gpu_fence_emit()
128 list_for_each_entry_safe(fence, tmp, &drv->fences, node) { in virtio_gpu_fence_event_process()
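
virtio-gpu appends each emitted fence to a pending list and, when the device reports progress, walks the list retiring every fence whose sequence number has been passed. A sketch of that drain using the unlink-while-walking idiom; the kernel code uses list_for_each_entry_safe() on a doubly linked list instead:

    #include <stdint.h>
    #include <stdlib.h>

    struct vfence { struct vfence *next; uint64_t seq; };

    /* retire every pending fence the device has already passed, like
     * the walk in virtio_gpu_fence_event_process() */
    static void process_fences(struct vfence **head, uint64_t last_seq)
    {
        struct vfence **p = head;

        while (*p) {
            struct vfence *f = *p;

            if (f->seq <= last_seq) {
                *p = f->next;   /* unlink, then retire */
                free(f);
            } else {
                p = &f->next;
            }
        }
    }
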
/netbsd/external/gpl3/gcc/dist/gcc/
sel-sched.c
454 flist_t fences; variable
7306 flist_t old_fences = fences; in schedule_on_fences()
7311 dump_flist (fences); in schedule_on_fences()
7316 for (; fences; fences = FLIST_NEXT (fences)) in schedule_on_fences()
7371 while ((fences = FLIST_NEXT (fences))) in find_min_max_seqno()
7386 flist_t old_fences = fences; in calculate_new_fences()
7391 for (; fences; fences = FLIST_NEXT (fences)) in calculate_new_fences()
7484 while (fences) in sel_sched_region_2()
7492 fences = calculate_new_fences (fences, orig_max_seqno, &max_time); in sel_sched_region_2()
7526 fences = NULL; in sel_sched_region_1()
[all …]
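
The GCC hits use "fences" in an unrelated sense: in the selective scheduler (sel-sched) a fence is a scheduling boundary, and the active fences live in an flist_t cons list that the main loop repeatedly walks with fences = FLIST_NEXT (fences) and rebuilds until it drains. A toy version of that walk, with the fence payload reduced to a placeholder field:

    /* toy flist: a singly linked list of scheduler fences */
    struct flist {
        struct flist *next;
        int boundary_insn;            /* stand-in for the fence payload */
    };

    #define FLIST_NEXT(l) ((l)->next)

    static void schedule_on_fences(struct flist *fences)
    {
        for (; fences; fences = FLIST_NEXT(fences)) {
            /* schedule the region reachable from fences->boundary_insn */
        }
    }
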
sel-sched.cc
454 flist_t fences; variable
7306 flist_t old_fences = fences; in schedule_on_fences()
7311 dump_flist (fences); in schedule_on_fences()
7316 for (; fences; fences = FLIST_NEXT (fences)) in schedule_on_fences()
7371 while ((fences = FLIST_NEXT (fences))) in find_min_max_seqno()
7386 flist_t old_fences = fences; in calculate_new_fences()
7391 for (; fences; fences = FLIST_NEXT (fences)) in calculate_new_fences()
7484 while (fences) in sel_sched_region_2()
7492 fences = calculate_new_fences (fences, orig_max_seqno, &max_time); in sel_sched_region_2()
7526 fences = NULL; in sel_sched_region_1()
[all …]
/netbsd/external/gpl3/gcc.old/dist/gcc/
sel-sched.c
454 flist_t fences; variable
7306 flist_t old_fences = fences; in schedule_on_fences()
7311 dump_flist (fences); in schedule_on_fences()
7316 for (; fences; fences = FLIST_NEXT (fences)) in schedule_on_fences()
7371 while ((fences = FLIST_NEXT (fences))) in find_min_max_seqno()
7386 flist_t old_fences = fences; in calculate_new_fences()
7391 for (; fences; fences = FLIST_NEXT (fences)) in calculate_new_fences()
7484 while (fences) in sel_sched_region_2()
7492 fences = calculate_new_fences (fences, orig_max_seqno, &max_time); in sel_sched_region_2()
7526 fences = NULL; in sel_sched_region_1()
[all …]
/netbsd/sys/external/bsd/drm2/include/linux/
dma-fence-array.h
51 struct dma_fence **fences; member
/netbsd/sys/external/bsd/drm2/dist/drm/
drm_gem.c
1547 struct dma_fence **fences; in drm_gem_fence_array_add_implicit() local
1558 &fence_count, &fences); in drm_gem_fence_array_add_implicit()
1563 ret = drm_gem_fence_array_add(fence_array, fences[i]); in drm_gem_fence_array_add_implicit()
1569 dma_fence_put(fences[i]); in drm_gem_fence_array_add_implicit()
1570 kfree(fences); in drm_gem_fence_array_add_implicit()
