/dragonfly/sys/dev/drm/i915/
i915_sw_fence.c
    124  return fn(fence, state);  in __i915_sw_fence_notify()
    130  debug_fence_free(fence);  in i915_sw_fence_fini()
    174  debug_fence_assert(fence);  in __i915_sw_fence_wake_up_all()
    221  debug_fence_init(fence);  in __i915_sw_fence_init()
    252  if (fence == signaler)  in __i915_sw_fence_check_if_after()
    333  wq->private = fence;  in __i915_sw_fence_await_sw_fence()
    378  fence = xchg(&cb->fence, NULL);  in timer_i915_sw_fence_wake()
    379  if (!fence)  in timer_i915_sw_fence_wake()
    396  fence = xchg(&cb->fence, NULL);  in dma_i915_sw_fence_wake()
    397  if (fence)  in dma_i915_sw_fence_wake()
    [all …]
i915_gem_fence_reg.c
    239  if (fence->vma && fence->vma != vma) {  in fence_update()
    243  GEM_BUG_ON(fence->vma->fence != fence);  in fence_update()
    246  fence->vma->fence = NULL;  in fence_update()
    263  vma->fence = fence;  in fence_update()
    286  struct drm_i915_fence_reg *fence = vma->fence;  in i915_vma_put_fence() local
    288  if (!fence)  in i915_vma_put_fence()
    302  GEM_BUG_ON(fence->vma && fence->vma->fence != fence);  in fence_find()
    349  fence = vma->fence;  in i915_vma_pin_fence()
    372  GEM_BUG_ON(vma->fence != (set ? fence : NULL));  in i915_vma_pin_fence()
    430  list_add(&fence->link, &fence->i915->mm.fence_list);  in i915_unreserve_fence()
    [all …]
i915_sw_fence.h
    42  void __i915_sw_fence_init(struct i915_sw_fence *fence,
    47  #define i915_sw_fence_init(fence, fn) \  argument
    51  __i915_sw_fence_init((fence), (fn), #fence, &__key); \
    54  #define i915_sw_fence_init(fence, fn) \  argument
    55  __i915_sw_fence_init((fence), (fn), NULL, NULL)
    59  void i915_sw_fence_fini(struct i915_sw_fence *fence);
    64  void i915_sw_fence_commit(struct i915_sw_fence *fence);
    66  int i915_sw_fence_await_sw_fence(struct i915_sw_fence *fence,
    85  return atomic_read(&fence->pending) <= 0;  in i915_sw_fence_signaled()
    90  return atomic_read(&fence->pending) < 0;  in i915_sw_fence_done()
    [all …]
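The header above is i915's software fence: an i915_sw_fence stays unsignaled while its pending count is positive and invokes a notify callback once every awaited fence has completed. A minimal usage sketch follows; the FENCE_COMPLETE/FENCE_FREE notify states and the i915_sw_fence_await_sw_fence_gfp() helper are assumed from the matching Linux i915 sources rather than shown in the hits above.

    /* Sketch only: init/await/commit flow for an i915_sw_fence. */
    static int my_notify(struct i915_sw_fence *fence,
                         enum i915_sw_fence_notify state)
    {
        switch (state) {
        case FENCE_COMPLETE:    /* every awaited fence has signaled */
            break;
        case FENCE_FREE:        /* last reference dropped, release resources */
            break;
        }
        return 0;
    }

    static void example_chain(struct i915_sw_fence *waiter,
                              struct i915_sw_fence *signaler)
    {
        i915_sw_fence_init(waiter, my_notify);

        /* raise waiter->pending until 'signaler' completes */
        i915_sw_fence_await_sw_fence_gfp(waiter, signaler, GFP_KERNEL);

        /* drop the initial bias; waiter signals once all awaits resolve */
        i915_sw_fence_commit(waiter);
    }
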
i915_gem_request.c
    61  if (i915_fence_signaled(fence))  in i915_fence_enable_signaling()
    65  return !i915_fence_signaled(fence);  in i915_fence_enable_signaling()
    669  dma_fence_init(&req->fence,  in i915_gem_request_alloc()
    783  &from->fence, 0,  in i915_gem_request_await_request()
    790  struct dma_fence *fence)  in i915_gem_request_await_dma_fence() argument
    792  struct dma_fence **child = &fence;  in i915_gem_request_await_dma_fence()
    803  if (dma_fence_is_array(fence)) {  in i915_gem_request_await_dma_fence()
    812  fence = *child++;  in i915_gem_request_await_dma_fence()
    821  if (fence->context == req->fence.context)  in i915_gem_request_await_dma_fence()
    829  if (dma_fence_is_i915(fence))  in i915_gem_request_await_dma_fence()
    [all …]
i915_gem_request.h
    110  struct dma_fence fence;  member
    209  static inline bool dma_fence_is_i915(const struct dma_fence *fence)  in dma_fence_is_i915() argument
    211  return fence->ops == &i915_fence_ops;  in dma_fence_is_i915()
    220  to_request(struct dma_fence *fence)  in to_request() argument
    223  BUILD_BUG_ON(offsetof(struct drm_i915_gem_request, fence) != 0);  in to_request()
    224  GEM_BUG_ON(fence && !dma_fence_is_i915(fence));  in to_request()
    225  return container_of(fence, struct drm_i915_gem_request, fence);  in to_request()
    231  return to_request(dma_fence_get(&req->fence));  in i915_gem_request_get()
    237  return to_request(dma_fence_get_rcu(&req->fence));  in i915_gem_request_get_rcu()
    243  dma_fence_put(&req->fence);  in i915_gem_request_put()
    [all …]
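The request header shows the standard dma_fence embedding pattern: the fence is the first member of the enclosing request, the ops pointer identifies the owning driver, and container_of() recovers the outer object. The same pattern in isolation, using a hypothetical struct my_request and my_fence_ops that are not part of this tree:

    struct my_request {
        struct dma_fence fence;             /* must remain the first member */
        /* driver-private state follows */
    };

    extern const struct dma_fence_ops my_fence_ops;

    static inline bool dma_fence_is_mine(const struct dma_fence *fence)
    {
        /* the ops pointer acts as the type tag for a dma_fence */
        return fence->ops == &my_fence_ops;
    }

    static inline struct my_request *to_my_request(struct dma_fence *fence)
    {
        /* only safe after the ops check; container_of() undoes the embedding */
        BUILD_BUG_ON(offsetof(struct my_request, fence) != 0);
        return container_of(fence, struct my_request, fence);
    }
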
/dragonfly/sys/dev/drm/
linux_fence.c
    32  fence->ops = ops;  in dma_fence_init()
    36  fence->flags = 0;  in dma_fence_init()
    37  fence->error = 0;  in dma_fence_init()
    47  if (fence->ops && fence->ops->release)  in dma_fence_release()
    48  fence->ops->release(fence);  in dma_fence_release()
    50  kfree(fence);  in dma_fence_release()
    60  return fence->ops->wait(fence, intr, timeout);  in dma_fence_wait_timeout()
    108  if (!fence->ops->enable_signaling(fence)) {  in dma_fence_default_wait()
    267  if (!fence->ops->enable_signaling(fence))  in dma_fence_enable_sw_signaling()
    295  if (!fence->ops->enable_signaling(fence)) {  in dma_fence_add_callback()
    [all …]
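linux_fence.c is the port's dma_fence core. A minimal fence provider looks roughly like the sketch below, assuming the 4.x-era dma_fence_init(fence, ops, lock, context, seqno) prototype and the usual Linux-compat helpers (DEFINE_SPINLOCK, kzalloc); the hits above suggest .enable_signaling and .wait are called unconditionally in this port, so both callbacks are supplied.

    static const char *my_get_driver_name(struct dma_fence *fence)
    {
        return "my_driver";
    }

    static const char *my_get_timeline_name(struct dma_fence *fence)
    {
        return "my_timeline";
    }

    static bool my_enable_signaling(struct dma_fence *fence)
    {
        return true;            /* still live; an IRQ path will signal it */
    }

    static const struct dma_fence_ops my_fence_ops = {
        .get_driver_name   = my_get_driver_name,
        .get_timeline_name = my_get_timeline_name,
        .enable_signaling  = my_enable_signaling,
        .wait              = dma_fence_default_wait,
    };

    static DEFINE_SPINLOCK(my_fence_lock);

    static struct dma_fence *my_fence_create(u64 context, unsigned int seqno)
    {
        struct dma_fence *fence = kzalloc(sizeof(*fence), GFP_KERNEL);

        if (!fence)
            return NULL;
        dma_fence_init(fence, &my_fence_ops, &my_fence_lock, context, seqno);
        return fence;
    }

A completion path would then call dma_fence_signal(fence) and drop its reference with dma_fence_put(fence).
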
linux_reservation.c
    109  dma_fence_get(fence);  in reservation_object_add_shared_inplace()
    151  dma_fence_get(fence);  in reservation_object_add_shared_replace()
    243  if (fence)  in reservation_object_add_excl_fence()
    244  dma_fence_get(fence);  in reservation_object_add_excl_fence()
    443  if (fence && !test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags)) {  in reservation_object_wait_timeout_rcu()
    449  fence = NULL;  in reservation_object_wait_timeout_rcu()
    453  fence = NULL;  in reservation_object_wait_timeout_rcu()
    478  fence = lfence;  in reservation_object_wait_timeout_rcu()
    484  if (fence) {  in reservation_object_wait_timeout_rcu()
    491  dma_fence_put(fence);  in reservation_object_wait_timeout_rcu()
    [all …]
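linux_reservation.c manages the per-buffer fence slots: one exclusive write fence plus a list of shared read fences. A sketch of the two common operations, publishing a fence and waiting for the buffer to go idle; reservation_object_lock()/unlock() are assumed from the matching Linux reservation.h (they wrap the object's ww_mutex).

    static void publish_write_fence(struct reservation_object *resv,
                                    struct dma_fence *fence)
    {
        reservation_object_lock(resv, NULL);    /* ww_mutex guarding the slots */
        reservation_object_add_excl_fence(resv, fence);
        reservation_object_unlock(resv);
    }

    static long wait_for_buffer_idle(struct reservation_object *resv)
    {
        /* wait_all = true waits for shared (read) fences too; 1s timeout */
        return reservation_object_wait_timeout_rcu(resv, true, true, HZ);
    }
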
drm_syncobj.c
    101  if (*fence)
    110  *fence = dma_fence_get(syncobj->fence);
    113  *fence = NULL;
    168  if (fence)  in drm_syncobj_replace_fence()
    174  syncobj->fence = fence;  in drm_syncobj_replace_fence()
    216  fence = kzalloc(sizeof(*fence), GFP_KERNEL);  in drm_syncobj_assign_null_handle()
    243  if (!*fence) {  in drm_syncobj_find_fence()
    295  if (fence)  in drm_syncobj_create()
    481  if (!fence)  in drm_syncobj_import_sync_file_fence()
    672  wait->fence = dma_fence_get(syncobj->fence);
    [all …]
linux_fence-array.c
    34  static const char *dma_fence_array_get_driver_name(struct dma_fence *fence)  in dma_fence_array_get_driver_name() argument
    39  static const char *dma_fence_array_get_timeline_name(struct dma_fence *fence)  in dma_fence_array_get_timeline_name() argument
    86  static bool dma_fence_array_enable_signaling(struct dma_fence *fence)  in dma_fence_array_enable_signaling() argument
    88  struct dma_fence_array *array = to_dma_fence_array(fence);  in dma_fence_array_enable_signaling()
    119  static bool dma_fence_array_signaled(struct dma_fence *fence)  in dma_fence_array_signaled() argument
    121  struct dma_fence_array *array = to_dma_fence_array(fence);  in dma_fence_array_signaled()
    126  static void dma_fence_array_release(struct dma_fence *fence)  in dma_fence_array_release() argument
    128  struct dma_fence_array *array = to_dma_fence_array(fence);  in dma_fence_array_release()
    135  dma_fence_free(fence);  in dma_fence_array_release()
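A dma_fence_array wraps several fences behind a single dma_fence, using the callbacks above to report signaling once all (or any) of its members signal. A creation sketch, assuming the usual dma_fence_array_create() prototype from the matching Linux code:

    static struct dma_fence *bundle_two(struct dma_fence *a, struct dma_fence *b)
    {
        struct dma_fence_array *array;
        struct dma_fence **fences;

        fences = kmalloc_array(2, sizeof(*fences), GFP_KERNEL);
        if (!fences)
            return NULL;

        /* the array takes over these references and frees 'fences' on release */
        fences[0] = dma_fence_get(a);
        fences[1] = dma_fence_get(b);

        /* signal_on_any = false: signals only after *all* members signal */
        array = dma_fence_array_create(2, fences, dma_fence_context_alloc(1),
                                       1, false);
        if (!array) {
            dma_fence_put(a);
            dma_fence_put(b);
            kfree(fences);
            return NULL;
        }
        return &array->base;
    }
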
/dragonfly/sys/dev/drm/include/linux/
dma-fence.h
    92  if (fence)  in dma_fence_get()
    94  return fence;  in dma_fence_get()
    100  if (fence)  in dma_fence_get_rcu()
    102  return fence;  in dma_fence_get_rcu()
    108  if (fence)  in dma_fence_put()
    121  if (fence->ops->signaled && fence->ops->signaled(fence)) {  in dma_fence_is_signaled()
    170  fence->error = error;  in dma_fence_set_error()
    176  struct dma_fence *fence;  in dma_fence_get_rcu_safe() local
    179  fence = *dfp;  in dma_fence_get_rcu_safe()
    180  if (fence)  in dma_fence_get_rcu_safe()
    [all …]
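dma_fence_get_rcu_safe() is the helper to use when the fence pointer being read can be replaced concurrently (reservation slots, syncobjs): it retries until it holds a reference on a pointer that is still current. A small sketch:

    static struct dma_fence *peek_current_fence(struct dma_fence __rcu **slot)
    {
        struct dma_fence *fence;

        rcu_read_lock();
        /* loops internally if the slot is swapped while taking the reference */
        fence = dma_fence_get_rcu_safe(slot);
        rcu_read_unlock();

        return fence;           /* caller owns a reference, or NULL */
    }
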
reservation.h
    78  struct reservation_object_list __rcu *fence;  member
    96  RCU_INIT_POINTER(obj->fence, NULL);  in reservation_object_init()
    120  fobj = rcu_dereference_protected(obj->fence, 1);  in reservation_object_fini()
    143  return rcu_dereference_protected(obj->fence,  in reservation_object_get_list()
    233  struct dma_fence *fence;  in reservation_object_get_excl_rcu() local
    239  fence = dma_fence_get_rcu_safe(&obj->fence_excl);  in reservation_object_get_excl_rcu()
    242  return fence;  in reservation_object_get_excl_rcu()
    247  struct dma_fence *fence);
    250  struct dma_fence *fence);
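reservation_object_get_excl_rcu(), shown above, snapshots the exclusive fence without taking the ww_mutex, which allows a cheap non-blocking busy check:

    static bool buffer_write_busy(struct reservation_object *resv)
    {
        struct dma_fence *excl = reservation_object_get_excl_rcu(resv);
        bool busy = false;

        if (excl) {
            busy = !dma_fence_is_signaled(excl);
            dma_fence_put(excl);
        }
        return busy;
    }
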
/dragonfly/sys/dev/drm/scheduler/
sched_fence.c
    54  DMA_FENCE_TRACE(&fence->scheduled,  in drm_sched_fence_scheduled()
    57  DMA_FENCE_TRACE(&fence->scheduled,  in drm_sched_fence_scheduled()
    66  DMA_FENCE_TRACE(&fence->finished,  in drm_sched_fence_finished()
    69  DMA_FENCE_TRACE(&fence->finished,  in drm_sched_fence_finished()
    111  dma_fence_put(fence->parent);  in drm_sched_fence_release_scheduled()
    126  dma_fence_put(&fence->scheduled);  in drm_sched_fence_release_finished()
    156  struct drm_sched_fence *fence = NULL;  in drm_sched_fence_create() local
    160  if (fence == NULL)  in drm_sched_fence_create()
    163  fence->owner = owner;  in drm_sched_fence_create()
    164  fence->sched = entity->rq->sched;  in drm_sched_fence_create()
    [all …]
gpu_scheduler.c
    430  if (!fence || dma_fence_is_signaled(fence))  in drm_sched_dependency_optimized()
    434  s_fence = to_drm_sched_fence(fence);  in drm_sched_dependency_optimized()
    459  s_fence = to_drm_sched_fence(fence);  in drm_sched_entity_add_dependency_cb()
    468  entity->dependency = fence;  in drm_sched_entity_add_dependency_cb()
    474  dma_fence_put(fence);  in drm_sched_entity_add_dependency_cb()
    691  struct dma_fence *fence;  in drm_sched_job_recovery() local
    706  if (fence) {  in drm_sched_job_recovery()
    715  dma_fence_put(fence);  in drm_sched_job_recovery()
    875  struct dma_fence *fence;  in drm_sched_main() local
    897  if (fence) {  in drm_sched_main()
    [all …]
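gpu_scheduler.c resolves job dependencies and hands finished work back as fences: each job carries a drm_sched_fence whose scheduled part signals when the job is picked and whose finished part signals when the hardware fence returned by run_job() completes. A submission sketch, with prototypes assumed from the Linux 4.19-era scheduler and therefore possibly differing in this port:

    static struct dma_fence *submit_and_track(struct drm_sched_job *job,
                                              struct drm_sched_entity *entity,
                                              void *owner)
    {
        struct dma_fence *finished;
        int r;

        r = drm_sched_job_init(job, entity, owner); /* allocates job->s_fence */
        if (r)
            return ERR_PTR(r);

        /* 'finished' signals once run_job()'s hardware fence signals */
        finished = dma_fence_get(&job->s_fence->finished);

        drm_sched_entity_push_job(job, entity);
        return finished;
    }
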
/dragonfly/sys/dev/drm/radeon/
radeon_fence.c
    171  seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq);  in radeon_fence_check_signaled()
    180  radeon_irq_kms_sw_irq_put(fence->rdev, fence->ring);  in radeon_fence_check_signaled()
    434  if (!fence)  in radeon_fence_signaled()
    437  if (radeon_fence_seq_signaled(fence->rdev, fence->seq, fence->ring)) {  in radeon_fence_signaled()
    558  seq[fence->ring] = fence->seq;  in radeon_fence_wait_timeout()
    702  return fence;  in radeon_fence_ref()
    716  *fence = NULL;  in radeon_fence_unref()
    764  if (!fence) {  in radeon_fence_need_sync()
    774  if (fence->seq <= fdrv->sync_seq[fence->ring]) {  in radeon_fence_need_sync()
    795  if (!fence) {  in radeon_fence_note_sync()
    [all …]
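radeon tracks per-ring sequence numbers behind its fence objects. The classic lifecycle, emit on a ring, wait, drop the reference, looks like the sketch below; radeon_fence_emit() is normally issued inside a locked ring-emission sequence, and the prototypes are assumed from the matching Linux radeon code.

    static int sync_ring(struct radeon_device *rdev, int ring)
    {
        struct radeon_fence *fence = NULL;
        int r;

        r = radeon_fence_emit(rdev, &fence, ring);  /* writes a seqno packet */
        if (r)
            return r;

        r = radeon_fence_wait(fence, false);        /* false = uninterruptible */
        radeon_fence_unref(&fence);                 /* also NULLs the pointer */
        return r;
    }
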
radeon_sync.c
    64  struct radeon_fence *fence)  in radeon_sync_fence() argument
    68  if (!fence)  in radeon_sync_fence()
    71  other = sync->sync_to[fence->ring];  in radeon_sync_fence()
    72  sync->sync_to[fence->ring] = radeon_fence_later(fence, other);  in radeon_sync_fence()
    74  if (fence->is_vm_update) {  in radeon_sync_fence()
    96  struct radeon_fence *fence;  in radeon_sync_resv() local
    103  if (fence && fence->rdev == rdev)  in radeon_sync_resv()
    104  radeon_sync_fence(sync, fence);  in radeon_sync_resv()
    115  fence = to_radeon_fence(f);  in radeon_sync_resv()
    116  if (fence && fence->rdev == rdev)  in radeon_sync_resv()
    [all …]
radeon_test.c
    38  struct radeon_fence *fence = NULL;  in radeon_do_test_moves() local
    127  if (IS_ERR(fence)) {  in radeon_do_test_moves()
    129  r = PTR_ERR(fence);  in radeon_do_test_moves()
    133  r = radeon_fence_wait(fence, false);  in radeon_do_test_moves()
    139  radeon_fence_unref(&fence);  in radeon_do_test_moves()
    178  if (IS_ERR(fence)) {  in radeon_do_test_moves()
    180  r = PTR_ERR(fence);  in radeon_do_test_moves()
    184  r = radeon_fence_wait(fence, false);  in radeon_do_test_moves()
    190  radeon_fence_unref(&fence);  in radeon_do_test_moves()
    236  if (fence && !IS_ERR(fence))  in radeon_do_test_moves()
    [all …]
radeon_sa.c
    150  radeon_fence_unref(&sa_bo->fence);  in radeon_sa_bo_remove_locked()
    163  if (sa_bo->fence == NULL || !radeon_fence_signaled(sa_bo->fence)) {  in radeon_sa_bo_try_free()
    278  fences[i] = sa_bo->fence;  in radeon_sa_bo_next_hole()
    301  ++tries[best_bo->fence->ring];  in radeon_sa_bo_next_hole()
    329  (*sa_bo)->fence = NULL;  in radeon_sa_bo_new()
    377  struct radeon_fence *fence)  in radeon_sa_bo_free() argument
    387  if (fence && !radeon_fence_signaled(fence)) {  in radeon_sa_bo_free()
    388  (*sa_bo)->fence = radeon_fence_ref(fence);  in radeon_sa_bo_free()
    390  &sa_manager->flist[fence->ring]);  in radeon_sa_bo_free()
    416  if (i->fence) {  in radeon_sa_bo_dump_debug_info()
    [all …]
/dragonfly/sys/dev/drm/amd/amdgpu/
amdgpu_job.c
    111  dma_fence_put(job->fence);  in amdgpu_job_free_cb()
    121  dma_fence_put(job->fence);  in amdgpu_job_free()
    160  job->fence = dma_fence_get(*fence);  in amdgpu_job_submit_direct()
    174  struct dma_fence *fence;  in amdgpu_job_dependency() local
    179  if (fence && explicit) {  in amdgpu_job_dependency()
    182  fence, false);  in amdgpu_job_dependency()
    198  return fence;  in amdgpu_job_dependency()
    222  &fence);  in amdgpu_job_run()
    228  job->fence = dma_fence_get(fence);  in amdgpu_job_run()
    232  fence = r ? ERR_PTR(r) : fence;  in amdgpu_job_run()
    [all …]
amdgpu_fence.c
    138  struct amdgpu_fence *fence;  in amdgpu_fence_emit() local
    144  if (fence == NULL)  in amdgpu_fence_emit()
    148  fence->ring = ring;  in amdgpu_fence_emit()
    177  *f = &fence->base;  in amdgpu_fence_emit()
    257  if (!fence)  in amdgpu_fence_process()
    259  r = dma_fence_signal(fence);  in amdgpu_fence_process()
    264  dma_fence_put(fence);  in amdgpu_fence_process()
    295  struct dma_fence *fence, **ptr;  in amdgpu_fence_wait_empty() local
    303  fence = rcu_dereference(*ptr);  in amdgpu_fence_wait_empty()
    304  if (!fence || !dma_fence_get_rcu(fence)) {  in amdgpu_fence_wait_empty()
    [all …]
amdgpu_vcn.c
    322  if (fence)  in amdgpu_vcn_dec_send_msg()
    323  *fence = dma_fence_get(f);  in amdgpu_vcn_dec_send_msg()
    399  struct dma_fence *fence;  in amdgpu_vcn_dec_ring_test_ib() local
    425  dma_fence_put(fence);  in amdgpu_vcn_dec_ring_test_ib()
    509  if (fence)  in amdgpu_vcn_enc_get_create_msg()
    510  *fence = dma_fence_get(f);  in amdgpu_vcn_enc_get_create_msg()
    561  if (fence)  in amdgpu_vcn_enc_get_destroy_msg()
    600  dma_fence_put(fence);  in amdgpu_vcn_enc_ring_test_ib()
    645  struct dma_fence **fence)  in amdgpu_vcn_jpeg_set_reg() argument
    672  if (fence)  in amdgpu_vcn_jpeg_set_reg()
    [all …]
amdgpu_sync.c
    38  struct dma_fence *fence;  member
    115  struct dma_fence *fence)  in amdgpu_sync_keep_later() argument
    121  *keep = dma_fence_get(fence);  in amdgpu_sync_keep_later()
    141  amdgpu_sync_keep_later(&e->fence, f);  in amdgpu_sync_add_later()
    179  e->fence = dma_fence_get(f);  in amdgpu_sync_fence()
    267  struct dma_fence *f = e->fence;  in amdgpu_sync_peek_fence()
    310  f = e->fence;  in amdgpu_sync_get_fence()
    342  f = e->fence;  in amdgpu_sync_clone()
    367  r = dma_fence_wait(e->fence, intr);  in amdgpu_sync_wait()
    372  dma_fence_put(e->fence);  in amdgpu_sync_wait()
    [all …]
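amdgpu_sync is a small container that collects and deduplicates the fences a submission must wait for, both explicit dependencies and everything attached to a buffer's reservation object. A collect-and-wait sketch, with prototypes assumed from the matching Linux amdgpu code and therefore possibly differing in this port:

    static int wait_for_deps(struct amdgpu_device *adev,
                             struct reservation_object *resv,
                             struct dma_fence *extra, void *owner)
    {
        struct amdgpu_sync sync;
        int r;

        amdgpu_sync_create(&sync);

        /* pull every fence currently attached to the buffer */
        r = amdgpu_sync_resv(adev, &sync, resv, owner, false);
        if (!r && extra)
            r = amdgpu_sync_fence(adev, &sync, extra, false);

        if (!r)
            r = amdgpu_sync_wait(&sync, true);      /* interruptible */

        amdgpu_sync_free(&sync);
        return r;
    }
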
amdgpu_cs.c
    1093  } else if (fence) {  in amdgpu_cs_process_fence_dep()
    1373  if (IS_ERR(fence))  in amdgpu_cs_wait_ioctl()
    1375  else if (fence) {  in amdgpu_cs_wait_ioctl()
    1378  r = fence->error;  in amdgpu_cs_wait_ioctl()
    1423  return fence;  in amdgpu_cs_get_fence()
    1436  fence = amdgpu_cs_get_fence(adev, filp, &info->in.fence);  in amdgpu_cs_fence_to_handle_ioctl()
    1437  if (IS_ERR(fence))  in amdgpu_cs_fence_to_handle_ioctl()
    1504  if (IS_ERR(fence))  in amdgpu_cs_wait_all_fences()
    1506  else if (!fence)  in amdgpu_cs_wait_all_fences()
    1517  if (fence->error)  in amdgpu_cs_wait_all_fences()
    [all …]
amdgpu_sa.c
    110  dma_fence_put(sa_bo->fence);  in amdgpu_sa_bo_remove_locked()
    123  if (sa_bo->fence == NULL ||  in amdgpu_sa_bo_try_free()
    124  !dma_fence_is_signaled(sa_bo->fence)) {  in amdgpu_sa_bo_try_free()
    236  fences[i] = sa_bo->fence;  in amdgpu_sa_bo_next_hole()
    259  uint32_t idx = best_bo->fence->context;  in amdgpu_sa_bo_next_hole()
    293  (*sa_bo)->fence = NULL;  in amdgpu_sa_bo_new()
    347  struct dma_fence *fence)  in amdgpu_sa_bo_free() argument
    357  if (fence && !dma_fence_is_signaled(fence)) {  in amdgpu_sa_bo_free()
    360  (*sa_bo)->fence = dma_fence_get(fence);  in amdgpu_sa_bo_free()
    390  if (i->fence)  in amdgpu_sa_bo_dump_debug_info()
    [all …]
amdgpu_test.c
    89  struct dma_fence *fence = NULL;  in amdgpu_do_test_moves() local
    127  size, NULL, &fence, false, false);  in amdgpu_do_test_moves()
    134  r = dma_fence_wait(fence, false);  in amdgpu_do_test_moves()
    140  dma_fence_put(fence);  in amdgpu_do_test_moves()
    141  fence = NULL;  in amdgpu_do_test_moves()
    173  size, NULL, &fence, false, false);  in amdgpu_do_test_moves()
    180  r = dma_fence_wait(fence, false);  in amdgpu_do_test_moves()
    186  dma_fence_put(fence);  in amdgpu_do_test_moves()
    187  fence = NULL;  in amdgpu_do_test_moves()
    233  if (fence)  in amdgpu_do_test_moves()
    [all …]
/dragonfly/sys/dev/drm/include/drm/
drm_syncobj.h
    52  struct dma_fence *fence;  member
    91  struct dma_fence *fence);
    122  struct dma_fence *fence;  in drm_syncobj_fence_get() local
    125  fence = dma_fence_get_rcu_safe(&syncobj->fence);  in drm_syncobj_fence_get()
    128  return fence;  in drm_syncobj_fence_get()
    139  struct dma_fence *fence);
    142  struct dma_fence **fence);
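drm_syncobj_fence_get(), shown above, takes an RCU-safe reference on whatever fence is currently installed in the syncobj, so waiting on a syncobj from kernel code reduces to a plain dma_fence wait; a sketch:

    static long wait_on_syncobj(struct drm_syncobj *syncobj, long timeout)
    {
        struct dma_fence *fence = drm_syncobj_fence_get(syncobj);
        long ret;

        if (!fence)
            return -EINVAL;             /* nothing attached yet */

        ret = dma_fence_wait_timeout(fence, true, timeout);
        dma_fence_put(fence);
        return ret;
    }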