Searched refs:resv (Results 1 – 25 of 78) sorted by relevance

/dragonfly/sys/vfs/hammer/
hammer_blockmap.c
292 if (resv) { in hammer_blockmap_alloc()
303 ++resv->refs; in hammer_blockmap_alloc()
349 if (resv) { in hammer_blockmap_alloc()
450 resv = NULL; in hammer_blockmap_reserve()
570 if (resv) { in hammer_blockmap_reserve()
592 resv = resx; in hammer_blockmap_reserve()
628 return(resv); in hammer_blockmap_reserve()
671 (intmax_t)resv->zone_offset, resv->zone); in hammer_blockmap_reserve_complete()
705 resv = kmalloc(sizeof(*resv), hmp->m_misc, in hammer_reserve_setdelay_offset()
1109 if (resv) in hammer_blockmap_finalize()
[all …]
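
Note: the hammer_blockmap.c hits above show HAMMER's refcounted reservation pattern: an allocation that lands in an already-reserved big-block takes a reference on the existing resv, otherwise a new record is kmalloc'd and inserted. A minimal self-contained sketch of that lookup-or-allocate pattern follows; the struct layout, the toy table, and resv_get() are illustrative assumptions, not the HAMMER API.

    #include <stdint.h>
    #include <stdlib.h>

    /* Illustrative reservation record; fields mirror the hits above. */
    struct resv {
        int64_t zone_offset;    /* big-block offset being reserved */
        int     zone;           /* zone the offset belongs to */
        int     refs;           /* reference count */
    };

    #define NRESV 64
    static struct resv *resv_tab[NRESV];    /* toy index; HAMMER uses an RB tree */

    static struct resv *
    resv_get(int64_t zone_offset, int zone)
    {
        struct resv *r;
        int i, slot = -1;

        for (i = 0; i < NRESV; i++) {
            r = resv_tab[i];
            if (r == NULL) {
                if (slot < 0)
                    slot = i;
            } else if (r->zone_offset == zone_offset && r->zone == zone) {
                ++r->refs;              /* existing reservation: add a ref */
                return r;
            }
        }
        if (slot < 0)
            return NULL;                /* table full */
        r = calloc(1, sizeof(*r));      /* stands in for kmalloc(hmp->m_misc) */
        if (r == NULL)
            return NULL;
        r->zone_offset = zone_offset;
        r->zone = zone;
        r->refs = 1;
        resv_tab[slot] = r;
        return r;
    }
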
/dragonfly/sys/dev/drm/i915/
i915_gem_batch_pool.c
117 struct reservation_object *resv = obj->resv; in i915_gem_batch_pool_get() local
119 if (!reservation_object_test_signaled_rcu(resv, true)) in i915_gem_batch_pool_get()
133 if (rcu_access_pointer(resv->fence)) { in i915_gem_batch_pool_get()
134 reservation_object_lock(resv, NULL); in i915_gem_batch_pool_get()
135 reservation_object_add_excl_fence(resv, NULL); in i915_gem_batch_pool_get()
136 reservation_object_unlock(resv); in i915_gem_batch_pool_get()
140 GEM_BUG_ON(!reservation_object_test_signaled_rcu(obj->resv, in i915_gem_batch_pool_get()
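
Note: the batch-pool hits above are the "reap signaled fences" idiom: test whether every fence on the reservation object has signaled, and if stale fence pointers are still attached, drop them by installing a NULL exclusive fence. A minimal sketch using the same pre-dma_resv API carried by this DRM port; the function name is an assumption.

    #include <linux/reservation.h>

    static bool pool_obj_is_idle(struct reservation_object *resv)
    {
        /* Busy if any attached fence, shared or exclusive, is unsignaled. */
        if (!reservation_object_test_signaled_rcu(resv, true))
            return false;

        /*
         * Everything has signaled, but signaled fences may still be
         * attached.  Installing a NULL exclusive fence releases them
         * all, keeping later idleness checks cheap.
         */
        if (rcu_access_pointer(resv->fence)) {
            reservation_object_lock(resv, NULL);
            reservation_object_add_excl_fence(resv, NULL);
            reservation_object_unlock(resv);
        }
        return true;
    }
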
i915_gem_clflush.c
168 obj->resv, NULL, in i915_gem_clflush_object()
172 reservation_object_lock(obj->resv, NULL); in i915_gem_clflush_object()
173 reservation_object_add_excl_fence(obj->resv, &clflush->dma); in i915_gem_clflush_object()
174 reservation_object_unlock(obj->resv); in i915_gem_clflush_object()
i915_gem_object.h
244 struct reservation_object *resv; member
331 reservation_object_lock(obj->resv, NULL); in i915_gem_object_lock()
336 reservation_object_unlock(obj->resv); in i915_gem_object_unlock()
433 fence = reservation_object_get_excl_rcu(obj->resv); in i915_gem_object_last_write_engine()
i915_gem_dmabuf.c
252 exp_info.resv = obj->resv; in i915_gem_prime_export()
330 obj->resv = dma_buf->resv; in i915_gem_prime_import()
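
Note: the two dmabuf hits show the central PRIME invariant: export publishes the GEM object's reservation object, and import adopts the exporter's, so both sides of the share fence against the same lock rather than duplicating state. A sketch under assumed driver types (my_obj and my_dmabuf_ops are illustrative, with callbacks elided):

    #include <linux/dma-buf.h>
    #include <linux/reservation.h>

    struct my_obj {                         /* assumed driver object */
        struct reservation_object *resv;
    };

    static const struct dma_buf_ops my_dmabuf_ops;  /* assumed; callbacks elided */

    static struct dma_buf *
    my_prime_export(struct my_obj *obj, size_t size, int flags)
    {
        DEFINE_DMA_BUF_EXPORT_INFO(exp_info);

        exp_info.ops   = &my_dmabuf_ops;
        exp_info.size  = size;
        exp_info.flags = flags;
        exp_info.priv  = obj;
        exp_info.resv  = obj->resv;         /* publish, don't duplicate */

        return dma_buf_export(&exp_info);
    }

    static void my_prime_import(struct my_obj *obj, struct dma_buf *buf)
    {
        obj->resv = buf->resv;              /* adopt the exporter's resv */
    }
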
/dragonfly/sys/dev/drm/ttm/
ttm_bo.c
356 if (bo->resv == &bo->ttm_resv) in ttm_bo_individualize_resv()
467 resv = bo->resv; in ttm_bo_cleanup_refs()
469 resv = &bo->ttm_resv; in ttm_bo_cleanup_refs()
705 if (bo->resv == ctx->resv) { in ttm_bo_evict_swapout_allowable()
1180 if (resv) { in ttm_bo_init_reserved()
1181 bo->resv = resv; in ttm_bo_init_reserved()
1184 bo->resv = &bo->ttm_resv; in ttm_bo_init_reserved()
1203 if (!resv) { in ttm_bo_init_reserved()
1212 if (!resv) in ttm_bo_init_reserved()
1246 sg, resv, destroy); in ttm_bo_init()
[all …]
ttm_execbuf_util.c
131 ret = reservation_object_reserve_shared(bo->resv); in ttm_eu_reserve_buffers()
143 ret = ww_mutex_lock_slow_interruptible(&bo->resv->lock, in ttm_eu_reserve_buffers()
146 ww_mutex_lock_slow(&bo->resv->lock, ticket); in ttm_eu_reserve_buffers()
151 ret = reservation_object_reserve_shared(bo->resv); in ttm_eu_reserve_buffers()
202 reservation_object_add_shared_fence(bo->resv, fence); in ttm_eu_fence_buffer_objects()
204 reservation_object_add_excl_fence(bo->resv, fence); in ttm_eu_fence_buffer_objects()
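
Note: ttm_eu_reserve_buffers() above is an instance of the wound/wait locking protocol: lock every buffer in the set, and when ww_mutex_lock() reports -EDEADLK, release everything, take the contended lock with the _slow variant, then retry. A minimal sketch of that loop; my_bo_class and the flat array interface are assumptions, not the TTM implementation.

    #include <linux/ww_mutex.h>

    static DEFINE_WW_CLASS(my_bo_class);    /* assumed lock class */

    static void lock_all(struct ww_mutex **locks, int n)
    {
        struct ww_acquire_ctx ctx;
        int i, contended = -1;

        ww_acquire_init(&ctx, &my_bo_class);
    retry:
        for (i = 0; i < n; i++) {
            if (i == contended) {           /* already held via the slow path */
                contended = -1;
                continue;
            }
            if (ww_mutex_lock(locks[i], &ctx) == -EDEADLK) {
                int failed = i;

                while (i--)                 /* back off: drop all held locks */
                    ww_mutex_unlock(locks[i]);
                if (contended != -1)
                    ww_mutex_unlock(locks[contended]);

                /* sleep until the contended lock is free, then retry the set */
                ww_mutex_lock_slow(locks[failed], &ctx);
                contended = failed;
                goto retry;
            }
        }
        ww_acquire_done(&ctx);
    }
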
ttm_bo_util.c
500 fbo->base.resv = &fbo->base.ttm_resv; in ttm_buffer_object_transfer()
501 reservation_object_init(fbo->base.resv); in ttm_buffer_object_transfer()
502 ret = ww_mutex_trylock(&fbo->base.resv->lock); in ttm_buffer_object_transfer()
675 reservation_object_add_excl_fence(bo->resv, fence); in ttm_bo_move_accel_cleanup()
702 reservation_object_add_excl_fence(ghost_obj->resv, fence); in ttm_bo_move_accel_cleanup()
738 reservation_object_add_excl_fence(bo->resv, fence); in ttm_bo_pipeline_move()
758 reservation_object_add_excl_fence(ghost_obj->resv, fence); in ttm_bo_pipeline_move()
827 ret = reservation_object_copy_fences(ghost->resv, bo->resv); in ttm_bo_pipeline_gutting()
/dragonfly/sys/dev/drm/amd/amdgpu/
amdgpu_object.c
268 bp.resv = NULL; in amdgpu_bo_create_reserved()
435 .resv = bp->resv, in amdgpu_bo_do_create()
505 NULL, bp->resv, &amdgpu_bo_destroy); in amdgpu_bo_do_create()
530 if (!bp->resv) in amdgpu_bo_do_create()
543 if (!bp->resv) in amdgpu_bo_do_create()
544 ww_mutex_unlock(&bo->tbo.resv->lock); in amdgpu_bo_do_create()
566 bp.resv = bo->tbo.resv; in amdgpu_bo_create_shadow()
606 if (!bp->resv) in amdgpu_bo_create()
612 if (!bp->resv) in amdgpu_bo_create()
722 struct reservation_object *resv, in amdgpu_bo_restore_from_shadow() argument
[all …]
amdgpu_prime.c
163 struct reservation_object *resv = attach->dmabuf->resv; in amdgpu_gem_prime_import_sg_table() local
175 bp.resv = resv; in amdgpu_gem_prime_import_sg_table()
176 ww_mutex_lock(&resv->lock, NULL); in amdgpu_gem_prime_import_sg_table()
188 ww_mutex_unlock(&resv->lock); in amdgpu_gem_prime_import_sg_table()
192 ww_mutex_unlock(&resv->lock); in amdgpu_gem_prime_import_sg_table()
277 r = __reservation_object_make_exclusive(bo->tbo.resv);
340 return bo->tbo.resv; in amdgpu_gem_prime_res_obj()
amdgpu_vm.c
227 if (bo->tbo.resv != vm->root.base.bo->tbo.resv) in amdgpu_vm_bo_base_init()
573 struct reservation_object *resv = vm->root.base.bo->tbo.resv; in amdgpu_vm_alloc_levels() local
586 bp.resv = resv; in amdgpu_vm_alloc_levels()
1782 if (bo && bo->tbo.resv == vm->root.base.bo->tbo.resv) { in amdgpu_vm_bo_update()
1918 struct reservation_object *resv = vm->root.base.bo->tbo.resv; in amdgpu_vm_prt_fini() local
2026 struct reservation_object *resv = bo_va->base.bo->tbo.resv; in amdgpu_vm_handle_moved() local
2029 if (resv == vm->root.base.bo->tbo.resv) in amdgpu_vm_handle_moved()
2046 if (!clear && resv != vm->root.base.bo->tbo.resv) in amdgpu_vm_handle_moved()
2112 if (bo && bo->tbo.resv == vm->root.base.bo->tbo.resv && in amdgpu_vm_bo_insert_map()
2521 if (evicted && bo->tbo.resv == vm->root.base.bo->tbo.resv) { in amdgpu_vm_bo_invalidate()
[all …]
amdgpu_sync.c
194 struct reservation_object *resv, in amdgpu_sync_resv() argument
203 if (resv == NULL) in amdgpu_sync_resv()
207 f = reservation_object_get_excl(resv); in amdgpu_sync_resv()
210 flist = reservation_object_get_list(resv); in amdgpu_sync_resv()
216 reservation_object_held(resv)); in amdgpu_sync_resv()
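
Note: amdgpu_sync_resv() above walks every fence on a reservation object: the single exclusive (write) fence via reservation_object_get_excl(), then each shared (read) fence from the list, dereferenced under the held resv lock, which is exactly what the reservation_object_held() condition asserts. A sketch with an assumed collect() callback standing in for the driver's sync bookkeeping:

    #include <linux/reservation.h>

    static void sync_to_resv(struct reservation_object *resv,
                             void (*collect)(struct dma_fence *f))
    {
        struct reservation_object_list *flist;
        struct dma_fence *f;
        unsigned int i;

        /* the exclusive (write) fence, if any */
        f = reservation_object_get_excl(resv);
        if (f)
            collect(f);

        /* every shared (read) fence */
        flist = reservation_object_get_list(resv);
        if (!flist)
            return;
        for (i = 0; i < flist->shared_count; ++i) {
            f = rcu_dereference_protected(flist->shared[i],
                                          reservation_object_held(resv));
            collect(f);
        }
    }
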
amdgpu_ttm.h
88 struct reservation_object *resv,
95 struct reservation_object *resv,
99 struct reservation_object *resv,
amdgpu_gem.c
48 struct reservation_object *resv, in amdgpu_gem_object_create() argument
65 bp.resv = resv; in amdgpu_gem_object_create()
136 abo->tbo.resv != vm->root.base.bo->tbo.resv) in amdgpu_gem_object_open()
217 struct reservation_object *resv = NULL; in amdgpu_gem_create_ioctl() local
263 resv = vm->root.base.bo->tbo.resv; in amdgpu_gem_create_ioctl()
268 flags, ttm_bo_type_device, resv, &gobj); in amdgpu_gem_create_ioctl()
451 ret = reservation_object_wait_timeout_rcu(robj->tbo.resv, true, true, in amdgpu_gem_wait_idle_ioctl()
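
Note: the wait-idle hit uses reservation_object_wait_timeout_rcu(), which returns the remaining timeout (> 0) when all fences signal, 0 on timeout, or a negative errno. A sketch of the usual translation of that tri-state result; the wrapper name and the timeout_ns parameter are assumptions.

    #include <linux/reservation.h>
    #include <linux/jiffies.h>

    static int wait_bo_idle(struct reservation_object *resv, u64 timeout_ns)
    {
        long ret;

        /* wait_all = true (shared + exclusive), interruptible = true */
        ret = reservation_object_wait_timeout_rcu(resv, true, true,
                                                  nsecs_to_jiffies(timeout_ns));
        if (ret == 0)
            return -ETIME;      /* timed out with fences still pending */
        if (ret < 0)
            return ret;         /* e.g. -ERESTARTSYS on a signal */
        return 0;               /* all fences signaled */
    }
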
amdgpu_object.h
44 struct reservation_object *resv; member
286 struct reservation_object *resv,
292 struct reservation_object *resv,
/dragonfly/sys/dev/drm/radeon/
radeon_prime.c
77 struct reservation_object *resv = attach->dmabuf->resv; in radeon_gem_prime_import_sg_table() local
82 ww_mutex_lock(&resv->lock, NULL); in radeon_gem_prime_import_sg_table()
84 RADEON_GEM_DOMAIN_GTT, 0, sg, resv, &bo); in radeon_gem_prime_import_sg_table()
85 ww_mutex_unlock(&resv->lock); in radeon_gem_prime_import_sg_table()
135 return bo->tbo.resv; in radeon_gem_prime_res_obj()
radeon_benchmark.c
38 struct reservation_object *resv) in radeon_benchmark_do_move() argument
51 resv); in radeon_benchmark_do_move()
56 resv); in radeon_benchmark_do_move()
125 dobj->tbo.resv); in radeon_benchmark_move()
136 dobj->tbo.resv); in radeon_benchmark_move()
radeon_sync.c
91 struct reservation_object *resv, in radeon_sync_resv() argument
101 f = reservation_object_get_excl(resv); in radeon_sync_resv()
108 flist = reservation_object_get_list(resv); in radeon_sync_resv()
114 reservation_object_held(resv)); in radeon_sync_resv()
radeon_object.c
180 struct reservation_object *resv, in radeon_bo_create() argument
262 acc_size, sg, resv, &radeon_ttm_bo_destroy); in radeon_bo_create()
611 lockdep_assert_held(&bo->tbo.resv->lock.base); in radeon_bo_get_surface_reg()
737 lockdep_assert_held(&bo->tbo.resv->lock.base); in radeon_bo_get_tiling_flags()
749 lockdep_assert_held(&bo->tbo.resv->lock.base); in radeon_bo_check_tiling()
871 struct reservation_object *resv = bo->tbo.resv; in radeon_bo_fence() local
874 reservation_object_add_shared_fence(resv, &fence->base); in radeon_bo_fence()
876 reservation_object_add_excl_fence(resv, &fence->base); in radeon_bo_fence()
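
Note: radeon_bo_fence() above picks between the two fence slots: shared for reads (many may coexist) and exclusive for writes (which supersedes all earlier fences). A minimal sketch; the caller is assumed to hold the reservation lock and, for the shared case, to have guaranteed a free slot with reservation_object_reserve_shared() beforehand.

    #include <linux/reservation.h>

    static void bo_fence(struct reservation_object *resv,
                         struct dma_fence *fence, bool shared)
    {
        if (shared)
            reservation_object_add_shared_fence(resv, fence);
        else
            reservation_object_add_excl_fence(resv, fence);
    }
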
rv770_dma.c
45 struct reservation_object *resv) in rv770_copy_dma() argument
66 radeon_sync_resv(rdev, &sync, resv, false); in rv770_copy_dma()
/dragonfly/sys/dev/drm/include/drm/ttm/
ttm_bo_driver.h
633 success = ww_mutex_trylock(&bo->resv->lock); in __ttm_bo_reserve()
638 ret = ww_mutex_lock_interruptible(&bo->resv->lock, ticket); in __ttm_bo_reserve()
640 ret = ww_mutex_lock(&bo->resv->lock, ticket); in __ttm_bo_reserve()
724 ret = ww_mutex_lock_slow_interruptible(&bo->resv->lock, in ttm_bo_reserve_slowpath()
727 ww_mutex_lock_slow(&bo->resv->lock, ticket); in ttm_bo_reserve_slowpath()
746 ww_mutex_unlock(&bo->resv->lock); in __ttm_bo_unreserve()
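
Note: __ttm_bo_reserve() above selects between three ww_mutex acquisition modes depending on caller flags: opportunistic trylock, interruptible sleep, or uninterruptible sleep. A simplified sketch; the real helper also carries the ticket/EDEADLK bookkeeping that ttm_bo_reserve_slowpath() resolves.

    #include <linux/ww_mutex.h>

    static int reserve(struct ww_mutex *lock, bool no_wait,
                       bool interruptible, struct ww_acquire_ctx *ticket)
    {
        if (no_wait)
            /* trylock returns 1 on success, 0 on contention */
            return ww_mutex_trylock(lock) ? 0 : -EBUSY;

        if (interruptible)
            return ww_mutex_lock_interruptible(lock, ticket);

        return ww_mutex_lock(lock, ticket);
    }
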
ttm_bo_api.h
233 struct reservation_object *resv; member
278 struct reservation_object *resv; member
529 struct reservation_object *resv,
569 struct sg_table *sg, struct reservation_object *resv,
/dragonfly/contrib/gcc-4.7/gcc/
genattr.c
126 rtx resv; in find_tune_attr() local
128 FOR_EACH_VEC_ELT (rtx, reservations, j, resv) in find_tune_attr()
129 if (! check_tune_attr (XSTR (attr, 0), XEXP (resv, 2))) in find_tune_attr()
/dragonfly/contrib/gcc-8.0/gcc/
genattr.c
126 rtx resv; in find_tune_attr() local
128 FOR_EACH_VEC_ELT (reservations, j, resv) in find_tune_attr()
129 if (! check_tune_attr (XSTR (attr, 0), XEXP (resv, 2))) in find_tune_attr()
/dragonfly/sys/dev/drm/include/linux/
dma-buf.h
64 struct reservation_object *resv; member
82 struct reservation_object *resv; member
