Searched refs:tbo (Results 1 – 25 of 38) sorted by relevance

/dragonfly/sys/dev/drm/amd/amdgpu/
amdgpu_object.c
498 bo->tbo.bdev = &adev->mman.bdev; in amdgpu_bo_do_create()
501 bo->tbo.priority = 1; in amdgpu_bo_do_create()
526 dma_fence_put(bo->tbo.moving); in amdgpu_bo_do_create()
544 ww_mutex_unlock(&bo->tbo.resv->lock); in amdgpu_bo_do_create()
566 bp.resv = bo->tbo.resv; in amdgpu_bo_create_shadow()
782 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap); in amdgpu_bo_kmap()
834 ttm_bo_get(&bo->tbo); in amdgpu_bo_ref()
846 struct ttm_buffer_object *tbo; in amdgpu_bo_unref() local
851 tbo = &((*bo)->tbo); in amdgpu_bo_unref()
852 ttm_bo_put(tbo); in amdgpu_bo_unref()
[all …]
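
The amdgpu_bo_ref()/amdgpu_bo_unref() hits above show how the driver refcounts its buffer objects purely through the embedded TTM object. A minimal sketch reconstructed from those fragments (surrounding declarations are elided, and the final *bo = NULL is an assumption about how the double pointer is used, not text taken from the file):

    struct amdgpu_bo *amdgpu_bo_ref(struct amdgpu_bo *bo)
    {
    	if (bo == NULL)
    		return NULL;

    	ttm_bo_get(&bo->tbo);	/* take a TTM reference on the embedded object */
    	return bo;
    }

    void amdgpu_bo_unref(struct amdgpu_bo **bo)
    {
    	struct ttm_buffer_object *tbo;

    	if ((*bo) == NULL)
    		return;

    	tbo = &((*bo)->tbo);
    	ttm_bo_put(tbo);	/* drop the reference; TTM frees the BO on the last put */
    	*bo = NULL;		/* assumption: callers expect their pointer cleared */
    }
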
amdgpu_object.h
83 struct ttm_buffer_object tbo; member
112 return container_of(tbo, struct amdgpu_bo, tbo); in ttm_to_amdgpu_bo()
153 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_reserve()
156 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL); in amdgpu_bo_reserve()
167 ttm_bo_unreserve(&bo->tbo); in amdgpu_bo_unreserve()
172 return bo->tbo.num_pages << PAGE_SHIFT; in amdgpu_bo_size()
193 return drm_vma_node_offset_addr(&bo->tbo.vma_node); in amdgpu_bo_mmap_offset()
202 switch (bo->tbo.mem.mem_type) { in amdgpu_bo_gpu_accessible()
216 struct drm_mm_node *node = bo->tbo.mem.mm_node; in amdgpu_bo_in_cpu_visible_vram()
219 if (bo->tbo.mem.mem_type != TTM_PL_VRAM) in amdgpu_bo_in_cpu_visible_vram()
[all …]
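
The header hits above spell out the pattern behind most references in this listing: struct amdgpu_bo embeds a struct ttm_buffer_object named tbo, TTM callback pointers are converted back with container_of(), and simple properties are derived from the embedded object. A minimal sketch of those fragments, with all other struct members elided:

    #include <drm/ttm/ttm_bo_api.h>

    struct amdgpu_bo {
    	/* ... placements, GEM base, etc. elided ... */
    	struct ttm_buffer_object	tbo;	/* the embedded TTM object (line 83 above) */
    };

    /* TTM hands back &bo->tbo; recover the containing amdgpu_bo */
    static inline struct amdgpu_bo *ttm_to_amdgpu_bo(struct ttm_buffer_object *tbo)
    {
    	return container_of(tbo, struct amdgpu_bo, tbo);
    }

    /* object size in bytes, derived from the TTM page count (line 172 above) */
    static inline unsigned long amdgpu_bo_size(struct amdgpu_bo *bo)
    {
    	return bo->tbo.num_pages << PAGE_SHIFT;
    }
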
amdgpu_prime.c
55 int npages = bo->tbo.num_pages; in amdgpu_gem_prime_get_sg_table()
57 return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages); in amdgpu_gem_prime_get_sg_table()
74 ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, in amdgpu_gem_prime_vmap()
113 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_gem_prime_mmap()
127 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) || in amdgpu_gem_prime_mmap()
181 bo->tbo.sg = sg; in amdgpu_gem_prime_import_sg_table()
182 bo->tbo.ttm->sg = sg; in amdgpu_gem_prime_import_sg_table()
277 r = __reservation_object_make_exclusive(bo->tbo.resv);
340 return bo->tbo.resv; in amdgpu_gem_prime_res_obj()
377 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
[all …]
amdgpu_gtt_mgr.c
36 struct ttm_buffer_object *tbo; member
111 struct ttm_buffer_object *tbo, in amdgpu_gtt_mgr_alloc() argument
162 struct ttm_buffer_object *tbo, in amdgpu_gtt_mgr_new() argument
171 if ((&tbo->mem == mem || tbo->mem.mem_type != TTM_PL_TT) && in amdgpu_gtt_mgr_new()
187 node->tbo = tbo; in amdgpu_gtt_mgr_new()
191 r = amdgpu_gtt_mgr_alloc(man, tbo, place, mem); in amdgpu_gtt_mgr_new()
263 r = amdgpu_ttm_recover_gart(node->tbo); in amdgpu_gtt_mgr_recover()
amdgpu_gem.c
131 mm = amdgpu_ttm_tt_get_usermm(abo->tbo.ttm); in amdgpu_gem_object_open()
136 abo->tbo.resv != vm->root.base.bo->tbo.resv) in amdgpu_gem_object_open()
171 tv.bo = &bo->tbo; in amdgpu_gem_object_close()
263 resv = vm->root.base.bo->tbo.resv; in amdgpu_gem_create_ioctl()
341 r = amdgpu_ttm_tt_get_user_pages(bo->tbo.ttm, in amdgpu_gem_userptr_ioctl()
342 bo->tbo.ttm->pages); in amdgpu_gem_userptr_ioctl()
351 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_gem_userptr_ioctl()
367 release_pages(bo->tbo.ttm->pages, bo->tbo.ttm->num_pages); in amdgpu_gem_userptr_ioctl()
388 if (amdgpu_ttm_tt_get_usermm(robj->tbo.ttm) || in amdgpu_mode_dumb_mmap()
626 tv.bo = &abo->tbo; in amdgpu_gem_va_ioctl()
[all …]
amdgpu_vm.c
224 if (bo->tbo.type == ttm_bo_type_kernel) in amdgpu_vm_bo_base_init()
227 if (bo->tbo.resv != vm->root.base.bo->tbo.resv) in amdgpu_vm_bo_base_init()
330 entry->tv.bo = &entry->robj->tbo; in amdgpu_vm_get_pd_bo()
366 ttm_bo_move_to_lru_tail(&bo->tbo); in amdgpu_vm_validate_pt_bos()
372 if (bo->tbo.type != ttm_bo_type_kernel) { in amdgpu_vm_validate_pt_bos()
388 ttm_bo_move_to_lru_tail(&bo->tbo); in amdgpu_vm_validate_pt_bos()
1733 mem = &bo->tbo.mem; in amdgpu_vm_bo_update()
1747 if (clear || (bo && bo->tbo.resv == vm->root.base.bo->tbo.resv)) in amdgpu_vm_bo_update()
1782 if (bo && bo->tbo.resv == vm->root.base.bo->tbo.resv) { in amdgpu_vm_bo_update()
2112 if (bo && bo->tbo.resv == vm->root.base.bo->tbo.resv && in amdgpu_vm_bo_insert_map()
[all …]
amdgpu_cs.c
50 p->uf_entry.tv.bo = &p->uf_entry.robj->tbo; in amdgpu_cs_user_fence_chunk()
398 .resv = bo->tbo.resv, in amdgpu_cs_bo_validate()
430 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_cs_bo_validate()
535 usermm = amdgpu_ttm_tt_get_usermm(bo->tbo.ttm); in amdgpu_cs_list_validate()
549 amdgpu_ttm_tt_set_user_pages(bo->tbo.ttm, in amdgpu_cs_list_validate()
626 if (amdgpu_ttm_tt_userptr_invalidated(bo->tbo.ttm, in amdgpu_cs_parser_bos()
633 bo->tbo.ttm->num_pages); in amdgpu_cs_parser_bos()
662 struct ttm_tt *ttm = e->robj->tbo.ttm; in amdgpu_cs_parser_bos()
739 r = amdgpu_ttm_alloc_gart(&uf->tbo); in amdgpu_cs_parser_bos()
754 e->robj->tbo.ttm->num_pages); in amdgpu_cs_parser_bos()
[all …]
amdgpu_mn.c
225 if (!amdgpu_ttm_tt_affect_userptr(bo->tbo.ttm, start, end)) in amdgpu_mn_invalidate_node()
228 r = reservation_object_wait_timeout_rcu(bo->tbo.resv, in amdgpu_mn_invalidate_node()
233 amdgpu_ttm_tt_mark_user_pages(bo->tbo.ttm); in amdgpu_mn_invalidate_node()
327 if (amdgpu_ttm_tt_affect_userptr(bo->tbo.ttm, in amdgpu_mn_invalidate_range_start_hsa()
448 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_mn_register()
506 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_mn_unregister()
amdgpu_ttm.c
997 struct ttm_buffer_object *tbo,
1004 struct ttm_tt *ttm = tbo->ttm; in amdgpu_ttm_gart_bind()
1153 if (!tbo->ttm) in amdgpu_ttm_recover_gart()
1156 flags = amdgpu_ttm_tt_pte_flags(adev, tbo->ttm, &tbo->mem); in amdgpu_ttm_recover_gart()
1687 ttm_bo_mem_put(&bo->tbo, &bo->tbo.mem); in amdgpu_ttm_fw_reserve_vram_init()
1689 &bo->tbo.mem, &ctx); in amdgpu_ttm_fw_reserve_vram_init()
2139 num_pages = bo->tbo.num_pages; in amdgpu_fill_buffer()
2140 mm_node = bo->tbo.mem.mm_node; in amdgpu_fill_buffer()
2167 num_pages = bo->tbo.num_pages; in amdgpu_fill_buffer()
2168 mm_node = bo->tbo.mem.mm_node; in amdgpu_fill_buffer()
[all …]
amdgpu_vram_mgr.c
109 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_vram_mgr_bo_visible_size()
110 struct ttm_mem_reg *mem = &bo->tbo.mem; in amdgpu_vram_mgr_bo_visible_size()
138 struct ttm_buffer_object *tbo, in amdgpu_vram_mgr_new() argument
amdgpu_benchmark.c
103 r = amdgpu_ttm_alloc_gart(&sobj->tbo); in amdgpu_benchmark_move()
122 r = amdgpu_ttm_alloc_gart(&dobj->tbo); in amdgpu_benchmark_move()
amdgpu_bo_list.c
103 usermm = amdgpu_ttm_tt_get_usermm(bo->tbo.ttm); in amdgpu_bo_list_create()
118 entry->tv.bo = &entry->robj->tbo; in amdgpu_bo_list_create()
amdgpu_ttm.h
104 int amdgpu_ttm_recover_gart(struct ttm_buffer_object *tbo);
amdgpu_uvd.c
500 r = ttm_bo_validate(&bo->tbo, &bo->placement, &tctx); in amdgpu_uvd_cs_pass1()
1039 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_uvd_send_msg()
1074 r = reservation_object_wait_timeout_rcu(bo->tbo.resv, in amdgpu_uvd_send_msg()
1086 r = amdgpu_sync_resv(adev, &job->sync, bo->tbo.resv, in amdgpu_uvd_send_msg()
amdgpu_test.c
106 r = amdgpu_ttm_alloc_gart(&gtt_obj[i]->tbo); in amdgpu_do_test_moves()
amdgpu_display.c
197 r = amdgpu_ttm_alloc_gart(&new_abo->tbo); in amdgpu_display_crtc_page_flip_target()
203 r = reservation_object_get_fences_rcu(new_abo->tbo.resv, &work->excl, in amdgpu_display_crtc_page_flip_target()
/dragonfly/sys/dev/drm/radeon/
radeon_object.c
73 bo = container_of(tbo, struct radeon_bo, tbo); in radeon_ttm_bo_destroy()
285 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap); in radeon_bo_kmap()
311 ttm_bo_reference(&bo->tbo); in radeon_bo_ref()
317 struct ttm_buffer_object *tbo; in radeon_bo_unref() local
323 tbo = &((*bo)->tbo); in radeon_bo_unref()
324 ttm_bo_unref(&tbo); in radeon_bo_unref()
325 if (tbo == NULL) in radeon_bo_unref()
410 if (bo->tbo.mem.mem_type == TTM_PL_VRAM) in radeon_bo_unpin()
643 ttm_bo_unmap_virtual(&old_object->tbo); in radeon_bo_get_surface_reg()
853 *mem_type = bo->tbo.mem.mem_type; in radeon_bo_wait()
[all …]
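
The radeon hits follow the same embedding pattern, but still go through the older TTM reference API: ttm_bo_reference() instead of ttm_bo_get(), and ttm_bo_unref(), which clears the pointer it is handed, instead of ttm_bo_put(). A minimal sketch of radeon_bo_unref() reconstructed from the fragments above, not copied verbatim from the file:

    void radeon_bo_unref(struct radeon_bo **bo)
    {
    	struct ttm_buffer_object *tbo;

    	if ((*bo) == NULL)
    		return;

    	tbo = &((*bo)->tbo);
    	ttm_bo_unref(&tbo);	/* older API: NULLs tbo once the reference is dropped */
    	if (tbo == NULL)	/* the embedded object is gone, so the radeon_bo is too */
    		*bo = NULL;
    }
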
radeon_object.h
68 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL); in radeon_bo_reserve()
79 ttm_bo_unreserve(&bo->tbo); in radeon_bo_unreserve()
93 return bo->tbo.offset; in radeon_bo_gpu_offset()
98 return bo->tbo.num_pages << PAGE_SHIFT; in radeon_bo_size()
103 return (bo->tbo.num_pages << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE; in radeon_bo_ngpu_pages()
108 return (bo->tbo.mem.page_alignment << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE; in radeon_bo_gpu_page_alignment()
119 return drm_vma_node_offset_addr(&bo->tbo.vma_node); in radeon_bo_mmap_offset()
radeon_prime.c
48 int npages = bo->tbo.num_pages; in radeon_gem_prime_get_sg_table()
50 return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages); in radeon_gem_prime_get_sg_table()
58 ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, in radeon_gem_prime_vmap()
135 return bo->tbo.resv; in radeon_gem_prime_res_obj()
143 if (radeon_ttm_tt_has_userptr(bo->tbo.ttm)) in radeon_gem_prime_export()
radeon_gem.c
40 drm_prime_gem_destroy(&robj->gem_base, robj->tbo.sg); in radeon_gem_object_free()
333 r = radeon_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags);
352 r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false);
424 if (radeon_ttm_tt_has_userptr(robj->tbo.ttm)) { in radeon_mode_dumb_mmap()
457 r = reservation_object_test_signaled_rcu(robj->tbo.resv, true); in radeon_gem_busy_ioctl()
463 cur_placement = READ_ONCE(robj->tbo.mem.mem_type); in radeon_gem_busy_ioctl()
493 cur_placement = READ_ONCE(robj->tbo.mem.mem_type); in radeon_gem_wait_idle_ioctl()
566 tv.bo = &bo_va->bo->tbo; in radeon_gem_va_update_vm()
592 r = radeon_vm_bo_update(rdev, bo_va, &bo_va->bo->tbo.mem); in radeon_gem_va_update_vm()
729 if (radeon_ttm_tt_has_userptr(robj->tbo.ttm)) in radeon_gem_op_ioctl()
[all …]
radeon_mn.c
145 if (!bo->tbo.ttm || bo->tbo.ttm->state != tt_bound) in radeon_mn_invalidate_range_start()
154 r = reservation_object_wait_timeout_rcu(bo->tbo.resv, in radeon_mn_invalidate_range_start()
160 r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false); in radeon_mn_invalidate_range_start()
radeon_cs.c
157 if (radeon_ttm_tt_has_userptr(p->relocs[i].robj->tbo.ttm)) { in radeon_cs_parser_relocs()
181 p->relocs[i].tv.bo = &p->relocs[i].robj->tbo; in radeon_cs_parser_relocs()
255 resv = reloc->robj->tbo.resv; in radeon_cs_sync_rings()
399 return (int)la->robj->tbo.num_pages - (int)lb->robj->tbo.num_pages; in cmp_size_smaller_first()
513 &rdev->ring_tmp_bo.bo->tbo.mem); in radeon_bo_vm_update_pte()
527 r = radeon_vm_bo_update(rdev, bo_va, &bo->tbo.mem); in radeon_bo_vm_update_pte()
radeon_test.c
122 vram_obj->tbo.resv); in radeon_do_test_moves()
126 vram_obj->tbo.resv); in radeon_do_test_moves()
173 vram_obj->tbo.resv); in radeon_do_test_moves()
177 vram_obj->tbo.resv); in radeon_do_test_moves()
radeon_vm.c
144 list[0].tv.bo = &vm->page_directory->tbo; in radeon_vm_get_bos()
156 list[idx].tv.bo = &list[idx].robj->tbo; in radeon_vm_get_bos()
400 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in radeon_vm_clear_bo()
705 radeon_sync_resv(rdev, &ib.sync, pd->tbo.resv, true); in radeon_vm_update_page_directory()
833 radeon_sync_resv(rdev, &ib->sync, pt->tbo.resv, true); in radeon_vm_update_ptes()
834 r = reservation_object_reserve_shared(pt->tbo.resv); in radeon_vm_update_ptes()
945 if (bo_va->bo && radeon_ttm_tt_is_readonly(bo_va->bo->tbo.ttm)) in radeon_vm_bo_update()
radeon_benchmark.c
125 dobj->tbo.resv); in radeon_benchmark_move()
136 dobj->tbo.resv); in radeon_benchmark_move()
