Cross-reference matches for the identifier "tbo", grouped by directory and file. Line numbers are positions within each file.

/openbsd/sys/dev/pci/drm/amd/amdgpu/
amdgpu_object.c
      64  drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);  in amdgpu_bo_destroy()
      77  amdgpu_bo_destroy(tbo);  in amdgpu_bo_user_destroy()
      95  amdgpu_bo_destroy(tbo);  in amdgpu_bo_vm_destroy()
     404  ttm_resource_free(&(*bo_ptr)->tbo, &(*bo_ptr)->tbo.resource);  in amdgpu_bo_create_kernel_at()
     806  r = ttm_bo_kmap(&bo->tbo, 0, PFN_UP(bo->tbo.base.size), &bo->kmap);  in amdgpu_bo_kmap()
     858  ttm_bo_get(&bo->tbo);  in amdgpu_bo_ref()
     875  tbo = &((*bo)->tbo);  in amdgpu_bo_unref()
     876  ttm_bo_put(tbo);  in amdgpu_bo_unref()
    1297  if (abo->tbo.base.dma_buf && !abo->tbo.base.import_attach &&  in amdgpu_bo_move_notify()
    1321  obj = &bo->tbo.base;  in amdgpu_bo_get_memory()
    [all …]
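The ref/unref hits at 858 and 875-876 are the whole lifetime story: an amdgpu_bo carries no refcount of its own and leans entirely on the embedded TTM object, whose release path ends up in the destroy callbacks seen at hits 64/77/95. A minimal sketch of that pair, reconstructed from the hits above; the NULL handling is assumed from common kernel style, not visible in the matches.

struct amdgpu_bo *amdgpu_bo_ref(struct amdgpu_bo *bo)
{
    if (bo == NULL)
        return NULL;

    ttm_bo_get(&bo->tbo);       /* the reference lives on the inner TTM BO */
    return bo;
}

void amdgpu_bo_unref(struct amdgpu_bo **bo)
{
    struct ttm_buffer_object *tbo;

    if ((*bo) == NULL)
        return;

    tbo = &((*bo)->tbo);
    ttm_bo_put(tbo);            /* may free the BO via the destroy callback */
    *bo = NULL;                 /* caller's pointer must not be reused */
}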
amdgpu_dma_buf.c
     153  if (!bo->tbo.pin_count) {  in amdgpu_dma_buf_map()
     164  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in amdgpu_dma_buf_map()
     173  switch (bo->tbo.resource->mem_type) {  in amdgpu_dma_buf_map()
     176  bo->tbo.ttm->pages,  in amdgpu_dma_buf_map()
     177  bo->tbo.ttm->num_pages);  in amdgpu_dma_buf_map()
     188  bo->tbo.base.size, attach->dev,  in amdgpu_dma_buf_map()
     258  if (!bo->tbo.pin_count &&  in amdgpu_dma_buf_begin_cpu_access()
     304  if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) ||  in amdgpu_gem_prime_export()
     385  if (!bo->tbo.resource || bo->tbo.resource->mem_type == TTM_PL_SYSTEM)  in amdgpu_dma_buf_move_notify()
     388  r = ttm_bo_validate(&bo->tbo, &placement, &ctx);  in amdgpu_dma_buf_move_notify()
    [all …]
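The amdgpu_dma_buf_map() hits outline the export path: an unpinned BO is first validated into a domain the importer can reach, and system-memory pages are then wrapped into an sg_table. Below is a compressed reading of those hits as one hypothetical function; the placement setup and the choice of GTT are assumptions filled in from context, and the VRAM branch is elided.

static struct sg_table *map_for_attachment(struct amdgpu_bo *bo,
                                           struct dma_buf_attachment *attach)
{
    struct ttm_operation_ctx ctx = { .interruptible = true };
    int r;

    if (!bo->tbo.pin_count) {                                  /* hit 153 */
        /* migrate to a shareable domain (assumed GTT here) */
        amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_GTT);
        r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);   /* hit 164 */
        if (r)
            return ERR_PTR(r);
    }

    switch (bo->tbo.resource->mem_type) {                      /* hit 173 */
    case TTM_PL_TT:
        /* system pages: build a scatter list over them (hits 176-177) */
        return drm_prime_pages_to_sg(bo->tbo.base.dev,
                                     bo->tbo.ttm->pages,
                                     bo->tbo.ttm->num_pages);
    default:
        return ERR_PTR(-EINVAL);   /* VRAM branch elided in this sketch */
    }
}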
amdgpu_amdkfd_gpuvm.c
     411  if (bo->tbo.pin_count)  in amdgpu_amdkfd_bo_validate()
     754  bo->tbo.sg = NULL;  in kfd_mem_dmaunmap_sg_bo()
     782  &mem->bo->tbo.base,  in kfd_mem_export_dmabuf()
    1746  bo->tbo.sg = sg;  in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
    1747  bo->tbo.ttm->sg = sg;  in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
    1881  if (mem->bo->tbo.sg) {  in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
    1883  kfree(mem->bo->tbo.sg);  in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
    1960  bo_size = bo->tbo.base.size;  in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
    2029  if (!amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) && !bo->tbo.pin_count)  in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
    2126  !mem->bo->tbo.pin_count)  in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
    [all …]
amdgpu_object.h
     101  struct ttm_buffer_object tbo;  member
     166  static inline struct amdgpu_bo *ttm_to_amdgpu_bo(struct ttm_buffer_object *tbo)  in ttm_to_amdgpu_bo() argument
     168  return container_of(tbo, struct amdgpu_bo, tbo);  in ttm_to_amdgpu_bo()
     211  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_bo_reserve()
     214  r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);  in amdgpu_bo_reserve()
     225  ttm_bo_unreserve(&bo->tbo);  in amdgpu_bo_unreserve()
     230  return bo->tbo.base.size;  in amdgpu_bo_size()
     235  return bo->tbo.base.size / AMDGPU_GPU_PAGE_SIZE;  in amdgpu_bo_ngpu_pages()
     240  return (bo->tbo.page_alignment << PAGE_SHIFT) / AMDGPU_GPU_PAGE_SIZE;  in amdgpu_bo_gpu_page_alignment()
     251  return drm_vma_node_offset_addr(&bo->tbo.base.vma_node);  in amdgpu_bo_mmap_offset()
    [all …]
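Hits 101 and 166-168 are the embedding idiom that makes every other match in this listing work: the TTM object is a by-value member, so any callback handed the inner tbo can step back out to the wrapper. A stripped-down illustration; the real struct carries many more fields, and the destroy function here is only a placeholder.

struct amdgpu_bo {
    struct ttm_buffer_object tbo;   /* embedded by value, never a pointer */
    /* ... driver-side state elided ... */
};

/* container_of() comes from the kernel's basic headers */
static inline struct amdgpu_bo *ttm_to_amdgpu_bo(struct ttm_buffer_object *tbo)
{
    return container_of(tbo, struct amdgpu_bo, tbo);
}

/* TTM callbacks receive the inner object; recover the wrapper first. */
static void example_destroy(struct ttm_buffer_object *tbo)
{
    struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo);
    /* ... tear down bo's driver state, then free the whole allocation ... */
}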
amdgpu_vm.c
     380  if (bo->tbo.base.resv != vm->root.bo->tbo.base.resv)  in amdgpu_vm_bo_base_init()
    1081  if (bo->tbo.base.resv != vm->root.bo->tbo.base.resv &&  in amdgpu_vm_bo_get_memory()
    1086  if (bo->tbo.base.resv != vm->root.bo->tbo.base.resv)  in amdgpu_vm_bo_get_memory()
    1161  mem = bo->tbo.resource;  in amdgpu_vm_bo_update()
    1225  if (bo && bo->tbo.base.resv == vm->root.bo->tbo.base.resv) {  in amdgpu_vm_bo_update()
    1558  if (bo && bo->tbo.base.resv == vm->root.bo->tbo.base.resv &&  in amdgpu_vm_bo_insert_map()
    1860  if (bo && bo->tbo.base.resv == vm->root.bo->tbo.base.resv &&  in amdgpu_vm_bo_clear_mappings()
    1875  if (bo && bo->tbo.base.resv == vm->root.bo->tbo.base.resv &&  in amdgpu_vm_bo_clear_mappings()
    1955  if (bo->tbo.base.resv == vm->root.bo->tbo.base.resv)  in amdgpu_vm_bo_del()
    2049  if (evicted && bo->tbo.base.resv == vm->root.bo->tbo.base.resv) {  in amdgpu_vm_bo_invalidate()
    [all …]
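Nearly every amdgpu_vm.c hit is the same test: does this BO share the VM root page directory's dma_resv? A BO that does is private to the VM and can be fenced and updated under the VM's common reservation lock; one that does not needs per-BO handling. Wrapped as a hypothetical helper for clarity (the driver writes the comparison inline at each hit):

static bool bo_is_vm_local(struct amdgpu_bo *bo, struct amdgpu_vm *vm)
{
    /* shared dma_resv means shared locking and shared fencing */
    return bo->tbo.base.resv == vm->root.bo->tbo.base.resv;
}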
amdgpu_gem.c
     207  *obj = &bo->tbo.base;  in amdgpu_gem_object_create()
     262  mm = amdgpu_ttm_tt_get_usermm(abo->tbo.ttm);
     268  abo->tbo.base.resv != vm->root.bo->tbo.base.resv
     299  r = drm_exec_prepare_obj(&exec, &bo->tbo.base, 1);
     339  if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm))
     361  if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm))
     451  resv = vm->root.bo->tbo.base.resv;
     591  if (amdgpu_ttm_tt_get_usermm(robj->tbo.ttm) ||
     949  info.bo_size = robj->tbo.base.size;
     959  if (robj->tbo.base.import_attach &&
    [all …]
amdgpu_cs.c
     143  if (amdgpu_ttm_tt_get_usermm(p->uf_bo->tbo.ttm))  in amdgpu_cs_p1_user_fence()
     789  .resv = bo->tbo.base.resv  in amdgpu_cs_bo_validate()
     794  if (bo->tbo.pin_count)  in amdgpu_cs_bo_validate()
     801  (!bo->tbo.base.dma_buf ||  in amdgpu_cs_bo_validate()
     893  for (i = 0; i < bo->tbo.ttm->num_pages; i++) {  in amdgpu_cs_parser_bos()
     894  if (bo->tbo.ttm->pages[i] != e->user_pages[i]) {  in amdgpu_cs_parser_bos()
     939  if (amdgpu_ttm_tt_is_userptr(e->bo->tbo.ttm) &&  in amdgpu_cs_parser_bos()
     948  amdgpu_ttm_tt_set_user_pages(e->bo->tbo.ttm,  in amdgpu_cs_parser_bos()
     975  r = amdgpu_ttm_alloc_gart(&p->uf_bo->tbo);  in amdgpu_cs_parser_bos()
    1215  struct dma_resv *resv = bo->tbo.base.resv;  in amdgpu_cs_sync_rings()
    [all …]
amdgpu_gtt_mgr.c
     114  struct ttm_buffer_object *tbo,  in amdgpu_gtt_mgr_new() argument
     119  uint32_t num_pages = PFN_UP(tbo->base.size);  in amdgpu_gtt_mgr_new()
     127  ttm_resource_init(tbo, place, &node->base);  in amdgpu_gtt_mgr_new()
     137  num_pages, tbo->page_alignment,  in amdgpu_gtt_mgr_new()
amdgpu_vm_sdma.c
      40  r = amdgpu_ttm_alloc_gart(&table->bo.tbo);  in amdgpu_vm_sdma_map_table()
      45  r = amdgpu_ttm_alloc_gart(&table->shadow->tbo);  in amdgpu_vm_sdma_map_table()
     143  dma_resv_add_fence(p->vm->root.bo->tbo.base.resv, f,  in amdgpu_vm_sdma_commit()
     244  dma_resv_iter_begin(&cursor, bo->tbo.base.resv, DMA_RESV_USAGE_KERNEL);  in amdgpu_vm_sdma_update()
amdgpu_vram_mgr.c
     250  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_vram_mgr_bo_visible_size()
     251  struct ttm_resource *res = bo->tbo.resource;  in amdgpu_vram_mgr_bo_visible_size()
     404  struct ttm_buffer_object *tbo,  in amdgpu_dummy_vram_mgr_new() argument
     423  struct ttm_buffer_object *tbo,  in amdgpu_vram_mgr_new() argument
     444  if (tbo->type != ttm_bo_type_kernel)  in amdgpu_vram_mgr_new()
     457  tbo->page_alignment);  in amdgpu_vram_mgr_new()
     464  ttm_resource_init(tbo, place, &vres->base);  in amdgpu_vram_mgr_new()
     485  if (tbo->page_alignment)  in amdgpu_vram_mgr_new()
     486  min_block_size = (u64)tbo->page_alignment << PAGE_SHIFT;  in amdgpu_vram_mgr_new()
amdgpu_preempt_mgr.c
      60  struct ttm_buffer_object *tbo,  in amdgpu_preempt_mgr_new() argument
      68  ttm_resource_init(tbo, place, *res);  in amdgpu_preempt_mgr_new()
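The preempt manager shows the minimum a TTM resource manager's allocation callback can do, per hits 60/68: allocate a bare ttm_resource and initialize it against the BO and placement. A generic sketch with assumed names; the real amdgpu_preempt_mgr_new() additionally fills in an invalid-offset start, which is omitted here.

static int example_mgr_new(struct ttm_resource_manager *man,
                           struct ttm_buffer_object *tbo,
                           const struct ttm_place *place,
                           struct ttm_resource **res)
{
    *res = kzalloc(sizeof(**res), GFP_KERNEL);
    if (!*res)
        return -ENOMEM;

    /* hooks the resource into the BO's size/placement accounting */
    ttm_resource_init(tbo, place, *res);
    return 0;
}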
amdgpu_gart.c
     179  bo->tbo.sg = sg;  in amdgpu_gart_table_ram_alloc()
     180  bo->tbo.ttm->sg = sg;  in amdgpu_gart_table_ram_alloc()
     198  ret = amdgpu_ttm_alloc_gart(&adev->gart.bo->tbo);  in amdgpu_gart_table_ram_alloc()
     229  struct sg_table *sg = adev->gart.bo->tbo.sg;  in amdgpu_gart_table_ram_free()
amdgpu_vm_pt.c
     419  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in amdgpu_vm_pt_clear()
     426  r = ttm_bo_validate(&shadow->tbo, &shadow->placement, &ctx);  in amdgpu_vm_pt_clear()
     542  bp.resv = vm->root.bo->tbo.base.resv;  in amdgpu_vm_pt_create()
     555  WARN_ON(dma_resv_lock(bo->tbo.base.resv,  in amdgpu_vm_pt_create()
     563  bp.resv = bo->tbo.base.resv;  in amdgpu_vm_pt_create()
     570  dma_resv_unlock(bo->tbo.base.resv);  in amdgpu_vm_pt_create()
     649  ttm_bo_set_bulk_move(&shadow->tbo, NULL);  in amdgpu_vm_pt_free()
     652  ttm_bo_set_bulk_move(&entry->bo->tbo, NULL);  in amdgpu_vm_pt_free()
amdgpu_csa.c
      77  r = drm_exec_lock_obj(&exec, &bo->tbo.base);  in amdgpu_map_static_csa()
     117  r = drm_exec_lock_obj(&exec, &bo->tbo.base);  in amdgpu_unmap_static_csa()
amdgpu_ttm.c
     678  struct ttm_tt *ttm = bo->tbo.ttm;  in amdgpu_ttm_tt_get_user_pages()
     880  struct ttm_buffer_object *tbo,  in amdgpu_ttm_gart_bind() argument
     883  struct amdgpu_bo *abo = ttm_to_amdgpu_bo(tbo);  in amdgpu_ttm_gart_bind()
     884  struct ttm_tt *ttm = tbo->ttm;  in amdgpu_ttm_gart_bind()
    1033  struct amdgpu_device *adev = amdgpu_ttm_adev(tbo->bdev);  in amdgpu_ttm_recover_gart()
    1036  if (!tbo->ttm)  in amdgpu_ttm_recover_gart()
    1039  flags = amdgpu_ttm_tt_pte_flags(adev, tbo->ttm, tbo->resource);  in amdgpu_ttm_recover_gart()
    1040  amdgpu_ttm_gart_bind(adev, tbo, flags);  in amdgpu_ttm_recover_gart()
    1233  if (!tbo->ttm)  in amdgpu_ttm_tt_get_userptr()
    1236  gtt = (void *)tbo->ttm;  in amdgpu_ttm_tt_get_userptr()
    [all …]
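Hits 1033-1040 outline the post-reset GART recovery flow: skip BOs that never had a ttm_tt, recompute the PTE flags for wherever the resource currently lives, and rebind. Flattened into one sketch below; the function name is illustrative and error handling is elided.

static void example_recover_gart(struct ttm_buffer_object *tbo)
{
    struct amdgpu_device *adev = amdgpu_ttm_adev(tbo->bdev);
    uint64_t flags;

    if (!tbo->ttm)
        return;     /* never bound, nothing to restore */

    /* flags depend on the resource's current placement */
    flags = amdgpu_ttm_tt_pte_flags(adev, tbo->ttm, tbo->resource);
    amdgpu_ttm_gart_bind(adev, tbo, flags);
}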
/openbsd/sys/dev/pci/drm/radeon/
radeon_object.c
      56  bo = container_of(tbo, struct radeon_bo, tbo);  in radeon_ttm_bo_destroy()
      64  drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);  in radeon_ttm_bo_destroy()
     235  r = ttm_bo_kmap(&bo->tbo, 0, PFN_UP(bo->tbo.base.size), &bo->kmap);  in radeon_bo_kmap()
     261  ttm_bo_get(&bo->tbo);  in radeon_bo_ref()
     271  tbo = &((*bo)->tbo);  in radeon_bo_unref()
     272  ttm_bo_put(tbo);  in radeon_bo_unref()
     285  if (bo->tbo.pin_count) {  in radeon_bo_pin_restricted()
     286  ttm_bo_pin(&bo->tbo);  in radeon_bo_pin_restricted()
     322  ttm_bo_pin(&bo->tbo);  in radeon_bo_pin_restricted()
     342  ttm_bo_unpin(&bo->tbo);  in radeon_bo_unpin()
    [all …]
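Hits 285-286 are the pin fast path: a BO that is already pinned only gets its TTM pin count bumped, with no migration or validation. Isolated as a hypothetical predicate; the real radeon_bo_pin_restricted() continues into placement setup and a second ttm_bo_pin() (hit 322) when this returns false.

static bool radeon_bo_pin_fast(struct radeon_bo *bo)
{
    if (bo->tbo.pin_count) {
        ttm_bo_pin(&bo->tbo);   /* count++, BO is already resident */
        return true;
    }
    return false;   /* caller must validate into the domain, then pin */
}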
radeon_object.h
      68  r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);  in radeon_bo_reserve()
      79  ttm_bo_unreserve(&bo->tbo);  in radeon_bo_unreserve()
      96  rdev = radeon_get_rdev(bo->tbo.bdev);  in radeon_bo_gpu_offset()
      98  switch (bo->tbo.resource->mem_type) {  in radeon_bo_gpu_offset()
     107  return (bo->tbo.resource->start << PAGE_SHIFT) + start;  in radeon_bo_gpu_offset()
     112  return bo->tbo.base.size;  in radeon_bo_size()
     117  return bo->tbo.base.size / RADEON_GPU_PAGE_SIZE;  in radeon_bo_ngpu_pages()
     122  return (bo->tbo.page_alignment << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;  in radeon_bo_gpu_page_alignment()
     133  return drm_vma_node_offset_addr(&bo->tbo.base.vma_node);  in radeon_bo_mmap_offset()
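Hits 68/79 are the inline wrappers every radeon caller goes through before touching a BO's placement or mapping state. The pattern they enforce, shown as a sketch: the function name is invented, and the -ERESTARTSYS note assumes the interruptible wait that !no_intr selects.

int example_with_bo_locked(struct radeon_bo *bo)
{
    int r;

    r = radeon_bo_reserve(bo, false);   /* false => interruptible wait */
    if (r != 0)
        return r;                       /* e.g. -ERESTARTSYS on a signal */

    /* reservation held: bo->tbo.resource, kmap, placement are stable */

    radeon_bo_unreserve(bo);
    return 0;
}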
radeon_mn.c
      57  if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm))  in radeon_mn_invalidate()
      69  r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_BOOKKEEP,  in radeon_mn_invalidate()
      75  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in radeon_mn_invalidate()
radeon_prime.c
      41  return drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages,  in radeon_gem_prime_get_sg_table()
      42  bo->tbo.ttm->num_pages);  in radeon_gem_prime_get_sg_table()
      62  bo->tbo.base.funcs = &radeon_gem_object_funcs;  in radeon_gem_prime_import_sg_table()
      69  return &bo->tbo.base;  in radeon_gem_prime_import_sg_table()
     111  if (radeon_ttm_tt_has_userptr(bo->rdev, bo->tbo.ttm))  in radeon_gem_prime_export()
radeon_gem.c
     204  *obj = &robj->tbo.base;  in radeon_gem_object_create()
     240  r = dma_resv_wait_timeout(robj->tbo.base.resv,  in radeon_gem_set_domain()
     347  struct radeon_device *rdev = radeon_get_rdev(bo->tbo.bdev);  in radeon_gem_object_mmap()
     349  if (radeon_ttm_tt_has_userptr(rdev, bo->tbo.ttm))  in radeon_gem_object_mmap()
     360  struct radeon_device *rdev = radeon_get_rdev(bo->tbo.bdev);  in radeon_gem_object_mmap()
     362  if (radeon_ttm_tt_has_userptr(rdev, bo->tbo.ttm))  in radeon_gem_object_mmap()
     503  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in radeon_gem_userptr_ioctl()
     610  cur_placement = READ_ONCE(robj->tbo.resource->mem_type);  in radeon_gem_busy_ioctl()
     641  cur_placement = READ_ONCE(robj->tbo.resource->mem_type);  in radeon_gem_wait_idle_ioctl()
     712  tv.bo = &bo_va->bo->tbo;  in radeon_gem_va_update_vm()
    [all …]
radeon_cs.c
     162  if (radeon_ttm_tt_has_userptr(p->rdev, p->relocs[i].robj->tbo.ttm)) {  in radeon_cs_parser_relocs()
     185  p->relocs[i].tv.bo = &p->relocs[i].robj->tbo;  in radeon_cs_parser_relocs()
     263  resv = reloc->robj->tbo.base.resv;  in radeon_cs_sync_rings()
     408  if (la->robj->tbo.base.size > lb->robj->tbo.base.size)  in cmp_size_smaller_first()
     410  if (la->robj->tbo.base.size < lb->robj->tbo.base.size)  in cmp_size_smaller_first()
     455  drm_gem_object_put(&bo->tbo.base);  in radeon_cs_parser_fini()
     527  rdev->ring_tmp_bo.bo->tbo.resource);  in radeon_bo_vm_update_pte()
     541  r = radeon_vm_bo_update(rdev, bo_va, bo->tbo.resource);  in radeon_bo_vm_update_pte()
     547  r = dma_resv_reserve_fences(bo->tbo.base.resv, 1);  in radeon_bo_vm_update_pte()
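Hits 408/410 belong to a list_sort() comparator that orders the relocation list by BO size, smallest first. Reconstructed around those two lines; the list_entry container and the tv.head member are assumptions taken from the other radeon_cs.c hits.

static int cmp_size_smaller_first(void *priv, const struct list_head *a,
                                  const struct list_head *b)
{
    struct radeon_bo_list *la = list_entry(a, struct radeon_bo_list, tv.head);
    struct radeon_bo_list *lb = list_entry(b, struct radeon_bo_list, tv.head);

    if (la->robj->tbo.base.size > lb->robj->tbo.base.size)
        return 1;       /* a sorts after b */
    if (la->robj->tbo.base.size < lb->robj->tbo.base.size)
        return -1;      /* a sorts before b */
    return 0;
}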
/openbsd/sys/dev/pci/drm/
drm_gem_ttm_helper.c
      16  struct ttm_buffer_object *tbo =  in drm_gem_ttm_mmap() local
      18  int r = ttm_bo_mmap_obj(tbo);  in drm_gem_ttm_mmap()
      27  struct ttm_buffer_object *tbo =  in drm_gem_ttm_vmap() local
      30  return ttm_bo_vmap(tbo, ism);  in drm_gem_ttm_vmap()
      36  struct ttm_buffer_object *tbo =  in drm_gem_ttm_vunmap() local
      39  ttm_bo_vunmap(tbo, ism);  in drm_gem_ttm_vunmap()
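All three helpers recover the embedding ttm_buffer_object from a drm_gem_object and forward to TTM. A driver consumes them by plugging them into its drm_gem_object_funcs, roughly as below; the struct name is invented and the remaining hooks are elided.

#include <drm/drm_gem_ttm_helper.h>

static const struct drm_gem_object_funcs example_gem_funcs = {
    .mmap   = drm_gem_ttm_mmap,     /* forwards to ttm_bo_mmap_obj() */
    .vmap   = drm_gem_ttm_vmap,     /* kernel mapping via ttm_bo_vmap() */
    .vunmap = drm_gem_ttm_vunmap,   /* tears that mapping back down */
    /* .free, .export, ... are driver-specific */
};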
/openbsd/usr.bin/mg/
re_search.c
     339  tbo = curwp->w_doto;  in re_forwsrch()
     342  if (tbo == clp->l_used)  in re_forwsrch()
     351  tbo = 0;  in re_forwsrch()
     359  if (tbo != 0)  in re_forwsrch()
     368  tbo = 0;  in re_forwsrch()
     390  int tbo, tdotline;  in re_backsrch() local
     394  tbo = curwp->w_doto;  in re_backsrch()
     398  tbo = tbo - 1;  in re_backsrch()
     399  if (tbo < 0) {  in re_backsrch()
     403  tbo = llength(clp);  in re_backsrch()
    [all …]
search.c
     703  tbo = cbo;  in forwsrch()
     706  if (tbo == llength(tlp)) {  in forwsrch()
     710  tbo = 0;  in forwsrch()
     716  c = lgetc(tlp, tbo++);  in forwsrch()
     722  curwp->w_doto = tbo;  in forwsrch()
     767  tbo = cbo;  in backsrch()
     771  if (tbo == 0) {  in backsrch()
     776  tbo = llength(tlp) + 1;  in backsrch()
     778  if (--tbo == llength(tlp))  in backsrch()
     781  c = lgetc(tlp, tbo);  in backsrch()
    [all …]
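In the mg hits, tbo is not a buffer object at all: it is a temporary byte offset paired with a temporary line pointer (tlp), and the forwsrch() lines show the canonical step: when the offset runs off the end of a line, hop to the next line and report a synthetic newline. That step isolated as a hypothetical helper, using mg's own lforw()/llength()/lgetc() accessors; the real loop also bounds the walk at the buffer's end.

static int next_char(struct line **tlp, int *tbo)
{
    if (*tbo == llength(*tlp)) {    /* past the last byte of this line */
        *tlp = lforw(*tlp);         /* advance to the next line */
        *tbo = 0;
        return '\n';                /* the line boundary reads as newline */
    }
    return lgetc(*tlp, (*tbo)++);   /* ordinary byte, bump the offset */
}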
match.c
     135  int tbo;  in displaymatch() local
     155  tbo = curwp->w_doto;  in displaymatch()
     165  curwp->w_doto = tbo;  in displaymatch()