Searched refs:bo (Results 1 – 25 of 86) sorted by relevance


/dragonfly/sys/dev/drm/radeon/
radeon_object.c
83 drm_prime_gem_destroy(&bo->gem_base, bo->tbo.sg); in radeon_ttm_bo_destroy()
85 kfree(bo); in radeon_ttm_bo_destroy()
285 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap); in radeon_bo_kmap()
312 return bo; in radeon_bo_ref()
378 bo->rdev->vram_pin_size += radeon_bo_size(bo); in radeon_bo_pin_restricted()
380 bo->rdev->gart_pin_size += radeon_bo_size(bo); in radeon_bo_pin_restricted()
382 dev_err(bo->rdev->dev, "%p pin failed\n", bo); in radeon_bo_pin_restricted()
411 bo->rdev->vram_pin_size -= radeon_bo_size(bo); in radeon_bo_unpin()
413 bo->rdev->gart_pin_size -= radeon_bo_size(bo); in radeon_bo_unpin()
441 &bo->gem_base, bo, (unsigned long)bo->gem_base.size, in radeon_bo_force_delete()
[all …]
radeon_prime.c
58 ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, in radeon_gem_prime_vmap()
59 &bo->dma_buf_vmap); in radeon_gem_prime_vmap()
79 struct radeon_bo *bo; in radeon_gem_prime_import_sg_table() local
93 bo->prime_shared_count = 1; in radeon_gem_prime_import_sg_table()
94 return &bo->gem_base; in radeon_gem_prime_import_sg_table()
109 bo->prime_shared_count++; in radeon_gem_prime_pin()
111 radeon_bo_unreserve(bo); in radeon_gem_prime_pin()
124 radeon_bo_unpin(bo); in radeon_gem_prime_unpin()
126 bo->prime_shared_count--; in radeon_gem_prime_unpin()
127 radeon_bo_unreserve(bo); in radeon_gem_prime_unpin()
[all …]
radeon_object.h
68 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL); in radeon_bo_reserve()
71 dev_err(bo->rdev->dev, "%p reserve failed\n", bo); in radeon_bo_reserve()
77 static inline void radeon_bo_unreserve(struct radeon_bo *bo) in radeon_bo_unreserve() argument
79 ttm_bo_unreserve(&bo->tbo); in radeon_bo_unreserve()
91 static inline u64 radeon_bo_gpu_offset(struct radeon_bo *bo) in radeon_bo_gpu_offset() argument
93 return bo->tbo.offset; in radeon_bo_gpu_offset()
98 return bo->tbo.num_pages << PAGE_SHIFT; in radeon_bo_size()
119 return drm_vma_node_offset_addr(&bo->tbo.vma_node); in radeon_bo_mmap_offset()
132 extern void radeon_bo_kunmap(struct radeon_bo *bo);
134 extern void radeon_bo_unref(struct radeon_bo **bo);
[all …]
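
Read together, the radeon_object.c and radeon_object.h hits above trace the standard CPU-access sequence for a radeon buffer object: reserve, kmap, touch the pages, kunmap, unreserve. A minimal sketch of that sequence follows; radeon_bo_kmap() returning the mapping through a void ** out-parameter is an assumption, while radeon_bo_reserve(), radeon_bo_kunmap() and radeon_bo_unreserve() appear with these shapes in the hits.

/* Sketch: CPU-map a radeon BO, write one word, release everything.
 * Assumed: int radeon_bo_kmap(struct radeon_bo *bo, void **ptr);
 * the other calls match the declarations in the hits above.
 */
static int example_write_bo(struct radeon_bo *bo, uint32_t value)
{
	void *ptr;
	int r;

	r = radeon_bo_reserve(bo, false);	/* interruptible reserve */
	if (r)
		return r;

	r = radeon_bo_kmap(bo, &ptr);		/* wraps ttm_bo_kmap() on bo->tbo */
	if (r) {
		radeon_bo_unreserve(bo);
		return r;
	}

	*(uint32_t *)ptr = value;		/* CPU access while mapped */

	radeon_bo_kunmap(bo);
	radeon_bo_unreserve(bo);
	return 0;
}
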
radeon_mn.c
83 bo->mn = NULL; in radeon_mn_destroy()
84 list_del_init(&bo->mn_list); in radeon_mn_destroy()
137 struct radeon_bo *bo; in radeon_mn_invalidate_range_start() local
145 if (!bo->tbo.ttm || bo->tbo.ttm->state != tt_bound) in radeon_mn_invalidate_range_start()
160 r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false); in radeon_mn_invalidate_range_start()
164 radeon_bo_unreserve(bo); in radeon_mn_invalidate_range_start()
274 bo->mn = rmn; in radeon_mn_register()
303 rmn = bo->mn; in radeon_mn_unregister()
311 head = bo->mn_list.next; in radeon_mn_unregister()
313 bo->mn = NULL; in radeon_mn_unregister()
[all …]
radeon_ttm.c
197 switch (bo->mem.mem_type) { in radeon_evict_flags()
267 rdev = radeon_get_rdev(bo->bdev); in radeon_move_blit()
324 rdev = radeon_get_rdev(bo->bdev); in radeon_move_vram_ram()
354 ttm_bo_mem_put(bo, &tmp_mem); in radeon_move_vram_ram()
371 rdev = radeon_get_rdev(bo->bdev); in radeon_move_ram_vram()
394 ttm_bo_mem_put(bo, &tmp_mem); in radeon_move_ram_vram()
418 radeon_move_null(bo, new_mem); in radeon_bo_move()
426 radeon_move_null(bo, new_mem); in radeon_bo_move()
879 return ((bo->mem.bus.base + bo->mem.bus.offset) >> PAGE_SHIFT) in radeon_ttm_io_mem_pfn()
1012 struct ttm_buffer_object *bo; in radeon_ttm_fault() local
[all …]
radeon_vm.c
150 if (!vm->page_tables[i].bo) in radeon_vm_get_bos()
330 bo_va->bo = bo; in radeon_vm_bo_add()
388 struct radeon_bo *bo) in radeon_vm_clear_bo() argument
400 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in radeon_vm_clear_bo()
428 radeon_bo_unreserve(bo); in radeon_vm_clear_bo()
508 tmp->bo = radeon_bo_ref(bo_va->bo); in radeon_vm_bo_set_addr()
669 struct radeon_bo *bo = vm->page_tables[pt_idx].bo; in radeon_vm_update_page_directory() local
672 if (bo == NULL) in radeon_vm_update_page_directory()
925 bo_va->bo, vm); in radeon_vm_bo_update()
945 if (bo_va->bo && radeon_ttm_tt_is_readonly(bo_va->bo->tbo.ttm)) in radeon_vm_bo_update()
[all …]
/dragonfly/sys/dev/drm/ttm/
ttm_bo.c
97 bo, bo->mem.num_pages, bo->mem.size >> 10, in ttm_bo_mem_space_debug()
160 bo->destroy(bo); in ttm_bo_release_list()
182 if (bo->ttm && !(bo->ttm->page_flags & in ttm_bo_add_to_lru()
349 ttm_bo_mem_put(bo, &bo->mem); in ttm_bo_cleanup_memtype_use()
356 if (bo->resv == &bo->ttm_resv) in ttm_bo_individualize_resv()
411 if (bo->resv != &bo->ttm_resv) in ttm_bo_cleanup_refs_or_queue()
433 if (bo->resv != &bo->ttm_resv) in ttm_bo_cleanup_refs_or_queue()
1171 bo->mem.num_pages = bo->num_pages; in ttm_bo_init_reserved()
1184 bo->resv = &bo->ttm_resv; in ttm_bo_init_reserved()
1297 bo = kzalloc(sizeof(*bo), GFP_KERNEL); in ttm_bo_create()
[all …]
ttm_bo_vm.c
75 ttm_bo_get(bo); in ttm_bo_vm_fault_idle()
79 ttm_bo_put(bo); in ttm_bo_vm_fault_idle()
166 if (bo->ttm && (bo->ttm->page_flags & TTM_PAGE_FLAG_SG)) { in ttm_bo_vm_fault()
244 ttm = bo->ttm; in ttm_bo_vm_fault()
317 ttm_bo_get(bo); in ttm_bo_vm_open()
325 ttm_bo_put(bo); in ttm_bo_vm_close()
426 bo = NULL; in ttm_bo_vm_lookup()
431 if (!bo) in ttm_bo_vm_lookup()
434 return bo; in ttm_bo_vm_lookup()
477 ttm_bo_put(bo); in ttm_bo_mmap()
[all …]
ttm_bo_util.c
49 ttm_bo_mem_put(bo, &bo->mem); in ttm_bo_free_old_node()
478 ttm_bo_get(bo); in ttm_buffer_object_transfer()
479 fbo->bo = bo; in ttm_buffer_object_transfer()
604 &bo->bdev->man[bo->mem.mem_type]; in ttm_bo_kmap()
609 map->bo = bo; in ttm_bo_kmap()
619 ret = ttm_mem_io_reserve(bo->bdev, &bo->mem); in ttm_bo_kmap()
635 struct ttm_buffer_object *bo = map->bo; in ttm_bo_kunmap() local
637 &bo->bdev->man[bo->mem.mem_type]; in ttm_bo_kunmap()
657 ttm_mem_io_free(map->bo->bdev, &map->bo->mem); in ttm_bo_kunmap()
832 memset(&bo->mem, 0, sizeof(bo->mem)); in ttm_bo_pipeline_gutting()
[all …]
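
Note from the ttm_bo_util.c hits that ttm_bo_kmap() fills a struct ttm_bo_kmap_obj and ttm_bo_kunmap() takes that object rather than the BO itself (the kunmap path recovers the BO through map->bo). A hedged sketch of the mapping round-trip; ttm_kmap_obj_virtual() as the accessor for the CPU pointer is an assumption.

/* Sketch: map all pages of a TTM BO and obtain a CPU pointer.
 * ttm_bo_kmap() fills the kmap object; ttm_bo_kunmap() takes the
 * same object and finds the BO again via map->bo.
 */
static int example_map_ttm_bo(struct ttm_buffer_object *bo)
{
	struct ttm_bo_kmap_obj map;
	bool is_iomem;
	void *virt;
	int ret;

	ret = ttm_bo_kmap(bo, 0, bo->num_pages, &map);
	if (ret)
		return ret;

	/* Assumed accessor: reports whether the mapping is I/O memory. */
	virt = ttm_kmap_obj_virtual(&map, &is_iomem);
	memset(virt, 0, PAGE_SIZE);	/* touch the first page */

	ttm_bo_kunmap(&map);
	return 0;
}
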
ttm_execbuf_util.c
39 struct ttm_buffer_object *bo = entry->bo; in ttm_eu_backoff_reservation_reverse() local
41 __ttm_bo_unreserve(bo); in ttm_eu_backoff_reservation_reverse()
50 struct ttm_buffer_object *bo = entry->bo; in ttm_eu_del_from_lru_locked() local
69 struct ttm_buffer_object *bo = entry->bo; in ttm_eu_backoff_reservation() local
71 ttm_bo_add_to_lru(bo); in ttm_eu_backoff_reservation()
72 __ttm_bo_unreserve(bo); in ttm_eu_backoff_reservation()
111 struct ttm_buffer_object *bo = entry->bo; in ttm_eu_reserve_buffers() local
192 bo = list_first_entry(list, struct ttm_validate_buffer, head)->bo; in ttm_eu_fence_buffer_objects()
193 bdev = bo->bdev; in ttm_eu_fence_buffer_objects()
195 glob = bo->bdev->glob; in ttm_eu_fence_buffer_objects()
[all …]
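
ttm_execbuf_util.c implements the multi-BO reservation helpers used by command submission: reserve every BO on a ttm_validate_buffer list under one ww_acquire_ctx, then either attach the job's fence or back off. A sketch of the calling convention, assuming the 4.x-era signatures (the dups argument and the dma_fence type are assumptions):

/* Sketch: reserve a list of validate-buffers, do work, fence them.
 * Assumed signatures (Linux 4.x-era TTM):
 *   int  ttm_eu_reserve_buffers(struct ww_acquire_ctx *, struct list_head *,
 *                               bool intr, struct list_head *dups);
 *   void ttm_eu_fence_buffer_objects(struct ww_acquire_ctx *,
 *                                    struct list_head *, struct dma_fence *);
 */
static int example_submit(struct list_head *validated, struct dma_fence *fence)
{
	struct ww_acquire_ctx ticket;
	int r;

	r = ttm_eu_reserve_buffers(&ticket, validated, true, NULL);
	if (r)
		return r;	/* nothing left reserved on failure */

	/* ... validate the BOs and emit the command stream here;
	 * on a mid-stream error, ttm_eu_backoff_reservation() would
	 * re-add everything to the LRU and unreserve, as above ... */

	/* Publishes the fence on every reserved BO and unreserves them. */
	ttm_eu_fence_buffer_objects(&ticket, validated, fence);
	return 0;
}
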
ttm_tt.c
53 struct ttm_bo_device *bdev = bo->bdev; in ttm_tt_create()
56 reservation_object_assert_held(bo->resv); in ttm_tt_create()
64 switch (bo->type) { in ttm_tt_create()
75 bo->ttm = NULL; in ttm_tt_create()
80 bo->ttm = bdev->driver->ttm_tt_create(bo, page_flags); in ttm_tt_create()
81 if (unlikely(bo->ttm == NULL)) in ttm_tt_create()
237 ttm->bdev = bo->bdev; in ttm_tt_init_fields()
238 ttm->num_pages = bo->num_pages; in ttm_tt_init_fields()
243 ttm->sg = bo->sg; in ttm_tt_init_fields()
249 ttm_tt_init_fields(ttm, bo, page_flags); in ttm_tt_init()
[all …]
/dragonfly/sys/dev/drm/amd/amdgpu/
amdgpu_object.c
106 drm_prime_gem_destroy(&bo->gem_base, bo->tbo.sg); in amdgpu_bo_destroy()
115 kfree(bo); in amdgpu_bo_destroy()
463 bo->allowed_domains = bo->preferred_domains; in amdgpu_bo_do_create()
521 r = amdgpu_fill_buffer(bo, 0, bo->tbo.resv, &fence); in amdgpu_bo_do_create()
570 bo->shadow->parent = amdgpu_bo_ref(bo); in amdgpu_bo_create_shadow()
694 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_bo_validate()
782 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap); in amdgpu_bo_kmap()
816 if (bo->kmap.bo) in amdgpu_bo_kunmap()
835 return bo; in amdgpu_bo_ref()
940 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_bo_pin_restricted()
[all …]
amdgpu_prime.c
74 ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, in amdgpu_gem_prime_vmap()
75 &bo->dma_buf_vmap); in amdgpu_gem_prime_vmap()
165 struct amdgpu_bo *bo; in amdgpu_gem_prime_import_sg_table() local
181 bo->tbo.sg = sg; in amdgpu_gem_prime_import_sg_table()
182 bo->tbo.ttm->sg = sg; in amdgpu_gem_prime_import_sg_table()
189 return &bo->gem_base; in amdgpu_gem_prime_import_sg_table()
291 amdgpu_bo_unreserve(bo);
319 amdgpu_bo_unpin(bo);
340 return bo->tbo.resv; in amdgpu_gem_prime_res_obj()
375 if (!bo->pin_count && (bo->allowed_domains & AMDGPU_GEM_DOMAIN_GTT)) {
[all …]
amdgpu_object.h
167 ttm_bo_unreserve(&bo->tbo); in amdgpu_bo_unreserve()
172 return bo->tbo.num_pages << PAGE_SHIFT; in amdgpu_bo_size()
202 switch (bo->tbo.mem.mem_type) { in amdgpu_bo_gpu_accessible()
219 if (bo->tbo.mem.mem_type != TTM_PL_VRAM) in amdgpu_bo_in_cpu_visible_vram()
255 void *amdgpu_bo_kptr(struct amdgpu_bo *bo);
256 void amdgpu_bo_kunmap(struct amdgpu_bo *bo);
258 void amdgpu_bo_unref(struct amdgpu_bo **bo);
262 int amdgpu_bo_unpin(struct amdgpu_bo *bo);
285 struct amdgpu_bo *bo,
288 int amdgpu_bo_validate(struct amdgpu_bo *bo);
[all …]
amdgpu_vm.c
216 base->bo = bo; in amdgpu_vm_bo_base_init()
358 struct amdgpu_bo *bo = bo_base->bo; in amdgpu_vm_validate_pt_bos() local
383 struct amdgpu_bo *bo = bo_base->bo; in amdgpu_vm_validate_pt_bos() local
1163 bo = bo_base->bo->parent; in amdgpu_vm_update_directories()
1716 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_update() local
1747 if (clear || (bo && bo->tbo.resv == vm->root.base.bo->tbo.resv)) in amdgpu_vm_bo_update()
1782 if (bo && bo->tbo.resv == vm->root.base.bo->tbo.resv) { in amdgpu_vm_bo_update()
2112 if (bo && bo->tbo.resv == vm->root.base.bo->tbo.resv && in amdgpu_vm_bo_insert_map()
2448 bo = mapping->bo_va->base.bo; in amdgpu_vm_bo_trace_cs()
2513 if (bo->parent && bo->parent->shadow == bo) in amdgpu_vm_bo_invalidate()
[all …]
amdgpu_mn.c
117 struct amdgpu_bo *bo, *next_bo; in amdgpu_mn_destroy() local
125 bo->mn = NULL; in amdgpu_mn_destroy()
126 list_del_init(&bo->mn_list); in amdgpu_mn_destroy()
220 struct amdgpu_bo *bo; in amdgpu_mn_invalidate_node() local
314 struct amdgpu_bo *bo; in amdgpu_mn_invalidate_range_start_hsa() local
325 struct kgd_mem *mem = bo->kfd_bo; in amdgpu_mn_invalidate_range_start_hsa()
482 bo->mn = amn; in amdgpu_mn_register()
512 amn = bo->mn; in amdgpu_mn_unregister()
521 head = bo->mn_list.next; in amdgpu_mn_unregister()
523 bo->mn = NULL; in amdgpu_mn_unregister()
[all …]
amdgpu_ttm.c
519 src.bo = bo; in amdgpu_move_blit()
520 dst.bo = bo; in amdgpu_move_blit()
1107 tmp = bo->mem; in amdgpu_ttm_alloc_gart()
1133 ttm_bo_mem_put(bo, &bo->mem); in amdgpu_ttm_alloc_gart()
1134 bo->mem = tmp; in amdgpu_ttm_alloc_gart()
1135 bo->offset = (bo->mem.start << PAGE_SHIFT) + in amdgpu_ttm_alloc_gart()
1136 bo->bdev->man[bo->mem.mem_type].gpu_offset; in amdgpu_ttm_alloc_gart()
1652 struct amdgpu_bo *bo; in amdgpu_ttm_fw_reserve_vram_init() local
1687 ttm_bo_mem_put(&bo->tbo, &bo->tbo.mem); in amdgpu_ttm_fw_reserve_vram_init()
1688 r = ttm_bo_mem_space(&bo->tbo, &bo->placement, in amdgpu_ttm_fw_reserve_vram_init()
[all …]
amdgpu_gem.c
51 struct amdgpu_bo *bo; in amdgpu_gem_object_create() local
87 *obj = &bo->gem_base; in amdgpu_gem_object_create()
171 tv.bo = &bo->tbo; in amdgpu_gem_object_close()
183 bo_va = amdgpu_vm_bo_find(vm, bo); in amdgpu_gem_object_close()
301 struct amdgpu_bo *bo; in amdgpu_gem_userptr_ioctl()
327 bo = gem_to_amdgpu_bo(gobj); in amdgpu_gem_userptr_ioctl()
342 bo->tbo.ttm->pages); in amdgpu_gem_userptr_ioctl()
351 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_gem_userptr_ioctl()
352 amdgpu_bo_unreserve(bo); in amdgpu_gem_userptr_ioctl()
367 release_pages(bo->tbo.ttm->pages, bo->tbo.ttm->num_pages); in amdgpu_gem_userptr_ioctl()
[all …]
amdgpu_cs.c
404 if (bo->pin_count) in amdgpu_cs_bo_validate()
430 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_cs_bo_validate()
470 if (bo->pin_count) in amdgpu_cs_try_evict()
489 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_cs_try_evict()
517 if (bo->shadow) in amdgpu_cs_validate()
546 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_cs_list_validate()
857 bo = e->robj; in amdgpu_bo_vm_update_pte()
858 if (!bo) in amdgpu_bo_vm_update_pte()
1660 *bo = mapping->bo_va->base.bo; in amdgpu_cs_find_mapping()
1669 amdgpu_bo_placement_from_domain(*bo, (*bo)->allowed_domains); in amdgpu_cs_find_mapping()
[all …]
amdgpu_amdkfd.c
282 struct amdgpu_bo *bo = NULL; in alloc_gtt_mem() local
331 *mem_obj = bo; in alloc_gtt_mem()
335 amdgpu_bo_unreserve(bo); in alloc_gtt_mem()
340 amdgpu_bo_unpin(bo); in alloc_gtt_mem()
342 amdgpu_bo_unreserve(bo); in alloc_gtt_mem()
344 amdgpu_bo_unref(&bo); in alloc_gtt_mem()
353 amdgpu_bo_reserve(bo, true); in free_gtt_mem()
354 amdgpu_bo_kunmap(bo); in free_gtt_mem()
355 amdgpu_bo_unpin(bo); in free_gtt_mem()
356 amdgpu_bo_unreserve(bo); in free_gtt_mem()
[all …]
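
alloc_gtt_mem() and free_gtt_mem() above show both halves of the pinned-GTT lifecycle, including the unwind order on the allocation error path (unpin, unreserve, unref). The teardown must run under the reservation; a sketch mirroring the free_gtt_mem() sequence visible in the hits:

/* Sketch: tear down a pinned, kmapped GTT BO in the order used by
 * free_gtt_mem() above: reserve -> kunmap -> unpin -> unreserve -> unref.
 */
static void example_free_gtt_bo(struct amdgpu_bo *bo)
{
	/* 'true' = uninterruptible reserve; teardown should not fail here. */
	amdgpu_bo_reserve(bo, true);
	amdgpu_bo_kunmap(bo);		/* undo the CPU mapping */
	amdgpu_bo_unpin(bo);		/* allow the BO to migrate again */
	amdgpu_bo_unreserve(bo);
	amdgpu_bo_unref(&bo);		/* drop the reference, NULL the pointer */
}
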
amdgpu_uvd.c
479 struct amdgpu_bo *bo; in amdgpu_uvd_cs_pass1() local
500 r = ttm_bo_validate(&bo->tbo, &bo->placement, &tctx); in amdgpu_uvd_cs_pass1()
741 amdgpu_bo_kunmap(bo); in amdgpu_uvd_cs_msg()
763 amdgpu_bo_kunmap(bo); in amdgpu_uvd_cs_msg()
785 amdgpu_bo_kunmap(bo); in amdgpu_uvd_cs_msg()
806 struct amdgpu_bo *bo; in amdgpu_uvd_cs_pass2() local
1031 amdgpu_bo_kunmap(bo); in amdgpu_uvd_send_msg()
1032 amdgpu_bo_unpin(bo); in amdgpu_uvd_send_msg()
1039 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_uvd_send_msg()
1099 amdgpu_bo_unref(&bo); in amdgpu_uvd_send_msg()
[all …]
/dragonfly/sys/dev/drm/include/drm/ttm/
ttm_bo_driver.h
108 struct ttm_buffer_object *bo,
633 success = ww_mutex_trylock(&bo->resv->lock); in __ttm_bo_reserve()
640 ret = ww_mutex_lock(&bo->resv->lock, ticket); in __ttm_bo_reserve()
696 WARN_ON(!kref_read(&bo->kref)); in ttm_bo_reserve()
700 ttm_bo_del_sub_from_lru(bo); in ttm_bo_reserve()
721 WARN_ON(!kref_read(&bo->kref)); in ttm_bo_reserve_slowpath()
727 ww_mutex_lock_slow(&bo->resv->lock, ticket); in ttm_bo_reserve_slowpath()
730 ttm_bo_del_sub_from_lru(bo); in ttm_bo_reserve_slowpath()
746 ww_mutex_unlock(&bo->resv->lock); in __ttm_bo_unreserve()
760 ttm_bo_add_to_lru(bo); in ttm_bo_unreserve()
[all …]
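
ttm_bo_driver.h shows the reservation primitives built on ww_mutex: ttm_bo_reserve() can fail with -EDEADLK when lock ordering would deadlock, and ttm_bo_reserve_slowpath() is the recovery path that sleeps on the contended lock. A sketch of the classic backoff pattern; the four-argument reserve matches the call visible in radeon_object.h above, and the slowpath shape is inferred from the inline body in the hits.

/* Sketch: reserve two BOs deadlock-free under one ww_acquire_ctx.
 * ttm_bo_reserve(bo, interruptible, no_wait, ticket) as seen above;
 * ttm_bo_reserve_slowpath(bo, interruptible, ticket) is assumed.
 */
static int example_reserve_pair(struct ttm_buffer_object *a,
				struct ttm_buffer_object *b,
				struct ww_acquire_ctx *ticket)
{
	int r;

	r = ttm_bo_reserve(a, true, false, ticket);
	if (r)
		return r;

	r = ttm_bo_reserve(b, true, false, ticket);
	if (r == -EDEADLK) {
		/* Back off: drop 'a', sleep on 'b' in the slowpath,
		 * then retry 'a' (retry loop elided for brevity). */
		ttm_bo_unreserve(a);
		r = ttm_bo_reserve_slowpath(b, true, ticket);
	}
	return r;
}
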
ttm_bo_api.h
261 struct ttm_buffer_object *bo; member
295 kref_get(&bo->kref); in ttm_bo_get()
309 ttm_bo_reference(struct ttm_buffer_object *bo) in ttm_bo_reference() argument
311 ttm_bo_get(bo); in ttm_bo_reference()
312 return bo; in ttm_bo_reference()
358 int ttm_bo_validate(struct ttm_buffer_object *bo,
369 void ttm_bo_put(struct ttm_buffer_object *bo);
380 void ttm_bo_unref(struct ttm_buffer_object **bo);
392 void ttm_bo_add_to_lru(struct ttm_buffer_object *bo);
404 void ttm_bo_del_from_lru(struct ttm_buffer_object *bo);
[all …]
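
ttm_bo_api.h pairs ttm_bo_get()/ttm_bo_put() as the refcounting interface, keeping ttm_bo_reference()/ttm_bo_unref() as the older spellings (the inline above shows ttm_bo_get() is a kref_get() on bo->kref). A minimal sketch of holding a temporary reference across an operation that might otherwise free the object:

/* Sketch: take a temporary reference on a BO while working on it.
 * ttm_bo_put() drops the kref and frees the BO on the last reference.
 */
static void example_use_bo(struct ttm_buffer_object *bo)
{
	ttm_bo_get(bo);		/* pin the object's lifetime */

	/* ... work that may sleep or re-lock, but cannot free 'bo' ... */

	ttm_bo_put(bo);		/* may be the final unref */
}
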
/dragonfly/sys/vfs/msdosfs/
msdosfs_fat.c
125 u_long bo; in pcbmap() local
203 if (bo >= bsize) { in pcbmap()
209 cn = getulong(bp->b_data + bo); in pcbmap()
211 cn = getushort(bp->b_data + bo); in pcbmap()
591 bo++; in fatchain()
593 bo++; in fatchain()
597 bo += 2; in fatchain()
604 bo += 4; in fatchain()
607 if (bo >= bsize) in fatchain()
864 putulong(bp->b_data + bo, in freeclusterchain()
[all …]
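
In the msdosfs hits, bo is the byte offset of a FAT entry inside the currently buffered FAT block: pcbmap() reads the next cluster with getulong()/getushort() at b_data + bo, and fatchain() steps bo by the entry width (2 bytes for FAT16, 4 for FAT32, two 1-byte steps for the packed 1.5-byte FAT12 entries), refetching a block whenever bo >= bsize. A sketch of the offset arithmetic for the 16/32-bit cases; the helper name and parameters are illustrative, not the driver's (the real code derives them from the mount's FAT mask and block size):

/* Sketch (hypothetical helper): locate the FAT entry for cluster 'cn'
 * on FAT16/FAT32, yielding the FAT block index and the in-block offset
 * 'bo' used with getulong()/getushort() above.
 */
u_long fat_entry_offset(u_long cn, int fat32, u_long bsize, u_long *blkno)
{
	u_long byteoff = fat32 ? cn * 4 : cn * 2;	/* entry width */

	*blkno = byteoff / bsize;	/* which FAT block to bread() */
	return byteoff % bsize;		/* 'bo' within bp->b_data */
}

/* Then, as in pcbmap():  cn = fat32 ? getulong(bp->b_data + bo)
 *                                   : getushort(bp->b_data + bo);  */
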
/dragonfly/usr.sbin/makefs/msdos/
msdosfs_fat.c
141 u_long bo; in pcbmap() local
215 if (bo >= bsize) { in pcbmap()
221 cn = getulong(bp->b_data + bo); in pcbmap()
223 cn = getushort(bp->b_data + bo); in pcbmap()
594 bo++; in fatchain()
596 bo++; in fatchain()
600 bo += 2; in fatchain()
607 bo += 4; in fatchain()
610 if (bo >= bsize) in fatchain()
850 putulong(bp->b_data + bo, in freeclusterchain()
[all …]