
Searched refs:ttm (Results 1 – 25 of 29) sorted by relevance


/dragonfly/sys/dev/drm/ttm/
ttm_tt.c
101 ttm->ttm.pages = kvmalloc_array(ttm->ttm.num_pages, in ttm_dma_tt_alloc_page_directory()
102 sizeof(*ttm->ttm.pages) + in ttm_dma_tt_alloc_page_directory()
105 if (!ttm->ttm.pages) in ttm_dma_tt_alloc_page_directory()
107 ttm->dma_address = (void *) (ttm->ttm.pages + ttm->ttm.num_pages); in ttm_dma_tt_alloc_page_directory()
113 ttm->dma_address = kvmalloc_array(ttm->ttm.num_pages, in ttm_sg_tt_alloc_page_directory()
230 ttm->func->destroy(ttm); in ttm_tt_destroy()
269 struct ttm_tt *ttm = &ttm_dma->ttm; in ttm_dma_tt_init() local
285 struct ttm_tt *ttm = &ttm_dma->ttm; in ttm_sg_tt_init() local
305 struct ttm_tt *ttm = &ttm_dma->ttm; in ttm_dma_tt_fini() local
321 ret = ttm->func->unbind(ttm); in ttm_tt_unbind()
[all …]
ttm_agp_backend.c
45 struct ttm_tt ttm; member
52 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_bind()
65 struct page *page = ttm->pages[i]; in ttm_agp_bind()
86 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_unbind()
99 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_destroy()
102 ttm_agp_unbind(ttm); in ttm_agp_destroy()
103 ttm_tt_fini(ttm); in ttm_agp_destroy()
125 agp_be->ttm.func = &ttm_agp_func; in ttm_agp_tt_create()
132 return &agp_be->ttm; in ttm_agp_tt_create()
138 if (ttm->state != tt_unpopulated) in ttm_agp_tt_populate()
[all …]
ttm_page_alloc.c
878 if (!ttm->pages[i]) in ttm_pool_unpopulate_helper()
885 ttm_put_pages(ttm->pages, ttm->num_pages, ttm->page_flags, in ttm_pool_unpopulate_helper()
886 ttm->caching_state); in ttm_pool_unpopulate_helper()
887 ttm->state = tt_unpopulated; in ttm_pool_unpopulate_helper()
902 ret = ttm_get_pages(ttm->pages, ttm->num_pages, ttm->page_flags, in ttm_pool_populate()
903 ttm->caching_state); in ttm_pool_populate()
919 ret = ttm_tt_swapin(ttm); in ttm_pool_populate()
921 ttm_pool_unpopulate(ttm); in ttm_pool_populate()
926 ttm->state = tt_unbound; in ttm_pool_populate()
933 ttm_pool_unpopulate_helper(ttm, ttm->num_pages); in ttm_pool_unpopulate()
[all …]
ttm_bo_util.c
56 struct ttm_tt *ttm = bo->ttm; in ttm_bo_move_ttm() local
69 ttm_tt_unbind(ttm); in ttm_bo_move_ttm()
342 struct ttm_tt *ttm = bo->ttm; in ttm_bo_move_memcpy() local
374 (ttm == NULL || (ttm->state == tt_unpopulated && in ttm_bo_move_memcpy()
383 if (ttm) { in ttm_bo_move_memcpy()
424 bo->ttm = NULL; in ttm_bo_move_memcpy()
567 struct ttm_tt *ttm = bo->ttm; in ttm_bo_kmap_ttm() local
571 BUG_ON(!ttm); in ttm_bo_kmap_ttm()
683 bo->ttm = NULL; in ttm_bo_move_accel_cleanup()
713 bo->ttm = NULL; in ttm_bo_move_accel_cleanup()
[all …]
ttm_page_alloc_dma.c
841 struct ttm_tt *ttm = &ttm_dma->ttm; in ttm_dma_pool_get_pages() local
861 struct ttm_tt *ttm = &ttm_dma->ttm; in ttm_dma_pool_gfp_flags() local
891 struct ttm_tt *ttm = &ttm_dma->ttm; in ttm_dma_populate() local
909 type = ttm_to_type(ttm->page_flags, ttm->caching_state); in ttm_dma_populate()
940 ttm->pages[j] = ttm->pages[j - 1] + 1; in ttm_dma_populate()
981 ret = ttm_tt_swapin(ttm); in ttm_dma_populate()
988 ttm->state = tt_unbound; in ttm_dma_populate()
996 struct ttm_tt *ttm = &ttm_dma->ttm; in ttm_dma_unpopulate() local
1005 type = ttm_to_type(ttm->page_flags, ttm->caching_state); in ttm_dma_unpopulate()
1075 ttm->pages[i] = NULL; in ttm_dma_unpopulate()
[all …]
ttm_bo_vm.c
122 struct ttm_tt *ttm = NULL; in ttm_bo_vm_fault()
166 if (bo->ttm && (bo->ttm->page_flags & TTM_PAGE_FLAG_SG)) { in ttm_bo_vm_fault()
244 ttm = bo->ttm; in ttm_bo_vm_fault()
249 if (ttm_tt_populate(ttm, &ctx)) { in ttm_bo_vm_fault()
263 page = ttm->pages[page_offset]; in ttm_bo_vm_fault()
384 if (unlikely(bo->ttm->page_flags & TTM_PAGE_FLAG_SWAPPED)) { in ttm_bo_vm_access()
385 ret = ttm_tt_swapin(bo->ttm); in ttm_bo_vm_access()
511 struct ttm_tt *ttm = NULL; in ttm_bo_vm_fault_dfly() local
680 ttm = bo->ttm; in ttm_bo_vm_fault_dfly()
691 if (ttm_tt_populate(ttm, &ctx)) { in ttm_bo_vm_fault_dfly()
[all …]
ttm_bo.c
155 ttm_tt_destroy(bo->ttm); in ttm_bo_release_list()
182 if (bo->ttm && !(bo->ttm->page_flags & in ttm_bo_add_to_lru()
258 if (bo->ttm == NULL) { in ttm_bo_handle_move_mem()
265 ret = ttm_tt_set_placement_caching(bo->ttm, mem->placement); in ttm_bo_handle_move_mem()
270 ret = ttm_tt_bind(bo->ttm, mem, ctx); in ttm_bo_handle_move_mem()
327 ttm_tt_destroy(bo->ttm); in ttm_bo_handle_move_mem()
328 bo->ttm = NULL; in ttm_bo_handle_move_mem()
347 ttm_tt_destroy(bo->ttm); in ttm_bo_cleanup_memtype_use()
348 bo->ttm = NULL; in ttm_bo_cleanup_memtype_use()
1110 if (bo->mem.mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) { in ttm_bo_validate()
[all …]
/dragonfly/sys/dev/drm/radeon/
radeon_ttm.c
583 r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0,
594 drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages,
595 gtt->ttm.dma_address, ttm->num_pages);
655 ttm->num_pages, bo_mem, ttm); in radeon_ttm_backend_bind()
660 ttm->pages, gtt->ttm.dma_address, flags); in radeon_ttm_backend_bind()
714 gtt->ttm.ttm.func = &radeon_backend_func; in radeon_ttm_tt_create()
720 return &gtt->ttm.ttm; in radeon_ttm_tt_create()
725 if (!ttm || ttm->func != &radeon_backend_func) in radeon_ttm_tt_to_gtt()
747 if (!ttm->sg) in radeon_ttm_tt_populate()
762 drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages, in radeon_ttm_tt_populate()
[all …]
radeon_prime.c
50 return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages); in radeon_gem_prime_get_sg_table()
143 if (radeon_ttm_tt_has_userptr(bo->tbo.ttm)) in radeon_gem_prime_export()
radeon_mn.c
145 if (!bo->tbo.ttm || bo->tbo.ttm->state != tt_bound) in radeon_mn_invalidate_range_start()
radeon_gem.c
333 r = radeon_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags);
424 if (radeon_ttm_tt_has_userptr(robj->tbo.ttm)) { in radeon_mode_dumb_mmap()
729 if (radeon_ttm_tt_has_userptr(robj->tbo.ttm)) in radeon_gem_op_ioctl()
radeon_cs.c
157 if (radeon_ttm_tt_has_userptr(p->relocs[i].robj->tbo.ttm)) { in radeon_cs_parser_relocs()
/dragonfly/sys/dev/drm/include/drm/ttm/
ttm_tt.h
63 int (*bind) (struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem);
73 int (*unbind) (struct ttm_tt *ttm);
83 void (*destroy) (struct ttm_tt *ttm);
133 struct ttm_tt ttm; member
175 void ttm_tt_fini(struct ttm_tt *ttm);
186 int ttm_tt_bind(struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem,
196 void ttm_tt_destroy(struct ttm_tt *ttm);
205 void ttm_tt_unbind(struct ttm_tt *ttm);
214 int ttm_tt_swapin(struct ttm_tt *ttm);
248 void ttm_tt_unpopulate(struct ttm_tt *ttm);
[all …]
ttm_page_alloc.h
50 int ttm_pool_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx);
59 void ttm_pool_unpopulate(struct ttm_tt *ttm);
ttm_bo_driver.h
248 int (*ttm_tt_populate)(struct ttm_tt *ttm,
258 void (*ttm_tt_unpopulate)(struct ttm_tt *ttm);
ttm_bo_api.h
194 struct ttm_tt *ttm; member
/dragonfly/sys/dev/drm/amd/amdgpu/
amdgpu_ttm.c
947 r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0, in amdgpu_ttm_tt_pin_userptr()
961 gtt->ttm.dma_address, ttm->num_pages); in amdgpu_ttm_tt_pin_userptr()
984 if (!ttm->sg || !ttm->sg->sgl) in amdgpu_ttm_tt_unpin_userptr()
1004 struct ttm_tt *ttm = tbo->ttm; in amdgpu_ttm_gart_bind() local
1012 ttm->pages, gtt->ttm.dma_address, flags); in amdgpu_ttm_gart_bind()
1061 ttm->num_pages, bo_mem, ttm); in amdgpu_ttm_backend_bind()
1080 ttm->pages, gtt->ttm.dma_address, flags); in amdgpu_ttm_backend_bind()
1185 gtt->ttm.ttm.num_pages, gtt->offset); in amdgpu_ttm_backend_unbind()
1225 gtt->ttm.ttm.func = &amdgpu_backend_func; in amdgpu_ttm_tt_create()
1232 return &gtt->ttm.ttm; in amdgpu_ttm_tt_create()
[all …]
amdgpu_ttm.h
106 int amdgpu_ttm_tt_get_user_pages(struct ttm_tt *ttm, struct page **pages);
107 void amdgpu_ttm_tt_set_user_pages(struct ttm_tt *ttm, struct page **pages);
108 void amdgpu_ttm_tt_mark_user_pages(struct ttm_tt *ttm);
109 int amdgpu_ttm_tt_set_userptr(struct ttm_tt *ttm, uint64_t addr,
111 bool amdgpu_ttm_tt_has_userptr(struct ttm_tt *ttm);
112 struct mm_struct *amdgpu_ttm_tt_get_usermm(struct ttm_tt *ttm);
113 bool amdgpu_ttm_tt_affect_userptr(struct ttm_tt *ttm, unsigned long start,
115 bool amdgpu_ttm_tt_userptr_invalidated(struct ttm_tt *ttm,
117 bool amdgpu_ttm_tt_userptr_needs_pages(struct ttm_tt *ttm);
118 bool amdgpu_ttm_tt_is_readonly(struct ttm_tt *ttm);
[all …]
amdgpu_prime.c
57 return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages); in amdgpu_gem_prime_get_sg_table()
127 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) || in amdgpu_gem_prime_mmap()
182 bo->tbo.ttm->sg = sg; in amdgpu_gem_prime_import_sg_table()
418 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) || in amdgpu_gem_prime_export()
amdgpu_gem.c
131 mm = amdgpu_ttm_tt_get_usermm(abo->tbo.ttm); in amdgpu_gem_object_open()
330 r = amdgpu_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags); in amdgpu_gem_userptr_ioctl()
341 r = amdgpu_ttm_tt_get_user_pages(bo->tbo.ttm, in amdgpu_gem_userptr_ioctl()
342 bo->tbo.ttm->pages); in amdgpu_gem_userptr_ioctl()
367 release_pages(bo->tbo.ttm->pages, bo->tbo.ttm->num_pages); in amdgpu_gem_userptr_ioctl()
388 if (amdgpu_ttm_tt_get_usermm(robj->tbo.ttm) || in amdgpu_mode_dumb_mmap()
738 if (amdgpu_ttm_tt_get_usermm(robj->tbo.ttm)) { in amdgpu_gem_op_ioctl()
amdgpu_cs.c
62 if (amdgpu_ttm_tt_get_usermm(p->uf_entry.robj->tbo.ttm)) { in amdgpu_cs_user_fence_chunk()
535 usermm = amdgpu_ttm_tt_get_usermm(bo->tbo.ttm); in amdgpu_cs_list_validate()
542 if (amdgpu_ttm_tt_userptr_needs_pages(bo->tbo.ttm) && in amdgpu_cs_list_validate()
549 amdgpu_ttm_tt_set_user_pages(bo->tbo.ttm, in amdgpu_cs_list_validate()
626 if (amdgpu_ttm_tt_userptr_invalidated(bo->tbo.ttm, in amdgpu_cs_parser_bos()
633 bo->tbo.ttm->num_pages); in amdgpu_cs_parser_bos()
638 if (amdgpu_ttm_tt_userptr_needs_pages(bo->tbo.ttm) && in amdgpu_cs_parser_bos()
662 struct ttm_tt *ttm = e->robj->tbo.ttm; in amdgpu_cs_parser_bos() local
664 e->user_pages = kvmalloc_array(ttm->num_pages, in amdgpu_cs_parser_bos()
754 e->robj->tbo.ttm->num_pages); in amdgpu_cs_parser_bos()
[all …]
amdgpu_mn.c
225 if (!amdgpu_ttm_tt_affect_userptr(bo->tbo.ttm, start, end)) in amdgpu_mn_invalidate_node()
233 amdgpu_ttm_tt_mark_user_pages(bo->tbo.ttm); in amdgpu_mn_invalidate_node()
327 if (amdgpu_ttm_tt_affect_userptr(bo->tbo.ttm, in amdgpu_mn_invalidate_range_start_hsa()
amdgpu_vm.c
1731 struct ttm_dma_tt *ttm; in amdgpu_vm_bo_update() local
1736 ttm = container_of(bo->tbo.ttm, struct ttm_dma_tt, ttm); in amdgpu_vm_bo_update()
1737 pages_addr = ttm->dma_address; in amdgpu_vm_bo_update()
1743 flags = amdgpu_ttm_tt_pte_flags(adev, bo->tbo.ttm, mem); in amdgpu_vm_bo_update()
amdgpu_bo_list.c
103 usermm = amdgpu_ttm_tt_get_usermm(bo->tbo.ttm); in amdgpu_bo_list_create()
/dragonfly/sys/dev/drm/drm/
Makefile
1 .PATH: ${.CURDIR}/.. ${.CURDIR}/../ttm
