
Searched refs:vm (Results 1 – 25 of 105) sorted by relevance


/dragonfly/sys/dev/drm/radeon/
radeon_vm.c
299 if (bo_va->vm == vm) { in radeon_vm_bo_find()
329 bo_va->vm = vm; in radeon_vm_bo_add()
338 mutex_lock(&vm->mutex); in radeon_vm_bo_add()
452 struct radeon_vm *vm = bo_va->vm; in radeon_vm_bo_set_addr() local
477 mutex_lock(&vm->mutex); in radeon_vm_bo_set_addr()
507 tmp->vm = vm; in radeon_vm_bo_set_addr()
916 struct radeon_vm *vm = bo_va->vm; in radeon_vm_bo_update() local
925 bo_va->bo, vm); in radeon_vm_bo_update()
1122 struct radeon_vm *vm = bo_va->vm; in radeon_vm_bo_rmv() local
1182 vm->ib_bo_va = NULL; in radeon_vm_init()
[all …]
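
The radeon_vm.c hits above cluster around per-VM buffer-object bindings (struct radeon_bo_va). A minimal hedged sketch of the find-or-add pattern they imply, assuming the call shapes shown in the hits (the return types and the rdev parameter are inferred, not confirmed by this listing):

    /* Hedged sketch (not code from the tree): find-or-add a bo_va for
     * (vm, bo). radeon_vm_bo_find() matches on bo_va->vm == vm (hit at
     * line 299); radeon_vm_bo_add() links a new bo_va under vm->mutex
     * (hits at lines 329 and 338). */
    static struct radeon_bo_va *
    find_or_add_bo_va(struct radeon_device *rdev, struct radeon_vm *vm,
                      struct radeon_bo *bo)
    {
            struct radeon_bo_va *bo_va;

            bo_va = radeon_vm_bo_find(vm, bo);
            if (bo_va == NULL)
                    bo_va = radeon_vm_bo_add(rdev, vm, bo);
            return bo_va;
    }
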
radeon_ib.c
56 struct radeon_ib *ib, struct radeon_vm *vm, in radeon_ib_get() argument
72 ib->vm = vm; in radeon_ib_get()
73 if (vm) { in radeon_ib_get()
142 if (ib->vm) { in radeon_ib_schedule()
144 vm_id_fence = radeon_vm_grab_id(rdev, ib->vm, ib->ring); in radeon_ib_schedule()
156 if (ib->vm) in radeon_ib_schedule()
157 radeon_vm_flush(rdev, ib->vm, ib->ring, in radeon_ib_schedule()
175 if (ib->vm) in radeon_ib_schedule()
176 radeon_vm_fence(rdev, ib->vm, ib->fence); in radeon_ib_schedule()
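
The radeon_ib.c hits show the ordering radeon_ib_schedule() applies to VM-backed indirect buffers. A hedged sketch of that flow, assembled from the hits; the final radeon_vm_flush() argument is truncated in the hit and assumed here:

    if (ib->vm) {
            /* reserve a hardware VMID for this ring (line 144) */
            struct radeon_fence *vm_id_fence =
                    radeon_vm_grab_id(rdev, ib->vm, ib->ring);
            /* flush the VM's page tables before the IB runs (line 157);
             * the fence argument is an assumption */
            radeon_vm_flush(rdev, ib->vm, ib->ring, vm_id_fence);
    }
    /* ... the IB itself is emitted to the ring here ... */
    if (ib->vm)
            /* tie the VMID to the IB's fence so it can be recycled
             * once the work completes (line 176) */
            radeon_vm_fence(rdev, ib->vm, ib->fence);
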
radeon_cs.c
493 struct radeon_vm *vm) in radeon_bo_vm_update_pte() argument
503 r = radeon_vm_clear_freed(rdev, vm); in radeon_bo_vm_update_pte()
507 if (vm->ib_bo_va == NULL) { in radeon_bo_vm_update_pte()
521 bo_va = radeon_vm_bo_find(vm, bo); in radeon_bo_vm_update_pte()
541 struct radeon_vm *vm = &fpriv->vm; in radeon_cs_ib_vm_chunk() local
564 mutex_lock(&vm->mutex); in radeon_cs_ib_vm_chunk()
585 mutex_unlock(&vm->mutex); in radeon_cs_ib_vm_chunk()
602 struct radeon_vm *vm = NULL; in radeon_cs_ib_fill() local
610 vm = &fpriv->vm; in radeon_cs_ib_fill()
620 vm, ib_chunk->length_dw * 4); in radeon_cs_ib_fill()
[all …]
/dragonfly/sys/dev/drm/amd/amdgpu/
amdgpu_vm.c
215 base->vm = vm; in amdgpu_vm_bo_base_init()
877 if (bo_va->base.vm == vm) { in amdgpu_vm_bo_find()
1213 amdgpu_vm_invalidate_level(adev, vm, &vm->root, in amdgpu_vm_update_directories()
1459 params.vm = vm; in amdgpu_vm_bo_update_mapping()
1717 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_update() local
2102 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_insert_map() local
2147 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_map() local
2269 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_unmap() local
2472 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_rmv() local
2517 struct amdgpu_vm *vm = bo_base->vm; in amdgpu_vm_bo_invalidate() local
[all …]
amdgpu_vm.h
142 struct amdgpu_vm *vm; member
277 void amdgpu_vm_get_pd_bo(struct amdgpu_vm *vm,
280 bool amdgpu_vm_ready(struct amdgpu_vm *vm);
285 struct amdgpu_vm *vm,
289 struct amdgpu_vm *vm);
291 struct amdgpu_vm *vm,
294 struct amdgpu_vm *vm);
300 struct amdgpu_bo_va *amdgpu_vm_bo_find(struct amdgpu_vm *vm,
303 struct amdgpu_vm *vm,
317 struct amdgpu_vm *vm,
[all …]
amdgpu_ids.c
283 *id = vm->reserved_vmid[vmhub]; in amdgpu_vmid_grab_reserved()
422 if (vm->reserved_vmid[vmhub]) { in amdgpu_vmid_grab()
452 id->owner = vm->entity.fence_context; in amdgpu_vmid_grab()
459 job->pasid = vm->pasid; in amdgpu_vmid_grab()
460 trace_amdgpu_vm_grab_id(vm, ring, job); in amdgpu_vmid_grab()
468 struct amdgpu_vm *vm, in amdgpu_vmid_alloc_reserved() argument
477 if (vm->reserved_vmid[vmhub]) in amdgpu_vmid_alloc_reserved()
489 vm->reserved_vmid[vmhub] = idle; in amdgpu_vmid_alloc_reserved()
499 struct amdgpu_vm *vm, in amdgpu_vmid_free_reserved() argument
505 if (vm->reserved_vmid[vmhub]) { in amdgpu_vmid_free_reserved()
[all …]
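
The amdgpu_ids.c hits pair reserved-VMID allocation with its release, keyed by (vm, vmhub). A hedged usage sketch; the adev parameter and the int return are assumptions beyond what the hits show:

    int r;

    /* reserve a dedicated VMID on one hub for this VM */
    r = amdgpu_vmid_alloc_reserved(adev, vm, vmhub);
    if (r)
            return r;
    /* later submissions reuse vm->reserved_vmid[vmhub] (line 422);
     * the reservation is released symmetrically when done */
    amdgpu_vmid_free_reserved(adev, vm, vmhub);
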
amdgpu_gem.c
126 struct amdgpu_vm *vm = &fpriv->vm; in amdgpu_gem_object_open() local
143 bo_va = amdgpu_vm_bo_find(vm, abo); in amdgpu_gem_object_open()
159 struct amdgpu_vm *vm = &fpriv->vm; in amdgpu_gem_object_close() local
175 amdgpu_vm_get_pd_bo(vm, &list, &vm_pd); in amdgpu_gem_object_close()
183 bo_va = amdgpu_vm_bo_find(vm, bo); in amdgpu_gem_object_close()
187 if (amdgpu_vm_ready(vm)) { in amdgpu_gem_object_close()
213 struct amdgpu_vm *vm = &fpriv->vm; in amdgpu_gem_create_ioctl() local
263 resv = vm->root.base.bo->tbo.resv; in amdgpu_gem_create_ioctl()
275 amdgpu_bo_unreserve(vm->root.base.bo); in amdgpu_gem_create_ioctl()
523 struct amdgpu_vm *vm, in amdgpu_gem_va_update_vm() argument
[all …]
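
The amdgpu_gem.c hits show the per-open-file VM being used for bo_va lookups plus a readiness check before touching mappings. A hedged sketch of that combination; fpriv and bo come from the surrounding ioctl context, and the reservation locking the hits imply is omitted:

    struct amdgpu_vm *vm = &fpriv->vm;
    struct amdgpu_bo_va *bo_va;

    bo_va = amdgpu_vm_bo_find(vm, bo);      /* line 183 */
    if (bo_va && amdgpu_vm_ready(vm)) {     /* line 187 */
            /* safe to clear or update this bo_va's mappings */
    }
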
amdgpu_amdkfd.h
165 int amdgpu_amdkfd_gpuvm_create_process_vm(struct kgd_dev *kgd, void **vm,
170 void **vm, void **process_info,
173 struct amdgpu_vm *vm);
174 void amdgpu_amdkfd_gpuvm_destroy_process_vm(struct kgd_dev *kgd, void *vm);
175 uint32_t amdgpu_amdkfd_gpuvm_get_process_page_dir(void *vm);
178 void *vm, struct kgd_mem **mem,
183 struct kgd_dev *kgd, struct kgd_mem *mem, void *vm);
185 struct kgd_dev *kgd, struct kgd_mem *mem, void *vm);
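
The amdgpu_amdkfd.h hits declare the KFD process-VM lifecycle. A hedged sketch of its use; the full create parameter list is inferred from the matching kgd_kfd_interface.h entry later in these results:

    void *vm, *process_info;
    struct dma_fence *ef;
    uint32_t pd;

    if (amdgpu_amdkfd_gpuvm_create_process_vm(kgd, &vm, &process_info,
                                              &ef) == 0) {
            /* page-directory address for the process (line 175) */
            pd = amdgpu_amdkfd_gpuvm_get_process_page_dir(vm);
            /* ... allocate and map kgd_mem buffers against vm ... */
            amdgpu_amdkfd_gpuvm_destroy_process_vm(kgd, vm);  /* line 174 */
    }
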
amdgpu_ib.c
60 int amdgpu_ib_get(struct amdgpu_device *adev, struct amdgpu_vm *vm, in amdgpu_ib_get() argument
75 if (!vm) in amdgpu_ib_get()
127 struct amdgpu_vm *vm; in amdgpu_ib_schedule() local
141 vm = job->vm; in amdgpu_ib_schedule()
145 vm = NULL; in amdgpu_ib_schedule()
154 if (vm && !job->vmid) { in amdgpu_ib_schedule()
258 if (vm && ring->funcs->emit_switch_buffer) in amdgpu_ib_schedule()
amdgpu_job.c
44 struct amdgpu_job **job, struct amdgpu_vm *vm) in amdgpu_job_alloc() argument
62 (*job)->vm = vm; in amdgpu_job_alloc()
173 struct amdgpu_vm *vm = job->vm; in amdgpu_job_dependency() local
188 while (fence == NULL && vm && !job->vmid) { in amdgpu_job_dependency()
189 r = amdgpu_vmid_grab(vm, ring, &job->sync, in amdgpu_job_dependency()
/dragonfly/sys/dev/drm/i915/
i915_gem_gtt.c
1199 vm->scratch_pt = alloc_pt(vm); in gen8_init_scratch()
1205 vm->scratch_pd = alloc_pd(vm); in gen8_init_scratch()
1212 vm->scratch_pdp = alloc_pdp(vm); in gen8_init_scratch()
1227 free_pd(vm, vm->scratch_pd); in gen8_init_scratch()
1229 free_pt(vm, vm->scratch_pt); in gen8_init_scratch()
1271 free_pdp(vm, vm->scratch_pdp); in gen8_free_scratch()
1272 free_pd(vm, vm->scratch_pd); in gen8_free_scratch()
1273 free_pt(vm, vm->scratch_pt); in gen8_free_scratch()
1938 vm->scratch_pt = alloc_pt(vm); in gen6_init_scratch()
1951 free_pt(vm, vm->scratch_pt); in gen6_free_scratch()
[all …]
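
The i915_gem_gtt.c hits show gen8's scratch page-table setup and its mirror-image teardown. A hedged consolidation of the hits; the error checks between the allocations are elided:

    /* setup (gen8_init_scratch(), lines 1199-1212) */
    vm->scratch_pt  = alloc_pt(vm);
    vm->scratch_pd  = alloc_pd(vm);
    vm->scratch_pdp = alloc_pdp(vm);

    /* teardown runs in reverse order, both on the error path
     * (lines 1227-1229) and in gen8_free_scratch() (lines 1271-1273) */
    free_pdp(vm, vm->scratch_pdp);
    free_pd(vm, vm->scratch_pd);
    free_pt(vm, vm->scratch_pt);
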
i915_gem_evict.c
125 struct drm_i915_private *dev_priv = vm->i915; in i915_gem_evict_something()
129 &vm->inactive_list, in i915_gem_evict_something()
130 &vm->active_list, in i915_gem_evict_something()
160 drm_mm_scan_init_with_range(&scan, &vm->mm, in i915_gem_evict_something()
299 i915_gem_retire_requests(vm->i915); in i915_gem_evict_for_node()
301 check_color = vm->mm.color_adjust; in i915_gem_evict_for_node()
395 &vm->inactive_list, in i915_gem_evict_vm()
396 &vm->active_list, in i915_gem_evict_vm()
404 trace_i915_gem_evict_vm(vm); in i915_gem_evict_vm()
411 if (i915_is_ggtt(vm)) { in i915_gem_evict_vm()
[all …]
i915_vma.c
83 struct i915_address_space *vm, in vma_create() argument
91 GEM_BUG_ON(vm == &vm->i915->mm.aliasing_ppgtt->base); in vma_create()
100 vma->vm = vm; in vma_create()
127 if (i915_is_ggtt(vm)) { in vma_create()
224 GEM_BUG_ON(vm->closed); in i915_vma_instance()
258 vma->vm->total))) in i915_vma_bind()
487 end = vma->vm->total; in i915_vma_insert()
512 ret = vma->vm->set_pages(vma); in i915_vma_insert()
587 vma->vm->clear_pages(vma); in i915_vma_insert()
602 vma->vm->clear_pages(vma); in i915_vma_remove()
[all …]
i915_gem_gtt.h
323 void (*clear_range)(struct i915_address_space *vm,
325 void (*insert_page)(struct i915_address_space *vm,
330 void (*insert_entries)(struct i915_address_space *vm,
334 void (*cleanup)(struct i915_address_space *vm);
351 i915_vm_is_48bit(const struct i915_address_space *vm) in i915_vm_is_48bit() argument
353 return (vm->total - 1) >> 32; in i915_vm_is_48bit()
490 if (i915_vm_is_48bit(vm)) in i915_pdpes_per_pdp()
556 i915_vm_to_ggtt(struct i915_address_space *vm) in i915_vm_to_ggtt() argument
558 GEM_BUG_ON(!i915_is_ggtt(vm)); in i915_vm_to_ggtt()
559 return container_of(vm, struct i915_ggtt, base); in i915_vm_to_ggtt()
[all …]
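
A worked example of the i915_vm_is_48bit() test at line 353 above: the expression is nonzero exactly when the address space is wider than 32 bits.

    /* 48-bit ppGTT:         vm->total == 1ULL << 48
     *   (vm->total - 1) >> 32 == 0xffff  -> true
     * 32-bit (4 GiB) space:  vm->total == 1ULL << 32
     *   (vm->total - 1) >> 32 == 0       -> false */
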
/dragonfly/sys/vm/
vm_unix.c
70 struct vmspace *vm = p->p_vmspace; in sys_sbrk() local
80 lwkt_gettoken(&vm->vm_map.token); in sys_sbrk()
85 base = round_page((vm_offset_t)vm->vm_daddr) + vm->vm_dsize; in sys_sbrk()
129 vm->vm_dsize += incr; in sys_sbrk()
157 if (vm->vm_dsize < incr) in sys_sbrk()
158 vm->vm_dsize = incr; in sys_sbrk()
161 lwkt_reltoken(&vm->vm_map.token); in sys_sbrk()
185 lwkt_gettoken(&vm->vm_map.token); in sys_obreak()
237 vm->vm_dsize += diff; in sys_obreak()
246 vm->vm_dsize -= old - new; in sys_obreak()
[all …]
vm_map.c
244 bzero(vm, sizeof(*vm)); in vmspace_ctor()
308 (char *)&vm->vm_endcopy - (char *)&vm->vm_startcopy); in vmspace_alloc()
322 vmspace_hold(vm); in vmspace_alloc()
324 vm->vm_map.pmap = vmspace_pmap(vm); /* XXX */ in vmspace_alloc()
326 vm->vm_flags = 0; in vmspace_alloc()
328 vmspace_drop(vm); in vmspace_alloc()
330 return (vm); in vmspace_alloc()
438 vmspace_rel(vm); in vmspace_relexit()
493 shmexit(vm); in vmspace_terminate()
514 shmexit(vm); in vmspace_terminate()
[all …]
/dragonfly/lib/libkvm/
kvm_x86_64.c
92 munmap(vm->mmapbase, vm->mmapsize); in _kvm_maphdrs()
93 vm->mmapbase = NULL; in _kvm_maphdrs()
96 vm->mmapsize = sz; in _kvm_maphdrs()
142 if (vm->mmapbase != NULL) in _kvm_freevtop()
143 munmap(vm->mmapbase, vm->mmapsize); in _kvm_freevtop()
144 if (vm->PML4) in _kvm_freevtop()
145 free(vm->PML4); in _kvm_freevtop()
146 free(vm); in _kvm_freevtop()
225 struct vmstate *vm; in _kvm_vatop() local
242 vm = kd->vmst; in _kvm_vatop()
[all …]
kvm_minidump_x86_64.c
128 struct vmstate *vm = kd->vmst; in _kvm_minidump_freevtop() local
130 if (vm->bitmap) in _kvm_minidump_freevtop()
131 free(vm->bitmap); in _kvm_minidump_freevtop()
132 if (vm->ptemap) in _kvm_minidump_freevtop()
133 free(vm->ptemap); in _kvm_minidump_freevtop()
134 free(vm); in _kvm_minidump_freevtop()
290 struct vmstate *vm; in _kvm_minidump_vatop() local
297 vm = kd->vmst; in _kvm_minidump_vatop()
301 if (va >= vm->kernbase) { in _kvm_minidump_vatop()
302 switch (vm->pgtable) { in _kvm_minidump_vatop()
[all …]
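
Both libkvm files above free their private vmstate the same way. A hedged consolidation of the freevtop hits; the field set is taken from the minidump hits (lines 130-134), and the kvm_x86_64.c variant frees vm->PML4 instead:

    #include <stdlib.h>

    static void
    vmstate_free(struct vmstate *vm)
    {
            if (vm->bitmap)
                    free(vm->bitmap);
            if (vm->ptemap)
                    free(vm->ptemap);
            free(vm);
    }
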
/dragonfly/sys/dev/drm/
drm_modes.c
587 dmode->hdisplay = vm->hactive; in drm_display_mode_from_videomode()
592 dmode->vdisplay = vm->vactive; in drm_display_mode_from_videomode()
597 dmode->clock = vm->pixelclock / 1000; in drm_display_mode_from_videomode()
625 struct videomode *vm) in drm_display_mode_to_videomode() argument
627 vm->hactive = dmode->hdisplay; in drm_display_mode_to_videomode()
632 vm->vactive = dmode->vdisplay; in drm_display_mode_to_videomode()
637 vm->pixelclock = dmode->clock * 1000; in drm_display_mode_to_videomode()
639 vm->flags = 0; in drm_display_mode_to_videomode()
673 if (vm->flags & DISPLAY_FLAGS_DE_LOW) in drm_bus_flags_from_videomode()
699 struct videomode vm; in of_get_drm_display_mode() local
[all …]
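
The drm_modes.c hits are the two halves of a videomode conversion: struct videomode keeps pixelclock in Hz while struct drm_display_mode keeps clock in kHz, hence the *1000 / /1000 pair in the hits. A hedged round-trip sketch with the argument order taken from the hits:

    struct videomode vm;

    drm_display_mode_to_videomode(dmode, &vm);   /* vm.pixelclock = clock * 1000 */
    drm_display_mode_from_videomode(&vm, dmode); /* dmode->clock = pixelclock / 1000 */
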
/dragonfly/sys/platform/vkernel64/platform/
copyio.c
48 struct vmspace *vm = curproc->p_vmspace; in casu64() local
60 m = vm_fault_page(&vm->vm_map, trunc_page((vm_offset_t)p), in casu64()
86 struct vmspace *vm = curproc->p_vmspace; in casu32() local
98 m = vm_fault_page(&vm->vm_map, trunc_page((vm_offset_t)p), in casu32()
124 struct vmspace *vm = curproc->p_vmspace; in swapu64() local
135 m = vm_fault_page(&vm->vm_map, trunc_page((vm_offset_t)p), in swapu64()
156 struct vmspace *vm = curproc->p_vmspace; in swapu32() local
188 struct vmspace *vm = curproc->p_vmspace; in fuwordadd64() local
220 struct vmspace *vm = curproc->p_vmspace; in fuwordadd32() local
312 struct vmspace *vm = curproc->p_vmspace; in copyin() local
[all …]
/dragonfly/sys/vfs/procfs/
procfs_mem.c
73 struct vmspace *vm; in procfs_rwmem() local
84 vm = p->p_vmspace; in procfs_rwmem()
87 if ((p->p_flags & (P_WEXIT | P_INEXEC)) || vmspace_getrefs(vm) < 0) in procfs_rwmem()
93 vmspace_hold(vm); in procfs_rwmem()
94 map = &vm->vm_map; in procfs_rwmem()
160 vmspace_drop(vm); in procfs_rwmem()
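
The procfs_mem.c hits show the reference discipline for touching another process's address space. A hedged consolidation; the errno value on the refusal path is an assumption:

    struct vmspace *vm = p->p_vmspace;
    vm_map_t map;

    /* refuse exiting/exec'ing targets (line 87) */
    if ((p->p_flags & (P_WEXIT | P_INEXEC)) || vmspace_getrefs(vm) < 0)
            return EFAULT;                  /* errno value assumed */
    vmspace_hold(vm);                       /* line 93 */
    map = &vm->vm_map;
    /* ... fault in and copy the target's pages ... */
    vmspace_drop(vm);                       /* line 160 */
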
/dragonfly/test/nvmm/
Makefile
1 all: /tmp/calc-vm
3 /tmp/calc-vm: calc-vm.c
7 rm -f /tmp/calc-vm
/dragonfly/sys/dev/virtual/nvmm/
nvmm_dragonfly.c
58 struct vmspace *vm; in os_vmspace_create() local
60 vm = vmspace_alloc(vmin, vmax); in os_vmspace_create()
68 pmap_maybethreaded(&vm->vm_pmap); in os_vmspace_create()
70 return vm; in os_vmspace_create()
74 os_vmspace_destroy(os_vmspace_t *vm) in os_vmspace_destroy() argument
76 pmap_del_all_cpus(vm); in os_vmspace_destroy()
77 vmspace_rel(vm); in os_vmspace_destroy()
81 os_vmspace_fault(os_vmspace_t *vm, vaddr_t va, vm_prot_t prot) in os_vmspace_fault() argument
90 return vm_fault(&vm->vm_map, trunc_page(va), prot, fault_flags); in os_vmspace_fault()
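
The nvmm_dragonfly.c hits form a guest-vmspace lifecycle. A hedged sketch; the vmin/vmax bounds, va, and the error handling are assumptions:

    os_vmspace_t *vm = os_vmspace_create(vmin, vmax);
    if (vm != NULL) {
            /* resolve a guest fault at va (wraps vm_fault(), line 90) */
            int error = os_vmspace_fault(vm, va,
                                         VM_PROT_READ | VM_PROT_WRITE);
            /* ... run the guest ... */
            os_vmspace_destroy(vm);  /* pmap_del_all_cpus + vmspace_rel */
    }
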
nvmm_os.h
181 #define os_vmspace_pmap(vm) ((vm)->vm_map.pmap) argument
182 #define os_vmspace_pdirpa(vm) ((vm)->vm_map.pmap->pm_pdirpa[0]) argument
185 #define os_vmspace_pmap(vm) vmspace_pmap(vm) argument
186 #define os_vmspace_pdirpa(vm) (vtophys(vmspace_pmap(vm)->pm_pml4)) argument
/dragonfly/sys/dev/drm/amd/include/
kgd_kfd_interface.h
375 int (*create_process_vm)(struct kgd_dev *kgd, void **vm,
378 void **vm, void **process_info, struct dma_fence **ef);
379 void (*destroy_process_vm)(struct kgd_dev *kgd, void *vm);
380 uint32_t (*get_process_page_dir)(void *vm);
384 uint64_t size, void *vm,
389 void *vm);
391 void *vm);
