Lines Matching refs:ve (cross-references to the symbol ve)

57 static int vmspace_entry_delete(struct vmspace_entry *ve,
59 static void vmspace_entry_cache_ref(struct vmspace_entry *ve);
60 static void vmspace_entry_cache_drop(struct vmspace_entry *ve);
61 static void vmspace_entry_drop(struct vmspace_entry *ve);
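
These hits are references to the vmspace_entry pointer ve, apparently from DragonFly BSD's vkernel vmspace syscall code (sys/vm/vm_vmspace.c). The declarations above already hint at a two-counter lifecycle: refs counts active users of the entry and carries a deleted flag, while cache_refs counts holders of the pointer itself (the RB tree plus any per-lwp caches) and gates the final kfree(). To make the protocol concrete, the sketches interleaved below build up a small userland model in C11; every vke_*/vklp_* name is illustrative rather than kernel API, C11 atomics stand in for the kernel's atomic_*_int() primitives, the token-based locking is ignored, and the VKE_REF_DELETED value is inferred from the usage rather than copied from a header.

    /* hypothetical userland model of struct vmspace_entry */
    #include <stdatomic.h>

    #define VKE_REF_DELETED 0x80000000u /* assumed: high bit of refs */

    struct vke_model {
            void        *vmspace;       /* stands in for struct vmspace * */
            void        *id;            /* user-chosen identifier */
            atomic_uint  refs;          /* active users | VKE_REF_DELETED */
            atomic_uint  cache_refs;    /* tree + per-lwp cache holders */
    };
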
81 struct vmspace_entry *ve; in sys_vmspace_create() local
113 ve = kmalloc(sizeof(struct vmspace_entry), M_VKERNEL, M_WAITOK|M_ZERO); in sys_vmspace_create()
114 ve->vmspace = vmspace_alloc(VM_MIN_USER_ADDRESS, VM_MAX_USER_ADDRESS); in sys_vmspace_create()
115 ve->id = uap->id; in sys_vmspace_create()
116 ve->refs = 0; /* active refs (none) */ in sys_vmspace_create()
117 ve->cache_refs = 1; /* on-tree, not deleted (prevent kfree) */ in sys_vmspace_create()
118 pmap_pinit2(vmspace_pmap(ve->vmspace)); in sys_vmspace_create()
121 if (RB_INSERT(vmspace_rb_tree, &vkp->root, ve)) { in sys_vmspace_create()
122 vmspace_rel(ve->vmspace); in sys_vmspace_create()
123 ve->vmspace = NULL; /* safety */ in sys_vmspace_create()
124 kfree(ve, M_VKERNEL); in sys_vmspace_create()
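
sys_vmspace_create() above initializes the counters to their quiescent state: no active users yet, and a single cache ref owned by the RB tree so the entry cannot be freed while it is still findable; if RB_INSERT reports a collision (presumably a duplicate id), the vmspace is released and the entry freed on the spot. A sketch of that initialization against the model above, with calloc standing in for kmalloc(..., M_WAITOK|M_ZERO):

    #include <stdlib.h>

    struct vke_model *
    vke_create(void *id)
    {
            struct vke_model *ve = calloc(1, sizeof(*ve));

            if (ve == NULL)
                    return NULL;
            ve->id = id;
            atomic_init(&ve->refs, 0);       /* active refs (none) */
            atomic_init(&ve->cache_refs, 1); /* on-tree ref prevents free */
            return ve;
    }
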
144 struct vmspace_entry *ve; in sys_vmspace_destroy() local
155 if ((ve = vkernel_find_vmspace(vkp, uap->id, 1)) != NULL) { in sys_vmspace_destroy()
156 error = vmspace_entry_delete(ve, vkp, 1); in sys_vmspace_destroy()
158 vmspace_entry_cache_drop(ve); in sys_vmspace_destroy()
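
The destroy path pairs three steps: the lookup takes one active ref, vmspace_entry_delete() consumes exactly that ref while marking the entry deleted, and only on success is the tree's cache ref dropped, which may free the entry. Caller-side shape as a sketch, using helpers defined further down:

    int  vke_entry_delete(struct vke_model *ve, unsigned int refs);
    void vke_cache_drop(struct vke_model *ve);

    int
    vke_destroy(struct vke_model *ve)
    {
            int error;

            /* caller holds one active ref from the lookup */
            error = vke_entry_delete(ve, 1);
            if (error == 0)
                    vke_cache_drop(ve); /* release the tree's cache ref */
            return error;
    }
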
182 struct vmspace_entry *ve = NULL; in sys_vmspace_ctl() local
198 if ((ve = vkernel_find_vmspace(vkp, ua.id, 0)) == NULL) { in sys_vmspace_ctl()
216 if (ve && vklp->ve_cache != ve) { in sys_vmspace_ctl()
217 vmspace_entry_cache_ref(ve); in sys_vmspace_ctl()
220 vklp->ve_cache = ve; in sys_vmspace_ctl()
244 vklp->ve = ve; in sys_vmspace_ctl()
245 atomic_add_int(&ve->refs, 1); in sys_vmspace_ctl()
246 pmap_setlwpvm(lp, ve->vmspace); in sys_vmspace_ctl()
257 if (ve) in sys_vmspace_ctl()
258 vmspace_entry_drop(ve); in sys_vmspace_ctl()
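
sys_vmspace_ctl() is the switch-into-guest path. It caches the entry per lwp (taking a cache ref so the pointer stays valid across syscalls), records it as the active guest vmspace with an extra active ref at line 245, and points the pmap at the guest; the lookup's own ref is then dropped on the way out at line 258, so the ref behind vklp->ve is the one that persists. The hits skip lines 218-219, which presumably drop the ref on the previously cached entry; a sketch under that assumption:

    struct vklp_model {
            struct vke_model *ve;       /* active guest entry (holds a ref) */
            struct vke_model *ve_cache; /* fast path (holds a cache ref) */
    };

    void vke_cache_ref(struct vke_model *ve);
    void vke_cache_drop(struct vke_model *ve);

    void
    vklp_cache_update(struct vklp_model *vklp, struct vke_model *ve)
    {
            if (ve && vklp->ve_cache != ve) {
                    vke_cache_ref(ve);          /* pin the new entry */
                    if (vklp->ve_cache)         /* assumed: hits omit this */
                            vke_cache_drop(vklp->ve_cache);
                    vklp->ve_cache = ve;
            }
    }

    void
    vklp_activate(struct vklp_model *vklp, struct vke_model *ve)
    {
            vklp->ve = ve;
            atomic_fetch_add(&ve->refs, 1); /* survives the lookup drop */
            /* kernel: pmap_setlwpvm(lp, ve->vmspace) switches the pmap */
    }
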
276 struct vmspace_entry *ve; in sys_vmspace_mmap() local
284 if ((ve = vkernel_find_vmspace(vkp, uap->id, 0)) == NULL) { in sys_vmspace_mmap()
289 error = kern_mmap(ve->vmspace, uap->addr, uap->len, in sys_vmspace_mmap()
293 vmspace_entry_drop(ve); in sys_vmspace_mmap()
310 struct vmspace_entry *ve; in sys_vmspace_munmap() local
322 if ((ve = vkernel_find_vmspace(vkp, uap->id, 0)) == NULL) { in sys_vmspace_munmap()
364 map = &ve->vmspace->vm_map; in sys_vmspace_munmap()
372 vmspace_entry_drop(ve); in sys_vmspace_munmap()
393 struct vmspace_entry *ve; in sys_vmspace_pread() local
401 if ((ve = vkernel_find_vmspace(vkp, uap->id, 0)) == NULL) { in sys_vmspace_pread()
405 vmspace_entry_drop(ve); in sys_vmspace_pread()
427 struct vmspace_entry *ve; in sys_vmspace_pwrite() local
434 if ((ve = vkernel_find_vmspace(vkp, uap->id, 0)) == NULL) { in sys_vmspace_pwrite()
438 vmspace_entry_drop(ve); in sys_vmspace_pwrite()
456 struct vmspace_entry *ve; in sys_vmspace_mcontrol() local
468 if ((ve = vkernel_find_vmspace(vkp, uap->id, 0)) == NULL) { in sys_vmspace_mcontrol()
497 error = vm_map_madvise(&ve->vmspace->vm_map, start, end, in sys_vmspace_mcontrol()
500 vmspace_entry_drop(ve); in sys_vmspace_mcontrol()
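
sys_vmspace_mmap() through sys_vmspace_mcontrol() above all follow one discipline: look the entry up (which takes an active ref), operate on ve->vmspace, then drop the ref on every exit path. The shape, sketched with a callback standing in for the per-syscall body:

    #include <errno.h>

    struct vke_model *vke_find(void *vkp, void *id); /* sketched below */
    void vke_drop(struct vke_model *ve);             /* sketched below */

    int
    vke_with_vmspace(void *vkp, void *id,
                     int (*op)(void *vmspace, void *arg), void *arg)
    {
            struct vke_model *ve;
            int error;

            if ((ve = vke_find(vkp, id)) == NULL)
                    return ENOENT;
            error = op(ve->vmspace, arg);
            vke_drop(ve);   /* release the lookup's active ref */
            return error;
    }
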
532 rb_vmspace_delete(struct vmspace_entry *ve, void *data) in rb_vmspace_delete() argument
536 if (vmspace_entry_delete(ve, vkp, 0) == 0) in rb_vmspace_delete()
537 vmspace_entry_cache_drop(ve); in rb_vmspace_delete()
539 panic("rb_vmspace_delete: invalid refs %d", ve->refs); in rb_vmspace_delete()
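
rb_vmspace_delete() is the RB-tree scan callback run at process teardown: no caller holds an active ref at that point, so the delete is attempted with an expected count of 0, and anything else means a leaked ref, hence the panic. Model version, with abort() standing in for panic():

    #include <stdlib.h>

    int  vke_entry_delete(struct vke_model *ve, unsigned int refs);
    void vke_cache_drop(struct vke_model *ve);

    int
    vke_scan_delete(struct vke_model *ve, void *unused)
    {
            (void)unused;
            if (vke_entry_delete(ve, 0) == 0)
                    vke_cache_drop(ve); /* tree's cache ref; may free */
            else
                    abort();            /* models the kernel panic */
            return 0;
    }
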
560 vmspace_entry_delete(struct vmspace_entry *ve, struct vkernel_proc *vkp, in vmspace_entry_delete() argument
574 if (atomic_cmpset_int(&ve->refs, refs, VKE_REF_DELETED) == 0) { in vmspace_entry_delete()
575 KKASSERT(ve->refs >= refs); in vmspace_entry_delete()
578 RB_REMOVE(vmspace_rb_tree, &vkp->root, ve); in vmspace_entry_delete()
580 pmap_remove_pages(vmspace_pmap(ve->vmspace), in vmspace_entry_delete()
582 vm_map_remove(&ve->vmspace->vm_map, in vmspace_entry_delete()
584 vmspace_rel(ve->vmspace); in vmspace_entry_delete()
585 ve->vmspace = NULL; /* safety */ in vmspace_entry_delete()
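
vmspace_entry_delete() is the heart of the scheme: one compare-and-swap moves refs from exactly the caller-held count to VKE_REF_DELETED, so deletion fails cleanly while any other thread holds an active ref, and the lookup paths refuse the entry afterwards because they check the old refs value for the flag. Only after the CAS does it unlink the entry and tear the vmspace down. A model of the transition; the kernel version also takes the vkp for RB_REMOVE and runs under its token, and EBUSY on contention is an assumption since the hits don't show the return value:

    #include <errno.h>

    int
    vke_entry_delete(struct vke_model *ve, unsigned int refs)
    {
            unsigned int expect = refs;

            /*
             * Atomically trade the caller's active refs for the DELETED
             * flag.  If anyone else still holds a ref the count won't
             * match and the delete is refused (the kernel KKASSERTs that
             * refs never dropped below the caller's count).
             */
            if (!atomic_compare_exchange_strong(&ve->refs, &expect,
                VKE_REF_DELETED))
                    return EBUSY;   /* assumed errno */

            /* kernel: RB_REMOVE, pmap_remove_pages, vm_map_remove,
             * vmspace_rel */
            ve->vmspace = NULL;     /* safety, as in the original */
            return 0;
    }
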
595 vmspace_entry_cache_ref(struct vmspace_entry *ve) in vmspace_entry_cache_ref() argument
597 atomic_add_int(&ve->cache_refs, 1); in vmspace_entry_cache_ref()
609 vmspace_entry_cache_drop(struct vmspace_entry *ve) in vmspace_entry_cache_drop() argument
611 if (atomic_fetchadd_int(&ve->cache_refs, -1) == 1) { in vmspace_entry_cache_drop()
612 KKASSERT(ve->refs & VKE_REF_DELETED); in vmspace_entry_cache_drop()
613 kfree(ve, M_VKERNEL); in vmspace_entry_cache_drop()
623 vmspace_entry_drop(struct vmspace_entry *ve) in vmspace_entry_drop() argument
625 atomic_fetchadd_int(&ve->refs, -1); in vmspace_entry_drop()
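
Lines 595 through 625 are the three refcount helpers. The cache pair manages the pointer-validity count, with the final drop doing the kfree() and asserting the invariant that an entry can only reach zero cache refs after it was marked deleted; vmspace_entry_drop() just releases one active ref. C11 models:

    #include <assert.h>
    #include <stdlib.h>

    void
    vke_cache_ref(struct vke_model *ve)
    {
            atomic_fetch_add(&ve->cache_refs, 1);
    }

    void
    vke_cache_drop(struct vke_model *ve)
    {
            /* fetch-sub returns the old value: 1 means we were last */
            if (atomic_fetch_sub(&ve->cache_refs, 1) == 1) {
                    assert(atomic_load(&ve->refs) & VKE_REF_DELETED);
                    free(ve);
            }
    }

    void
    vke_drop(struct vke_model *ve)
    {
            atomic_fetch_sub(&ve->refs, 1); /* release one active ref */
    }
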
641 struct vmspace_entry *ve; in vkernel_find_vmspace() local
652 ve = vklp->ve_cache; in vkernel_find_vmspace()
653 if (ve && ve->id == id) { in vkernel_find_vmspace()
660 n = atomic_fetchadd_int(&ve->refs, 1); in vkernel_find_vmspace()
662 KKASSERT(ve->vmspace); in vkernel_find_vmspace()
663 return ve; in vkernel_find_vmspace()
671 vmspace_entry_drop(ve); in vkernel_find_vmspace()
672 vmspace_entry_cache_drop(ve); in vkernel_find_vmspace()
683 ve = RB_FIND(vmspace_rb_tree, &vkp->root, &key); in vkernel_find_vmspace()
684 if (ve) { in vkernel_find_vmspace()
685 if (atomic_fetchadd_int(&ve->refs, 1) & VKE_REF_DELETED) { in vkernel_find_vmspace()
686 vmspace_entry_drop(ve); in vkernel_find_vmspace()
687 ve = NULL; in vkernel_find_vmspace()
692 return (ve); in vkernel_find_vmspace()
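
vkernel_find_vmspace() shows why lookups bump refs with a fetchadd instead of load-test-store: the increment happens first, then the returned old value is inspected; if the DELETED flag was already set, the bump is undone and the lookup fails, so a successful return always holds a live active ref. The per-lwp cache path apparently does the same dance and additionally drops its cache ref when the cached entry turns out to be stale (lines 671-672), and the destroy path's third argument of 1 appears to bypass that cache, though the check itself falls on lines the search didn't match. A model of the tree-side step:

    struct vke_model *
    vke_lookup_check(struct vke_model *ve) /* ve: result of RB_FIND */
    {
            if (ve) {
                    /* bump first, then check the old value */
                    if (atomic_fetch_add(&ve->refs, 1) & VKE_REF_DELETED) {
                            vke_drop(ve);   /* undo our bump */
                            ve = NULL;      /* entry is logically gone */
                    }
            }
            return ve;
    }
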
757 struct vmspace_entry *ve; in vkernel_lwp_exit() local
763 if ((ve = vklp->ve) != NULL) { in vkernel_lwp_exit()
767 vklp->ve = NULL; in vkernel_lwp_exit()
768 KKASSERT(ve->refs > 0); in vkernel_lwp_exit()
769 vmspace_entry_drop(ve); in vkernel_lwp_exit()
771 if ((ve = vklp->ve_cache) != NULL) { in vkernel_lwp_exit()
773 vmspace_entry_cache_drop(ve); in vkernel_lwp_exit()
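
At lwp exit both per-lwp handles are released: the active ref behind vklp->ve (taken in sys_vmspace_ctl()) and the cache ref behind vklp->ve_cache. Sketch against the vklp model from the ctl sketch:

    #include <assert.h>

    void
    vklp_exit(struct vklp_model *vklp)
    {
            struct vke_model *ve;

            if ((ve = vklp->ve) != NULL) {
                    vklp->ve = NULL;
                    assert(atomic_load(&ve->refs) > 0);
                    vke_drop(ve);       /* active ref from ctl */
            }
            if ((ve = vklp->ve_cache) != NULL) {
                    vklp->ve_cache = NULL;
                    vke_cache_drop(ve); /* cache ref; may free ve */
            }
    }
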
791 struct vmspace_entry *ve; in vkernel_trap() local
801 ve = vklp->ve; in vkernel_trap()
802 KKASSERT(ve != NULL); in vkernel_trap()
807 vklp->ve = NULL; in vkernel_trap()
809 KKASSERT(ve->refs > 0); in vkernel_trap()
810 vmspace_entry_drop(ve); in vkernel_trap()
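
vkernel_trap() runs when the guest faults back into the vkernel: the lwp detaches from the guest entry and drops the active ref it took when it switched in, mirroring the exit path above. A brief sketch; the restoration of the process's own vmspace (presumably another pmap_setlwpvm() call) falls on lines the search didn't match:

    #include <assert.h>

    void
    vklp_trap_return(struct vklp_model *vklp)
    {
            struct vke_model *ve = vklp->ve;

            assert(ve != NULL);
            vklp->ve = NULL;
            /* assumed: kernel switches back to the real vmspace here */
            assert(atomic_load(&ve->refs) > 0);
            vke_drop(ve);   /* active ref taken in sys_vmspace_ctl() */
    }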