Searched refs:__predict_false (Results 1 – 25 of 74) sorted by relevance
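For context: __predict_false is the branch-prediction hint these hits all use. The sys/sys/cdefs.h match below (line 353) defines it as a thin wrapper around GCC's __builtin_expect, with a plain no-op fallback (line 356) for compilers without the builtin, so the annotated condition is compiled as the unlikely, cold path. A minimal self-contained sketch of that definition pattern and a typical call site follows; the helper and variable names are hypothetical, not taken from the tree, and a KASSERT-style assertion analogue is sketched after the listing.

#include <stdio.h>
#include <stdlib.h>

/*
 * Fallback definition so the sketch compiles standalone; it mirrors the
 * #ifndef pattern visible in the common.h and prop_rb.c hits below
 * (illustrative copy, not the tree's exact text).
 */
#ifndef __predict_false
#if defined(__GNUC__) || defined(__clang__)
#define __predict_false(exp)    __builtin_expect((exp) != 0, 0)
#else
#define __predict_false(exp)    (exp)
#endif
#endif

/* Hypothetical helper: the error branch is marked as the cold path. */
static int
parse_count(const char *arg)
{
        char *end;
        long v = strtol(arg, &end, 10);

        if (__predict_false(arg == end || *end != '\0' || v < 0)) {
                fprintf(stderr, "bad count: %s\n", arg);
                return (-1);
        }
        return ((int)v);
}

int
main(int argc, char **argv)
{
        if (__predict_false(argc < 2))
                return (1);
        return (parse_count(argv[1]) < 0 ? 1 : 0);
}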

/dragonfly/lib/libthread_xu/thread/
thr_mutex.c
373 if (__predict_false(m == NULL)) in __pthread_mutex_trylock()
381 if (__predict_false(ret != 0)) in __pthread_mutex_trylock()
399 if (__predict_false(ret != 0)) in _pthread_mutex_trylock()
428 } else if (__predict_false( in mutex_lock_common()
457 if (__predict_false(m == NULL)) in __pthread_mutex_lock()
467 if (__predict_false(ret)) in __pthread_mutex_lock()
481 if (__predict_false(m == NULL)) in _pthread_mutex_lock()
491 if (__predict_false(ret)) in _pthread_mutex_lock()
506 if (__predict_false(m == NULL)) in __pthread_mutex_timedlock()
516 if (__predict_false(ret)) in __pthread_mutex_timedlock()
[all …]
thr_cond.c
247 if (__predict_false(*cond == NULL && in cond_wait_common()
374 if (__predict_false(*cond == NULL && in cond_signal_common()
thr_private.h
80 if (__predict_false(!(cond))) \
533 if (__predict_false((thrd)->locklevel <= 0)) \
thr_umtx.c
104 if (__predict_false(errval == EAGAIN)) { in __thr_umtx_lock()
/dragonfly/sys/vm/
vm_page2.h
83 if (__predict_false(gd->gd_vmstats.v_free_severe > in vm_paging_severe()
89 if (__predict_false(gd->gd_vmstats.v_free_reserved > in vm_paging_severe()
122 if (__predict_false(gd->gd_vmstats.v_free_reserved > in vm_paging_min_dnc()
177 if (__predict_false(gd->gd_vmstats.v_paging_wait > in vm_paging_wait()
183 if (__predict_false(gd->gd_vmstats.v_free_reserved > in vm_paging_wait()
206 if (__predict_false(gd->gd_vmstats.v_paging_start > in vm_paging_start()
212 if (__predict_false(gd->gd_vmstats.v_free_min > in vm_paging_start()
217 if (__predict_false(gd->gd_vmstats.v_free_reserved > in vm_paging_start()
243 if (__predict_false(gd->gd_vmstats.v_free_reserved > in vm_paging_target1()
281 if (__predict_false(gd->gd_vmstats.v_free_reserved > in vm_paging_target2()
[all …]
/dragonfly/lib/libnvmm/
libnvmm_x86.c
517 if (__predict_false(!seg->attrib.p)) { in segment_check()
526 if (__predict_false(gva + size > limit)) { in segment_check()
594 if (__predict_false(ret == -1)) { in read_guest_memory()
651 if (__predict_false(ret == -1)) { in write_guest_memory()
769 if (__predict_false(cnt == 0)) { in nvmm_assist_io()
853 if (__predict_false(psld)) { in nvmm_assist_io()
2371 if (__predict_false(is_dual(fsm, instr))) { in node_regmodrm()
2470 if (__predict_false(!opcode->valid)) { in node_primary_opcode()
2511 if (__predict_false(!opcode->valid)) { in node_secondary_opcode()
2591 if (__predict_false(!fsm->is64bit)) { in node_rex_prefix()
[all …]
/dragonfly/sys/platform/pc64/x86_64/
trap.c
198 if (__predict_false(curtd->td_ucred != curp->p_ucred)) { in userenter()
229 if (__predict_false(p->p_flags & P_PROFIL)) { in userret()
245 if (__predict_false(STOPLWP(p, lp))) { in userret()
251 while (__predict_false(dump_stop_usertds)) { in userret()
259 if (__predict_false(p->p_flags & (P_SIGVTALRM | P_SIGPROF))) { in userret()
290 if (__predict_false(lp->lwp_flags & LWP_OLDMASK)) { in userret()
312 while (__predict_false(STOPLWP(lp->lwp_proc, lp))) { in userexit()
1179 if (__predict_false(ISPL(frame->tf_cs) != SEL_UPL)) { in syscall2()
1235 if (__predict_false(narg > regcnt)) { in syscall2()
1343 if (__predict_false(orig_tf_rflags & PSL_T)) { in syscall2()
[all …]
/dragonfly/contrib/dhcpcd/src/
common.h
92 #ifndef __predict_false
95 # define __predict_false(exp) __builtin_expect((exp) != 0, 0) macro
98 # define __predict_false(exp) (exp) macro
/dragonfly/sys/sys/
spinlock2.h
116 if (__predict_false(count != 0)) { in _spin_lock_quick()
213 if (__predict_false((lock & SPINLOCK_SHARED) == 0)) { in _spin_lock_shared_quick()
329 if (__predict_false(v & 1)) in spin_access_start()
337 if (__predict_false(v & 1)) { in spin_access_end()
ktr.h
185 if (__predict_false(ktr_ ## name ## _enable && \
205 if (__predict_false((cond) && \
systm.h
98 #define KASSERT(exp,msg) do { if (__predict_false(!(exp))) \
101 #define KKASSERT(exp) do { if (__predict_false(!(exp))) \
107 do { if (__predict_false(!(exp))) { \
thread2.h
221 if (__predict_false(td->td_critcount < 0)) in _crit_exit_noyield()
230 if (__predict_false(td->td_gd->gd_reqflags & RQF_IDLECHECK_MASK)) in _crit_exit_quick()
ktrace.h
98 __predict_false((((p)->p_traceflag & (1<<(type))) && \
cdefs.h
353 #define __predict_false(exp) __builtin_expect((exp), 0) macro
356 #define __predict_false(exp) (exp) macro
/dragonfly/sys/dev/virtual/nvmm/
nvmm.c
95 if (__predict_false(machid >= NVMM_MAX_MACHINES)) { in nvmm_machine_get()
100 if (__predict_false(writer)) { in nvmm_machine_get()
105 if (__predict_false(!mach->present)) { in nvmm_machine_get()
109 if (__predict_false(mach->owner != owner && in nvmm_machine_get()
173 if (__predict_false(cpuid >= NVMM_MAX_VCPUS)) { in nvmm_vcpu_get()
179 if (__predict_false(!vcpu->present)) { in nvmm_vcpu_get()
343 if (__predict_false(op >= nvmm_impl->mach_conf_max)) { in nvmm_machine_configure()
462 if (__predict_false(op >= nvmm_impl->vcpu_conf_max)) in nvmm_vcpu_configure()
575 if (__predict_false(os_return_needed())) { in nvmm_do_vcpu_run()
582 if (__predict_false(ret != 0)) { in nvmm_do_vcpu_run()
nvmm_os.h
308 if (__predict_false(hvm_break_wanted())) { in os_return_needed()
311 if (__predict_false(curthread->td_lwp->lwp_mpflags & LWP_MP_URETMASK)) { in os_return_needed()
/dragonfly/sys/libprop/
prop_rb.c
44 #ifndef __predict_false
45 #define __predict_false(x) (x) macro
137 if (__predict_false(diff == 0)) { in _prop_rb_tree_insert_node()
180 if (__predict_false(parent == (struct rb_node *)(void *)&rbt->rbt_root)) { in _prop_rb_tree_insert_node()
359 if (__predict_false(RB_ROOT_P(rbt, grandpa))) { in rb_tree_insert_rebalance()
450 if (__predict_false(rbt->rbt_minmax[RB_POSITION(self)] == self)) { in rb_tree_prune_node()
457 if (__predict_false(was_root)) { in rb_tree_prune_node()
516 if (__predict_false(RB_RED_P(standin_son))) { in rb_tree_swap_prune_and_rebalance()
601 if (__predict_false(rbt->rbt_minmax[RB_POSITION(self)] == self)) in rb_tree_swap_prune_and_rebalance()
660 if (__predict_false(was_root)) { in rb_tree_prune_blackred_branch()
/dragonfly/sys/dev/virtual/nvmm/x86/
nvmm_x86_svm.c
849 if (__predict_false(eax > svm_cpuid_max_basic)) { in svm_inkernel_handle_cpuid()
854 if (__predict_false(eax > SVM_CPUID_MAX_HYPERVISOR)) { in svm_inkernel_handle_cpuid()
859 if (__predict_false(eax > svm_cpuid_max_extended)) { in svm_inkernel_handle_cpuid()
1212 if (__predict_false(exit->u.wrmsr.val & ~EFER_VALID)) { in svm_inkernel_handle_msr()
1340 } else if (__predict_false(vmcb->state.cpl != 0)) { in svm_exit_xsetbv()
1344 } else if (__predict_false((val & XCR0_X87) == 0)) { in svm_exit_xsetbv()
1539 if (__predict_false(svm_vcpu_event_commit(vcpu) != 0)) { in svm_vcpu_run()
1568 if (__predict_false(cpudata->gtlb_want_flush || in svm_vcpu_run()
1576 if (__predict_false(cpudata->gtsc_want_update)) { in svm_vcpu_run()
2361 if (__predict_false(cpuid->mask && cpuid->exit)) { in svm_vcpu_configure_cpuid()
[all …]
nvmm_x86.c
466 if (__predict_false(pat[i] & ~__BITS(2,0))) in nvmm_x86_pat_validate()
468 if (__predict_false(pat[i] == 2 || pat[i] == 3)) in nvmm_x86_pat_validate()
nvmm_x86_vmx.c
1262 if (__predict_false(eax > vmx_cpuid_max_basic)) { in vmx_inkernel_handle_cpuid()
1267 if (__predict_false(eax > VMX_CPUID_MAX_HYPERVISOR)) { in vmx_inkernel_handle_cpuid()
1272 if (__predict_false(eax > vmx_cpuid_max_extended)) { in vmx_inkernel_handle_cpuid()
1910 if (__predict_false(!nvmm_x86_pat_validate(val))) { in vmx_inkernel_handle_msr()
2002 } else if (__predict_false((val & XCR0_X87) == 0)) { in vmx_exit_xsetbv()
2259 if (__predict_false(vmx_vcpu_event_commit(vcpu) != 0)) { in vmx_vcpu_run()
2300 if (__predict_false(cpudata->gtsc_want_update)) { in vmx_vcpu_run()
2334 if (__predict_false(vmx_vcpu_event_commit(vcpu) != 0)) { in vmx_vcpu_run()
2356 if (__predict_false(ret != 0)) { in vmx_vcpu_run()
3121 if (__predict_false(cpuid->mask && cpuid->exit)) { in vmx_vcpu_configure_cpuid()
[all …]
/dragonfly/contrib/dhcpcd/compat/
rb.c
215 if (__predict_false(diff == 0)) { in rb_tree_insert_node()
258 if (__predict_false(parent == (struct rb_node *)(void *)&rbt->rbt_root)) { in rb_tree_insert_node()
436 if (__predict_false(RB_ROOT_P(rbt, grandpa))) { in rb_tree_insert_rebalance()
527 if (__predict_false(rbt->rbt_minmax[RB_POSITION(self)] == self)) { in rb_tree_prune_node()
534 if (__predict_false(was_root)) { in rb_tree_prune_node()
593 if (__predict_false(RB_RED_P(standin_son))) { in rb_tree_swap_prune_and_rebalance()
678 if (__predict_false(rbt->rbt_minmax[RB_POSITION(self)] == self)) in rb_tree_swap_prune_and_rebalance()
737 if (__predict_false(was_root)) { in rb_tree_prune_blackred_branch()
/dragonfly/lib/libc/gen/
arc4random.h
137 if (__predict_false(rs == NULL || rsx == NULL)) in _rs_forkdetect()
/dragonfly/sys/dev/netif/mxge/
if_mxge.c
1783 if (__predict_false(cnt > tx->max_desc)) in mxge_encap_tso()
1832 if (__predict_false(err)) in mxge_encap()
1845 if (__predict_false(err != 0)) in mxge_encap()
2048 if (__predict_false(init)) in mxge_get_buf_small()
2054 if (__predict_false(init)) { in mxge_get_buf_small()
2069 if (__predict_false(init)) { in mxge_get_buf_small()
2098 if (__predict_false(init)) in mxge_get_buf_big()
2107 if (__predict_false(init)) { in mxge_get_buf_big()
2122 if (__predict_false(init)) { in mxge_get_buf_big()
2740 if (__predict_false(!stats->valid)) in mxge_msi()
[all …]
/dragonfly/sys/kern/
kern_kmalloc.c
694 while (__predict_false(type->ks_loosememuse >= type->ks_limit)) { in _kmalloc_obj_debug()
901 if (__predict_false(use->loosememuse >= KMALLOC_LOOSE_SIZE)) { in _kmalloc_obj_debug()
911 if (__predict_false(flags & M_ZERO)) in _kmalloc_obj_debug()
vfs_cache.c
425 if (__predict_false(mpr != NULL)) { in _cache_mntrel()
512 while (__predict_false(error == EWOULDBLOCK)) { in _cache_lock()
524 if (__predict_false(didwarn)) { in _cache_lock()
556 if (__predict_false(error != 0)) { in _cache_lock_nonblock()
606 while (__predict_false(error == EWOULDBLOCK)) { in _cache_lock_shared()
618 if (__predict_false(didwarn)) { in _cache_lock_shared()
637 if (__predict_false(error != 0)) { in _cache_lock_shared_nonblock()
1174 if (__predict_false(tlocked == 0)) { in cache_lock4_tondlocked()
1177 if (__predict_false(cache_lock_nonblock(tncpd) != 0)) { in cache_lock4_tondlocked()
1197 if (__predict_false(cache_lock_nonblock(fncp) != 0)) { in cache_lock4_tondlocked()
[all …]
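
The sys/sys/systm.h hits above show the kernel's assertion macros (KASSERT, KKASSERT) wrapping their check in __predict_false so the failure path is compiled as the branch that is not taken. A simplified userland analogue, with a hypothetical MY_ASSERT name and abort() standing in for the kernel's panic(), assuming GCC or Clang:

#include <stdio.h>
#include <stdlib.h>

#ifndef __predict_false         /* local fallback so the sketch is standalone */
#define __predict_false(exp)    __builtin_expect((exp) != 0, 0)
#endif

/*
 * Simplified analogue of the KASSERT() shape seen above: the failed
 * check is annotated as unlikely and diverts to abort().
 */
#define MY_ASSERT(exp, msg) do {                                  \
        if (__predict_false(!(exp))) {                            \
                fprintf(stderr, "assertion failed: %s\n", (msg)); \
                abort();                                          \
        }                                                         \
} while (0)

int
main(void)
{
        int refcount = 1;

        MY_ASSERT(refcount > 0, "refcount must stay positive");
        return (0);
}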
