
Searched refs: task_per_thread (Results 1 – 25 of 38), sorted by relevance


/dports/net/haproxy19/haproxy-1.9.16/src/
task.c
58 struct task_per_thread task_per_thread[MAX_THREADS]; variable
108 int nb = ((void *)root - (void *)&task_per_thread[0].rqueue) / sizeof(task_per_thread[0]); in __task_wakeup()
109 task_per_thread[nb].rqueue_size++; in __task_wakeup()
176 eb = eb32_first(&task_per_thread[tid].timers); in wake_expired_tasks()
206 __task_queue(task, &task_per_thread[tid].timers); in wake_expired_tasks()
298 while (task_per_thread[tid].task_list_size < max_processed) { in process_runnable_tasks()
320 lrq = eb32sc_first(&task_per_thread[tid].rqueue, tid_bit); in process_runnable_tasks()
417 if (!LIST_ISEMPTY(&task_per_thread[tid].task_list)) { in process_runnable_tasks()
508 tmp_wq = eb32_first(&task_per_thread[i].timers); in mworker_cleantasks()
527 memset(&task_per_thread, 0, sizeof(task_per_thread)); in init_task()
[all …]
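
The 1.9 hits above show how __task_wakeup() recovers the owning thread's slot from a pointer into its run queue and bumps that slot's counter. A minimal, self-contained C sketch of this pointer-arithmetic pattern; the types are simplified stand-ins (struct eb_root here is only a placeholder, not HAProxy's real ebtree type):

#define MAX_THREADS 64

struct eb_root { void *node; };        /* placeholder for the real ebtree root */

struct task_per_thread_sketch {
    struct eb_root rqueue;             /* per-thread run queue */
    int rqueue_size;                   /* number of tasks queued on it */
};

static struct task_per_thread_sketch task_per_thread[MAX_THREADS];

/* Given a pointer to some thread's rqueue, recover that thread's index by
 * pointer arithmetic over the array, then account for one more queued task,
 * mirroring task.c:108-109 in the hits above. */
static void account_wakeup(struct eb_root *root)
{
    int nb = (int)(((char *)root - (char *)&task_per_thread[0].rqueue)
                   / sizeof(task_per_thread[0]));
    task_per_thread[nb].rqueue_size++;
}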
/dports/net/haproxy20/haproxy-2.0.26/src/
task.c
57 struct task_per_thread task_per_thread[MAX_THREADS]; variable
106 int nb = ((void *)root - (void *)&task_per_thread[0].rqueue) / sizeof(task_per_thread[0]); in __task_wakeup()
107 task_per_thread[nb].rqueue_size++; in __task_wakeup()
176 eb = eb32_first(&task_per_thread[tid].timers); in wake_expired_tasks()
206 __task_queue(task, &task_per_thread[tid].timers); in wake_expired_tasks()
376 task_per_thread[tid].task_list_size++; in process_runnable_tasks()
398 task_per_thread[tid].task_list_size--; in process_runnable_tasks()
450 if (!LIST_ISEMPTY(&task_per_thread[tid].task_list)) in process_runnable_tasks()
534 tmp_wq = eb32_first(&task_per_thread[i].timers); in mworker_cleantasks()
552 memset(&task_per_thread, 0, sizeof(task_per_thread)); in init_task()
[all …]
debug.c
60 !eb_is_empty(&task_per_thread[thr].timers), in ha_thread_dump()
61 !eb_is_empty(&task_per_thread[thr].rqueue), in ha_thread_dump()
62 !LIST_ISEMPTY(&task_per_thread[thr].task_list), in ha_thread_dump()
63 task_per_thread[thr].task_list_size, in ha_thread_dump()
64 task_per_thread[thr].rqueue_size, in ha_thread_dump()
/dports/net/haproxy23/haproxy-2.3.16/src/
task.c
43 THREAD_LOCAL struct task_per_thread *sched = &task_per_thread[0]; /* scheduler context for the curr…
56 struct task_per_thread task_per_thread[MAX_THREADS]; variable
99 _HA_ATOMIC_ADD(&task_per_thread[thr].rq_total, 1); in task_kill()
155 int nb = ((void *)root - (void *)&task_per_thread[0].rqueue) / sizeof(task_per_thread[0]); in __task_wakeup()
156 task_per_thread[nb].rqueue_size++; in __task_wakeup()
572 struct task_per_thread * const tt = sched; in process_runnable_tasks()
807 tmp_wq = eb32_first(&task_per_thread[i].timers); in mworker_cleantasks()
825 memset(&task_per_thread, 0, sizeof(task_per_thread)); in init_task()
827 LIST_INIT(&task_per_thread[i].tasklets[TL_URGENT]); in init_task()
828 LIST_INIT(&task_per_thread[i].tasklets[TL_NORMAL]); in init_task()
[all …]
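
The 2.3 hits add a thread-local scheduler pointer (sched) defaulting to &task_per_thread[0], plus atomic accounting when work is queued for another thread. A hedged sketch of that shape, using C11 _Thread_local and stdatomic.h as stand-ins for HAProxy's THREAD_LOCAL and _HA_ATOMIC_* macros; the struct layout and the names ending in _sketch are illustrative assumptions:

#include <stdatomic.h>

#define MAX_THREADS 64

struct task_per_thread_sketch {
    atomic_uint rq_total;              /* tasks queued for this thread */
};

static struct task_per_thread_sketch task_per_thread[MAX_THREADS];

/* Scheduler context for the current thread; each thread is assumed to
 * repoint it at its own slot during startup. */
static _Thread_local struct task_per_thread_sketch *sched = &task_per_thread[0];

/* Cross-thread accounting as in task_kill() at task.c:99 above: bump the
 * target thread's counter atomically. */
static void account_kill(int thr)
{
    atomic_fetch_add(&task_per_thread[thr].rq_total, 1);
}

/* Local fast path: process_runnable_tasks() just dereferences the
 * thread-local pointer instead of indexing the array by tid each time. */
static struct task_per_thread_sketch *local_sched(void)
{
    return sched;
}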
debug.c
66 !eb_is_empty(&task_per_thread[thr].timers), in ha_thread_dump()
67 !eb_is_empty(&task_per_thread[thr].rqueue), in ha_thread_dump()
68 !(LIST_ISEMPTY(&task_per_thread[thr].tasklets[TL_URGENT]) && in ha_thread_dump()
69 LIST_ISEMPTY(&task_per_thread[thr].tasklets[TL_NORMAL]) && in ha_thread_dump()
70 LIST_ISEMPTY(&task_per_thread[thr].tasklets[TL_BULK]) && in ha_thread_dump()
71 MT_LIST_ISEMPTY(&task_per_thread[thr].shared_tasklet_list)), in ha_thread_dump()
72 task_per_thread[thr].task_list_size, in ha_thread_dump()
73 task_per_thread[thr].rq_total, in ha_thread_dump()
/dports/net/haproxy21/haproxy-2.1.12/src/
task.c
44 THREAD_LOCAL struct task_per_thread *sched = &task_per_thread[0]; /* scheduler context for the curr…
57 struct task_per_thread task_per_thread[MAX_THREADS]; variable
106 int nb = ((void *)root - (void *)&task_per_thread[0].rqueue) / sizeof(task_per_thread[0]); in __task_wakeup()
107 task_per_thread[nb].rqueue_size++; in __task_wakeup()
162 struct task_per_thread * const tt = sched; // thread's tasks in wake_expired_tasks()
306 struct task_per_thread * const tt = sched; in process_runnable_tasks()
545 tmp_rq = eb32sc_first(&task_per_thread[i].rqueue, MAX_THREADS_MASK); in mworker_cleantasks()
552 tmp_wq = eb32_first(&task_per_thread[i].timers); in mworker_cleantasks()
570 memset(&task_per_thread, 0, sizeof(task_per_thread)); in init_task()
572 LIST_INIT(&task_per_thread[i].task_list); in init_task()
[all …]
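
The 2.1 hits end with the init_task() sequence: zero the whole per-thread array, then set up each slot's task list head. A self-contained sketch with a simplified list type standing in for HAProxy's LIST_INIT:

#include <string.h>

#define MAX_THREADS 64

struct list { struct list *n, *p; };   /* simplified doubly linked list head */

struct task_per_thread_sketch {
    struct list task_list;             /* per-thread task list */
    int task_list_size;
};

static struct task_per_thread_sketch task_per_thread[MAX_THREADS];

static void list_init(struct list *l) { l->n = l->p = l; }

/* Initialization as in task.c:570-572 above: zero the whole array, then make
 * every slot's list head point at itself. */
static void init_task_sketch(void)
{
    memset(&task_per_thread, 0, sizeof(task_per_thread));
    for (int i = 0; i < MAX_THREADS; i++)
        list_init(&task_per_thread[i].task_list);
}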
debug.c
64 !eb_is_empty(&task_per_thread[thr].timers), in ha_thread_dump()
65 !eb_is_empty(&task_per_thread[thr].rqueue), in ha_thread_dump()
66 !(LIST_ISEMPTY(&task_per_thread[thr].task_list) | in ha_thread_dump()
67 MT_LIST_ISEMPTY(&task_per_thread[thr].shared_tasklet_list)), in ha_thread_dump()
68 task_per_thread[thr].task_list_size, in ha_thread_dump()
69 task_per_thread[thr].rqueue_size, in ha_thread_dump()
/dports/net/nuster/nuster-5.0.4.21/src/
task.c
44 THREAD_LOCAL struct task_per_thread *sched = &task_per_thread[0]; /* scheduler context for the curr…
57 struct task_per_thread task_per_thread[MAX_THREADS]; variable
106 int nb = ((void *)root - (void *)&task_per_thread[0].rqueue) / sizeof(task_per_thread[0]); in __task_wakeup()
107 task_per_thread[nb].rqueue_size++; in __task_wakeup()
162 struct task_per_thread * const tt = sched; // thread's tasks in wake_expired_tasks()
303 struct task_per_thread * const tt = sched; in process_runnable_tasks()
542 tmp_rq = eb32sc_first(&task_per_thread[i].rqueue, MAX_THREADS_MASK); in mworker_cleantasks()
549 tmp_wq = eb32_first(&task_per_thread[i].timers); in mworker_cleantasks()
567 memset(&task_per_thread, 0, sizeof(task_per_thread)); in init_task()
569 LIST_INIT(&task_per_thread[i].task_list); in init_task()
[all …]
debug.c
62 !eb_is_empty(&task_per_thread[thr].timers), in ha_thread_dump()
63 !eb_is_empty(&task_per_thread[thr].rqueue), in ha_thread_dump()
64 !(LIST_ISEMPTY(&task_per_thread[thr].task_list) | in ha_thread_dump()
65 MT_LIST_ISEMPTY(&task_per_thread[thr].shared_tasklet_list)), in ha_thread_dump()
66 task_per_thread[thr].task_list_size, in ha_thread_dump()
67 task_per_thread[thr].rqueue_size, in ha_thread_dump()
/dports/net/haproxy22/haproxy-2.2.19/src/
task.c
45 THREAD_LOCAL struct task_per_thread *sched = &task_per_thread[0]; /* scheduler context for the curr…
58 struct task_per_thread task_per_thread[MAX_THREADS]; variable
100 MT_LIST_ADDQ(&task_per_thread[thr].shared_tasklet_list, in task_kill()
160 int nb = ((void *)root - (void *)&task_per_thread[0].rqueue) / sizeof(task_per_thread[0]); in __task_wakeup()
161 task_per_thread[nb].rqueue_size++; in __task_wakeup()
540 struct task_per_thread * const tt = sched; in process_runnable_tasks()
769 tmp_wq = eb32_first(&task_per_thread[i].timers); in mworker_cleantasks()
787 memset(&task_per_thread, 0, sizeof(task_per_thread)); in init_task()
789 LIST_INIT(&task_per_thread[i].tasklets[TL_URGENT]); in init_task()
790 LIST_INIT(&task_per_thread[i].tasklets[TL_NORMAL]); in init_task()
[all …]
debug.c
66 !eb_is_empty(&task_per_thread[thr].timers), in ha_thread_dump()
67 !eb_is_empty(&task_per_thread[thr].rqueue), in ha_thread_dump()
68 !(LIST_ISEMPTY(&task_per_thread[thr].tasklets[TL_URGENT]) && in ha_thread_dump()
69 LIST_ISEMPTY(&task_per_thread[thr].tasklets[TL_NORMAL]) && in ha_thread_dump()
70 LIST_ISEMPTY(&task_per_thread[thr].tasklets[TL_BULK]) && in ha_thread_dump()
71 MT_LIST_ISEMPTY(&task_per_thread[thr].shared_tasklet_list)), in ha_thread_dump()
72 task_per_thread[thr].task_list_size, in ha_thread_dump()
73 task_per_thread[thr].rqueue_size, in ha_thread_dump()
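
The 2.2 (and 2.3) debug.c hits show ha_thread_dump() reporting whether a thread holds any work across the three tasklet classes and the shared list. A sketch of that emptiness check; the shared list is modeled as a plain list rather than an MT_LIST, and TL_CLASSES is a placeholder count, so treat this as an illustration only:

#include <stdbool.h>

#define MAX_THREADS 64

enum { TL_URGENT, TL_NORMAL, TL_BULK, TL_CLASSES };   /* TL_CLASSES: placeholder count */

struct list { struct list *n, *p; };

struct task_per_thread_sketch {
    struct list tasklets[TL_CLASSES];  /* per-class local tasklet queues */
    struct list shared_tasklet_list;   /* queue other threads feed into */
};

static struct task_per_thread_sketch task_per_thread[MAX_THREADS];

static bool list_is_empty(const struct list *l) { return l->n == l; }

/* True when thread 'thr' has any tasklet pending in any class or in the
 * shared list; assumes the lists were initialized to point at themselves. */
static bool thread_has_tasklets(int thr)
{
    const struct task_per_thread_sketch *tt = &task_per_thread[thr];
    return !(list_is_empty(&tt->tasklets[TL_URGENT]) &&
             list_is_empty(&tt->tasklets[TL_NORMAL]) &&
             list_is_empty(&tt->tasklets[TL_BULK])   &&
             list_is_empty(&tt->shared_tasklet_list));
}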
/dports/net/haproxy19/haproxy-1.9.16/include/proto/
task.h
103 struct task_per_thread { struct
112 extern struct task_per_thread task_per_thread[MAX_THREADS]; argument
150 root = &task_per_thread[tid].rqueue; in task_wakeup()
154 struct eb_root *root = &task_per_thread[tid].rqueue; in task_wakeup()
231 task_per_thread[tid].rqueue_size--; in __task_unlink_rq()
262 task_per_thread[tid].task_list_size++; in tasklet_wakeup()
273 task_per_thread[tid].task_list_size++; in task_insert_into_tasklet_list()
285 task_per_thread[tid].task_list_size--; in __task_remove_from_tasklet_list()
389 task_per_thread[tid].task_list_size--; in tasklet_free()
431 __task_queue(task, &task_per_thread[tid].timers); in task_queue()
[all …]
/dports/net/haproxy20/haproxy-2.0.26/include/proto/
task.h
101 extern struct task_per_thread task_per_thread[MAX_THREADS];
137 root = &task_per_thread[tid].rqueue; in task_wakeup()
141 struct eb_root *root = &task_per_thread[tid].rqueue; in task_wakeup()
218 task_per_thread[tid].rqueue_size--; in __task_unlink_rq()
245 LIST_ADDQ(&task_per_thread[tid].task_list, &tl->list); in tasklet_wakeup()
256 LIST_ADDQ(&task_per_thread[tid].task_list, &tl->list); in tasklet_insert_into_tasklet_list()
422 __task_queue(task, &task_per_thread[tid].timers); in task_queue()
456 __task_queue(task, &task_per_thread[tid].timers); in task_schedule()
559 (task_per_thread[tid].rqueue_size > 0) | in thread_has_tasks()
560 !LIST_ISEMPTY(&task_per_thread[tid].task_list)); in thread_has_tasks()
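
The 2.0 header hits include thread_has_tasks(), which combines the run-queue counter with a task-list emptiness test. A simplified sketch (logical || instead of the bitwise | used in the hit, and a toy list type), assuming the lists were initialized as in init_task():

#include <stdbool.h>

#define MAX_THREADS 64

struct list { struct list *n, *p; };

struct task_per_thread_sketch {
    int rqueue_size;                   /* tasks in the per-thread run queue */
    struct list task_list;             /* tasks already pulled into the task list */
};

static struct task_per_thread_sketch task_per_thread[MAX_THREADS];

static bool list_is_empty(const struct list *l) { return l->n == l; }

/* Mirrors task.h:559-560 above: a thread has pending work when its run-queue
 * counter is non-zero or its task list is non-empty. 'tid' is the caller's index. */
static bool thread_has_tasks_sketch(int tid)
{
    return task_per_thread[tid].rqueue_size > 0 ||
           !list_is_empty(&task_per_thread[tid].task_list);
}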
/dports/net/haproxy/haproxy-2.4.10/src/
task.c
39 THREAD_LOCAL struct task_per_thread *sched = &task_per_thread[0]; /* scheduler context for the curr…
52 struct task_per_thread task_per_thread[MAX_THREADS]; variable
93 MT_LIST_APPEND(&task_per_thread[thr].shared_tasklet_list, in task_kill()
95 _HA_ATOMIC_INC(&task_per_thread[thr].rq_total); in task_kill()
96 _HA_ATOMIC_INC(&task_per_thread[thr].tasks_in_list); in task_kill()
140 _HA_ATOMIC_INC(&task_per_thread[thr].rq_total); in __tasklet_wakeup_on()
548 _HA_ATOMIC_DEC(&task_per_thread[tid].tasks_in_list); in run_tasks_from_lists()
627 struct task_per_thread * const tt = sched; in process_runnable_tasks()
898 tmp_wq = eb32_first(&task_per_thread[i].timers); in mworker_cleantasks()
916 memset(&task_per_thread, 0, sizeof(task_per_thread)); in init_task()
[all …]
debug.c
161 !eb_is_empty(&task_per_thread[thr].timers), in ha_thread_dump()
162 !eb_is_empty(&task_per_thread[thr].rqueue), in ha_thread_dump()
163 !(LIST_ISEMPTY(&task_per_thread[thr].tasklets[TL_URGENT]) && in ha_thread_dump()
164 LIST_ISEMPTY(&task_per_thread[thr].tasklets[TL_NORMAL]) && in ha_thread_dump()
165 LIST_ISEMPTY(&task_per_thread[thr].tasklets[TL_BULK]) && in ha_thread_dump()
166 MT_LIST_ISEMPTY(&task_per_thread[thr].shared_tasklet_list)), in ha_thread_dump()
167 task_per_thread[thr].tasks_in_list, in ha_thread_dump()
168 task_per_thread[thr].rq_total, in ha_thread_dump()
/dports/net/haproxy23/haproxy-2.3.16/include/haproxy/
task.h
98 extern THREAD_LOCAL struct task_per_thread *sched; /* current's thread scheduler context */
105 extern struct task_per_thread task_per_thread[MAX_THREADS];
159 ret += _HA_ATOMIC_LOAD(&task_per_thread[thr].rq_total); in total_run_queues()
382 MT_LIST_ADDQ(&task_per_thread[thr].shared_tasklet_list, (struct mt_list *)&tl->list); in tasklet_wakeup_on()
383 _HA_ATOMIC_ADD(&task_per_thread[thr].rq_total, 1); in tasklet_wakeup_on()
418 _HA_ATOMIC_SUB(&task_per_thread[t->tid >= 0 ? t->tid : tid].rq_total, 1); in tasklet_remove_from_tasklet_list()
533 _HA_ATOMIC_SUB(&task_per_thread[tl->tid >= 0 ? tl->tid : tid].rq_total, 1); in tasklet_free()
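
The 2.3 tasklet hits pick which thread's counter to decrement based on whether the tasklet is bound to a thread (tid >= 0) or floating. A sketch of that selection; cur_tid and the _sketch types are illustrative stand-ins for HAProxy's thread-local tid and real structures:

#include <stdatomic.h>

#define MAX_THREADS 64

struct task_per_thread_sketch { atomic_uint rq_total; };

static struct task_per_thread_sketch task_per_thread[MAX_THREADS];

/* Stand-in for the calling thread's index. */
static _Thread_local int cur_tid;

struct tasklet_sketch {
    int tid;   /* thread the tasklet is bound to, or -1 for "current thread" */
};

/* When a tasklet is removed or freed, the counter to decrement belongs to its
 * bound thread if it has one, otherwise to the calling thread, as in
 * tasklet_remove_from_tasklet_list()/tasklet_free() in the hits above. */
static void account_tasklet_removal(const struct tasklet_sketch *tl)
{
    int owner = tl->tid >= 0 ? tl->tid : cur_tid;
    atomic_fetch_sub(&task_per_thread[owner].rq_total, 1);
}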
task-t.h
77 struct task_per_thread { struct
/dports/net/haproxy/haproxy-2.4.10/include/haproxy/
task.h
96 extern THREAD_LOCAL struct task_per_thread *sched; /* current's thread scheduler context */
103 extern struct task_per_thread task_per_thread[MAX_THREADS];
158 ret += _HA_ATOMIC_LOAD(&task_per_thread[thr].rq_total); in total_run_queues()
171 ret += _HA_ATOMIC_LOAD(&task_per_thread[thr].nb_tasks); in total_allocated_tasks()
408 _HA_ATOMIC_DEC(&task_per_thread[t->tid >= 0 ? t->tid : tid].rq_total); in tasklet_remove_from_tasklet_list()
535 _HA_ATOMIC_DEC(&task_per_thread[tl->tid >= 0 ? tl->tid : tid].rq_total); in tasklet_free()
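
The 2.4 header hits aggregate the per-thread atomic counters into global figures in total_run_queues() and total_allocated_tasks(). A sketch of that summation, with stdatomic.h standing in for _HA_ATOMIC_LOAD:

#include <stdatomic.h>

#define MAX_THREADS 64

struct task_per_thread_sketch {
    atomic_uint rq_total;              /* tasks queued for this thread */
    atomic_uint nb_tasks;              /* tasks currently allocated by this thread */
};

static struct task_per_thread_sketch task_per_thread[MAX_THREADS];

/* Global figure obtained by summing the per-thread counters, as in
 * task.h:158 and 171 in the hits above. */
static unsigned int total_run_queues_sketch(void)
{
    unsigned int ret = 0;
    for (int thr = 0; thr < MAX_THREADS; thr++)
        ret += atomic_load(&task_per_thread[thr].rq_total);
    return ret;
}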
/dports/net/haproxy21/haproxy-2.1.12/include/proto/
task.h
96 extern THREAD_LOCAL struct task_per_thread *sched; /* current's thread scheduler context */
103 extern struct task_per_thread task_per_thread[MAX_THREADS];
248 LIST_ADDQ(&task_per_thread[tid].task_list, &tl->list); in tasklet_wakeup()
253 …if (MT_LIST_ADDQ(&task_per_thread[tl->tid].shared_tasklet_list, (struct mt_list *)&tl->list) == 1)… in tasklet_wakeup()
/dports/net/nuster/nuster-5.0.4.21/include/proto/
task.h
96 extern THREAD_LOCAL struct task_per_thread *sched; /* current's thread scheduler context */
103 extern struct task_per_thread task_per_thread[MAX_THREADS];
248 LIST_ADDQ(&task_per_thread[tid].task_list, &tl->list); in tasklet_wakeup()
253 …if (MT_LIST_ADDQ(&task_per_thread[tl->tid].shared_tasklet_list, (struct mt_list *)&tl->list) == 1)… in tasklet_wakeup()
/dports/net/haproxy22/haproxy-2.2.19/include/haproxy/
task.h
99 extern THREAD_LOCAL struct task_per_thread *sched; /* current's thread scheduler context */
107 extern struct task_per_thread task_per_thread[MAX_THREADS];
363 MT_LIST_ADDQ_NOCHECK(&task_per_thread[thr].shared_tasklet_list, (struct mt_list *)&tl->list); in tasklet_wakeup_on()
task-t.h
74 struct task_per_thread { struct
/dports/net/haproxy20/haproxy-2.0.26/include/types/
task.h
63 struct task_per_thread { struct
/dports/net/haproxy21/haproxy-2.1.12/include/types/
task.h
63 struct task_per_thread { struct
/dports/net/nuster/nuster-5.0.4.21/include/types/
task.h
63 struct task_per_thread { struct
