Home
last modified time | relevance | path

Searched refs: rq_lock (Results 1 – 16 of 16) sorted by relevance

/linux/drivers/gpu/drm/scheduler/
H A Dsched_entity.c108 spin_lock_init(&entity->rq_lock); in drm_sched_entity_init()
245 spin_lock(&entity->rq_lock); in drm_sched_entity_kill()
248 spin_unlock(&entity->rq_lock); in drm_sched_entity_kill()
397 spin_lock(&entity->rq_lock); in drm_sched_entity_set_priority()
399 spin_unlock(&entity->rq_lock); in drm_sched_entity_set_priority()
556 spin_lock(&entity->rq_lock); in drm_sched_entity_select_rq()
563 spin_unlock(&entity->rq_lock); in drm_sched_entity_select_rq()
601 spin_lock(&entity->rq_lock); in drm_sched_entity_push_job()
603 spin_unlock(&entity->rq_lock); in drm_sched_entity_push_job()
610 spin_unlock(&entity->rq_lock); in drm_sched_entity_push_job()
H A Dsched_main.c173 spin_lock(&entity->rq_lock); in drm_sched_rq_update_fifo()
184 spin_unlock(&entity->rq_lock); in drm_sched_rq_update_fifo()
/linux/drivers/net/ethernet/intel/ice/
H A Dice_controlq.h101 struct mutex rq_lock; /* Receive queue lock */ member
H A Dice_controlq.c540 mutex_lock(&cq->rq_lock); in ice_shutdown_rq()
562 mutex_unlock(&cq->rq_lock); in ice_shutdown_rq()
789 mutex_init(&cq->rq_lock); in ice_init_ctrlq_locks()
827 mutex_destroy(&cq->rq_lock); in ice_destroy_ctrlq_locks()
1189 mutex_lock(&cq->rq_lock); in ice_clean_rq_elem()
1257 mutex_unlock(&cq->rq_lock); in ice_clean_rq_elem()
/linux/Documentation/scheduler/
H A Dmembarrier.rst16 rq_lock(); smp_mb__after_spinlock() in __schedule(). The barrier matches a full
/linux/include/drm/
H A Dgpu_scheduler.h148 spinlock_t rq_lock; member
/linux/drivers/infiniband/hw/bnxt_re/
H A Dib_verbs.h89 spinlock_t rq_lock; /* protect rq */ member
H A Dib_verbs.c1604 spin_lock_init(&qp->rq_lock); in bnxt_re_create_qp()
2923 spin_lock_irqsave(&qp->rq_lock, flags); in bnxt_re_post_recv()
2966 spin_unlock_irqrestore(&qp->rq_lock, flags); in bnxt_re_post_recv()
/linux/drivers/infiniband/sw/siw/
H A Dsiw_verbs.c361 spin_lock_init(&qp->rq_lock); in siw_create_qp()
1060 spin_lock_irqsave(&qp->rq_lock, flags); in siw_post_receive()
1088 spin_unlock_irqrestore(&qp->rq_lock, flags); in siw_post_receive()
H A Dsiw.h448 spinlock_t rq_lock; member
/linux/kernel/sched/
H A Dsched.h1801 static inline void rq_lock(struct rq *rq, struct rq_flags *rf) in rq_lock() function
1829 DEFINE_LOCK_GUARD_1(rq_lock, struct rq,
1830 rq_lock(_T->lock, &_T->rf),
1851 rq_lock(rq, rf); in this_rq_lock_irq()
H A Dcore.c828 rq_lock(rq, &rf); in hrtick()
854 rq_lock(rq, &rf); in __hrtick_start()
2434 rq_lock(rq, rf); in move_queued_task()
2508 rq_lock(rq, &rf); in migration_cpu_stop()
3942 rq_lock(rq, &rf); in ttwu_queue()
5585 rq_lock(rq, &rf); in sched_tick()
6575 rq_lock(rq, &rf); in __schedule()
7833 rq_lock(rq, &rf); in __balance_push_cpu_stop()
H A Ddeadline.c1203 scoped_guard (rq_lock, rq) { in dl_server_timer()
1772 rq_lock(rq, &rf); in inactive_task_timer()
2276 rq_lock(rq, &rf); in migrate_task_rq_dl()
H A Drt.c829 rq_lock(rq, &rf); in do_sched_rt_period_timer()
H A Dfair.c6117 rq_lock(rq, &rf); in __cfsb_csd_unthrottle()
9643 rq_lock(rq, &rf); in attach_one_task()
9659 rq_lock(env->dst_rq, &rf); in attach_tasks()
H A Dext.c4764 rq_lock(rq, &rf); in scx_dump_state()