Home
last modified time | relevance | path

Searched for references to rq_lock (results 1 - 25 of 27), sorted by relevance

Pages: 1 2

/kernel/linux/linux-5.10/drivers/gpu/drm/scheduler/
sched_entity.c
    76: spin_lock_init(&entity->rq_lock);      in drm_sched_entity_init()
   182: spin_lock(&entity->rq_lock);           in drm_sched_entity_flush()
   185: spin_unlock(&entity->rq_lock);         in drm_sched_entity_flush()
   352: spin_lock(&entity->rq_lock);           in drm_sched_entity_set_priority()
   354: spin_unlock(&entity->rq_lock);         in drm_sched_entity_set_priority()
   466: spin_lock(&entity->rq_lock);           in drm_sched_entity_select_rq()
   474: spin_unlock(&entity->rq_lock);         in drm_sched_entity_select_rq()
   502: spin_lock(&entity->rq_lock);           in drm_sched_entity_push_job()
   504: spin_unlock(&entity->rq_lock);         in drm_sched_entity_push_job()
   510: spin_unlock(&entity->rq_lock);         in drm_sched_entity_push_job()
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/scheduler/
sched_entity.c
    86: spin_lock_init(&entity->rq_lock);      in drm_sched_entity_init()
   223: spin_lock(&entity->rq_lock);           in drm_sched_entity_kill()
   226: spin_unlock(&entity->rq_lock);         in drm_sched_entity_kill()
   375: spin_lock(&entity->rq_lock);           in drm_sched_entity_set_priority()
   377: spin_unlock(&entity->rq_lock);         in drm_sched_entity_set_priority()
   534: spin_lock(&entity->rq_lock);           in drm_sched_entity_select_rq()
   541: spin_unlock(&entity->rq_lock);         in drm_sched_entity_select_rq()
   579: spin_lock(&entity->rq_lock);           in drm_sched_entity_push_job()
   581: spin_unlock(&entity->rq_lock);         in drm_sched_entity_push_job()
   588: spin_unlock(&entity->rq_lock);         in drm_sched_entity_push_job()
[all...]
sched_main.c
   105: spin_lock(&entity->rq_lock);           in drm_sched_rq_update_fifo()
   116: spin_unlock(&entity->rq_lock);         in drm_sched_rq_update_fifo()
/kernel/linux/linux-5.10/drivers/net/ethernet/intel/ice/
ice_controlq.h
    96: struct mutex rq_lock; /* Receive queue lock */   (member)
ice_controlq.c
   536: mutex_lock(&cq->rq_lock);              in ice_shutdown_rq()
   558: mutex_unlock(&cq->rq_lock);            in ice_shutdown_rq()
   741: mutex_init(&cq->rq_lock);              in ice_init_ctrlq_locks()
   777: mutex_destroy(&cq->rq_lock);           in ice_destroy_ctrlq_locks()
  1124: mutex_lock(&cq->rq_lock);              in ice_clean_rq_elem()
  1195: mutex_unlock(&cq->rq_lock);            in ice_clean_rq_elem()
/kernel/linux/linux-6.6/drivers/net/ethernet/intel/ice/
ice_controlq.h
    95: struct mutex rq_lock; /* Receive queue lock */   (member)
ice_controlq.c
   544: mutex_lock(&cq->rq_lock);              in ice_shutdown_rq()
   566: mutex_unlock(&cq->rq_lock);            in ice_shutdown_rq()
   790: mutex_init(&cq->rq_lock);              in ice_init_ctrlq_locks()
   828: mutex_destroy(&cq->rq_lock);           in ice_destroy_ctrlq_locks()
  1173: mutex_lock(&cq->rq_lock);              in ice_clean_rq_elem()
  1241: mutex_unlock(&cq->rq_lock);            in ice_clean_rq_elem()
/kernel/linux/linux-5.10/include/drm/
gpu_scheduler.h
    61: * @rq_lock: lock to modify the runqueue to which this entity belongs.
    90: spinlock_t rq_lock;                    (member)
/kernel/linux/linux-5.10/drivers/infiniband/hw/bnxt_re/
ib_verbs.h
    85: spinlock_t rq_lock; /* protect rq */   (member)
ib_verbs.c
  1469: spin_lock_init(&qp->rq_lock);                   in bnxt_re_create_qp()
  2751: spin_lock_irqsave(&qp->rq_lock, flags);         in bnxt_re_post_recv()
  2794: spin_unlock_irqrestore(&qp->rq_lock, flags);    in bnxt_re_post_recv()
/kernel/linux/linux-6.6/include/drm/
gpu_scheduler.h
   105: * @rq_lock, but readers are generally lockless and seem to just race
   139: * drm_sched_entity_set_priority(). Protected by &rq_lock.
   144: * @rq_lock:
   148: spinlock_t rq_lock;                    (member)
/kernel/linux/linux-6.6/drivers/infiniband/hw/bnxt_re/
ib_verbs.h
    87: spinlock_t rq_lock; /* protect rq */   (member)
ib_verbs.c
  1555: spin_lock_init(&qp->rq_lock);                   in bnxt_re_create_qp()
  2851: spin_lock_irqsave(&qp->rq_lock, flags);         in bnxt_re_post_recv()
  2894: spin_unlock_irqrestore(&qp->rq_lock, flags);    in bnxt_re_post_recv()
/kernel/linux/linux-5.10/include/linux/sunrpc/
svc.h
   299: spinlock_t rq_lock; /* per-request lock */   (member)
/kernel/linux/linux-5.10/kernel/sched/
core.c
   362: rq_lock(rq, &rf);                      in hrtick()
   388: rq_lock(rq, &rf);                      in __hrtick_start()
  1818: rq_lock(rq, rf);                       in move_queued_task()
  1878: rq_lock(rq, &rf);                      in migration_cpu_stop()
  2843: rq_lock(rq, &rf);                      in ttwu_queue()
  4192: rq_lock(rq, &rf);                      in scheduler_tick()
  4654: rq_lock(rq, &rf);                      in __schedule()
  7168: rq_lock(rq, &rf);                      in do_isolation_work_cpu_stop()
sched.h
  1427: rq_lock(struct rq *rq, struct rq_flags *rf)
  1474: rq_lock(rq, rf);
/kernel/linux/linux-6.6/kernel/sched/
sched.h
  1297: /* Scratch cpumask to be temporarily used under rq_lock */
  1813: rq_lock(struct rq *rq, struct rq_flags *rf)
  1852: DEFINE_LOCK_GUARD_1(rq_lock, struct rq,
  1853: rq_lock(_T->lock, &_T->rf),
  1875: rq_lock(rq, rf);
  2789: * acquire rq lock instead of rq_lock(). So at the end of these two functions
core.c
   800: rq_lock(rq, &rf);                      in hrtick()
   826: rq_lock(rq, &rf);                      in __hrtick_start()
  2585: rq_lock(rq, rf);                       in move_queued_task()
  2659: rq_lock(rq, &rf);                      in migration_cpu_stop()
  4151: rq_lock(rq, &rf);                      in ttwu_queue()
  5824: rq_lock(rq, &rf);                      in scheduler_tick()
  6795: rq_lock(rq, &rf);                      in __schedule()
  9665: rq_lock(rq, &rf);                      in __balance_push_cpu_stop()
 10023: rq_lock(rq, &rf);                      in do_isolation_work_cpu_stop()
fair.c
  5973: rq_lock(rq, &rf);                      in __cfsb_csd_unthrottle()
  9449: rq_lock(rq, &rf);                      in attach_one_task()
  9465: rq_lock(env->dst_rq, &rf);             in attach_tasks()
 13104: rq_lock(rq, &rf);                      in task_fork_fair()
deadline.c
  1883: rq_lock(rq, &rf);                      in migrate_task_rq_dl()
/kernel/linux/linux-5.10/drivers/infiniband/sw/siw/
siw_verbs.c
   355: spin_lock_init(&qp->rq_lock);                   in siw_create_qp()
  1057: spin_lock_irqsave(&qp->rq_lock, flags);         in siw_post_receive()
  1085: spin_unlock_irqrestore(&qp->rq_lock, flags);    in siw_post_receive()
siw.h
   449: spinlock_t rq_lock;                    (member)
/kernel/linux/linux-6.6/drivers/infiniband/sw/siw/
siw_verbs.c
   356: spin_lock_init(&qp->rq_lock);                   in siw_create_qp()
  1061: spin_lock_irqsave(&qp->rq_lock, flags);         in siw_post_receive()
  1089: spin_unlock_irqrestore(&qp->rq_lock, flags);    in siw_post_receive()
siw.h
   451: spinlock_t rq_lock;                    (member)
/kernel/linux/linux-5.10/net/sunrpc/
svc.c
   613: spin_lock_init(&rqstp->rq_lock);       in svc_rqst_alloc()

Completed in 93 milliseconds

Pages: 1 2