Lines matching defs:refs
85 #include "refs.h"
245 struct io_ring_ctx *ctx = container_of(ref, struct io_ring_ctx, refs);
258 percpu_ref_get(&ctx->refs);
266 percpu_ref_put(&ctx->refs);
305 if (percpu_ref_init(&ctx->refs, io_ring_ctx_ref_free,
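
The hits above (lines 245-305) show the standard percpu_ref lifecycle for the ring context: a release callback that recovers the container with container_of(), an init with that callback, and get/put pairs around use. Below is a minimal sketch of that pattern, not the io_uring code itself; my_ctx, my_ctx_ref_free, and the zero flags argument are illustrative assumptions, and only the generic <linux/percpu-refcount.h> API is relied on.

    #include <linux/completion.h>
    #include <linux/percpu-refcount.h>
    #include <linux/slab.h>

    struct my_ctx {
        struct percpu_ref refs;
        struct completion ref_comp;    /* signalled once all refs are gone */
    };

    /* Release callback: runs once the last reference drops after a kill. */
    static void my_ctx_ref_free(struct percpu_ref *ref)
    {
        struct my_ctx *ctx = container_of(ref, struct my_ctx, refs);

        complete(&ctx->ref_comp);
    }

    static struct my_ctx *my_ctx_alloc(void)
    {
        struct my_ctx *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);

        if (!ctx)
            return NULL;
        init_completion(&ctx->ref_comp);
        /* Starts with one reference, dropped later via percpu_ref_kill(). */
        if (percpu_ref_init(&ctx->refs, my_ctx_ref_free, 0, GFP_KERNEL)) {
            kfree(ctx);
            return NULL;
        }
        return ctx;
    }

    /* Callers bracket any work that must keep the context alive. */
    static void my_ctx_do_work(struct my_ctx *ctx)
    {
        percpu_ref_get(&ctx->refs);
        /* ... work that may complete asynchronously ... */
        percpu_ref_put(&ctx->refs);
    }
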
420 /* linked timeouts should have two refs once prep'ed */
549 if (atomic_dec_and_test(&ev_fd->refs)) {
581 atomic_inc(&ev_fd->refs);
585 atomic_dec(&ev_fd->refs);
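
The ev_fd hits (lines 549-585, plus the atomic_set at line 2846 further down) use a plain atomic_t counter rather than a percpu_ref, which fits a small, rarely-contended registration object; the sqd->refs hit at line 4305 is the same idea expressed with the hardened refcount_t API. A hedged sketch of the atomic_t variant, with my_evfd as an illustrative stand-in:

    #include <linux/atomic.h>
    #include <linux/slab.h>

    struct my_evfd {
        atomic_t refs;
        /* ... eventfd context, flags, rcu head ... */
    };

    /* Registration: the object starts life holding one reference. */
    static void my_evfd_register(struct my_evfd *ev_fd)
    {
        atomic_set(&ev_fd->refs, 1);
    }

    static void my_evfd_get(struct my_evfd *ev_fd)
    {
        atomic_inc(&ev_fd->refs);
    }

    static void my_evfd_put(struct my_evfd *ev_fd)
    {
        /* Free only when the last reference goes away. */
        if (atomic_dec_and_test(&ev_fd->refs))
            kfree(ev_fd);
    }
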
767 unsigned int refs = tctx->cached_refs;
769 if (refs) {
771 percpu_counter_sub(&tctx->inflight, refs);
772 put_task_struct_many(task, refs);
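
Lines 767-772 flush a per-task cache of references: task_struct references and the inflight percpu_counter are charged in batches ahead of time, and whatever remains unused is returned in one call. A sketch under that assumption, with my_tctx standing in for the io_uring task context:

    #include <linux/percpu_counter.h>
    #include <linux/sched/task.h>

    struct my_tctx {
        struct percpu_counter inflight;
        unsigned int cached_refs;
    };

    static void my_tctx_drop_cached_refs(struct my_tctx *tctx,
                                         struct task_struct *task)
    {
        unsigned int refs = tctx->cached_refs;

        if (refs) {
            tctx->cached_refs = 0;
            /* Undo the batched inflight accounting... */
            percpu_counter_sub(&tctx->inflight, refs);
            /* ...and return the batched task_struct references. */
            put_task_struct_many(task, refs);
        }
    }
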
1108 percpu_ref_get_many(&ctx->refs, ret);
1120 /* refs were already put, restore them for io_req_task_complete() */
1165 percpu_ref_put(&ctx->refs);
1186 percpu_ref_get(&(*ctx)->refs);
1229 percpu_ref_put(&last_ctx->refs);
1232 percpu_ref_get(&last_ctx->refs);
1241 percpu_ref_put(&last_ctx->refs);
2846 atomic_set(&ev_fd->refs, 1);
2882 percpu_ref_put_many(&ctx->refs, nr);
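
Lines 1108 and 2882 are the batched forms: when a group of requests is set up or recycled at once, a single get_many()/put_many() on ctx->refs replaces per-request get/put pairs (the get/put run at lines 1186-1241 is the related trick of holding one reference on the ring currently being walked). A sketch reusing the my_ctx type from the first example; the function names are illustrative:

    /* One context reference per request in the batch (cf. line 1108). */
    static void my_ctx_grab_batch(struct my_ctx *ctx, unsigned int nr)
    {
        percpu_ref_get_many(&ctx->refs, nr);
    }

    /* Drop them all at once when the batch is recycled (cf. line 2882). */
    static void my_ctx_drop_batch(struct my_ctx *ctx, unsigned int nr)
    {
        percpu_ref_put_many(&ctx->refs, nr);
    }
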
2929 percpu_ref_exit(&ctx->refs);
2955 percpu_ref_put(&ctx->refs);
2973 percpu_ref_get(&ctx->refs);
2975 percpu_ref_put(&ctx->refs);
3072 * we're waiting for refs to drop. We need to reap these manually,
3161 percpu_ref_kill(&ctx->refs);
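
The teardown hits (percpu_ref_kill at line 3161, percpu_ref_exit at line 2929) follow the usual kill / drain / exit ordering for a percpu_ref. A sketch of that shutdown sequence for the my_ctx example above, assuming the release callback signals a completion as in the first sketch:

    static void my_ctx_destroy(struct my_ctx *ctx)
    {
        /* Switch to atomic mode and drop the initial reference. */
        percpu_ref_kill(&ctx->refs);
        /* Wait until every outstanding get() has been matched by a put(). */
        wait_for_completion(&ctx->ref_comp);
        /* No references remain; safe to tear down and free. */
        percpu_ref_exit(&ctx->refs);
        kfree(ctx);
    }
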
4305 refcount_inc(&sqd->refs);
4372 * We don't quiesce the refs for register anymore and so it can't be
4375 if (WARN_ON_ONCE(percpu_ref_is_dying(&ctx->refs)))
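
The last hit (line 4375) guards the register path: per the comment at line 4372, registration no longer quiesces ctx->refs, so a dying ref there would indicate a caller bug. A minimal sketch of that kind of guard, again using the hypothetical my_ctx; the error code is illustrative:

    #include <linux/errno.h>

    static int my_ctx_register(struct my_ctx *ctx)
    {
        /* Should be unreachable once the ref has been killed. */
        if (WARN_ON_ONCE(percpu_ref_is_dying(&ctx->refs)))
            return -ENXIO;
        /* ... handle the registration opcode ... */
        return 0;
    }
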