
Searched refs: close_work (Results 1 - 25 of 26), sorted by relevance


/kernel/linux/linux-5.10/net/vmw_vsock/
hyperv_transport.c
263 (!cancel_timeout || cancel_delayed_work(&vsk->close_work))) { in hvs_do_close_lock_held()
487 container_of(work, struct vsock_sock, close_work.work); in hvs_close_timeout()
517 INIT_DELAYED_WORK(&vsk->close_work, hvs_close_timeout); in hvs_close_lock_held()
519 schedule_delayed_work(&vsk->close_work, HVS_CLOSE_TIMEOUT); in hvs_close_lock_held()
virtio_transport_common.c
789 (!cancel_timeout || cancel_delayed_work(&vsk->close_work))) { in virtio_transport_do_close()
802 container_of(work, struct vsock_sock, close_work.work); in virtio_transport_close_timeout()
846 INIT_DELAYED_WORK(&vsk->close_work, in virtio_transport_close()
849 schedule_delayed_work(&vsk->close_work, VSOCK_CLOSE_TIMEOUT); in virtio_transport_close()
/kernel/linux/linux-6.6/net/vmw_vsock/
hyperv_transport.c
272 (!cancel_timeout || cancel_delayed_work(&vsk->close_work))) { in hvs_do_close_lock_held()
498 container_of(work, struct vsock_sock, close_work.work); in hvs_close_timeout()
528 INIT_DELAYED_WORK(&vsk->close_work, hvs_close_timeout); in hvs_close_lock_held()
530 schedule_delayed_work(&vsk->close_work, HVS_CLOSE_TIMEOUT); in hvs_close_lock_held()
virtio_transport_common.c
1018 (!cancel_timeout || cancel_delayed_work(&vsk->close_work))) { in virtio_transport_do_close()
1031 container_of(work, struct vsock_sock, close_work.work); in virtio_transport_close_timeout()
1075 INIT_DELAYED_WORK(&vsk->close_work, in virtio_transport_close()
1078 schedule_delayed_work(&vsk->close_work, VSOCK_CLOSE_TIMEOUT); in virtio_transport_close()
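The vsock hits above (hyperv_transport.c and virtio_transport_common.c, in both kernel trees) share the same close_work shape: close() arms a delayed work item as a timeout, and the path that sees the peer finish the shutdown cancels it. A minimal sketch of that pattern follows; my_sock, my_close_timeout(), my_start_close(), my_peer_closed() and MY_CLOSE_TIMEOUT are hypothetical names, the sock_hold/sock_put refcounting the real transports do around the work item is omitted, and only the workqueue calls mirror the listed code.

#include <linux/workqueue.h>
#include <linux/jiffies.h>

#define MY_CLOSE_TIMEOUT (8 * HZ)              /* hypothetical timeout */

struct my_sock {
        struct delayed_work close_work;
        bool close_work_scheduled;
};

/* Fires if the peer never completes the shutdown handshake in time. */
static void my_close_timeout(struct work_struct *work)
{
        struct my_sock *msk =
                container_of(work, struct my_sock, close_work.work);

        msk->close_work_scheduled = false;
        /* force-close the connection here */
}

/* Called from close(): arm the timeout instead of blocking the caller. */
static void my_start_close(struct my_sock *msk)
{
        INIT_DELAYED_WORK(&msk->close_work, my_close_timeout);
        msk->close_work_scheduled = true;
        schedule_delayed_work(&msk->close_work, MY_CLOSE_TIMEOUT);
}

/* Called when the peer acknowledges the close: drop the pending timeout. */
static void my_peer_closed(struct my_sock *msk)
{
        if (msk->close_work_scheduled &&
            cancel_delayed_work(&msk->close_work))
                msk->close_work_scheduled = false;
}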
/kernel/linux/linux-5.10/net/smc/
smc_close.c
116 if (cancel_work_sync(&smc->conn.close_work)) in smc_close_cancel_work()
354 close_work); in smc_close_passive_work()
438 sock_put(sk); /* sock_hold done by schedulers of close_work */ in smc_close_passive_work()
499 INIT_WORK(&smc->conn.close_work, smc_close_passive_work); in smc_close_init()
smc.h
206 struct work_struct close_work; /* peer sent some closing */ member
smc_cdc.c
373 sock_hold(&smc->sk); /* sock_put in close_work */ in smc_cdc_msg_recv_action()
374 if (!queue_work(smc_close_wq, &conn->close_work)) in smc_cdc_msg_recv_action()
/kernel/linux/linux-6.6/net/smc/
smc_close.c
119 if (cancel_work_sync(&smc->conn.close_work)) in smc_close_cancel_work()
360 close_work); in smc_close_passive_work()
444 sock_put(sk); /* sock_hold done by schedulers of close_work */ in smc_close_passive_work()
505 INIT_WORK(&smc->conn.close_work, smc_close_passive_work); in smc_close_init()
smc.h
232 struct work_struct close_work; /* peer sent some closing */ member
smc_cdc.c
388 sock_hold(&smc->sk); /* sock_put in close_work */ in smc_cdc_msg_recv_action()
389 if (!queue_work(smc_close_wq, &conn->close_work)) in smc_cdc_msg_recv_action()
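The SMC hits show a reference-counted hand-off around close_work: smc_cdc.c takes a socket reference before queueing the work and drops it again if queue_work() reports the item was already pending, while the passive-close worker drops the reference it was handed. A sketch of that hand-off, with hypothetical names (my_conn, my_close_passive_work(), my_handle_peer_close()):

#include <linux/workqueue.h>
#include <net/sock.h>

struct my_conn {
        struct work_struct close_work;
        struct sock *sk;
};

static void my_close_passive_work(struct work_struct *work)
{
        struct my_conn *conn = container_of(work, struct my_conn, close_work);

        /* react to the peer's close indication here */

        sock_put(conn->sk);             /* pairs with sock_hold() below */
}

static void my_conn_init(struct my_conn *conn)
{
        INIT_WORK(&conn->close_work, my_close_passive_work);
}

static void my_handle_peer_close(struct my_conn *conn, struct workqueue_struct *wq)
{
        sock_hold(conn->sk);            /* keep the socket alive for the worker */
        if (!queue_work(wq, &conn->close_work))
                sock_put(conn->sk);     /* already queued: drop the extra ref */
}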
/kernel/linux/linux-5.10/drivers/infiniband/ulp/rtrs/
rtrs-srv.h
79 struct work_struct close_work; member
rtrs-clt.h
133 struct work_struct close_work; member
rtrs-clt.c
1460 INIT_WORK(&sess->close_work, rtrs_clt_close_work); in alloc_sess()
1826 queue_work(rtrs_wq, &sess->close_work); in rtrs_clt_close_conns()
1828 flush_work(&sess->close_work); in rtrs_clt_close_conns()
2203 sess = container_of(work, struct rtrs_clt_sess, close_work); in rtrs_clt_close_work()
rtrs-srv.c
485 queue_work(rtrs_wq, &sess->close_work); in close_sess()
1504 sess = container_of(work, typeof(*sess), close_work); in rtrs_srv_close_work()
1750 INIT_WORK(&sess->close_work, rtrs_srv_close_work); in __alloc_sess()
/kernel/linux/linux-6.6/drivers/infiniband/ulp/rtrs/
rtrs-srv.h
78 struct work_struct close_work; member
rtrs-clt.h
136 struct work_struct close_work; member
rtrs-clt.c
1573 INIT_WORK(&clt_path->close_work, rtrs_clt_close_work); in alloc_path()
1953 queue_work(rtrs_wq, &clt_path->close_work); in rtrs_clt_close_conns()
1955 flush_work(&clt_path->close_work); in rtrs_clt_close_conns()
2314 clt_path = container_of(work, struct rtrs_clt_path, close_work); in rtrs_clt_close_work()
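The RTRS hits pair queue_work() with flush_work() so the close path only returns once close_work has actually run. A sketch of that queue-then-wait shutdown, with hypothetical names (my_path, my_close_work(), my_close_conns()):

#include <linux/workqueue.h>

struct my_path {
        struct work_struct close_work;
};

static void my_close_work(struct work_struct *work)
{
        struct my_path *path = container_of(work, struct my_path, close_work);

        /* tear down connections and resources owned by *path */
        (void)path;
}

static void my_path_init(struct my_path *path)
{
        INIT_WORK(&path->close_work, my_close_work);
}

static void my_close_conns(struct my_path *path, struct workqueue_struct *wq,
                           bool wait)
{
        queue_work(wq, &path->close_work);
        if (wait)
                flush_work(&path->close_work);  /* block until the worker finished */
}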
/kernel/linux/linux-5.10/include/net/
af_vsock.h
62 struct delayed_work close_work; member
/kernel/linux/linux-5.10/kernel/
acct.c
183 static void close_work(struct work_struct *work) in close_work() function
246 INIT_WORK(&acct->work, close_work); in acct_on()
/kernel/linux/linux-5.10/net/caif/
chnl_net.c
124 static void close_work(struct work_struct *work) in close_work() function
138 static DECLARE_WORK(close_worker, close_work);
/kernel/linux/linux-6.6/kernel/
acct.c
202 static void close_work(struct work_struct *work) in close_work() function
265 INIT_WORK(&acct->work, close_work); in acct_on()
/kernel/linux/linux-6.6/net/caif/
chnl_net.c
119 static void close_work(struct work_struct *work) in close_work() function
133 static DECLARE_WORK(close_worker, close_work);
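In kernel/acct.c and net/caif/chnl_net.c, close_work is the name of a local handler function rather than a struct field: acct.c binds it to an embedded work item with INIT_WORK(), while chnl_net.c uses a statically allocated item via DECLARE_WORK(). A sketch of both bindings, with hypothetical names (my_close, my_close_worker, my_obj):

#include <linux/workqueue.h>

static void my_close(struct work_struct *work)
{
        /* close or flush whatever this work item is responsible for */
}

/* Statically allocated work item, as in chnl_net.c. */
static DECLARE_WORK(my_close_worker, my_close);

/* Work item embedded in an object and bound at runtime, as in acct.c. */
struct my_obj {
        struct work_struct work;
};

static void my_obj_init(struct my_obj *obj)
{
        INIT_WORK(&obj->work, my_close);
}

/* Either form can then be queued, e.g. schedule_work(&my_close_worker). */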
/kernel/linux/linux-6.6/include/net/
af_vsock.h
63 struct delayed_work close_work; member
/kernel/linux/linux-5.10/drivers/infiniband/core/
ucma.c
99 struct work_struct close_work; member
178 struct ucma_context *ctx = container_of(work, struct ucma_context, close_work); in ucma_close_id()
201 INIT_WORK(&ctx->close_work, ucma_close_id); in ucma_alloc_ctx()
363 queue_work(system_unbound_wq, &ctx->close_work); in ucma_event_handler()
577 cancel_work_sync(&ctx->close_work); in ucma_destroy_private_ctx()
579 ucma_close_id(&ctx->close_work); in ucma_destroy_private_ctx()
/kernel/linux/linux-6.6/drivers/infiniband/core/
ucma.c
99 struct work_struct close_work; member
178 struct ucma_context *ctx = container_of(work, struct ucma_context, close_work); in ucma_close_id()
201 INIT_WORK(&ctx->close_work, ucma_close_id); in ucma_alloc_ctx()
363 queue_work(system_unbound_wq, &ctx->close_work); in ucma_event_handler()
577 cancel_work_sync(&ctx->close_work); in ucma_destroy_private_ctx()
579 ucma_close_id(&ctx->close_work); in ucma_destroy_private_ctx()
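The ucma hits suggest a third variant: an event handler defers teardown to close_work on system_unbound_wq, and the explicit destroy path cancels any pending instance and then invokes the handler directly so the cleanup runs exactly once. A simplified sketch under that assumption; my_ctx, my_close_id() and the closed flag are hypothetical (the real code keys the decision off its own context state), and concurrent re-queueing after cancel_work_sync() is not handled here:

#include <linux/workqueue.h>

struct my_ctx {
        struct work_struct close_work;
        bool closed;
};

static void my_close_id(struct work_struct *work)
{
        struct my_ctx *ctx = container_of(work, struct my_ctx, close_work);

        if (ctx->closed)
                return;
        ctx->closed = true;
        /* release the underlying id/resources for ctx */
}

static void my_ctx_init(struct my_ctx *ctx)
{
        INIT_WORK(&ctx->close_work, my_close_id);
}

/* From the event handler: defer the teardown. */
static void my_on_removal(struct my_ctx *ctx)
{
        queue_work(system_unbound_wq, &ctx->close_work);
}

/* From the destroy path: make sure the teardown ran exactly once. */
static void my_destroy_ctx(struct my_ctx *ctx)
{
        cancel_work_sync(&ctx->close_work);     /* no handler running after this */
        my_close_id(&ctx->close_work);          /* run it now if it never ran */
}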
