/kernel/linux/linux-6.6/fs/nfs/ |
direct.c | NFS O_DIRECT request tracking via struct nfs_direct_req. get_dreq() (74) and put_dreq() (79) take and drop a reference on dreq->io_count with atomic_inc()/atomic_dec_and_test(); nfs_direct_handle_truncated() (85) clamps dreq->max_count when the server returns less data than requested. Further uses: nfs_direct_write_complete() (forward declaration at 71, definition at 722), nfs_direct_count_bytes() (103), nfs_direct_truncate_request() (121), nfs_init_cinfo_from_dreq() (162), nfs_direct_req_alloc() (174), nfs_direct_req_free() (193), nfs_direct_req_release() (203), nfs_dreq_bytes_left() (208), nfs_direct_wait() (218), nfs_direct_complete() (243), nfs_direct_read_completion() (266), nfs_direct_read_schedule_iovec() (324), nfs_file_direct_read() (423), nfs_direct_write_reschedule() (541), nfs_direct_commit_complete() (603), nfs_direct_resched_write() (648), nfs_direct_commit_schedule() (664), nfs_direct_write_clear_reqs() (684), nfs_direct_write_schedule_work() (704), nfs_direct_write_completion() (730), nfs_direct_write_reschedule_io() (790), nfs_direct_write_schedule_iovec() (829), nfs_file_direct_write() (963).
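The get_dreq()/put_dreq() pair in direct.c is a plain atomic counter over the sub-I/Os of one direct request: every scheduled read or write takes a reference, and whichever completion drops io_count to zero finishes the request. A minimal sketch reconstructed from the fragments above; the struct layout is abridged and the final-completion call site is only indicated in a comment, not taken from the listing.

#include <linux/atomic.h>
#include <linux/kref.h>
#include <linux/printk.h>

/* Abridged: the real struct nfs_direct_req carries many more fields. */
struct nfs_direct_req {
	struct kref	kref;		/* release reference                 */
	atomic_t	io_count;	/* sub-I/Os still in flight          */
	ssize_t		count;		/* bytes completed so far (abridged) */
};

static inline void get_dreq(struct nfs_direct_req *dreq)
{
	atomic_inc(&dreq->io_count);
}

static inline int put_dreq(struct nfs_direct_req *dreq)
{
	/* True only for the caller that retires the last sub-I/O. */
	return atomic_dec_and_test(&dreq->io_count);
}

/* Assumed usage pattern, mirroring the completion handlers listed above. */
static void nfs_direct_sub_io_done(struct nfs_direct_req *dreq)
{
	if (put_dreq(dreq))
		pr_debug("last sub-I/O done; the kernel would call nfs_direct_complete() here\n");
}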
cache_lib.c | Deferred upcall handling for the NFS DNS cache. nfs_cache_defer_req_put() (68) drops dreq->count with refcount_dec_and_test() and kfree()s the request on the final put; nfs_dns_cache_revisit() (76) recovers the nfs_cache_defer_req from the embedded cache_deferred_req via container_of(), completes dreq->completion and drops its reference; nfs_dns_cache_defer() (86) recovers it from the embedded cache_req the same way. Also: nfs_cache_defer_req_alloc() (97), nfs_cache_wait_for_upcall() (108).
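cache_lib.c combines a refcount_t with container_of(): the sunrpc cache layer only ever sees the embedded cache_deferred_req, and the revisit callback walks back to the NFS wrapper from it. A sketch reconstructed from the fragments above; the member order in the struct and the second parameter of the revisit callback are assumptions based on the generic sunrpc callback signature.

#include <linux/completion.h>
#include <linux/kernel.h>
#include <linux/refcount.h>
#include <linux/slab.h>
#include <linux/sunrpc/cache.h>

/* Abridged wrapper: the embedded members are the ones the listing uses. */
struct nfs_cache_defer_req {
	struct cache_req	req;
	struct cache_deferred_req deferred_req;
	struct completion	completion;
	refcount_t		count;
};

void nfs_cache_defer_req_put(struct nfs_cache_defer_req *dreq)
{
	if (refcount_dec_and_test(&dreq->count))
		kfree(dreq);
}

/* Revisit callback: walk back from the embedded member to the wrapper. */
static void nfs_dns_cache_revisit(struct cache_deferred_req *d, int toomany)
{
	struct nfs_cache_defer_req *dreq;

	dreq = container_of(d, struct nfs_cache_defer_req, deferred_req);

	complete(&dreq->completion);	/* wakes nfs_cache_wait_for_upcall() */
	nfs_cache_defer_req_put(dreq);	/* drop the deferral's reference     */
}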
dns_resolve.c | do_cache_lookup() (283) passes &dreq->req to cache_check() (289) so the sunrpc cache can defer the lookup while the DNS upcall is outstanding; do_cache_lookup_wait() (325) allocates the nfs_cache_defer_req (328), bails out if the allocation fails (329), performs the lookup (331), waits for the upcall via nfs_cache_wait_for_upcall() (333) and drops the reference with nfs_cache_defer_req_put() (337).
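The waiting lookup is a short alloc / lookup / wait / release sequence. A sketch of that flow using the names visible in the listing; the -EAGAIN check and the retry after the upcall are assumptions about the parts the listing elides (the kernel retries with a non-waiting lookup variant).

static int do_cache_lookup_wait(struct cache_detail *cd,
				struct nfs_dns_ent *key,
				struct nfs_dns_ent **item)
{
	struct nfs_cache_defer_req *dreq;
	int ret = -ENOMEM;

	dreq = nfs_cache_defer_req_alloc();
	if (!dreq)
		goto out;

	/* cache_check() may defer on &dreq->req while the upcall runs */
	ret = do_cache_lookup(cd, key, item, dreq);
	if (ret == -EAGAIN) {
		/* sleep on dreq->completion until the revisit callback fires */
		ret = nfs_cache_wait_for_upcall(dreq);
		if (!ret)
			ret = do_cache_lookup(cd, key, item, dreq); /* retry */
	}
	nfs_cache_defer_req_put(dreq);
out:
	return ret;
}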
|
cache_lib.h | Declarations of nfs_cache_defer_req_put() (24) and nfs_cache_wait_for_upcall() (25).
|
/kernel/linux/linux-5.10/fs/nfs/ |
direct.c | The 5.10 version of the same file: nfs_direct_write_complete() forward declaration (102), get_dreq() (105), put_dreq() (110), nfs_direct_handle_truncated() (116), nfs_direct_count_bytes() (136), nfs_init_cinfo_from_dreq() (186), nfs_direct_req_alloc() (198), nfs_direct_req_free() (217), nfs_direct_req_release() (227), nfs_dreq_bytes_left() (232), nfs_direct_wait() (241), nfs_direct_complete() (266), nfs_direct_read_completion() (289), nfs_direct_read_schedule_iovec() (347), nfs_file_direct_read() (449), nfs_direct_write_reschedule() (567), nfs_direct_commit_complete() (621), nfs_direct_resched_write() (660), nfs_direct_commit_schedule() (674), nfs_direct_write_clear_reqs() (694), nfs_direct_write_schedule_work() (713), nfs_direct_write_complete() (731), nfs_direct_write_completion() (738), nfs_direct_write_reschedule_io() (795), nfs_direct_write_schedule_iovec() (827), nfs_file_direct_write() (940).
cache_lib.c | Identical to the 6.6 entry above: nfs_cache_defer_req_put() (68), nfs_dns_cache_revisit() (76), nfs_dns_cache_defer() (86), nfs_cache_defer_req_alloc() (97), nfs_cache_wait_for_upcall() (108).
dns_resolve.c | As in 6.6, shifted by one line: do_cache_lookup() (284) hands &dreq->req to cache_check() (290); do_cache_lookup_wait() (326) allocates the nfs_cache_defer_req (329), performs the lookup (332), waits with nfs_cache_wait_for_upcall() (334) and releases it with nfs_cache_defer_req_put() (338).
|
cache_lib.h | Declarations of nfs_cache_defer_req_put() (24) and nfs_cache_wait_for_upcall() (25).
|
/kernel/linux/linux-5.10/net/dccp/ |
minisocks.c | dccp_create_openreq_child() (90) fetches the request sock with dccp_rsk() and copies its state into the child socket: dccps_service (99), dccps_timestamp_echo/time (100-101), dccps_iss/gss (116-117), dccps_isr/gsr (119-120), then activates the negotiated feature values from dreq->dreq_featneg via dccp_feat_activate_values() (125). dccp_check_req() (146) and dccp_reqsk_init() (259) also operate on the request sock.
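The child-socket setup is a straight copy of the negotiated handshake state out of the request sock. A sketch of that step, wrapped in a hypothetical helper (copy_request_state() is not a kernel function); the field assignments themselves come from the listing, and "feat.h" is assumed to be the private net/dccp header that declares dccp_feat_activate_values().

#include <linux/dccp.h>
#include <net/sock.h>
#include "feat.h"		/* net/dccp private header (assumed include) */

/* Hypothetical helper showing what dccp_create_openreq_child() copies. */
static int copy_request_state(struct sock *newsk, struct request_sock *req)
{
	struct dccp_request_sock *dreq = dccp_rsk(req);
	struct dccp_sock *newdp = dccp_sk(newsk);

	newdp->dccps_service        = dreq->dreq_service;
	newdp->dccps_timestamp_echo = dreq->dreq_timestamp_echo;
	newdp->dccps_timestamp_time = dreq->dreq_timestamp_time;

	/* initial/greatest sequence numbers, sent (iss/gss) and received (isr/gsr) */
	newdp->dccps_iss = dreq->dreq_iss;
	newdp->dccps_gss = dreq->dreq_gss;
	newdp->dccps_isr = dreq->dreq_isr;
	newdp->dccps_gsr = dreq->dreq_gsr;

	/* activate the feature values negotiated during the handshake */
	return dccp_feat_activate_values(newsk, &dreq->dreq_featneg);
}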
options.c | dccp_parse_options() (48) takes the request sock as @dreq (documented at 44-45; NULL outside connection setup): CCID-specific options are skipped while @dreq is set (99), feature options are routed to dccp_feat_parse_options(sk, dreq, ...) (125), and a received timestamp is echoed through dreq->dreq_timestamp_echo with dreq->dreq_timestamp_time = dccp_timestamp() during setup (144-146). dccp_insert_option_timestamp_echo() (352) and dccp_insert_options_rsk() (592) build the corresponding options on output.
output.c | dccp_make_response() (394) builds the Response for a request sock: it bumps dreq->dreq_gss with dccp_inc_seqno() (415), records it as the packet sequence number (417), resolves server CCID feature dependencies with dccp_feat_server_ccid_dependencies() (420), inserts the request-sock options via dccp_insert_options_rsk() (423), then fills the header with dreq_gss as the sequence number (435), dreq_gsr as the acknowledgment (436) and dreq_service as the Response service code (437).
|
ipv4.c | dccp_v4_conn_request() (581) gets the request sock via dccp_rsk() (612), parses the client's options into it (613), then seeds its sequence state: dreq_isr and dreq_gsr from the received sequence number (633-634), dreq_iss from dccp_v4_init_sequence() and dreq_gss from dreq_iss (635-636).
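Both conn_request paths seed the request sock's sequence state the same way: the client's Request sequence number becomes ISR/GSR, and a freshly generated local ISS becomes GSS. A sketch of that step as it would sit inside ipv4.c (the helper name is hypothetical; dccp_v4_init_sequence() and struct dccp_skb_cb are local to net/dccp).

/* Hypothetical helper wrapping the assignments visible in the listing. */
static void dccp_seed_request_seqnos(struct dccp_request_sock *dreq,
				     const struct dccp_skb_cb *dcb,
				     const struct sk_buff *skb)
{
	/* ISR/GSR: initial and greatest sequence number received,
	 * both taken from the client's Request packet. */
	dreq->dreq_isr = dcb->dccpd_seq;
	dreq->dreq_gsr = dreq->dreq_isr;

	/* ISS/GSS: our initial sequence number; nothing has been sent
	 * yet, so the greatest-sent value starts out equal to it. */
	dreq->dreq_iss = dccp_v4_init_sequence(skb);
	dreq->dreq_gss = dreq->dreq_iss;
}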
ipv6.c | dccp_v6_conn_request() (310) mirrors the IPv4 path: dccp_rsk() (348), dccp_parse_options() (349), dreq_isr and dreq_gsr from the received sequence number (381-382), dreq_iss from dccp_v6_init_sequence() (383) and dreq_gss from dreq_iss (384).
/kernel/linux/linux-6.6/net/dccp/ |
minisocks.c | Same as the 5.10 entry, shifted by one line: dccp_create_openreq_child() (91) copies dccps_service (100), dccps_timestamp_echo/time (101-102), dccps_iss/gss (117-118) and dccps_isr/gsr (120-121) from the request sock and activates the negotiated features (126); dccp_check_req() (147), dccp_reqsk_init() (260).
options.c | Identical to the 5.10 entry: dccp_parse_options() (48) with the @dreq kernel-doc at 44-45, CCID-specific option handling at 99, dccp_feat_parse_options() at 125, timestamp echo into the request sock at 144-146; dccp_insert_option_timestamp_echo() (352), dccp_insert_options_rsk() (592).
output.c | dccp_make_response() (403): dccp_rsk() (422), dccp_inc_seqno(&dreq->dreq_gss) (424), packet sequence number from dreq_gss (426), dccp_feat_server_ccid_dependencies() (429), dccp_insert_options_rsk() (432), header sequence/ack/service from dreq_gss, dreq_gsr and dreq_service (444-446).
|
ipv4.c | dccp_v4_conn_request() (597): dccp_rsk() (628), dccp_parse_options() (629), dreq_isr and dreq_gsr from the received sequence number (649-650), dreq_iss from dccp_v4_init_sequence() and dreq_gss from dreq_iss (651-652).
/kernel/linux/linux-5.10/drivers/crypto/marvell/cesa/ |
tdma.c | DMA descriptor chain handling for a CESA request. mv_cesa_dma_step() (37) looks up dreq->engine (39) and writes the first descriptor's cur_dma address into the engine with writel_relaxed() (51); mv_cesa_dma_cleanup() (58) walks the chain from dreq->chain.first (62) and clears chain.first/chain.last (75-76); mv_cesa_dma_prepare() (79) iterates the chain via tdma->next (84); mv_cesa_tdma_chain() (96) links a request's chain onto the engine.
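mv_cesa_dma_cleanup() walks the singly linked TDMA descriptor chain hanging off dreq->chain.first and then resets the head and tail pointers, while mv_cesa_dma_step() points the engine at the first descriptor's DMA address. The loop body is elided in the listing, so the sketch below uses a hypothetical free_tdma_desc() helper in place of the driver's dma_pool frees, and the register offset macro is an assumption.

#include <linux/io.h>
#include "cesa.h"	/* struct mv_cesa_req, mv_cesa_tdma_desc, mv_cesa_engine */

static void free_tdma_desc(struct mv_cesa_tdma_desc *tdma);	/* hypothetical */

void mv_cesa_dma_cleanup(struct mv_cesa_req *dreq)
{
	struct mv_cesa_tdma_desc *tdma;

	for (tdma = dreq->chain.first; tdma;) {
		struct mv_cesa_tdma_desc *old_tdma = tdma;

		tdma = tdma->next;		/* advance before freeing       */
		free_tdma_desc(old_tdma);	/* stand-in for dma_pool_free() */
	}

	dreq->chain.first = NULL;
	dreq->chain.last = NULL;
}

void mv_cesa_dma_step(struct mv_cesa_req *dreq)
{
	struct mv_cesa_engine *engine = dreq->engine;

	/* Hand the head of the chain to the TDMA engine (offset assumed). */
	writel_relaxed(dreq->chain.first->cur_dma,
		       engine->regs + CESA_TDMA_NEXT_ADDR);
	/* ... interrupt unmasking and the actual kick are omitted here ... */
}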
cesa.h | Declarations: mv_cesa_dma_step() (822), the inline mv_cesa_dma_process() (824), mv_cesa_dma_prepare() (836), mv_cesa_dma_cleanup() (838), mv_cesa_tdma_chain() (840).
|
/kernel/linux/linux-6.6/drivers/crypto/marvell/cesa/ |
tdma.c | Identical to the 5.10 entry: mv_cesa_dma_step() (37) with the writel_relaxed() of chain.first->cur_dma at 51, mv_cesa_dma_cleanup() (58) clearing chain.first/last at 75-76, mv_cesa_dma_prepare() (79), mv_cesa_tdma_chain() (96).
cesa.h | Declarations: mv_cesa_dma_step() (826), the inline mv_cesa_dma_process() (828), mv_cesa_dma_prepare() (840), mv_cesa_dma_cleanup() (842), mv_cesa_tdma_chain() (844).
|
/kernel/linux/linux-5.10/net/sunrpc/ |
cache.c | Deferred request bookkeeping in the sunrpc cache. __unhash_deferred_req() (583) removes dreq from the hash hlist and, if queued, from the 'recent' list; __hash_deferred_req() (592) initialises dreq->recent and adds dreq->hash to cache_defer_hash[]; setup_deferral() (600) records dreq->item and hashes the request. Also: cache_restart_thread() (625), cache_wait_req() (635, where dreq is the on-stack sleeper's handle), cache_defer_req() (694), cache_revisit_request() (717), cache_clean_deferred() (742).
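The sunrpc cache keeps deferred requests on a small hash of hlists plus a 'recent' list used for pruning. A sketch of the two helpers above; the table size, the hash function and the cache_defer_cnt bookkeeping are assumptions about details the listing elides, and only the hlist_del_init()/list_del_init()/hlist_add_head() calls are taken from it.

#include <linux/hash.h>
#include <linux/list.h>
#include <linux/sunrpc/cache.h>

#define DEFER_HASH_BITS	9				/* assumed: 512 buckets */
static struct hlist_head cache_defer_hash[1 << DEFER_HASH_BITS];
static int cache_defer_cnt;				/* assumed counter      */

static void __unhash_deferred_req(struct cache_deferred_req *dreq)
{
	hlist_del_init(&dreq->hash);
	if (!list_empty(&dreq->recent)) {
		list_del_init(&dreq->recent);
		cache_defer_cnt--;			/* assumed bookkeeping  */
	}
}

static void __hash_deferred_req(struct cache_deferred_req *dreq,
				struct cache_head *item)
{
	int hash = hash_ptr(item, DEFER_HASH_BITS);	/* assumed hash         */

	INIT_LIST_HEAD(&dreq->recent);
	hlist_add_head(&dreq->hash, &cache_defer_hash[hash]);
}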
/kernel/linux/linux-6.6/net/sunrpc/ |
cache.c | Same helpers, shifted by two lines: __unhash_deferred_req() (585), __hash_deferred_req() (594), setup_deferral() (602), cache_restart_thread() (627), cache_wait_req() (637), cache_defer_req() (709), cache_revisit_request() (733), cache_clean_deferred() (758).
/kernel/linux/linux-5.10/drivers/s390/block/ |
dasd_diag.c | dasd_start_diag() (168) takes dreq from cqr->data (179) and fills the DIAG I/O block with dreq->block_count (184) and dreq->bio (186); dasd_diag_build_cp() (513) allocates the request with dasd_smalloc_request(DASD_DIAG_MAGIC, 0, struct_size(dreq, bio, count), ...) (546), points dreq at cqr->data (551), stores the block count (552) and walks the flexible bio array starting at dreq->bio (553).
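dasd_diag_build_cp() sizes the channel request so that the flexible bio[] array at the end of struct dasd_diag_req holds one entry per block, using struct_size() to keep the count-times-entry-size multiplication overflow-safe; dasd_start_diag() later hands dreq->block_count and dreq->bio to the DIAG I/O block. A minimal sketch of that sizing step; both struct layouts here are abridged stand-ins for the real definitions, not copies of them.

#include <linux/overflow.h>	/* struct_size() */
#include <linux/types.h>

/* Abridged stand-in for the real struct dasd_diag_bio. */
struct dasd_diag_bio {
	u8	type;
	u8	status;
	u16	spare;
	u32	block_number;
	void	*buffer;
};

/* Header plus a flexible array of per-block bio entries. */
struct dasd_diag_req {
	unsigned int		block_count;
	struct dasd_diag_bio	bio[];
};

/* Allocation size as computed in dasd_diag_build_cp(): header + count entries. */
static size_t diag_req_size(unsigned int count)
{
	struct dasd_diag_req *dreq = NULL;	/* only the type is used here */

	return struct_size(dreq, bio, count);
}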
|
/kernel/linux/linux-6.6/drivers/s390/block/ |
dasd_diag.c | Same as the 5.10 entry, shifted by one line: dasd_start_diag() (167) with cqr->data at 178 and the I/O block fill at 183/185; dasd_diag_build_cp() (512) with the struct_size()-based allocation at 545, dreq = cqr->data at 550, the block count at 551 and dreq->bio at 552.
|