/kernel/linux/linux-6.6/drivers/gpu/drm/i915/intel_gvt.c
     80  static void save_mmio(struct intel_gvt_mmio_table_iter *iter, u32 offset,  in save_mmio() argument
     83  struct drm_i915_private *dev_priv = iter->i915;  in save_mmio()
     87  mmio = iter->data + i;  in save_mmio()
     93  static int handle_mmio(struct intel_gvt_mmio_table_iter *iter,  in handle_mmio() argument
     99  save_mmio(iter, offset, size);  in handle_mmio()
    107  struct intel_gvt_mmio_table_iter iter;  in save_initial_hw_state() local
    128  iter.i915 = dev_priv;  in save_initial_hw_state()
    129  iter.data = vgpu->initial_mmio;  in save_initial_hw_state()
    130  iter.handle_mmio_cb = handle_mmio;  in save_initial_hw_state()
    132  ret = intel_gvt_iterate_mmio_table(&iter);  in save_initial_hw_state()
    [all...]
/kernel/linux/linux-5.10/arch/x86/kernel/cpu/microcode/intel.c
    123  struct ucode_patch *iter, *tmp, *p = NULL;  in save_microcode_patch() local
    129  list_for_each_entry_safe(iter, tmp, &microcode_cache, plist) {  in save_microcode_patch()
    130  mc_saved_hdr = (struct microcode_header_intel *)iter->data;  in save_microcode_patch()
    144  list_replace(&iter->plist, &p->plist);  in save_microcode_patch()
    145  kfree(iter->data);  in save_microcode_patch()
    146  kfree(iter);  in save_microcode_patch()
    678  struct ucode_patch *iter, *tmp;  in find_patch() local
    680  list_for_each_entry_safe(iter, tmp, &microcode_cache, plist) {  in find_patch()
    682  phdr = (struct microcode_header_intel *)iter->data;  in find_patch()
    692  return iter ...  in find_patch()
    813  generic_load_microcode(int cpu, struct iov_iter *iter)  generic_load_microcode() argument
    925  struct iov_iter iter;  request_microcode_fw() local
    954  struct iov_iter iter;  request_microcode_user() local
    [all...]
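A minimal sketch of the list_for_each_entry_safe() pattern the two microcode helpers above rely on: the extra cursor keeps the walk valid while the current node is unlinked and freed. The element type, list head, and function name here are made up for illustration; only the list.h and slab.h calls are real kernel API.

    #include <linux/list.h>
    #include <linux/slab.h>

    struct demo_patch {                     /* hypothetical element type */
            struct list_head plist;
            unsigned int rev;
    };

    static LIST_HEAD(demo_cache);           /* hypothetical list head */

    static void demo_drop_older_than(unsigned int rev)
    {
            struct demo_patch *iter, *tmp;

            /* _safe variant: 'tmp' caches the next node, so 'iter' may be freed */
            list_for_each_entry_safe(iter, tmp, &demo_cache, plist) {
                    if (iter->rev >= rev)
                            continue;
                    list_del(&iter->plist);
                    kfree(iter);
            }
    }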
/kernel/linux/linux-6.6/block/fops.c
     38  struct iov_iter *iter)  in blkdev_dio_unaligned()
     41  !bdev_iter_is_aligned(bdev, iter);  in blkdev_dio_unaligned()
     47  struct iov_iter *iter, unsigned int nr_pages)  in __blkdev_direct_IO_simple()
     56  if (blkdev_dio_unaligned(bdev, pos, iter))  in __blkdev_direct_IO_simple()
     68  if (iov_iter_rw(iter) == READ) {  in __blkdev_direct_IO_simple()
     70  if (user_backed_iter(iter))  in __blkdev_direct_IO_simple()
     78  ret = bio_iov_iter_get_pages(&bio, iter);  in __blkdev_direct_IO_simple()
     83  if (iov_iter_rw(iter) == WRITE)  in __blkdev_direct_IO_simple()
    162  static ssize_t __blkdev_direct_IO(struct kiocb *iocb, struct iov_iter *iter,  in __blkdev_direct_IO() argument
    169  bool is_read = (iov_iter_rw(iter) ...  in __blkdev_direct_IO()
     37  blkdev_dio_unaligned(struct block_device *bdev, loff_t pos, struct iov_iter *iter)  blkdev_dio_unaligned() argument
     46  __blkdev_direct_IO_simple(struct kiocb *iocb, struct iov_iter *iter, unsigned int nr_pages)  __blkdev_direct_IO_simple() argument
    301  __blkdev_direct_IO_async(struct kiocb *iocb, struct iov_iter *iter, unsigned int nr_pages)  __blkdev_direct_IO_async() argument
    366  blkdev_direct_IO(struct kiocb *iocb, struct iov_iter *iter)  blkdev_direct_IO() argument
    [all...]
/kernel/linux/linux-6.6/drivers/dma-buf/dma-fence-unwrap.c
     65  struct dma_fence_unwrap *iter)  in __dma_fence_unwrap_merge()
     76  dma_fence_unwrap_for_each(tmp, &iter[i], fences[i]) {  in __dma_fence_unwrap_merge()
    106  fences[i] = dma_fence_unwrap_first(fences[i], &iter[i]);  in __dma_fence_unwrap_merge()
    118  fences[i] = dma_fence_unwrap_next(&iter[i]);  in __dma_fence_unwrap_merge()
    139  fences[i] = dma_fence_unwrap_next(&iter[i]);  in __dma_fence_unwrap_merge()
    142  fences[sel] = dma_fence_unwrap_next(&iter[sel]);  in __dma_fence_unwrap_merge()
    149  fences[sel] = dma_fence_unwrap_next(&iter[sel]);  in __dma_fence_unwrap_merge()
     63  __dma_fence_unwrap_merge(unsigned int num_fences, struct dma_fence **fences, struct dma_fence_unwrap *iter)  __dma_fence_unwrap_merge() argument
/kernel/linux/linux-6.6/drivers/gpu/drm/msm/disp/msm_disp_snapshot.c
     13  struct drm_print_iterator iter;  in disp_devcoredump_read() local
     19  iter.data = buffer;  in disp_devcoredump_read()
     20  iter.offset = 0;  in disp_devcoredump_read()
     21  iter.start = offset;  in disp_devcoredump_read()
     22  iter.remain = count;  in disp_devcoredump_read()
     24  p = drm_coredump_printer(&iter);  in disp_devcoredump_read()
     28  return count - iter.remain;  in disp_devcoredump_read()
/kernel/linux/linux-6.6/rust/alloc/vec/spec_extend.rs
      5  use core::iter::TrustedLen;
     13  fn spec_extend(&mut self, iter: I);  in spec_extend()
     18  fn try_spec_extend(&mut self, iter: I) -> Result<(), TryReserveError>;  in try_spec_extend()
     26  default fn spec_extend(&mut self, iter: I) {
     27  self.extend_desugared(iter)
     35  default fn try_spec_extend(&mut self, iter: I) -> Result<(), TryReserveError> {
     36  self.try_extend_desugared(iter)
/kernel/linux/linux-6.6/lib/dynamic_debug.c
   1032  static struct _ddebug *ddebug_iter_first(struct ddebug_iter *iter)  in ddebug_iter_first() argument
   1035  iter->table = NULL;  in ddebug_iter_first()
   1038  iter->table = list_entry(ddebug_tables.next,  in ddebug_iter_first()
   1040  iter->idx = iter->table->num_ddebugs;  in ddebug_iter_first()
   1041  return &iter->table->ddebugs[--iter->idx];  in ddebug_iter_first()
   1050  static struct _ddebug *ddebug_iter_next(struct ddebug_iter *iter)  in ddebug_iter_next() argument
   1052  if (iter->table == NULL)  in ddebug_iter_next()
   1054  if (--iter ...  in ddebug_iter_next()
   1075  struct ddebug_iter *iter = m->private;  ddebug_proc_start() local
   1098  struct ddebug_iter *iter = m->private;  ddebug_proc_next() local
   1112  ddebug_class_name(struct ddebug_iter *iter, struct _ddebug *dp)  ddebug_class_name() argument
   1131  struct ddebug_iter *iter = m->private;  ddebug_proc_show() local
   1406  struct _ddebug *iter, *iter_mod_start;  dynamic_debug_init() local
    [all...]
/kernel/linux/linux-6.6/include/linux/uio.h
     23  /* iter types */
     56  * &iter->__ubuf_iovec or iter->__iov
     64  * also modifying any of the zero-filling iter init functions.
     86  static inline const struct iovec *iter_iov(const struct iov_iter *iter)  in iter_iov() argument
     88  if (iter->iter_type == ITER_UBUF)  in iter_iov()
     89  return (const struct iovec *) &iter->__ubuf_iovec;  in iter_iov()
     90  return iter->__iov;  in iter_iov()
     93  #define iter_iov_addr(iter) (iter_iov(iter) ...
    101  iov_iter_save_state(struct iov_iter *iter, struct iov_iter_state *state)  iov_iter_save_state() argument
    418  iov_iter_extract_will_pin(const struct iov_iter *iter)  iov_iter_extract_will_pin() argument
    [all...]
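iter_iov() above just hands back the iovec array behind an iov_iter; the more common consumer pattern is to build an iterator over a buffer and copy through it. A minimal sketch, assuming the 6.6-era API (iov_iter_kvec(), copy_from_iter(), ITER_SOURCE); the function name and buffer layout are invented for illustration.

    #include <linux/uio.h>
    #include <linux/errno.h>

    /* Sketch only: drain one kernel-space buffer into 'dst' via an iov_iter. */
    static int demo_copy_from_kvec(void *dst, const void *src, size_t len)
    {
            struct kvec kv = { .iov_base = (void *)src, .iov_len = len };
            struct iov_iter iter;

            /* ITER_SOURCE: data flows out of the iterator toward the caller */
            iov_iter_kvec(&iter, ITER_SOURCE, &kv, 1, len);

            return copy_from_iter(dst, len, &iter) == len ? 0 : -EFAULT;
    }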
/kernel/linux/linux-6.6/include/linux/dma-fence-chain.h
    110  * @iter: current fence
    118  #define dma_fence_chain_for_each(iter, head) \
    119  for (iter = dma_fence_get(head); iter; \
    120  iter = dma_fence_chain_walk(iter))
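A short sketch of how the dma_fence_chain_for_each() macro above is typically driven: the macro takes its own reference on head and dma_fence_chain_walk() carries that reference along, so a loop that runs to completion needs no explicit dma_fence_put(). The function name and the "count pending" purpose are illustrative, not taken from the listing.

    #include <linux/dma-fence.h>
    #include <linux/dma-fence-chain.h>

    /* Sketch: count unsignaled fences in a chain; 'head' may be NULL. */
    static unsigned int demo_count_pending(struct dma_fence *head)
    {
            struct dma_fence *iter;
            unsigned int pending = 0;

            dma_fence_chain_for_each(iter, head) {
                    /* for chain nodes, look at the fence contained in the link */
                    if (!dma_fence_is_signaled(dma_fence_chain_contained(iter)))
                            pending++;
            }
            return pending;
    }

Breaking out of the loop early would leave a reference held in iter, which the caller would then have to drop with dma_fence_put().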
/kernel/linux/linux-5.10/kernel/gcov/gcov.h
     66  void gcov_iter_free(struct gcov_iterator *iter);
     67  void gcov_iter_start(struct gcov_iterator *iter);
     68  int gcov_iter_next(struct gcov_iterator *iter);
     69  int gcov_iter_write(struct gcov_iterator *iter, struct seq_file *seq);
     70  struct gcov_info *gcov_iter_get_info(struct gcov_iterator *iter);
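One way this iterator interface could be driven, as a hedged sketch only: the allocation step (gcov_iter_new()) is not shown in the listing, and the return-value conventions (0 from gcov_iter_next() meaning "more data follows") are assumptions about this internal API, not guaranteed by the header excerpt above.

    /* Sketch: stream every record of one gcov data object into a seq_file. */
    static int demo_dump_info(struct gcov_iterator *iter, struct seq_file *seq)
    {
            int rv = 0;

            gcov_iter_start(iter);                    /* rewind to the first record */
            do {
                    rv = gcov_iter_write(iter, seq);  /* emit the current record */
                    if (rv)
                            break;
            } while (!gcov_iter_next(iter));          /* assumed: 0 = advanced OK */

            return rv;
    }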
/kernel/linux/linux-5.10/drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
    473  if (domain->iter >= nr_domains)  in etnaviv_pm_query_dom()
    476  dom = pm_domain(gpu, domain->iter);  in etnaviv_pm_query_dom()
    480  domain->id = domain->iter;  in etnaviv_pm_query_dom()
    484  domain->iter++;  in etnaviv_pm_query_dom()
    485  if (domain->iter == nr_domains)  in etnaviv_pm_query_dom()
    486  domain->iter = 0xff;  in etnaviv_pm_query_dom()
    505  if (signal->iter >= dom->nr_signals)  in etnaviv_pm_query_sig()
    508  sig = &dom->signal[signal->iter];  in etnaviv_pm_query_sig()
    510  signal->id = signal->iter;  in etnaviv_pm_query_sig()
    513  signal->iter ...  in etnaviv_pm_query_sig()
    [all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
    505  if (domain->iter >= nr_domains)  in etnaviv_pm_query_dom()
    508  dom = pm_domain(gpu, domain->iter);  in etnaviv_pm_query_dom()
    512  domain->id = domain->iter;  in etnaviv_pm_query_dom()
    516  domain->iter++;  in etnaviv_pm_query_dom()
    517  if (domain->iter == nr_domains)  in etnaviv_pm_query_dom()
    518  domain->iter = 0xff;  in etnaviv_pm_query_dom()
    537  if (signal->iter >= dom->nr_signals)  in etnaviv_pm_query_sig()
    540  sig = &dom->signal[signal->iter];  in etnaviv_pm_query_sig()
    542  signal->id = signal->iter;  in etnaviv_pm_query_sig()
    545  signal->iter ...  in etnaviv_pm_query_sig()
    [all...]
/kernel/linux/linux-5.10/fs/nfs/direct.c
    157  * @iter: I/O buffer
    164  ssize_t nfs_direct_IO(struct kiocb *iocb, struct iov_iter *iter)  in nfs_direct_IO() argument
    172  VM_BUG_ON(iov_iter_count(iter) != PAGE_SIZE);  in nfs_direct_IO()
    174  if (iov_iter_rw(iter) == READ)  in nfs_direct_IO()
    175  return nfs_file_direct_read(iocb, iter, true);  in nfs_direct_IO()
    176  return nfs_file_direct_write(iocb, iter, true);  in nfs_direct_IO()
    348  struct iov_iter *iter,  in nfs_direct_read_schedule_iovec()
    363  while (iov_iter_count(iter)) {  in nfs_direct_read_schedule_iovec()
    369  result = iov_iter_get_pages_alloc(iter, &pagevec,  in nfs_direct_read_schedule_iovec()
    375  iov_iter_advance(iter, byte ...  in nfs_direct_read_schedule_iovec()
    347  nfs_direct_read_schedule_iovec(struct nfs_direct_req *dreq, struct iov_iter *iter, loff_t pos)  nfs_direct_read_schedule_iovec() argument
    443  nfs_file_direct_read(struct kiocb *iocb, struct iov_iter *iter, bool swap)  nfs_file_direct_read() argument
    827  nfs_direct_write_schedule_iovec(struct nfs_direct_req *dreq, struct iov_iter *iter, loff_t pos, int ioflags)  nfs_direct_write_schedule_iovec() argument
    932  nfs_file_direct_write(struct kiocb *iocb, struct iov_iter *iter, bool swap)  nfs_file_direct_write() argument
    [all...]
/kernel/linux/linux-5.10/net/ipv6/ila/ila_xlat.c
    383  struct rhashtable_iter iter;  in ila_xlat_nl_cmd_flush() local
    388  rhashtable_walk_enter(&ilan->xlat.rhash_table, &iter);  in ila_xlat_nl_cmd_flush()
    389  rhashtable_walk_start(&iter);  in ila_xlat_nl_cmd_flush()
    392  ila = rhashtable_walk_next(&iter);  in ila_xlat_nl_cmd_flush()
    419  rhashtable_walk_stop(&iter);  in ila_xlat_nl_cmd_flush()
    420  rhashtable_walk_exit(&iter);  in ila_xlat_nl_cmd_flush()
    510  struct ila_dump_iter *iter;  in ila_xlat_nl_dump_start() local
    512  iter = kmalloc(sizeof(*iter), GFP_KERNEL);  in ila_xlat_nl_dump_start()
    513  if (!iter)  in ila_xlat_nl_dump_start()
    526  struct ila_dump_iter *iter = (struct ila_dump_iter *)cb->args[0];  ila_xlat_nl_dump_done() local
    537  struct ila_dump_iter *iter = (struct ila_dump_iter *)cb->args[0];  ila_xlat_nl_dump() local
    [all...]
/kernel/linux/linux-6.6/net/ipv6/ila/ila_xlat.c
    382  struct rhashtable_iter iter;  in ila_xlat_nl_cmd_flush() local
    387  rhashtable_walk_enter(&ilan->xlat.rhash_table, &iter);  in ila_xlat_nl_cmd_flush()
    388  rhashtable_walk_start(&iter);  in ila_xlat_nl_cmd_flush()
    391  ila = rhashtable_walk_next(&iter);  in ila_xlat_nl_cmd_flush()
    418  rhashtable_walk_stop(&iter);  in ila_xlat_nl_cmd_flush()
    419  rhashtable_walk_exit(&iter);  in ila_xlat_nl_cmd_flush()
    509  struct ila_dump_iter *iter;  in ila_xlat_nl_dump_start() local
    511  iter = kmalloc(sizeof(*iter), GFP_KERNEL);  in ila_xlat_nl_dump_start()
    512  if (!iter)  in ila_xlat_nl_dump_start()
    525  struct ila_dump_iter *iter = (struct ila_dump_iter *)cb->args[0];  ila_xlat_nl_dump_done() local
    536  struct ila_dump_iter *iter = (struct ila_dump_iter *)cb->args[0];  ila_xlat_nl_dump() local
    [all...]
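Both ila_xlat.c hits above follow the standard rhashtable walker protocol (enter/start, next in a loop, stop/exit). A minimal sketch of that protocol with a placeholder table and no per-element work; the -EAGAIN handling is the part the flush code above also has to cope with when the table is resized mid-walk.

    #include <linux/rhashtable.h>
    #include <linux/err.h>

    /* Sketch: visit every object in 'demo_table' (a placeholder rhashtable). */
    static void demo_walk(struct rhashtable *demo_table)
    {
            struct rhashtable_iter iter;
            void *obj;

            rhashtable_walk_enter(demo_table, &iter);   /* register the walker */
            rhashtable_walk_start(&iter);               /* enter the RCU read side */

            while ((obj = rhashtable_walk_next(&iter)) != NULL) {
                    if (IS_ERR(obj)) {
                            if (PTR_ERR(obj) == -EAGAIN)
                                    continue;           /* table resized under us */
                            break;
                    }
                    /* inspect 'obj' here; no sleeping between start and stop */
            }

            rhashtable_walk_stop(&iter);
            rhashtable_walk_exit(&iter);
    }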
/kernel/linux/linux-5.10/kernel/trace/trace_printk.c
     55  const char **iter;  in hold_module_trace_bprintk_format() local
     63  for (iter = start; iter < end; iter++) {  in hold_module_trace_bprintk_format()
     64  struct trace_bprintk_fmt *tb_fmt = lookup_format(*iter);  in hold_module_trace_bprintk_format()
     67  *iter = tb_fmt->fmt;  in hold_module_trace_bprintk_format()
     74  fmt = kmalloc(strlen(*iter) + 1, GFP_KERNEL);  in hold_module_trace_bprintk_format()
     77  strcpy(fmt, *iter);  in hold_module_trace_bprintk_format()
     82  *iter = fmt;  in hold_module_trace_bprintk_format()
/kernel/linux/linux-6.6/drivers/char/misc.c
    125  struct miscdevice *c = NULL, *iter;  in misc_open() local
    131  list_for_each_entry(iter, &misc_list, list) {  in misc_open()
    132  if (iter->minor != minor)  in misc_open()
    134  c = iter;  in misc_open()
    135  new_fops = fops_get(iter->fops);  in misc_open()
    144  list_for_each_entry(iter, &misc_list, list) {  in misc_open()
    145  if (iter->minor != minor)  in misc_open()
    147  c = iter;  in misc_open()
    148  new_fops = fops_get(iter->fops);  in misc_open()
/kernel/linux/linux-6.6/kernel/trace/trace_printk.c
     55  const char **iter;  in hold_module_trace_bprintk_format() local
     63  for (iter = start; iter < end; iter++) {  in hold_module_trace_bprintk_format()
     64  struct trace_bprintk_fmt *tb_fmt = lookup_format(*iter);  in hold_module_trace_bprintk_format()
     67  *iter = tb_fmt->fmt;  in hold_module_trace_bprintk_format()
     74  fmt = kmalloc(strlen(*iter) + 1, GFP_KERNEL);  in hold_module_trace_bprintk_format()
     77  strcpy(fmt, *iter);  in hold_module_trace_bprintk_format()
     82  *iter = fmt;  in hold_module_trace_bprintk_format()
/kernel/linux/linux-5.10/net/ipv4/ipmr_base.c
    114  void *mr_vif_seq_idx(struct net *net, struct mr_vif_iter *iter, loff_t pos)  in mr_vif_seq_idx() argument
    116  struct mr_table *mrt = iter->mrt;  in mr_vif_seq_idx()
    118  for (iter->ct = 0; iter->ct < mrt->maxvif; ++iter->ct) {  in mr_vif_seq_idx()
    119  if (!VIF_EXISTS(mrt, iter->ct))  in mr_vif_seq_idx()
    122  return &mrt->vif_table[iter->ct];  in mr_vif_seq_idx()
    130  struct mr_vif_iter *iter = seq->private;  in mr_vif_seq_next() local
    132  struct mr_table *mrt = iter->mrt;  in mr_vif_seq_next()
    136  return mr_vif_seq_idx(net, iter,  in mr_vif_seq_next()
    [all...]
/kernel/linux/linux-6.6/net/ipv4/ipmr_base.c
    114  void *mr_vif_seq_idx(struct net *net, struct mr_vif_iter *iter, loff_t pos)  in mr_vif_seq_idx() argument
    116  struct mr_table *mrt = iter->mrt;  in mr_vif_seq_idx()
    118  for (iter->ct = 0; iter->ct < mrt->maxvif; ++iter->ct) {  in mr_vif_seq_idx()
    119  if (!VIF_EXISTS(mrt, iter->ct))  in mr_vif_seq_idx()
    122  return &mrt->vif_table[iter->ct];  in mr_vif_seq_idx()
    130  struct mr_vif_iter *iter = seq->private;  in mr_vif_seq_next() local
    132  struct mr_table *mrt = iter->mrt;  in mr_vif_seq_next()
    136  return mr_vif_seq_idx(net, iter,  in mr_vif_seq_next()
    [all...]
/kernel/linux/linux-6.6/drivers/cxl/core/region.c
    158  struct cxl_port *iter = cxled_to_port(cxled);  in cxl_region_decode_reset() local
    165  while (!is_cxl_root(to_cxl_port(iter->dev.parent)))  in cxl_region_decode_reset()
    166  iter = to_cxl_port(iter->dev.parent);  in cxl_region_decode_reset()
    168  for (ep = cxl_ep_load(iter, cxlmd); iter;  in cxl_region_decode_reset()
    169  iter = ep->next, ep = cxl_ep_load(iter, cxlmd)) {  in cxl_region_decode_reset()
    173  cxl_rr = cxl_rr_load(iter, cxlr);  in cxl_region_decode_reset()
    221  struct cxl_port *iter;  in cxl_region_decode_commit() local
    787  struct cxl_region_ref *cxl_rr, *iter;  alloc_region_ref() local
   1325  struct cxl_port *iter;  cxl_region_teardown_targets() local
   1361  struct cxl_port *iter;  cxl_region_setup_targets() local
   1458  struct cxl_port *iter;  cxl_region_attach_position() local
   1605  struct cxl_port *iter, *port = cxled_to_port(cxled);  cxl_calc_interleave_pos() local
   1849  struct cxl_port *iter, *ep_port = cxled_to_port(cxled);  cxl_region_detach() local
    [all...]
/kernel/linux/linux-5.10/drivers/dma/mv_xor.c
    217  struct mv_xor_desc_slot *iter, *_iter;  in mv_chan_clean_completed_slots() local
    220  list_for_each_entry_safe(iter, _iter, &mv_chan->completed_slots,  in mv_chan_clean_completed_slots()
    223  if (async_tx_test_ack(&iter->async_tx)) {  in mv_chan_clean_completed_slots()
    224  list_move_tail(&iter->node, &mv_chan->free_slots);  in mv_chan_clean_completed_slots()
    225  if (!list_empty(&iter->sg_tx_list)) {  in mv_chan_clean_completed_slots()
    226  list_splice_tail_init(&iter->sg_tx_list,  in mv_chan_clean_completed_slots()
    265  struct mv_xor_desc_slot *iter, *_iter;  in mv_chan_slot_cleanup() local
    280  list_for_each_entry_safe(iter, _iter, &mv_chan->chain,  in mv_chan_slot_cleanup()
    284  hw_desc = iter->hw_desc;  in mv_chan_slot_cleanup()
    286  cookie = mv_desc_run_tx_complete_actions(iter, mv_cha ...  in mv_chan_slot_cleanup()
    351  struct mv_xor_desc_slot *iter;  mv_chan_alloc_slot() local
    630  struct mv_xor_desc_slot *iter, *_iter;  mv_xor_free_chan_resources() local
    [all...]
/kernel/linux/linux-6.6/drivers/dma/mv_xor.c
    217  struct mv_xor_desc_slot *iter, *_iter;  in mv_chan_clean_completed_slots() local
    220  list_for_each_entry_safe(iter, _iter, &mv_chan->completed_slots,  in mv_chan_clean_completed_slots()
    223  if (async_tx_test_ack(&iter->async_tx)) {  in mv_chan_clean_completed_slots()
    224  list_move_tail(&iter->node, &mv_chan->free_slots);  in mv_chan_clean_completed_slots()
    225  if (!list_empty(&iter->sg_tx_list)) {  in mv_chan_clean_completed_slots()
    226  list_splice_tail_init(&iter->sg_tx_list,  in mv_chan_clean_completed_slots()
    265  struct mv_xor_desc_slot *iter, *_iter;  in mv_chan_slot_cleanup() local
    280  list_for_each_entry_safe(iter, _iter, &mv_chan->chain,  in mv_chan_slot_cleanup()
    284  hw_desc = iter->hw_desc;  in mv_chan_slot_cleanup()
    286  cookie = mv_desc_run_tx_complete_actions(iter, mv_cha ...  in mv_chan_slot_cleanup()
    351  struct mv_xor_desc_slot *iter;  mv_chan_alloc_slot() local
    630  struct mv_xor_desc_slot *iter, *_iter;  mv_xor_free_chan_resources() local
    [all...]
/kernel/linux/linux-5.10/net/rds/connection.c
    522  struct rds_info_iterator *iter,  in __rds_inc_msg_cp()
    527  rds6_inc_info_copy(inc, iter, saddr, daddr, flip);  in __rds_inc_msg_cp()
    530  rds_inc_info_copy(inc, iter, *(__be32 *)saddr,  in __rds_inc_msg_cp()
    535  struct rds_info_iterator *iter,  in rds_conn_message_info_cmn()
    581  iter,  in rds_conn_message_info_cmn()
    601  struct rds_info_iterator *iter,  in rds_conn_message_info()
    605  rds_conn_message_info_cmn(sock, len, iter, lens, want_send, false);  in rds_conn_message_info()
    610  struct rds_info_iterator *iter,  in rds6_conn_message_info()
    614  rds_conn_message_info_cmn(sock, len, iter, lens, want_send, true);  in rds6_conn_message_info()
    619  struct rds_info_iterator *iter,  in rds_conn_message_info_send()
    521  __rds_inc_msg_cp(struct rds_incoming *inc, struct rds_info_iterator *iter, void *saddr, void *daddr, int flip, bool isv6)  __rds_inc_msg_cp() argument
    534  rds_conn_message_info_cmn(struct socket *sock, unsigned int len, struct rds_info_iterator *iter, struct rds_info_lengths *lens, int want_send, bool isv6)  rds_conn_message_info_cmn() argument
    600  rds_conn_message_info(struct socket *sock, unsigned int len, struct rds_info_iterator *iter, struct rds_info_lengths *lens, int want_send)  rds_conn_message_info() argument
    609  rds6_conn_message_info(struct socket *sock, unsigned int len, struct rds_info_iterator *iter, struct rds_info_lengths *lens, int want_send)  rds6_conn_message_info() argument
    618  rds_conn_message_info_send(struct socket *sock, unsigned int len, struct rds_info_iterator *iter, struct rds_info_lengths *lens)  rds_conn_message_info_send() argument
    626  rds6_conn_message_info_send(struct socket *sock, unsigned int len, struct rds_info_iterator *iter, struct rds_info_lengths *lens)  rds6_conn_message_info_send() argument
    634  rds_conn_message_info_retrans(struct socket *sock, unsigned int len, struct rds_info_iterator *iter, struct rds_info_lengths *lens)  rds_conn_message_info_retrans() argument
    643  rds6_conn_message_info_retrans(struct socket *sock, unsigned int len, struct rds_info_iterator *iter, struct rds_info_lengths *lens)  rds6_conn_message_info_retrans() argument
    652  rds_for_each_conn_info(struct socket *sock, unsigned int len, struct rds_info_iterator *iter, struct rds_info_lengths *lens, int (*visitor)(struct rds_connection *, void *), u64 *buffer, size_t item_len)  rds_for_each_conn_info() argument
    690  rds_walk_conn_path_info(struct socket *sock, unsigned int len, struct rds_info_iterator *iter, struct rds_info_lengths *lens, int (*visitor)(struct rds_conn_path *, void *), u64 *buffer, size_t item_len)  rds_walk_conn_path_info() argument
    798  rds_conn_info(struct socket *sock, unsigned int len, struct rds_info_iterator *iter, struct rds_info_lengths *lens)  rds_conn_info() argument
    811  rds6_conn_info(struct socket *sock, unsigned int len, struct rds_info_iterator *iter, struct rds_info_lengths *lens)  rds6_conn_info() argument
    [all...]
/kernel/linux/linux-6.6/fs/nfs/direct.c
    136  * @iter: I/O buffer
    140  int nfs_swap_rw(struct kiocb *iocb, struct iov_iter *iter)  in nfs_swap_rw() argument
    144  VM_BUG_ON(iov_iter_count(iter) != PAGE_SIZE);  in nfs_swap_rw()
    146  if (iov_iter_rw(iter) == READ)  in nfs_swap_rw()
    147  ret = nfs_file_direct_read(iocb, iter, true);  in nfs_swap_rw()
    149  ret = nfs_file_direct_write(iocb, iter, true);  in nfs_swap_rw()
    325  struct iov_iter *iter,  in nfs_direct_read_schedule_iovec()
    340  while (iov_iter_count(iter)) {  in nfs_direct_read_schedule_iovec()
    346  result = iov_iter_get_pages_alloc2(iter, &pagevec,  in nfs_direct_read_schedule_iovec()
    400  * @iter ...
    324  nfs_direct_read_schedule_iovec(struct nfs_direct_req *dreq, struct iov_iter *iter, loff_t pos)  nfs_direct_read_schedule_iovec() argument
    417  nfs_file_direct_read(struct kiocb *iocb, struct iov_iter *iter, bool swap)  nfs_file_direct_read() argument
    829  nfs_direct_write_schedule_iovec(struct nfs_direct_req *dreq, struct iov_iter *iter, loff_t pos, int ioflags)  nfs_direct_write_schedule_iovec() argument
    955  nfs_file_direct_write(struct kiocb *iocb, struct iov_iter *iter, bool swap)  nfs_file_direct_write() argument
    [all...]