
Searched refs:bytes_mapped (Results 1 - 4 of 4) sorted by relevance

/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx5/
odp.c
553 u64 user_va, size_t bcnt, u32 *bytes_mapped, in pagefault_real_mr()
591 if (bytes_mapped) { in pagefault_real_mr()
595 *bytes_mapped += min_t(u32, new_mappings, bcnt); in pagefault_real_mr()
606 size_t bcnt, u32 *bytes_mapped, u32 flags) in pagefault_implicit_mr()
647 bytes_mapped, flags); in pagefault_implicit_mr()
691 u32 *bytes_mapped, u32 flags) in pagefault_dmabuf_mr()
722 if (bytes_mapped) in pagefault_dmabuf_mr()
723 *bytes_mapped += bcnt; in pagefault_dmabuf_mr()
738 u32 *bytes_mapped, u32 flags) in pagefault_mr()
746 return pagefault_dmabuf_mr(mr, bcnt, bytes_mapped, flags); in pagefault_mr()
552 pagefault_real_mr(struct mlx5_ib_mr *mr, struct ib_umem_odp *odp, u64 user_va, size_t bcnt, u32 *bytes_mapped, u32 flags) pagefault_real_mr() argument
604 pagefault_implicit_mr(struct mlx5_ib_mr *imr, struct ib_umem_odp *odp_imr, u64 user_va, size_t bcnt, u32 *bytes_mapped, u32 flags) pagefault_implicit_mr() argument
690 pagefault_dmabuf_mr(struct mlx5_ib_mr *mr, size_t bcnt, u32 *bytes_mapped, u32 flags) pagefault_dmabuf_mr() argument
737 pagefault_mr(struct mlx5_ib_mr *mr, u64 io_virt, size_t bcnt, u32 *bytes_mapped, u32 flags) pagefault_mr() argument
813 pagefault_single_data_segment(struct mlx5_ib_dev *dev, struct ib_pd *pd, u32 key, u64 io_virt, size_t bcnt, u32 *bytes_committed, u32 *bytes_mapped) pagefault_single_data_segment() argument
981 pagefault_data_segments(struct mlx5_ib_dev *dev, struct mlx5_pagefault *pfault, void *wqe, void *wqe_end, u32 *bytes_mapped, u32 *total_wqe_bytes, bool receive_queue) pagefault_data_segments() argument
1199 u32 bytes_mapped, total_wqe_bytes; mlx5_ib_mr_wqe_pfault_handler() local
1706 u32 bytes_mapped = 0; mlx5_ib_prefetch_mr_work() local
1755 u32 bytes_mapped = 0; mlx5_ib_prefetch_sg_list() local
[all...]
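
Note: a rough standalone sketch of the pattern the linux-6.6 odp.c hits above show (the linux-5.10 hits below follow the same shape). bytes_mapped is an optional out-parameter that is only updated when non-NULL, and it is credited with min(new_mappings, bcnt) so a page-aligned fault does not over-count past the requested byte range. This is not mlx5 code; fake_fault_range() and the fixed page size are hypothetical stand-ins.

/*
 * Standalone sketch (not kernel code) of the bytes_mapped out-parameter
 * pattern seen in the pagefault_*_mr() hits above: the counter is optional
 * (callers may pass NULL), and the handler only credits the bytes that
 * fall inside the requested range, i.e. min(new_mappings, bcnt).
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t min_u32(uint32_t a, uint32_t b)
{
	return a < b ? a : b;
}

/* Hypothetical helper: pretend we faulted in the pages covering the range. */
static int fake_fault_range(uint64_t user_va, size_t bcnt,
			    uint32_t *bytes_mapped, uint32_t flags)
{
	const uint32_t page_size = 4096;	/* assumption for illustration */
	uint32_t new_mappings;

	(void)flags;
	/* Round the faulted range up to whole pages. */
	new_mappings = (uint32_t)(((user_va % page_size) + bcnt + page_size - 1)
				  / page_size) * page_size;

	if (bytes_mapped)			/* out-parameter is optional */
		*bytes_mapped += min_u32(new_mappings, (uint32_t)bcnt);

	return 0;
}

int main(void)
{
	uint32_t bytes_mapped = 0;

	fake_fault_range(0x1000, 10000, &bytes_mapped, 0);
	fake_fault_range(0x3800, 500, NULL, 0);	/* caller not interested */
	printf("bytes_mapped = %u\n", bytes_mapped);	/* prints 10000, not 12288 */
	return 0;
}
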
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx5/
odp.c
671 u64 user_va, size_t bcnt, u32 *bytes_mapped, in pagefault_real_mr()
709 if (bytes_mapped) { in pagefault_real_mr()
713 *bytes_mapped += min_t(u32, new_mappings, bcnt); in pagefault_real_mr()
724 size_t bcnt, u32 *bytes_mapped, u32 flags) in pagefault_implicit_mr()
760 bytes_mapped, flags); in pagefault_implicit_mr()
810 u32 *bytes_mapped, u32 flags) in pagefault_mr()
827 return pagefault_real_mr(mr, odp, user_va, bcnt, bytes_mapped, in pagefault_mr()
830 return pagefault_implicit_mr(mr, odp, io_virt, bcnt, bytes_mapped, in pagefault_mr()
895 u32 *bytes_mapped) in pagefault_single_data_segment()
918 if (bytes_mapped) in pagefault_single_data_segment()
670 pagefault_real_mr(struct mlx5_ib_mr *mr, struct ib_umem_odp *odp, u64 user_va, size_t bcnt, u32 *bytes_mapped, u32 flags) pagefault_real_mr() argument
722 pagefault_implicit_mr(struct mlx5_ib_mr *imr, struct ib_umem_odp *odp_imr, u64 user_va, size_t bcnt, u32 *bytes_mapped, u32 flags) pagefault_implicit_mr() argument
809 pagefault_mr(struct mlx5_ib_mr *mr, u64 io_virt, size_t bcnt, u32 *bytes_mapped, u32 flags) pagefault_mr() argument
891 pagefault_single_data_segment(struct mlx5_ib_dev *dev, struct ib_pd *pd, u32 key, u64 io_virt, size_t bcnt, u32 *bytes_committed, u32 *bytes_mapped) pagefault_single_data_segment() argument
1055 pagefault_data_segments(struct mlx5_ib_dev *dev, struct mlx5_pagefault *pfault, void *wqe, void *wqe_end, u32 *bytes_mapped, u32 *total_wqe_bytes, bool receive_queue) pagefault_data_segments() argument
1272 u32 bytes_mapped, total_wqe_bytes; mlx5_ib_mr_wqe_pfault_handler() local
1779 u32 bytes_mapped = 0; mlx5_ib_prefetch_mr_work() local
1835 u32 bytes_mapped = 0; mlx5_ib_prefetch_sg_list() local
[all...]
/kernel/linux/linux-6.6/net/ipv4/
tcp.c
1970 int bytes_mapped; in tcp_zerocopy_vm_insert_batch_error() local
1976 bytes_mapped = PAGE_SIZE * (leftover_pages - pages_remaining); in tcp_zerocopy_vm_insert_batch_error()
1977 *seq += bytes_mapped; in tcp_zerocopy_vm_insert_batch_error()
1978 *address += bytes_mapped; in tcp_zerocopy_vm_insert_batch_error()
2005 unsigned int bytes_mapped; in tcp_zerocopy_vm_insert_batch() local
2010 bytes_mapped = PAGE_SIZE * pages_mapped; in tcp_zerocopy_vm_insert_batch()
2014 *seq += bytes_mapped; in tcp_zerocopy_vm_insert_batch()
2015 *address += bytes_mapped; in tcp_zerocopy_vm_insert_batch()
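
Note: a rough standalone sketch of the arithmetic in the tcp.c hits above (the linux-5.10 hits below do the same in tcp_zerocopy_vm_insert_batch()). The zerocopy receive path converts the number of pages actually inserted into the user mapping into bytes_mapped = PAGE_SIZE * pages_mapped, then advances both the receive sequence cursor and the mapping address by that amount. The helper name and the fixed 4 KiB page size below are assumptions for illustration.

/*
 * Standalone sketch (not kernel code) of the cursor advance seen in the
 * tcp_zerocopy_vm_insert_batch*() hits: only the pages actually inserted
 * count, and both *seq and *address move forward by the same byte count.
 */
#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096u		/* assumption for illustration */

static void advance_after_insert(unsigned int pages_to_map,
				 unsigned int pages_remaining,
				 uint32_t *seq, uint64_t *address)
{
	unsigned int pages_mapped = pages_to_map - pages_remaining;
	unsigned int bytes_mapped = PAGE_SIZE * pages_mapped;

	*seq += bytes_mapped;
	*address += bytes_mapped;
}

int main(void)
{
	uint32_t seq = 1000;
	uint64_t address = 0x7f0000000000ull;

	/* Tried to map 8 pages; 2 failed to insert into the VMA. */
	advance_after_insert(8, 2, &seq, &address);
	printf("seq=%u address=0x%llx\n", seq,
	       (unsigned long long)address);	/* both advanced by 6 * 4096 */
	return 0;
}
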
/kernel/linux/linux-5.10/net/ipv4/
tcp.c
1883 int bytes_mapped; in tcp_zerocopy_vm_insert_batch() local
1887 bytes_mapped = PAGE_SIZE * (pages_to_map - pages_remaining); in tcp_zerocopy_vm_insert_batch()
1891 *seq += bytes_mapped; in tcp_zerocopy_vm_insert_batch()
1892 *insert_addr += bytes_mapped; in tcp_zerocopy_vm_insert_batch()
