Home
last modified time | relevance | path

Searched refs:ubuffer (Results 1 - 20 of 20) sorted by relevance

/kernel/linux/linux-5.10/fs/xfs/
H A Dxfs_itable.h11 void __user *ubuffer; /* user output buffer */ member
13 unsigned int icount; /* number of elements in ubuffer */
30 char __user *b = breq->ubuffer; in xfs_ibulk_advance()
32 breq->ubuffer = b + bytes; in xfs_ibulk_advance()
H A Dxfs_ioctl32.c87 struct compat_xfs_inogrp __user *p32 = breq->ubuffer; in xfs_fsinumbers_fmt_compat()
176 struct compat_xfs_bstat __user *p32 = breq->ubuffer; in xfs_fsbulkstat_one_fmt_compat()
228 * to userspace memory via bulkreq.ubuffer. Normally the compat in xfs_compat_ioc_fsbulkstat()
262 get_user(addr, &p32->ubuffer)) in xfs_compat_ioc_fsbulkstat()
264 bulkreq.ubuffer = compat_ptr(addr); in xfs_compat_ioc_fsbulkstat()
275 if (bulkreq.ubuffer == NULL) in xfs_compat_ioc_fsbulkstat()
278 breq.ubuffer = bulkreq.ubuffer; in xfs_compat_ioc_fsbulkstat()
H A Dxfs_ioctl.c718 if (copy_to_user(breq->ubuffer, &bs1, sizeof(bs1))) in xfs_fsbulkstat_one_fmt()
731 if (copy_to_user(breq->ubuffer, &ig1, sizeof(struct xfs_inogrp))) in xfs_fsinumbers_fmt()
768 if (bulkreq.ubuffer == NULL) in xfs_ioc_fsbulkstat()
771 breq.ubuffer = bulkreq.ubuffer; in xfs_ioc_fsbulkstat()
819 if (copy_to_user(breq->ubuffer, bstat, sizeof(struct xfs_bulkstat))) in xfs_bulkstat_fmt()
835 void __user *ubuffer) in xfs_bulk_ireq_setup()
843 breq->ubuffer = ubuffer; in xfs_bulk_ireq_setup()
954 if (copy_to_user(breq->ubuffer, igr in xfs_inumbers_fmt()
831 xfs_bulk_ireq_setup( struct xfs_mount *mp, struct xfs_bulk_ireq *hdr, struct xfs_ibulk *breq, void __user *ubuffer) xfs_bulk_ireq_setup() argument
[all...]
H A Dxfs_ioctl32.h69 compat_uptr_t ubuffer; /* user buffer for inode desc. */ member
/kernel/linux/linux-6.6/fs/xfs/
H A Dxfs_itable.h12 void __user *ubuffer; /* user output buffer */ member
14 unsigned int icount; /* number of elements in ubuffer */
34 char __user *b = breq->ubuffer; in xfs_ibulk_advance()
36 breq->ubuffer = b + bytes; in xfs_ibulk_advance()
H A Dxfs_ioctl32.c73 struct compat_xfs_inogrp __user *p32 = breq->ubuffer; in xfs_fsinumbers_fmt_compat()
162 struct compat_xfs_bstat __user *p32 = breq->ubuffer; in xfs_fsbulkstat_one_fmt_compat()
216 * to userspace memory via bulkreq.ubuffer. Normally the compat in xfs_compat_ioc_fsbulkstat()
250 get_user(addr, &p32->ubuffer)) in xfs_compat_ioc_fsbulkstat()
252 bulkreq.ubuffer = compat_ptr(addr); in xfs_compat_ioc_fsbulkstat()
263 if (bulkreq.ubuffer == NULL) in xfs_compat_ioc_fsbulkstat()
266 breq.ubuffer = bulkreq.ubuffer; in xfs_compat_ioc_fsbulkstat()
H A Dxfs_ioctl.c640 if (copy_to_user(breq->ubuffer, &bs1, sizeof(bs1))) in xfs_fsbulkstat_one_fmt()
653 if (copy_to_user(breq->ubuffer, &ig1, sizeof(struct xfs_inogrp))) in xfs_fsinumbers_fmt()
692 if (bulkreq.ubuffer == NULL) in xfs_ioc_fsbulkstat()
695 breq.ubuffer = bulkreq.ubuffer; in xfs_ioc_fsbulkstat()
743 if (copy_to_user(breq->ubuffer, bstat, sizeof(struct xfs_bulkstat))) in xfs_bulkstat_fmt()
759 void __user *ubuffer) in xfs_bulk_ireq_setup()
767 breq->ubuffer = ubuffer; in xfs_bulk_ireq_setup()
883 if (copy_to_user(breq->ubuffer, igr in xfs_inumbers_fmt()
755 xfs_bulk_ireq_setup( struct xfs_mount *mp, const struct xfs_bulk_ireq *hdr, struct xfs_ibulk *breq, void __user *ubuffer) xfs_bulk_ireq_setup() argument
[all...]
H A Dxfs_ioctl32.h67 compat_uptr_t ubuffer; /* user buffer for inode desc. */ member
/kernel/linux/linux-6.6/drivers/iommu/iommufd/
H A Dmain.c300 if (copy_to_user(&((struct iommu_option __user *)ucmd->ubuffer)->val64, in iommufd_option()
382 ucmd.ubuffer = (void __user *)arg; in iommufd_fops_ioctl()
383 ret = get_user(ucmd.user_size, (u32 __user *)ucmd.ubuffer); in iommufd_fops_ioctl()
394 ret = copy_struct_from_user(ucmd.cmd, op->size, ucmd.ubuffer, in iommufd_fops_ioctl()
H A Diommufd_private.h94 void __user *ubuffer; member
106 if (copy_to_user(ucmd->ubuffer, ucmd->cmd, in iommufd_ucmd_respond()
/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx5/
H A Dqp.c185 struct ib_umem *umem = base->ubuffer.umem; in mlx5_ib_read_user_wqe_sq()
236 struct ib_umem *umem = base->ubuffer.umem; in mlx5_ib_read_wqe_sq()
252 struct ib_umem *umem = base->ubuffer.umem; in mlx5_ib_read_user_wqe_rq()
272 struct ib_umem *umem = base->ubuffer.umem; in mlx5_ib_read_wqe_rq()
667 base->ubuffer.buf_size = qp->rq.wqe_cnt << qp->rq.wqe_shift; in set_user_buf_size()
668 qp->raw_packet_qp.sq.ubuffer.buf_size = qp->sq.wqe_cnt << 6; in set_user_buf_size()
670 base->ubuffer.buf_size = (qp->rq.wqe_cnt << qp->rq.wqe_shift) + in set_user_buf_size()
949 struct mlx5_ib_ubuffer *ubuffer = &base->ubuffer; in _create_user_qp() local
1001 if (ucmd->buf_addr && ubuffer in _create_user_qp()
1342 struct mlx5_ib_ubuffer *ubuffer = &sq->ubuffer; create_raw_packet_qp_sq() local
[all...]
H A Dmlx5_ib.h427 struct mlx5_ib_ubuffer ubuffer; member
445 struct mlx5_ib_ubuffer ubuffer; member
455 struct mlx5_ib_ubuffer ubuffer; member
/kernel/linux/linux-6.6/drivers/nvme/host/
H A Dioctl.c166 static int nvme_map_user_request(struct request *req, u64 ubuffer, in nvme_map_user_request() argument
184 ret = io_uring_cmd_import_fixed(ubuffer, bufflen, in nvme_map_user_request()
190 ret = blk_rq_map_user_io(req, NULL, nvme_to_user_ptr(ubuffer), in nvme_map_user_request()
222 struct nvme_command *cmd, u64 ubuffer, unsigned bufflen, in nvme_submit_user_cmd()
239 if (ubuffer && bufflen) { in nvme_submit_user_cmd()
240 ret = nvme_map_user_request(req, ubuffer, bufflen, meta_buffer, in nvme_submit_user_cmd()
221 nvme_submit_user_cmd(struct request_queue *q, struct nvme_command *cmd, u64 ubuffer, unsigned bufflen, void __user *meta_buffer, unsigned meta_len, u32 meta_seed, u64 *result, unsigned timeout, unsigned int flags) nvme_submit_user_cmd() argument
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx5/
H A Dqp.c172 struct ib_umem *umem = base->ubuffer.umem; in mlx5_ib_read_user_wqe_sq()
223 struct ib_umem *umem = base->ubuffer.umem; in mlx5_ib_read_wqe_sq()
239 struct ib_umem *umem = base->ubuffer.umem; in mlx5_ib_read_user_wqe_rq()
259 struct ib_umem *umem = base->ubuffer.umem; in mlx5_ib_read_wqe_rq()
585 base->ubuffer.buf_size = qp->rq.wqe_cnt << qp->rq.wqe_shift; in set_user_buf_size()
586 qp->raw_packet_qp.sq.ubuffer.buf_size = qp->sq.wqe_cnt << 6; in set_user_buf_size()
588 base->ubuffer.buf_size = (qp->rq.wqe_cnt << qp->rq.wqe_shift) + in set_user_buf_size()
898 struct mlx5_ib_ubuffer *ubuffer = &base->ubuffer; in _create_user_qp() local
951 if (ucmd->buf_addr && ubuffer in _create_user_qp()
1204 struct mlx5_ib_ubuffer *ubuffer = &sq->ubuffer; create_raw_packet_qp_sq() local
[all...]
H A Dmlx5_ib.h336 struct mlx5_ib_ubuffer ubuffer; member
354 struct mlx5_ib_ubuffer ubuffer; member
364 struct mlx5_ib_ubuffer ubuffer; member
/kernel/linux/linux-5.10/fs/xfs/libxfs/
H A Dxfs_fs.h435 void __user *ubuffer;/* user buffer for inode desc. */ member
/kernel/linux/linux-6.6/fs/xfs/libxfs/
H A Dxfs_fs.h427 void __user *ubuffer;/* user buffer for inode desc. */ member
/kernel/linux/linux-5.10/drivers/s390/char/
H A Dcon3215.c92 char ubuffer[80]; /* copy_from_user buffer */ member
/kernel/linux/linux-6.6/drivers/s390/char/
H A Dcon3215.c92 char ubuffer[80]; /* copy_from_user buffer */ member
/kernel/linux/linux-5.10/drivers/nvme/host/
H A Dcore.c1097 struct nvme_command *cmd, void __user *ubuffer, in nvme_submit_user_cmd()
1117 if (ubuffer && bufflen) { in nvme_submit_user_cmd()
1118 ret = blk_rq_map_user(q, req, NULL, ubuffer, bufflen, in nvme_submit_user_cmd()
1096 nvme_submit_user_cmd(struct request_queue *q, struct nvme_command *cmd, void __user *ubuffer, unsigned bufflen, void __user *meta_buffer, unsigned meta_len, u32 meta_seed, u64 *result, unsigned timeout) nvme_submit_user_cmd() argument

Completed in 36 milliseconds