Lines matching refs: riov (drivers/vhost/vringh.c)

266 struct vringh_kiov *riov,
288 if (WARN_ON(!riov && !wiov))
291 if (riov)
292 riov->i = riov->used = 0;
348 iov = riov;
658 * @riov: where to put the readable descriptors (or NULL)
670 * Note that you may need to clean up riov and wiov, even on error!
673 struct vringh_iov *riov,
711 err = __vringh_iov(vrh, *head, (struct vringh_kiov *)riov,
723 * @riov: the riov as passed to vringh_getdesc_user() (updated as we consume)
729 ssize_t vringh_iov_pull_user(struct vringh_iov *riov, void *dst, size_t len)
731 return vringh_iov_xfer(NULL, (struct vringh_kiov *)riov,
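
The matches at 658-731 cover the userspace-memory flavour of the API: vringh_getdesc_user() parses one available descriptor chain into riov/wiov, and vringh_iov_pull_user() then copies the readable bytes out of it. The sketch below shows one plausible way to string them together; it is not taken from the listing, and vringh_iov_init(), vringh_iov_cleanup(), vringh_complete_user() and the getrange callback are assumptions based on include/linux/vringh.h.

        /*
         * Minimal sketch, not from vringh.c: consume one readable chain via the
         * *_user variants.  vringh_iov_init()/vringh_iov_cleanup(),
         * vringh_complete_user() and the getrange hook are assumed from
         * include/linux/vringh.h; only vringh_getdesc_user() and
         * vringh_iov_pull_user() appear in the listing above.
         */
        #include <linux/kernel.h>
        #include <linux/vringh.h>

        static int demo_pull_user(struct vringh *vrh,
                                  bool (*getrange)(struct vringh *vrh, u64 addr,
                                                   struct vringh_range *r),
                                  void *buf, size_t buflen)
        {
                struct iovec stack_iov[8];      /* backing store; may be grown internally */
                struct vringh_iov riov;
                u16 head;
                ssize_t got;
                int err;

                vringh_iov_init(&riov, stack_iov, ARRAY_SIZE(stack_iov));

                /* Returns 0 if the ring is empty, 1 on success, -errno on failure. */
                err = vringh_getdesc_user(vrh, &riov, NULL, getrange, &head);
                if (err <= 0)
                        goto out;

                got = vringh_iov_pull_user(&riov, buf, buflen); /* copy from guest memory */
                if (got < 0) {
                        err = got;
                        goto out;
                }

                err = vringh_complete_user(vrh, head, got);
        out:
                /* "You may need to clean up riov and wiov, even on error!" */
                vringh_iov_cleanup(&riov);
                return err;
        }
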
928 * @riov: where to put the readable descriptors (or NULL)
931 * @gfp: flags for allocating larger riov/wiov.
940 * Note that you may need to clean up riov and wiov, even on error!
943 struct vringh_kiov *riov,
959 err = __vringh_iov(vrh, *head, riov, wiov, no_range_check, NULL,
970 * @riov: the riov as passed to vringh_getdesc_kern() (updated as we consume)
976 ssize_t vringh_iov_pull_kern(struct vringh_kiov *riov, void *dst, size_t len)
978 return vringh_iov_xfer(NULL, riov, dst, len, xfer_kern);
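
The matches at 928-978 are the in-kernel variant, where riov is a struct vringh_kiov and @gfp lets vringh_getdesc_kern() allocate a larger riov/wiov when the caller-supplied one is too small. A minimal sketch follows, assuming vringh_kiov_init(), vringh_kiov_cleanup() and vringh_complete_kern() behave as in include/linux/vringh.h; only vringh_getdesc_kern() and vringh_iov_pull_kern() are in the listing itself.

        /*
         * Minimal sketch, not from vringh.c: drain one readable chain with the
         * in-kernel API.  vringh_kiov_init(), vringh_kiov_cleanup() and
         * vringh_complete_kern() are assumptions from include/linux/vringh.h.
         */
        #include <linux/kernel.h>
        #include <linux/vringh.h>

        static int demo_pull_kern(struct vringh *vrh, void *buf, size_t buflen)
        {
                struct kvec stack_kvec[8];      /* small on-stack backing store */
                struct vringh_kiov riov;
                u16 head;
                ssize_t got;
                int err;

                vringh_kiov_init(&riov, stack_kvec, ARRAY_SIZE(stack_kvec));

                /* 0 means the ring is empty, 1 means a chain was parsed, <0 is an error;
                 * GFP_KERNEL matches the "@gfp: flags for allocating larger riov/wiov". */
                err = vringh_getdesc_kern(vrh, &riov, NULL, &head, GFP_KERNEL);
                if (err <= 0)
                        goto out;

                got = vringh_iov_pull_kern(&riov, buf, buflen);
                if (got < 0) {
                        err = got;
                        goto out;
                }

                /* Report how many bytes of the chain were consumed. */
                err = vringh_complete_kern(vrh, head, got);
        out:
                /* "You may need to clean up riov and wiov, even on error!" */
                vringh_kiov_cleanup(&riov);
                return err;
        }
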
1280 * @riov: where to put the readable descriptors (or NULL)
1283 * @gfp: flags for allocating larger riov/wiov.
1292 * Note that you may need to clean up riov and wiov, even on error!
1295 struct vringh_kiov *riov,
1311 err = __vringh_iov(vrh, *head, riov, wiov, no_range_check, NULL,
1323 * @riov: the riov as passed to vringh_getdesc_iotlb() (updated as we consume)
1330 struct vringh_kiov *riov,
1333 return vringh_iov_xfer(vrh, riov, dst, len, xfer_from_iotlb);
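
The matches at 1280-1333 are the IOTLB variant. The shape is the same as the kernel one, but vringh_iov_pull_iotlb() also takes the vringh itself, since the copy is forwarded to vringh_iov_xfer() with xfer_from_iotlb and goes through IOTLB address translation. A minimal sketch under the same assumptions (vringh_kiov_init(), vringh_kiov_cleanup() and vringh_complete_iotlb() taken from include/linux/vringh.h, not from the listing):

        /*
         * Minimal sketch, not from vringh.c: same flow as the _kern variant,
         * but addresses are translated through the device IOTLB, so vrh is
         * passed to the pull helper as well.
         */
        #include <linux/kernel.h>
        #include <linux/vringh.h>

        static int demo_pull_iotlb(struct vringh *vrh, void *buf, size_t buflen)
        {
                struct kvec stack_kvec[8];
                struct vringh_kiov riov;
                u16 head;
                ssize_t got;
                int err;

                vringh_kiov_init(&riov, stack_kvec, ARRAY_SIZE(stack_kvec));

                err = vringh_getdesc_iotlb(vrh, &riov, NULL, &head, GFP_KERNEL);
                if (err <= 0)
                        goto out;

                /* Unlike vringh_iov_pull_kern(), vrh is needed here so the
                 * guest addresses can be translated through the IOTLB. */
                got = vringh_iov_pull_iotlb(vrh, &riov, buf, buflen);
                if (got < 0) {
                        err = got;
                        goto out;
                }

                err = vringh_complete_iotlb(vrh, head, got);
        out:
                vringh_kiov_cleanup(&riov);
                return err;
        }
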