Lines Matching defs:num    (usage sketches follow the listing)

78 	u16 num;			/* Descriptor list length. */
121 unsigned int num;
235 static struct vring_desc_extra *vring_alloc_desc_extra(unsigned int num);
420 static void virtqueue_init(struct vring_virtqueue *vq, u32 num)
422 vq->vq.num_free = num;
580 WARN_ON_ONCE(total_sg > vq->split.vring.num && !vq->indirect);
647 vq->split.desc_extra[prev & (vq->split.vring.num - 1)].flags &=
687 avail = vq->split.avail_idx_shadow & (vq->split.vring.num - 1);
848 last_used = (vq->last_used_idx & (vq->split.vring.num - 1));
854 if (unlikely(i >= vq->split.vring.num)) {
982 for (i = 0; i < vq->split.vring.num; i++) {
995 BUG_ON(vq->vq.num_free != vq->split.vring.num);
1022 int num;
1024 num = vq->split.vring.num;
1030 vq->split.vring.avail->ring[num] = 0;
1036 *(__virtio16 *)&(vq->split.vring.used->ring[num]) = 0;
1038 virtqueue_init(vq, num);
1056 u32 num = vring_split->vring.num;
1058 state = kmalloc_array(num, sizeof(struct vring_desc_state_split), GFP_KERNEL);
1062 extra = vring_alloc_desc_extra(num);
1066 memset(state, 0, num * sizeof(struct vring_desc_state_split));
1092 u32 num,
1100 /* We assume num is a power of 2. */
1101 if (!is_power_of_2(num)) {
1102 dev_warn(&vdev->dev, "Bad virtqueue length %u\n", num);
1107 for (; num && vring_size(num, vring_align) > PAGE_SIZE; num /= 2) {
1108 queue = vring_alloc_queue(vdev, vring_size(num, vring_align),
1118 if (!num)
1123 queue = vring_alloc_queue(vdev, vring_size(num, vring_align),
1130 vring_init(&vring_split->vring, num, queue, vring_align);
1133 vring_split->queue_size_in_bytes = vring_size(num, vring_align);
1143 unsigned int num,
1158 err = vring_alloc_queue_split(&vring_split, vdev, num, vring_align,
1175 static int virtqueue_resize_split(struct virtqueue *_vq, u32 num)
1182 err = vring_alloc_queue_split(&vring_split, vdev, num,
1197 virtqueue_init(vq, vring_split.vring.num);
1311 BUG_ON(id == vq->packed.vring.num);
1365 if (n >= vq->packed.vring.num) {
1376 vq->packed.desc_state[id].num = 1;
1446 WARN_ON_ONCE(total_sg > vq->packed.vring.num && !vq->indirect);
1460 BUG_ON(id == vq->packed.vring.num);
1493 if ((unlikely(++i >= vq->packed.vring.num))) {
1513 vq->packed.desc_state[id].num = descs_used;
1545 if (i >= vq->packed.vring.num)
1594 event_idx -= vq->packed.vring.num;
1616 vq->vq.num_free += state->num;
1620 for (i = 0; i < state->num; i++) {
1704 if (unlikely(id >= vq->packed.vring.num)) {
1717 last_used += vq->packed.desc_state[id].num;
1718 if (unlikely(last_used >= vq->packed.vring.num)) {
1719 last_used -= vq->packed.vring.num;
1821 bufs = (vq->packed.vring.num - vq->vq.num_free) * 3 / 4;
1826 if (used_idx >= vq->packed.vring.num) {
1827 used_idx -= vq->packed.vring.num;
1875 for (i = 0; i < vq->packed.vring.num; i++) {
1885 BUG_ON(vq->vq.num_free != vq->packed.vring.num);
1891 static struct vring_desc_extra *vring_alloc_desc_extra(unsigned int num)
1896 desc_extra = kmalloc_array(num, sizeof(struct vring_desc_extra),
1901 memset(desc_extra, 0, num * sizeof(struct vring_desc_extra));
1903 for (i = 0; i < num - 1; i++)
1937 u32 num, struct device *dma_dev)
1944 ring_size_in_bytes = num * sizeof(struct vring_packed_desc);
1980 vring_packed->vring.num = num;
1993 u32 num = vring_packed->vring.num;
1995 state = kmalloc_array(num, sizeof(struct vring_desc_state_packed), GFP_KERNEL);
1999 memset(state, 0, num * sizeof(struct vring_desc_state_packed));
2001 extra = vring_alloc_desc_extra(num);
2049 virtqueue_init(vq, vq->packed.vring.num);
2055 unsigned int num,
2070 if (vring_alloc_queue_packed(&vring_packed, vdev, num, dma_dev))
2109 virtqueue_init(vq, num);
2125 static int virtqueue_resize_packed(struct virtqueue *_vq, u32 num)
2132 if (vring_alloc_queue_packed(&vring_packed, vdev, num, vring_dma_dev(vq)))
2143 virtqueue_init(vq, vring_packed.vring.num);
2253 * @num: the number of entries in @sg readable by other side
2263 struct scatterlist *sg, unsigned int num,
2267 return virtqueue_add(vq, &sg, num, 1, 0, data, NULL, gfp);
2275 * @num: the number of entries in @sg writable by other side
2285 struct scatterlist *sg, unsigned int num,
2289 return virtqueue_add(vq, &sg, num, 0, 1, data, NULL, gfp);
2297 * @num: the number of entries in @sg writable by other side
2308 struct scatterlist *sg, unsigned int num,
2313 return virtqueue_add(vq, &sg, num, 0, 1, data, ctx, gfp);
2656 virtqueue_init(vq, vring_split->vring.num);
2667 unsigned int num,
2679 return vring_create_virtqueue_packed(index, num, vring_align,
2683 return vring_create_virtqueue_split(index, num, vring_align,
2691 unsigned int num,
2704 return vring_create_virtqueue_packed(index, num, vring_align,
2708 return vring_create_virtqueue_split(index, num, vring_align,
2717 * @num: new ring num
2734 * -E2BIG/-EINVAL: num error
2738 int virtqueue_resize(struct virtqueue *_vq, u32 num,
2744 if (num > vq->vq.num_max)
2747 if (!num)
2750 if ((vq->packed_ring ? vq->packed.vring.num : vq->split.vring.num) == num)
2758 err = virtqueue_resize_packed(_vq, num);
2760 err = virtqueue_resize_split(_vq, num);
2790 u32 num;
2794 num = vq->packed_ring ? vq->packed.vring.num : vq->split.vring.num;
2796 if (num != vq->vq.num_free) {
2850 unsigned int num,
2865 vring_init(&vring_split.vring, num, pages, vring_align);
2984 return vq->packed_ring ? vq->packed.vring.num : vq->split.vring.num;
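
The hits above appear to come from the Linux kernel's virtio ring implementation (drivers/virtio/virtio_ring.c), where num is the virtqueue length used by both the split and the packed ring code paths. Two standalone C sketches follow; they are toy models written for this listing, not kernel code, and every toy_* name in them is invented for illustration.

The first sketch mirrors the split-ring pattern visible at source lines 647, 687, 848 and 1101 above: the ring length must be a power of 2 so that free-running 16-bit indices can be reduced to ring slots with idx & (num - 1).

/* Toy split ring (sketch, not kernel code): power-of-2 length, slots by masking. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct toy_split_ring {
	uint16_t num;              /* ring length, must be a power of 2 */
	uint16_t avail_idx_shadow; /* free-running producer index */
};

static bool toy_is_power_of_2(uint16_t n)
{
	return n != 0 && (n & (n - 1)) == 0;
}

static unsigned int toy_avail_slot(const struct toy_split_ring *r)
{
	/* Same shape as "avail_idx_shadow & (vring.num - 1)" in the listing. */
	return r->avail_idx_shadow & (r->num - 1);
}

int main(void)
{
	struct toy_split_ring r = { .num = 256, .avail_idx_shadow = 0xfffe };

	if (!toy_is_power_of_2(r.num)) {
		fprintf(stderr, "Bad virtqueue length %u\n", (unsigned int)r.num);
		return 1;
	}

	/* The 16-bit index wraps past 0xffff, but the mask keeps slots in range. */
	for (int i = 0; i < 4; i++, r.avail_idx_shadow++)
		printf("idx=%u -> slot %u\n",
		       (unsigned int)r.avail_idx_shadow, toy_avail_slot(&r));

	return 0;
}

The second sketch mirrors the packed-ring pattern at source lines 1493, 1545 and 1717-1719: the packed ring length is not required to be a power of 2, so indices wrap by comparison and subtraction instead of masking.

/* Toy packed ring (sketch, not kernel code): arbitrary length, wrap by subtraction. */
#include <stdint.h>
#include <stdio.h>

struct toy_packed_ring {
	uint16_t num;       /* ring length, any non-zero value */
	uint16_t last_used; /* next used slot to process */
};

/* Advance last_used by the number of descriptors one buffer consumed
 * (assumed to be <= num, as with desc_state[id].num in the listing). */
static void toy_advance_used(struct toy_packed_ring *r, uint16_t descs_used)
{
	r->last_used += descs_used;
	if (r->last_used >= r->num)
		r->last_used -= r->num; /* wrap without assuming a power-of-2 size */
}

int main(void)
{
	struct toy_packed_ring r = { .num = 300, .last_used = 298 };

	toy_advance_used(&r, 5); /* crosses the end of the ring */
	printf("last_used=%u (num=%u)\n",
	       (unsigned int)r.last_used, (unsigned int)r.num);

	return 0;
}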