Lines matching refs:vbuf — one entry per matching source line. The leading numbers are the line numbers of the matches in what appears to be the Linux kernel's virtio-GPU virtqueue code (drivers/gpu/drm/virtio/virtgpu_vq.c); non-matching lines are omitted by the search.

92 struct virtio_gpu_vbuffer *vbuf;
94 vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_KERNEL | __GFP_NOFAIL);
98 vbuf->buf = (void *)vbuf + sizeof(*vbuf);
99 vbuf->size = size;
101 vbuf->resp_cb = resp_cb;
102 vbuf->resp_size = resp_size;
104 vbuf->resp_buf = (void *)vbuf->buf + size;
106 vbuf->resp_buf = resp_buf;
107 BUG_ON(!vbuf->resp_buf);
108 return vbuf;
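
Reconstructed for context (the search shows only matching lines): the allocator carves one slab object into header + inline command buffer + optional inline response buffer. Below is a minimal sketch consistent with lines 92-108 and with the MAX_INLINE_RESP_SIZE test at line 174; the field layout and the MAX_INLINE_RESP_SIZE comparison guarding line 104 vs 106 are inferred, and the real struct definition lives elsewhere (virtgpu_drv.h in mainline).

	struct virtio_gpu_vbuffer {
		char *buf;			/* command, inline after this struct */
		int size;

		void *data_buf;			/* optional out-of-band payload */
		uint32_t data_size;

		char *resp_buf;			/* response, inline or caller-provided */
		int resp_size;
		virtio_gpu_resp_cb resp_cb;
		void *resp_cb_data;

		struct virtio_gpu_object_array *objs;
		struct list_head list;		/* reclaim-list linkage */
	};

	static struct virtio_gpu_vbuffer *
	virtio_gpu_get_vbuf(struct virtio_gpu_device *vgdev,
			    int size, int resp_size, void *resp_buf,
			    virtio_gpu_resp_cb resp_cb)
	{
		struct virtio_gpu_vbuffer *vbuf;

		/* One slab object holds the vbuf header, the command
		 * buffer, and (if small enough) the response buffer. */
		vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_KERNEL | __GFP_NOFAIL);

		vbuf->buf = (void *)vbuf + sizeof(*vbuf);	/* command inline after header */
		vbuf->size = size;
		vbuf->resp_cb = resp_cb;
		vbuf->resp_size = resp_size;
		if (resp_size <= MAX_INLINE_RESP_SIZE)
			vbuf->resp_buf = (void *)vbuf->buf + size;	/* response inline after command */
		else
			vbuf->resp_buf = resp_buf;	/* large responses use a caller-provided buffer */
		BUG_ON(!vbuf->resp_buf);
		return vbuf;
	}
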
112 virtio_gpu_vbuf_ctrl_hdr(struct virtio_gpu_vbuffer *vbuf)
114 /* this assumes a vbuf contains a command that starts with a
118 return (struct virtio_gpu_ctrl_hdr *)vbuf->buf;
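
The comment at line 114 is cut off by the match filter; the accessor itself is a one-liner. A sketch completing the comment in the spirit of the surrounding code:

	static struct virtio_gpu_ctrl_hdr *
	virtio_gpu_vbuf_ctrl_hdr(struct virtio_gpu_vbuffer *vbuf)
	{
		/* This assumes a vbuf contains a command that starts with a
		 * struct virtio_gpu_ctrl_hdr, which holds for both the ctrl
		 * and cursor virtqueues. */
		return (struct virtio_gpu_ctrl_hdr *)vbuf->buf;
	}
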
125 struct virtio_gpu_vbuffer *vbuf;
127 vbuf = virtio_gpu_get_vbuf
130 if (IS_ERR(vbuf)) {
132 return ERR_CAST(vbuf);
134 *vbuffer_p = vbuf;
135 return (struct virtio_gpu_update_cursor *)vbuf->buf;
144 struct virtio_gpu_vbuffer *vbuf;
146 vbuf = virtio_gpu_get_vbuf(vgdev, cmd_size,
148 *vbuffer_p = vbuf;
149 return (struct virtio_gpu_command *)vbuf->buf;
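
Both allocators (lines 127 and 146) are thin wrappers over virtio_gpu_get_vbuf(): the cursor variant sizes the buffer for a struct virtio_gpu_update_cursor, the command variant for an arbitrary cmd_size. A sketch of the command side; the default resp_size of one ctrl_hdr in virtio_gpu_alloc_cmd() is an inference from the inline-response scheme above, since every command at least gets an ack header back:

	static void *virtio_gpu_alloc_cmd_resp(struct virtio_gpu_device *vgdev,
					       virtio_gpu_resp_cb cb,
					       struct virtio_gpu_vbuffer **vbuffer_p,
					       int cmd_size, int resp_size,
					       void *resp_buf)
	{
		struct virtio_gpu_vbuffer *vbuf;

		vbuf = virtio_gpu_get_vbuf(vgdev, cmd_size,
					   resp_size, resp_buf, cb);
		*vbuffer_p = vbuf;
		return (struct virtio_gpu_command *)vbuf->buf;
	}

	static void *virtio_gpu_alloc_cmd(struct virtio_gpu_device *vgdev,
					  struct virtio_gpu_vbuffer **vbuffer_p,
					  int size)
	{
		/* No explicit callback; the response is just the ack header. */
		return virtio_gpu_alloc_cmd_resp(vgdev, NULL, vbuffer_p, size,
						 sizeof(struct virtio_gpu_ctrl_hdr),
						 NULL);
	}
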
172 struct virtio_gpu_vbuffer *vbuf)
174 if (vbuf->resp_size > MAX_INLINE_RESP_SIZE)
175 kfree(vbuf->resp_buf);
176 kvfree(vbuf->data_buf);
177 kmem_cache_free(vgdev->vbufs, vbuf);
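
Teardown mirrors the allocation: only an out-of-line response buffer is kfree()d (an inline one dies with the slab object), the optional data payload may be kmalloc'd or vmalloc'd (hence kvfree), and the vbuf goes back to the cache. Reassembled from lines 174-177:

	static void free_vbuf(struct virtio_gpu_device *vgdev,
			      struct virtio_gpu_vbuffer *vbuf)
	{
		if (vbuf->resp_size > MAX_INLINE_RESP_SIZE)
			kfree(vbuf->resp_buf);		/* out-of-line response only */
		kvfree(vbuf->data_buf);			/* payload may be kmalloc or vmalloc */
		kmem_cache_free(vgdev->vbufs, vbuf);
	}
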
182 struct virtio_gpu_vbuffer *vbuf;
186 while ((vbuf = virtqueue_get_buf(vq, &len))) {
187 list_add_tail(&vbuf->list, reclaim_list);
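
Completion handling drains the virtqueue and defers the real work: a sketch of the reclaim loop, assuming (as in mainline) that the dequeue work function later walks reclaim_list outside the queue lock, invokes each vbuf->resp_cb, and calls free_vbuf():

	static void reclaim_vbufs(struct virtqueue *vq,
				  struct list_head *reclaim_list)
	{
		struct virtio_gpu_vbuffer *vbuf;
		unsigned int len;

		/* Collect every completed buffer; response callbacks run
		 * later, outside the virtqueue lock. */
		while ((vbuf = virtqueue_get_buf(vq, &len)))
			list_add_tail(&vbuf->list, reclaim_list);
	}
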
318 struct virtio_gpu_vbuffer *vbuf,
329 if (fence && vbuf->objs)
330 virtio_gpu_array_unlock_resv(vbuf->objs);
331 free_vbuf(vgdev, vbuf);
348 /* now that the position of the vbuf in the virtqueue is known, we can
352 virtio_gpu_fence_emit(vgdev, virtio_gpu_vbuf_ctrl_hdr(vbuf),
354 if (vbuf->objs) {
355 virtio_gpu_array_add_fence(vbuf->objs, &fence->f);
356 virtio_gpu_array_unlock_resv(vbuf->objs);
360 ret = virtqueue_add_sgs(vq, sgs, outcnt, incnt, vbuf, GFP_ATOMIC);
363 trace_virtio_gpu_cmd_queue(vq, virtio_gpu_vbuf_ctrl_hdr(vbuf));
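
Lines 329-331 are the error path (device gone: unlock the reservations, drop the vbuf); lines 348-363 are the success path. The key ordering rule is that the fence id is emitted only once the queue slot is secured, so fence ids reach the ring in submission order. A sketch of that window, with the device-enter check and the wait-for-free-slots loop elided:

	static int virtio_gpu_queue_ctrl_sgs(struct virtio_gpu_device *vgdev,
					     struct virtio_gpu_vbuffer *vbuf,
					     struct virtio_gpu_fence *fence,
					     int elemcnt, struct scatterlist **sgs,
					     int outcnt, int incnt)
	{
		struct virtqueue *vq = vgdev->ctrlq.vq;
		int ret;

		/* ... on a dead device: unlock vbuf->objs if fenced
		 * (lines 329-330), free_vbuf() (line 331), bail ...
		 * ... otherwise wait until vq->num_free >= elemcnt ... */

		spin_lock(&vgdev->ctrlq.qlock);

		/* now that the position of the vbuf in the virtqueue is
		 * known, we can finally set the fence id */
		if (fence) {
			virtio_gpu_fence_emit(vgdev, virtio_gpu_vbuf_ctrl_hdr(vbuf),
					      fence);
			if (vbuf->objs) {
				virtio_gpu_array_add_fence(vbuf->objs, &fence->f);
				virtio_gpu_array_unlock_resv(vbuf->objs);
			}
		}

		ret = virtqueue_add_sgs(vq, sgs, outcnt, incnt, vbuf, GFP_ATOMIC);
		WARN_ON(ret);
		trace_virtio_gpu_cmd_queue(vq, virtio_gpu_vbuf_ctrl_hdr(vbuf));

		spin_unlock(&vgdev->ctrlq.qlock);
		return 0;
	}
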
374 struct virtio_gpu_vbuffer *vbuf,
382 sg_init_one(&vcmd, vbuf->buf, vbuf->size);
388 if (vbuf->data_size) {
389 if (is_vmalloc_addr(vbuf->data_buf)) {
391 sgt = vmalloc_to_sgt(vbuf->data_buf, vbuf->data_size,
394 if (fence && vbuf->objs)
395 virtio_gpu_array_unlock_resv(vbuf->objs);
402 sg_init_one(&vout, vbuf->data_buf, vbuf->data_size);
410 if (vbuf->resp_size) {
411 sg_init_one(&vresp, vbuf->resp_buf, vbuf->resp_size);
417 ret = virtio_gpu_queue_ctrl_sgs(vgdev, vbuf, fence, elemcnt, sgs, outcnt,
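
A vbuf maps to at most three scatterlist slots: the command (host-readable), the optional data payload (host-readable; a vmalloc'd payload is expanded into an sg_table), and the optional response (host-writable). A sketch of the assembly reassembled from lines 382-417; the elemcnt bookkeeping feeds the free-slot check in virtio_gpu_queue_ctrl_sgs() above:

	struct scatterlist *sgs[3], vcmd, vout, vresp;
	struct sg_table *sgt = NULL;
	int elemcnt = 0, outcnt = 0, incnt = 0, ret;

	/* slot 0: the command itself */
	sg_init_one(&vcmd, vbuf->buf, vbuf->size);
	elemcnt++;
	sgs[outcnt++] = &vcmd;

	/* slot 1 (optional): out-of-band payload */
	if (vbuf->data_size) {
		if (is_vmalloc_addr(vbuf->data_buf)) {
			int sg_ents;

			sgt = vmalloc_to_sgt(vbuf->data_buf, vbuf->data_size,
					     &sg_ents);
			if (!sgt) {
				if (fence && vbuf->objs)
					virtio_gpu_array_unlock_resv(vbuf->objs);
				return -ENOMEM;
			}
			elemcnt += sg_ents;
			sgs[outcnt++] = sgt->sgl;
		} else {
			sg_init_one(&vout, vbuf->data_buf, vbuf->data_size);
			elemcnt++;
			sgs[outcnt++] = &vout;
		}
	}

	/* slot 2 (optional): the device-written response */
	if (vbuf->resp_size) {
		sg_init_one(&vresp, vbuf->resp_buf, vbuf->resp_size);
		elemcnt++;
		sgs[outcnt + incnt++] = &vresp;
	}

	ret = virtio_gpu_queue_ctrl_sgs(vgdev, vbuf, fence, elemcnt, sgs,
					outcnt, incnt);
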
444 struct virtio_gpu_vbuffer *vbuf)
446 return virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, NULL);
450 struct virtio_gpu_vbuffer *vbuf)
458 free_vbuf(vgdev, vbuf);
462 sg_init_one(&ccmd, vbuf->buf, vbuf->size);
468 ret = virtqueue_add_sgs(vq, sgs, outcnt, 0, vbuf, GFP_ATOMIC);
476 virtio_gpu_vbuf_ctrl_hdr(vbuf));
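
Line 446 shows the unfenced ctrl path is simply the fenced path with a NULL fence. The cursor queue (lines 450-476) is simpler still: one host-readable sg, no payload, no response, no fence. A sketch, with the device-gone check and the -ENOSPC retry loop elided:

	static int virtio_gpu_queue_ctrl_buffer(struct virtio_gpu_device *vgdev,
						struct virtio_gpu_vbuffer *vbuf)
	{
		return virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, NULL);
	}

	static void virtio_gpu_queue_cursor(struct virtio_gpu_device *vgdev,
					    struct virtio_gpu_vbuffer *vbuf)
	{
		struct virtqueue *vq = vgdev->cursorq.vq;
		struct scatterlist *sgs[1], ccmd;
		int ret;

		/* ... if the device is unplugged: free_vbuf() and bail
		 * (line 458) ... */

		sg_init_one(&ccmd, vbuf->buf, vbuf->size);	/* command only */
		sgs[0] = &ccmd;

		spin_lock(&vgdev->cursorq.qlock);
		/* ... retry on -ENOSPC until the queue drains ... */
		ret = virtqueue_add_sgs(vq, sgs, 1, 0, vbuf, GFP_ATOMIC);
		trace_virtio_gpu_cmd_queue(vq, virtio_gpu_vbuf_ctrl_hdr(vbuf));
		spin_unlock(&vgdev->cursorq.qlock);
	}
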
501 struct virtio_gpu_vbuffer *vbuf;
503 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
505 vbuf->objs = objs;
513 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
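
Lines 501-513 show the template almost every emitter below follows: allocate a typed command with virtio_gpu_alloc_cmd(), fill it in little-endian, optionally park a GEM object array in vbuf->objs so the fence can be attached at queue time, then submit. A sketch using the 2D resource-create command; the params fields and bo->hw_res_handle/bo->created names are assumptions modeled on mainline, not shown in this listing:

	void virtio_gpu_cmd_create_resource(struct virtio_gpu_device *vgdev,
					    struct virtio_gpu_object *bo,
					    struct virtio_gpu_object_params *params,
					    struct virtio_gpu_object_array *objs,
					    struct virtio_gpu_fence *fence)
	{
		struct virtio_gpu_resource_create_2d *cmd_p;
		struct virtio_gpu_vbuffer *vbuf;

		cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
		memset(cmd_p, 0, sizeof(*cmd_p));
		vbuf->objs = objs;	/* fenced at queue time, then unlocked */

		cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_RESOURCE_CREATE_2D);
		cmd_p->resource_id = cpu_to_le32(bo->hw_res_handle);
		cmd_p->format = cpu_to_le32(params->format);
		cmd_p->width = cpu_to_le32(params->width);
		cmd_p->height = cpu_to_le32(params->height);

		virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
		bo->created = true;
	}
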
518 struct virtio_gpu_vbuffer *vbuf)
522 bo = vbuf->resp_cb_data;
523 vbuf->resp_cb_data = NULL;
532 struct virtio_gpu_vbuffer *vbuf;
535 cmd_p = virtio_gpu_alloc_cmd_cb(vgdev, &vbuf, sizeof(*cmd_p),
542 vbuf->resp_cb_data = bo;
543 ret = virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
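
resp_cb_data carries driver state across the round trip: the unref path (lines 518-543) stashes the BO in the vbuf before queuing, and the completion callback picks it up, so the object is torn down only after the host has dropped the resource. A sketch of the callback half; virtio_gpu_cleanup_object() is an assumption based on mainline, not shown in the listing:

	static void virtio_gpu_cmd_unref_cb(struct virtio_gpu_device *vgdev,
					    struct virtio_gpu_vbuffer *vbuf)
	{
		struct virtio_gpu_object *bo;

		bo = vbuf->resp_cb_data;	/* stashed at submit time (line 542) */
		vbuf->resp_cb_data = NULL;

		/* Host has acked RESOURCE_UNREF; safe to free the BO now. */
		virtio_gpu_cleanup_object(bo);
	}
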
554 struct virtio_gpu_vbuffer *vbuf;
556 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
567 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
576 struct virtio_gpu_vbuffer *vbuf;
578 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
588 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
600 struct virtio_gpu_vbuffer *vbuf;
608 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
610 vbuf->objs = objs;
620 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
631 struct virtio_gpu_vbuffer *vbuf;
633 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
640 vbuf->data_buf = ents;
641 vbuf->data_size = sizeof(*ents) * nents;
643 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
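
Lines 640-641 show the third payload channel: data_buf/data_size hand an out-of-band array (here the guest page entries) to the vbuf. virtio_gpu_queue_fenced_ctrl_buffer() turns it into the middle sg entry, and free_vbuf() later kvfree()s it, so ownership of ents transfers to the vbuf on submit. A sketch modeled on the listed lines:

	static void
	virtio_gpu_cmd_resource_attach_backing(struct virtio_gpu_device *vgdev,
					       uint32_t resource_id,
					       struct virtio_gpu_mem_entry *ents,
					       uint32_t nents,
					       struct virtio_gpu_fence *fence)
	{
		struct virtio_gpu_resource_attach_backing *cmd_p;
		struct virtio_gpu_vbuffer *vbuf;

		cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
		memset(cmd_p, 0, sizeof(*cmd_p));

		cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING);
		cmd_p->resource_id = cpu_to_le32(resource_id);
		cmd_p->nr_entries = cpu_to_le32(nents);

		vbuf->data_buf = ents;			/* ownership moves to the vbuf;
							 * kvfree()d by free_vbuf() */
		vbuf->data_size = sizeof(*ents) * nents;

		virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
	}
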
647 struct virtio_gpu_vbuffer *vbuf)
650 (struct virtio_gpu_resp_display_info *)vbuf->resp_buf;
676 struct virtio_gpu_vbuffer *vbuf)
679 (struct virtio_gpu_get_capset_info *)vbuf->buf;
681 (struct virtio_gpu_resp_capset_info *)vbuf->resp_buf;
697 struct virtio_gpu_vbuffer *vbuf)
700 (struct virtio_gpu_get_capset *)vbuf->buf;
702 (struct virtio_gpu_resp_capset *)vbuf->resp_buf;
734 struct virtio_gpu_vbuffer *vbuf)
737 (struct virtio_gpu_cmd_get_edid *)vbuf->buf;
739 (struct virtio_gpu_resp_edid *)vbuf->resp_buf;
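
The four callbacks at lines 647-739 share one shape: cast vbuf->buf back to the request to recover the parameters (capset index, scanout id, ...), and cast vbuf->resp_buf to the typed response. A sketch of the EDID case, with the body trimmed to the casts the listing shows:

	static void virtio_gpu_cmd_get_edid_cb(struct virtio_gpu_device *vgdev,
					       struct virtio_gpu_vbuffer *vbuf)
	{
		struct virtio_gpu_cmd_get_edid *cmd =
			(struct virtio_gpu_cmd_get_edid *)vbuf->buf;
		struct virtio_gpu_resp_edid *resp =
			(struct virtio_gpu_resp_edid *)vbuf->resp_buf;
		uint32_t scanout = le32_to_cpu(cmd->scanout);

		/* ... copy resp->edid into the scanout's state and wake
		 * any waiters ... */
	}
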
763 struct virtio_gpu_vbuffer *vbuf;
772 (vgdev, &virtio_gpu_cmd_get_display_info_cb, &vbuf,
779 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
786 struct virtio_gpu_vbuffer *vbuf;
795 (vgdev, &virtio_gpu_cmd_get_capset_info_cb, &vbuf,
802 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
811 struct virtio_gpu_vbuffer *vbuf;
870 (vgdev, &virtio_gpu_cmd_capset_cb, &vbuf, sizeof(*cmd_p),
877 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
885 struct virtio_gpu_vbuffer *vbuf;
899 (vgdev, &virtio_gpu_cmd_get_edid_cb, &vbuf,
904 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
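
The submit side of these queries (lines 763-904) pairs virtio_gpu_alloc_cmd_resp() with an out-of-line, kzalloc'd response buffer (the responses exceed the inline limit) and the callback that will parse it. A sketch of the EDID emitter; the num_scanouts loop and error handling are assumptions modeled on mainline:

	int virtio_gpu_cmd_get_edids(struct virtio_gpu_device *vgdev)
	{
		struct virtio_gpu_cmd_get_edid *cmd_p;
		struct virtio_gpu_vbuffer *vbuf;
		void *resp_buf;
		int scanout;

		for (scanout = 0; scanout < vgdev->num_scanouts; scanout++) {
			/* Response is too big for the inline slot. */
			resp_buf = kzalloc(sizeof(struct virtio_gpu_resp_edid),
					   GFP_KERNEL);
			if (!resp_buf)
				return -ENOMEM;

			cmd_p = virtio_gpu_alloc_cmd_resp
				(vgdev, &virtio_gpu_cmd_get_edid_cb, &vbuf,
				 sizeof(*cmd_p),
				 sizeof(struct virtio_gpu_resp_edid),
				 resp_buf);
			memset(cmd_p, 0, sizeof(*cmd_p));
			cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_GET_EDID);
			cmd_p->scanout = cpu_to_le32(scanout);
			virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
		}
		return 0;
	}
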
914 struct virtio_gpu_vbuffer *vbuf;
916 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
924 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
931 struct virtio_gpu_vbuffer *vbuf;
933 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
938 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
947 struct virtio_gpu_vbuffer *vbuf;
949 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
951 vbuf->objs = objs;
956 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
965 struct virtio_gpu_vbuffer *vbuf;
967 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
969 vbuf->objs = objs;
974 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
985 struct virtio_gpu_vbuffer *vbuf;
987 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
989 vbuf->objs = objs;
1005 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
1019 struct virtio_gpu_vbuffer *vbuf;
1027 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
1030 vbuf->objs = objs;
1039 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
1051 struct virtio_gpu_vbuffer *vbuf;
1053 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
1056 vbuf->objs = objs;
1065 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
1075 struct virtio_gpu_vbuffer *vbuf;
1077 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
1080 vbuf->data_buf = data;
1081 vbuf->data_size = data_size;
1082 vbuf->objs = objs;
1088 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
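
Execbuffer submission (lines 1075-1088) combines every vbuf channel: a fixed header in the inline buffer, the user's command stream handed off through data_buf (kvfree()d on completion), the referenced BOs in objs for fencing, and a fenced queue. A sketch; the exact signature and header fields are modeled on mainline:

	void virtio_gpu_cmd_submit(struct virtio_gpu_device *vgdev,
				   void *data, uint32_t data_size,
				   uint32_t ctx_id,
				   struct virtio_gpu_object_array *objs,
				   struct virtio_gpu_fence *fence)
	{
		struct virtio_gpu_cmd_submit *cmd_p;
		struct virtio_gpu_vbuffer *vbuf;

		cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
		memset(cmd_p, 0, sizeof(*cmd_p));

		vbuf->data_buf = data;		/* command stream; the vbuf owns it now */
		vbuf->data_size = data_size;
		vbuf->objs = objs;		/* fenced at queue time */

		cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_SUBMIT_3D);
		cmd_p->hdr.ctx_id = cpu_to_le32(ctx_id);
		cmd_p->size = cpu_to_le32(data_size);

		virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
	}
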
1103 struct virtio_gpu_vbuffer *vbuf;
1107 cur_p = virtio_gpu_alloc_cursor(vgdev, &vbuf);
1109 virtio_gpu_queue_cursor(vgdev, vbuf);
1113 struct virtio_gpu_vbuffer *vbuf)
1116 gem_to_virtio_gpu_obj(vbuf->objs->objs[0]);
1118 (struct virtio_gpu_resp_resource_uuid *)vbuf->resp_buf;
1142 struct virtio_gpu_vbuffer *vbuf;
1155 (vgdev, virtio_gpu_cmd_resource_uuid_cb, &vbuf, sizeof(*cmd_p),
1162 vbuf->objs = objs;
1163 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);