/*
 * Copyright 2013 Red Hat
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#ifndef VIRTGPU_DRM_H
#define VIRTGPU_DRM_H

#include "drm.h"

#if defined(__cplusplus)
extern "C" {
#endif

/* Please note that modifications to all structs defined here are
 * subject to backwards-compatibility constraints.
 *
 * Do not use pointers; use __u64 instead, for 32-bit / 64-bit user/kernel
 * compatibility. Keep fields aligned to their size.
 */

#define DRM_VIRTGPU_MAP         0x01
#define DRM_VIRTGPU_EXECBUFFER  0x02
#define DRM_VIRTGPU_GETPARAM    0x03
#define DRM_VIRTGPU_RESOURCE_CREATE 0x04
#define DRM_VIRTGPU_RESOURCE_INFO     0x05
#define DRM_VIRTGPU_TRANSFER_FROM_HOST 0x06
#define DRM_VIRTGPU_TRANSFER_TO_HOST 0x07
#define DRM_VIRTGPU_WAIT     0x08
#define DRM_VIRTGPU_GET_CAPS  0x09
#define DRM_VIRTGPU_RESOURCE_CREATE_BLOB 0x0a
#define DRM_VIRTGPU_CONTEXT_INIT 0x0b

#define VIRTGPU_EXECBUF_FENCE_FD_IN	0x01
#define VIRTGPU_EXECBUF_FENCE_FD_OUT	0x02
#define VIRTGPU_EXECBUF_RING_IDX	0x04
#define VIRTGPU_EXECBUF_FLAGS  (\
		VIRTGPU_EXECBUF_FENCE_FD_IN |\
		VIRTGPU_EXECBUF_FENCE_FD_OUT |\
		VIRTGPU_EXECBUF_RING_IDX |\
		0)

struct drm_virtgpu_map {
	__u64 offset; /* use for mmap system call */
	__u32 handle;
	__u32 pad;
};
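
/*
 * A minimal usage sketch (illustrative only, not part of this UAPI): mapping
 * a buffer object into the caller's address space.  The MAP ioctl only
 * returns a fake mmap offset; the actual mapping is created with mmap(2) on
 * the DRM fd.  "fd", "bo_handle" and "bo_size" are assumed to be supplied by
 * the caller (needs <sys/ioctl.h> and <sys/mman.h>); error handling omitted.
 *
 *	struct drm_virtgpu_map map = { .handle = bo_handle };
 *	void *ptr = MAP_FAILED;
 *
 *	if (ioctl(fd, DRM_IOCTL_VIRTGPU_MAP, &map) == 0)
 *		ptr = mmap(NULL, bo_size, PROT_READ | PROT_WRITE,
 *			   MAP_SHARED, fd, map.offset);
 *
 * On success ptr aliases the buffer object until munmap(2).
 */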

#define VIRTGPU_EXECBUF_SYNCOBJ_RESET		0x01
#define VIRTGPU_EXECBUF_SYNCOBJ_FLAGS ( \
		VIRTGPU_EXECBUF_SYNCOBJ_RESET | \
		0)
struct drm_virtgpu_execbuffer_syncobj {
	__u32 handle;
	__u32 flags;
	__u64 point;
};

/* fence_fd is modified on success if VIRTGPU_EXECBUF_FENCE_FD_OUT flag is set. */
struct drm_virtgpu_execbuffer {
	__u32 flags;
	__u32 size;
	__u64 command; /* void* */
	__u64 bo_handles;
	__u32 num_bo_handles;
	__s32 fence_fd; /* in/out fence fd (see VIRTGPU_EXECBUF_FENCE_FD_IN/OUT) */
	__u32 ring_idx; /* command ring index (see VIRTGPU_EXECBUF_RING_IDX) */
	__u32 syncobj_stride; /* size of @drm_virtgpu_execbuffer_syncobj */
	__u32 num_in_syncobjs;
	__u32 num_out_syncobjs;
	__u64 in_syncobjs;
	__u64 out_syncobjs;
};
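
/*
 * A minimal submission sketch (illustrative only, not part of this UAPI):
 * sending a context-specific command stream to the host and requesting an
 * out-fence.  "fd", "cmd_buf", "cmd_len", "handles" and "num_handles" are
 * assumed to be supplied by the caller; error handling omitted.
 *
 *	int out_fence_fd = -1;
 *	struct drm_virtgpu_execbuffer eb = {
 *		.flags = VIRTGPU_EXECBUF_FENCE_FD_OUT,
 *		.size = cmd_len,
 *		.command = (__u64)(uintptr_t)cmd_buf,
 *		.bo_handles = (__u64)(uintptr_t)handles,
 *		.num_bo_handles = num_handles,
 *		.fence_fd = -1,
 *	};
 *
 *	if (ioctl(fd, DRM_IOCTL_VIRTGPU_EXECBUFFER, &eb) == 0)
 *		out_fence_fd = eb.fence_fd;
 *
 * On success eb.fence_fd holds a poll-able sync_file fd for the submission.
 */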

#define VIRTGPU_PARAM_3D_FEATURES 1 /* do we have 3D features in the hw */
#define VIRTGPU_PARAM_CAPSET_QUERY_FIX 2 /* do we have the capset fix */
#define VIRTGPU_PARAM_RESOURCE_BLOB 3 /* DRM_VIRTGPU_RESOURCE_CREATE_BLOB */
#define VIRTGPU_PARAM_HOST_VISIBLE 4 /* Host blob resources are mappable */
#define VIRTGPU_PARAM_CROSS_DEVICE 5 /* Cross virtio-device resource sharing */
#define VIRTGPU_PARAM_CONTEXT_INIT 6 /* DRM_VIRTGPU_CONTEXT_INIT */
#define VIRTGPU_PARAM_SUPPORTED_CAPSET_IDs 7 /* Bitmask of supported capability set ids */

struct drm_virtgpu_getparam {
	__u64 param;
	__u64 value;
};
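
/*
 * A minimal query sketch (illustrative only, not part of this UAPI): .value
 * carries a user-space pointer through which the kernel writes the result.
 * "fd" is assumed to be an open virtio-gpu DRM fd; error handling omitted.
 *
 *	int blob_supported = 0;
 *	__u64 has_blob = 0;
 *	struct drm_virtgpu_getparam gp = {
 *		.param = VIRTGPU_PARAM_RESOURCE_BLOB,
 *		.value = (__u64)(uintptr_t)&has_blob,
 *	};
 *
 *	if (ioctl(fd, DRM_IOCTL_VIRTGPU_GETPARAM, &gp) == 0)
 *		blob_supported = has_blob != 0;
 */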

/* NO_BO flags? NO resource flag? */
/* resource flag for y_0_top */
struct drm_virtgpu_resource_create {
	__u32 target;
	__u32 format;
	__u32 bind;
	__u32 width;
	__u32 height;
	__u32 depth;
	__u32 array_size;
	__u32 last_level;
	__u32 nr_samples;
	__u32 flags;
	__u32 bo_handle; /* if this is set - recreate a new resource attached to this bo ? */
	__u32 res_handle;  /* returned by kernel */
	__u32 size;        /* validate transfer in the host */
	__u32 stride;      /* validate transfer in the host */
};
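
/*
 * A minimal creation sketch (illustrative only, not part of this UAPI) for a
 * small 64x64 2D resource.  "tex_2d_target", "rgba8_format" and
 * "sampler_view_bind" stand in for host renderer (virgl/Gallium) enum values
 * and are assumptions supplied by the caller; error handling omitted.
 *
 *	struct drm_virtgpu_resource_create rc = {
 *		.target = tex_2d_target,
 *		.format = rgba8_format,
 *		.bind = sampler_view_bind,
 *		.width = 64,
 *		.height = 64,
 *		.depth = 1,
 *		.array_size = 1,
 *		.size = 64 * 64 * 4,
 *		.stride = 64 * 4,
 *	};
 *
 *	ioctl(fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE, &rc);
 *
 * On success rc.bo_handle is a GEM handle and rc.res_handle the host-side
 * resource id.
 */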

struct drm_virtgpu_resource_info {
	__u32 bo_handle;
	__u32 res_handle;
	__u32 size;
	__u32 blob_mem;
};

struct drm_virtgpu_3d_box {
	__u32 x;
	__u32 y;
	__u32 z;
	__u32 w;
	__u32 h;
	__u32 d;
};

struct drm_virtgpu_3d_transfer_to_host {
	__u32 bo_handle;
	struct drm_virtgpu_3d_box box;
	__u32 level;
	__u32 offset;
	__u32 stride;
	__u32 layer_stride;
};

struct drm_virtgpu_3d_transfer_from_host {
	__u32 bo_handle;
	struct drm_virtgpu_3d_box box;
	__u32 level;
	__u32 offset;
	__u32 stride;
	__u32 layer_stride;
};
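
/*
 * A minimal transfer sketch (illustrative only, not part of this UAPI):
 * flushing the full contents of the 64x64 resource from the creation sketch
 * above ("rc") to its host-side copy.  Transfers are queued asynchronously;
 * DRM_IOCTL_VIRTGPU_WAIT (below) or a fence tells the caller when they have
 * completed.  Error handling omitted.
 *
 *	struct drm_virtgpu_3d_transfer_to_host xfer = {
 *		.bo_handle = rc.bo_handle,
 *		.box = { .x = 0, .y = 0, .z = 0, .w = 64, .h = 64, .d = 1 },
 *		.level = 0,
 *	};
 *
 *	ioctl(fd, DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST, &xfer);
 *
 * DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST takes the same layout and copies in
 * the opposite direction.
 */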

#define VIRTGPU_WAIT_NOWAIT 1 /* like it */
struct drm_virtgpu_3d_wait {
	__u32 handle; /* 0 is an invalid handle */
	__u32 flags;
};
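
/*
 * A minimal wait sketch (illustrative only, not part of this UAPI): blocking
 * until the host has finished using a buffer object.  With
 * VIRTGPU_WAIT_NOWAIT in .flags the ioctl does not block and instead fails
 * with EBUSY while the buffer is still busy.  "bo_handle" is assumed to be a
 * caller-supplied GEM handle; error handling omitted.
 *
 *	struct drm_virtgpu_3d_wait wait = { .handle = bo_handle };
 *
 *	ioctl(fd, DRM_IOCTL_VIRTGPU_WAIT, &wait);
 */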

struct drm_virtgpu_get_caps {
	__u32 cap_set_id;
	__u32 cap_set_ver;
	__u64 addr;
	__u32 size;
	__u32 pad;
};
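
/*
 * A minimal capability-set sketch (illustrative only, not part of this UAPI):
 * copying a capset into a caller-provided buffer.  "capset_id" and
 * "capset_ver" are assumed to match a set advertised by the host (see
 * VIRTGPU_PARAM_SUPPORTED_CAPSET_IDs), and CAPS_BUF_SIZE is a caller-chosen
 * size large enough for that capset.  Error handling omitted.
 *
 *	char caps_buf[CAPS_BUF_SIZE];
 *	struct drm_virtgpu_get_caps caps = {
 *		.cap_set_id = capset_id,
 *		.cap_set_ver = capset_ver,
 *		.addr = (__u64)(uintptr_t)caps_buf,
 *		.size = sizeof(caps_buf),
 *	};
 *
 *	ioctl(fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &caps);
 */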

struct drm_virtgpu_resource_create_blob {
#define VIRTGPU_BLOB_MEM_GUEST             0x0001
#define VIRTGPU_BLOB_MEM_HOST3D            0x0002
#define VIRTGPU_BLOB_MEM_HOST3D_GUEST      0x0003

#define VIRTGPU_BLOB_FLAG_USE_MAPPABLE     0x0001
#define VIRTGPU_BLOB_FLAG_USE_SHAREABLE    0x0002
#define VIRTGPU_BLOB_FLAG_USE_CROSS_DEVICE 0x0004
	/* zero is invalid blob_mem */
	__u32 blob_mem;
	__u32 blob_flags;
	__u32 bo_handle;
	__u32 res_handle;
	__u64 size;

	/*
	 * For 3D contexts with VIRTGPU_BLOB_MEM_HOST3D_GUEST and
	 * VIRTGPU_BLOB_MEM_HOST3D; otherwise, must be zero.
	 */
	__u32 pad;
	__u32 cmd_size;
	__u64 cmd;
	__u64 blob_id;
};
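
/*
 * A minimal blob-creation sketch (illustrative only, not part of this UAPI):
 * wrapping a host-memory blob previously defined by context commands
 * (identified by "blob_id", an assumption supplied by the caller) in a
 * mappable GEM object.  Error handling omitted.
 *
 *	struct drm_virtgpu_resource_create_blob blob = {
 *		.blob_mem = VIRTGPU_BLOB_MEM_HOST3D,
 *		.blob_flags = VIRTGPU_BLOB_FLAG_USE_MAPPABLE,
 *		.size = 4096,
 *		.blob_id = blob_id,
 *	};
 *
 *	ioctl(fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, &blob);
 *
 * On success blob.bo_handle can be mapped through DRM_IOCTL_VIRTGPU_MAP when
 * the host reports VIRTGPU_PARAM_HOST_VISIBLE.
 */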

#define VIRTGPU_CONTEXT_PARAM_CAPSET_ID       0x0001
#define VIRTGPU_CONTEXT_PARAM_NUM_RINGS       0x0002
#define VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK 0x0003
struct drm_virtgpu_context_set_param {
	__u64 param;
	__u64 value;
};

struct drm_virtgpu_context_init {
	__u32 num_params;
	__u32 pad;

	/* pointer to drm_virtgpu_context_set_param array */
	__u64 ctx_set_params;
};
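
/*
 * A minimal initialization sketch (illustrative only, not part of this UAPI):
 * creating a context bound to one capability set with two timeline rings and
 * fence-event polling on ring 0.  "capset_id" is an assumption supplied by
 * the caller; error handling omitted.
 *
 *	struct drm_virtgpu_context_set_param params[] = {
 *		{ VIRTGPU_CONTEXT_PARAM_CAPSET_ID, capset_id },
 *		{ VIRTGPU_CONTEXT_PARAM_NUM_RINGS, 2 },
 *		{ VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK, 1 << 0 },
 *	};
 *	struct drm_virtgpu_context_init init = {
 *		.num_params = 3,
 *		.ctx_set_params = (__u64)(uintptr_t)params,
 *	};
 *
 *	ioctl(fd, DRM_IOCTL_VIRTGPU_CONTEXT_INIT, &init);
 */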

/*
 * Event code that's given when VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK is in
 * effect.  The event size is sizeof(drm_event), since there is no additional
 * payload.
 */
#define VIRTGPU_EVENT_FENCE_SIGNALED 0x90000000
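
/*
 * A minimal event-loop sketch (illustrative only, not part of this UAPI):
 * because the event carries no payload, reading sizeof(struct drm_event) is
 * enough once poll(2) reports the DRM fd readable.  handle_ring_fence() is a
 * hypothetical caller-side handler; error handling omitted.
 *
 *	struct drm_event ev;
 *
 *	if (read(fd, &ev, sizeof(ev)) == sizeof(ev) &&
 *	    ev.type == VIRTGPU_EVENT_FENCE_SIGNALED)
 *		handle_ring_fence();
 */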

#define DRM_IOCTL_VIRTGPU_MAP \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_MAP, struct drm_virtgpu_map)

#define DRM_IOCTL_VIRTGPU_EXECBUFFER \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_EXECBUFFER,\
		struct drm_virtgpu_execbuffer)

#define DRM_IOCTL_VIRTGPU_GETPARAM \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_GETPARAM,\
		struct drm_virtgpu_getparam)

#define DRM_IOCTL_VIRTGPU_RESOURCE_CREATE			\
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_CREATE,	\
		struct drm_virtgpu_resource_create)

#define DRM_IOCTL_VIRTGPU_RESOURCE_INFO \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_INFO, \
		 struct drm_virtgpu_resource_info)

#define DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_TRANSFER_FROM_HOST,	\
		struct drm_virtgpu_3d_transfer_from_host)

#define DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_TRANSFER_TO_HOST,	\
		struct drm_virtgpu_3d_transfer_to_host)

#define DRM_IOCTL_VIRTGPU_WAIT				\
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_WAIT,	\
		struct drm_virtgpu_3d_wait)

#define DRM_IOCTL_VIRTGPU_GET_CAPS \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_GET_CAPS, \
	struct drm_virtgpu_get_caps)

#define DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB				\
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_CREATE_BLOB,	\
		struct drm_virtgpu_resource_create_blob)

#define DRM_IOCTL_VIRTGPU_CONTEXT_INIT					\
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_CONTEXT_INIT,		\
		struct drm_virtgpu_context_init)

#if defined(__cplusplus)
}
#endif

#endif