/*
 * Copyright 2013 Red Hat
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
/* Give the declarations in this header C linkage when included from C++. */
#if defined(__cplusplus)
extern "C" {
#endif
/* Please note that modifications to all structs defined here are
 * subject to backwards-compatibility constraints.
 *
 * Do not use pointers, use __u64 instead for 32 bit / 64 bit user/kernel
 * compatibility. Keep fields aligned to their size.
 */
/*
 * virtio-GPU ioctl command numbers, offset from DRM_COMMAND_BASE.
 * Each is bound to its argument struct by a DRM_IOCTL_VIRTGPU_* wrapper
 * further down in this header.
 */
#define DRM_VIRTGPU_MAP 0x01
#define DRM_VIRTGPU_EXECBUFFER 0x02
#define DRM_VIRTGPU_GETPARAM 0x03
#define DRM_VIRTGPU_RESOURCE_CREATE 0x04
#define DRM_VIRTGPU_RESOURCE_INFO 0x05
#define DRM_VIRTGPU_TRANSFER_FROM_HOST 0x06
#define DRM_VIRTGPU_TRANSFER_TO_HOST 0x07
#define DRM_VIRTGPU_WAIT 0x08
#define DRM_VIRTGPU_GET_CAPS 0x09
#define DRM_VIRTGPU_RESOURCE_CREATE_BLOB 0x0a
#define DRM_VIRTGPU_CONTEXT_INIT 0x0b
/* Flags accepted in drm_virtgpu_execbuffer.flags. */
#define VIRTGPU_EXECBUF_FENCE_FD_IN	0x01 /* fence_fd is an in-fence to wait on */
#define VIRTGPU_EXECBUF_FENCE_FD_OUT	0x02 /* fence_fd returns an out-fence */
#define VIRTGPU_EXECBUF_RING_IDX	0x04 /* the ring_idx field is valid */
/* Mask of all valid execbuffer flag bits (trailing "0)" restores the
 * closing line that was dropped from this copy of the header). */
#define VIRTGPU_EXECBUF_FLAGS  (\
		VIRTGPU_EXECBUF_FENCE_FD_IN |\
		VIRTGPU_EXECBUF_FENCE_FD_OUT |\
		VIRTGPU_EXECBUF_RING_IDX |\
		0)
61 struct drm_virtgpu_map {
62 __u64 offset; /* use for mmap system call */
/* Flags accepted in drm_virtgpu_execbuffer_syncobj.flags. */
#define VIRTGPU_EXECBUF_SYNCOBJ_RESET		0x01
/* Mask of all valid syncobj flag bits (trailing "0)" restores the
 * closing line that was dropped from this copy of the header). */
#define VIRTGPU_EXECBUF_SYNCOBJ_FLAGS ( \
		VIRTGPU_EXECBUF_SYNCOBJ_RESET | \
		0)
71 struct drm_virtgpu_execbuffer_syncobj {
/* fence_fd is modified on success if VIRTGPU_EXECBUF_FENCE_FD_OUT flag is set. */
78 struct drm_virtgpu_execbuffer {
81 __u64 command; /* void* */
84 __s32 fence_fd; /* in/out fence fd (see VIRTGPU_EXECBUF_FENCE_FD_IN/OUT) */
85 __u32 ring_idx; /* command ring index (see VIRTGPU_EXECBUF_RING_IDX) */
86 __u32 syncobj_stride; /* size of @drm_virtgpu_execbuffer_syncobj */
87 __u32 num_in_syncobjs;
88 __u32 num_out_syncobjs;
/* Parameter ids queried through DRM_VIRTGPU_GETPARAM. */
#define VIRTGPU_PARAM_3D_FEATURES 1 /* do we have 3D features in the hw */
#define VIRTGPU_PARAM_CAPSET_QUERY_FIX 2 /* do we have the capset fix */
#define VIRTGPU_PARAM_RESOURCE_BLOB 3 /* DRM_VIRTGPU_RESOURCE_CREATE_BLOB */
#define VIRTGPU_PARAM_HOST_VISIBLE 4 /* Host blob resources are mappable */
#define VIRTGPU_PARAM_CROSS_DEVICE 5 /* Cross virtio-device resource sharing */
#define VIRTGPU_PARAM_CONTEXT_INIT 6 /* DRM_VIRTGPU_CONTEXT_INIT */
#define VIRTGPU_PARAM_SUPPORTED_CAPSET_IDs 7 /* Bitmask of supported capability set ids */
#define VIRTGPU_PARAM_EXPLICIT_DEBUG_NAME 8 /* Ability to set debug name from userspace */
102 struct drm_virtgpu_getparam {
/* NO_BO flags? NO resource flag? */
/* resource flag for y_0_top */
109 struct drm_virtgpu_resource_create {
120 __u32 bo_handle; /* if this is set - recreate a new resource attached to this bo ? */
121 __u32 res_handle; /* returned by kernel */
122 __u32 size; /* validate transfer in the host */
123 __u32 stride; /* validate transfer in the host */
126 struct drm_virtgpu_resource_info {
133 struct drm_virtgpu_3d_box {
142 struct drm_virtgpu_3d_transfer_to_host {
144 struct drm_virtgpu_3d_box box;
151 struct drm_virtgpu_3d_transfer_from_host {
153 struct drm_virtgpu_3d_box box;
/* NOTE(review): presumably makes DRM_VIRTGPU_WAIT poll instead of block —
 * confirm against the driver's wait implementation. */
#define VIRTGPU_WAIT_NOWAIT 1
161 struct drm_virtgpu_3d_wait {
162 __u32 handle; /* 0 is an invalid handle */
166 struct drm_virtgpu_get_caps {
174 struct drm_virtgpu_resource_create_blob {
175 #define VIRTGPU_BLOB_MEM_GUEST 0x0001
176 #define VIRTGPU_BLOB_MEM_HOST3D 0x0002
177 #define VIRTGPU_BLOB_MEM_HOST3D_GUEST 0x0003
179 #define VIRTGPU_BLOB_FLAG_USE_MAPPABLE 0x0001
180 #define VIRTGPU_BLOB_FLAG_USE_SHAREABLE 0x0002
181 #define VIRTGPU_BLOB_FLAG_USE_CROSS_DEVICE 0x0004
/* zero is invalid blob_mem */
/* valid for 3D contexts with VIRTGPU_BLOB_MEM_HOST3D_GUEST and
 * VIRTGPU_BLOB_MEM_HOST3D; otherwise, must be zero.
 */
/* Parameter ids for drm_virtgpu_context_set_param (DRM_VIRTGPU_CONTEXT_INIT). */
#define VIRTGPU_CONTEXT_PARAM_CAPSET_ID       0x0001
#define VIRTGPU_CONTEXT_PARAM_NUM_RINGS       0x0002
#define VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK 0x0003
#define VIRTGPU_CONTEXT_PARAM_DEBUG_NAME      0x0004
203 struct drm_virtgpu_context_set_param {
208 struct drm_virtgpu_context_init {
/* pointer to drm_virtgpu_context_set_param array */
213 __u64 ctx_set_params;
/*
 * Event code that's given when VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK is in
 * effect. The event size is sizeof(drm_event), since there is no additional
 * event-specific payload.
 */
#define VIRTGPU_EVENT_FENCE_SIGNALED 0x90000000
/*
 * ioctl definitions: DRM_IOWR wrappers binding each DRM_VIRTGPU_* command
 * number (above) to its userspace argument struct.
 */
#define DRM_IOCTL_VIRTGPU_MAP \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_MAP, struct drm_virtgpu_map)

#define DRM_IOCTL_VIRTGPU_EXECBUFFER \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_EXECBUFFER,\
		struct drm_virtgpu_execbuffer)

#define DRM_IOCTL_VIRTGPU_GETPARAM \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_GETPARAM,\
		struct drm_virtgpu_getparam)

#define DRM_IOCTL_VIRTGPU_RESOURCE_CREATE \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_CREATE, \
		struct drm_virtgpu_resource_create)

#define DRM_IOCTL_VIRTGPU_RESOURCE_INFO \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_INFO, \
		struct drm_virtgpu_resource_info)

#define DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_TRANSFER_FROM_HOST, \
		struct drm_virtgpu_3d_transfer_from_host)

#define DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_TRANSFER_TO_HOST, \
		struct drm_virtgpu_3d_transfer_to_host)

#define DRM_IOCTL_VIRTGPU_WAIT \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_WAIT, \
		struct drm_virtgpu_3d_wait)

#define DRM_IOCTL_VIRTGPU_GET_CAPS \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_GET_CAPS, \
		struct drm_virtgpu_get_caps)

#define DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_CREATE_BLOB, \
		struct drm_virtgpu_resource_create_blob)

#define DRM_IOCTL_VIRTGPU_CONTEXT_INIT \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_CONTEXT_INIT, \
		struct drm_virtgpu_context_init)
/* End of C linkage for C++ consumers. */
#if defined(__cplusplus)
}
#endif