virtgpu_drm.h

/*
 * Copyright 2013 Red Hat
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#ifndef VIRTGPU_DRM_H
#define VIRTGPU_DRM_H

#include "drm.h"

#if defined(__cplusplus)
extern "C" {
#endif

/* Please note that modifications to all structs defined here are
 * subject to backwards-compatibility constraints.
 *
 * Do not use pointers, use __u64 instead for 32 bit / 64 bit user/kernel
 * compatibility. Keep fields aligned to their size.
 */

#define DRM_VIRTGPU_MAP 0x01
#define DRM_VIRTGPU_EXECBUFFER 0x02
#define DRM_VIRTGPU_GETPARAM 0x03
#define DRM_VIRTGPU_RESOURCE_CREATE 0x04
#define DRM_VIRTGPU_RESOURCE_INFO 0x05
#define DRM_VIRTGPU_TRANSFER_FROM_HOST 0x06
#define DRM_VIRTGPU_TRANSFER_TO_HOST 0x07
#define DRM_VIRTGPU_WAIT 0x08
#define DRM_VIRTGPU_GET_CAPS 0x09
#define DRM_VIRTGPU_RESOURCE_CREATE_BLOB 0x0a
#define DRM_VIRTGPU_CONTEXT_INIT 0x0b

#define VIRTGPU_EXECBUF_FENCE_FD_IN 0x01
#define VIRTGPU_EXECBUF_FENCE_FD_OUT 0x02
#define VIRTGPU_EXECBUF_RING_IDX 0x04
#define VIRTGPU_EXECBUF_FLAGS (\
		VIRTGPU_EXECBUF_FENCE_FD_IN |\
		VIRTGPU_EXECBUF_FENCE_FD_OUT |\
		VIRTGPU_EXECBUF_RING_IDX |\
		0)

struct drm_virtgpu_map {
	__u64 offset; /* use for mmap system call */
	__u32 handle;
	__u32 pad;
};
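
/*
 * Illustrative sketch (not part of the UAPI): a typical way to CPU-map a
 * virtgpu GEM object. DRM_IOCTL_VIRTGPU_MAP fills @offset, which userspace
 * then passes as the mmap() offset on the same DRM fd. The fd, bo_handle
 * and bo_size below are assumed to come from earlier open()/resource-create
 * calls; ioctl() and mmap() come from <sys/ioctl.h> and <sys/mman.h>.
 *
 *	struct drm_virtgpu_map req = { .handle = bo_handle };
 *	void *ptr = MAP_FAILED;
 *
 *	if (ioctl(fd, DRM_IOCTL_VIRTGPU_MAP, &req) == 0)
 *		ptr = mmap(NULL, bo_size, PROT_READ | PROT_WRITE,
 *			   MAP_SHARED, fd, req.offset);
 */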

#define VIRTGPU_EXECBUF_SYNCOBJ_RESET 0x01
#define VIRTGPU_EXECBUF_SYNCOBJ_FLAGS ( \
		VIRTGPU_EXECBUF_SYNCOBJ_RESET | \
		0)
struct drm_virtgpu_execbuffer_syncobj {
	__u32 handle;
	__u32 flags;
	__u64 point;
};

/* fence_fd is modified on success if VIRTGPU_EXECBUF_FENCE_FD_OUT flag is set. */
struct drm_virtgpu_execbuffer {
	__u32 flags;
	__u32 size;
	__u64 command; /* void* */
	__u64 bo_handles;
	__u32 num_bo_handles;
	__s32 fence_fd; /* in/out fence fd (see VIRTGPU_EXECBUF_FENCE_FD_IN/OUT) */
	__u32 ring_idx; /* command ring index (see VIRTGPU_EXECBUF_RING_IDX) */
	__u32 syncobj_stride; /* size of @drm_virtgpu_execbuffer_syncobj */
	__u32 num_in_syncobjs;
	__u32 num_out_syncobjs;
	__u64 in_syncobjs;
	__u64 out_syncobjs;
};
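
/*
 * Illustrative sketch (not part of the UAPI): submitting a command buffer
 * and requesting an out-fence. The contents of cmd_buf are context-specific
 * (e.g. virgl commands) and are not defined by this header; fd, cmd_buf,
 * cmd_len, handles and num_handles are assumed to exist, and the uintptr_t
 * casts assume <stdint.h>. With VIRTGPU_EXECBUF_FENCE_FD_OUT set, a
 * sync_file fd is returned in @fence_fd on success and should eventually be
 * close()d.
 *
 *	struct drm_virtgpu_execbuffer eb = {
 *		.flags = VIRTGPU_EXECBUF_FENCE_FD_OUT,
 *		.size = cmd_len,
 *		.command = (__u64)(uintptr_t)cmd_buf,
 *		.bo_handles = (__u64)(uintptr_t)handles,
 *		.num_bo_handles = num_handles,
 *		.fence_fd = -1,
 *	};
 *
 *	if (ioctl(fd, DRM_IOCTL_VIRTGPU_EXECBUFFER, &eb) == 0)
 *		out_fence_fd = eb.fence_fd;
 */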

#define VIRTGPU_PARAM_3D_FEATURES 1 /* do we have 3D features in the hw */
#define VIRTGPU_PARAM_CAPSET_QUERY_FIX 2 /* do we have the capset fix */
#define VIRTGPU_PARAM_RESOURCE_BLOB 3 /* DRM_VIRTGPU_RESOURCE_CREATE_BLOB */
#define VIRTGPU_PARAM_HOST_VISIBLE 4 /* Host blob resources are mappable */
#define VIRTGPU_PARAM_CROSS_DEVICE 5 /* Cross virtio-device resource sharing */
#define VIRTGPU_PARAM_CONTEXT_INIT 6 /* DRM_VIRTGPU_CONTEXT_INIT */
#define VIRTGPU_PARAM_SUPPORTED_CAPSET_IDs 7 /* Bitmask of supported capability set ids */

struct drm_virtgpu_getparam {
	__u64 param;
	__u64 value;
};
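
/*
 * Illustrative sketch (not part of the UAPI): probing for blob resource
 * support. @value carries a user-space pointer that the kernel writes the
 * result through; using an int for the result mirrors existing users of this
 * ioctl but is an assumption, not something this header specifies. On
 * success, supports_blob is non-zero when DRM_VIRTGPU_RESOURCE_CREATE_BLOB
 * is available.
 *
 *	int supports_blob = 0;
 *	struct drm_virtgpu_getparam gp = {
 *		.param = VIRTGPU_PARAM_RESOURCE_BLOB,
 *		.value = (__u64)(uintptr_t)&supports_blob,
 *	};
 *	int ret = ioctl(fd, DRM_IOCTL_VIRTGPU_GETPARAM, &gp);
 */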

/* NO_BO flags? NO resource flag? */
/* resource flag for y_0_top */
struct drm_virtgpu_resource_create {
	__u32 target;
	__u32 format;
	__u32 bind;
	__u32 width;
	__u32 height;
	__u32 depth;
	__u32 array_size;
	__u32 last_level;
	__u32 nr_samples;
	__u32 flags;
	__u32 bo_handle; /* if this is set - recreate a new resource attached to this bo ? */
	__u32 res_handle; /* returned by kernel */
	__u32 size; /* validate transfer in the host */
	__u32 stride; /* validate transfer in the host */
};
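
/*
 * Illustrative sketch (not part of the UAPI): creating a small 2D resource.
 * The target, format and bind values follow the virgl (Gallium-derived)
 * protocol and are not defined in this header, so they are left as
 * caller-supplied variables here; the width/height/size/stride values are
 * just example numbers for a 64x64, 4-bytes-per-pixel image.
 *
 *	struct drm_virtgpu_resource_create create = {
 *		.target = target,
 *		.format = format,
 *		.bind = bind,
 *		.width = 64,
 *		.height = 64,
 *		.depth = 1,
 *		.array_size = 1,
 *		.size = 64 * 64 * 4,
 *		.stride = 64 * 4,
 *	};
 *
 *	if (ioctl(fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE, &create) == 0)
 *		res_handle = create.res_handle;
 */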

struct drm_virtgpu_resource_info {
	__u32 bo_handle;
	__u32 res_handle;
	__u32 size;
	__u32 blob_mem;
};
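
/*
 * Illustrative sketch (not part of the UAPI): looking up the host resource
 * behind a GEM handle, e.g. one imported via PRIME. Only @bo_handle is
 * filled in by the caller; the remaining fields are written by the kernel.
 *
 *	struct drm_virtgpu_resource_info info = { .bo_handle = bo_handle };
 *
 *	if (ioctl(fd, DRM_IOCTL_VIRTGPU_RESOURCE_INFO, &info) == 0)
 *		res_handle = info.res_handle;
 */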

struct drm_virtgpu_3d_box {
	__u32 x;
	__u32 y;
	__u32 z;
	__u32 w;
	__u32 h;
	__u32 d;
};

struct drm_virtgpu_3d_transfer_to_host {
	__u32 bo_handle;
	struct drm_virtgpu_3d_box box;
	__u32 level;
	__u32 offset;
	__u32 stride;
	__u32 layer_stride;
};

struct drm_virtgpu_3d_transfer_from_host {
	__u32 bo_handle;
	struct drm_virtgpu_3d_box box;
	__u32 level;
	__u32 offset;
	__u32 stride;
	__u32 layer_stride;
};
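
/*
 * Illustrative sketch (not part of the UAPI): flushing guest-side writes of
 * a 2D image to the host copy of a resource. bo_handle, width and height are
 * assumed to come from an earlier resource-create step; the box covers the
 * full image at mip level 0.
 *
 *	struct drm_virtgpu_3d_transfer_to_host xfer = {
 *		.bo_handle = bo_handle,
 *		.box = { .x = 0, .y = 0, .z = 0, .w = width, .h = height, .d = 1 },
 *		.level = 0,
 *	};
 *
 *	ioctl(fd, DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST, &xfer);
 *
 * DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST takes the same arguments for the
 * opposite direction.
 */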

#define VIRTGPU_WAIT_NOWAIT 1 /* like it */
struct drm_virtgpu_3d_wait {
	__u32 handle; /* 0 is an invalid handle */
	__u32 flags;
};
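
/*
 * Illustrative sketch (not part of the UAPI): waiting for a BO to go idle.
 * Without flags the ioctl blocks until outstanding work on the BO finishes;
 * with VIRTGPU_WAIT_NOWAIT it is expected to fail with errno set to EBUSY
 * while the BO is still busy (an assumption based on typical driver
 * behaviour). fd and bo_handle are assumed to exist.
 *
 *	struct drm_virtgpu_3d_wait wait = { .handle = bo_handle };
 *
 *	while (ioctl(fd, DRM_IOCTL_VIRTGPU_WAIT, &wait) == -1 && errno == EINTR)
 *		;
 */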

struct drm_virtgpu_get_caps {
	__u32 cap_set_id;
	__u32 cap_set_ver;
	__u64 addr;
	__u32 size;
	__u32 pad;
};
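
/*
 * Illustrative sketch (not part of the UAPI): fetching a capability set.
 * The layout and size of the capset data are defined by the capset itself
 * (e.g. the virgl caps structures), not by this header; cap_set_id 1 and
 * CAPS_BUF_SIZE are placeholder assumptions. The kernel copies at most
 * @size bytes to @addr.
 *
 *	unsigned char buf[CAPS_BUF_SIZE];
 *	struct drm_virtgpu_get_caps caps = {
 *		.cap_set_id = 1,
 *		.cap_set_ver = 0,
 *		.addr = (__u64)(uintptr_t)buf,
 *		.size = sizeof(buf),
 *	};
 *
 *	ioctl(fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &caps);
 */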

struct drm_virtgpu_resource_create_blob {
#define VIRTGPU_BLOB_MEM_GUEST             0x0001
#define VIRTGPU_BLOB_MEM_HOST3D            0x0002
#define VIRTGPU_BLOB_MEM_HOST3D_GUEST      0x0003

#define VIRTGPU_BLOB_FLAG_USE_MAPPABLE     0x0001
#define VIRTGPU_BLOB_FLAG_USE_SHAREABLE    0x0002
#define VIRTGPU_BLOB_FLAG_USE_CROSS_DEVICE 0x0004
	/* zero is invalid blob_mem */
	__u32 blob_mem;
	__u32 blob_flags;
	__u32 bo_handle;
	__u32 res_handle;
	__u64 size;

	/*
	 * for 3D contexts with VIRTGPU_BLOB_MEM_HOST3D_GUEST and
	 * VIRTGPU_BLOB_MEM_HOST3D; otherwise, must be zero.
	 */
	__u32 pad;
	__u32 cmd_size;
	__u64 cmd;
	__u64 blob_id;
};
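
/*
 * Illustrative sketch (not part of the UAPI): creating a mappable guest
 * memory blob. With VIRTGPU_BLOB_MEM_GUEST, @blob_id, @cmd and @cmd_size
 * stay zero per the comment above; @bo_handle and @res_handle are assumed
 * to be filled in by the kernel on success. The 64 KiB size is only an
 * example.
 *
 *	struct drm_virtgpu_resource_create_blob blob = {
 *		.blob_mem = VIRTGPU_BLOB_MEM_GUEST,
 *		.blob_flags = VIRTGPU_BLOB_FLAG_USE_MAPPABLE,
 *		.size = 65536,
 *	};
 *
 *	if (ioctl(fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, &blob) == 0)
 *		bo_handle = blob.bo_handle;
 */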

#define VIRTGPU_CONTEXT_PARAM_CAPSET_ID       0x0001
#define VIRTGPU_CONTEXT_PARAM_NUM_RINGS       0x0002
#define VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK 0x0003
struct drm_virtgpu_context_set_param {
	__u64 param;
	__u64 value;
};

struct drm_virtgpu_context_init {
	__u32 num_params;
	__u32 pad;

	/* pointer to drm_virtgpu_context_set_param array */
	__u64 ctx_set_params;
};
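
/*
 * Illustrative sketch (not part of the UAPI): initializing a context for a
 * given capability set before the first execbuffer. capset_id is assumed to
 * come from earlier VIRTGPU_PARAM_SUPPORTED_CAPSET_IDs / DRM_VIRTGPU_GET_CAPS
 * negotiation; two rings is just an example value.
 *
 *	struct drm_virtgpu_context_set_param ctx_params[] = {
 *		{ .param = VIRTGPU_CONTEXT_PARAM_CAPSET_ID, .value = capset_id },
 *		{ .param = VIRTGPU_CONTEXT_PARAM_NUM_RINGS, .value = 2 },
 *	};
 *	struct drm_virtgpu_context_init init = {
 *		.num_params = 2,
 *		.ctx_set_params = (__u64)(uintptr_t)ctx_params,
 *	};
 *
 *	ioctl(fd, DRM_IOCTL_VIRTGPU_CONTEXT_INIT, &init);
 */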

/*
 * Event code that's given when VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK is in
 * effect. The event size is sizeof(drm_event), since there is no additional
 * payload.
 */
#define VIRTGPU_EVENT_FENCE_SIGNALED 0x90000000
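
/*
 * Illustrative sketch (not part of the UAPI): consuming fence events when
 * VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK is in effect. Events are read from
 * the DRM fd like any other DRM event; poll()ing the fd for POLLIN first is
 * assumed, and handle_ring_fence() is a hypothetical callback.
 *
 *	struct drm_event ev;
 *
 *	if (read(fd, &ev, sizeof(ev)) == sizeof(ev) &&
 *	    ev.type == VIRTGPU_EVENT_FENCE_SIGNALED)
 *		handle_ring_fence();
 */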

#define DRM_IOCTL_VIRTGPU_MAP \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_MAP, struct drm_virtgpu_map)

#define DRM_IOCTL_VIRTGPU_EXECBUFFER \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_EXECBUFFER,\
		struct drm_virtgpu_execbuffer)

#define DRM_IOCTL_VIRTGPU_GETPARAM \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_GETPARAM,\
		struct drm_virtgpu_getparam)

#define DRM_IOCTL_VIRTGPU_RESOURCE_CREATE \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_CREATE, \
		struct drm_virtgpu_resource_create)

#define DRM_IOCTL_VIRTGPU_RESOURCE_INFO \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_INFO, \
		struct drm_virtgpu_resource_info)

#define DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_TRANSFER_FROM_HOST, \
		struct drm_virtgpu_3d_transfer_from_host)

#define DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_TRANSFER_TO_HOST, \
		struct drm_virtgpu_3d_transfer_to_host)

#define DRM_IOCTL_VIRTGPU_WAIT \
	DRM_IOW(DRM_COMMAND_BASE + DRM_VIRTGPU_WAIT, \
		struct drm_virtgpu_3d_wait)

#define DRM_IOCTL_VIRTGPU_GET_CAPS \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_GET_CAPS, \
		struct drm_virtgpu_get_caps)

#define DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_CREATE_BLOB, \
		struct drm_virtgpu_resource_create_blob)

#define DRM_IOCTL_VIRTGPU_CONTEXT_INIT \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_CONTEXT_INIT, \
		struct drm_virtgpu_context_init)

#if defined(__cplusplus)
}
#endif

#endif