ffplay_renderer.c 25 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835
  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. #define VK_NO_PROTOTYPES
  19. #define VK_ENABLE_BETA_EXTENSIONS
  20. #include "config.h"
  21. #include "ffplay_renderer.h"
  22. #if (SDL_VERSION_ATLEAST(2, 0, 6) && CONFIG_LIBPLACEBO)
  23. /* Get PL_API_VER */
  24. #include <libplacebo/config.h>
  25. #define HAVE_VULKAN_RENDERER (PL_API_VER >= 278)
  26. #else
  27. #define HAVE_VULKAN_RENDERER 0
  28. #endif
  29. #if HAVE_VULKAN_RENDERER
  30. #if defined(_WIN32) && !defined(VK_USE_PLATFORM_WIN32_KHR)
  31. #define VK_USE_PLATFORM_WIN32_KHR
  32. #endif
  33. #include <libplacebo/vulkan.h>
  34. #include <libplacebo/utils/frame_queue.h>
  35. #include <libplacebo/utils/libav.h>
  36. #include <SDL_vulkan.h>
  37. #include "libavutil/bprint.h"
  38. #endif
/*
 * Renderer vtable. The vulkan/libplacebo implementation below fills it in
 * when HAVE_VULKAN_RENDERER; otherwise vk_get_renderer() returns NULL.
 */
struct VkRenderer {
    const AVClass *class;

    // Create all rendering resources for the given SDL window.
    // dict carries the -vulkan_params options.
    int (*create)(VkRenderer *renderer, SDL_Window *window, AVDictionary *dict);

    // Return the hwdevice used for decoding; the renderer keeps ownership
    // of the returned reference.
    int (*get_hw_dev)(VkRenderer *renderer, AVBufferRef **dev);

    // Render one frame to the window.
    int (*display)(VkRenderer *renderer, AVFrame *frame);

    // Notify the renderer of a new drawable size.
    int (*resize)(VkRenderer *renderer, int width, int height);

    // Release all renderer resources. Does not free the VkRenderer
    // allocation itself.
    void (*destroy)(VkRenderer *renderer);
};
  47. #if HAVE_VULKAN_RENDERER
typedef struct RendererContext {
    VkRenderer api;

    // Can be NULL when vulkan instance is created by avutil
    pl_vk_inst placebo_instance;
    pl_vulkan placebo_vulkan;
    pl_swapchain swapchain;
    VkSurfaceKHR vk_surface;
    pl_renderer renderer;
    // Texture cache handed to pl_map_avframe_ex(); destroyed in destroy()
    pl_tex tex[4];

    pl_log vk_log;

    AVBufferRef *hw_device_ref;
    // Vulkan hwframe pool, lazily (re)created by create_hw_frame(); may be NULL
    AVBufferRef *hw_frame_ref;
    // Upload formats of hw_frame_ref (terminated by AV_PIX_FMT_NONE); may be NULL
    enum AVPixelFormat *transfer_formats;
    // Cached hwframe constraints, queried once in create_hw_frame()
    AVHWFramesConstraints *constraints;

    PFN_vkGetInstanceProcAddr get_proc_addr;
    // This field is a copy from pl_vk_inst->instance or hw_device_ref instance.
    VkInstance inst;

    // Scratch frame holding the result of map/transfer before it replaces
    // the caller's frame
    AVFrame *vk_frame;
} RendererContext;
  67. static void vk_log_cb(void *log_priv, enum pl_log_level level,
  68. const char *msg)
  69. {
  70. static const int level_map[] = {
  71. AV_LOG_QUIET,
  72. AV_LOG_FATAL,
  73. AV_LOG_ERROR,
  74. AV_LOG_WARNING,
  75. AV_LOG_INFO,
  76. AV_LOG_DEBUG,
  77. AV_LOG_TRACE,
  78. };
  79. if (level > 0 && level < FF_ARRAY_ELEMS(level_map))
  80. av_log(log_priv, level_map[level], "%s\n", msg);
  81. }
// Should keep sync with optional_device_exts inside hwcontext_vulkan.c,
// so that a libplacebo-created device enables the same extension set the
// FFmpeg vulkan hwcontext expects to be able to use.
static const char *optional_device_exts[] = {
    /* Misc or required by other extensions */
    VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME,
    VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME,
    VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
    VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME,
    VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME,
    VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME,
    VK_KHR_COOPERATIVE_MATRIX_EXTENSION_NAME,

    /* Imports/exports */
    VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,
    VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,
    VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME,
    VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,
    VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME,
#ifdef _WIN32
    VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME,
    VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME,
#endif

    /* Video encoding/decoding */
    VK_KHR_VIDEO_QUEUE_EXTENSION_NAME,
    VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME,
    VK_KHR_VIDEO_DECODE_H264_EXTENSION_NAME,
    VK_KHR_VIDEO_DECODE_H265_EXTENSION_NAME,
    "VK_MESA_video_decode_av1",
};
  109. static inline int enable_debug(const AVDictionary *opt)
  110. {
  111. AVDictionaryEntry *entry = av_dict_get(opt, "debug", NULL, 0);
  112. int debug = entry && strtol(entry->value, NULL, 10);
  113. return debug;
  114. }
  115. static void hwctx_lock_queue(void *priv, uint32_t qf, uint32_t qidx)
  116. {
  117. AVHWDeviceContext *avhwctx = priv;
  118. const AVVulkanDeviceContext *hwctx = avhwctx->hwctx;
  119. hwctx->lock_queue(avhwctx, qf, qidx);
  120. }
  121. static void hwctx_unlock_queue(void *priv, uint32_t qf, uint32_t qidx)
  122. {
  123. AVHWDeviceContext *avhwctx = priv;
  124. const AVVulkanDeviceContext *hwctx = avhwctx->hwctx;
  125. hwctx->unlock_queue(avhwctx, qf, qidx);
  126. }
  127. static int add_instance_extension(const char **ext, unsigned num_ext,
  128. const AVDictionary *opt,
  129. AVDictionary **dict)
  130. {
  131. const char *inst_ext_key = "instance_extensions";
  132. AVDictionaryEntry *entry;
  133. AVBPrint buf;
  134. char *ext_list = NULL;
  135. int ret;
  136. av_bprint_init(&buf, 0, AV_BPRINT_SIZE_AUTOMATIC);
  137. for (int i = 0; i < num_ext; i++) {
  138. if (i)
  139. av_bprintf(&buf, "+%s", ext[i]);
  140. else
  141. av_bprintf(&buf, "%s", ext[i]);
  142. }
  143. entry = av_dict_get(opt, inst_ext_key, NULL, 0);
  144. if (entry && entry->value && entry->value[0]) {
  145. if (num_ext)
  146. av_bprintf(&buf, "+");
  147. av_bprintf(&buf, "%s", entry->value);
  148. }
  149. ret = av_bprint_finalize(&buf, &ext_list);
  150. if (ret < 0)
  151. return ret;
  152. return av_dict_set(dict, inst_ext_key, ext_list, AV_DICT_DONT_STRDUP_VAL);
  153. }
  154. static int add_device_extension(const AVDictionary *opt,
  155. AVDictionary **dict)
  156. {
  157. const char *dev_ext_key = "device_extensions";
  158. AVDictionaryEntry *entry;
  159. AVBPrint buf;
  160. char *ext_list = NULL;
  161. int ret;
  162. av_bprint_init(&buf, 0, AV_BPRINT_SIZE_AUTOMATIC);
  163. av_bprintf(&buf, "%s", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
  164. for (int i = 0; i < pl_vulkan_num_recommended_extensions; i++)
  165. av_bprintf(&buf, "+%s", pl_vulkan_recommended_extensions[i]);
  166. entry = av_dict_get(opt, dev_ext_key, NULL, 0);
  167. if (entry && entry->value && entry->value[0])
  168. av_bprintf(&buf, "+%s", entry->value);
  169. ret = av_bprint_finalize(&buf, &ext_list);
  170. if (ret < 0)
  171. return ret;
  172. return av_dict_set(dict, dev_ext_key, ext_list, AV_DICT_DONT_STRDUP_VAL);
  173. }
  174. static const char *select_device(const AVDictionary *opt)
  175. {
  176. const AVDictionaryEntry *entry;
  177. entry = av_dict_get(opt, "device", NULL, 0);
  178. if (entry)
  179. return entry->value;
  180. return NULL;
  181. }
/*
 * Create the vulkan device through FFmpeg's AV_HWDEVICE_TYPE_VULKAN hwdevice
 * and import the resulting instance/device into libplacebo.
 *
 * ext/num_ext are the instance extensions SDL needs for the window surface;
 * opt carries the -vulkan_params options.
 *
 * Return 0 on success, a negative AVERROR code on failure.
 */
static int create_vk_by_hwcontext(VkRenderer *renderer,
                                  const char **ext, unsigned num_ext,
                                  const AVDictionary *opt)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWDeviceContext *dev;
    AVVulkanDeviceContext *hwctx;
    AVDictionary *dict = NULL;
    int ret;

    ret = add_instance_extension(ext, num_ext, opt, &dict);
    if (ret < 0)
        return ret;
    ret = add_device_extension(opt, &dict);
    if (ret) {
        av_dict_free(&dict);
        return ret;
    }

    ret = av_hwdevice_ctx_create(&ctx->hw_device_ref, AV_HWDEVICE_TYPE_VULKAN,
                                 select_device(opt), dict, 0);
    av_dict_free(&dict);
    if (ret < 0)
        return ret;

    dev = (AVHWDeviceContext *) ctx->hw_device_ref->data;
    hwctx = dev->hwctx;

    // There is no way to pass SDL GetInstanceProcAddr to hwdevice.
    // Check the result and return error if they don't match.
    if (hwctx->get_proc_addr != SDL_Vulkan_GetVkGetInstanceProcAddr()) {
        av_log(renderer, AV_LOG_ERROR,
               "hwdevice and SDL use different get_proc_addr. "
               "Try -vulkan_params create_by_placebo=1\n");
        return AVERROR_PATCHWELCOME;
    }

    ctx->get_proc_addr = hwctx->get_proc_addr;
    ctx->inst = hwctx->inst;

    // Mirror the hwdevice's queue configuration so libplacebo shares the
    // same queues as FFmpeg, with locking routed through the hwdevice
    // (hwctx_lock_queue/hwctx_unlock_queue).
    ctx->placebo_vulkan = pl_vulkan_import(ctx->vk_log,
        pl_vulkan_import_params(
            .instance = hwctx->inst,
            .get_proc_addr = hwctx->get_proc_addr,
            .phys_device = hwctx->phys_dev,
            .device = hwctx->act_dev,
            .extensions = hwctx->enabled_dev_extensions,
            .num_extensions = hwctx->nb_enabled_dev_extensions,
            .features = &hwctx->device_features,
            .lock_queue = hwctx_lock_queue,
            .unlock_queue = hwctx_unlock_queue,
            .queue_ctx = dev,
            .queue_graphics = {
                .index = hwctx->queue_family_index,
                .count = hwctx->nb_graphics_queues,
            },
            .queue_compute = {
                .index = hwctx->queue_family_comp_index,
                .count = hwctx->nb_comp_queues,
            },
            .queue_transfer = {
                .index = hwctx->queue_family_tx_index,
                .count = hwctx->nb_tx_queues,
            },
        ));
    if (!ctx->placebo_vulkan)
        return AVERROR_EXTERNAL;

    return 0;
}
  245. static void placebo_lock_queue(struct AVHWDeviceContext *dev_ctx,
  246. uint32_t queue_family, uint32_t index)
  247. {
  248. RendererContext *ctx = dev_ctx->user_opaque;
  249. pl_vulkan vk = ctx->placebo_vulkan;
  250. vk->lock_queue(vk, queue_family, index);
  251. }
  252. static void placebo_unlock_queue(struct AVHWDeviceContext *dev_ctx,
  253. uint32_t queue_family,
  254. uint32_t index)
  255. {
  256. RendererContext *ctx = dev_ctx->user_opaque;
  257. pl_vulkan vk = ctx->placebo_vulkan;
  258. vk->unlock_queue(vk, queue_family, index);
  259. }
  260. static int get_decode_queue(VkRenderer *renderer, int *index, int *count)
  261. {
  262. RendererContext *ctx = (RendererContext *) renderer;
  263. VkQueueFamilyProperties *queue_family_prop = NULL;
  264. uint32_t num_queue_family_prop = 0;
  265. PFN_vkGetPhysicalDeviceQueueFamilyProperties get_queue_family_prop;
  266. PFN_vkGetInstanceProcAddr get_proc_addr = ctx->get_proc_addr;
  267. *index = -1;
  268. *count = 0;
  269. get_queue_family_prop = (PFN_vkGetPhysicalDeviceQueueFamilyProperties)
  270. get_proc_addr(ctx->placebo_instance->instance,
  271. "vkGetPhysicalDeviceQueueFamilyProperties");
  272. get_queue_family_prop(ctx->placebo_vulkan->phys_device,
  273. &num_queue_family_prop, NULL);
  274. if (!num_queue_family_prop)
  275. return AVERROR_EXTERNAL;
  276. queue_family_prop = av_calloc(num_queue_family_prop,
  277. sizeof(*queue_family_prop));
  278. if (!queue_family_prop)
  279. return AVERROR(ENOMEM);
  280. get_queue_family_prop(ctx->placebo_vulkan->phys_device,
  281. &num_queue_family_prop,
  282. queue_family_prop);
  283. for (int i = 0; i < num_queue_family_prop; i++) {
  284. if (queue_family_prop[i].queueFlags & VK_QUEUE_VIDEO_DECODE_BIT_KHR) {
  285. *index = i;
  286. *count = queue_family_prop[i].queueCount;
  287. break;
  288. }
  289. }
  290. av_free(queue_family_prop);
  291. return 0;
  292. }
  293. static int create_vk_by_placebo(VkRenderer *renderer,
  294. const char **ext, unsigned num_ext,
  295. const AVDictionary *opt)
  296. {
  297. RendererContext *ctx = (RendererContext *) renderer;
  298. AVHWDeviceContext *device_ctx;
  299. AVVulkanDeviceContext *vk_dev_ctx;
  300. int decode_index;
  301. int decode_count;
  302. int ret;
  303. ctx->get_proc_addr = SDL_Vulkan_GetVkGetInstanceProcAddr();
  304. ctx->placebo_instance = pl_vk_inst_create(ctx->vk_log, pl_vk_inst_params(
  305. .get_proc_addr = ctx->get_proc_addr,
  306. .debug = enable_debug(opt),
  307. .extensions = ext,
  308. .num_extensions = num_ext
  309. ));
  310. if (!ctx->placebo_instance) {
  311. return AVERROR_EXTERNAL;
  312. }
  313. ctx->inst = ctx->placebo_instance->instance;
  314. ctx->placebo_vulkan = pl_vulkan_create(ctx->vk_log, pl_vulkan_params(
  315. .instance = ctx->placebo_instance->instance,
  316. .get_proc_addr = ctx->placebo_instance->get_proc_addr,
  317. .surface = ctx->vk_surface,
  318. .allow_software = false,
  319. .opt_extensions = optional_device_exts,
  320. .num_opt_extensions = FF_ARRAY_ELEMS(optional_device_exts),
  321. .extra_queues = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
  322. .device_name = select_device(opt),
  323. ));
  324. if (!ctx->placebo_vulkan)
  325. return AVERROR_EXTERNAL;
  326. ctx->hw_device_ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VULKAN);
  327. if (!ctx->hw_device_ref) {
  328. return AVERROR(ENOMEM);
  329. }
  330. device_ctx = (AVHWDeviceContext *) ctx->hw_device_ref->data;
  331. device_ctx->user_opaque = ctx;
  332. vk_dev_ctx = device_ctx->hwctx;
  333. vk_dev_ctx->lock_queue = placebo_lock_queue,
  334. vk_dev_ctx->unlock_queue = placebo_unlock_queue;
  335. vk_dev_ctx->get_proc_addr = ctx->placebo_instance->get_proc_addr;
  336. vk_dev_ctx->inst = ctx->placebo_instance->instance;
  337. vk_dev_ctx->phys_dev = ctx->placebo_vulkan->phys_device;
  338. vk_dev_ctx->act_dev = ctx->placebo_vulkan->device;
  339. vk_dev_ctx->device_features = *ctx->placebo_vulkan->features;
  340. vk_dev_ctx->enabled_inst_extensions = ctx->placebo_instance->extensions;
  341. vk_dev_ctx->nb_enabled_inst_extensions = ctx->placebo_instance->num_extensions;
  342. vk_dev_ctx->enabled_dev_extensions = ctx->placebo_vulkan->extensions;
  343. vk_dev_ctx->nb_enabled_dev_extensions = ctx->placebo_vulkan->num_extensions;
  344. vk_dev_ctx->queue_family_index = ctx->placebo_vulkan->queue_graphics.index;
  345. vk_dev_ctx->nb_graphics_queues = ctx->placebo_vulkan->queue_graphics.count;
  346. vk_dev_ctx->queue_family_tx_index = ctx->placebo_vulkan->queue_transfer.index;
  347. vk_dev_ctx->nb_tx_queues = ctx->placebo_vulkan->queue_transfer.count;
  348. vk_dev_ctx->queue_family_comp_index = ctx->placebo_vulkan->queue_compute.index;
  349. vk_dev_ctx->nb_comp_queues = ctx->placebo_vulkan->queue_compute.count;
  350. ret = get_decode_queue(renderer, &decode_index, &decode_count);
  351. if (ret < 0)
  352. return ret;
  353. vk_dev_ctx->queue_family_decode_index = decode_index;
  354. vk_dev_ctx->nb_decode_queues = decode_count;
  355. ret = av_hwdevice_ctx_init(ctx->hw_device_ref);
  356. if (ret < 0)
  357. return ret;
  358. return 0;
  359. }
  360. static int create(VkRenderer *renderer, SDL_Window *window, AVDictionary *opt)
  361. {
  362. int ret = 0;
  363. unsigned num_ext = 0;
  364. const char **ext = NULL;
  365. int w, h;
  366. struct pl_log_params vk_log_params = {
  367. .log_cb = vk_log_cb,
  368. .log_level = PL_LOG_DEBUG,
  369. .log_priv = renderer,
  370. };
  371. RendererContext *ctx = (RendererContext *) renderer;
  372. AVDictionaryEntry *entry;
  373. ctx->vk_log = pl_log_create(PL_API_VER, &vk_log_params);
  374. if (!SDL_Vulkan_GetInstanceExtensions(window, &num_ext, NULL)) {
  375. av_log(NULL, AV_LOG_FATAL, "Failed to get vulkan extensions: %s\n",
  376. SDL_GetError());
  377. return AVERROR_EXTERNAL;
  378. }
  379. ext = av_calloc(num_ext, sizeof(*ext));
  380. if (!ext) {
  381. ret = AVERROR(ENOMEM);
  382. goto out;
  383. }
  384. SDL_Vulkan_GetInstanceExtensions(window, &num_ext, ext);
  385. entry = av_dict_get(opt, "create_by_placebo", NULL, 0);
  386. if (entry && strtol(entry->value, NULL, 10))
  387. ret = create_vk_by_placebo(renderer, ext, num_ext, opt);
  388. else
  389. ret = create_vk_by_hwcontext(renderer, ext, num_ext, opt);
  390. if (ret < 0)
  391. goto out;
  392. if (!SDL_Vulkan_CreateSurface(window, ctx->inst, &ctx->vk_surface)) {
  393. ret = AVERROR_EXTERNAL;
  394. goto out;
  395. }
  396. ctx->swapchain = pl_vulkan_create_swapchain(
  397. ctx->placebo_vulkan,
  398. pl_vulkan_swapchain_params(
  399. .surface = ctx->vk_surface,
  400. .present_mode = VK_PRESENT_MODE_FIFO_KHR));
  401. if (!ctx->swapchain) {
  402. ret = AVERROR_EXTERNAL;
  403. goto out;
  404. }
  405. SDL_Vulkan_GetDrawableSize(window, &w, &h);
  406. pl_swapchain_resize(ctx->swapchain, &w, &h);
  407. ctx->renderer = pl_renderer_create(ctx->vk_log, ctx->placebo_vulkan->gpu);
  408. if (!ctx->renderer) {
  409. ret = AVERROR_EXTERNAL;
  410. goto out;
  411. }
  412. ctx->vk_frame = av_frame_alloc();
  413. if (!ctx->vk_frame) {
  414. ret = AVERROR(ENOMEM);
  415. goto out;
  416. }
  417. ret = 0;
  418. out:
  419. av_free(ext);
  420. return ret;
  421. }
  422. static int get_hw_dev(VkRenderer *renderer, AVBufferRef **dev)
  423. {
  424. RendererContext *ctx = (RendererContext *) renderer;
  425. *dev = ctx->hw_device_ref;
  426. return 0;
  427. }
/*
 * (Re)create ctx->hw_frame_ref, a vulkan hwframe pool matching the incoming
 * frame's dimensions and software format. The pool is used to map or upload
 * non-vulkan hw frames on the GPU.
 *
 * Leaving hw_frame_ref NULL is deliberately not an error: when the device
 * constraints rule the size/format out, the caller falls back to a copy
 * through system memory.
 *
 * Return 0 on success (including the skip cases), negative AVERROR on
 * real errors.
 */
static int create_hw_frame(VkRenderer *renderer, AVFrame *frame)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWFramesContext *src_hw_frame = (AVHWFramesContext *)
        frame->hw_frames_ctx->data;
    AVHWFramesContext *hw_frame;
    AVVulkanFramesContext *vk_frame_ctx;
    int ret;

    // Reuse the existing pool while geometry and sw format still match.
    if (ctx->hw_frame_ref) {
        hw_frame = (AVHWFramesContext *) ctx->hw_frame_ref->data;

        if (hw_frame->width == frame->width &&
            hw_frame->height == frame->height &&
            hw_frame->sw_format == src_hw_frame->sw_format)
            return 0;

        av_buffer_unref(&ctx->hw_frame_ref);
    }

    // Constraints are device-wide; query them once and cache.
    if (!ctx->constraints) {
        ctx->constraints = av_hwdevice_get_hwframe_constraints(
            ctx->hw_device_ref, NULL);
        if (!ctx->constraints)
            return AVERROR(ENOMEM);
    }

    // Check constraints and skip create hwframe. Don't take it as error since
    // we can fallback to memory copy from GPU to CPU.
    if ((ctx->constraints->max_width &&
         ctx->constraints->max_width < frame->width) ||
        (ctx->constraints->max_height &&
         ctx->constraints->max_height < frame->height) ||
        (ctx->constraints->min_width &&
         ctx->constraints->min_width > frame->width) ||
        (ctx->constraints->min_height &&
         ctx->constraints->min_height > frame->height))
        return 0;

    // Likewise skip silently when the sw format isn't supported by vulkan.
    if (ctx->constraints->valid_sw_formats) {
        enum AVPixelFormat *sw_formats = ctx->constraints->valid_sw_formats;
        while (*sw_formats != AV_PIX_FMT_NONE) {
            if (*sw_formats == src_hw_frame->sw_format)
                break;
            sw_formats++;
        }
        if (*sw_formats == AV_PIX_FMT_NONE)
            return 0;
    }

    ctx->hw_frame_ref = av_hwframe_ctx_alloc(ctx->hw_device_ref);
    if (!ctx->hw_frame_ref)
        return AVERROR(ENOMEM);

    hw_frame = (AVHWFramesContext *) ctx->hw_frame_ref->data;
    hw_frame->format = AV_PIX_FMT_VULKAN;
    hw_frame->sw_format = src_hw_frame->sw_format;
    hw_frame->width = frame->width;
    hw_frame->height = frame->height;

    // NOTE(review): presumably CUDA interop requires per-plane images —
    // confirm against hwcontext_vulkan documentation.
    if (frame->format == AV_PIX_FMT_CUDA) {
        vk_frame_ctx = hw_frame->hwctx;
        vk_frame_ctx->flags = AV_VK_FRAME_FLAG_DISABLE_MULTIPLANE;
    }

    ret = av_hwframe_ctx_init(ctx->hw_frame_ref);
    if (ret < 0) {
        av_log(renderer, AV_LOG_ERROR, "Create hwframe context failed, %s\n",
               av_err2str(ret));
        return ret;
    }

    // Best-effort: on failure transfer_formats stays NULL and
    // check_hw_transfer() simply reports no hw-transfer support.
    av_hwframe_transfer_get_formats(ctx->hw_frame_ref,
                                    AV_HWFRAME_TRANSFER_DIRECTION_TO,
                                    &ctx->transfer_formats, 0);

    return 0;
}
  494. static inline int check_hw_transfer(RendererContext *ctx, AVFrame *frame)
  495. {
  496. if (!ctx->hw_frame_ref || !ctx->transfer_formats)
  497. return 0;
  498. for (int i = 0; ctx->transfer_formats[i] != AV_PIX_FMT_NONE; i++)
  499. if (ctx->transfer_formats[i] == frame->format)
  500. return 1;
  501. return 0;
  502. }
  503. static inline int move_to_output_frame(RendererContext *ctx, AVFrame *frame)
  504. {
  505. int ret = av_frame_copy_props(ctx->vk_frame, frame);
  506. if (ret < 0)
  507. return ret;
  508. av_frame_unref(frame);
  509. av_frame_move_ref(frame, ctx->vk_frame);
  510. return 0;
  511. }
  512. static int map_frame(VkRenderer *renderer, AVFrame *frame, int use_hw_frame)
  513. {
  514. RendererContext *ctx = (RendererContext *) renderer;
  515. int ret;
  516. if (use_hw_frame && !ctx->hw_frame_ref)
  517. return AVERROR(ENOSYS);
  518. // Try map data first
  519. av_frame_unref(ctx->vk_frame);
  520. if (use_hw_frame) {
  521. ctx->vk_frame->hw_frames_ctx = av_buffer_ref(ctx->hw_frame_ref);
  522. ctx->vk_frame->format = AV_PIX_FMT_VULKAN;
  523. }
  524. ret = av_hwframe_map(ctx->vk_frame, frame, 0);
  525. if (!ret)
  526. return move_to_output_frame(ctx, frame);
  527. if (ret != AVERROR(ENOSYS))
  528. av_log(NULL, AV_LOG_FATAL, "Map frame failed: %s\n", av_err2str(ret));
  529. return ret;
  530. }
  531. static int transfer_frame(VkRenderer *renderer, AVFrame *frame, int use_hw_frame)
  532. {
  533. RendererContext *ctx = (RendererContext *) renderer;
  534. int ret;
  535. if (use_hw_frame && !check_hw_transfer(ctx, frame))
  536. return AVERROR(ENOSYS);
  537. av_frame_unref(ctx->vk_frame);
  538. if (use_hw_frame)
  539. av_hwframe_get_buffer(ctx->hw_frame_ref, ctx->vk_frame, 0);
  540. ret = av_hwframe_transfer_data(ctx->vk_frame, frame, 1);
  541. if (!ret)
  542. return move_to_output_frame(ctx, frame);
  543. if (ret != AVERROR(ENOSYS))
  544. av_log(NULL, AV_LOG_FATAL, "Transfer frame failed: %s\n",
  545. av_err2str(ret));
  546. return ret;
  547. }
  548. static int convert_frame(VkRenderer *renderer, AVFrame *frame)
  549. {
  550. int ret;
  551. if (!frame->hw_frames_ctx)
  552. return 0;
  553. if (frame->format == AV_PIX_FMT_VULKAN)
  554. return 0;
  555. ret = create_hw_frame(renderer, frame);
  556. if (ret < 0)
  557. return ret;
  558. for (int use_hw = 1; use_hw >=0; use_hw--) {
  559. ret = map_frame(renderer, frame, use_hw);
  560. if (!ret)
  561. return 0;
  562. if (ret != AVERROR(ENOSYS))
  563. return ret;
  564. ret = transfer_frame(renderer, frame, use_hw);
  565. if (!ret)
  566. return 0;
  567. if (ret != AVERROR(ENOSYS))
  568. return ret;
  569. }
  570. return ret;
  571. }
/*
 * Render one frame: convert it to a mappable format if needed, map it into
 * a pl_frame, render into the next swapchain image and present it.
 *
 * Return 0 on success, a negative AVERROR code on failure.
 */
static int display(VkRenderer *renderer, AVFrame *frame)
{
    struct pl_swapchain_frame swap_frame = {0};
    struct pl_frame pl_frame = {0};
    struct pl_frame target = {0};
    RendererContext *ctx = (RendererContext *) renderer;
    int ret = 0;

    ret = convert_frame(renderer, frame);
    if (ret < 0)
        return ret;

    if (!pl_map_avframe_ex(ctx->placebo_vulkan->gpu, &pl_frame, pl_avframe_params(
            .frame = frame,
            .tex = ctx->tex))) {
        av_log(NULL, AV_LOG_ERROR, "pl_map_avframe_ex failed\n");
        return AVERROR_EXTERNAL;
    }

    if (!pl_swapchain_start_frame(ctx->swapchain, &swap_frame)) {
        av_log(NULL, AV_LOG_ERROR, "start frame failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    pl_frame_from_swapchain(&target, &swap_frame);
    if (!pl_render_image(ctx->renderer, &pl_frame, &target,
                         &pl_render_default_params)) {
        av_log(NULL, AV_LOG_ERROR, "pl_render_image failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    if (!pl_swapchain_submit_frame(ctx->swapchain)) {
        av_log(NULL, AV_LOG_ERROR, "pl_swapchain_submit_frame failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    pl_swapchain_swap_buffers(ctx->swapchain);

out:
    // The source must stay mapped for the whole render; unmap on all paths.
    pl_unmap_avframe(ctx->placebo_vulkan->gpu, &pl_frame);
    return ret;
}
  610. static int resize(VkRenderer *renderer, int width, int height)
  611. {
  612. RendererContext *ctx = (RendererContext *) renderer;
  613. if (!pl_swapchain_resize(ctx->swapchain, &width, &height))
  614. return AVERROR_EXTERNAL;
  615. return 0;
  616. }
/*
 * Release everything in reverse creation order: frames and pools first,
 * then the libplacebo objects, then the surface (which needs a live
 * instance to destroy), and finally the instance and log.
 */
static void destroy(VkRenderer *renderer)
{
    RendererContext *ctx = (RendererContext *) renderer;
    PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;

    av_frame_free(&ctx->vk_frame);
    av_freep(&ctx->transfer_formats);
    av_hwframe_constraints_free(&ctx->constraints);
    av_buffer_unref(&ctx->hw_frame_ref);

    if (ctx->placebo_vulkan) {
        // Drop the texture cache before tearing the vulkan context down.
        for (int i = 0; i < FF_ARRAY_ELEMS(ctx->tex); i++)
            pl_tex_destroy(ctx->placebo_vulkan->gpu, &ctx->tex[i]);
        pl_renderer_destroy(&ctx->renderer);
        pl_swapchain_destroy(&ctx->swapchain);
        pl_vulkan_destroy(&ctx->placebo_vulkan);
    }

    if (ctx->vk_surface) {
        // The surface was created through SDL, so destroy it manually via
        // the instance's vkDestroySurfaceKHR.
        vkDestroySurfaceKHR = (PFN_vkDestroySurfaceKHR)
            ctx->get_proc_addr(ctx->inst, "vkDestroySurfaceKHR");
        vkDestroySurfaceKHR(ctx->inst, ctx->vk_surface, NULL);
        ctx->vk_surface = VK_NULL_HANDLE;
    }

    av_buffer_unref(&ctx->hw_device_ref);
    pl_vk_inst_destroy(&ctx->placebo_instance);
    pl_log_destroy(&ctx->vk_log);
}
// Minimal AVClass so av_log() calls that pass the renderer as context
// print a recognizable name.
static const AVClass vulkan_renderer_class = {
    .class_name = "Vulkan Renderer",
    .item_name = av_default_item_name,
    .version = LIBAVUTIL_VERSION_INT,
};
  647. VkRenderer *vk_get_renderer(void)
  648. {
  649. RendererContext *ctx = av_mallocz(sizeof(*ctx));
  650. VkRenderer *renderer;
  651. if (!ctx)
  652. return NULL;
  653. renderer = &ctx->api;
  654. renderer->class = &vulkan_renderer_class;
  655. renderer->get_hw_dev = get_hw_dev;
  656. renderer->create = create;
  657. renderer->display = display;
  658. renderer->resize = resize;
  659. renderer->destroy = destroy;
  660. return renderer;
  661. }
  662. #else
// Vulkan renderer not available in this build (no SDL>=2.0.6 or no
// libplacebo); callers must handle NULL.
VkRenderer *vk_get_renderer(void)
{
    return NULL;
}
  667. #endif
// Public wrapper: dispatch to the renderer's create callback.
int vk_renderer_create(VkRenderer *renderer, SDL_Window *window,
                       AVDictionary *opt)
{
    return renderer->create(renderer, window, opt);
}
// Public wrapper: fetch the renderer's hwdevice (ownership stays with it).
int vk_renderer_get_hw_dev(VkRenderer *renderer, AVBufferRef **dev)
{
    return renderer->get_hw_dev(renderer, dev);
}
// Public wrapper: render one frame.
int vk_renderer_display(VkRenderer *renderer, AVFrame *frame)
{
    return renderer->display(renderer, frame);
}
// Public wrapper: propagate a drawable-size change.
int vk_renderer_resize(VkRenderer *renderer, int width, int height)
{
    return renderer->resize(renderer, width, height);
}
// Public wrapper: release renderer resources (not the VkRenderer itself).
void vk_renderer_destroy(VkRenderer *renderer)
{
    renderer->destroy(renderer);
}