/* ffplay_renderer.c — Vulkan (libplacebo) renderer backend for ffplay */
  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. #define VK_NO_PROTOTYPES
  19. #define VK_ENABLE_BETA_EXTENSIONS
  20. #include "config.h"
  21. #include "ffplay_renderer.h"
  22. #if (SDL_VERSION_ATLEAST(2, 0, 6) && CONFIG_LIBPLACEBO)
  23. /* Get PL_API_VER */
  24. #include <libplacebo/config.h>
  25. #define HAVE_VULKAN_RENDERER (PL_API_VER >= 278)
  26. #else
  27. #define HAVE_VULKAN_RENDERER 0
  28. #endif
  29. #if HAVE_VULKAN_RENDERER
  30. #if defined(_WIN32) && !defined(VK_USE_PLATFORM_WIN32_KHR)
  31. #define VK_USE_PLATFORM_WIN32_KHR
  32. #endif
  33. #include <libplacebo/vulkan.h>
  34. #include <libplacebo/utils/frame_queue.h>
  35. #include <libplacebo/utils/libav.h>
  36. #include <SDL_vulkan.h>
  37. #include "libavutil/bprint.h"
  38. #include "libavutil/mem.h"
  39. #endif
/**
 * Abstract renderer interface. The libplacebo-backed implementation below is
 * selected when HAVE_VULKAN_RENDERER; otherwise vk_get_renderer() returns NULL.
 */
struct VkRenderer {
    const AVClass *class;

    /* Initialize the renderer for @window; @dict carries user options. */
    int (*create)(VkRenderer *renderer, SDL_Window *window, AVDictionary *dict);

    /* Return the hwdevice used for decoding (not a new reference). */
    int (*get_hw_dev)(VkRenderer *renderer, AVBufferRef **dev);

    /* Render and present one frame. */
    int (*display)(VkRenderer *renderer, AVFrame *frame);

    /* Resize the presentation target to the new drawable size. */
    int (*resize)(VkRenderer *renderer, int width, int height);

    /* Release all internal resources (does not free the renderer struct). */
    void (*destroy)(VkRenderer *renderer);
};
  48. #if HAVE_VULKAN_RENDERER
typedef struct RendererContext {
    VkRenderer api;

    // Can be NULL when vulkan instance is created by avutil
    pl_vk_inst placebo_instance;
    pl_vulkan placebo_vulkan;
    pl_swapchain swapchain;
    VkSurfaceKHR vk_surface;
    pl_renderer renderer;
    pl_tex tex[4];            // per-plane textures cached by pl_map_avframe_ex()

    pl_log vk_log;            // libplacebo log, routed to av_log via vk_log_cb()

    AVBufferRef *hw_device_ref;
    AVBufferRef *hw_frame_ref;            // lazily (re)created to match the source frame
    enum AVPixelFormat *transfer_formats; // formats uploadable into hw_frame_ref
    AVHWFramesConstraints *constraints;

    PFN_vkGetInstanceProcAddr get_proc_addr;
    // This field is a copy from pl_vk_inst->instance or hw_device_ref instance.
    VkInstance inst;

    AVFrame *vk_frame;        // scratch frame holding the mapped/uploaded Vulkan image
} RendererContext;
  68. static void vk_log_cb(void *log_priv, enum pl_log_level level,
  69. const char *msg)
  70. {
  71. static const int level_map[] = {
  72. AV_LOG_QUIET,
  73. AV_LOG_FATAL,
  74. AV_LOG_ERROR,
  75. AV_LOG_WARNING,
  76. AV_LOG_INFO,
  77. AV_LOG_DEBUG,
  78. AV_LOG_TRACE,
  79. };
  80. if (level > 0 && level < FF_ARRAY_ELEMS(level_map))
  81. av_log(log_priv, level_map[level], "%s\n", msg);
  82. }
  83. // Should keep sync with optional_device_exts inside hwcontext_vulkan.c
// Should keep sync with optional_device_exts inside hwcontext_vulkan.c.
// Passed as .opt_extensions to pl_vulkan_create() in create_vk_by_placebo().
static const char *optional_device_exts[] = {
    /* Misc or required by other extensions */
    VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME,
    VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME,
    VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
    VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME,
    VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME,
    VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME,
    VK_KHR_COOPERATIVE_MATRIX_EXTENSION_NAME,

    /* Imports/exports */
    VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,
    VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,
    VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME,
    VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,
    VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME,
#ifdef _WIN32
    VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME,
    VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME,
#endif

    /* Video encoding/decoding */
    VK_KHR_VIDEO_QUEUE_EXTENSION_NAME,
    VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME,
    VK_KHR_VIDEO_DECODE_H264_EXTENSION_NAME,
    VK_KHR_VIDEO_DECODE_H265_EXTENSION_NAME,
    "VK_MESA_video_decode_av1",   // not yet a ratified KHR extension name
};
  110. static inline int enable_debug(const AVDictionary *opt)
  111. {
  112. AVDictionaryEntry *entry = av_dict_get(opt, "debug", NULL, 0);
  113. int debug = entry && strtol(entry->value, NULL, 10);
  114. return debug;
  115. }
  116. static void hwctx_lock_queue(void *priv, uint32_t qf, uint32_t qidx)
  117. {
  118. AVHWDeviceContext *avhwctx = priv;
  119. const AVVulkanDeviceContext *hwctx = avhwctx->hwctx;
  120. hwctx->lock_queue(avhwctx, qf, qidx);
  121. }
  122. static void hwctx_unlock_queue(void *priv, uint32_t qf, uint32_t qidx)
  123. {
  124. AVHWDeviceContext *avhwctx = priv;
  125. const AVVulkanDeviceContext *hwctx = avhwctx->hwctx;
  126. hwctx->unlock_queue(avhwctx, qf, qidx);
  127. }
  128. static int add_instance_extension(const char **ext, unsigned num_ext,
  129. const AVDictionary *opt,
  130. AVDictionary **dict)
  131. {
  132. const char *inst_ext_key = "instance_extensions";
  133. AVDictionaryEntry *entry;
  134. AVBPrint buf;
  135. char *ext_list = NULL;
  136. int ret;
  137. av_bprint_init(&buf, 0, AV_BPRINT_SIZE_AUTOMATIC);
  138. for (int i = 0; i < num_ext; i++) {
  139. if (i)
  140. av_bprintf(&buf, "+%s", ext[i]);
  141. else
  142. av_bprintf(&buf, "%s", ext[i]);
  143. }
  144. entry = av_dict_get(opt, inst_ext_key, NULL, 0);
  145. if (entry && entry->value && entry->value[0]) {
  146. if (num_ext)
  147. av_bprintf(&buf, "+");
  148. av_bprintf(&buf, "%s", entry->value);
  149. }
  150. ret = av_bprint_finalize(&buf, &ext_list);
  151. if (ret < 0)
  152. return ret;
  153. return av_dict_set(dict, inst_ext_key, ext_list, AV_DICT_DONT_STRDUP_VAL);
  154. }
  155. static int add_device_extension(const AVDictionary *opt,
  156. AVDictionary **dict)
  157. {
  158. const char *dev_ext_key = "device_extensions";
  159. AVDictionaryEntry *entry;
  160. AVBPrint buf;
  161. char *ext_list = NULL;
  162. int ret;
  163. av_bprint_init(&buf, 0, AV_BPRINT_SIZE_AUTOMATIC);
  164. av_bprintf(&buf, "%s", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
  165. for (int i = 0; i < pl_vulkan_num_recommended_extensions; i++)
  166. av_bprintf(&buf, "+%s", pl_vulkan_recommended_extensions[i]);
  167. entry = av_dict_get(opt, dev_ext_key, NULL, 0);
  168. if (entry && entry->value && entry->value[0])
  169. av_bprintf(&buf, "+%s", entry->value);
  170. ret = av_bprint_finalize(&buf, &ext_list);
  171. if (ret < 0)
  172. return ret;
  173. return av_dict_set(dict, dev_ext_key, ext_list, AV_DICT_DONT_STRDUP_VAL);
  174. }
  175. static const char *select_device(const AVDictionary *opt)
  176. {
  177. const AVDictionaryEntry *entry;
  178. entry = av_dict_get(opt, "device", NULL, 0);
  179. if (entry)
  180. return entry->value;
  181. return NULL;
  182. }
/**
 * Create the Vulkan device through avutil's hwcontext, then import it into
 * libplacebo with pl_vulkan_import().  Fails with AVERROR_PATCHWELCOME when
 * SDL and the hwdevice resolve Vulkan entry points through different
 * loaders, since both must share one instance.
 */
static int create_vk_by_hwcontext(VkRenderer *renderer,
                                  const char **ext, unsigned num_ext,
                                  const AVDictionary *opt)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWDeviceContext *dev;
    AVVulkanDeviceContext *hwctx;
    AVDictionary *dict = NULL;
    int ret;

    ret = add_instance_extension(ext, num_ext, opt, &dict);
    if (ret < 0)
        return ret;
    ret = add_device_extension(opt, &dict);
    if (ret) {
        av_dict_free(&dict);
        return ret;
    }

    ret = av_hwdevice_ctx_create(&ctx->hw_device_ref, AV_HWDEVICE_TYPE_VULKAN,
                                 select_device(opt), dict, 0);
    av_dict_free(&dict);
    if (ret < 0)
        return ret;

    dev = (AVHWDeviceContext *) ctx->hw_device_ref->data;
    hwctx = dev->hwctx;

    // There is no way to pass SDL GetInstanceProcAddr to hwdevice.
    // Check the result and return error if they don't match.
    if (hwctx->get_proc_addr != SDL_Vulkan_GetVkGetInstanceProcAddr()) {
        av_log(renderer, AV_LOG_ERROR,
               "hwdevice and SDL use different get_proc_addr. "
               "Try -vulkan_params create_by_placebo=1\n");
        return AVERROR_PATCHWELCOME;
    }

    ctx->get_proc_addr = hwctx->get_proc_addr;
    ctx->inst = hwctx->inst;

    // Mirror the hwdevice's instance, device, extensions, features and
    // queue layout into libplacebo; queue locking is delegated back to the
    // hwcontext through the hwctx_(un)lock_queue bridges.
    ctx->placebo_vulkan = pl_vulkan_import(ctx->vk_log,
        pl_vulkan_import_params(
            .instance       = hwctx->inst,
            .get_proc_addr  = hwctx->get_proc_addr,
            .phys_device    = hwctx->phys_dev,
            .device         = hwctx->act_dev,
            .extensions     = hwctx->enabled_dev_extensions,
            .num_extensions = hwctx->nb_enabled_dev_extensions,
            .features       = &hwctx->device_features,
            .lock_queue     = hwctx_lock_queue,
            .unlock_queue   = hwctx_unlock_queue,
            .queue_ctx      = dev,
            .queue_graphics = {
                .index = hwctx->queue_family_index,
                .count = hwctx->nb_graphics_queues,
            },
            .queue_compute = {
                .index = hwctx->queue_family_comp_index,
                .count = hwctx->nb_comp_queues,
            },
            .queue_transfer = {
                .index = hwctx->queue_family_tx_index,
                .count = hwctx->nb_tx_queues,
            },
        ));
    if (!ctx->placebo_vulkan)
        return AVERROR_EXTERNAL;

    return 0;
}
  246. static void placebo_lock_queue(struct AVHWDeviceContext *dev_ctx,
  247. uint32_t queue_family, uint32_t index)
  248. {
  249. RendererContext *ctx = dev_ctx->user_opaque;
  250. pl_vulkan vk = ctx->placebo_vulkan;
  251. vk->lock_queue(vk, queue_family, index);
  252. }
  253. static void placebo_unlock_queue(struct AVHWDeviceContext *dev_ctx,
  254. uint32_t queue_family,
  255. uint32_t index)
  256. {
  257. RendererContext *ctx = dev_ctx->user_opaque;
  258. pl_vulkan vk = ctx->placebo_vulkan;
  259. vk->unlock_queue(vk, queue_family, index);
  260. }
  261. static int get_decode_queue(VkRenderer *renderer, int *index, int *count)
  262. {
  263. RendererContext *ctx = (RendererContext *) renderer;
  264. VkQueueFamilyProperties *queue_family_prop = NULL;
  265. uint32_t num_queue_family_prop = 0;
  266. PFN_vkGetPhysicalDeviceQueueFamilyProperties get_queue_family_prop;
  267. PFN_vkGetInstanceProcAddr get_proc_addr = ctx->get_proc_addr;
  268. *index = -1;
  269. *count = 0;
  270. get_queue_family_prop = (PFN_vkGetPhysicalDeviceQueueFamilyProperties)
  271. get_proc_addr(ctx->placebo_instance->instance,
  272. "vkGetPhysicalDeviceQueueFamilyProperties");
  273. get_queue_family_prop(ctx->placebo_vulkan->phys_device,
  274. &num_queue_family_prop, NULL);
  275. if (!num_queue_family_prop)
  276. return AVERROR_EXTERNAL;
  277. queue_family_prop = av_calloc(num_queue_family_prop,
  278. sizeof(*queue_family_prop));
  279. if (!queue_family_prop)
  280. return AVERROR(ENOMEM);
  281. get_queue_family_prop(ctx->placebo_vulkan->phys_device,
  282. &num_queue_family_prop,
  283. queue_family_prop);
  284. for (int i = 0; i < num_queue_family_prop; i++) {
  285. if (queue_family_prop[i].queueFlags & VK_QUEUE_VIDEO_DECODE_BIT_KHR) {
  286. *index = i;
  287. *count = queue_family_prop[i].queueCount;
  288. break;
  289. }
  290. }
  291. av_free(queue_family_prop);
  292. return 0;
  293. }
  294. static int create_vk_by_placebo(VkRenderer *renderer,
  295. const char **ext, unsigned num_ext,
  296. const AVDictionary *opt)
  297. {
  298. RendererContext *ctx = (RendererContext *) renderer;
  299. AVHWDeviceContext *device_ctx;
  300. AVVulkanDeviceContext *vk_dev_ctx;
  301. int decode_index;
  302. int decode_count;
  303. int ret;
  304. ctx->get_proc_addr = SDL_Vulkan_GetVkGetInstanceProcAddr();
  305. ctx->placebo_instance = pl_vk_inst_create(ctx->vk_log, pl_vk_inst_params(
  306. .get_proc_addr = ctx->get_proc_addr,
  307. .debug = enable_debug(opt),
  308. .extensions = ext,
  309. .num_extensions = num_ext
  310. ));
  311. if (!ctx->placebo_instance) {
  312. return AVERROR_EXTERNAL;
  313. }
  314. ctx->inst = ctx->placebo_instance->instance;
  315. ctx->placebo_vulkan = pl_vulkan_create(ctx->vk_log, pl_vulkan_params(
  316. .instance = ctx->placebo_instance->instance,
  317. .get_proc_addr = ctx->placebo_instance->get_proc_addr,
  318. .surface = ctx->vk_surface,
  319. .allow_software = false,
  320. .opt_extensions = optional_device_exts,
  321. .num_opt_extensions = FF_ARRAY_ELEMS(optional_device_exts),
  322. .extra_queues = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
  323. .device_name = select_device(opt),
  324. ));
  325. if (!ctx->placebo_vulkan)
  326. return AVERROR_EXTERNAL;
  327. ctx->hw_device_ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VULKAN);
  328. if (!ctx->hw_device_ref) {
  329. return AVERROR(ENOMEM);
  330. }
  331. device_ctx = (AVHWDeviceContext *) ctx->hw_device_ref->data;
  332. device_ctx->user_opaque = ctx;
  333. vk_dev_ctx = device_ctx->hwctx;
  334. vk_dev_ctx->lock_queue = placebo_lock_queue,
  335. vk_dev_ctx->unlock_queue = placebo_unlock_queue;
  336. vk_dev_ctx->get_proc_addr = ctx->placebo_instance->get_proc_addr;
  337. vk_dev_ctx->inst = ctx->placebo_instance->instance;
  338. vk_dev_ctx->phys_dev = ctx->placebo_vulkan->phys_device;
  339. vk_dev_ctx->act_dev = ctx->placebo_vulkan->device;
  340. vk_dev_ctx->device_features = *ctx->placebo_vulkan->features;
  341. vk_dev_ctx->enabled_inst_extensions = ctx->placebo_instance->extensions;
  342. vk_dev_ctx->nb_enabled_inst_extensions = ctx->placebo_instance->num_extensions;
  343. vk_dev_ctx->enabled_dev_extensions = ctx->placebo_vulkan->extensions;
  344. vk_dev_ctx->nb_enabled_dev_extensions = ctx->placebo_vulkan->num_extensions;
  345. vk_dev_ctx->queue_family_index = ctx->placebo_vulkan->queue_graphics.index;
  346. vk_dev_ctx->nb_graphics_queues = ctx->placebo_vulkan->queue_graphics.count;
  347. vk_dev_ctx->queue_family_tx_index = ctx->placebo_vulkan->queue_transfer.index;
  348. vk_dev_ctx->nb_tx_queues = ctx->placebo_vulkan->queue_transfer.count;
  349. vk_dev_ctx->queue_family_comp_index = ctx->placebo_vulkan->queue_compute.index;
  350. vk_dev_ctx->nb_comp_queues = ctx->placebo_vulkan->queue_compute.count;
  351. ret = get_decode_queue(renderer, &decode_index, &decode_count);
  352. if (ret < 0)
  353. return ret;
  354. vk_dev_ctx->queue_family_decode_index = decode_index;
  355. vk_dev_ctx->nb_decode_queues = decode_count;
  356. ret = av_hwdevice_ctx_init(ctx->hw_device_ref);
  357. if (ret < 0)
  358. return ret;
  359. return 0;
  360. }
  361. static int create(VkRenderer *renderer, SDL_Window *window, AVDictionary *opt)
  362. {
  363. int ret = 0;
  364. unsigned num_ext = 0;
  365. const char **ext = NULL;
  366. int w, h;
  367. struct pl_log_params vk_log_params = {
  368. .log_cb = vk_log_cb,
  369. .log_level = PL_LOG_DEBUG,
  370. .log_priv = renderer,
  371. };
  372. RendererContext *ctx = (RendererContext *) renderer;
  373. AVDictionaryEntry *entry;
  374. ctx->vk_log = pl_log_create(PL_API_VER, &vk_log_params);
  375. if (!SDL_Vulkan_GetInstanceExtensions(window, &num_ext, NULL)) {
  376. av_log(NULL, AV_LOG_FATAL, "Failed to get vulkan extensions: %s\n",
  377. SDL_GetError());
  378. return AVERROR_EXTERNAL;
  379. }
  380. ext = av_calloc(num_ext, sizeof(*ext));
  381. if (!ext) {
  382. ret = AVERROR(ENOMEM);
  383. goto out;
  384. }
  385. SDL_Vulkan_GetInstanceExtensions(window, &num_ext, ext);
  386. entry = av_dict_get(opt, "create_by_placebo", NULL, 0);
  387. if (entry && strtol(entry->value, NULL, 10))
  388. ret = create_vk_by_placebo(renderer, ext, num_ext, opt);
  389. else
  390. ret = create_vk_by_hwcontext(renderer, ext, num_ext, opt);
  391. if (ret < 0)
  392. goto out;
  393. if (!SDL_Vulkan_CreateSurface(window, ctx->inst, &ctx->vk_surface)) {
  394. ret = AVERROR_EXTERNAL;
  395. goto out;
  396. }
  397. ctx->swapchain = pl_vulkan_create_swapchain(
  398. ctx->placebo_vulkan,
  399. pl_vulkan_swapchain_params(
  400. .surface = ctx->vk_surface,
  401. .present_mode = VK_PRESENT_MODE_FIFO_KHR));
  402. if (!ctx->swapchain) {
  403. ret = AVERROR_EXTERNAL;
  404. goto out;
  405. }
  406. SDL_Vulkan_GetDrawableSize(window, &w, &h);
  407. pl_swapchain_resize(ctx->swapchain, &w, &h);
  408. ctx->renderer = pl_renderer_create(ctx->vk_log, ctx->placebo_vulkan->gpu);
  409. if (!ctx->renderer) {
  410. ret = AVERROR_EXTERNAL;
  411. goto out;
  412. }
  413. ctx->vk_frame = av_frame_alloc();
  414. if (!ctx->vk_frame) {
  415. ret = AVERROR(ENOMEM);
  416. goto out;
  417. }
  418. ret = 0;
  419. out:
  420. av_free(ext);
  421. return ret;
  422. }
  423. static int get_hw_dev(VkRenderer *renderer, AVBufferRef **dev)
  424. {
  425. RendererContext *ctx = (RendererContext *) renderer;
  426. *dev = ctx->hw_device_ref;
  427. return 0;
  428. }
/**
 * Lazily (re)create ctx->hw_frame_ref to match @frame's size and software
 * format, and refresh ctx->transfer_formats for it.  Skipping creation is
 * not an error: when the device constraints rule the frame out, rendering
 * falls back to a GPU->CPU copy path (see convert_frame()).
 */
static int create_hw_frame(VkRenderer *renderer, AVFrame *frame)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWFramesContext *src_hw_frame = (AVHWFramesContext *)
        frame->hw_frames_ctx->data;
    AVHWFramesContext *hw_frame;
    AVVulkanFramesContext *vk_frame_ctx;
    int ret;

    // Reuse the existing frames context when it already matches.
    if (ctx->hw_frame_ref) {
        hw_frame = (AVHWFramesContext *) ctx->hw_frame_ref->data;

        if (hw_frame->width == frame->width &&
            hw_frame->height == frame->height &&
            hw_frame->sw_format == src_hw_frame->sw_format)
            return 0;

        av_buffer_unref(&ctx->hw_frame_ref);
    }

    // Constraints are queried once and cached for the renderer's lifetime.
    if (!ctx->constraints) {
        ctx->constraints = av_hwdevice_get_hwframe_constraints(
            ctx->hw_device_ref, NULL);
        if (!ctx->constraints)
            return AVERROR(ENOMEM);
    }

    // Check constraints and skip create hwframe. Don't take it as error since
    // we can fallback to memory copy from GPU to CPU.
    if ((ctx->constraints->max_width &&
         ctx->constraints->max_width < frame->width) ||
        (ctx->constraints->max_height &&
         ctx->constraints->max_height < frame->height) ||
        (ctx->constraints->min_width &&
         ctx->constraints->min_width > frame->width) ||
        (ctx->constraints->min_height &&
         ctx->constraints->min_height > frame->height))
        return 0;

    // Likewise skip quietly when the software format is unsupported.
    if (ctx->constraints->valid_sw_formats) {
        enum AVPixelFormat *sw_formats = ctx->constraints->valid_sw_formats;
        while (*sw_formats != AV_PIX_FMT_NONE) {
            if (*sw_formats == src_hw_frame->sw_format)
                break;
            sw_formats++;
        }
        if (*sw_formats == AV_PIX_FMT_NONE)
            return 0;
    }

    ctx->hw_frame_ref = av_hwframe_ctx_alloc(ctx->hw_device_ref);
    if (!ctx->hw_frame_ref)
        return AVERROR(ENOMEM);

    hw_frame = (AVHWFramesContext *) ctx->hw_frame_ref->data;
    hw_frame->format = AV_PIX_FMT_VULKAN;
    hw_frame->sw_format = src_hw_frame->sw_format;
    hw_frame->width = frame->width;
    hw_frame->height = frame->height;

    // CUDA interop requires one Vulkan image per plane.
    if (frame->format == AV_PIX_FMT_CUDA) {
        vk_frame_ctx = hw_frame->hwctx;
        vk_frame_ctx->flags = AV_VK_FRAME_FLAG_DISABLE_MULTIPLANE;
    }

    ret = av_hwframe_ctx_init(ctx->hw_frame_ref);
    if (ret < 0) {
        av_log(renderer, AV_LOG_ERROR, "Create hwframe context failed, %s\n",
               av_err2str(ret));
        return ret;
    }

    // Best effort: on failure transfer_formats stays NULL and
    // check_hw_transfer() reports no hw transfer is possible.
    av_hwframe_transfer_get_formats(ctx->hw_frame_ref,
                                    AV_HWFRAME_TRANSFER_DIRECTION_TO,
                                    &ctx->transfer_formats, 0);

    return 0;
}
  495. static inline int check_hw_transfer(RendererContext *ctx, AVFrame *frame)
  496. {
  497. if (!ctx->hw_frame_ref || !ctx->transfer_formats)
  498. return 0;
  499. for (int i = 0; ctx->transfer_formats[i] != AV_PIX_FMT_NONE; i++)
  500. if (ctx->transfer_formats[i] == frame->format)
  501. return 1;
  502. return 0;
  503. }
  504. static inline int move_to_output_frame(RendererContext *ctx, AVFrame *frame)
  505. {
  506. int ret = av_frame_copy_props(ctx->vk_frame, frame);
  507. if (ret < 0)
  508. return ret;
  509. av_frame_unref(frame);
  510. av_frame_move_ref(frame, ctx->vk_frame);
  511. return 0;
  512. }
  513. static int map_frame(VkRenderer *renderer, AVFrame *frame, int use_hw_frame)
  514. {
  515. RendererContext *ctx = (RendererContext *) renderer;
  516. int ret;
  517. if (use_hw_frame && !ctx->hw_frame_ref)
  518. return AVERROR(ENOSYS);
  519. // Try map data first
  520. av_frame_unref(ctx->vk_frame);
  521. if (use_hw_frame) {
  522. ctx->vk_frame->hw_frames_ctx = av_buffer_ref(ctx->hw_frame_ref);
  523. ctx->vk_frame->format = AV_PIX_FMT_VULKAN;
  524. }
  525. ret = av_hwframe_map(ctx->vk_frame, frame, 0);
  526. if (!ret)
  527. return move_to_output_frame(ctx, frame);
  528. if (ret != AVERROR(ENOSYS))
  529. av_log(NULL, AV_LOG_FATAL, "Map frame failed: %s\n", av_err2str(ret));
  530. return ret;
  531. }
  532. static int transfer_frame(VkRenderer *renderer, AVFrame *frame, int use_hw_frame)
  533. {
  534. RendererContext *ctx = (RendererContext *) renderer;
  535. int ret;
  536. if (use_hw_frame && !check_hw_transfer(ctx, frame))
  537. return AVERROR(ENOSYS);
  538. av_frame_unref(ctx->vk_frame);
  539. if (use_hw_frame)
  540. av_hwframe_get_buffer(ctx->hw_frame_ref, ctx->vk_frame, 0);
  541. ret = av_hwframe_transfer_data(ctx->vk_frame, frame, 1);
  542. if (!ret)
  543. return move_to_output_frame(ctx, frame);
  544. if (ret != AVERROR(ENOSYS))
  545. av_log(NULL, AV_LOG_FATAL, "Transfer frame failed: %s\n",
  546. av_err2str(ret));
  547. return ret;
  548. }
  549. static int convert_frame(VkRenderer *renderer, AVFrame *frame)
  550. {
  551. int ret;
  552. if (!frame->hw_frames_ctx)
  553. return 0;
  554. if (frame->format == AV_PIX_FMT_VULKAN)
  555. return 0;
  556. ret = create_hw_frame(renderer, frame);
  557. if (ret < 0)
  558. return ret;
  559. for (int use_hw = 1; use_hw >=0; use_hw--) {
  560. ret = map_frame(renderer, frame, use_hw);
  561. if (!ret)
  562. return 0;
  563. if (ret != AVERROR(ENOSYS))
  564. return ret;
  565. ret = transfer_frame(renderer, frame, use_hw);
  566. if (!ret)
  567. return 0;
  568. if (ret != AVERROR(ENOSYS))
  569. return ret;
  570. }
  571. return ret;
  572. }
/**
 * Render one frame: convert it to Vulkan if needed, map it for libplacebo,
 * render into the next swapchain image and present.
 */
static int display(VkRenderer *renderer, AVFrame *frame)
{
    struct pl_swapchain_frame swap_frame = {0};
    struct pl_frame pl_frame = {0};
    struct pl_frame target = {0};
    RendererContext *ctx = (RendererContext *) renderer;
    int ret = 0;

    ret = convert_frame(renderer, frame);
    if (ret < 0)
        return ret;

    // ctx->tex caches the plane textures across frames.
    if (!pl_map_avframe_ex(ctx->placebo_vulkan->gpu, &pl_frame, pl_avframe_params(
            .frame = frame,
            .tex = ctx->tex))) {
        av_log(NULL, AV_LOG_ERROR, "pl_map_avframe_ex failed\n");
        return AVERROR_EXTERNAL;
    }

    if (!pl_swapchain_start_frame(ctx->swapchain, &swap_frame)) {
        av_log(NULL, AV_LOG_ERROR, "start frame failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    pl_frame_from_swapchain(&target, &swap_frame);
    if (!pl_render_image(ctx->renderer, &pl_frame, &target,
                         &pl_render_default_params)) {
        av_log(NULL, AV_LOG_ERROR, "pl_render_image failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    if (!pl_swapchain_submit_frame(ctx->swapchain)) {
        av_log(NULL, AV_LOG_ERROR, "pl_swapchain_submit_frame failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    pl_swapchain_swap_buffers(ctx->swapchain);

out:
    // Unmap on every path, including failures after a successful map.
    pl_unmap_avframe(ctx->placebo_vulkan->gpu, &pl_frame);
    return ret;
}
  611. static int resize(VkRenderer *renderer, int width, int height)
  612. {
  613. RendererContext *ctx = (RendererContext *) renderer;
  614. if (!pl_swapchain_resize(ctx->swapchain, &width, &height))
  615. return AVERROR_EXTERNAL;
  616. return 0;
  617. }
/**
 * Release all renderer resources in dependency order: frame state first,
 * then libplacebo objects, the surface (which needs get_proc_addr and the
 * instance to still be alive), and finally the hwdevice, instance and log.
 * Handles partially-initialized contexts: each step is guarded by a NULL
 * check or performed via APIs invoked only when the member was created.
 */
static void destroy(VkRenderer *renderer)
{
    RendererContext *ctx = (RendererContext *) renderer;
    PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;

    av_frame_free(&ctx->vk_frame);
    av_freep(&ctx->transfer_formats);
    av_hwframe_constraints_free(&ctx->constraints);
    av_buffer_unref(&ctx->hw_frame_ref);

    if (ctx->placebo_vulkan) {
        // Textures must be destroyed before the pl_vulkan they live on.
        for (int i = 0; i < FF_ARRAY_ELEMS(ctx->tex); i++)
            pl_tex_destroy(ctx->placebo_vulkan->gpu, &ctx->tex[i]);
        pl_renderer_destroy(&ctx->renderer);
        pl_swapchain_destroy(&ctx->swapchain);
        pl_vulkan_destroy(&ctx->placebo_vulkan);
    }

    if (ctx->vk_surface) {
        // SDL created the surface, but destruction goes through Vulkan
        // directly; resolve the destroyer from the owning instance.
        vkDestroySurfaceKHR = (PFN_vkDestroySurfaceKHR)
            ctx->get_proc_addr(ctx->inst, "vkDestroySurfaceKHR");
        vkDestroySurfaceKHR(ctx->inst, ctx->vk_surface, NULL);
        ctx->vk_surface = VK_NULL_HANDLE;
    }

    av_buffer_unref(&ctx->hw_device_ref);
    pl_vk_inst_destroy(&ctx->placebo_instance);
    pl_log_destroy(&ctx->vk_log);
}
/* AVClass so av_log() calls that pass the renderer as context get a name. */
static const AVClass vulkan_renderer_class = {
    .class_name = "Vulkan Renderer",
    .item_name = av_default_item_name,
    .version = LIBAVUTIL_VERSION_INT,
};
  648. VkRenderer *vk_get_renderer(void)
  649. {
  650. RendererContext *ctx = av_mallocz(sizeof(*ctx));
  651. VkRenderer *renderer;
  652. if (!ctx)
  653. return NULL;
  654. renderer = &ctx->api;
  655. renderer->class = &vulkan_renderer_class;
  656. renderer->get_hw_dev = get_hw_dev;
  657. renderer->create = create;
  658. renderer->display = display;
  659. renderer->resize = resize;
  660. renderer->destroy = destroy;
  661. return renderer;
  662. }
  663. #else
/* Stub used when Vulkan rendering support is not compiled in. */
VkRenderer *vk_get_renderer(void)
{
    return NULL;
}
  668. #endif
/* Thin dispatch wrapper over VkRenderer->create(). */
int vk_renderer_create(VkRenderer *renderer, SDL_Window *window,
                       AVDictionary *opt)
{
    return renderer->create(renderer, window, opt);
}
/* Thin dispatch wrapper over VkRenderer->get_hw_dev(). */
int vk_renderer_get_hw_dev(VkRenderer *renderer, AVBufferRef **dev)
{
    return renderer->get_hw_dev(renderer, dev);
}
/* Thin dispatch wrapper over VkRenderer->display(). */
int vk_renderer_display(VkRenderer *renderer, AVFrame *frame)
{
    return renderer->display(renderer, frame);
}
/* Thin dispatch wrapper over VkRenderer->resize(). */
int vk_renderer_resize(VkRenderer *renderer, int width, int height)
{
    return renderer->resize(renderer, width, height);
}
/* Thin dispatch wrapper over VkRenderer->destroy(). */
void vk_renderer_destroy(VkRenderer *renderer)
{
    renderer->destroy(renderer);
}