vaapi_encode_h265.c

/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <string.h>

#include <va/va.h>
#include <va/va_enc_hevc.h>

#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/mem.h"
#include "libavutil/pixdesc.h"
#include "libavutil/opt.h"
#include "libavutil/mastering_display_metadata.h"

#include "atsc_a53.h"
#include "avcodec.h"
#include "cbs.h"
#include "cbs_h265.h"
#include "hw_base_encode_h265.h"
#include "codec_internal.h"
#include "h2645data.h"
#include "h265_profile_level.h"
#include "vaapi_encode.h"

#include "hevc/hevc.h"

enum {
    SEI_MASTERING_DISPLAY   = 0x08,
    SEI_CONTENT_LIGHT_LEVEL = 0x10,
    SEI_A53_CC              = 0x20,
};

typedef struct VAAPIEncodeH265Picture {
    int pic_order_cnt;
    int64_t last_idr_frame;

    int slice_nal_unit;
    int slice_type;
    int pic_type;
} VAAPIEncodeH265Picture;

typedef struct VAAPIEncodeH265Context {
    VAAPIEncodeContext common;

    // Encoder features.
    uint32_t va_features;
    // Block size info.
    uint32_t va_bs;
    uint32_t ctu_size;
    uint32_t min_cb_size;

    // User options.
    int qp;
    int aud;
    int profile;
    int level;
    int sei;

    // Derived settings.
    int fixed_qp_p;
    int fixed_qp_b;

    // Writer structures.
    FFHWBaseEncodeH265 units;
    FFHWBaseEncodeH265Opts unit_opts;

    H265RawAUD raw_aud;
    H265RawSlice raw_slice;

    SEIRawMasteringDisplayColourVolume sei_mastering_display;
    SEIRawContentLightLevelInfo sei_content_light_level;
    SEIRawUserDataRegistered sei_a53cc;
    void *sei_a53cc_data;

    CodedBitstreamContext *cbc;
    CodedBitstreamFragment current_access_unit;
    int aud_needed;
    int sei_needed;
} VAAPIEncodeH265Context;
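
// Serialise the collected NAL units / SEI messages of the current access
// unit and copy the result into the packed-header buffer supplied by the
// caller; *data_len holds the buffer capacity in bits on entry and the
// number of bits written on return.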
static int vaapi_encode_h265_write_access_unit(AVCodecContext *avctx,
                                               char *data, size_t *data_len,
                                               CodedBitstreamFragment *au)
{
    VAAPIEncodeH265Context *priv = avctx->priv_data;
    int err;

    err = ff_cbs_write_fragment_data(priv->cbc, au);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to write packed header.\n");
        return err;
    }

    if (*data_len < 8 * au->data_size - au->data_bit_padding) {
        av_log(avctx, AV_LOG_ERROR, "Access unit too large: "
               "%zu < %zu.\n", *data_len,
               8 * au->data_size - au->data_bit_padding);
        return AVERROR(ENOSPC);
    }

    memcpy(data, au->data, au->data_size);
    *data_len = 8 * au->data_size - au->data_bit_padding;

    return 0;
}

static int vaapi_encode_h265_add_nal(AVCodecContext *avctx,
                                     CodedBitstreamFragment *au,
                                     void *nal_unit)
{
    H265RawNALUnitHeader *header = nal_unit;
    int err;

    err = ff_cbs_insert_unit_content(au, -1,
                                     header->nal_unit_type, nal_unit, NULL);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to add NAL unit: "
               "type = %d.\n", header->nal_unit_type);
        return err;
    }

    return 0;
}
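
// Pack an AUD (when pending) followed by the VPS, SPS and PPS into the
// packed sequence header.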
static int vaapi_encode_h265_write_sequence_header(AVCodecContext *avctx,
                                                   char *data, size_t *data_len)
{
    VAAPIEncodeH265Context *priv = avctx->priv_data;
    CodedBitstreamFragment *au = &priv->current_access_unit;
    int err;

    if (priv->aud_needed) {
        err = vaapi_encode_h265_add_nal(avctx, au, &priv->raw_aud);
        if (err < 0)
            goto fail;
        priv->aud_needed = 0;
    }

    err = vaapi_encode_h265_add_nal(avctx, au, &priv->units.raw_vps);
    if (err < 0)
        goto fail;

    err = vaapi_encode_h265_add_nal(avctx, au, &priv->units.raw_sps);
    if (err < 0)
        goto fail;

    err = vaapi_encode_h265_add_nal(avctx, au, &priv->units.raw_pps);
    if (err < 0)
        goto fail;

    err = vaapi_encode_h265_write_access_unit(avctx, data, data_len, au);
fail:
    ff_cbs_fragment_reset(au);
    return err;
}

static int vaapi_encode_h265_write_slice_header(AVCodecContext *avctx,
                                                VAAPIEncodePicture *pic,
                                                VAAPIEncodeSlice *slice,
                                                char *data, size_t *data_len)
{
    VAAPIEncodeH265Context *priv = avctx->priv_data;
    CodedBitstreamFragment *au = &priv->current_access_unit;
    int err;

    if (priv->aud_needed) {
        err = vaapi_encode_h265_add_nal(avctx, au, &priv->raw_aud);
        if (err < 0)
            goto fail;
        priv->aud_needed = 0;
    }

    err = vaapi_encode_h265_add_nal(avctx, au, &priv->raw_slice);
    if (err < 0)
        goto fail;

    err = vaapi_encode_h265_write_access_unit(avctx, data, data_len, au);
fail:
    ff_cbs_fragment_reset(au);
    return err;
}
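
// Pack any pending SEI messages (mastering display colour volume, content
// light level, A/53 captions) as a raw-data packed header; AVERROR_EOF
// signals that nothing more needs to be written for this picture.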
static int vaapi_encode_h265_write_extra_header(AVCodecContext *avctx,
                                                FFHWBaseEncodePicture *base,
                                                int index, int *type,
                                                char *data, size_t *data_len)
{
    VAAPIEncodeH265Context *priv = avctx->priv_data;
    CodedBitstreamFragment *au = &priv->current_access_unit;
    int err;

    if (priv->sei_needed) {
        if (priv->aud_needed) {
            err = vaapi_encode_h265_add_nal(avctx, au, &priv->raw_aud);
            if (err < 0)
                goto fail;
            priv->aud_needed = 0;
        }

        if (priv->sei_needed & SEI_MASTERING_DISPLAY) {
            err = ff_cbs_sei_add_message(priv->cbc, au, 1,
                                         SEI_TYPE_MASTERING_DISPLAY_COLOUR_VOLUME,
                                         &priv->sei_mastering_display, NULL);
            if (err < 0)
                goto fail;
        }

        if (priv->sei_needed & SEI_CONTENT_LIGHT_LEVEL) {
            err = ff_cbs_sei_add_message(priv->cbc, au, 1,
                                         SEI_TYPE_CONTENT_LIGHT_LEVEL_INFO,
                                         &priv->sei_content_light_level, NULL);
            if (err < 0)
                goto fail;
        }

        if (priv->sei_needed & SEI_A53_CC) {
            err = ff_cbs_sei_add_message(priv->cbc, au, 1,
                                         SEI_TYPE_USER_DATA_REGISTERED_ITU_T_T35,
                                         &priv->sei_a53cc, NULL);
            if (err < 0)
                goto fail;
        }

        priv->sei_needed = 0;

        err = vaapi_encode_h265_write_access_unit(avctx, data, data_len, au);
        if (err < 0)
            goto fail;

        ff_cbs_fragment_reset(au);

        *type = VAEncPackedHeaderRawData;
        return 0;
    } else {
        return AVERROR_EOF;
    }

fail:
    ff_cbs_fragment_reset(au);
    return err;
}
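
// Fill the VPS/SPS/PPS through the shared base-encode H.265 writer, clamp
// them to the block sizes and features reported by the driver (libva >= 1.13),
// and translate the result into the VAAPI sequence and picture parameter
// buffers.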
static int vaapi_encode_h265_init_sequence_params(AVCodecContext *avctx)
{
    FFHWBaseEncodeContext *base_ctx = avctx->priv_data;
    VAAPIEncodeContext *ctx = avctx->priv_data;
    VAAPIEncodeH265Context *priv = avctx->priv_data;
    H265RawVPS *vps = &priv->units.raw_vps;
    H265RawSPS *sps = &priv->units.raw_sps;
    H265RawPPS *pps = &priv->units.raw_pps;
    VAEncSequenceParameterBufferHEVC *vseq = ctx->codec_sequence_params;
    VAEncPictureParameterBufferHEVC *vpic = ctx->codec_picture_params;
    int i, err;

    // priv->unit_opts.tier already set
    // priv->unit_opts.fixed_qp_idr already set
    priv->unit_opts.cu_qp_delta_enabled_flag = (ctx->va_rc_mode != VA_RC_CQP);
    priv->unit_opts.tile_rows = ctx->tile_rows;
    priv->unit_opts.tile_cols = ctx->tile_cols;
    priv->unit_opts.nb_slices = ctx->nb_slices;
    priv->unit_opts.slice_block_rows = ctx->slice_block_rows;
    priv->unit_opts.slice_block_cols = ctx->slice_block_cols;
    memcpy(priv->unit_opts.col_width, ctx->col_width,
           ctx->tile_rows * sizeof(*priv->unit_opts.col_width));
    memcpy(priv->unit_opts.row_height, ctx->row_height,
           ctx->tile_cols * sizeof(*priv->unit_opts.row_height));

    err = ff_hw_base_encode_init_params_h265(base_ctx, avctx,
                                             &priv->units, &priv->unit_opts);
    if (err < 0)
        return err;

#if VA_CHECK_VERSION(1, 13, 0)
    // update sps setting according to queried result
    if (priv->va_features) {
        VAConfigAttribValEncHEVCFeatures features = { .value = priv->va_features };

        // Enable feature if get queried result is VA_FEATURE_SUPPORTED | VA_FEATURE_REQUIRED
        sps->amp_enabled_flag =
            !!features.bits.amp;
        sps->sample_adaptive_offset_enabled_flag =
            !!features.bits.sao;
        sps->sps_temporal_mvp_enabled_flag =
            !!features.bits.temporal_mvp;
        sps->pcm_enabled_flag =
            !!features.bits.pcm;
    }

    if (priv->va_bs) {
        VAConfigAttribValEncHEVCBlockSizes bs = { .value = priv->va_bs };
        sps->log2_min_luma_coding_block_size_minus3 =
            ff_ctz(priv->min_cb_size) - 3;
        sps->log2_diff_max_min_luma_coding_block_size =
            ff_ctz(priv->ctu_size) - ff_ctz(priv->min_cb_size);

        sps->log2_min_luma_transform_block_size_minus2 =
            bs.bits.log2_min_luma_transform_block_size_minus2;
        sps->log2_diff_max_min_luma_transform_block_size =
            bs.bits.log2_max_luma_transform_block_size_minus2 -
            bs.bits.log2_min_luma_transform_block_size_minus2;

        sps->max_transform_hierarchy_depth_inter =
            bs.bits.max_max_transform_hierarchy_depth_inter;
        sps->max_transform_hierarchy_depth_intra =
            bs.bits.max_max_transform_hierarchy_depth_intra;
    }

    // update pps setting according to queried result
    if (priv->va_features) {
        VAConfigAttribValEncHEVCFeatures features = { .value = priv->va_features };
        if (ctx->va_rc_mode != VA_RC_CQP)
            pps->cu_qp_delta_enabled_flag =
                !!features.bits.cu_qp_delta;

        pps->transform_skip_enabled_flag =
            !!features.bits.transform_skip;
        // set diff_cu_qp_delta_depth as its max value if cu_qp_delta enabled. Otherwise
        // 0 will make cu_qp_delta invalid.
        if (pps->cu_qp_delta_enabled_flag)
            pps->diff_cu_qp_delta_depth = sps->log2_diff_max_min_luma_coding_block_size;
    }
#endif

    // Fill VAAPI parameter buffers.
    *vseq = (VAEncSequenceParameterBufferHEVC) {
        .general_profile_idc = vps->profile_tier_level.general_profile_idc,
        .general_level_idc = vps->profile_tier_level.general_level_idc,
        .general_tier_flag = vps->profile_tier_level.general_tier_flag,

        .intra_period = base_ctx->gop_size,
        .intra_idr_period = base_ctx->gop_size,
        .ip_period = base_ctx->b_per_p + 1,
        .bits_per_second = ctx->va_bit_rate,

        .pic_width_in_luma_samples = sps->pic_width_in_luma_samples,
        .pic_height_in_luma_samples = sps->pic_height_in_luma_samples,

        .seq_fields.bits = {
            .chroma_format_idc = sps->chroma_format_idc,
            .separate_colour_plane_flag = sps->separate_colour_plane_flag,
            .bit_depth_luma_minus8 = sps->bit_depth_luma_minus8,
            .bit_depth_chroma_minus8 = sps->bit_depth_chroma_minus8,
            .scaling_list_enabled_flag = sps->scaling_list_enabled_flag,
            .strong_intra_smoothing_enabled_flag =
                sps->strong_intra_smoothing_enabled_flag,
            .amp_enabled_flag = sps->amp_enabled_flag,
            .sample_adaptive_offset_enabled_flag =
                sps->sample_adaptive_offset_enabled_flag,
            .pcm_enabled_flag = sps->pcm_enabled_flag,
            .pcm_loop_filter_disabled_flag = sps->pcm_loop_filter_disabled_flag,
            .sps_temporal_mvp_enabled_flag = sps->sps_temporal_mvp_enabled_flag,
        },

        .log2_min_luma_coding_block_size_minus3 =
            sps->log2_min_luma_coding_block_size_minus3,
        .log2_diff_max_min_luma_coding_block_size =
            sps->log2_diff_max_min_luma_coding_block_size,
        .log2_min_transform_block_size_minus2 =
            sps->log2_min_luma_transform_block_size_minus2,
        .log2_diff_max_min_transform_block_size =
            sps->log2_diff_max_min_luma_transform_block_size,
        .max_transform_hierarchy_depth_inter =
            sps->max_transform_hierarchy_depth_inter,
        .max_transform_hierarchy_depth_intra =
            sps->max_transform_hierarchy_depth_intra,

        .pcm_sample_bit_depth_luma_minus1 =
            sps->pcm_sample_bit_depth_luma_minus1,
        .pcm_sample_bit_depth_chroma_minus1 =
            sps->pcm_sample_bit_depth_chroma_minus1,
        .log2_min_pcm_luma_coding_block_size_minus3 =
            sps->log2_min_pcm_luma_coding_block_size_minus3,
        .log2_max_pcm_luma_coding_block_size_minus3 =
            sps->log2_min_pcm_luma_coding_block_size_minus3 +
            sps->log2_diff_max_min_pcm_luma_coding_block_size,

        .vui_parameters_present_flag = 0,
    };

    *vpic = (VAEncPictureParameterBufferHEVC) {
        .decoded_curr_pic = {
            .picture_id = VA_INVALID_ID,
            .flags = VA_PICTURE_HEVC_INVALID,
        },

        .coded_buf = VA_INVALID_ID,

        .collocated_ref_pic_index = sps->sps_temporal_mvp_enabled_flag ?
                                    0 : 0xff,

        .last_picture = 0,

        .pic_init_qp = pps->init_qp_minus26 + 26,
        .diff_cu_qp_delta_depth = pps->diff_cu_qp_delta_depth,
        .pps_cb_qp_offset = pps->pps_cb_qp_offset,
        .pps_cr_qp_offset = pps->pps_cr_qp_offset,

        .num_tile_columns_minus1 = pps->num_tile_columns_minus1,
        .num_tile_rows_minus1 = pps->num_tile_rows_minus1,

        .log2_parallel_merge_level_minus2 = pps->log2_parallel_merge_level_minus2,
        .ctu_max_bitsize_allowed = 0,

        .num_ref_idx_l0_default_active_minus1 =
            pps->num_ref_idx_l0_default_active_minus1,
        .num_ref_idx_l1_default_active_minus1 =
            pps->num_ref_idx_l1_default_active_minus1,

        .slice_pic_parameter_set_id = pps->pps_pic_parameter_set_id,

        .pic_fields.bits = {
            .sign_data_hiding_enabled_flag = pps->sign_data_hiding_enabled_flag,
            .constrained_intra_pred_flag = pps->constrained_intra_pred_flag,
            .transform_skip_enabled_flag = pps->transform_skip_enabled_flag,
            .cu_qp_delta_enabled_flag = pps->cu_qp_delta_enabled_flag,
            .weighted_pred_flag = pps->weighted_pred_flag,
            .weighted_bipred_flag = pps->weighted_bipred_flag,
            .transquant_bypass_enabled_flag = pps->transquant_bypass_enabled_flag,
            .tiles_enabled_flag = pps->tiles_enabled_flag,
            .entropy_coding_sync_enabled_flag = pps->entropy_coding_sync_enabled_flag,
            .loop_filter_across_tiles_enabled_flag =
                pps->loop_filter_across_tiles_enabled_flag,
            .pps_loop_filter_across_slices_enabled_flag =
                pps->pps_loop_filter_across_slices_enabled_flag,
            .scaling_list_data_present_flag = (sps->sps_scaling_list_data_present_flag |
                                               pps->pps_scaling_list_data_present_flag),
            .screen_content_flag = 0,
            .enable_gpu_weighted_prediction = 0,
            .no_output_of_prior_pics_flag = 0,
        },
    };

    if (pps->tiles_enabled_flag) {
        for (i = 0; i <= vpic->num_tile_rows_minus1; i++)
            vpic->row_height_minus1[i] = pps->row_height_minus1[i];
        for (i = 0; i <= vpic->num_tile_columns_minus1; i++)
            vpic->column_width_minus1[i] = pps->column_width_minus1[i];
    }

    return 0;
}
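
// Per-picture setup: derive the slice NAL unit type and slice type from the
// picture type, prepare AUD and SEI payloads according to the user options,
// and fill the VAAPI picture parameter buffer including the reference frame
// list.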
static int vaapi_encode_h265_init_picture_params(AVCodecContext *avctx,
                                                 FFHWBaseEncodePicture *pic)
{
    FFHWBaseEncodeContext *base_ctx = avctx->priv_data;
    VAAPIEncodeH265Context *priv = avctx->priv_data;
    VAAPIEncodePicture *vaapi_pic = pic->priv;
    VAAPIEncodeH265Picture *hpic = pic->codec_priv;
    FFHWBaseEncodePicture *prev = pic->prev;
    VAAPIEncodeH265Picture *hprev = prev ? prev->codec_priv : NULL;
    VAEncPictureParameterBufferHEVC *vpic = vaapi_pic->codec_picture_params;
    int i, j = 0;

    if (pic->type == FF_HW_PICTURE_TYPE_IDR) {
        av_assert0(pic->display_order == pic->encode_order);

        hpic->last_idr_frame = pic->display_order;

        hpic->slice_nal_unit = HEVC_NAL_IDR_W_RADL;
        hpic->slice_type = HEVC_SLICE_I;
        hpic->pic_type = 0;
    } else {
        av_assert0(prev);
        hpic->last_idr_frame = hprev->last_idr_frame;

        if (pic->type == FF_HW_PICTURE_TYPE_I) {
            hpic->slice_nal_unit = HEVC_NAL_CRA_NUT;
            hpic->slice_type = HEVC_SLICE_I;
            hpic->pic_type = 0;
        } else if (pic->type == FF_HW_PICTURE_TYPE_P) {
            av_assert0(pic->refs[0]);
            hpic->slice_nal_unit = HEVC_NAL_TRAIL_R;
            hpic->slice_type = HEVC_SLICE_P;
            hpic->pic_type = 1;
        } else {
            FFHWBaseEncodePicture *irap_ref;
            av_assert0(pic->refs[0][0] && pic->refs[1][0]);
            for (irap_ref = pic; irap_ref; irap_ref = irap_ref->refs[1][0]) {
                if (irap_ref->type == FF_HW_PICTURE_TYPE_I)
                    break;
            }
            if (pic->b_depth == base_ctx->max_b_depth) {
                hpic->slice_nal_unit = irap_ref ? HEVC_NAL_RASL_N
                                                : HEVC_NAL_TRAIL_N;
            } else {
                hpic->slice_nal_unit = irap_ref ? HEVC_NAL_RASL_R
                                                : HEVC_NAL_TRAIL_R;
            }
            hpic->slice_type = HEVC_SLICE_B;
            hpic->pic_type = 2;
        }
    }
    hpic->pic_order_cnt = pic->display_order - hpic->last_idr_frame;

    if (priv->aud) {
        priv->aud_needed = 1;
        priv->raw_aud = (H265RawAUD) {
            .nal_unit_header = {
                .nal_unit_type = HEVC_NAL_AUD,
                .nuh_layer_id = 0,
                .nuh_temporal_id_plus1 = 1,
            },
            .pic_type = hpic->pic_type,
        };
    } else {
        priv->aud_needed = 0;
    }

    priv->sei_needed = 0;

    // Only look for the metadata on I/IDR frames on the output. We
    // may force an IDR frame on the output where the metadata gets
    // changed on the input frame.
    if ((priv->sei & SEI_MASTERING_DISPLAY) &&
        (pic->type == FF_HW_PICTURE_TYPE_I || pic->type == FF_HW_PICTURE_TYPE_IDR)) {
        AVFrameSideData *sd =
            av_frame_get_side_data(pic->input_image,
                                   AV_FRAME_DATA_MASTERING_DISPLAY_METADATA);

        if (sd) {
            AVMasteringDisplayMetadata *mdm =
                (AVMasteringDisplayMetadata *)sd->data;

            // SEI is needed when both the primaries and luminance are set
            if (mdm->has_primaries && mdm->has_luminance) {
                SEIRawMasteringDisplayColourVolume *mdcv =
                    &priv->sei_mastering_display;
                const int mapping[3] = {1, 2, 0};
                const int chroma_den = 50000;
                const int luma_den = 10000;

                for (i = 0; i < 3; i++) {
                    const int j = mapping[i];
                    mdcv->display_primaries_x[i] =
                        FFMIN(lrint(chroma_den *
                                    av_q2d(mdm->display_primaries[j][0])),
                              chroma_den);
                    mdcv->display_primaries_y[i] =
                        FFMIN(lrint(chroma_den *
                                    av_q2d(mdm->display_primaries[j][1])),
                              chroma_den);
                }

                mdcv->white_point_x =
                    FFMIN(lrint(chroma_den * av_q2d(mdm->white_point[0])),
                          chroma_den);
                mdcv->white_point_y =
                    FFMIN(lrint(chroma_den * av_q2d(mdm->white_point[1])),
                          chroma_den);

                mdcv->max_display_mastering_luminance =
                    lrint(luma_den * av_q2d(mdm->max_luminance));
                mdcv->min_display_mastering_luminance =
                    FFMIN(lrint(luma_den * av_q2d(mdm->min_luminance)),
                          mdcv->max_display_mastering_luminance);

                priv->sei_needed |= SEI_MASTERING_DISPLAY;
            }
        }
    }

    if ((priv->sei & SEI_CONTENT_LIGHT_LEVEL) &&
        (pic->type == FF_HW_PICTURE_TYPE_I || pic->type == FF_HW_PICTURE_TYPE_IDR)) {
        AVFrameSideData *sd =
            av_frame_get_side_data(pic->input_image,
                                   AV_FRAME_DATA_CONTENT_LIGHT_LEVEL);

        if (sd) {
            AVContentLightMetadata *clm =
                (AVContentLightMetadata *)sd->data;
            SEIRawContentLightLevelInfo *clli =
                &priv->sei_content_light_level;

            clli->max_content_light_level = FFMIN(clm->MaxCLL, 65535);
            clli->max_pic_average_light_level = FFMIN(clm->MaxFALL, 65535);

            priv->sei_needed |= SEI_CONTENT_LIGHT_LEVEL;
        }
    }

    if (priv->sei & SEI_A53_CC) {
        int err;
        size_t sei_a53cc_len;
        av_freep(&priv->sei_a53cc_data);
        err = ff_alloc_a53_sei(pic->input_image, 0, &priv->sei_a53cc_data, &sei_a53cc_len);
        if (err < 0)
            return err;
        if (priv->sei_a53cc_data != NULL) {
            priv->sei_a53cc.itu_t_t35_country_code = 181;
            priv->sei_a53cc.data = (uint8_t *)priv->sei_a53cc_data + 1;
            priv->sei_a53cc.data_length = sei_a53cc_len - 1;

            priv->sei_needed |= SEI_A53_CC;
        }
    }

    vpic->decoded_curr_pic = (VAPictureHEVC) {
        .picture_id = vaapi_pic->recon_surface,
        .pic_order_cnt = hpic->pic_order_cnt,
        .flags = 0,
    };

    for (int k = 0; k < MAX_REFERENCE_LIST_NUM; k++) {
        for (i = 0; i < pic->nb_refs[k]; i++) {
            FFHWBaseEncodePicture *ref = pic->refs[k][i];
            VAAPIEncodeH265Picture *href;

            av_assert0(ref && ref->encode_order < pic->encode_order);
            href = ref->codec_priv;

            vpic->reference_frames[j++] = (VAPictureHEVC) {
                .picture_id = ((VAAPIEncodePicture *)ref->priv)->recon_surface,
                .pic_order_cnt = href->pic_order_cnt,
                .flags = (ref->display_order < pic->display_order ?
                          VA_PICTURE_HEVC_RPS_ST_CURR_BEFORE : 0) |
                         (ref->display_order > pic->display_order ?
                          VA_PICTURE_HEVC_RPS_ST_CURR_AFTER : 0),
            };
        }
    }

    for (; j < FF_ARRAY_ELEMS(vpic->reference_frames); j++) {
        vpic->reference_frames[j] = (VAPictureHEVC) {
            .picture_id = VA_INVALID_ID,
            .flags = VA_PICTURE_HEVC_INVALID,
        };
    }

    vpic->coded_buf = vaapi_pic->output_buffer;

    vpic->nal_unit_type = hpic->slice_nal_unit;

    vpic->pic_fields.bits.reference_pic_flag = pic->is_reference;
    switch (pic->type) {
    case FF_HW_PICTURE_TYPE_IDR:
        vpic->pic_fields.bits.idr_pic_flag = 1;
        vpic->pic_fields.bits.coding_type = 1;
        break;
    case FF_HW_PICTURE_TYPE_I:
        vpic->pic_fields.bits.idr_pic_flag = 0;
        vpic->pic_fields.bits.coding_type = 1;
        break;
    case FF_HW_PICTURE_TYPE_P:
        vpic->pic_fields.bits.idr_pic_flag = 0;
        vpic->pic_fields.bits.coding_type = 2;
        break;
    case FF_HW_PICTURE_TYPE_B:
        vpic->pic_fields.bits.idr_pic_flag = 0;
        vpic->pic_fields.bits.coding_type = 3;
        break;
    default:
        av_assert0(0 && "invalid picture type");
    }

    return 0;
}
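
// Build the slice header, including the short-term reference picture set
// derived from the reference lists and the DPB, and mirror it into the VAAPI
// slice parameter buffer.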
static int vaapi_encode_h265_init_slice_params(AVCodecContext *avctx,
                                               FFHWBaseEncodePicture *pic,
                                               VAAPIEncodeSlice *slice)
{
    FFHWBaseEncodeContext *base_ctx = avctx->priv_data;
    VAAPIEncodeH265Context *priv = avctx->priv_data;
    VAAPIEncodePicture *vaapi_pic = pic->priv;
    VAAPIEncodeH265Picture *hpic = pic->codec_priv;
    const H265RawSPS *sps = &priv->units.raw_sps;
    const H265RawPPS *pps = &priv->units.raw_pps;
    H265RawSliceHeader *sh = &priv->raw_slice.header;
    VAEncPictureParameterBufferHEVC *vpic = vaapi_pic->codec_picture_params;
    VAEncSliceParameterBufferHEVC *vslice = slice->codec_slice_params;
    int i;

    sh->nal_unit_header = (H265RawNALUnitHeader) {
        .nal_unit_type = hpic->slice_nal_unit,
        .nuh_layer_id = 0,
        .nuh_temporal_id_plus1 = 1,
    };

    sh->slice_pic_parameter_set_id = pps->pps_pic_parameter_set_id;

    sh->first_slice_segment_in_pic_flag = slice->index == 0;
    sh->slice_segment_address = slice->block_start;

    sh->slice_type = hpic->slice_type;
    if (sh->slice_type == HEVC_SLICE_P && base_ctx->p_to_gpb)
        sh->slice_type = HEVC_SLICE_B;

    sh->slice_pic_order_cnt_lsb = hpic->pic_order_cnt &
        (1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4)) - 1;

    if (pic->type != FF_HW_PICTURE_TYPE_IDR) {
        H265RawSTRefPicSet *rps;
        const VAAPIEncodeH265Picture *strp;
        int rps_poc[MAX_DPB_SIZE];
        int rps_used[MAX_DPB_SIZE];
        int i, j, poc, rps_pics;

        sh->short_term_ref_pic_set_sps_flag = 0;

        rps = &sh->short_term_ref_pic_set;
        memset(rps, 0, sizeof(*rps));

        rps_pics = 0;
        for (i = 0; i < MAX_REFERENCE_LIST_NUM; i++) {
            for (j = 0; j < pic->nb_refs[i]; j++) {
                strp = pic->refs[i][j]->codec_priv;
                rps_poc[rps_pics] = strp->pic_order_cnt;
                rps_used[rps_pics] = 1;
                ++rps_pics;
            }
        }

        for (i = 0; i < pic->nb_dpb_pics; i++) {
            if (pic->dpb[i] == pic)
                continue;

            for (j = 0; j < pic->nb_refs[0]; j++) {
                if (pic->dpb[i] == pic->refs[0][j])
                    break;
            }
            if (j < pic->nb_refs[0])
                continue;

            for (j = 0; j < pic->nb_refs[1]; j++) {
                if (pic->dpb[i] == pic->refs[1][j])
                    break;
            }
            if (j < pic->nb_refs[1])
                continue;

            strp = pic->dpb[i]->codec_priv;
            rps_poc[rps_pics] = strp->pic_order_cnt;
            rps_used[rps_pics] = 0;
            ++rps_pics;
        }

        for (i = 1; i < rps_pics; i++) {
            for (j = i; j > 0; j--) {
                if (rps_poc[j] > rps_poc[j - 1])
                    break;
                av_assert0(rps_poc[j] != rps_poc[j - 1]);
                FFSWAP(int, rps_poc[j], rps_poc[j - 1]);
                FFSWAP(int, rps_used[j], rps_used[j - 1]);
            }
        }

        av_log(avctx, AV_LOG_DEBUG, "RPS for POC %d:",
               hpic->pic_order_cnt);
        for (i = 0; i < rps_pics; i++) {
            av_log(avctx, AV_LOG_DEBUG, " (%d,%d)",
                   rps_poc[i], rps_used[i]);
        }
        av_log(avctx, AV_LOG_DEBUG, "\n");

        for (i = 0; i < rps_pics; i++) {
            av_assert0(rps_poc[i] != hpic->pic_order_cnt);
            if (rps_poc[i] > hpic->pic_order_cnt)
                break;
        }

        rps->num_negative_pics = i;
        poc = hpic->pic_order_cnt;
        for (j = i - 1; j >= 0; j--) {
            rps->delta_poc_s0_minus1[i - 1 - j] = poc - rps_poc[j] - 1;
            rps->used_by_curr_pic_s0_flag[i - 1 - j] = rps_used[j];
            poc = rps_poc[j];
        }

        rps->num_positive_pics = rps_pics - i;
        poc = hpic->pic_order_cnt;
        for (j = i; j < rps_pics; j++) {
            rps->delta_poc_s1_minus1[j - i] = rps_poc[j] - poc - 1;
            rps->used_by_curr_pic_s1_flag[j - i] = rps_used[j];
            poc = rps_poc[j];
        }

        sh->num_long_term_sps = 0;
        sh->num_long_term_pics = 0;

        // When this flag is not present, it is inferred to be 1.
        sh->collocated_from_l0_flag = 1;
        sh->slice_temporal_mvp_enabled_flag =
            sps->sps_temporal_mvp_enabled_flag;
        if (sh->slice_temporal_mvp_enabled_flag) {
            if (sh->slice_type == HEVC_SLICE_B)
                sh->collocated_from_l0_flag = 1;
            sh->collocated_ref_idx = 0;
        }

        sh->num_ref_idx_active_override_flag = 0;
        sh->num_ref_idx_l0_active_minus1 = pps->num_ref_idx_l0_default_active_minus1;
        sh->num_ref_idx_l1_active_minus1 = pps->num_ref_idx_l1_default_active_minus1;
    }

    sh->slice_sao_luma_flag = sh->slice_sao_chroma_flag =
        sps->sample_adaptive_offset_enabled_flag;

    if (pic->type == FF_HW_PICTURE_TYPE_B)
        sh->slice_qp_delta = priv->fixed_qp_b - (pps->init_qp_minus26 + 26);
    else if (pic->type == FF_HW_PICTURE_TYPE_P)
        sh->slice_qp_delta = priv->fixed_qp_p - (pps->init_qp_minus26 + 26);
    else
        sh->slice_qp_delta = priv->unit_opts.fixed_qp_idr - (pps->init_qp_minus26 + 26);

    *vslice = (VAEncSliceParameterBufferHEVC) {
        .slice_segment_address = sh->slice_segment_address,
        .num_ctu_in_slice = slice->block_size,

        .slice_type = sh->slice_type,
        .slice_pic_parameter_set_id = sh->slice_pic_parameter_set_id,

        .num_ref_idx_l0_active_minus1 = sh->num_ref_idx_l0_active_minus1,
        .num_ref_idx_l1_active_minus1 = sh->num_ref_idx_l1_active_minus1,

        .luma_log2_weight_denom = sh->luma_log2_weight_denom,
        .delta_chroma_log2_weight_denom = sh->delta_chroma_log2_weight_denom,

        .max_num_merge_cand = 5 - sh->five_minus_max_num_merge_cand,

        .slice_qp_delta = sh->slice_qp_delta,
        .slice_cb_qp_offset = sh->slice_cb_qp_offset,
        .slice_cr_qp_offset = sh->slice_cr_qp_offset,

        .slice_beta_offset_div2 = sh->slice_beta_offset_div2,
        .slice_tc_offset_div2 = sh->slice_tc_offset_div2,

        .slice_fields.bits = {
            .last_slice_of_pic_flag = slice->index == vaapi_pic->nb_slices - 1,
            .dependent_slice_segment_flag = sh->dependent_slice_segment_flag,
            .colour_plane_id = sh->colour_plane_id,
            .slice_temporal_mvp_enabled_flag =
                sh->slice_temporal_mvp_enabled_flag,
            .slice_sao_luma_flag = sh->slice_sao_luma_flag,
            .slice_sao_chroma_flag = sh->slice_sao_chroma_flag,
            .num_ref_idx_active_override_flag =
                sh->num_ref_idx_active_override_flag,
            .mvd_l1_zero_flag = sh->mvd_l1_zero_flag,
            .cabac_init_flag = sh->cabac_init_flag,
            .slice_deblocking_filter_disabled_flag =
                sh->slice_deblocking_filter_disabled_flag,
            .slice_loop_filter_across_slices_enabled_flag =
                sh->slice_loop_filter_across_slices_enabled_flag,
            .collocated_from_l0_flag = sh->collocated_from_l0_flag,
        },
    };

    for (i = 0; i < FF_ARRAY_ELEMS(vslice->ref_pic_list0); i++) {
        vslice->ref_pic_list0[i].picture_id = VA_INVALID_ID;
        vslice->ref_pic_list0[i].flags = VA_PICTURE_HEVC_INVALID;
        vslice->ref_pic_list1[i].picture_id = VA_INVALID_ID;
        vslice->ref_pic_list1[i].flags = VA_PICTURE_HEVC_INVALID;
    }

    if (pic->nb_refs[0]) {
        // Backward reference for P- or B-frame.
        av_assert0(pic->type == FF_HW_PICTURE_TYPE_P ||
                   pic->type == FF_HW_PICTURE_TYPE_B);
        vslice->ref_pic_list0[0] = vpic->reference_frames[0];
        if (base_ctx->p_to_gpb && pic->type == FF_HW_PICTURE_TYPE_P)
            // Reference for GPB B-frame, L0 == L1
            vslice->ref_pic_list1[0] = vpic->reference_frames[0];
    }
    if (pic->nb_refs[1]) {
        // Forward reference for B-frame.
        av_assert0(pic->type == FF_HW_PICTURE_TYPE_B);
        vslice->ref_pic_list1[0] = vpic->reference_frames[1];
    }

    if (pic->type == FF_HW_PICTURE_TYPE_P && base_ctx->p_to_gpb) {
        vslice->slice_type = HEVC_SLICE_B;
        for (i = 0; i < FF_ARRAY_ELEMS(vslice->ref_pic_list0); i++) {
            vslice->ref_pic_list1[i].picture_id = vslice->ref_pic_list0[i].picture_id;
            vslice->ref_pic_list1[i].flags = vslice->ref_pic_list0[i].flags;
        }
    }

    return 0;
}
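
// Query the driver's HEVC feature and block-size attributes (libva >= 1.13)
// to derive the CTU and minimum CB sizes used for surface alignment and
// slice layout; fall back to 32/16 when nothing is reported.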
static av_cold int vaapi_encode_h265_get_encoder_caps(AVCodecContext *avctx)
{
    FFHWBaseEncodeContext *base_ctx = avctx->priv_data;
    VAAPIEncodeH265Context *priv = avctx->priv_data;

#if VA_CHECK_VERSION(1, 13, 0)
    {
        VAAPIEncodeContext *ctx = avctx->priv_data;
        VAConfigAttribValEncHEVCBlockSizes block_size;
        VAConfigAttrib attr;
        VAStatus vas;

        attr.type = VAConfigAttribEncHEVCFeatures;
        vas = vaGetConfigAttributes(ctx->hwctx->display, ctx->va_profile,
                                    ctx->va_entrypoint, &attr, 1);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to query encoder "
                   "features, using guessed defaults.\n");
            return AVERROR_EXTERNAL;
        } else if (attr.value == VA_ATTRIB_NOT_SUPPORTED) {
            av_log(avctx, AV_LOG_WARNING, "Driver does not advertise "
                   "encoder features, using guessed defaults.\n");
        } else {
            priv->va_features = attr.value;
        }

        attr.type = VAConfigAttribEncHEVCBlockSizes;
        vas = vaGetConfigAttributes(ctx->hwctx->display, ctx->va_profile,
                                    ctx->va_entrypoint, &attr, 1);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to query encoder "
                   "block size, using guessed defaults.\n");
            return AVERROR_EXTERNAL;
        } else if (attr.value == VA_ATTRIB_NOT_SUPPORTED) {
            av_log(avctx, AV_LOG_WARNING, "Driver does not advertise "
                   "encoder block size, using guessed defaults.\n");
        } else {
            priv->va_bs = block_size.value = attr.value;

            priv->ctu_size =
                1 << block_size.bits.log2_max_coding_tree_block_size_minus3 + 3;
            priv->min_cb_size =
                1 << block_size.bits.log2_min_luma_coding_block_size_minus3 + 3;
        }
    }
#endif

    if (!priv->ctu_size) {
        priv->ctu_size = 32;
        priv->min_cb_size = 16;
    }
    av_log(avctx, AV_LOG_VERBOSE, "Using CTU size %dx%d, "
           "min CB size %dx%d.\n", priv->ctu_size, priv->ctu_size,
           priv->min_cb_size, priv->min_cb_size);

    base_ctx->surface_width = FFALIGN(avctx->width, priv->min_cb_size);
    base_ctx->surface_height = FFALIGN(avctx->height, priv->min_cb_size);

    base_ctx->slice_block_width = base_ctx->slice_block_height = priv->ctu_size;

    return 0;
}
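
// Stream configuration: create the CBS writer and derive the fixed QP values
// used in CQP mode (placeholder values are still set for other rate-control
// modes, since they feed init_qp/slice_qp_delta).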
static av_cold int vaapi_encode_h265_configure(AVCodecContext *avctx)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    VAAPIEncodeH265Context *priv = avctx->priv_data;
    int err;

    err = ff_cbs_init(&priv->cbc, AV_CODEC_ID_HEVC, avctx);
    if (err < 0)
        return err;

    if (ctx->va_rc_mode == VA_RC_CQP) {
        // Note that VAAPI only supports positive QP values - the range is
        // therefore always bounded below by 1, even in 10-bit mode where
        // it should go down to -12.
        priv->fixed_qp_p = av_clip(ctx->rc_quality, 1, 51);
        if (avctx->i_quant_factor > 0.0)
            priv->unit_opts.fixed_qp_idr =
                av_clip((avctx->i_quant_factor * priv->fixed_qp_p +
                         avctx->i_quant_offset) + 0.5, 1, 51);
        else
            priv->unit_opts.fixed_qp_idr = priv->fixed_qp_p;
        if (avctx->b_quant_factor > 0.0)
            priv->fixed_qp_b =
                av_clip((avctx->b_quant_factor * priv->fixed_qp_p +
                         avctx->b_quant_offset) + 0.5, 1, 51);
        else
            priv->fixed_qp_b = priv->fixed_qp_p;

        av_log(avctx, AV_LOG_DEBUG, "Using fixed QP = "
               "%d / %d / %d for IDR- / P- / B-frames.\n",
               priv->unit_opts.fixed_qp_idr, priv->fixed_qp_p, priv->fixed_qp_b);
    } else {
        // These still need to be set for init_qp/slice_qp_delta.
        priv->unit_opts.fixed_qp_idr = 30;
        priv->fixed_qp_p = 30;
        priv->fixed_qp_b = 30;
    }

    ctx->roi_quant_range = 51 + 6 * (ctx->profile->depth - 8);

    return 0;
}

static const VAAPIEncodeProfile vaapi_encode_h265_profiles[] = {
    { AV_PROFILE_HEVC_MAIN,     8, 3, 1, 1, VAProfileHEVCMain       },
    { AV_PROFILE_HEVC_REXT,     8, 3, 1, 1, VAProfileHEVCMain       },
#if VA_CHECK_VERSION(0, 37, 0)
    { AV_PROFILE_HEVC_MAIN_10, 10, 3, 1, 1, VAProfileHEVCMain10     },
    { AV_PROFILE_HEVC_REXT,    10, 3, 1, 1, VAProfileHEVCMain10     },
#endif
#if VA_CHECK_VERSION(1, 2, 0)
    { AV_PROFILE_HEVC_REXT,    12, 3, 1, 1, VAProfileHEVCMain12     },
    { AV_PROFILE_HEVC_REXT,     8, 3, 1, 0, VAProfileHEVCMain422_10 },
    { AV_PROFILE_HEVC_REXT,    10, 3, 1, 0, VAProfileHEVCMain422_10 },
    { AV_PROFILE_HEVC_REXT,    12, 3, 1, 0, VAProfileHEVCMain422_12 },
    { AV_PROFILE_HEVC_REXT,     8, 3, 0, 0, VAProfileHEVCMain444    },
    { AV_PROFILE_HEVC_REXT,    10, 3, 0, 0, VAProfileHEVCMain444_10 },
    { AV_PROFILE_HEVC_REXT,    12, 3, 0, 0, VAProfileHEVCMain444_12 },
#endif
    { AV_PROFILE_UNKNOWN }
};

static const VAAPIEncodeType vaapi_encode_type_h265 = {
    .profiles = vaapi_encode_h265_profiles,

    .flags = FF_HW_FLAG_SLICE_CONTROL |
             FF_HW_FLAG_B_PICTURES |
             FF_HW_FLAG_B_PICTURE_REFERENCES |
             FF_HW_FLAG_NON_IDR_KEY_PICTURES,

    .default_quality = 25,

    .get_encoder_caps = &vaapi_encode_h265_get_encoder_caps,
    .configure = &vaapi_encode_h265_configure,

    .picture_priv_data_size = sizeof(VAAPIEncodeH265Picture),

    .sequence_params_size = sizeof(VAEncSequenceParameterBufferHEVC),
    .init_sequence_params = &vaapi_encode_h265_init_sequence_params,

    .picture_params_size = sizeof(VAEncPictureParameterBufferHEVC),
    .init_picture_params = &vaapi_encode_h265_init_picture_params,

    .slice_params_size = sizeof(VAEncSliceParameterBufferHEVC),
    .init_slice_params = &vaapi_encode_h265_init_slice_params,

    .sequence_header_type = VAEncPackedHeaderSequence,
    .write_sequence_header = &vaapi_encode_h265_write_sequence_header,

    .slice_header_type = VAEncPackedHeaderHEVC_Slice,
    .write_slice_header = &vaapi_encode_h265_write_slice_header,

    .write_extra_header = &vaapi_encode_h265_write_extra_header,
};
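
// Encoder init: hook up the codec callbacks, validate the profile/level/QP
// options and request packed parameter-set, slice and SEI headers before
// running the common VAAPI init.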
static av_cold int vaapi_encode_h265_init(AVCodecContext *avctx)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    VAAPIEncodeH265Context *priv = avctx->priv_data;

    ctx->codec = &vaapi_encode_type_h265;

    if (avctx->profile == AV_PROFILE_UNKNOWN)
        avctx->profile = priv->profile;
    if (avctx->level == AV_LEVEL_UNKNOWN)
        avctx->level = priv->level;

    if (avctx->level != AV_LEVEL_UNKNOWN && avctx->level & ~0xff) {
        av_log(avctx, AV_LOG_ERROR, "Invalid level %d: must fit "
               "in 8-bit unsigned integer.\n", avctx->level);
        return AVERROR(EINVAL);
    }

    ctx->desired_packed_headers =
        VA_ENC_PACKED_HEADER_SEQUENCE | // VPS, SPS and PPS.
        VA_ENC_PACKED_HEADER_SLICE |    // Slice headers.
        VA_ENC_PACKED_HEADER_MISC;      // SEI

    if (priv->qp > 0)
        ctx->explicit_qp = priv->qp;

    return ff_vaapi_encode_init(avctx);
}

static av_cold int vaapi_encode_h265_close(AVCodecContext *avctx)
{
    VAAPIEncodeH265Context *priv = avctx->priv_data;

    ff_cbs_fragment_free(&priv->current_access_unit);
    ff_cbs_close(&priv->cbc);
    av_freep(&priv->sei_a53cc_data);

    return ff_vaapi_encode_close(avctx);
}
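
// Encoder options: per-encoder settings (qp, aud, profile, tier, level, sei,
// tiles) on top of the common base-encode and VAAPI option sets, followed by
// the codec defaults and registration.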
#define OFFSET(x) offsetof(VAAPIEncodeH265Context, x)
#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM)
static const AVOption vaapi_encode_h265_options[] = {
    HW_BASE_ENCODE_COMMON_OPTIONS,
    VAAPI_ENCODE_COMMON_OPTIONS,
    VAAPI_ENCODE_RC_OPTIONS,

    { "qp", "Constant QP (for P-frames; scaled by qfactor/qoffset for I/B)",
      OFFSET(qp), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 52, FLAGS },

    { "aud", "Include AUD",
      OFFSET(aud), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },

    { "profile", "Set profile (general_profile_idc)",
      OFFSET(profile), AV_OPT_TYPE_INT,
      { .i64 = AV_PROFILE_UNKNOWN }, AV_PROFILE_UNKNOWN, 0xff, FLAGS, .unit = "profile" },

#define PROFILE(name, value) name, NULL, 0, AV_OPT_TYPE_CONST, \
      { .i64 = value }, 0, 0, FLAGS, .unit = "profile"
    { PROFILE("main",   AV_PROFILE_HEVC_MAIN) },
    { PROFILE("main10", AV_PROFILE_HEVC_MAIN_10) },
    { PROFILE("rext",   AV_PROFILE_HEVC_REXT) },
#undef PROFILE

    { "tier", "Set tier (general_tier_flag)",
      OFFSET(unit_opts.tier), AV_OPT_TYPE_INT,
      { .i64 = 0 }, 0, 1, FLAGS, .unit = "tier" },
    { "main", NULL, 0, AV_OPT_TYPE_CONST,
      { .i64 = 0 }, 0, 0, FLAGS, .unit = "tier" },
    { "high", NULL, 0, AV_OPT_TYPE_CONST,
      { .i64 = 1 }, 0, 0, FLAGS, .unit = "tier" },

    { "level", "Set level (general_level_idc)",
      OFFSET(level), AV_OPT_TYPE_INT,
      { .i64 = AV_LEVEL_UNKNOWN }, AV_LEVEL_UNKNOWN, 0xff, FLAGS, .unit = "level" },

#define LEVEL(name, value) name, NULL, 0, AV_OPT_TYPE_CONST, \
      { .i64 = value }, 0, 0, FLAGS, .unit = "level"
    { LEVEL("1",    30) },
    { LEVEL("2",    60) },
    { LEVEL("2.1",  63) },
    { LEVEL("3",    90) },
    { LEVEL("3.1",  93) },
    { LEVEL("4",   120) },
    { LEVEL("4.1", 123) },
    { LEVEL("5",   150) },
    { LEVEL("5.1", 153) },
    { LEVEL("5.2", 156) },
    { LEVEL("6",   180) },
    { LEVEL("6.1", 183) },
    { LEVEL("6.2", 186) },
#undef LEVEL

    { "sei", "Set SEI to include",
      OFFSET(sei), AV_OPT_TYPE_FLAGS,
      { .i64 = SEI_MASTERING_DISPLAY | SEI_CONTENT_LIGHT_LEVEL | SEI_A53_CC },
      0, INT_MAX, FLAGS, .unit = "sei" },
    { "hdr",
      "Include HDR metadata for mastering display colour volume "
      "and content light level information",
      0, AV_OPT_TYPE_CONST,
      { .i64 = SEI_MASTERING_DISPLAY | SEI_CONTENT_LIGHT_LEVEL },
      INT_MIN, INT_MAX, FLAGS, .unit = "sei" },
    { "a53_cc",
      "Include A/53 caption data",
      0, AV_OPT_TYPE_CONST,
      { .i64 = SEI_A53_CC },
      INT_MIN, INT_MAX, FLAGS, .unit = "sei" },

    { "tiles", "Tile columns x rows",
      OFFSET(common.tile_cols), AV_OPT_TYPE_IMAGE_SIZE,
      { .str = NULL }, 0, 0, FLAGS },

    { NULL },
};

static const FFCodecDefault vaapi_encode_h265_defaults[] = {
    { "b",         "0"   },
    { "bf",        "2"   },
    { "g",         "120" },
    { "i_qfactor", "1"   },
    { "i_qoffset", "0"   },
    { "b_qfactor", "6/5" },
    { "b_qoffset", "0"   },
    { "qmin",      "-1"  },
    { "qmax",      "-1"  },
    { NULL },
};

static const AVClass vaapi_encode_h265_class = {
    .class_name = "h265_vaapi",
    .item_name  = av_default_item_name,
    .option     = vaapi_encode_h265_options,
    .version    = LIBAVUTIL_VERSION_INT,
};

const FFCodec ff_hevc_vaapi_encoder = {
    .p.name         = "hevc_vaapi",
    CODEC_LONG_NAME("H.265/HEVC (VAAPI)"),
    .p.type         = AVMEDIA_TYPE_VIDEO,
    .p.id           = AV_CODEC_ID_HEVC,
    .priv_data_size = sizeof(VAAPIEncodeH265Context),
    .init           = &vaapi_encode_h265_init,
    FF_CODEC_RECEIVE_PACKET_CB(&ff_vaapi_encode_receive_packet),
    .close          = &vaapi_encode_h265_close,
    .p.priv_class   = &vaapi_encode_h265_class,
    .p.capabilities = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_HARDWARE |
                      AV_CODEC_CAP_DR1 | AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE,
    .caps_internal  = FF_CODEC_CAP_NOT_INIT_THREADSAFE |
                      FF_CODEC_CAP_INIT_CLEANUP,
    .defaults       = vaapi_encode_h265_defaults,
    .p.pix_fmts     = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_VAAPI,
        AV_PIX_FMT_NONE,
    },
    .color_ranges   = AVCOL_RANGE_MPEG | AVCOL_RANGE_JPEG,
    .hw_configs     = ff_vaapi_encode_hw_configs,
    .p.wrapper_name = "vaapi",
};