@@ -49,6 +49,7 @@
 # include "libavfilter/avfilter.h"
 # include "libavfilter/avfiltergraph.h"
 # include "libavfilter/buffersink.h"
+# include "libavfilter/buffersrc.h"
 #endif

 #include <SDL.h>
@@ -227,7 +228,10 @@ typedef struct VideoState {
     int step;

 #if CONFIG_AVFILTER
+    AVFilterContext *in_video_filter;   ///< the first filter in the video chain
     AVFilterContext *out_video_filter;  ///< the last filter in the video chain
+    int use_dr1;
+    FrameBuffer *buffer_pool;
 #endif

     int refresh;
@@ -1545,222 +1549,29 @@ static int get_video_frame(VideoState *is, AVFrame *frame, int64_t *pts, AVPacke
 }

 #if CONFIG_AVFILTER
-typedef struct {
-    VideoState *is;
-    AVFrame *frame;
-    int use_dr1;
-} FilterPriv;
-
-static int input_get_buffer(AVCodecContext *codec, AVFrame *pic)
-{
-    AVFilterContext *ctx = codec->opaque;
-    AVFilterBufferRef *ref;
-    int perms = AV_PERM_WRITE;
-    int i, w, h, stride[AV_NUM_DATA_POINTERS];
-    unsigned edge;
-    int pixel_size;
-
-    av_assert0(codec->flags & CODEC_FLAG_EMU_EDGE);
-
-    if (codec->codec->capabilities & CODEC_CAP_NEG_LINESIZES)
-        perms |= AV_PERM_NEG_LINESIZES;
-
-    if (pic->buffer_hints & FF_BUFFER_HINTS_VALID) {
-        if (pic->buffer_hints & FF_BUFFER_HINTS_READABLE) perms |= AV_PERM_READ;
-        if (pic->buffer_hints & FF_BUFFER_HINTS_PRESERVE) perms |= AV_PERM_PRESERVE;
-        if (pic->buffer_hints & FF_BUFFER_HINTS_REUSABLE) perms |= AV_PERM_REUSE2;
-    }
-    if (pic->reference) perms |= AV_PERM_READ | AV_PERM_PRESERVE;
-
-    w = codec->width;
-    h = codec->height;
-
-    if(av_image_check_size(w, h, 0, codec) || codec->pix_fmt<0)
-        return -1;
-
-    avcodec_align_dimensions2(codec, &w, &h, stride);
-    edge = codec->flags & CODEC_FLAG_EMU_EDGE ? 0 : avcodec_get_edge_width();
-    w += edge << 1;
-    h += edge << 1;
-    if (codec->pix_fmt != ctx->outputs[0]->format) {
-        av_log(codec, AV_LOG_ERROR, "Pixel format mismatches %d %d\n", codec->pix_fmt, ctx->outputs[0]->format);
-        return -1;
-    }
-    if (!(ref = avfilter_get_video_buffer(ctx->outputs[0], perms, w, h)))
-        return -1;
-
-    pixel_size = av_pix_fmt_descriptors[ref->format].comp[0].step_minus1 + 1;
-    ref->video->w = codec->width;
-    ref->video->h = codec->height;
-    for (i = 0; i < 4; i ++) {
-        unsigned hshift = (i == 1 || i == 2) ? av_pix_fmt_descriptors[ref->format].log2_chroma_w : 0;
-        unsigned vshift = (i == 1 || i == 2) ? av_pix_fmt_descriptors[ref->format].log2_chroma_h : 0;
-
-        pic->base[i]     = ref->data[i];
-        if (ref->data[i]) {
-            ref->data[i] += ((edge * pixel_size) >> hshift) + ((edge * ref->linesize[i]) >> vshift);
-        }
-        pic->data[i]     = ref->data[i];
-        pic->linesize[i] = ref->linesize[i];
-    }
-    pic->opaque = ref;
-    pic->type   = FF_BUFFER_TYPE_USER;
-    pic->reordered_opaque = codec->reordered_opaque;
-    pic->width  = codec->width;
-    pic->height = codec->height;
-    pic->format = codec->pix_fmt;
-    pic->sample_aspect_ratio = codec->sample_aspect_ratio;
-    if (codec->pkt) pic->pkt_pts = codec->pkt->pts;
-    else            pic->pkt_pts = AV_NOPTS_VALUE;
-    return 0;
-}
-
-static void input_release_buffer(AVCodecContext *codec, AVFrame *pic)
-{
-    memset(pic->data, 0, sizeof(pic->data));
-    avfilter_unref_buffer(pic->opaque);
-}
-
-static int input_reget_buffer(AVCodecContext *codec, AVFrame *pic)
-{
-    AVFilterBufferRef *ref = pic->opaque;
-
-    if (pic->data[0] == NULL) {
-        pic->buffer_hints |= FF_BUFFER_HINTS_READABLE;
-        return codec->get_buffer(codec, pic);
-    }
-
-    if ((codec->width != ref->video->w) || (codec->height != ref->video->h) ||
-        (codec->pix_fmt != ref->format)) {
-        av_log(codec, AV_LOG_ERROR, "Picture properties changed.\n");
-        return -1;
-    }
-
-    pic->reordered_opaque = codec->reordered_opaque;
-    if (codec->pkt) pic->pkt_pts = codec->pkt->pts;
-    else            pic->pkt_pts = AV_NOPTS_VALUE;
-    return 0;
-}
-
-static int input_init(AVFilterContext *ctx, const char *args, void *opaque)
-{
-    FilterPriv *priv = ctx->priv;
-    AVCodecContext *codec;
-    if (!opaque) return -1;
-
-    priv->is = opaque;
-    codec = priv->is->video_st->codec;
-    codec->opaque = ctx;
-    if (codec->codec->capabilities & CODEC_CAP_DR1) {
-        av_assert0(codec->flags & CODEC_FLAG_EMU_EDGE);
-        priv->use_dr1 = 1;
-        codec->get_buffer     = input_get_buffer;
-        codec->release_buffer = input_release_buffer;
-        codec->reget_buffer   = input_reget_buffer;
-        codec->thread_safe_callbacks = 1;
-    }
-
-    priv->frame = avcodec_alloc_frame();
-
-    return 0;
-}
-
-static void input_uninit(AVFilterContext *ctx)
-{
-    FilterPriv *priv = ctx->priv;
-    av_free(priv->frame);
-}
-
-static int input_request_frame(AVFilterLink *link)
-{
-    FilterPriv *priv = link->src->priv;
-    AVFilterBufferRef *picref;
-    int64_t pts = 0;
-    AVPacket pkt;
-    int ret;
-
-    while (!(ret = get_video_frame(priv->is, priv->frame, &pts, &pkt)))
-        av_free_packet(&pkt);
-    if (ret < 0)
-        return -1;
-
-    if (priv->use_dr1 && priv->frame->opaque) {
-        picref = avfilter_ref_buffer(priv->frame->opaque, ~0);
-    } else {
-        picref = avfilter_get_video_buffer(link, AV_PERM_WRITE, priv->frame->width, priv->frame->height);
-        av_image_copy(picref->data, picref->linesize,
-                      (const uint8_t **)(void **)priv->frame->data, priv->frame->linesize,
-                      picref->format, priv->frame->width, priv->frame->height);
-    }
-    av_free_packet(&pkt);
-
-    avfilter_copy_frame_props(picref, priv->frame);
-    picref->video->sample_aspect_ratio = av_guess_sample_aspect_ratio(priv->is->ic, priv->is->video_st, priv->frame);
-    picref->pts = pts;
-
-    avfilter_start_frame(link, picref);
-    avfilter_draw_slice(link, 0, picref->video->h, 1);
-    avfilter_end_frame(link);
-
-    return 0;
-}
-
-static int input_query_formats(AVFilterContext *ctx)
-{
-    FilterPriv *priv = ctx->priv;
-    enum PixelFormat pix_fmts[] = {
-        priv->is->video_st->codec->pix_fmt, PIX_FMT_NONE
-    };
-
-    avfilter_set_common_pixel_formats(ctx, avfilter_make_format_list(pix_fmts));
-    return 0;
-}
-
-static int input_config_props(AVFilterLink *link)
-{
-    FilterPriv *priv = link->src->priv;
-    AVStream *s = priv->is->video_st;
-
-    link->w = s->codec->width;
-    link->h = s->codec->height;
-    link->sample_aspect_ratio = s->sample_aspect_ratio.num ?
-        s->sample_aspect_ratio : s->codec->sample_aspect_ratio;
-    link->time_base = s->time_base;
-
-    return 0;
-}
-
-static AVFilter input_filter =
-{
-    .name      = "ffplay_input",
-
-    .priv_size = sizeof(FilterPriv),
-
-    .init      = input_init,
-    .uninit    = input_uninit,
-
-    .query_formats = input_query_formats,
-
-    .inputs    = (AVFilterPad[]) {{ .name = NULL }},
-    .outputs   = (AVFilterPad[]) {{ .name = "default",
-                                    .type = AVMEDIA_TYPE_VIDEO,
-                                    .request_frame = input_request_frame,
-                                    .config_props  = input_config_props, },
-                                  { .name = NULL }},
-};
-
 static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const char *vfilters)
 {
     static const enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
     char sws_flags_str[128];
+    char buffersrc_args[256];
     int ret;
     AVBufferSinkParams *buffersink_params = av_buffersink_params_alloc();
-    AVFilterContext *filt_src = NULL, *filt_out = NULL, *filt_format;;
+    AVFilterContext *filt_src = NULL, *filt_out = NULL, *filt_format;
+    AVCodecContext *codec = is->video_st->codec;
+
     snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%d", sws_flags);
     graph->scale_sws_opts = av_strdup(sws_flags_str);

-    if ((ret = avfilter_graph_create_filter(&filt_src, &input_filter, "src",
-                                            NULL, is, graph)) < 0)
+    snprintf(buffersrc_args, sizeof(buffersrc_args), "%d:%d:%d:%d:%d:%d:%d",
+             codec->width, codec->height, codec->pix_fmt,
+             is->video_st->time_base.num, is->video_st->time_base.den,
+             codec->sample_aspect_ratio.num, codec->sample_aspect_ratio.den);
+
+
+    if ((ret = avfilter_graph_create_filter(&filt_src,
+                                            avfilter_get_by_name("buffer"),
+                                            "src", buffersrc_args, NULL,
+                                            graph)) < 0)
         return ret;

 #if FF_API_OLD_VSINK_API
@@ -1809,8 +1620,16 @@ static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const c
     if ((ret = avfilter_graph_config(graph, NULL)) < 0)
         return ret;

+    is->in_video_filter  = filt_src;
     is->out_video_filter = filt_out;

+    if (codec->codec->capabilities & CODEC_CAP_DR1) {
+        is->use_dr1 = 1;
+        codec->get_buffer     = codec_get_buffer;
+        codec->release_buffer = codec_release_buffer;
+        codec->opaque         = &is->buffer_pool;
+    }
+
     return ret;
 }

@@ -1826,7 +1645,7 @@ static int video_thread(void *arg)

 #if CONFIG_AVFILTER
     AVFilterGraph *graph = avfilter_graph_alloc();
-    AVFilterContext *filt_out = NULL;
+    AVFilterContext *filt_out = NULL, *filt_in = NULL;
     int last_w = is->video_st->codec->width;
     int last_h = is->video_st->codec->height;

@@ -1837,18 +1656,31 @@
         SDL_PushEvent(&event);
         goto the_end;
     }
+    filt_in  = is->in_video_filter;
     filt_out = is->out_video_filter;
 #endif

     for (;;) {
-#if !CONFIG_AVFILTER
         AVPacket pkt;
-#else
+#if CONFIG_AVFILTER
         AVFilterBufferRef *picref;
         AVRational tb = filt_out->inputs[0]->time_base;
 #endif
         while (is->paused && !is->videoq.abort_request)
             SDL_Delay(10);
+
+        ret = get_video_frame(is, frame, &pts_int, &pkt);
+        if (ret < 0)
+            goto the_end;
+        av_free_packet(&pkt);
+
+        if (!ret)
+            continue;
+
+        is->frame_last_filter_delay = av_gettime() / 1000000.0 - is->frame_last_returned_time;
+        if (fabs(is->frame_last_filter_delay) > AV_NOSYNC_THRESHOLD / 10.0)
+            is->frame_last_filter_delay = 0;
+
 #if CONFIG_AVFILTER
         if (   last_w != is->video_st->codec->width
             || last_h != is->video_st->codec->height) {
@@ -1862,48 +1694,55 @@
             last_w = is->video_st->codec->width;
             last_h = is->video_st->codec->height;
         }
-        ret = av_buffersink_get_buffer_ref(filt_out, &picref, 0);
-        if (picref) {
+
+        frame->pts = pts_int;
+        if (is->use_dr1) {
+            FrameBuffer       *buf = frame->opaque;
+            AVFilterBufferRef *fb  = avfilter_get_video_buffer_ref_from_arrays(
+                                         frame->data, frame->linesize,
+                                         AV_PERM_READ | AV_PERM_PRESERVE,
+                                         frame->width, frame->height,
+                                         frame->format);
+
+            avfilter_copy_frame_props(fb, frame);
+            fb->buf->priv = buf;
+            fb->buf->free = filter_release_buffer;
+
+            buf->refcount++;
+            av_buffersrc_buffer(filt_in, fb);
+
+        } else
+            av_buffersrc_write_frame(filt_in, frame);
+
+        while (ret >= 0) {
+            ret = av_buffersink_get_buffer_ref(filt_out, &picref, 0);
+            if (ret < 0) {
+                ret = 0;
+                break;
+            }
+
             avfilter_fill_frame_from_video_buffer_ref(frame, picref);
+
             pts_int = picref->pts;
             tb      = filt_out->inputs[0]->time_base;
             pos     = picref->pos;
             frame->opaque = picref;

-            ret = 1;
-        }
-
-        if (ret >= 0 && av_cmp_q(tb, is->video_st->time_base)) {
-            av_unused int64_t pts1 = pts_int;
-            pts_int = av_rescale_q(pts_int, tb, is->video_st->time_base);
-            av_dlog(NULL, "video_thread(): "
-                    "tb:%d/%d pts:%"PRId64" -> tb:%d/%d pts:%"PRId64"\n",
-                    tb.num, tb.den, pts1,
-                    is->video_st->time_base.num, is->video_st->time_base.den, pts_int);
+            if (av_cmp_q(tb, is->video_st->time_base)) {
+                av_unused int64_t pts1 = pts_int;
+                pts_int = av_rescale_q(pts_int, tb, is->video_st->time_base);
+                av_dlog(NULL, "video_thread(): "
+                        "tb:%d/%d pts:%"PRId64" -> tb:%d/%d pts:%"PRId64"\n",
+                        tb.num, tb.den, pts1,
+                        is->video_st->time_base.num, is->video_st->time_base.den, pts_int);
+            }
+            pts = pts_int * av_q2d(is->video_st->time_base);
+            ret = queue_picture(is, frame, pts, pos);
         }
 #else
-        ret = get_video_frame(is, frame, &pts_int, &pkt);
-        pos = pkt.pos;
-        av_free_packet(&pkt);
-        if (ret == 0)
-            continue;
-#endif
-
-        if (ret < 0)
-            goto the_end;
-
-        is->frame_last_filter_delay = av_gettime() / 1000000.0 - is->frame_last_returned_time;
-        if (fabs(is->frame_last_filter_delay) > AV_NOSYNC_THRESHOLD / 10.0)
-            is->frame_last_filter_delay = 0;
-
-#if CONFIG_AVFILTER
-        if (!picref)
-            continue;
-#endif
-
         pts = pts_int * av_q2d(is->video_st->time_base);
-
-        ret = queue_picture(is, frame, pts, pos);
+        ret = queue_picture(is, frame, pts, pkt.pos);
+#endif

         if (ret < 0)
             goto the_end;
@@ -2461,6 +2300,7 @@ static void stream_component_close(VideoState *is, int stream_index)

     ic->streams[stream_index]->discard = AVDISCARD_ALL;
     avcodec_close(avctx);
+    free_buffer_pool(&is->buffer_pool);
     switch (avctx->codec_type) {
     case AVMEDIA_TYPE_AUDIO:
         is->audio_st = NULL;