
Merge commit '716d413c13981da15323c7a3821860536eefdbbb'

* commit '716d413c13981da15323c7a3821860536eefdbbb':
  Replace PIX_FMT_* -> AV_PIX_FMT_*, PixelFormat -> AVPixelFormat

Conflicts:
	doc/examples/muxing.c
	ffmpeg.h
	ffmpeg_filter.c
	ffmpeg_opt.c
	ffplay.c
	ffprobe.c
	libavcodec/8bps.c
	libavcodec/aasc.c
	libavcodec/aura.c
	libavcodec/avcodec.h
	libavcodec/avs.c
	libavcodec/bfi.c
	libavcodec/bmp.c
	libavcodec/bmpenc.c
	libavcodec/c93.c
	libavcodec/cscd.c
	libavcodec/cyuv.c
	libavcodec/dpx.c
	libavcodec/dpxenc.c
	libavcodec/eatgv.c
	libavcodec/escape124.c
	libavcodec/ffv1.c
	libavcodec/flashsv.c
	libavcodec/fraps.c
	libavcodec/h264.c
	libavcodec/huffyuv.c
	libavcodec/iff.c
	libavcodec/imgconvert.c
	libavcodec/indeo3.c
	libavcodec/kmvc.c
	libavcodec/libopenjpegdec.c
	libavcodec/libopenjpegenc.c
	libavcodec/libx264.c
	libavcodec/ljpegenc.c
	libavcodec/mjpegdec.c
	libavcodec/mjpegenc.c
	libavcodec/motionpixels.c
	libavcodec/mpeg12.c
	libavcodec/mpeg12enc.c
	libavcodec/mpeg4videodec.c
	libavcodec/mpegvideo_enc.c
	libavcodec/pamenc.c
	libavcodec/pcxenc.c
	libavcodec/pgssubdec.c
	libavcodec/pngdec.c
	libavcodec/pngenc.c
	libavcodec/pnm.c
	libavcodec/pnmdec.c
	libavcodec/pnmenc.c
	libavcodec/ptx.c
	libavcodec/qdrw.c
	libavcodec/qpeg.c
	libavcodec/qtrleenc.c
	libavcodec/raw.c
	libavcodec/rawdec.c
	libavcodec/rl2.c
	libavcodec/sgidec.c
	libavcodec/sgienc.c
	libavcodec/snowdec.c
	libavcodec/snowenc.c
	libavcodec/sunrast.c
	libavcodec/targa.c
	libavcodec/targaenc.c
	libavcodec/tiff.c
	libavcodec/tiffenc.c
	libavcodec/tmv.c
	libavcodec/truemotion2.c
	libavcodec/utils.c
	libavcodec/vb.c
	libavcodec/vp3.c
	libavcodec/wnv1.c
	libavcodec/xl.c
	libavcodec/xwddec.c
	libavcodec/xwdenc.c
	libavcodec/yop.c
	libavdevice/v4l2.c
	libavdevice/x11grab.c
	libavfilter/avfilter.c
	libavfilter/avfilter.h
	libavfilter/buffersrc.c
	libavfilter/drawutils.c
	libavfilter/formats.c
	libavfilter/src_movie.c
	libavfilter/vf_ass.c
	libavfilter/vf_drawtext.c
	libavfilter/vf_fade.c
	libavfilter/vf_format.c
	libavfilter/vf_hflip.c
	libavfilter/vf_lut.c
	libavfilter/vf_overlay.c
	libavfilter/vf_pad.c
	libavfilter/vf_scale.c
	libavfilter/vf_transpose.c
	libavfilter/vf_yadif.c
	libavfilter/video.c
	libavfilter/vsrc_testsrc.c
	libavformat/movenc.c
	libavformat/mxf.h
	libavformat/utils.c
	libavformat/yuv4mpeg.c
	libavutil/imgutils.c
	libavutil/pixdesc.c
	libswscale/input.c
	libswscale/output.c
	libswscale/swscale_internal.h
	libswscale/swscale_unscaled.c
	libswscale/utils.c
	libswscale/x86/swscale_template.c
	libswscale/x86/yuv2rgb.c
	libswscale/x86/yuv2rgb_template.c
	libswscale/yuv2rgb.c

Merged-by: Michael Niedermayer <michaelni@gmx.at>
Michael Niedermayer, parent commit ac627b3d38
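
The change recorded here is a mechanical rename of the public pixel-format API: the unprefixed PIX_FMT_* constants and the PixelFormat enum become AV_PIX_FMT_* and AVPixelFormat. A minimal before/after sketch of the pattern applied throughout this diff (the variable names are illustrative, and the old spellings remained available for a while only as deprecated aliases):

    /* old spelling, prior to this merge */
    enum PixelFormat   fmt_old = PIX_FMT_YUV420P;

    /* new spelling introduced by this merge */
    enum AVPixelFormat fmt_new = AV_PIX_FMT_YUV420P;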

+ 4 - 4
cmdutils.c

@@ -851,8 +851,8 @@ static void print_codec(const AVCodec *c)
         }
         printf("\n");
     }
-    PRINT_CODEC_SUPPORTED(c, pix_fmts, enum PixelFormat, "pixel formats",
-                          PIX_FMT_NONE, GET_PIX_FMT_NAME);
+    PRINT_CODEC_SUPPORTED(c, pix_fmts, enum AVPixelFormat, "pixel formats",
+                          AV_PIX_FMT_NONE, GET_PIX_FMT_NAME);
     PRINT_CODEC_SUPPORTED(c, supported_samplerates, int, "sample rates", 0,
                           GET_SAMPLE_RATE_NAME);
     PRINT_CODEC_SUPPORTED(c, sample_fmts, enum AVSampleFormat, "sample formats",
@@ -1094,7 +1094,7 @@ int show_filters(void *optctx, const char *opt, const char *arg)
 
 int show_pix_fmts(void *optctx, const char *opt, const char *arg)
 {
-    enum PixelFormat pix_fmt;
+    enum AVPixelFormat pix_fmt;
 
     printf("Pixel formats:\n"
            "I.... = Supported Input  format for conversion\n"
@@ -1110,7 +1110,7 @@ int show_pix_fmts(void *optctx, const char *opt, const char *arg)
 #   define sws_isSupportedOutput(x) 0
 #endif
 
-    for (pix_fmt = 0; pix_fmt < PIX_FMT_NB; pix_fmt++) {
+    for (pix_fmt = 0; pix_fmt < AV_PIX_FMT_NB; pix_fmt++) {
         const AVPixFmtDescriptor *pix_desc = &av_pix_fmt_descriptors[pix_fmt];
         if(!pix_desc->name)
             continue;
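
The loop above indexes the av_pix_fmt_descriptors[] table directly up to AV_PIX_FMT_NB. As a hedged alternative, later libavutil versions expose an iterator over the same descriptor table; a sketch, assuming a libavutil recent enough to provide av_pix_fmt_desc_next() and av_pix_fmt_desc_get_id():

    #include <stdio.h>
    #include <libavutil/pixdesc.h>

    /* walk every known pixel format via the descriptor iterator */
    static void list_pix_fmts(void)
    {
        const AVPixFmtDescriptor *desc = NULL;
        while ((desc = av_pix_fmt_desc_next(desc))) {
            enum AVPixelFormat fmt = av_pix_fmt_desc_get_id(desc);
            printf("%-16s id=%d components=%d\n",
                   desc->name, (int)fmt, desc->nb_components);
        }
    }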

+ 1 - 1
cmdutils.h

@@ -426,7 +426,7 @@ typedef struct FrameBuffer {
     int  linesize[4];
 
     int h, w;
-    enum PixelFormat pix_fmt;
+    enum AVPixelFormat pix_fmt;
 
     int refcount;
     struct FrameBuffer **pool;  ///< head of the buffer pool

+ 1 - 1
doc/examples/decoding_encoding.c

@@ -356,7 +356,7 @@ static void video_encode_example(const char *filename, int codec_id)
     c->time_base= (AVRational){1,25};
     c->gop_size = 10; /* emit one intra frame every ten frames */
     c->max_b_frames=1;
-    c->pix_fmt = PIX_FMT_YUV420P;
+    c->pix_fmt = AV_PIX_FMT_YUV420P;
 
     if(codec_id == AV_CODEC_ID_H264)
         av_opt_set(c->priv_data, "preset", "slow", 0);
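
For context, the constant above is consumed when the encoder context is opened; a minimal sketch of that setup, assuming MPEG-1 as the codec and omitting real error handling (both are illustrative, not taken from the file):

    #include <libavcodec/avcodec.h>

    AVCodec        *codec = avcodec_find_encoder(AV_CODEC_ID_MPEG1VIDEO);
    AVCodecContext *c     = avcodec_alloc_context3(codec);

    c->width     = 352;
    c->height    = 288;
    c->time_base = (AVRational){1, 25};
    c->gop_size  = 10;
    c->pix_fmt   = AV_PIX_FMT_YUV420P;   /* renamed constant from this commit */

    if (avcodec_open2(c, codec, NULL) < 0) {
        /* handle the error */
    }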

+ 1 - 1
doc/examples/filtering_video.c

@@ -87,7 +87,7 @@ static int init_filters(const char *filters_descr)
     AVFilter *buffersink = avfilter_get_by_name("ffbuffersink");
     AVFilterInOut *outputs = avfilter_inout_alloc();
     AVFilterInOut *inputs  = avfilter_inout_alloc();
-    enum PixelFormat pix_fmts[] = { PIX_FMT_GRAY8, PIX_FMT_NONE };
+    enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_GRAY8, AV_PIX_FMT_NONE };
     AVBufferSinkParams *buffersink_params;
 
     filter_graph = avfilter_graph_alloc();
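
The pix_fmts array above is the usual AV_PIX_FMT_NONE-terminated list handed to the buffer sink. A hedged sketch of how the example wires it up, assuming the AVBufferSinkParams API of this era (later FFmpeg versions replaced it with option-based configuration):

    #include <libavfilter/avfilter.h>
    #include <libavfilter/buffersink.h>

    AVFilterGraph      *filter_graph   = avfilter_graph_alloc();
    AVFilter           *buffersink     = avfilter_get_by_name("ffbuffersink");
    AVFilterContext    *buffersink_ctx = NULL;
    AVBufferSinkParams *params         = av_buffersink_params_alloc();
    enum AVPixelFormat  pix_fmts[]     = { AV_PIX_FMT_GRAY8, AV_PIX_FMT_NONE };
    int ret;

    /* the sink accepts only the formats in this sentinel-terminated list */
    params->pixel_fmts = pix_fmts;
    ret = avfilter_graph_create_filter(&buffersink_ctx, buffersink, "out",
                                       NULL, params, filter_graph);
    av_free(params);
    if (ret < 0) {
        /* handle the error */
    }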

+ 5 - 5
doc/examples/muxing.c

@@ -41,7 +41,7 @@
 #define STREAM_DURATION   200.0
 #define STREAM_FRAME_RATE 25 /* 25 images/s */
 #define STREAM_NB_FRAMES  ((int)(STREAM_DURATION * STREAM_FRAME_RATE))
-#define STREAM_PIX_FMT    PIX_FMT_YUV420P /* default pix_fmt */
+#define STREAM_PIX_FMT    AV_PIX_FMT_YUV420P /* default pix_fmt */
 
 static int sws_flags = SWS_BICUBIC;
 
@@ -277,8 +277,8 @@ static void open_video(AVFormatContext *oc, AVCodec *codec, AVStream *st)
     /* If the output format is not YUV420P, then a temporary YUV420P
      * picture is needed too. It is then converted to the required
      * output format. */
-    if (c->pix_fmt != PIX_FMT_YUV420P) {
-        ret = avpicture_alloc(&src_picture, PIX_FMT_YUV420P, c->width, c->height);
+    if (c->pix_fmt != AV_PIX_FMT_YUV420P) {
+        ret = avpicture_alloc(&src_picture, AV_PIX_FMT_YUV420P, c->width, c->height);
         if (ret < 0) {
             fprintf(stderr, "Could not allocate temporary picture\n");
             exit(1);
@@ -322,11 +322,11 @@ static void write_video_frame(AVFormatContext *oc, AVStream *st)
          * frames if using B-frames, so we get the last frames by
          * passing the same picture again. */
     } else {
-        if (c->pix_fmt != PIX_FMT_YUV420P) {
+        if (c->pix_fmt != AV_PIX_FMT_YUV420P) {
             /* as we only generate a YUV420P picture, we must convert it
              * to the codec pixel format if needed */
             if (!sws_ctx) {
-                sws_ctx = sws_getContext(c->width, c->height, PIX_FMT_YUV420P,
+                sws_ctx = sws_getContext(c->width, c->height, AV_PIX_FMT_YUV420P,
                                          c->width, c->height, c->pix_fmt,
                                          sws_flags, NULL, NULL, NULL);
                 if (!sws_ctx) {
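
The hunk above lazily creates a scaler to convert the generated YUV420P picture into whatever pixel format the encoder expects. A compact sketch of that conversion path, assuming c, src_picture and dst_picture from the surrounding example file:

    #include <libswscale/swscale.h>

    struct SwsContext *sws_ctx =
        sws_getContext(c->width, c->height, AV_PIX_FMT_YUV420P,   /* source  */
                       c->width, c->height, c->pix_fmt,           /* encoder */
                       SWS_BICUBIC, NULL, NULL, NULL);
    if (!sws_ctx) {
        /* conversion between these formats is not supported */
    }

    /* convert the YUV420P source picture into the encoder's format */
    sws_scale(sws_ctx, (const uint8_t * const *)src_picture.data,
              src_picture.linesize, 0, c->height,
              dst_picture.data, dst_picture.linesize);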

+ 1 - 1
doc/examples/scaling_video.c

@@ -55,7 +55,7 @@ int main(int argc, char **argv)
     uint8_t *src_data[4], *dst_data[4];
     int src_linesize[4], dst_linesize[4];
     int src_w = 320, src_h = 240, dst_w, dst_h;
-    enum PixelFormat src_pix_fmt = PIX_FMT_YUV420P, dst_pix_fmt = PIX_FMT_RGB24;
+    enum AVPixelFormat src_pix_fmt = AV_PIX_FMT_YUV420P, dst_pix_fmt = AV_PIX_FMT_RGB24;
     const char *dst_size = NULL;
     const char *dst_filename = NULL;
     FILE *dst_file;
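
A short sketch of the buffer handling behind this example: av_image_alloc() sizes and allocates the planes for a given AVPixelFormat, and one av_freep() on the first plane releases the whole image (dimensions and alignment here are assumptions):

    #include <libavutil/imgutils.h>

    uint8_t *src_data[4];
    int      src_linesize[4];

    /* allocate a 320x240 YUV420P image, 16-byte aligned */
    int ret = av_image_alloc(src_data, src_linesize,
                             320, 240, AV_PIX_FMT_YUV420P, 16);
    if (ret < 0) {
        /* allocation failed */
    }

    /* ... fill and use the planes ... */
    av_freep(&src_data[0]);   /* frees the single backing buffer */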

+ 1 - 1
doc/filters.texi

@@ -3738,7 +3738,7 @@ will instruct the source to accept video frames with size 320x240 and
 with format "yuv410p", assuming 1/24 as the timestamps timebase and
 square pixels (1:1 sample aspect ratio).
 Since the pixel format with name "yuv410p" corresponds to the number 6
-(check the enum PixelFormat definition in @file{libavutil/pixfmt.h}),
+(check the enum AVPixelFormat definition in @file{libavutil/pixfmt.h}),
 this example corresponds to:
 @example
 buffer=size=320x240:pixfmt=6:time_base=1/24:pixel_aspect=1/1
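
Rather than hard-coding the number 6, the numeric value can be looked up from the format name at run time; a small sketch (the printed number depends on the libavutil build, so treat it as a lookup, not a stable constant):

    #include <stdio.h>
    #include <libavutil/pixdesc.h>

    int main(void)
    {
        enum AVPixelFormat fmt = av_get_pix_fmt("yuv410p");
        if (fmt == AV_PIX_FMT_NONE)
            return 1;
        /* prints the value to pass as pixfmt=N in the buffer source args */
        printf("yuv410p -> %d (%s)\n", (int)fmt, av_get_pix_fmt_name(fmt));
        return 0;
    }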

+ 1 - 1
ffmpeg.h

@@ -398,7 +398,7 @@ void assert_avoptions(AVDictionary *m);
 
 int guess_input_channel_layout(InputStream *ist);
 
-enum PixelFormat choose_pixel_fmt(AVStream *st, AVCodec *codec, enum PixelFormat target);
+enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodec *codec, enum AVPixelFormat target);
 void choose_sample_fmt(AVStream *st, AVCodec *codec);
 
 int configure_filtergraph(FilterGraph *fg);

+ 21 - 21
ffmpeg_filter.c

@@ -33,27 +33,27 @@
 #include "libavutil/imgutils.h"
 #include "libavutil/samplefmt.h"
 
-enum PixelFormat choose_pixel_fmt(AVStream *st, AVCodec *codec, enum PixelFormat target)
+enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodec *codec, enum AVPixelFormat target)
 {
     if (codec && codec->pix_fmts) {
-        const enum PixelFormat *p = codec->pix_fmts;
+        const enum AVPixelFormat *p = codec->pix_fmts;
         int has_alpha= av_pix_fmt_descriptors[target].nb_components % 2 == 0;
-        enum PixelFormat best= PIX_FMT_NONE;
+        enum AVPixelFormat best= AV_PIX_FMT_NONE;
         if (st->codec->strict_std_compliance <= FF_COMPLIANCE_UNOFFICIAL) {
             if (st->codec->codec_id == AV_CODEC_ID_MJPEG) {
-                p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUV420P, PIX_FMT_YUV422P, PIX_FMT_NONE };
+                p = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_NONE };
             } else if (st->codec->codec_id == AV_CODEC_ID_LJPEG) {
-                p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUVJ444P, PIX_FMT_YUV420P,
-                                                 PIX_FMT_YUV422P, PIX_FMT_YUV444P, PIX_FMT_BGRA, PIX_FMT_NONE };
+                p = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUV420P,
+                                                 AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_BGRA, AV_PIX_FMT_NONE };
             }
         }
-        for (; *p != PIX_FMT_NONE; p++) {
+        for (; *p != AV_PIX_FMT_NONE; p++) {
             best= avcodec_find_best_pix_fmt_of_2(best, *p, target, has_alpha, NULL);
             if (*p == target)
                 break;
         }
-        if (*p == PIX_FMT_NONE) {
-            if (target != PIX_FMT_NONE)
+        if (*p == AV_PIX_FMT_NONE) {
+            if (target != AV_PIX_FMT_NONE)
                 av_log(NULL, AV_LOG_WARNING,
                        "Incompatible pixel format '%s' for codec '%s', auto-selecting format '%s'\n",
                        av_pix_fmt_descriptors[target].name,
@@ -93,14 +93,14 @@ static char *choose_pix_fmts(OutputStream *ost)
         if (ost->filter)
             avfilter_graph_set_auto_convert(ost->filter->graph->graph,
                                             AVFILTER_AUTO_CONVERT_NONE);
-        if (ost->st->codec->pix_fmt == PIX_FMT_NONE)
+        if (ost->st->codec->pix_fmt == AV_PIX_FMT_NONE)
             return NULL;
         return av_strdup(av_get_pix_fmt_name(ost->st->codec->pix_fmt));
     }
-    if (ost->st->codec->pix_fmt != PIX_FMT_NONE) {
+    if (ost->st->codec->pix_fmt != AV_PIX_FMT_NONE) {
         return av_strdup(av_get_pix_fmt_name(choose_pixel_fmt(ost->st, ost->enc, ost->st->codec->pix_fmt)));
     } else if (ost->enc && ost->enc->pix_fmts) {
-        const enum PixelFormat *p;
+        const enum AVPixelFormat *p;
         AVIOContext *s = NULL;
         uint8_t *ret;
         int len;
@@ -111,14 +111,14 @@ static char *choose_pix_fmts(OutputStream *ost)
         p = ost->enc->pix_fmts;
         if (ost->st->codec->strict_std_compliance <= FF_COMPLIANCE_UNOFFICIAL) {
             if (ost->st->codec->codec_id == AV_CODEC_ID_MJPEG) {
-                p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUV420P, PIX_FMT_YUV422P, PIX_FMT_NONE };
+                p = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_NONE };
             } else if (ost->st->codec->codec_id == AV_CODEC_ID_LJPEG) {
-                p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUVJ444P, PIX_FMT_YUV420P,
-                                                    PIX_FMT_YUV422P, PIX_FMT_YUV444P, PIX_FMT_BGRA, PIX_FMT_NONE };
+                p = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUV420P,
+                                                    AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_BGRA, AV_PIX_FMT_NONE };
             }
         }
 
-        for (; *p != PIX_FMT_NONE; p++) {
+        for (; *p != AV_PIX_FMT_NONE; p++) {
             const char *name = av_get_pix_fmt_name(*p);
             avio_printf(s, "%s:", name);
         }
@@ -159,7 +159,7 @@ static char *choose_ ## var ## s(OutputStream *ost)                            \
         return NULL;                                                           \
 }
 
-// DEF_CHOOSE_FORMAT(enum PixelFormat, pix_fmt, pix_fmts, PIX_FMT_NONE,
+// DEF_CHOOSE_FORMAT(enum AVPixelFormat, pix_fmt, pix_fmts, AV_PIX_FMT_NONE,
 //                   GET_PIX_FMT_NAME, ":")
 
 DEF_CHOOSE_FORMAT(enum AVSampleFormat, sample_fmt, sample_fmts,
@@ -526,17 +526,17 @@ static int sub2video_prepare(InputStream *ist)
     ist->sub2video.w = ist->st->codec->width  = w;
     ist->sub2video.h = ist->st->codec->height = h;
 
-    /* rectangles are PIX_FMT_PAL8, but we have no guarantee that the
+    /* rectangles are AV_PIX_FMT_PAL8, but we have no guarantee that the
        palettes for all rectangles are identical or compatible */
-    ist->st->codec->pix_fmt = PIX_FMT_RGB32;
+    ist->st->codec->pix_fmt = AV_PIX_FMT_RGB32;
 
-    ret = av_image_alloc(image, linesize, w, h, PIX_FMT_RGB32, 32);
+    ret = av_image_alloc(image, linesize, w, h, AV_PIX_FMT_RGB32, 32);
     if (ret < 0)
         return ret;
     memset(image[0], 0, h * linesize[0]);
     ist->sub2video.ref = avfilter_get_video_buffer_ref_from_arrays(
             image, linesize, AV_PERM_READ | AV_PERM_PRESERVE,
-            w, h, PIX_FMT_RGB32);
+            w, h, AV_PIX_FMT_RGB32);
     if (!ist->sub2video.ref) {
         av_free(image[0]);
         return AVERROR(ENOMEM);
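
choose_pixel_fmt() above walks the encoder's AV_PIX_FMT_NONE-terminated pix_fmts list and falls back to the closest supported format. A reduced sketch of the same scan; unlike the real function, the fallback here simply takes the encoder's first listed format rather than a quality-ranked choice:

    #include <libavcodec/avcodec.h>

    /* return 'target' if the encoder supports it, otherwise its first format */
    static enum AVPixelFormat pick_pix_fmt(const AVCodec *codec,
                                           enum AVPixelFormat target)
    {
        const enum AVPixelFormat *p;

        if (!codec || !codec->pix_fmts)
            return target;
        for (p = codec->pix_fmts; *p != AV_PIX_FMT_NONE; p++)
            if (*p == target)
                return target;
        return codec->pix_fmts[0];
    }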

+ 1 - 1
ffmpeg_opt.c

@@ -1078,7 +1078,7 @@ static OutputStream *new_video_stream(OptionsContext *o, AVFormatContext *oc, in
             if (!*++frame_pix_fmt)
                 frame_pix_fmt = NULL;
         }
-        if (frame_pix_fmt && (video_enc->pix_fmt = av_get_pix_fmt(frame_pix_fmt)) == PIX_FMT_NONE) {
+        if (frame_pix_fmt && (video_enc->pix_fmt = av_get_pix_fmt(frame_pix_fmt)) == AV_PIX_FMT_NONE) {
             av_log(NULL, AV_LOG_FATAL, "Unknown pixel format requested: %s.\n", frame_pix_fmt);
             exit(1);
         }

Some files were not shown because too many files changed in this diff