FFMS2 compile fix for ffmpeg revisions >18129. Probably breaks backwards compatibility so it doesn't compile with older revisions anymore, have fun.

Originally committed to SVN as r2772.
This commit is contained in:
Karl Blomster 2009-04-06 20:15:49 +00:00
parent 036da35fe8
commit 1e47a9d3e5
8 changed files with 22 additions and 69 deletions

View file

@@ -49,7 +49,7 @@ AvisynthVideoSource::AvisynthVideoSource(const char *SourceFile, int Track, Fram
} }
try { try {
InitOutputFormat(VP.PixelFormat, Env); InitOutputFormat(VP.VPixelFormat, Env);
} catch (AvisynthError &) { } catch (AvisynthError &) {
FFMS_DestroyVideoSource(VS); FFMS_DestroyVideoSource(VS);
throw; throw;
@@ -73,9 +73,9 @@ AvisynthVideoSource::~AvisynthVideoSource() {
FFMS_DestroyVideoSource(VS); FFMS_DestroyVideoSource(VS);
} }
void AvisynthVideoSource::InitOutputFormat(int CurrentFormat, IScriptEnvironment *Env) { void AvisynthVideoSource::InitOutputFormat(PixelFormat CurrentFormat, IScriptEnvironment *Env) {
int Loss; int Loss;
int BestFormat = avcodec_find_best_pix_fmt((1 << PIX_FMT_YUVJ420P) | (1 << PIX_FMT_YUV420P) | (1 << PIX_FMT_YUYV422) | (1 << PIX_FMT_RGB32) | (1 << PIX_FMT_BGR24), CurrentFormat, 1 /* Required to prevent pointless RGB32 => RGB24 conversion */, &Loss); PixelFormat BestFormat = avcodec_find_best_pix_fmt((1 << PIX_FMT_YUVJ420P) | (1 << PIX_FMT_YUV420P) | (1 << PIX_FMT_YUYV422) | (1 << PIX_FMT_RGB32) | (1 << PIX_FMT_BGR24), CurrentFormat, 1 /* Required to prevent pointless RGB32 => RGB24 conversion */, &Loss);
switch (BestFormat) { switch (BestFormat) {
case PIX_FMT_YUVJ420P: // stupid yv12 distinctions, also inexplicably completely undeniably incompatible with all other supported output formats case PIX_FMT_YUVJ420P: // stupid yv12 distinctions, also inexplicably completely undeniably incompatible with all other supported output formats

View file

@@ -37,11 +37,11 @@ private:
VideoInfo VI; VideoInfo VI;
VideoBase *VS; VideoBase *VS;
SwsContext *SWS; SwsContext *SWS;
int ConvertToFormat; PixelFormat ConvertToFormat;
int FPSNum; int FPSNum;
int FPSDen; int FPSDen;
void InitOutputFormat(int CurrentFormat, IScriptEnvironment *Env); void InitOutputFormat(PixelFormat CurrentFormat, IScriptEnvironment *Env);
PVideoFrame OutputFrame(const AVFrameLite *SrcPicture, IScriptEnvironment *Env); PVideoFrame OutputFrame(const AVFrameLite *SrcPicture, IScriptEnvironment *Env);
public: public:
AvisynthVideoSource(const char *SourceFile, int Track, FrameIndex *TrackIndices, int FPSNum, int FPSDen, const char *PP, int Threads, int SeekMode, IScriptEnvironment* Env, char *ErrorMsg, unsigned MsgSize); AvisynthVideoSource(const char *SourceFile, int Track, FrameIndex *TrackIndices, int FPSNum, int FPSDen, const char *PP, int Threads, int SeekMode, IScriptEnvironment* Env, char *ErrorMsg, unsigned MsgSize);

View file

@@ -22,6 +22,7 @@
#define FFMS_H #define FFMS_H
#include <stdint.h> #include <stdint.h>
#include <libavutil/pixfmt.h>
#ifdef __cplusplus #ifdef __cplusplus
# define EXTERN_C extern "C" # define EXTERN_C extern "C"
@@ -53,53 +54,6 @@ enum TrackType {
FFMS_TYPE_AUDIO = 1, FFMS_TYPE_AUDIO = 1,
}; };
// PixelFormat declarations from avutil.h so external libraries don't necessarily have to include ffmpeg headers
enum FFMS_PixelFormat {
FFMS_PIX_FMT_NONE= -1,
FFMS_PIX_FMT_YUV420P, ///< Planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
FFMS_PIX_FMT_YUYV422, ///< Packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
FFMS_PIX_FMT_RGB24, ///< Packed RGB 8:8:8, 24bpp, RGBRGB...
FFMS_PIX_FMT_BGR24, ///< Packed RGB 8:8:8, 24bpp, BGRBGR...
FFMS_PIX_FMT_YUV422P, ///< Planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
FFMS_PIX_FMT_YUV444P, ///< Planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
FFMS_PIX_FMT_RGB32, ///< Packed RGB 8:8:8, 32bpp, (msb)8A 8R 8G 8B(lsb), in cpu endianness
FFMS_PIX_FMT_YUV410P, ///< Planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
FFMS_PIX_FMT_YUV411P, ///< Planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
FFMS_PIX_FMT_RGB565, ///< Packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), in cpu endianness
FFMS_PIX_FMT_RGB555, ///< Packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), in cpu endianness most significant bit to 0
FFMS_PIX_FMT_GRAY8, ///< Y , 8bpp
FFMS_PIX_FMT_MONOWHITE, ///< Y , 1bpp, 0 is white, 1 is black
FFMS_PIX_FMT_MONOBLACK, ///< Y , 1bpp, 0 is black, 1 is white
FFMS_PIX_FMT_PAL8, ///< 8 bit with PIX_FMT_RGB32 palette
FFMS_PIX_FMT_YUVJ420P, ///< Planar YUV 4:2:0, 12bpp, full scale (jpeg)
FFMS_PIX_FMT_YUVJ422P, ///< Planar YUV 4:2:2, 16bpp, full scale (jpeg)
FFMS_PIX_FMT_YUVJ444P, ///< Planar YUV 4:4:4, 24bpp, full scale (jpeg)
FFMS_PIX_FMT_XVMC_MPEG2_MC,///< XVideo Motion Acceleration via common packet passing(xvmc_render.h)
FFMS_PIX_FMT_XVMC_MPEG2_IDCT,
FFMS_PIX_FMT_UYVY422, ///< Packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
FFMS_PIX_FMT_UYYVYY411, ///< Packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
FFMS_PIX_FMT_BGR32, ///< Packed RGB 8:8:8, 32bpp, (msb)8A 8B 8G 8R(lsb), in cpu endianness
FFMS_PIX_FMT_BGR565, ///< Packed RGB 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), in cpu endianness
FFMS_PIX_FMT_BGR555, ///< Packed RGB 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), in cpu endianness most significant bit to 1
FFMS_PIX_FMT_BGR8, ///< Packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
FFMS_PIX_FMT_BGR4, ///< Packed RGB 1:2:1, 4bpp, (msb)1B 2G 1R(lsb)
FFMS_PIX_FMT_BGR4_BYTE, ///< Packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb)
FFMS_PIX_FMT_RGB8, ///< Packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb)
FFMS_PIX_FMT_RGB4, ///< Packed RGB 1:2:1, 4bpp, (msb)2R 3G 3B(lsb)
FFMS_PIX_FMT_RGB4_BYTE, ///< Packed RGB 1:2:1, 8bpp, (msb)2R 3G 3B(lsb)
FFMS_PIX_FMT_NV12, ///< Planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 for UV
FFMS_PIX_FMT_NV21, ///< as above, but U and V bytes are swapped
FFMS_PIX_FMT_RGB32_1, ///< Packed RGB 8:8:8, 32bpp, (msb)8R 8G 8B 8A(lsb), in cpu endianness
FFMS_PIX_FMT_BGR32_1, ///< Packed RGB 8:8:8, 32bpp, (msb)8B 8G 8R 8A(lsb), in cpu endianness
FFMS_PIX_FMT_GRAY16BE, ///< Y , 16bpp, big-endian
FFMS_PIX_FMT_GRAY16LE, ///< Y , 16bpp, little-endian
FFMS_PIX_FMT_YUV440P, ///< Planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
FFMS_PIX_FMT_YUVJ440P, ///< Planar YUV 4:4:0 full scale (jpeg)
FFMS_PIX_FMT_YUVA420P, ///< Planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
};
// This is a subset of the original AVFrame only containing the most used parts. // This is a subset of the original AVFrame only containing the most used parts.
// Even if it might seem like a good idea to cast it back to a full AVFrame to // Even if it might seem like a good idea to cast it back to a full AVFrame to
// access a few more values you really shouldn't do that. Only the values present // access a few more values you really shouldn't do that. Only the values present
@@ -136,7 +90,7 @@ struct VideoProperties {
int FPSDenominator; int FPSDenominator;
int FPSNumerator; int FPSNumerator;
int NumFrames; int NumFrames;
int PixelFormat; PixelFormat VPixelFormat;
int SARNum; int SARNum;
int SARDen; int SARDen;
int CropTop; int CropTop;

View file

@@ -228,7 +228,7 @@ Note that --enable-w32threads is required for multithreaded decoding to work.
<ul> <ul>
<li>2.00 beta 6<ul> <li>2.00 beta 6<ul>
<li>Negative timecodes and other bugs caused by an integer overflow fixed</li> <li>Negative timecodes and other bugs caused by an integer overflow fixed</li>
<li>Updated FFmpeg to rev X</li> <li>Updated FFmpeg to rev X (once again compilation fixes for the changes)</li>
</ul></li> </ul></li>
<li>2.00 beta 5<ul> <li>2.00 beta 5<ul>
@@ -236,7 +236,6 @@ Note that --enable-w32threads is required for multithreaded decoding to work.
<li>Added missing variables and explanations to the manual</li> <li>Added missing variables and explanations to the manual</li>
<li>Can now directly be compiled as a library for use in *nix</li> <li>Can now directly be compiled as a library for use in *nix</li>
<li>Fixed the missing decimals in saved timecode files</li> <li>Fixed the missing decimals in saved timecode files</li>
<li>Updated FFmpeg to rev 18249</li>
</ul></li> </ul></li>
<li>2.00 beta 4<ul> <li>2.00 beta 4<ul>

View file

@@ -43,8 +43,8 @@ FFPP::FFPP(PClip AChild, const char *PP, IScriptEnvironment *Env) : GenericVideo
Flags |= PP_FORMAT_420; Flags |= PP_FORMAT_420;
} else if (vi.IsYUY2()) { } else if (vi.IsYUY2()) {
Flags |= PP_FORMAT_422; Flags |= PP_FORMAT_422;
SWSTo422P = sws_getContext(vi.width, vi.height, PIX_FMT_YUV422, vi.width, vi.height, PIX_FMT_YUV422P, GetCPUFlags() | SWS_BICUBIC, NULL, NULL, NULL); SWSTo422P = sws_getContext(vi.width, vi.height, PIX_FMT_YUYV422, vi.width, vi.height, PIX_FMT_YUV422P, GetCPUFlags() | SWS_BICUBIC, NULL, NULL, NULL);
SWSFrom422P = sws_getContext(vi.width, vi.height, PIX_FMT_YUV422P, vi.width, vi.height, PIX_FMT_YUV422, GetCPUFlags() | SWS_BICUBIC, NULL, NULL, NULL); SWSFrom422P = sws_getContext(vi.width, vi.height, PIX_FMT_YUV422P, vi.width, vi.height, PIX_FMT_YUYV422, GetCPUFlags() | SWS_BICUBIC, NULL, NULL, NULL);
avpicture_alloc(&InputPicture, PIX_FMT_YUV422P, vi.width, vi.height); avpicture_alloc(&InputPicture, PIX_FMT_YUV422P, vi.width, vi.height);
avpicture_alloc(&OutputPicture, PIX_FMT_YUV422P, vi.width, vi.height); avpicture_alloc(&OutputPicture, PIX_FMT_YUV422P, vi.width, vi.height);
} else { } else {

View file

@@ -21,7 +21,7 @@
#include "ffswscale.h" #include "ffswscale.h"
#include "utils.h" #include "utils.h"
int CSNameToPIXFMT(const char * ACSName, int ADefault) { PixelFormat CSNameToPIXFMT(const char * ACSName, PixelFormat ADefault) {
if (!_stricmp(ACSName, "")) if (!_stricmp(ACSName, ""))
return ADefault; return ADefault;
if (!_stricmp(ACSName, "YV12")) if (!_stricmp(ACSName, "YV12"))
@@ -67,7 +67,7 @@ SWScale::SWScale(PClip Child, int ResizeToWidth, int ResizeToHeight, const char
OrigHeight = vi.height; OrigHeight = vi.height;
FlipOutput = vi.IsYUV(); FlipOutput = vi.IsYUV();
int ConvertFromFormat = PIX_FMT_NONE; PixelFormat ConvertFromFormat = PIX_FMT_NONE;
if (vi.IsYV12()) if (vi.IsYV12())
ConvertFromFormat = PIX_FMT_YUV420P; ConvertFromFormat = PIX_FMT_YUV420P;
if (vi.IsYUY2()) if (vi.IsYUY2())
@@ -87,7 +87,7 @@ SWScale::SWScale(PClip Child, int ResizeToWidth, int ResizeToHeight, const char
else else
vi.width = ResizeToWidth; vi.width = ResizeToWidth;
int ConvertToFormat = CSNameToPIXFMT(ConvertToFormatName, ConvertFromFormat); PixelFormat ConvertToFormat = CSNameToPIXFMT(ConvertToFormatName, ConvertFromFormat);
if (ConvertToFormat == PIX_FMT_NONE) if (ConvertToFormat == PIX_FMT_NONE)
Env->ThrowError("SWScale: Invalid colorspace specified (%s)", ConvertToFormatName); Env->ThrowError("SWScale: Invalid colorspace specified (%s)", ConvertToFormatName);

View file

@@ -25,7 +25,7 @@
#define _snprintf snprintf #define _snprintf snprintf
#endif #endif
int VideoBase::InitPP(const char *PP, int PixelFormat, char *ErrorMsg, unsigned MsgSize) { int VideoBase::InitPP(const char *PP, PixelFormat PixelFormat, char *ErrorMsg, unsigned MsgSize) {
if (PP == NULL || !strcmp(PP, "")) if (PP == NULL || !strcmp(PP, ""))
return 0; return 0;
@@ -126,7 +126,7 @@ int VideoBase::SetOutputFormat(int TargetFormats, int Width, int Height, char *E
// if (OutputFormat == -1) // if (OutputFormat == -1)
// return -1; // return -1;
int OutputFormat = TargetFormats; PixelFormat OutputFormat = static_cast<PixelFormat>(TargetFormats);
SwsContext *NewSWS = NULL; SwsContext *NewSWS = NULL;
if (CodecContext->pix_fmt != OutputFormat || Width != CodecContext->width || Height != CodecContext->height) { if (CodecContext->pix_fmt != OutputFormat || Width != CodecContext->width || Height != CodecContext->height) {
@@ -144,7 +144,7 @@ int VideoBase::SetOutputFormat(int TargetFormats, int Width, int Height, char *E
VP.Height = Height; VP.Height = Height;
VP.Width = Width; VP.Width = Width;
VP.PixelFormat = OutputFormat; VP.VPixelFormat = OutputFormat;
// FIXME: In theory the allocations in this part could fail just like in InitPP but whatever // FIXME: In theory the allocations in this part could fail just like in InitPP but whatever
if (FinalFrame != PPFrame) { if (FinalFrame != PPFrame) {
@@ -154,7 +154,7 @@ int VideoBase::SetOutputFormat(int TargetFormats, int Width, int Height, char *E
if (SWS) { if (SWS) {
FinalFrame = avcodec_alloc_frame(); FinalFrame = avcodec_alloc_frame();
avpicture_alloc((AVPicture *)FinalFrame, VP.PixelFormat, VP.Width, VP.Height); avpicture_alloc((AVPicture *)FinalFrame, VP.VPixelFormat, VP.Width, VP.Height);
} else { } else {
FinalFrame = PPFrame; FinalFrame = PPFrame;
} }
@@ -168,7 +168,7 @@ void VideoBase::ResetOutputFormat() {
SWS = NULL; SWS = NULL;
VP.Height = CodecContext->height; VP.Height = CodecContext->height;
VP.Width = CodecContext->width; VP.Width = CodecContext->width;
VP.PixelFormat = CodecContext->pix_fmt; VP.VPixelFormat = CodecContext->pix_fmt;
} }
void FFVideoSource::Free(bool CloseCodec) { void FFVideoSource::Free(bool CloseCodec) {
@@ -239,7 +239,7 @@ FFVideoSource::FFVideoSource(const char *SourceFile, int Track, FrameIndex *Trac
VP.FPSDenominator = FormatContext->streams[VideoTrack]->time_base.num; VP.FPSDenominator = FormatContext->streams[VideoTrack]->time_base.num;
VP.FPSNumerator = FormatContext->streams[VideoTrack]->time_base.den; VP.FPSNumerator = FormatContext->streams[VideoTrack]->time_base.den;
VP.NumFrames = Frames.size(); VP.NumFrames = Frames.size();
VP.PixelFormat = CodecContext->pix_fmt; VP.VPixelFormat = CodecContext->pix_fmt;
VP.FirstTime = ((Frames.front().DTS * Frames.TB.Num) / (double)Frames.TB.Den) / 1000; VP.FirstTime = ((Frames.front().DTS * Frames.TB.Num) / (double)Frames.TB.Den) / 1000;
VP.LastTime = ((Frames.back().DTS * Frames.TB.Num) / (double)Frames.TB.Den) / 1000; VP.LastTime = ((Frames.back().DTS * Frames.TB.Num) / (double)Frames.TB.Den) / 1000;
@@ -458,7 +458,7 @@ MatroskaVideoSource::MatroskaVideoSource(const char *SourceFile, int Track,
VP.FPSDenominator = 1; VP.FPSDenominator = 1;
VP.FPSNumerator = 30; VP.FPSNumerator = 30;
VP.NumFrames = Frames.size(); VP.NumFrames = Frames.size();
VP.PixelFormat = CodecContext->pix_fmt; VP.VPixelFormat = CodecContext->pix_fmt;
VP.FirstTime = ((Frames.front().DTS * Frames.TB.Num) / (double)Frames.TB.Den) / 1000; VP.FirstTime = ((Frames.front().DTS * Frames.TB.Num) / (double)Frames.TB.Den) / 1000;
VP.LastTime = ((Frames.back().DTS * Frames.TB.Num) / (double)Frames.TB.Den) / 1000; VP.LastTime = ((Frames.back().DTS * Frames.TB.Num) / (double)Frames.TB.Den) / 1000;
@@ -681,7 +681,7 @@ HaaliTSVideoSource::HaaliTSVideoSource(const char *SourceFile, int Track,
VP.FPSDenominator = 1; VP.FPSDenominator = 1;
VP.FPSNumerator = 30; VP.FPSNumerator = 30;
VP.NumFrames = Frames.size(); VP.NumFrames = Frames.size();
VP.PixelFormat = CodecContext->pix_fmt; VP.VPixelFormat = CodecContext->pix_fmt;
VP.FirstTime = ((Frames.front().DTS * Frames.TB.Num) / (double)Frames.TB.Den) / 1000; VP.FirstTime = ((Frames.front().DTS * Frames.TB.Num) / (double)Frames.TB.Den) / 1000;
VP.LastTime = ((Frames.back().DTS * Frames.TB.Num) / (double)Frames.TB.Den) / 1000; VP.LastTime = ((Frames.back().DTS * Frames.TB.Num) / (double)Frames.TB.Den) / 1000;

View file

@@ -61,7 +61,7 @@ protected:
AVCodecContext *CodecContext; AVCodecContext *CodecContext;
VideoBase(); VideoBase();
int InitPP(const char *PP, int PixelFormat, char *ErrorMsg, unsigned MsgSize); int InitPP(const char *PP, PixelFormat PixelFormat, char *ErrorMsg, unsigned MsgSize);
AVFrameLite *OutputFrame(AVFrame *Frame); AVFrameLite *OutputFrame(AVFrame *Frame);
public: public:
virtual ~VideoBase(); virtual ~VideoBase();