Fix the ffmpegsource video provider to no longer depend on ffmpeg directly; everything is now done through API calls to the ffmpegsource DLL.
Originally committed to SVN as r2340.

parent d92efd1f54
commit 0026ba38ec

2 changed files with 47 additions and 64 deletions
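For orientation, here is a minimal sketch of the decode path this commit moves to: all decoding and colourspace conversion now happen inside the FFMS DLL via FFMS_SetOutputFormat() and FFMS_GetFrame(), and the provider only copies the returned planes. The sketch uses only names that appear in the diff below (VideoBase, VideoProperties, AVFrameLite, the FFMS_* calls); the wrapper function itself and its trimmed error handling are illustrative and not part of the patch.

// Illustrative sketch only: the FFMS-DLL-based decode path this commit adopts.
// All identifiers except FetchFrameRGB32() come from the diff; signatures are
// assumed to match the ffms.h shipped with this revision.
#include <ffms.h>

const AVFrameLite *FetchFrameRGB32(VideoBase *VideoSource,
                                   const VideoProperties *VideoInfo,
                                   int n, char *ErrMsg, unsigned MsgSize) {
	// ask the DLL to output packed RGB32 at the source dimensions;
	// a nonzero return signals failure, as in GetFrame() below
	if (FFMS_SetOutputFormat(VideoSource, FFMS_PIX_FMT_RGB32,
	                         VideoInfo->Width, VideoInfo->Height))
		return NULL;

	// decode (and convert) the requested frame entirely inside the DLL;
	// returns NULL and fills ErrMsg on failure
	return FFMS_GetFrame(VideoSource, n, ErrMsg, MsgSize);
}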
@@ -51,10 +51,9 @@ FFmpegSourceVideoProvider::FFmpegSourceVideoProvider(Aegisub::String filename, double fps)
 	// clean up variables
 	VideoSource = NULL;
-	SWSContext = NULL;
-	BufferRGB = NULL;
+	DstFormat = FFMS_PIX_FMT_NONE;
+	LastDstFormat = FFMS_PIX_FMT_NONE;
 	KeyFramesLoaded = false;
-	FrameAllocated = false;
 	FrameNumber = -1;
 	MessageSize = sizeof(FFMSErrorMessage);
 
 
@@ -184,19 +183,12 @@ void FFmpegSourceVideoProvider::LoadVideo(Aegisub::String filename, double fps)
 ///////////////
 // Close video
 void FFmpegSourceVideoProvider::Close() {
-	if (SWSContext)
-		sws_freeContext(SWSContext);
-	SWSContext = NULL;
 	if (VideoSource)
 		FFMS_DestroyVideoSource(VideoSource);
 	VideoSource = NULL;
-	// this seems to cause a heap corruption in debug mode
-	/* if (FrameAllocated)
-		avpicture_free(&FrameRGB); */
-	FrameAllocated = false;
-	if (BufferRGB)
-		delete BufferRGB;
+	DstFormat = FFMS_PIX_FMT_NONE;
+	LastDstFormat = FFMS_PIX_FMT_NONE;
 	KeyFramesLoaded = false;
 	KeyFramesList.clear();
 	TimecodesVector.clear();
 
@@ -228,62 +220,63 @@ const AegiVideoFrame FFmpegSourceVideoProvider::GetFrame(int _n, int FormatType)
 		n = GetFrameCount()-1;
 	// set position
 	FrameNumber = n;
 
+	// these are for convenience
+	int w = VideoInfo->Width;
+	int h = VideoInfo->Height;
+
+	// this is what we'll return eventually
+	AegiVideoFrame &DstFrame = CurFrame;
+
+	// choose output format
+	if (FormatType & FORMAT_RGB32) {
+		DstFormat = FFMS_PIX_FMT_RGB32; // FIXME: should be RGB32
+		DstFrame.format = FORMAT_RGB32;
+	} else if (FormatType & FORMAT_RGB24) {
+		DstFormat = FFMS_PIX_FMT_RGB24;
+		DstFrame.format = FORMAT_RGB24;
+	} else if (FormatType & FORMAT_YV12) {
+		DstFormat = FFMS_PIX_FMT_YUV420P; // may or may not work
+		DstFrame.format = FORMAT_YV12;
+	} else if (FormatType & FORMAT_YUY2) {
+		DstFormat = FFMS_PIX_FMT_YUYV422;
+		DstFrame.format = FORMAT_YUY2;
+	} else
+		throw _T("FFmpegSource video provider: upstream provider requested unknown or unsupported pixel format");
+
+	// requested format was changed since last time we were called, (re)set output format
+	if (LastDstFormat != DstFormat) {
+		if (FFMS_SetOutputFormat(VideoSource, DstFormat, w, h))
+			throw _T("FFmpegSource video provider: failed to set desired output format");
+		LastDstFormat = DstFormat;
+	}
+
+	// decode frame
 	const AVFrameLite *SrcFrame = FFMS_GetFrame(VideoSource, n, FFMSErrorMessage, MessageSize);
 	if (SrcFrame == NULL) {
 		ErrorMsg.Printf(_T("FFmpegSource video provider: %s"), FFMSErrorMessage);
 		throw ErrorMsg;
 	}
 
-	AVPicture *SrcPicture = reinterpret_cast<AVPicture *>(const_cast<AVFrameLite *>(SrcFrame));
-
-	// prepare stuff for conversion to RGB32
-	int w = VideoInfo->Width;
-	int h = VideoInfo->Height;
-	PixelFormat DstFormat;
-
-	switch (FormatType) {
-		case FORMAT_RGB32: DstFormat = PIX_FMT_RGB32; break;
-		case FORMAT_RGB24: DstFormat = PIX_FMT_RGB24; break;
-		case FORMAT_YV12: DstFormat = PIX_FMT_YUV420P; break; // may or may not work
-		case FORMAT_YUY2: DstFormat = PIX_FMT_YUYV422; break;
-		default: throw _T("FFmpegSource video provider: upstream provider requested unknown or unsupported pixel format");
-	}
-
-	if (!SWSContext) {
-		if (avpicture_alloc(&FrameRGB, DstFormat, w, h) != 0)
-			throw _T("FFmpegSource video provider: could not allocate output picture buffer");
-		FrameAllocated = true;
-		unsigned int DstSize = avpicture_get_size(DstFormat,w,h);
-		BufferRGB = new uint8_t[DstSize];
-
-		// initialize swscaler context
-		SWSContext = sws_getContext(w, h, VideoInfo->PixelFormat, w, h, DstFormat, SWS_BICUBIC, NULL, NULL, NULL);
-		if (SWSContext == NULL)
-			throw _T("FFmpegSource video provider: failed to initialize SWScale colorspace conversion");
-	}
-	avpicture_fill(&FrameRGB, BufferRGB, DstFormat, w, h);
-
-	// this is what we'll return eventually
-	AegiVideoFrame &DstFrame = CurFrame;
-
 	// set some properties
 	DstFrame.w = w;
 	DstFrame.h = h;
 	DstFrame.flipped = false;
-	if (FormatType == FORMAT_RGB32 || FormatType == FORMAT_RGB24) {
+	if (DstFrame.format == FORMAT_RGB32 || DstFrame.format == FORMAT_RGB24)
 		DstFrame.invertChannels = true;
-	} else {
+	else
 		DstFrame.invertChannels = false;
-	}
-	DstFrame.format = (VideoFrameFormat)FormatType;
 
-	// allocate destination frame
-	for (int i=0;i<4;i++) DstFrame.pitch[i] = FrameRGB.linesize[i];
+	// allocate destination
+	for (int i = 0; i < 4; i++)
+		DstFrame.pitch[i] = SrcFrame->Linesize[i];
 	DstFrame.Allocate();
 
-	// let swscale do the conversion to RGB and write directly to the output frame
-	sws_scale(SWSContext, SrcPicture->data, SrcPicture->linesize, 0, h, DstFrame.data, FrameRGB.linesize);
+	// copy data to destination, skipping planes with no data in them
+	for (int j = 0; j < 4; j++) {
+		if (SrcFrame->Linesize[j] > 0)
+			memcpy(DstFrame.data[j], SrcFrame->Data[j], DstFrame.pitch[j] * DstFrame.h);
+	}
 
 	return DstFrame;
 }
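As a usage note, the rewritten GetFrame() above still takes the upstream FORMAT_* request and throws a _T() message string on failure. A hypothetical caller-side example follows; the provider construction, the filename/fps variables, and the single-flag request are assumptions for illustration and are not shown in this patch.

// Hypothetical caller, for illustration only; only GetFrame()'s signature and
// the AegiVideoFrame fields it fills come from the diff above.
FFmpegSourceVideoProvider provider(filename, fps);
const AegiVideoFrame &frame = provider.GetFrame(0, FORMAT_RGB32);
// frame.data[0] holds the packed pixels; frame.pitch[0] is the stride reported
// by the FFMS DLL (SrcFrame->Linesize[0] in the code above)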
@@ -37,18 +37,10 @@
 // Headers
 #include <wx/wxprec.h>
 #ifdef WITH_FFMPEGSOURCE
-#ifdef WIN32
-#define EMULATE_INTTYPES
-#endif /* WIN32 */
 #include "include/aegisub/video_provider.h"
 #include "include/aegisub/aegisub.h"
 #include "dialog_progress.h"
 #include "vfr.h"
-extern "C" {
-#include <libavformat/avformat.h>
-#include <libavcodec/avcodec.h>
-#include <libswscale/swscale.h>
-}
 #include <vector>
 #include <ffms.h>
 
@@ -60,7 +52,6 @@ class FFmpegSourceVideoProvider : public VideoProvider {
 private:
 	VideoBase *VideoSource;
 	const VideoProperties *VideoInfo;
-	SwsContext *SWSContext;
 
 	int FrameNumber;
 	wxArrayInt KeyFramesList;
@@ -68,9 +59,8 @@ private:
 	std::vector<int> TimecodesVector;
 	FrameRate Timecodes;
 
-	AVPicture FrameRGB;
-	bool FrameAllocated;
-	uint8_t *BufferRGB;
+	FFMS_PixelFormat DstFormat;
+	FFMS_PixelFormat LastDstFormat;
 	AegiVideoFrame CurFrame;
 
 	char FFMSErrorMessage[1024];