Move YUV -> RGB conversion to VideoProviderYUV4MPEG and remove support for formats other than RGB32 from AegiVideoFrame

Originally committed to SVN as r5079.
This commit is contained in:
Thomas Goyne 2010-12-31 21:03:11 +00:00
parent 833e69b09f
commit 824294078f
10 changed files with 171 additions and 332 deletions

View file

@ -112,21 +112,15 @@ void CSRISubtitlesProvider::DrawSubtitles(AegiVideoFrame &dst,double time) {
// Load data into frame
csri_frame frame;
for (int i=0;i<4;i++) {
if (dst.flipped) {
frame.planes[i] = dst.data[i] + (dst.h-1) * dst.pitch[i];
frame.strides[i] = -(signed)dst.pitch[i];
}
else {
frame.planes[i] = dst.data[i];
frame.strides[i] = dst.pitch[i];
}
if (dst.flipped) {
frame.planes[0] = dst.data + (dst.h-1) * dst.pitch;
frame.strides[0] = -(signed)dst.pitch;
}
switch (dst.format) {
case FORMAT_RGB32: frame.pixfmt = CSRI_F_BGR_; break;
case FORMAT_RGB24: frame.pixfmt = CSRI_F_BGR; break;
default: frame.pixfmt = CSRI_F_BGR_;
else {
frame.planes[0] = dst.data;
frame.strides[0] = dst.pitch;
}
frame.pixfmt = CSRI_F_BGR_;
// Set format
csri_fmt format;

View file

@ -208,11 +208,11 @@ void LibassSubtitlesProvider::DrawSubtitles(AegiVideoFrame &frame,double time) {
// Prepare copy
int src_stride = img->stride;
int dst_stride = frame.pitch[0];
int dst_stride = frame.pitch;
int dst_delta = dst_stride - img->w*4;
//int stride = std::min(src_stride,dst_stride);
const unsigned char *src = img->bitmap;
unsigned char *dst = frame.data[0] + (img->dst_y * dst_stride + img->dst_x * 4);
unsigned char *dst = frame.data + (img->dst_y * dst_stride + img->dst_x * 4);
unsigned int k,ck,t;
// Copy image to destination frame

View file

@ -89,21 +89,6 @@ template<typename T> inline T mid(T a, T b, T c) { return std::max(a, std::min(b
#endif
#endif
/// @brief Clamp an integer to the range [min, max] without branching
/// Code taken from http://bob.allegronetwork.com/prog/tricks.html#clamp
/// @param x   Value to clamp
/// @param min Inclusive lower bound
/// @param max Inclusive upper bound
/// @return x limited to [min, max]
static FORCEINLINE int ClampSignedInteger32(int x,int min,int max) {
	int v = x - min;
	v &= ~v >> 31;   // collapse values below min to zero (sign-extension mask)
	v += min - max;  // rebase against the upper bound
	v &= v >> 31;    // collapse values above max to zero
	return v + max;
}
struct delete_ptr {
template<class T>
void operator()(T* ptr) const {

View file

@ -39,29 +39,20 @@
#include "utils.h"
#include "video_frame.h"
/// @brief Reset all members to their default values
///
/// Note that this function DOES NOT deallocate memory.
/// Use Clear() for that.
void AegiVideoFrame::Reset() {
// NOTE(review): this hunk appears to interleave the pre-change lines
// (per-plane data[i]/pitch[i] arrays and the format member) with their
// scalar replacements (data/pitch) without diff markers — confirm against
// the actual r5079 diff before relying on either version.
// Zero variables
for (int i=0;i<4;i++) {
data[i] = NULL;
pitch[i] = 0;
}
data = 0;
pitch = 0;
memSize = 0;
w = 0;
h = 0;
// Set properties
format = FORMAT_NONE;
flipped = false;
invertChannels = true;
ownMem = true;
}
/// @brief Default constructor: produces an empty frame with no allocated buffer
AegiVideoFrame::AegiVideoFrame() {
Reset();
}
@ -69,217 +60,90 @@ AegiVideoFrame::AegiVideoFrame() {
/// @brief Create a solid black frame of the request size and format
/// @param width
/// @param height
/// @param fmt
AegiVideoFrame::AegiVideoFrame(int width,int height,VideoFrameFormat fmt) {
AegiVideoFrame::AegiVideoFrame(unsigned int width, unsigned int height) {
assert(width > 0 && width < 10000);
assert(height > 0 && height < 10000);
Reset();
// Set format
format = fmt;
w = width;
h = height;
pitch[0] = w * GetBpp();
if (fmt == FORMAT_YV12) {
pitch[1] = w/2;
pitch[2] = w/2;
}
pitch = w * GetBpp();
Allocate();
// Clear data
int size = pitch[0]*height + (pitch[1]+pitch[2])*height/2;
memset(data[0],0,size);
memset(data, 0, pitch * height);
}
/// @brief Allocate memory if needed
void AegiVideoFrame::Allocate() {
// Check for sanity
wxASSERT(pitch[0] > 0 && pitch[0] < 10000);
wxASSERT(w > 0 && w < 10000);
wxASSERT(h > 0 && h < 10000);
wxASSERT(format != FORMAT_NONE);
assert(pitch > 0 && pitch < 10000);
assert(w > 0 && w < 10000);
assert(h > 0 && h < 10000);
// Get size
int height = h;
unsigned int size;
if (format == FORMAT_YV12) {
wxASSERT(pitch[1] > 0 && pitch[1] < 10000);
wxASSERT(pitch[2] > 0 && pitch[2] < 10000);
size = pitch[0]*height + (pitch[1]+pitch[2])*height/2;
}
else size = pitch[0] * height;
unsigned int size = pitch * h;
// Reallocate, if necessary
if (memSize != size || !ownMem) {
if (data[0] && ownMem) {
delete[] data[0];
if (ownMem) {
delete[] data;
}
data[0] = new unsigned char[size];
for (int i=1;i<4;i++) data[i] = NULL;
data = new unsigned char[size];
memSize = size;
// Planar
if (format == FORMAT_YV12) {
data[1] = data[0] + (pitch[0]*height);
data[2] = data[0] + (pitch[0]*height+pitch[1]*height/2);
}
}
ownMem = true;
}
/// @brief Clear the frame, freeing its buffer if this frame owns it
void AegiVideoFrame::Clear() {
// NOTE(review): both the old per-plane delete (data[0]) and the new scalar
// delete (data) appear here — these look like the before/after lines of a
// diff hunk fused together; executing both would be a double free. Confirm
// against the actual r5079 diff.
if (ownMem) delete[] data[0];
if (ownMem) delete[] data;
Reset();
}
/// @brief Copy from an AegiVideoFrame
/// @param source The frame to copy from
void AegiVideoFrame::CopyFrom(const AegiVideoFrame &source) {
w = source.w;
h = source.h;
format = source.format;
for (int i=0;i<4;i++) pitch[i] = source.pitch[i];
pitch = source.pitch;
Allocate();
memcpy(data[0],source.data[0],memSize);
memcpy(data, source.data, memSize);
flipped = source.flipped;
invertChannels = source.invertChannels;
}
/// @brief Set the frame to an externally allocated block of memory
/// @param source Target frame data
/// @param width The frame width in pixels
/// @param height The frame height in pixels
/// @param pitch The frame's pitch
/// @param format The frame's format
void AegiVideoFrame::SetTo(const unsigned char *const source[], int width, int height, const int pitch[4], VideoFrameFormat format) {
wxASSERT(pitch[0] > 0 && pitch[0] < 10000);
wxASSERT(width > 0 && width < 10000);
wxASSERT(height > 0 && height < 10000);
wxASSERT(format != FORMAT_NONE);
void AegiVideoFrame::SetTo(const unsigned char *source, unsigned int width, unsigned int height, unsigned int pitch) {
assert(pitch > 0 && pitch < 10000);
assert(width > 0 && width < 10000);
assert(height > 0 && height < 10000);
ownMem = false;
w = width;
h = height;
this->format = format;
for (int i = 0; i < 4; i++) {
// Note that despite this cast, the contents of data should still never be modified
data[i] = const_cast<unsigned char*>(source[i]);
this->pitch[i] = pitch[i];
}
// Note that despite this cast, the contents of data should still never be modified
data = const_cast<unsigned char*>(source);
this->pitch = pitch;
}
/// @brief Get wxImage
/// @return
wxImage AegiVideoFrame::GetImage() const {
if (format == FORMAT_RGB32 || format == FORMAT_RGB24) {
// Create
unsigned char *buf = (unsigned char*)malloc(w*h*3);
if (!buf) throw std::bad_alloc();
const unsigned char *src = data[0];
unsigned char *dst = buf;
unsigned char *buf = (unsigned char*)malloc(w*h*3);
if (!buf) throw std::bad_alloc();
const unsigned char *src = data;
unsigned char *dst = buf;
int Bpp = GetBpp();
int Bpp = GetBpp();
// Convert
for (unsigned int y=0;y<h;y++) {
dst = buf + y*w*3;
if (flipped) src = data[0] + (h-y-1)*pitch[0];
else src = data[0] + y*pitch[0];
for (unsigned int x=0;x<w;x++) {
*dst++ = *(src+2);
*dst++ = *(src+1);
*dst++ = *(src);
src += Bpp;
}
}
// Make image
wxImage img(w,h);
img.SetData(buf);
return img;
}
else if (format == FORMAT_YV12) {
AegiVideoFrame temp;
temp.ConvertFrom(*this);
return temp.GetImage();
}
else {
return wxImage(w,h);
}
}
/// @brief Get bytes per pixel for the current frame format
/// @param plane
/// @return
int AegiVideoFrame::GetBpp(int plane) const {
switch (format) {
case FORMAT_RGB32: return 4;
case FORMAT_RGB24: return 3;
case FORMAT_YUY2: return 2;
case FORMAT_YV12:
if (plane == 0) return 1;
else return 0;
default: return 0;
}
}
/// @brief Convert from another frame
/// @param source The frame to convert from
/// @param newFormat The format to convert to
void AegiVideoFrame::ConvertFrom(const AegiVideoFrame &source, VideoFrameFormat newFormat) {
if (newFormat != FORMAT_RGB32) throw _T("AegiVideoFrame::ConvertFrom: Unsupported destination format.");
if (source.format != FORMAT_YV12) throw _T("AegiVideoFrame::ConvertFrom: Unsupported source format.");
w = source.w;
h = source.h;
format = newFormat;
pitch[0] = w * 4;
Allocate();
// Set up pointers
const unsigned char *src_y = source.data[0];
const unsigned char *src_u = source.data[1];
const unsigned char *src_v = source.data[2];
unsigned char *dst = data[0];
// Set up pitches
const int src_delta1 = source.pitch[0]-w;
const int src_delta2 = source.pitch[1]-w/2;
const int src_delta3 = source.pitch[2]-w/2;
const int dst_delta = pitch[0]-w*4;
register int y,u,v;
// Loop
for (unsigned int py=0;py<h;py++) {
for (unsigned int px=0;px<w/2;px++) {
u = *src_u++ - 128;
v = *src_v++ - 128;
for (unsigned int i=0;i<2;i++) {
y = (*src_y++ - 16) * 298;
// Assign
*dst++ = ClampSignedInteger32((y + 516 * u + 128) >> 8,0,255); // Blue
*dst++ = ClampSignedInteger32((y - 100 * u - 208 * v + 128) >> 8,0,255); // Green
*dst++ = ClampSignedInteger32((y + 409 * v + 128) >> 8,0,255); // Red
*dst++ = 0;
}
}
// Increase pointers
src_y += src_delta1;
src_u += src_delta2;
src_v += src_delta3;
dst += dst_delta;
// Roll back u/v on even lines
if (!(py & 1)) {
src_u -= source.pitch[1];
src_v -= source.pitch[2];
// Convert
for (unsigned int y=0;y<h;y++) {
dst = buf + y*w*3;
if (flipped) src = data + (h-y-1)*pitch;
else src = data + y*pitch;
for (unsigned int x=0;x<w;x++) {
*dst++ = *(src+2);
*dst++ = *(src+1);
*dst++ = *(src);
src += Bpp;
}
}
wxImage img(w,h);
img.SetData(buf);
return img;
}

View file

@ -34,63 +34,35 @@
/// @ingroup video
///
#pragma once
#ifndef AGI_PRE
#include <wx/image.h>
#endif
/// Pixel format of an AegiVideoFrame. Values are distinct powers of two,
/// presumably so multiple formats can be combined into a mask — TODO confirm.
enum VideoFrameFormat {
/// No format set
FORMAT_NONE = 0x0000,
/// RGB, interleaved
FORMAT_RGB24 = 0x0001,
/// RGBA, interleaved
FORMAT_RGB32 = 0x0002,
/// YCbCr 4:2:2, planar
FORMAT_YUY2 = 0x0004,
/// YCbCr 4:2:0, planar
FORMAT_YV12 = 0x0008,
/// YCbCr 4:4:4, planar
FORMAT_YUV444 = 0x0010,
/// YCbCr 4:4:4 plus alpha, planar
FORMAT_YUV444A = 0x0020,
/// Y only (greyscale)
FORMAT_YUVMONO = 0x0040,
};
/// DOCME
/// @class AegiVideoFrame
/// @brief DOCME
///
/// DOCME
class AegiVideoFrame {
private:
/// Whether the object owns its buffer. If this is false, **data should never be modified
bool ownMem;
/// @brief Reset values to the defaults
///
/// Note that this function DOES NOT deallocate memory.
/// Use Clear() for that
void Reset();
public:
/// @brief Allocate memory if needed
void Allocate();
unsigned int memSize; /// The size in bytes of the frame buffer
/// The size in bytes of the frame buffer
unsigned int memSize;
/// Pointers to the data planes. Interleaved formats only use data[0]
unsigned char *data[4];
/// Data format
VideoFrameFormat format;
/// Pointer to the data planes
unsigned char *data;
/// Width in pixels
unsigned int w;
@ -99,25 +71,33 @@ public:
unsigned int h;
/// Pitch, that is, the number of bytes used by each row.
unsigned int pitch[4];
unsigned int pitch;
/// First row is actually the bottom one
bool flipped;
/// Swap Red and Blue channels or U and V planes (controls RGB versus BGR ordering etc)
/// Swap Red and Blue channels (controls RGB versus BGR ordering etc)
bool invertChannels;
AegiVideoFrame();
AegiVideoFrame(int width,int height,VideoFrameFormat format=FORMAT_RGB32);
AegiVideoFrame(unsigned int width, unsigned int height);
/// @brief Clear this frame, freeing its memory if necessary
void Clear();
/// @brief Copy from an AegiVideoFrame
/// @param source The frame to copy from
void CopyFrom(const AegiVideoFrame &source);
void ConvertFrom(const AegiVideoFrame &source, VideoFrameFormat newFormat=FORMAT_RGB32);
void SetTo(const unsigned char *const source[], int width, int height, const int pitch[4], VideoFrameFormat format);
/// @brief Set the frame to an externally allocated block of memory
/// @param source Target frame data
/// @param width The frame width in pixels
/// @param height The frame height in pixels
/// @param pitch The frame's pitch
/// @param format The frame's format
void SetTo(const unsigned char *source, unsigned int width, unsigned int height, unsigned int pitch);
/// @brief Get this frame as a wxImage
wxImage GetImage() const;
int GetBpp(int plane=0) const;
int GetBpp() const { return 4; };
};

View file

@ -279,7 +279,7 @@ void VideoOutGL::UploadFrameData(const AegiVideoFrame& frame) {
if (frame.h == 0 || frame.w == 0) return;
GLuint format = frame.invertChannels ? GL_BGRA_EXT : GL_RGBA;
InitTextures(frame.w, frame.h, format, frame.GetBpp(0), frame.flipped);
InitTextures(frame.w, frame.h, format, frame.GetBpp(), frame.flipped);
// Set the row length, needed to be able to upload partial rows
CHECK_ERROR(glPixelStorei(GL_UNPACK_ROW_LENGTH, frame.w));
@ -289,7 +289,7 @@ void VideoOutGL::UploadFrameData(const AegiVideoFrame& frame) {
CHECK_ERROR(glBindTexture(GL_TEXTURE_2D, ti.textureID));
CHECK_ERROR(glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, ti.sourceW,
ti.sourceH, format, GL_UNSIGNED_BYTE, frame.data[0] + ti.dataOffset));
ti.sourceH, format, GL_UNSIGNED_BYTE, frame.data + ti.dataOffset));
}
CHECK_ERROR(glPixelStorei(GL_UNPACK_ROW_LENGTH, 0));

View file

@ -262,12 +262,11 @@ const AegiVideoFrame AvisynthVideoProvider::GetFrame(int n) {
// Aegisub's video frame
AegiVideoFrame &final = iframe;
final.format = FORMAT_RGB32;
final.flipped = true;
final.invertChannels = true;
// Set size properties
final.pitch[0] = frame->GetPitch();
final.pitch = frame->GetPitch();
final.w = frame->GetRowSize() / Bpp;
final.h = frame->GetHeight();
@ -275,7 +274,7 @@ const AegiVideoFrame AvisynthVideoProvider::GetFrame(int n) {
final.Allocate();
// Copy
memcpy(final.data[0],frame->GetReadPtr(),final.pitch[0] * final.h);
memcpy(final.data,frame->GetReadPtr(),final.pitch * final.h);
// Set last number
last_fnum = n;

View file

@ -58,8 +58,8 @@ void DummyVideoProvider::Create(double _fps, int frames, int _width, int _height
width = _width;
height = _height;
frame = AegiVideoFrame(width,height,FORMAT_RGB32);
unsigned char *dst = frame.data[0];
frame = AegiVideoFrame(width,height);
unsigned char *dst = frame.data;
unsigned char r = colour.Red(), g = colour.Green(), b = colour.Blue();
unsigned char h, s, l, lr, lg, lb; // light variants
@ -69,7 +69,7 @@ void DummyVideoProvider::Create(double _fps, int frames, int _width, int _height
hsl_to_rgb(h, s, l, &lr, &lg, &lb);
if (pattern) {
int ppitch = frame.pitch[0] / frame.GetBpp();
int ppitch = frame.pitch / frame.GetBpp();
for (unsigned int y = 0; y < frame.h; ++y) {
if ((y / 8) & 1) {
for (int x = 0; x < ppitch; ++x) {
@ -106,7 +106,7 @@ void DummyVideoProvider::Create(double _fps, int frames, int _width, int _height
}
}
else {
for (int i=frame.pitch[0]*frame.h/frame.GetBpp();--i>=0;) {
for (int i=frame.pitch*frame.h/frame.GetBpp();--i>=0;) {
*dst++ = b;
*dst++ = g;
*dst++ = r;

View file

@ -53,6 +53,7 @@
#include "aegisub_endian.h"
#include "compat.h"
#include "main.h"
#include "utils.h"
#include "video_context.h"
#include "video_provider_ffmpegsource.h"
@ -273,23 +274,16 @@ void FFmpegSourceVideoProvider::Close() {
/// @param _n
/// @return
///
const AegiVideoFrame FFmpegSourceVideoProvider::GetFrame(int _n) {
// don't try to seek to insane places
int n = _n;
if (n < 0)
n = 0;
if (n >= GetFrameCount())
n = GetFrameCount()-1;
// set position
FrameNumber = n;
const AegiVideoFrame FFmpegSourceVideoProvider::GetFrame(int n) {
FrameNumber = mid(0, n, GetFrameCount() - 1);
// decode frame
const FFMS_Frame *SrcFrame = FFMS_GetFrame(VideoSource, n, &ErrInfo);
const FFMS_Frame *SrcFrame = FFMS_GetFrame(VideoSource, FrameNumber, &ErrInfo);
if (SrcFrame == NULL) {
throw VideoDecodeError(std::string("Failed to retrieve frame:") + ErrInfo.Buffer);
}
CurFrame.SetTo(SrcFrame->Data, Width, Height, SrcFrame->Linesize, FORMAT_RGB32);
CurFrame.SetTo(SrcFrame->Data[0], Width, Height, SrcFrame->Linesize[0]);
return CurFrame;
}
#endif /* WITH_FFMPEGSOURCE */

View file

@ -39,6 +39,7 @@
#include <libaegisub/log.h>
#include "compat.h"
#include "utils.h"
#include "video_provider_yuv4mpeg.h"
// All of this cstdio bogus is because of one reason and one reason only:
@ -69,7 +70,7 @@ YUV4MPEGVideoProvider::YUV4MPEGVideoProvider(wxString fname)
wxString filename = wxFileName(fname).GetShortPath();
#ifdef WIN32
sf = _wfopen(filename.wc_str(), _T("rb"));
sf = _wfopen(filename.wc_str(), L"rb");
#else
sf = fopen(filename.utf8_str(), "rb");
#endif
@ -195,7 +196,7 @@ std::vector<wxString> YUV4MPEGVideoProvider::ReadHeader(int64_t startpos, bool r
void YUV4MPEGVideoProvider::ParseFileHeader(const std::vector<wxString>& tags) {
if (tags.size() <= 1)
throw VideoOpenError("ParseFileHeader: contentless header");
if (tags.front().Cmp(_T("YUV4MPEG2")))
if (tags.front().Cmp("YUV4MPEG2"))
throw VideoOpenError("ParseFileHeader: malformed header (bad magic)");
// temporary stuff
@ -207,52 +208,54 @@ void YUV4MPEGVideoProvider::ParseFileHeader(const std::vector<wxString>& tags) {
Y4M_PixelFormat t_pixfmt = Y4M_PIXFMT_NONE;
for (unsigned i = 1; i < tags.size(); i++) {
wxString tag = _T("");
wxString tag;
long tmp_long1 = 0;
long tmp_long2 = 0;
if (tags.at(i).StartsWith(_T("W"), &tag)) {
if (tags[i].StartsWith("W", &tag)) {
if (!tag.ToLong(&tmp_long1))
throw VideoOpenError("ParseFileHeader: invalid width");
t_w = (int)tmp_long1;
}
else if (tags.at(i).StartsWith(_T("H"), &tag)) {
else if (tags[i].StartsWith("H", &tag)) {
if (!tag.ToLong(&tmp_long1))
throw VideoOpenError("ParseFileHeader: invalid height");
t_h = (int)tmp_long1;
}
else if (tags.at(i).StartsWith(_T("F"), &tag)) {
else if (tags[i].StartsWith("F", &tag)) {
if (!(tag.BeforeFirst(':')).ToLong(&tmp_long1) && tag.AfterFirst(':').ToLong(&tmp_long2))
throw VideoOpenError("ParseFileHeader: invalid framerate");
t_fps_num = (int)tmp_long1;
t_fps_den = (int)tmp_long2;
}
else if (tags.at(i).StartsWith(_T("C"), &tag)) {
else if (tags[i].StartsWith("C", &tag)) {
// technically this should probably be case sensitive,
// but being liberal in what you accept doesn't hurt
if (!tag.CmpNoCase(_T("420"))) t_pixfmt = Y4M_PIXFMT_420JPEG; // is this really correct?
else if (!tag.CmpNoCase(_T("420jpeg"))) t_pixfmt = Y4M_PIXFMT_420JPEG;
else if (!tag.CmpNoCase(_T("420mpeg2"))) t_pixfmt = Y4M_PIXFMT_420MPEG2;
else if (!tag.CmpNoCase(_T("420paldv"))) t_pixfmt = Y4M_PIXFMT_420PALDV;
else if (!tag.CmpNoCase(_T("411"))) t_pixfmt = Y4M_PIXFMT_411;
else if (!tag.CmpNoCase(_T("422"))) t_pixfmt = Y4M_PIXFMT_422;
else if (!tag.CmpNoCase(_T("444"))) t_pixfmt = Y4M_PIXFMT_444;
else if (!tag.CmpNoCase(_T("444alpha"))) t_pixfmt = Y4M_PIXFMT_444ALPHA;
else if (!tag.CmpNoCase(_T("mono"))) t_pixfmt = Y4M_PIXFMT_MONO;
tag.MakeLower();
if (tag == "420") t_pixfmt = Y4M_PIXFMT_420JPEG; // is this really correct?
else if (tag == "420jpeg") t_pixfmt = Y4M_PIXFMT_420JPEG;
else if (tag == "420mpeg2") t_pixfmt = Y4M_PIXFMT_420MPEG2;
else if (tag == "420paldv") t_pixfmt = Y4M_PIXFMT_420PALDV;
else if (tag == "411") t_pixfmt = Y4M_PIXFMT_411;
else if (tag == "422") t_pixfmt = Y4M_PIXFMT_422;
else if (tag == "444") t_pixfmt = Y4M_PIXFMT_444;
else if (tag == "444alpha") t_pixfmt = Y4M_PIXFMT_444ALPHA;
else if (tag == "mono") t_pixfmt = Y4M_PIXFMT_MONO;
else
throw VideoOpenError("ParseFileHeader: invalid or unknown colorspace");
}
else if (tags.at(i).StartsWith(_T("I"), &tag)) {
if (!tag.CmpNoCase(_T("p"))) t_imode = Y4M_ILACE_PROGRESSIVE;
else if (!tag.CmpNoCase(_T("t"))) t_imode = Y4M_ILACE_TFF;
else if (!tag.CmpNoCase(_T("b"))) t_imode = Y4M_ILACE_BFF;
else if (!tag.CmpNoCase(_T("m"))) t_imode = Y4M_ILACE_MIXED;
else if (!tag.CmpNoCase(_T("?"))) t_imode = Y4M_ILACE_UNKNOWN;
else if (tags[i].StartsWith("I", &tag)) {
tag.MakeLower();
if (tag == "p") t_imode = Y4M_ILACE_PROGRESSIVE;
else if (tag == "t") t_imode = Y4M_ILACE_TFF;
else if (tag == "b") t_imode = Y4M_ILACE_BFF;
else if (tag == "m") t_imode = Y4M_ILACE_MIXED;
else if (tag == "?") t_imode = Y4M_ILACE_UNKNOWN;
else
throw VideoOpenError("ParseFileHeader: invalid or unknown interlacing mode");
}
else
LOG_D("provider/video/yuv4mpeg") << "Unparsed tag: " << tags.at(i).c_str();
LOG_D("provider/video/yuv4mpeg") << "Unparsed tag: " << tags[i].c_str();
}
// The point of all this is to allow multiple YUV4MPEG2 headers in a single file
@ -317,11 +320,11 @@ int YUV4MPEGVideoProvider::IndexFile() {
break; // no more headers
Y4M_FrameFlags flags = Y4M_FFLAG_NOTSET;
if (!tags.front().Cmp(_T("YUV4MPEG2"))) {
if (!tags.front().Cmp("YUV4MPEG2")) {
ParseFileHeader(tags);
continue;
}
else if (!tags.front().Cmp(_T("FRAME")))
else if (!tags.front().Cmp("FRAME"))
flags = ParseFrameHeader(tags);
if (flags == Y4M_FFLAG_NONE) {
@ -329,7 +332,7 @@ int YUV4MPEGVideoProvider::IndexFile() {
seek_table.push_back(curpos);
// seek to next frame header start position
if (fseeko(sf, frame_sz, SEEK_CUR))
throw VideoOpenError(STD_STR(wxString::Format(_T("IndexFile: failed seeking to position %d"), curpos + frame_sz)));
throw VideoOpenError(STD_STR(wxString::Format("IndexFile: failed seeking to position %d", curpos + frame_sz)));
}
else {
/// @todo implement rff flags etc
@ -339,60 +342,80 @@ int YUV4MPEGVideoProvider::IndexFile() {
return framecount;
}
// Branchless clamp of x to the range [0, 255].
// http://bob.allegronetwork.com/prog/tricks.html#clamp
// Relies on sign-extending (arithmetic) right shift of negative ints, which
// is implementation-defined but holds on the compilers this project targets.
static FORCEINLINE int clamp(int x) {
x &= (~x) >> 31; // x < 0: mask is 0, x collapses to 0; x >= 0: mask is all-ones, x unchanged
x -= 255;
x &= x >> 31;    // x > 255: (x - 255) >= 0, mask is 0, collapses to 0
x += 255;        // over-range case becomes 255, in-range value is restored
return x;
}
/// @brief Gets a given frame
/// @param n The frame number to return
/// @return The video frame
const AegiVideoFrame YUV4MPEGVideoProvider::GetFrame(int n) {
// don't try to seek to insane places
if (n < 0)
n = 0;
if (n >= num_frames)
n = num_frames-1;
// set position
cur_fn = n;
cur_fn = mid(0, n, num_frames - 1);
VideoFrameFormat src_fmt, dst_fmt;
dst_fmt = FORMAT_RGB32;
int uv_width;
int uv_width = w / 2;
switch (pixfmt) {
case Y4M_PIXFMT_420JPEG:
case Y4M_PIXFMT_420MPEG2:
case Y4M_PIXFMT_420PALDV:
src_fmt = FORMAT_YV12; uv_width = w / 2; break;
case Y4M_PIXFMT_422:
src_fmt = FORMAT_YUY2; uv_width = w / 2; break;
break;
/// @todo add support for more pixel formats
default:
throw _T("YUV4MPEG video provider: GetFrame: Unsupported source colorspace");
throw "YUV4MPEG video provider: GetFrame: Unsupported source colorspace";
}
AegiVideoFrame tmp_frame;
tmp_frame.format = src_fmt;
tmp_frame.w = w;
tmp_frame.h = h;
tmp_frame.invertChannels = false;
tmp_frame.pitch[0] = w;
for (int i=1;i<=2;i++)
tmp_frame.pitch[i] = uv_width;
tmp_frame.Allocate();
std::vector<uint8_t> planes[3];
planes[0].resize(luma_sz);
planes[1].resize(chroma_sz);
planes[2].resize(chroma_sz);
fseeko(sf, seek_table[n], SEEK_SET);
size_t ret;
ret = fread(tmp_frame.data[0], luma_sz, 1, sf);
ret = fread(&planes[0][0], luma_sz, 1, sf);
if (ret != 1 || feof(sf) || ferror(sf))
throw _T("YUV4MPEG video provider: GetFrame: failed to read luma plane");
throw "YUV4MPEG video provider: GetFrame: failed to read luma plane";
for (int i = 1; i <= 2; i++) {
ret = fread(tmp_frame.data[i], chroma_sz, 1, sf);
ret = fread(&planes[i][0], chroma_sz, 1, sf);
if (ret != 1 || feof(sf) || ferror(sf))
throw _T("YUV4MPEG video provider: GetFrame: failed to read chroma planes");
throw "YUV4MPEG video provider: GetFrame: failed to read chroma planes";
}
AegiVideoFrame dst_frame;
dst_frame.invertChannels = true;
dst_frame.ConvertFrom(tmp_frame, dst_fmt);
dst_frame.w = w;
dst_frame.h = h;
dst_frame.pitch = w * 4;
dst_frame.Allocate();
tmp_frame.Clear();
const unsigned char *src_y = &planes[0][0];
const unsigned char *src_u = &planes[1][0];
const unsigned char *src_v = &planes[2][0];
unsigned char *dst = dst_frame.data;
for (int py = 0; py < h; ++py) {
for (int px = 0; px < w / 2; ++px) {
const int u = *src_u++ - 128;
const int v = *src_v++ - 128;
for (unsigned int i = 0; i < 2; ++i) {
const int y = (*src_y++ - 16) * 298;
*dst++ = clamp((y + 516 * u + 128) >> 8); // Blue
*dst++ = clamp((y - 100 * u - 208 * v + 128) >> 8); // Green
*dst++ = clamp((y + 409 * v + 128) >> 8); // Red
*dst++ = 0; // Alpha
}
}
// Roll back u/v on even lines
if (!(py & 1)) {
src_u -= uv_width;
src_v -= uv_width;
}
}
return dst_frame;
}