diff --git a/aegisub/build/aegisub_vs2008/aegisub_vs2008.vcproj b/aegisub/build/aegisub_vs2008/aegisub_vs2008.vcproj
index bedefeb58..5c023739e 100644
--- a/aegisub/build/aegisub_vs2008/aegisub_vs2008.vcproj
+++ b/aegisub/build/aegisub_vs2008/aegisub_vs2008.vcproj
@@ -1538,6 +1538,14 @@
+
+
+
+
+std::vector<wxString> YUV4MPEGVideoProvider::ReadHeader(int64_t startpos, bool reset_pos) {
+ int64_t oldpos = ftello(sf);
+	std::vector<wxString> tags;
+ wxString curtag = _T("");
+ int bytesread = 0;
+ int buf;
+
+ if (fseeko(sf, startpos, SEEK_SET))
+	throw wxString::Format(_T("YUV4MPEG video provider: ReadHeader: failed seeking to position %lld"), (long long)startpos);
+
+ // read header until terminating newline (0x0A) is found
+ while ((buf = fgetc(sf)) != 0x0A) {
+ if (ferror(sf))
+ throw wxString(_T("ReadHeader: Failed to read from file"));
+ if (feof(sf)) {
+ // you know, this is one of the places where it would be really nice
+ // to be able to throw an exception object that tells the caller that EOF was reached
+ wxLogDebug(_T("YUV4MPEG video provider: ReadHeader: Reached EOF, returning"));
+ break;
+ }
+
+ // some basic low-effort sanity checking
+ if (buf == 0x00)
+ throw wxString(_T("ReadHeader: Malformed header (unexpected NUL)"));
+ if (++bytesread >= YUV4MPEG_HEADER_MAXLEN)
+ throw wxString(_T("ReadHeader: Malformed header (no terminating newline found)"));
+
+ // found a new tag
+ if (buf == 0x20) {
+ tags.push_back(curtag);
+ curtag.Clear();
+ }
+ else
+	curtag.Append(static_cast<wxChar>(buf));
+ }
+ // if only one tag with no trailing space was found (possible in the
+ // FRAME header case), make sure we get it
+ if (!curtag.IsEmpty()) {
+ tags.push_back(curtag);
+ curtag.Clear();
+ }
+
+ if (reset_pos)
+ fseeko(sf, oldpos, SEEK_SET);
+
+ return tags;
+}
+
+
+// parse a file header and set file properties
+void YUV4MPEGVideoProvider::ParseFileHeader(const std::vector<wxString>& tags) {
+ if (tags.size() <= 1)
+ throw wxString(_T("ParseFileHeader: contentless header"));
+ if (tags.front().Cmp(_T("YUV4MPEG2")))
+ throw wxString(_T("ParseFileHeader: malformed header (bad magic)"));
+
+ // temporary stuff
+ int t_w = -1;
+ int t_h = -1;
+ int t_fps_num = -1;
+ int t_fps_den = -1;
+ Y4M_InterlacingMode t_imode = Y4M_ILACE_NOTSET;
+ Y4M_PixelFormat t_pixfmt = Y4M_PIXFMT_NONE;
+
+ for (unsigned i = 1; i < tags.size(); i++) {
+ wxString tag = _T("");
+ long tmp_long1 = 0;
+ long tmp_long2 = 0;
+
+ if (tags.at(i).StartsWith(_T("W"), &tag)) {
+ if (!tag.ToLong(&tmp_long1))
+ throw wxString(_T("ParseFileHeader: invalid width"));
+ t_w = (int)tmp_long1;
+ }
+ else if (tags.at(i).StartsWith(_T("H"), &tag)) {
+ if (!tag.ToLong(&tmp_long1))
+ throw wxString(_T("ParseFileHeader: invalid height"));
+ t_h = (int)tmp_long1;
+ }
+ else if (tags.at(i).StartsWith(_T("F"), &tag)) {
+	if (!((tag.BeforeFirst(':')).ToLong(&tmp_long1) && tag.AfterFirst(':').ToLong(&tmp_long2)))
+ throw wxString(_T("ParseFileHeader: invalid framerate"));
+ t_fps_num = (int)tmp_long1;
+ t_fps_den = (int)tmp_long2;
+ }
+ else if (tags.at(i).StartsWith(_T("C"), &tag)) {
+ // technically this should probably be case sensitive,
+ // but being liberal in what you accept doesn't hurt
+ if (!tag.CmpNoCase(_T("420jpeg"))) t_pixfmt = Y4M_PIXFMT_420JPEG;
+ else if (!tag.CmpNoCase(_T("420"))) t_pixfmt = Y4M_PIXFMT_420JPEG; // is this really correct?
+ else if (!tag.CmpNoCase(_T("420mpeg2"))) t_pixfmt = Y4M_PIXFMT_420MPEG2;
+ else if (!tag.CmpNoCase(_T("420paldv"))) t_pixfmt = Y4M_PIXFMT_420PALDV;
+ else if (!tag.CmpNoCase(_T("411"))) t_pixfmt = Y4M_PIXFMT_411;
+ else if (!tag.CmpNoCase(_T("422"))) t_pixfmt = Y4M_PIXFMT_422;
+ else if (!tag.CmpNoCase(_T("444"))) t_pixfmt = Y4M_PIXFMT_444;
+ else if (!tag.CmpNoCase(_T("444alpha"))) t_pixfmt = Y4M_PIXFMT_444ALPHA;
+ else if (!tag.CmpNoCase(_T("mono"))) t_pixfmt = Y4M_PIXFMT_MONO;
+ else
+ throw wxString(_T("ParseFileHeader: invalid or unknown colorspace"));
+ }
+ else if (tags.at(i).StartsWith(_T("I"), &tag)) {
+ if (!tag.CmpNoCase(_T("p"))) t_imode = Y4M_ILACE_PROGRESSIVE;
+ else if (!tag.CmpNoCase(_T("t"))) t_imode = Y4M_ILACE_TFF;
+ else if (!tag.CmpNoCase(_T("b"))) t_imode = Y4M_ILACE_BFF;
+ else if (!tag.CmpNoCase(_T("m"))) t_imode = Y4M_ILACE_MIXED;
+ else if (!tag.CmpNoCase(_T("?"))) t_imode = Y4M_ILACE_UNKNOWN;
+ else
+ throw wxString(_T("ParseFileHeader: invalid or unknown interlacing mode"));
+ }
+ else
+ wxLogDebug(_T("ParseFileHeader: unparsed tag: %s"), tags.at(i).c_str());
+ }
+
+ // The point of all this is to allow multiple YUV4MPEG2 headers in a single file
+ // (can happen if you concat several files) as long as they have identical
+ // header flags. The spec doesn't explicitly say you have to allow this,
+ // but the "reference implementation" (mjpegtools) does, so I'm doing it too.
+ if (inited) {
+ if (t_w > 0 && t_w != w)
+ throw wxString(_T("ParseFileHeader: illegal width change"));
+ if (t_h > 0 && t_h != h)
+ throw wxString(_T("ParseFileHeader: illegal height change"));
+ if ((t_fps_num > 0 && t_fps_den > 0) && (t_fps_num != fps_rat.num || t_fps_den != fps_rat.den))
+ throw wxString(_T("ParseFileHeader: illegal framerate change"));
+ if (t_pixfmt != Y4M_PIXFMT_NONE && t_pixfmt != pixfmt)
+ throw wxString(_T("ParseFileHeader: illegal colorspace change"));
+ if (t_imode != Y4M_ILACE_NOTSET && t_imode != imode)
+ throw wxString(_T("ParseFileHeader: illegal interlacing mode change"));
+ }
+ else {
+ w = t_w;
+ h = t_h;
+ fps_rat.num = t_fps_num;
+ fps_rat.den = t_fps_den;
+ pixfmt = t_pixfmt != Y4M_PIXFMT_NONE ? t_pixfmt : Y4M_PIXFMT_420JPEG;
+ imode = t_imode != Y4M_ILACE_NOTSET ? t_imode : Y4M_ILACE_UNKNOWN;
+ inited = true;
+ }
+}
+
+
+// parse a frame header (currently unused)
+YUV4MPEGVideoProvider::Y4M_FrameFlags YUV4MPEGVideoProvider::ParseFrameHeader(const std::vector<wxString>& tags) {
+	if (tags.front().Cmp(_T("FRAME")))
+ throw wxString(_T("ParseFrameHeader: malformed frame header (bad magic)"));
+
+ // TODO: implement parsing of rff flags etc
+
+ return Y4M_FFLAG_NONE;
+}
+
+
+// index the file, i.e. find all frames and their flags
+int YUV4MPEGVideoProvider::IndexFile() {
+ int framecount = 0;
+ int64_t curpos = ftello(sf);
+
+ // the ParseFileHeader() call in LoadVideo() will already have read
+ // the file header for us and set the seek position correctly
+ while (true) {
+ curpos = ftello(sf); // update position
+ // continue reading headers until no more are found
+	std::vector<wxString> tags = ReadHeader(curpos, false);
+ curpos = ftello(sf);
+
+ if (tags.empty())
+ break; // no more headers
+
+ Y4M_FrameFlags flags = Y4M_FFLAG_NOTSET;
+ if (!tags.front().Cmp(_T("YUV4MPEG2"))) {
+ ParseFileHeader(tags);
+ continue;
+ }
+ else if (!tags.front().Cmp(_T("FRAME")))
+ flags = ParseFrameHeader(tags);
+
+ if (flags == Y4M_FFLAG_NONE) {
+ framecount++;
+ seek_table.push_back(curpos);
+ // seek to next frame header start position
+ if (fseeko(sf, frame_sz, SEEK_CUR))
+	throw wxString::Format(_T("IndexFile: failed seeking to position %lld"), (long long)(curpos + frame_sz));
+ }
+ else {
+ // TODO: implement this
+ }
+ }
+
+ return framecount;
+}
+
+
+const AegiVideoFrame YUV4MPEGVideoProvider::GetFrame(int n, int desired_fmts) {
+ // don't try to seek to insane places
+ if (n < 0)
+ n = 0;
+ if (n >= num_frames)
+ n = num_frames-1;
+ // set position
+ cur_fn = n;
+
+ VideoFrameFormat src_fmt, dst_fmt;
+ switch (pixfmt) {
+ case Y4M_PIXFMT_420JPEG:
+ case Y4M_PIXFMT_420MPEG2:
+ case Y4M_PIXFMT_420PALDV:
+ src_fmt = FORMAT_YV12; break;
+ case Y4M_PIXFMT_422:
+ src_fmt = FORMAT_YUY2; break;
+ // TODO: add support for more pixel formats
+ default:
+ throw wxString(_T("YUV4MPEG video provider: GetFrame: Unsupported source colorspace"));
+ }
+
+ // TODO: fix this terrible piece of crap and implement colorspace conversions
+ // (write a function to select best output format)
+ if ((desired_fmts & FORMAT_YV12) && src_fmt == FORMAT_YV12)
+ dst_fmt = FORMAT_YV12;
+ else if ((desired_fmts & FORMAT_YUY2) && src_fmt == FORMAT_YUY2)
+ dst_fmt = FORMAT_YUY2;
+ else if ((desired_fmts & FORMAT_RGB32) && src_fmt == FORMAT_YV12)
+ dst_fmt = FORMAT_RGB32;
+ else
+ throw wxString(_T("YUV4MPEG video provider: GetFrame: Upstream video provider requested unknown or unsupported color format"));
+
+ int uv_width, uv_height;
+ // TODO: ugh, fix this
+ switch (src_fmt) {
+ case FORMAT_YV12:
+ uv_width = w / 2; uv_height = h / 2; break;
+ case FORMAT_YUY2:
+ uv_width = w / 2; uv_height = h; break;
+ default:
+ throw wxString(_T("YUV4MPEG video provider: GetFrame: sanity check failed"));
+ }
+
+ AegiVideoFrame tmp_frame;
+
+ tmp_frame.format = src_fmt;
+ tmp_frame.w = w;
+ tmp_frame.h = h;
+ tmp_frame.invertChannels = false;
+ tmp_frame.pitch[0] = w;
+ for (int i=1;i<=2;i++) tmp_frame.pitch[i] = uv_width;
+ tmp_frame.Allocate();
+
+ fseeko(sf, seek_table[n], SEEK_SET);
+ size_t ret;
+ ret = fread(tmp_frame.data[0], w * h, 1, sf);
+ if (ret != 1 || feof(sf) || ferror(sf))
+ throw wxString(_T("YUV4MPEG video provider: GetFrame: failed to read luma plane"));
+ for (int i = 1; i <= 2; i++) {
+ ret = fread(tmp_frame.data[i], uv_width * uv_height, 1, sf);
+ if (ret != 1 || feof(sf) || ferror(sf))
+ throw wxString(_T("YUV4MPEG video provider: GetFrame: failed to read chroma planes"));
+ }
+
+ AegiVideoFrame dst_frame;
+ dst_frame.format = dst_fmt;
+ dst_frame.w = w;
+ dst_frame.h = h;
+ if (dst_fmt == FORMAT_RGB32) {
+ dst_frame.invertChannels = true;
+ dst_frame.pitch[0] = w * 4;
+ dst_frame.ConvertFrom(tmp_frame);
+ }
+ else
+ dst_frame.CopyFrom(tmp_frame);
+
+ tmp_frame.Clear();
+
+ return dst_frame;
+}
+
+
+
+///////////////
+// Utility functions
+int YUV4MPEGVideoProvider::GetWidth() {
+ return w;
+}
+
+int YUV4MPEGVideoProvider::GetHeight() {
+ return h;
+}
+
+int YUV4MPEGVideoProvider::GetFrameCount() {
+ return num_frames;
+}
+
+int YUV4MPEGVideoProvider::GetPosition() {
+ return cur_fn;
+}
+
+double YUV4MPEGVideoProvider::GetFPS() {
+ return double(fps_rat.num) / double(fps_rat.den);
+}
+
diff --git a/aegisub/src/video_provider_yuv4mpeg.h b/aegisub/src/video_provider_yuv4mpeg.h
new file mode 100644
index 000000000..17e83c365
--- /dev/null
+++ b/aegisub/src/video_provider_yuv4mpeg.h
@@ -0,0 +1,137 @@
+// Copyright (c) 2009, Karl Blomster
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// * Redistributions of source code must retain the above copyright notice,
+// this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+// * Neither the name of the Aegisub Group nor the names of its contributors
+// may be used to endorse or promote products derived from this software
+// without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+// POSSIBILITY OF SUCH DAMAGE.
+//
+// -----------------------------------------------------------------------------
+//
+// AEGISUB
+//
+// Website: http://aegisub.cellosoft.com
+// Contact: mailto:zeratul@cellosoft.com
+//
+
+
+#pragma once
+
+#include "include/aegisub/video_provider.h"
+#include <stdio.h>
+#include <stdint.h>
+#include <vector>
+
+// ffmpeg uses 80, so I'm p sure this isn't too small
+#define YUV4MPEG_HEADER_MAXLEN 128
+
+
+class YUV4MPEGVideoProvider : public VideoProvider {
+private:
+ enum Y4M_PixelFormat {
+ Y4M_PIXFMT_NONE = -1,
+ Y4M_PIXFMT_420JPEG,
+ Y4M_PIXFMT_420MPEG2,
+ Y4M_PIXFMT_420PALDV,
+ Y4M_PIXFMT_411,
+ Y4M_PIXFMT_422,
+ Y4M_PIXFMT_444,
+ Y4M_PIXFMT_444ALPHA,
+ Y4M_PIXFMT_MONO,
+ };
+
+ enum Y4M_InterlacingMode {
+ Y4M_ILACE_NOTSET = -1, // not to be confused with Y4M_ILACE_UNKNOWN
+ Y4M_ILACE_PROGRESSIVE,
+ Y4M_ILACE_TFF,
+ Y4M_ILACE_BFF,
+ Y4M_ILACE_MIXED,
+ Y4M_ILACE_UNKNOWN,
+ };
+
+ // this is currently unused :(
+ enum Y4M_FrameFlags {
+ Y4M_FFLAG_NOTSET = -1,
+ Y4M_FFLAG_NONE = 0x0000,
+ // repeat field/frame flags
+ Y4M_FFLAG_R_TFF = 0x0001, // TFF
+ Y4M_FFLAG_R_TFF_R = 0x0002, // TFF and repeat
+ Y4M_FFLAG_R_BFF = 0x0004, // BFF
+ Y4M_FFLAG_R_BFF_R = 0x0008, // BFF and repeat
+ Y4M_FFLAG_R_P = 0x0010, // progressive
+ Y4M_FFLAG_R_P_R = 0x0020, // progressive and repeat once
+ Y4M_FFLAG_R_P_RR = 0x0040, // progressive and repeat twice
+ // temporal sampling flags
+ Y4M_FFLAG_T_P = 0x0080, // progressive (fields sampled at the same time)
+ Y4M_FFLAG_T_I = 0x0100, // interlaced (fields sampled at different times)
+ // spatial sampling flags
+ Y4M_FFLAG_C_P = 0x0200, // progressive (whole frame subsampled)
+ Y4M_FFLAG_C_I = 0x0400, // interlaced (fields subsampled independently)
+ Y4M_FFLAG_C_UNKNOWN = 0x0800, // unknown (only allowed for non-4:2:0 sampling)
+ };
+
+ FILE *sf; // source file
+ bool inited;
+ int w, h; // width/height
+ int num_frames; // length of file in frames
+ int frame_sz; // size of each frame in bytes
+ Y4M_PixelFormat pixfmt; // colorspace/pixel format
+ Y4M_InterlacingMode imode; // interlacing mode
+ struct {
+ int num;
+ int den;
+ } fps_rat; // framerate
+
+	std::vector<int64_t> seek_table; // the position in the file of each frame, in bytes
+ int cur_fn; // current frame number
+
+ wxString errmsg;
+
+ void LoadVideo(const Aegisub::String filename);
+ void Close();
+
+ void CheckFileFormat();
+	void ParseFileHeader(const std::vector<wxString>& tags);
+	Y4M_FrameFlags ParseFrameHeader(const std::vector<wxString>& tags);
+	std::vector<wxString> ReadHeader(int64_t startpos, bool reset_pos=false);
+ int IndexFile();
+
+public:
+ YUV4MPEGVideoProvider(Aegisub::String filename, double fps);
+ ~YUV4MPEGVideoProvider();
+
+ const AegiVideoFrame GetFrame(int n, int formatType);
+ int GetPosition();
+ int GetFrameCount();
+
+ int GetWidth();
+ int GetHeight();
+ double GetFPS();
+ bool AreKeyFramesLoaded() { return false; }
+ wxArrayInt GetKeyFrames() { return wxArrayInt(); };
+ bool IsVFR() { return false; };
+ FrameRate GetTrueFrameRate() { return FrameRate(); };
+ Aegisub::String GetDecoderName() { return L"YUV4MPEG"; }
+ bool IsNativelyByFrames() { return true; }
+ int GetDesiredCacheSize() { return 8; }
+};
+