
Reuse buffers for video frames

At least on OS X, allocating the buffers is one of the more expensive
parts of video playback, and on an arbitrary 720p H.264 file with simple
subtitles this cuts CPU usage while playing by about 30%.
Thomas Goyne 2014-06-12 14:34:21 -07:00
parent a574d6ac67
commit 4bdccb889c
12 changed files with 72 additions and 64 deletions
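
The saving described in the commit message comes from recycling frame buffers instead of allocating a fresh one per decoded frame: a buffer handed out as a std::shared_ptr can be reused once its use_count() drops back to 1, because at that point only the provider's own pool still references it. A minimal sketch of that pattern; the FramePool and Frame names are illustrative, not part of Aegisub:

// Sketch only: recycle any buffer that the pool alone still references.
#include <memory>
#include <vector>

struct Frame { std::vector<unsigned char> data; };

class FramePool {
	std::vector<std::shared_ptr<Frame>> buffers;
public:
	std::shared_ptr<Frame> acquire() {
		for (auto& buf : buffers) {
			if (buf.use_count() == 1)   // no consumer holds it any more
				return buf;             // reuse the existing allocation
		}
		buffers.push_back(std::make_shared<Frame>());
		return buffers.back();          // pool only grows when all buffers are in use
	}
};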

View File

@@ -20,6 +20,7 @@
#include "ass_file.h"
#include "export_fixstyle.h"
#include "include/aegisub/subtitles_provider.h"
#include "video_frame.h"
#include "video_provider_manager.h"
#include <libaegisub/dispatch.h>
@@ -30,10 +31,22 @@ enum {
};
std::shared_ptr<VideoFrame> AsyncVideoProvider::ProcFrame(int frame_number, double time, bool raw) {
// Find an unused buffer to use or allocate a new one if needed
std::shared_ptr<VideoFrame> frame;
for (auto& buffer : buffers) {
if (buffer.use_count() == 1) {
frame = buffer;
break;
}
}
if (!frame) {
frame = std::make_shared<VideoFrame>();
buffers.push_back(frame);
}
try {
frame = source_provider->GetFrame(frame_number);
source_provider->GetFrame(frame_number, *frame);
}
catch (VideoProviderError const& err) { throw VideoProviderErrorEvent(err); }

View File

@@ -76,6 +76,8 @@ class AsyncVideoProvider {
/// they can be rendered
std::atomic<uint_fast32_t> version{ 0 };
std::vector<std::shared_ptr<VideoFrame>> buffers;
public:
/// @brief Load the passed subtitle file
/// @param subs File to load

View File

@@ -37,7 +37,6 @@
#include <libaegisub/exception.h>
#include <libaegisub/vfr.h>
#include <memory>
#include <string>
struct VideoFrame;
@@ -47,7 +46,7 @@ public:
virtual ~VideoProvider() = default;
/// Override this method to actually get frames
virtual std::shared_ptr<VideoFrame> GetFrame(int n)=0;
virtual void GetFrame(int n, VideoFrame &frame)=0;
/// Set the YCbCr matrix to the specified one
///

View File

@@ -101,18 +101,19 @@ void SubtitlesPreview::SetColour(agi::Color col) {
void SubtitlesPreview::UpdateBitmap() {
if (!vid) return;
auto frame = vid->GetFrame(0);
VideoFrame frame;
vid->GetFrame(0, frame);
if (provider) {
try {
provider->LoadSubtitles(sub_file.get());
provider->DrawSubtitles(*frame, 0.1);
provider->DrawSubtitles(frame, 0.1);
}
catch (...) { }
}
// Convert frame to bitmap
*bmp = static_cast<wxBitmap>(GetImage(*frame));
*bmp = static_cast<wxBitmap>(GetImage(frame));
Refresh();
}
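
This caller shows the shape of the new interface: instead of receiving a heap-allocated shared_ptr, the caller owns a VideoFrame and the provider fills it in place. A hedged usage sketch; RenderFrame is a hypothetical helper, not a function from the commit, and it assumes the provider and frame headers shown above are included:

// Hypothetical helper: drive the fill-in-place GetFrame API.
wxImage RenderFrame(VideoProvider &provider, int n) {
	VideoFrame frame;               // caller-owned, reusable buffer
	provider.GetFrame(n, frame);    // provider writes pixels into frame.data
	return GetImage(frame);         // convert the pixel data to a wxImage
}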

View File

@@ -19,24 +19,6 @@
#include <boost/gil/gil_all.hpp>
#include <wx/image.h>
VideoFrame::VideoFrame(const unsigned char *data, size_t width, size_t height, size_t pitch, bool flipped)
: data(data, data + width * height * 4)
, width(width)
, height(height)
, pitch(pitch)
, flipped(flipped)
{
}
VideoFrame::VideoFrame(std::vector<unsigned char>&& data, size_t width, size_t height, size_t pitch, bool flipped)
: data(std::move(data))
, width(width)
, height(height)
, pitch(pitch)
, flipped(flipped)
{
}
namespace {
// We actually have bgr_, not bgra, so we need a custom converter which ignores the alpha channel
struct color_converter {

View File

@@ -24,9 +24,6 @@ struct VideoFrame {
size_t height;
size_t pitch;
bool flipped;
VideoFrame(const unsigned char *data, size_t width, size_t height, size_t pitch, bool fipped);
VideoFrame(std::vector<unsigned char>&& data, size_t width, size_t height, size_t pitch, bool fipped);
};
wxImage GetImage(VideoFrame const& frame);

View File

@@ -73,7 +73,7 @@ class AvisynthVideoProvider: public VideoProvider {
public:
AvisynthVideoProvider(agi::fs::path const& filename, std::string const& colormatrix);
std::shared_ptr<VideoFrame> GetFrame(int n);
void GetFrame(int n, VideoFrame &frame) override;
void SetColorSpace(std::string const& matrix) override {
// Can't really do anything if this fails
@@ -309,11 +309,16 @@ AVSValue AvisynthVideoProvider::Open(agi::fs::path const& filename) {
throw VideoNotSupported("No function suitable for opening the video found");
}
std::shared_ptr<VideoFrame> AvisynthVideoProvider::GetFrame(int n) {
void AvisynthVideoProvider::GetFrame(int n, VideoFrame &out) {
std::lock_guard<std::mutex> lock(avs.GetMutex());
auto frame = RGB32Video->GetFrame(n, avs.GetEnv());
return std::make_shared<VideoFrame>(frame->GetReadPtr(), frame->GetRowSize() / 4, frame->GetHeight(), frame->GetPitch(), true);
auto ptr = frame->GetReadPtr();
out.data.assign(ptr, ptr + frame->GetPitch() * frame->GetHeight());
out.flipped = true;
out.height = frame->GetHeight();
out.width = frame->GetRowSize() / 4;
out.pitch = frame->GetPitch();
}
}
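
Filling the caller-supplied frame with assign() is what makes the reuse pay off: std::vector never shrinks its capacity, so once a recycled VideoFrame has grown to the size of one decoded frame, later assign() calls copy pixels into the existing allocation instead of touching the heap. A small stand-alone illustration of that vector behaviour (the 720p dimensions are just an example):

#include <cassert>
#include <vector>

int main() {
	std::vector<unsigned char> data;
	data.assign(1280 * 720 * 4, 0);   // first frame: one allocation
	auto cap = data.capacity();
	data.assign(1280 * 720 * 4, 0);   // later frames of the same size
	assert(data.capacity() == cap);   // capacity unchanged, no reallocation
}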

View File

@@ -25,14 +25,14 @@
namespace {
/// A video frame and its frame number
struct CachedFrame final : public VideoFrame {
struct CachedFrame {
VideoFrame frame;
int frame_number;
CachedFrame(int frame_number, VideoFrame const& frame)
: VideoFrame(frame.data.data(), frame.width, frame.height, frame.pitch, frame.flipped)
, frame_number(frame_number)
{
}
CachedFrame(VideoFrame const& frame, int frame_number)
: frame(frame), frame_number(frame_number) { }
CachedFrame(CachedFrame const&) = delete;
};
/// @class VideoProviderCache
@@ -45,19 +45,15 @@ class VideoProviderCache final : public VideoProvider {
///
/// Note that this is a soft limit. The cache stops allocating new frames
/// once it has exceeded the limit, but it never tries to shrink
const size_t max_cache_size;
const size_t max_cache_size = OPT_GET("Provider/Video/Cache/Size")->GetInt() << 20; // convert MB to bytes
/// Cache of video frames with the most recently used ones at the front
std::list<CachedFrame> cache;
public:
VideoProviderCache(std::unique_ptr<VideoProvider> master)
: master(std::move(master))
, max_cache_size(OPT_GET("Provider/Video/Cache/Size")->GetInt() << 20) // convert MB to bytes
{
}
VideoProviderCache(std::unique_ptr<VideoProvider> master) : master(std::move(master)) { }
std::shared_ptr<VideoFrame> GetFrame(int n) override;
void GetFrame(int n, VideoFrame &frame) override;
void SetColorSpace(std::string const& m) override {
cache.clear();
@@ -78,25 +74,28 @@ public:
bool HasAudio() const override { return master->HasAudio(); }
};
std::shared_ptr<VideoFrame> VideoProviderCache::GetFrame(int n) {
void VideoProviderCache::GetFrame(int n, VideoFrame &out) {
size_t total_size = 0;
for (auto cur = cache.begin(); cur != cache.end(); ++cur) {
if (cur->frame_number == n) {
cache.splice(cache.begin(), cache, cur); // Move to front
return std::make_shared<VideoFrame>(cache.front());
out = cache.front().frame;
return;
}
total_size += cur->data.size();
total_size += cur->frame.data.size();
}
auto frame = master->GetFrame(n);
master->GetFrame(n, out);
if (total_size >= max_cache_size)
cache.pop_back();
cache.emplace_front(n, *frame);
return frame;
if (total_size >= max_cache_size) {
cache.splice(cache.begin(), cache, --cache.end()); // Move last to front
cache.front().frame_number = n;
cache.front().frame = out;
}
else
cache.emplace_front(out, n);
}
}
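
Note how the cache now recycles its least recently used node when full: std::list::splice relinks the node to the front in O(1) without copying the stored frame, and only the payload is overwritten. A simplified stand-alone illustration of that relink, with std::string standing in for the cached frame:

#include <iostream>
#include <list>
#include <string>

int main() {
	std::list<std::string> cache = {"newest", "older", "oldest"};
	// Reuse the LRU node: relink it to the front in O(1), then overwrite
	// its payload in place; no node is allocated or freed.
	cache.splice(cache.begin(), cache, --cache.end());
	cache.front() = "incoming frame";
	for (auto const& s : cache) std::cout << s << '\n';
}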

View File

@@ -92,8 +92,12 @@ std::string DummyVideoProvider::MakeFilename(double fps, int frames, int width,
return agi::format("?dummy:%f:%d:%d:%d:%d:%d:%d:%s", fps, frames, width, height, (int)colour.r, (int)colour.g, (int)colour.b, (pattern ? "c" : ""));
}
std::shared_ptr<VideoFrame> DummyVideoProvider::GetFrame(int) {
return std::make_shared<VideoFrame>(data.data(), width, height, width * 4, false);
void DummyVideoProvider::GetFrame(int, VideoFrame &frame) {
frame.data = data;
frame.width = width;
frame.height = height;
frame.pitch = width * 4;
frame.flipped = false;
}
namespace agi { class BackgroundRunner; }

View File

@@ -64,7 +64,7 @@ public:
/// string will result in a video with the given parameters
static std::string MakeFilename(double fps, int frames, int width, int height, agi::Color colour, bool pattern);
std::shared_ptr<VideoFrame> GetFrame(int n) override;
void GetFrame(int n, VideoFrame &frame) override;
void SetColorSpace(std::string const&) override { }
int GetFrameCount() const override { return framecount; }

View File

@@ -70,7 +70,7 @@ class FFmpegSourceVideoProvider final : public VideoProvider, FFmpegSourceProvid
public:
FFmpegSourceVideoProvider(agi::fs::path const& filename, std::string const& colormatrix, agi::BackgroundRunner *br);
std::shared_ptr<VideoFrame> GetFrame(int n) override;
void GetFrame(int n, VideoFrame &out) override;
void SetColorSpace(std::string const& matrix) override {
#if FFMS_VERSION >= ((2 << 24) | (17 << 16) | (1 << 8) | 0)
@@ -285,14 +285,18 @@ void FFmpegSourceVideoProvider::LoadVideo(agi::fs::path const& filename, std::st
Timecodes = agi::vfr::Framerate(TimecodesVector);
}
std::shared_ptr<VideoFrame> FFmpegSourceVideoProvider::GetFrame(int n) {
void FFmpegSourceVideoProvider::GetFrame(int n, VideoFrame &out) {
n = mid(0, n, GetFrameCount() - 1);
auto frame = FFMS_GetFrame(VideoSource, n, &ErrInfo);
if (!frame)
throw VideoDecodeError(std::string("Failed to retrieve frame: ") + ErrInfo.Buffer);
return std::make_shared<VideoFrame>(frame->Data[0], Width, Height, frame->Linesize[0], false);
out.data.assign(frame->Data[0], frame->Data[0] + frame->Linesize[0] * Height);
out.flipped = false;
out.width = Width;
out.height = Height;
out.pitch = frame->Linesize[0];
}
}

View File

@@ -143,7 +143,7 @@ class YUV4MPEGVideoProvider final : public VideoProvider {
public:
YUV4MPEGVideoProvider(agi::fs::path const& filename);
std::shared_ptr<VideoFrame> GetFrame(int n) override;
void GetFrame(int n, VideoFrame &frame) override;
void SetColorSpace(std::string const&) override { }
int GetFrameCount() const override { return num_frames; }
@@ -391,7 +391,7 @@ int YUV4MPEGVideoProvider::IndexFile(uint64_t pos) {
return framecount;
}
std::shared_ptr<VideoFrame> YUV4MPEGVideoProvider::GetFrame(int n) {
void YUV4MPEGVideoProvider::GetFrame(int n, VideoFrame &frame) {
n = mid(0, n, num_frames - 1);
int uv_width = w / 2;
@@ -408,9 +408,8 @@ std::shared_ptr<VideoFrame> YUV4MPEGVideoProvider::GetFrame(int n) {
auto src_y = reinterpret_cast<const unsigned char *>(file.read(seek_table[n], luma_sz + chroma_sz * 2));
auto src_u = src_y + luma_sz;
auto src_v = src_u + chroma_sz;
std::vector<unsigned char> data;
data.resize(w * h * 4);
unsigned char *dst = &data[0];
frame.data.resize(w * h * 4);
unsigned char *dst = &frame.data[0];
for (int py = 0; py < h; ++py) {
for (int px = 0; px < w / 2; ++px) {
@@ -433,7 +432,10 @@ std::shared_ptr<VideoFrame> YUV4MPEGVideoProvider::GetFrame(int n) {
}
}
return std::make_shared<VideoFrame>(std::move(data), w, h, w * 4, false);
frame.flipped = false;
frame.width = w;
frame.height = h;
frame.pitch = w * 4;
}
}