Added a method to AegiVideoFrame that converts frames from YV12 to RGB32; it is used by screenshot capture and by the video context when the video provider can only supply YV12 (e.g. ffmpeg).

Originally committed to SVN as r981.
This commit is contained in:
Rodrigo Braz Monteiro 2007-04-03 02:31:51 +00:00
parent 325b88df19
commit 4e2af4cd57
5 changed files with 123 additions and 35 deletions

View file

@ -269,7 +269,7 @@ bool OpenGLWrapper::ShadersAvailable() {
static bool first = true;
if (first) {
first = false;
wxMessageBox(_T("Warning, OpenGL shaders are not available on this machine. YV12 video will be on greyscale."),_T("GL Shaders Error"));
if (!available) wxMessageBox(_T("Warning, OpenGL shaders are not available on this machine. YV12 video will be on greyscale."),_T("GL Shaders Error"));
}
return available;
}

View file

@ -408,30 +408,45 @@ GLuint VideoContext::GetFrameAsTexture(int n) {
// Get frame
AegiVideoFrame frame = GetFrame(n);
AegiVideoFrame *srcFrame = &frame;
// Set frame
lastFrame = n;
// Image type
GLenum format = GL_LUMINANCE;
if (frame.format == FORMAT_RGB32) {
if (frame.invertChannels) format = GL_BGRA_EXT;
else format = GL_RGBA;
}
else if (frame.format == FORMAT_RGB24) {
if (frame.invertChannels) format = GL_BGR_EXT;
else format = GL_RGB;
}
else if (frame.format == FORMAT_YV12) {
format = GL_LUMINANCE;
}
isInverted = frame.flipped;
// Set context
GetGLContext(displayList.front())->SetCurrent(*displayList.front());
glEnable(GL_TEXTURE_2D);
if (glGetError() != 0) throw _T("Error enabling texture.");
// Deal with YV12
bool doMakeShader = false;
if (frame.format == FORMAT_YV12 && yv12shader == 0) {
doMakeShader = OpenGLWrapper::UseShaders();
if (!doMakeShader) {
tempRGBFrame.w = frame.w;
tempRGBFrame.h = frame.h;
tempRGBFrame.pitch[0] = frame.w * 4;
tempRGBFrame.format = FORMAT_RGB32;
tempRGBFrame.ConvertFrom(frame);
srcFrame = &tempRGBFrame;
}
}
// Image type
GLenum format = GL_LUMINANCE;
if (srcFrame->format == FORMAT_RGB32) {
if (srcFrame->invertChannels) format = GL_BGRA_EXT;
else format = GL_RGBA;
}
else if (srcFrame->format == FORMAT_RGB24) {
if (srcFrame->invertChannels) format = GL_BGR_EXT;
else format = GL_RGB;
}
else if (srcFrame->format == FORMAT_YV12) {
format = GL_LUMINANCE;
}
isInverted = srcFrame->flipped;
if (lastTex == 0) {
// Enable
glShadeModel(GL_FLAT);
@ -454,9 +469,9 @@ GLuint VideoContext::GetFrameAsTexture(int n) {
if (glGetError() != 0) throw _T("Error setting wrap_t texture parameter.");
// Load image data into texture
int height = frame.h;
if (frame.format == FORMAT_YV12) height = height * 3 / 2;
int tw = SmallestPowerOf2(MAX(frame.pitch[0]/frame.GetBpp(0),frame.pitch[1]+frame.pitch[2]));
int height = srcFrame->h;
if (srcFrame->format == FORMAT_YV12) height = height * 3 / 2;
int tw = SmallestPowerOf2(MAX(srcFrame->pitch[0]/srcFrame->GetBpp(0),srcFrame->pitch[1]+srcFrame->pitch[2]));
int th = SmallestPowerOf2(height);
glTexImage2D(GL_TEXTURE_2D,0,GL_RGBA8,tw,th,0,format,GL_UNSIGNED_BYTE,NULL);
if (glGetError() != 0) {
@ -468,38 +483,38 @@ GLuint VideoContext::GetFrameAsTexture(int n) {
if (glGetError() != 0) throw _T("Error allocating texture.");
}
}
texW = float(frame.w)/float(tw);
texH = float(frame.h)/float(th);
texW = float(srcFrame->w)/float(tw);
texH = float(srcFrame->h)/float(th);
// Set texture
//glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST);
if (glGetError() != 0) throw _T("Error setting hinting.");
//if (glGetError() != 0) throw _T("Error setting hinting.");
// Create shader
if (doMakeShader) {
yv12shader = OpenGLWrapper::CreateYV12Shader(texW,texH,float(srcFrame->pitch[1])/float(tw));
}
// Set priority
float priority = 1.0f;
glPrioritizeTextures(1,&lastTex,&priority);
// Create shader if necessary
if (frame.format == FORMAT_YV12 && yv12shader == 0 && OpenGLWrapper::UseShaders()) {
yv12shader = OpenGLWrapper::CreateYV12Shader(texW,texH,float(frame.pitch[1])/float(tw));
}
}
// Load texture data
glTexSubImage2D(GL_TEXTURE_2D,0,0,0,frame.pitch[0]/frame.GetBpp(0),frame.h,format,GL_UNSIGNED_BYTE,frame.data[0]);
glTexSubImage2D(GL_TEXTURE_2D,0,0,0,srcFrame->pitch[0]/srcFrame->GetBpp(0),srcFrame->h,format,GL_UNSIGNED_BYTE,srcFrame->data[0]);
if (glGetError() != 0) throw _T("Error uploading primary plane");
// UV planes for YV12
if (frame.format == FORMAT_YV12) {
if (srcFrame->format == FORMAT_YV12) {
int u = 1;
int v = 2;
if (frame.invertChannels) {
if (srcFrame->invertChannels) {
u = 2;
v = 1;
}
glTexSubImage2D(GL_TEXTURE_2D,0,0,frame.h,frame.pitch[1],frame.h/2,format,GL_UNSIGNED_BYTE,frame.data[u]);
glTexSubImage2D(GL_TEXTURE_2D,0,0,srcFrame->h,srcFrame->pitch[1],srcFrame->h/2,format,GL_UNSIGNED_BYTE,srcFrame->data[u]);
if (glGetError() != 0) throw _T("Error uploading U plane.");
glTexSubImage2D(GL_TEXTURE_2D,0,frame.pitch[1],frame.h,frame.pitch[2],frame.h/2,format,GL_UNSIGNED_BYTE,frame.data[v]);
glTexSubImage2D(GL_TEXTURE_2D,0,srcFrame->pitch[1],srcFrame->h,srcFrame->pitch[2],srcFrame->h/2,format,GL_UNSIGNED_BYTE,srcFrame->data[v]);
if (glGetError() != 0) throw _T("Error uploadinv V plane.");
}

View file

@ -72,6 +72,7 @@ private:
wxGLContext *glContext;
VideoFrameFormat vidFormat;
AegiVideoFrame tempFrame;
AegiVideoFrame tempRGBFrame;
wxString tempfile;
VideoProvider *provider;

View file

@ -37,6 +37,7 @@
///////////
// Headers
#include "video_frame.h"
#include "utils.h"
/////////
@ -208,9 +209,15 @@ wxImage AegiVideoFrame::GetImage() const {
}
// YV12
//else if (format == FORMAT_YV12) {
// TODO
//}
else if (format == FORMAT_YV12) {
AegiVideoFrame temp;
temp.w = w;
temp.h = h;
temp.pitch[0] = w*4;
temp.format = FORMAT_RGB32;
temp.ConvertFrom(*this);
return temp.GetImage();
}
else {
return wxImage(w,h);
@ -256,3 +263,67 @@ int AegiVideoFrame::GetBpp(int plane) const {
default: return 0;
}
}
//////////////////////////////
// Convert from another frame
// Converts the pixel data of "source" into this frame's format.
// Only the YV12 (planar 4:2:0) -> RGB32 direction is supported; the caller
// must pre-set this frame's w, h, pitch[0] and format before calling
// (callers in this commit set pitch[0] = w*4 and format = FORMAT_RGB32).
// Both frames must have identical dimensions. Throws a string literal
// (this file's error convention) on any mismatch.
void AegiVideoFrame::ConvertFrom(const AegiVideoFrame &source) {
// Ensure compatibility
if (w != source.w) throw _T("AegiVideoFrame::ConvertFrom: Widths don't match.");
if (h != source.h) throw _T("AegiVideoFrame::ConvertFrom: Heights don't match.");
if (format != FORMAT_RGB32) throw _T("AegiVideoFrame::ConvertFrom: Unsupported destination format.");
if (source.format != FORMAT_YV12) throw _T("AegiVideoFrame::ConvertFrom: Unsupported source format.");
// Allocate
// (destination buffer, using the w/h/pitch/format set above by the caller)
Allocate();
// Set up pointers
// Three source planes: Y at full resolution, U and V at half resolution
// in each dimension (one chroma sample per 2x2 luma block).
const unsigned char *src_y = source.data[0];
const unsigned char *src_u = source.data[1];
const unsigned char *src_v = source.data[2];
unsigned char *dst = data[0];
// Set up pitches
// Each delta is the number of padding bytes left at the end of a row
// after the visible pixels have been consumed (pitch may exceed width).
const int src_delta1 = source.pitch[0]-w;
const int src_delta2 = source.pitch[1]-w/2;
const int src_delta3 = source.pitch[2]-w/2;
const int dst_delta = pitch[0]-w*4;
int r,g,b,y,u,v,c,d,e;
// Loop
// NOTE(review): w is assumed even here (w/2 pairs per row) -- odd widths
// would drop the last column; confirm providers never produce odd widths.
for (unsigned int py=0;py<h;py++) {
for (unsigned int px=0;px<w/2;px++) {
// One chroma pair is shared by two horizontally adjacent luma samples
u = *src_u++;
v = *src_v++;
for (unsigned int i=0;i<2;i++) {
y = *src_y++;
// Convert
// Standard 8-bit YUV->RGB coefficients (ITU-R BT.601 studio swing:
// Y in [16,235], U/V centered at 128), with MID clamping to [0,255].
c = y - 16;
d = u - 128;
e = v - 128;
r = MID(0,( 298 * c + 409 * e + 128) >> 8,255);
g = MID(0,( 298 * c - 100 * d - 208 * e + 128) >> 8,255);
b = MID(0,( 298 * c + 516 * d + 128) >> 8,255);
// Assign
// Written in B,G,R,A memory order; the alpha byte is left at 0.
// NOTE(review): alpha 0 means fully transparent if this buffer is
// ever alpha-blended -- confirm consumers ignore the alpha channel.
*dst++ = b;
*dst++ = g;
*dst++ = r;
*dst++ = 0;
}
}
// Increase pointers
// (skip row padding; src_u/src_v now point at the start of the NEXT
// chroma row)
src_y += src_delta1;
src_u += src_delta2;
src_v += src_delta3;
dst += dst_delta;
// Roll back u/v on even lines
// Each chroma row covers two luma rows, so after an even (0-based)
// luma row rewind U/V by one full pitch to reuse the same chroma row
// for the following odd luma row.
if (!(py & 1)) {
src_u -= source.pitch[1];
src_v -= source.pitch[2];
}
}
}

View file

@ -72,6 +72,7 @@ public:
void Allocate();
void Clear();
void CopyFrom(const AegiVideoFrame &source);
void ConvertFrom(const AegiVideoFrame &source);
wxImage GetImage() const;
void GetFloat(float *buffer) const;