Fix some assorted VideoOutGL-related stupidity. Updates #1056.
Originally committed to SVN as r3864.
Parent: fedd8ecce6
Commit: 2f2f0d9aac
3 changed files with 36 additions and 43 deletions
@@ -217,14 +217,14 @@ void VideoDisplay::SetFrame(int frameNumber) {
 			wxLogError(
 				L"Failed to initialize video display. Closing other running programs and updating your video card drivers may fix this.\n"
 				L"Error message reported: %s",
-				err.GetMessage());
+				err.GetMessage().c_str());
 			context->Reset();
 		}
 		catch (const VideoOutRenderException& err) {
 			wxLogError(
 				L"Could not upload video frame to graphics card.\n"
 				L"Error message reported: %s",
-				err.GetMessage());
+				err.GetMessage().c_str());
 		}
 	}
 	Render();
@@ -322,7 +322,7 @@ catch (const VideoOutException &err) {
 	wxLogError(
 		_T("An error occurred trying to render the video frame on the screen.\n")
 		_T("Error message reported: %s"),
-		err.GetMessage());
+		err.GetMessage().c_str());
 	VideoContext::Get()->Reset();
 }
 catch (const wxChar *err) {
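Both hunks above make the same fix: a wxString was being passed directly through wxLogError's printf-style varargs, where a non-POD object cannot travel safely, so the underlying character pointer is passed instead. A minimal standalone sketch of the same pattern, using std::string and printf as stand-ins for wxString and wxLogError (not Aegisub code):

#include <cstdio>
#include <string>

int main() {
	std::string message = "out of video memory";              // stands in for err.GetMessage()
	// printf("Error message reported: %s\n", message);       // unsafe: non-POD object through '...'
	printf("Error message reported: %s\n", message.c_str());  // correct: pass the C string pointer
	return 0;
}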
@@ -61,9 +61,8 @@
 #define CHECK_INIT_ERROR(cmd) cmd; if (GLenum err = glGetError()) throw VideoOutInitException(_T(#cmd), err)
 #define CHECK_ERROR(cmd) cmd; if (GLenum err = glGetError()) throw VideoOutRenderException(_T(#cmd), err)
 
-namespace {
 /// @brief Structure tracking all precomputable information about a subtexture
-struct TextureInfo {
+struct VideoOutGL::TextureInfo {
 	/// The OpenGL texture id this is for
 	GLuint textureID;
 	/// The byte offset into the frame's data block
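The CHECK_INIT_ERROR/CHECK_ERROR macros shown as context here wrap a statement, poll the GL error state, and turn a failure into an exception carrying the stringified statement. A self-contained sketch of that macro shape, with a fake error source standing in for OpenGL (names below are illustrative, not Aegisub's):

#include <stdexcept>
#include <string>

static int g_lastError = 0;                                              // stands in for the GL error flag
static int FakeGetError() { int e = g_lastError; g_lastError = 0; return e; }
static void FakeCommand(bool fail) { if (fail) g_lastError = 0x0501; }

// Same shape as CHECK_ERROR: run the statement, then convert any reported
// error into an exception recording the statement text and the error code.
#define CHECK(cmd) cmd; if (int err = FakeGetError()) \
	throw std::runtime_error(std::string(#cmd) + " failed with error " + std::to_string(err))

int main() {
	try {
		CHECK(FakeCommand(false)); // succeeds, no throw
		CHECK(FakeCommand(true));  // fails, throws with the stringified call and code
	}
	catch (const std::exception& e) {
		// e.what() names the failing call, much like VideoOutRenderException does
	}
	return 0;
}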
@@ -84,23 +83,20 @@ namespace {
 	float texLeft;
 	float texRight;
 };
 
 /// @brief Test if a texture can be created
 /// @param width The width of the texture
 /// @param height The height of the texture
 /// @param format The texture's format
 /// @return Whether the texture could be created.
-bool TestTexture(int width, int height, GLint format) {
-	GLuint texture;
-	glGenTextures(1, &texture);
-	glTexImage2D(GL_PROXY_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
+static bool TestTexture(int width, int height, GLint format) {
+	glTexImage2D(GL_PROXY_TEXTURE_2D, 0, format, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
 	glGetTexLevelParameteriv(GL_PROXY_TEXTURE_2D, 0, GL_TEXTURE_INTERNAL_FORMAT, &format);
-	glDeleteTextures(1, &texture);
 	while (glGetError()) { } // Silently swallow all errors as we don't care why it failed if it did
 
 	wxLogDebug("VideoOutGL::TestTexture: %dx%d\n", width, height);
 	return format != 0;
 }
-}
 
 VideoOutGL::VideoOutGL()
 : maxTextureSize(0),
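The rewritten TestTexture relies on OpenGL proxy textures: uploading to GL_PROXY_TEXTURE_2D allocates nothing, but if the driver cannot handle the requested size and format combination it reports the internal format of level 0 as zero. A standalone sketch of that probing technique, assuming <GL/gl.h> is available and a GL context is already current (the function name is illustrative):

#include <GL/gl.h>

static bool CanCreateTexture(int width, int height, GLint format) {
	// Proxy upload: validates the size/format combination without allocating
	glTexImage2D(GL_PROXY_TEXTURE_2D, 0, format, width, height, 0,
	             GL_RGBA, GL_UNSIGNED_BYTE, NULL);
	GLint result = 0;
	glGetTexLevelParameteriv(GL_PROXY_TEXTURE_2D, 0, GL_TEXTURE_INTERNAL_FORMAT, &result);
	while (glGetError()) { } // discard any errors raised purely by the probe
	return result != 0;      // zero internal format means the texture is unsupported
}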
@@ -136,8 +132,6 @@ void VideoOutGL::DetectOpenGLCapabilities() {
 	supportsRectangularTextures = TestTexture(maxTextureSize, maxTextureSize >> 1, internalFormat);
 
 	// Test GL_CLAMP_TO_EDGE support
-	GLuint texture;
-	glGenTextures(1, &texture);
 	glTexImage2D(GL_PROXY_TEXTURE_2D, 0, GL_RGBA8, 64, 64, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
 	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
 	if (glGetError()) {
@@ -41,14 +41,13 @@
 #endif
 
 class AegiVideoFrame;
-namespace {
-	struct TextureInfo;
-}
 
 /// @class VideoOutGL
 /// @brief OpenGL based video renderer
 class VideoOutGL {
 private:
+	struct TextureInfo;
+
 	/// The maximum texture size supported by the user's graphics card
 	int maxTextureSize;
 	/// Whether rectangular textures are supported by the user's graphics card
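The header change switches TextureInfo from an anonymous-namespace type to a private nested struct that is only forward-declared in the header and defined with a qualified name in the implementation file (matching the struct VideoOutGL::TextureInfo definition earlier in this diff). A minimal sketch of that pattern with illustrative names (Renderer and Detail are not Aegisub's):

// renderer.h (illustrative)
class Renderer {
	struct Detail;   // forward declaration; the layout stays out of the header
	Detail* detail;
public:
	Renderer();
	~Renderer();
};

// renderer.cpp (illustrative)
struct Renderer::Detail {
	unsigned textureID;
};

Renderer::Renderer() : detail(new Detail) {}
Renderer::~Renderer() { delete detail; }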