Merge pull request #5051 from stenzek/renderer-fixes

VideoBackends: Fix crashes introduced by #4999
Markus Wick, 2017-03-09 21:06:50 +01:00, committed by GitHub
26 changed files with 91 additions and 97 deletions
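In short, this PR moves GL extension initialization and the capability queries out of the OGL Renderer constructor and into two new VideoBackend helpers, InitializeGLExtensions() and FillBackendInfo(), which Video_Prepare() now runs right after MakeCurrent() and before constructing the Renderer. InitBackendInfo() also gains a MaxTextureSize default of 16384, presumably so the field holds a sane value before the real GL query fills it in. Below is a minimal, self-contained sketch of that initialization order; every name in it (BackendInfo, QueryMaxTextureSize, the main() driver) is a stand-in, not Dolphin's real API:

    // Sketch: query GPU capabilities once, after a context exists, and cache
    // them where the renderer and other consumers can read them without GL.
    #include <cstdio>
    #include <memory>

    struct BackendInfo
    {
      unsigned max_texture_size = 16384;  // safe default until the GL query runs
    };

    static BackendInfo s_backend_info;

    // Stand-in for glGetIntegerv(GL_MAX_TEXTURE_SIZE, ...); assumes a current context.
    static int QueryMaxTextureSize()
    {
      return 16384;
    }

    static bool FillBackendInfo()
    {
      const int max_size = QueryMaxTextureSize();
      if (max_size < 1024)
        return false;  // below the backend's minimum requirement
      s_backend_info.max_texture_size = static_cast<unsigned>(max_size);
      return true;
    }

    struct Renderer
    {
      // May assume s_backend_info is already populated.
    };

    int main()
    {
      // Video_Prepare() equivalent: context first, capability queries second,
      // Renderer construction last.
      if (!FillBackendInfo())
      {
        std::puts("GPU does not meet the minimum requirements");
        return 1;
      }
      std::unique_ptr<Renderer> renderer = std::make_unique<Renderer>();
      std::printf("max texture size: %u\n", s_backend_info.max_texture_size);
      return 0;
    }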

Source/Core/VideoBackends/OGL/Render.cpp

@@ -51,8 +51,6 @@ void VideoConfig::UpdateProjectionHack()
   ::UpdateProjectionHack(g_Config.iPhackvalue, g_Config.sPhackvalue);
 }
 
-static int s_max_texture_size = 0;
-
 namespace OGL
 {
 VideoConfig g_ogl_config;
@@ -334,49 +332,12 @@ Renderer::Renderer()
 {
   bool bSuccess = true;
 
-  // Init extension support.
-  if (!GLExtensions::Init())
-  {
-    // OpenGL 2.0 is required for all shader based drawings. There is no way to get this by
-    // extensions
-    PanicAlert("GPU: OGL ERROR: Does your video card support OpenGL 2.0?");
-    bSuccess = false;
-  }
-
   g_ogl_config.gl_vendor = (const char*)glGetString(GL_VENDOR);
   g_ogl_config.gl_renderer = (const char*)glGetString(GL_RENDERER);
   g_ogl_config.gl_version = (const char*)glGetString(GL_VERSION);
 
   InitDriverInfo();
 
-  if (GLExtensions::Version() < 300)
-  {
-    // integer vertex attributes require a gl3 only function
-    PanicAlert("GPU: OGL ERROR: Need OpenGL version 3.\n"
-               "GPU: Does your video card support OpenGL 3?");
-    bSuccess = false;
-  }
-
-  // check for the max vertex attributes
-  GLint numvertexattribs = 0;
-  glGetIntegerv(GL_MAX_VERTEX_ATTRIBS, &numvertexattribs);
-  if (numvertexattribs < 16)
-  {
-    PanicAlert("GPU: OGL ERROR: Number of attributes %d not enough.\n"
-               "GPU: Does your video card support OpenGL 2.x?",
-               numvertexattribs);
-    bSuccess = false;
-  }
-
-  // check the max texture width and height
-  GLint max_texture_size;
-  glGetIntegerv(GL_MAX_TEXTURE_SIZE, (GLint*)&max_texture_size);
-  if (max_texture_size < 1024)
-  {
-    PanicAlert("GL_MAX_TEXTURE_SIZE too small at %i - must be at least 1024.", max_texture_size);
-    bSuccess = false;
-  }
-
   if (GLInterface->GetMode() == GLInterfaceMode::MODE_OPENGL)
   {
     if (!GLExtensions::Supports("GL_ARB_framebuffer_object"))
@@ -657,10 +618,6 @@ Renderer::Renderer()
     return;
   }
 
-  glGetIntegerv(GL_MAX_SAMPLES, &g_ogl_config.max_samples);
-  if (g_ogl_config.max_samples < 1 || !g_ogl_config.bSupportsMSAA)
-    g_ogl_config.max_samples = 1;
-
   g_Config.VerifyValidity();
   UpdateActiveConfig();
 
@@ -1778,19 +1735,6 @@ void Renderer::SetInterlacingMode()
 {
   // TODO
 }
-}
-
-namespace OGL
-{
-u32 Renderer::GetMaxTextureSize()
-{
-  // Right now nvidia seems to do something very weird if we try to cache GL_MAX_TEXTURE_SIZE in
-  // init. This is a workaround that lets
-  // us keep the perf improvement that caching it gives us.
-  if (s_max_texture_size == 0)
-    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &s_max_texture_size);
-  return static_cast<u32>(s_max_texture_size);
-}
 
 void Renderer::ChangeSurface(void* new_surface_handle)
 {
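A note on the removal above: the old GetMaxTextureSize() queried GL_MAX_TEXTURE_SIZE lazily on first use, because (per its comment) querying during init misbehaved on NVIDIA, and cached the result in s_max_texture_size. After this change the query runs eagerly, once, in FillBackendInfo() (see main.cpp below), and consumers read the cached g_Config.backend_info.MaxTextureSize without touching GL at all. A small illustrative sketch of the consumer side; ClampToMaxTexture and the stub types are hypothetical, not part of this diff:

    #include <algorithm>
    #include <cstdint>

    // Stand-ins for Dolphin's VideoConfig/backend_info.
    struct BackendInfo { uint32_t MaxTextureSize = 16384; };
    static BackendInfo backend_info;  // filled once by the FillBackendInfo() equivalent

    // Hypothetical helper: no glGetIntegerv() here, so callers need neither a
    // current GL context nor the first-use check the old lazy getter performed.
    static uint32_t ClampToMaxTexture(uint32_t dim)
    {
      return std::min(dim, backend_info.MaxTextureSize);
    }

    int main()
    {
      return ClampToMaxTexture(32768) == 16384 ? 0 : 1;  // oversized value gets clamped
    }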

Source/Core/VideoBackends/OGL/Render.h

@@ -103,8 +103,6 @@ public:
   void ReinterpretPixelData(unsigned int convtype) override;
 
-  u32 GetMaxTextureSize() override;
-
   void ChangeSurface(void* new_surface_handle) override;
 
 private:

Source/Core/VideoBackends/OGL/VideoBackend.h

@@ -23,5 +23,9 @@ class VideoBackend : public VideoBackendBase
   void InitBackendInfo() override;
 
   unsigned int PeekMessages() override;
+
+private:
+  bool InitializeGLExtensions();
+  bool FillBackendInfo();
 };
 }

Source/Core/VideoBackends/OGL/main.cpp

@@ -42,6 +42,7 @@ Make AA apply instantly during gameplay if possible
 #include "Common/FileSearch.h"
 #include "Common/GL/GLInterfaceBase.h"
 #include "Common/GL/GLUtil.h"
+#include "Common/MsgHandler.h"
 #include "VideoBackends/OGL/BoundingBox.h"
 #include "VideoBackends/OGL/PerfQuery.h"
@@ -96,6 +97,7 @@ static std::vector<std::string> GetShaders(const std::string& sub_dir = "")
 void VideoBackend::InitBackendInfo()
 {
   g_Config.backend_info.api_type = APIType::OpenGL;
+  g_Config.backend_info.MaxTextureSize = 16384;
   g_Config.backend_info.bSupportsExclusiveFullscreen = false;
   g_Config.backend_info.bSupportsOversizedViewports = true;
   g_Config.backend_info.bSupportsGeometryShaders = true;
@@ -123,6 +125,59 @@ void VideoBackend::InitBackendInfo()
   g_Config.backend_info.AnaglyphShaders = GetShaders(ANAGLYPH_DIR DIR_SEP);
 }
 
+bool VideoBackend::InitializeGLExtensions()
+{
+  // Init extension support.
+  if (!GLExtensions::Init())
+  {
+    // OpenGL 2.0 is required for all shader based drawings. There is no way to get this by
+    // extensions
+    PanicAlert("GPU: OGL ERROR: Does your video card support OpenGL 2.0?");
+    return false;
+  }
+
+  if (GLExtensions::Version() < 300)
+  {
+    // integer vertex attributes require a gl3 only function
+    PanicAlert("GPU: OGL ERROR: Need OpenGL version 3.\n"
+               "GPU: Does your video card support OpenGL 3?");
+    return false;
+  }
+
+  return true;
+}
+
+bool VideoBackend::FillBackendInfo()
+{
+  // check for the max vertex attributes
+  GLint numvertexattribs = 0;
+  glGetIntegerv(GL_MAX_VERTEX_ATTRIBS, &numvertexattribs);
+  if (numvertexattribs < 16)
+  {
+    PanicAlert("GPU: OGL ERROR: Number of attributes %d not enough.\n"
+               "GPU: Does your video card support OpenGL 2.x?",
+               numvertexattribs);
+    return false;
+  }
+
+  // check the max texture width and height
+  GLint max_texture_size = 0;
+  glGetIntegerv(GL_MAX_TEXTURE_SIZE, &max_texture_size);
+  g_Config.backend_info.MaxTextureSize = static_cast<u32>(max_texture_size);
+  if (max_texture_size < 1024)
+  {
+    PanicAlert("GL_MAX_TEXTURE_SIZE too small at %i - must be at least 1024.", max_texture_size);
+    return false;
+  }
+
+  glGetIntegerv(GL_MAX_SAMPLES, &g_ogl_config.max_samples);
+  if (g_ogl_config.max_samples < 1 || !g_ogl_config.bSupportsMSAA)
+    g_ogl_config.max_samples = 1;
+
+  // TODO: Move the remaining fields from the Renderer constructor here.
+  return true;
+}
+
 bool VideoBackend::Initialize(void* window_handle)
 {
   InitBackendInfo();
@@ -141,6 +196,12 @@ bool VideoBackend::Initialize(void* window_handle)
 void VideoBackend::Video_Prepare()
 {
   GLInterface->MakeCurrent();
 
+  if (!InitializeGLExtensions() || !FillBackendInfo())
+  {
+    // TODO: Handle this better. We'll likely end up crashing anyway, but this method doesn't
+    // return anything, so we can't inform the caller that startup failed.
+    return;
+  }
 
   g_renderer = std::make_unique<Renderer>();
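One last observation: the TODO in Video_Prepare() exists because the method returns void, so a failed InitializeGLExtensions() or FillBackendInfo() cannot be reported to the caller. A hypothetical way to close that gap, sketched here as an assumption and not something this PR implements, is to give the prepare step a boolean result:

    #include <memory>

    struct Renderer {};
    static std::unique_ptr<Renderer> g_renderer;

    static bool InitializeGLExtensions() { return true; }  // stub
    static bool FillBackendInfo() { return true; }         // stub

    // Hypothetical bool-returning variant of Video_Prepare(): the frontend
    // could then abort startup cleanly instead of limping on toward a crash.
    static bool PrepareVideo()
    {
      if (!InitializeGLExtensions() || !FillBackendInfo())
        return false;
      g_renderer = std::make_unique<Renderer>();
      return true;
    }

    int main()
    {
      return PrepareVideo() ? 0 : 1;
    }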