VideoBackends: Move max texture size to VideoConfig

This stops the virtual method call from within the Renderer constructor.

For GL, this initialization had to be moved to VideoBackend, as the
Renderer constructor will not have been executed by the time the value
is required.
Author: Stenzek
Date:   2017-03-10 00:01:23 +10:00
Parent: 4012166085
Commit: 2cd240af0d
24 changed files with 79 additions and 83 deletions
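
The underlying issue is the usual C++ pitfall: a virtual call made from a base-class constructor dispatches to the base implementation, because the derived object has not been constructed yet. Below is a minimal standalone sketch of the pattern the commit moves to; the names (BackendInfo, g_backend_info, RendererBase, OGLRenderer) are simplified stand-ins for illustration, not Dolphin's actual classes.

// Illustrative sketch only; simplified stand-in names, not Dolphin code.
#include <cstdint>
#include <iostream>

struct BackendInfo
{
  // Filled in by the backend before the renderer is constructed.
  uint32_t MaxTextureSize = 0;
};

static BackendInfo g_backend_info;

class RendererBase
{
public:
  RendererBase()
  {
    // A virtual GetMaxTextureSize() called here would resolve to the base
    // class, since the derived object does not exist yet. Reading the value
    // the backend already published into the config avoids that trap.
    m_max_texture_size = g_backend_info.MaxTextureSize;
  }
  uint32_t m_max_texture_size = 0;
};

class OGLRenderer : public RendererBase
{
};

int main()
{
  // The backend queries the driver and stores the result up front,
  // mirroring what FillBackendInfo() does in the diff below.
  g_backend_info.MaxTextureSize = 16384;

  OGLRenderer renderer;
  std::cout << renderer.m_max_texture_size << '\n';  // prints 16384
  return 0;
}

With the backend publishing the limit into the shared config before the renderer exists, the Renderer constructor can simply read the field, which is what the diff below does for the OpenGL backend.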

@@ -42,6 +42,7 @@ Make AA apply instantly during gameplay if possible
#include "Common/FileSearch.h"
#include "Common/GL/GLInterfaceBase.h"
#include "Common/GL/GLUtil.h"
#include "Common/MsgHandler.h"
#include "VideoBackends/OGL/BoundingBox.h"
#include "VideoBackends/OGL/PerfQuery.h"
@@ -96,6 +97,7 @@ static std::vector<std::string> GetShaders(const std::string& sub_dir = "")
void VideoBackend::InitBackendInfo()
{
  g_Config.backend_info.api_type = APIType::OpenGL;
  g_Config.backend_info.MaxTextureSize = 16384;
  g_Config.backend_info.bSupportsExclusiveFullscreen = false;
  g_Config.backend_info.bSupportsOversizedViewports = true;
  g_Config.backend_info.bSupportsGeometryShaders = true;
@@ -123,6 +125,59 @@ void VideoBackend::InitBackendInfo()
  g_Config.backend_info.AnaglyphShaders = GetShaders(ANAGLYPH_DIR DIR_SEP);
}

bool VideoBackend::InitializeGLExtensions()
{
  // Init extension support.
  if (!GLExtensions::Init())
  {
    // OpenGL 2.0 is required for all shader based drawings. There is no way to get this by
    // extensions
    PanicAlert("GPU: OGL ERROR: Does your video card support OpenGL 2.0?");
    return false;
  }

  if (GLExtensions::Version() < 300)
  {
    // integer vertex attributes require a gl3 only function
    PanicAlert("GPU: OGL ERROR: Need OpenGL version 3.\n"
               "GPU: Does your video card support OpenGL 3?");
    return false;
  }

  return true;
}

bool VideoBackend::FillBackendInfo()
{
  // check for the max vertex attributes
  GLint numvertexattribs = 0;
  glGetIntegerv(GL_MAX_VERTEX_ATTRIBS, &numvertexattribs);
  if (numvertexattribs < 16)
  {
    PanicAlert("GPU: OGL ERROR: Number of attributes %d not enough.\n"
               "GPU: Does your video card support OpenGL 2.x?",
               numvertexattribs);
    return false;
  }

  // check the max texture width and height
  GLint max_texture_size = 0;
  glGetIntegerv(GL_MAX_TEXTURE_SIZE, &max_texture_size);
  g_Config.backend_info.MaxTextureSize = static_cast<u32>(max_texture_size);
  if (max_texture_size < 1024)
  {
    PanicAlert("GL_MAX_TEXTURE_SIZE too small at %i - must be at least 1024.", max_texture_size);
    return false;
  }

  glGetIntegerv(GL_MAX_SAMPLES, &g_ogl_config.max_samples);
  if (g_ogl_config.max_samples < 1 || !g_ogl_config.bSupportsMSAA)
    g_ogl_config.max_samples = 1;

  // TODO: Move the remaining fields from the Renderer constructor here.
  return true;
}

bool VideoBackend::Initialize(void* window_handle)
{
  InitBackendInfo();
@@ -141,6 +196,12 @@ bool VideoBackend::Initialize(void* window_handle)
void VideoBackend::Video_Prepare()
{
  GLInterface->MakeCurrent();

  if (!InitializeGLExtensions() || !FillBackendInfo())
  {
    // TODO: Handle this better. We'll likely end up crashing anyway, but this method doesn't
    // return anything, so we can't inform the caller that startup failed.
    return;
  }

  g_renderer = std::make_unique<Renderer>();