Merge pull request #5051 from stenzek/renderer-fixes

VideoBackends: Fix crashes introduced by #4999
Markus Wick, 2017-03-09 21:06:50 +01:00 (committed by GitHub)
26 changed files with 91 additions and 97 deletions
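
For orientation before the per-file hunks: the change replaces the per-backend virtual Renderer::GetMaxTextureSize() with a MaxTextureSize field on g_Config.backend_info that each backend fills in during its own initialization, before any Renderer exists. Below is a minimal sketch of that pattern, not Dolphin's actual code: the struct layouts and the *_Example function names are illustrative assumptions; only the MaxTextureSize field and the query-once-at-init flow mirror the diff.

#include <algorithm>
#include <cstdint>

// Simplified stand-ins for Dolphin's VideoConfig / backend_info (illustrative only).
struct BackendInfo
{
  uint32_t MaxTextureSize = 0;  // published by the backend before any Renderer exists
};

struct VideoConfig
{
  BackendInfo backend_info;
};

static VideoConfig g_Config;

// A backend would set the limit from its own API during init, e.g. D3D11 calls
// DX11::D3D::GetMaxTextureSize(), D3D12 uses D3D12_REQ_TEXTURE2D_U_OR_V_DIMENSION,
// Vulkan reads properties.limits.maxImageDimension2D, OpenGL queries GL_MAX_TEXTURE_SIZE,
// and the Null/Software backends hardcode 16384.
void InitBackendInfo_Example()
{
  g_Config.backend_info.MaxTextureSize = 16384;  // placeholder constant for the sketch
}

// Consumers read the cached field instead of calling a virtual on the Renderer,
// so code that runs before the Renderer is constructed no longer crashes.
uint32_t ClampTextureDimension_Example(uint32_t requested)
{
  return std::min(requested, g_Config.backend_info.MaxTextureSize);
}

Concretely, GetMaxTextureSize() is deleted from every backend's Renderer in the hunks below, and each backend's InitBackendInfo() (or, for Vulkan, PopulateBackendInfoFeatures()) gains one MaxTextureSize assignment.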


@@ -1129,11 +1129,6 @@ void Renderer::SetInterlacingMode()
// TODO
}
u32 Renderer::GetMaxTextureSize()
{
return DX11::D3D::GetMaxTextureSize();
}
u16 Renderer::BBoxRead(int index)
{
// Here we get the min/max value of the truncated position of the upscaled framebuffer.


@@ -62,8 +62,6 @@ public:
bool CheckForResize();
u32 GetMaxTextureSize() override;
private:
void BlitScreen(TargetRectangle src, TargetRectangle dst, D3DTexture2D* src_texture,
u32 src_width, u32 src_height, float Gamma);


@@ -61,6 +61,7 @@ void VideoBackend::InitBackendInfo()
}
g_Config.backend_info.api_type = APIType::D3D;
g_Config.backend_info.MaxTextureSize = DX11::D3D::GetMaxTextureSize();
g_Config.backend_info.bSupportsExclusiveFullscreen = true;
g_Config.backend_info.bSupportsDualSourceBlend = true;
g_Config.backend_info.bSupportsPrimitiveRestart = true;


@@ -121,8 +121,6 @@ const std::string PixelShaderVersionString();
const std::string GeometryShaderVersionString();
const std::string VertexShaderVersionString();
unsigned int GetMaxTextureSize();
HRESULT SetFullscreenState(bool enable_fullscreen);
bool GetFullscreenState();


@@ -1159,11 +1159,6 @@ void Renderer::SetInterlacingMode()
// EXISTINGD3D11TODO
}
u32 Renderer::GetMaxTextureSize()
{
return DX12::D3D::GetMaxTextureSize();
}
u16 Renderer::BBoxRead(int index)
{
// Here we get the min/max value of the truncated position of the upscaled framebuffer.


@@ -61,8 +61,6 @@ public:
bool CheckForResize();
u32 GetMaxTextureSize() override;
static D3D12_BLEND_DESC GetResetBlendDesc();
static D3D12_DEPTH_STENCIL_DESC GetResetDepthStencilDesc();
static D3D12_RASTERIZER_DESC GetResetRasterizerDesc();


@@ -64,6 +64,7 @@ void VideoBackend::InitBackendInfo()
}
g_Config.backend_info.api_type = APIType::D3D;
g_Config.backend_info.MaxTextureSize = D3D12_REQ_TEXTURE2D_U_OR_V_DIMENSION;
g_Config.backend_info.bSupportsExclusiveFullscreen = false;
g_Config.backend_info.bSupportsDualSourceBlend = true;
g_Config.backend_info.bSupportsPrimitiveRestart = true;


@@ -24,6 +24,7 @@ namespace Null
void VideoBackend::InitBackendInfo()
{
g_Config.backend_info.api_type = APIType::Nothing;
g_Config.backend_info.MaxTextureSize = 16384;
g_Config.backend_info.bSupportsExclusiveFullscreen = true;
g_Config.backend_info.bSupportsDualSourceBlend = true;
g_Config.backend_info.bSupportsPrimitiveRestart = true;


@@ -19,7 +19,6 @@ public:
void PokeEFB(EFBAccessType type, const EfbPokeData* points, size_t num_points) override {}
u16 BBoxRead(int index) override { return 0; }
void BBoxWrite(int index, u16 value) override {}
u32 GetMaxTextureSize() override { return 16 * 1024; }
TargetRectangle ConvertEFBRectangle(const EFBRectangle& rc) override;
void SwapImpl(u32 xfb_addr, u32 fb_width, u32 fb_stride, u32 fb_height, const EFBRectangle& rc,


@@ -51,8 +51,6 @@ void VideoConfig::UpdateProjectionHack()
::UpdateProjectionHack(g_Config.iPhackvalue, g_Config.sPhackvalue);
}
static int s_max_texture_size = 0;
namespace OGL
{
VideoConfig g_ogl_config;
@@ -334,49 +332,12 @@ Renderer::Renderer()
{
bool bSuccess = true;
// Init extension support.
if (!GLExtensions::Init())
{
// OpenGL 2.0 is required for all shader based drawings. There is no way to get this by
// extensions
PanicAlert("GPU: OGL ERROR: Does your video card support OpenGL 2.0?");
bSuccess = false;
}
g_ogl_config.gl_vendor = (const char*)glGetString(GL_VENDOR);
g_ogl_config.gl_renderer = (const char*)glGetString(GL_RENDERER);
g_ogl_config.gl_version = (const char*)glGetString(GL_VERSION);
InitDriverInfo();
if (GLExtensions::Version() < 300)
{
// integer vertex attributes require a gl3 only function
PanicAlert("GPU: OGL ERROR: Need OpenGL version 3.\n"
"GPU: Does your video card support OpenGL 3?");
bSuccess = false;
}
// check for the max vertex attributes
GLint numvertexattribs = 0;
glGetIntegerv(GL_MAX_VERTEX_ATTRIBS, &numvertexattribs);
if (numvertexattribs < 16)
{
PanicAlert("GPU: OGL ERROR: Number of attributes %d not enough.\n"
"GPU: Does your video card support OpenGL 2.x?",
numvertexattribs);
bSuccess = false;
}
// check the max texture width and height
GLint max_texture_size;
glGetIntegerv(GL_MAX_TEXTURE_SIZE, (GLint*)&max_texture_size);
if (max_texture_size < 1024)
{
PanicAlert("GL_MAX_TEXTURE_SIZE too small at %i - must be at least 1024.", max_texture_size);
bSuccess = false;
}
if (GLInterface->GetMode() == GLInterfaceMode::MODE_OPENGL)
{
if (!GLExtensions::Supports("GL_ARB_framebuffer_object"))
@@ -657,10 +618,6 @@ Renderer::Renderer()
return;
}
glGetIntegerv(GL_MAX_SAMPLES, &g_ogl_config.max_samples);
if (g_ogl_config.max_samples < 1 || !g_ogl_config.bSupportsMSAA)
g_ogl_config.max_samples = 1;
g_Config.VerifyValidity();
UpdateActiveConfig();
@@ -1778,19 +1735,6 @@ void Renderer::SetInterlacingMode()
{
// TODO
}
}
namespace OGL
{
u32 Renderer::GetMaxTextureSize()
{
// Right now nvidia seems to do something very weird if we try to cache GL_MAX_TEXTURE_SIZE in
// init. This is a workaround that lets
// us keep the perf improvement that caching it gives us.
if (s_max_texture_size == 0)
glGetIntegerv(GL_MAX_TEXTURE_SIZE, &s_max_texture_size);
return static_cast<u32>(s_max_texture_size);
}
void Renderer::ChangeSurface(void* new_surface_handle)
{


@@ -103,8 +103,6 @@ public:
void ReinterpretPixelData(unsigned int convtype) override;
u32 GetMaxTextureSize() override;
void ChangeSurface(void* new_surface_handle) override;
private:


@@ -23,5 +23,9 @@ class VideoBackend : public VideoBackendBase
void InitBackendInfo() override;
unsigned int PeekMessages() override;
private:
bool InitializeGLExtensions();
bool FillBackendInfo();
};
}


@@ -42,6 +42,7 @@ Make AA apply instantly during gameplay if possible
#include "Common/FileSearch.h"
#include "Common/GL/GLInterfaceBase.h"
#include "Common/GL/GLUtil.h"
#include "Common/MsgHandler.h"
#include "VideoBackends/OGL/BoundingBox.h"
#include "VideoBackends/OGL/PerfQuery.h"
@@ -96,6 +97,7 @@ static std::vector<std::string> GetShaders(const std::string& sub_dir = "")
void VideoBackend::InitBackendInfo()
{
g_Config.backend_info.api_type = APIType::OpenGL;
g_Config.backend_info.MaxTextureSize = 16384;
g_Config.backend_info.bSupportsExclusiveFullscreen = false;
g_Config.backend_info.bSupportsOversizedViewports = true;
g_Config.backend_info.bSupportsGeometryShaders = true;
@@ -123,6 +125,59 @@ void VideoBackend::InitBackendInfo()
g_Config.backend_info.AnaglyphShaders = GetShaders(ANAGLYPH_DIR DIR_SEP);
}
bool VideoBackend::InitializeGLExtensions()
{
// Init extension support.
if (!GLExtensions::Init())
{
// OpenGL 2.0 is required for all shader based drawings. There is no way to get this by
// extensions
PanicAlert("GPU: OGL ERROR: Does your video card support OpenGL 2.0?");
return false;
}
if (GLExtensions::Version() < 300)
{
// integer vertex attributes require a gl3 only function
PanicAlert("GPU: OGL ERROR: Need OpenGL version 3.\n"
"GPU: Does your video card support OpenGL 3?");
return false;
}
return true;
}
bool VideoBackend::FillBackendInfo()
{
// check for the max vertex attributes
GLint numvertexattribs = 0;
glGetIntegerv(GL_MAX_VERTEX_ATTRIBS, &numvertexattribs);
if (numvertexattribs < 16)
{
PanicAlert("GPU: OGL ERROR: Number of attributes %d not enough.\n"
"GPU: Does your video card support OpenGL 2.x?",
numvertexattribs);
return false;
}
// check the max texture width and height
GLint max_texture_size = 0;
glGetIntegerv(GL_MAX_TEXTURE_SIZE, &max_texture_size);
g_Config.backend_info.MaxTextureSize = static_cast<u32>(max_texture_size);
if (max_texture_size < 1024)
{
PanicAlert("GL_MAX_TEXTURE_SIZE too small at %i - must be at least 1024.", max_texture_size);
return false;
}
glGetIntegerv(GL_MAX_SAMPLES, &g_ogl_config.max_samples);
if (g_ogl_config.max_samples < 1 || !g_ogl_config.bSupportsMSAA)
g_ogl_config.max_samples = 1;
// TODO: Move the remaining fields from the Renderer constructor here.
return true;
}
bool VideoBackend::Initialize(void* window_handle)
{
InitBackendInfo();
@@ -141,6 +196,12 @@ bool VideoBackend::Initialize(void* window_handle)
void VideoBackend::Video_Prepare()
{
GLInterface->MakeCurrent();
if (!InitializeGLExtensions() || !FillBackendInfo())
{
// TODO: Handle this better. We'll likely end up crashing anyway, but this method doesn't
// return anything, so we can't inform the caller that startup failed.
return;
}
g_renderer = std::make_unique<Renderer>();


@@ -31,7 +31,6 @@ public:
u16 BBoxRead(int index) override;
void BBoxWrite(int index, u16 value) override;
u32 GetMaxTextureSize() override { return 16 * 1024; };
TargetRectangle ConvertEFBRectangle(const EFBRectangle& rc) override;
void SwapImpl(u32 xfbAddr, u32 fbWidth, u32 fbStride, u32 fbHeight, const EFBRectangle& rc,


@@ -124,6 +124,7 @@ std::string VideoSoftware::GetDisplayName() const
void VideoSoftware::InitBackendInfo()
{
g_Config.backend_info.api_type = APIType::Nothing;
g_Config.backend_info.MaxTextureSize = 16384;
g_Config.backend_info.bSupports3DVision = false;
g_Config.backend_info.bSupportsDualSourceBlend = true;
g_Config.backend_info.bSupportsEarlyZ = true;


@@ -41,7 +41,6 @@ public:
void PokeEFB(EFBAccessType type, const EfbPokeData* points, size_t num_points) override;
u16 BBoxRead(int index) override;
void BBoxWrite(int index, u16 value) override;
u32 GetMaxTextureSize() override { return 16 * 1024; }
TargetRectangle ConvertEFBRectangle(const EFBRectangle& rc) override;
void SwapImpl(u32 xfb_addr, u32 fb_width, u32 fb_stride, u32 fb_height, const EFBRectangle& rc,


@@ -257,8 +257,10 @@ void VulkanContext::PopulateBackendInfoAdapters(VideoConfig* config, const GPULi
}
void VulkanContext::PopulateBackendInfoFeatures(VideoConfig* config, VkPhysicalDevice gpu,
const VkPhysicalDeviceProperties& properties,
const VkPhysicalDeviceFeatures& features)
{
config->backend_info.MaxTextureSize = properties.limits.maxImageDimension2D;
config->backend_info.bSupportsDualSourceBlend = (features.dualSrcBlend == VK_TRUE);
config->backend_info.bSupportsGeometryShaders = (features.geometryShader == VK_TRUE);
config->backend_info.bSupportsGSInstancing = (features.geometryShader == VK_TRUE);


@@ -35,6 +35,7 @@ public:
static void PopulateBackendInfo(VideoConfig* config);
static void PopulateBackendInfoAdapters(VideoConfig* config, const GPUList& gpu_list);
static void PopulateBackendInfoFeatures(VideoConfig* config, VkPhysicalDevice gpu,
const VkPhysicalDeviceProperties& properties,
const VkPhysicalDeviceFeatures& features);
static void PopulateBackendInfoMultisampleModes(VideoConfig* config, VkPhysicalDevice gpu,
const VkPhysicalDeviceProperties& properties);


@@ -52,7 +52,7 @@ void VideoBackend::InitBackendInfo()
vkGetPhysicalDeviceProperties(gpu, &properties);
VkPhysicalDeviceFeatures features;
vkGetPhysicalDeviceFeatures(gpu, &features);
VulkanContext::PopulateBackendInfoFeatures(&g_Config, gpu, features);
VulkanContext::PopulateBackendInfoFeatures(&g_Config, gpu, properties, features);
VulkanContext::PopulateBackendInfoMultisampleModes(&g_Config, gpu, properties);
}
}
@@ -178,6 +178,7 @@ bool VideoBackend::Initialize(void* window_handle)
// Since VulkanContext maintains a copy of the device features and properties, we can use this
// to initialize the backend information, so that we don't need to enumerate everything again.
VulkanContext::PopulateBackendInfoFeatures(&g_Config, g_vulkan_context->GetPhysicalDevice(),
g_vulkan_context->GetDeviceProperties(),
g_vulkan_context->GetDeviceFeatures());
VulkanContext::PopulateBackendInfoMultisampleModes(
&g_Config, g_vulkan_context->GetPhysicalDevice(), g_vulkan_context->GetDeviceProperties());