OGL: Implement GPU texture decoding backend

Stenzek
2016-11-27 18:15:00 +10:00
parent 79ba946d70
commit b01bcb80f4
4 changed files with 193 additions and 6 deletions


@@ -108,7 +108,11 @@ void VideoBackend::InitBackendInfo()
   g_Config.backend_info.bSupportsReversedDepthRange = true;
   g_Config.backend_info.bSupportsMultithreading = false;
   g_Config.backend_info.bSupportsInternalResolutionFrameDumps = true;
-  g_Config.backend_info.bSupportsGPUTextureDecoding = false;
+  // TODO: There is a bug here: if texel buffers are not supported, the graphics options
+  // will still show this setting even though it cannot be used. The only way around this
+  // would be creating a context when calling this function to determine what is available.
+  g_Config.backend_info.bSupportsGPUTextureDecoding = true;
   // Overwritten in Render.cpp later
   g_Config.backend_info.bSupportsDualSourceBlend = true;
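
The TODO exists because InitBackendInfo() runs before any GL context is created, so texel-buffer support cannot be queried at this point; the flag is set optimistically and would have to be corrected later, the same way the "Overwritten in Render.cpp later" comment describes for dual-source blending. Below is a minimal sketch of that kind of post-context correction. The helper function, its call site, and the GLExtensions::Version()/Supports() queries are assumptions for illustration, not part of this commit:

// Hypothetical follow-up after context creation (e.g. in Render.cpp):
// verify that texel buffers are actually available and clear the flag
// otherwise. GPU texture decoding reads the encoded source data through
// a texel buffer (GL_TEXTURE_BUFFER), which requires GL 3.1+ or the
// ARB_texture_buffer_object extension.
static void AdjustGPUTextureDecodingSupport()
{
  // Assumed helpers: GLExtensions::Version() returning the context version
  // as an integer (e.g. 310), and GLExtensions::Supports() testing an
  // extension string against the created context.
  const bool has_texel_buffers =
      GLExtensions::Version() >= 310 ||
      GLExtensions::Supports("GL_ARB_texture_buffer_object");

  if (!has_texel_buffers)
    g_Config.backend_info.bSupportsGPUTextureDecoding = false;
}

Until something along these lines runs, the graphics options dialog reads the optimistic value and shows the GPU texture decoding setting even on drivers that cannot honor it, which is exactly the bug the TODO calls out.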