VideoBackends: Add configuration field for GPU texture decoding

Author: Stenzek
Date: 2016-11-27 18:14:57 +10:00
parent abc662d69c
commit 82fd984f3e
9 changed files with 27 additions and 0 deletions

Source/Core/VideoCommon/VideoConfig.cpp

@@ -81,6 +81,7 @@ void VideoConfig::Load(const std::string& ini_file)
 settings->Get("DumpPath", &sDumpPath, "");
 settings->Get("BitrateKbps", &iBitrateKbps, 2500);
 settings->Get("InternalResolutionFrameDumps", &bInternalResolutionFrameDumps, false);
+settings->Get("EnableGPUTextureDecoding", &bEnableGPUTextureDecoding, false);
 settings->Get("EnablePixelLighting", &bEnablePixelLighting, false);
 settings->Get("FastDepthCalc", &bFastDepthCalc, true);
 settings->Get("MSAA", &iMultisamples, 1);
@@ -305,6 +306,7 @@ void VideoConfig::Save(const std::string& ini_file)
 settings->Set("DumpPath", sDumpPath);
 settings->Set("BitrateKbps", iBitrateKbps);
 settings->Set("InternalResolutionFrameDumps", bInternalResolutionFrameDumps);
+settings->Set("EnableGPUTextureDecoding", bEnableGPUTextureDecoding);
 settings->Set("EnablePixelLighting", bEnablePixelLighting);
 settings->Set("FastDepthCalc", bFastDepthCalc);
 settings->Set("MSAA", iMultisamples);

Source/Core/VideoCommon/VideoConfig.h

@@ -108,6 +108,7 @@ struct VideoConfig final
 bool bInternalResolutionFrameDumps;
 bool bFreeLook;
 bool bBorderlessFullscreen;
+bool bEnableGPUTextureDecoding;
 int iBitrateKbps;
 // Hacks
@@ -196,6 +197,7 @@ struct VideoConfig final
 bool bSupportsReversedDepthRange;
 bool bSupportsMultithreading;
 bool bSupportsInternalResolutionFrameDumps;
+bool bSupportsGPUTextureDecoding;
 } backend_info;
 // Utility
@@ -211,6 +213,10 @@ struct VideoConfig final
 return false;
 return backend_info.bSupportsBBox && backend_info.bSupportsFragmentStoresAndAtomics;
 }
+bool UseGPUTextureDecoding() const
+{
+return backend_info.bSupportsGPUTextureDecoding && bEnableGPUTextureDecoding;
+}
 };
 extern VideoConfig g_Config;
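
A hedged sketch of how the two new fields are meant to interact (InitBackendInfo and LoadTexture are illustrative names, not code from this commit): a backend advertises hardware capability through backend_info, the user opts in through the ini-backed flag, and texture loading only takes the GPU path when UseGPUTextureDecoding() reports both.

#include "VideoCommon/VideoConfig.h"  // assumed include path for the declarations above

void InitBackendInfo()  // hypothetical backend setup
{
  // A backend would set this only if it can actually decode textures on the
  // GPU (e.g. compute shader support); hard-coded here for illustration.
  g_Config.backend_info.bSupportsGPUTextureDecoding = true;
}

void LoadTexture()  // hypothetical texture-cache path
{
  if (g_Config.UseGPUTextureDecoding())
  {
    // Decode the texture with a GPU shader (actual decode path not shown).
  }
  else
  {
    // Fall back to the existing CPU texture decoder.
  }
}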