TextureConversionShader: Consider source format of EFB for EFB2RAM

Currently, we use the alpha channel from the EFB even if the current
EFB format does not include an alpha channel. Now, the alpha channel is
set to 1 if the format lacks an alpha channel, and the color channels
are truncated to 5/6 bits per channel. This matches the EFB-to-texture
behavior.
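
In effect, the EFB2RAM path now masks each sampled pixel to what the EFB
format can actually hold before encoding it. Below is a minimal C++ sketch of
that per-pixel restriction, assuming Dolphin's three EFB color formats; the
Pixel struct and the ApplySourceFormat/Truncate helpers are hypothetical
models of the behavior, not the real shader-generator code:

#include <cmath>

// Hypothetical model of the restriction the generated EFB2RAM shader
// now applies before encoding. The enumerators mirror Dolphin's EFB
// pixel formats.
enum class PixelFormat { RGB8_Z24, RGBA6_Z24, RGB565_Z16 };

struct Pixel { float r, g, b, a; };  // normalized [0,1] channels

// Quantize a normalized channel down to 'bits' bits of precision.
static float Truncate(float v, int bits)
{
  const float levels = static_cast<float>((1 << bits) - 1);
  return std::floor(v * levels) / levels;
}

static Pixel ApplySourceFormat(Pixel p, PixelFormat fmt)
{
  switch (fmt)
  {
  case PixelFormat::RGB8_Z24:
    p.a = 1.0f;  // format has no alpha channel, so alpha reads as 1
    break;
  case PixelFormat::RGBA6_Z24:
    p.r = Truncate(p.r, 6);
    p.g = Truncate(p.g, 6);
    p.b = Truncate(p.b, 6);
    p.a = Truncate(p.a, 6);
    break;
  case PixelFormat::RGB565_Z16:
    p.r = Truncate(p.r, 5);  // 5 bits red
    p.g = Truncate(p.g, 6);  // 6 bits green
    p.b = Truncate(p.b, 5);  // 5 bits blue
    p.a = 1.0f;              // no alpha channel in RGB565
    break;
  }
  return p;
}

The EFB-to-texture path already applied this kind of masking, which is why
the message above describes the change as matching that behavior.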
Stenzek
2017-04-04 23:53:03 +10:00
parent 4e90c5da8b
commit 3847e226ab
3 changed files with 150 additions and 101 deletions


@@ -8,6 +8,7 @@
 #include <utility>
 
 #include "Common/CommonTypes.h"
+#include "VideoCommon/TextureDecoder.h"
 
 enum class APIType;
 
@@ -15,7 +16,7 @@ namespace TextureConversionShader
 {
 u16 GetEncodedSampleCount(u32 format);
 
-const char* GenerateEncodingShader(u32 format, APIType ApiType);
+const char* GenerateEncodingShader(const EFBCopyFormat& format, APIType ApiType);
 
 // View format of the input data to the texture decoding shader.
 enum BufferFormat
@@ -39,7 +40,7 @@ struct DecodingShaderInfo
 
 // Obtain shader information for the specified texture format.
 // If this format does not have a shader written for it, returns nullptr.
-const DecodingShaderInfo* GetDecodingShaderInfo(u32 format);
+const DecodingShaderInfo* GetDecodingShaderInfo(TextureFormat format);
 
 // Determine how many bytes there are in each element of the texel buffer.
 // Needed for alignment and stride calculations.
@@ -50,6 +51,7 @@ u32 GetBytesPerBufferElement(BufferFormat buffer_format);
 std::pair<u32, u32> GetDispatchCount(const DecodingShaderInfo* info, u32 width, u32 height);
 
 // Returns the GLSL string containing the texture decoding shader for the specified format.
-std::string GenerateDecodingShader(u32 format, u32 palette_format, APIType api_type);
+std::string GenerateDecodingShader(TextureFormat format, TlutFormat palette_format,
+                                   APIType api_type);
 
 } // namespace TextureConversionShader
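
For context on the signature changes above, here is a hedged sketch of what a
call site might look like after this commit; CompileTextureDecoder and its
control flow are invented for illustration, and only the functions declared in
this header are real:

#include <string>
#include <utility>
#include "VideoCommon/TextureConversionShader.h"
#include "VideoCommon/TextureDecoder.h"

// Hypothetical call site showing the updated, strongly typed API.
bool CompileTextureDecoder(TextureFormat format, TlutFormat palette_format,
                           APIType api_type, u32 width, u32 height)
{
  const TextureConversionShader::DecodingShaderInfo* info =
      TextureConversionShader::GetDecodingShaderInfo(format);
  if (!info)
    return false;  // no shader written for this format; fall back elsewhere

  std::string source =
      TextureConversionShader::GenerateDecodingShader(format, palette_format, api_type);

  // Work-group counts for dispatching the compute decoder.
  std::pair<u32, u32> groups =
      TextureConversionShader::GetDispatchCount(info, width, height);

  // ... compile `source` and dispatch groups.first x groups.second ...
  return true;
}

Passing TextureFormat and TlutFormat instead of raw u32 values lets the
compiler reject mismatched format arguments that the old untyped signatures
would have accepted silently.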