VideoCommon: Remove support for decoding to ARGB textures

The D3D / OGL backends only ever used RGBA textures, and the Software
backend uses its own custom code for sampling. The ARGB path seems to
just be dead code.

Since ARGB and RGBA are similar formats, I don't think this makes the
code any harder to read or to use as a reference. Somebody who wants
this code to output ARGB can simply modify the MakeRGBA function to
put the shift at the other end.
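
As a rough sketch of that change, assuming a MakeRGBA-style helper that packs
one texel into a 32-bit value (the exact signature and channel order in
TextureDecoder may differ, so treat this as an illustration, not the real code):

#include <cstdint>

// Sketch only: R in the low byte, A in the high byte, i.e. the RGBA layout
// the remaining decode path produces.
static inline uint32_t MakeRGBA(uint32_t r, uint32_t g, uint32_t b, uint32_t a)
{
	return (a << 24) | (b << 16) | (g << 8) | r;
}

// Hypothetical ARGB variant: "put the shift at the other end", so alpha lands
// in the low byte and each color channel moves up one byte.
static inline uint32_t MakeARGB(uint32_t r, uint32_t g, uint32_t b, uint32_t a)
{
	return (b << 24) | (g << 16) | (r << 8) | a;
}
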
Author: Jasper St. Pierre
Date: 2014-08-10 13:39:20 -04:00
Parent: 9438a30384
Commit: a8e591dc73
9 changed files with 8 additions and 902 deletions

@@ -490,8 +490,7 @@ TextureCache::TCacheEntryBase* TextureCache::Load(unsigned int const stage,
 {
 	if (!(texformat == GX_TF_RGBA8 && from_tmem))
 	{
-		pcfmt = TexDecoder_Decode(temp, src_data, expandedWidth,
-			expandedHeight, texformat, tlutaddr, tlutfmt, g_ActiveConfig.backend_info.bUseRGBATextures);
+		pcfmt = TexDecoder_Decode(temp, src_data, expandedWidth, expandedHeight, texformat, tlutaddr, tlutfmt);
 	}
 	else
 	{
@@ -567,7 +566,7 @@ TextureCache::TCacheEntryBase* TextureCache::Load(unsigned int const stage,
 		const u8*& mip_src_data = from_tmem
 			? ((level % 2) ? ptr_odd : ptr_even)
 			: src_data;
-		TexDecoder_Decode(temp, mip_src_data, expanded_mip_width, expanded_mip_height, texformat, tlutaddr, tlutfmt, g_ActiveConfig.backend_info.bUseRGBATextures);
+		TexDecoder_Decode(temp, mip_src_data, expanded_mip_width, expanded_mip_height, texformat, tlutaddr, tlutfmt);
 		mip_src_data += TexDecoder_GetTextureSizeInBytes(expanded_mip_width, expanded_mip_height, texformat);
 		entry->Load(mip_width, mip_height, expanded_mip_width, level);