VideoCommon: Minor changes

Make Renderer::GetMaxTextureSize return u32 instead of int.
Shawn Hoffman
2016-10-03 06:51:46 -07:00
parent cc66f0336f
commit 86112c7258
11 changed files with 14 additions and 15 deletions


@@ -1809,14 +1809,14 @@ bool Renderer::SaveScreenshot(const std::string& filename, const TargetRectangle
   return TextureToPng(data.get(), W * 4, filename, W, H, false);
 }
 
-int Renderer::GetMaxTextureSize()
+u32 Renderer::GetMaxTextureSize()
 {
   // Right now nvidia seems to do something very weird if we try to cache GL_MAX_TEXTURE_SIZE in
   // init. This is a workaround that lets
   // us keep the perf improvement that caching it gives us.
   if (s_max_texture_size == 0)
     glGetIntegerv(GL_MAX_TEXTURE_SIZE, &s_max_texture_size);
-  return s_max_texture_size;
+  return static_cast<u32>(s_max_texture_size);
 }
 
 void Renderer::ChangeSurface(void* new_surface_handle)
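For context, a minimal sketch (not part of this commit) of the kind of caller an unsigned return type simplifies: texture dimensions are naturally unsigned, so clamping them against an int maximum forces casts or signed/unsigned comparison warnings. ClampTextureDimension is a hypothetical helper, and g_renderer is assumed to be the usual VideoCommon global renderer pointer; u32 is assumed to come from Dolphin's Common/CommonTypes.h.

#include <algorithm>

// Hypothetical caller (not from this commit): with GetMaxTextureSize()
// returning u32, an unsigned requested dimension can be clamped directly,
// with no cast and no -Wsign-compare warning.
static u32 ClampTextureDimension(u32 requested)
{
  // Assumes the VideoCommon g_renderer global.
  const u32 max_size = g_renderer->GetMaxTextureSize();
  return std::min(requested, max_size);
}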