2015-05-23 22:55:12 -06:00
|
|
|
// Copyright 2008 Dolphin Emulator Project
|
2015-05-17 17:08:10 -06:00
|
|
|
// Licensed under GPLv2+
|
2013-04-17 21:29:41 -06:00
|
|
|
// Refer to the license.txt file included.
|
2008-12-07 22:25:12 -07:00
|
|
|
|
2011-02-13 19:18:03 -07:00
|
|
|
// OpenGL Backend Documentation
|
2013-10-28 23:23:17 -06:00
|
|
|
/*
|
2009-06-06 07:36:33 -06:00
|
|
|
|
|
|
|
1.1 Display settings
|
2009-06-15 00:39:26 -06:00
|
|
|
|
2009-09-09 13:52:45 -06:00
|
|
|
Internal and fullscreen resolution: Since the only internal resolutions allowed
|
|
|
|
are also fullscreen resolution allowed by the system there is only need for one
|
|
|
|
resolution setting that applies to both the internal resolution and the
|
|
|
|
fullscreen resolution. - Apparently no, someone else doesn't agree
|
2009-06-06 07:36:33 -06:00
|
|
|
|
2009-09-09 13:52:45 -06:00
|
|
|
Todo: Make the internal resolution option apply instantly; currently only the
native and 2x options apply instantly. To do this we need to be able to
reinitialize the framebuffer (FramebufferManager::Init()) while a game is
running.
|
2009-06-06 07:36:33 -06:00
|
|
|
|
2009-06-07 05:51:53 -06:00
|
|
|
1.2 Screenshots
|
2009-06-15 00:39:26 -06:00
|
|
|
|
2009-06-07 05:51:53 -06:00
|
|
|
|
2009-09-09 13:52:45 -06:00
|
|
|
The screenshots should be taken from the internal representation of the picture
regardless of what the current window size is. Since AA and wireframe are
applied together with the picture resizing, this rule is not currently applied
to AA or wireframe pictures; they are instead taken at whatever the window
size is.
|
2009-06-07 05:51:53 -06:00
|
|
|
|
2009-09-09 13:52:45 -06:00
|
|
|
Todo: Render AA and wireframe to a separate picture used for the screenshot in
|
|
|
|
addition to the one for display.
|
2009-06-07 05:51:53 -06:00
|
|
|
|
|
|
|
1.3 AA
|
2009-06-15 00:39:26 -06:00
|
|
|
|
2009-06-07 05:51:53 -06:00
|
|
|
Make AA apply instantly during gameplay if possible
|
|
|
|
|
2009-06-15 00:39:26 -06:00
|
|
|
*/
|
2009-06-06 07:36:33 -06:00
|
|
|
|
2015-12-20 19:49:49 -07:00
|
|
|
#include <memory>
|
2016-01-02 13:01:12 -07:00
|
|
|
#include <string>
|
|
|
|
#include <vector>
|
2014-02-17 03:18:15 -07:00
|
|
|
|
2015-09-18 10:40:00 -06:00
|
|
|
#include "Common/GL/GLInterfaceBase.h"
|
|
|
|
#include "Common/GL/GLUtil.h"
|
2017-03-09 07:01:23 -07:00
|
|
|
#include "Common/MsgHandler.h"
|
2014-02-17 03:18:15 -07:00
|
|
|
|
2014-11-13 15:26:49 -07:00
|
|
|
#include "VideoBackends/OGL/BoundingBox.h"
|
2014-02-17 03:18:15 -07:00
|
|
|
#include "VideoBackends/OGL/PerfQuery.h"
|
|
|
|
#include "VideoBackends/OGL/ProgramShaderCache.h"
|
|
|
|
#include "VideoBackends/OGL/Render.h"
|
|
|
|
#include "VideoBackends/OGL/SamplerCache.h"
|
|
|
|
#include "VideoBackends/OGL/TextureCache.h"
|
|
|
|
#include "VideoBackends/OGL/TextureConverter.h"
|
|
|
|
#include "VideoBackends/OGL/VertexManager.h"
|
|
|
|
#include "VideoBackends/OGL/VideoBackend.h"
|
|
|
|
|
|
|
|
#include "VideoCommon/OnScreenDisplay.h"
|
2016-07-21 17:04:57 -06:00
|
|
|
#include "VideoCommon/VideoCommon.h"
|
2014-02-17 03:18:15 -07:00
|
|
|
#include "VideoCommon/VideoConfig.h"
|
|
|
|
|
2011-01-30 18:28:32 -07:00
|
|
|
namespace OGL
|
2010-12-19 12:43:18 -07:00
|
|
|
{
|
2014-03-10 23:55:00 -06:00
|
|
|
std::string VideoBackend::GetName() const
|
2013-05-01 07:51:43 -06:00
|
|
|
{
|
2016-06-24 02:43:46 -06:00
|
|
|
return "OGL";
|
2013-05-01 07:51:43 -06:00
|
|
|
}
|
|
|
|
|
2014-03-10 23:55:00 -06:00
|
|
|
std::string VideoBackend::GetDisplayName() const
|
2009-07-02 11:11:27 -06:00
|
|
|
{
|
2016-06-24 02:43:46 -06:00
|
|
|
if (GLInterface != nullptr && GLInterface->GetMode() == GLInterfaceMode::MODE_OPENGLES3)
|
|
|
|
return "OpenGLES";
|
|
|
|
else
|
|
|
|
return "OpenGL";
|
2009-09-09 13:52:45 -06:00
|
|
|
}
|
2009-02-21 21:24:53 -07:00
|
|
|
|
2016-01-13 13:38:11 -07:00
|
|
|
void VideoBackend::InitBackendInfo()
|
2009-09-09 13:52:45 -06:00
|
|
|
{
|
2016-07-21 17:04:57 -06:00
|
|
|
g_Config.backend_info.api_type = APIType::OpenGL;
|
2017-03-09 07:01:23 -07:00
|
|
|
g_Config.backend_info.MaxTextureSize = 16384;
|
2016-06-24 02:43:46 -06:00
|
|
|
g_Config.backend_info.bSupportsExclusiveFullscreen = false;
|
|
|
|
g_Config.backend_info.bSupportsOversizedViewports = true;
|
|
|
|
g_Config.backend_info.bSupportsGeometryShaders = true;
|
2016-11-27 01:14:55 -07:00
|
|
|
g_Config.backend_info.bSupportsComputeShaders = false;
|
2016-06-24 02:43:46 -06:00
|
|
|
g_Config.backend_info.bSupports3DVision = false;
|
|
|
|
g_Config.backend_info.bSupportsPostProcessing = true;
|
|
|
|
g_Config.backend_info.bSupportsSSAA = true;
|
2016-08-21 09:47:24 -06:00
|
|
|
g_Config.backend_info.bSupportsReversedDepthRange = true;
|
2016-08-13 06:08:46 -06:00
|
|
|
g_Config.backend_info.bSupportsMultithreading = false;
|
2017-05-29 16:02:09 -06:00
|
|
|
g_Config.backend_info.bSupportsCopyToVram = true;
|
2016-11-27 01:15:00 -07:00
|
|
|
|
|
|
|
// TODO: There is a bug here, if texel buffers are not supported the graphics options
|
|
|
|
// will show the option when it is not supported. The only way around this would be
|
|
|
|
// creating a context when calling this function to determine what is available.
|
|
|
|
g_Config.backend_info.bSupportsGPUTextureDecoding = true;
|
2016-06-24 02:43:46 -06:00
|
|
|
|
2016-01-13 13:14:20 -07:00
|
|
|
// Overwritten in Render.cpp later
|
|
|
|
g_Config.backend_info.bSupportsDualSourceBlend = true;
|
|
|
|
g_Config.backend_info.bSupportsPrimitiveRestart = true;
|
|
|
|
g_Config.backend_info.bSupportsPaletteConversion = true;
|
|
|
|
g_Config.backend_info.bSupportsClipControl = true;
|
2016-08-05 14:31:34 -06:00
|
|
|
g_Config.backend_info.bSupportsDepthClamp = true;
|
2017-04-16 03:30:11 -06:00
|
|
|
g_Config.backend_info.bSupportsST3CTextures = false;
|
2017-07-27 06:00:04 -06:00
|
|
|
g_Config.backend_info.bSupportsBPTCTextures = false;
|
2016-01-13 13:14:20 -07:00
|
|
|
|
2016-06-24 02:43:46 -06:00
|
|
|
g_Config.backend_info.Adapters.clear();
|
|
|
|
|
|
|
|
// aamodes - 1 is to stay consistent with D3D (means no AA)
|
|
|
|
g_Config.backend_info.AAModes = {1, 2, 4, 8};
|
2011-05-31 14:16:59 -06:00
|
|
|
}
|
2010-11-22 15:17:35 -07:00
|
|
|
|
2017-03-09 07:01:23 -07:00
|
|
|
bool VideoBackend::InitializeGLExtensions()
|
|
|
|
{
|
|
|
|
// Init extension support.
|
|
|
|
if (!GLExtensions::Init())
|
|
|
|
{
|
|
|
|
// OpenGL 2.0 is required for all shader based drawings. There is no way to get this by
|
|
|
|
// extensions
|
|
|
|
PanicAlert("GPU: OGL ERROR: Does your video card support OpenGL 2.0?");
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (GLExtensions::Version() < 300)
|
|
|
|
{
|
|
|
|
// integer vertex attributes require a gl3 only function
|
|
|
|
PanicAlert("GPU: OGL ERROR: Need OpenGL version 3.\n"
|
|
|
|
"GPU: Does your video card support OpenGL 3?");
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool VideoBackend::FillBackendInfo()
|
|
|
|
{
|
|
|
|
// check for the max vertex attributes
|
|
|
|
GLint numvertexattribs = 0;
|
|
|
|
glGetIntegerv(GL_MAX_VERTEX_ATTRIBS, &numvertexattribs);
|
|
|
|
if (numvertexattribs < 16)
|
|
|
|
{
|
|
|
|
PanicAlert("GPU: OGL ERROR: Number of attributes %d not enough.\n"
|
|
|
|
"GPU: Does your video card support OpenGL 2.x?",
|
|
|
|
numvertexattribs);
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
// check the max texture width and height
|
|
|
|
GLint max_texture_size = 0;
|
|
|
|
glGetIntegerv(GL_MAX_TEXTURE_SIZE, &max_texture_size);
|
|
|
|
g_Config.backend_info.MaxTextureSize = static_cast<u32>(max_texture_size);
|
|
|
|
if (max_texture_size < 1024)
|
|
|
|
{
|
|
|
|
PanicAlert("GL_MAX_TEXTURE_SIZE too small at %i - must be at least 1024.", max_texture_size);
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
// TODO: Move the remaining fields from the Renderer constructor here.
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2016-01-02 13:01:12 -07:00
|
|
|
bool VideoBackend::Initialize(void* window_handle)
|
2008-12-07 22:25:12 -07:00
|
|
|
{
|
2016-06-24 02:43:46 -06:00
|
|
|
InitBackendInfo();
|
2016-01-13 13:14:20 -07:00
|
|
|
InitializeShared();
|
2009-07-30 19:55:26 -06:00
|
|
|
|
2016-06-24 02:43:46 -06:00
|
|
|
InitInterface();
|
|
|
|
GLInterface->SetMode(GLInterfaceMode::MODE_DETECT);
|
2017-11-10 20:55:00 -07:00
|
|
|
if (!GLInterface->Create(window_handle, g_ActiveConfig.stereo_mode == StereoMode::QuadBuffer))
|
2016-06-24 02:43:46 -06:00
|
|
|
return false;
|
2013-06-19 01:17:33 -06:00
|
|
|
|
2016-06-24 02:43:46 -06:00
|
|
|
GLInterface->MakeCurrent();
|
2017-03-09 07:01:23 -07:00
|
|
|
if (!InitializeGLExtensions() || !FillBackendInfo())
|
2018-01-25 22:09:07 -07:00
|
|
|
return false;
|
2016-06-24 02:43:46 -06:00
|
|
|
|
|
|
|
g_renderer = std::make_unique<Renderer>();
|
|
|
|
g_vertex_manager = std::make_unique<VertexManager>();
|
|
|
|
g_perf_query = GetPerfQuery();
|
|
|
|
ProgramShaderCache::Init();
|
|
|
|
g_texture_cache = std::make_unique<TextureCache>();
|
|
|
|
g_sampler_cache = std::make_unique<SamplerCache>();
|
2018-02-24 08:15:35 -07:00
|
|
|
g_shader_cache = std::make_unique<VideoCommon::ShaderCache>();
|
2017-03-03 23:39:50 -07:00
|
|
|
static_cast<Renderer*>(g_renderer.get())->Init();
|
2016-06-24 02:43:46 -06:00
|
|
|
TextureConverter::Init();
|
OGL: implement Bounding Box on systems w/o SSBO
This commit should have zero performance effect if SSBOs are supported.
If they aren't (e.g. on all Macs), this commit alters FramebufferManager
to attach a new stencil buffer and VertexManager to draw to it when
bounding box is active. `BBoxRead` gets the pixel data from the buffer
and dumbly loops through it to find the bounding box.
This patch can run Paper Mario: The Thousand-Year Door at almost full
speed (50–60 FPS) without Dual-Core enabled for all common bounding
box-using actions I tested (going through pipes, Plane Mode, Paper
Mode, Prof. Frankly's gate, combat, walking around the overworld, etc.)
on my computer (macOS 10.12.3, 2.8 GHz Intel Core i7, 16 GB 1600 MHz
DDR3, and Intel Iris 1536 MB).
A few more demanding scenes (e.g. the self-building bridge on the way
to Petalburg) slow to ~15% of their speed without this patch (though
they don't run quite at full speed even on master). The slowdown is
caused almost solely by `glReadPixels` in `OGL::BoundingBox::Get`.
Other implementation ideas:
- Use a stencil buffer that's separate from the depth buffer. This would
require ARB_texture_stencil8 / OpenGL 4.4, which isn't available on
macOS.
- Use `glGetTexImage` instead of `glReadPixels`. This is ~5 FPS slower
on my computer, presumably because it has to transfer the entire
combined depth-stencil buffer instead of only the stencil data.
Getting only stencil data from `glGetTexImage` requires
ARB_texture_stencil8 / OpenGL 4.4, which (again) is not available on
macOS.
- Don't use a PBO, and use `glReadPixels` synchronously. This has no
visible performance effect on my computer, and is theoretically
slower.
2017-03-05 16:34:30 -07:00
|
|
|
BoundingBox::Init(g_renderer->GetTargetWidth(), g_renderer->GetTargetHeight());
|
2018-02-24 08:15:35 -07:00
|
|
|
return g_shader_cache->Initialize();
|
2010-09-30 09:24:34 -06:00
|
|
|
}
|
|
|
|
|
2011-01-30 18:28:32 -07:00
|
|
|
void VideoBackend::Shutdown()
|
2010-09-30 09:24:34 -06:00
|
|
|
{
|
2018-02-24 08:15:35 -07:00
|
|
|
g_shader_cache->Shutdown();
|
2018-01-25 22:09:07 -07:00
|
|
|
g_renderer->Shutdown();
|
2016-06-24 02:43:46 -06:00
|
|
|
BoundingBox::Shutdown();
|
|
|
|
TextureConverter::Shutdown();
|
2018-02-24 08:15:35 -07:00
|
|
|
g_shader_cache.reset();
|
2016-06-24 02:43:46 -06:00
|
|
|
g_sampler_cache.reset();
|
|
|
|
g_texture_cache.reset();
|
|
|
|
ProgramShaderCache::Shutdown();
|
|
|
|
g_perf_query.reset();
|
|
|
|
g_vertex_manager.reset();
|
|
|
|
g_renderer.reset();
|
|
|
|
GLInterface->ClearCurrent();
|
2018-01-25 22:09:07 -07:00
|
|
|
GLInterface->Shutdown();
|
|
|
|
GLInterface.reset();
|
|
|
|
ShutdownShared();
|
2010-09-30 09:24:34 -06:00
|
|
|
}
|
2011-01-30 18:28:32 -07:00
|
|
|
}
|