More coding style fixes because I suck at sed

Pierre Bourdon
2011-12-11 11:14:02 +01:00
parent 014c474024
commit df283a56a0
8 changed files with 155 additions and 155 deletions


@@ -27,116 +27,116 @@ namespace OGL
std::pair<u64, u64> ProgramShaderCache::CurrentShaderProgram;
const char *UniformNames[NUM_UNIFORMS] = {
	// SAMPLERS
	"samp0","samp1","samp2","samp3","samp4","samp5","samp6","samp7",
	// PIXEL SHADER UNIFORMS
	I_COLORS,
	I_KCOLORS,
	I_ALPHA,
	I_TEXDIMS,
	I_ZBIAS ,
	I_INDTEXSCALE ,
	I_INDTEXMTX,
	I_FOG,
	I_PLIGHTS,
	I_PMATERIALS,
	// VERTEX SHADER UNIFORMS
	I_POSNORMALMATRIX,
	I_PROJECTION ,
	I_MATERIALS,
	I_LIGHTS,
	I_TEXMATRICES,
	I_TRANSFORMMATRICES ,
	I_NORMALMATRICES ,
	I_POSTTRANSFORMMATRICES,
	I_DEPTHPARAMS,
};
void ProgramShaderCache::SetBothShaders(GLuint PS, GLuint VS)
{
	PROGRAMUID uid;
	CurrentFShader = PS;
	CurrentVShader = VS;
	GetProgramShaderId(&uid, CurrentVShader, CurrentFShader);
	if(uid.uid.id == 0)
	{
		CurrentProgram = 0;
		glUseProgram(0);
		return;
	}
	// Fragment shaders can survive without Vertex Shaders
	// We have a valid fragment shader, let's create our program
	std::pair<u64, u64> ShaderPair = std::make_pair(uid.uid.psid, uid.uid.vsid);
	PCache::iterator iter = pshaders.find(ShaderPair);
	if (iter != pshaders.end())
	{
		PCacheEntry &entry = iter->second;
		glUseProgram(entry.program.glprogid);
		CurrentShaderProgram = ShaderPair;
		CurrentProgram = entry.program.glprogid;
		return;
	}
	PCacheEntry entry;
	entry.program.vsid = CurrentVShader;
	entry.program.psid = CurrentFShader;
	entry.program.glprogid = glCreateProgram();
	// Right, the program is created now
	// Let's attach everything
	if(entry.program.vsid != 0) // attaching zero vertex shader makes it freak out
		glAttachShader(entry.program.glprogid, entry.program.vsid);
	glAttachShader(entry.program.glprogid, entry.program.psid);
	glLinkProgram(entry.program.glprogid);
	glUseProgram(entry.program.glprogid);
	// Dunno why this is needed when I have the binding
	// points statically set in the shader source
	// We should only need these two functions when we don't support binding but do support UBO
	// Driver Bug? Nvidia GTX 570, 290.xx Driver, Linux x64
	//if(!g_ActiveConfig.backend_info.bSupportsGLSLBinding)
	{
		glUniformBlockBinding( entry.program.glprogid, 0, 1 );
		glUniformBlockBinding( entry.program.glprogid, 1, 2 );
	}
	// We cache our uniform locations for now
	// Once we move up to a newer version of GLSL, ~1.30
	// We can remove this
	//For some reason this fails on my hardware
	//glGetUniformIndices(entry.program.glprogid, NUM_UNIFORMS, UniformNames, entry.program.UniformLocations);
	//Got to do it this crappy way.
	if(!g_ActiveConfig.backend_info.bSupportsGLSLUBO)
		for(int a = 0; a < NUM_UNIFORMS; ++a)
			entry.program.UniformLocations[a] = glGetUniformLocation(entry.program.glprogid, UniformNames[a]);
	// Need to get some attribute locations
	if(uid.uid.vsid != 0) // We have no vertex Shader
	{
		entry.program.attrLoc[0] = glGetAttribLocation(entry.program.glprogid, "rawnorm1");
		entry.program.attrLoc[1] = glGetAttribLocation(entry.program.glprogid, "rawnorm2");
		entry.program.attrLoc[2] = glGetAttribLocation(entry.program.glprogid, "fposmtx");
		if(entry.program.attrLoc[0] > 0)
			glEnableVertexAttribArray(entry.program.attrLoc[0]);
		if(entry.program.attrLoc[1] > 0)
			glEnableVertexAttribArray(entry.program.attrLoc[1]);
		if(entry.program.attrLoc[2] > 0)
			glEnableVertexAttribArray(entry.program.attrLoc[2]);
	}
	else
		entry.program.attrLoc[0] = entry.program.attrLoc[1] = entry.program.attrLoc[2] = 0;
	pshaders[ShaderPair] = entry;
	CurrentShaderProgram = ShaderPair;
	CurrentProgram = entry.program.glprogid;
}
void ProgramShaderCache::SetUniformObjects(int Buffer, unsigned int offset, const float *f, unsigned int count)
{
@@ -158,11 +158,11 @@ namespace OGL
GLint ProgramShaderCache::GetAttr(int num)
{
	return pshaders[CurrentShaderProgram].program.attrLoc[num];
}
PROGRAMSHADER ProgramShaderCache::GetShaderProgram(void)
{
	return pshaders[CurrentShaderProgram].program;
}
void ProgramShaderCache::Init(void)
{
@@ -190,11 +190,11 @@ namespace OGL
}
void ProgramShaderCache::Shutdown(void)
{
	PCache::iterator iter = pshaders.begin();
	for (; iter != pshaders.end(); iter++)
		iter->second.Destroy();
	pshaders.clear();
	glDeleteBuffers(2, UBOBuffers);
}
}
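
For reference, the SetBothShaders logic shown above is a lookup/create cache keyed on the (pixel shader, vertex shader) ID pair. Below is a minimal standalone sketch of that pattern; the ProgramEntry type and GetOrCreateProgram function are hypothetical stand-ins for Dolphin's PCacheEntry/PROGRAMSHADER and are not part of this commit.

#include <cstdint>
#include <map>
#include <utility>

// Hypothetical stand-in for Dolphin's PROGRAMSHADER / PCacheEntry.
struct ProgramEntry
{
	uint64_t vsid = 0;      // vertex shader uid (0 = no vertex shader)
	uint64_t psid = 0;      // pixel shader uid
	unsigned glprogid = 0;  // linked GL program name
};

// Cache keyed on the (pixel shader uid, vertex shader uid) pair, as in SetBothShaders.
using ProgramCache = std::map<std::pair<uint64_t, uint64_t>, ProgramEntry>;

// Return the cached program for this shader pair, creating an entry on a miss.
ProgramEntry &GetOrCreateProgram(ProgramCache &cache, uint64_t psid, uint64_t vsid)
{
	std::pair<uint64_t, uint64_t> key = std::make_pair(psid, vsid);
	ProgramCache::iterator it = cache.find(key);
	if (it != cache.end())
		return it->second;  // cache hit: reuse the already linked program

	ProgramEntry entry;
	entry.psid = psid;
	entry.vsid = vsid;
	entry.glprogid = 0;     // the real code calls glCreateProgram/glAttachShader/glLinkProgram here
	return cache[key] = entry;  // cache miss: store the new entry for future lookups
}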