Thanks for the reply.
I am working on a complete minimal example that reproduces the problem, but this will take some time. Until then, here is the code surrounding the glLinkProgram call:
bool Shader::createAsset(const std::string &name) {
    // Name contains the geometry, vertex and fragment shader file names (or "NONE"), so extract those
    std::istringstream is(name);
    std::string geomName, fragName, vertName;
    is >> geomName >> vertName >> fragName;

    // Load the shaders
    if(geomName != "NONE")
        if(!loadGeometryShader(geomName, _geomID)) {
            std::cerr << "Could not load geometry shader!" << std::endl;
            return false;
        }

    if(vertName != "NONE")
        if(!loadVertexShader(vertName, _vertID)) {
            std::cerr << "Could not load vertex shader!" << std::endl;
            return false;
        }

    if(fragName != "NONE")
        if(!loadFragmentShader(fragName, _fragID)) {
            std::cerr << "Could not load fragment shader!" << std::endl;
            return false;
        }

    // Create the main shader program
    _progID = glCreateProgram();

    // Attach the shader components (geometry, vertex and fragment) to the program
    if(_geomID != 0)
        glAttachShader(_progID, _geomID);
    if(_vertID != 0)
        glAttachShader(_progID, _vertID);
    if(_fragID != 0)
        glAttachShader(_progID, _fragID);

    if(!link(_progID)) {
        std::cerr << "- in " << name << std::endl;
        return false;
    }

    D3D_GL_ERROR_CHECK();

    return true;
}
The load___Shader calls just load and compile a shader; that part works fine.
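They all follow the same pattern, roughly like the sketch below (trimmed down; the by-reference GLuint parameter and the ifstream-based file reading are assumptions from the calls above, and the real functions have more error handling):

bool Shader::loadVertexShader(const std::string &name, GLuint &id) {
    // Read the whole shader source from the file (sketch; actual loading code differs)
    std::ifstream file(name);
    std::string source((std::istreambuf_iterator<char>(file)), std::istreambuf_iterator<char>());
    const GLchar *src = source.c_str();

    // Create and compile the shader object
    id = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(id, 1, &src, nullptr);
    glCompileShader(id);

    // Check the compile status and dump the log on failure
    GLint status;
    glGetShaderiv(id, GL_COMPILE_STATUS, &status);
    if(status == GL_FALSE) {
        GLint logLength;
        glGetShaderiv(id, GL_INFO_LOG_LENGTH, &logLength);
        std::string log(logLength > 0 ? logLength : 1, '\0');
        glGetShaderInfoLog(id, static_cast<GLsizei>(log.size()), nullptr, &log[0]);
        std::cerr << "Could not compile " << name << ": " << log << std::endl;
        return false;
    }
    return true;
}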
Shader::link looks like this:
bool Shader::link(GLuint id) {
    glLinkProgram(id);

    int result;

    // Check whether linking was successful
    glGetProgramiv(id, GL_LINK_STATUS, &result);

    if(result == GL_FALSE) {
        // Linking failed, print out the log
        int logLength;
        glGetProgramiv(id, GL_INFO_LOG_LENGTH, &logLength);

        if(logLength <= 0) {
            std::cerr << "Unable to link program: Error: Invalid log length \"" << logLength << "\": Could not retrieve error log!" << std::endl;
            return false;
        }

        // Allocate the string and fetch the log
        std::string log;
        log.resize(logLength);

        glGetProgramInfoLog(id, logLength, &result, &log[0]);

        std::cerr << "Unable to link program: " << log << std::endl;
        return false;
    }

    D3D_GL_ERROR_CHECK();

    return true;
}
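D3D_GL_ERROR_CHECK is just a thin wrapper around glGetError, roughly equivalent to something like this (the exact macro body is paraphrased here, not copied):

// Roughly what D3D_GL_ERROR_CHECK does: query glGetError and report
// anything that isn't GL_NO_ERROR together with the source location
#define D3D_GL_ERROR_CHECK() \
    do { \
        GLenum glErr = glGetError(); \
        if(glErr != GL_NO_ERROR) \
            std::cerr << "GL error " << glErr << " at " << __FILE__ << ":" << __LINE__ << std::endl; \
    } while(0)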
It crashes on the glLinkProgram call.
The main function looks like this:
int main() {
    std::ofstream out("out.txt");
    std::cout.rdbuf(out.rdbuf());
    std::ofstream err("err.txt");
    std::cerr.rdbuf(err.rdbuf());

    sf::RenderWindow window;

    sf::ContextSettings settings;
    settings.majorVersion = 4;
    settings.minorVersion = 3;
    settings.stencilBits = 0;
    settings.antialiasingLevel = 0;

    window.create(sf::VideoMode(1280, 720), "d3d", sf::Style::Default, settings);

    window.setVerticalSyncEnabled(true);
    window.setFramerateLimit(60);

    GLenum error = glewInit();
    assert(error == GLEW_NO_ERROR);

    // -------------------------------- OpenGL Setup --------------------------------

    d3d::sfmloglSetup();

    glViewport(0, 0, window.getSize().x, window.getSize().y);

    d3d::checkForGLError();

    // -------------------------------- Scene Setup --------------------------------

    std::unique_ptr<d3d::RenderScene> scene(new d3d::RenderScene());

    {
        std::shared_ptr<d3d::Shader> gBufferRender(new d3d::Shader());
        std::shared_ptr<d3d::Shader> gBufferRenderNormal(new d3d::Shader());
        std::shared_ptr<d3d::Shader> gBufferRenderHeightNormal(new d3d::Shader());
        std::shared_ptr<d3d::Texture2D> whiteTexture(new d3d::Texture2D());

        gBufferRender->createAsset("NONE resources/shaders/gbufferrender/gBufferRender.vert resources/shaders/gbufferrender/gBufferRender.frag");
        gBufferRenderNormal->createAsset("NONE resources/shaders/gbufferrender/gBufferRenderBump.vert resources/shaders/gbufferrender/gBufferRenderBump.frag");
        gBufferRenderHeightNormal->createAsset("NONE resources/shaders/gbufferrender/gBufferRenderParallax.vert resources/shaders/gbufferrender/gBufferRenderParallax.frag");
        whiteTexture->createAsset("resources/shaders/white.png");

        scene->createRenderScene(8, d3d::AABB3D(d3d::Vec3f(-1.0f, -1.0f, -1.0f), d3d::Vec3f(1.0f, 1.0f, 1.0f)), &window,
            gBufferRender, gBufferRenderNormal, gBufferRenderHeightNormal, whiteTexture);

        scene->_logicCamera._projectionMatrix = d3d::Matrix4x4f::perspectiveMatrix(d3d::_piOver4, static_cast<float>(window.getSize().x) / static_cast<float>(window.getSize().y), 0.1f, 10000.0f);
        scene->_logicCamera._position = d3d::Vec3f(1.5f, 1.5f, 1.5f);
        scene->_logicCamera._rotation = d3d::Quaternion::getFromMatrix(d3d::Matrix4x4f::cameraDirectionMatrix(-scene->_logicCamera._position.normalized(), d3d::Vec3f(0.0f, 1.0f, 0.0f)));
    }
The crash occurs while loading the first shader.
sfmloglSetup just sets some GL state, like so:
void d3d::sfmloglSetup() {
    glGenVertexArrays(1, &_vaoID);
    glBindVertexArray(_vaoID);

    glEnableVertexAttribArray(D3D_ATTRIB_POSITION);
    glEnableVertexAttribArray(D3D_ATTRIB_NORMAL);
    glEnableVertexAttribArray(D3D_ATTRIB_TEXCOORD);

    glFrontFace(GL_CCW);
    glEnable(GL_CULL_FACE);

    glClearDepth(1.0f);
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);

    glDepthFunc(GL_LESS);
    glEnable(GL_DEPTH_TEST);
}