Question
In my program I tried to load precompiled binary shaders with the example code from Sascha Willems (https://github.com/SaschaWillems/openglcpp/blob/master/SPIRVShader/main.cpp):
bool loadBinaryShader(const char *fileName, GLuint stage, GLuint binaryFormat, GLuint &shader)
{
    std::ifstream shaderFile;
    shaderFile.open(fileName, std::ios::binary | std::ios::ate);
    if (shaderFile.is_open())
    {
        size_t size = shaderFile.tellg();
        shaderFile.seekg(0, std::ios::beg);
        char* bin = new char[size];
        shaderFile.read(bin, size);

        GLint status;
        shader = glCreateShader(stage);                              // Create a new shader
        glShaderBinary(1, &shader, binaryFormat, bin, size);         // Load the binary shader file
        glSpecializeShaderARB(shader, "main", 0, nullptr, nullptr);  // Set main entry point (required, no specialization used in this example)
        glGetShaderiv(shader, GL_COMPILE_STATUS, &status);           // Check compilation status
        delete[] bin;                                                // Free the file buffer
        return status;
    }
    else
    {
        std::cerr << "Could not open \"" << fileName << "\"" << std::endl;
        return false;
    }
}
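For context, the way this helper is meant to be used (following the linked example; the paths and names below are only placeholders, not my exact code) is roughly:
GLuint vertShader = 0, fragShader = 0;
bool ok =
    loadBinaryShader("test.vert.spv", GL_VERTEX_SHADER,   GL_SHADER_BINARY_FORMAT_SPIR_V_ARB, vertShader) &&
    loadBinaryShader("test.frag.spv", GL_FRAGMENT_SHADER, GL_SHADER_BINARY_FORMAT_SPIR_V_ARB, fragShader);

// Attach both SPIR-V stages and link them into a single program
GLuint program = glCreateProgram();
glAttachShader(program, vertShader);
glAttachShader(program, fragShader);
glLinkProgram(program);

GLint linked = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linked);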
I have created the following two simple shaders for testing:
test.frag
#version 450
in vec4 color;
out vec4 outCol;
void main()
{
    outCol = vec4(1., 0., 0., 1.);
}
and test.vert:
#version 450
layout (location = 0) in vec3 inPos;
layout (location = 1) in vec3 inColor;
layout (location = 0) out vec3 outColor;
out gl_PerVertex
{
    vec4 gl_Position;
};
void main()
{
    outColor = vec3(0.2, 1., 0.2);
    gl_Position = vec4(5. * inPos.xyz, 1.0);
}
I converted them to SPIR-V with glslangValidator from GitHub (https://github.com/KhronosGroup/glslang), using:
glslangValidator.exe test.vert -G -o anypath.spv
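The fragment shader is converted the same way; the output paths below are only examples:
glslangValidator.exe test.vert -G -o test.vert.spv
glslangValidator.exe test.frag -G -o test.frag.spv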
When I try to load these shaders, the program crashes with a segmentation fault at the line
glSpecializeShaderARB(shader, "main", 0, nullptr, nullptr);
I've tried the same thing on another PC with an older GPU (GeForce GTX 660), and there it works fine. But it doesn't work on my new PC with the Radeon R9 Fury X.
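For completeness, a minimal sketch of checking whether ARB_gl_spirv is exposed at all before calling glSpecializeShaderARB (this sketch assumes GLEW as the loader, which may not match every setup):
// Sketch: verify the driver actually exposes ARB_gl_spirv (or OpenGL 4.6)
// before calling glSpecializeShaderARB; GLEW is assumed as the loader here.
if (!GLEW_ARB_gl_spirv || glSpecializeShaderARB == nullptr)
{
    std::cerr << "ARB_gl_spirv is not available on this driver" << std::endl;
}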
Any ideas?
Source: https://stackoverflow.com/questions/43515969/loading-spirv-binary-shader-fails