Question
For luminance histogram calculation I have used code from the GPUImage project for iOS by Brad Larson. He uses blending for the histogram calculation. Attaching the vertex and fragment shaders.
Vertex shader
#version 300 es
in vec4 position;
out vec3 colorFactor;
const vec3 W = vec3(0.299, 0.587, 0.114);
void main()
{
    // position carries one unnormalized RGBA8 pixel, so luminance is in [0, 255];
    // 0.00784313725 ≈ 2.0 / 255.0 maps it to clip-space x in [-1, 1]
    float luminance = dot(position.xyz, W);
    colorFactor = vec3(1.0, 1.0, 1.0);
    gl_Position = vec4(-1.0 + (luminance * 0.00784313725), 0.0, 0.0, 1.0);
    gl_PointSize = 1.0;
}
Fragment shader
#version 300 es
const lowp float scalingFactor = 1.0 / 256.0;
in lowp vec3 colorFactor;
// GLSL ES 3.00 has no built-in gl_FragColor; a user-defined output must be declared
out lowp vec4 fragColor;
void main()
{
    fragColor = vec4(colorFactor * scalingFactor, 1.0);
}
I have used a 256x1 texture attached to an FBO and pass the pixels as input to the vertex shader. The texture is defined as follows:
GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_S, GLES30.GL_CLAMP_TO_EDGE);
GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_T, GLES30.GL_CLAMP_TO_EDGE);
GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_NEAREST);
GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_LINEAR);
GLES30.glTexImage2D(GLES30.GL_TEXTURE_2D, 0, GLES30.GL_RGBA,
256, 1, 0, GLES30.GL_RGBA,
GLES30.GL_UNSIGNED_BYTE, null);
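The FBO attachment itself is not shown above; for reference, a minimal sketch of attaching such a 256x1 texture to an FBO might look like this (the name histogramTexture is a placeholder for the texture id created above, not from the original code):
// Minimal FBO-attachment sketch; histogramTexture is a hypothetical name
// for the 256x1 texture created above.
int[] fboIds = new int[1];
GLES30.glGenFramebuffers(1, fboIds, 0);
GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, fboIds[0]);
// Attach the 256x1 histogram texture as the color target
GLES30.glFramebufferTexture2D(GLES30.GL_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0,
        GLES30.GL_TEXTURE_2D, histogramTexture, 0);
if (GLES30.glCheckFramebufferStatus(GLES30.GL_FRAMEBUFFER)
        != GLES30.GL_FRAMEBUFFER_COMPLETE) {
    throw new RuntimeException("Histogram FBO is incomplete");
}
// The viewport must match the histogram texture: one column per bin
GLES30.glViewport(0, 0, 256, 1);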
And my onDrawFrame goes like this:
GLES30.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES30.glClear(GLES30.GL_COLOR_BUFFER_BIT);
// Additive blending: each rendered point adds scalingFactor to its bin
GLES30.glBlendEquation(GLES30.GL_FUNC_ADD);
GLES30.glBlendFunc(GLES30.GL_ONE, GLES30.GL_ONE);
GLES30.glEnable(GLES30.GL_BLEND);
filterPositionAttribute = mshader2.getHandle("position");
// Note: a 60-byte stride advances 15 RGBA8 pixels per point; sampling every
// 16th pixel would need a stride of 64
GLES30.glVertexAttribPointer(filterPositionAttribute, 4, GLES30.GL_UNSIGNED_BYTE, false, 60, PixelBuffer);
GLES30.glEnableVertexAttribArray(filterPositionAttribute);
GLES30.glDrawArrays(GLES30.GL_POINTS, 0, mViewportWidth * mViewportHeight / 16);
I will be using 1 pixel out of every 16 for the histogram calculation.
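To get the values back to the CPU, a readback along these lines should work (a sketch, assuming the histogram FBO is still bound when it runs):
// Readback sketch, assuming the histogram FBO is still bound.
// Requires java.nio.ByteBuffer and java.nio.ByteOrder.
ByteBuffer pixels = ByteBuffer.allocateDirect(256 * 4).order(ByteOrder.nativeOrder());
GLES30.glReadPixels(0, 0, 256, 1, GLES30.GL_RGBA, GLES30.GL_UNSIGNED_BYTE, pixels);
int[] histogram = new int[256];
for (int i = 0; i < 256; i++) {
    // Each blended point added 1/256 to the red channel, so a stored byte b
    // corresponds to roughly b * 256 / 255 points in that bin. Note that an
    // 8-bit target saturates at 1.0, i.e. counts above 256 per bin are clipped.
    int red = pixels.get(i * 4) & 0xFF;
    histogram[i] = Math.round(red * 256f / 255f);
}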
Now I am getting the graph and values, but when I verify them using MATLAB and other software like IrfanView, they appear deviated.
Attaching the graph (created in Excel) of the values from my application.
Verified values using MATLAB.
Is this variation expected, or am I making some mistake? Could anybody help? Thanks in advance.
Answer 1:
I'm using C and desktop GL, but here's the gist of it:
Vertex shader
#version 330
layout (location = 0) in vec2 inPosition;
void main()
{
    int x = compute the bin (0 to 255) from inPosition;
    gl_Position = vec4(
        -1.0 + ((x + 0.5) / 128.0),
        0.5,
        0.0,
        1.0
    );
}
Fragment shader
#version 330
out vec4 outputColor;
void main()
{
    outputColor = vec4(1.0, 1.0, 1.0, 1.0);
}
Init:
glGenTextures(1, &tex);
glGenFramebuffers(1, &fbo);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, tex); /* bind the texture before setting its parameters */
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexStorage2D(GL_TEXTURE_2D, 1, GL_R32F, 256, 1);
glBindFramebuffer(GL_FRAMEBUFFER, fbo);
glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, tex, 0);
Drawing:
/* Upload data */
glBufferData(GL_ARRAY_BUFFER, num_input_data * 2 * sizeof(float), input_data_ptr, GL_STREAM_DRAW);
/* Clear buffer */
const float zero[4] = { 0.0f, 0.0f, 0.0f, 0.0f };
glClearBufferfv(GL_COLOR, 0, zero);
/* Init viewport */
glViewport(0, 0, 256, 1);
/* Draw */
glDrawArrays(GL_POINTS, 0, num_input_data);
For brevity I only put the init code for the resulting buffer; all the VBO/VAO init/binding has been skipped.
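For the Android/GLES 3.0 side of the question, a rough port of this float-texture setup might look like the sketch below. Note this is an assumption to verify at runtime: rendering to GL_R32F on ES 3.0 devices requires the GL_EXT_color_buffer_float extension (it is only guaranteed in core from ES 3.2).
// Hedged sketch: GLES 3.0 port of the R32F histogram target above.
// Rendering to GL_R32F needs GL_EXT_color_buffer_float on ES 3.0 devices.
String extensions = GLES30.glGetString(GLES30.GL_EXTENSIONS);
boolean floatRenderable = extensions != null
        && extensions.contains("GL_EXT_color_buffer_float");

int[] ids = new int[2];
GLES30.glGenTextures(1, ids, 0);
GLES30.glGenFramebuffers(1, ids, 1);
int tex = ids[0], fbo = ids[1];

GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, tex);
GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_NEAREST);
GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_NEAREST);
// Immutable 256x1 single-channel float storage: one texel per bin,
// so bin counts accumulate without the 8-bit saturation problem
GLES30.glTexStorage2D(GLES30.GL_TEXTURE_2D, 1, GLES30.GL_R32F, 256, 1);

GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, fbo);
GLES30.glFramebufferTexture2D(GLES30.GL_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0,
        GLES30.GL_TEXTURE_2D, tex, 0);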
Source: https://stackoverflow.com/questions/37539335/luminance-histogram-calculation-in-gpu-android-opengl-es-3-0