Question
I'm trying to convert YUV (YV12) to RGB with a GLSL shader, using the following steps:
- read the raw YUV (YV12) data from an image file
- split the Y, Cb and Cr planes out of the raw YUV (YV12) data
- map each plane to a texture
- send the textures to the fragment shader.
But the resulting image is not the same as the raw data.
The image below is the raw data.
screenshot of the raw image (link, available for download)
And the image below is the converted data.
screenshot of the converted image (link, available for download)
And below is my source code.
- (void) readYUVFile
{
    ...
    NSData* fileData = [NSData dataWithContentsOfFile:file];

    NSInteger width  = 720;
    NSInteger height = 480;
    NSInteger uv_width  = width / 2;
    NSInteger uv_height = height / 2;
    NSInteger dataSize = [fileData length];

    GLint nYsize  = width * height;
    GLint nUVsize = uv_width * uv_height;
    GLint nCbOffSet = nYsize;
    GLint nCrOffSet = nCbOffSet + nUVsize;

    Byte* uData = spriteData + nCbOffSet;
    Byte* vData = uData + nUVsize;

    GLfloat imageY[ 345600 ], imageU[ 86400 ], imageV[ 86400 ];
    int x, y, nIndexY = 0, nIndexUV = 0;

    for( y = 0; y < height; y++ )
    {
        for( x = 0; x < width; x++ )
        {
            imageY[ nIndexY ] = (GLfloat)spriteData[ nIndexY ] - 16.0;

            if( (y < uv_height) && (x < uv_width) )
            {
                imageU[ nIndexUV ] = (GLfloat)uData[ nIndexUV ] - 128.0;
                imageV[ nIndexUV ] = (GLfloat)vData[ nIndexUV ] - 128.0;
                nIndexUV++;
            }
            nIndexY++;
        }
    }

    m_YpixelTexture = [self textureY:imageY widthType:width heightType:height];
    m_UpixelTexture = [self textureU:imageU widthType:uv_width heightType:uv_height];
    m_VpixelTexture = [self textureV:imageV widthType:uv_width heightType:uv_height];
    ...
}
- (GLuint) textureY: (GLfloat*)imageData
          widthType: (int) width
         heightType: (int) height
{
    GLuint texName;

    glActiveTexture( GL_TEXTURE0 );
    glGenTextures( 1, &texName );
    glBindTexture( GL_TEXTURE_2D, texName );

    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );

    glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData );

    return texName;
}

- (GLuint) textureU: (GLfloat*)imageData
          widthType: (int) width
         heightType: (int) height
{
    GLuint texName;

    glActiveTexture( GL_TEXTURE1 );
    glGenTextures( 1, &texName );
    glBindTexture( GL_TEXTURE_2D, texName );

    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );

    glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData );

    return texName;
}

- (GLuint) textureV: (GLfloat*)imageData
          widthType: (int) width
         heightType: (int) height
{
    GLuint texName;

    glActiveTexture( GL_TEXTURE2 );
    glGenTextures( 1, &texName );
    glBindTexture( GL_TEXTURE_2D, texName );

    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );

    glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData );

    return texName;
}
And below is the source code of the fragment shader.
uniform sampler2D Ytexture;   // Y Texture Sampler
uniform sampler2D Utexture;   // U Texture Sampler
uniform sampler2D Vtexture;   // V Texture Sampler

varying highp vec2 TexCoordOut;

void main()
{
    highp float y, u, v;
    highp float r, g, b;

    y = texture2D( Ytexture, TexCoordOut ).p;
    u = texture2D( Utexture, TexCoordOut ).p;
    v = texture2D( Vtexture, TexCoordOut ).p;

    y = 1.1643 * ( y - 0.0625 );
    u = u - 0.5;
    v = v - 0.5;

    r = y + 1.5958 * v;
    g = y - 0.39173 * u - 0.81290 * v;
    b = y + 2.017 * u;

    gl_FragColor = highp vec4( r, g, b, 1.0 );
}
The Y data looks good, but the U and V data do not. Also, the image comes out flipped along the y-axis.
How can I resolve this issue?
Answer 1:
The image is probably mirrored across the horizontal because of a simple disagreement in axes — OpenGL follows the graph paper convention where (0, 0) is the bottom left corner and the y axis heads upwards, whereas almost all graphics image formats follow the English reading order convention where (0, 0) is the top left corner and the y axis heads downwards. Just flip your input y coordinates (in the vertex shader if necessary).
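For example, a minimal vertex-shader sketch of that flip, assuming attributes named Position and TexCoordIn feeding the TexCoordOut varying used by the fragment shader above (the attribute names are assumptions, since the question doesn't show the vertex shader):

attribute vec4 Position;
attribute vec2 TexCoordIn;          // assumed attribute name
varying highp vec2 TexCoordOut;

void main()
{
    gl_Position = Position;
    // Image files put (0, 0) at the top left, OpenGL textures at the
    // bottom left, so mirror the v coordinate.
    TexCoordOut = vec2( TexCoordIn.x, 1.0 - TexCoordIn.y );
}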
As for the colours, the second screenshot currently isn't working for me (as per my comment) but my best guess would be that you're subtracting 128 when building imageU and imageV, then subtracting 0.5 again in your shader. Presumably you actually want to do just the one or the other (specifically, do it in the shader because texture data is unsigned)? You make the same mistake with imageY but the net effect will just be to darken the image slightly rather than to shift all the colours half way around the scale.
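As an illustration of the single-subtraction approach, here is a sketch of the upload side with the CPU-side subtraction removed, assuming the textureY:/textureU:/textureV: helpers are changed to accept the raw Byte* planes and the shader keeps its (y - 0.0625), (u - 0.5), (v - 0.5) offsets:

// Upload the plane bytes untouched; apply the offsets only once, in the
// shader, after the sampler has normalised the unsigned bytes to 0..1.
Byte* yData = spriteData;                  // Y plane
Byte* uData = spriteData + nCbOffSet;      // first chroma plane
Byte* vData = uData + nUVsize;             // second chroma plane

m_YpixelTexture = [self textureY:yData widthType:width    heightType:height];
m_UpixelTexture = [self textureU:uData widthType:uv_width heightType:uv_height];
m_VpixelTexture = [self textureV:vData widthType:uv_width heightType:uv_height];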
My only other thought is that your individual textures have only one channel so it'd be better to upload them as GL_LUMINANCE rather than GL_RGBA.
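That change would look something like the sketch below inside each texture helper, again assuming the raw Byte* data is passed in; GL_LUMINANCE replicates the single byte into the r, g and b channels, so the shader's .p read keeps working:

// One unsigned byte per texel; tell GL there is no row padding.
glPixelStorei( GL_UNPACK_ALIGNMENT, 1 );
glTexImage2D( GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
              GL_LUMINANCE, GL_UNSIGNED_BYTE, imageData );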
Source: https://stackoverflow.com/questions/9075074/about-converting-yuvyv12-to-rgb-with-glsl-for-ios