I am sending an OpenGL 3D scene across an IP link to my iPhone app, built with SDK 4.1. On the desktop side I render the scene into an FBO, read it back with glReadPixels, and send the pixels to the iPhone app. If I convert the pixel data received by the app to a UIImage, it displays correctly, so the data arrives intact. But when I upload the same data with glTexSubImage2D under OpenGL ES 1.1 (on an iPhone 3G), nothing is drawn. I am using glTexSubImage2D because glDrawPixels is not available in OpenGL ES. Any ideas what could be going wrong, or any examples/tutorials that cover this? I'd appreciate any help.
Thanks in advance.
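For context, the desktop side reads the rendered frame back roughly like this (a sketch; the FBO setup and socket code are omitted, and the variable names here are just placeholders):

// Desktop side: read the FBO-rendered frame back so it can be sent over the socket.
glBindFramebuffer(GL_FRAMEBUFFER, m_desktopFBO);
glPixelStorei(GL_PACK_ALIGNMENT, 1);                   // tightly packed rows in the outgoing buffer
glReadPixels(0, 0, width, height,
             GL_BGRA, GL_UNSIGNED_BYTE, m_pixelData);  // same BGRA / unsigned-byte layout the app expects
// m_pixelData (width * height * 4 bytes) is then written to the socket.

Note that glReadPixels returns rows starting at the lower-left corner, so unless the buffer is flipped before sending, the data arrives bottom-up.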
The iPhone code -
- (id) initWithFrame: (CGRect) frame
{
    .....
    // Create an OpenGL ES 1.1 rendering context.
    api = kEAGLRenderingAPIOpenGLES1;
    m_context = [[EAGLContext alloc] initWithAPI:api];

    // Framebuffer with a color renderbuffer for on-screen rendering.
    glGenFramebuffersOES(1, &m_frameBuffer);
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, m_frameBuffer);
    glGenRenderbuffersOES(1, &m_colorRenderbuffer);
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, m_colorRenderbuffer);

    // Texture that will receive the incoming pixel data each frame;
    // BGRA is passed as both internal and external format.
    glGenTextures(1, &m_tex);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_BGRA, width, height, 0, GL_BGRA, GL_UNSIGNED_BYTE, NULL);
    ....
}
- (void) drawView: (CADisplayLink*) displayLink
{
    // Upload the most recently received frame into the texture.
    glBindTexture(GL_TEXTURE_2D, m_tex);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0,
                    WIDTH, HEIGHT,
                    GL_BGRA,            // also tried GL_RGBA
                    GL_UNSIGNED_BYTE,
                    m_pixelData);

    // Full-screen quad, drawn as a five-vertex triangle strip.
    const GLfloat quadVertices[] = {
        -1.0f, -1.0f, 0.0f,
        -1.0f,  1.0f, 0.0f,
         1.0f,  1.0f, 0.0f,
         1.0f, -1.0f, 0.0f,
        -1.0f, -1.0f, 0.0f
    };

    // Texture coordinates matching the quad vertices.
    const GLfloat quadTexcoords[] = {
        0.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
        1.0f, 0.0f,
        0.0f, 0.0f
    };

    glVertexPointer(3, GL_FLOAT, 0, quadVertices);
    glEnableClientState(GL_VERTEX_ARRAY);
    glTexCoordPointer(2, GL_FLOAT, 0, quadTexcoords);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);

    // Draw the textured quad into the framebuffer and present it.
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, m_frameBuffer);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 5);

    glBindRenderbufferOES(GL_RENDERBUFFER_OES, m_colorRenderbuffer);
    [m_context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
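For reference, the UIImage check on the received buffer can be done roughly like this (a sketch of one way to do it; it assumes width * height * 4 bytes of BGRA data with premultiplied alpha in m_pixelData):

// Wrap the received BGRA pixels in a bitmap context and turn it into a UIImage.
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef bitmapContext = CGBitmapContextCreate(m_pixelData, width, height,
                                                   8, width * 4, colorSpace,
                                                   kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Little);
CGImageRef cgImage = CGBitmapContextCreateImage(bitmapContext);
UIImage *check = [UIImage imageWithCGImage:cgImage];   // this image shows the scene correctly
CGImageRelease(cgImage);
CGContextRelease(bitmapContext);
CGColorSpaceRelease(colorSpace);

This is only a verification step; the same m_pixelData buffer is what drawView: uploads with glTexSubImage2D.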