Hi all,
I have scientific image data coming off a detector in a 16-bit range, which then gets rendered to an image. To display the data I'm using OpenGL, since it supports unsigned shorts (ushort) as a texture data type. I've managed to get the data into textures rendering on an OpenGL 1.4 platform; targeting 1.4 is a hard requirement of this project.
Unfortunately, the resulting textures look like they're being reduced to 8 bits rather than 16. I test this by generating a gradient image and displaying it: although every pixel in the source differs from its neighbors, the displayed texture shows stripe patterns in which runs of adjacent pixels come out equal.
I've also tried rendering the same data with glDrawPixels, and that path really does appear to show all 16 bits. A rough sketch of the test is below.
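Roughly, the gradient test looks like this (dimensions and buffer names are simplified stand-ins for my real code):

int width = 1024, height = 256; // simplified test dimensions
ushort[] gradient = new ushort[width * height];
for (int y = 0; y < height; y++)
    for (int x = 0; x < width; x++)
        gradient[y * width + x] = (ushort)((x * 65535) / (width - 1)); // adjacent pixels differ by ~64

// Drawing the buffer directly shows a smooth 16-bit ramp...
Gl.glDrawPixels(width, height, Gl.GL_LUMINANCE, Gl.GL_UNSIGNED_SHORT, gradient);
// ...while uploading it with glTexImage2D (below) and texturing a quad shows ~8-bit banding.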
How can I force these textures to display properly?
To give more background, the LUT (lookup table) is applied by the following fragment program, which computes cout = (texel - cbias) * cscale:
String str = "!!ARBfp1.0\n" +
"ATTRIB tex = fragment.texcoord[0];\n" +
"PARAM cbias = program.local[0];\n" +
"PARAM cscale = program.local[1];\n" +
"OUTPUT cout = result.color;\n" +
"TEMP tmp;\n" +
"TXP tmp, tex, texture[0], 2D;\n" +
"SUB tmp, tmp, cbias;\n" +
"MUL cout, tmp, cscale;\n" +
"END";
Gl.glEnable(Gl.GL_FRAGMENT_PROGRAM_ARB);
Gl.glGenProgramsARB(1, out mFragProg);
Gl.glBindProgramARB(Gl.GL_FRAGMENT_PROGRAM_ARB, mFragProg);
System.Text.Encoding ascii = System.Text.Encoding.ASCII;
Byte[] encodedBytes = ascii.GetBytes(str);
Gl.glProgramStringARB(Gl.GL_FRAGMENT_PROGRAM_ARB, Gl.GL_PROGRAM_FORMAT_ASCII_ARB,
    encodedBytes.Length, encodedBytes);
GetGLError("Shader");
Gl.glDisable(Gl.GL_FRAGMENT_PROGRAM_ARB);
Where cbias and cscale are both between 0 and 1.
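For completeness, the two locals are set along these lines (the values shown are made-up examples; indices 0 and 1 match program.local[0] and program.local[1] above):

float bias = 0.1f, scale = 0.9f; // example values; both lie in [0, 1]
Gl.glEnable(Gl.GL_FRAGMENT_PROGRAM_ARB);
Gl.glBindProgramARB(Gl.GL_FRAGMENT_PROGRAM_ARB, mFragProg);
Gl.glProgramLocalParameter4fARB(Gl.GL_FRAGMENT_PROGRAM_ARB, 0, bias, bias, bias, 0.0f);    // cbias
Gl.glProgramLocalParameter4fARB(Gl.GL_FRAGMENT_PROGRAM_ARB, 1, scale, scale, scale, 1.0f); // cscale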
Thanks!
EDIT: To answer some of the other questions, here is the glTexImage2D call:
Gl.glBindTexture(Gl.GL_TEXTURE_2D, inTexData.TexName);
Gl.glTexImage2D(Gl.GL_TEXTURE_2D, 0, Gl.GL_LUMINANCE, inTexData.TexWidth, inTexData.TexHeight,
0, Gl.GL_LUMINANCE, Gl.GL_UNSIGNED_SHORT, theTexBuffer);
Gl.glTexParameteri(Gl.GL_TEXTURE_2D, Gl.GL_TEXTURE_MIN_FILTER, Gl.GL_LINEAR); // Linear Filtering
Gl.glTexParameteri(Gl.GL_TEXTURE_2D, Gl.GL_TEXTURE_MAG_FILTER, Gl.GL_LINEAR); // Linear Filtering
theTexBuffer = null;
GC.Collect();
GC.WaitForPendingFinalizers();
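One diagnostic I can run right after the upload, while the texture is still bound, is to ask the driver how many luminance bits it actually allocated (a sketch; Tao's exact overload may take an int[] rather than an out int):

int lumBits;
Gl.glGetTexLevelParameteriv(Gl.GL_TEXTURE_2D, 0, Gl.GL_TEXTURE_LUMINANCE_SIZE, out lumBits);
// If this reports 8, the driver picked an 8-bit internal format for the unsized GL_LUMINANCE request.
Console.WriteLine("Stored luminance bits: " + lumBits);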
The pixel format is set when the context is initialized:
Gdi.PIXELFORMATDESCRIPTOR pfd = new Gdi.PIXELFORMATDESCRIPTOR(); // The pixel format descriptor
pfd.nSize = (short)Marshal.SizeOf(pfd); // Size of the pixel format descriptor
pfd.nVersion = 1; // Version number (always 1)
pfd.dwFlags = Gdi.PFD_DRAW_TO_WINDOW | // Format must support windowed mode
Gdi.PFD_SUPPORT_OPENGL | // Format must support OpenGL
Gdi.PFD_DOUBLEBUFFER; // Must support double buffering
pfd.iPixelType = (byte)Gdi.PFD_TYPE_RGBA; // Request an RGBA format
pfd.cColorBits = (byte)colorBits; // Select our color depth
pfd.cRedBits = 0; // Individual color bits ignored
pfd.cRedShift = 0;
pfd.cGreenBits = 0;
pfd.cGreenShift = 0;
pfd.cBlueBits = 0;
pfd.cBlueShift = 0;
pfd.cAlphaBits = 0; // No alpha buffer
pfd.cAlphaShift = 0; // Alpha shift bit ignored
pfd.cAccumBits = 0; // Accumulation buffer
pfd.cAccumRedBits = 0; // Individual accumulation bits ignored
pfd.cAccumGreenBits = 0;
pfd.cAccumBlueBits = 0;
pfd.cAccumAlphaBits = 0;
pfd.cDepthBits = 16; // Z-buffer (depth buffer)
pfd.cStencilBits = 0; // No stencil buffer
pfd.cAuxBuffers = 0; // No auxiliary buffer
pfd.iLayerType = (byte)Gdi.PFD_MAIN_PLANE; // Main drawing layer
pfd.bReserved = 0; // Reserved
pfd.dwLayerMask = 0; // Layer masks ignored
pfd.dwVisibleMask = 0;
pfd.dwDamageMask = 0;
pixelFormat = Gdi.ChoosePixelFormat(mDC, ref pfd); // Attempt to find an appropriate pixel format
if (!Gdi.SetPixelFormat(mDC, pixelFormat, ref pfd)) // Were we unable to set the pixel format?
{
    BigMessageBox.ShowMessage("Cannot set the chosen PixelFormat. Chosen PixelFormat was " + pixelFormat + ".");
    Environment.Exit(-1);
}
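If it's useful, I can also dump what ChoosePixelFormat actually selected (a sketch; this assumes Tao wraps the Win32 DescribePixelFormat call alongside the other GDI pixel-format functions):

Gdi.DescribePixelFormat(mDC, pixelFormat, Marshal.SizeOf(pfd), ref pfd);
// Reports the per-channel depths the driver really gave us (typically 8/8/8 for a windowed RGBA format).
Console.WriteLine("Color bits: " + pfd.cColorBits +
    ", R/G/B: " + pfd.cRedBits + "/" + pfd.cGreenBits + "/" + pfd.cBlueBits);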