Based on the responses to a previous question, I've created a category on UIImageView for extracting pixel data. It works fine in the simulator, but not when deployed to the device. More precisely, it isn't always wrong: the odd thing is that it fetches the correct pixel colour if point.x == point.y; otherwise it returns the data for the pixel mirrored across that diagonal. (So a tap on a pixel in the lower-right corner of the image gives me the pixel data for the corresponding pixel in the upper-left, but tapping a pixel in the lower-left corner returns the correct colour.) The touch coordinates (CGPoint) are correct.
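For context, the point comes straight from a touch handler, along these lines (a simplified sketch; imageView is just a placeholder name for the image view in question):

- (void)touchesEnded:(NSSet*)touches withEvent:(UIEvent*)event
{
    // Simplified sketch -- imageView is a placeholder for the tapped UIImageView.
    UITouch* touch = [touches anyObject];
    CGPoint point = [touch locationInView:self.imageView]; // origin at the view's top-left
    UIColor* color = [self.imageView getRGBPixelColorAtPoint:point];
    NSLog(@"tap at %@ -> %@", NSStringFromCGPoint(point), color);
}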
What am I doing wrong?
Here's the category code:
@interface UIImageView (PixelColor)
- (UIColor*)getRGBPixelColorAtPoint:(CGPoint)point;
@end

@implementation UIImageView (PixelColor)

- (UIColor*)getRGBPixelColorAtPoint:(CGPoint)point
{
    UIColor* color = nil;
    CGImageRef cgImage = [self.image CGImage];
    size_t width = CGImageGetWidth(cgImage);
    size_t height = CGImageGetHeight(cgImage);
    NSUInteger x = (NSUInteger)floor(point.x);
    NSUInteger y = height - (NSUInteger)floor(point.y);

    if ((x < width) && (y < height))
    {
        CGDataProviderRef provider = CGImageGetDataProvider(cgImage);
        CFDataRef bitmapData = CGDataProviderCopyData(provider);
        const UInt8* data = CFDataGetBytePtr(bitmapData);

        size_t offset = ((width * y) + x) * 4;
        UInt8 red   = data[offset];
        UInt8 blue  = data[offset+1];
        UInt8 green = data[offset+2];
        UInt8 alpha = data[offset+3];
        CFRelease(bitmapData);

        color = [UIColor colorWithRed:red/255.0f green:green/255.0f blue:blue/255.0f alpha:alpha/255.0f];
    }
    return color;
}

@end
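The method assumes tightly packed pixel data at 4 bytes per pixel. As a sanity check, I can log what the CGImage actually reports, e.g. right after the CGImageRef line above (diagnostic only):

// Diagnostic: verify the 4-bytes-per-pixel / no-row-padding assumption.
NSLog(@"bitsPerPixel=%zu bytesPerRow=%zu width=%zu height=%zu alphaInfo=%u",
      CGImageGetBitsPerPixel(cgImage),
      CGImageGetBytesPerRow(cgImage),
      CGImageGetWidth(cgImage),
      CGImageGetHeight(cgImage),
      (unsigned)CGImageGetAlphaInfo(cgImage));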