Hi,

I am trying to convert an image into grayscale in the following way:

#define bytesPerPixel 4
#define bitsPerComponent 8

-(unsigned char*) getBytesForImage: (UIImage*)pImage
{
    CGImageRef image = [pImage CGImage];
    NSUInteger width = CGImageGetWidth(image);
    NSUInteger height = CGImageGetHeight(image);

    NSUInteger bytesPerRow = bytesPerPixel * width;

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    unsigned char *rawData = malloc(height * width * 4);
    CGContextRef context = CGBitmapContextCreate(rawData, width, height, bitsPerComponent, bytesPerRow, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);

    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
    CGContextRelease(context);

    return rawData;
}

-(UIImage*) processImage: (UIImage*)pImage
{   
    DebugLog(@"processing image");
    unsigned char *rawData = [self getBytesForImage: pImage];

    NSUInteger width = pImage.size.width;
    NSUInteger height = pImage.size.height;

    DebugLog(@"width: %d", width);
    DebugLog(@"height: %d", height);

    NSUInteger bytesPerRow = bytesPerPixel * width;

    for (int xCoordinate = 0; xCoordinate < width; xCoordinate++)
    {
     for (int yCoordinate = 0; yCoordinate < height; yCoordinate++)
     {
      int byteIndex = (bytesPerRow * yCoordinate) + xCoordinate * bytesPerPixel;

      //Getting original colors
      float red = ( rawData[byteIndex] / 255.f );
      float green = ( rawData[byteIndex + 1] / 255.f );
      float blue = ( rawData[byteIndex + 2] / 255.f );

      //Processing pixel data
      float averageColor = (red + green + blue) / 3.0f;

      red = averageColor;
      green = averageColor;
      blue = averageColor;

      //Assigning new color components
      rawData[byteIndex] = (unsigned char) red * 255;
      rawData[byteIndex + 1] = (unsigned char) green * 255;
      rawData[byteIndex + 2] = (unsigned char) blue * 255;


     }
    }

    NSData* newPixelData = [NSData dataWithBytes: rawData length: height * width * 4];
    UIImage* newImage = [UIImage imageWithData: newPixelData];

    free(rawData);

    DebugLog(@"image processed");

    return newImage;

}

So when I want to convert an image, I just call processImage:

imageToDisplay.image = [self processImage: image];

But imageToDisplay doesn't show anything. What could be the problem?

Thanks.

+2  A: 

What exactly takes place when you use this function? Is the function returning an invalid image, or is the display not showing it correctly?
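
One quick way to tell those two cases apart is to log what processImage: actually returns before assigning it. A minimal sketch, reusing the names from your question:

    UIImage *converted = [self processImage: image];
    // If this logs (null), the conversion itself is failing;
    // otherwise the problem is on the display side.
    NSLog(@"converted: %@, size: %@", converted,
          converted ? NSStringFromCGSize(converted.size) : @"n/a");
    imageToDisplay.image = converted;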

This is the method I use to convert to greyscale.

- (UIImage *) convertToGreyscale:(UIImage *)i {

    int kRed = 1;
    int kGreen = 2;
    int kBlue = 4;

    int colors = kGreen;
    int m_width = i.size.width;
    int m_height = i.size.height;

    uint32_t *rgbImage = (uint32_t *) malloc(m_width * m_height * sizeof(uint32_t));
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(rgbImage, m_width, m_height, 8, m_width * 4, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipLast);
    CGContextSetInterpolationQuality(context, kCGInterpolationHigh);
    CGContextSetShouldAntialias(context, NO);
    CGContextDrawImage(context, CGRectMake(0, 0, m_width, m_height), [i CGImage]);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // now convert to grayscale
    uint8_t *m_imageData = (uint8_t *) malloc(m_width * m_height);
    for(int y = 0; y < m_height; y++) {
     for(int x = 0; x < m_width; x++) {
      uint32_t rgbPixel=rgbImage[y*m_width+x];
      uint32_t sum=0,count=0;
      if (colors & kRed) {sum += (rgbPixel>>24)&255; count++;}
      if (colors & kGreen) {sum += (rgbPixel>>16)&255; count++;}
      if (colors & kBlue) {sum += (rgbPixel>>8)&255; count++;}
      m_imageData[y*m_width+x]=sum/count;
     }
    }
    free(rgbImage);

    // convert from a gray scale image back into a UIImage
    uint8_t *result = (uint8_t *) calloc(m_width * m_height *sizeof(uint32_t), 1);

    // process the image back to rgb
    for(int p = 0; p < m_height * m_width; p++) {
     result[p*4]=0;
     int val=m_imageData[p];
     result[p*4+1]=val;
     result[p*4+2]=val;
     result[p*4+3]=val;
    }
    free(m_imageData);

    // create a UIImage
    colorSpace = CGColorSpaceCreateDeviceRGB();
    context = CGBitmapContextCreate(result, m_width, m_height, 8, m_width * sizeof(uint32_t), colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipLast);
    CGImageRef image = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    UIImage *resultUIImage = [UIImage imageWithCGImage:image];
    CGImageRelease(image);

    // make sure the pixel buffer will be released by giving it to an autoreleased NSData
    [NSData dataWithBytesNoCopy:result length:m_width * m_height * sizeof(uint32_t)];

    return resultUIImage;
}
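
For reference, calling it is just this; a minimal usage sketch where sourceImage and imageView are placeholder names:

    UIImage *grey = [self convertToGreyscale:sourceImage];
    imageView.image = grey;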
Dutchie432
Thanks, that is working, but I get an image that is rotated by 90 degrees from the original position. How can I fix it?
Ilya
Check out http://www.cocoadev.com/index.pl?UIImage (or just google "How to rotate a UIImage").
Dutchie432
I notice that this function doesn't respect the original image's alpha transparency mask.
willc2
Wikipedia and others seem to imply that the correct distribution is 0.3·Red + 0.59·Green + 0.11·Blue, not just averaging the three colors.
mahboudz
This is a function I've been using for quite some time with zero problems. Take it or leave it. :)
Dutchie432
+3  A: 

I needed a version that preserved the alpha channel, so I modified the code posted by Dutchie432:

@implementation UIImage (grayscale)

typedef enum {
    ALPHA = 0,
    BLUE = 1,
    GREEN = 2,
    RED = 3
} PIXELS;

- (UIImage *)convertToGrayscale {
    CGSize size = [self size];
    int width = size.width;
    int height = size.height;

    // the pixels will be painted to this array
    uint32_t *pixels = (uint32_t *) malloc(width * height * sizeof(uint32_t));

    // clear the pixels so any transparency is preserved
    memset(pixels, 0, width * height * sizeof(uint32_t));

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // create a context with RGBA pixels
    CGContextRef context = CGBitmapContextCreate(pixels, width, height, 8, width * sizeof(uint32_t), colorSpace, 
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedLast);

    // paint the bitmap to our context which will fill in the pixels array
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), [self CGImage]);

    for(int y = 0; y < height; y++) {
        for(int x = 0; x < width; x++) {
            uint8_t *rgbaPixel = (uint8_t *) &pixels[y * width + x];

            // convert to grayscale using recommended method: http://en.wikipedia.org/wiki/Grayscale#Converting_color_to_grayscale
            uint32_t gray = 0.3 * rgbaPixel[RED] + 0.59 * rgbaPixel[GREEN] + 0.11 * rgbaPixel[BLUE];

            // set the pixels to gray
            rgbaPixel[RED] = gray;
            rgbaPixel[GREEN] = gray;
            rgbaPixel[BLUE] = gray;
        }
    }

    // create a new CGImageRef from our context with the modified pixels
    CGImageRef image = CGBitmapContextCreateImage(context);

    // we're done with the context, color space, and pixels
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    free(pixels);

    // make a new UIImage to return
    UIImage *resultUIImage = [UIImage imageWithCGImage:image];

    // we're done with image now too
    CGImageRelease(image);

    return resultUIImage;
}

@end
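
For completeness, a minimal usage sketch of the category; the header name, originalImage, and imageView are assumptions on my part:

    #import "UIImage+Grayscale.h"   // hypothetical header declaring the category above

    UIImage *gray = [originalImage convertToGrayscale];
    imageView.image = gray;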
Cam
A: 

By the way, when I use the convertToGrayscale method on big images it rotates them, but on small images it doesn't. That's really strange behavior. Has anyone else had the same problem?

Adeem Maqsood Basraa
A: 

Thanks. It works great! Hallelujah!

Joseph