
Saturday, August 17, 2013

How to check whether a UIImage is black and white or not?


Please follow this post along with the post here

In one of my previous posts, I described how to convert an image to black and white and to grayscale. Here, I'm going to explain how to check whether an image is black and white or not. The idea is simple: in a black and white (or grayscale) image every pixel has red, green and blue values that are very close to each other, so a pixel like (128, 130, 127) counts as grey while (200, 40, 40) clearly does not.
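The per-pixel test boils down to one small comparison. Here is a minimal sketch of it, assuming the usual 8-bit channels (the helper name pixelIsGray is just for illustration and does not appear in the project):

static BOOL pixelIsGray(UInt8 red, UInt8 green, UInt8 blue, int threshold)
{
    //greys have red, green and blue values very close to each other,
    //so every channel should stay within the threshold of the channel average
    int average = (red + green + blue) / 3;
    return abs(average - red) < threshold &&
           abs(average - green) < threshold &&
           abs(average - blue) < threshold;
}

The full method below applies exactly this test to every pixel of the image.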

//checks whether the selected image is black and white or not
-(IBAction) checkBlackAndWhite:(id)sender
{
    //sourceImage is the image the user selected from the Image Picker. I simply display it on a
    //UIImageView and, for the processing, I use the image that is currently shown in that UIImageView.
    if(sourceImage==nil)
    {
        NSLog(@"Image Not selected");
        
        alert=[[UIAlertView alloc] initWithTitle:@"Image Process" message:@"Image Not selected" delegate:nil cancelButtonTitle:@"Cancel" otherButtonTitles:@"OK", nil];
        [alert show];
    }
    
    else
    {
        //run the per-pixel check only once we know an image has actually been selected
        BOOL blackNWhite=[self imageIsBlackAndWhite:sourceImage];
        
        if(blackNWhite==YES)
        {
            NSLog(@"Image is Black and White");
            
            alert=[[UIAlertView alloc] initWithTitle:@"Image Process" message:@"Image is Black and White" delegate:nil cancelButtonTitle:@"Cancel" otherButtonTitles:@"OK", nil];
            [alert show];
        }
        
        else
        {
            NSLog(@"Image is not Black and White");
            
            alert=[[UIAlertView alloc] initWithTitle:@"Image Process" message:@"Image is not Black and White" delegate:nil cancelButtonTitle:@"Cancel" otherButtonTitles:@"OK", nil];
            [alert show];
        }
    }
}

//method will check whether a selected image is a black and white image or not
-(BOOL)imageIsBlackAndWhite:(UIImage *)image
{
    // load the raw pixel data of the image
    // NOTE: this assumes the CGImage stores its pixels as 8-bit RGB(A) channels;
    // see the variant after this method for a version that forces a known RGBA8 layout first
    CGImageRef imageRef = image.CGImage;
    CFDataRef cfData = CGDataProviderCopyData(CGImageGetDataProvider(imageRef));
    NSData *data = (__bridge NSData *)cfData;
    const UInt8 *pixels = (const UInt8 *)[data bytes];
    
    const int threshold = 10; //how far a channel may drift from the average and still count as grey
    BOOL isBlackNWhite = YES;
    
    for(NSUInteger i = 0; i + 3 < [data length]; i += 4)
    {
        UInt8 red = pixels[i];
        UInt8 green = pixels[i+1];
        UInt8 blue = pixels[i+2];
        
        //greys have RGB values very close to each other, so check whether any
        //single channel is too far from the average value
        int average = (red + green + blue) / 3;
        
        if(abs(average - red) >= threshold || abs(average - green) >= threshold || abs(average - blue) >= threshold)
        {
            //found a coloured pixel, so the image cannot be black and white; stop scanning
            isBlackNWhite = NO;
            break;
        }
    }
    
    CFRelease(cfData);
    return isBlackNWhite;
}
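Note that CGDataProviderCopyData simply returns whatever pixel format the CGImage happens to be stored in, which is not always tightly packed 8-bit RGBA. If you want to be on the safe side, a variant of the check (a sketch, reusing the convertUIImageToBitmapRGBA8: helper from the second half of this post) first redraws the image into a known RGBA8 buffer and scans that instead:

-(BOOL)imageIsBlackAndWhiteRGBA8:(UIImage *)image
{
    //force the pixel data into a known RGBA8 layout first
    unsigned char *bitmap = [self convertUIImageToBitmapRGBA8:image];
    if(bitmap == NULL)
    {
        return NO;
    }
    
    const int threshold = 10;
    BOOL isBlackNWhite = YES;
    size_t length = CGImageGetWidth(image.CGImage) * CGImageGetHeight(image.CGImage) * 4;
    
    for(size_t i = 0; i < length; i += 4)
    {
        int average = (bitmap[i] + bitmap[i+1] + bitmap[i+2]) / 3;
        if(abs(average - bitmap[i]) >= threshold ||
           abs(average - bitmap[i+1]) >= threshold ||
           abs(average - bitmap[i+2]) >= threshold)
        {
            isBlackNWhite = NO;
            break;
        }
    }
    
    free(bitmap); //the helper returns a malloc'd buffer, so it has to be freed here
    return isBlackNWhite;
}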





How to convert a UIImage to Black and White?


Please follow this post along with the post here

//this button click event will call the convertToBlackNWhite: method to start the conversion to black and white
-(IBAction) convertBlackNWhite:(id)sender
{    
    if(sourceImage==nil) //sourceImage is the user selected image from the Image Picker; I simply display it on a UIImageView and, for the processing, use the image that is currently shown there
    {
        NSLog(@"Image Not selected");
    }
    
    else
    {    
        UIImage *blackAndWhiteImage=[self convertToBlackNWhite:sourceImage];
        resultImageView.image=blackAndWhiteImage;
    }
}

//converts an image to its black and white version, which is then shown in the UIImageView on the screen
-(UIImage *)convertToBlackNWhite:(UIImage *)initialImage
{
    /*
     approach: thresholding
     get the UIImage and convert it to its raw RGBA8 bitmap.
     check each pixel: if it is brighter than a certain value, make it white, otherwise make it black.
     */
    unsigned char *bitmap = [self convertUIImageToBitmapRGBA8:initialImage];
    
    if(bitmap == NULL)
    {
        return initialImage;
    }
    
    //use the CGImage dimensions (pixels) rather than size (points), so the whole buffer is covered on Retina images
    size_t width = CGImageGetWidth(initialImage.CGImage);
    size_t height = CGImageGetHeight(initialImage.CGImage);
    
    for (size_t i = 0; i < width * height * 4; i += 4)
    {
        //a pixel whose channels sum to less than half of the maximum (255 * 3) becomes black, otherwise white
        if ((bitmap[i] + bitmap[i + 1] + bitmap[i + 2]) < (255 * 3 / 2))
        {
            bitmap[i] = 0;
            bitmap[i + 1] = 0;
            bitmap[i + 2] = 0;
        }
        else
        {
            bitmap[i] = 255;
            bitmap[i + 1] = 255;
            bitmap[i + 2] = 255;
        }
    }
    
    UIImage *result = [self convertBitmapRGBA8ToUIImage:bitmap withWidth:(int)width withHeight:(int)height];
    free(bitmap); //the bitmap buffer was malloc'd by the helper, so release it once the UIImage has been created
    
    return result;
}
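The simple channel sum treats red, green and blue as equally bright, which they are not perceptually. If you would rather have the threshold follow how bright a pixel actually looks, a small variation (a sketch using the common Rec. 601 luma weights, not something from the original code) replaces the body of the loop above with a weighted luminance comparison:

//inside the loop above: compare a perceptual luminance against the midpoint instead of the raw channel sum
float luminance = 0.299f * bitmap[i] + 0.587f * bitmap[i + 1] + 0.114f * bitmap[i + 2];
unsigned char value = (luminance < 127.5f) ? 0 : 255;
bitmap[i]     = value;
bitmap[i + 1] = value;
bitmap[i + 2] = value;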

//The conversion methods below are taken from Paul Solt's gist:
//https://gist.github.com/PaulSolt/739132

-(UIImage *) convertBitmapRGBA8ToUIImage:(unsigned char *) buffer withWidth:(int) width withHeight:(int) height
{
    size_t bufferLength = width * height * 4;
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer, bufferLength, NULL);
    size_t bitsPerComponent = 8;
    size_t bitsPerPixel = 32;
    size_t bytesPerRow = 4 * width;
    
    CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
    
    if(colorSpaceRef == NULL)
    {
        NSLog(@"Error allocating color space");
        CGDataProviderRelease(provider);
        return nil;
    }
    
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast;
    CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
    
    CGImageRef iref = CGImageCreate(width,
                                    height,
                                    bitsPerComponent,
                                    bitsPerPixel,
                                    bytesPerRow,
                                    colorSpaceRef,
                                    bitmapInfo,
                                    provider, // data provider
                                    NULL,     // decode
                                    YES,      // should interpolate
                                    renderingIntent);
    
    uint32_t* pixels = (uint32_t*)malloc(bufferLength);
    
    if(pixels == NULL)
    {
        NSLog(@"Error: Memory not allocated for bitmap");
        CGDataProviderRelease(provider);
        CGColorSpaceRelease(colorSpaceRef);
        CGImageRelease(iref);
        return nil;
    }
    
    CGContextRef context = CGBitmapContextCreate(pixels, width, height, bitsPerComponent, bytesPerRow, colorSpaceRef, bitmapInfo);
    if(context == NULL)
    {
        NSLog(@"Error context not created");
        free(pixels);
        pixels = NULL; //avoid freeing the buffer a second time below
    }
    
    UIImage *image = nil;
    
    if(context)
    {
        CGContextDrawImage(context, CGRectMake(0.0f, 0.0f, width, height), iref);
        CGImageRef imageRef = CGBitmapContextCreateImage(context);
        
        // Support both iPad 3.2 and iPhone 4 Retina displays with the correct scale
        if([UIImage respondsToSelector:@selector(imageWithCGImage:scale:orientation:)])
        {
            float scale = [[UIScreen mainScreen] scale];
            image = [UIImage imageWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
        }
        else
        {
            image = [UIImage imageWithCGImage:imageRef];
        }
        
        CGImageRelease(imageRef);
        CGContextRelease(context);
    }
    
    CGColorSpaceRelease(colorSpaceRef);
    CGImageRelease(iref);
    CGDataProviderRelease(provider);
    
    if(pixels)
    {
        free(pixels);
    }
    
    return image;
}

-(unsigned char *) convertUIImageToBitmapRGBA8:(UIImage *) image
{
    CGImageRef imageRef = image.CGImage;
    
    // Create a bitmap context to draw the UIImage into
    CGContextRef context = [self newBitmapRGBA8ContextFromImage:imageRef];
    
    if(!context)
    {
        return NULL;
    }
    
    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    
    CGRect rect = CGRectMake(0, 0, width, height);
    
    // Draw image into the context to get the raw image data
    CGContextDrawImage(context, rect, imageRef);
    
    // Get a pointer to the data
    unsigned char *bitmapData = (unsigned char *)CGBitmapContextGetData(context);
    
    // Copy the data and release the original memory (the caller owns and must free the returned buffer)
    size_t bytesPerRow = CGBitmapContextGetBytesPerRow(context);
    size_t bufferLength = bytesPerRow * height;
    
    unsigned char *newBitmap = NULL;
    
    if(bitmapData)
    {
        newBitmap = (unsigned char *)malloc(sizeof(unsigned char) * bytesPerRow * height);
        
        if(newBitmap)
        {
            // Copy the data
            for(size_t i = 0; i < bufferLength; ++i)
            {
                newBitmap[i] = bitmapData[i];
            }
        }
        
        free(bitmapData);
    }
    else
    {
        NSLog(@"Error getting bitmap pixel data\n");
    }
    
    CGContextRelease(context);
    return newBitmap;
}

-(CGContextRef) newBitmapRGBA8ContextFromImage:(CGImageRef) image
{
    CGContextRef context = NULL;
    CGColorSpaceRef colorSpace;
    uint32_t *bitmapData;
    
    size_t bitsPerPixel = 32;
    size_t bitsPerComponent = 8;
    size_t bytesPerPixel = bitsPerPixel / bitsPerComponent;
    
    size_t width = CGImageGetWidth(image);
    size_t height = CGImageGetHeight(image);
    
    size_t bytesPerRow = width * bytesPerPixel;
    size_t bufferLength = bytesPerRow * height;
    
    colorSpace = CGColorSpaceCreateDeviceRGB();
    
    if(!colorSpace)
    {
        NSLog(@"Error allocating color space RGB\n");
        return NULL;
    }
    
    // Allocate memory for image data
    bitmapData = (uint32_t *)malloc(bufferLength);
    
    if(!bitmapData)
    {
        NSLog(@"Error allocating memory for bitmap\n");
        CGColorSpaceRelease(colorSpace);
        return NULL;
    }
    
    // Create bitmap context
    context = CGBitmapContextCreate(bitmapData, width, height, bitsPerComponent, bytesPerRow, colorSpace,
                                    kCGImageAlphaPremultipliedLast); // RGBA
    
    if(!context)
    {
        free(bitmapData);
        NSLog(@"Bitmap context not created");
    }
    
    CGColorSpaceRelease(colorSpace);
    return context;
}