如下:
// NOTE(review): the original paste dropped five bracketed system #imports
// (the HTML extraction ate everything between < and >). They most likely
// included <UIKit/UIKit.h> and the OpenCV headers — restore them from the
// original project. The quoted imports below also lost their quotes in the
// paste; restored here.
#import "UIImage+OpenCV.h"
#import "MyViewController.h"

// Aperture value to use for the Canny edge detection
const int kCannyAperture = 7;

@interface MyViewController ()
- (void)processFrame;
@end

@implementation MyViewController

@synthesize imageView = _imageView;
@synthesize imageView1 = _imageView1;

/// Loads a scene image and a template image, runs skin detection on both,
/// locates the template inside the scene with cvMatchTemplate, outlines the
/// best match on the scene image, and displays it in imageView1.
- (void)viewDidLoad
{
    [super viewDidLoad];

    // Scene image. NOTE(review): the paste stripped the quotes around the
    // image file names; restored as @"..." literals.
    UIImage *mImage = [UIImage imageNamed:@"防偽標籤007.jpg"];
    IplImage *srcIpl = [self convertToIplImage:mImage];
    IplImage *dscIpl = cvCreateImage(cvGetSize(srcIpl), srcIpl->depth, 1);
    [self SkinDetect:srcIpl withParam:dscIpl];
    self.imageView.image = mImage;

    // Template image, processed the same way.
    UIImage *mImage1 = [UIImage imageNamed:@"temple005.jpg"];
    self.imageView1.image = mImage1;
    IplImage *srcIpl1 = [self convertToIplImage:mImage1];
    IplImage *dscIpl1 = cvCreateImage(cvGetSize(srcIpl1), srcIpl1->depth, 1);
    [self SkinDetect:srcIpl1 withParam:dscIpl1];
    // (The original also built unused GRAY2BGR copies of the skin-detect
    // masks, dscIplNew/dscIplNew1, and leaked them — removed.)

    // Template matching. The result map must be single-channel 32-bit float,
    // sized (W - w + 1) x (H - h + 1).
    int resultW = srcIpl->width - srcIpl1->width + 1;
    int resultH = srcIpl->height - srcIpl1->height + 1;
    IplImage *result = cvCreateImage(cvSize(resultW, resultH), IPL_DEPTH_32F, 1);
    cvMatchTemplate(srcIpl, srcIpl1, result, CV_TM_SQDIFF);

    // With CV_TM_SQDIFF the BEST match is the MINIMUM of the result map.
    double minValue, maxValue;
    CvPoint minLoc, maxLoc;
    cvMinMaxLoc(result, &minValue, &maxValue, &minLoc, &maxLoc);

    // Outline the best match on the scene image (BGR red) and display it.
    cvRectangle(srcIpl, minLoc,
                cvPoint(minLoc.x + srcIpl1->width, minLoc.y + srcIpl1->height),
                cvScalar(0, 0, 255));
    self.imageView1.image = [self convertToUIImage:srcIpl];

    // Release every IplImage created above — the original leaked all of them.
    cvReleaseImage(&result);
    cvReleaseImage(&dscIpl1);
    cvReleaseImage(&srcIpl1);
    cvReleaseImage(&dscIpl);
    cvReleaseImage(&srcIpl);
}

/// Converts a UIImage to a freshly allocated 3-channel BGR IplImage.
/// The caller owns the returned image and must cvReleaseImage() it.
- (IplImage *)convertToIplImage:(UIImage *)image
{
    CGImageRef imageRef = image.CGImage;
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Draw into a temporary 4-channel RGBA buffer first: CGBitmapContextCreate
    // cannot target a 24-bit no-alpha layout directly.
    IplImage *iplImage = cvCreateImage(cvSize(image.size.width, image.size.height),
                                       IPL_DEPTH_8U, 4);
    CGContextRef contextRef =
        CGBitmapContextCreate(iplImage->imageData,
                              iplImage->width,
                              iplImage->height,
                              iplImage->depth,       // 8 bits per component (IPL_DEPTH_8U == 8)
                              iplImage->widthStep,   // bytes per row
                              colorSpace,
                              kCGImageAlphaPremultipliedLast | kCGBitmapByteOrderDefault);
    CGContextDrawImage(contextRef,
                       CGRectMake(0, 0, image.size.width, image.size.height),
                       imageRef);
    CGContextRelease(contextRef);
    CGColorSpaceRelease(colorSpace);

    // The buffer is RGBA (4 channels), so the correct conversion code is
    // CV_RGBA2BGR. The original used CV_RGB2BGR, which requires a 3-channel
    // source and fails on this buffer.
    IplImage *ret = cvCreateImage(cvGetSize(iplImage), IPL_DEPTH_8U, 3);
    cvCvtColor(iplImage, ret, CV_RGBA2BGR);
    cvReleaseImage(&iplImage);
    return ret;
}

/// Converts a 3-channel BGR IplImage to a UIImage.
/// WARNING: mutates the input in place (swaps BGR -> RGB).
- (UIImage *)convertToUIImage:(IplImage *)image
{
    cvCvtColor(image, image, CV_BGR2RGB);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Copy the pixels into an NSData so the CGImage remains valid even after
    // the caller releases the IplImage.
    NSData *data = [NSData dataWithBytes:image->imageData length:image->imageSize];
    // NOTE(review): under ARC this cast must be (__bridge CFDataRef); the
    // plain cast is kept because this file predates ARC (viewDidUnload era).
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((CFDataRef)data);
    CGImageRef imageRef = CGImageCreate(image->width,
                                        image->height,
                                        image->depth,                    // bits per component
                                        image->depth * image->nChannels, // bits per pixel (24)
                                        image->widthStep,                // bytes per row
                                        colorSpace,
                                        kCGImageAlphaNone | kCGBitmapByteOrderDefault,
                                        provider, NULL, false,
                                        kCGRenderingIntentDefault);
    UIImage *ret = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    return ret;
}

- (void)viewDidUnload
{
    [super viewDidUnload];
    self.imageView = nil;
    self.imageView1 = nil;
    // _videoCapture is presumably a C++ object declared in the header
    // (hence `delete`, making this an Objective-C++ file) — confirm.
    delete _videoCapture;
    _videoCapture = nil;
}

@end