[iOS] 사진에 얼굴이 있는지 확인하기

9783 단어 학습 노트
얼굴 인식 기능은 여러 곳에서 활용할 수 있다. 예를 들어 앱에서 사용자가 얼굴 사진을 업로드해야 할 때, 업로드 전에 그 사진에 실제로 얼굴이 있는지 먼저 판단해야 한다.
[사용]
1. 이미 UIImage를 받은 후 바로 검출하기
UIImage *editedImage = imgModel.previewPhoto?:imgModel.thumbPhoto;
///           
if ([IWCheckImageHaveFace checkHaveFaceWithImage:editedImage]) {
    [self uploadImages:editedImage];
} else {
    [MBProgressHUD showError:@"           ,     "];
}

2. 카메라 출력 데이터에서 UIImage를 가져와서 검출하기
#import "IWFaceCompareCheckController.h"

#import <AVFoundation/AVFoundation.h>

#import "CommonHeader.h"
#import "IWCheckImageHaveFace.h"


@interface IWFaceCompareCheckController () 

@property (nonatomic, assign) BOOL isDelay;
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureDevice *captureDevice;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;

@end

@implementation IWFaceCompareCheckController

- (AVCaptureSession *)captureSession {
    if (!_captureSession) {
        _captureSession = [[AVCaptureSession alloc] init];
    }
    return _captureSession;
}

- (void)viewDidLoad {
    [super viewDidLoad];
    [self beginSessionIsFront:NO];
    [self performSelector:@selector(startCheckAction) withObject:nil afterDelay:1.0];
}

- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    [self.captureSession stopRunning];
}

- (void)returnPresVcAction {
    [self.captureSession stopRunning];
    [self.navigationController popViewControllerAnimated:NO];
}

- (void)startCheckAction {
    _isDelay = YES;
}

- (AVCaptureDevice *)videoDeviceWithPosition:(AVCaptureDevicePosition)position {
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if ([device position] == position) {
            return device;
        }
    }
    return nil;
}

- (void)beginSessionIsFront:(BOOL)isFront {
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;

    AVCaptureDevice *videoDevice;
    if (isFront) {
        videoDevice = [self videoDeviceWithPosition:AVCaptureDevicePositionFront];
    } else {
        videoDevice = [self videoDeviceWithPosition:AVCaptureDevicePositionBack];
    }

    NSError *error;
    AVCaptureDeviceInput *deviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:videoDevice error:&error];
    if ([self.captureSession canAddInput:deviceInput]) {
        [self.captureSession addInput:deviceInput];
    } else {
        NSLog(@"add input error");
    }
    
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    dispatch_queue_t cameraQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:self queue:cameraQueue];
    output.videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                            [NSNumber numberWithUnsignedInteger:kCVPixelFormatType_32BGRA],
                            kCVPixelBufferPixelFormatTypeKey,
                            nil];
    [self.captureSession addOutput:output];
    if (error) {
        NSLog(@"error:%@",error.description);
    }
    
    AVCaptureConnection *videoConnection = [output connectionWithMediaType:AVMediaTypeVideo];
    if (isFront) {
        if (videoConnection.supportsVideoMirroring) {
            [videoConnection setVideoMirrored:true];
        }
    } else {
        if (videoConnection.supportsVideoMirroring) {
            [videoConnection setVideoMirrored:false];
        }
    }
    
    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.previewLayer.videoGravity = @"AVLayerVideoGravityResizeAspect";
    self.previewLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:self.previewLayer];
    [self.captureSession startRunning];
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    //     ,  1     
    if (!_isDelay) {
        return;
    }
    UIImage *resultImage = [IWCheckImageHaveFace sampleBufferToImage:sampleBuffer];
    //           
    if ([IWCheckImageHaveFace checkHaveFaceWithImage:resultImage]) {
        [self uploadImage:resultImage];
    } else {
        
    }
}

@end

[검출 구현 코드]
#import 
#import 
#import 

@interface IWCheckImageHaveFace : NSObject

+ (UIImage *)sampleBufferToImage:(CMSampleBufferRef)sampleBuffer;

///           
+ (BOOL)checkHaveFaceWithImage:(UIImage *)resultImage;

@end
#import "IWCheckImageHaveFace.h"

@interface IWCheckImageHaveFace ()

@end

@implementation IWCheckImageHaveFace

+ (UIImage *)sampleBufferToImage:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
    CGImageRef videoImage = [temporaryContext createCGImage:ciImage fromRect:CGRectMake(0, 0, CVPixelBufferGetWidth(imageBuffer), CVPixelBufferGetHeight(imageBuffer))];
    UIImage *result = [[UIImage alloc] initWithCGImage:videoImage scale:1.0 orientation:UIImageOrientationLeftMirrored];
    CGImageRelease(videoImage);
    return result;
}

+ (BOOL)checkHaveFaceWithImage:(UIImage *)resultImage {
    BOOL result = NO;
    resultImage = [self fixOrientation:resultImage];
            
    CGFloat scale = [UIScreen mainScreen].bounds.size.width / resultImage.size.width;
    //CGFloat topMargin = ([UIScreen mainScreen].bounds.size.height - resultImage.size.height * scale) * 0.5;
    
    CIImage *ciImage = [[CIImage alloc] initWithImage:resultImage];
    CIDetector *detector = [CIDetector  detectorOfType:CIDetectorTypeFace context:nil options:@{CIDetectorAccuracy: CIDetectorAccuracyHigh,CIDetectorMinFeatureSize:[[NSNumber alloc] initWithFloat:0.2],CIDetectorTracking:[NSNumber numberWithBool:YES]}];
    NSArray *results = (NSArray *) [detector featuresInImage:ciImage options:@{CIDetectorImageOrientation:[[NSNumber alloc]initWithInt:1]}];
    
    if (results.count > 0) {
        result = YES;
    }
    return result;
}

///       
- (UIImage *)fixOrientation:(UIImage *)aImage {
    // No-op if the orientation is already correct
    if (aImage.imageOrientation == UIImageOrientationUp)
        return aImage;
    
    // We need to calculate the proper transformation to make the image upright.
    // We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
    CGAffineTransform transform = CGAffineTransformIdentity;
    
    switch (aImage.imageOrientation) {
        case UIImageOrientationDown:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.width, aImage.size.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;
            
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
            transform = CGAffineTransformRotate(transform, M_PI_2);
            break;
            
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, 0, aImage.size.height);
            transform = CGAffineTransformRotate(transform, -M_PI_2);
            break;
        default:
            break;
    }
    
    switch (aImage.imageOrientation) {
        case UIImageOrientationUpMirrored:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;
            
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.height, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;
        default:
            break;
    }
    
    // Now we draw the underlying CGImage into a new context, applying the transform
    // calculated above.
    CGContextRef ctx = CGBitmapContextCreate(NULL, aImage.size.width, aImage.size.height,
                                             CGImageGetBitsPerComponent(aImage.CGImage), 0,
                                             CGImageGetColorSpace(aImage.CGImage),
                                             CGImageGetBitmapInfo(aImage.CGImage));
    CGContextConcatCTM(ctx, transform);
    switch (aImage.imageOrientation) {
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            // Grr...
            CGContextDrawImage(ctx, CGRectMake(0,0,aImage.size.height,aImage.size.width), aImage.CGImage);
            break;
        default:
            CGContextDrawImage(ctx, CGRectMake(0,0,aImage.size.width,aImage.size.height), aImage.CGImage);
            break;
    }
    
    // And now we just create a new UIImage from the drawing context
    CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
    UIImage *img = [UIImage imageWithCGImage:cgimg];
    CGContextRelease(ctx);
    CGImageRelease(cgimg);
    return img;
}

@end

좋은 웹페이지 즐겨찾기