[iOS] 사진에 얼굴이 있는지 확인하기
9783 단어 학습 노트
[사용]
1、이미 UIImage 를 받 은 후 바로 검색
UIImage *editedImage = imgModel.previewPhoto?:imgModel.thumbPhoto;
///
if ([IWCheckImageHaveFace checkHaveFaceWithImage:editedImage]) {
[self uploadImages:editedImage];
} else {
[MBProgressHUD showError:@"사진에서 얼굴이 검출되지 않았습니다."]; // (원문 메시지는 추출 과정에서 유실됨)
}
2. 카메라 출력 데이터에서 UIImage를 가져와서 검측합니다.
#import "IWFaceCompareCheckController.h"
#import "CommonHeader.h"
#import "IWCheckImageHaveFace.h"
#import <AVFoundation/AVFoundation.h>
// Private class extension. The sample-buffer delegate conformance below is
// required because -beginSessionIsFront: calls [output setSampleBufferDelegate:self ...]
// (the protocol list was most likely stripped by HTML extraction in the original).
@interface IWFaceCompareCheckController () <AVCaptureVideoDataOutputSampleBufferDelegate>
/// YES once the 1-second warm-up after -viewDidLoad has elapsed; frames are ignored before that.
@property (nonatomic, assign) BOOL isDelay;
/// Capture session, lazily created by the -captureSession accessor.
@property (nonatomic, strong) AVCaptureSession *captureSession;
/// NOTE(review): declared but never assigned in the visible code — confirm it is used elsewhere.
@property (nonatomic, strong) AVCaptureDevice *captureDevice;
/// Full-screen preview layer installed by -beginSessionIsFront:.
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@end
@implementation IWFaceCompareCheckController
#pragma mark - Lazy accessors

/// Lazily creates the capture session on first access.
- (AVCaptureSession *)captureSession {
    if (_captureSession) {
        return _captureSession;
    }
    _captureSession = [[AVCaptureSession alloc] init];
    return _captureSession;
}
/// Starts the capture pipeline (back camera) and arms face checking after a
/// one-second warm-up so the first dark/unfocused frames are ignored.
- (void)viewDidLoad {
    [super viewDidLoad];
    [self beginSessionIsFront:NO];
    // dispatch_after instead of -performSelector:afterDelay: so the pending
    // callback does not strongly retain self past its lifetime.
    __weak typeof(self) weakSelf = self;
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.0 * NSEC_PER_SEC)),
                   dispatch_get_main_queue(), ^{
        [weakSelf startCheckAction];
    });
}
/// Stops capture when the view goes away. Reads the ivar directly so the lazy
/// accessor does not allocate a brand-new session just to stop it
/// (messaging nil is a harmless no-op if no session was ever created).
- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    [_captureSession stopRunning];
}
/// Stops capture and pops back to the previous controller.
- (void)returnPresVcAction {
    // Ivar access: avoid lazily creating a session only to stop it.
    [_captureSession stopRunning];
    [self.navigationController popViewControllerAnimated:NO];
}
/// Arms per-frame face checking; scheduled one second after -viewDidLoad.
- (void)startCheckAction {
    self.isDelay = YES;
}
/// Returns the first video device at the requested position, or nil if none.
/// NOTE(review): +devicesWithMediaType: is deprecated since iOS 10 — consider
/// AVCaptureDeviceDiscoverySession when the deployment target allows it.
- (AVCaptureDevice *)videoDeviceWithPosition:(AVCaptureDevicePosition)position {
    NSArray<AVCaptureDevice *> *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    NSUInteger match = [cameras indexOfObjectPassingTest:^BOOL(AVCaptureDevice *camera,
                                                               NSUInteger idx,
                                                               BOOL *stop) {
        return camera.position == position;
    }];
    return (match == NSNotFound) ? nil : cameras[match];
}
/// Configures and starts the capture pipeline: camera input (front or back),
/// a 32BGRA video-data output delivered to self on a serial background queue,
/// and a full-screen preview layer on self.view.
/// @param isFront YES to use the front camera (preview mirrored), NO for the back.
- (void)beginSessionIsFront:(BOOL)isFront {
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
    AVCaptureDevicePosition position =
        isFront ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
    AVCaptureDevice *videoDevice = [self videoDeviceWithPosition:position];

    NSError *error = nil;
    AVCaptureDeviceInput *deviceInput =
        [[AVCaptureDeviceInput alloc] initWithDevice:videoDevice error:&error];
    // Report the failure right where it can occur — the original logged the
    // error only after unrelated output setup.
    if (deviceInput && [self.captureSession canAddInput:deviceInput]) {
        [self.captureSession addInput:deviceInput];
    } else {
        NSLog(@"add input error: %@", error.localizedDescription);
    }

    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    dispatch_queue_t cameraQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:self queue:cameraQueue];
    // 32BGRA so the buffer can be wrapped in a CIImage without conversion.
    output.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
    if ([self.captureSession canAddOutput:output]) {  // guard was missing in the original
        [self.captureSession addOutput:output];
    } else {
        NSLog(@"add output error");
    }

    // Mirror the preview for the front camera only (one branch instead of two).
    AVCaptureConnection *videoConnection = [output connectionWithMediaType:AVMediaTypeVideo];
    if (videoConnection.supportsVideoMirroring) {
        videoConnection.videoMirrored = isFront;
    }

    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    // Framework constant instead of the hard-coded string literal.
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    self.previewLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:self.previewLayer];
    [self.captureSession startRunning];
}
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

/// Called on the camera queue for every captured frame. Frames are discarded
/// until -startCheckAction arms checking (~1 s after load); after that, any
/// frame in which a face is detected is handed to -uploadImage:.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Skip the warm-up window so the first dark/unfocused frames are not checked.
    if (!_isDelay) {
        return;
    }
    UIImage *resultImage = [IWCheckImageHaveFace sampleBufferToImage:sampleBuffer];
    // Upload only frames that actually contain a face (empty else removed).
    if ([IWCheckImageHaveFace checkHaveFaceWithImage:resultImage]) {
        [self uploadImage:resultImage];
    }
}
@end
[검 측]
#import <UIKit/UIKit.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreImage/CoreImage.h>
/// Utility for detecting faces in still images and camera sample buffers.
@interface IWCheckImageHaveFace : NSObject
/// Converts a camera frame's pixel buffer into a UIImage
/// (tagged UIImageOrientationLeftMirrored — see the implementation).
+ (UIImage *)sampleBufferToImage:(CMSampleBufferRef)sampleBuffer;
/// Returns YES if at least one face is detected in the image.
+ (BOOL)checkHaveFaceWithImage:(UIImage *)resultImage;
@end
#import "IWCheckImageHaveFace.h"
@interface IWCheckImageHaveFace ()
@end
@implementation IWCheckImageHaveFace
/// Renders the frame's pixel buffer into a UIImage. The result is tagged
/// UIImageOrientationLeftMirrored to compensate for the camera sensor layout.
+ (UIImage *)sampleBufferToImage:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CGRect frameRect = CGRectMake(0, 0,
                                  CVPixelBufferGetWidth(pixelBuffer),
                                  CVPixelBufferGetHeight(pixelBuffer));
    CIImage *frame = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    CIContext *renderContext = [CIContext contextWithOptions:nil];
    CGImageRef cgFrame = [renderContext createCGImage:frame fromRect:frameRect];
    UIImage *image = [[UIImage alloc] initWithCGImage:cgFrame
                                                scale:1.0
                                          orientation:UIImageOrientationLeftMirrored];
    CGImageRelease(cgFrame);  // createCGImage follows the Create rule
    return image;
}
/// Returns YES if CIDetector finds at least one face in the image.
/// The image is first rotated to the "up" orientation because CIDetector
/// expects an upright bitmap.
+ (BOOL)checkHaveFaceWithImage:(UIImage *)resultImage {
    resultImage = [self fixOrientation:resultImage];
    CIImage *ciImage = [[CIImage alloc] initWithImage:resultImage];
    // High accuracy; ignore faces smaller than 20% of the image dimension.
    // NOTE(review): CIDetectorTracking only matters for video sequences; it is
    // kept here for behavior parity on this single-image path.
    CIDetector *detector =
        [CIDetector detectorOfType:CIDetectorTypeFace
                           context:nil
                           options:@{ CIDetectorAccuracy : CIDetectorAccuracyHigh,
                                      CIDetectorMinFeatureSize : @0.2f,
                                      CIDetectorTracking : @YES }];
    NSArray *features = [detector featuresInImage:ciImage
                                          options:@{ CIDetectorImageOrientation : @1 }];
    // (Removed the unused `scale` local and dead commented-out margin math.)
    return features.count > 0;
}
///
/// Redraws the image so its pixel data is physically in the "up" orientation.
///
/// FIX: this was declared as an instance method (`-`) but is invoked as
/// [self fixOrientation:...] from the class method +checkHaveFaceWithImage:,
/// where `self` is the class object — an unrecognized-selector crash at
/// runtime. Declaring it `+` makes the existing call site correct. It is a
/// private helper (not in the header), so no external caller is affected.
+ (UIImage *)fixOrientation:(UIImage *)aImage {
    // No-op if the orientation is already correct.
    if (aImage.imageOrientation == UIImageOrientationUp)
        return aImage;
    // We need to calculate the proper transformation to make the image upright.
    // We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
    CGAffineTransform transform = CGAffineTransformIdentity;
    switch (aImage.imageOrientation) {
        case UIImageOrientationDown:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.width, aImage.size.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
            transform = CGAffineTransformRotate(transform, M_PI_2);
            break;
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, 0, aImage.size.height);
            transform = CGAffineTransformRotate(transform, -M_PI_2);
            break;
        default:
            break;
    }
    switch (aImage.imageOrientation) {
        case UIImageOrientationUpMirrored:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.height, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;
        default:
            break;
    }
    // Now we draw the underlying CGImage into a new context, applying the
    // transform calculated above.
    CGContextRef ctx = CGBitmapContextCreate(NULL, aImage.size.width, aImage.size.height,
                                             CGImageGetBitsPerComponent(aImage.CGImage), 0,
                                             CGImageGetColorSpace(aImage.CGImage),
                                             CGImageGetBitmapInfo(aImage.CGImage));
    CGContextConcatCTM(ctx, transform);
    switch (aImage.imageOrientation) {
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            // Width/height are swapped for 90-degree rotations.
            CGContextDrawImage(ctx, CGRectMake(0, 0, aImage.size.height, aImage.size.width), aImage.CGImage);
            break;
        default:
            CGContextDrawImage(ctx, CGRectMake(0, 0, aImage.size.width, aImage.size.height), aImage.CGImage);
            break;
    }
    // And now we just create a new UIImage from the drawing context.
    CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
    UIImage *img = [UIImage imageWithCGImage:cgimg];
    CGContextRelease(ctx);
    CGImageRelease(cgimg);
    return img;
}
@end
이 내용에 흥미가 있습니까?
현재 기사가 여러분의 문제를 해결하지 못하는 경우 AI 엔진은 머신러닝 분석(스마트 모델이 방금 만들어져 부정확한 경우가 있을 수 있음)을 통해 가장 유사한 기사를 추천합니다:
STL 학습노트(6) 함수 객체모방 함수는 모두pass-by-value이다 함수 대상은 값에 따라 전달되고 값에 따라 되돌아오기 때문에 함수 대상은 가능한 한 작아야 한다(대상 복사 비용이 크다) 함수 f와 대상 x, x 대상에서 f를 호출하면:...
텍스트를 자유롭게 공유하거나 복사할 수 있습니다.하지만 이 문서의 URL은 참조 URL로 남겨 두십시오.
CC BY-SA 2.5, CC BY-SA 3.0 및 CC BY-SA 4.0에 따라 라이센스가 부여됩니다.