사용자 정의 카메라 공유
#import <AVFoundation/AVFoundation.h>
두 번째 단계, 필요한 대상 만들기
@property (nonatomic,strong) AVCaptureDevice *devides;
@property (nonatomic,strong) AVCaptureDeviceInput *input;//
@property (nonatomic,strong) AVCaptureStillImageOutput *imageOutput;//
@property (nonatomic,strong) AVCaptureSession *session;//
@property (nonatomic,strong) AVCaptureVideoPreviewLayer *previewLayer;//
3부, UI 설정
/// Builds the capture pipeline (device -> input -> session -> still-image
/// output), attaches a full-screen live preview layer, and lays out the
/// camera-flip, shutter, and back buttons.
///
/// NOTE(review): AVCaptureStillImageOutput is deprecated since iOS 10;
/// AVCapturePhotoOutput is the modern replacement — confirm deployment target.
- (void)initCamera
{
self.devides = [self cameraWithPosition:AVCaptureDevicePositionBack];

// Bug fix: the original passed error:nil and ignored input-creation failure,
// then kept configuring the session with a nil input. Capture and report it.
NSError *inputError = nil;
self.input = [[AVCaptureDeviceInput alloc] initWithDevice:self.devides error:&inputError];
if (self.input == nil)
{
NSLog(@"Failed to create camera input: %@", inputError);
return;
}

self.imageOutput = [[AVCaptureStillImageOutput alloc] init];
self.session = [[AVCaptureSession alloc] init];
self.session.sessionPreset = AVCaptureSessionPreset1280x720;
if ([self.session canAddInput:self.input])
{
[self.session addInput:self.input];
}
if ([self.session canAddOutput:self.imageOutput])
{
[self.session addOutput:self.imageOutput];
}

/** Live preview layer covering the whole view. **/
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
self.previewLayer.frame = CGRectMake(0, 0, WIDTH, HEIGHT);
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer:self.previewLayer];
// NOTE(review): -startRunning blocks; Apple recommends calling it off the
// main thread for a responsive UI.
[self.session startRunning];

/** Overlay layer drawn above the preview (zPosition 1). **/
CALayer *faceRectLayer = [CALayer new];
faceRectLayer.zPosition = 1;
faceRectLayer.frame = CGRectMake(0, 0, WIDTH, HEIGHT );
// Bug fix: 200/255 is C integer division (== 0), which made the color black.
// Use floating-point division to get the intended light-gray components
// (still invisible here because alpha is 0).
faceRectLayer.backgroundColor = [UIColor colorWithRed:200/255.0 green:200/255.0 blue:200/255.0 alpha:0].CGColor;
faceRectLayer.contents = (__bridge id _Nullable)(self.previewImage.CGImage);
[self.previewLayer addSublayer:faceRectLayer];

/** Camera-flip button, bottom-right corner. **/
change = [UIButton buttonWithType:UIButtonTypeCustom];
change.frame = CGRectMake(0, 0, 40, 40);
change.center = CGPointMake(WIDTH - 60, HEIGHT - 60);
[change setBackgroundImage:[UIImage imageNamed:@" "] forState:UIControlStateNormal];
[change addTarget:self action:@selector(changeCamera) forControlEvents:UIControlEventTouchUpInside];
[self.view addSubview:change];

/** Shutter button, bottom-center. **/
takePhoto = [UIButton buttonWithType:UIButtonTypeCustom];
takePhoto.frame = CGRectMake(0, 0, 60, 60);
takePhoto.center = CGPointMake(WIDTH/2, HEIGHT - 60);
[takePhoto setBackgroundImage:[UIImage imageNamed:@" "] forState:UIControlStateNormal];
[takePhoto addTarget:self action:@selector(selectedToTakePhoto) forControlEvents:UIControlEventTouchUpInside];
[self.view addSubview:takePhoto];

/** Back/dismiss button, bottom-left corner. **/
backBtn = [UIButton buttonWithType:UIButtonTypeCustom];
backBtn.frame = CGRectMake(0, 0, 40, 40);
backBtn.center = CGPointMake(60, HEIGHT - 60);
[backBtn setBackgroundImage:[UIImage imageNamed:@" "] forState:UIControlStateNormal];
[backBtn addTarget:self action:@selector(selectedToDismiss) forControlEvents:UIControlEventTouchUpInside];
[self.view addSubview:backBtn];
}
/// Returns the first video-capture device at the given position
/// (front or back), or nil when no such device exists.
///
/// NOTE(review): -devicesWithMediaType: is deprecated since iOS 10;
/// AVCaptureDeviceDiscoverySession is the modern replacement.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position{
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *match = nil;
for (AVCaptureDevice *candidate in videoDevices)
{
if (candidate.position == position)
{
match = candidate;
break;
}
}
return match;
}
4단계, 앞뒤 카메라 전환
#pragma mark
/// Switches between the front and back cameras, playing a flip
/// transition on the preview layer while the session input is swapped.
- (void)changeCamera
{
NSUInteger cameraCount = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count];
if (cameraCount > 1)
{
/** Flip animation shown on the preview layer during the swap. **/
CATransition *animation = [CATransition animation];
animation.duration = 0.5f;
animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut];
// NOTE(review): "oglFlip" is a private, undocumented transition type;
// it may break or be rejected in App Review.
animation.type = @"oglFlip";
AVCaptureDevice *newCamera = nil;
AVCaptureDeviceInput *newInput = nil;
/** Pick the opposite camera of the one currently attached. **/
AVCaptureDevicePosition position = [[_input device] position];
if (position == AVCaptureDevicePositionFront)
{
newCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
animation.subtype = kCATransitionFromLeft;// flip toward the left
}
else
{
newCamera = [self cameraWithPosition:AVCaptureDevicePositionFront];
animation.subtype = kCATransitionFromRight;// flip toward the right
}
// Both branches used the same preset; set it once.
self.session.sessionPreset = AVCaptureSessionPreset1280x720;
/** Create the replacement input, propagating any failure reason. **/
// Bug fix: the original passed error:nil and then read the never-written
// local `error` in the else-branch — undefined behavior on an
// uninitialized pointer. Initialize it and pass its address instead.
NSError *error = nil;
newInput = [AVCaptureDeviceInput deviceInputWithDevice:newCamera error:&error];
[self.previewLayer addAnimation:animation forKey:nil];
if (newInput != nil)
{
[self.session beginConfiguration];
[self.session removeInput:self.input];
if ([self.session canAddInput:newInput])
{
[self.session addInput:newInput];
self.input = newInput;
}
else
{
// Roll back to the previous input if the new one is rejected.
[self.session addInput:self.input];
}
[self.session commitConfiguration];
}
else if (error)
{
NSLog(@"Change carema failed.Error:%@", error);
}
}
}
5단계, 사진촬영, 이미지 획득
#pragma mark
/// Captures a single still frame from the image output and delivers the
/// decoded UIImage to the `takePhotoImage` callback block.
- (void)selectedToTakePhoto
{
AVCaptureConnection *connection = [self.imageOutput connectionWithMediaType:AVMediaTypeVideo];
if (!connection)
{
NSLog(@" !");
return;
}
[self.imageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
{
if (imageDataSampleBuffer == nil)
{
// Bug fix: the original silently swallowed capture failures;
// surface the error so failures are diagnosable.
if (error)
{
NSLog(@"Still image capture failed: %@", error);
}
return ;
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [UIImage imageWithData:imageData];
if (image == nil)
{
return;
}
/** Hand the image to the stored callback block. **/
// Bug fix: invoking a nil block crashes — guard before calling.
if (self.takePhotoImage)
{
self.takePhotoImage(image);
}
}];
}
이 내용에 흥미가 있습니까?
현재 기사가 여러분의 문제를 해결하지 못하는 경우 AI 엔진은 머신러닝 분석(스마트 모델이 방금 만들어져 부정확한 경우가 있을 수 있음)을 통해 가장 유사한 기사를 추천합니다:
다양한 언어의 JSONJSON은 Javascript 표기법을 사용하여 데이터 구조를 레이아웃하는 데이터 형식입니다. 그러나 Javascript가 코드에서 이러한 구조를 나타낼 수 있는 유일한 언어는 아닙니다. 저는 일반적으로 '객체'{}...
텍스트를 자유롭게 공유하거나 복사할 수 있습니다.하지만 이 문서의 URL은 참조 URL로 남겨 두십시오.
CC BY-SA 2.5, CC BY-SA 3.0 및 CC BY-SA 4.0에 따라 라이센스가 부여됩니다.