Getting YUV420SP Data from the iOS Camera

Two frameworks need to be imported:
#import <AVFoundation/AVFoundation.h>
#import <CoreVideo/CoreVideo.h>
The class must conform to the AVCaptureVideoDataOutputSampleBufferDelegate protocol.
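A minimal interface sketch, assuming a view controller named CameraViewController (the class name is illustrative; the captureSession and captureInput properties match the names used in the code below):

// Sketch: declaring conformance to the sample-buffer delegate protocol.
@interface CameraViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureDeviceInput *captureInput;
@end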
Setting up the session
- (void)setSession
{
    // Use the front camera as the capture input
    _captureInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self getFrontCameraDevice] error:nil];

    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;

    // Deliver sample buffers on a dedicated serial queue
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];

    // Request bi-planar YUV420SP (NV12) output
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];

    self.captureSession = [[AVCaptureSession alloc] init];
    [self.captureSession addInput:_captureInput];
    [self.captureSession addOutput:captureOutput];
    [self.captureSession setSessionPreset:AVCaptureSessionPreset640x480];

    // Frames are delivered only after the session is started
    [self.captureSession startRunning];
}
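One thing the method above does not cover: on a real device the app needs camera permission before any frames will arrive, and on iOS 10+ the Info.plist must contain an NSCameraUsageDescription entry. A minimal sketch using the AVCaptureDevice authorization API:

// Sketch: request camera access before configuring the session.
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    if (granted) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [self setSession]; // configure and start once access is granted
        });
    }
}];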

NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
This setting tells the camera to return frames in the YUV420SP (NV12 bi-planar) format.
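If you want to confirm the device actually supports this format, AVCaptureVideoDataOutput exposes its supported pixel formats; a quick sketch (captureOutput as in -setSession above):

// Sketch: check that NV12 video-range output is among the supported formats.
NSNumber *nv12 = @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
if ([captureOutput.availableVideoCVPixelFormatTypes containsObject:nv12]) {
    NSLog(@"YUV420SP (NV12) output is available");
}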
[self.captureSession setSessionPreset:AVCaptureSessionPreset640x480];
This sets the capture resolution to 640x480.
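Presets are best checked before being applied, since not every device supports every resolution; a sketch using canSetSessionPreset: (the 1280x720 preset here is just an example, not from the original):

// Sketch: fall back gracefully if a preset is unsupported on this device.
if ([self.captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
    [self.captureSession setSessionPreset:AVCaptureSessionPreset1280x720];
} else {
    [self.captureSession setSessionPreset:AVCaptureSessionPreset640x480];
}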
/**
 *  Find the front-facing camera device
 *
 *  @return the front AVCaptureDevice, or nil if none is found
 */
- (AVCaptureDevice *)getFrontCameraDevice{
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *camera in cameras) {
        if ([camera position] == AVCaptureDevicePositionFront) {
            return camera;
        }
    }
    return nil;
}
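Note that devicesWithMediaType: is deprecated as of iOS 10. A sketch of the replacement using AVCaptureDeviceDiscoverySession (the method name frontCameraDeviceModern is hypothetical):

// Sketch: iOS 10+ replacement for the deprecated devicesWithMediaType:.
- (AVCaptureDevice *)frontCameraDeviceModern {
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession
             discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                   mediaType:AVMediaTypeVideo
                                    position:AVCaptureDevicePositionFront];
    return discovery.devices.firstObject;
}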

Implementing the AVCaptureVideoDataOutputSampleBufferDelegate callback
#pragma mark AVCaptureSession - delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // Lock the base address of the pixel buffer before reading it
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // Timestamp info, available if frames are forwarded to an encoder
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CMTime duration = CMSampleBufferGetDuration(sampleBuffer);

    // Plane 0 holds Y; plane 1 holds the interleaved CbCr (UV) bytes
    uint8_t *yPlane  = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    uint8_t *uvPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);

    size_t width  = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Rearrange YUV420SP (NV12) into YUV420P (I420): copy Y unchanged,
    // then de-interleave UV into separate U and V planes.
    // (Assumes bytes-per-row equals width, i.e. no row padding.)
    Byte *buf = malloc(width * height * 3 / 2);
    memcpy(buf, yPlane, width * height);
    size_t a = width * height;          // offset of the U plane in buf
    size_t b = width * height * 5 / 4;  // offset of the V plane in buf
    for (size_t i = 0; i < width * height / 2; i += 2) {
        buf[a++] = uvPlane[i];          // Cb (U)
        buf[b++] = uvPlane[i + 1];      // Cr (V)
    }
    // ... consume buf here (e.g. upload to OpenGL ES), then free it
    free(buf);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}
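The copy above assumes each plane's bytes-per-row equals its width. On some devices and resolutions CVPixelBuffer rows are padded, so a robust version should honor the stride. A sketch for the Y plane, using the variables from the callback above:

// Sketch: stride-aware Y copy for buffers with padded rows.
size_t yStride = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
for (size_t row = 0; row < height; row++) {
    memcpy(buf + row * width, yPlane + row * yStride, width);
}
// The UV plane has its own stride: CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1).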

Rearranging YUV420SP data into YUV420P
    // NV12 stores UV interleaved (CbCrCbCr...); I420 wants U then V as
    // separate planes, so walk the UV plane two bytes at a time.
    Byte *buf = malloc(width * height * 3 / 2);
    memcpy(buf, yPlane, width * height);            // Y plane, copied as-is
    size_t a = width * height;                      // U plane offset in buf
    size_t b = width * height * 5 / 4;              // V plane offset in buf
    for (size_t i = 0; i < width * height / 2; i += 2) {
        buf[a++] = uvPlane[i];                      // Cb (U)
        buf[b++] = uvPlane[i + 1];                  // Cr (V)
    }
The data in buf is now plain YUV420P (I420) and can be rendered directly with OpenGL ES.
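As a sketch of that last step (requires <OpenGLES/ES2/gl.h>; the texture ids yTexture/uTexture/vTexture are assumed to already exist, and a YUV-to-RGB fragment shader is still needed), each I420 plane can be uploaded as a single-channel luminance texture in OpenGL ES 2.0:

// Sketch: upload the three I420 planes from buf as luminance textures.
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);  // plane widths may not be 4-byte aligned
glBindTexture(GL_TEXTURE_2D, yTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei)width, (GLsizei)height,
             0, GL_LUMINANCE, GL_UNSIGNED_BYTE, buf);
glBindTexture(GL_TEXTURE_2D, uTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei)(width / 2), (GLsizei)(height / 2),
             0, GL_LUMINANCE, GL_UNSIGNED_BYTE, buf + width * height);
glBindTexture(GL_TEXTURE_2D, vTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei)(width / 2), (GLsizei)(height / 2),
             0, GL_LUMINANCE, GL_UNSIGNED_BYTE, buf + width * height * 5 / 4);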
