iOS, Swift - QR Code/Barcode Scanning

Only a few short blocks of code are needed, so why three methods? The work splits naturally into setting up the capture session, calculating the scan region (rectOfInterest), and handling the delegate callback that receives the decoded result.

First, the Objective-C version.

#import "ScanViewController.h"
#import <AVFoundation/AVFoundation.h>

@interface ScanViewController () <AVCaptureMetadataOutputObjectsDelegate>
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@end

@implementation ScanViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.

    // Get the default video capture device (the back camera)
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // Wrap the device in a capture input
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
    // Create the metadata output that will deliver decoded codes
    AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];

    // Become the metadata output's delegate; callbacks are delivered on the main queue
    [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];

    // The scan region (rectOfInterest) is set further down, once the preview
    // layer's frame is known, because calculateScanRect depends on that frame

    // Create the capture session
    self.captureSession = [[AVCaptureSession alloc] init];
    // Use a high-quality preset so small barcodes stay readable
    [self.captureSession setSessionPreset:AVCaptureSessionPresetHigh];

    [self.captureSession addInput:input];
    [self.captureSession addOutput:output];

    // Code types to recognize (must be set after the output has been added to the session)
    output.metadataObjectTypes = @[AVMetadataObjectTypeQRCode, AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode128Code, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode93Code];

    // Create the full-screen preview layer that shows the camera feed
    self.videoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self.videoPreviewLayer.frame = CGRectMake(0, 0, [UIScreen mainScreen].bounds.size.width, [UIScreen mainScreen].bounds.size.height);

    // Limit scanning to the central region, now that the preview layer's frame exists
    output.rectOfInterest = [self calculateScanRect];

    [self.view.layer insertSublayer:self.videoPreviewLayer atIndex:0];

    // Start the camera and begin scanning
    [self.captureSession startRunning];
}

- (CGRect)calculateScanRect {
    CGSize previewSize = self.videoPreviewLayer.frame.size;
    CGSize scanSize = CGSizeMake(previewSize.width * 3/4, previewSize.height * 3/4);
    CGRect scanRect = CGRectMake((previewSize.width - scanSize.width) / 2,
                                 (previewSize.height - scanSize.height) / 2,
                                 scanSize.width, scanSize.height);
    // rectOfInterest is expressed in the capture output's coordinate space: normalized
    // to 0–1 and in landscape orientation, so x/y and width/height are swapped here
    // for a portrait UI
    scanRect = CGRectMake(scanRect.origin.y / previewSize.height,
                          scanRect.origin.x / previewSize.width,
                          scanRect.size.height / previewSize.height,
                          scanRect.size.width / previewSize.width);
    return scanRect;
}

#pragma mark - AVCaptureMetadataOutputObjectsDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    if (metadataObjects.count > 0) {
        // Stop scanning as soon as a code has been recognized
        [self.captureSession stopRunning];
        AVMetadataMachineReadableCodeObject *metadataObject = [metadataObjects objectAtIndex:0];
        // The decoded string (QR content or barcode number)
        NSLog(@"%@", metadataObject.stringValue);
    }
}

@end

Then the same thing in Swift.

/**
 Set up the capture session (if needed) and start scanning
 */
private func showScanCode() {
    if self.captureSession == nil {
        // Default video capture device (the back camera)
        let captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        let input: AVCaptureDeviceInput
        do {
            input = try AVCaptureDeviceInput(device: captureDevice)
        } catch {
            return
        }
        self.captureSession = AVCaptureSession()
        self.captureSession?.addInput(input)
        // Metadata output that delivers decoded codes to the delegate on the main queue
        let captureMetadataOutput = AVCaptureMetadataOutput()
        self.captureSession?.addOutput(captureMetadataOutput)
        captureMetadataOutput.setMetadataObjectsDelegate(self, queue: dispatch_get_main_queue())
        // Barcode types to recognize (set after the output is attached to the session)
        captureMetadataOutput.metadataObjectTypes = [AVMetadataObjectTypeEAN13Code,
                                                     AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode128Code,
                                                     AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode93Code]
        // Full-screen preview layer; the scan region is set once its frame is known
        self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
        self.videoPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
        self.videoPreviewLayer?.frame = CGRect(x: 0, y: 0, width: self.view.frame.width, height: self.view.frame.height)
        captureMetadataOutput.rectOfInterest = self.calculateScanRect()
        self.view.layer.addSublayer(self.videoPreviewLayer!)
        // Zoom in slightly so small or dense barcodes are easier to decode
        do {
            try captureDevice!.lockForConfiguration()
        } catch {
            return
        }
        let maxZoomFactor = captureDevice!.activeFormat.videoMaxZoomFactor
        let zoomFactor = maxZoomFactor < 2.0 ? maxZoomFactor : 2.0
        captureDevice!.videoZoomFactor = zoomFactor
        captureDevice!.unlockForConfiguration()
    }
    self.captureSession?.startRunning()
}
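
The article does not show where showScanCode() is driven from. A minimal sketch, assuming it lives in a view controller that owns the captureSession and videoPreviewLayer properties above (the lifecycle hooks are an assumption, not part of the original), could look like this:

// Hypothetical placement; names and lifecycle choices are assumptions.
override func viewDidAppear(animated: Bool) {
    super.viewDidAppear(animated)
    // Build the session on first appearance, then (re)start scanning
    showScanCode()
}

override func viewWillDisappear(animated: Bool) {
    super.viewWillDisappear(animated)
    // Stop the camera when the screen is no longer visible
    self.captureSession?.stopRunning()
}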

private func calculateScanRect() -> CGRect {
    let previewSize: CGSize = self.videoPreviewLayer!.frame.size
    let scanSize = CGSize(width: previewSize.width * 3/4, height: previewSize.height * 3/4)
    var scanRect = CGRect(x: (previewSize.width - scanSize.width) / 2,
                          y: (previewSize.height - scanSize.height) / 2,
                          width: scanSize.width, height: scanSize.height)
    // rectOfInterest is expressed in the capture output's coordinate space: normalized
    // to 0–1 and in landscape orientation. With the iPhone held in portrait the frame
    // is rotated 90°, so x/y and width/height are swapped here.
    scanRect = CGRect(x: scanRect.origin.y / previewSize.height,
                      y: scanRect.origin.x / previewSize.width,
                      width: scanRect.size.height / previewSize.height,
                      height: scanRect.size.width / previewSize.width)
    return scanRect
}
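
As an aside, AVCaptureVideoPreviewLayer can perform this layer-to-metadata coordinate conversion itself via metadataOutputRectOfInterestForRect(_:), which avoids the manual swap above. A sketch using the same Swift 2-era API names as the rest of the article (scanRectUsingPreviewLayer is a hypothetical helper name):

// Sketch: let the preview layer convert a rect in its own coordinates into the
// normalized, rotated coordinate space that rectOfInterest expects.
private func scanRectUsingPreviewLayer() -> CGRect {
    let previewSize = self.videoPreviewLayer!.frame.size
    let scanSize = CGSize(width: previewSize.width * 3/4, height: previewSize.height * 3/4)
    let scanRect = CGRect(x: (previewSize.width - scanSize.width) / 2,
                          y: (previewSize.height - scanSize.height) / 2,
                          width: scanSize.width, height: scanSize.height)
    return self.videoPreviewLayer!.metadataOutputRectOfInterestForRect(scanRect)
}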

// MARK: - AVCaptureMetadataOutputObjectsDelegate

func captureOutput(captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [AnyObject]!, fromConnection connection: AVCaptureConnection!) {
    if let metadatas = metadataObjects {
        if metadatas.count > 0 {
            // The first recognized code; stringValue holds the decoded text
            let metadata = metadatas[0] as? AVMetadataMachineReadableCodeObject
            if let barCode = metadata?.stringValue {
                print("barCode: \(barCode)")
            }
        }
    }
}
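
One more practical note: camera access requires user consent. On iOS 10 and later the Info.plist must contain an NSCameraUsageDescription entry, and it is worth checking the authorization status before starting the session. A rough sketch with the same era's API names (startScanningIfAuthorized is a hypothetical wrapper around showScanCode, not from the original):

// Sketch: ask for camera permission before building the capture session.
private func startScanningIfAuthorized() {
    switch AVCaptureDevice.authorizationStatusForMediaType(AVMediaTypeVideo) {
    case .Authorized:
        self.showScanCode()
    case .NotDetermined:
        AVCaptureDevice.requestAccessForMediaType(AVMediaTypeVideo) { granted in
            dispatch_async(dispatch_get_main_queue()) {
                if granted { self.showScanCode() }
            }
        }
    default:
        // .Denied or .Restricted: scanning is not possible; ask the user to enable
        // camera access in Settings
        break
    }
}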
