iOS, Swift — QR코드/바코드 스캔
Objective-C 버전:
#import "ScanViewController.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
// Private class extension.
// FIX: declare AVCaptureMetadataOutputObjectsDelegate conformance —
// -setMetadataObjectsDelegate:queue: in -viewDidLoad passes self as the
// delegate, so the class must adopt the protocol.
@interface ScanViewController () <AVCaptureMetadataOutputObjectsDelegate>
// Owns the capture pipeline (camera input -> metadata output).
@property (nonatomic, strong) AVCaptureSession *captureSession;
// Live camera preview, inserted behind the view's other sublayers.
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@end
@implementation ScanViewController

// Builds the capture pipeline (camera -> metadata output -> preview layer)
// and starts scanning for QR codes and common 1D barcodes.
- (void)viewDidLoad {
    [super viewDidLoad];

    // Default video (back) camera; nil on devices without a camera.
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // FIX: original passed error:nil and fed a possibly-nil input straight
    // into -addInput:, which raises NSInvalidArgumentException. Capture the
    // error and bail out cleanly instead.
    NSError *inputError = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&inputError];
    if (!input) {
        NSLog(@"ScanViewController: unable to create camera input: %@", inputError);
        return;
    }

    AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
    // Metadata callbacks delivered on the main queue (we only log / stop).
    [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];

    self.captureSession = [[AVCaptureSession alloc] init];
    [self.captureSession setSessionPreset:AVCaptureSessionPresetHigh];

    // FIX: guard with canAddInput:/canAddOutput: instead of adding blindly.
    if ([self.captureSession canAddInput:input]) {
        [self.captureSession addInput:input];
    }
    if ([self.captureSession canAddOutput:output]) {
        [self.captureSession addOutput:output];
    }

    // Recognized symbologies. Must be set AFTER the output is attached to
    // the session, otherwise availableMetadataObjectTypes is empty.
    output.metadataObjectTypes = @[AVMetadataObjectTypeQRCode,
                                   AVMetadataObjectTypeEAN13Code,
                                   AVMetadataObjectTypeEAN8Code,
                                   AVMetadataObjectTypeCode128Code,
                                   AVMetadataObjectTypeCode39Code,
                                   AVMetadataObjectTypeCode93Code];

    self.videoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self.videoPreviewLayer.frame = [UIScreen mainScreen].bounds;
    [self.view.layer insertSublayer:self.videoPreviewLayer atIndex:0];

    // FIX: original computed rectOfInterest BEFORE the preview layer was
    // created, so -calculateScanRect divided by a 0x0 size. Compute it only
    // now that the layer's frame is known.
    output.rectOfInterest = [self calculateScanRect];

    [self.captureSession startRunning];
}

// Returns the normalized rect-of-interest for the metadata output: the
// centered 3/4-size area of the preview, converted to AVFoundation's
// coordinate space (origin/size normalized to 0...1 with x/y swapped,
// because the sensor is rotated 90 degrees relative to portrait UI).
- (CGRect)calculateScanRect {
    CGSize previewSize = self.videoPreviewLayer.frame.size;
    // Guard: before layout the preview size is zero; fall back to the
    // framework default (whole frame) instead of dividing by zero.
    if (previewSize.width <= 0.0 || previewSize.height <= 0.0) {
        return CGRectMake(0, 0, 1, 1);
    }
    CGSize scanSize = CGSizeMake(previewSize.width * 3 / 4, previewSize.height * 3 / 4);
    CGRect scanRect = CGRectMake((previewSize.width - scanSize.width) / 2,
                                 (previewSize.height - scanSize.height) / 2,
                                 scanSize.width, scanSize.height);
    // Normalize and swap axes for the rotated capture coordinate space.
    scanRect = CGRectMake(scanRect.origin.y / previewSize.height,
                          scanRect.origin.x / previewSize.width,
                          scanRect.size.height / previewSize.height,
                          scanRect.size.width / previewSize.width);
    return scanRect;
}

#pragma mark - AVCaptureMetadataOutputObjectsDelegate

// Called whenever a recognized code is in the rect of interest.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    if (metadataObjects.count > 0) {
        // Stop scanning after the first hit so we don't fire repeatedly.
        [self.captureSession stopRunning];
        AVMetadataMachineReadableCodeObject *metadataObject = [metadataObjects firstObject];
        // TODO: surface the decoded payload to the caller instead of logging.
        NSLog(@"%@", metadataObject.stringValue);
    }
}

@end
다음은 Swift 버전입니다.
/**
 Lazily builds the capture session on first call (camera input, metadata
 output, preview layer, up-to-2x zoom) and then starts scanning.
 Safe to call repeatedly: subsequent calls only restart the session.
 */
private func showScanCode() {
    if self.captureSession == nil {
        // FIX: original force-unwrapped captureDevice! later on; guard here
        // so devices without a camera (e.g. the simulator) don't crash.
        guard let captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo) else {
            return
        }
        // FIX: keep the concrete type instead of AnyObject! + `as!` cast.
        let input: AVCaptureDeviceInput
        do {
            input = try AVCaptureDeviceInput(device: captureDevice)
        } catch {
            return
        }
        let session = AVCaptureSession()
        self.captureSession = session
        // FIX: guard with canAddInput/canAddOutput instead of adding blindly.
        if session.canAddInput(input) {
            session.addInput(input)
        }
        let captureMetadataOutput = AVCaptureMetadataOutput()
        if session.canAddOutput(captureMetadataOutput) {
            session.addOutput(captureMetadataOutput)
        }
        captureMetadataOutput.setMetadataObjectsDelegate(self, queue: dispatch_get_main_queue())
        // 1D barcode symbologies to recognize (set after attaching to the session).
        captureMetadataOutput.metadataObjectTypes = [AVMetadataObjectTypeEAN13Code,
            AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode128Code,
            AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode93Code]
        self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
        self.videoPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
        self.videoPreviewLayer?.frame = CGRect(x: 0, y: 0, width: self.view.frame.width, height: self.view.frame.height)
        // Rect of interest must be computed after the preview frame is set.
        captureMetadataOutput.rectOfInterest = self.calculateScanRect()
        self.view.layer.addSublayer(self.videoPreviewLayer!)
        // Zoom in (capped at the device maximum) to make small codes easier
        // to capture; bail out if the device cannot be configured.
        do {
            try captureDevice.lockForConfiguration()
        } catch {
            return
        }
        let maxZoomFactor = captureDevice.activeFormat.videoMaxZoomFactor
        captureDevice.videoZoomFactor = min(maxZoomFactor, 2.0)
        captureDevice.unlockForConfiguration()
    }
    self.captureSession?.startRunning()
}
/// Returns the normalized rect-of-interest for the metadata output: the
/// centered 3/4-size area of the preview, converted to AVFoundation's
/// coordinate space (values normalized to 0...1 with x/y swapped, because
/// the sensor is rotated 90 degrees relative to portrait UI).
private func calculateScanRect() -> CGRect {
    // FIX: original force-unwrapped videoPreviewLayer! and divided by a
    // possibly-zero size. Fall back to the framework default (whole frame).
    guard let previewSize = self.videoPreviewLayer?.frame.size
        where previewSize.width > 0 && previewSize.height > 0 else {
        return CGRect(x: 0, y: 0, width: 1, height: 1)
    }
    let scanSize = CGSize(width: previewSize.width * 3 / 4,
                          height: previewSize.height * 3 / 4)
    let scanOrigin = CGPoint(x: (previewSize.width - scanSize.width) / 2,
                             y: (previewSize.height - scanSize.height) / 2)
    // Normalize and swap axes for the rotated capture coordinate space.
    return CGRect(x: scanOrigin.y / previewSize.height,
                  y: scanOrigin.x / previewSize.width,
                  width: scanSize.height / previewSize.height,
                  height: scanSize.width / previewSize.width)
}
// MARK: - AVCaptureMetadataOutputObjectsDelegate

/// Logs the payload of the first machine-readable code detected, if any.
func captureOutput(captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [AnyObject]!, fromConnection connection: AVCaptureConnection!) {
    guard let codes = metadataObjects where !codes.isEmpty else {
        return
    }
    // Only the first detected object is inspected; non-machine-readable
    // metadata (e.g. faces) is ignored by the conditional cast.
    let readable = codes[0] as? AVMetadataMachineReadableCodeObject
    guard let barCode = readable?.stringValue else {
        return
    }
    print("barCode: \(barCode)")
}
이 내용에 흥미가 있습니까?
현재 기사가 여러분의 문제를 해결하지 못하는 경우 AI 엔진은 머신러닝 분석(스마트 모델이 방금 만들어져 부정확한 경우가 있을 수 있음)을 통해 가장 유사한 기사를 추천합니다:
다양한 언어의 JSON — JSON은 Javascript 표기법을 사용하여 데이터 구조를 레이아웃하는 데이터 형식입니다. 그러나 Javascript가 코드에서 이러한 구조를 나타낼 수 있는 유일한 언어는 아닙니다. 저는 일반적으로 '객체'{}...
텍스트를 자유롭게 공유하거나 복사할 수 있습니다.하지만 이 문서의 URL은 참조 URL로 남겨 두십시오.
CC BY-SA 2.5, CC BY-SA 3.0 및 CC BY-SA 4.0에 따라 라이센스가 부여됩니다.