This article covers several ways to recognize QR codes on iOS, mimicking WeChat's long-press recognition.

Reference: https://github.com/nglszs/BCQRcode
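Note: the code below references BCWidth, BCHeight, and BCScreen, which are not defined in these excerpts; they appear to be screen-size convenience macros from the linked project. A minimal sketch of plausible definitions (an assumption, not copied from the repository):

// Assumed screen-size convenience macros; the linked project presumably defines
// equivalents in a shared header.
#define BCScreen [UIScreen mainScreen].bounds
#define BCWidth  [UIScreen mainScreen].bounds.size.width
#define BCHeight [UIScreen mainScreen].bounds.size.height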
Approach 1: generate a QR code in the app and recognize it with a long press

#import <UIKit/UIKit.h>
@interface ViewController : UIViewController
@end
**************
#import "ViewController.h"
@interface ViewController ()
@end
@implementation ViewController
- (void)viewDidLoad {
  [super viewDidLoad];
  self.title = @"QR Code";
  UIBarButtonItem *leftBtn = [[UIBarButtonItem alloc]
                initWithTitle:@"Generate"
                style:UIBarButtonItemStylePlain
                target:self
                action:@selector(backView)];
  self.navigationItem.leftBarButtonItem = leftBtn;
  UIBarButtonItem *rightBtn = [[UIBarButtonItem alloc]
                 initWithTitle:@"Scan"
                 style:UIBarButtonItemStylePlain
                 target:self
                 action:@selector(ScanView)];
  self.navigationItem.rightBarButtonItem = rightBtn;
  // Long-press anywhere on the view to recognize a QR code shown on screen
  // (generate one with the left bar button first, then long-press it).
  UILongPressGestureRecognizer *longPress = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(readCode:)];
  [self.view addGestureRecognizer:longPress];
}
- (void)readCode:(UILongPressGestureRecognizer *)pressSender {
  if (pressSender.state == UIGestureRecognizerStateBegan) {
    // 1. Take a snapshot of the current view.
    UIGraphicsBeginImageContextWithOptions(self.view.bounds.size, YES, 0);
    CGContextRef context = UIGraphicsGetCurrentContext();
    [self.view.layer renderInContext:context];
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    // 2. Detect QR codes in the snapshot with Core Image.
    CIImage *ciImage = [[CIImage alloc] initWithCGImage:image.CGImage options:nil];
    CIContext *ciContext = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer : @(YES)}]; // force software rendering
    CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeQRCode context:ciContext options:@{CIDetectorAccuracy : CIDetectorAccuracyHigh}]; // high-accuracy QR code detector
    NSArray *features = [detector featuresInImage:ciImage];
    for (CIQRCodeFeature *feature in features) {
      NSLog(@"msg = %@", feature.messageString); // the decoded QR code content
      // 3. Push the result page with the decoded content.
      ResultViewController *resultVC = [[ResultViewController alloc] init];
      resultVC.contentString = feature.messageString;
      [self.navigationController pushViewController:resultVC animated:NO];
    }
  }
}
- (void)backView {
  UIImageView *codeImageView = [[UIImageView alloc] initWithFrame:CGRectMake((BCWidth - 200)/2, 100, 200, 200)];
  codeImageView.layer.borderColor = [UIColor orangeColor].CGColor;
  codeImageView.layer.borderWidth = 1;
  [self.view addSubview:codeImageView];
  // Generate a QR code with a small logo drawn in the center.
  [codeImageView creatCode:@"https://www.baidu.com" Image:[UIImage imageNamed:@"bg"] andImageCorner:4];
  // Generate a plain QR code without a logo:
  // [codeImageView creatCode:@"some plain text" Image:nil andImageCorner:4];
}
- (void)ScanView {
  [self.navigationController pushViewController:[ScanCodeViewController new] animated:YES];
}
- (void)didReceiveMemoryWarning {
  [super didReceiveMemoryWarning];
  // Dispose of any resources that can be recreated.
}
@end
************ UIImageView+CreatCode: a category that generates QR codes
#import <UIKit/UIKit.h>
@interface UIImageView (CreatCode)
/**
 Generate a QR code encoding codeContent and set it as this image view's image.
 codeImage is an optional logo drawn in the center; pass nil to get a plain QR code.
 imageCorner is the corner radius applied to the logo.
 */
- (void)creatCode:(NSString *)codeContent Image:(UIImage *)codeImage andImageCorner:(CGFloat)imageCorner;
@end
**************
#import "UIImageView+CreatCode.h"
#define ImageSize self.bounds.size.width
@implementation UIImageView (CreatCode)
- (void)creatCode:(NSString *)codeContent Image:(UIImage *)codeImage andImageCorner:(CGFloat)imageCorner {
  // Core Image rendering is relatively slow, so generate on a background queue
  // and assign the result on the main queue.
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    CIFilter *codeFilter = [CIFilter filterWithName:@"CIQRCodeGenerator"];
    // Reset the filter to its default values.
    [codeFilter setDefaults];
    NSData *codeData = [codeContent dataUsingEncoding:NSUTF8StringEncoding];
    // Set the content to encode.
    [codeFilter setValue:codeData forKey:@"inputMessage"];
    // Get the generated QR code image (small and low-resolution by default).
    CIImage *outputImage = [codeFilter outputImage];
    // Scale it up without interpolation so the code stays sharp instead of blurry.
    UIImage *translateImage = [self creatUIImageFromCIImage:outputImage andSize:ImageSize];
    // Overlay the rounded-corner logo in the center of the QR code.
    UIImage *resultImage = [self setSuperImage:translateImage andSubImage:[self imageCornerRadius:imageCorner andImage:codeImage]];
    dispatch_async(dispatch_get_main_queue(), ^{
      self.image = resultImage;
    });
  });
}
// Scale the CIImage up to the given size (the image view's width) so the QR code is not blurry.
- (UIImage *)creatUIImageFromCIImage:(CIImage *)image andSize:(CGFloat)size {
  // Draw into a grayscale bitmap context with interpolation disabled so the scaled-up code keeps crisp edges.
  CGRect extent = CGRectIntegral(image.extent);
  CGFloat scale = MIN(size/CGRectGetWidth(extent), size/CGRectGetHeight(extent));
  size_t width = CGRectGetWidth(extent) * scale;
  size_t height = CGRectGetHeight(extent) * scale;
  CGColorSpaceRef colorRef = CGColorSpaceCreateDeviceGray();
  CGContextRef contextRef = CGBitmapContextCreate(nil, width, height, 8, 0, colorRef, (CGBitmapInfo)kCGImageAlphaNone);
  CIContext *context = [CIContext contextWithOptions:nil];
  CGImageRef imageRef = [context createCGImage:image fromRect:extent];
  CGContextSetInterpolationQuality(contextRef, kCGInterpolationNone);
  CGContextScaleCTM(contextRef, scale, scale);
  CGContextDrawImage(contextRef, extent, imageRef);
  CGImageRef newImageRef = CGBitmapContextCreateImage(contextRef);
  CGContextRelease(contextRef);
  CGImageRelease(imageRef);
  CGColorSpaceRelease(colorRef);
  UIImage *result = [UIImage imageWithCGImage:newImageRef];
  CGImageRelease(newImageRef);
  return result;
}
// Clip the logo image to rounded corners before it is overlaid on the QR code.
- (UIImage *)imageCornerRadius:(CGFloat)cornerRadius andImage:(UIImage *)image {
  // The logo is drawn at 1/5 of the QR code's size; a much larger logo can make the code unreadable.
  CGRect frame = CGRectMake(0, 0, ImageSize/5, ImageSize/5);
  UIGraphicsBeginImageContextWithOptions(frame.size, NO, 0);
  [[UIBezierPath bezierPathWithRoundedRect:frame cornerRadius:cornerRadius] addClip];
  [image drawInRect:frame];
  UIImage *clipImage = UIGraphicsGetImageFromCurrentImageContext();
  UIGraphicsEndImageContext();
  return clipImage;
}
- (UIImage *)setSuperImage:(UIImage *)superImage andSubImage:(UIImage *)subImage {
  // Draw the QR code first, then draw the logo centered on top of it.
  UIGraphicsBeginImageContextWithOptions(superImage.size, YES, 0);
  [superImage drawInRect:CGRectMake(0, 0, superImage.size.width, superImage.size.height)];
  [subImage drawInRect:CGRectMake((ImageSize - ImageSize/5)/2, (ImageSize - ImageSize/5)/2, subImage.size.width, subImage.size.height)];
  UIImage *resultImage = UIGraphicsGetImageFromCurrentImageContext();
  UIGraphicsEndImageContext();
  return resultImage;
}
@end
*************** ScanCodeViewController: scanning codes with the camera
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
@interface ScanCodeViewController : UIViewController<AVCaptureMetadataOutputObjectsDelegate>
{
  AVCaptureSession * session;
  AVCaptureMetadataOutput * output;
  NSInteger lineNum;
  BOOL upOrDown;
  NSTimer *lineTimer;
}
@property (nonatomic, strong) UIImageView *lineImageView;
@property (nonatomic, strong) UIImageView *backImageView;
@end
******************
#import "ScanCodeViewController.h"
@implementation ScanCodeViewController
- (void)viewDidLoad {
  [super viewDidLoad];
  if ([[[UIDevice currentDevice] systemVersion] floatValue] >= 7.0) {
        AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
        if (authStatus == AVAuthorizationStatusDenied || authStatus == AVAuthorizationStatusRestricted)
        {
          [[[UIAlertView alloc] initWithTitle:nil message:@"Camera access is disabled. Please allow camera access for this app in Settings > Privacy > Camera." delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil, nil] show];
          return;
        } else {
          // Get the camera device.
          AVCaptureDevice * device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
          // Create the device input.
          AVCaptureDeviceInput * input = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
          // Create the metadata output.
          output = [[AVCaptureMetadataOutput alloc]init];
          // Deliver metadata callbacks on the main queue.
          [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
          // Restrict the scan area. rectOfInterest uses normalized coordinates with the axes
          // swapped relative to the portrait preview: (y/H, x/W, height/H, width/W).
          [output setRectOfInterest:CGRectMake(64/BCHeight, (BCWidth - 320)/2/BCWidth, 320/BCHeight, 320/BCWidth)];
          // Create the capture session.
          session = [[AVCaptureSession alloc]init];
          // Set the capture quality.
          [session setSessionPreset:AVCaptureSessionPresetHigh];
          [session addInput:input];
          [session addOutput:output];
          // Barcode types to recognize; this must be set after the output has been added to the session.
          output.metadataObjectTypes = @[AVMetadataObjectTypeQRCode, AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode128Code];
          AVCaptureVideoPreviewLayer *layer = [AVCaptureVideoPreviewLayer layerWithSession:session];
          layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
          layer.frame = self.view.layer.bounds;
          [self.view.layer addSublayer:layer];
      }
  }
  [self _initView];
}
// Build the scan overlay: a background image plus an animated scan line drawn above the preview layer.
- (void)_initView {
  // Background overlay image.
  _backImageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 64, BCWidth, BCHeight - 64)];
  _backImageView.image = [UIImage imageNamed:@"camera_bg"];
  [self.view addSubview:_backImageView];
  _lineImageView = [[UIImageView alloc] initWithFrame:CGRectMake(16, 15, BCWidth - 32, 1)];
  _lineImageView.backgroundColor = [UIColor orangeColor];
  [_backImageView addSubview:_lineImageView];
  // Animate the scan line with a timer.
  lineNum = 0;
  upOrDown = NO;
  lineTimer = [NSTimer scheduledTimerWithTimeInterval:0.01 target:self selector:@selector(lineAnimation) userInfo:nil repeats:YES];
}
-(void)lineAnimation {
  if (upOrDown == NO) {
    lineNum ++;
    _lineImageView.frame = CGRectMake(CGRectGetMinX(_lineImageView.frame), 15 + lineNum, BCWidth - 32, 1);
    CGFloat tempHeight = CGRectGetHeight(_backImageView.frame) * 321/542;
    NSInteger height = (NSInteger)tempHeight + 20;
    if (lineNum == height) {
      upOrDown = YES;
    }
  }
  else {
    lineNum --;
    _lineImageView.frame = CGRectMake(CGRectGetMinX(_lineImageView.frame), 15 + lineNum, BCWidth - 32, 1);
    if (lineNum == 0) {
      upOrDown = NO;
    }
  }
}
#pragma mark AVCaptureMetadataOutputObjectsDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
  if ([metadataObjects count] > 0) {
    [session stopRunning]; // stop scanning once a code has been read
    AVMetadataMachineReadableCodeObject *metadataObject = [metadataObjects firstObject];
    ResultViewController *resultVC = [[ResultViewController alloc] init];
    resultVC.contentString = metadataObject.stringValue;
    [self.navigationController pushViewController:resultVC animated:NO];
  }
}
- (void)viewWillAppear:(BOOL)animated {
  [super viewWillAppear:animated];
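  // Note: -startRunning is a blocking call; dispatching it to a background queue keeps the UI responsive.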
  [session startRunning];
  [lineTimer setFireDate:[NSDate distantPast]];
}
- (void)viewWillDisappear:(BOOL)animated {
  [super viewWillDisappear:animated];
  [session stopRunning];
  [lineTimer setFireDate:[NSDate distantFuture]];
  if (![self.navigationController.viewControllers containsObject:self]) { // invalidate the timer when this controller is popped
    [lineTimer invalidate];
    lineTimer = nil;
  }
}
- (void)dealloc {
  NSLog(@"ScanCodeViewController dealloc");
}
@end
******* ResultViewController: shows the scan result (loads URLs in a web view, plain text in a label)
#import <UIKit/UIKit.h>
@interface ResultViewController : UIViewController
@property (nonatomic, copy) NSString *contentString;
@end
********
#import "ResultViewController.h"
#import <WebKit/WebKit.h>
@implementation ResultViewController
- (void)viewDidLoad {
  [super viewDidLoad];
  // If the content looks like a URL, load it in a web view; otherwise show it in a label.
  if ([_contentString hasPrefix:@"http"]) {
    WKWebView *showView = [[WKWebView alloc] initWithFrame:BCScreen];
    NSURLRequest *codeRequest = [NSURLRequest requestWithURL:[NSURL URLWithString:_contentString]];
    [showView loadRequest:codeRequest];
    [self.view addSubview:showView];
  } else {
    UILabel *showLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, 80, 200, 50)];
    showLabel.center = self.view.center;
    showLabel.font = [UIFont boldSystemFontOfSize:16];
    showLabel.text = [NSString stringWithFormat:@"Result: %@", _contentString];
    showLabel.numberOfLines = 0;
    [self.view addSubview:showLabel];
  }
}
@end
Approach 2: recognizing a QR code on a web page
Long-press recognition of QR codes in an iOS web view.
Idea:
1. Take a screenshot while the web view is being long-pressed, then scan the screenshot for a QR code. Drawback: if only part of the code is captured, it cannot be decoded. (A minimal sketch of this variant follows below.)
2. Inject JavaScript into the web view to get the image under the touch point, then download and decode that image. Drawback: if the image is large and has to be downloaded first, the user experience suffers. The code below takes this approach.
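The article only shows code for the second idea. Below is a minimal sketch of the first, screenshot-based idea, reusing the Core Image detector from approach 1. The method name is illustrative and assumes a controller that exposes the web view as self.webView; it is not part of the reference project.

// Hypothetical handler for the screenshot-based variant: snapshot the web view
// on long press and scan the snapshot for QR codes.
- (void)recognizeCodeInWebViewSnapshot:(UILongPressGestureRecognizer *)recognizer {
  if (recognizer.state != UIGestureRecognizerStateBegan) {
    return;
  }
  // Render the currently visible web view content into an image.
  UIGraphicsBeginImageContextWithOptions(self.webView.bounds.size, YES, 0);
  [self.webView.layer renderInContext:UIGraphicsGetCurrentContext()];
  UIImage *snapshot = UIGraphicsGetImageFromCurrentImageContext();
  UIGraphicsEndImageContext();
  // Detect QR codes in the snapshot; a code that is only partially on screen will not decode.
  CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeQRCode
                                            context:nil
                                            options:@{CIDetectorAccuracy : CIDetectorAccuracyHigh}];
  NSArray *features = [detector featuresInImage:[CIImage imageWithCGImage:snapshot.CGImage]];
  for (CIQRCodeFeature *feature in features) {
    NSLog(@"QR content from snapshot: %@", feature.messageString);
  }
}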

#import "CVWebViewController.h"
@interface CVWebViewController ()<UIGestureRecognizerDelegate>
@property (weak, nonatomic) IBOutlet UIWebView *webView;
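// Note: UIWebView and stringByEvaluatingJavaScriptFromString: are deprecated; WKWebView with
// evaluateJavaScript:completionHandler: is the modern equivalent.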
@end
@implementation CVWebViewController
- (void)viewDidLoad
{
  [super viewDidLoad];
  [self.webView loadRequest:[NSURLRequest requestWithURL:[NSURL URLWithString:@"http://mp.weixin.qq.com/s?__biz=MzI2ODAzODAzMw==&mid=2650057120&idx=2&sn=c875f7d03ea3823e8dcb3dc4d0cff51d&scene=0#wechat_redirect"]]];
  UILongPressGestureRecognizer *longPressed = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(longPressed:)];
  longPressed.delegate = self;
  [self.webView addGestureRecognizer:longPressed];
}
- (void)longPressed:(UILongPressGestureRecognizer *)recognizer
{
  if (recognizer.state != UIGestureRecognizerStateBegan) {
    return;
  }
  CGPoint touchPoint = [recognizer locationInView:self.webView];
  NSString *js = [NSString stringWithFormat:@"document.elementFromPoint(%f, %f).src", touchPoint.x, touchPoint.y];
  NSString *imageUrl = [self.webView stringByEvaluatingJavaScriptFromString:js];
  if (imageUrl.length == 0) {
    return;
  }
  NSLog(@"image url:%@",imageUrl);
  NSData *data = [NSData dataWithContentsOfURL:[NSURL URLWithString:imageUrl]];
  UIImage *image = [UIImage imageWithData:data];
  if (image) {
    //......
    //save image or Extract QR code
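    // A minimal extraction sketch (not in the original): decode the downloaded image
    // with CIDetector, the same API used in approach 1.
    CIImage *ciImage = image.CIImage ?: [CIImage imageWithCGImage:image.CGImage];
    CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeQRCode
                                              context:nil
                                              options:@{CIDetectorAccuracy : CIDetectorAccuracyHigh}];
    CIQRCodeFeature *feature = [[detector featuresInImage:ciImage] firstObject];
    if (feature.messageString.length > 0) {
      NSLog(@"QR content from pressed image: %@", feature.messageString);
      // A ResultViewController could be pushed here with feature.messageString, as in approach 1.
    }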
  }
}
-(BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer
{
  return YES;
}
@end
That covers the various ways to recognize QR codes on iOS in the style of WeChat's long-press recognition. I hope it is helpful; if you have any questions, please leave a comment and they will be answered promptly. Thank you for supporting the site.
