Recently people keep asking me whether I have a QR code demo, so I spent some time digging into it. I hope this helps!
Follow for daily updates: http://weibo.com/hanjunqiang (Sina Weibo)
Set the root view controller:
self.window.rootViewController = [[UINavigationController alloc]initWithRootViewController:[SecondViewController new]];
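For context, here is a minimal sketch of where that line lives, assuming a standard AppDelegate without storyboards:
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
{
    // Sketch (assumption: the window is created manually, no storyboard)
    self.window = [[UIWindow alloc] initWithFrame:[UIScreen mainScreen].bounds];
    self.window.rootViewController = [[UINavigationController alloc] initWithRootViewController:[SecondViewController new]];
    [self.window makeKeyAndVisible];
    return YES;
}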
Generating a QR code:
//  Created by 韩俊强 on 15/11/27.
//  Copyright (c) 2015 韩俊强. All rights reserved.
//
#import "SecondViewController.h"
@interface SecondViewController ()
@property (nonatomic, strong) UITextField *tfCode;
@property (nonatomic, strong) UIButton *btnGenerate;
@property (nonatomic, strong) UIImageView *imageView;
@end
@implementation SecondViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    CGSize windowSize = [UIScreen mainScreen].bounds.size;
    
    self.tfCode = [[UITextField alloc] initWithFrame:CGRectMake(10, 64, windowSize.width-100, 40)];
    [self.view addSubview:self.tfCode];
    self.tfCode.borderStyle = UITextBorderStyleRoundedRect;
    
    self.btnGenerate = [[UIButton alloc] initWithFrame:CGRectMake(windowSize.width-100, 64, 90, 40)];
    [self.view addSubview:self.btnGenerate];
    [self.btnGenerate addTarget:self action:@selector(actionGenerate) forControlEvents:UIControlEventTouchUpInside];
    self.btnGenerate.backgroundColor = [UIColor lightGrayColor];
    [self.btnGenerate setTitle:@"生成" forState:UIControlStateNormal];
    [self.btnGenerate setTitleColor:[UIColor blackColor] forState:UIControlStateNormal];
    
    self.imageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 300, 300)];
    [self.view addSubview:self.imageView];
    self.imageView.center = CGPointMake(windowSize.width/2, windowSize.height/2);
    
    self.tfCode.text = @"http://www.baidu.com";
}
- (void)actionGenerate
{
    NSString *text = self.tfCode.text;
    
    NSData *stringData = [text dataUsingEncoding: NSUTF8StringEncoding];
    
    // Generate the QR code
    CIFilter *qrFilter = [CIFilter filterWithName:@"CIQRCodeGenerator"];
    [qrFilter setValue:stringData forKey:@"inputMessage"];
    // Error-correction level: L, M, Q or H (M recovers roughly 15% damage)
    [qrFilter setValue:@"M" forKey:@"inputCorrectionLevel"];
    
    UIColor *onColor = [UIColor blackColor];
    UIColor *offColor = [UIColor whiteColor];
    
    // Colorize: inputColor0 is the foreground, inputColor1 the background
    CIFilter *colorFilter = [CIFilter filterWithName:@"CIFalseColor" keysAndValues:
                             @"inputImage", qrFilter.outputImage,
                             @"inputColor0", [CIColor colorWithCGColor:onColor.CGColor],
                             @"inputColor1", [CIColor colorWithCGColor:offColor.CGColor],
                             nil];
    
    CIImage *qrImage = colorFilter.outputImage;
    
    // Draw: scale the small CIImage up without interpolation so the modules stay crisp
    CGSize size = CGSizeMake(300, 300);
    CGImageRef cgImage = [[CIContext contextWithOptions:nil] createCGImage:qrImage fromRect:qrImage.extent];
    UIGraphicsBeginImageContext(size);
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGContextSetInterpolationQuality(context, kCGInterpolationNone); // no smoothing
    CGContextScaleCTM(context, 1.0, -1.0); // flip: Core Image's origin is bottom-left
    CGContextDrawImage(context, CGContextGetClipBoundingBox(context), cgImage);
    UIImage *codeImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    
    CGImageRelease(cgImage);
    
    self.imageView.image = codeImage;
}
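One caveat: UIGraphicsBeginImageContext renders at 1x, so the output can look blurry on Retina screens. Below is a minimal sketch of the same drawing step rendered at screen scale; the method name imageFromCIImage:size: is just for illustration, not part of the original demo.
- (UIImage *)imageFromCIImage:(CIImage *)qrImage size:(CGSize)size
{
    CGImageRef cgImage = [[CIContext contextWithOptions:nil] createCGImage:qrImage fromRect:qrImage.extent];
    // Render at the screen's scale so the QR code stays sharp on Retina
    UIGraphicsBeginImageContextWithOptions(size, NO, [UIScreen mainScreen].scale);
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGContextSetInterpolationQuality(context, kCGInterpolationNone);
    CGContextScaleCTM(context, 1.0, -1.0);
    CGContextDrawImage(context, CGContextGetClipBoundingBox(context), cgImage);
    UIImage *result = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    CGImageRelease(cgImage);
    return result;
}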
Scanning a QR code:
//  Created by 韩俊强 on 15/11/27.
//  Copyright (c) 2015 韩俊强. All rights reserved.
//
#import "RootViewController.h"
#import <AVFoundation/AVFoundation.h>
@interface RootViewController ()<AVCaptureMetadataOutputObjectsDelegate,UIAlertViewDelegate>
@property (nonatomic, strong) UIView *scanRectView;
// Capture device (the camera)
@property (strong, nonatomic) AVCaptureDevice            *device;
// Input: wraps the capture device
@property (strong, nonatomic) AVCaptureDeviceInput       *input;
// Output: delivers the decoded metadata objects
@property (strong, nonatomic) AVCaptureMetadataOutput    *output;
// Session: the bridge connecting input and output
@property (strong, nonatomic) AVCaptureSession           *session;
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *preview;
@end
@implementation RootViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    CGSize windowSize = [UIScreen mainScreen].bounds.size;
    
    CGSize scanSize = CGSizeMake(windowSize.width*3/4, windowSize.width*3/4);
    CGRect scanRect = CGRectMake((windowSize.width-scanSize.width)/2, (windowSize.height-scanSize.height)/2, scanSize.width, scanSize.height);
    
    // rectOfInterest uses a rotated, normalized coordinate space:
    // x/width are taken from the vertical axis, y/height from the horizontal one
    scanRect = CGRectMake(scanRect.origin.y/windowSize.height, scanRect.origin.x/windowSize.width, scanRect.size.height/windowSize.height, scanRect.size.width/windowSize.width);
    
    self.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    
    self.input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:nil];
    
    self.output = [[AVCaptureMetadataOutput alloc]init];
    [self.output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    
    self.session = [[AVCaptureSession alloc]init];
    // Use a lower preset on 3.5/4-inch devices, high quality elsewhere
    [self.session setSessionPreset:([UIScreen mainScreen].bounds.size.height<500)?AVCaptureSessionPreset640x480:AVCaptureSessionPresetHigh];
    [self.session addInput:self.input];
    [self.session addOutput:self.output];
    // Report only QR codes, and only those inside the on-screen frame
    self.output.metadataObjectTypes=@[AVMetadataObjectTypeQRCode];
    self.output.rectOfInterest = scanRect;
    
    self.preview = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self.preview.frame = [UIScreen mainScreen].bounds;
    [self.view.layer insertSublayer:self.preview atIndex:0];
    
    self.scanRectView = [UIView new];
    [self.view addSubview:self.scanRectView];
    self.scanRectView.frame = CGRectMake(0, 0, scanSize.width, scanSize.height);
    self.scanRectView.center = CGPointMake(CGRectGetMidX([UIScreen mainScreen].bounds), CGRectGetMidY([UIScreen mainScreen].bounds));
    self.scanRectView.layer.borderColor = [UIColor redColor].CGColor;
    self.scanRectView.layer.borderWidth = 1;
    
    
    // Start capturing
    [self.session startRunning];
    
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    if (metadataObjects.count == 0) {
        return;
    }
    
    // Pause scanning while the result is displayed
    [self.session stopRunning];
    
    AVMetadataMachineReadableCodeObject *metadataObject = metadataObjects.firstObject;
    // Show the decoded string
    UIAlertView *alert = [[UIAlertView alloc] initWithTitle:metadataObject.stringValue message:@"" delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
    [alert show];
}
// Resume scanning once the alert is dismissed
- (void)alertView:(UIAlertView *)alertView willDismissWithButtonIndex:(NSInteger)buttonIndex
{
    [self.session startRunning];
}
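Note that the code above assumes the user has already granted camera access. Here is a minimal sketch of asking first, using AVFoundation's authorization API (available since iOS 7); you would run this before calling startRunning:
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    dispatch_async(dispatch_get_main_queue(), ^{
        if (granted) {
            [self.session startRunning]; // permission granted: begin scanning
        } else {
            NSLog(@"Camera access denied"); // e.g. point the user to Settings
        }
    });
}];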
Before iOS 7, developers generally relied on a third-party library for barcode scanning, most commonly ZBarSDK. Since iOS 7 the system's AVMetadataObject class provides an interface for decoding QR codes, and in my tests the native API scans and decodes far faster than the third-party libraries.
The official interface is very simple; the code is as follows:
@interface ViewController ()<AVCaptureMetadataOutputObjectsDelegate> // delegate that handles the captured metadata
{
    AVCaptureSession * session; // bridge between input and output
}
@end
@implementation ViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    // Get the camera device
    AVCaptureDevice * device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // Create the input stream
    AVCaptureDeviceInput * input = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
    // Create the output stream
    AVCaptureMetadataOutput * output = [[AVCaptureMetadataOutput alloc]init];
    // Set the delegate; deliver callbacks on the main queue
    [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    
    // Initialize the session object
    session = [[AVCaptureSession alloc]init];
    // High-quality capture preset
    [session setSessionPreset:AVCaptureSessionPresetHigh];
    
    [session addInput:input];
    [session addOutput:output];
    // Set the supported code types (the list below handles both barcodes and QR codes)
    output.metadataObjectTypes=@[AVMetadataObjectTypeQRCode,AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode128Code];
    
    AVCaptureVideoPreviewLayer * layer = [AVCaptureVideoPreviewLayer layerWithSession:session];
    layer.videoGravity=AVLayerVideoGravityResizeAspectFill;
    layer.frame=self.view.layer.bounds;
    [self.view.layer insertSublayer:layer atIndex:0];
    // Start capturing
    [session startRunning];
}
After this, the camera feed is already visible in the UI; implementing the delegate method completes QR code and barcode scanning:
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection{
    if (metadataObjects.count>0) {
        //[session stopRunning];
        AVMetadataMachineReadableCodeObject * metadataObject = [metadataObjects objectAtIndex:0];
        // Log the scanned string
        NSLog(@"%@",metadataObject.stringValue);
    }
}
Testing the code above shows that the system's decoding is remarkably efficient, and the official iOS API really is powerful. Still, we can optimize further and push the efficiency even higher:
First, the AVCaptureMetadataOutput class has the following property (available since iOS 7.0):
@property(nonatomic) CGRect rectOfInterest;
This property essentially tells the system which region to pay attention to. Most scanning apps draw a frame in the UI reminding you to place the barcode inside it; that is exactly what this property is for. It restricts processing to whatever is captured inside the given rect, which, as you would expect, raises the efficiency of our code considerably. A few things to note when using it:
1. This CGRect is not an ordinary rect: each of its four values lies between 0 and 1 and expresses a ratio.
2. Testing shows that x actually corresponds to the vertical distance from the top-left corner, and y to the horizontal distance from the top-left corner.
3. Width and height are swapped in the same way.
4. For example, to restrict the processed region to the lower half of the screen, set:
output.rectOfInterest=CGRectMake(0.5,0,0.5, 1);
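To make the axis swap concrete, here is a minimal sketch of a helper that converts an on-screen scan frame (in points, portrait assumed) into a rectOfInterest value, mirroring the computation in RootViewController above; the name HJRectOfInterestForScanRect is hypothetical:
static CGRect HJRectOfInterestForScanRect(CGRect scanRect, CGSize screenSize)
{
    return CGRectMake(scanRect.origin.y / screenSize.height,    // x <- vertical offset ratio
                      scanRect.origin.x / screenSize.width,     // y <- horizontal offset ratio
                      scanRect.size.height / screenSize.height, // width <- height ratio
                      scanRect.size.width / screenSize.width);  // height <- width ratio
}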
Why Apple designed it this way, or whether I am simply using the parameter incorrectly, I cannot say; I would welcome pointers from anyone who knows more.
Original post: http://blog.csdn.net/qq_31810357/article/details/50442512