iOS-原生實現(xiàn)二維碼掃描

可以在GitHub上直接下載文件
https://github.com/peiDuo/PDCameraScan
二維碼掃描

//
//  ViewController.m
//  OCErWeiMa
//
//  Created by 裴鐸 on 2018/7/15.
//  Copyright © 2018年 裴鐸. All rights reserved.
//

#import "ViewController.h"
#import "PDCameraScanView.h" // 掃描界面頭文件
#import <AVFoundation/AVFoundation.h>  //引用AVFoundation框架

// Class extension: private capture-pipeline state.
// Conforms to AVCaptureMetadataOutputObjectsDelegate for scan results, and to
// UIImagePickerControllerDelegate / UINavigationControllerDelegate so the
// `imagePicker.delegate = self` assignment in -choicePhoto compiles without a
// warning (the original only declared the metadata-output protocol).
@interface ViewController () <AVCaptureMetadataOutputObjectsDelegate,
                              UIImagePickerControllerDelegate,
                              UINavigationControllerDelegate>
@property ( strong , nonatomic ) AVCaptureDevice * device; // capture device; defaults to the back camera
@property ( strong , nonatomic ) AVCaptureDeviceInput * input; // input wrapping the device
@property ( strong , nonatomic ) AVCaptureMetadataOutput * output; // metadata output; its types and scan rect are set lazily
@property ( strong , nonatomic ) AVCaptureSession * session; // hub coordinating capture input and output
@property ( strong , nonatomic ) AVCaptureVideoPreviewLayer * previewLayer; // CALayer subclass that renders the camera feed
@property (nonatomic,strong)UIView *scanView; // marks where the scan frame sits on screen

@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];

    // Center a 200x200 square on screen; this is where codes are scanned.
    CGFloat screenWidth = [UIScreen mainScreen].bounds.size.width;
    CGFloat viewHeight = self.view.frame.size.height;
    CGRect scanFrame = CGRectMake((screenWidth - 200) / 2,
                                  (viewHeight - 200) / 2,
                                  200, 200);
    self.scanView = [[UIView alloc] initWithFrame:scanFrame];
    [self.view addSubview:self.scanView];

    // Overlay that dims everything outside the scan square and draws its
    // corner marks (see PDCameraScanView).
    PDCameraScanView *overlay = [[PDCameraScanView alloc] initWithFrame:self.view.frame];
    [self.view addSubview:overlay];

    // Build the capture pipeline and begin scanning.
    [self startScan];
}

/**
 開始掃描
 */
/**
 Builds the capture pipeline (input -> session -> output -> preview layer)
 and starts the scanning session.
 */
- (void)startScan
{
    // 1. Bail out when no camera input exists (simulator, or camera access
    //    denied) — the original passed a potentially-nil input straight to
    //    the session.
    if (self.input == nil) return;
    if (![self.session canAddInput:self.input]) return;
    [self.session addInput:self.input];

    // 2. Attach the metadata output to the session.
    if (![self.session canAddOutput:self.output]) return;
    [self.session addOutput:self.output];

    // 3. Choose which symbologies to decode.
    //    NOTE: availableMetadataObjectTypes is only populated AFTER the
    //    output has been added to the session, so this must follow addOutput:.
    //    To scan QR codes only, use:
    //    [self.output setMetadataObjectTypes:@[AVMetadataObjectTypeQRCode]];
    self.output.metadataObjectTypes = self.output.availableMetadataObjectTypes;

    // 4. Deliver decoded metadata to this controller on the main queue.
    [self.output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];

    // 5. Insert the camera preview behind every other view.
    [self.view.layer insertSublayer:self.previewLayer atIndex:0];
    self.previewLayer.frame = self.view.bounds;

    // 6. Start the session.
    [self.session startRunning];
}


/**
 掃描結(jié)束回調(diào)
 下面是接收掃描結(jié)果的代理AVCaptureMetadataOutputObjectsDelegate:
 */
/**
 AVCaptureMetadataOutputObjectsDelegate callback fired when the output decodes
 one or more metadata objects (e.g. a QR code).
 */
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    [self.session stopRunning];   // pause scanning while inspecting the result

    // The captured object may not be an AVMetadataMachineReadableCodeObject;
    // without this check the stringValue access below could crash.
    if (![[metadataObjects lastObject] isKindOfClass:[AVMetadataMachineReadableCodeObject class]]) {
        [self.session startRunning];
        return;
    }
    // `id` does not support dot syntax, so pull the object out with its real type.
    AVMetadataMachineReadableCodeObject *object = [metadataObjects lastObject];
    if ( object.stringValue == nil ){
        // Nothing decoded yet — resume scanning. The original was missing this
        // return, so it fell through and logged a "finished" result anyway.
        [self.session startRunning];
        return;
    }

    NSLog(@"掃描結束了 %@",object);
}

/**
 調(diào)用相冊
 */
/**
 Presents the system photo library so the user can pick an image that
 contains a QR code.
 */
- (void)choicePhoto{
    UIImagePickerController *picker = [[UIImagePickerController alloc] init];

    // Read from the photo library (as opposed to the camera).
    picker.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;

    // Requires UIImagePickerControllerDelegate and UINavigationControllerDelegate.
    picker.delegate = self;

    [self presentViewController:picker animated:YES completion:nil];
}

//選中圖片的回調(diào)
// UIImagePickerControllerDelegate: the user picked an image — try to read a
// QR code out of it with CIDetector.
-(void)imagePickerController:(UIImagePickerController*)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
    // Pull the chosen image out of the info dictionary.
    UIImage *pickImage = info[UIImagePickerControllerOriginalImage];
    NSData *imageData = UIImagePNGRepresentation(pickImage);
    CIImage *ciImage = [CIImage imageWithData:imageData];

    if (ciImage != nil) {
        // CIDetectorTypeQRCode detects QR codes; CIDetectorAccuracyLow favors
        // speed over accuracy.
        CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeQRCode
                                                  context:nil
                                                  options:@{CIDetectorAccuracy: CIDetectorAccuracyLow}];
        NSArray *features = [detector featuresInImage:ciImage];

        // Use each decoded payload. The original assigned it to an unused
        // local (unused-variable warning) and silently dropped the value.
        for (CIQRCodeFeature *result in features) {
            NSLog(@"%@", result.messageString);
        }
    }

    [self dismissViewControllerAnimated:YES completion:nil];
}


#pragma mark 懶加載

//下面初始化AVCaptureSession和AVCaptureVideoPreviewLayer:
/// Lazily creates the session that coordinates capture input and output.
- (AVCaptureSession *)session
{
    if (!_session) {
        _session = [[AVCaptureSession alloc] init];
    }
    return _session;
}

/// Lazily creates the layer that renders the camera feed on screen.
- (AVCaptureVideoPreviewLayer *)previewLayer
{
    if (_previewLayer == nil) {
        _previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
        // Use the ivar directly: the original wrote `self.previewLayer.videoGravity`,
        // which re-enters this accessor from inside itself.
        _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    }
    return _previewLayer;
}

/**
 這里設(shè)置輸出設(shè)備要注意rectOfInterest屬性的設(shè)置,一般默認是CGRect(x: 0, y: 0, width: 1, height: 1),
 全屏都能讀取的,但是讀取速度較慢。
 注意rectOfInterest屬性的傳人的是比例。
 比例是根據(jù)掃描容器的尺寸比上屏幕尺寸(注意要計算的時候要計算導(dǎo)航欄高度,有的話需減去)。
 參照的是橫屏左上角的比例,而不是豎屏。
 所以我們再設(shè)置的時候要調(diào)整方向如下面所示。
 */
- (AVCaptureMetadataOutput *)output{
    if (_output == nil) {
        // Create the metadata output.
        _output = [[AVCaptureMetadataOutput alloc] init];
        
        // 1. Full-screen frame.
        CGRect viewRect = self.view.frame;
        // 2. Frame of the scan square.
        CGRect containerRect = self.scanView.frame;
        
        // rectOfInterest is expressed as *proportions* of the capture frame,
        // and the capture frame's coordinate system is landscape-oriented
        // (see the comment above this method) — hence x/width are derived
        // from the vertical screen values and y/height from the horizontal
        // ones. If a navigation bar is present its height must be subtracted
        // first.
        CGFloat x = containerRect.origin.y / viewRect.size.height;
        CGFloat y = containerRect.origin.x / viewRect.size.width;
        CGFloat width = containerRect.size.height / viewRect.size.height;
        CGFloat height = containerRect.size.width / viewRect.size.width;
        // rectOfInterest restricts scanning to the square computed above.
        _output.rectOfInterest = CGRectMake(x, y, width, height);
    }
    return _output;
    
    /** Alternative seen online: observe
     AVCaptureInputPortFormatDescriptionDidChangeNotification and let the
     preview layer convert the rect; either approach works — pick one.
     __weak typeof(self) weakSelf = self;
     [[NSNotificationCenter defaultCenter]addObserverForName:AVCaptureInputPortFormatDescriptionDidChangeNotification
     object:nil
     queue:[NSOperationQueue mainQueue]
     usingBlock:^(NSNotification * _Nonnull note) {
     if (weakSelf){
     // Adjust the scan area.
     AVCaptureMetadataOutput *output = weakSelf.session.outputs.firstObject;
     output.rectOfInterest = [weakSelf.previewLayer metadataOutputRectOfInterestForRect:weakSelf.scanView.frame];
     }
     }];*/
}


/// Lazily resolves the default video capture device.
- (AVCaptureDevice *)device{
    if (!_device) {
        // Request the Video media type for camera capture.
        _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }
    return _device;
}

/// Lazily wraps the capture device in an input. Logs the failure reason
/// instead of silently discarding it (the original passed error:nil).
- (AVCaptureDeviceInput *)input{
    if (_input == nil) {
        NSError *error = nil;
        _input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:&error];
        if (_input == nil) {
            // Typically the simulator, or camera permission was denied.
            NSLog(@"Failed to create capture input: %@", error);
        }
    }
    return _input;
}


@end

掃描視圖文件的接口部分

//
//  PDCameraScanView.h
//  OCErWeiMa
//
//  Created by 裴鐸 on 2018/7/15.
//  Copyright © 2018年 裴鐸. All rights reserved.
//

#import <UIKit/UIKit.h>

/// Overlay view that dims the area outside a centered square scan region,
/// strokes the region's four corner marks, and animates a scan line across it.
@interface PDCameraScanView : UIView

/// Create the overlay sized to the host view's frame.
/// (Redundant redeclaration — UIView already declares -initWithFrame:.)
- (instancetype)initWithFrame:(CGRect)frame;
@end

下面是掃描視圖的實現(xiàn)文件

//
//  PDCameraScanView.m
//  OCErWeiMa
//
//  Created by 裴鐸 on 2018/7/15.
//  Copyright © 2018年 裴鐸. All rights reserved.
//

#import "PDCameraScanView.h"

// Class extension: private drawing/animation state.
@interface PDCameraScanView(){
    CGFloat sceenHeight;   // host view height ("sceen" typo kept; other methods reference it)
    NSTimer *timer;        // repeating timer that relaunches the scan-line sweep
    CGRect  scanRect;      // the clear scan square, cached by -drawRect:
    CGFloat kScreen_Width; // screen width
    CGFloat kScreen_Height;// screen height (set but unused in this file)
}

// Stroke width of the four corner marks.
@property (nonatomic,assign)CGFloat lineWidth;
// Side length of the square scan area.
@property (nonatomic,assign)CGFloat height;
// Color of the four corner marks.
@property (nonatomic,strong)UIColor  *lineColor;
// Seconds per scan-line sweep (also the timer interval).
@property (nonatomic, assign)CGFloat scanTime;

@end

@implementation PDCameraScanView

/// Configures the overlay (clear background, scan-square metrics) and starts
/// the repeating scan-line animation.
- (instancetype)initWithFrame:(CGRect)frame{
    if (self = [super initWithFrame:frame]) {
        
        self.backgroundColor = [UIColor clearColor]; // clear, otherwise the view draws black
        sceenHeight =self.frame.size.height;
        _height =   200; // side length of the square scan area
        _lineWidth = 2;  // stroke width of the four corner marks
        _lineColor =  [UIColor greenColor]; // color of the four corner marks
        _scanTime = 3;   // seconds per scan-line sweep / timer interval
        
        kScreen_Width = [UIScreen mainScreen].bounds.size.width;
        kScreen_Height = [UIScreen mainScreen].bounds.size.height;
        [self scanLineMove];
        
        // Relaunch the scan line every _scanTime seconds.
        // Use the block-based timer API (iOS 10+) with a weak reference: the
        // original target/selector form made the run loop retain this view
        // through the timer, so -dealloc — and therefore [timer invalidate] —
        // could never run, leaking the view and the timer.
        __weak typeof(self) weakSelf = self;
        timer = [NSTimer scheduledTimerWithTimeInterval:_scanTime repeats:YES block:^(NSTimer * _Nonnull t) {
            [weakSelf scanLineMove];
        }];
    }
    return self;
}

/// Drops a 1pt green line at the top of the scan square and animates it to
/// the bottom over _scanTime seconds, removing it when the sweep finishes.
- (void)scanLineMove{
    UIView *line = [[UIView alloc]initWithFrame:CGRectMake((kScreen_Width-_height)/2, (sceenHeight-_height)/2, _height, 1)];
    line.backgroundColor = [UIColor greenColor];
    [self addSubview:line];
    // Capturing self strongly here is safe: UIView animation blocks are
    // released once the animation ends, so no retain cycle forms. The
    // original mixed explicit `self->` with implicit self capture (`_height`),
    // which trips -Wimplicit-retain-self — use the explicit form throughout.
    [UIView animateWithDuration:_scanTime animations:^{
        line.frame = CGRectMake((self->kScreen_Width - self->_height) / 2,
                                (self->sceenHeight + self->_height) / 2,
                                self->_height,
                                0.5);
    } completion:^(BOOL finished) {
        [line removeFromSuperview];
    }];
}

/// Paints a half-transparent mask over the four regions surrounding the scan
/// square, then strokes the square's four green corner marks. Also caches the
/// clear square's frame in `scanRect`.
-(void)drawRect:(CGRect)rect{
    CGFloat   bottomHeight =  (sceenHeight-_height)/2;  // y of the square's top edge (and height of top/bottom masks)
    CGFloat   leftWidth = (kScreen_Width-_height)/2;    // x of the square's left edge (and width of side masks)
    
    CGContextRef ctx = UIGraphicsGetCurrentContext();
    
    // Dim the four regions around the square; alpha 0.5, adjust as needed.
    // (The CGContextStrokePath calls after each fill are no-ops — there is no
    // current path — and are kept only to leave the drawing code untouched.)
    CGContextSetRGBFillColor(ctx, 0, 0, 0, 0.5);
    CGContextFillRect(ctx, CGRectMake(0, 0, kScreen_Width, bottomHeight));   // top strip
    CGContextStrokePath(ctx);
    CGContextFillRect(ctx, CGRectMake(0,bottomHeight, leftWidth, _height));  // left strip
    CGContextStrokePath(ctx);
    CGContextFillRect(ctx, CGRectMake((kScreen_Width+_height)/2, bottomHeight, leftWidth, _height)); // right strip
    CGContextStrokePath(ctx);
    CGContextFillRect(ctx, CGRectMake(0,(sceenHeight+_height)/2, kScreen_Width, bottomHeight));      // bottom strip
    CGContextStrokePath(ctx);
    
    // Corner marks: each is an L-shaped path 30pt long per leg.
    CGContextSetLineWidth(ctx, _lineWidth);
    CGContextSetStrokeColorWithColor(ctx, _lineColor.CGColor);
    // Top-left corner.
    CGContextMoveToPoint(ctx, leftWidth, bottomHeight+30);
    CGContextAddLineToPoint(ctx, leftWidth, bottomHeight);
    CGContextAddLineToPoint(ctx, leftWidth+30, bottomHeight);
    CGContextStrokePath(ctx);
    // Top-right corner.
    CGContextMoveToPoint(ctx, (kScreen_Width+_height)/2-30, bottomHeight);
    CGContextAddLineToPoint(ctx, (kScreen_Width+_height)/2, bottomHeight);
    CGContextAddLineToPoint(ctx, (kScreen_Width+_height)/2, bottomHeight+30);
    CGContextStrokePath(ctx);
    // Bottom-left corner.
    CGContextMoveToPoint(ctx, leftWidth, (sceenHeight+_height)/2-30);
    CGContextAddLineToPoint(ctx, leftWidth,  (sceenHeight+_height)/2);
    CGContextAddLineToPoint(ctx, leftWidth+30, (sceenHeight+_height)/2);
    CGContextStrokePath(ctx);
    // Bottom-right corner.
    CGContextMoveToPoint(ctx, (kScreen_Width+_height)/2-30, (sceenHeight+_height)/2);
    CGContextAddLineToPoint(ctx,  (kScreen_Width+_height)/2,  (sceenHeight+_height)/2);
    CGContextAddLineToPoint(ctx,  (kScreen_Width+_height)/2, (sceenHeight+_height)/2-30);
    CGContextStrokePath(ctx);
    
    // Optional: stroke the square's full border (disabled).
    //    CGContextSetStrokeColorWithColor(ctx, [UIColor whiteColor].CGColor);
    //    CGContextSet_lineWidth(ctx, 1);
    //    CGContextAddRect(ctx, CGRectMake(leftWidth, bottomHeight, height, height));
    //    CGContextStrokePath(ctx);
    scanRect = CGRectMake(leftWidth, bottomHeight, _height, _height);
}

- (void)dealloc{
    // Stop the repeating timer.
    // NOTE(review): with the target/selector timer created in -initWithFrame:,
    // the run loop retains this view through the timer, so dealloc never fires
    // and this cleanup is unreachable until the timer is invalidated some
    // other way — confirm the deployment setup or switch to a block-based timer.
    [timer invalidate];
    timer = nil;
}

@end
最后編輯于
?著作權(quán)歸作者所有,轉(zhuǎn)載或內(nèi)容合作請聯(lián)系作者
【社區(qū)內(nèi)容提示】社區(qū)部分內(nèi)容疑似由AI輔助生成,瀏覽時請結(jié)合常識與多方信息審慎甄別。
平臺聲明:文章內(nèi)容(如有圖片或視頻亦包括在內(nèi))由作者上傳并發(fā)布,文章內(nèi)容僅代表作者本人觀點,簡書系信息發(fā)布平臺,僅提供信息存儲服務(wù)。

相關(guān)閱讀更多精彩內(nèi)容

友情鏈接更多精彩內(nèi)容