Scan two-dimensional code effects
Source: Https://github.com/YouXianMing/Animations
QRCodeView.h//QRCode////Created by youxianming on 16/7/7. copyright©2016 years xianming you.
All rights reserved.
#import <UIKit/UIKit.h> #import <AVFoundation/AVFoundation.h> @class Qrcodeview; @protocol qrcodeviewdelegate <NSObject> @optional/** * get QR Scan results * * @param codeview qrcodeview entity objects * @param
Codestring Scan String */(void) Qrcodeview: (Qrcodeview *) CodeView codestring: (NSString *) codestring;
@end @interface Qrcodeview:uiview/** * Agent */@property (nonatomic, weak) ID <QRCodeViewDelegate> delegate;
/** * The state of the lamp, the default is closed * * * * @property (nonatomic) Avcapturetorchmode Torchmode;
/** * Sensitive area, if not set, for all scanning area * * * @property (nonatomic) CGRect Interestarea;
/** * You use to add a custom control to the view, the dimensions are in line with the currently initialized view * * * * * @property (nonatomic, strong) UIView *contentview;
/** * is under Operation * * * @property (nonatomic, readonly) BOOL isrunning;
/** * Start Scanning * * @return If successful, return yes, otherwise return no * * *-(BOOL) start;
/** * End Scan/-(void) stop;
@end
QRCODEVIEW.M//QRCode////Created by youxianming on 16/7/7. copyright©2016 years xianming you.
All rights reserved. #import "QRCodeView.h" @interface Qrcodeview () <AVCaptureMetadataOutputObjectsDelegate> @property (nonatomic
) BOOL isrunning;
@property (nonatomic, strong) UIView *videoview;
@property (nonatomic, strong) Avcapturedeviceinput *deviceinput;
@property (nonatomic, strong) Avcapturedevice *capturedevice;
@property (nonatomic, strong) avcapturesession *capturesession;
@property (nonatomic, strong) Avcapturevideopreviewlayer *videopreviewlayer;
@property (nonatomic, strong) Avcapturemetadataoutput *capturemetadataoutput; @end @implementation Qrcodeview-(Instancetype) initWithFrame: (CGRect) Frame {if (self = [super Initwithframe:frame])
{Self.videoview = [[UIView alloc] initWithFrame:self.bounds];
[Self addSubview:self.videoView];
Self.contentview = [[UIView alloc] initWithFrame:self.bounds]; [Self addsubview:self. Contentview];
Self.capturedevice = [Avcapturedevice defaultdevicewithmediatype:avmediatypevideo];
_torchmode = Avcapturetorchmodeoff;
[Self addnotificationcenter];
return self;
} #pragma mark-nsnotificationcenter related. -(void) Addnotificationcenter {[[Nsnotificationcenter defaultcenter] addobserver:self selector: @selector (n Otificationcenterevent:) name:avcaptureinputportformatdescriptiondidchangenotification Object:nil
]; }-(void) Removenotificationcenter {[[Nsnotificationcenter Defaultcenter] removeobserver:self name:AVCap
Tureinputportformatdescriptiondidchangenotification Object:nil]; }-(void) Notificationcenterevent: (Nsnotification *) Sender {if (self.interestArea.size.width && self.interest Area.size.height) {self.captureMetadataOutput.rectOfInterest = [self.videopreviewlayer
MetadataOutputRectOfInterestForRect:self.interestArea]; else {Self.capturemetaDataoutput.rectofinterest = CGRectMake (0, 0, 1, 1);
}} #pragma Mark-start & Stop.
-(BOOL) Start {//Initialize input stream BOOL result = NO;
Nserror *error = nil;
Self.deviceinput = [Avcapturedeviceinput deviceInputWithDevice:self.captureDevice error:&error];
if (self.deviceinput = = nil) {NSLog (@ "%@", error);
return result;
///Create Session self.capturesession = [[Avcapturesession alloc] init];
Add input stream [self.capturesession addInput:self.deviceInput];
Initialize output stream self.capturemetadataoutput = [[Avcapturemetadataoutput alloc] init];
Add output stream [self.capturesession addOutput:self.captureMetadataOutput];
Create a queue.
[Self.capturemetadataoutput setmetadataobjectsdelegate:self queue:dispatch_queue_create (nil, nil)];
Self.captureMetadataOutput.metadataObjectTypes = @[avmetadataobjecttypeqrcode];
Create output object self.videopreviewlayer = [[Avcapturevideopreviewlayer alloc] initWithSession:self.captureSession]; Self.videoPreviewLayer.videoGravity = AvlayervideogravityresIzeaspectfill;
Self.videoPreviewLayer.frame = Self.contentView.bounds;
[Self.videoView.layer AddSublayer:self.videoPreviewLayer];
Start [Self.capturesession startrunning];
self.isrunning = YES;
result = YES;
return result;
}-(void) Stop {[self.capturesession stoprunning];
self.isrunning = NO;
Self.capturesession = nil; } #pragma mark-avcapturemetadataoutputobjectsdelegate-(void) Captureoutput: (Avcaptureoutput *) captureoutput Didoutputmetadataobjects: (Nsarray *) metadataobjects fromconnection: (avcaptureconnection *) connection {if (MetadataO
Bjects.count > 0) {avmetadatamachinereadablecodeobject *metadata = Metadataobjects.firstobject;
NSString *result = nil;
if ([Metadata.type Isequaltostring:avmetadataobjecttypeqrcode]) {result = Metadata.stringvalue; if (_delegate && [_delegate respondstoselector: @selector (qrcodeview:codestring:)]) {[_delegate Qrcodevie
W:self Codestring:result]; #pragm}}}}A Mark-setter & Getter.
-(void) Settorchmode: (Avcapturetorchmode) torchmode {_torchmode = Torchmode;
if (_deviceinput && [Self.capturedevice Hastorch]) {[Self.capturedevice lockforconfiguration:nil];
[Self.capturedevice Settorchmode:torchmode];
[Self.capturedevice unlockforconfiguration];
}} #pragma Mark-system method.
-(void) Dealloc {[Self stop];
[Self removenotificationcenter];
} @end
The above is the entire content of this article, I hope to help you learn, but also hope that we support the cloud habitat community.