This example uses the Masonry auto layout framework. Prepare two images in advance, Scanframe.png (the scanning frame) and Scanline.png (the thin line that sweeps across the frame while scanning), and add them to the project.
Import the Masonry header (the shorthand macros must be defined before the import):
#define MAS_SHORTHAND
#define MAS_SHORTHAND_GLOBALS
#import "Masonry.h"
Import the AVFoundation framework:
#import <AVFoundation/AVFoundation.h>
The specific code is as follows:
static const char *kScanQRCodeQueueName = "ScanQRCodeQueue";

@interface ViewController () <AVCaptureMetadataOutputObjectsDelegate, CAAnimationDelegate>

@property (nonatomic, strong) UIImageView *line;
@property (nonatomic, strong) AVCaptureSession *scanSession;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *scanLayer;

@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        // The completion handler may be called on an arbitrary queue, so hop back to the main queue before touching UI.
        dispatch_async(dispatch_get_main_queue(), ^{
            if (granted) {
                [self startReading];
            } else {
                NSLog(@"Please grant access to the camera");
            }
        });
    }];
}
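Two caveats around the permission request: on iOS 10 and later the app must declare NSCameraUsageDescription in Info.plist or it will crash when requesting camera access, and if the user has already denied access the completion handler fires immediately with granted == NO. Checking the current authorization status first is one common pattern; a sketch, not part of the original code:

// Possible pre-check before requesting access (sketch).
AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
switch (status) {
    case AVAuthorizationStatusAuthorized:
        [self startReading];
        break;
    case AVAuthorizationStatusNotDetermined:
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            dispatch_async(dispatch_get_main_queue(), ^{
                if (granted) { [self startReading]; }
            });
        }];
        break;
    default:
        // Denied or restricted: the user has to change this in the Settings app.
        NSLog(@"Please grant access to the camera in Settings");
        break;
}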
// Start scanning
- (BOOL)startReading
{
    // Get an AVCaptureDevice instance
    NSError *error;
    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // Initialize the input stream
    AVCaptureDeviceInput *inputStream = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    if (!inputStream) {
        NSLog(@"%@", [error localizedDescription]);
        return NO;
    }
    // Create the session
    _scanSession = [[AVCaptureSession alloc] init];
    // Add the input stream
    [_scanSession addInput:inputStream];
    // Initialize the output stream
    AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
    // Add the output stream
    [_scanSession addOutput:captureMetadataOutput];
    // Create a dispatch queue for the metadata delegate callbacks
    dispatch_queue_t dispatchQueue;
    dispatchQueue = dispatch_queue_create(kScanQRCodeQueueName, NULL);
    [captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatchQueue];
    // Set the metadata type to AVMetadataObjectTypeQRCode
    [captureMetadataOutput setMetadataObjectTypes:[NSArray arrayWithObject:AVMetadataObjectTypeQRCode]];
    // Create the preview layer
    _scanLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_scanSession];
    [_scanLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    [_scanLayer setFrame:self.view.bounds];
    [self.view.layer addSublayer:_scanLayer];
    [self setOverlayPickerView];
    // Start the session
    [_scanSession startRunning];
    return YES;
}
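By default AVCaptureMetadataOutput analyzes the whole camera frame, not just the area inside the scan frame. If you want recognition limited to the frame, rectOfInterest can be set once the session is running; the sketch below assumes the 30 pt margins used by the overlay further down and would go at the end of startReading, after startRunning:

// Optional (sketch): restrict recognition to the scan-frame area.
// The rect conversion only returns meaningful values once the session is running;
// some projects defer this to AVCaptureInputPortFormatDescriptionDidChangeNotification.
CGFloat side = self.view.bounds.size.width - 60;           // overlay leaves 30 pt on each side
CGRect scanFrameRect = CGRectMake(30, 30, side, side);     // assumed position of the scan frame
captureMetadataOutput.rectOfInterest = [_scanLayer metadataOutputRectOfInterestForRect:scanFrameRect];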
// Stop scanning
- (void)stopReading
{
    [_scanSession stopRunning];
    _scanSession = nil;
}
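stopReading tears down the session but leaves the preview layer on screen. If the controller may call startReading again later (which adds a new preview layer and overlay), it is worth removing the old layer as well; a small sketch, not in the original code:

// Optional (sketch): also remove the preview layer so layers do not stack up
// if scanning is restarted later.
[_scanLayer removeFromSuperlayer];
_scanLayer = nil;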
// Scan results
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects
       fromConnection:(AVCaptureConnection *)connection
{
    if (metadataObjects != nil && [metadataObjects count] > 0) {
        AVMetadataMachineReadableCodeObject *metadataObj = [metadataObjects objectAtIndex:0];
        NSString *result;
        if ([[metadataObj type] isEqualToString:AVMetadataObjectTypeQRCode]) {
            result = metadataObj.stringValue;
        } else {
            NSLog(@"Not a QR code");
        }
        [self performSelectorOnMainThread:@selector(reportScanResult:) withObject:result waitUntilDone:NO];
    }
}
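The delegate above only accepts QR codes. AVFoundation can decode common one-dimensional barcodes with the same pipeline; a hedged sketch of the two changes that would take (a wider type list in startReading and a looser check in the delegate):

// In startReading, request more metadata types (sketch):
[captureMetadataOutput setMetadataObjectTypes:@[AVMetadataObjectTypeQRCode,
                                                AVMetadataObjectTypeEAN13Code,
                                                AVMetadataObjectTypeEAN8Code,
                                                AVMetadataObjectTypeCode128Code]];

// In the delegate, accept any machine-readable code instead of only QR codes:
if ([metadataObj isKindOfClass:[AVMetadataMachineReadableCodeObject class]]) {
    result = metadataObj.stringValue;
}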
// Process the result
- (void)reportScanResult:(NSString *)result
{
    [self stopReading];
    NSLog(@"%@", result);
}
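The original post only logs the result. In a real app you would typically show it and let the user scan again; one possible variant of reportScanResult: (a sketch, not the author's code) presents an alert and restarts scanning when it is dismissed:

- (void)reportScanResult:(NSString *)result
{
    [self stopReading];
    UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Scan result"
                                                                   message:result
                                                            preferredStyle:UIAlertControllerStyleAlert];
    [alert addAction:[UIAlertAction actionWithTitle:@"OK"
                                              style:UIAlertActionStyleDefault
                                            handler:^(UIAlertAction *action) {
                                                [self startReading];   // note: this also rebuilds the overlay
                                            }]];
    [self presentViewController:alert animated:YES completion:nil];
}

Because startReading calls setOverlayPickerView again, a production version would set up the overlay once (for example in viewDidLoad) and only restart the capture session here.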
// Add the overlay and scan-line animation to the recognition screen
- (void)setOverlayPickerView
{
    // View on the left
    UIView *leftView = [UIView new];
    leftView.alpha = 0.5;
    leftView.backgroundColor = [UIColor blackColor];
    [self.view addSubview:leftView];
    [leftView makeConstraints:^(MASConstraintMaker *make) {
        make.top.equalTo(self.view.top);
        make.bottom.equalTo(self.view.bottom);
        make.left.equalTo(self.view.left);
        make.width.equalTo(30);
    }];
    // View on the right
    UIView *rightView = [UIView new];
    rightView.alpha = 0.5;
    rightView.backgroundColor = [UIColor blackColor];
    [self.view addSubview:rightView];
    [rightView makeConstraints:^(MASConstraintMaker *make) {
        make.top.equalTo(self.view.top);
        make.bottom.equalTo(self.view.bottom);
        make.right.equalTo(self.view.right);
        make.width.equalTo(30);
    }];
    // Top view
    UIView *upView = [UIView new];
    upView.alpha = 0.5;
    upView.backgroundColor = [UIColor blackColor];
    [self.view addSubview:upView];
    [upView makeConstraints:^(MASConstraintMaker *make) {
        make.top.equalTo(self.view.top);
        make.left.equalTo(leftView.right);
        make.right.equalTo(rightView.left);
        make.height.equalTo(30);
    }];
    // Scan frame
    UIImageView *centerView = [UIImageView new];
    centerView.center = self.view.center;
    centerView.image = [UIImage imageNamed:@"Scanframe.png"];
    centerView.contentMode = UIViewContentModeScaleAspectFit;
    centerView.backgroundColor = [UIColor clearColor];
    [self.view addSubview:centerView];
    [centerView makeConstraints:^(MASConstraintMaker *make) {
        make.top.equalTo(upView.bottom);
        make.left.equalTo(leftView.right);
        make.right.equalTo(rightView.left);
        make.height.equalTo(upView.width);
    }];
    // Bottom view
    UIView *downView = [UIView new];
    downView.alpha = 0.5;
    downView.backgroundColor = [UIColor blackColor];
    [self.view addSubview:downView];
    [downView makeConstraints:^(MASConstraintMaker *make) {
        make.top.equalTo(centerView.bottom);
        make.left.equalTo(leftView.right);
        make.right.equalTo(rightView.left);
        make.bottom.equalTo(self.view.bottom);
    }];
    // Scan line
    _line = [UIImageView new];
    _line.image = [UIImage imageNamed:@"Scanline.png"];
    _line.contentMode = UIViewContentModeScaleAspectFill;
    _line.backgroundColor = [UIColor clearColor];
    [self addAnimation];
    [self.view addSubview:_line];
    [_line makeConstraints:^(MASConstraintMaker *make) {
        make.top.equalTo(centerView.top);
        make.left.equalTo(centerView.left);
        make.right.equalTo(centerView.right);
        make.height.equalTo(2);
    }];
    // Prompt label
    UILabel *msg = [UILabel new];
    msg.backgroundColor = [UIColor clearColor];
    msg.textColor = [UIColor whiteColor];
    msg.textAlignment = NSTextAlignmentCenter;
    msg.font = [UIFont systemFontOfSize:16];
    msg.text = @"Align the QR code within the frame to scan it automatically";
    [self.view addSubview:msg];
    [msg makeConstraints:^(MASConstraintMaker *make) {
        make.top.equalTo(centerView.bottom).offset(20);
        make.left.equalTo(self.view.left);
        make.right.equalTo(self.view.right);
        make.height.equalTo(30);
    }];
}
- (void)addAnimation
{
    // SCREENWIDTH is assumed to be a screen-width macro, e.g. [UIScreen mainScreen].bounds.size.width.
    // The line travels the height of the scan frame (screen width - 60) minus its own 2 pt height.
    CABasicAnimation *animation = [self moveYTime:2
                                            fromY:[NSNumber numberWithFloat:0]
                                              toY:[NSNumber numberWithFloat:SCREENWIDTH - 60 - 2]
                                              rep:OPEN_MAX];
    [_line.layer addAnimation:animation forKey:@"animation"];
}

- (CABasicAnimation *)moveYTime:(float)time fromY:(NSNumber *)fromY toY:(NSNumber *)toY rep:(int)rep
{
    CABasicAnimation *animationMove = [CABasicAnimation animationWithKeyPath:@"transform.translation.y"];
    [animationMove setFromValue:fromY];
    [animationMove setToValue:toY];
    animationMove.duration = time;
    animationMove.delegate = self;
    animationMove.repeatCount = rep;
    animationMove.fillMode = kCAFillModeForwards;
    animationMove.removedOnCompletion = NO;
    animationMove.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut];
    return animationMove;
}
@end
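One more practical note: Core Animation removes layer animations when the app goes to the background, so the scan line freezes after the user returns to the app. A common fix (not part of the original post) is to re-add the animation when the app comes back to the foreground, for example by registering once in viewDidLoad:

// Re-run the scan-line animation when returning from the background (sketch).
[[NSNotificationCenter defaultCenter] addObserver:self
                                         selector:@selector(addAnimation)
                                             name:UIApplicationWillEnterForegroundNotification
                                           object:nil];

Remember to remove the observer in dealloc if you target iOS versions earlier than 9.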
Note: Scanframe.png is the image for the scan frame and Scanline.png is the line that sweeps from top to bottom while scanning; both images must be added to the project in advance.