Added an API, available since iOS 7, for scanning QR codes with the built-in AVFoundation framework.
Previously I used the open-source ZBar library to scan QR codes, but it does not support arm64 and is no longer maintained, so I switched to the system API.
#import @interface QRCodeReadController : BaseViewControllervcapturemetadataoutputobjectsdelegate>@property (weak, nonatomic) IBOutlet UIView *viewPreview;@end
Add a viewPreview view to the XIB so the live camera feed can be displayed dynamically while scanning.
@ Interface QRCodeReadController () {NSInteger maxY; NSInteger minY; NSTimer * timer; UIImageView * line;} @ property (nonatomic) BOOL isReading; @ property (nonatomic, strong) AVCaptureSession * captureSession; @ property (nonatomic, strong) implements * videoPreviewLayer;-(BOOL) startReading;-(void) stopReading; @ end @ implementation QRCodeReadController-(id) initWithNibName :( NSString *) nibname Nil bundle :( NSBundle *) nibBundleOrNil {self = [super initWithNibName: nibNameOrNil bundle: nibBundleOrNil]; if (self) {// Custom initialization} return self;}-(void) viewDidLoad {[super viewDidLoad]; // Do any additional setup after loading the view from its nib. _ isReading = NO; if ([self startReading]) {maxY = 280; minY = 2; line = [[UIImageView alloc] initWithFrame: CGRectMake (0, 0,280, 10)]; // 0-200 [line setImage: [UIImage imageNamed: @ e0]; [_ viewPreview addSubview: line]; timer = [nst1_scheduledtimerwithtimeinterval: 1.0/40 target: self selector: @ selector (move) userInfo: nil repeats: YES] ;}}/ *** AVCaptureMetadataOutput object. this class in combination with the AVCaptureMetadataOutputObjectsDelegate protocol will manage to intercept any metadata found in the input device (meaning Data in a QR code captured by our camera) and translate it to a human readable format. */-(BOOL) startReading {NSError * error; AVCaptureDevice * captureDevice = [AVCaptureDevice failed: AVMediaTypeVideo]; optional * input = [cannot deviceInputWithDevice: captureDevice error: & error]; if (! 
Input) {NSLog (@ % @, [error localizedDescription]); return NO;} _ captureSession = [[AVCaptureSession alloc] init]; [_ captureSession addInput: input]; optional * captureMetadataOutput = [[financialloc] init]; [_ captureSession addOutput: captureMetadataOutput]; includispatchqueue; dispatchQueue = Queue (myQueue, NULL); [captureMetadataOutput set MetadataObjectsDelegate: self queue: dispatchQueue]; [captureMetadataOutput setMetadataObjectTypes: [NSArray arrayWithObject: AVMetadataObjectTypeQRCode]; // show to user what the camera of the device sees using a AVCaptureVideoPreviewLayer _ videoPreviewLayer = [[using alloc] initWithSession: _ captureSession]; [_ videoPreviewLayer setVideoGravity: camera]; [_ VideoPreviewLayer setFrame: _ viewPreview. layer. bounds]; [_ viewPreview. layer addSublayer: _ videoPreviewLayer]; [_ captureSession startRunning]; return YES;}-(void) captureOutput :( optional *) captureOutput timeout :( NSArray *) metadataObjects fromConnection :( AVCaptureConnection *) connection {if (metadataObjects! = Nil & [metadataObjects count]> 0) {optional * metadataObj = [metadataObjects objectAtIndex: 0]; if ([[metadataObj type] isEqualToString: AVMetadataObjectTypeQRCode]) {[self defined mselec#mainthread: @ selector (stopReading) withObject: nil waitUntilDone: NO]; NSLog (@ metadataObj string =%@, [metadataObj stringValue]); _ isReading = NO ;}}- (void) stopReading {[_ captureSession stopRunning]; _ captureSession = nil; [_ videoPreviewLayer removeFromSuperlayer];} // when scanning, move the scanning line-(void) move {NSLog (@ ++); static BOOL flag = TRUE; // true down and false up if (flag) {if (line. frame. origin. y
MinY) {line. frame = CGRectMake (line. frame. origin. x, line. frame. origin. y-5, line. frame. size. width, line. frame. size. height);} else {flag =! Flag ;}}}