iOS 通過網路攝影機讀取每一幀的圖片,並且做臉部識別(Face Service)(Swift)

來源:互聯網
上載者:User

iOS 通過網路攝影機讀取每一幀的圖片,並且做臉部識別(Face Service)(Swift)
要使用IOS的網路攝影機,需要使用AVFoundation 庫,庫裡面的東西我就不介紹。 啟動網路攝影機需要使用AVCaptureSession 類。 然後得到網路攝影機傳輸的每一幀資料,需要使用AVCaptureVideoDataOutputSampleBufferDelegate 委託。 首先在viewDidLoad 裡添加找網路攝影機裝置的代碼,找到網路攝影機裝置以後,開啟網路攝影機  

        // Pick the front-facing video camera; once found, start the capture session.
        captureSession.sessionPreset = AVCaptureSessionPresetLow
        for device in AVCaptureDevice.devices() {
            let isFrontVideoCamera = device.hasMediaType(AVMediaTypeVideo) && device.position == AVCaptureDevicePosition.Front
            if isFrontVideoCamera {
                captureDevice = device as? AVCaptureDevice
                if captureDevice != nil {
                    println("Capture Device found")
                    beginSession()
                }
            }
        }

 

 beginSession,開啟網路攝影機:  
/// Wires the selected camera into the capture session, attaches a BGRA
/// video-data output that delivers frames to this controller on a private
/// serial queue, installs a full-screen preview layer, and starts capturing.
func beginSession() {
    var err: NSError? = nil
    let input = AVCaptureDeviceInput(device: captureDevice, error: &err)
    // Fix: validate the input BEFORE adding it to the session. The original
    // called addInput with a possibly-nil input and only logged the error
    // afterwards, which crashes when the camera is unavailable.
    if err != nil || input == nil {
        println("error: \(err?.localizedDescription)")
        return
    }
    captureSession.addInput(input)

    let output = AVCaptureVideoDataOutput()
    let cameraQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL)
    output.setSampleBufferDelegate(self, queue: cameraQueue)
    // 32BGRA matches what sampleBufferToImage expects when it builds a CGImage.
    output.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA]
    captureSession.addOutput(output)

    // Live preview behind the scanning UI.
    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    previewLayer?.videoGravity = "AVLayerVideoGravityResizeAspect"
    previewLayer?.frame = self.view.bounds
    self.view.layer.addSublayer(previewLayer)

    captureSession.startRunning()
}

 

 開啟以後,實現captureOutput 方法:  
/// Sample-buffer delegate callback: once `isStart` is true (armed by the
/// 2-second warm-up timer), runs CIDetector face detection on the frame,
/// crops the first detected face, then dismisses and invokes `callBack`
/// on the main queue.
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    if !self.isStart {
        return
    }

    let resultImage = sampleBufferToImage(sampleBuffer)

    // Software renderer keeps CoreImage work off the GPU on this background queue.
    let context = CIContext(options: [kCIContextUseSoftwareRenderer: true])
    let detector = CIDetector(ofType: CIDetectorTypeFace, context: context, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])

    let ciImage = CIImage(image: resultImage)
    // Orientation 6 (right, top) matches the .Right orientation applied in sampleBufferToImage.
    let results: NSArray = detector.featuresInImage(ciImage, options: ["CIDetectorImageOrientation": 6])

    for r in results {
        let face = r as! CIFaceFeature
        let faceImage = UIImage(CGImage: context.createCGImage(ciImage, fromRect: face.bounds), scale: 1.0, orientation: .Right)

        // Fix: the original format string promised the face's dimensions but
        // logged pickUIImager's frame origin instead.
        NSLog("Face found at (%f,%f) of dimensions %fx%f", face.bounds.origin.x, face.bounds.origin.y, face.bounds.size.width, face.bounds.size.height)

        dispatch_async(dispatch_get_main_queue()) {
            if self.isStart {
                self.isStart = false // flip first so already-queued frames cannot fire twice
                self.timer?.invalidate() // fix: the repeating scan-line timer retained self forever
                self.dismissViewControllerAnimated(true, completion: nil)
                // Fix: stop the session directly instead of abusing didReceiveMemoryWarning().
                self.captureSession.stopRunning()
                if faceImage != nil {
                    // Fix: optional-chain the callback instead of force-calling it.
                    self.callBack?(face: faceImage!)
                }
            }
        }
    }
}

 

在每一幀圖片上使用 CIDetector 得到人臉。CIDetector 還可以偵測眨眼與微笑的人臉;如果要詳細使用,請查看官方 API 文件。上面就是關鍵代碼:設定了 2 秒的延遲,2 秒之後才開始臉部偵測。全部代碼:
//
//  ViewController.swift
//  AVSessionTest
//
//  Created by qugang on 15/7/8.
//  Copyright (c) 2015 qugang. All rights reserved.
//
import UIKit
import AVFoundation

/// Presents a live front-camera preview with a scanning-frame animation,
/// runs CIDetector face detection on every frame after a 2-second warm-up,
/// then returns the first detected face image through `callBack` and
/// dismisses itself.
class AVCaptireVideoPicController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    /// Invoked on the main queue with the cropped face image.
    var callBack: ((face: UIImage) -> ())?
    let captureSession = AVCaptureSession()
    var captureDevice: AVCaptureDevice?
    var previewLayer: AVCaptureVideoPreviewLayer?
    // Scanning-frame artwork and the animated scan line.
    var pickUIImager: UIImageView = UIImageView(image: UIImage(named: "pick_bg"))
    var line: UIImageView = UIImageView(image: UIImage(named: "line"))
    var timer: NSTimer!
    // Direction flag for the bouncing scan line (true = moving down).
    var upOrdown = true
    // Face detection is suppressed until the 2-second timer flips this on.
    var isStart = false

    override func viewDidLoad() {
        super.viewDidLoad()

        // Find the front-facing video camera and start the session.
        captureSession.sessionPreset = AVCaptureSessionPresetLow
        let devices = AVCaptureDevice.devices()
        for device in devices {
            if (device.hasMediaType(AVMediaTypeVideo)) {
                if (device.position == AVCaptureDevicePosition.Front) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        println("Capture Device found")
                        beginSession()
                    }
                }
            }
        }

        // Centre the 200x200 scanning frame and the 2pt-tall scan line.
        pickUIImager.frame = CGRect(x: self.view.bounds.width / 2 - 100, y: self.view.bounds.height / 2 - 100, width: 200, height: 200)
        line.frame = CGRect(x: self.view.bounds.width / 2 - 100, y: self.view.bounds.height / 2 - 100, width: 200, height: 2)
        self.view.addSubview(pickUIImager)
        self.view.addSubview(line)

        // Drive the scan-line animation; arm detection after 2 seconds.
        timer = NSTimer.scheduledTimerWithTimeInterval(0.01, target: self, selector: "animationSate", userInfo: nil, repeats: true)
        NSTimer.scheduledTimerWithTimeInterval(2, target: self, selector: "isStartTrue", userInfo: nil, repeats: false)
    }

    /// Timer callback: enables face detection after the warm-up delay.
    func isStartTrue() {
        self.isStart = true
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Shed the camera pipeline under memory pressure.
        captureSession.stopRunning()
    }

    /// Timer callback: bounces the scan line up and down inside the frame.
    func animationSate() {
        if upOrdown {
            if (line.frame.origin.y >= pickUIImager.frame.origin.y + 200) {
                upOrdown = false
            } else {
                line.frame.origin.y += 2
            }
        } else {
            if (line.frame.origin.y <= pickUIImager.frame.origin.y) {
                upOrdown = true
            } else {
                line.frame.origin.y -= 2
            }
        }
    }

    /// Wires the selected camera into the session, attaches a BGRA frame
    /// output feeding this controller on a serial queue, installs the
    /// preview layer, and starts capturing.
    func beginSession() {
        var err: NSError? = nil
        let input = AVCaptureDeviceInput(device: captureDevice, error: &err)
        // Fix: validate the input BEFORE adding it. The original added a
        // possibly-nil input and only logged the error afterwards.
        if err != nil || input == nil {
            println("error: \(err?.localizedDescription)")
            return
        }
        captureSession.addInput(input)

        let output = AVCaptureVideoDataOutput()
        let cameraQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL)
        output.setSampleBufferDelegate(self, queue: cameraQueue)
        // 32BGRA matches what sampleBufferToImage expects.
        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA]
        captureSession.addOutput(output)

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer?.videoGravity = "AVLayerVideoGravityResizeAspect"
        previewLayer?.frame = self.view.bounds
        self.view.layer.addSublayer(previewLayer)

        captureSession.startRunning()
    }

    /// Delegate callback: detects faces on each frame once `isStart` is true,
    /// then returns the first face via `callBack` and dismisses.
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        if !self.isStart {
            return
        }

        let resultImage = sampleBufferToImage(sampleBuffer)

        // Software renderer keeps CoreImage work off the GPU on this background queue.
        let context = CIContext(options: [kCIContextUseSoftwareRenderer: true])
        let detector = CIDetector(ofType: CIDetectorTypeFace, context: context, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])

        let ciImage = CIImage(image: resultImage)
        // Orientation 6 (right, top) matches the .Right orientation applied in sampleBufferToImage.
        let results: NSArray = detector.featuresInImage(ciImage, options: ["CIDetectorImageOrientation": 6])

        for r in results {
            let face = r as! CIFaceFeature
            let faceImage = UIImage(CGImage: context.createCGImage(ciImage, fromRect: face.bounds), scale: 1.0, orientation: .Right)

            // Fix: the original format string promised the face's dimensions
            // but logged pickUIImager's frame origin instead.
            NSLog("Face found at (%f,%f) of dimensions %fx%f", face.bounds.origin.x, face.bounds.origin.y, face.bounds.size.width, face.bounds.size.height)

            dispatch_async(dispatch_get_main_queue()) {
                if self.isStart {
                    self.isStart = false // flip first so queued frames cannot fire twice
                    self.timer?.invalidate() // fix: the repeating timer retained self forever
                    self.dismissViewControllerAnimated(true, completion: nil)
                    // Fix: stop the session directly instead of abusing didReceiveMemoryWarning().
                    self.captureSession.stopRunning()
                    if faceImage != nil {
                        // Fix: optional-chain the callback instead of force-calling it.
                        self.callBack?(face: faceImage!)
                    }
                }
            }
        }
    }

    /// Converts a BGRA sample buffer into a UIImage rotated .Right
    /// (portrait orientation for the landscape-native camera frames).
    private func sampleBufferToImage(sampleBuffer: CMSampleBuffer!) -> UIImage {
        let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)
        CVPixelBufferLockBaseAddress(imageBuffer, 0)
        // NOTE(review): the output is configured as non-planar 32BGRA, yet
        // plane 0's base address is read here — confirm this aliases the whole
        // buffer on the deployment targets, or switch to CVPixelBufferGetBaseAddress.
        let baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)

        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
        let width = CVPixelBufferGetWidth(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)

        let colorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceRGB()
        let bitsPerCompornent = 8
        let bitmapInfo = CGBitmapInfo((CGBitmapInfo.ByteOrder32Little.rawValue | CGImageAlphaInfo.PremultipliedFirst.rawValue) as UInt32)

        let newContext = CGBitmapContextCreate(baseAddress, width, height, bitsPerCompornent, bytesPerRow, colorSpace, bitmapInfo) as CGContextRef
        let imageRef: CGImageRef = CGBitmapContextCreateImage(newContext)
        // Fix: the original never unlocked the pixel buffer after locking it.
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0)
        let resultImage = UIImage(CGImage: imageRef, scale: 1.0, orientation: UIImageOrientation.Right)!

        return resultImage
    }

    /// Redraws `imageObj` at `sizeChange` and returns the scaled copy.
    func imageResize(imageObj: UIImage, sizeChange: CGSize) -> UIImage {
        let hasAlpha = false
        let scale: CGFloat = 0.0 // 0 = use the device's main-screen scale

        UIGraphicsBeginImageContextWithOptions(sizeChange, !hasAlpha, scale)
        imageObj.drawInRect(CGRect(origin: CGPointZero, size: sizeChange))
        let scaledImage = UIGraphicsGetImageFromCurrentImageContext()
        // Fix: the original leaked the image context; end it before returning.
        UIGraphicsEndImageContext()
        return scaledImage
    }
}

聯繫我們

該頁面正文內容均來源於網絡整理,並不代表阿里雲官方的觀點,該頁面所提到的產品和服務也與阿里云無關,如果該頁面內容對您造成了困擾,歡迎寫郵件給我們,收到郵件我們將在5個工作日內處理。

如果您發現本社區中有涉嫌抄襲的內容,歡迎發送郵件至: info-contact@alibabacloud.com 進行舉報並提供相關證據,工作人員會在 5 個工作天內聯絡您,一經查實,本站將立刻刪除涉嫌侵權內容。

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.