Real-time face detection with the camera in Swift 3

野的像风 2021-02-11 08:38

How can I do face detection in real time, just as the "Camera" app does, with a white round shape around and over the face? I use AVCaptureSession. I found that the image I s…

3 Answers
  •  面向向阳花
    2021-02-11 09:14

    Swift 3

    I have found a solution using AVFoundation that draws a square face-tracking overlay in real time on iOS. I modified some code from here.

    import UIKit
    import AVFoundation
    
    class DetailsView: UIView {
        func setup() {
            layer.borderColor = UIColor.red.withAlphaComponent(0.7).cgColor
            layer.borderWidth = 5.0
        }
    }
    
    
    class ViewController: UIViewController {
    
        let stillImageOutput = AVCaptureStillImageOutput()
    
        var session: AVCaptureSession?
        var borderLayer: CAShapeLayer?
    
        let detailsView: DetailsView = {
            let detailsView = DetailsView()
            detailsView.setup()
    
            return detailsView
        }()
    
        lazy var previewLayer: AVCaptureVideoPreviewLayer? = {
            var previewLay = AVCaptureVideoPreviewLayer(session: self.session!)
            previewLay?.videoGravity = AVLayerVideoGravityResizeAspectFill
    
            return previewLay
        }()
    
        lazy var frontCamera: AVCaptureDevice? = {
            guard let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as? [AVCaptureDevice] else { return nil }
    
            return devices.filter { $0.position == .front }.first
        }()
    
        // Low accuracy keeps CIDetector fast enough to run on every video frame.
        let faceDetector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: [CIDetectorAccuracy : CIDetectorAccuracyLow])
    
        override func viewDidLayoutSubviews() {
            super.viewDidLayoutSubviews()
            previewLayer?.frame = view.frame
        }
    
        override func viewDidAppear(_ animated: Bool) {
            super.viewDidAppear(animated)
            guard let previewLayer = previewLayer else { return }
    
            view.layer.addSublayer(previewLayer)
            view.addSubview(detailsView)
            view.bringSubview(toFront: detailsView)
        }
    
        override func viewDidLoad() {
            super.viewDidLoad()
            sessionPrepare()
            session?.startRunning()
        }
        //function to store image
        func saveToCamera() {
    
            if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
    
                stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) in
                    if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer) {
    
                        if let cameraImage = UIImage(data: imageData) {
    
                            UIImageWriteToSavedPhotosAlbum(cameraImage, nil, nil, nil)
                        }
                    }
                })
            }
        }
    }
    
    extension ViewController {
    
        func sessionPrepare() {
            session = AVCaptureSession()
    
            guard let session = session, let captureDevice = frontCamera else { return }
    
            session.sessionPreset = AVCaptureSessionPresetPhoto
    
    
            do {
                let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
                session.beginConfiguration()
                stillImageOutput.outputSettings = [AVVideoCodecKey:AVVideoCodecJPEG]
    
                if session.canAddOutput(stillImageOutput) {
                    session.addOutput(stillImageOutput)
                }
    
                if session.canAddInput(deviceInput) {
                    session.addInput(deviceInput)
                }
    
                let output = AVCaptureVideoDataOutput()
                output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)]
    
                output.alwaysDiscardsLateVideoFrames = true
    
                if session.canAddOutput(output) {
                    session.addOutput(output)
                }
    
                session.commitConfiguration()
    
                let queue = DispatchQueue(label: "output.queue")
                output.setSampleBufferDelegate(self, queue: queue)
    
            } catch {
                print("error with creating AVCaptureDeviceInput")
            }
        }
    }
    
    extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
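    // Called on the background "output.queue" for every captured video frame.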
        func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
            let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
            let attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate)
            let ciImage = CIImage(cvImageBuffer: pixelBuffer!, options: attachments as! [String : Any]?)
            let options: [String : Any] = [CIDetectorImageOrientation: exifOrientation(orientation: UIDevice.current.orientation),
                                           CIDetectorSmile: true,
                                           CIDetectorEyeBlink: true]
            let allFeatures = faceDetector?.features(in: ciImage, options: options)
    
            let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)
            let cleanAperture = CMVideoFormatDescriptionGetCleanAperture(formatDescription!, false)
    
            guard let features = allFeatures else { return }
    
            for feature in features {
                if let faceFeature = feature as? CIFaceFeature {
                    let faceRect = calculateFaceRect(facePosition: faceFeature.mouthPosition, faceBounds: faceFeature.bounds, clearAperture: cleanAperture)
                    update(with: faceRect)
                }
            }
    
            if features.count == 0 {
                DispatchQueue.main.async {
                    self.detailsView.alpha = 0.0
                }
            }
    
        }
    
    // Map the current device orientation to an EXIF orientation value for CIDetector.
    func exifOrientation(orientation: UIDeviceOrientation) -> Int {
            switch orientation {
            case .portraitUpsideDown:
                return 8
            case .landscapeLeft:
                return 3
            case .landscapeRight:
                return 1
            default:
                return 6
            }
        }
    
    // Work out the rectangle the video actually occupies inside the preview frame
    // for the given clean-aperture size.
    func videoBox(frameSize: CGSize, apertureSize: CGSize) -> CGRect {
            let apertureRatio = apertureSize.height / apertureSize.width
            let viewRatio = frameSize.width / frameSize.height
    
            var size = CGSize.zero
    
            if (viewRatio > apertureRatio) {
                size.width = frameSize.width
                size.height = apertureSize.width * (frameSize.width / apertureSize.height)
            } else {
                size.width = apertureSize.height * (frameSize.height / apertureSize.width)
                size.height = frameSize.height
            }
    
            var videoBox = CGRect(origin: .zero, size: size)
    
            if (size.width < frameSize.width) {
                videoBox.origin.x = (frameSize.width - size.width) / 2.0
            } else {
                videoBox.origin.x = (size.width - frameSize.width) / 2.0
            }
    
            if (size.height < frameSize.height) {
                videoBox.origin.y = (frameSize.height - size.height) / 2.0
            } else {
                videoBox.origin.y = (size.height - frameSize.height) / 2.0
            }
    
            return videoBox
        }
    
    // Convert a face rectangle from the detector's coordinate space into the preview
    // layer's coordinate space (swapping axes and mirroring for the front camera).
    func calculateFaceRect(facePosition: CGPoint, faceBounds: CGRect, clearAperture: CGRect) -> CGRect {
            let parentFrameSize = previewLayer!.frame.size
            let previewBox = videoBox(frameSize: parentFrameSize, apertureSize: clearAperture.size)
    
            var faceRect = faceBounds
    
            swap(&faceRect.size.width, &faceRect.size.height)
            swap(&faceRect.origin.x, &faceRect.origin.y)
    
            let widthScaleBy = previewBox.size.width / clearAperture.size.height
            let heightScaleBy = previewBox.size.height / clearAperture.size.width
    
            faceRect.size.width *= widthScaleBy
            faceRect.size.height *= heightScaleBy
            faceRect.origin.x *= widthScaleBy
            faceRect.origin.y *= heightScaleBy
    
            faceRect = faceRect.offsetBy(dx: 0.0, dy: previewBox.origin.y)
            let frame = CGRect(x: parentFrameSize.width - faceRect.origin.x - faceRect.size.width - previewBox.origin.x / 2.0, y: faceRect.origin.y, width: faceRect.width, height: faceRect.height)
    
            return frame
        }
    
    }
    extension ViewController {
        func update(with faceRect: CGRect) {
            DispatchQueue.main.async {
                UIView.animate(withDuration: 0.2) {
                    self.detailsView.alpha = 1.0
                    self.detailsView.frame = faceRect
                }
            }
        }
    }
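
    Note that on iOS 10 and later the app also needs an NSCameraUsageDescription entry in Info.plist, otherwise the capture session cannot access the camera.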
    

    **Edited**

    Swift 4

    From Swift 4 (iOS 11) you can use Apple's own Vision framework to detect faces in real time. See the linked documentation and sample app.
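
    For reference, here is a minimal sketch of the Vision approach. It assumes iOS 11+, the same AVCaptureVideoDataOutput delegate and the detailsView/previewLayer properties from the code above; the helper names detectFaces(in:) and layerRect(for:in:) are just for illustration, and you would call detectFaces(in:) from captureOutput instead of the CIDetector path. The conversion from Vision's normalized bounding box to layer coordinates shown here is one common way to do it, not the only one.

    import UIKit
    import AVFoundation
    import Vision

    extension ViewController {

        // Run a face-rectangles request on a captured video frame (iOS 11+).
        func detectFaces(in sampleBuffer: CMSampleBuffer) {
            guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

            let request = VNDetectFaceRectanglesRequest { request, error in
                guard error == nil, let faces = request.results as? [VNFaceObservation] else { return }
                DispatchQueue.main.async {
                    guard let previewLayer = self.previewLayer else { return }
                    if let face = faces.first {
                        // Show the overlay on the first detected face.
                        self.detailsView.alpha = 1.0
                        self.detailsView.frame = self.layerRect(for: face.boundingBox, in: previewLayer)
                    } else {
                        self.detailsView.alpha = 0.0
                    }
                }
            }

            // .leftMirrored is a typical orientation for the front camera held in portrait.
            let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: .leftMirrored, options: [:])
            try? handler.perform([request])
        }

        // Vision returns a normalized rect with a bottom-left origin; flip it and let
        // the preview layer map it into its own coordinate space.
        func layerRect(for boundingBox: CGRect, in previewLayer: AVCaptureVideoPreviewLayer) -> CGRect {
            let flipped = CGRect(x: boundingBox.origin.x,
                                 y: 1 - boundingBox.origin.y - boundingBox.height,
                                 width: boundingBox.width,
                                 height: boundingBox.height)
            return previewLayer.layerRectConverted(fromMetadataOutputRect: flipped)
        }
    }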
