How to use AVCapturePhotoOutput

予麋鹿 · 2020-11-27 02:39

I have been working with a custom camera, and I recently upgraded to Xcode 8 beta along with Swift 3. I originally had this:

    var stillImageOutput: AVCaptureStillImageOutput?

However, AVCaptureStillImageOutput is deprecated as of iOS 10 in favor of AVCapturePhotoOutput. How do I capture a photo with the new API?
6 Answers
  • 2020-11-27 03:19

Here is my full implementation:

    import UIKit
    import AVFoundation

    class ViewController: UIViewController, AVCapturePhotoCaptureDelegate {

        var captureSession: AVCaptureSession!
        var cameraOutput: AVCapturePhotoOutput!
        var previewLayer: AVCaptureVideoPreviewLayer!

        @IBOutlet weak var capturedImage: UIImageView!
        @IBOutlet weak var previewView: UIView!

        override func viewDidLoad() {
            super.viewDidLoad()
            captureSession = AVCaptureSession()
            captureSession.sessionPreset = AVCaptureSessionPresetPhoto
            cameraOutput = AVCapturePhotoOutput()

            let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)

            if let input = try? AVCaptureDeviceInput(device: device) {
                if captureSession.canAddInput(input) {
                    captureSession.addInput(input)
                    if captureSession.canAddOutput(cameraOutput) {
                        captureSession.addOutput(cameraOutput)
                        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                        previewLayer.frame = previewView.bounds
                        previewView.layer.addSublayer(previewLayer)
                        captureSession.startRunning()
                    }
                } else {
                    print("issue here: captureSession.canAddInput")
                }
            } else {
                print("could not create the capture device input")
            }
        }

        // Take picture button
        @IBAction func didPressTakePhoto(_ sender: UIButton) {
            let settings = AVCapturePhotoSettings()
            let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
            let previewFormat = [
                kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
                kCVPixelBufferWidthKey as String: 160,
                kCVPixelBufferHeightKey as String: 160
            ]
            settings.previewPhotoFormat = previewFormat
            cameraOutput.capturePhoto(with: settings, delegate: self)
        }

        // Callback from the photo capture
        func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {

            if let error = error {
                print("error occurred: \(error.localizedDescription)")
            }

            if let sampleBuffer = photoSampleBuffer,
               let previewBuffer = previewPhotoSampleBuffer,
               let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
                print(UIImage(data: dataImage)?.size as Any)

                // Decode the JPEG data into a UIImage for display
                let dataProvider = CGDataProvider(data: dataImage as CFData)
                let cgImageRef: CGImage! = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
                let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)

                self.capturedImage.image = image
            } else {
                print("could not get the photo sample buffers")
            }
        }

        // Call this wherever you need to check the camera permission state
        func askPermission() {
            let cameraPermissionStatus = AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo)

            switch cameraPermissionStatus {
            case .authorized:
                print("Already authorized")
            case .denied:
                print("denied")
                let alert = UIAlertController(title: "Sorry :(", message: "But could you please grant permission for camera within device settings", preferredStyle: .alert)
                let action = UIAlertAction(title: "Ok", style: .cancel, handler: nil)
                alert.addAction(action)
                present(alert, animated: true, completion: nil)
            case .restricted:
                print("restricted")
            default:
                // .notDetermined: ask the user for access
                AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo, completionHandler: { [weak self] (granted: Bool) -> Void in
                    if granted {
                        // User granted
                        print("User granted")
                        DispatchQueue.main.async {
                            // Do whatever you need on the main thread
                        }
                    } else {
                        // User rejected
                        print("User rejected")
                        DispatchQueue.main.async {
                            let alert = UIAlertController(title: "WHY?", message: "Camera is the main feature of our application", preferredStyle: .alert)
                            let action = UIAlertAction(title: "Ok", style: .cancel, handler: nil)
                            alert.addAction(action)
                            self?.present(alert, animated: true, completion: nil)
                        }
                    }
                })
            }
        }
    }
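
    One detail worth noting about didPressTakePhoto above: an AVCapturePhotoSettings instance may not be reused across captures (AVFoundation raises an exception if you pass a previously used settings object to capturePhoto), so creating a fresh one on every press, as this answer does, is required. A hypothetical helper to centralize that configuration could look like:

    // Hypothetical helper: AVCapturePhotoSettings must be created fresh for
    // every capture, so wrap the configuration instead of caching the object.
    func makePhotoSettings() -> AVCapturePhotoSettings {
        let settings = AVCapturePhotoSettings()
        if let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first {
            settings.previewPhotoFormat = [
                kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
                kCVPixelBufferWidthKey as String: 160,
                kCVPixelBufferHeightKey as String: 160
            ]
        }
        return settings
    }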
    
  • 2020-11-27 03:21

    The capture delegate function has been changed to photoOutput. Here's the updated function for Swift 4.

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        if let error = error {
            print(error.localizedDescription)
        }

        if let sampleBuffer = photoSampleBuffer,
           let previewBuffer = previewPhotoSampleBuffer,
           let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
            print("image: \(String(describing: UIImage(data: dataImage)?.size))") // Your Image
        }
    }
    
  • 2020-11-27 03:33

    In iOS 11, photoOutput(_:didFinishProcessingPhoto:previewPhoto:resolvedSettings:bracketSettings:error:) is deprecated.

    Use the following method instead:

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        let imageData = photo.fileDataRepresentation()
        if let data = imageData, let img = UIImage(data: data) {
            print(img)
        }
    }
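
    If the goal is to save the capture to the user's photo library, fileDataRepresentation() pairs naturally with PHAssetCreationRequest. A minimal sketch, assuming you import the Photos framework and have the photo-library usage description in Info.plist:

    import Photos

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard error == nil, let data = photo.fileDataRepresentation() else { return }
        PHPhotoLibrary.shared().performChanges({
            // Create a new library asset from the full-quality photo data
            let request = PHAssetCreationRequest.forAsset()
            request.addResource(with: .photo, data: data, options: nil)
        }, completionHandler: nil)
    }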
    
  • 2020-11-27 03:39

    I took @Aleksey Timoshchenko's excellent answer and updated it to Swift 4.x.

    Note that for my use-case I allow the user to take multiple photos, which is why I save them in the images array.

    Note that you need to wire up the @IBAction takePhoto method via your storyboard or in code. In my case, I use a storyboard.
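
    If you would rather do the wiring in code than in a storyboard, a minimal sketch (assuming the cameraView outlet and the takePhoto(_:) action from the class below) could go in viewDidLoad:

    // Hypothetical code-based wiring, as an alternative to the storyboard:
    let tap = UITapGestureRecognizer(target: self, action: #selector(takePhoto(_:)))
    cameraView.addGestureRecognizer(tap)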

    As of iOS 11, the AVCapturePhotoOutput.jpegPhotoDataRepresentation method used in @Aleksey Timoshchenko's answer is deprecated, so the extension below uses fileDataRepresentation() instead.

    Swift 4.x

    class CameraVC: UIViewController {
    
        @IBOutlet weak var cameraView: UIView!
    
        var images = [UIImage]()
    
        var captureSession: AVCaptureSession!
        var cameraOutput: AVCapturePhotoOutput!
        var previewLayer: AVCaptureVideoPreviewLayer!
    
        override func viewDidLoad() {
            super.viewDidLoad()
        }
    
        override func viewDidAppear(_ animated: Bool) {
            super.viewDidAppear(animated)
            startCamera()
        }
    
        func startCamera() {
            captureSession = AVCaptureSession()
            captureSession.sessionPreset = AVCaptureSession.Preset.photo
            cameraOutput = AVCapturePhotoOutput()
    
            if let device = AVCaptureDevice.default(for: .video),
               let input = try? AVCaptureDeviceInput(device: device) {
                if (captureSession.canAddInput(input)) {
                    captureSession.addInput(input)
                    if (captureSession.canAddOutput(cameraOutput)) {
                        captureSession.addOutput(cameraOutput)
                        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                        previewLayer.frame = cameraView.bounds
                        cameraView.layer.addSublayer(previewLayer)
                        captureSession.startRunning()
                    }
                } else {
                    print("issue here : captureSesssion.canAddInput")
                }
            } else {
                print("some problem here")
            }
        }
    
        @IBAction func takePhoto(_ sender: UITapGestureRecognizer) {
            let settings = AVCapturePhotoSettings()
            let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
            let previewFormat = [
                kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
                kCVPixelBufferWidthKey as String: 160,
                kCVPixelBufferHeightKey as String: 160
            ]
            settings.previewPhotoFormat = previewFormat
            cameraOutput.capturePhoto(with: settings, delegate: self)   
        }
    }
    
    extension CameraVC : AVCapturePhotoCaptureDelegate {
        func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    
            if let error = error {
                print("error occured : \(error.localizedDescription)")
            }
    
            if let dataImage = photo.fileDataRepresentation() {
                print(UIImage(data: dataImage)?.size as Any)
    
                let dataProvider = CGDataProvider(data: dataImage as CFData)
                let cgImageRef: CGImage! = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
                let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImage.Orientation.right)
    
                // Save the image in the array / do whatever you want to do with the image here
                self.images.append(image)
    
            } else {
                print("some error here")
            }
        }
    }
    
  • 2020-11-27 03:41

    Updated to Swift 4. It is really easy to use AVCapturePhotoOutput.

    You need an AVCapturePhotoCaptureDelegate, which returns the CMSampleBuffer.

    You can also get a preview image if you set the previewPhotoFormat on the AVCapturePhotoSettings:

    class CameraCaptureOutput: NSObject, AVCapturePhotoCaptureDelegate {

        let cameraOutput = AVCapturePhotoOutput()

        func capturePhoto() {
            let settings = AVCapturePhotoSettings()
            let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
            let previewFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
                                 kCVPixelBufferWidthKey as String: 160,
                                 kCVPixelBufferHeightKey as String: 160]
            settings.previewPhotoFormat = previewFormat
            self.cameraOutput.capturePhoto(with: settings, delegate: self)
        }

        func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
            if let error = error {
                print(error.localizedDescription)
            }

            if let sampleBuffer = photoSampleBuffer,
               let previewBuffer = previewPhotoSampleBuffer,
               let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
                print("image: \(String(describing: UIImage(data: dataImage)?.size))") // Your Image
            }
        }
    }
    

    For more information visit https://developer.apple.com/reference/AVFoundation/AVCapturePhotoOutput

    Note: You have to add the AVCapturePhotoOutput to the AVCaptureSession before taking the picture, i.e. session.addOutput(output), and only then output.capturePhoto(with: settings, delegate: self). Thanks @BigHeadCreations.
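
    For completeness, a minimal sketch of that setup, assuming the cameraOutput property from the class above:

    // Sketch only: the output must be attached to a running session
    // before capturePhoto(with:delegate:) is called.
    let session = AVCaptureSession()
    session.sessionPreset = .photo
    if let device = AVCaptureDevice.default(for: .video),
       let input = try? AVCaptureDeviceInput(device: device),
       session.canAddInput(input), session.canAddOutput(cameraOutput) {
        session.addInput(input)
        session.addOutput(cameraOutput)
        session.startRunning()
    }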

  • 2020-11-27 03:45

    I found this project on GitHub that helped me understand the device initialization and capture session setup.

    AVCapturePhotoOutput_test by inoue0426
