Make a UIImage from a CMSampleBuffer

温柔的废话 2020-12-13 15:01

This is not the same as the countless questions about converting a CMSampleBuffer to a UIImage. I'm simply wondering why I can't convert it like

8 Answers
  • 2020-12-13 15:57

    With Swift 3 and iOS 10 (using AVCaptureVideoDataOutput). Imports:

    import UIKit
    import CoreData
    import CoreMotion
    import AVFoundation
    

    Create a UIView for the preview and link it to the main class:

      @IBOutlet var preview: UIView!
    

    Create these properties and this method to set up the camera session (kCVPixelFormatType_32BGRA is important!):

      lazy var cameraSession: AVCaptureSession = {
        let s = AVCaptureSession()
        s.sessionPreset = AVCaptureSessionPresetHigh
        return s
      }()
    
      lazy var previewLayer: AVCaptureVideoPreviewLayer = {
        let previewl = AVCaptureVideoPreviewLayer(session: self.cameraSession)
        previewl.frame = self.preview.bounds
        return previewl
      }()
    
      func setupCameraSession() {
        // defaultDevice(withMediaType:) returns an optional; force-unwrapped here
        let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)!
    
        do {
          let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
    
          cameraSession.beginConfiguration()
    
          if cameraSession.canAddInput(deviceInput) {
            cameraSession.addInput(deviceInput)
          }
    
          let dataOutput = AVCaptureVideoDataOutput()
          dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)]
          dataOutput.alwaysDiscardsLateVideoFrames = true
    
          if cameraSession.canAddOutput(dataOutput) {
            cameraSession.addOutput(dataOutput)
          }
    
          cameraSession.commitConfiguration()
    
          let queue = DispatchQueue(label: "fr.popigny.videoQueue", attributes: [])
          dataOutput.setSampleBufferDelegate(self, queue: queue)
    
        }
        catch let error as NSError {
          NSLog("\(error), \(error.localizedDescription)")
        }
      }
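
    For reference, here is a sketch of the same setup under the Swift 4+ AVFoundation API, where the device lookup and settings keys were renamed; the method name and queue label are arbitrary, and the behavior is identical:

      func setupCameraSessionModern() {
        // Swift 4+ spelling of the configuration above (a sketch, not the original answer's code):
        // .default(for:) replaces defaultDevice(withMediaType:)
        guard let captureDevice = AVCaptureDevice.default(for: .video),
              let deviceInput = try? AVCaptureDeviceInput(device: captureDevice) else { return }

        cameraSession.beginConfiguration()
        if cameraSession.canAddInput(deviceInput) {
          cameraSession.addInput(deviceInput)
        }

        let dataOutput = AVCaptureVideoDataOutput()
        // kCVPixelFormatType_32BGRA is still the important part
        dataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
        dataOutput.alwaysDiscardsLateVideoFrames = true
        if cameraSession.canAddOutput(dataOutput) {
          cameraSession.addOutput(dataOutput)
        }
        cameraSession.commitConfiguration()

        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
      }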
    

    In viewWillAppear:

      override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        setupCameraSession()
      }
    

    In viewDidAppear:

      override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        preview.layer.addSublayer(previewLayer)
        cameraSession.startRunning()
      }
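
    Note that startRunning() blocks the calling thread until the session starts; a common refinement (not in the original answer) is to kick it off in the background:

      DispatchQueue.global(qos: .userInitiated).async {
        self.cameraSession.startRunning()
      }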
    

    Implement the AVCaptureVideoDataOutputSampleBufferDelegate method to capture the output:

      func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    
        // Here you collect each frame and process it
        let ts: CMTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)  // the frame's timestamp, if you need it
        // mycapturedimage is a UIImage property on the class holding the latest converted frame
        self.mycapturedimage = imageFromSampleBuffer(sampleBuffer: sampleBuffer)
    }
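
    The delegate fires on the background queue passed to setSampleBufferDelegate, so any UIKit work with the converted frame must hop back to the main thread. A minimal sketch, assuming a hypothetical imageView outlet:

      DispatchQueue.main.async {
        // imageView is a hypothetical UIImageView outlet, not part of the answer
        self.imageView.image = self.mycapturedimage
      }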
    

    Here is the code that converts a kCVPixelFormatType_32BGRA CMSampleBuffer to a UIImage. The key is the bitmapInfo, which must match 32BGRA: 32-bit little-endian byte order with premultiplied-first alpha:

      func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage
      {
        // Get a CMSampleBuffer's Core Video image buffer for the media data
        let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
        // Lock the base address of the pixel buffer
        CVPixelBufferLockBaseAddress(imageBuffer, CVPixelBufferLockFlags.readOnly)

        // Get the base address of the pixel buffer
        let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)

        // Get the number of bytes per row for the pixel buffer
        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
        // Get the pixel buffer width and height
        let width = CVPixelBufferGetWidth(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)

        // Create a device-dependent RGB color space
        let colorSpace = CGColorSpaceCreateDeviceRGB()

        // Create a bitmap graphics context with the sample buffer data:
        // 32BGRA = 32-bit little-endian byte order with premultiplied-first alpha
        var bitmapInfo: UInt32 = CGBitmapInfo.byteOrder32Little.rawValue
        bitmapInfo |= CGImageAlphaInfo.premultipliedFirst.rawValue & CGBitmapInfo.alphaInfoMask.rawValue
        let context = CGContext(data: baseAddress, width: width, height: height, bitsPerComponent: 8, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: bitmapInfo)
        // Create a Quartz image from the pixel data in the bitmap graphics context
        let quartzImage = context?.makeImage()
        // Unlock the pixel buffer
        CVPixelBufferUnlockBaseAddress(imageBuffer, CVPixelBufferLockFlags.readOnly)

        // Create an image object from the Quartz image
        let image = UIImage(cgImage: quartzImage!)

        return image
      }
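
    If you don't need direct control over the bitmap context, a shorter route for the same BGRA buffer goes through Core Image (a sketch; rendering through a CIContext makes the resulting UIImage own its pixels):

      func imageFromSampleBufferViaCI(sampleBuffer: CMSampleBuffer) -> UIImage? {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        // Render to a CGImage so the result is safe wherever UIKit expects a backing bitmap
        guard let cgImage = CIContext().createCGImage(ciImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
      }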
    
  • 2020-12-13 15:58

    For JPEG images:

    Swift 4:

    let buff: CMSampleBuffer ...            // Assume you have a CMSampleBuffer
    if let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: buff, previewPhotoSampleBuffer: nil) {
        let image = UIImage(data: imageData) // Here you have your UIImage
    }
    }
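
    Note that jpegPhotoDataRepresentation(forJPEGSampleBuffer:previewPhotoSampleBuffer:) was deprecated in iOS 11. With the AVCapturePhoto-based delegate API, the equivalent is (a sketch):

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard error == nil, let imageData = photo.fileDataRepresentation() else { return }
        let image = UIImage(data: imageData) // Here you have your UIImage
    }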
    