I am currently trying to figure out how to use iOS 10's AVCapturePhotoOutput class and am having trouble doing so. I feel like I am about to get it right, but I keep receiving this error:
Terminating app due to uncaught exception 'NSGenericException', reason: '-[AVCapturePhotoOutput capturePhotoWithSettings:delegate:] No active and enabled video connection'
I have tried putting the following block in both the AVCapturePhotoCaptureDelegate callback and my didPressTakePhoto function:
if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
    videoConnection.videoOrientation = AVCaptureVideoOrientation.portrait;
    ...
}
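Judging by the wording of the exception, the output apparently has no connection that is both active and enabled at the moment capturePhoto is called. As a sanity check, something like the following (a minimal sketch, assuming stillImageOutput is the AVCapturePhotoOutput configured in the code below) can show whether a usable video connection exists before capturing:
// Minimal sketch: confirm the photo output has an active, enabled video
// connection before asking it to capture (assumes `stillImageOutput` is the
// AVCapturePhotoOutput set up in viewWillAppear below).
if let connection = stillImageOutput?.connection(withMediaType: AVMediaTypeVideo),
    connection.isActive && connection.isEnabled {
    connection.videoOrientation = .portrait;
} else {
    print("Photo output has no active and enabled video connection");
}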
Here is the code I have so far:
import AVFoundation
import UIKit

class Camera: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate, AVCapturePhotoCaptureDelegate {

    @IBOutlet weak var cameraView: UIView!
    @IBOutlet weak var imageView: UIImageView!

    var captureSession : AVCaptureSession?
    var stillImageOutput : AVCapturePhotoOutput?
    var stillImageOutputSettings : AVCapturePhotoSettings?
    var previewLayer : AVCaptureVideoPreviewLayer?

    var didTakePhoto = Bool();

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated);
        previewLayer?.frame = cameraView.bounds;
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated);

        captureSession = AVCaptureSession();
        captureSession?.sessionPreset = AVCaptureSessionPreset1920x1080;

        stillImageOutput = AVCapturePhotoOutput();

        let backCamera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo);
        do {
            let input = try AVCaptureDeviceInput(device: backCamera)
            if (captureSession?.canAddInput(input))! {
                captureSession?.addInput(input);
                if (captureSession?.canAddOutput(stillImageOutput))! {
                    captureSession?.canAddOutput(stillImageOutput);

                    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
                    previewLayer?.videoGravity = AVLayerVideoGravityResizeAspect;
                    previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.portrait;
                    cameraView.layer.addSublayer(previewLayer!);

                    captureSession?.startRunning();
                }
            }
        } catch {
            print(error);
        }
    }
    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        if let error = error {
            print(error.localizedDescription);
        }

        if let sampleBuffer = photoSampleBuffer,
            let previewBuffer = previewPhotoSampleBuffer,
            let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
            print(UIImage(data: dataImage)?.size as Any);

            let dataProvider = CGDataProvider(data: dataImage as CFData);
            let cgImageRef: CGImage! = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent);
            let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right);

            self.imageView.image = image;
            self.imageView.isHidden = false;
        }
    }
    func didPressTakePhoto() {
        stillImageOutputSettings = AVCapturePhotoSettings();

        let previewPixelType = stillImageOutputSettings?.availablePreviewPhotoPixelFormatTypes.first!;
        let previewFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
                             kCVPixelBufferWidthKey as String: 160,
                             kCVPixelBufferHeightKey as String: 160];
        stillImageOutputSettings?.previewPhotoFormat = previewFormat;

        stillImageOutput?.capturePhoto(with: stillImageOutputSettings!, delegate: self);
    }
    func didPressTakeAnother() {
        if (didTakePhoto == true) {
            imageView.isHidden = true;
            didTakePhoto = false;
        } else {
            captureSession?.startRunning();
            didTakePhoto = true;
            didPressTakePhoto();
        }
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        didPressTakeAnother();
    }
}
Any suggestions?
Thanks in advance!
For anyone else who may be trying to figure this out, the following resources helped me:
- For reference and code layout
- Actual implementation and usage of the new iOS 10 camera features
Change AVCaptureSessionPreset1920x1080 to AVCaptureSessionPresetHigh and try it.
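A minimal sketch of that single change in the session setup from the question (everything else unchanged):
captureSession = AVCaptureSession();
// Let AVFoundation pick a suitable high-quality preset instead of
// forcing the fixed 1920x1080 preset.
captureSession?.sessionPreset = AVCaptureSessionPresetHigh;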
Source: https://stackoverflow.com/questions/43059282/using-avcapturephotooutput-in-ios10-nsgenericexception