Custom camera view Swift iOS 8 iPhone Xcode 6.1

予麋鹿 2021-01-31 12:31

I would like to use the camera on my iPhone inside a view. I don't want to use the typical full-screen camera view, but my own.

For example, I would like to have a 200x200 square in the middle of the screen showing the live camera preview.

3 Answers
  • 2021-01-31 13:21

    Another code example, showing how you can do manual focus on the iPhone:

    import UIKit
    
    class ViewController: UIViewController {
    
        @IBOutlet var cameraView: CameraView!
    
        override func viewDidLoad() {
            super.viewDidLoad()
            // Do any additional setup after loading the view, typically from a nib.
        }
    
        override func didReceiveMemoryWarning() {
            super.didReceiveMemoryWarning()
            // Dispose of any resources that can be recreated.
        }
    
        @IBAction func sliderChanged(sender: UISlider) {
            cameraView.setFocusWithLensPosition(sender.value)
        }
    }
    
    
    import UIKit
    import AVFoundation
    
    class CameraView: UIView {
    
        // AVFoundation properties
        let captureSession = AVCaptureSession()
        var captureDevice: AVCaptureDevice!
        var captureDeviceFormat: AVCaptureDeviceFormat?
        let stillImageOutput = AVCaptureStillImageOutput()
        var cameraLayer: AVCaptureVideoPreviewLayer?
    
        required init?(coder aDecoder: NSCoder) {
            super.init(coder: aDecoder)
            initCamera()
        }
    
        func initCamera() {
            captureSession.beginConfiguration()
    
    
            stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    
            // get the back camera
            if let device = cameraDeviceForPosition(AVCaptureDevicePosition.Back) {
    
                captureDevice = device
                captureDeviceFormat = device.activeFormat
    
                // lock the device so we can set a fixed focus mode
                do {
                    try captureDevice.lockForConfiguration()
                    captureDevice.focusMode = AVCaptureFocusMode.Locked
                    captureDevice.unlockForConfiguration()
                } catch let error as NSError {
                    print("Could not lock the device for configuration: \(error)")
                }

                // create the camera input and attach it to the session
                do {
                    let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
                    if captureSession.canAddInput(deviceInput) {
                        captureSession.addInput(deviceInput)
                    }
                } catch let error as NSError {
                    print("Could not create the camera input: \(error)")
                }
    
                captureSession.addOutput(stillImageOutput)
    
                // use the high resolution photo preset
                captureSession.sessionPreset = AVCaptureSessionPresetPhoto
    
    
                // setup camera preview
                cameraLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    
    
                if let player = cameraLayer {
                    player.videoGravity = AVLayerVideoGravityResizeAspectFill
                    self.layer.addSublayer(player)
                    player.frame = self.layer.bounds
                    player.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
                }
    
            }

            // commit the configuration once and start capturing
            captureSession.commitConfiguration()
            captureSession.startRunning()
        }
    
        // lock the lens at a specific position (0.0 = nearest focus, 1.0 = furthest)
        func setFocusWithLensPosition(pos: Float) {
            do {
                try captureDevice.lockForConfiguration()
                captureDevice.setFocusModeLockedWithLensPosition(pos, completionHandler: nil)
                captureDevice.unlockForConfiguration()
            } catch let error as NSError {
                print("Could not lock the device for configuration: \(error)")
            }
        }
    
        // return the camera device for a position
        func cameraDeviceForPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? {
            for device in AVCaptureDevice.devices() as! [AVCaptureDevice] {
                if device.position == position {
                    return device
                }
            }
            return nil
        }
    
    }
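
    Note that lensPosition runs from 0.0 (nearest focus) to 1.0 (furthest), so set the slider's minimum and maximum values to 0 and 1 for the sliderChanged action above to map cleanly onto the lens range.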
    
  • 2021-01-31 13:27

    You'll want to use the AVFoundation framework to build your own AVCaptureSession inside a view that you create in your storyboard. Here is a nice tutorial showing how to find the camera and create a capture session:

    http://jamesonquave.com/blog/taking-control-of-the-iphone-camera-in-ios-8-with-swift-part-1/

    This tutorial uses the whole view as the capture view, so that is how big the camera preview will be if you model your code after his. To get a 200x200 square in the middle of the screen instead, draw a 200x200 view on your view controller in the storyboard, link it to an outlet in your Swift file, and then change the line near the bottom that reads

    previewLayer?.frame = self.view.layer.frame

    to

    previewLayer?.frame = your200by200View.layer.frame
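
    Putting those pieces together, here is a rough sketch (Swift 2 style, to match the do/try code in the other answer) of a view controller whose preview layer lives inside the small square view instead of filling the screen. The class name SquareCameraViewController and the squareCameraView outlet are assumptions; wire the outlet to the 200x200 view you draw in the storyboard.

    import UIKit
    import AVFoundation

    class SquareCameraViewController: UIViewController {

        // assumed outlet: the 200x200 view drawn in the storyboard
        @IBOutlet var squareCameraView: UIView!

        let captureSession = AVCaptureSession()
        var previewLayer: AVCaptureVideoPreviewLayer?

        override func viewDidLoad() {
            super.viewDidLoad()

            captureSession.sessionPreset = AVCaptureSessionPresetPhoto

            // find the back camera and feed it into the session
            let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]
            guard let backCamera = devices.filter({ $0.position == .Back }).first else { return }

            do {
                let input = try AVCaptureDeviceInput(device: backCamera)
                if captureSession.canAddInput(input) {
                    captureSession.addInput(input)
                }
            } catch let error as NSError {
                print("Could not create the camera input: \(error)")
                return
            }

            // put the preview inside the small square view instead of self.view
            let preview = AVCaptureVideoPreviewLayer(session: captureSession)
            preview.videoGravity = AVLayerVideoGravityResizeAspectFill
            preview.frame = squareCameraView.bounds
            squareCameraView.layer.addSublayer(preview)
            squareCameraView.clipsToBounds = true
            previewLayer = preview

            captureSession.startRunning()
        }
    }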

    Hopefully this can help. If not, I can try to help some more or someone can correct me.

    Good luck!

  • 2021-01-31 13:31

    An example of how you can add a cameraOverlayView to create a 200x200 square viewing window at the center of the screen:

    @IBAction func takePhoto(sender: AnyObject) {
    
        if !UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.Camera){
            return
        }
    
        let imagePicker = UIImagePickerController()
        imagePicker.delegate = self  // requires UIImagePickerControllerDelegate & UINavigationControllerDelegate
        imagePicker.sourceType = UIImagePickerControllerSourceType.Camera
    
        //Create camera overlay
        let pickerFrame = CGRectMake(0, UIApplication.sharedApplication().statusBarFrame.size.height, imagePicker.view.bounds.width, imagePicker.view.bounds.height - imagePicker.navigationBar.bounds.size.height - imagePicker.toolbar.bounds.size.height)
        let squareFrame = CGRectMake(pickerFrame.width/2 - 200/2, pickerFrame.height/2 - 200/2, 200, 200)
        UIGraphicsBeginImageContext(pickerFrame.size)
    
        let context = UIGraphicsGetCurrentContext()
        CGContextSaveGState(context)
        CGContextAddRect(context, CGContextGetClipBoundingBox(context))
        CGContextMoveToPoint(context, squareFrame.origin.x, squareFrame.origin.y)
        CGContextAddLineToPoint(context, squareFrame.origin.x + squareFrame.width, squareFrame.origin.y)
        CGContextAddLineToPoint(context, squareFrame.origin.x + squareFrame.width, squareFrame.origin.y + squareFrame.size.height)
        CGContextAddLineToPoint(context, squareFrame.origin.x, squareFrame.origin.y + squareFrame.size.height)
        CGContextAddLineToPoint(context, squareFrame.origin.x, squareFrame.origin.y)
        CGContextEOClip(context)
        CGContextMoveToPoint(context, pickerFrame.origin.x, pickerFrame.origin.y)
        CGContextSetRGBFillColor(context, 0, 0, 0, 1)
        CGContextFillRect(context, pickerFrame)
        CGContextRestoreGState(context)
    
        let overlayImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext();
    
        let overlayView = UIImageView(frame: pickerFrame)
        overlayView.image = overlayImage
        imagePicker.cameraOverlayView = overlayView
        self.presentViewController(imagePicker, animated: true, completion: nil)
    
    }
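
    The picker only presents the camera; to actually receive the photo, the presenting view controller also needs to conform to UIImagePickerControllerDelegate and UINavigationControllerDelegate and implement the delegate callbacks. A rough sketch (Swift 2-style signatures; cropping the result down to the 200x200 square is left to you):

    func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject]) {
        // the full camera image; crop it to the overlay square if you only want that region
        if let photo = info[UIImagePickerControllerOriginalImage] as? UIImage {
            print("captured image size: \(photo.size)")
        }
        picker.dismissViewControllerAnimated(true, completion: nil)
    }

    func imagePickerControllerDidCancel(picker: UIImagePickerController) {
        picker.dismissViewControllerAnimated(true, completion: nil)
    }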
    