AVCaptureSession captured photo's size different than the preview?

Submitted by China☆狼群 on 2020-06-17 15:58:26

Question


The two screenshots show the camera preview and the resulting capture (in that order). We want the captured photo to match what the preview shows, and vice versa. How can we address this?

We tried manipulating the CALayer containing the photo data to size the image the way a Xamarin.Forms Image sizes with AspectFit, assigning ContentsGravity various options such as kCAGravityResizeAspect. We also fiddled with other contents-related properties such as ContentsRect and ContentsScale, but with no luck. Below are the view and its corresponding renderer. How can the sizing issue be addressed?
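For reference, here is a minimal sketch (not part of the original code; CropToPreviewAspect is a hypothetical helper) of the center crop that an AspectFill preview implies: since the preview layer fills the view by cropping the sensor image, cropping the captured photo to the preview's aspect ratio should make the two match. It assumes the photo has already been rotated so that photo.CGImage is oriented the same way as the preview (as the RotateImage step in the view code below attempts).

// Center-crops a captured photo to the aspect ratio of the preview (hypothetical helper).
// Assumes photo.CGImage is already oriented the same way as the preview.
UIImage CropToPreviewAspect(UIImage photo, CGSize previewSize)
{
    double imageWidth = (double)photo.CGImage.Width;
    double imageHeight = (double)photo.CGImage.Height;
    double previewAspect = (double)previewSize.Width / (double)previewSize.Height;

    CGRect cropRect;
    if (imageWidth / imageHeight > previewAspect)
    {
        // Photo is wider than the preview: trim the left/right edges.
        double croppedWidth = imageHeight * previewAspect;
        cropRect = new CGRect((imageWidth - croppedWidth) / 2, 0, croppedWidth, imageHeight);
    }
    else
    {
        // Photo is taller than the preview: trim the top/bottom edges.
        double croppedHeight = imageWidth / previewAspect;
        cropRect = new CGRect(0, (imageHeight - croppedHeight) / 2, imageWidth, croppedHeight);
    }

    using (var cropped = photo.CGImage.WithImageInRect(cropRect))
    {
        return UIImage.FromImage(cropped, photo.CurrentScale, photo.Orientation);
    }
}

In CapturePhoto, something like CropToPreviewAspect(rotatedPhoto, previewLayer.Frame.Size) could then be passed to MainPage.UpdateSource/UpdateImage instead of rendering a CALayer snapshot.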

Native Camera View

using System;
using System.Linq;
using System.Threading.Tasks;
using AVFoundation;
using CoreAnimation;
using CoreGraphics;
using Foundation;
using UIKit;
// (App-specific usings for CameraOptions and MainPage omitted.)

namespace App.iOS.Views
{
    public class NativeCameraView : UIView
    {
        AVCaptureVideoPreviewLayer previewLayer;
        CameraOptions cameraOptions;

        public AVCaptureSession CaptureSession { get; private set; }

        public AVCaptureStillImageOutput CaptureOutput { get; set; }

        public bool IsPreviewing { get; set; }

        public NativeCameraView(CameraOptions options) // constructor name must match the class name
        {
            cameraOptions = options;
            IsPreviewing = false;
            Initialize();
        }

        public override void LayoutSubviews()
        {
            base.LayoutSubviews();

            UIDevice device = UIDevice.CurrentDevice;
            UIDeviceOrientation orientation = device.Orientation;
            AVCaptureConnection previewLayerConnection = this.previewLayer.Connection;
            if (previewLayerConnection.SupportsVideoOrientation)
            {
                switch (orientation)
                {
                    case UIDeviceOrientation.Portrait:
                        UpdatePreviewLayer(previewLayerConnection,
                            AVCaptureVideoOrientation.Portrait);
                        break;
                    case UIDeviceOrientation.LandscapeRight:
                        UpdatePreviewLayer(previewLayerConnection,
                            AVCaptureVideoOrientation.LandscapeLeft);
                        break;
                    case UIDeviceOrientation.LandscapeLeft:
                        UpdatePreviewLayer(previewLayerConnection,
                            AVCaptureVideoOrientation.LandscapeRight);
                        break;
                    case UIDeviceOrientation.PortraitUpsideDown:
                        UpdatePreviewLayer(previewLayerConnection,
                            AVCaptureVideoOrientation.PortraitUpsideDown);
                        break;
                    default:
                        UpdatePreviewLayer(previewLayerConnection,
                            AVCaptureVideoOrientation.Portrait);
                        break;
                }
            }
        }

        private void UpdatePreviewLayer(AVCaptureConnection connection,
            AVCaptureVideoOrientation orientation)
        {
            connection.VideoOrientation = orientation;
            previewLayer.Frame = this.Bounds;
        }

        public async Task CapturePhoto()
        {
            var videoConnection = CaptureOutput.ConnectionFromMediaType(AVMediaType.Video);
            var sampleBuffer = await CaptureOutput.CaptureStillImageTaskAsync(videoConnection);
            var jpegData = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer);
            var photo = new UIImage(jpegData);
            var rotatedPhoto = RotateImage(photo, 180f);

            CALayer layer = new CALayer
            {
                //ContentsGravity = "kCAGravityResizeAspect",
                //ContentsRect = rect,
                //GeometryFlipped = true,
                ContentsScale = 1.0f,
                Frame = Bounds,
                Contents = rotatedPhoto.CGImage //Contents = photo.CGImage,
            };

            var snapshot = UIImageFromLayer(layer);
            MainPage.UpdateSource(snapshot.AsJPEG().AsStream());
            MainPage.UpdateImage(snapshot.AsJPEG().AsStream());
        }

        public UIImage RotateImage(UIImage image, float degree)
        {
            float Radians = degree * (float)Math.PI / 180;

            UIView view = new UIView(frame: new CGRect(0, 0, image.Size.Width, image.Size.Height));
            CGAffineTransform t = CGAffineTransform.MakeRotation(Radians);
            view.Transform = t;
            CGSize size = view.Frame.Size;

            UIGraphics.BeginImageContext(size);
            CGContext context = UIGraphics.GetCurrentContext();

            context.TranslateCTM(size.Width / 2, size.Height / 2);
            context.RotateCTM(Radians);
            context.ScaleCTM(1, -1);

            context.DrawImage(new CGRect(-image.Size.Width / 2, -image.Size.Height / 2, image.Size.Width, image.Size.Height), image.CGImage);

            UIImage imageCopy = UIGraphics.GetImageFromCurrentImageContext();
            UIGraphics.EndImageContext();

            return imageCopy;
        }

        // Renders the given layer into a UIImage.
        UIImage UIImageFromLayer(CALayer layer)
        {
            UIGraphics.BeginImageContextWithOptions(
                layer.Frame.Size,
                layer.Opaque,
                0);
            layer.RenderInContext(UIGraphics.GetCurrentContext());
            var outputImage = UIGraphics.GetImageFromCurrentImageContext();
            UIGraphics.EndImageContext();
            return outputImage;
        }

        void Initialize()
        {
            CaptureSession = new AVCaptureSession();
            CaptureSession.SessionPreset = AVCaptureSession.PresetPhoto;
            previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
            {
                Frame = Bounds,
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill
            };

            var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
            var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
            var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);

            if (device == null)
            {
                return;
            }

            NSError error;
            var input = new AVCaptureDeviceInput(device, out error);

            // Request JPEG stills (AVVideoCodecKey expects an NSString value such as AVVideo.CodecJPEG).
            CaptureOutput = new AVCaptureStillImageOutput()
            {
                OutputSettings = NSDictionary.FromObjectAndKey(AVVideo.CodecJPEG, AVVideo.CodecKey)
            };
            CaptureSession.AddOutput(CaptureOutput);

            CaptureSession.AddInput(input);
            Layer.AddSublayer(previewLayer);
            CaptureSession.StartRunning();
            IsPreviewing = true;
        }
    }
}

Native Camera Renderer

using System;
using App.iOS.Renderers;
using App.iOS.Views;
using Xamarin.Forms;
using Xamarin.Forms.Platform.iOS;
// (Plus the shared-project namespace that defines CameraView.)

[assembly: ExportRenderer(typeof(CameraView), typeof(CameraViewRenderer))]
namespace App.iOS.Renderers
{
    public class CameraViewRenderer : ViewRenderer<CameraView, NativeCameraView>
    {
        NativeCameraView uiCameraView;

        protected override void OnElementChanged(ElementChangedEventArgs<CameraView> e)
        {
            base.OnElementChanged(e);

            if (Control == null)
            {
                uiCameraView = new NativeCameraView(e.NewElement.Camera);
                SetNativeControl(uiCameraView);
            }
            if (e.OldElement != null)
            {
                // Unsubscribe
                uiCameraView.Tapped -= OnCameraViewTapped;
            }
            if (e.NewElement != null)
            {
                // Subscribe
                uiCameraView.Tapped += OnCameraViewTapped;
            }
        }

        async void OnCameraViewTapped(object sender, EventArgs e)
        {
            await uiCameraView.CapturePhoto();
        }
    }
}

NOTE: A similar question appears to have been asked quite some time ago.

Source: https://stackoverflow.com/questions/62398048/avcapturesession-captured-photos-size-different-than-the-preview
