AVCaptureVideoDataOutput does not deliver any frames

After creating the AVCaptureVideoDataOutput, the session does not produce any frames. No frames are reported as dropped either, the device log has been checked and contains nothing relevant, and the device raises no errors.

The same code has been tested on an iPod, where it works, but on an iPad Pro it does not.
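
To rule out a silent start-up failure, the session's lifecycle notifications can be observed. The following is only a diagnostic sketch, assuming the AVCaptureSession.Notifications helpers in the Xamarin.iOS bindings; ObserveSessionForDiagnostics is a hypothetical helper that would sit in the same class:

private IDisposable sessionStartedToken;
private IDisposable sessionErrorToken;

private void ObserveSessionForDiagnostics()
{
    // Fires once StartRunning has actually brought the session up.
    sessionStartedToken = AVCaptureSession.Notifications.ObserveDidStartRunning((sender, args) =>
        Console.WriteLine("Capture session started running"));

    // Runtime failures after configuration are reported through this notification
    // rather than as exceptions.
    sessionErrorToken = AVCaptureSession.Notifications.ObserveRuntimeError((sender, args) =>
        Console.WriteLine("Capture session runtime error: {0}", args.Error));
}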

The current code implementation I'm using is:

namespace Bluewire.MobileGlass.Application
{
    public enum IPDFCameraViewType
    {
        BlackAndWhite,
        Normal
    }

    [Register("IPDFCameraViewController")]
    public class IPDFCameraViewController : UIView
    {
        private static readonly CIDetector HighAccuracyRectangleDetector = CIDetector.CreateRectangleDetector(null, new CIDetectorOptions { Accuracy = FaceDetectorAccuracy.High });
        private AVCaptureSession captureSession;
        private AVCaptureDevice captureDevice;
        private EAGLContext context;
        private AVCaptureStillImageOutput stillImageOutput;
        private bool forceStop;
        private CGSize _intrinsicContentSize;
        private static CIContext ctx = null;
        private static readonly CIDetector QRCodeDetector = CIDetector.CreateQRDetector(null, new CIDetectorOptions { Accuracy = FaceDetectorAccuracy.High });
        private CIContext _coreImageContext;
        private uint _renderBuffer;
        private GLKView _glkView;
        private bool _isStopped;
        private nfloat _imageDetectionConfidence;
        private NSTimer _borderDetectTimeKeeper;
        private bool _borderDetectFrame;
        private CIRectangleFeature _borderDetectLastRectangleFeature;
        private bool _isCapturing;
        private DispatchQueue _captureQueue;
        private IPDFCameraViewType _cameraViewType;
        private bool _enableTorch;
        private IDisposable willResignActiveToken;
        private IDisposable didBecomeActiveToken;

        public IPDFCameraViewController(System.IntPtr handle)
            : base(handle)
        { }

        public void SetupCameraView()
        {
            CreateGLKView();
            var possibleDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
            var device = possibleDevices.FirstOrDefault();
            if (device == null) return;

            _imageDetectionConfidence = 0.0f;
            var session = new AVCaptureSession();
            captureSession = session;
            session.BeginConfiguration();
            captureDevice = device;
            var input = CheckForError((out NSError err) => AVCaptureDeviceInput.FromDevice(captureDevice, out err));
            session.SessionPreset = AVCaptureSession.PresetPhoto;
            session.AddInput(input);   
            var dataOutput = new AVCaptureVideoDataOutput();
            dataOutput.AlwaysDiscardsLateVideoFrames = true;
            dataOutput.WeakVideoSettings = VideoSettings;
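            // NOTE: _captureQueue is only created in AwakeFromNib; if SetupCameraView runs
            // before that, the queue passed on the next line is still null.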
            dataOutput.SetSampleBufferDelegateQueue(new IPDFVideoDataDelegateImpl(this), _captureQueue);
            session.AddOutput(dataOutput);
            var connection = dataOutput.Connections.First();
            connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;
            stillImageOutput = new AVCaptureStillImageOutput();
            session.AddOutput(stillImageOutput);
            if (device.FlashAvailable)
            {
                CheckForError((out NSError err) => device.LockForConfiguration(out err));
                device.FlashMode = AVCaptureFlashMode.Off;
                device.UnlockForConfiguration();
                if (device.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
                {
                    CheckForError((out NSError err) => device.LockForConfiguration(out err));
                    device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
                    device.UnlockForConfiguration();
                }
            }
            session.CommitConfiguration();
        }

        private NSDictionary VideoSettings = new NSDictionary<NSString, NSObject>(CoreVideo.CVPixelBuffer.PixelFormatTypeKey, new NSNumber((int)CoreVideo.CVPixelFormatType.CV32BGRA));
        private delegate void ErrorFunc(out NSError error);
        private delegate T ErrorFunc<T>(out NSError error);
        private void CheckForError(ErrorFunc e)
        {
            NSError error;
            e(out error);
            if (error != null)
            {
                throw new NSErrorException(error);
            }
        }

        private T CheckForError<T>(ErrorFunc<T> e)
        {
            NSError error;
            var result = e(out error);
            if (error != null)
            {
                throw new NSErrorException(error);
            }
            return result;
        }

        public void Start()
        {
            _isStopped = false;
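            // StartRunning blocks until the capture pipeline is up; failures after that point
            // are reported via the AVCaptureSessionRuntimeError notification, not as exceptions.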
            captureSession.StartRunning();
            _borderDetectTimeKeeper = NSTimer.CreateRepeatingScheduledTimer(TimeSpan.FromSeconds(0.5), (timer) =>
            {
                _borderDetectFrame = true;
            });
            HideGLKView(false);
        }

        public bool EnableBorderDetection { get; set; }
        public bool EnableTorch {
            get
            {
                return _enableTorch;
            }
            set
            {
                _enableTorch = value;
                var device = captureDevice;
                if (device.HasTorch && device.HasFlash)
                {
                    CheckForError((out NSError err) => device.LockForConfiguration(out err));
                    if (_enableTorch)
                    {
                        device.TorchMode = AVCaptureTorchMode.On;
                    }
                    else
                    {
                        device.TorchMode = AVCaptureTorchMode.Off;
                    }
                    device.UnlockForConfiguration();
                }
            }
        }

        private void HideGLKView(bool hidden)
        {
            Animate(0.1, () =>
            {
                _glkView.Alpha = (hidden) ? 0.0f : 1.0f;
            }, () => { });
        }

        public IPDFCameraViewType CameraViewType
        {
            get
            {
                return _cameraViewType;
            }
            set
            {
                var effect = UIBlurEffect.FromStyle(UIBlurEffectStyle.Dark);
                var viewWithBlurredBackground = new UIVisualEffectView(effect);
                viewWithBlurredBackground.Frame = Bounds;
                InsertSubviewAbove(viewWithBlurredBackground, _glkView);
                _cameraViewType = value;
                DispatchQueue.MainQueue.DispatchAfter(new DispatchTime(DispatchTime.Now, TimeSpan.FromSeconds(0.3)), () =>
                {
                    viewWithBlurredBackground.RemoveFromSuperview();
                });
            }
        }

        public override void AwakeFromNib()
        {
            base.AwakeFromNib();
            willResignActiveToken = UIApplication.Notifications.ObserveWillResignActive((sender, args) =>
            {
                forceStop = true;
            });
            didBecomeActiveToken = UIApplication.Notifications.ObserveDidBecomeActive((sender, args) =>
            {
                forceStop = false;
            });
            _captureQueue = new DispatchQueue("com.instapdf.AVCameraCaptureQueue");
        }

        ~IPDFCameraViewController()
        {
            willResignActiveToken.Dispose();
            didBecomeActiveToken.Dispose();
        } 

        private class IPDFVideoDataDelegateImpl : AVCaptureVideoDataOutputSampleBufferDelegate
        {
            private readonly IPDFCameraViewController mainController;
            public IPDFVideoDataDelegateImpl(IPDFCameraViewController mainController)
            {
                this.mainController = mainController;
            }

            public override void DidDropSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                Console.WriteLine("Dropped frame");
            }

            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                Console.WriteLine("Captured Image");
            }
        }

        private void CreateGLKView()
        {
            if (context != null) return;
            context = new EAGLContext(EAGLRenderingAPI.OpenGLES2);
            var view = new GLKView(Bounds);
            view.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
            view.TranslatesAutoresizingMaskIntoConstraints = true;
            view.Context = context;
            view.ContentScaleFactor = 1.0f;
            view.DrawableDepthFormat = GLKViewDrawableDepthFormat.Format24;
            InsertSubview(view, 0);
            _glkView = view;
            _coreImageContext = CIContext.FromContext(context, new CIContextOptions() { WorkingColorSpace = null, UseSoftwareRenderer = false });
        }

        public override CGSize IntrinsicContentSize
        {
            get
            {
                if (_intrinsicContentSize.Width == 0 || _intrinsicContentSize.Height == 0)
                {
                    return new CGSize(1.0, 1.0);
                }
                return _intrinsicContentSize;
            }
        }
    }
}
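
A guarded version of the session wiring could also make a refused input/output or a permission problem visible. This is only a sketch, not part of the code above; LogSessionPreconditions is a hypothetical helper, and the session, input and dataOutput arguments are the objects created in SetupCameraView:

private void LogSessionPreconditions(AVCaptureSession session, AVCaptureDeviceInput input, AVCaptureVideoDataOutput dataOutput)
{
    // Log the camera authorization status to rule out a permission problem.
    var authorization = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
    Console.WriteLine("Camera authorization: {0}", authorization);

    // Ask the session up front whether it will accept the input and output on this device,
    // before AddInput/AddOutput and CommitConfiguration are called.
    Console.WriteLine("CanAddInput: {0}", session.CanAddInput(input));
    Console.WriteLine("CanAddOutput (video data): {0}", session.CanAddOutput(dataOutput));
}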