
Xamarin.iOS face detection with the Vision framework throwing NSInvalidArgumentException (C#)

Tags: c#, objective-c, xamarin.ios, face-detection

I am currently building an app that does real-time face detection from the camera using the iOS Vision framework, but I keep hitting an intermittent Objective-C exception which, oddly, does not occur every time:

Objective-C exception thrown.  Name: NSInvalidArgumentException Reason: *** -[__NSPlaceholderArray initWithObjects:count:]: attempt to insert nil object from objects[0]
This exception stops the face-detection process, which is very frustrating.

This is what I have so far. If anyone can tell me what is causing this error and what I can do to remedy it, that would be a great help. Thanks.

public class DataOutputDelegate : AVCaptureVideoDataOutputSampleBufferDelegate
    { 
        public UIImage CapturedImage { get; set; }

        // Vision face-rectangles request and sequence handler, reused for every frame
        VNDetectFaceRectanglesRequest faceDetection = new VNDetectFaceRectanglesRequest(null);
        VNSequenceRequestHandler faceDetectionRequest = new VNSequenceRequestHandler();

        [Export("captureOutput:didOutputSampleBuffer:fromConnection:")]
        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            if (connection.SupportsVideoOrientation)
                connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;

            try
            {
                // Wrap the frame's pixel buffer (and its attachments) in a CIImage,
                // rotated so the image matches the portrait camera orientation
                using (var pixelBuffer = sampleBuffer.GetImageBuffer())
                using (var attachments = sampleBuffer.GetAttachments<NSString, NSObject>(CMAttachmentMode.ShouldPropagate))
                using (var ciimage = new CIImage(pixelBuffer, attachments))
                using (var ciImageWithOrientation = ciimage.CreateWithOrientation(CIImageOrientation.RightTop))
                {           
                    if(faceDetection != null)
                    {
                        faceDetectionRequest.Perform(new VNRequest[] { faceDetection }, ciImageWithOrientation, out var performError);

                        if(performError != null)
                        {
                            throw new Exception(performError.LocalizedDescription);
                        }

                        CheckForFace();
                    }                   
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }
            finally
            {
                // Release the buffer immediately so the capture queue can keep delivering frames
                sampleBuffer.Dispose();
                GC.Collect();
                GC.WaitForPendingFinalizers();
            }

        }

        void CheckForFace()
        {                       
            // Read the face observations produced by the most recent Perform call
            var observations = faceDetection.GetResults<VNFaceObservation>() ?? Array.Empty<VNFaceObservation>();

            if (observations.Length > 0)
            {
                Device.BeginInvokeOnMainThread(() =>
                {
                    MessagingCenter.Send<object>(new object(), Constants.CameraPreview.MessageKeys.FaceDetected);
                    Console.WriteLine("\nFace detected . . .");
                });
            }
            else
            {
                Device.BeginInvokeOnMainThread(() =>
                {
                    MessagingCenter.Send<object>(new object(), Constants.CameraPreview.MessageKeys.FaceLost);
                    Console.WriteLine("\nFace is lost . . .");
                });
            }

            return;
        }
    }
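
For reference, below is a minimal defensive sketch; it is not part of the code above, and both the class name GuardedDataOutputDelegate and the diagnosis are assumptions. One plausible source of "attempt to insert nil object" is a frame whose image buffer comes back null, so that a nil ends up inside an Objective-C array or constructor further down; the sketch simply skips such frames before anything is handed to Vision (the attachments dictionary is left out for simplicity).

using System;
using AVFoundation;
using CoreImage;
using CoreMedia;
using CoreVideo;
using Foundation;
using Vision;

public class GuardedDataOutputDelegate : AVCaptureVideoDataOutputSampleBufferDelegate
{
    // Reused Vision request and sequence handler, mirroring the code above
    readonly VNDetectFaceRectanglesRequest faceDetection = new VNDetectFaceRectanglesRequest(null);
    readonly VNSequenceRequestHandler sequenceHandler = new VNSequenceRequestHandler();

    [Export("captureOutput:didOutputSampleBuffer:fromConnection:")]
    public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
    {
        try
        {
            using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
            {
                // Assumption: an occasional frame arrives without a usable image buffer.
                // Skip it instead of passing a null value onward.
                if (pixelBuffer == null)
                    return;

                using (var ciImage = new CIImage(pixelBuffer))
                using (var oriented = ciImage.CreateWithOrientation(CIImageOrientation.RightTop))
                {
                    sequenceHandler.Perform(new VNRequest[] { faceDetection }, oriented, out NSError performError);

                    if (performError != null)
                        Console.WriteLine(performError.LocalizedDescription);
                }
            }
        }
        finally
        {
            // Release the buffer so the capture queue can keep delivering frames
            sampleBuffer.Dispose();
        }
    }
}

If the exception still fires with a guard like this in place, the nil is presumably coming from somewhere other than the pixel buffer.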