Finally got it working!! It just took one more day of coding, and I solved the problem:

public void ProcessRequest(HttpContext context)
{
    _httpContext = context;
    var imageid = context.Request.QueryString["Image"];
    if (imageid == null || imageid == "")
    {
        imageid = "1";
    }


    using (WebClient wc = new WebClient())
    {
        // Handler retrieves the image from database and load it on the stream
        using (Stream s = wc.OpenRead("http://mypageurl/Image.ashx?Image=" + imageid))
        {
            using (Bitmap bmp = new Bitmap(s))
            {
                AddFace(bmp);
            }
        }
    }

}

public void AddFace(Bitmap image)
{
    var faceImage = DetectFace(image);
    if (faceImage != null)
    {
        var stream = new MemoryStream();
        faceImage.Save(stream, ImageFormat.Bmp);
        stream.Position = 0;
        byte[] data = new byte[stream.Length];
        stream.Read(data, 0, (int)stream.Length);

        _httpContext.Response.Clear();
        _httpContext.Response.ContentType = "image/jpeg";
        _httpContext.Response.BinaryWrite(data);
    }
}

private Bitmap DetectFace(Bitmap faceImage)
{
    var image = new Image<Bgr, byte>(faceImage);
    var gray = image.Convert<Gray, Byte>();
    string filePath = HttpContext.Current.Server.MapPath("haarcascade_frontalface_default.xml");
    var face = new HaarCascade(filePath);
    MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(face, 1.1, 10, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));
    Image<Gray, byte> result = null;

    foreach (MCvAvgComp f in facesDetected[0])
    {
        //draw the face detected in the 0th (gray) channel with blue color
        image.Draw(f.rect, new Bgr(Color.Blue), 2);
        result = image.Copy(f.rect).Convert<Gray, byte>();
        break;
    }

    if (result != null)
    {
        result = result.Resize(200, 200, INTER.CV_INTER_CUBIC);
        return result.Bitmap;
    }


    return null;
}

public bool IsReusable
{
    get { return false; }
}
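
For reference, here is a minimal, hypothetical way to exercise the handler from a small console app; the handler file name FaceDetect.ashx and the URL are placeholders of mine, not names from the post. Note that AddFace above writes BMP-encoded bytes even though it declares ContentType "image/jpeg", which is why the result is saved with a .bmp extension here.

using System.IO;
using System.Net;

class FaceHandlerSmokeTest
{
    static void Main()
    {
        using (var wc = new WebClient())
        {
            // Request the detected face for image id 1 and save whatever the handler returns.
            byte[] face = wc.DownloadData("http://mypageurl/FaceDetect.ashx?Image=1");
            File.WriteAllBytes("face-1.bmp", face);
        }
    }
}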

You should have a look at another recent question; the two of you might be able to help each other out.

Could you suggest what I should do next for the face recognition part? I'm new to all of this, but by following some tutorials I can now save faces to and retrieve them from the database. Unfortunately the tutorial author never made the follow-up tutorial for it. Here is the tutorial.

I tried saving the image from the fetchedBytes array to a .bmp file after converting it to a Bitmap (the "bmpImage" object), and I also tried saving the byte stream of the captured image during face registration, before it goes into the database. Both produce fine .bmp pictures that open in MSPaint. I also noticed that the exception only appears when more than one image is stored in the database; if I store just one image, no exception occurs. I'm thoroughly confused now. If it helps, here is a screenshot of the part of my program that processes the captured image and stores it in the database (for face registration). Thanks anyway for your answer, Gord ^^

@ABVincita I have updated my answer with a possible workaround.
private void ProcessFrame(object sender, EventArgs arg)
    {
        Image<Bgr, Byte> ImageFrame = capture.QueryFrame();

        Image<Gray, byte> grayframe = ImageFrame.Convert<Gray, byte>();

        MinNeighbors = int.Parse(comboBoxMinNeighbors.Text);
        WindowsSize = int.Parse(textBoxWinSiz.Text);
        ScaleIncreaseRate = Double.Parse(comboBoxMinNeighbors.Text);

        // Run the Haar cascade on the grayscale frame; the trailing [0] selects the
        // detections for the first (and only) channel.
        var faces = grayframe.DetectHaarCascade(haar, ScaleIncreaseRate, MinNeighbors,
                                        HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                                        new Size(WindowsSize, WindowsSize))[0];

        if (faces.Length > 0)
        {
            // Take a GDI+ bitmap copy of the grayscale frame so each detected face can be cropped from it below.
            Bitmap BmpInput = grayframe.ToBitmap();

            Graphics FaceCanvas;

            foreach (var face in faces)
            {
                t = t + 1;
                result = ImageFrame.Copy(face.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                ImageFrame.Draw(face.rect, new Bgr(Color.Red), 2);

                ExtractedFace = new Bitmap(face.rect.Width, face.rect.Height);

                FaceCanvas = Graphics.FromImage(ExtractedFace);

                FaceCanvas.DrawImage(BmpInput, 0, 0, face.rect, GraphicsUnit.Pixel);

                ImageFrame.Draw(face.rect, new Bgr(Color.Red), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    // With at least one stored face available, build an Eigenface recognizer
                    // over the training set and try to label the face just detected.
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                        trainingImages.ToArray(),
                        labels.ToArray(),
                        3000,
                        ref termCrit);
                    try
                    {
                        name = recognizer.Recognize(result).Label; 
                    }
                    catch (Exception error)
                    {
                        MessageBox.Show(error.ToString());
                    }

                    ImageFrame.Draw(name, ref font, new Point(face.rect.X - 2, face.rect.Y - 2), new Bgr(Color.LightGreen));
                }

            }
        }
        CamImageBox.Image = ImageFrame;
    }
name = recognizer.Recognize(result).Label;

trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "\\TrainedFaces\\" + LoadFaces));

MemoryStream stream = new MemoryStream(fetchedBytes);
bmpImage = new Bitmap(stream);
trainingImages.Add(new Emgu.CV.Image<Gray, Byte>(bmpImage));
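
Given the comment above that the exception only appears once more than one image is stored, one thing worth checking (my assumption, not something confirmed in the thread) is GDI+'s documented requirement that the stream passed to new Bitmap(stream) stay open for the whole lifetime of the bitmap. Below is a minimal sketch that copies each fetched image so the training image no longer depends on its MemoryStream; fetchedBytes matches the snippet above, everything else is illustrative.

using System.Drawing;
using System.IO;
using Emgu.CV;
using Emgu.CV.Structure;

static class TrainingImageLoader
{
    // Sketch only: decode the database bytes, copy the bitmap so it is detached from the
    // stream, and hand Emgu a grayscale image it can keep after everything else is disposed.
    public static Image<Gray, byte> FromDatabaseBytes(byte[] fetchedBytes)
    {
        using (var stream = new MemoryStream(fetchedBytes))
        using (var decoded = new Bitmap(stream))
        using (var copy = new Bitmap(decoded))   // full pixel copy, no longer tied to the stream
        {
            // Emgu's Image<,> constructor copies the pixel data again, so the bitmaps
            // and the stream can all be disposed as soon as this returns.
            return new Image<Gray, byte>(copy);
        }
    }
}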