Android OpenCV eye detection


I am using the Android "OpenCV Sample - face-detection" to detect faces, and it works fine. Now I want to combine eye detection with the face detection. Is this possible? If so, can anyone give me some samples for detecting eyes? I have searched on the web but could not find any samples.

You can use this code to detect the eyes (for however many faces are found). Note that it uses Android's built-in android.media.FaceDetector API rather than OpenCV: it returns the midpoint between the eyes and the distance between them, from which the left and right eye positions are estimated.

// code

public void setFace() {
    FaceDetector fd;
    mFaceBitmap = mBitmap.copy(Bitmap.Config.RGB_565, true);
    mFaceWidth = mFaceBitmap.getWidth();
    mFaceHeight = mFaceBitmap.getHeight(); 
    FaceDetector.Face [] faces = new FaceDetector.Face[MAX_FACES];
    PointF eyescenter = new PointF();
    float eyesdist = 0.0f;
    int [] fpx = null;
    int [] fpy = null;

    try {
        Log.e("setFace(): ", "try");
        fd = new FaceDetector(mFaceWidth, mFaceHeight, MAX_FACES);        
        count = fd.findFaces(mFaceBitmap, faces);
    } catch (Exception e) {
        return;
    }

    // check if we detect any faces
    if (count > 0) {
        // two points (left eye and right eye) per detected face
        fpx = new int[count * 2];
        fpy = new int[count * 2];

        for (int i = 0; i < count; i++) { 
            try {
                Log.e("setFace(): ", "loop");
                faces[i].getMidPoint(eyescenter);                  
                eyesdist = faces[i].eyesDistance(); 

                // set up left eye location (index 2*i)
                fpx[2 * i] = (int) (eyescenter.x - eyesdist / 2);
                fpy[2 * i] = (int) eyescenter.y;

                // set up right eye location (index 2*i + 1)
                fpx[2 * i + 1] = (int) (eyescenter.x + eyesdist / 2);
                fpy[2 * i + 1] = (int) eyescenter.y;
            } catch (Exception e) {
                // skip this face if its eye data cannot be read
            }
        } 
        MainActivity.clicked = true;
    }
} 
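
As a follow-up, here is a minimal sketch of how the estimated eye coordinates could be visualised once setFace() has filled fpx/fpy. The drawEyeMarkers helper and the Canvas/Paint setup are illustrative additions, not part of the original answer:

// Hypothetical helper (not in the original answer): draw a marker at each
// estimated eye position computed in setFace() above.
private void drawEyeMarkers(Canvas canvas, int[] fpx, int[] fpy) {
    Paint paint = new Paint();
    paint.setStyle(Paint.Style.STROKE);
    paint.setStrokeWidth(3);
    paint.setColor(Color.RED);
    for (int i = 0; i < fpx.length; i++) {
        canvas.drawCircle(fpx[i], fpy[i], 10, paint); // 10 px radius per eye point
    }
}
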
I think this is possible. You need to load another cascade (a nested cascade) and run detectMultiScale inside the ROI returned by the face cascade. Have a look at the facedetect.cpp sample among the C++ samples.
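
For illustration, a rough sketch of that nested-cascade idea using OpenCV's Java API. The cascade file paths and the gray/rgba Mats are placeholders, not taken from the question's code:

// Sketch: run an eye cascade only inside each rectangle found by the face cascade.
CascadeClassifier faceCascade = new CascadeClassifier("/path/to/lbpcascade_frontalface.xml");
CascadeClassifier eyeCascade  = new CascadeClassifier("/path/to/haarcascade_eye.xml");

MatOfRect faces = new MatOfRect();
faceCascade.detectMultiScale(gray, faces);

for (Rect face : faces.toArray()) {
    Mat faceROI = gray.submat(face);           // search for eyes only inside the face
    MatOfRect eyes = new MatOfRect();
    eyeCascade.detectMultiScale(faceROI, eyes);

    for (Rect eye : eyes.toArray()) {
        // eye coordinates are relative to faceROI, so offset by the face origin
        Core.rectangle(rgba,
                new Point(face.x + eye.x, face.y + eye.y),
                new Point(face.x + eye.x + eye.width, face.y + eye.y + eye.height),
                new Scalar(0, 255, 0, 255), 2);
    }
}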

Yes, you can do this. There is both a generic eye detector and specialised left/right eye detectors.

You can use the same Haar cascade code as for the face, just substituting the eye detector cascade.

In practice I have found it best to try both approaches: start with the generic eye detector, and if nothing is found in a sensible location, fall back to the left-eye and right-eye detectors.
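
A rough sketch of that fallback strategy, assuming the cascades have already been loaded from OpenCV's stock haarcascade_eye.xml, haarcascade_righteye_2splits.xml and haarcascade_lefteye_2splits.xml files (the variable names here are placeholders):

// Sketch: try the generic eye cascade first; if fewer than two eyes are found,
// fall back to the specialised cascades, each searching one half of the face ROI.
MatOfRect eyes = new MatOfRect();
genericEyeCascade.detectMultiScale(faceROI, eyes);

if (eyes.toArray().length < 2) {
    int halfWidth = faceROI.cols() / 2;
    int upperHeight = faceROI.rows() / 2;
    // the person's right eye appears on the left half of the image, and vice versa
    Rect rightHalf = new Rect(0, 0, halfWidth, upperHeight);
    Rect leftHalf  = new Rect(halfWidth, 0, faceROI.cols() - halfWidth, upperHeight);

    MatOfRect rightEye = new MatOfRect();
    MatOfRect leftEye  = new MatOfRect();
    rightEyeCascade.detectMultiScale(faceROI.submat(rightHalf), rightEye);
    leftEyeCascade.detectMultiScale(faceROI.submat(leftHalf), leftEye);
}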


You can have a look at this code:

public FdView(Context context) {
    super(context);
    this.context = context;


    try {
        InputStream is = context.getResources().openRawResource(
                R.raw.lbpcascade_frontalface);
        File cascadeDir = context.getDir("cascade", Context.MODE_PRIVATE);
        mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
        FileOutputStream os = new FileOutputStream(mCascadeFile);

        byte[] buffer = new byte[4096];
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1) {
            os.write(buffer, 0, bytesRead);
        }
        is.close();
        os.close();

        // ------------------------- load right eye classifier //
        // (note: this block loads R.raw.haarcascade_lefteye_2splits,
        //  even though it is saved as haarcascade_eye_right.xml)
        // -----------------------------------
        InputStream iser = context.getResources().openRawResource(
                R.raw.haarcascade_lefteye_2splits);
        File cascadeDirER = context.getDir("cascadeER",
                Context.MODE_PRIVATE);
        File cascadeFileER = new File(cascadeDirER,
                "haarcascade_eye_right.xml");
        FileOutputStream oser = new FileOutputStream(cascadeFileER);

        byte[] bufferER = new byte[4096];
        int bytesReadER;
        while ((bytesReadER = iser.read(bufferER)) != -1) {
            oser.write(bufferER, 0, bytesReadER);
        }
        iser.close();
        oser.close();
        // ----------------------------------------------------------------------------------------------------

        // --------------------------------- load left eye classifier //
        // ------------------------------------
        InputStream isel = context.getResources().openRawResource(
                R.raw.haarcascade_lefteye_2splits);
        File cascadeDirEL = context.getDir("cascadeEL",
                Context.MODE_PRIVATE);
        File cascadeFileEL = new File(cascadeDirEL,
                "haarcascade_eye_left.xml");
        FileOutputStream osel = new FileOutputStream(cascadeFileEL);

        byte[] bufferEL = new byte[4096];
        int bytesReadEL;
        while ((bytesReadEL = isel.read(bufferEL)) != -1) {
            osel.write(bufferEL, 0, bytesReadEL);
        }
        isel.close();
        osel.close();

        // ------------------------------------------------------------------------------------------------------

        mJavaDetector = new CascadeClassifier(
                mCascadeFile.getAbsolutePath());
        mCascadeER = new CascadeClassifier(cascadeFileER.getAbsolutePath());
        mCascadeEL = new CascadeClassifier(cascadeFileEL.getAbsolutePath());
        if (mJavaDetector.empty() || mCascadeER.empty()
                || mCascadeEL.empty()) {
            Log.e(TAG, "Failed to load cascade classifier");
            mJavaDetector = null;
            mCascadeER = null;
            mCascadeEL = null;
        } else
            Log.i(TAG,
                    "Loaded cascade classifier from "
                            + mCascadeFile.getAbsolutePath());

        mNativeDetector = new DetectionBasedTracker(
                mCascadeFile.getAbsolutePath(), 0);

        cascadeDir.delete();
        cascadeFileER.delete();
        cascadeDirER.delete();
        cascadeFileEL.delete();
        cascadeDirEL.delete();

    } catch (IOException e) {
        e.printStackTrace();
        Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
    }
}

@Override
public void surfaceCreated(SurfaceHolder holder) {
    synchronized (this) {
        // initialize Mats before usage
        mGray = new Mat();
        mRgba = new Mat();
    }

    super.surfaceCreated(holder);
}

@Override
protected Bitmap processFrame(VideoCapture capture) {
    capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

    if (mAbsoluteFaceSize == 0) {
        int height = mGray.rows();
        if (Math.round(height * mRelativeFaceSize) > 0)

        {
            mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
        }
        mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
    }

    MatOfRect faces = new MatOfRect();

    if (mDetectorType == JAVA_DETECTOR) {
        if (mJavaDetector != null)
            mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2,
                    new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
                    new Size());

        /*
         * if (mZoomCorner == null || mZoomWindow == null)
         * CreateAuxiliaryMats();
         */

        Rect[] facesArray = faces.toArray();

        for (int i = 0; i < facesArray.length; i++) {

            Rect r = facesArray[i];

            Core.rectangle(mGray, r.tl(), r.br(),new Scalar(255, 0, 255, 0), 3);

            Core.rectangle(mRgba, r.tl(), r.br(),new Scalar(46, 139, 87, 5), 3);

            //eyearea = new Rect(r.x + r.width / 8,(int) (r.y + (r.height / 4.5)), r.width - 10 * r.width/ 8, (int) (r.height / 3.0));

        //  Core.rectangle(mRgba, eyearea.tl(), eyearea.br(), new Scalar(255, 0, 0, 255), 3);

            Rect eyearea_right = new Rect(r.x + r.width / 16,
                    (int) (r.y + (r.height / 4.5)),
                    (r.width - 2 * r.width / 16) / 2,(int) (r.height / 3.0));

            Rect eyearea_left = new Rect(
                    r.x + r.width / 16 + (r.width - 2 * r.width / 16) / 2,
                    (int) (r.y + (r.height / 4.5)),
                    (r.width - 2 * r.width / 16) / 2, (int) (r.height / 3.0));

            // outer recTangles
            //Core.rectangle(mRgba, eyearea_left.tl(), eyearea_left.br(),new Scalar(0, 255, 255, 255), 3);

            //Core.rectangle(mRgba, eyearea_right.tl(), eyearea_right.br(),new Scalar(0, 255, 255, 255), 3);

            teplateR = get_template(mCascadeER, eyearea_right, 3);

            teplateL = get_template(mCascadeEL, eyearea_left, 3);

            learn_frames++;

            /*
             * match_value = match_eye(eyearea_right, teplateR,
             * FdActivity.method);
             * 
             * match_value = match_eye(eyearea_left, teplateL,
             * FdActivity.method); ; }
             */

            /************************************************************************************************/                  

            /*
             * Imgproc.resize(mRgba.submat(eyearea_left), mZoomWindow2,
             * mZoomWindow2.size());
             * Imgproc.resize(mRgba.submat(eyearea_right), mZoomWindow,
             * mZoomWindow.size());
             */

        }
    } else if (mDetectorType == NATIVE_DETECTOR) {
        if (mNativeDetector != null)
            mNativeDetector.detect(mGray, faces);
    } else {
        Log.e(TAG, "Detection method is not selected!");
    }

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
        Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(),
                FACE_RECT_COLOR, 3);

    Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(),
            Bitmap.Config.ARGB_8888);

    try {
        Utils.matToBitmap(mRgba, bmp);
    } catch (Exception e) {
        Log.e(TAG,
                "Utils.matToBitmap() throws an exception: "
                        + e.getMessage());
        bmp.recycle();
        bmp = null;
    }

    return bmp;
}
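
The get_template() helper called in processFrame() above is not shown in the answer. In the OpenCV-for-Android eye-tracking samples this step typically runs the eye cascade inside the given area and crops a small square template around the detected eye centre; the sketch below follows that pattern, but its exact signature and behaviour are assumptions, not the original helper:

private Mat get_template(CascadeClassifier classifier, Rect area, int size) {
    Mat template = new Mat();
    Mat mROI = mGray.submat(area);
    MatOfRect eyes = new MatOfRect();

    // look for the single biggest eye candidate inside the given area
    classifier.detectMultiScale(mROI, eyes, 1.15, 2,
            Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE,
            new Size(30, 30), new Size());

    Rect[] eyesArray = eyes.toArray();
    if (eyesArray.length == 0)
        return template; // nothing found: return an empty Mat

    Rect e = eyesArray[0];
    // eye centre in full-image coordinates
    int cx = area.x + e.x + e.width / 2;
    int cy = area.y + e.y + e.height / 2;

    // crop a size x size template around the eye centre
    Rect eye_template = new Rect(cx - size / 2, cy - size / 2, size, size);
    return mGray.submat(eye_template).clone();
}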