How do I run pupil detection with OpenCV in the background on Android (Java)?

Tags: java, android, opencv, image-processing, android-camera

I am developing a scientific Android application that uses the OpenCV library to detect pupil movement. The code below works fine when it runs inside the application's Activity. However, the goal of the project is for the app to run this image processing in the background so that a VR scene can run in the foreground: the VR scene in the foreground displays content while the pupil-movement detector runs behind it.

The problem: I am trying to use the same functionality from inside an Android Service, but there seems to be no way to do it.

I have been searching for several days and have concluded that I can neither make CameraBridgeViewBase run in the background nor grab frames from the camera without it.
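
To make the goal concrete, here is a minimal sketch of the kind of Service-side frame grabbing I am aiming for. This is only my assumption of how it might look, not working code from the project: it relies on the legacy android.hardware.Camera API and a dummy SurfaceTexture so that no visible preview is required, and PupilService as well as the commented-out processFrame() call are placeholder names. The idea would be to bypass CameraBridgeViewBase entirely and convert the NV21 preview buffer into an RGBA Mat by hand:

import android.app.Service;
import android.content.Intent;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.IBinder;
import android.util.Log;

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

import java.io.IOException;

// Hypothetical sketch: grab camera frames in a Service without CameraBridgeViewBase.
// Assumes OpenCV has already been initialized elsewhere (e.g. via OpenCVLoader).
public class PupilService extends Service implements Camera.PreviewCallback {

    private Camera mCamera;
    private SurfaceTexture mDummyTexture; // off-screen preview target, nothing is drawn

    @Override
    public void onCreate() {
        super.onCreate();
        mCamera = Camera.open(1); // front camera, as in the Activity
        try {
            mDummyTexture = new SurfaceTexture(10); // arbitrary texture name
            mCamera.setPreviewTexture(mDummyTexture);
        } catch (IOException e) {
            Log.e("PupilService", "Failed to set dummy preview texture", e);
        }
        Camera.Parameters params = mCamera.getParameters();
        params.setPreviewSize(640, 360); // same frame size as in the Activity
        mCamera.setParameters(params);
        mCamera.setPreviewCallback(this);
        mCamera.startPreview();
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        Camera.Size size = camera.getParameters().getPreviewSize();
        // NV21 buffer -> single-channel Mat -> RGBA Mat
        Mat yuv = new Mat(size.height + size.height / 2, size.width, CvType.CV_8UC1);
        yuv.put(0, 0, data);
        Mat rgba = new Mat();
        Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV2RGBA_NV21);
        // processFrame(rgba); // placeholder: the detectEye()/detectPupil() pipeline would go here
        yuv.release();
        rgba.release();
    }

    @Override
    public void onDestroy() {
        if (mCamera != null) {
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mCamera.release();
        }
        super.onDestroy();
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }
}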

Here is the Activity code:

public class MainActivity extends Activity implements CameraBridgeViewBase.CvCameraViewListener2 {

private static final String TAG = "pupilDetector";

public static final int JAVA_DETECTOR = 0;

public static Mat mRgba;
public static Mat mGray;

public static CascadeClassifier mJavaDetectorLeftEye;

public static CameraBridgeViewBase mOpenCvCameraView;

public static double xCenter = -1;
public static double yCenter = -1;

public static Mat mIntermediateMat;
public static Mat hierarchy;

public static boolean captureFrame;

public static int count = 0;

public static Mat mZoomWindow;


private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
    @Override
    public void onManagerConnected(int status) {
        switch (status) {
            case LoaderCallbackInterface.SUCCESS: {
                Log.i(TAG, "OpenCV loaded successfully");

                // load cascade file from application resources
                File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);

                mJavaDetectorLeftEye = loadClassifier(R.raw.haarcascade_lefteye_2splits, "haarcascade_eye_left.xml",
                        cascadeDir);

                cascadeDir.delete();
                //Use front Camera
                mOpenCvCameraView.setCameraIndex(1);
                mOpenCvCameraView.enableFpsMeter();
                mOpenCvCameraView.enableView();
                mOpenCvCameraView.setMaxFrameSize(640, 360); //best resolution for real time processing on Lenovo K6
            }
            break;
            default: {
                super.onManagerConnected(status);
            }
            break;
        }
    }
};

@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    setContentView(R.layout.activity_main);

    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.eyes_activity_surface_view);
    mOpenCvCameraView.setCvCameraViewListener(this);


}

@Override
public void onPause() {
    super.onPause();
    if (mOpenCvCameraView != null)
        mOpenCvCameraView.disableView();
}

@Override
public void onResume() {
    super.onResume();
    if (!OpenCVLoader.initDebug()) {
        Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_11, this, mLoaderCallback);
    } else {
        Log.d(TAG, "OpenCV library found inside package. Using it!");
        mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
    }
}

@Override
protected void onStop() {
    super.onStop();
}

public void onDestroy() {
    super.onDestroy();
    mOpenCvCameraView.disableView();
}

public void onCameraViewStarted(int width, int height) {
    mRgba = new Mat(height, width, CvType.CV_8UC4);
    mIntermediateMat = new Mat(height, width, CvType.CV_8UC4);
    mGray = new Mat(height, width, CvType.CV_8UC1);
    hierarchy = new Mat();
}

public void onCameraViewStopped() {
    mGray.release();
    mRgba.release();

    mIntermediateMat.release();
    hierarchy.release();

    if (mZoomWindow != null)
        mZoomWindow.release();
}

public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    if (mZoomWindow == null)
        createAuxiliaryMats();

    Rect area = new Rect(new Point(20, 20), new Point(mGray.width() - 20, mGray.height() - 20));
    detectEye(mJavaDetectorLeftEye, area, 100);

    if (captureFrame) {
        saveImage();
        captureFrame = false;
    }

    return mRgba;
}

private void createAuxiliaryMats() {
    if (mGray.empty())
        return;

    int rows = mGray.rows();
    int cols = mGray.cols();

    if (mZoomWindow == null) {
        mZoomWindow = mRgba.submat(rows / 2 + rows / 10, rows, cols / 2 + cols / 10, cols);
    }

}

private Mat detectEye(CascadeClassifier clasificator, Rect area, int size) {
    Mat template = new Mat();
    Mat mROI = mGray.submat(area);
    MatOfRect eyes = new MatOfRect();
    Point iris = new Point();

    //isolate the eyes first
    clasificator.detectMultiScale(mROI, eyes, 1.15, 2, Objdetect.CASCADE_FIND_BIGGEST_OBJECT
            | Objdetect.CASCADE_SCALE_IMAGE, new Size(30, 30), new Size());

    Rect[] eyesArray = eyes.toArray();
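    // note: only the first detected eye is processed; the method returns from inside the loop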
    for (int i = 0; i < eyesArray.length; ) {
        Rect e = eyesArray[i];
        e.x = area.x + e.x;
        e.y = area.y + e.y;
        Rect eye_only_rectangle = new Rect((int) e.tl().x, (int) (e.tl().y + e.height * 0.4), (int) e.width,
                (int) (e.height * 0.6));

        Core.MinMaxLocResult mmG = Core.minMaxLoc(mROI);
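        // mmG.minLoc is the darkest point of the grayscale eye ROI and is used as a rough iris position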

        iris.x = mmG.minLoc.x + eye_only_rectangle.x;
        iris.y = mmG.minLoc.y + eye_only_rectangle.y;
        Core.rectangle(mRgba, eye_only_rectangle.tl(), eye_only_rectangle.br(), new Scalar(255, 255, 0, 255), 2);

        //find the pupil inside the eye rect
        detectPupil(eye_only_rectangle);

        return template;
    }

    return template;
}

protected void detectPupil(Rect eyeRect) {
    hierarchy = new Mat();

    Mat img = mRgba.submat(eyeRect);
    Mat img_hue = new Mat();

    Mat circles = new Mat();

    //  Convert it to hue, convert to range color, and blur to remove false
    // circles
    Imgproc.cvtColor(img, img_hue, Imgproc.COLOR_RGB2HSV);

    Core.inRange(img_hue, new Scalar(0, 0, 0), new Scalar(255, 255, 32), img_hue);
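    // keep only very dark pixels (HSV value <= 32), which should include the pupil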

    Imgproc.erode(img_hue, img_hue, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3)));

    Imgproc.dilate(img_hue, img_hue, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(6, 6)));

    Imgproc.Canny(img_hue, img_hue, 170, 220);
    Imgproc.GaussianBlur(img_hue, img_hue, new Size(9, 9), 2, 2);

    // Apply Hough Transform to find the circles
    Imgproc.HoughCircles(img_hue, circles, Imgproc.CV_HOUGH_GRADIENT, 3, img_hue.rows(), 200, 75, 10, 25);
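    // dp=3, minDist=image height (so effectively one circle per ROI), Canny threshold 200, accumulator threshold 75, radius 10-25 px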

    if (circles.cols() > 0)
        for (int x = 0; x < circles.cols(); x++) {
            double vCircle[] = circles.get(0, x);

            if (vCircle == null)
                break;

            Point pt = new Point(Math.round(vCircle[0]), Math.round(vCircle[1]));
            int radius = (int) Math.round(vCircle[2]);

            // draw the found circle
            Core.circle(img, pt, radius, new Scalar(0, 255, 0), 2);
            Core.circle(img, pt, 3, new Scalar(0, 0, 255), 2);

            Log.i("Coo", String.valueOf(pt.x) + ", " + String.valueOf(pt.y));

        }

}

private CascadeClassifier loadClassifier(int rawResId, String filename, File cascadeDir) {
    CascadeClassifier classifier = null;
    try {
        InputStream is = getResources().openRawResource(rawResId);
        File cascadeFile = new File(cascadeDir, filename);
        FileOutputStream os = new FileOutputStream(cascadeFile);

        byte[] buffer = new byte[4096];
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1) {
            os.write(buffer, 0, bytesRead);
        }
        is.close();
        os.close();

        classifier = new CascadeClassifier(cascadeFile.getAbsolutePath());
        if (classifier.empty()) {
            Log.e(TAG, "Failed to load cascade classifier");
            classifier = null;
        } else
            Log.i(TAG, "Loaded cascade classifier from " + cascadeFile.getAbsolutePath());
    } catch (IOException e) {
        e.printStackTrace();
        Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
    }

    return classifier;
}

public void onRecreateClick(View v) {
    captureFrame = true;
}


public void saveImage() {
    Mat mIntermediateMat = new Mat();
    Imgproc.cvtColor(mRgba, mIntermediateMat, Imgproc.COLOR_RGBA2BGR, 3);

    File path = new File(Environment.getExternalStorageDirectory() + "/OpenCV/");
    path.mkdirs();
    File file = new File(path, "image" + count + ".png");
    count++;

    String filename = file.toString();
    Boolean bool = Highgui.imwrite(filename, mIntermediateMat);

    if (bool)
        Log.i(TAG, "SUCCESS writing image to external storage");
    else
        Log.i(TAG, "Fail writing image to external storage");
}}