How to detect a blink with the Google Vision API on Android?

I am using the Vision API for face detection, and I now want to implement blink detection. The problem is that the Vision API also detects a "blink" in a (non-live) image of a person, i.e. a photo.

In addition, I am using a Tracker to track the eye state over time, in order to detect the sequence of events that indicates a blink of the left eye:

left eye open -> left eye closed -> left eye open

The GraphicFaceTracker class is defined as follows:

private class GraphicFaceTracker extends Tracker<Face> {
        private GraphicOverlay mOverlay;
        private FaceGraphic mFaceGraphic;
        private Context context;

        GraphicFaceTracker(Context context, GraphicOverlay overlay) {
            mOverlay = overlay;
            this.context = context;
            mFaceGraphic = new FaceGraphic(overlay);
        }

        private final float OPEN_THRESHOLD = 0.85f;
        private final float CLOSE_THRESHOLD = 0.4f;

        private int state = 0;


        void blink(float value, final int eyeNo, String whichEye) {
            switch (state) {
                case 0:
                    if (value > OPEN_THRESHOLD) {
                        // Both eyes are initially open
                        state = 1;
                    }
                    break;

                case 1:
                    if (value < CLOSE_THRESHOLD) {
                        // Both eyes become closed
                        state = 2;
                    }
                    break;

                case 2:
                    if (value > OPEN_THRESHOLD) {
                        // Both eyes are open again
                        Log.i("BlinkTracker", "blink occurred!");

                        mCameraSource.takePicture(null, new CameraSource.PictureCallback() {
                            @Override
                            public void onPictureTaken(byte[] bytes) {
                                Bitmap bmp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
                                Log.d("BITMAP", bmp.getWidth() + "x" + bmp.getHeight());
                                System.out.println(bmp.getWidth() + "x" + bmp.getHeight());
                            }
                        });
                        state = 0;
                    }
                    break;
            }


        }

        /**
         * Start tracking the detected face instance within the face overlay.
         */
        @Override
        public void onNewItem(int faceId, Face item) {
            mFaceGraphic.setId(faceId);
        }

        /**
         * Update the position/characteristics of the face within the overlay.
         */
        @Override
        public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
            mOverlay.add(mFaceGraphic);
            mFaceGraphic.updateFace(face);

            float left = face.getIsLeftEyeOpenProbability();
            float right = face.getIsRightEyeOpenProbability();
            if (left == Face.UNCOMPUTED_PROBABILITY) {
                // The left eye was not detected.
                return;
            }
            blink(left, 0, "left");

            // Note: the right-eye probability is checked here but never used.
            if (right == Face.UNCOMPUTED_PROBABILITY) {
                return;
            }
        }
}
The tracker is then added as a processor that receives updates for the face from the detector over time. For example, this configuration tracks whether the largest face in view blinks:

Tracker<Face> tracker = new GraphicFaceTracker(this, mGraphicOverlay);
detector.setProcessor(new LargestFaceFocusingProcessor.Builder(detector, tracker).build());
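LargestFaceFocusingProcessor picks the most prominent face and keeps tracking only that one, ignoring other faces in the frame, which is usually what you want for a liveness check.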

But the code above detects these "blinks" in a photo of a person, and a photo of a person cannot blink. How can I detect a blink through the camera instead?

I think this looks right. If you associate the detector with a running CameraSource instance (as in the official face-tracker sample), it will track the eye motion coming from the camera rather than from a static photo.
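A minimal sketch of that association, using the GraphicFaceTracker from the question; context, overlay, and surfaceView are placeholder names, not from the original post:

FaceDetector detector = new FaceDetector.Builder(context)
        .setTrackingEnabled(true)
        .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
        .build();
detector.setProcessor(new LargestFaceFocusingProcessor.Builder(
        detector, new GraphicFaceTracker(context, overlay)).build());

CameraSource cameraSource = new CameraSource.Builder(context, detector)
        .setFacing(CameraSource.CAMERA_FACING_FRONT)
        .setRequestedPreviewSize(640, 480)
        .setRequestedFps(30.0f)
        .build();
try {
    cameraSource.start(surfaceView.getHolder()); // needs the CAMERA runtime permission
} catch (IOException e) {
    e.printStackTrace();
}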

I also think you could change the onUpdate code slightly to make the blink-threshold decision more reliable:

    @Override
    public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
        mOverlay.add(mFaceGraphic);
        mFaceGraphic.updateFace(face);

        float left = face.getIsLeftEyeOpenProbability();
        float right = face.getIsRightEyeOpenProbability();
        if ((left == Face.UNCOMPUTED_PROBABILITY) ||
            (right == Face.UNCOMPUTED_PROBABILITY)) {
            // One of the eyes was not detected.
            return;
        }

        float value = Math.min(left, right);
        blink(value);
    }
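Taking Math.min of the two probabilities treats the face as "closed" as soon as either eye drops below the close threshold, and as "open" only once both eyes are clearly open again, which makes the state machine more robust to noisy per-eye scores.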

You can pass the detector into the camera source and handle the blink detection from the surface view:

public class LivelinessScanFragment extends Fragment {

    SurfaceView cameraView;
    CameraSource cameraSource;
    final int RequestCameraPermissionID = 1001;
    FaceDetector detector;

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {

        switch (requestCode) {
            case RequestCameraPermissionID: {
                if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                    if (ActivityCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                        return;
                    }
                    try {
                        cameraSource.start(cameraView.getHolder());
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }


    public LivelinessScanFragment() {
        // Required empty public constructor
    }


    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {

            // Inflate the layout for this fragment
            View rootView = inflater.inflate(R.layout.fragment_liveliness_scan, container, false);



            cameraView = (SurfaceView)rootView.findViewById(R.id.surface_view);

            detector = new FaceDetector.Builder(getActivity())
                .setProminentFaceOnly(true) // optimize for single, relatively large face
                .setTrackingEnabled(true) // enable face tracking
                .setClassificationType(/* eyes open and smile */ FaceDetector.ALL_CLASSIFICATIONS)
                .setMode(FaceDetector.FAST_MODE) // for one face this is OK
                .build();


            if (!detector.isOperational()) {
                Log.w("MainActivity", "Detector Dependencies are not yet available");
            } else {
                cameraSource = new CameraSource.Builder(Application.getContext(), detector)
                        .setFacing(CameraSource.CAMERA_FACING_FRONT)
                        .setRequestedFps(2.0f)
                        .setRequestedPreviewSize(1280, 1024)
                        .setAutoFocusEnabled(true)
                        .build();

                cameraView.getHolder().addCallback(new SurfaceHolder.Callback() {
                    @Override
                    public void surfaceCreated(SurfaceHolder surfaceHolder) {
                        try {
                            if (ActivityCompat.checkSelfPermission(Application.getContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {

                                ActivityCompat.requestPermissions(getActivity(),
                                        new String[]{Manifest.permission.CAMERA}, RequestCameraPermissionID);
                                return;
                            }
                            // Set the processor before starting the camera so no frame
                            // reaches the detector without a processor attached.
                            detector.setProcessor(
                                    new LargestFaceFocusingProcessor(detector, new GraphicFaceTracker()));
                            cameraSource.start(cameraView.getHolder());

                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }

                    @Override
                    public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {

                    }

                    @Override
                    public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
                        cameraSource.stop();
                    }
                });


            }

            return rootView;
        }

    private class GraphicFaceTracker extends Tracker<Face> {

        private final float OPEN_THRESHOLD = 0.85f;
        private final float CLOSE_THRESHOLD = 0.4f;

        private int state = 0;


        void blink(float value) {
            switch (state) {
                case 0:
                    if (value > OPEN_THRESHOLD) {
                        // Both eyes are initially open
                        state = 1;
                    }
                    break;

                case 1:
                    if (value < CLOSE_THRESHOLD) {
                        // Both eyes become closed
                        state = 2;
                    }
                    break;

                case 2:
                    if (value > OPEN_THRESHOLD) {
                        // Both eyes are open again
                        Log.i("BlinkTracker", "blink occurred!");
                        state = 0;

                    }
                    break;
            }


        }

        /**
         * Update the position/characteristics of the face within the overlay.
         */
        @Override
        public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {

            float left = face.getIsLeftEyeOpenProbability();
            float right = face.getIsRightEyeOpenProbability();
            if ((left == Face.UNCOMPUTED_PROBABILITY) ||
                    (right == Face.UNCOMPUTED_PROBABILITY)) {
                // One of the eyes was not detected.
                return;
            }

            float value = Math.min(left, right);
            blink(value);
        }
    }


}
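Note that this assumes the CAMERA permission is declared in AndroidManifest.xml and that the play-services-vision library (com.google.android.gms:play-services-vision) is on the classpath. Application.getContext() here is a custom static helper, not a framework API, so substitute your own way of obtaining a Context if you do not have one.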
You can also check each eye's score individually. Note that Face.UNCOMPUTED_PROBABILITY only means the classifier could not compute a score for that eye (for example, when the eye is not visible); it does not tell you whether the eye is open, so compare the score against a threshold instead:

float leftOpenScore = face.getIsLeftEyeOpenProbability();
if (leftOpenScore != Face.UNCOMPUTED_PROBABILITY && leftOpenScore > OPEN_THRESHOLD) {
    // left eye is open
} else {
    // left eye is closed (or was not detected)
}

float rightOpenScore = face.getIsRightEyeOpenProbability();
if (rightOpenScore != Face.UNCOMPUTED_PROBABILITY && rightOpenScore > OPEN_THRESHOLD) {
    // right eye is open
} else {
    // right eye is closed (or was not detected)
}
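If you want to tell a left-eye wink apart from a right-eye wink (which the blink(value, eyeNo, whichEye) signature in the question hints at), one option is to run one state machine per eye. A minimal sketch, not from the original post:

private static class BlinkStateMachine {
    private static final float OPEN_THRESHOLD = 0.85f;
    private static final float CLOSE_THRESHOLD = 0.4f;
    private int state = 0;

    // Returns true exactly once per completed open -> closed -> open cycle.
    boolean update(float openProbability) {
        switch (state) {
            case 0: // waiting for the eye to be open
                if (openProbability > OPEN_THRESHOLD) state = 1;
                return false;
            case 1: // eye was open; waiting for it to close
                if (openProbability < CLOSE_THRESHOLD) state = 2;
                return false;
            default: // eye was closed; waiting for it to open again
                if (openProbability > OPEN_THRESHOLD) {
                    state = 0;
                    return true;
                }
                return false;
        }
    }
}

private final BlinkStateMachine leftEye = new BlinkStateMachine();
private final BlinkStateMachine rightEye = new BlinkStateMachine();

// Inside onUpdate, after the UNCOMPUTED_PROBABILITY checks:
// if (leftEye.update(left)) Log.i("BlinkTracker", "left eye blinked");
// if (rightEye.update(right)) Log.i("BlinkTracker", "right eye blinked");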