Warning: file_get_contents(/data/phpspider/zhask/data//catemap/2/batch-file/6.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Android studio 失控线程的问题_Android Studio - Fatal编程技术网

Android studio 失控线程的问题

Android studio 失控线程的问题,android-studio,Android Studio,我对安卓工作室还很陌生。我注意到我的程序有一个非常严重的性能问题,我相信在我每次运行应用程序后,它的速度会减慢。我想我有一个失控的线程,我会附上照片在我的职位结束。我真的需要一些帮助。第一张图片显示线程的示例,然后第二张图片显示等待5分钟左右后的线程。我附上了两个密码。CameraSurfaceView在FaceDetectionThread创建线程时运行代码 package com.example.phliip_vision; import java.util.ArrayList; impo

我对安卓工作室还很陌生。我注意到我的程序有一个非常严重的性能问题,我相信在我每次运行应用程序后,它的速度会减慢。我想我有一个失控的线程,我会附上照片在我的职位结束。我真的需要一些帮助。第一张图片显示线程的示例,然后第二张图片显示等待5分钟左右后的线程。我附上了两个密码。CameraSurfaceView在FaceDetectionThread创建线程时运行代码

package com.example.phliip_vision;

import java.util.ArrayList;
import java.util.List;

import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PointF;
import android.graphics.Rect;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.Size;
import android.media.FaceDetector;
import android.media.FaceDetector.Face;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;

import com.example.phliip_vision.Point;
import com.example.phliip_vision.MeasurementStepMessage;
import com.example.phliip_vision.MessageHUB;
import com.example.phliip_vision.Util;

public class CameraSurfaceView extends SurfaceView implements Callback,
        Camera.PreviewCallback {

    /** Distance to the calibration target (a piece of A4 paper) in mm. */
    public static final int CALIBRATION_DISTANCE_A4_MM = 294;

    /** Number of frames averaged while calibrating. */
    public static final int CALIBRATION_MEASUREMENTS = 10;

    /** Number of recent points averaged during normal measurement. */
    public static final int AVERAGE_THREASHHOLD = 5;
    private static final String TAG = "CameraSurfaceView";

    /**
     * Measured distance at calibration point
     */
    private float _distanceAtCalibrationPoint = -1;

    private float _currentAvgEyeDistance = -1;

    // private int _facesFoundInMeasurement = -1;

    /**
     * in cm
     */
    private float _currentDistanceToFace = -1;

    private final SurfaceHolder mHolder;

    private Camera mCamera;

    private Face _foundFace = null;

    private int _threashold = CALIBRATION_MEASUREMENTS;

    private FaceDetectionThread _currentFaceDetectionThread;

    private List<Point> _points;

    protected final Paint _middlePointColor = new Paint();
    protected final Paint _eyeColor = new Paint();

    private Size _previewSize;

    // private boolean _measurementStartet = false;
    private boolean _calibrated = false;
    private boolean _calibrating = false;
    private int _calibrationsLeft = -1;

    public CameraSurfaceView(final Context context, final AttributeSet attrs) {
        super(context, attrs);
        _middlePointColor.setARGB(100, 200, 0, 0);
        _middlePointColor.setStyle(Paint.Style.FILL);
        _middlePointColor.setStrokeWidth(2);

        _eyeColor.setColor(Color.GREEN);

        mHolder = getHolder();
        mHolder.addCallback(this);
    }

    /**
     * Attaches the camera this view previews from and enables auto-focus
     * when the hardware supports it. Pass {@code null} to detach.
     */
    public void setCamera(final Camera camera) {
        mCamera = camera;

        if (mCamera != null) {
            requestLayout();
            Log.d(TAG, "mCamera RANNNNNNN!!!!");
            Camera.Parameters params = mCamera.getParameters();
            camera.setDisplayOrientation(90);
            List<String> focusModes = params.getSupportedFocusModes();
            if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
                Log.d(TAG, "FOCUS_MODE_AUTO RANNNNNNN!!!!");
                // set the focus mode
                params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
                // set Camera parameters
                mCamera.setParameters(params);
            }
        }
    }

    /**
     * Variables for the onDraw method, in order to prevent variable allocation
     * to slow down the sometimes heavily called onDraw method
     */
    private final PointF _middlePoint = new PointF();
    private final Rect _trackingRectangle = new Rect();

    private final static int RECTANGLE_SIZE = 20;
    private boolean _showEyes = false;
    private boolean _showTracking = true;

    @SuppressLint("DrawAllocation")
    @Override
    protected void onDraw(final Canvas canvas) {
        // super.onDraw(canvas);

        if (_foundFace != null) {

            _foundFace.getMidPoint(_middlePoint);
            Log.d(TAG, "_middlePoint RANNNNNNN!!!!");
            Log.i("Camera", _middlePoint.x + " : " + _middlePoint.y);

            // portrait mode! preview buffer is landscape, so the axes swap.
            float heightRatio = getHeight() / (float) _previewSize.width;
            float widthRatio = getWidth() / (float) _previewSize.height;

            Log.i("Drawcall", _middlePoint.x + " : " + _middlePoint.y);

            int realX = (int) (_middlePoint.x * widthRatio);
            int realY = (int) (_middlePoint.y * heightRatio);

            Log.i("Drawcall", "Real :" + realX + " : " + realY);
            int halfEyeDist = (int) (widthRatio * _foundFace.eyesDistance() / 2);

            if (_showTracking) {
                // Middle point
                Log.d(TAG, "_showTracking RANNNNNNN!!!!");
                _trackingRectangle.left = realX - RECTANGLE_SIZE;
                _trackingRectangle.top = realY - RECTANGLE_SIZE;
                _trackingRectangle.right = realX + RECTANGLE_SIZE;
                _trackingRectangle.bottom = realY + RECTANGLE_SIZE;
                canvas.drawRect(_trackingRectangle, _middlePointColor);
            }

            if (_showEyes) {
                // Left eye
                Log.d(TAG, "_showEyes RANNNNNNN!!!!");
                _trackingRectangle.left = realX - halfEyeDist - RECTANGLE_SIZE;
                _trackingRectangle.top = realY - RECTANGLE_SIZE;
                _trackingRectangle.right = realX - halfEyeDist + RECTANGLE_SIZE;
                _trackingRectangle.bottom = realY + RECTANGLE_SIZE;
                canvas.drawRect(_trackingRectangle, _eyeColor);

                // Right eye
                _trackingRectangle.left = realX + halfEyeDist - RECTANGLE_SIZE;
                _trackingRectangle.top = realY - RECTANGLE_SIZE;
                _trackingRectangle.right = realX + halfEyeDist + RECTANGLE_SIZE;
                _trackingRectangle.bottom = realY + RECTANGLE_SIZE;
                canvas.drawRect(_trackingRectangle, _eyeColor);
            }
        }
    }

    /** Resets all calibration/measurement state to its initial values. */
    public void reset() {
        Log.d(TAG, "reset RANNNNNNN!!!!");
        _distanceAtCalibrationPoint = -1;
        _currentAvgEyeDistance = -1;
        _calibrated = false;
        _calibrating = false;
        _calibrationsLeft = -1;
    }

    /**
     * Sets this current EYE distance to be the distance of a peace of a4 paper
     * e.g. 29,7cm
     */
    public void calibrate() {
        Log.d(TAG, "calibrate RANNNNNNN!!!!");
        if (!_calibrating || !_calibrated) {
            _points = new ArrayList<>();
            _calibrating = true;
            _calibrationsLeft = CALIBRATION_MEASUREMENTS;
            _threashold = CALIBRATION_MEASUREMENTS;
        }
    }

    /** Finalizes calibration and notifies listeners via the MessageHUB. */
    private void doneCalibrating() {
        Log.d(TAG, "doneCalibrating RANNNNNNN!!!!");
        _calibrated = true;
        _calibrating = false;
        _currentFaceDetectionThread = null;
        // _measurementStartet = false;

        _threashold = AVERAGE_THREASHHOLD;

        _distanceAtCalibrationPoint = _currentAvgEyeDistance;
        MessageHUB.get().sendMessage(MessageHUB.DONE_CALIBRATION, null);
    }

    /** @return true while calibrating or once calibration is complete. */
    public boolean isCalibrated() {
        Log.d(TAG, "isCalibrated RANNNNNNN!!!!");
        return _calibrated || _calibrating;
    }

    /** Toggles drawing of the face mid-point rectangle. */
    public void showMiddleEye(final boolean on) {
        Log.d(TAG, "showMiddleEye RANNNNNNN!!!!");
        _showTracking = on;
    }

    /** Toggles drawing of the two eye rectangles. */
    public void showEyePoints(final boolean on) {
        Log.d(TAG, "showEyePoints RANNNNNNN!!!!");
        _showEyes = on;
    }

    /**
     * Folds the face found by the last detection thread into the rolling
     * average and broadcasts a MEASUREMENT_STEP message. No-op when no face
     * was detected in the frame.
     */
    private void updateMeasurement(final FaceDetector.Face currentFace) {
        if (currentFace == null) {
            Log.d(TAG, "updateMeasurement RANNNNNNN!!!!");
            // _facesFoundInMeasurement--;
            return;
        }

        _foundFace = _currentFaceDetectionThread.getCurrentFace();

        _points.add(new Point(_foundFace.eyesDistance(),
                CALIBRATION_DISTANCE_A4_MM
                        * (_distanceAtCalibrationPoint / _foundFace
                        .eyesDistance())));

        // Keep only the most recent _threashold samples in the average.
        while (_points.size() > _threashold) {
            _points.remove(0);
            Log.d(TAG, "Removing points RANNNNNNN!!!!");
        }

        float sum = 0;
        for (Point p : _points) {
            sum += p.getEyeDistance();
            Log.d(TAG, "adding points RANNNNNNN!!!!");
        }

        _currentAvgEyeDistance = sum / _points.size();

        _currentDistanceToFace = CALIBRATION_DISTANCE_A4_MM
                * (_distanceAtCalibrationPoint / _currentAvgEyeDistance);

        _currentDistanceToFace = Util.MM_TO_CM(_currentDistanceToFace);

        MeasurementStepMessage message = new MeasurementStepMessage();
        message.setConfidence(currentFace.confidence());
        message.setCurrentAvgEyeDistance(_currentAvgEyeDistance);
        message.setDistToFace(_currentDistanceToFace);
        message.setEyesDistance(currentFace.eyesDistance());
        message.setMeasurementsLeft(_calibrationsLeft);
        message.setProcessTimeForLastFrame(_processTimeForLastFrame);

        MessageHUB.get().sendMessage(MessageHUB.MEASUREMENT_STEP, message);
    }

    private long _lastFrameStart = System.currentTimeMillis();
    private float _processTimeForLastFrame = -1;

    /**
     * Per-frame entry point. Drops the frame while a detection thread is
     * still running, otherwise consumes the previous thread's result and
     * starts a new {@link FaceDetectionThread} for this frame.
     */
    @Override
    public void onPreviewFrame(final byte[] data, final Camera camera) {
        Log.d(TAG, "onPreviewFrame RANNNNNNN!!!!" + _calibrationsLeft);
        if (_calibrationsLeft == -1)
            return;

        if (_calibrationsLeft > 0) {
            // Doing calibration !
            Log.d(TAG, "_calibrationLeft RANNNNNNN!!!!" + _calibrationsLeft);
            if (_currentFaceDetectionThread != null
                    && _currentFaceDetectionThread.isAlive()) {
                Log.d(TAG, "_currentFaceDectectionThread RANNNNNNN!!!!" + _currentFaceDetectionThread);
                // Drop Frame
                return;
            }

            // No face detection started or already finished
            _processTimeForLastFrame = System.currentTimeMillis()
                    - _lastFrameStart;
            _lastFrameStart = System.currentTimeMillis();

            if (_currentFaceDetectionThread != null) {
                Log.d(TAG, "_calibrationLeft-- RANNNNNNN!!!!");
                _calibrationsLeft--;
                updateMeasurement(_currentFaceDetectionThread.getCurrentFace());

                if (_calibrationsLeft == 0) {
                    Log.d(TAG, "Calibrating done RANNNNNNN!!!!");
                    doneCalibrating();

                    invalidate();
                    return;
                }
            }

            _currentFaceDetectionThread = new FaceDetectionThread(data,
                    _previewSize);
            _currentFaceDetectionThread.start();

            invalidate();
        } else {
            // Simple Measurement

            if (_currentFaceDetectionThread != null
                    && _currentFaceDetectionThread.isAlive()) {
                Log.d(TAG, "Dropping frames RANNNNNNN!!!!");
                // Drop Frame
                return;
            }

            // No face detection started or already finished
            _processTimeForLastFrame = System.currentTimeMillis()
                    - _lastFrameStart;
            _lastFrameStart = System.currentTimeMillis();

            // BUGFIX: the original if-statement had no braces, so the log
            // line below it ran unconditionally while only the measurement
            // update was guarded — braces make the guard explicit.
            if (_currentFaceDetectionThread != null) {
                updateMeasurement(_currentFaceDetectionThread.getCurrentFace());
                Log.d(TAG, "Updating measurements RANNNNNNN!!!!");
            }

            _currentFaceDetectionThread = new FaceDetectionThread(data,
                    _previewSize);
            _currentFaceDetectionThread.start();
            Log.d(TAG, "invalidate RANNNNNNN!!!!");
            invalidate();
        }
    }

    /*
     * SURFACE METHODS, TO CREATE AND RELEASE SURFACE THE CORRECT WAY.
     *
     * @see
     * android.view.SurfaceHolder.Callback#surfaceCreated(android.view.SurfaceHolder
     * )
     */

    @Override
    public void surfaceCreated(final SurfaceHolder holder) {
        synchronized (this) {
            // This allows us to make our own drawBitmap
            this.setWillNotDraw(false);
        }
    }

    @Override
    public void surfaceDestroyed(final SurfaceHolder holder) {
        // BUGFIX: stop frame delivery before releasing the camera, otherwise
        // onPreviewFrame can still fire against a released camera. Also guard
        // against a double-destroy / never-set camera (was an unconditional
        // mCamera.release() that NPEd when mCamera was null).
        if (mCamera != null) {
            mCamera.setPreviewCallback(null);
            try {
                mCamera.stopPreview();
            } catch (Exception ignored) {
                // ignore: preview was not running
            }
            mCamera.release();
            mCamera = null;
        }
    }

    @Override
    public void surfaceChanged(final SurfaceHolder holder, final int format,
                               final int width, final int height) {

        if (mHolder.getSurface() == null) {
            // preview surface does not exist
            return;
        }

        // stop preview before making changes
        try {
            mCamera.stopPreview();
        } catch (Exception e) {
            // ignore: tried to stop a non-existent preview
        }

        Parameters parameters = mCamera.getParameters();
        _previewSize = parameters.getPreviewSize();
        // mCamera.setDisplayOrientation(90);
        // mCamera.setParameters(parameters);

        // start preview with new settings
        try {
            mCamera.setPreviewDisplay(mHolder);
            mCamera.startPreview();
            mCamera.setPreviewCallback(this);

        } catch (Exception e) {
            Log.d("This", "Error starting camera preview: " + e.getMessage());
        }
    }
}

package com.example.phliip_vision;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera.Size;
import android.media.FaceDetector;
import android.media.FaceDetector.Face;
import android.util.Log;

public class FaceDetectionThread extends Thread {

    public static final String FACEDETECTIONTHREAD_TAG = "FaceDetectionThread_Tag";
    private static final String TAG = "FaceDetectionThread";

    private Face _currentFace;
    private final byte[] _data;       // raw NV21 preview frame
    private final Size _previewSize;  // dimensions of _data
    private Bitmap _currentFrame;     // decoded + rotated frame, set by run()

    public FaceDetectionThread(final byte[] data, final Size previewSize) {
        Log.d(TAG, "What are we waiting on in FaceDetectionThread????");
        _data = data;
        _previewSize = previewSize;
    }

    /** @return the face found by {@link #run()}, or null if none was found. */
    public Face getCurrentFace() {
        Log.d(TAG, "What are we waiting on in Current faces????");

        return _currentFace;
    }

    /** @return the decoded, rotated preview frame (null until run() decodes it). */
    public Bitmap getCurrentFrame() {
        return _currentFrame;
    }

    /**
     * Converts the NV21 preview frame to an RGB_565 bitmap, rotates it into
     * portrait orientation and runs the platform FaceDetector on it. The
     * first detected face (if any) is stored in {@link #_currentFace}.
     */
    @Override
    public void run() {

        long t = System.currentTimeMillis();

        YuvImage yuvimage = new YuvImage(_data, ImageFormat.NV21,
                _previewSize.width, _previewSize.height, null);

        ByteArrayOutputStream baos = new ByteArrayOutputStream();

        if (!yuvimage.compressToJpeg(new Rect(0, 0, _previewSize.width,
                _previewSize.height), 100, baos)) {

            Log.e("Camera", "compressToJpeg failed");

        }

        Log.i("Timing", "Compression finished: "
                + (System.currentTimeMillis() - t));
        t = System.currentTimeMillis();

        BitmapFactory.Options bfo = new BitmapFactory.Options();
        // FaceDetector requires an RGB_565 bitmap.
        bfo.inPreferredConfig = Bitmap.Config.RGB_565;

        _currentFrame = BitmapFactory.decodeStream(new ByteArrayInputStream(
                baos.toByteArray()), null, bfo);

        Log.i("Timing", "Decode Finished: " + (System.currentTimeMillis() - t));
        t = System.currentTimeMillis();

        // BUGFIX: the null check used to come AFTER Bitmap.createBitmap,
        // which would throw a NullPointerException whenever decoding failed.
        // Bail out before touching the bitmap.
        if (_currentFrame == null) {
            Log.e(FACEDETECTIONTHREAD_TAG, "Could not decode Image");
            return;
        }

        // Rotate and mirror so the frame suits our portrait mode.
        Matrix matrix = new Matrix();
        matrix.postRotate(90);
        matrix.preScale(-1, 1);
        // We rotate the same Bitmap
        _currentFrame = Bitmap.createBitmap(_currentFrame, 0, 0,
                _previewSize.width, _previewSize.height, matrix, false);

        Log.i("Timing",
                "Rotate, Create finished: " + (System.currentTimeMillis() - t));
        t = System.currentTimeMillis();

        FaceDetector d = new FaceDetector(_currentFrame.getWidth(),
                _currentFrame.getHeight(), 1);

        Face[] faces = new Face[1];
        // BUGFIX: findFaces returns the number of faces found; the old log
        // printed the Face object (or null) instead of that count.
        int numFaces = d.findFaces(_currentFrame, faces);

        Log.i("Timing",
                "FaceDetection finished: " + (System.currentTimeMillis() - t));
        t = System.currentTimeMillis();

        _currentFace = faces[0];
        Log.d(FACEDETECTIONTHREAD_TAG, "Found: " + numFaces + " Faces");
    }
}