Java android摄像机的帧显示与处理

Java android摄像机的帧显示与处理,java,android,opencv,android-camera,Java,Android,Opencv,Android Camera,我有一个应用程序，可以处理来自摄影机的帧并显示在布局上。捕获和管理摄影机帧的类如下所示（节选）: package org.opencv.face; import java.io.IOException; import java.util.List; import android.content.Context; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.ImageFormat; …

我有一个应用程序,可以处理来自摄影机的帧并显示在布局上,捕获和管理摄影机帧的类如下所示:

package org.opencv.face;

import java.io.IOException;
import java.util.List;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.os.Build;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";

public Camera              mCamera;
private SurfaceHolder       mHolder;
private int                 mFrameWidth;
private int                 mFrameHeight;
private byte[]              mFrame;
private boolean             mThreadRun;
private byte[]              mBuffer;
/* Dummy preview target for Honeycomb+. Kept as a field so the
 * SurfaceTexture is not garbage-collected (and its native resources
 * finalized) while the camera is still streaming into it. */
private SurfaceTexture      mSf;


public SampleViewBase(Context context) {
    super(context);
    mHolder = getHolder();
    mHolder.addCallback(this);
    Log.i(TAG, "Instantiated new " + this.getClass());
}

/** @return width of the preview frames selected in setupCamera(). */
public int getFrameWidth() {
    return mFrameWidth;
}

/** @return height of the preview frames selected in setupCamera(). */
public int getFrameHeight() {
    return mFrameHeight;
}

/**
 * Attaches a preview target to the camera so that preview callbacks fire.
 * <p>
 * On Honeycomb (API 11) and newer, an off-screen {@link SurfaceTexture} is
 * used so the preview never has to be rendered; the platform cannot force
 * an on-screen display in that case. On older versions the only option is
 * {@code setPreviewDisplay(null)} — the API nominally requires a visible
 * surface there, so behavior on some pre-3.0 devices may vary.
 *
 * @throws IOException if the camera rejects the preview target
 */
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public void setPreview() throws IOException {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        // Arbitrary texture name; nothing ever renders this texture.
        mSf = new SurfaceTexture(10);
        mCamera.setPreviewTexture(mSf);
    } else {
        mCamera.setPreviewDisplay(null);
    }
}

/**
 * Opens the default back-facing camera and installs the buffered preview
 * callback. Any previously held camera is released first.
 *
 * @return true if the camera was opened, false otherwise
 */
public boolean openCamera() {
    Log.i(TAG, "openCamera");
    releaseCamera();
    mCamera = Camera.open();
    if(mCamera == null) {
        Log.e(TAG, "Can't open camera!");
        return false;
    }

    mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
        public void onPreviewFrame(byte[] data, Camera camera) {
            synchronized (SampleViewBase.this) {
                // setupCamera() can re-run (e.g. on surfaceChanged) and
                // reallocate mFrame while an old-sized buffer is still in
                // flight; guard against null and a size mismatch instead
                // of risking NPE/ArrayIndexOutOfBoundsException here.
                if (mFrame != null) {
                    System.arraycopy(data, 0, mFrame, 0,
                            Math.min(data.length, mFrame.length));
                    // Wake the processing thread blocked in run().
                    SampleViewBase.this.notify();
                }
            }
            // Return the buffer to the camera for the next frame.
            camera.addCallbackBuffer(mBuffer);
        }
    });
    return true;
}

/**
 * Stops the preview, detaches the callback and releases the camera.
 * Also signals the processing thread to finish and notifies the child
 * class via onPreviewStopped().
 */
public void releaseCamera() {
    Log.i(TAG, "releaseCamera");
    // Ask the frame-processing loop in run() to terminate.
    mThreadRun = false;
    synchronized (this) {
        final Camera cam = mCamera;
        if (cam != null) {
            cam.stopPreview();
            cam.setPreviewCallback(null);
            cam.release();
            mCamera = null;
        }
    }
    onPreviewStopped();
}

/**
 * Configures the already-opened camera for the given surface size and
 * starts the preview.
 * <p>
 * Picks the supported preview size whose height is closest to the
 * requested one, enables continuous-video autofocus when available,
 * allocates the callback/frame buffers, attaches the preview target via
 * setPreview(), and finally calls startPreview().
 *
 * @param width  requested preview width (from surfaceChanged)
 * @param height requested preview height (from surfaceChanged)
 */
public void setupCamera(int width, int height) {
    Log.i(TAG, "setupCamera");
    synchronized (this) {
        if (mCamera != null) {
            Camera.Parameters params = mCamera.getParameters();
            List<Camera.Size> sizes = params.getSupportedPreviewSizes();
            mFrameWidth = width;
            mFrameHeight = height;

            // selecting optimal camera preview size
            // (closest supported height to the requested height; the
            // matching width is taken from the same supported size)
            {
                int  minDiff = Integer.MAX_VALUE;
                for (Camera.Size size : sizes) {
                    if (Math.abs(size.height - height) < minDiff) {
                        mFrameWidth = size.width;
                        mFrameHeight = size.height;
                        minDiff = Math.abs(size.height - height);
                    }
                }
            }

            params.setPreviewSize(getFrameWidth(), getFrameHeight());

            List<String> FocusModes = params.getSupportedFocusModes();
            if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
            {
                params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
            }            

            mCamera.setParameters(params);

            /* Now allocate the buffer */
            // Re-read parameters: the camera may have adjusted the values
            // we set above. Size is width*height*bytesPerPixel, doubled
            // as a safety margin for the callback buffer.
            params = mCamera.getParameters();
            int size = params.getPreviewSize().width * params.getPreviewSize().height;
            size  = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8 * 2;
            mBuffer = new byte[size];
            /* The buffer where the current frame will be copied */
            mFrame = new byte [size];
            mCamera.addCallbackBuffer(mBuffer);

            try {
                setPreview();
            } catch (IOException e) {
                Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
            }

            /* Notify that the preview is about to be started and deliver preview size */
            onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);

            /* Now we can start a preview */
            mCamera.startPreview();
        }
    }
}

/**
 * SurfaceHolder.Callback: reconfigures the camera whenever the surface
 * geometry changes (also fired once right after surface creation).
 */
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
    Log.i(TAG, "surfaceChanged");
    setupCamera(width, height);
}

/**
 * SurfaceHolder.Callback: starts the frame-processing thread (run()).
 * The thread blocks in wait() until the preview callback delivers frames.
 */
public void surfaceCreated(SurfaceHolder holder) {
    Log.i(TAG, "surfaceCreated");
    (new Thread(this)).start();
}

/**
 * SurfaceHolder.Callback: stops the processing thread and releases the
 * camera when the surface goes away.
 */
public void surfaceDestroyed(SurfaceHolder holder) {
    Log.i(TAG, "surfaceDestroyed");
    releaseCamera();
}


/**
 * Converts one raw preview frame into a Bitmap to be drawn on the surface.
 * Called on the processing thread while holding the object lock.
 * The returned Bitmap is owned by the child class and shall be released
 * in onPreviewStopped().
 *
 * @param data raw preview frame bytes (format per Camera.Parameters)
 * @return the rendered frame, or null to skip drawing
 */
protected abstract Bitmap processFrame(byte[] data);

/**
 * This method is called when the preview process is being started. It is called before the first frame delivered and processFrame is called
 * It is called with the width and height parameters of the preview process. It can be used to prepare the data needed during the frame processing.
 * @param previewWidth - the width of the preview frames that will be delivered via processFrame
 * @param previewHeight - the height of the preview frames that will be delivered via processFrame
 */
protected abstract void onPreviewStarted(int previewWidth, int previewHeight);

/**
 * This method is called when preview is stopped. When this method is called the preview stopped and all the processing of frames already completed.
 * If the Bitmap object returned via processFrame is cached - it is a good time to recycle it.
 * Any other resources used during the preview can be released.
 */
protected abstract void onPreviewStopped();

/**
 * Frame-processing loop (Runnable). Waits for the preview callback to
 * copy a frame into mFrame and notify(), hands it to processFrame(), and
 * draws the resulting Bitmap centered on the surface. Terminates when
 * releaseCamera() clears mThreadRun or when the thread is interrupted.
 */
public void run() {
    mThreadRun = true;
    Log.i(TAG, "Starting processing thread");
    while (mThreadRun) {
        Bitmap bmp = null;

        synchronized (this) {
            try {
                // Block until onPreviewFrame() signals a fresh frame.
                this.wait();
                bmp = processFrame(mFrame);
            } catch (InterruptedException e) {
                // Don't swallow the interrupt: restore the flag and exit
                // the loop instead of re-entering wait() forever.
                Thread.currentThread().interrupt();
                mThreadRun = false;
            }
        }

        if (bmp != null) {
            Canvas canvas = mHolder.lockCanvas();
            if (canvas != null) {
                // Center the frame on the surface.
                canvas.drawBitmap(bmp,
                        (canvas.getWidth() - getFrameWidth()) / 2,
                        (canvas.getHeight() - getFrameHeight()) / 2, null);
                mHolder.unlockCanvasAndPost(canvas);
            }
        }
    }
}
现在可以正常工作,但只适用于3.0以上的Android版本,我想要的是适用于任何版本


如何解决此问题?

如果要接收预览回调,则必须使用带有非空参数的
setPreviewDisplay()
显示预览。该API旨在强制您在屏幕上显示此预览。任何允许您创建从未渲染过的虚拟SurfaceView的变通方法都可能无法在某些设备上或在下次升级后使用

在使用蜂巢之前,可以在屏幕外创建预览SurfaceView(使其位置远离右侧),因此预览不会显示。这个错误后来被修复了


幸运的是,使用3+可以使用
setPreviewTexture()
,平台无法强制您实际显示纹理。

您看过opencv android示例吗?他们有适用于Android 2.2及以上版本的功能。你能在这里解释一下同步(锁定)的用法吗。
@TargetApi(11)
public void setPreview() throws IOException {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        mSf = new SurfaceTexture(10);
        mCamera.setPreviewTexture( mSf );
    }
    else
        mCamera.setPreviewDisplay(null);