How to detect the color at the center of a video stream on Android without OpenCV


After starting the camera preview, is there any way to detect the color at the center of the image?

(My problem is solved, thanks for the help.)
I just worked out a solution that detects the average color of the image data stream captured by the Android camera.

Here is my code. I take a picture every 500 ms, crop the center (a 60×60 square), and compute its average color:

public class TakePicture extends Activity implements SurfaceHolder.Callback {
// a variable to store a reference to the Image View at the main.xml file
private ImageView iv_image;
// a variable to store a reference to the Surface View at the main.xml file
private SurfaceView sv;

// a bitmap to display the captured image
private Bitmap bmp;

// Camera variables
// a surface holder
private SurfaceHolder sHolder;
// a variable to control the camera
Camera mCamera;
// the camera parameters
private Parameters parameters;
Camera.PictureCallback mCall;
Button takePicture;
Handler handler;
TextView colorName, Hex;
SharedPreferences pref;
Editor editor;
int width = 0, height = 0;
private Camera.Size pictureSize;

boolean mStopHandler = false;
private static final HashMap<String, String> sColorNameMap;

static {
    sColorNameMap = new HashMap<>();
    sColorNameMap.put("#000000", "black");
    sColorNameMap.put("#A9A9A9", "darkgray");
    sColorNameMap.put("#808080", "gray");
    sColorNameMap.put("#D3D3D3", "lightgray");
    sColorNameMap.put("#FFFFFF", "white");
    // .....

}

Runnable runnable = new Runnable() {
    @Override
    public void run() {
        if (mCamera != null) {
            // do your stuff - don't create a new runnable here!
            mCamera.takePicture(null, null, mCall);

            if (!mStopHandler) {
                handler.postDelayed(this, 500);
            }
        }
    }
};

/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.main);
    handler = new Handler();
    pref = getApplicationContext().getSharedPreferences("MyPref", 0);
    editor = pref.edit();
    colorName = (TextView) findViewById(R.id.colorName);
    Hex = (TextView) findViewById(R.id.colorHex);
    // get the Image View at the main.xml file
    iv_image = (ImageView) findViewById(R.id.imageView);
    takePicture = (Button) findViewById(R.id.takePicture);

    // get the Surface View at the main.xml file
    sv = (SurfaceView) findViewById(R.id.surfaceView);

    // Get a surface
    sHolder = sv.getHolder();

    // add the callback interface methods defined below as the Surface View
    // callbacks
    sHolder.addCallback(this);

    // tells Android that this surface will have its data constantly
    // replaced
    sHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    takePicture.setOnClickListener(new View.OnClickListener() {

        @Override
        public void onClick(View v) {
            mCamera.takePicture(null, null, mCall);

        }
    });
}

@Override
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
    // get camera parameters
    parameters = mCamera.getParameters();
    // parameters.setPreviewFormat(ImageFormat.NV21);
    mCamera.setDisplayOrientation(90);
    setBesttPictureResolution();

    mCamera.setParameters(parameters);
    try {
        mCamera.setPreviewDisplay(holder);
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    mCamera.setParameters(parameters);
    // set camera parameters
    mCamera.startPreview();

    // sets what code should be executed after the picture is taken
    mCall = new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            // decode the data obtained by the camera into a Bitmap
            if (data != null) {
                bmp = decodeBitmap(data);
            }
            // set the iv_image
            if (bmp != null) {
                ByteArrayOutputStream bytes = new ByteArrayOutputStream();
                // bmp.compress(Bitmap.CompressFormat.JPEG, 70, bytes);
                // crop a 60x60 square centered on the middle of the picture
                Bitmap resizebitmap = Bitmap.createBitmap(bmp,
                        bmp.getWidth() / 2 - 30, bmp.getHeight() / 2 - 30, 60, 60);
                iv_image.setImageBitmap(rotateImage(resizebitmap, 90));

                int color = getAverageColor(resizebitmap);
                Log.i("Color Int", color + "");
                // int color =
                // resizebitmap.getPixel(resizebitmap.getWidth()/2,resizebitmap.getHeight()/2);

                String strColor = String.format("#%06X", 0xFFFFFF & color);
                Hex.setText(strColor);
                String colorname = sColorNameMap.get(strColor);
                if (colorname != null) {
                    colorName.setText(colorname);
                }

                Log.i("Pixel Value",
                        "Top Left pixel: " + Integer.toHexString(color));
            }

        }
    };
}

@Override
public void surfaceCreated(SurfaceHolder holder) {
    // The Surface has been created, acquire the camera and tell it where
    // to draw the preview.
    // mCamera = Camera.open();
    mCamera = getCameraInstance();
    if (mCamera != null) {
        try {
            mCamera.setPreviewDisplay(holder);
            handler.post(runnable);

        } catch (IOException exception) {
            mCamera.release();
            mCamera = null;
        }
    } else
        Toast.makeText(getApplicationContext(), "Camera is not available",
                Toast.LENGTH_SHORT).show();
}

@Override
public void surfaceDestroyed(SurfaceHolder holder) {

    if (mCamera != null) {
        // stop the preview
        mCamera.stopPreview();
        // release the camera
        mCamera.release();
    }
    // unbind the camera from this object
    if (handler != null)
        handler.removeCallbacks(runnable);
}

public static Bitmap decodeBitmap(byte[] data) {

    Bitmap bitmap = null;
    BitmapFactory.Options bfOptions = new BitmapFactory.Options();
    bfOptions.inDither = false; // Disable Dithering mode
    bfOptions.inPurgeable = true; // Tell to gc that whether it needs free
                                    // memory, the Bitmap can be cleared
    bfOptions.inInputShareable = true; // Which kind of reference will be
                                        // used to recover the Bitmap data
                                        // after being clear, when it will
                                        // be used in the future
    bfOptions.inTempStorage = new byte[32 * 1024];

    if (data != null)
        bitmap = BitmapFactory.decodeByteArray(data, 0, data.length,
                bfOptions);

    return bitmap;
}

public Bitmap rotateImage(Bitmap src, float degree) {
    // create new matrix object
    Matrix matrix = new Matrix();
    // setup rotation degree
    matrix.postRotate(degree);
    // return new bitmap rotated using matrix
    return Bitmap.createBitmap(src, 0, 0, src.getWidth(), src.getHeight(),
            matrix, true);
}

public int getAverageColor(Bitmap bitmap) {
    int redBucket = 0;
    int greenBucket = 0;
    int blueBucket = 0;
    int pixelCount = 0;

    for (int y = 0; y < bitmap.getHeight(); y++) {
        for (int x = 0; x < bitmap.getWidth(); x++) {
            int c = bitmap.getPixel(x, y);

            pixelCount++;
            redBucket += Color.red(c);
            greenBucket += Color.green(c);
            blueBucket += Color.blue(c);
            // does alpha matter?
        }
    }

    int averageColor = Color.rgb(redBucket / pixelCount, greenBucket
            / pixelCount, blueBucket / pixelCount);
    return averageColor;
}

int[] averageARGB(Bitmap pic) {
    int A, R, G, B;
    A = R = G = B = 0;
    int pixelColor;
    int width = pic.getWidth();
    int height = pic.getHeight();
    int size = width * height;

    for (int x = 0; x < width; ++x) {
        for (int y = 0; y < height; ++y) {
            pixelColor = pic.getPixel(x, y);
            A += Color.alpha(pixelColor);
            R += Color.red(pixelColor);
            G += Color.green(pixelColor);
            B += Color.blue(pixelColor);
        }
    }

    A /= size;
    R /= size;
    G /= size;
    B /= size;

    int[] average = { A, R, G, B };
    return average;

}

private void setBesttPictureResolution() {
    // get biggest picture size
    width = pref.getInt("Picture_Width", 0);
    height = pref.getInt("Picture_height", 0);

    if (width == 0 || height == 0) {
        pictureSize = getBiggesttPictureSize(parameters);
        if (pictureSize != null) {
            parameters.setPictureSize(pictureSize.width, pictureSize.height);
            // save width and height in SharedPreferences
            width = pictureSize.width;
            height = pictureSize.height;
            editor.putInt("Picture_Width", width);
            editor.putInt("Picture_height", height);
            editor.commit();
        }
    } else {
        // if (pictureSize != null)
        parameters.setPictureSize(width, height);
    }
}

private Camera.Size getBiggesttPictureSize(Camera.Parameters parameters) {
    Camera.Size result = null;

    for (Camera.Size size : parameters.getSupportedPictureSizes()) {
        if (result == null) {
            result = size;
        } else {
            int resultArea = result.width * result.height;
            int newArea = size.width * size.height;

            if (newArea > resultArea) {
                result = size;
            }
        }
    }

    return (result);
}

/** A safe way to get an instance of the Camera object. */
public Camera getCameraInstance() {
    Camera c = null;
    try {
        c = Camera.open(); // attempt to get a Camera instance
    } catch (Exception e) {
        // Camera is not available (in use or does not exist)
    }
    return c; // returns null if camera is unavailable
}
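
Note that the exact-hex lookup sColorNameMap.get(strColor) only returns a name when the averaged color matches one of the map keys bit-for-bit, which is rare in practice. A more forgiving option is to pick the map entry with the smallest RGB distance. Below is a minimal sketch of that idea; the helper name getClosestColorName is my own and is not part of the code above.

// Hypothetical helper: finds the entry of sColorNameMap whose color is
// closest (squared Euclidean distance in RGB) to the given color.
// Needs java.util.Map in addition to the imports already used above.
private String getClosestColorName(int color) {
    String bestName = null;
    long bestDistance = Long.MAX_VALUE;

    for (Map.Entry<String, String> entry : sColorNameMap.entrySet()) {
        int candidate = Color.parseColor(entry.getKey());

        long dr = Color.red(candidate) - Color.red(color);
        long dg = Color.green(candidate) - Color.green(color);
        long db = Color.blue(candidate) - Color.blue(color);
        long distance = dr * dr + dg * dg + db * db;

        if (distance < bestDistance) {
            bestDistance = distance;
            bestName = entry.getValue();
        }
    }
    return bestName;
}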
Another approach: if you already have the frame as a Bitmap, you can read a single pixel directly, either at a touch point on the ImageView or at the center of the bitmap:

Bitmap mBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.your_image); // placeholder: use your own image resource

imageView.setOnTouchListener(new View.OnTouchListener()
    {
        public boolean onTouch(View v, MotionEvent event)
        {
            int x_coordinate = (int) event.getX();
            int y_coordinate = (int) event.getY();

            int color = mBitmap.getPixel(x_coordinate, y_coordinate);

            if (event.getAction() == MotionEvent.ACTION_DOWN)
            {
            }

            if (event.getAction() == MotionEvent.ACTION_UP)
            {
            }

            return true;
        }
    });

// read the pixel at the exact center of the bitmap
int color = mBitmap.getPixel(mBitmap.getWidth() / 2, mBitmap.getHeight() / 2);
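
One caveat with the touch-based version: event.getX()/getY() are coordinates inside the ImageView, not inside the bitmap, and Bitmap.getPixel throws IllegalArgumentException for out-of-range coordinates. If the view scales the image, map the touch point to bitmap coordinates and clamp it first. A minimal sketch, assuming the ImageView stretches the bitmap to its own size (scaleType="fitXY"); other scale types would need the view's image matrix:

// Inside onTouch, instead of calling getPixel with the raw view coordinates:
int bx = x_coordinate * mBitmap.getWidth() / imageView.getWidth();
int by = y_coordinate * mBitmap.getHeight() / imageView.getHeight();
bx = Math.max(0, Math.min(bx, mBitmap.getWidth() - 1));   // clamp to valid pixel range
by = Math.max(0, Math.min(by, mBitmap.getHeight() - 1));
int color = mBitmap.getPixel(bx, by);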