Java 预览时CameraX逐像素颜色检测

Java 预览时CameraX逐像素颜色检测,java,pixel,Java,Pixel,我使用了基本的CameraX示例,以便能够使用ImageAnalysis例程,逐像素检查完整的位图,以检测与基本给定颜色的颜色相似性。 我想要的是显示除所选颜色以外的灰度预览。 我使用“bitmap bMap=txView.getBitmap();”获取位图,在浏览位图并更改每个像素后,我使用“img.setImageBitmap(bMap);”将位图发送到名为“img”的Imagview 我的TextureView不可见,我的Imageview位于顶部,以显示修改后的位图。 如果我只得到位图并

我使用了基本的CameraX示例,以便能够使用ImageAnalysis例程,逐像素检查完整的位图,以检测与基本给定颜色的颜色相似性。 我想要的是显示除所选颜色以外的灰度预览。 我使用“bitmap bMap=txView.getBitmap();”获取位图,在浏览位图并更改每个像素后,我使用“img.setImageBitmap(bMap);”将位图发送到名为“img”的Imagview 我的TextureView不可见,我的Imageview位于顶部,以显示修改后的位图。 如果我只得到位图并显示它,我没有问题。但是当我添加嵌套循环来检查每个像素时,它只需要很长的时间

这是我的密码

    private void startCamera() {
        //make sure there isn't another camera instance running before starting
        CameraX.unbindAll();

        /* start preview */
        int aspRatioW = 480; //txView.getWidth(); //get width of screen
        int aspRatioH = 640;//txView.getHeight(); //get height
        Rational asp = new Rational (aspRatioW, aspRatioH); //aspect ratio
        Size screen = new Size(aspRatioW, aspRatioH); //size of the screen

        //config obj for preview/viewfinder.
        PreviewConfig pConfig = new PreviewConfig.Builder().setTargetAspectRatio(asp).setTargetResolution(screen).build();
        Preview preview = new Preview(pConfig); //lets build it

        preview.setOnPreviewOutputUpdateListener(
                new Preview.OnPreviewOutputUpdateListener() {
                    //to update the surface texture we have to destroy it first, then re-add it
                    @Override
                    public void onUpdated(Preview.PreviewOutput output){
                        ViewGroup parent = (ViewGroup) txView.getParent();
                        parent.removeView(txView);
                        parent.addView(txView, 0);

                        txView.setSurfaceTexture(output.getSurfaceTexture());
                        updateTransform();
                    }
                });

        /* image capture */

        //config obj, selected capture mode
        ImageCaptureConfig imgCapConfig = new ImageCaptureConfig.Builder().setCaptureMode(ImageCapture.CaptureMode.MIN_LATENCY)
                .setTargetRotation(getWindowManager().getDefaultDisplay().getRotation()).build();
        final ImageCapture imgCap = new ImageCapture(imgCapConfig);

        findViewById(R.id.capture_button).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                File file = new File(Environment.getExternalStorageDirectory() + "/" + System.currentTimeMillis() + ".jpg");
                imgCap.takePicture(file, new ImageCapture.OnImageSavedListener() {
                    @Override
                    public void onImageSaved(@NonNull File file) {
                        String msg = "Photo capture succeeded: " + file.getAbsolutePath();
                        Toast.makeText(getBaseContext(), msg,Toast.LENGTH_LONG).show();
                    }

                    @Override
                    public void onError(@NonNull ImageCapture.UseCaseError useCaseError, @NonNull String message, @Nullable Throwable cause) {
                        String msg = "Photo capture failed: " + message;
                        Toast.makeText(getBaseContext(), msg,Toast.LENGTH_LONG).show();
                        if(cause != null){
                            cause.printStackTrace();
                        }
                    }
                });
            }
        });

        /* image analyser */

        ImageAnalysisConfig imgAConfig = new ImageAnalysisConfig.Builder().setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE).build();
        ImageAnalysis analysis = new ImageAnalysis(imgAConfig);

        analysis.setAnalyzer(
            new ImageAnalysis.Analyzer(){
                @Override
                public void analyze(ImageProxy image, int rotationDegrees){

                    //https://www.codota.com/code/java/methods/android.media.Image/getPlanes
                    //ByteBuffer yBuffer = image.getPlanes()[0].getBuffer(); //  planes[0].buffer // Y
                    //ByteBuffer uBuffer = image.getPlanes()[1].getBuffer(); //  planes[1].buffer // U
                    //ByteBuffer vBuffer = image.getPlanes()[2].getBuffer(); //  planes[2].buffer // V
                    //byte[] yBufferBytes= new byte[yBuffer.remaining()]; // .remaining is for the size of the buffer

                    Bitmap bMap = txView.getBitmap();
                    if (bMap != null){
                        int Hsize = bMap.getHeight();
                        int Wsize = bMap.getWidth();
                        int bmPixel, alpha, redValue, blueValue, greenValue;
                        int bt_alpha, bt_redValue, bt_blueValue, bt_greenValue;
                        bt_alpha = Color.alpha(mDefaultColor);
                        bt_redValue = Color.red(mDefaultColor);
                        bt_blueValue = Color.blue(mDefaultColor);
                        bt_greenValue = Color.green(mDefaultColor);

                        for (int h = 0; h < Hsize; h++){
                            for (int w = 0; w < Wsize; w++){
                                bmPixel = bMap.getPixel(w,h);
                                alpha = Color.alpha(bmPixel);
                                redValue = Color.red(bmPixel);
                                blueValue = Color.blue(bmPixel);
                                greenValue = Color.green(bmPixel);
                                double d2 = Math.sqrt((0.3*(bt_redValue-redValue)*(bt_redValue-redValue)) + (0.59*(bt_greenValue-greenValue)*(bt_greenValue-greenValue)) + (0.11*(bt_blueValue-blueValue)*(bt_blueValue-blueValue)));
                               if (d2 > 10){
                                    int newRed, newBlue, newGreen, bmGrey;
                                    bmGrey = (redValue + blueValue + greenValue)/3;
                                    newRed = bmGrey;
                                    newBlue = bmGrey;
                                    newGreen = bmGrey;
                                    bMap.setPixel(w,h,Color.argb(alpha,newRed,newGreen,newBlue));
                                }
                            }
                        }
                        ImageView img = (ImageView) findViewById(R.id.image_Changed);
                        img.setImageBitmap(bMap);
                    }

                    image.close();
                    // https://stackoverflow.com/questions/36212904/yuv-420-888-interpretation-on-samsung-galaxy-s7-camera2

                    //y'all can add code to analyse stuff here idek go wild.
                }
            });

        //bind to lifecycle:
        CameraX.bindToLifecycle((LifecycleOwner)this, analysis, imgCap, preview);
    }

private void startCamera(){
//在启动之前,确保没有其他摄影机实例正在运行
CameraX.unbindAll();
/*开始预览*/
int aspRatioW=480;//txView.getWidth();//获取屏幕宽度
int aspRatioH=640;//txView.getHeight();//获取高度
RationalASP=newRational(aspRatioW,aspRatioH);//纵横比
屏幕大小=新大小(aspRatioW,aspRatioH);//屏幕大小
//为预览/取景器配置obj。
PreviewConfig pConfig=新建PreviewConfig.Builder().setTargetSpectratio(asp.setTargetResolution(screen.build());
Preview Preview=新建预览(pConfig);//让我们构建它
preview.SetonPreviewOutUpdateListener(
新建Preview.onPreviewOutUpdateListener(){
//要更新表面纹理,我们必须先破坏它,然后重新添加它
@凌驾
公共void未更新(Preview.PreviewOutput输出){
ViewGroup parent=(ViewGroup)txView.getParent();
parent.removeView(txView);
parent.addView(txView,0);
setSurfaceTexture(output.getSurfaceTexture());
updateTransform();
}
});
/*图像捕获*/
//配置obj,选择的捕获模式
ImageCaptureConfig imgCapConfig=新建ImageCaptureConfig.Builder().setCaptureMode(ImageCapture.CaptureMode.MIN_延迟)
.setTargetRotation(getWindowManager().getDefaultDisplay().getRotation()).build();
最终图像捕获imgCap=新图像捕获(imgCapConfig);
findviewbyd(R.id.capture_按钮).setOnClickListener(新视图.OnClickListener(){
@凌驾
公共void onClick(视图v){
File File=新文件(Environment.getExternalStorageDirectory()+“/”+System.currentTimeMillis()+”.jpg”);
takePicture(文件,newimagecapture.OnImageSavedListener(){
@凌驾
public void onImageSaved(@NonNull File){
String msg=“照片捕获成功:”+file.getAbsolutePath();
Toast.makeText(getBaseContext(),msg,Toast.LENGTH_LONG).show();
}
@凌驾
public void onError(@NonNull ImageCapture.UseCaseError-UseCaseError,@NonNull字符串消息,@null可丢弃原因){
String msg=“照片捕获失败:”+消息;
Toast.makeText(getBaseContext(),msg,Toast.LENGTH_LONG).show();
如果(原因!=null){
cause.printStackTrace();
}
}
});
}
});
/*图像分析仪*/
ImageAnalysisConfig imgAConfig=new ImageAnalysisConfig.Builder().setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE).build();
ImageAnalysis=新的ImageAnalysis(imgAConfig);
分析设置分析器(
新的ImageAnalysis.Analyzer(){
@凌驾
公共空间分析(图像代理图像,整数旋转度){
//https://www.codota.com/code/java/methods/android.media.Image/getPlanes
//ByteBuffer yBuffer=image.getPlanes()[0].getBuffer();//平面[0].buffer//Y
//ByteBuffer uBuffer=image.getPlanes()[1].getBuffer();//平面[1].buffer//U
//ByteBuffer vBuffer=image.getPlanes()[2].getBuffer();//平面[2].buffer//V
//byte[]yBufferBytes=新字节[yBuffer.remaining()];/。remaining表示缓冲区的大小
位图bMap=txView.getBitmap();
如果(bMap!=null){
int Hsize=bMap.getHeight();
int Wsize=bMap.getWidth();
int bmPixel、alpha、redValue、blueValue、greenValue;
int bt_alpha、bt_redValue、bt_blueValue、bt_greenValue;
bt_alpha=Color.alpha(mDefaultColor);
bt_redValue=Color.red(mDefaultColor);
bt_blueValue=Color.blue(mDefaultColor);
bt_greenValue=Color.green(mDefaultColor);
for(inth=0;h10){
int纽瑞德、纽布鲁、纽格林、BM格雷;