Java - How to get rid of the Class File Editor in the Debug perspective in Eclipse Indigo

Tags: java, android, opencv, javacv

I am trying to run an eye-detection application in Eclipse. I know it runs on my Android device.

However, when I place System.out.println calls in the appropriate places, they seem to be skipped entirely and the activity just launches. Also, when I try to debug the application (after setting breakpoints), it keeps opening a Class File Editor saying the source was not found. I think this is because the debugger cannot find the library source code. What can I do? I only care about my println output, but this Class File Editor keeps popping up. How do I stop it? The code is below.
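A side note on the println calls: on Android, System.out.println does not write to the Eclipse console but to LogCat (under the tag "System.out"), so its output is easy to miss. Below is a minimal sketch of logging through android.util.Log instead; the LogSketch class and the "EyePreview" tag are hypothetical and not part of the app, but Log messages can be filtered by tag in the LogCat view.

import android.util.Log;

// Minimal sketch, not part of the original app: logs a preview frame size so it
// can be filtered by TAG in the LogCat view.
class LogSketch {
    private static final String TAG = "EyePreview"; // hypothetical tag

    static void reportPreviewSize(int width, int height) {
        Log.d(TAG, "preview size: " + width + "x" + height);
        // A plain println also reaches LogCat, but under the generic tag "System.out".
        System.out.println("preview size: " + width + "x" + height);
    }
}

Calling something like LogSketch.reportPreviewSize(size.width, size.height) from onPreviewFrame would confirm whether the callback fires at all.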

package com.googlecode.javacv.eyepreview;

import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Paint;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.FrameLayout;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import com.googlecode.javacpp.Loader;
import com.googlecode.javacv.cpp.opencv_objdetect;

import static com.googlecode.javacv.cpp.opencv_core.*;
import static com.googlecode.javacv.cpp.opencv_imgproc.*;
import static com.googlecode.javacv.cpp.opencv_objdetect.*;
import static com.googlecode.javacv.cpp.opencv_highgui.*;

// ----------------------------------------------------------------------

public class EyePreview extends Activity {
    private FrameLayout layout;
    private EyeView eyeView;
    private Preview mPreview;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);

        // Hide the window title.
        requestWindowFeature(Window.FEATURE_NO_TITLE);

        // Create our Preview view and set it as the content of our activity.
        try {
            layout = new FrameLayout(this);
            eyeView = new EyeView(this);
            mPreview = new Preview(this, eyeView);
            layout.addView(mPreview);
            layout.addView(eyeView);
            setContentView(layout);
        } catch (IOException e) {
            e.printStackTrace();
            new AlertDialog.Builder(this).setMessage(e.getMessage()).create().show();
        }
    }
}

// ----------------------------------------------------------------------

class EyeView extends View implements Camera.PreviewCallback {
    public static final int SUBSAMPLING_FACTOR = 4;

    private IplImage grayImage;
    private CvHaarClassifierCascade classifier;
    private CvMemStorage storage;
    private CvSeq eyes;

    public EyeView(EyePreview context) throws IOException {
        super(context);

        // Load the classifier file from Java resources.
        File classifierFile = Loader.extractResource(getClass(),
            "/com/googlecode/javacv/eyepreview/haarcascade_eye.xml",
            context.getCacheDir(), "classifier", ".xml");
        if (classifierFile == null || classifierFile.length() <= 0) {
            throw new IOException("Could not extract the classifier file from Java resource.");
        }

        // Preload the opencv_objdetect module to work around a known bug.
        Loader.load(opencv_objdetect.class);
        classifier = new CvHaarClassifierCascade(cvLoad(classifierFile.getAbsolutePath()));
        classifierFile.delete();
        if (classifier.isNull()) {
            throw new IOException("Could not load the classifier file.");
        }
        storage = CvMemStorage.create();
    }
    // Empty placeholder for iris detection; not implemented yet.
    public void IrisDetection() {
    }
    public void onPreviewFrame(final byte[] data, final Camera camera) {
        try {
            Camera.Size size = camera.getParameters().getPreviewSize();
            processImage(data, size.width, size.height);
            camera.addCallbackBuffer(data);
        } catch (RuntimeException e) {
            // The camera has probably just been released, ignore.
        }
    }

    protected void processImage(byte[] data, int width, int height) {
        // First, downsample our image and convert it into a grayscale IplImage
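        // The default Android preview format (NV21) stores the Y (luminance) plane
        // first, so copying every f-th byte of that plane gives a subsampled gray image.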
        int f = SUBSAMPLING_FACTOR;
        if (grayImage == null || grayImage.width() != width/f || grayImage.height() != height/f) {
            grayImage = IplImage.create(width/f, height/f, IPL_DEPTH_8U, 1);
        }
        int imageWidth  = grayImage.width();
        System.out.println("imageWidth=" + imageWidth);
        int imageHeight = grayImage.height();
        System.out.println("imageHeight=" + imageHeight);
        int dataStride = f*width;
        System.out.println("dataStride=" + dataStride);
        int imageStride = grayImage.widthStep();
        System.out.println("imageStride=" + imageStride);
        ByteBuffer imageBuffer = grayImage.getByteBuffer();
        for (int y = 0; y < imageHeight; y++) {
            int dataLine = y*dataStride;
            int imageLine = y*imageStride;
            for (int x = 0; x < imageWidth; x++) {
                imageBuffer.put(imageLine + x, data[dataLine + f*x]);
            }
        }

        // Run the Haar cascade over the subsampled gray image: scale factor 1.1,
        // at least 3 neighboring detections, and Canny pruning to skip low-edge regions.
        eyes = cvHaarDetectObjects(grayImage, classifier, storage, 1.1, 3, CV_HAAR_DO_CANNY_PRUNING);
        postInvalidate();
        cvClearMemStorage(storage);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        Paint paint = new Paint();
        paint.setColor(Color.RED);
        paint.setTextSize(20);

        String s = "EyePreview - This side up.";
        float textWidth = paint.measureText(s);
        canvas.drawText(s, (getWidth()-textWidth)/2, 20, paint);

        if (eyes != null) {
            paint.setStrokeWidth(2);
            paint.setStyle(Paint.Style.STROKE);
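            // Detections are in grayImage (subsampled) coordinates; scale them up
            // to the size of this view before drawing.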
            float scaleX = (float)getWidth()/grayImage.width();
            float scaleY = (float)getHeight()/grayImage.height();
            int total = eyes.total();
            for (int i = 0; i < total; i++) {
                CvRect r = new CvRect(cvGetSeqElem(eyes, i));
                int x = r.x(), y = r.y(), w = r.width(), h = r.height();
                canvas.drawRect(x*scaleX, y*scaleY, (x+w)*scaleX, (y+h)*scaleY, paint);
            }
        }
    }
}

// ----------------------------------------------------------------------

class Preview extends SurfaceView implements SurfaceHolder.Callback {
    SurfaceHolder mHolder;
    Camera mCamera;
    Camera.PreviewCallback previewCallback;

    Preview(Context context, Camera.PreviewCallback previewCallback) {
        super(context);
        this.previewCallback = previewCallback;

        // Install a SurfaceHolder.Callback so we get notified when the
        // underlying surface is created and destroyed.
        mHolder = getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        // The Surface has been created, acquire the camera and tell it where
        // to draw.
        mCamera = Camera.open();
        try {
           mCamera.setPreviewDisplay(holder);
        } catch (IOException exception) {
            mCamera.release();
            mCamera = null;
            // TODO: add more exception handling logic here
        }
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // Surface will be destroyed when we return, so stop the preview.
        // Because the CameraDevice object is not a shared resource, it's very
        // important to release it when the activity is paused.
        mCamera.stopPreview();
        mCamera.release();
        mCamera = null;
    }


    private Size getOptimalPreviewSize(List<Size> sizes, int w, int h) {
        final double ASPECT_TOLERANCE = 0.05;
        double targetRatio = (double) w / h;
        if (sizes == null) return null;

        Size optimalSize = null;
        double minDiff = Double.MAX_VALUE;

        int targetHeight = h;

        // Try to find a preview size that matches the target aspect ratio and height
        for (Size size : sizes) {
            double ratio = (double) size.width / size.height;
            if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
            if (Math.abs(size.height - targetHeight) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        }

        // Could not find a size matching the aspect ratio; ignore that requirement
        if (optimalSize == null) {
            minDiff = Double.MAX_VALUE;
            for (Size size : sizes) {
                if (Math.abs(size.height - targetHeight) < minDiff) {
                    optimalSize = size;
                    minDiff = Math.abs(size.height - targetHeight);
                }
            }
        }
        return optimalSize;
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        // Now that the size is known, set up the camera parameters and begin
        // the preview.
        Camera.Parameters parameters = mCamera.getParameters();

        List<Size> sizes = parameters.getSupportedPreviewSizes();
        Size optimalSize = getOptimalPreviewSize(sizes, w, h);
        parameters.setPreviewSize(optimalSize.width, optimalSize.height);

        mCamera.setParameters(parameters);
        if (previewCallback != null) {
            mCamera.setPreviewCallbackWithBuffer(previewCallback);
            Camera.Size size = parameters.getPreviewSize();
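            // Allocate one callback buffer large enough for a full preview frame
            // (width * height * bits-per-pixel / 8).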
            byte[] data = new byte[size.width*size.height*
                    ImageFormat.getBitsPerPixel(parameters.getPreviewFormat())/8];
            mCamera.addCallbackBuffer(data);
        }
        mCamera.startPreview();
    }

}
If you want to debug into source code other than your own, you can attach it as described in these answers, for example: Does this answer your question?

But dalvik.annotation, dalvik.bytecode and dalvik.system do not work with the method described above. What could be the problem? Please help.

Go to Window -> Android SDK Manager and install "Sources for Android SDK". Now try Ctrl-clicking some Android identifier; you will get the usual "Source not found" page. Click "Attach Source" and you get the option to select an external folder. Browse to /home/me/android-sdks/sources/android-16 (or wherever you installed the SDK; that is the default) and hit OK.

The above works, but not for dalvik.