Java 冲浪特征检测-OpenCV

Java 冲浪特征检测-OpenCV,java,android,c++,opencv,surf,Java,Android,C++,Opencv,Surf,我正在开发一个android应用程序,主要目的是在场景中检测被询问的对象。为此,我使用OpenCV的SURF算法。我并没有“好运”的发现,因为我不知道什么时候“发现”了一个物体 我用我的设备摄像头获取一帧,然后按照以下步骤获取对象的关键点和描述符: Java代码 public void onSnapClick(View v) { Imgproc.GaussianBlur(frameGray, frameGray, new Size(3, 3), 2); Imgproc.Canny

我正在开发一个android应用程序,主要目的是在场景中检测被询问的对象。为此,我使用OpenCV的SURF算法。我并没有“好运”的发现,因为我不知道什么时候“发现”了一个物体

我用我的设备摄像头获取一帧,然后按照以下步骤获取对象的关键点和描述符:

Java代码

public void onSnapClick(View v) {
    // Build the edge image that feeds SURF feature extraction:
    // denoise, extract edges, then normalize the size.
    final Size blurKernel = new Size(3, 3);
    final Size workingSize = new Size(320, 240);
    Imgproc.GaussianBlur(frameGray, frameGray, blurKernel, 2);
    Imgproc.Canny(frameGray, frameGray, 40, 120);
    Imgproc.resize(frameGray, frameGray, workingSize);
    FindFeatures(frameGray.getNativeObjAddr()); //JNI call
    //Some code to store data in DB...
}
public void onSearchClick(View v) {
    Imgproc.GaussianBlur(frameGray, frameGray, new Size(3, 3), 2);
    Imgproc.Canny(frameGray, frameGray, 40, 120);
    Imgproc.resize(frameGray, frameGray, new Size(320, 240));
    nObject = FindObjects(frameGray.getNativeObjAddr()); //JNI call
    if (nObject = searchObject) 
        //draw frame with a rectangle around the found object in the scenario....
}
JNI呼叫

// SURF tuning parameters shared by the JNI entry points below.
double hessianThreshold=600;  // keypoint response cut-off: higher -> fewer, stronger keypoints
int nOctaves=4;               // pyramid octaves evaluated by the detector
int nOctaveLayers=2;          // layers per octave
bool extended=true;           // true -> 128-float descriptors instead of 64
bool upright=false;           // false -> estimate orientation (rotation-invariant matching)

JNIEXPORT void JNICALL Java_es_ugr_reconocimiento_Juego_FindFeatures(JNIEnv* env, jobject, jlong addrGray) {
    // Detect SURF keypoints on the snapped frame and compute their descriptors.
    // NOTE(review): both results are locals and are discarded on return --
    // presumably the caller persists them through some other channel; confirm.
    Mat& frame = *(Mat*) addrGray;
    SurfFeatureDetector surf(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
    SurfDescriptorExtractor extractor;
    vector<KeyPoint> kps;
    surf.detect(frame, kps);
    if (!kps.empty()) {
        Mat descriptors;
        extractor.compute(frame, kps, descriptors);
    }
}
// NOTE(review): these redeclare the same globals defined earlier in the file;
// inside one translation unit this is a multiple-definition error. Almost
// certainly a copy/paste artifact of the post -- keep a single set in real code.
double hessianThreshold=600;
int nOctaves=4;
int nOctaveLayers=2;
bool extended=true;
bool upright=false;

JNIEXPORT jint JNICALL Java_es_ugr_reconocimiento_Juego_FindObjects(JNIEnv* env, jobject, jlong addrGray) {
    // Detect SURF keypoints in the scene frame, match them against every stored
    // object and, on success, draw the matched object's outline on the frame.
    // Returns the index of the matched object, or -1 when nothing is found.
    Mat& frameGray = *(Mat*) addrGray;
    vector<KeyPoint> keyPoints_esc;
    Mat descriptores_esc;
    SurfFeatureDetector detector_Surf(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
    SurfDescriptorExtractor extractor_Surf;
    detector_Surf.detect(frameGray, keyPoints_esc);
    if (keyPoints_esc.empty()) return -1;
    extractor_Surf.compute(frameGray, keyPoints_esc, descriptores_esc);
    // BUG FIX: cv::Mat::rows is a data member, not a member function;
    // "descriptores_esc.rows()" does not compile.
    if (descriptores_esc.rows == 0) return -1;

    for (int idx = 0; idx < (int) lstObjects.size(); idx++) {
        Mat descriptores_obj = lstDescriptors.at(idx);
        vector<KeyPoint> keyPoints_obj = lstKeyPoints.at(idx);
        if (descriptores_obj.rows == 0) continue; // nothing to match against

        FlannBasedMatcher matcher;
        vector<vector<DMatch> > matches;
        matcher.knnMatch(descriptores_obj, descriptores_esc, matches, 2);

        // Lowe's ratio test: keep a match only when the best neighbour is
        // clearly closer (x0.6) than the second best. BUG FIX: check each
        // entry's size BEFORE indexing matches[m][1] -- knnMatch may return
        // fewer than 2 neighbours per query, which is what made the original
        // loop "SENSITIVE TO SEGFAULTS".
        vector<DMatch> good_matches;
        for (size_t m = 0; m < matches.size(); m++) {
            if (matches[m].size() >= 2 &&
                matches[m][0].distance < 0.6f * matches[m][1].distance) {
                good_matches.push_back(matches[m][0]);
            }
        }

        // findHomography needs at least 4 correspondences (it aborts with
        // fewer), so require the geometric minimum besides the caller's
        // match-count threshold.
        if ((int) good_matches.size() >= nThreshold && good_matches.size() >= 4) {
            vector<Point2f> obj;
            vector<Point2f> scene;
            for (size_t g = 0; g < good_matches.size(); g++) {
                // queryIdx indexes the stored object, trainIdx the scene.
                obj.push_back(keyPoints_obj[good_matches[g].queryIdx].pt);
                scene.push_back(keyPoints_esc[good_matches[g].trainIdx].pt);
            }

            Mat H = findHomography(obj, scene, CV_RANSAC);
            if (H.empty()) continue; // RANSAC could not fit a model

            // Project the stored object's corners (240x320 working size) into
            // the scene and draw the resulting quadrilateral.
            vector<Point2f> obj_corners(4);
            obj_corners[0] = Point2f(0, 0);
            obj_corners[1] = Point2f(240, 0);
            obj_corners[2] = Point2f(240, 320);
            obj_corners[3] = Point2f(0, 320);
            vector<Point2f> scene_corners(4);
            perspectiveTransform(obj_corners, scene_corners, H);

            line(frameGray, scene_corners[0], scene_corners[1], Scalar(255, 0, 0), 4);
            line(frameGray, scene_corners[1], scene_corners[2], Scalar(255, 0, 0), 4);
            line(frameGray, scene_corners[2], scene_corners[3], Scalar(255, 0, 0), 4);
            line(frameGray, scene_corners[3], scene_corners[0], Scalar(255, 0, 0), 4);

            // Mark every scene keypoint that participated in the match.
            for (size_t s = 0; s < scene.size(); s++) {
                circle(frameGray, Point((int) scene[s].x, (int) scene[s].y), 10, Scalar(255, 255, 255, 255));
            }

            return idx; // position of the matched object
        }
    }

    // BUG FIX: the original fell off the end of a non-void function when no
    // object matched (undefined behaviour in C++); report "not found".
    return -1;
}
JNI呼叫

// NOTE(review): third redeclaration of the same globals -- a scrape/copy
// artifact of the original post; a real translation unit keeps only one set.
double hessianThreshold=600;
int nOctaves=4;
int nOctaveLayers=2;
bool extended=true;
bool upright=false;

// NOTE(review): duplicate of the FindFeatures definition shown earlier in the
// post (redefining the same symbol would not link); kept verbatim.
// Detects SURF keypoints and computes descriptors for the snapped frame; both
// results are locals and are discarded on return -- presumably persisted
// elsewhere by the caller; confirm.
JNIEXPORT void JNICALL Java_es_ugr_reconocimiento_Juego_FindFeatures(JNIEnv* env, jobject, jlong addrGray) {
    Mat& frameGray= *(Mat*) addrGray;
    vector<KeyPoint> keyPoints;
    Mat descriptores;
    SurfFeatureDetector detector_Surf(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
    SurfDescriptorExtractor extractor_Surf;
    detector_Surf.detect(frameGray, keyPoints);
    // Descriptors are only computed when at least one keypoint was found.
    if (keyPoints.size() > 0)
        extractor_Surf.compute(frameGray, keyPoints, descriptores);
}
// NOTE(review): fourth redeclaration of the same globals -- a scrape/copy
// artifact of the original post; a real translation unit keeps only one set.
double hessianThreshold=600;
int nOctaves=4;
int nOctaveLayers=2;
bool extended=true;
bool upright=false;

// NOTE(review): duplicate of the FindObjects definition shown earlier in the
// post (a scrape artifact); the same fixes are applied here.
JNIEXPORT jint JNICALL Java_es_ugr_reconocimiento_Juego_FindObjects(JNIEnv* env, jobject, jlong addrGray) {
    // Detect SURF keypoints in the scene frame, match them against every stored
    // object and, on success, draw the matched object's outline on the frame.
    // Returns the index of the matched object, or -1 when nothing is found.
    Mat& frameGray = *(Mat*) addrGray;
    vector<KeyPoint> keyPoints_esc;
    Mat descriptores_esc;
    SurfFeatureDetector detector_Surf(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
    SurfDescriptorExtractor extractor_Surf;
    detector_Surf.detect(frameGray, keyPoints_esc);
    if (keyPoints_esc.empty()) return -1;
    extractor_Surf.compute(frameGray, keyPoints_esc, descriptores_esc);
    // BUG FIX: cv::Mat::rows is a data member, not a member function;
    // "descriptores_esc.rows()" does not compile.
    if (descriptores_esc.rows == 0) return -1;

    for (int idx = 0; idx < (int) lstObjects.size(); idx++) {
        Mat descriptores_obj = lstDescriptors.at(idx);
        vector<KeyPoint> keyPoints_obj = lstKeyPoints.at(idx);
        if (descriptores_obj.rows == 0) continue; // nothing to match against

        FlannBasedMatcher matcher;
        vector<vector<DMatch> > matches;
        matcher.knnMatch(descriptores_obj, descriptores_esc, matches, 2);

        // Lowe's ratio test: keep a match only when the best neighbour is
        // clearly closer (x0.6) than the second best. BUG FIX: check each
        // entry's size BEFORE indexing matches[m][1] -- knnMatch may return
        // fewer than 2 neighbours per query, which is what made the original
        // loop "SENSITIVE TO SEGFAULTS".
        vector<DMatch> good_matches;
        for (size_t m = 0; m < matches.size(); m++) {
            if (matches[m].size() >= 2 &&
                matches[m][0].distance < 0.6f * matches[m][1].distance) {
                good_matches.push_back(matches[m][0]);
            }
        }

        // findHomography needs at least 4 correspondences (it aborts with
        // fewer), so require the geometric minimum besides the caller's
        // match-count threshold.
        if ((int) good_matches.size() >= nThreshold && good_matches.size() >= 4) {
            vector<Point2f> obj;
            vector<Point2f> scene;
            for (size_t g = 0; g < good_matches.size(); g++) {
                // queryIdx indexes the stored object, trainIdx the scene.
                obj.push_back(keyPoints_obj[good_matches[g].queryIdx].pt);
                scene.push_back(keyPoints_esc[good_matches[g].trainIdx].pt);
            }

            Mat H = findHomography(obj, scene, CV_RANSAC);
            if (H.empty()) continue; // RANSAC could not fit a model

            // Project the stored object's corners (240x320 working size) into
            // the scene and draw the resulting quadrilateral.
            vector<Point2f> obj_corners(4);
            obj_corners[0] = Point2f(0, 0);
            obj_corners[1] = Point2f(240, 0);
            obj_corners[2] = Point2f(240, 320);
            obj_corners[3] = Point2f(0, 320);
            vector<Point2f> scene_corners(4);
            perspectiveTransform(obj_corners, scene_corners, H);

            line(frameGray, scene_corners[0], scene_corners[1], Scalar(255, 0, 0), 4);
            line(frameGray, scene_corners[1], scene_corners[2], Scalar(255, 0, 0), 4);
            line(frameGray, scene_corners[2], scene_corners[3], Scalar(255, 0, 0), 4);
            line(frameGray, scene_corners[3], scene_corners[0], Scalar(255, 0, 0), 4);

            // Mark every scene keypoint that participated in the match.
            for (size_t s = 0; s < scene.size(); s++) {
                circle(frameGray, Point((int) scene[s].x, (int) scene[s].y), 10, Scalar(255, 255, 255, 255));
            }

            return idx; // position of the matched object
        }
    }

    // BUG FIX: the original fell off the end of a non-void function when no
    // object matched (undefined behaviour in C++); report "not found".
    return -1;
}
我一直在搜索,几乎我找到的每一个代码都包含数字4作为nThreshold,但对我来说,它工作得不好。我的代码几乎每次“找到”一个对象


还有其他更好的方法吗?比如使用不同的匹配器或另一个阈值,或者试图弄清楚执行单应性是否会创建类似于矩形的东西(我这样说是因为有时它会“找到”某个东西,但却绘制了四条不构成矩形的线)。

请在代码中进行以下更改

// Answer snippet (fragment of the matching loop's body; not a complete
// function). NOTE(review): the threshold test is deliberately inverted --
// too MANY "good" matches indicates ambiguity (many indistinct points), so
// that object is skipped; the 4-point guard prevents findHomography from
// crashing when too few correspondences survive the ratio test.
int nThreshold= 100;
       if (good_matches.size() >= nThreshold) 
        {
        continue; // This line is to prevent further steps of matching if there are too many good matches (Lot of ambiguous points results in false match)
        }
        vector < Point2f > obj;
        vector < Point2f > scene;

        for (int i = 0; i < good_matches.size(); i++) {
            //-- Get the keypoints from the good matches
            obj.push_back(keyPoints_obj[good_matches[i].queryIdx].pt);
            scene.push_back(keyPoints_esc[good_matches[i].trainIdx].pt);
               }

// Skip doing homography if the object and scene contains less than four points(cant draw a rectangle if less than 4 points, hence your program will crash here if you do not handle the exception)
      if(obj.size() < 4 || scene.size() < 4)
       {
       continue;
       }

       Mat H = findHomography(obj, scene, CV_RANSAC);
int nThreshold=100;
if(good_matches.size()>=nThreshold)
{
continue;//如果有太多好的匹配(很多不明确的点导致错误匹配),则此行将阻止进一步的匹配步骤
}
vector<Point2f> obj;
vector<Point2f> scene;
for (int i = 0; i < good_matches.size(); i++) {
    obj.push_back(keyPoints_obj[good_matches[i].queryIdx].pt);
    scene.push_back(keyPoints_esc[good_matches[i].trainIdx].pt);
}
您可能需要两个条件。至少需要4点来做单应,以及一个好匹配的阈值(可能有60%的匹配是好的)@migue02我的解决方案对你有帮助吗?@migue02你能解决这个问题吗?我试过了,但好匹配的大小远小于100,(最多20)。我还尝试了@berak所说的
good_matches.size() >= 0.6 * object_keypoints.size()
,但它也比良好匹配的大小小得多