OpenCV stereo SGBM gives a very bad disparity map


Here is my calibration code:

 void calibrate()
 {
    int numBoards = 10;
    int board_w = 6;
    int board_h = 9;
    Size board_sz = Size(board_w, board_h);
    int board_n = board_w*board_h;
    vector<vector<Point3f> > object_points;
    vector<vector<Point2f> > imagePoints1, imagePoints2;
    vector<Point2f> corners1, corners2;
    vector<Point3f> obj;
    // One set of 3D chessboard corner positions (unit square size, Z = 0)
    for (int j=0; j<board_n; j++)
    {
        obj.push_back(Point3f(j/board_w, j%board_w, 0.0f));
    }
    Mat img1, img2, gray1, gray2;
    VideoCapture cap1(0);
    VideoCapture cap2(1);
    int success = 0, k = 0;
    bool found1 = false, found2 = false;
    namedWindow("left 1");
    namedWindow("right 1");
    while (success < numBoards)
    {
        cap1 >> img1;
        cap2 >> img2;

        cvtColor(img1, gray1, CV_BGR2GRAY);
        cvtColor(img2, gray2, CV_BGR2GRAY);
        found1 = findChessboardCorners(img1, board_sz, corners1, CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_FILTER_QUADS);
        found2 = findChessboardCorners(img2, board_sz, corners2, CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_FILTER_QUADS);
        if (found1)
        {
            cornerSubPix(gray1, corners1, Size(11, 11), Size(-1, -1), TermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 30, 0.1));
            drawChessboardCorners(gray1, board_sz, corners1, found1);
        }
        if (found2)
        {
            cornerSubPix(gray2, corners2, Size(11, 11), Size(-1, -1), TermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 30, 0.1));
            drawChessboardCorners(gray2, board_sz, corners2, found2);
        }
        imshow("left 1", img1);
        imshow("right 1", img2);
        k = cv::waitKey(2);
        if(found1) cout<<"found 1"<<endl;
        if(found2) cout<<"found 2"<<endl;
        if(!found1 && !found2) cout<<"no"<<endl;

        if ( found1  && found2)
        {
            imagePoints1.push_back(corners1);
            imagePoints2.push_back(corners2);
            object_points.push_back(obj);
            printf ("Corners stored\n");
            success++;
            if (success >= numBoards)
            {
                break;
            }
        }
    }
    destroyAllWindows();
    printf("Starting Calibration\n");
    Mat CM1 = Mat(3, 3, CV_64FC1);
    Mat CM2 = Mat(3, 3, CV_64FC1);
    Mat D1, D2;
    Mat R, T, E, F;
    // Stereo calibration; note that stereoCalibrate() also returns the RMS
    // reprojection error, which is worth checking (see the comments below).
    stereoCalibrate(object_points, imagePoints1, imagePoints2,
    CM1, D1, CM2, D2, img1.size(), R, T, E, F,
    cvTermCriteria(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS, 100, 1e-5),
    CV_CALIB_SAME_FOCAL_LENGTH | CV_CALIB_ZERO_TANGENT_DIST);
    FileStorage fs1("mystereocalib.yml", FileStorage::WRITE);
    fs1 << "CM1" << CM1;
    fs1 << "CM2" << CM2;
    fs1 << "D1" << D1;
    fs1 << "D2" << D2;
    fs1 << "R" << R;
    fs1 << "T" << T;
    fs1 << "E" << E;
    fs1 << "F" << F;
    printf("Done Calibration\n");
    printf("Starting Rectification\n");
    Mat R1, R2, P1, P2, Q;
    stereoRectify(CM1, D1, CM2, D2, img1.size(), R, T, R1, R2, P1, P2, Q);
    fs1 << "R1" << R1;
    fs1 << "R2" << R2;
    fs1 << "P1" << P1;
    fs1 << "P2" << P2;
    fs1 << "Q" << Q;
    printf("Done Rectification\n");
    printf("Applying Undistort\n");
    Mat map1x, map1y, map2x, map2y;
    Mat imgU1, imgU2;
    initUndistortRectifyMap(CM1, D1, R1, P1, img1.size(), CV_32FC1, map1x, map1y);
    initUndistortRectifyMap(CM2, D2, R2, P2, img2.size(), CV_32FC1, map2x, map2y);
    printf("Undistort complete\n");

    cap1.release();
    cap2.release();
    return;
 }
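
Not part of the original code, but a common way to sanity-check the rectification computed above is to remap one captured pair with the maps from initUndistortRectifyMap() and draw horizontal lines over the side-by-side result; after a good rectification, corresponding points in the two images should lie on the same row. A minimal sketch, reusing the variable names from calibrate():

    // Sketch: visual check of the rectification (drop in after the maps are built).
    Mat canvas;
    remap(img1, imgU1, map1x, map1y, INTER_LINEAR);
    remap(img2, imgU2, map2x, map2y, INTER_LINEAR);
    hconcat(imgU1, imgU2, canvas);                  // left and right side by side
    for (int y = 0; y < canvas.rows; y += 25)       // rectified epipolar lines are horizontal
        line(canvas, Point(0, y), Point(canvas.cols - 1, y), Scalar(0, 255, 0), 1);
    imshow("rectification check", canvas);
    waitKey(0);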

I am new to OpenCV and image processing. What am I doing wrong?

Try using this calibration file. I also tried to calibrate my cameras, but getting a small error is very hard, and using this configuration worked better for me than trying to find my cameras' parameters (most likely my cameras are similar to the ones in the calibration file). In the end, you might also try not using a calibration file at all.
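
If you want to rule out problems in the matching code itself, one quick test (a sketch in the same OpenCV 2.4 C++ API as the question, with assumed camera indices and parameter values) is to run StereoSGBM directly on the raw, un-rectified frames. If the cameras are roughly parallel, this should already produce a coarse but recognisable disparity map:

    #include <opencv2/opencv.hpp>
    using namespace cv;

    int main()
    {
        VideoCapture cap1(0), cap2(1);          // assumed camera indices
        Mat img1, img2, g1, g2, disp, disp8;
        for (;;)
        {
            cap1 >> img1;
            cap2 >> img2;
            if (img1.empty() || img2.empty()) break;
            cvtColor(img1, g1, CV_BGR2GRAY);
            cvtColor(img2, g2, CV_BGR2GRAY);
            // minDisparity=0, numDisparities=128, SADWindowSize=5,
            // P1/P2 from the usual 8*win*win / 32*win*win rule of thumb
            StereoSGBM sgbm(0, 128, 5, 8*5*5, 32*5*5, 1, 63, 10, 100, 32, true);
            sgbm(g1, g2, disp);                 // disp is CV_16S, 16 * disparity
            disp.convertTo(disp8, CV_8U, 255.0 / (128 * 16.0));
            imshow("raw disparity, no rectification", disp8);
            if (waitKey(30) >= 0) break;
        }
        return 0;
    }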

stereoCalibrate() returns an error value; check it if your calibration is bad.
@berak OK, thanks, I will check it. Maybe numBoards = 10 is not enough for a good calibration?
Unfortunately that is the "experiment" part. Try anything between 10 and 30 boards and make sure the error is as low as possible.
@berak: what is an acceptable error value?
As small as possible. Anything > 0.5 will already be bad.
Sorry, you are missing the main point: a calibration file is the output of calibrating the actual cameras, and you cannot simply swap in a data file from elsewhere. Please delete this answer, it is not valid. It is pure chance that your "solution" worked in your single case.
I know that; as I wrote in my post, I had a similar problem: calibrating my own cameras gave very bad results, so I used this calibration file and it was better than any of my attempts. I know it is odd, but try it, maybe it works for you too. Also, don't forget that many cameras are very similar to each other (or at least their lenses are). Please note the last sentence of my post.
Again, pure chance. (Not a valid option.)
Pure chance, yes, but you cannot tell whether it suits you until you try it; maybe your camera is also similar to this one (you did not say which cameras you are using, so we can only guess). Also, try it without any calibration file: a wrong calibration file will certainly give very bad results, and no calibration file at least lets you check that the SGBM code itself is correct.
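
Regarding the error value mentioned above: stereoCalibrate() returns the RMS reprojection error as a double, so the calibrate() function in the question can capture and print it. A minimal sketch, reusing the variable names from the calibration code above (the 0.5 threshold is the rule of thumb quoted in the comments):

    double rms = stereoCalibrate(object_points, imagePoints1, imagePoints2,
                                 CM1, D1, CM2, D2, img1.size(), R, T, E, F,
                                 cvTermCriteria(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS, 100, 1e-5),
                                 CV_CALIB_SAME_FOCAL_LENGTH | CV_CALIB_ZERO_TANGENT_DIST);
    cout << "stereoCalibrate RMS reprojection error: " << rms << endl;
    if (rms > 0.5)
        cout << "Warning: error above 0.5, the calibration is probably too poor." << endl;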
 int main(int argc, char* argv[])
 {
    //calibrate();

    Mat img1, img2;

    VideoCapture cap1 = VideoCapture(0);
    VideoCapture cap2 = VideoCapture(1);

    cap1 >> img1;
    cap2 >> img2;

    Mat Q;
    FileStorage fs("mystereocalib.yml", FileStorage::READ);
    fs["Q"] >> Q;

    Mat CM1, CM2, D1, D2, P1, P2, R1, R2;

    fs["CM1"] >> CM1;
    fs["CM2"] >> CM2;
    fs["D1"] >> D1;
    fs["D2"] >> D2;
    fs["P1"] >> P1;
    fs["P2"] >> P2;
    fs["R1"] >> R1;
    fs["R2"] >> R2;

    fs.release();

    Mat map1x, map1y, map2x, map2y;
    Mat imgU1, imgU2;
    initUndistortRectifyMap(CM1, D1, R1, P1, img1.size(), CV_32FC1, map1x, map1y);
    initUndistortRectifyMap(CM2, D2, R2, P2, img2.size(), CV_32FC1, map2x, map2y);


    while(1) 
    {
    cap1 >> img1;
    cap2 >> img2;

    imshow("img1", img1);
    imshow("img2", img2);
    cv::waitKey(1);

    remap(img1, imgU1, map1x, map1y, INTER_LINEAR, BORDER_CONSTANT, Scalar());
    remap(img2, imgU2, map2x, map2y, INTER_LINEAR, BORDER_CONSTANT, Scalar());


    Mat g1,g2, disp, disp8;

    cvtColor(imgU1, g1, CV_BGR2GRAY);
    cvtColor(imgU2, g2, CV_BGR2GRAY);

    /*StereoSGBM sbm;
    sbm.SADWindowSize = 5;
    sbm.numberOfDisparities = 144;
    sbm.preFilterCap = 63;
    sbm.minDisparity = -39;
    sbm.uniquenessRatio = 10;
    sbm.speckleWindowSize = 100;
    sbm.speckleRange = 32;
    sbm.disp12MaxDiff = 2;
    sbm.fullDP = true;
    sbm.P1 = 216;
    sbm.P2 = 864;
    sbm(g1, g2, disp);*/

    int sadSize = 3;
    StereoSGBM sbm;
    sbm.SADWindowSize = sadSize;
    sbm.numberOfDisparities = 128;//144;
    sbm.preFilterCap = 63;
    sbm.minDisparity = 0; //-39;
    sbm.uniquenessRatio = 10;
    sbm.speckleWindowSize = 100;
    sbm.speckleRange = 32;
    sbm.disp12MaxDiff = 1;
    sbm.fullDP = true;
    sbm.P1 = sadSize*sadSize*4;
    sbm.P2 = sadSize*sadSize*32;
    sbm(g1, g2, disp);

    //StereoSGBM sgbm;
    //sgbm.SADWindowSize = 5;
    //sgbm.numberOfDisparities = 192;
    //sgbm.preFilterCap = 4;
    //sgbm.minDisparity = -64;
    //sgbm.uniquenessRatio = 1;
    //sgbm.speckleWindowSize = 150;
    //sgbm.speckleRange = 2;
    //sgbm.disp12MaxDiff = 10;
    //sgbm.fullDP = false;
    //sgbm.P1 = 600;
    //sgbm.P2 = 2400;
    //sgbm(g1, g2, disp);


    //StereoBM sbm;
    //sbm.state->SADWindowSize = 9;
    //sbm.state->numberOfDisparities = 112;
    //sbm.state->preFilterSize = 5;
    //sbm.state->preFilterCap = 61;
    //sbm.state->minDisparity = -39;
    //sbm.state->textureThreshold = 507;
    //sbm.state->uniquenessRatio = 0;
    //sbm.state->speckleWindowSize = 0;
    //sbm.state->speckleRange = 8;
    //sbm.state->disp12MaxDiff = 1;
    //sbm(g1, g2, disp);


    normalize(disp, disp8, 0, 255, CV_MINMAX, CV_8U);
    //disp.convertTo(disp8, CV_8U);

    imshow("disp8", disp8);


    }

    waitKey(0);

    return 0;
}
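
One more detail, not mentioned in the original post but a common source of confusion: StereoSGBM writes a CV_16S disparity map in which every value is the true disparity multiplied by 16. normalize(..., CV_MINMAX, ...) only stretches whatever range happens to be present, so even a noisy map fills the 0..255 range and can look deceptively "full". For anything metric, or just for frames that are comparable to each other, divide by 16 first. A sketch, reusing disp from the loop above and the Q matrix read from the calibration file:

    // Sketch: convert SGBM's fixed-point output to real disparities and
    // reproject to 3D with the Q matrix from stereoRectify().
    Mat dispFloat, xyz, disp8;
    disp.convertTo(dispFloat, CV_32F, 1.0 / 16.0);            // SGBM stores disparity * 16
    reprojectImageTo3D(dispFloat, xyz, Q, true);              // per-pixel 3D coordinates

    // For display, scale by the known disparity range (128 here) instead of
    // CV_MINMAX so consecutive frames are directly comparable.
    dispFloat.convertTo(disp8, CV_8U, 255.0 / 128.0);
    imshow("disp8", disp8);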