Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/opencv/3.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
openCV三角点_Opencv_Point Cloud Library - Fatal编程技术网

openCV三角点

openCV三角点,opencv,point-cloud-library,Opencv,Point Cloud Library,首先,谢谢你的阅读 根据openCV函数提供的信息,我在使用PCL生成点云时遇到问题 我使用了两个图像,函数识别出几个关键点 然后用RANSAC算法进行匹配并计算基本函数 然后,我打印了每个图像中的点,以查看相关点,我有几个匹配良好的点 现在我正在尝试生成点云来重新投影这些点,因为下一步是制作一个包含两个以上图像的更大点云。。利用二维信息进行三维重建 我的问题是,我不能很好地填充云,因为点的位置很奇怪,而且所有的点看起来都很近。。。我使用的代码有问题吗 下面是我使用的函数和矩阵: 调用三角化函数

首先,谢谢你的阅读

根据openCV函数提供的信息,我在使用PCL生成点云时遇到问题

  • 我使用了两个图像,函数识别出几个关键点
  • 然后用RANSAC算法进行匹配并计算基础矩阵（fundamental matrix）
  • 然后,我打印了每个图像中的点,以查看相关点,我有几个匹配良好的点
  • 现在我正在尝试生成点云来重新投影这些点,因为下一步是制作一个包含两个以上图像的更大点云。。利用二维信息进行三维重建
  • 我的问题是,我不能很好地填充云,因为点的位置很奇怪,而且所有的点看起来都很近。。。我使用的代码有问题吗
  • 下面是我使用的函数和矩阵:

    调用三角化函数:

    TriangulatePoints(keypoints1, keypoints2, K.t(), P, P1, pointCloud) // NOTE(review): K.t() is the TRANSPOSE of K, but the parameter is Kinv — K.inv() is almost certainly intended; this call also omits the `matches` argument that the corrected TriangulatePoints signature below requires.
    
    填充点云（populate the cloud）：

    PopulatePCLPointCloud(pointCloud);
    
    填充函数:

    // Populate the global PCL point cloud from the triangulated 3D points.
    //   pointcloud : triangulated points; NaN/Inf and far-away outliers are skipped.
    // Side effect: resets and refills the file-level `cloud` as an unorganized
    // (height == 1) pcl::PointCloud<pcl::PointXYZRGB>.
    void PopulatePCLPointCloud(const vector<Point3d>& pointcloud) //Populate point cloud
    {
        cout << "Creating point cloud...";
        cloud.reset(new pcl::PointCloud<pcl::PointXYZRGB>);
        for (size_t i = 0; i < pointcloud.size(); i++)
        {
            // One fixed color for every point (OpenCV Vec3b is BGR order).
            Vec3b rgbv(255, 255, 0);
            // Reject erroneous coordinates: std::isfinite() is false for both NaN
            // and +/-Inf (portable, unlike the MSVC-only _isnan, and it already
            // covers the `x != x` NaN trick), and std::fabs keeps double
            // precision where fabsf silently truncated to float first.
            if (!std::isfinite(pointcloud[i].x) ||
                !std::isfinite(pointcloud[i].y) ||
                !std::isfinite(pointcloud[i].z) ||
                std::fabs(pointcloud[i].x) > 10.0 ||
                std::fabs(pointcloud[i].y) > 10.0 ||
                std::fabs(pointcloud[i].z) > 10.0)
            {
                continue;
            }
            pcl::PointXYZRGB pclp;
            // 3D coordinates
            pclp.x = pointcloud[i].x;
            pclp.y = pointcloud[i].y;
            pclp.z = pointcloud[i].z;
            // RGB color: PCL expects the BIT PATTERN of a packed 0x00RRGGBB
            // uint32_t reinterpreted as float. The original stored the packed
            // value in a float variable first, which VALUE-converts the integer
            // (destroying the bit pattern) and then reinterpreted the float as
            // itself — a no-op producing a wrong color. Pack into uint32_t and
            // copy the raw bytes instead (memcpy also avoids the
            // strict-aliasing UB of reinterpret_cast).
            uint32_t rgb = ((uint32_t)rgbv[2] << 16 | (uint32_t)rgbv[1] << 8 | (uint32_t)rgbv[0]);
            std::memcpy(&pclp.rgb, &rgb, sizeof(float));
            cloud->push_back(pclp);
        }
        cloud->width = (uint32_t)cloud->points.size();  // number of points
        cloud->height = 1;                              // one row of data: unorganized cloud
    }
    

    我解决了这个问题,我有两个大问题

    首先，我将未过滤的关键点传递给三角剖分函数，因此我看到的既有匹配点也有无用点，而且无用的点可能比有用的还多

    正如你们将在三角函数中看到的,我给出了我通过ransacTest和SymTest过滤得到的匹配点。然后只使用关键点匹配的索引。所以一切都很好(=)只是展示了好的匹配

    其次,三角函数是错误的

    在此更正:

      double TriangulatePoints(const vector<KeyPoint>& pt_set1, const vector<KeyPoint>& pt_set2, const Mat& Kinv, const Matx34d& P, const Matx34d& P1, vector<Point3d>& pointcloud, vector<DMatch>& matches)
    {
        // Triangulate one 3D point per RANSAC-filtered match.
        //   pt_set1/pt_set2 : keypoints of image 1 / image 2
        //   Kinv            : inverse of the calibration matrix K (NOT K.t())
        //   P, P1           : 3x4 camera matrices of the two views
        //   pointcloud      : output — one Point3d appended per match
        //   matches         : good matches; queryIdx indexes pt_set1, trainIdx pt_set2
        // Returns 1.0 (the reprojection error is not computed yet; the unused
        // reproj_error vector of the original has been dropped).
        for (unsigned int i = 0; i < matches.size(); i++)
        {
            // Convert pixel coordinates to normalized homogeneous coordinates:
            // multiplying by Kinv removes the camera intrinsics.
            Point2f kp = pt_set1[matches[i].queryIdx].pt;
            Point3d u(kp.x, kp.y, 1.0);
            Mat_<double> um = Kinv * Mat_<double>(u);
            u.x = um(0);
            u.y = um(1);
            u.z = um(2);
            Point2f kp1 = pt_set2[matches[i].trainIdx].pt;
            Point3d u1(kp1.x, kp1.y, 1.0);
            Mat_<double> um1 = Kinv * Mat_<double>(u1);
            u1.x = um1(0);
            u1.y = um1(1);
            u1.z = um1(2);
            // Linear least-squares triangulation (Hartley & Zisserman, sec. 12.2).
            Mat_<double> X = LinearLSTriangulation(u, P, u1, P1);
            pointcloud.push_back(Point3d(X(0), X(1), X(2)));
        }
        cout << "cantidad Puntos" << pointcloud.size() << endl;
        return 1;
    }
    
    double TriangulatePoints(const vector<KeyPoint>& pt_set1, const vector<KeyPoint>& pt_set2, const Mat& Kinv, const Matx34d& P, const Matx34d& P1, vector<Point3d>& pointcloud, vector<DMatch>& matches)
    {
        //Mat_<double> KP1 = Kinv.inv() * Mat(P1);
        vector<double> reproj_error;
    for(无符号整数i=0;iMat_<double> LinearLSTriangulation(Point3d u,//homogenous image point (u,v,1) 
        Matx34d P,//camera 1 matrix 
        Point3d u1,//homogenous image point in 2nd camera 
        Matx34d P1//camera 2 matrix 
        ) {
        //build A matrix 
        Matx43d A(u.x*P(2, 0) - P(0, 0), u.x*P(2, 1) - P(0, 1), u.x*P(2, 2) - P(0, 2), u.y*P(2, 0) - P(1, 0), u.y*P(2, 1) - P(1, 1), u.y*P(2, 2) - P(1, 2), u1.x*P1(2, 0) - P1(0, 0), u1.x*P1(2, 1) - P1(0, 1), u1.x*P1(2, 2) - P1(0, 2), u1.y*P1(2, 0) - P1(1, 0), u1.y*P1(2, 1) - P1(1, 1), u1.y*P1(2, 2) - P1(1, 2));
        //build B vector 
        Matx41d B(-(u.x*P(2, 3) - P(0, 3)), -(u.y*P(2, 3) - P(1, 3)), -(u1.x*P1(2, 3) - P1(0, 3)), -(u1.y*P1(2, 3) - P1(1, 3))); //solve for X 
        Mat_<double> X;
        solve(A, B, X, DECOMP_SVD);
        return X;
    }
    
     Fundamental =
    [-5.365548729323536e-007, 0.0003108718787914248, -0.0457266834161677;
      -0.0003258809500026533, 4.695400741230473e-006, 1.295466303565132;
      0.05008017646011816, -1.300323239531621, 1]
    
    
    
    Calibration Matrix =
     [744.2366711500123, 0, 304.166818982576;
      0, 751.1308610972965, 225.3750058508892;
      0, 0, 1]
    
    Essential =
     [-0.2971914249411831, 173.7833277398352, 17.99033324690517;
      -182.1736856953757, 2.649133690692166, 899.405863948026;
      -17.51073288084396, -904.8934348365967, 0.3895173270497594]
    
    Rotation matrix =
     [-0.9243506387712034, 0.03758098759490174, -0.3796887751496749;
      0.03815782996164848, 0.9992536546828119, 0.006009460513344713;
      -0.379631237671357, 0.008933251056327281, 0.9250947629349537]
    
     Translation matrix =
     [-0.9818733349058273;
      0.01972152607878091;
      -0.1885094576142884]
    
    P0 matrix =
     [1, 0, 0, 0;
      0, 1, 0, 0;
      0, 0, 1, 0]
    
    P1 matrix =
     [-0.9243506387712034, 0.03758098759490174, -0.3796887751496749, -0.9818733349058273;
      0.03815782996164848, 0.9992536546828119, 0.006009460513344713, 0.01972152607878091;
      -0.379631237671357, 0.008933251056327281, 0.9250947629349537, -0.1885094576142884]
    
      double TriangulatePoints(const vector<KeyPoint>& pt_set1, const vector<KeyPoint>& pt_set2, const Mat&Kinv, const Matx34d& P, const Matx34d& P1, vector<Point3d>& pointcloud, vector<DMatch>& matches)
    {
        // Corrected triangulation routine (duplicate listing of the version above):
        // appends one triangulated Point3d to `pointcloud` per filtered match.
        // Kinv must be the INVERSE of the calibration matrix K; queryIdx indexes
        // pt_set1 and trainIdx indexes pt_set2.
        //Mat_<double> KP1 = Kinv.inv() *Mat(P1);
        vector<double> reproj_error; // unused — reprojection error is not computed yet
        for (unsigned int i = 0; i < matches.size(); i++)
        {    //convert to normalized homogeneous coordinates   
            Point2f kp = pt_set1[matches[i].queryIdx].pt;
            Point3d u(kp.x, kp.y, 1.0);
            Mat_<double> um = Kinv * Mat_<double>(u); // Kinv strips the camera intrinsics
            u.x = um(0);
            u.y = um(1);
            u.z = um(2);
            Point2f kp1 = pt_set2[matches[i].trainIdx].pt;
            Point3d u1(kp1.x, kp1.y, 1.0);
            Mat_<double> um1 = Kinv * Mat_<double>(u1);
            u1.x = um1(0);
            u1.y = um1(1);
            u1.z = um1(2);
            //triangulate — linear least-squares method (Hartley & Zisserman 12.2)
            Mat_<double> X = LinearLSTriangulation(u, P, u1, P1);
            pointcloud.push_back(Point3d(X(0), X(1), X(2)));
        }
        cout << "cantidad Puntos" << pointcloud.size() << endl;
        return 1;
    }