
OpenCV: How to get a clean disparity map and depth map


I am currently using IP cameras from Titathink (TT522PW), which deliver a 1280×720 stream at 30 frames per second using an ordinary sensor (not the high-sensitivity low-light model).

When capturing the video stream, we see a fisheye-like distortion in the frames.

Uncorrected image
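For reference, the per-camera calibration step described in the list below boils down to something like the following sketch, assuming a standard chessboard target (the pattern size, square size, and function name here are illustrative, not taken from my project):

    #include <opencv2/opencv.hpp>
    #include <vector>

    // Calibrate one camera from BGR chessboard views; returns the RMS
    // reprojection error (this is where rms_left / rms_right come from).
    double calibrateSingleCamera(const std::vector<cv::Mat>& images,
                                 cv::Mat& K, cv::Mat& dist)
    {
      const cv::Size pattern(9, 6);   // inner chessboard corners (assumed)
      const float square = 0.025f;    // square size in meters (assumed)

      std::vector<std::vector<cv::Point3f>> obj_pts;
      std::vector<std::vector<cv::Point2f>> img_pts;

      // Planar model of the chessboard, reused for every view
      std::vector<cv::Point3f> grid;
      for (int y = 0; y < pattern.height; ++y)
        for (int x = 0; x < pattern.width; ++x)
          grid.emplace_back(x * square, y * square, 0.0f);

      for (const cv::Mat& img : images) {
        std::vector<cv::Point2f> corners;
        if (cv::findChessboardCorners(img, pattern, corners)) {
          cv::Mat gray;
          cv::cvtColor(img, gray, cv::COLOR_BGR2GRAY);
          cv::cornerSubPix(gray, corners, cv::Size(11, 11), cv::Size(-1, -1),
                           cv::TermCriteria(cv::TermCriteria::EPS + cv::TermCriteria::COUNT, 30, 0.001));
          img_pts.push_back(corners);
          obj_pts.push_back(grid);
        }
      }

      std::vector<cv::Mat> rvecs, tvecs;
      return cv::calibrateCamera(obj_pts, img_pts, images[0].size(),
                                 K, dist, rvecs, tvecs);
    }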

  • I first calibrated each camera individually to remove the distortion (during calibration I get an RMS error of rms_left = 0.166 for the left camera and rms_right = 0.162 for the right camera). Then, using the XML files produced by the individual camera calibrations, I calibrated the stereo pair, obtaining a stereo-calibration RMS error = 0.207.

  • By displaying the rectified images, we can see that the stereo calibration works well.

  • Calibrated images

    Rectified images with horizontal epipolar lines

  • I adapted DJI's functions to compute the disparity map and the point cloud.
  • Code to compute and filter the disparity map:

    bool Disparity_filter::initDispParam(){
    #ifdef USE_CUDA
      block_matcher_ = cv::cuda::createStereoBM(num_disp_, block_size_);
    #else
      block_matcher_ = cv::StereoBM::create(num_disp_, block_size_);
    #endif

    #ifdef USE_OPEN_CV_CONTRIB
      wls_filter_ = cv::ximgproc::createDisparityWLSFilter(block_matcher_); // left matcher
      wls_filter_->setLambda(8000.0);
      wls_filter_->setSigmaColor(1.5);

      right_matcher_ = cv::ximgproc::createRightMatcher(block_matcher_);
    #endif
      return true;
    }

    void Disparity_filter::computeDisparityMap(std::shared_ptr<Frame> framel, std::shared_ptr<Frame> framer){
      framel->raw_disparity_map_ = cv::Mat(HEIGHT, WIDTH, CV_16SC1);
    #ifdef USE_CUDA
      cv::cuda::GpuMat cuda_disp_left;
      framel->cuda_crop_left.upload(framel->cpu_crop_left);
      framer->cuda_crop_right.upload(framer->cpu_crop_right);

      // The GPU implementation of StereoBM outputs uint8_t, i.e. CV_8U
      block_matcher_->compute(framel->cuda_crop_left.clone(),
                              framer->cuda_crop_right.clone(),
                              cuda_disp_left);
      cuda_disp_left.download(framel->raw_disparity_map_);

      framel->raw_disparity_map_.convertTo(framel->disparity_map_8u_, CV_8UC1, 1);

      // Convert from CV_8U to CV_16S (fixed point, 4 fractional bits) for unified
      // handling in filterDisparityMap() & unprojectPtCloud()
      framel->raw_disparity_map_.convertTo(framel->raw_disparity_map_, CV_16S, 16);
    #else
      // The CPU implementation of StereoBM outputs short int, i.e. CV_16S
      cv::Mat left_for_matcher, right_for_matcher;

      left_for_matcher  = framel->cpu_crop_left.clone();
      right_for_matcher = framer->cpu_crop_right.clone();
      cv::cvtColor(left_for_matcher,  left_for_matcher,  cv::COLOR_BGR2GRAY);
      cv::cvtColor(right_for_matcher, right_for_matcher, cv::COLOR_BGR2GRAY);

      block_matcher_->compute(left_for_matcher, right_for_matcher, framel->raw_disparity_map_);
      // CV_16S disparities carry 4 fractional bits, hence the 1/16 = 0.0625 scale for display
      framel->raw_disparity_map_.convertTo(framel->disparity_map_8u_, CV_8UC1, 0.0625);
    #endif
    }

    void Disparity_filter::filterDisparityMap(std::shared_ptr<Frame> framel, std::shared_ptr<Frame> framer){
      right_matcher_->compute(framer->cpu_crop_right.clone(),
                              framel->cpu_crop_left.clone(),
                              raw_right_disparity_map_);

      // The WLS filter only takes CV_16S type cv::Mat disparities
      wls_filter_->filter(framel->raw_disparity_map_,
                          framel->cpu_crop_left,
                          filtered_disparity_map_,
                          raw_right_disparity_map_);

      filtered_disparity_map_.convertTo(framel->filtered_disparity_map_8u_, CV_8UC1, 0.0625);
    }
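For completeness, the rectified crops consumed above are produced roughly as follows: the stereo calibration yields R and T, cv::stereoRectify turns them into the projection matrices P1/P2 (the same ones read back in initPointCloud() below), and a per-frame remap makes the epipolar lines horizontal. A minimal sketch, with variable names illustrative:

    #include <opencv2/opencv.hpp>

    // K_* and dist_* are the per-camera intrinsics; R, T come from stereo calibration.
    void buildRectifier(const cv::Mat& K_left,  const cv::Mat& dist_left,
                        const cv::Mat& K_right, const cv::Mat& dist_right,
                        const cv::Mat& R, const cv::Mat& T, cv::Size size,
                        cv::Mat& map1x, cv::Mat& map1y,
                        cv::Mat& map2x, cv::Mat& map2y,
                        cv::Mat& P1, cv::Mat& P2, cv::Mat& Q)
    {
      cv::Mat R1, R2;
      cv::stereoRectify(K_left, dist_left, K_right, dist_right, size, R, T,
                        R1, R2, P1, P2, Q, cv::CALIB_ZERO_DISPARITY, 0);
      cv::initUndistortRectifyMap(K_left,  dist_left,  R1, P1, size, CV_32FC1, map1x, map1y);
      cv::initUndistortRectifyMap(K_right, dist_right, R2, P2, size, CV_32FC1, map2x, map2y);
    }

    // Per frame, the epipolar lines become horizontal after this remap:
    //   cv::remap(raw_left,  rect_left,  map1x, map1y, cv::INTER_LINEAR);
    //   cv::remap(raw_right, rect_right, map2x, map2y, cv::INTER_LINEAR);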
    
    
    Code to compute the point cloud:

    
    bool PointCloud::initPointCloud(){
      std::string stereo_c2 = "../calibration/sterolast.xml";  // calib_stereo.xml
      cv::FileStorage ts(stereo_c2, cv::FileStorage::READ);
      if (!ts.isOpened()) {
        std::cerr << "Failed to open calibration parameter file." << std::endl;
        return false;
      }
      ts["P1"] >> param_proj_left_;
      ts["P2"] >> param_proj_right_;

      principal_x_ = param_proj_left_.at<double>(0, 2);
      principal_y_ = param_proj_left_.at<double>(1, 2);
      fx_ = param_proj_left_.at<double>(0, 0);
      fy_ = param_proj_left_.at<double>(1, 1);
      baseline_x_fx_ = -param_proj_right_.at<double>(0, 3);
      std::cout << "** principal_x= " << principal_x_
                << "  ** principal_y= " << principal_y_
                << "  ** fx= " << fx_ << "  ** fy= " << fy_
                << "  ** baseline_x_fx= " << baseline_x_fx_ << std::endl << std::flush;
      return true;
    }

    void PointCloud::unprojectPtCloud(std::shared_ptr<Frame> framel)
    {
      // Due to rectification, the image borders are blank, so we cut them out
      int border_size = num_disp_;
      const int trunc_img_width_end  = WIDTH  - border_size;
      const int trunc_img_height_end = HEIGHT - border_size;

      mat_vec3_pt_ = cv::Mat_<cv::Vec3f>(HEIGHT, WIDTH, cv::Vec3f(0, 0, 0));
      for(int v = border_size; v < trunc_img_height_end; ++v)
      {
        for(int u = border_size; u < trunc_img_width_end; ++u)
        {
          cv::Vec3f &point = mat_vec3_pt_.at<cv::Vec3f>(v, u);

          // CV_16S disparities are fixed point with 4 fractional bits: scale by 0.0625
          float disparity = (float)(framel->raw_disparity_map_.at<short int>(v, u) * 0.0625);

          // Do not consider points that are farther than ~8.6 m, i.e. disparity < 6
          if(disparity >= 6.0f)
          {
            point[2] = baseline_x_fx_ / disparity;
            point[0] = (u - principal_x_) * point[2] / fx_;
            point[1] = (v - principal_y_) * point[2] / fy_;
          }
          color_buffer_[v * WIDTH + u] = framel->cpu_crop_left.at<uint8_t>(v, u);
        }
      }

      cv::Mat color_mat_ = cv::Mat(HEIGHT, WIDTH, CV_8UC1, &color_buffer_[0]).clone();
      framel->mat_vec3 = mat_vec3_pt_;
      framel->color_m  = color_mat_;
      pt_cloud_ = cv::viz::WCloud(mat_vec3_pt_, color_mat_);
    }
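As a cross-check of the hand-rolled unprojection above, the same cloud can be produced with cv::reprojectImageTo3D and the Q matrix that cv::stereoRectify returns; a minimal sketch, assuming disp16 is the CV_16S output of the block matcher:

    #include <opencv2/opencv.hpp>

    // disp16: CV_16S fixed-point disparity from StereoBM; Q: 4x4 from cv::stereoRectify
    cv::Mat unprojectWithQ(const cv::Mat& disp16, const cv::Mat& Q)
    {
      cv::Mat disp32;
      disp16.convertTo(disp32, CV_32F, 1.0 / 16.0);   // undo the 4 fractional bits

      cv::Mat xyz;                                    // CV_32FC3, one 3D point per pixel
      cv::reprojectImageTo3D(disp32, xyz, Q, /*handleMissingValues=*/true);
      return xyz;
    }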
    
    
    
How to get a stable, clean disparity map and a clean depth map?

To answer this, I watched the video you shared. The filtered disparity map looks good; the WLS filter you are using simply produces disparity maps like that, and there is nothing wrong with it. However, the filtered disparity map is generally not recommended as input for a point cloud. This is because the filter tends to fill in holes where the algorithm could not find reliable matches; in other words, the filled-in values are unreliable data. So try using the unfiltered disparity map as the input to your point cloud.
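Concretely, that means feeding framel->raw_disparity_map_ into the unprojection and skipping pixels the matcher marked invalid; a minimal sketch reusing the variables from your unprojectPtCloud() loop (the validity check is a simple heuristic for StereoBM's CV_16S output with minDisparity = 0):

    // Use the *raw* disparity for the point cloud instead of the WLS output.
    // With minDisparity = 0, StereoBM writes negative fixed-point values at
    // pixels where matching failed, so a positive threshold rejects them too.
    float disparity = framel->raw_disparity_map_.at<short>(v, u) * 0.0625f;
    if (disparity >= 6.0f)   // also enforces the ~8.6 m depth cutoff
    {
      point[2] = baseline_x_fx_ / disparity;
      point[0] = (u - principal_x_) * point[2] / fx_;
      point[1] = (v - principal_y_) * point[2] / fy_;
    }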

Also, the viewer you are using to inspect the point cloud (i.e. MeshLab) tends to swallow some points, so you could try another viewer such as CloudCompare.

Is my stereo calibration with an RMS error of 0.20 good enough to obtain a clean disparity map and a complete point cloud of both cameras' field of view?

Yes, an RMS error of 0.20 is good enough in most cases. But again, the smaller the error, the better.
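For reference, that RMS figure is the return value of cv::stereoCalibrate, so you can log it directly; a minimal sketch, assuming obj_pts, left_pts and right_pts are the chessboard correspondences from your per-camera calibrations and that CALIB_FIX_INTRINSIC keeps those intrinsics fixed:

    // K_left/dist_left and K_right/dist_right come from the individual calibrations.
    cv::Mat R, T, E, F;
    double rms = cv::stereoCalibrate(obj_pts, left_pts, right_pts,
                                     K_left, dist_left, K_right, dist_right,
                                     cv::Size(1280, 720), R, T, E, F,
                                     cv::CALIB_FIX_INTRINSIC,
                                     cv::TermCriteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 100, 1e-5));
    std::cout << "stereo RMS = " << rms << std::endl;  // ~0.207 in your case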