
C++: Video capture rotation with bilinear interpolation using OpenCV 2.4.3

Tags: C++, OpenCV, Visual Studio 2012, Interpolation, RotateTransform

I am implementing video-capture rotation with bilinear interpolation, the way warpAffine() does it in the OpenCV library, but so far I have run into some problems:

1. I get some artifacts during rotation. Here are examples of the artifacts at the borders and of the 90-degree and 360-degree rotations.

2. I cannot set the capture resolution with

capture.set(CV_CAP_PROP_FRAME_WIDTH, 1280 )
capture.set(CV_CAP_PROP_FRAME_HEIGHT, 720 )
Both calls return false.

I am using a LifeCam Cinema.

Here is my code:

#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>

#include <cmath>
#include <ctime>

#include <iostream>
#include <cstdlib>

using namespace cv;
using namespace std;

const double M_PI = 3.14159265359;

void print_help( const char* prg );
Mat rotate( Mat& in, int angle , Point2f rotationCenter );

inline uchar int2uchar( int color  ) {
    return (uchar)( color < 0 ? 0 : color > 255 ? 255 : color );
}

void print_help( const char* prg ) {
    cout << "Report:" << endl;
    cout << "Application : " << prg << endl;
    cout << "Can't access capture device" << endl;
}

// rotation with bilinear interpolation
Mat rotate( Mat& in, int angle , Point2f rotationCenter ) {

    // Note : added Scalar(0) for unused pixels to be black
    Mat out( in.size(), in.type(), Scalar(0) );

    float in_radians    = (float)( angle * M_PI / 180 );
    float sinAngle      = (float)( sin( in_radians ) );
    float cosAngle      = (float)( cos( in_radians ) );

    for ( int col(0); col < in.cols; ++col ) {
        for ( int row(0); row < in.rows; ++row ) {

            // already around rotationCenter
            // x' = x * cos(angle) - y * sin(angle)
            float temp_columns( ( col    - rotationCenter.x ) * (cosAngle) -
                                ( row    - rotationCenter.y ) * (sinAngle) +
                                rotationCenter.x );
            // y' = x * sin(angle) + y * cos(angle)
            float temp_rows   ( ( col    - rotationCenter.x ) * (sinAngle) +
                                ( row    - rotationCenter.y ) * (cosAngle) +
                                rotationCenter.y );

            float max_col( ceil (temp_columns) );
            float min_col( floor(temp_columns) );
            float max_row( ceil (temp_rows)  );
            float min_row( floor(temp_rows)  );

            // clip all irrelevant parts
            if ( max_col >= in.cols || max_row >= in.rows ||
                        min_col < 0 || min_row < 0 ) {
                // don't draw
                continue;
            }

            float deltaCol( temp_columns - min_col );
            float deltaRow( temp_rows     - min_row );

            // left top, right top, left bottom and right bottom
            Vec3b q12( in.at < Vec3b >( (int)min_row, (int)min_col ) );
            Vec3b q22( in.at < Vec3b >( (int)min_row, (int)max_col ) );
            Vec3b q11( in.at < Vec3b >( (int)max_row, (int)min_col ) );
            Vec3b q21( in.at < Vec3b >( (int)max_row, (int)max_col ) );

            // R1 - linear interpolation of bottom neighborhoods
            double blueR1   ( ( 1 - deltaCol ) * q11[0] + deltaCol * q21[0] );
            double greenR1  ( ( 1 - deltaCol ) * q11[1] + deltaCol * q21[1] );
            double redR1    ( ( 1 - deltaCol ) * q11[2] + deltaCol * q21[2] );

            // R2 - linear interpolation of top neighborhoods
            double blueR2   ( ( 1 - deltaCol ) * q12[0] + deltaCol * q22[0] );
            double greenR2  ( ( 1 - deltaCol ) * q12[1] + deltaCol * q22[1] );
            double redR2    ( ( 1 - deltaCol ) * q12[2] + deltaCol * q22[2] );

            // P - linear interpolation of R1 and R2
            int blue ( (int)ceil( ( 1 - deltaRow ) * blueR2 + deltaRow * blueR1   ) );
            int green( (int)ceil( ( 1 - deltaRow ) * greenR2 + deltaRow * greenR1 ) );
            int red  ( (int)ceil( ( 1 - deltaRow ) * redR2  + deltaRow * redR1    ) );

            // Vec3b stands for 3-channel value, each channel is a byte
            out.at < Vec3b >( row, col )[ 0 ] = int2uchar(blue);
            out.at < Vec3b >( row, col )[ 1 ] = int2uchar(green);
            out.at < Vec3b >( row, col )[ 2 ] = int2uchar(red);
        }
    }

    return out;
}

int main( int ac, char ** av ) {
   if ( ac < 2 ) {
      print_help( av[ 0 ] );
      return -1;
   }

   // In degrees
   int step = 1, angle = 90;

   VideoCapture capture;

   // doesn't work properly
   if ( capture.set(CV_CAP_PROP_FRAME_WIDTH, 1280 ) &&
        capture.set(CV_CAP_PROP_FRAME_HEIGHT, 720 ) ) {
       cout << "Resolution : "
            << capture.get(CV_CAP_PROP_FRAME_WIDTH )
            << " x "
            << capture.get(CV_CAP_PROP_FRAME_HEIGHT )
            << endl;
   } else {
       cout << "There's some problem with VideoCapture::set()" << endl;
   }

   capture.open( atoi( av[ 1 ] ) );

   while ( !capture.isOpened( ) ) {
       print_help( av[ 0 ] );
       cout << "Capture device " << atoi( av[ 1 ] ) << " failed to open!" << endl;
       cout << "Connect capture device to PC\a" << endl;
       system("pause");
       cout << endl;
       capture.open( atoi( av[ 1 ] ) );
   }

   cout << "Device " << atoi( av[ 1 ] ) << " is connected" << endl;

   string original("Original");
   string withInterpolation("With Bilinear Interpolation");

   namedWindow( original, CV_WINDOW_AUTOSIZE );
   namedWindow( withInterpolation, CV_WINDOW_AUTOSIZE);

   Mat frame;

   for ( ;; ) {
      capture >> frame;
      if ( frame.empty( ) )
         break;

      createTrackbar("Rotate", withInterpolation, &angle, 360, 0);

      imshow( original, frame );

      char key = ( char ) waitKey( 2 );
      switch ( key ) {
      case '+':
         angle += step;
         break;
      case '-':
         angle -= step;
         break;
      case 27:
      case 'q':
         return 0;
         break;
      }

      Mat result;

      Point2f rotationCenter( (float)( frame.cols / 2.0 ),
                              (float)( frame.rows / 2.0 ) );

      result = rotate( frame, angle, rotationCenter );

      // Note : mirror effect
      // 1 says, that given frame will be flipped horizontally
      flip(result,result, 1);

      imshow( withInterpolation, result );

      // test to compare my bilinear interpolation and of OpenCV
      Mat temp;
      warpAffine( frame, temp,
                  getRotationMatrix2D( rotationCenter, angle, (double)(1.0) ),
                  frame.size(), 1, 0 );
      string openCVInterpolation("OpenCV Bilinear Interpolation");
      namedWindow( openCVInterpolation, CV_WINDOW_AUTOSIZE );
      createTrackbar("Rotate", openCVInterpolation, &angle, 360, 0);
      flip(temp,temp, 1);
      imshow( openCVInterpolation, temp );
   }

   return 0;
}
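
For reference, the per-pixel blend that the rotate() loop above computes is the standard bilinear formula. Below is a minimal standalone sketch of that weighting; the helper name bilinearBlend is only illustrative and is not part of the code above:

inline float bilinearBlend( float q11, float q21, float q12, float q22,
                            float dx, float dy ) {
    // q12/q22 lie on the upper (min_row) scanline, q11/q21 on the lower
    // (max_row) one, matching the naming used in rotate() above.
    float bottom = ( 1 - dx ) * q11 + dx * q21;   // R1
    float top    = ( 1 - dx ) * q12 + dx * q22;   // R2
    // dy corresponds to (temp_rows - min_row) in the loop above
    return ( 1 - dy ) * top + dy * bottom;        // P
}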
Solving the second problem (setting the Lifecam resolution with OpenCV):

I found that the Lifecam Dashboard software can interfere with OpenCV's VideoCapture calls. If you uninstall Lifecam via Programs and Features in the Control Panel, the calls

capture.set(CV_CAP_PROP_FRAME_WIDTH, 1280)
capture.set(CV_CAP_PROP_FRAME_HEIGHT, 720)
work fine.
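
As a side note, in the question's code set() is called before open(). Here is a minimal sketch (the device index 0 is only a placeholder, and the same headers and using-directives as the program above are assumed) of requesting the resolution after the device has been opened, which is normally required for set() to have any effect:

VideoCapture capture;
capture.open( 0 );   // placeholder device index
if ( capture.isOpened() ) {
    // before open() there is no backend to talk to, so set() simply returns false
    bool okW = capture.set( CV_CAP_PROP_FRAME_WIDTH,  1280 );
    bool okH = capture.set( CV_CAP_PROP_FRAME_HEIGHT, 720 );
    cout << "set() returned " << okW << " / " << okH
         << ", actual resolution : "
         << capture.get( CV_CAP_PROP_FRAME_WIDTH ) << " x "
         << capture.get( CV_CAP_PROP_FRAME_HEIGHT ) << endl;
}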

I had a lot of problems with LifeCam and OpenCV; I ended up grabbing the images through DirectShow instead. You could try the videoInput library.

Actually, I have had no real problems with LifeCam and OpenCV working together, apart from setting a different resolution. If I use OpenCV's warpAffine(...) it works perfectly, with no artifacts (see the picture). I think the problem is in my rotation algorithm.
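
Following up on the last comment, one way to see exactly where the custom rotation disagrees with OpenCV is to subtract the two results. A small sketch, reusing frame, angle and rotationCenter from the main loop above; the window name "Difference" is just a placeholder:

Mat mine = rotate( frame, angle, rotationCenter );
Mat fromOpenCV;
warpAffine( frame, fromOpenCV,
            getRotationMatrix2D( rotationCenter, angle, 1.0 ),
            frame.size(), INTER_LINEAR, BORDER_CONSTANT );
Mat diff;
absdiff( mine, fromOpenCV, diff );   // non-zero pixels mark where the two rotations differ
imshow( "Difference", diff );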