C++ OpenCV: processing every frame


I want to write a cross-platform application for video capture using OpenCV. In every example I have found, frames from the camera are processed with a grab call followed by a wait of some duration. I want to process every frame in sequence. I would like to define my own callback function that is executed whenever a new frame is ready to be processed (as in DirectShow for Windows, where you define your own filter and insert it into the graph).


So the question is: how can I do this?

A quick idea is to use two threads: the first thread grabs frames and notifies the second one whenever a frame is available (placing it in a processing queue), while the second thread does all the processing in an event-loop fashion.


See boost::thread and boost::signals2, as together these two components should provide most of the framework described above (except for the queue).
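
For illustration only, here is a minimal sketch of that producer/consumer idea, using std::thread, std::mutex and std::condition_variable from the standard library instead of Boost. All names below are illustrative and not from the answers above; error handling and the stop condition are simplified.

#include <atomic>
#include <chrono>
#include <condition_variable>
#include <mutex>
#include <queue>
#include <thread>

#include "cv.h"
#include "highgui.h"

// Shared state between the grabbing (producer) and processing (consumer) threads.
std::queue<IplImage*>   frame_queue;
std::mutex              queue_mutex;
std::condition_variable queue_cv;
std::atomic<bool>       done(false);

// Producer: grabs frames as fast as the camera delivers them and queues copies.
void grab_loop(CvCapture* capture)
{
    while (!done)
    {
        IplImage* frame = cvQueryFrame(capture); // buffer owned by the capture, do not release
        if (!frame)
            break;

        {
            std::lock_guard<std::mutex> lock(queue_mutex);
            frame_queue.push(cvCloneImage(frame)); // copy, since cvQueryFrame reuses its buffer
        }
        queue_cv.notify_one();
    }
    done = true;
    queue_cv.notify_one(); // wake the consumer so it can drain the queue and exit
}

// Consumer: waits for frames and runs the (potentially slow) processing on them.
void process_loop()
{
    while (true)
    {
        IplImage* frame = 0;
        {
            std::unique_lock<std::mutex> lock(queue_mutex);
            queue_cv.wait(lock, [] { return !frame_queue.empty() || done; });
            if (frame_queue.empty())
                break; // producer finished and nothing is left to process
            frame = frame_queue.front();
            frame_queue.pop();
        }

        // ... per-frame processing goes here (e.g. call your callback) ...

        cvReleaseImage(&frame);
    }
}

int main()
{
    CvCapture* capture = cvCaptureFromCAM(-1);
    if (!capture)
        return 1;

    std::thread grabber(grab_loop, capture);
    std::thread processor(process_loop);

    // Run for a while; a real application would set 'done' from a key press instead.
    std::this_thread::sleep_for(std::chrono::seconds(10));
    done = true;

    grabber.join();   // grab_loop notifies the consumer before it returns
    processor.join();

    cvReleaseCapture(&capture);
    return 0;
}

The point of the split is that the grabbing thread stays lightweight, so the camera keeps delivering frames even while the processing thread is busy, at the cost of memory if the queue grows faster than it is drained.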

With the code below, all callbacks must follow this definition:

IplImage* custom_callback(IplImage* frame);
This signature means the callback will be executed on every frame retrieved by the system. In my example, make_it_gray() allocates a new image to hold the result of the grayscale conversion and returns it. This means you must release this frame yourself later in your code. I added comments about it in the code.
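
For illustration only (this helper is not part of the original answer): any other processing step can be plugged in the same way, as long as it matches the prototype and returns an image that the caller releases. For example, a horizontal flip:

/*
 * make_it_mirrored: illustrative callback that returns a horizontally flipped
 * copy of the frame. As with make_it_gray(), the caller must release the
 * returned IplImage* with cvReleaseImage().
 */
IplImage* make_it_mirrored(IplImage* frame)
{
    IplImage* flipped = cvCloneImage(frame);
    if (!flipped)
        return NULL;

    cvFlip(frame, flipped, 1); // 1 = flip around the vertical axis (mirror)
    return flipped;
}

Calling process_video(make_it_mirrored) instead of process_video(make_it_gray) is all that is needed to switch the processing.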

Note that if your callback involves heavy processing, the system might skip a few frames from the camera. Consider the suggestions made by Paul R and diverscuba23.

#include <stdio.h>

#include "cv.h"
#include "highgui.h"


typedef IplImage* (*callback_prototype)(IplImage*);


/* 
 * make_it_gray: our custom callback to convert a colored frame to its grayscale version.
 * Remember that you must deallocate the returned IplImage* yourself after calling this function.
 */
IplImage* make_it_gray(IplImage* frame)
{
    // Allocate space for a new image
    IplImage* gray_frame = 0; 

    gray_frame = cvCreateImage(cvSize(frame->width, frame->height), frame->depth, 1);
    if (!gray_frame)
    {
      fprintf(stderr, "!!! cvCreateImage failed!\n" );
      return NULL;
    }

    cvCvtColor(frame, gray_frame, CV_RGB2GRAY);

    return gray_frame; 
}

/*
 * process_video: retrieves frames from camera and executes a callback to do individual frame processing.
 * Keep in mind that if your callback takes too much time to execute, you might lose a few frames from 
 * the camera.
 */
void process_video(callback_prototype custom_cb)
{           
    // Initialize camera
    CvCapture *capture = 0;
    capture = cvCaptureFromCAM(-1);
    if (!capture) 
    {
      fprintf(stderr, "!!! Cannot initialize webcam!\n" );
      return;
    }

    // Create a window for the video 
    cvNamedWindow("result", CV_WINDOW_AUTOSIZE);    

    IplImage* frame = 0;
    char key = 0;
    while (key != 27) // ESC
    {    
      frame = cvQueryFrame(capture);
      if (!frame) 
      {
          fprintf(stderr, "!!! cvQueryFrame failed!\n" );
          break;
      }

      // Execute callback on each frame
      IplImage* processed_frame = (*custom_cb)(frame);  

      // Display processed frame
      cvShowImage("result", processed_frame);

      // Release resources
      cvReleaseImage(&processed_frame);

      // Exit when user presses ESC
      key = cvWaitKey(10);
    }

    // Free memory
    cvDestroyWindow("result");
    cvReleaseCapture(&capture);
}

int main(int argc, char **argv)
{
    process_video(make_it_gray);

    return 0;
}
EDIT:

I changed the code above so that it prints the current framerate and performs a manual grayscale conversion. These are small tweaks to the code, done for educational purposes so people know how to perform operations at the pixel level.

#include <stdio.h>
#include <math.h>
#include <time.h>

#include "cv.h"
#include "highgui.h"


typedef IplImage* (*callback_prototype)(IplImage*);


/* 
 * make_it_gray: our custom callback to convert a colored frame to its grayscale version.
 * Remember that you must deallocate the returned IplImage* yourself after calling this function.
 */
IplImage* make_it_gray(IplImage* frame)
{
    // New IplImage* to store the processed image
    IplImage* gray_frame = 0; 

    // Manual grayscale conversion: ugly, but shows how to access each channel of the pixels individually
    gray_frame = cvCreateImage(cvSize(frame->width, frame->height), frame->depth, frame->nChannels);
    if (!gray_frame)
    {
      fprintf(stderr, "!!! cvCreateImage failed!\n" );
      return NULL;
    }

    for (int i = 0; i < frame->width * frame->height * frame->nChannels; i += frame->nChannels)
    {
        // imageData is a signed char*, so cast to unsigned char before averaging
        // to avoid wrong results for intensities above 127
        int avg = ((unsigned char)frame->imageData[i] +
                   (unsigned char)frame->imageData[i+1] +
                   (unsigned char)frame->imageData[i+2]) / 3;

        gray_frame->imageData[i]   = (char)avg; //B
        gray_frame->imageData[i+1] = (char)avg; //G
        gray_frame->imageData[i+2] = (char)avg; //R
    }

    return gray_frame; 
}

/*
 * process_video: retrieves frames from camera and executes a callback to do individual frame processing.
 * Keep in mind that if your callback takes too much time to execute, you might lose a few frames from 
 * the camera.
 */
void process_video(callback_prototype custom_cb)
{           
    // Initialize camera
    CvCapture *capture = 0;
    capture = cvCaptureFromCAM(-1);
    if (!capture) 
    {
      fprintf(stderr, "!!! Cannot open initialize webcam!\n" );
      return;
    }

    // Create a window for the video 
    cvNamedWindow("result", CV_WINDOW_AUTOSIZE);    

    double elapsed = 0;
    int last_time = 0;
    int num_frames = 0;

    IplImage* frame = 0;
    char key = 0;
    while (key != 27) // ESC
    {    
      frame = cvQueryFrame(capture);
      if(!frame) 
      {
          fprintf( stderr, "!!! cvQueryFrame failed!\n" );
          break;
      }

      // Calculating framerate
      num_frames++;
      elapsed = clock() - last_time;
      int fps = 0;
      if (elapsed > 1)
      {
          fps = floor(num_frames / (float)(1 + (float)elapsed / (float)CLOCKS_PER_SEC));
          num_frames = 0;
          last_time = clock() + 1 * CLOCKS_PER_SEC;
          printf("FPS: %d\n", fps);
      }

      // Execute callback on each frame
      IplImage* processed_frame = (*custom_cb)(frame);  

      // Display processed frame
      cvShowImage("result", processed_frame);

      // Release resources
      cvReleaseImage(&processed_frame);

      // Exit when user press ESC
      key = cvWaitKey(10);
    }

    // Free memory
    cvDestroyWindow("result");
    cvReleaseCapture(&capture);
}

int main( int argc, char **argv )
{
    process_video(make_it_gray);

    return 0;
}
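
As a side note on the frame-skipping warning earlier: one way to see whether your callback itself is the bottleneck is to time each invocation and compare it against the camera's frame interval (roughly 33 ms at 30 FPS). A small, hypothetical wrapper (not part of the original answer), reusing clock() like the FPS code above:

/*
 * timed_callback: hypothetical wrapper that measures how long a callback takes.
 * clock() returns CPU time, which is a reasonable approximation for a CPU-bound
 * callback; if the reported time regularly exceeds the frame interval, the
 * grabbing loop above will start skipping frames.
 */
IplImage* timed_callback(callback_prototype cb, IplImage* frame)
{
    clock_t start = clock();
    IplImage* result = cb(frame);
    clock_t end = clock();

    printf("callback took %.1f ms\n",
           1000.0 * (double)(end - start) / CLOCKS_PER_SEC);

    return result;
}

Calling timed_callback(custom_cb, frame) inside the while loop, instead of (*custom_cb)(frame) directly, prints a per-frame cost that can be compared against the printed FPS.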