Mac OS X 10.10.2（使用C++）上带有waitKey的OpenCV imshow速度太慢。我在Mac OS X 10.10.2上使用OpenCV C++来处理视频帧并显示它们。使用waitKey显示视频的imshow的性能非常慢。
我有以下代码,可以正确显示HD(1920x1080)灰度帧,只是速度太慢了10倍(即每秒2到3帧,而不是每秒30帧)Mac OS X 10.10.2(使用c+;+;)上带有waitKey的opencv imshow速度太慢 我在Mac OS X107.2上使用OpenCV C++来处理视频帧并显示它们。使用waitKey显示视频的imshow的性能非常慢,c++,macos,opencv,imshow,C++,Macos,Opencv,Imshow,我有以下代码,可以正确显示HD(1920x1080)灰度帧,只是速度太慢了10倍(即每秒2到3帧,而不是每秒30帧) cv::Mat framebuf[TEST_COUNT]; //---这里的代码用于分配并用大约4秒的视频填充帧缓冲区。这部分工作正常。 //---该循环运行速度太慢,约为10倍 对于(int f=0;f
cv::Mat framebuf[TEST_COUNT];
//--- 这里的代码用于分配并用大约4秒的视频填充帧缓冲区。这部分工作正常。
//--- 该循环运行速度太慢，约为10倍：
for (int f = 0; f < TEST_COUNT; f++)
{
    imshow("Video", framebuf[f]);
    waitKey(33);
}
有人能建议如何从opencv imshow()获得实时或接近实时的性能吗?我看到很多帖子说他们正在实时甚至比实时更快地显示视频,所以我不确定我做错了什么。非常感谢您的帮助。您必须减少对函数等待键的输入。尝试使用2-5范围内的较低数字。这还取决于您同时运行的其他进程,请尝试关闭其他进程,看看是否有改进我可能错了,但对我来说,问题不在于代码,而在于操作系统/配置。我写了一个小测试:
# Stress test: draw 1000 random lines into a Full HD grayscale frame and
# display each iteration with imshow/waitKey. On a healthy install this
# finishes in roughly 30 seconds; much longer indicates a config problem.
import cv2
import numpy as np
from random import randrange
# FIX: np.zeros takes (rows, cols) = (height, width). The original used
# (1920, 1080), producing a 1080-wide, 1920-tall image that did not match
# the (x, y) line endpoints below. (1080, 1920) is a true 1920x1080 frame.
img = np.zeros((1080, 1920), dtype = np.uint8)
counter = 0
while counter < 1000:
    # cv2 point coordinates are (x, y): x in [0, 1920), y in [0, 1080).
    cv2.line(img, (randrange(0, 1920), randrange(0, 1080)), (randrange(0, 1920), randrange(0, 1080)), (randrange(0, 255)))
    cv2.imshow('test', img)
    temp = cv2.waitKey(1)
    counter += 1
# FIX: `print counter` is Python 2 syntax; use the function form.
print(counter)
import cv2
import numpy as np
from random import randrange
img = np.zeros((1920, 1080), dtype = np.uint8)
counter = 0
while counter < 1000:
    cv2.line(img, (randrange(0, 1920), randrange(0, 1080)), (randrange(0, 1920), randrange(0, 1080)), (randrange(0, 255)))
    cv2.imshow('test', img)
    temp = cv2.waitKey(1)
    counter += 1
print counter
在我的机器上(Core 2 duo 2,6Ghz x64,8gb ram,ssd),完成此测试大约需要30秒。运行它,如果你的时间会比实际时间长很多,那么你的笔记本电脑/opencv配置/等等肯定有问题。我在Mac OS x上使用了opencv 2.4.x(我想是10.9),运行得很好。重新安装OpenCV是我想到的最明显的解决方案。删除OpenCV时,请使用brew再次安装—
brew install opencv --with-tbb --with-python --with-ffmpeg
(或类似的东西——使用brew选项OpenCV
)应该可以。第一个选项告诉brew使用tbb(线程构建块-多线程库,有时可以显著提高速度)构建opencv,第二个选项告诉brew安装python包装器,最后一个选项告诉brew安装ffmpeg(处理编解码器等)。您可以创建自己的窗口来显示图像。将MyWindow.m MyWindow.h文件添加到项目中
MyWindow.h：
/*
 * MyWindow.h — minimal C interface around a custom Cocoa window used to
 * display OpenCV frames faster than cv::imshow on macOS.
 * extern "C" keeps the symbols unmangled so the functions can be called
 * from C++ (main.cpp) while being implemented in Objective-C (MyWindow.m).
 */
#ifndef MY_WINDOW_H
#define MY_WINDOW_H
#ifdef __cplusplus
extern "C" {
#endif
/* Creates and shows the window at (x, y) with size (w, h); a zero w or h
 * means "use the full screen". Returns a retained opaque window handle. */
void* createNSWindow(int x, int y, int w, int h);
/* Displays a raw pixel buffer (w*h pixels, c channels, 8 bits/channel)
 * in the window previously returned by createNSWindow. */
void renderNSWindow(void* inwindow, void* data, int w, int h, int c);
/* Pumps pending AppKit events; call once per frame in place of waitKey. */
void processNSEvent();
#ifdef __cplusplus
}
#endif
#endif
用法,在main.cpp中,不要忘记waitKey
// Usage sketch (fragment, not a complete program): create a dummy OpenCV
// window so highgui initializes its Cocoa machinery, then use the custom
// NSWindow for the actual per-frame display.
#include "MyWindow.h"
// need create a cv window and do nothing
cv::namedWindow("xxx", 1);
// create window
void* w = createNSWindow(0, 0, 0, 0);
// frame image
cv::Mat frameImage;
// render loop
renderNSWindow(w, frameImage.data, frameImage.cols, frameImage.rows, frameImage.channels());
// need waitKey to display window
processNSEvent();
在 MyWindow.m 中实现（不要忘记 #import "MyWindow.h"）：

#import <Cocoa/Cocoa.h>
@interface MyWindow : NSWindow
@property(nonatomic, strong) NSImageView *imgv;
@end
@implementation MyWindow
@end
static NSImage* _createNSImage(void* data, int w, int h, int c);
void* createNSWindow(int x, int y, int w, int h) {
    NSRect screenFrame = [[NSScreen mainScreen] frame];
    NSRect frame = NSMakeRect(x, y, w, h);
    if (w == 0 || h == 0) {
        frame = screenFrame;
    }
    MyWindow* window = [[MyWindow alloc] initWithContentRect:frame
                                                   styleMask:NSWindowStyleMaskBorderless
                                                     backing:NSBackingStoreBuffered
                                                       defer:NO];
    //_initApp(window);
    [window makeKeyAndOrderFront:NSApp];
    window.titleVisibility = TRUE;
    window.styleMask = NSWindowStyleMaskResizable | NSWindowStyleMaskTitled | NSWindowStyleMaskFullSizeContentView;
    window.imgv = [[NSImageView alloc] initWithFrame:NSMakeRect(0, 0, frame.size.width, frame.size.height)];
    [window.contentView addSubview:window.imgv];
    return (void*)CFBridgingRetain(window);
}
static NSImage* _createNSImage(void* data, int w, int h, int c) {
    size_t bufferLength = w * h * c;
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, data, bufferLength, NULL);
    size_t bitsPerComponent = 8;
    size_t bitsPerPixel = c * bitsPerComponent;
    size_t bytesPerRow = c * w;
    CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedLast;
    if (c < 4) {
        bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNone;
        unsigned char* buf = data;
        for (int i = 0; i < w*h; i++) {
            ...
        }
    }
    ...
}

（完整代码见下文。）用法，在 main.cpp 中：
#include "MyWindow.h"
// Usage sketch (fragment, repeated from above): a dummy cv window is still
// required so highgui initializes, but display goes through the custom window.
// need create a cv window and do nothing
cv::namedWindow("xxx", 1);
// create window
void* w = createNSWindow(0, 0, 0, 0);
// frame image
cv::Mat frameImage;
// render loop
renderNSWindow(w, frameImage.data, frameImage.cols, frameImage.rows, frameImage.channels());
// need waitKey to display window
processNSEvent();
#import <Cocoa/Cocoa.h>
// Thin NSWindow subclass whose only addition is an NSImageView that serves
// as the frame-display surface (filled by renderNSWindow below).
@interface MyWindow : NSWindow
@property(nonatomic, strong) NSImageView *imgv;
@end
@implementation MyWindow
@end
// Wraps a raw pixel buffer in an NSImage; defined further down in this file.
static NSImage* _createNSImage(void* data, int w, int h, int c);
// Creates and shows a resizable, titled, full-size-content window sized to
// (w, h) at (x, y); a zero w or h falls back to the full screen frame.
// Returns a +1 retained opaque pointer (CFBridgingRetain) — the window is
// intentionally never released for the life of the program.
void* createNSWindow(int x, int y, int w, int h) {
NSRect screenFrame = [[NSScreen mainScreen] frame];
NSRect frame = NSMakeRect(x, y, w, h);
if (w == 0 || h == 0) {
frame = screenFrame;
}
// Created borderless first; the final style mask is assigned below after the
// window is ordered front. NOTE(review): this ordering looks deliberate —
// confirm before reordering.
MyWindow* window = [[MyWindow alloc] initWithContentRect:frame
styleMask:NSWindowStyleMaskBorderless
backing:NSBackingStoreBuffered
defer:NO] ;
//_initApp(window);
// NOTE(review): the sender argument is conventionally nil; passing NSApp
// works (it is ignored) but is unusual.
[window makeKeyAndOrderFront:NSApp];
// NOTE(review): titleVisibility is an NSWindowTitleVisibility enum; TRUE (1)
// maps to NSWindowTitleHidden — presumably intentional, verify.
window.titleVisibility = TRUE;
window.styleMask = NSWindowStyleMaskResizable | NSWindowStyleMaskTitled |NSWindowStyleMaskFullSizeContentView;
// Image view spanning the whole content area; renderNSWindow swaps its image.
window.imgv = [[NSImageView alloc] initWithFrame:NSMakeRect(0, 0, frame.size.width, frame.size.height)];
[window.contentView addSubview:window.imgv];
return (void*)CFBridgingRetain(window);
}
// Wraps a raw pixel buffer in an NSImage without copying the pixels.
// data: tightly packed 8-bit pixels, w*h*c bytes; c = channel count.
// WARNING: for c < 4 the first and last channel of every pixel are swapped
// IN PLACE (BGR -> RGB for c == 3), mutating the caller's buffer (a no-op
// when c == 1). The buffer must stay valid while the image is displayed:
// the CGDataProvider references it directly (NULL release callback, no copy).
static NSImage* _createNSImage(void* data, int w, int h, int c) {
size_t bufferLength = w * h * c;
CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, data, bufferLength, NULL);
size_t bitsPerComponent = 8;
size_t bitsPerPixel = c * bitsPerComponent;
size_t bytesPerRow = c * w;
// NOTE(review): DeviceRGB is used even when c == 1; grayscale presumably
// still renders via kCGImageAlphaNone — confirm if single-channel is used.
CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedLast;
if (c < 4) {
bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNone;
unsigned char* buf = data;
// Swap channel 0 and channel c-1 of each pixel in place.
for(int i = 0; i < w*h; i++) {
unsigned char temp = buf[i*c];
buf[i*c] = buf[i*c+c-1];
buf[i*c+c-1] = temp;
}
}
CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
CGImageRef iref = CGImageCreate(w,
h,
bitsPerComponent,
bitsPerPixel,
bytesPerRow,
colorSpaceRef,
bitmapInfo,
provider, // data provider
NULL, // decode
YES, // should interpolate
renderingIntent);
NSImage* image = [[NSImage alloc] initWithCGImage:iref size:NSMakeSize(w, h)];
// FIX: the original leaked one CGDataProvider, one CGColorSpace and one
// CGImage per call — i.e. per rendered frame. All three are +1 references
// under the CF Create Rule; NSImage retains what it needs, so the locals
// must be released here.
CGImageRelease(iref);
CGColorSpaceRelease(colorSpaceRef);
CGDataProviderRelease(provider);
return image;
}
// Pushes a new frame into the window's image view. The previous NSImage is
// released by ARC when it is replaced, so this is safe to call every frame.
void renderNSWindow(void* inwindow, void* data, int w, int h, int c) {
MyWindow* target = (__bridge MyWindow*)inwindow;
NSImage* frameImage = _createNSImage(data, w, h, c);
target.imgv.image = frameImage;
}
// Drains every event already queued for the application and dispatches it,
// then returns immediately (distantPast = never block waiting for events).
// Call once per frame in place of cv::waitKey so the window stays responsive.
void processNSEvent() {
NSEvent* pending;
while ((pending = [NSApp nextEventMatchingMask:NSEventMaskAny
untilDate:[NSDate distantPast]
inMode:NSDefaultRunLoopMode
dequeue:YES]) != nil) {
[NSApp sendEvent:pending];
}
}
#include <iostream>
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include <dispatch/dispatch.h>
#include "MyWindow.h"
using namespace std;
using namespace cv;
// Capture loop run on a background GCD queue (defined below main).
int opencvfunc(int argc, const char *argv[]);
// FIXME(review): newFrame and back_frame are shared between the capture
// queue and the main thread with no synchronization — newFrame should be
// std::atomic<bool> and back_frame needs a mutex (a torn cv::Mat assignment
// can crash or display garbage).
bool newFrame = false;
cv::Mat back_frame;
// UI entry point. AppKit requires window creation and event processing on
// the main thread, so main() renders while capture runs on a GCD queue.
int main(int argc, const char * argv[]) {
// The cv window is created only so highgui initializes; it is never used.
cv::namedWindow("render", 1);
void* w = createNSWindow(0, 0, 0, 0);
// Low-priority global queue for the camera capture loop.
dispatch_queue_t opencvq = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0);
dispatch_async(opencvq, ^{
opencvfunc(argc, argv);
});
// Busy-wait render loop: poll every ~3 ms for a freshly captured frame.
// FIXME(review): newFrame/back_frame are read here and written by the
// capture queue with no synchronization (data race).
while(true) {
usleep(3*1000);
if(newFrame) {
// NOTE(review): std::chrono is used but <chrono> is not included above —
// presumably pulled in transitively; confirm.
std::chrono::system_clock::time_point starttime = std::chrono::system_clock::now();
renderNSWindow(w, back_frame.data, back_frame.cols, back_frame.rows, back_frame.channels());
newFrame = false;
// cv::waitKey is intentionally replaced by processNSEvent below.
//auto key = cv::waitKey(1);
//if (key == 'q') {
// break;
//}
// Pump pending AppKit events so the window actually redraws.
processNSEvent();
std::chrono::system_clock::time_point endtime = std::chrono::system_clock::now();
// Per-frame render cost in milliseconds.
std::cout << "imshow:" << std::chrono::duration_cast<std::chrono::duration<double>>(endtime-starttime).count()*1000 << std::endl;
}
}
return 0;
}
int opencvfunc(int argc, const char *argv[]) {
cv::VideoCapture cap;
cap.open(0);
if (!cap.isOpened()) {
std::cout << "Couldn't open camera 0." << endl;
return EXIT_FAILURE;
}
Mat frame, unmodified_frame;
for (;;) {
cap >> frame; // get a new frame from camera
if (frame.empty()) { // stop if we're at the end of the video
break;
}
//unmodified_frame = frame.clone();
// ...
back_frame = frame.clone();
newFrame = true;
}
return EXIT_SUCCESS;
}
#include <stdio.h>
#include <iostream>
#include <opencv2/opencv.hpp>
#include <dispatch/dispatch.h>
using namespace cv;
using namespace std;
// FIXME(review): shared between the capture queue and the main thread with
// no synchronization — newFrame should be std::atomic<bool> and back_frame
// needs a mutex.
bool newFrame = false;
Mat back_frame;
int opencvmain(int argc, char** argv ) {
// open camear
cv::VideoCapture cap;
cap.open(0);
if (!cap.isOpened()) {
std::cout << "Couldn't open camera 0." << std::endl;
return EXIT_FAILURE;
}
// define frame images
cv::Mat frame;
// frame loop
for (;;) {
// get video frame
cap >> frame;
if (frame.empty()) {
break;
}
// render
back_frame = frame.clone();
newFrame = true;
}
return 0;
}
// UI entry point: creates the highgui window on the main thread, starts the
// capture loop on a high-priority GCD queue, then polls ~every 3 ms and
// displays each new frame with imshow/waitKey. Space bar exits.
int main(int argc, char** argv ) {
namedWindow("video", WINDOW_AUTOSIZE );
dispatch_queue_t opencvq = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
dispatch_async(opencvq, ^{
opencvmain(argc, argv);
});
for (;;) {
usleep(3*1000);
if (!newFrame) {
continue;               // nothing new yet — keep polling
}
imshow("video", back_frame);
auto key = cv::waitKey(1);  // required for imshow to actually paint
if (key == ' ') {
break;                  // note: newFrame deliberately left set on exit
}
newFrame = false;
}
return 0;
}