Question
I am using OpenCV C++ on Mac OS X 10.10.2 to process video frames and display them. The performance of imshow with waitKey to display the video is extremely slow.
I have the following code, which displays HD (1920x1080) grayscale frames correctly, except that it runs about 10 times too slow (i.e. 2 to 3 frames per second instead of 30 frames per second).
cv::Mat framebuf[TEST_COUNT];
//--- Code here to allocate and fill the frame buffer with about 4 seconds of video. This part works correctly.
//--- This loop runs too slow by a factor of approximately 10x
for (int f = 0; f < TEST_COUNT; f++)
{
    cv::imshow(windowName, framebuf[f]);
    cv::waitKey(33);
}
Can anyone suggest how to get real-time or near real-time performance from opencv imshow()? I have seen many posts that state that they are displaying video in real-time or even faster than real-time, so I am not sure what I am doing wrong. Any help would be greatly appreciated.
Answer 1:
I could be wrong, but for me the problem is not with your code but with your OS/configuration. I've written a small test:
import cv2
import numpy as np
from random import randrange

img = np.zeros((1920, 1080), dtype=np.uint8)
counter = 0
while counter < 1000:
    cv2.line(img, (randrange(0, 1920), randrange(0, 1080)), (randrange(0, 1920), randrange(0, 1080)), (randrange(0, 255)))
    cv2.imshow('test', img)
    temp = cv2.waitKey(1)
    counter += 1
    print(counter)
On my machine (Core 2 Duo 2.6 GHz x64, 8 GB RAM, SSD) it took about 30 seconds for this test to complete. Run it, and if you get a significantly bigger time, then something is definitely wrong with your laptop/OpenCV configuration/etc. I've used OpenCV 2.4.x on Mac OS X (it was 10.9, I think) and it was running fine. Reinstalling OpenCV is the most obvious solution which comes to my mind. After you remove OpenCV, use brew to install it again - brew install opencv --with-tbb --with-python --with-ffmpeg (or something similar - check using brew options opencv) should be fine. The first option tells brew to build OpenCV with TBB (Threading Building Blocks, a multithreading library that can sometimes significantly improve speed), the second installs the Python wrappers, and the last one installs ffmpeg (which handles codecs etc.).
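If you want to run the same check from C++ (which is what the question uses), a rough equivalent of the test above might look like the sketch below. This is only a sketch, assuming OpenCV 2.4 or later with a working highgui backend; the window name "test" and the 1000-iteration count simply mirror the Python version. If it takes far longer than ~30 seconds on comparable hardware, the OpenCV build or its GUI backend is the likely culprit rather than the display loop itself.
#include <opencv2/opencv.hpp>
#include <iostream>

int main()
{
    cv::Mat img = cv::Mat::zeros(1080, 1920, CV_8UC1);   // 1920x1080 grayscale test image
    cv::RNG rng;
    int64 start = cv::getTickCount();
    for (int counter = 0; counter < 1000; counter++)
    {
        // draw a random line, then push the frame through imshow/waitKey
        cv::line(img,
                 cv::Point(rng.uniform(0, 1920), rng.uniform(0, 1080)),
                 cv::Point(rng.uniform(0, 1920), rng.uniform(0, 1080)),
                 cv::Scalar(rng.uniform(0, 255)));
        cv::imshow("test", img);
        cv::waitKey(1);
        std::cout << counter << std::endl;
    }
    double seconds = (cv::getTickCount() - start) / cv::getTickFrequency();
    std::cout << "total: " << seconds << " s" << std::endl;
    return 0;
}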
Answer 2:
You could try reducing the argument you pass to waitKey. Try a lower number, in the range of 2-5 ms. It also depends on the other processes you have running at the same time; try shutting some of them down and see if it improves.
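As a sketch of how that could look in the question's loop (it reuses framebuf, windowName and TEST_COUNT from the question, so it is a drop-in illustration rather than a complete program): measure how long imshow itself takes and spend only the remainder of the ~33 ms frame budget in waitKey, with a small floor so the GUI event loop is still serviced.
for (int f = 0; f < TEST_COUNT; f++)
{
    int64 start = cv::getTickCount();
    cv::imshow(windowName, framebuf[f]);
    // Spend whatever is left of the 33 ms frame budget in waitKey, but never less than 2 ms
    double elapsedMs = (cv::getTickCount() - start) * 1000.0 / cv::getTickFrequency();
    int delay = 33 - (int)elapsedMs;
    if (delay < 2) delay = 2;
    cv::waitKey(delay);
}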
Answer 3:
You can create your own window to show the image. Add MyWindow.m and MyWindow.h files to the project.
MyWindow.h
#ifndef MY_WINDOW_H
#define MY_WINDOW_H
#ifdef __cplusplus
extern "C" {
#endif
void* createNSWindow(int x, int y, int w, int h);
void renderNSWindow(void* inwindow, void* data, int w, int h, int c);
void processNSEvent();
#ifdef __cplusplus
}
#endif
#endif
Usage, in main.cpp (do not forget waitKey):
#include "MyWindow.h"
// need create a cv window and do nothing
cv::namedWindow("xxx", 1);
// create window
void* w = createNSWindow(0, 0, 0, 0);
// frame image
cv::Mat frameImage;
// render loop
renderNSWindow(w, frameImage.data, frameImage.cols, frameImage.rows, frameImage.channels());
// need waitKey to display window
processNSEvent();
Implementation, in MyWindow.m (delete the default #import "MyWindow.h" line):
#import <Cocoa/Cocoa.h>
@interface MyWindow : NSWindow
@property(nonatomic, strong) NSImageView *imgv;
@end
@implementation MyWindow
@end
static NSImage* _createNSImage(void* data, int w, int h, int c);
void* createNSWindow(int x, int y, int w, int h) {
    NSRect screenFrame = [[NSScreen mainScreen] frame];
    NSRect frame = NSMakeRect(x, y, w, h);
    if (w == 0 || h == 0) {
        frame = screenFrame;
    }
    MyWindow* window = [[MyWindow alloc] initWithContentRect:frame
                                                   styleMask:NSWindowStyleMaskBorderless
                                                     backing:NSBackingStoreBuffered
                                                       defer:NO];
    //_initApp(window);
    [window makeKeyAndOrderFront:NSApp];
    window.titleVisibility = TRUE;
    window.styleMask = NSWindowStyleMaskResizable | NSWindowStyleMaskTitled | NSWindowStyleMaskFullSizeContentView;
    window.imgv = [[NSImageView alloc] initWithFrame:NSMakeRect(0, 0, frame.size.width, frame.size.height)];
    [window.contentView addSubview:window.imgv];
    return (void*)CFBridgingRetain(window);
}
static NSImage* _createNSImage(void* data, int w, int h, int c) {
    size_t bufferLength = w * h * c;
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, data, bufferLength, NULL);
    size_t bitsPerComponent = 8;
    size_t bitsPerPixel = c * bitsPerComponent;
    size_t bytesPerRow = c * w;
    CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedLast;
    if (c < 4) {
        bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNone;
        unsigned char* buf = data;
        for (int i = 0; i < w*h; i++) {
            unsigned char temp = buf[i*c];
            buf[i*c] = buf[i*c+c-1];
            buf[i*c+c-1] = temp;
        }
    }
    CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
    CGImageRef iref = CGImageCreate(w,
                                    h,
                                    bitsPerComponent,
                                    bitsPerPixel,
                                    bytesPerRow,
                                    colorSpaceRef,
                                    bitmapInfo,
                                    provider,         // data provider
                                    NULL,             // decode
                                    YES,              // should interpolate
                                    renderingIntent);
    NSImage* image = [[NSImage alloc] initWithCGImage:iref size:NSMakeSize(w, h)];
    return image;
}
void renderNSWindow(void* inwindow, void* data, int w, int h, int c) {
    MyWindow* window = (__bridge MyWindow*)inwindow;
    window.imgv.image = _createNSImage(data, w, h, c);
}
void processNSEvent() {
    for (;;) {
        NSEvent* event = [NSApp nextEventMatchingMask:NSEventMaskAny
                                            untilDate:[NSDate distantPast]
                                               inMode:NSDefaultRunLoopMode
                                              dequeue:YES];
        if (event == nil)
            break;
        [NSApp sendEvent:event];
    }
}
One more note: waitKey now takes about 20 ms, so you can run the OpenCV processing on a background thread and show the window on the main thread. You can also use processNSEvent instead of waitKey, which takes only about 10 ms.
Full source code:
#include <iostream>
#include <chrono>
#include <unistd.h>
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include <dispatch/dispatch.h>
#include "MyWindow.h"
using namespace std;
using namespace cv;
int opencvfunc(int argc, const char *argv[]);
bool newFrame = false;
cv::Mat back_frame;
int main(int argc, const char * argv[]) {
    cv::namedWindow("render", 1);
    void* w = createNSWindow(0, 0, 0, 0);
    dispatch_queue_t opencvq = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0);
    dispatch_async(opencvq, ^{
        opencvfunc(argc, argv);
    });
    while (true) {
        usleep(3*1000);
        if (newFrame) {
            std::chrono::system_clock::time_point starttime = std::chrono::system_clock::now();
            renderNSWindow(w, back_frame.data, back_frame.cols, back_frame.rows, back_frame.channels());
            newFrame = false;
            //auto key = cv::waitKey(1);
            //if (key == 'q') {
            //    break;
            //}
            processNSEvent();
            std::chrono::system_clock::time_point endtime = std::chrono::system_clock::now();
            std::cout << "imshow:" << std::chrono::duration_cast<std::chrono::duration<double>>(endtime-starttime).count()*1000 << std::endl;
        }
    }
    return 0;
}
int opencvfunc(int argc, const char *argv[]) {
    cv::VideoCapture cap;
    cap.open(0);
    if (!cap.isOpened()) {
        std::cout << "Couldn't open camera 0." << endl;
        return EXIT_FAILURE;
    }
    Mat frame, unmodified_frame;
    for (;;) {
        cap >> frame; // get a new frame from the camera
        if (frame.empty()) { // stop if we're at the end of the video
            break;
        }
        //unmodified_frame = frame.clone();
        // ...
        back_frame = frame.clone();
        newFrame = true;
    }
    return EXIT_SUCCESS;
}
Answer 4:
OpenCV 4 has resolved this issue, so please update to the new version.
One more thing: process the video and show the video in two threads.
#include <stdio.h>
#include <iostream>
#include <unistd.h>
#include <opencv2/opencv.hpp>
#include <dispatch/dispatch.h>
using namespace cv;
using namespace std;
bool newFrame = false;
Mat back_frame;
int opencvmain(int argc, char** argv) {
    // open camera
    cv::VideoCapture cap;
    cap.open(0);
    if (!cap.isOpened()) {
        std::cout << "Couldn't open camera 0." << std::endl;
        return EXIT_FAILURE;
    }
    // define frame images
    cv::Mat frame;
    // frame loop
    for (;;) {
        // get video frame
        cap >> frame;
        if (frame.empty()) {
            break;
        }
        // render
        back_frame = frame.clone();
        newFrame = true;
    }
    return 0;
}
int main(int argc, char** argv) {
    namedWindow("video", WINDOW_AUTOSIZE);
    dispatch_queue_t opencvq = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
    dispatch_async(opencvq, ^{
        opencvmain(argc, argv);
    });
    while (true) {
        usleep(3*1000);
        if (newFrame) {
            imshow("video", back_frame);
            auto key = cv::waitKey(1);
            if (key == ' ') {
                break;
            }
            newFrame = false;
        }
    }
    return 0;
}
Source: https://stackoverflow.com/questions/28593645/opencv-imshow-with-waitkey-too-slow-on-mac-os-x-10-10-2-using-c