
How do I use CvPhotoCamera with OpenCV on iOS?

Tags: ios, opencv, avfoundation, avcapturesession, ios-camera

I have followed a tutorial on how to use the CvVideoCamera object to display live video and apply some processing to the frames.

I am now interested in using the CvPhotoCamera object to capture a high-resolution picture with its takePicture method. However, the tutorial uses CvVideoCamera and does not cover CvPhotoCamera.

CvVideoCamera and CvPhotoCamera both inherit from CvAbstractCamera. They are sister classes, so I do not see how to get the processImage: callback when working with CvPhotoCamera.

My solution was to create a new class, CvVideoPhotoCamera, that inherits from CvAbstractCamera. Its contents are a merge of CvVideoCamera and CvPhotoCamera.
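
For reference, here is a rough sketch of what such a merged interface could look like. This is only a sketch using class and property names of my own choosing; the two delegate protocols (CvVideoCameraDelegate, CvPhotoCameraDelegate) come from OpenCV's cap_ios.h:

#import <opencv2/highgui/cap_ios.h>

// Sketch only: a CvAbstractCamera subclass exposing both the per-frame
// video callback and still-image capture.
@interface CvVideoPhotoCamera : CvAbstractCamera <AVCaptureVideoDataOutputSampleBufferDelegate>

// video side, merged in from CvVideoCamera
@property (nonatomic, weak) id<CvVideoCameraDelegate> videoDelegate;

// photo side, merged in from CvPhotoCamera
@property (nonatomic, weak) id<CvPhotoCameraDelegate> photoDelegate;
- (void)takePicture;

@end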

Alternatively, do the same as @pouytrez: subclass CvPhotoCamera and add the missing video output to it.

CvPhotoCameraMod.h:

#import <UIKit/UIKit.h>
#import <opencv2/highgui/cap_ios.h>
#import <opencv2/highgui/ios.h>

#define DEGREES_RADIANS(angle) ((angle) / 180.0 * M_PI)

@class CvPhotoCameraMod;

// Extends CvPhotoCameraDelegate with the per-frame callback that
// CvVideoCameraDelegate normally provides.
@protocol CvPhotoCameraDelegateMod <CvPhotoCameraDelegate>
- (void)processImage:(cv::Mat&)image;
@end

// CvPhotoCamera subclass that adds an AVCaptureVideoDataOutput, so live
// frames can be processed while high-resolution still capture stays available.
@interface CvPhotoCameraMod : CvPhotoCamera <AVCaptureVideoDataOutputSampleBufferDelegate>

@property (nonatomic, retain) CALayer *customPreviewLayer;           // shows the processed frames
@property (nonatomic, retain) AVCaptureVideoDataOutput *videoDataOutput;
@property (nonatomic, weak) id <CvPhotoCameraDelegateMod> delegate;  // re-declared with the extended protocol

- (void)createCustomVideoPreview;

@end
#import "CvPhotoCameraMod.h"
#import <CoreGraphics/CoreGraphics.h>
#define DEGREES_RADIANS(angle) ((angle) / 180.0 * M_PI)

@implementation CvPhotoCameraMod


- (void)createCaptureOutput
{
    [super createCaptureOutput];
    [self createVideoDataOutput];
}

- (void)createCustomVideoPreview
{
    [self.parentView.layer addSublayer:self.customPreviewLayer];
}


//Method mostly taken from this source: https://github.com/Itseez/opencv/blob/b46719b0931b256ab68d5f833b8fadd83737ddd1/modules/videoio/src/cap_ios_video_camera.mm

-(void)createVideoDataOutput{
    // Make a video data output
    self.videoDataOutput = [AVCaptureVideoDataOutput new];

    //Drop grayscale support here
    self.videoDataOutput.videoSettings  = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];


    // discard if the data output queue is blocked (as we process the still image)
    [self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];
    if ( [self.captureSession canAddOutput:self.videoDataOutput] ) {
        [self.captureSession addOutput:self.videoDataOutput];
    }
    [[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];

    // set video mirroring for front camera (more intuitive)
    if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoMirroring) {
        if (self.defaultAVCaptureDevicePosition == AVCaptureDevicePositionFront) {
            [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = YES;
        } else {
            [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = NO;
        }
    }

    // set default video orientation
    if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoOrientation) {
        [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoOrientation = self.defaultAVCaptureVideoOrientation;
    }

    // create a custom preview layer
    self.customPreviewLayer = [CALayer layer];

    self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);

    self.customPreviewLayer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);

    // create a serial dispatch queue used for the sample buffer delegate as well as when a still image is captured
    // a serial dispatch queue must be used to guarantee that video frames will be delivered in order
    // see the header doc for setSampleBufferDelegate:queue: for more information
    dispatch_queue_t videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [self.videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    (void)captureOutput;
    (void)connection;
    if (self.delegate) {

        // convert from Core Media to Core Video
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        void* bufferAddress;
        size_t width;
        size_t height;
        size_t bytesPerRow;

        CGColorSpaceRef colorSpace;
        CGContextRef context;

        int format_opencv;

        OSType format = CVPixelBufferGetPixelFormatType(imageBuffer);
        if (format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {

            format_opencv = CV_8UC1;

            bufferAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
            width = CVPixelBufferGetWidthOfPlane(imageBuffer, 0);
            height = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);
            bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);

        } else { // expect kCVPixelFormatType_32BGRA

            format_opencv = CV_8UC4;

            bufferAddress = CVPixelBufferGetBaseAddress(imageBuffer);
            width = CVPixelBufferGetWidth(imageBuffer);
            height = CVPixelBufferGetHeight(imageBuffer);
            bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

        }

        // delegate image processing to the delegate
        cv::Mat image((int)height, (int)width, format_opencv, bufferAddress, bytesPerRow);

        CGImage* dstImage;

        if ([self.delegate respondsToSelector:@selector(processImage:)]) {
            [self.delegate processImage:image];
        }

        // check if matrix data pointer or dimensions were changed by the delegate
        bool iOSimage = false;
        if (height == (size_t)image.rows && width == (size_t)image.cols && format_opencv == image.type() && bufferAddress == image.data && bytesPerRow == image.step) {
            iOSimage = true;
        }


        // (create color space, create graphics context, render buffer)
        CGBitmapInfo bitmapInfo;

        // basically we decide if it's a grayscale, rgb or rgba image
        if (image.channels() == 1) {
            colorSpace = CGColorSpaceCreateDeviceGray();
            bitmapInfo = kCGImageAlphaNone;
        } else if (image.channels() == 3) {
            colorSpace = CGColorSpaceCreateDeviceRGB();
            bitmapInfo = kCGImageAlphaNone;
            if (iOSimage) {
                bitmapInfo |= kCGBitmapByteOrder32Little;
            } else {
                bitmapInfo |= kCGBitmapByteOrder32Big;
            }
        } else {
            colorSpace = CGColorSpaceCreateDeviceRGB();
            bitmapInfo = kCGImageAlphaPremultipliedFirst;
            if (iOSimage) {
                bitmapInfo |= kCGBitmapByteOrder32Little;
            } else {
                bitmapInfo |= kCGBitmapByteOrder32Big;
            }
        }

        if (iOSimage) {
            context = CGBitmapContextCreate(bufferAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo);
            dstImage = CGBitmapContextCreateImage(context);
            CGContextRelease(context);
        } else {

            NSData *data = [NSData dataWithBytes:image.data length:image.elemSize()*image.total()];
            CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

            // Creating CGImage from cv::Mat
            dstImage = CGImageCreate(image.cols,                                 // width
                                     image.rows,                                 // height
                                     8,                                          // bits per component
                                     8 * image.elemSize(),                       // bits per pixel
                                     image.step,                                 // bytesPerRow
                                     colorSpace,                                 // colorspace
                                     bitmapInfo,                                 // bitmap info
                                     provider,                                   // CGDataProviderRef
                                     NULL,                                       // decode
                                     false,                                      // should interpolate
                                     kCGRenderingIntentDefault                   // intent
                                     );

            CGDataProviderRelease(provider);
        }


        // render buffer
        dispatch_sync(dispatch_get_main_queue(), ^{
            self.customPreviewLayer.contents = (__bridge id)dstImage;
        });


        // cleanup
        CGImageRelease(dstImage);

        CGColorSpaceRelease(colorSpace);

        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    }
}


@end
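
Finally, a minimal usage sketch from a view controller (in a .mm file, since cv::Mat appears in the delegate method). The initWithParentView:, start, and takePicture calls are the standard CvAbstractCamera/CvPhotoCamera API from cap_ios.h; the view controller itself is illustrative:

#import "CvPhotoCameraMod.h"

@interface ViewController : UIViewController <CvPhotoCameraDelegateMod>
@property (nonatomic, strong) CvPhotoCameraMod *photoCamera;
@end

@implementation ViewController

- (void)viewDidLoad
{
    [super viewDidLoad];
    self.photoCamera = [[CvPhotoCameraMod alloc] initWithParentView:self.view];
    self.photoCamera.delegate = self;
    self.photoCamera.defaultAVCaptureSessionPreset = AVCaptureSessionPresetPhoto;
    [self.photoCamera start];                    // builds the session; createCaptureOutput adds the video data output
    [self.photoCamera createCustomVideoPreview]; // attach the processed-frame preview layer
}

// Live frames, delivered by the added AVCaptureVideoDataOutput.
- (void)processImage:(cv::Mat&)image
{
    // per-frame processing goes here (runs on the video data output queue)
}

// Full-resolution still, delivered after calling [self.photoCamera takePicture].
- (void)photoCamera:(CvPhotoCamera *)photoCamera capturedImage:(UIImage *)image
{
    // handle the high-resolution photo
}

- (void)photoCameraCancel:(CvPhotoCamera *)photoCamera
{
}

@end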