Objective-C: Creating a CVPixelBufferRef from an IDeckLinkVideoInputFrame

I am using the BlackMagic DeckLink SDK to try to capture frames from a BM device.

I am trying to get the pixel data from the IDeckLinkVideoInputFrame in the DeckLinkController::VideoInputFrameArrived callback and convert it to a CVPixelBufferRef, so that I can write it to disk using AVFoundation's AVAssetWriterInputPixelBufferAdaptor and AVAssetWriter. The code I am using seems to work, except that every frame written to disk is green. (BlackMagic's sample code that renders a preview on screen does show an image, so the device and the device setup should be fine.)

The AVAssetWriter is set up as follows:

writer = [[AVAssetWriter assetWriterWithURL:destinationUrl
                                  fileType:AVFileTypeAppleM4V
                                     error:&error] retain];
if(error)
    NSLog(@"ERROR: %@", [error localizedDescription]);

NSMutableDictionary * outputSettings = [[NSMutableDictionary alloc] init];

[outputSettings setObject: AVVideoCodecH264
                   forKey: AVVideoCodecKey];
[outputSettings setObject: [NSNumber numberWithInt:1920]
                   forKey: AVVideoWidthKey];
[outputSettings setObject: [NSNumber numberWithInt:1080]
                   forKey: AVVideoHeightKey];

NSMutableDictionary * compressionProperties = [[NSMutableDictionary alloc] init];
[compressionProperties setObject: [NSNumber numberWithInt: 1000000]
                          forKey: AVVideoAverageBitRateKey];
[compressionProperties setObject: [NSNumber numberWithInt: 16]
                          forKey: AVVideoMaxKeyFrameIntervalKey];
[compressionProperties setObject: AVVideoProfileLevelH264Main31
                          forKey: AVVideoProfileLevelKey];

[outputSettings setObject: compressionProperties
                   forKey: AVVideoCompressionPropertiesKey];

writerVideoInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings] retain];

NSMutableDictionary * pixBufSettings = [[NSMutableDictionary alloc] init];

[pixBufSettings setObject: [NSNumber numberWithInt: kCVPixelFormatType_422YpCbCr8_yuvs]
                   forKey: (NSString *) kCVPixelBufferPixelFormatTypeKey];
[pixBufSettings setObject: [NSNumber numberWithInt: 1920]
                   forKey: (NSString *) kCVPixelBufferWidthKey];
[pixBufSettings setObject: [NSNumber numberWithInt: 1080]
                   forKey: (NSString *) kCVPixelBufferHeightKey];

writerVideoInput.expectsMediaDataInRealTime = YES;

writer.shouldOptimizeForNetworkUse = NO;

adaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerVideoInput
                                                                           sourcePixelBufferAttributes:pixBufSettings] retain];
[writer addInput:writerVideoInput];
For reference, these output settings and compression properties should be correct, but I have tried several different alternatives.
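
One thing not shown above, and presumably handled elsewhere in the capture code: the writer has to be started and a session opened before any appendPixelBuffer: call can succeed. A minimal sketch of that step (kCMTimeZero is only an assumption; the session start should match the presentation time of the first appended frame):

if (![writer startWriting])
    NSLog(@"ERROR starting writing: %@", [writer.error localizedDescription]);
// Open the session at the timestamp of the first frame that will be appended;
// kCMTimeZero is a placeholder here.
[writer startSessionAtSourceTime:kCMTimeZero];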

When a frame comes in from the device, I convert it to a CVPixelBufferRef as follows:

void *videoData;
int64_t frameTime;
int64_t frameDuration;

videoFrame->GetBytes(&videoData);
videoFrame->GetStreamTime(&frameTime, &frameDuration, 3000);

CMTime presentationTime = CMTimeMake(frameDuration, 3000);

CVPixelBufferRef buffer = NULL;

CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
CVPixelBufferLockBaseAddress(buffer, 0);

void *rasterData = CVPixelBufferGetBaseAddress(buffer);
memcpy(rasterData, videoData, (videoFrame->GetRowBytes()*videoFrame->GetHeight()));

CVPixelBufferUnlockBaseAddress(buffer, 0);

if (buffer)
{
    if(![adaptor appendPixelBuffer:buffer withPresentationTime:presentationTime]) {
        NSLog(@"ERROR appending pixelbuffer: %@", writer.error);
        [writerVideoInput markAsFinished];
        if(![writer finishWriting])
            NSLog(@"ERROR finishing writing: %@", [writer.error localizedDescription]);
    }
    else {
        NSLog(@"SUCCESS");
        if(buffer)
            CVPixelBufferRelease(buffer);
    }
}
This code appends frames to the AVAssetWriterInputPixelBufferAdaptor, but all the frames come out green.


Does anyone know what I am doing wrong? Or does anyone have experience capturing and compressing frames with AVFoundation using the BlackMagic DeckLink SDK?

When you see green and you are working in a YUV color space, you are seeing values of 0 in the buffer. Since the AVAssetWriter is writing frames, the chances are that 'buffer' contains values of 0. I can think of a few ways that could happen:
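
To make that concrete (a sketch of my own, not part of the original answer): in the 8-bit 4:2:2 'yuvs' layout requested above, each pixel pair is stored as {Y'0, Cb, Y'1, Cr}, and chroma bytes of 0 decode as a saturated green, while 0x80 is chroma-neutral. Filling a buffer with 0x80 everywhere should therefore produce flat gray frames rather than green ones:

// Fill an 8-bit 4:2:2 ('yuvs') pixel buffer with flat mid-gray.
// All-zero bytes (Y' = 0, Cb = Cr = 0) decode as bright green instead.
static void FillWithNeutralGray(CVPixelBufferRef pixelBuffer)
{
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    uint8_t *base   = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer);
    size_t rowBytes = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t height   = CVPixelBufferGetHeight(pixelBuffer);
    for (size_t row = 0; row < height; row++)
        memset(base + row * rowBytes, 0x80, rowBytes); // Y' = 0x80, Cb = Cr = 0x80
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}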

1) The buffer you are appending is most likely initialized to 0, so your copy may be failing. In your code that could happen if (videoFrame->GetRowBytes()*videoFrame->GetHeight()) somehow evaluates to 0. That seems impossible, but I would check it.
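
A quick way to rule that out (my suggestion, not from the original answer) is to log the values the copy size is computed from, right before the memcpy:

// Log the DeckLink frame geometry used to size the copy.
long rowBytes = videoFrame->GetRowBytes();
long height   = videoFrame->GetHeight();
NSLog(@"DeckLink frame: %ld bytes per row x %ld rows = %ld bytes",
      rowBytes, height, rowBytes * height);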

2) CVPixelBufferGetBaseAddress is either returning the wrong pointer, or the pixel buffer itself is in the wrong format, or it is possibly invalid (without crashing, thanks to safeguards in the API).
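
A related thing worth checking here (my addition, assuming buffer, videoData and videoFrame are the variables from the question's code): pool-allocated pixel buffers can be row-padded, so CVPixelBufferGetBytesPerRow() may be larger than videoFrame->GetRowBytes(). In that case a single memcpy of rowBytes * height skews or truncates the image; copying row by row is safer, and logging the pixel format confirms the buffer really is 'yuvs':

CVPixelBufferLockBaseAddress(buffer, 0);

OSType format    = CVPixelBufferGetPixelFormatType(buffer); // expect kCVPixelFormatType_422YpCbCr8_yuvs
size_t dstStride = CVPixelBufferGetBytesPerRow(buffer);
long   srcStride = videoFrame->GetRowBytes();
long   height    = videoFrame->GetHeight();
uint8_t *dst = (uint8_t *)CVPixelBufferGetBaseAddress(buffer);
uint8_t *src = (uint8_t *)videoData;

NSLog(@"pixel buffer format '%c%c%c%c', dst stride %zu, src stride %ld",
      (char)(format >> 24), (char)(format >> 16), (char)(format >> 8), (char)format,
      dstStride, srcStride);

// Copy the smaller of the two strides for each row so padding never shifts the image.
size_t copyBytes = MIN(dstStride, (size_t)srcStride);
for (long row = 0; row < height; row++)
    memcpy(dst + row * dstStride, src + row * srcStride, copyBytes);

CVPixelBufferUnlockBaseAddress(buffer, 0);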

3) 'videoData' is itself, for whatever reason, full of 0s. The DeckLinkCaptureDelegate returns frames with nothing in them when it doesn't like the input format (usually because the BMDDisplayMode passed to EnableVideoInput doesn't match your video source). You can check for this in the frame's flags:

int flags = videoFrame->GetFlags();
if (flags & bmdFrameHasNoInputSource)
{
    //our input format doesn't match the source
}
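
If that flag is set, the capture has to be reconfigured with a display mode that matches the source. A hedged sketch, assuming deckLinkInput is the IDeckLinkInput the capture was started on and that the hardware supports input format detection:

// Stop capture and re-enable the input with a mode that matches the source.
// bmdVideoInputEnableFormatDetection asks supporting hardware to report the
// actual incoming mode via the VideoInputFormatChanged callback.
deckLinkInput->StopStreams();
deckLinkInput->EnableVideoInput(bmdModeHD1080p30,        // placeholder mode
                                bmdFormat8BitYUV,
                                bmdVideoInputEnableFormatDetection);
deckLinkInput->StartStreams();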
Aside from changing the source mode and trying again, a quick check would be to change the memcpy line to the following:

memset(rasterData, 0x3f, 1920*1080*2);
If you still see green frames, take a hard look at #2. If you see frames of a different color, then the problem is #1 or #3, and most likely the resolution of your video input doesn't match the BMDDisplayMode you selected.

One more thing to watch out for: I think the line where you create your presentation time is wrong. It should probably be (note the change from frameDuration to frameTime):

CMTime presentationTime = CMTimeMake(frameTime, 3000);
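
Put in context, the corrected per-frame timestamp handling would look something like this (same 3000 timescale as in the question; BMDTimeValue and BMDTimeScale are the DeckLink SDK typedefs):

BMDTimeValue frameTime = 0;
BMDTimeValue frameDuration = 0;
BMDTimeScale timeScale = 3000;

videoFrame->GetStreamTime(&frameTime, &frameDuration, timeScale);

// The presentation time is the frame's position in the stream, not its duration.
CMTime presentationTime = CMTimeMake(frameTime, (int32_t)timeScale);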