
Create an IOSurface-backed CVPixelBuffer from YUV

Tags: ios, objective-c, video, opengl-es, glkit

So I am getting raw YUV data in 3 separate arrays from a network callback (a VoIP app). From what I understand, you cannot create IOSurface-backed pixel buffers from existing bytes, per the documentation:

Important: You cannot use CVPixelBufferCreateWithBytes() or CVPixelBufferCreateWithPlanarBytes() with the kCVPixelBufferIOSurfacePropertiesKey. Calling CVPixelBufferCreateWithBytes() or CVPixelBufferCreateWithPlanarBytes() will result in CVPixelBuffers that are not IOSurface-backed.

So you have to create the buffer with CVPixelBufferCreate. But how do you get the data from the callback into the CVPixelBufferRef that you create?

- (void)videoCallBackWithYPlane:(uint8_t *)yPlane
                         uPlane:(uint8_t *)uPlane
                         vPlane:(uint8_t *)vPlane
                          width:(size_t)width
                         height:(size_t)height
                        yStride:(size_t)yStride
                        uStride:(size_t)uStride
                        vStride:(size_t)vStride
{
    NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    CVPixelBufferRef pixelBuffer = NULL;
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
                                          width,
                                          height,
                                          kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                          (__bridge CFDictionaryRef)(pixelAttributes),
                                          &pixelBuffer);
    // ... how do I fill in the planes from here?
}

I am unsure what to do after this point. Eventually I want to turn this into a CIImage, which I can then use with my GLKView to render the video. How do people get the data "into" the buffer once they have created it?
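For context on that final rendering step, a minimal sketch (not from the original post; the view and context setup are assumed, and the frame is a placeholder) of drawing a CIImage into a GLKView via a CIContext:

// Minimal sketch, assuming a GLKView backed by an OpenGL ES 2 context.
EAGLContext *eaglContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
GLKView *glkView = [[GLKView alloc] initWithFrame:CGRectZero context:eaglContext]; // frame is a placeholder
CIContext *ciContext = [CIContext contextWithEAGLContext:eaglContext];

// Later, once a CIImage has been created from the pixel buffer:
[glkView bindDrawable];
[ciContext drawImage:coreImage
              inRect:CGRectMake(0, 0, glkView.drawableWidth, glkView.drawableHeight)
            fromRect:coreImage.extent];
[glkView display];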

I figured it out, and it was fairly simple. Here is the full code. The only issue is that I get a BSXPCMessage received error: Connection interrupted message, and it takes a while for the video to show.

NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
CVPixelBufferRef pixelBuffer = NULL;
CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
                                      width,
                                      height,
                                      kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                      (__bridge CFDictionaryRef)(pixelAttributes),
                                      &pixelBuffer);
if (result != kCVReturnSuccess) {
    DDLogWarn(@"Unable to create cvpixelbuffer %d", result);
    return;
}

CVPixelBufferLockBaseAddress(pixelBuffer, 0);
uint8_t *yDestPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
memcpy(yDestPlane, yPlane, width * height);
uint8_t *uvDestPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
// uvPlane holds the interleaved U/V bytes; numberOfElementsForChroma is the
// combined size of both chroma planes (width * height / 2 for 4:2:0).
memcpy(uvDestPlane, uvPlane, numberOfElementsForChroma);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

CIImage *coreImage = [CIImage imageWithCVPixelBuffer:pixelBuffer]; //success!
CVPixelBufferRelease(pixelBuffer);

I forgot to add the code to interleave the two U and V planes, but that shouldn't be too bad. A sketch of that step follows.
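A minimal sketch of the interleaving step (assuming tightly packed, quarter-size uPlane/vPlane source buffers, and uvPlane allocated by the caller with width * height / 2 bytes):

// Interleave the separate U and V planes into the NV12-style UV plane that
// kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange expects (UVUVUV...).
size_t chromaWidth  = width / 2;
size_t chromaHeight = height / 2;
for (size_t row = 0; row < chromaHeight; row++) {
    for (size_t col = 0; col < chromaWidth; col++) {
        size_t srcIndex = row * chromaWidth + col;
        uvPlane[2 * srcIndex]     = uPlane[srcIndex]; // U (Cb)
        uvPlane[2 * srcIndex + 1] = vPlane[srcIndex]; // V (Cr)
    }
}

Note that the memcpy calls above also assume the destination plane's bytes-per-row equals the source stride; if CVPixelBufferGetBytesPerRowOfPlane() reports a wider row, copy row by row instead.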

I had a similar problem, and here is what I came up with in Swift 2.0, pieced together from answers to other questions and links:

func generatePixelBufferFromYUV2(inout yuvFrame: YUVFrame) -> CVPixelBufferRef?
{
    var uIndex: Int
    var vIndex: Int
    var uvDataIndex: Int
    var pixelBuffer: CVPixelBufferRef? = nil

    // NOTE: attributes are nil here; pass kCVPixelBufferIOSurfacePropertiesKey
    // if you need an IOSurface-backed buffer.
    let err = CVPixelBufferCreate(kCFAllocatorDefault, yuvFrame.width, yuvFrame.height, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, nil, &pixelBuffer)
    if (err != kCVReturnSuccess) {
        NSLog("Error at CVPixelBufferCreate %d", err)
        return nil
    }

    if (pixelBuffer != nil)
    {
        CVPixelBufferLockBaseAddress(pixelBuffer!, 0)
        let yBaseAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer!, 0)
        if (yBaseAddress != nil)
        {
            let yData = UnsafeMutablePointer<UInt8>(yBaseAddress)
            let yDataPtr = UnsafePointer<UInt8>(yuvFrame.luma.bytes)

            // Y-plane data
            memcpy(yData, yDataPtr, yuvFrame.luma.length)
        }

        let uvBaseAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer!, 1)
        if (uvBaseAddress != nil)
        {
            let uvData = UnsafeMutablePointer<UInt8>(uvBaseAddress)
            let pUPointer = UnsafePointer<UInt8>(yuvFrame.chromaB.bytes)
            let pVPointer = UnsafePointer<UInt8>(yuvFrame.chromaR.bytes)

            // For the uv data, we need to interleave them as uvuvuvuv....
            let iuvRow = (yuvFrame.chromaB.length * 2 / yuvFrame.width)
            let iHalfWidth = yuvFrame.width / 2

            for i in 0..<iuvRow
            {
                for j in 0..<iHalfWidth
                {
                    // UV data for the original frame. Just interleave them.
                    uvDataIndex = i * iHalfWidth + j
                    uIndex = (i * yuvFrame.width) + (j * 2)
                    vIndex = uIndex + 1
                    uvData[uIndex] = pUPointer[uvDataIndex]
                    uvData[vIndex] = pVPointer[uvDataIndex]
                }
            }
        }
        CVPixelBufferUnlockBaseAddress(pixelBuffer!, 0)
    }

    return pixelBuffer
}
Here is the full conversion in obj-c. And to all those geniuses who say "it's trivial": don't patronize anyone! If you are here to help, help; if you are here to show how "smart" you are, go do it somewhere else. Here is a link to a detailed explanation of YUV processing:


Comments:

Please provide the code for the U and V planes, and for numberOfElementsForChroma.

@JULIIncognito I believe uvPlane was just a uint8_t array. This was a long time ago, sorry. numberOfElementsForChroma would be whatever fixed size you need.

@JULIIncognito Is numberOfElementsForChroma the same as width * height? Because it's not working.

@lilouch It's not working for me either, so I just changed the color format to kCVPixelFormatType_32BGRA and did another conversion from Y420/NV12.

numberOfElementsForChroma is width * height / 2, because both the U plane and the V plane are a quarter of the size of the Y plane.

I have a similar task to perform. Could I get a gist of the YUVFrame struct and the conversion code, so I can get the result of converting the Y-U-V pointers into a pixel buffer? Thanks.

My code is a mix of Objective-C and Swift, and my struct is in Objective-C. It consists of three NSData objects (luma, chromaB, chromaR) and two NSIntegers (width, height).
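Reconstructed from that last comment, the YUVFrame type (as described: three NSData planes plus the frame dimensions) would look like this in Objective-C:

// YUVFrame as described in the comment above.
@interface YUVFrame : NSObject
@property (strong, nonatomic) NSData *luma;     // Y plane
@property (strong, nonatomic) NSData *chromaB;  // U (Cb) plane
@property (strong, nonatomic) NSData *chromaR;  // V (Cr) plane
@property NSInteger width;
@property NSInteger height;
@end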
/// Method to convert YUV buffers to a pixel buffer, in order to feed it to Face Unity methods.
/// The caller is responsible for releasing the returned buffer.
- (CVPixelBufferRef)pixelBufferFromYUV:(uint8_t *)yBuffer uBuffer:(uint8_t *)uBuffer vBuffer:(uint8_t *)vBuffer width:(int)width height:(int)height {
    NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    CVPixelBufferRef pixelBuffer = NULL;
    /// numberOfElementsForChroma is width*height/2 because both the U plane and the V plane are a quarter of the size of the Y plane
    size_t uPlaneSize = width * height / 4;
    size_t vPlaneSize = width * height / 4;
    size_t numberOfElementsForChroma = uPlaneSize + vPlaneSize;

    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
                                          width,
                                          height,
                                          kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                          (__bridge CFDictionaryRef)(pixelAttributes),
                                          &pixelBuffer);
    if (result != kCVReturnSuccess) {
        return NULL;
    }

    /// For simplicity and speed, create a combined UV plane to hold the pixels.
    /// NOTE: this appends V after U rather than interleaving them byte by byte,
    /// which is what the bi-planar (NV12) format actually expects.
    uint8_t *uvPlane = calloc(numberOfElementsForChroma, sizeof(uint8_t));
    memcpy(uvPlane, uBuffer, uPlaneSize);
    memcpy(uvPlane + uPlaneSize, vBuffer, vPlaneSize);

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    uint8_t *yDestPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    memcpy(yDestPlane, yBuffer, width * height);
    uint8_t *uvDestPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    memcpy(uvDestPlane, uvPlane, numberOfElementsForChroma);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    free(uvPlane);
    return pixelBuffer;
}
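A hypothetical call site (the plane pointers and dimensions come from whatever decoder or network callback you use; the 640x480 values are placeholders), remembering to release the returned buffer:

CVPixelBufferRef pixelBuffer = [self pixelBufferFromYUV:yPlane
                                                uBuffer:uPlane
                                                vBuffer:vPlane
                                                  width:640
                                                 height:480];
if (pixelBuffer != NULL) {
    CIImage *image = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    // ... render the image ...
    CVPixelBufferRelease(pixelBuffer);
}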