macOS: Drawing a CGImageRef in YUV

I am using the code from here to convert a CGImageRef into a CVPixelBufferRef on OS X.

However, I need the image drawn in YUV (kCVPixelFormatType_420YpCbCr8Planar) instead of the RGB it is in now.

Is there a way to draw a CGImage directly in a YUV colorspace? If not, does anyone have an example of how to convert a CVPixelBufferRef from RGB to YUV?

I understand the formulas for the conversion, but doing the conversion on the CPU is painfully slow.
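
For reference, one common variant of the conversion (BT.601, full range, 8-bit) is:

Y  =  0.299 R + 0.587 G + 0.114 B
Cb = -0.169 R - 0.331 G + 0.500 B + 128
Cr =  0.500 R - 0.419 G - 0.081 B + 128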

Figured it out using the following:

CVPixelBufferRef converted_frame;
CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_420YpCbCr8Planar, NULL, &converted_frame);
VTPixelTransferSessionTransferImage(_vtpt_ref, imageBuffer, converted_frame);

where imageBuffer is the source CVPixelBufferRef.
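
For completeness, a minimal sketch of how the _vtpt_ref session used above might be created and torn down (MakePixelTransferSession is a hypothetical helper name; link against VideoToolbox):

#import <Foundation/Foundation.h>
#import <VideoToolbox/VideoToolbox.h>

// Create a reusable transfer session once, then reuse it for every frame.
static VTPixelTransferSessionRef MakePixelTransferSession(void)
{
  VTPixelTransferSessionRef session = NULL;
  OSStatus status = VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
  NSCAssert(status == noErr, @"VTPixelTransferSessionCreate failed: %d", (int)status);
  return session;
}

// When the session is no longer needed:
//   VTPixelTransferSessionInvalidate(session);
//   CFRelease(session);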

This is a late answer, and not intended to collect votes or anything. There is an Accelerate framework approach to converting RGB to YUV; the code is a little complex, but since working examples are hard to find, it works and is included here. It is wrapped up as a class that extends CIFilter, but it would be easy to adapt if you want to do something different. This code contains no memory leaks and should behave well when invoked repeatedly. The really useful thing to note is that the implementation creates a CVPixelBufferRef and then sets all the properties needed so that a later call to vImageCVImageFormat_CreateWithCVPixelBuffer() will work. The code renders the RGB data into a CoreVideo buffer, then wraps the resulting YUV image and returns it like any other CoreImage filter.

//
//  CoreImageToYUVConverter.h
//
//  Make use of CoreImage to convert an RGB input image into YUV data where
//  UV is subsampled and Y has the same dimensions as the original data.

#import <Foundation/Foundation.h>

#import <CoreImage/CoreImage.h>

@interface CoreImageToYUVConverter : CIFilter

@property (nonatomic, retain) CIImage *inputImage;

// If there is an error while processing the filter, this value is
// set to non-nil. Otherwise it is set to nil.

@property (nonatomic, retain) NSError *error;

// Dimensions of the output image. Note that Y is 2x
// the dimensions of the U and V buffers, so the Y image
// must have even width and height.

@property (nonatomic, assign) CGSize size;

@end

// CoreImageToYUVConverter.m

#import "CoreImageToYUVConverter.h"

@import Accelerate;

@interface CoreImageToYUVConverter ()

@property (nonatomic, retain) CIContext *coreImageContext;

@property (nonatomic, copy) NSNumber *inputWidth;

@property (nonatomic, copy) NSNumber *inputAspectRatio;

@property (nonatomic, assign) CVPixelBufferRef pixelBuffer;

@end

@implementation CoreImageToYUVConverter

@synthesize coreImageContext = m_coreImageContext;
@synthesize pixelBuffer = m_pixelBuffer;


- (void) dealloc
{
  self.pixelBuffer = NULL;
}

// Setter for self.pixelBuffer; this logic holds a retain on the CoreVideo buffer

- (void) setPixelBuffer:(CVImageBufferRef)cvBufferRef
{
  if (cvBufferRef) {
    CFRetain(cvBufferRef);
  }
  if (self->m_pixelBuffer) {
    CFRelease(self->m_pixelBuffer);
  }
  self->m_pixelBuffer = cvBufferRef;
}

- (CIImage *)outputImage
{
  self.error = nil;

  NSParameterAssert(self.inputImage != nil && [self.inputImage isKindOfClass:[CIImage class]]);

  CIImage *inputImage = self.inputImage;

  [self renderIntoYUVBuffer:inputImage];

  CIImage *outCIImage = [CIImage imageWithCVImageBuffer:self.pixelBuffer];

  return outCIImage;
}

- (NSDictionary *)customAttributes
{
  return @{
           kCIInputWidthKey : @{kCIAttributeDefault : @(0), kCIAttributeType : kCIAttributeTypeScalar},
           kCIInputAspectRatioKey : @{kCIAttributeDefault : @(0), kCIAttributeType : kCIAttributeTypeScalar},
           };
}

- (void) renderIntoYUVBuffer:(CIImage*)inputImage
{
  CGRect imageExtent = inputImage.extent;
  int width = (int) imageExtent.size.width;
  int height = (int) imageExtent.size.height;

  // Extract a CGImageRef from CIImage, this will flatten pixels possibly from
  // multiple steps of a CoreImage chain.

  if (self.coreImageContext == nil) {
    CIContext *context = [CIContext contextWithOptions:nil];
    NSAssert(context != nil, @"CIContext contextWithOptions failed");
    self.coreImageContext = context;
  }

  CGImageRef inCGImageRef = [self.coreImageContext createCGImage:inputImage fromRect:imageExtent];

  NSDictionary *pixelAttributes = @{
                                    (__bridge NSString*)kCVPixelBufferIOSurfacePropertiesKey : @{},
                                    (__bridge NSString*)kCVPixelBufferOpenGLCompatibilityKey : @(YES),
                                    (__bridge NSString*)kCVPixelBufferCGImageCompatibilityKey : @(YES),
                                    (__bridge NSString*)kCVPixelBufferCGBitmapContextCompatibilityKey : @(YES),
                                    };

  CVPixelBufferRef cvPixelBuffer = NULL;

  uint32_t yuvImageFormatType;
  //yuvImageFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange; // luma (0, 255)
  yuvImageFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange; // luma (16, 235)
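  // Note: both of these are biplanar (NV12-style) formats, a Y plane plus an
  // interleaved CbCr plane, rather than the fully planar format from the question.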

  CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
                                        width,
                                        height,
                                        yuvImageFormatType,
                                        (__bridge CFDictionaryRef)(pixelAttributes),
                                        &cvPixelBuffer);

  NSAssert(result == kCVReturnSuccess, @"CVPixelBufferCreate failed");

  // FIXME: UHDTV : HEVC uses kCGColorSpaceITUR_2020

  CGColorSpaceRef yuvColorSpace = CGColorSpaceCreateWithName(kCGColorSpaceITUR_709);

  {
    // Attach colorspace info to pixel buffer

    //CFDataRef colorProfileData = CGColorSpaceCopyICCProfile(yuvColorSpace); // deprecated
    CFDataRef colorProfileData = CGColorSpaceCopyICCData(yuvColorSpace);

    NSDictionary *pbAttachments = @{
      (__bridge NSString*)kCVImageBufferYCbCrMatrixKey: (__bridge NSString*)kCVImageBufferYCbCrMatrix_ITU_R_709_2,
      (__bridge NSString*)kCVImageBufferColorPrimariesKey: (__bridge NSString*)kCVImageBufferColorPrimaries_ITU_R_709_2,
      (__bridge NSString*)kCVImageBufferTransferFunctionKey: (__bridge NSString*)kCVImageBufferTransferFunction_ITU_R_709_2,

      (__bridge NSString*)kCVImageBufferICCProfileKey: (__bridge NSData *)colorProfileData,

      (__bridge NSString*)kCVImageBufferChromaLocationTopFieldKey: (__bridge NSString*)kCVImageBufferChromaLocation_Center,
      (__bridge NSString*)kCVImageBufferAlphaChannelIsOpaque: (id)kCFBooleanTrue,
    };

    CVBufferRef pixelBuffer = cvPixelBuffer;

    CVBufferSetAttachments(pixelBuffer, (__bridge CFDictionaryRef)pbAttachments, kCVAttachmentMode_ShouldPropagate);

    // Drop the NSDictionary ref so the retain count of colorProfileData can be
    // checked explicitly; after the release below, the pixel buffer attachment
    // holds the only remaining retain.
    pbAttachments = nil;
    CFRelease(colorProfileData);
  }

  // Note that this setter will implicitly release an earlier held ref to a pixel buffer
  self.pixelBuffer = cvPixelBuffer;

  vImageCVImageFormatRef cvImgFormatRef;

  cvImgFormatRef = vImageCVImageFormat_CreateWithCVPixelBuffer(cvPixelBuffer);
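
  // This call works because the required attachments (YCbCr matrix,
  // colorspace info) were set on the pixel buffer above.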

  // vImage_CGImageFormat for the RGB input. A NULL colorspace
  // defaults to sRGB on both macOS and iOS.

  CGColorSpaceRef defaultColorspaceRef = NULL;

  vImage_CGImageFormat rgbCGImgFormat = {
    .bitsPerComponent = 8,
    .bitsPerPixel = 32,
    .bitmapInfo = (CGBitmapInfo)(kCGBitmapByteOrder32Host | kCGImageAlphaNoneSkipFirst),
    .colorSpace = defaultColorspaceRef,
  };

  // Copy input CoreGraphic image into a CoreVideo buffer

  vImage_Buffer sourceBuffer;

  const CGFloat backgroundColor = 0.0f;

  vImage_Flags flags = kvImagePrintDiagnosticsToConsole;

  vImage_Error err;

  err = vImageBuffer_InitWithCGImage(&sourceBuffer, &rgbCGImgFormat, &backgroundColor, inCGImageRef, flags);

  NSAssert(err == kvImageNoError, @"vImageBuffer_InitWithCGImage failed");

  err = vImageBuffer_CopyToCVPixelBuffer(&sourceBuffer, &rgbCGImgFormat, cvPixelBuffer, cvImgFormatRef, &backgroundColor, flags);

  NSAssert(err == kvImageNoError, @"error in vImageBuffer_CopyToCVPixelBuffer %d", (int)err);

  // Manually free() the allocated buffer

  free(sourceBuffer.data);

  vImageCVImageFormat_Release(cvImgFormatRef);
  CVPixelBufferRelease(cvPixelBuffer);
  CGColorSpaceRelease(yuvColorSpace);
  CGColorSpaceRelease(defaultColorspaceRef);
  CGImageRelease(inCGImageRef);
}

@end
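
A hypothetical usage sketch (someCGImageRef stands in for an image you already have; it is not defined in the answer):

CoreImageToYUVConverter *converter = [[CoreImageToYUVConverter alloc] init];
converter.inputImage = [CIImage imageWithCGImage:someCGImageRef];
CIImage *yuvImage = converter.outputImage;
NSAssert(converter.error == nil, @"YUV conversion failed: %@", converter.error);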