iOS：使用vImage（Accelerate框架）将QCAR YUV转换为RGB
我正在尝试使用iOS Accelerate框架的vImage调用，测试将Vuforia生成的YUV图像转换为UIImage的性能。在代码的当前状态下，我只是想让它先能工作，但现在转换产生一个暗条纹图像。关于Vuforia在其实现中如何部署YUV格式，是否有任何已发布的详细信息？我最初的假设是，他们使用iOS设备常用的双平面420p格式。相关测试代码如下：
/// Converts a Vuforia (QCAR) camera frame into a UIImage.
///
/// YUV frames are treated as planar 4:2:0 (I420): a full-resolution Y plane
/// followed by a half-resolution Cb plane and a half-resolution Cr plane.
/// NOTE(review): Vuforia does not publish its exact YUV layout — confirm
/// against the SDK if colors look swapped.
///
/// @param cameraImage The frame to convert; may be NULL.
/// @return An autoreleased UIImage, or nil when the frame is NULL or its
///         pixel format cannot be converted.
UIImage *imageWithQCARCameraImage(const QCAR::Image *cameraImage)
{
    UIImage *image = nil;
    if (cameraImage) {
        QCAR::PIXEL_FORMAT pixelFormat = cameraImage->getFormat();
        CGColorSpaceRef colorSpace = NULL;
        switch (pixelFormat) {
            case QCAR::YUV:
            case QCAR::RGB888:
                colorSpace = CGColorSpaceCreateDeviceRGB();
                break;
            case QCAR::GRAYSCALE:
                colorSpace = CGColorSpaceCreateDeviceGray();
                break;
            case QCAR::RGB565:
            case QCAR::RGBA8888:
            case QCAR::INDEXED:
                std::cerr << "Image format conversion not implemented." << std::endl;
                break;
            case QCAR::UNKNOWN_FORMAT:
                std::cerr << "Image format unknown." << std::endl;
                break;
        }
        int bitsPerComponent = 8;
        int width = cameraImage->getWidth();
        int height = cameraImage->getHeight();
        const void *baseAddress = cameraImage->getPixels();
        size_t totalBytes = QCAR::getBufferSize(width, height, pixelFormat);
        CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
        CGImageRef imageRef = NULL;
        if (colorSpace == NULL) {
            // Unsupported/unknown format: fall through and return nil
            // instead of handing CoreGraphics a NULL color space.
        } else if (pixelFormat == QCAR::YUV) {
            uint8_t *sourceDataAddress = (uint8_t *)baseAddress;
            size_t lumaBytes = (size_t)width * (size_t)height;
            size_t chromaBytes = totalBytes - lumaBytes;
            // vImage requires *all four* fields (data/height/width/rowBytes)
            // of every vImage_Buffer to be filled in.  The buffers must not
            // be `static`: the pixel pointer changes every frame, and
            // statics are not thread-safe.
            vImage_Buffer srcYp = {
                .data = const_cast<void *>(baseAddress),
                .height = static_cast<vImagePixelCount>(height),
                .width = static_cast<vImagePixelCount>(width),
                .rowBytes = static_cast<size_t>(width),
            };
            vImage_Buffer srcCb = {
                .data = static_cast<void *>(sourceDataAddress + lumaBytes),
                .height = static_cast<vImagePixelCount>(height) / 2,
                .width = static_cast<vImagePixelCount>(width) / 2,
                .rowBytes = static_cast<size_t>(width) / 2,
            };
            // Cr plane starts halfway through the chroma bytes (after Cb).
            vImage_Buffer srcCr = {
                .data = static_cast<void *>(sourceDataAddress + lumaBytes + (chromaBytes / 2)),
                .height = static_cast<vImagePixelCount>(height) / 2,
                .width = static_cast<vImagePixelCount>(width) / 2,
                .rowBytes = static_cast<size_t>(width) / 2,
            };
            // Let vImage allocate a correctly aligned destination (the old
            // code wrote into an undefined `imageData` pointer).
            vImage_Buffer dest;
            dest.data = NULL;
            vImage_Error error = vImageBuffer_Init(&dest,
                                                   static_cast<vImagePixelCount>(height),
                                                   static_cast<vImagePixelCount>(width),
                                                   32,
                                                   kvImageNoFlags);
            if (error == kvImageNoError) {
                // Video-range BT.601 (Y 16..235, CbCr 16..240 centered at
                // 128) — camera sensors emit video range; the previous
                // full-range values produced the dark, striped output.
                vImage_YpCbCrPixelRange pixelRange = { 16, 128, 235, 240, 255, 0, 255, 0 };
                vImage_YpCbCrToARGB info;
                error = vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4,
                                                                      &pixelRange,
                                                                      &info,
                                                                      kvImage420Yp8_Cb8_Cr8,
                                                                      kvImageARGB8888,
                                                                      kvImagePrintDiagnosticsToConsole);
                if (error == kvImageNoError) {
                    // iOS requires a non-NULL permute map; {3,2,1,0}
                    // reorders ARGB to BGRA.  Alpha fill is 255 (opaque),
                    // not 1.
                    uint8_t permuteMap[4] = { 3, 2, 1, 0 };
                    error = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&srcYp,
                                                                   &srcCb,
                                                                   &srcCr,
                                                                   &dest,
                                                                   &info,
                                                                   permuteMap,
                                                                   255,
                                                                   kvImageNoFlags);
                }
                if (error == kvImageNoError) {
                    // dest holds 32-bit BGRA pixels, so describe it as such:
                    // the old format claimed 24 bpp with no alpha, which
                    // mismatched the buffer and skewed every row.
                    vImage_CGImageFormat format = {
                        .bitsPerComponent = static_cast<uint32_t>(bitsPerComponent),
                        .bitsPerPixel = 32,
                        .colorSpace = colorSpace,
                        .bitmapInfo = static_cast<CGBitmapInfo>(kCGBitmapByteOrder32Little |
                                                                kCGImageAlphaNoneSkipFirst),
                        .version = 0,
                        .decode = NULL,
                        .renderingIntent = renderingIntent,
                    };
                    imageRef = vImageCreateCGImageFromBuffer(&dest,
                                                             &format,
                                                             NULL,
                                                             NULL,
                                                             kvImageNoFlags,
                                                             &error);
                }
            }
            if (error != kvImageNoError) {
                std::cerr << "vImage YUV conversion failed, error " << error << std::endl;
            }
            // Without kvImageNoAllocate, vImageCreateCGImageFromBuffer
            // copies the pixels, so the scratch buffer must be freed here
            // (the old code leaked it).  free(NULL) is a safe no-op.
            free(dest.data);
        } else {
            int bitsPerPixel = QCAR::getBitsPerPixel(pixelFormat);
            int bytesPerRow = cameraImage->getStride();
            CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNone;
            // NOTE(review): CGDataProviderCreateWithData does not copy —
            // the CGImage is only valid while Vuforia's frame buffer lives.
            CGDataProviderRef provider = CGDataProviderCreateWithData(NULL,
                                                                      baseAddress,
                                                                      totalBytes,
                                                                      NULL);
            imageRef = CGImageCreate(width,
                                     height,
                                     bitsPerComponent,
                                     bitsPerPixel,
                                     bytesPerRow,
                                     colorSpace,
                                     bitmapInfo,
                                     provider,
                                     NULL,
                                     false,
                                     renderingIntent);
            CGDataProviderRelease(provider);
        }
        if (imageRef != NULL) {
            image = [UIImage imageWithCGImage:imageRef];
            CGImageRelease(imageRef);
        }
        if (colorSpace != NULL) {
            CGColorSpaceRelease(colorSpace);
        }
    }
    return image;
}
UIImage*imageWithQCARCameraImage(常量QCAR::Image*cameraImage)
{
UIImage*image=nil;
如果(摄影机图像){
QCAR::PIXEL_FORMAT pixelFormat=cameraImage->getFormat();
CGCOLORSPACTEREF colorSpace=NULL;
开关(像素格式){
案例QCAR::YUV:
案例QCAR::RGB888:
colorSpace=CGColorSpaceCreateDeviceRGB();
打破
案例QCAR::灰度:
colorSpace=CGColorSpaceCreateDeviceGray();
打破
案例QCAR::RGB565:
案例QCAR::RGBA8888:
案例QCAR::索引:
std::cerrvoid*baseAddress=缓冲区;
总字节数=宽度*高度*3/2;
uint8_t*源数据地址=(uint8_t*)基地址;
vImage_缓冲区srcYp={
.宽度=静态铸件(宽度),
.高度=静态铸件(高度),
.rowBytes=静态_转换(宽度),
.data=常数转换(基地址),
};
尺寸=宽度*高度;
大小\u t色度字节=总字节-最大字节;
vImage_缓冲区srcCb={
.宽度=静态_铸件(宽度)/2,
.高度=静态铸件(高度)/2,
.rowBytes=静态_转换(宽度)/2,
.data=静态广播(sourceDataAddress+lumaBytes),
};
vImage_缓冲区srcCr={
.宽度=静态_铸件(宽度)/2,
.高度=静态铸件(高度)/2,
.rowBytes=静态_转换(宽度)/2,
.data=静态广播(sourceDataAddress+lumaBytes+(色度字节/2)),
};
vImage_缓冲区目的地;
dest.data=NULL;
vImage_Error Error=kvImageNoError;
错误=vImageBuffer_Init(&dest,高度,宽度,32,kvImageNoFlags);
//vImage_YpCbCrPixelRange pixelRange=(vImage_YpCbCrPixelRange){0,128,255,255,255,1,255,0};
vImage_YpCbCrPixelRange pixelRange={16,128,235,240,255,0,255,0};
vImage_YPCBCCRTOARGB信息;
错误=kvImageNoError;
错误=vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU R_601_4,
&像素范围,
&信息,
kvImage420Yp8_Cb8_Cr8,
kvImageARGB8888,
kvImagePrintDiagnosticsToConsole);
错误=kvImageNoError;
uint8_t permuteMap[4]={3,2,1,0};//BGRA-iOS仅支持BGRA
错误=vImageConvert_420Yp8_Cb8_CR8转换为RGB8888(&srcYp,
&srcCb,
&srcCr,
&目的地,
&信息,
permuteMap,//对于iOS,不能为空,mac可以为空,iOS仅支持BGRA
255,
kvImageNoFlags);
到目前为止，高通公司发布的任何细节都毫无用处。但在他们的论坛上，版主会“尝试”回答问题。如果这是一个好问题，他们只有两种回答方式：要么“参考文档”，要么给出一个实际的答案。希望你能得到答案，这个问题听起来很有趣。我也会去他们的论坛提问，尽管他们的论坛不太好用。我对YUV的了解告诉我，他们的API没有提供足够的细节，你只能猜测，因为潜在的格式有无数种。非常非常正确！高通公司需要在这方面加强。根据我自己的经验：你必须每次都填写vImage_Buffer结构的所有四个字段。另外，请在你的回答中补充解释这段代码是如何以及为什么解决问题的（写在上面的代码旁，而不是藏在某条注释里）。
// Working conversion from the accepted answer.
// NOTE(review): this is a fragment — `buffer`, `width` and `height` are
// defined by the enclosing (not shown) scope.
void *baseAddress = buffer;
// Planar 4:2:0 layout: one full Y plane plus two quarter-size chroma
// planes = 1.5 bytes per pixel.
size_t totalBytes = width * height * 3 / 2;
uint8_t *sourceDataAddress = (uint8_t *)baseAddress;
// vImage requires all four fields (data, height, width, rowBytes) of
// every vImage_Buffer to be filled in — and the buffers must be fresh
// locals, not statics, because the frame pointer changes every call.
// rowBytes assumes tightly packed rows (stride == width) — TODO confirm
// against Vuforia's getStride().
vImage_Buffer srcYp = {
.width = static_cast<vImagePixelCount>(width),
.height = static_cast<vImagePixelCount>(height),
.rowBytes = static_cast<size_t>(width),
.data = const_cast<void *>(baseAddress),
};
size_t lumaBytes = width * height;
// Bytes occupied by the two chroma planes combined (sic: "chromiance").
size_t chromianceBytes = totalBytes - lumaBytes;
// Cb plane: half resolution in both dimensions, directly after the Y plane.
vImage_Buffer srcCb = {
.width = static_cast<vImagePixelCount>(width) / 2,
.height = static_cast<vImagePixelCount>(height) / 2,
.rowBytes = static_cast<size_t>(width) / 2,
.data = static_cast<void *>(sourceDataAddress + lumaBytes),
};
// Cr plane starts halfway through the chroma bytes (after the Cb plane).
vImage_Buffer srcCr = {
.width = static_cast<vImagePixelCount>(width) / 2,
.height = static_cast<vImagePixelCount>(height) / 2,
.rowBytes = static_cast<size_t>(width) / 2,
.data = static_cast<void *>(sourceDataAddress + lumaBytes + (chromianceBytes / 2)),
};
vImage_Buffer dest;
dest.data = NULL;
vImage_Error error = kvImageNoError;
// Let vImage allocate a correctly aligned 32-bit-per-pixel destination.
error = vImageBuffer_Init(&dest, height, width, 32, kvImageNoFlags);
// Full-range variant, kept for reference — produced wrong (dark) output:
// vImage_YpCbCrPixelRange pixelRange = (vImage_YpCbCrPixelRange){ 0, 128, 255, 255, 255, 1, 255, 0 };
// Video-range BT.601: Y in 16..235, CbCr in 16..240 centered at 128.
vImage_YpCbCrPixelRange pixelRange = { 16, 128, 235, 240, 255, 0, 255, 0 };
vImage_YpCbCrToARGB info;
error = kvImageNoError;
error = vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4,
&pixelRange,
&info,
kvImage420Yp8_Cb8_Cr8,
kvImageARGB8888,
kvImagePrintDiagnosticsToConsole);
error = kvImageNoError;
uint8_t permuteMap[4] = {3, 2, 1, 0}; // reorder ARGB -> BGRA (iOS only supports BGRA)
error = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&srcYp,
&srcCb,
&srcCr,
&dest,
&info,
permuteMap, // must be non-NULL on iOS (macOS accepts NULL)
255,
kvImageNoFlags);