iOS 无法使 CISourceOverCompositing 正常工作
我使用 CISourceOverCompositing 合并图像并导出为 CVPixelBuffer。当图像数量不超过 41 时一切正常,但当数量达到 42 或更多时,渲染结果变成全黑。以下是我的处理代码:
/// Creates an IOSurface-backed 32BGRA pixel buffer of the given size.
/// Returns a +1 reference (Core Foundation Create rule); the caller must
/// CVPixelBufferRelease() it. Returns NULL on allocation failure.
static CVPixelBufferRef MVCreateIOSurfaceBackedPixelBuffer(size_t width, size_t height) {
    // An empty IOSurface-properties dictionary asks CoreVideo to back the
    // buffer with an IOSurface, which the GPU-based CIContext requires.
    CFDictionaryRef empty = CFDictionaryCreate(kCFAllocatorDefault,
                                               NULL,
                                               NULL,
                                               0,
                                               &kCFTypeDictionaryKeyCallBacks,
                                               &kCFTypeDictionaryValueCallBacks);
    CFMutableDictionaryRef attributes = CFDictionaryCreateMutable(kCFAllocatorDefault,
                                                                  1,
                                                                  &kCFTypeDictionaryKeyCallBacks,
                                                                  &kCFTypeDictionaryValueCallBacks);
    CFDictionarySetValue(attributes, kCVPixelBufferIOSurfacePropertiesKey, empty);
    CVPixelBufferRef buffer = NULL;
    CVPixelBufferCreate(kCFAllocatorSystemDefault, width, height,
                        kCVPixelFormatType_32BGRA, attributes, &buffer);
    // Fix: the original code leaked both dictionaries (Create rule, never released).
    CFRelease(attributes);
    CFRelease(empty);
    return buffer;
}

/// Composites every still-material layer, in order, over the sample buffer's
/// frame on the GPU and renders the result into a new pixel buffer.
///
/// @param materials     Layers conforming to MicroVideoStillMaterial, bottom-most first.
/// @param sampleBuffer  Provides the background frame and the output dimensions.
/// @return A +1 CVPixelBufferRef the caller must release, or NULL on failure.
///
/// NOTE(review): this method runs on a background thread per the original author.
- (CVPixelBufferRef)pixelBufferRefWithMergingStillMaterials:(NSArray *)materials withBackgroundSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    CIImage *backgroundImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];

    // The context is needed inside the loop (for flattening) as well as for the
    // final render, so create it up front.
    EAGLContext *glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    CIContext *context = [CIContext contextWithEAGLContext:glContext];

    // Fix for the "all black at 42+ layers" bug: CIImage is lazy, so each
    // source-over composite adds a node to an ever-deeper filter graph, and a
    // graph that deep fails on the GPU renderer. Bound the depth by flattening —
    // rendering the accumulated image into a scratch buffer — every
    // kMaxChainDepth layers, then continuing from that concrete bitmap.
    static const NSUInteger kMaxChainDepth = 32;
    NSUInteger chainDepth = 0;
    for (id<MicroVideoStillMaterial> stillMaterial in materials) {
        CIImage *foregroundImage = [stillMaterial image];
        // Equivalent to CISourceOverCompositing, without re-configuring a
        // shared CIFilter on every iteration.
        backgroundImage = [foregroundImage imageByCompositingOverImage:backgroundImage];
        if (++chainDepth >= kMaxChainDepth) {
            CVPixelBufferRef scratch = MVCreateIOSurfaceBackedPixelBuffer(width, height);
            if (scratch) {
                CVPixelBufferLockBaseAddress(scratch, 0);
                [context render:backgroundImage toCVPixelBuffer:scratch];
                CVPixelBufferUnlockBaseAddress(scratch, 0);
                backgroundImage = [CIImage imageWithCVPixelBuffer:scratch];
                // CIImage retains the buffer, so releasing our +1 here is safe.
                CVPixelBufferRelease(scratch);
                chainDepth = 0;
            }
        }
    }

    CVPixelBufferRef outputPixelBuffer = MVCreateIOSurfaceBackedPixelBuffer(width, height);
    if (outputPixelBuffer) {
        CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
        [context render:backgroundImage toCVPixelBuffer:outputPixelBuffer];
        CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
    }
    return outputPixelBuffer;
}
- (CVPixelBufferRef)pixelBufferRefWithMergingStillMaterials:(NSArray *)materials withBackgroundSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // 基于GPU合并所有图层
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    CIImage *backgroundImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    static int i = 0;
    i = 0;
    CIFilter *filter = [CIFilter filterWithName:@"CISourceOverCompositing"];
    // 合并静态素材到一个CIImage
    for (id<MicroVideoStillMaterial> stillMaterial in materials) {
        CIImage *foregroundImage = [stillMaterial image];
        [filter setDefaults];
        [filter setValue:backgroundImage forKey:kCIInputBackgroundImageKey];
        [filter setValue:foregroundImage forKey:kCIInputImageKey];
        backgroundImage = filter.outputImage;
        i++;
    }
    NSLog(@"iiiiiiiiiii %d", i);
    EAGLContext *glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    CIContext *context = [CIContext contextWithEAGLContext:glContext];
    CFDictionaryRef empty = CFDictionaryCreate(kCFAllocatorDefault, // 空的IOSurface属性字典
                                               NULL,
                                               NULL,
                                               0,
                                               &kCFTypeDictionaryKeyCallBacks,
                                               &kCFTypeDictionaryValueCallBacks);
    CFMutableDictionaryRef attributes = CFDictionaryCreateMutable(kCFAllocatorDefault,
                                                                  1,
                                                                  &kCFTypeDictionaryKeyCallBacks,
                                                                  &kCFTypeDictionaryValueCallBacks);
    CFDictionarySetValue(attributes, kCVPixelBufferIOSurfacePropertiesKey, empty);
    CVPixelBufferRef outputPixelBuffer = NULL;
    CVPixelBufferCreate(kCFAllocatorSystemDefault, width, height, kCVPixelFormatType_32BGRA, attributes, &outputPixelBuffer);
    CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
    [context render:backgroundImage toCVPixelBuffer:outputPixelBuffer];
    CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
    return outputPixelBuffer;
}
这段代码在后台线程中运行。如果有人知道原因,恳请指点,谢谢。