
iOS: Custom video size causes AVAssetWriter output to tear


I rendered a simple video containing a red rectangle using the code below. With _CanvasSize = CGSizeMake(320, 200) everything works fine. However, if I change the size to _CanvasSize = CGSizeMake(321, 200) or to (100, 100), the video tears. Does anyone know why, and which sizes I should choose? (I am using Xcode 7.3.1 with the iOS 9 SDK.)

NSString *fileNameOut = @"temp.mp4";
NSString *directoryOut = @"tmp/";
NSString *outFile = [NSString stringWithFormat:@"%@%@",directoryOut,fileNameOut];
NSString *path = [NSHomeDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"%@",outFile]];
NSURL *videoTempURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), fileNameOut]];

// WARNING: AVAssetWriter does not overwrite files for us, so remove the destination file if it already exists
NSFileManager *fileManager = [NSFileManager defaultManager];
[fileManager removeItemAtPath:[videoTempURL path]  error:NULL];
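// NOTE: 'path' (NSHomeDirectory() + "tmp/temp.mp4") and 'videoTempURL'
// (NSTemporaryDirectory() + "temp.mp4") normally refer to the same file on
// iOS, since the app's temporary directory lives at <home>/tmp/, so the
// removal above clears the writer's destination.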


CGSize _CanvasSize;// = CGSizeMake(size.width, size.height);
NSError *error = nil;
NSInteger FPS = 30;
AVAssetWriter* VIDCtrl = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4 error:&error];
if (!VIDCtrl || error)
{
    NSLog(@"Can NOT Create Video Writer");
    return;
}

_CanvasSize = CGSizeMake(321, 200);

NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:_CanvasSize.width], AVVideoWidthKey,
                               [NSNumber numberWithInt:_CanvasSize.height], AVVideoHeightKey,
                               nil];

AVAssetWriterInput* writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                     outputSettings:videoSettings];

AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                 sourcePixelBufferAttributes:nil];
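// NOTE: passing a sourcePixelBufferAttributes dictionary here would let the
// adaptor vend correctly formatted buffers via its pixel buffer pool; with
// nil, this code creates its own buffers with CVPixelBufferCreate below.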
NSParameterAssert(writerInput);
NSParameterAssert([VIDCtrl canAddInput:writerInput]);
[VIDCtrl addInput:writerInput];
[VIDCtrl startWriting];
[VIDCtrl startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;

double ftime = 600.0 / FPS;
double currenttime = 0;
double frametime = 1.0 / FPS;

int i = 0;
while (1)
{
    // Check if the writer is ready for more data, if not, just wait
    if(writerInput.readyForMoreMediaData){
        
        CMTime frameTime = CMTimeMake(ftime, 600);
        // CMTime = value and timescale.
        // Timescale = the number of ticks per second you want
        // Value is the number of ticks
        // For us - each frame we add lasts ftime/600 = 1/30th of a second at FPS = 30
        // Apple recommends a 600-tick timescale for video because it is a
        // multiple of the standard video rates: 24, 30, 60 fps, etc.
        CMTime lastTime=CMTimeMake(i*ftime, 600);
        CMTime presentTime=CMTimeAdd(lastTime, frameTime);
        
        if (i == 0) {presentTime = CMTimeMake(0, 600);}
        // This ensures the first frame starts at 0.
        
        buffer = NULL;
        if (i < 30)
        {
            
            NSLog(@"%d %d",i, presentTime.value);
            CGSize sz = _CanvasSize;
            int height = sz.height, width = sz.width;
            
            NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                                     [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                                     [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                                     nil];
            CVPixelBufferRef pxbuffer = NULL;
            if (!pxbuffer)
            {
                CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width,
                                                      height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
                                                      &pxbuffer);
                NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
            }
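            // NOTE: CoreVideo may pad each row of the buffer it allocates for
            // alignment, so CVPixelBufferGetBytesPerRow(pxbuffer) is not
            // guaranteed to equal width * 4 (see the sketch after this listing).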
            
            CVPixelBufferLockBaseAddress(pxbuffer, 0);
            void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
            NSParameterAssert(pxdata != NULL);
            
            NSUInteger bytesPerPixel = 4;
            NSUInteger bytesPerRow = bytesPerPixel * sz.width;
            NSUInteger bitsPerComponent = 8;
            CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
            CGContextRef gc = CGBitmapContextCreate(pxdata, sz.width, sz.height,
                                                    bitsPerComponent, bytesPerRow, colorSpace,
                                                    kCGImageAlphaNoneSkipFirst);
            UIGraphicsPushContext(gc);
            CGContextTranslateCTM(gc, 0, sz.height);
            CGContextScaleCTM(gc, 1.0, -1.0);
            CGContextSetFillColorWithColor(gc, [UIColor whiteColor].CGColor);
            CGContextFillRect(gc, (CGRect){0,0,sz});

            CGContextSetStrokeColorWithColor(gc, [UIColor redColor].CGColor);
            CGContextStrokeRect(gc, CGRectMake(10, 10, 30, 30));
            
            CGColorSpaceRelease(colorSpace);
            CGContextRelease(gc);
            
            CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
            buffer = pxbuffer;
            i++;
        }
        
        currenttime+=frametime;
        
        if (buffer)
        {
            // Give the CGImage to the AVAssetWriter to add to your video
            [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
            // CVBufferRelease(buffer);
            CFRelease(buffer);
        }
        else
        {
            //Finish the session:
            // This is important to be done exactly in this order
            [writerInput markAsFinished];
            // WARNING: -finishWriting is deprecated; use the
            // completion-handler variant instead.
            [VIDCtrl finishWritingWithCompletionHandler:^{
                NSLog(@"Finished writing...checking completion status...");
                if (VIDCtrl.status == AVAssetWriterStatusCompleted)
                {
                    NSLog(@"Video writing succeeded To %@",path);
                } else
                {
                    NSLog(@"Video writing failed: %@", VIDCtrl.error);
                }
                
            }]; // end videoWriter finishWriting Block
            
            // NOTE: the adaptor owns its pixelBufferPool, so releasing it here
            // (e.g. via CVPixelBufferPoolRelease) would over-release it.
            
            NSLog (@"Done");
            break;
        }
    }
}
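
A likely failure mode worth checking, with a hedged sketch (it reuses the pxbuffer, pxdata, and colorSpace names from the listing above, so treat it as an illustration rather than a verified fix): CVPixelBufferCreate is free to pad each row of the buffer it allocates for alignment, so the real stride, CVPixelBufferGetBytesPerRow(pxbuffer), can be larger than width * 4 when the width does not happen to align (320 * 4 = 1280 aligns; 321 * 4 = 1284 and 100 * 4 = 400 may not). Because the bitmap context above hard-codes bytesPerRow = bytesPerPixel * sz.width, each drawn row then lands at the wrong offset in the buffer, which shows up as exactly the kind of tearing described. Creating the context from the buffer's actual geometry avoids the mismatch:

// Sketch: build the CGBitmapContext from the pixel buffer's real geometry
// instead of assuming width * 4 bytes per row. Assumes pxbuffer has already
// been created and locked exactly as in the listing above.
size_t bpr = CVPixelBufferGetBytesPerRow(pxbuffer);   // actual (possibly padded) stride
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef gc = CGBitmapContextCreate(pxdata,
                                        CVPixelBufferGetWidth(pxbuffer),
                                        CVPixelBufferGetHeight(pxbuffer),
                                        8,            // bits per component
                                        bpr,          // use the buffer's stride
                                        colorSpace,
                                        kCGImageAlphaNoneSkipFirst);
// ... draw the white background and red rectangle as before ...
CGColorSpaceRelease(colorSpace);
CGContextRelease(gc);

Alternatively, sticking to widths divisible by 16 is a common safe choice: it tends to avoid the row padding in the first place and matches the H.264 encoder's 16x16 macroblocks, which would explain why 320 works while 321 does not.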