在下一个视图中显示生成的视频,并将其保存到 iOS 的相机胶卷(Camera Roll)

在下一个视图中显示生成的视频,并将其保存到 iOS 的相机胶卷(Camera Roll)。标签:ios、video、uiimage。问题概述:我需要选择多张图片和一段音乐来生成视频。我已经可以把选中的图片保存到数组并由它们生成视频,但问题是:生成的视频应该显示在下一个视图中,并保存到相机胶卷;而现在它只被写入了 Documents 目录。代码见下文。

在这里,我需要选择多张图片、选择音乐来生成视频。我可以把选中的图片保存到数组中,再由这些图片生成视频。但我的问题是:生成的视频应该显示在下一个视图中,并且应该保存到相机胶卷(Camera Roll);而现在它只是被写入 Documents 目录并留在那里。请大家帮帮我。

我正在跟踪这个链接

我的代码:

 // Resolve the output path for the intermediate video and load every bundled
 // JPEG into self.chosenImages as the source frames.
 NSError *error = nil;
 NSFileManager *fileMgr = [NSFileManager defaultManager];
 NSString *documentsDirectory = [NSHomeDirectory()
 stringByAppendingPathComponent:@"Documents"];
 NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
 // Remove a stale file from a previous run. Only attempt (and only log a
 // failure) when the file actually exists — the original logged a spurious
 // "Unable to delete" on every first run.
 if ([fileMgr fileExistsAtPath:videoOutputPath] &&
     ![fileMgr removeItemAtPath:videoOutputPath error:&error]) {
     NSLog(@"Unable to delete file: %@", [error localizedDescription]);
 }
 CGSize imageSize = CGSizeMake(400, 200);
 NSUInteger fps = 30;
 NSArray *imagePaths = [[NSBundle mainBundle] pathsForResourcesOfType:@"jpg" inDirectory:nil];
 self.chosenImages = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];
 for (NSString *path in imagePaths)
 {
     UIImage *image = [UIImage imageWithContentsOfFile:path];
     // imageWithContentsOfFile: returns nil for unreadable files; adding nil
     // to an NSMutableArray raises, so skip those entries.
     if (image != nil) {
         [self.chosenImages addObject:image];
     }
 }
 // Log AFTER populating the array (the original logged before, always 0) and
 // use %lu — %i is wrong for NSUInteger on 64-bit.
 NSLog(@"-->imageArray.count= %lu", (unsigned long)self.chosenImages.count);

 NSLog(@"Start building video from defined frames.");

 // Configure an AVAssetWriter that encodes H.264 frames of imageSize into the
 // intermediate output file.
 AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
  [NSURL fileURLWithPath:videoOutputPath]  fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];
 NSParameterAssert(videoWriter);

 NSDictionary *videoSettings = @{
     AVVideoCodecKey  : AVVideoCodecH264,
     AVVideoWidthKey  : @((int)imageSize.width),
     AVVideoHeightKey : @((int)imageSize.height),
 };

 AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput
     assetWriterInputWithMediaType:AVMediaTypeVideo
                    outputSettings:videoSettings];
 AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                sourcePixelBufferAttributes:nil];
 NSParameterAssert(videoWriterInput);
 NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
 // This is offline (batch) encoding of pre-rendered images, not a live
 // capture: NO lets the writer throttle via readyForMoreMediaData instead of
 // dropping late frames as the realtime mode may.
 videoWriterInput.expectsMediaDataInRealTime = NO;
 [videoWriter addInput:videoWriterInput];
 // Surface a start failure immediately; every later append would otherwise
 // fail with no useful diagnostics.
 if (![videoWriter startWriting]) {
     NSLog(@"startWriting failed: %@", videoWriter.error);
 }
 [videoWriter startSessionAtSourceTime:kCMTimeZero];

CVPixelBufferRef buffer = NULL;

// Append one pixel buffer per chosen image; each image is displayed for
// numberOfSecondsPerFrame seconds on a timescale of `fps`.
int frameCount = 0;
double numberOfSecondsPerFrame = 6;
double frameDuration = fps * numberOfSecondsPerFrame;  // ticks per image at `fps` timescale

NSLog(@"**************************************************");
for (UIImage *img in self.chosenImages)
{
    buffer = [self pixelBufferFromCGImage:[img CGImage]];

    BOOL append_ok = NO;
    int j = 0;
    // Retry for up to ~3 s (30 × 0.1 s) while the writer drains its queue.
    while (!append_ok && j < 30) {
        if (adaptor.assetWriterInput.readyForMoreMediaData)  {
            NSLog(@"Processing video frame (%d,%lu)",frameCount,(unsigned long)[self.chosenImages count]);

            CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            if (!append_ok) {
                // Renamed from `error` to avoid shadowing the outer variable.
                NSError *appendError = videoWriter.error;
                if (appendError != nil) {
                    NSLog(@"Unresolved error %@,%@.", appendError, [appendError userInfo]);
                }
            }
        }
        else {
            printf("adaptor not ready %d, %d\n", frameCount, j);
            [NSThread sleepForTimeInterval:0.1];
        }
        j++;
    }
    if (!append_ok) {
        // Original format string was malformed: "...%d\n, with error." put the
        // newline mid-sentence and the tail had no matching argument.
        printf("error appending image %d after %d attempts\n", frameCount, j);
    }
    // pixelBufferFromCGImage: presumably follows the CoreFoundation Create
    // rule and returns a +1 buffer (TODO confirm in its implementation);
    // without this release every frame leaks its backing memory.
    if (buffer != NULL) {
        CVPixelBufferRelease(buffer);
        buffer = NULL;
    }
    frameCount++;
}
NSLog(@"**************************************************");

// Finish the session. NOTE(review): -finishWriting is synchronous and
// deprecated; the following composition step depends on the file being fully
// written, so switching to -finishWritingWithCompletionHandler: would require
// moving that step into the handler.
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
NSLog(@"Write Ended");



////////////////////////////////////////////////////////////////////////////
//////////////  OK now add an audio file to move file  /////////////////////
// Mux the freshly written video with a bundled MP3 and export the result as
// MP4. Saving to the Camera Roll (and showing the next view) must happen in
// the export's completion handler — that is the asker's actual bug.
AVMutableComposition *mixComposition = [AVMutableComposition composition];

NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
// audio input file...
NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
NSURL    *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];

// this is the video file that was just written above, full path to file is in --> videoOutputPath
NSURL    *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];

// final muxed output file
NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
NSURL    *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];

if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
    [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

CMTime nextClipStartTime = kCMTimeZero;

AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack =
    [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *insertError = nil;
// Check the insert result instead of passing error:nil — a silent failure
// here yields an empty/black export with no diagnostics. -firstObject is
// nil-safe where objectAtIndex:0 would throw on a track-less asset.
if (![a_compositionVideoTrack insertTimeRange:video_timeRange
                                      ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject]
                                       atTime:nextClipStartTime
                                        error:&insertError]) {
    NSLog(@"Could not insert video track: %@", insertError);
}

//nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);

AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack =
    [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                preferredTrackID:kCMPersistentTrackID_Invalid];
if (![b_compositionAudioTrack insertTimeRange:audio_timeRange
                                      ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject]
                                       atTime:nextClipStartTime
                                        error:&insertError]) {
    NSLog(@"Could not insert audio track: %@", insertError);
}

AVAssetExportSession *_assetExport =
    [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                     presetName:AVAssetExportPresetHighestQuality];
// AVFileTypeMPEG4 is the framework constant for the raw @"public.mpeg-4" UTI.
_assetExport.outputFileType = AVFileTypeMPEG4;
_assetExport.outputURL = outputFileUrl;

[_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
    // The export is asynchronous: outputFilePath only exists once this block
    // runs with status == Completed. The original logged "DONE" (and left the
    // save commented out) right after starting the export, before the file
    // was written — which is why nothing reached the Camera Roll.
    if (_assetExport.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"DONE.....outputFilePath--->%@", outputFilePath);
        dispatch_async(dispatch_get_main_queue(), ^{
            [self saveVideoToAlbum:outputFilePath];
        });
    } else {
        NSLog(@"Export failed (%ld): %@", (long)_assetExport.status, _assetExport.error);
    }
}];
NSError *error = nil;
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory = [NSHomeDirectory()
stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
    NSLog(@"Unable to delete file: %@", [error localizedDescription]);
CGSize imageSize = CGSizeMake(400, 200);
NSUInteger fps = 30;
NSArray* imagePaths = [[NSBundle mainBundle] pathsForResourcesOfType:@"jpg" inDirectory:nil];
self.chosenImages = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];
NSLog(@"-->imageArray.count= %i", self.chosenImages.count);
for (NSString* path in imagePaths)
{
    [self.chosenImages addObject:[UIImage imageWithContentsOfFile:path]];
}
NSLog(@"Start building video from defined frames.");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
    [NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                               [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                               nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
    assetWriterInputWithMediaType:AVMediaTypeVideo
                   outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
    assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                               sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
double numberOfSecondsPerFrame = 6;
double frameDuration = fps * numberOfSecondsPerFrame;
//for(VideoFrame * frm in imageArray)
NSLog(@"**************************************************");
for (UIImage * img in self.chosenImages)
{
    //UIImage * img = frm._imageFrame;
    buffer = [self pixelBufferFromCGImage:[img CGImage]];
    BOOL append_ok = NO;
    int j = 0;
    while (!append_ok && j < 30) {
        if (adaptor.assetWriterInput.readyForMoreMediaData)  {
            //print out status:
            NSLog(@"Processing video frame (%d,%lu)",frameCount,(unsigned long)[self.chosenImages count]);
            CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            if(!append_ok){
                NSError *error = videoWriter.error;
                if(error!=nil) {
                    NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                }
            }
        }
        else {
            printf("adaptor not ready %d, %d\n", frameCount, j);
            [NSThread sleepForTimeInterval:0.1];
        }
        j++;
    }
    if (!append_ok) {
        printf("error appending image %d times %d\n, with error.", frameCount, j);
    }
    frameCount++;
}
NSLog(@"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
NSLog(@"Write Ended");
////////////////////////////////////////////////////////////////////////////
//////////////  OK now add an audio file to move file  /////////////////////
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
// audio input file...
NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
NSURL    *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
// this is the video file that was just written above, full path to file is in --> videoOutputPath
NSURL    *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
// create the final video output file as MOV file - may need to be MP4, but this works so far...
NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
NSURL    *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
    [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
AVURLAsset* audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl
// Save the exported video to the user's Camera Roll (Saved Photos album).
// Fixes in the original snippet: `If` is not a keyword (would not compile),
// and `DocumentPath` was undefined — the argument must be the file URL of the
// exported video (outputFilePath from the export step above).
ALAssetsLibrary *assetLibrary = [[ALAssetsLibrary alloc] init];
[assetLibrary writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:outputFilePath]
                                 completionBlock:^(NSURL *assetURL, NSError *error) {
    if (error == nil) {
        // Saved successfully — safe to show the video in the next view now.
        NSLog(@"Saved to Camera Roll: %@", assetURL);
    } else {
        NSLog(@"Saving to Camera Roll failed: %@", error);
    }
}];