iOS AVAssetExportSession失败代码=-11820仅限iPhone 5(c)
我想从包含两个视频(带音频)和一个音轨的合成中导出一个视频文件。它适用于iOS AVAssetExportSession失败代码=-11820仅限iPhone 5(c),ios,objective-c,iphone,avfoundation,avassetexportsession,Ios,Objective C,Iphone,Avfoundation,Avassetexportsession,我想从包含两个视频(带音频)和一个音轨的合成中导出一个视频文件。它适用于iphone5s及更高版本,但在iphone5c(iOS 9.2.1)上失败。在以下情况下返回错误: [_assetExport exportAsynchronouslyWithCompletionHandler: ^(void ) { if (AVAssetExportSessionStatusCompleted == _assetExport.status) { [self performS
iphone5s
及更高版本,但在iphone5c
(iOS 9.2.1)
上失败。在以下情况下返回错误:
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
if (AVAssetExportSessionStatusCompleted == _assetExport.status) {
[self performSelectorOnMainThread:@selector(videoIsDone) withObject:nil waitUntilDone:YES];
} else {
NSLog(@"Export error: %@", _assetExport.error);
[self performSelectorOnMainThread:@selector(videoHasFailed) withObject:nil waitUntilDone:YES];
}
}
];
打印的日志:
导出错误:error Domain=AVFoundationErrorDomain code=-11820“无法完成导出”UserInfo={nsLocalizedRecoverysSuggestion=再次尝试导出,NSLocalizedDescription=无法完成导出}
如上所述,在我的iphone5s、6和6s上它工作得非常好,但只有在我的iphone5c上它才会返回此错误。希望有人对此有经验
创建曲目和合成的完整代码:
- (void) generateVideoWithInputPath:(NSString*)inputVideo andAudioFileName:(NSString*)audioFileName andVolume:(float)volume {
NSString* introVideoPath = [[NSBundle mainBundle] pathForResource:@"IntroVideo" ofType:@"mp4"];
NSURL* introVideoUrl = [NSURL fileURLWithPath:introVideoPath];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:inputVideo];
self.outputAssetURL = NULL;
self.outputFilePath = finalVideoPath;
NSURL* outputFileUrl = [NSURL fileURLWithPath:self.outputFilePath];
unlink([self.outputFilePath UTF8String]); // remove existing result
// Create composition
AVMutableComposition* mixComposition = [AVMutableComposition composition];
// Create Asset for introVideo
AVURLAsset* introVideoAsset = [[AVURLAsset alloc] initWithURL:introVideoUrl options:nil];
// Create time ranges
CMTime introStartTime = kCMTimeZero;
CMTime introEndTime = introVideoAsset.duration;
CMTimeRange introVideo_timeRange = CMTimeRangeMake(introStartTime, introEndTime);
//add VideoTrack of introVideo to composition
NSArray* introVideoAssetTracks = [introVideoAsset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack* introVideoAssetTrack = ([introVideoAssetTracks count] > 0 ? [introVideoAssetTracks objectAtIndex:0] : nil);
AVMutableCompositionTrack* b_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionVideoTrack insertTimeRange:introVideo_timeRange ofTrack:introVideoAssetTrack atTime:introStartTime error:nil];
// Add AudioTrack of introVideo to composition
NSArray* audioAssetTracksIntro = [introVideoAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack* audioAssetTrackIntro = ([audioAssetTracksIntro count] > 0 ? [audioAssetTracksIntro objectAtIndex:0] : nil);
AVMutableCompositionTrack* a_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionAudioTrack insertTimeRange:introVideo_timeRange ofTrack:audioAssetTrackIntro atTime:introStartTime error:nil];
// Create Asset for inputVideo
CMTime nextClipStartTime = introEndTime;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
// Create time ranges
CMTime videoStartTime = kCMTimeZero;
CMTime videoEndTime = videoAsset.duration;
if (CMTIME_IS_INVALID(videoEndTime)) {
NSLog(@"videoEndTime is invalid");
}
CMTimeRange mainVideo_timeRange = CMTimeRangeMake(videoStartTime, videoEndTime);
// Add VideoTrack of inputVideo to composition
NSArray* videoAssetTracks2 = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack* videoAssetTrack2 = ([videoAssetTracks2 count] > 0 ? [videoAssetTracks2 objectAtIndex:0] : nil);
// CMTime audioDurationFix = CMTimeAdd(videoAsset.duration, CMTimeMakeWithSeconds(-1.0f, 1));
// CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioDurationFix);
AVMutableCompositionTrack* a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:mainVideo_timeRange ofTrack:videoAssetTrack2 atTime:nextClipStartTime error:nil];
// Add AudioTrack of inputVideo to composition
NSArray* audioAssetTracks2 = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack* audioAssetTrack2 = ([audioAssetTracks2 count] > 0 ? [audioAssetTracks2 objectAtIndex:0] : nil);
//AVMutableCompositionTrack* a_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionAudioTrack insertTimeRange:mainVideo_timeRange ofTrack:audioAssetTrack2 atTime:nextClipStartTime error:nil];
AVMutableAudioMix* audioMix = NULL;
if (audioFileName) {
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audioFileName];
// Create Asset for audio (song)
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
// Add Audio of song to composition
NSArray* audioAssetTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack* audioAssetTrack = ([audioAssetTracks count] > 0 ? [audioAssetTracks objectAtIndex:0] : nil);
AVMutableCompositionTrack* b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:mainVideo_timeRange ofTrack:audioAssetTrack atTime:nextClipStartTime error:nil];
// Set Volume of song
NSArray *tracksToDuck = [mixComposition tracksWithMediaType:AVMediaTypeAudio];
NSMutableArray *trackMixArray = [NSMutableArray array];
// for (int i = 0; i < [tracksToDuck count]; i++) {
AVAssetTrack *leTrack = [tracksToDuck objectAtIndex:0];
AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:leTrack];
[trackMix setVolume:1 atTime:kCMTimeZero];
[trackMixArray addObject:trackMix];
AVAssetTrack *leTrack2 = [tracksToDuck objectAtIndex:1];
AVMutableAudioMixInputParameters *trackMix2 = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:leTrack2];
[trackMix2 setVolume:volume atTime:kCMTimeZero];
[trackMixArray addObject:trackMix2];
// }
audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = trackMixArray;
}
// Export composition to videoFile
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = AVFileTypeQuickTimeMovie; //@"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
_assetExport.videoComposition = [self getVideoComposition:videoAsset intro:introVideoAsset composition:mixComposition];
// Set song volume audio
if (audioMix != NULL) {
_assetExport.audioMix = audioMix;
}
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
if (AVAssetExportSessionStatusCompleted == _assetExport.status) {
[self performSelectorOnMainThread:@selector(videoIsDone) withObject:nil waitUntilDone:YES];
} else {
NSLog(@"Export error: %@", _assetExport.error);
[self performSelectorOnMainThread:@selector(videoHasFailed) withObject:nil waitUntilDone:YES];
}
}
];
}
-(AVMutableVideoComposition *) getVideoComposition:(AVAsset *)asset intro:(AVAsset *)intro composition:( AVMutableComposition*)composition{
AVMutableCompositionTrack *compositionIntroTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
NSArray *audioTracksArray = [intro tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack *introTrack;
if (audioTracksArray.count > 0) {
introTrack = [audioTracksArray objectAtIndex:0];
[compositionIntroTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, intro.duration) ofTrack:introTrack atTime:kCMTimeZero error:nil];
}
NSArray *videoTracksArray = [asset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack *videoTrack;
if (videoTracksArray.count > 0) {
videoTrack = [videoTracksArray objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:intro.duration error:nil];
}
AVMutableVideoCompositionLayerInstruction *firstLayerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionIntroTrack];
AVMutableVideoCompositionLayerInstruction *secondLayerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
CGSize videoSize;
if (videoTrack && introTrack) {
CGSize trackDimensions = [videoTrack naturalSize];
videoSize = CGSizeMake(0, 0);
// turn around for portrait
if (trackDimensions.height>trackDimensions.width) {
videoSize = CGSizeMake(trackDimensions.width, trackDimensions.height);
} else {
videoSize = CGSizeMake(trackDimensions.height, trackDimensions.width);
}
CGAffineTransform transform = videoTrack.preferredTransform;
CGAffineTransform scale = CGAffineTransformMakeScale((videoSize.width/introTrack.naturalSize.width),(videoSize.height/introTrack.naturalSize.height));
[firstLayerInst setTransform:scale atTime:kCMTimeZero];
[secondLayerInst setTransform:transform atTime:kCMTimeZero];
} else {
videoSize = [[FilteringClass sharedFilteringClass] getVideoSize];
}
CMTime totalTime = CMTimeAdd(asset.duration, intro.duration);
NSLog(@"Total videotime: %lld", totalTime.value);
AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
inst.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime);
inst.layerInstructions = [NSArray arrayWithObjects:firstLayerInst, secondLayerInst, nil];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = [NSArray arrayWithObject:inst];
videoComposition.renderSize = videoSize;
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderScale = 1.0;
return videoComposition;
}
-(void)generateVideoWithInputPath:(NSString*)inputVideo和audioFileName:(NSString*)audioFileName和volume:(float)volume{
NSString*introVideoPath=[[NSBundle mainBundle]pathForResource:@“IntroVideo”类型:@“mp4”];
NSURL*introVideoUrl=[NSURL fileURLWithPath:introVideoPath];
NSURL*video_inputFileUrl=[NSURL fileURLWithPath:inputVideo];
self.outputAssetURL=NULL;
self.outputFilePath=finalVideoPath;
NSURL*outputFileUrl=[NSURL fileURLWithPath:self.outputFilePath];
取消链接([self.outputFilePath UTF8String]);//删除现有结果
//创作构图
AVMutableComposition*mixComposition=[AVMutableComposition];
//为视频创建资源
AVURLAsset*introVideoAsset=[[AVURLAsset alloc]initWithURL:introVideoUrl选项:nil];
//创建时间范围
CMTime introStartTime=KCMTIME0;
CMTime introEndTime=introVideoAsset.duration;
CMTimeRange introVideo_timeRange=CMTimeRangeMake(introStartTime,introEndTime);
//将视频的视频轨迹添加到合成中
NSArray*introVideoAssetTracks=[introVideoAssetTracks-SwithMediaType:AVMediaTypeVideo];
AVAssetTrack*introVideoAssetTrack=([introVideoAssetTracks计数]>0?[introVideoAssetTracks对象索引:0]:无);
AVMutableCompositionTrack*b_compositionVideoTrack=[mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_无效];
[b_compositionVideoTrack insertTimeRange:introVideo_Track的时间范围:introVideoAssetTrack时间:introStartTime错误:nil];
//在合成中添加视频的音轨
NSArray*audioAssetTracksIntro=[IntroVideoAssetTracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack*audioAssetTrackIntro=([audioAssetTrackIntro计数]>0?[audioAssetTrackIntro对象索引:0]:无);
AVMutableCompositionTrack*a_compositionAudioTrack=[mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_无效];
[a_compositionAudioTrack insertTimeRange:introVideo_Track:audioAssetTrackIntro时间:introStartTime错误:nil];
//为inputVideo创建资源
CMTime nextClipStartTime=introEndTime;
AVURLAsset*videoAsset=[[AVURLAsset alloc]initWithURL:video\u inputFileUrl选项:nil];
//创建时间范围
CMTime videoStartTime=KCMTIME0;
CMTime videoEndTime=videoAsset.duration;
如果(CMTIME_无效(videoEndTime)){
NSLog(@“videoEndTime无效”);
}
CMTimeRange mainVideo_timeRange=CMTimeRangeMake(videoStartTime,videoEndTime);
//将输入视频的视频轨迹添加到合成中
NSArray*videoAssetTracks2=[videoAsset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack*videoAssetTrack2=([videoAssetTracks2计数]>0?[videoAssetTracks2对象索引:0]:无);
//CMTime audioDurationFix=CMTimeAdd(videoAsset.duration,CMTimeMakeWithSeconds(-1.0f,1));
//CMTimeRange video\u timeRange=CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
//CMTimeRange audio_timeRange=CMTimeRangeMake(kCMTimeZero,audioDurationFix);
AVMutableCompositionTrack*a_compositionVideoTrack=[mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_无效];
[a_compositionVideoTrack insertTimeRange:Main Video_Track的时间范围:videoAssetTrack2时间:nextClipStartTime错误:nil];
//将输入视频的音轨添加到合成中
NSArray*audioAssetTracks2=[videoAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack*audioAssetTrack2=([audioAssetTracks2计数]>0?[audioAssetTracks2对象索引:0]:无);
//AVMutableCompositionTrack*a_compositionAudioTrack=[mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_无效];
[a_compositionAudioTrack insertTimeRange:Main Video_Track的时间范围:audioAssetTrack2时间:nextClipStartTime错误:nil];
AVMutableAudioMix*audioMix=NULL;
如果(音频文件名){
NSURL*audio_inputFileUrl=[NSURL fileURLWithPath:audioFileName];
//为音频(歌曲)创建资源
AVURLAsset*audioAsset=[[AVURLAsset alloc]initWithURL:audio\u inputFileUrl选项:nil];
//将歌曲的音频添加到作文中
NSArray*audioAssetTracks=[audioAssetTracks-SwithMediaType:AVMediaTypeAudio];
AVAssetTrack*audioAssetTrack=([audioAssetTracks计数]>0?[audioAssetTracks对象索引:0]:无);
AVMutableCompositionTrack*b_compositionAudioTrack=[mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_无效];
[b_compositionAudioTrack insertTimeRange:Main Video_Track的时间范围:audioAssetTrack时间:nextClipStartTime错误:nil];
//定音量
NSArray*tracksToDuck=[mixComposition tracksWithMediaType:AVMediaTypeAudio];
NSMutableArray*trackMixArray=[NSMutableArray];
//对于(int i=0;i<[tracksToDuck count];i++){
AVAssetTrack*leTrack=[tracksToDuck objectAtIndex:0];
AVMutableAudioMixInputParameters*trackMix=[AVMutableAudioMixInputParameters AudioMixInputParameters swithTrack:leTrack];
[trackMix setVolume:1 atTime:kCMTimeZero];
[trackMixArray addObject:trackMix];
AVAssetTrack*leTrack2=[tracksToDuck objectAtIndex:1];
AVMutabl