Merging videos with AVMutableComposition: adding a different title to each video


I am merging multiple videos into a single video with AVMutableComposition, and that works fine. Now I want to add a different title to each video. Any help is appreciated. Thank you all.

This is what I have tried so far:

    // mixComposition, duration, audioTrack, arrDuration, arrayInstruction,
    // parentLayer and videoLayer are set up before this loop.
    for (int i = 0; i < [arrSelectedUrls count]; i++)
    {
        // Get the asset for the i-th selected URL.
        AVAsset *currentAsset = [AVAsset assetWithURL:[arrSelectedUrls objectAtIndex:i]];

        BOOL hasAudio = [currentAsset tracksWithMediaType:AVMediaTypeAudio].count > 0;

        AVMutableCompositionTrack *currentTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:i];
        [currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];

        audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        if (hasAudio)
        {
            [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
        }

        float sect = CMTimeGetSeconds(currentAsset.duration);
        NSString *strSect = [NSString stringWithFormat:@"%f", sect];
        [arrDuration addObject:@"0.5"];
        [arrDuration addObject:@"0.5"];
        [arrDuration addObject:@"0.5"];
        [arrDuration addObject:@"0.5"];

        AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];

        // Work out the clip's orientation from its preferred transform.
        AVAssetTrack *currentAssetTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        UIImageOrientation currentAssetOrientation = UIImageOrientationUp;
        BOOL isCurrentAssetPortrait = NO;
        CGAffineTransform currentTransform = currentAssetTrack.preferredTransform;

        if (currentTransform.a == 0 && currentTransform.b == 1.0 && currentTransform.c == -1.0 && currentTransform.d == 0)  { currentAssetOrientation = UIImageOrientationRight; isCurrentAssetPortrait = YES; }
        if (currentTransform.a == 0 && currentTransform.b == -1.0 && currentTransform.c == 1.0 && currentTransform.d == 0)  { currentAssetOrientation = UIImageOrientationLeft;  isCurrentAssetPortrait = YES; }
        if (currentTransform.a == 1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == 1.0)   { currentAssetOrientation = UIImageOrientationUp; }
        if (currentTransform.a == -1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == -1.0) { currentAssetOrientation = UIImageOrientationDown; }

        CGFloat FirstAssetScaleToFitRatio = 320.0 / 320.0;
        if (isCurrentAssetPortrait) {
            FirstAssetScaleToFitRatio = 320.0 / 320.0;
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
            [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:duration];
        } else {
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
            [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor), CGAffineTransformMakeTranslation(0, 0)) atTime:duration];
        }

        duration = CMTimeAdd(duration, currentAsset.duration);

        // Hide this clip's track once it has finished playing.
        [currentAssetLayerInstruction setOpacity:0.0 atTime:duration];
        [arrayInstruction addObject:currentAssetLayerInstruction];

        NSLog(@"%lld", duration.value / duration.timescale);

        CATextLayer *titleLayer = [CATextLayer layer];
        if (i == 0) {
            titleLayer.string = @"www.miivdo.com";
        }
        if (i == 1) {
            titleLayer.string = @"www.mail.com";
        }

        // titleLayer.backgroundColor = [UIColor redColor].CGColor;
        CGSize videoSize = [currentAssetTrack naturalSize];
        titleLayer.fontSize = videoSize.height / 14;
        // titleLayer.foregroundColor = [UIColor redColor].CGColor;
        titleLayer.shadowOpacity = 0.5;
        titleLayer.alignmentMode = kCAAlignmentRight;
        titleLayer.bounds = CGRectMake(0, 0, 320, 50); // You may need to adjust this for proper display

        parentLayer = [CALayer layer];
        videoLayer  = [CALayer layer];
        parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
        videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
        [parentLayer addSublayer:videoLayer];
        [parentLayer addSublayer:titleLayer];
    }



    MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
    MainInstruction.layerInstructions = arrayInstruction;
    MainCompositionInst = [AVMutableVideoComposition videoComposition];
    MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
    MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
    MainCompositionInst.frameDuration = CMTimeMake(1, 30);
    MainCompositionInst.renderSize = CGSizeMake(320.0, 320.0);
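
I think part of the problem is that parentLayer and videoLayer are recreated on every pass of the loop, so only the last title layer survives when the animation tool is attached. Since the animation tool renders a single layer tree over the whole composition, it seems each per-clip title would have to be limited in time, for example by animating its opacity. A minimal sketch of that idea (inside the loop, before duration is advanced; clipStartSeconds and clipDurationSeconds are hypothetical helpers, not from my code above):

    // Hypothetical helpers: when this clip starts and how long it runs.
    CFTimeInterval clipStartSeconds    = CMTimeGetSeconds(duration);
    CFTimeInterval clipDurationSeconds = CMTimeGetSeconds(currentAsset.duration);

    CABasicAnimation *show = [CABasicAnimation animationWithKeyPath:@"opacity"];
    show.fromValue = [NSNumber numberWithFloat:1.0f];
    show.toValue   = [NSNumber numberWithFloat:1.0f];
    // A beginTime of 0 is remapped to CACurrentMediaTime(), so a clip that
    // starts at t = 0 must use AVCoreAnimationBeginTimeAtZero instead.
    show.beginTime = (clipStartSeconds > 0) ? clipStartSeconds : AVCoreAnimationBeginTimeAtZero;
    show.duration  = clipDurationSeconds;
    show.removedOnCompletion = NO; // required for offline (export) rendering
    titleLayer.opacity = 0.0;      // hidden except while the animation is active
    [titleLayer addAnimation:show forKey:@"showDuringClip"];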
You can try this. The idea is to export each clip on its own with its own CATextLayer title (MergeAndSave), and then stitch the exported files together into one final movie (MergeAndExport):

- (void)MergeAndSave
{
    for (int i = 0; i < [arrSelectedUrls count]; i++)
    {
        NSURL *url;
        CALayer *parentLayer;
        CALayer *videoLayer;
        AVSynchronizedLayer *animationLayer = [AVSynchronizedLayer layer];
        UIImage *image1 = [UIImage imageNamed:@"Fire1.jpeg"];
        UIImage *image2 = [UIImage imageNamed:@"Fire2.jpeg"];
        UIImage *image3 = [UIImage imageNamed:@"Fire3.jpeg"];
        UIImage *image4 = [UIImage imageNamed:@"Fire4.jpeg"];

        // A fresh composition per clip: each video is exported on its own with
        // its own title, and the exported files are merged in MergeAndExport.
        AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
        NSMutableArray *starImageArray = [NSMutableArray arrayWithObjects:(id)image1.CGImage, (id)image2.CGImage, (id)image3.CGImage, (id)image4.CGImage, nil];
        NSMutableArray *arrDuration = [[NSMutableArray alloc] init];
        NSMutableArray *arrayInstruction = [[NSMutableArray alloc] init];

        AVMutableVideoCompositionInstruction *MainInstruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        AVMutableCompositionTrack *audioTrack;
        audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                 preferredTrackID:kCMPersistentTrackID_Invalid];

        CMTime duration = kCMTimeZero;

        // Get the asset for the i-th selected URL.
        AVAsset *currentAsset = [AVAsset assetWithURL:[arrSelectedUrls objectAtIndex:i]];
        BOOL hasAudio = [currentAsset tracksWithMediaType:AVMediaTypeAudio].count > 0;

        // kCMPersistentTrackID_Invalid lets AVFoundation assign a unique track ID.
        AVMutableCompositionTrack *currentTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];

        if (hasAudio)
        {
            [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
        }

        float sect = CMTimeGetSeconds(currentAsset.duration);
        NSString *strSect = [NSString stringWithFormat:@"%f", sect];
        [arrDuration addObject:@"0.5"];
        [arrDuration addObject:@"0.5"];
        [arrDuration addObject:@"0.5"];
        [arrDuration addObject:@"0.5"];



    AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];

    AVAssetTrack *currentAssetTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    UIImageOrientation currentAssetOrientation  = UIImageOrientationUp;
    BOOL  isCurrentAssetPortrait  = NO;
    CGAffineTransform currentTransform = currentAssetTrack.preferredTransform;

    if(currentTransform.a == 0 && currentTransform.b == 1.0 && currentTransform.c == -1.0 && currentTransform.d == 0)  {currentAssetOrientation= UIImageOrientationRight; isCurrentAssetPortrait = YES;}
    if(currentTransform.a == 0 && currentTransform.b == -1.0 && currentTransform.c == 1.0 && currentTransform.d == 0)  {currentAssetOrientation =  UIImageOrientationLeft; isCurrentAssetPortrait = YES;}
    if(currentTransform.a == 1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == 1.0)   {currentAssetOrientation =  UIImageOrientationUp;}
    if(currentTransform.a == -1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == -1.0) {currentAssetOrientation = UIImageOrientationDown;}

    CGFloat FirstAssetScaleToFitRatio = 320.0/320.0;
    if(isCurrentAssetPortrait){
        FirstAssetScaleToFitRatio = 320.0/320.0;
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
        [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:duration];
    }else{
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
        [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:duration];
    }

    duration=CMTimeAdd(duration, currentAsset.duration);

    [currentAssetLayerInstruction setOpacity:0.0 atTime:duration];
    [arrayInstruction addObject:currentAssetLayerInstruction];

    NSLog(@"%lld", duration.value/duration.timescale);

    CATextLayer *titleLayer = [CATextLayer layer];
    titleLayer.string = [NSString stringWithFormat:@"Final%@",[Titlearry objectAtIndex:i]];

    CGSize videoSize = [currentAssetTrack naturalSize];
    titleLayer.fontSize = videoSize.height / 14;

     titleLayer.shadowOpacity = 0.5;
    titleLayer.alignmentMode = kCAAlignmentRight;
   titleLayer.bounds = CGRectMake(0, 0, 320, 500); //You may need to adjust this for proper display

    parentLayer = [CALayer layer];
    videoLayer  = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
   [parentLayer addSublayer:videoLayer];
   [parentLayer addSublayer:titleLayer];




    // New addition from Ritesh //

    // Key times for the contents animation, spaced one frame (1/30 s) apart.
    double time1 = 0.01;
    NSMutableArray *keyTimesArray = [[NSMutableArray alloc] init];
    for (int z = 1; z < 4; z++)
    {
        NSNumber *temp = [NSNumber numberWithFloat:(time1 + (float)z / 30)];
        [keyTimesArray addObject:temp];
    }





    animationLayer.opacity = 1.0;
    // animationLayer.backgroundColor = [UIColor yellowColor].CGColor;
    [animationLayer setFrame:CGRectMake(0, 0, 320, 50)];
    [parentLayer addSublayer:animationLayer];

    // Cycle the "Fire" images as the layer's contents while the video plays.
    CAKeyframeAnimation *changeImageAnimation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
    [changeImageAnimation setDelegate:self];
    changeImageAnimation.calculationMode = kCAAnimationDiscrete;

    [animationLayer setContents:[starImageArray lastObject]];

    changeImageAnimation.duration = 10.0f;
    changeImageAnimation.repeatCount = 30;
    changeImageAnimation.values = [NSArray arrayWithArray:starImageArray];
    // [changeImageAnimation setKeyTimes:arrDuration];
    [changeImageAnimation setBeginTime:1.0];
    [changeImageAnimation setRemovedOnCompletion:NO];
    [animationLayer addAnimation:changeImageAnimation forKey:@"contents"];
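    // Note: if you re-enable keyTimes above, kCAAnimationDiscrete expects one
    // more key time than there are values (first 0.0, last 1.0), so the four
    // images need five entries; arrDuration's four @"0.5" strings will not
    // work. A sketch with evenly spaced, hypothetical key times:
    // changeImageAnimation.keyTimes = [NSArray arrayWithObjects:
    //     [NSNumber numberWithFloat:0.0f], [NSNumber numberWithFloat:0.25f],
    //     [NSNumber numberWithFloat:0.5f], [NSNumber numberWithFloat:0.75f],
    //     [NSNumber numberWithFloat:1.0f], nil];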



    MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
    MainInstruction.layerInstructions = arrayInstruction;
   AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];

    MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
    MainCompositionInst.frameDuration = CMTimeMake(1,30);
    MainCompositionInst.renderSize = CGSizeMake(320.0, 320.0);

    // NSString *myPathDocs =  [[[AppDelegate sharedAppDelegate] applicationCacheDirectory] stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo%-dtemp.mp4",arc4random() % 10000]];
    NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docsDir = [dirPaths objectAtIndex:0];
    NSString *outputFilePath = [docsDir stringByAppendingPathComponent:[NSString stringWithFormat:@"merge%@.mov",[Titlearry objectAtIndex:i]]];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
     url = [NSURL fileURLWithPath:outputFilePath];
    [UrlArray addObject:outputFilePath];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL=url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.videoComposition = MainCompositionInst;
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^
     {
         switch (exporter.status)
         {
             case AVAssetExportSessionStatusCompleted:
             {
                 // Exports run asynchronously, so the last clip to be started
                 // is not guaranteed to finish last; a dispatch group would be
                 // more robust than checking the index here.
                 if (i == [arrSelectedUrls count] - 1)
                 {
                     [self mergeAllVideoClipscompletionCallback];
                 }
             }
                 break;
             case AVAssetExportSessionStatusFailed:
                 NSLog(@"Failed:%@", exporter.error.description);
                 break;
             case AVAssetExportSessionStatusCancelled:
                 NSLog(@"Canceled:%@", exporter.error);
                 break;
             case AVAssetExportSessionStatusExporting:
                 NSLog(@"Exporting!");
                 break;
             case AVAssetExportSessionStatusWaiting:
                 NSLog(@"Waiting");
                 break;
             default:
                 break;
         }
     }];
}



    // [self performSelector:@selector(MergeAndExport) withObject:nil afterDelay:3.0];
    // [self MergeAndExport];
}
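
mergeAllVideoClipscompletionCallback is not shown in the original answer; a minimal sketch, assuming it simply starts the second pass once the last per-clip export has completed:

- (void)mergeAllVideoClipscompletionCallback
{
    // Export completion handlers run on a background queue; hop back to
    // the main thread before kicking off the final merge.
    dispatch_async(dispatch_get_main_queue(), ^{
        [self MergeAndExport];
    });
}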


-(void)MergeAndExport
{

CALayer * parentLayer;
CALayer * videoLayer;

//int numberOfFile = [arrSelectedUrls count]; // Number Of Video You want to merge
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];

NSMutableArray *arrayInstruction = [[NSMutableArray alloc] init];

AVMutableVideoCompositionInstruction * MainInstruction =
[AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableCompositionTrack *audioTrack;

audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                         preferredTrackID:kCMPersistentTrackID_Invalid];


CMTime duration = kCMTimeZero;
for (int i = 0; i < [arrSelectedUrls count]; i++)
{
    // Read back the per-clip file that MergeAndSave exported.
    NSURL *url = [NSURL fileURLWithPath:[UrlArray objectAtIndex:i]];
    AVURLAsset *currentAsset = [AVURLAsset URLAssetWithURL:url options:nil];

    BOOL hasAudio = [currentAsset tracksWithMediaType:AVMediaTypeAudio].count > 0;
    AVMutableCompositionTrack *currentTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];

    if (hasAudio)
    {
        [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
    }



    AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];
    AVAssetTrack *currentAssetTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    UIImageOrientation currentAssetOrientation  = UIImageOrientationUp;
    BOOL  isCurrentAssetPortrait  = NO;
    CGAffineTransform currentTransform = currentAssetTrack.preferredTransform;

    if(currentTransform.a == 0 && currentTransform.b == 1.0 && currentTransform.c == -1.0 && currentTransform.d == 0)  {currentAssetOrientation= UIImageOrientationRight; isCurrentAssetPortrait = YES;}
    if(currentTransform.a == 0 && currentTransform.b == -1.0 && currentTransform.c == 1.0 && currentTransform.d == 0)  {currentAssetOrientation =  UIImageOrientationLeft; isCurrentAssetPortrait = YES;}
    if(currentTransform.a == 1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == 1.0)   {currentAssetOrientation =  UIImageOrientationUp;}
    if(currentTransform.a == -1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == -1.0) {currentAssetOrientation = UIImageOrientationDown;}

    CGFloat FirstAssetScaleToFitRatio = 320.0/320.0;
    if(isCurrentAssetPortrait){
        FirstAssetScaleToFitRatio = 320.0/320.0;
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
        [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:duration];
    }else{
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
        [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:duration];
    }

    duration=CMTimeAdd(duration, currentAsset.duration);

    [currentAssetLayerInstruction setOpacity:0.0 atTime:duration];
    [arrayInstruction addObject:currentAssetLayerInstruction];

    NSLog(@"%lld", duration.value/duration.timescale);

    CATextLayer *titleLayer = [CATextLayer layer];
    titleLayer.string = @"www.miivdo.com";
    // titleLayer.backgroundColor = [UIColor redColor].CGColor;
    CGSize videoSize = [currentAssetTrack naturalSize];
    titleLayer.fontSize = videoSize.height / 14;
    // titleLayer.foregroundColor = [UIColor redColor].CGColor;
    titleLayer.shadowOpacity = 0.5;
    titleLayer.alignmentMode = kCAAlignmentRight;
    titleLayer.bounds = CGRectMake(0, 0, 320, 50); // You may need to adjust this for proper display

    parentLayer = [CALayer layer];
    videoLayer  = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:titleLayer];


}




MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
MainInstruction.layerInstructions = arrayInstruction;
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];

MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(320.0, 320.0);


// NSString *myPathDocs =  [[[AppDelegate sharedAppDelegate] applicationCacheDirectory] stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo%-dtemp.mp4",arc4random() % 10000]];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:@"merge.mov"];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
    [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];


NSURL *url = [NSURL fileURLWithPath:outputFilePath];


AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
 {
     switch (exporter.status)
     {
         case AVAssetExportSessionStatusCompleted:
         {

             NSURL *outputURL = exporter.outputURL;

             ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
             if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {

                 [self writeExportedVideoToAssetsLibrary:outputURL];

                 //
             }

         }
             break;
         case AVAssetExportSessionStatusFailed:
             NSLog(@"Failed:%@", exporter.error.description);
             break;
         case AVAssetExportSessionStatusCancelled:
             NSLog(@"Canceled:%@", exporter.error);
             break;
         case AVAssetExportSessionStatusExporting:
             NSLog(@"Exporting!");
             break;
         case AVAssetExportSessionStatusWaiting:
             NSLog(@"Waiting");
             break;
         default:
             break;
     }
 }];




}
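
writeExportedVideoToAssetsLibrary: is also left out above; a minimal sketch using the same (now-deprecated) ALAssetsLibrary API the answer already relies on:

- (void)writeExportedVideoToAssetsLibrary:(NSURL *)outputURL
{
    // Save the final merged movie into the device's Photos library.
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    [library writeVideoAtPathToSavedPhotosAlbum:outputURL
                                completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            NSLog(@"Saving to the Photos library failed: %@", error);
        } else {
            NSLog(@"Saved merged video: %@", assetURL);
        }
    }];
}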