Warning: file_get_contents(/data/phpspider/zhask/data//catemap/0/iphone/38.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Ios AVCaptureSession带文本导出_Ios_Iphone_Avcapturesession - Fatal编程技术网

Ios AVCaptureSession带文本导出

Ios AVCaptureSession带文本导出,ios,iphone,avcapturesession,Ios,Iphone,Avcapturesession,我正在使用AVCaptureSession捕获视频并将其保存到cameraroll,一切正常,但问题是如何在导出时将其导出并写入文本,甚至在导出时放置图像?谢谢看看avmutablevideocompositionlayerrinstruction你基本上需要在你的视频中添加层指令,我不记得链接了,但是有一个苹果WWDC视频是关于这个的。我已经设法在视频上写了一个图像和一个catextlayer在导出捆绑电影时,一切都很好,但是现在我如何才能将文本写入特定的框架?(时间) 这是我的代码 - (I

我正在使用AVCaptureSession捕获视频并将其保存到cameraroll,一切正常,但问题是如何在导出时将其导出并写入文本,甚至在导出时放置图像?谢谢

看看
AVMutableVideoCompositionLayerInstruction
你基本上需要在你的视频中添加层指令,我不记得链接了,但是有一个苹果WWDC视频是关于这个的。

我已经设法在导出合成影片时在视频上叠加了一张图像和一个 CATextLayer，一切都很好。但是现在我如何才能让文本只在特定的时间段（帧）内显示？

这是我的代码

/// Builds a composition from the bundled "movie.mov", overlays a watermark
/// image and a CATextLayer via AVVideoCompositionCoreAnimationTool, and
/// exports the result to Documents/mynewwatermarkedvideo.mp4.
/// Calls -exportDidFinish: on the main queue when the export ends.
- (IBAction)btn1:(id)sender {
    // Guard against a missing bundle resource: -fileURLWithPath: raises on nil.
    NSString *filePath = [[NSBundle mainBundle] pathForResource:@"movie" ofType:@"mov"];
    if (filePath == nil) {
        NSLog(@"movie.mov not found in the app bundle");
        return;
    }
    NSLog(@"%@", filePath);

    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:filePath]
                                                     options:nil];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // -firstObject is nil-safe, unlike objectAtIndex:0 which throws on an
    // empty track array.
    AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (clipVideoTrack == nil) {
        NSLog(@"Asset has no video track");
        return;
    }

    AVMutableCompositionTrack *compositionVideoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];

    NSError *error = nil;
    if (![compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                        ofTrack:clipVideoTrack
                                         atTime:kCMTimeZero
                                          error:&error]) {
        NSLog(@"Failed to insert video track: %@", error);
        return;
    }
    // Preserve the capture orientation (portrait / front-camera recordings).
    [compositionVideoTrack setPreferredTransform:clipVideoTrack.preferredTransform];

    // Audio is optional — not every capture contains an audio track, and the
    // original code crashed here when none was present.
    AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (clipAudioTrack != nil) {
        AVMutableCompositionTrack *audioTrack1 =
            [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                        preferredTrackID:kCMPersistentTrackID_Invalid];
        if (![audioTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                  ofTrack:clipAudioTrack
                                   atTime:kCMTimeZero
                                    error:&error]) {
            // Non-fatal: export the video without sound rather than abort.
            NSLog(@"Failed to insert audio track: %@", error);
        }
    }

    CGSize videoSize = [clipVideoTrack naturalSize];

    // Watermark image layer, placed near the top-right corner of the frame.
    UIImage *myImage = [UIImage imageNamed:@"close@2x.png"];
    CALayer *aLayer = [CALayer layer];
    aLayer.contents = (id)myImage.CGImage;
    aLayer.frame = CGRectMake(videoSize.width - 65, videoSize.height - 75, 57, 57);
    aLayer.opacity = 0.65;

    // Layer tree for the animation tool: the rendered video frames are drawn
    // into videoLayer; every other sublayer of parentLayer composites on top.
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:aLayer];

    // Title text overlay. To restrict the text to a specific time range,
    // attach an opacity CAAnimation to this layer with
    // beginTime = AVCoreAnimationBeginTimeAtZero + <start offset> and
    // removedOnCompletion = NO.
    CATextLayer *titleLayer = [CATextLayer layer];
    titleLayer.string = @"Text goes here";
    titleLayer.font = (__bridge CFTypeRef)(@"Helvetica");
    titleLayer.fontSize = videoSize.height / 6;
    titleLayer.alignmentMode = kCAAlignmentCenter;
    titleLayer.bounds = CGRectMake(0, 0, videoSize.width, videoSize.height / 6);
    [parentLayer addSublayer:titleLayer];

    AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
    videoComp.renderSize = videoSize;
    videoComp.frameDuration = CMTimeMake(1, 30); // 30 fps
    videoComp.animationTool =
        [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                                                     inLayer:parentLayer];

    // Single instruction spanning the full composition duration.
    AVMutableVideoCompositionInstruction *instruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] firstObject];
    AVMutableVideoCompositionLayerInstruction *layerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    instruction.layerInstructions = @[layerInstruction];
    videoComp.instructions = @[instruction];

    // Export to the Documents directory, replacing any stale output first
    // (AVAssetExportSession fails if the output file already exists).
    AVAssetExportSession *assetExport =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetHighestQuality];
    assetExport.videoComposition = videoComp;

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths firstObject];
    NSString *videoName = [documentsDirectory stringByAppendingPathComponent:@"mynewwatermarkedvideo.mp4"];
    NSURL *exportUrl = [NSURL fileURLWithPath:videoName];

    if ([[NSFileManager defaultManager] fileExistsAtPath:videoName]) {
        [[NSFileManager defaultManager] removeItemAtPath:videoName error:nil];
    }

    assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    assetExport.outputURL = exportUrl;
    assetExport.shouldOptimizeForNetworkUse = YES;

    // The completion handler runs on an arbitrary queue; hop to main before
    // touching UI. Weak self avoids retaining the controller for the whole
    // export if it is dismissed meanwhile.
    __weak typeof(self) weakSelf = self;
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            [weakSelf exportDidFinish:assetExport];
        });
    }];
}

/// Completion callback for the export session (invoked on the main queue).
/// Verifies the export actually succeeded before saving to the photo album,
/// and only reports success once the asset has been written.
/// Fix: the original showed a "Video is edited successfully" alert
/// unconditionally — before the asynchronous album save finished and even
/// when the export itself had failed.
- (void)exportDidFinish:(AVAssetExportSession *)session
{
    if (session.status != AVAssetExportSessionStatusCompleted) {
        // Export failed or was cancelled — surface the error instead of
        // attempting (and silently failing) the album save.
        NSLog(@"Export did not complete: %@", session.error);
        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error"
                                                        message:@"Video Saving Failed"
                                                       delegate:nil
                                              cancelButtonTitle:@"OK"
                                              otherButtonTitles:nil];
        [alert show];
        return;
    }

    NSURL *exportUrl = session.outputURL;
    NSLog(@"%@", exportUrl);

    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:exportUrl]) {
        [library writeVideoAtPathToSavedPhotosAlbum:exportUrl
                                    completionBlock:^(NSURL *assetURL, NSError *error) {
            // The library callback may arrive on a background queue.
            dispatch_async(dispatch_get_main_queue(), ^{
                if (error) {
                    UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error"
                                                                    message:@"Video Saving Failed"
                                                                   delegate:nil
                                                          cancelButtonTitle:@"OK"
                                                          otherButtonTitles:nil];
                    [alert show];
                } else {
                    UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved"
                                                                    message:@"Saved To Photo Album"
                                                                   delegate:self
                                                          cancelButtonTitle:@"OK"
                                                          otherButtonTitles:nil];
                    [alert show];
                }
            });
        }];
    } else {
        NSLog(@"Exported video is not compatible with the saved photos album");
    }
    NSLog(@"Completed");
}
视频来自 WWDC10 "Session 407 – Editing Media with AV Foundation"。