
iOS - Watermarking a video with a transparent video


I'm using AVFoundation to watermark one video with another, transparent video. I've successfully overlaid one video on top of another using the code below, but as soon as I use an asset with transparency, the export fails without any helpful error.

I've seen it mentioned that ProRes 4444 is the only codec AVFoundation supports with an alpha channel, but I can't find any official documentation on this. The overlay file I'm currently trying to add is an H.264-encoded mp4, which seemed like the best option after reading a discussion which states:

"The ProRes codecs are only available on OS X. If you're developing for iOS only, H264 is the only game in town."

I could always fall back to adding an animated layer instead of a video overlay (a sketch of that fallback follows the code below), but I'd be surprised if there were no solution.

- (void)addWatermarkToAsset:(NSURL *)assetURL completionHandler:(void (^)(NSURL *videoURL))handler
{
    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];

    // This asset contains an alpha channel, and has a shorter duration than videoAsset
    NSURL *animationUrl = [[NSBundle mainBundle] URLForResource:@"InstagramAnimation" withExtension:@"mp4"];
    AVURLAsset *animationAsset = [AVURLAsset URLAssetWithURL:animationUrl options:nil];

    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    // NOTE: insertion errors are silently discarded here (error:nil)
    [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                        ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                         atTime:kCMTimeZero error:nil];

    AVMutableCompositionTrack *animationTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                            preferredTrackID:kCMPersistentTrackID_Invalid];

    [animationTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, animationAsset.duration)
                            ofTrack:[[animationAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                             atTime:kCMTimeZero error:nil];

    // One instruction spans the full duration of the base video; the shorter
    // animation track simply stops contributing frames when it ends
    AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);

    AVMutableVideoCompositionLayerInstruction *videoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    CGAffineTransform scale = CGAffineTransformMakeScale(0.7f,0.7f);
    CGAffineTransform move = CGAffineTransformMakeTranslation(230,230);
    [videoLayerInstruction setTransform:CGAffineTransformConcat(scale, move) atTime:kCMTimeZero];

    AVMutableVideoCompositionLayerInstruction *animationLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:animationTrack];
    CGAffineTransform secondScale = CGAffineTransformMakeScale(1.2f,1.5f);
    CGAffineTransform secondMove = CGAffineTransformMakeTranslation(0,0);

    [animationLayerInstruction setTransform:CGAffineTransformConcat(secondScale, secondMove) atTime:kCMTimeZero];

    // The first layer instruction in this array is rendered frontmost
    mainInstruction.layerInstructions = [NSArray arrayWithObjects:videoLayerInstruction, animationLayerInstruction, nil];

    AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
    mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
    mainCompositionInst.frameDuration = CMTimeMake(1, 30);
    mainCompositionInst.renderSize = videoTrack.naturalSize;

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs =  [documentsDirectory stringByAppendingPathComponent:
                             [NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = mainCompositionInst;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:exporter];
        });
    }];
} 
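For reference, the animated-layer fallback mentioned in the question would go through AVVideoCompositionCoreAnimationTool. Below is a minimal sketch, not tested against the question's project: it would slot into the method above after mainCompositionInst is configured and before the exporter is created, replacing the second video track. The overlay image name and the fade animation are placeholders.

// Fallback sketch: drive the watermark as an animated CALayer instead of a
// second (transparent) video track. Assumes UIKit and Core Animation.
CGSize renderSize = mainCompositionInst.renderSize;
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, renderSize.width, renderSize.height);
videoLayer.frame = parentLayer.frame;

CALayer *overlayLayer = [CALayer layer];
overlayLayer.frame = parentLayer.frame;
overlayLayer.contents = (id)[UIImage imageNamed:@"InstagramAnimation"].CGImage; // placeholder

CABasicAnimation *fade = [CABasicAnimation animationWithKeyPath:@"opacity"];
fade.fromValue = @0.0;
fade.toValue = @1.0;
fade.duration = 1.0;
// Core Animation treats beginTime 0 as "now", so AVFoundation provides this constant
fade.beginTime = AVCoreAnimationBeginTimeAtZero;
fade.removedOnCompletion = NO;
[overlayLayer addAnimation:fade forKey:@"fadeIn"];

[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:overlayLayer];

// The tool renders the composition into videoLayer and composites parentLayer on top
mainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool
    videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                             inLayer:parentLayer];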

I did this by building:

1. a custom transparency format inside of H264, and
2. a custom (perforce) compositor for that transparency format.

The custom format was a "tall" video in which each color frame sat directly above its black-and-white mask.

The compositor pulled frames from both the watermark and the file to be watermarked, composited them, and wrote the result to a third file. Each pixel of each frame is interpreted as a YUV value: one serves as the color, the other as the mask, and the two are blended onto the background frame.
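To make that per-pixel math concrete, here is a rough CPU-side sketch of blending the luma plane of a "tall" frame onto a background frame. This is not the answer's actual GPU code; the plane layout (color in the top half, mask in the bottom half) and all names are assumptions.

#import <stdint.h>
#import <stddef.h>

// Hypothetical helper: blends the luma (Y) plane of a "tall" watermark frame
// (height = 2 * h) onto a background frame (height = h). The top half of the
// tall frame holds color pixels, the bottom half the grayscale mask.
static void BlendTallFrameLuma(const uint8_t *tall, size_t tallBytesPerRow,
                               uint8_t *background, size_t bgBytesPerRow,
                               size_t w, size_t h)
{
    for (size_t y = 0; y < h; y++) {
        const uint8_t *color = tall + y * tallBytesPerRow;        // top half: color
        const uint8_t *mask  = tall + (y + h) * tallBytesPerRow;  // bottom half: mask
        uint8_t *bg = background + y * bgBytesPerRow;
        for (size_t x = 0; x < w; x++) {
            // Classic alpha blend: out = a * src + (1 - a) * dst,
            // with the mask pixel acting as alpha in [0, 255]
            unsigned a = mask[x];
            bg[x] = (uint8_t)((a * color[x] + (255 - a) * bg[x]) / 255);
        }
    }
}

The chroma planes would be blended the same way at their subsampled resolution; the answer's real compositor did this per fragment on the GPU.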


To make it fast, the compositor was an OpenGL pixel shader plus texture caches, although I imagine today you'd use Metal and CVMetalTextureCaches.
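For reference, the CVMetalTextureCache route alluded to here might look roughly like this. A minimal sketch only: the BGRA pixel format is an assumption, and the helper names are made up.

#import <Metal/Metal.h>
#import <CoreVideo/CoreVideo.h>

// One-time setup: a texture cache bound to the Metal device
static CVMetalTextureCacheRef MakeTextureCache(id<MTLDevice> device)
{
    CVMetalTextureCacheRef cache = NULL;
    CVMetalTextureCacheCreate(kCFAllocatorDefault, NULL, device, NULL, &cache);
    return cache;
}

// Per frame: wrap a CVPixelBuffer in a Metal texture so the compositing
// shader can sample it. The caller must hold the returned CVMetalTextureRef
// until the GPU has finished with the frame, then CFRelease it (releasing
// early is a classic pitfall).
static CVMetalTextureRef WrapFrame(CVMetalTextureCacheRef cache,
                                   CVPixelBufferRef pixelBuffer)
{
    CVMetalTextureRef texture = NULL;
    CVMetalTextureCacheCreateTextureFromImage(
        kCFAllocatorDefault, cache, pixelBuffer, NULL,
        MTLPixelFormatBGRA8Unorm,                  // assumes a BGRA buffer
        CVPixelBufferGetWidth(pixelBuffer),
        CVPixelBufferGetHeight(pixelBuffer),
        0, &texture);
    return texture;   // CVMetalTextureGetTexture(texture) yields the id<MTLTexture>
}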

Thanks very much. I ended up converting the After Effects animation into a custom animation in code, since the OpenGL shader solution seemed too time-consuming for now. But I'll definitely try your solution.