Swift: how to merge a *single image* with a video


I am trying to combine a single video with a single image. This is not an attempt to combine many images into one video, as in other questions.

I am using AVMutableComposition to combine the tracks. My app is able to combine videos and images (but as it stands, combining videos works fine!). I try to use AVAssetWriter to turn a single image into a video (I believe this is where my problem is, but I'm not 100% sure). I then save it to the app (documents directory). From there, I access it inside my merger and combine a video with the image that has now become a video.

Flow:

User selects an image ->

Image goes into an AVAssetWriter to be turned into a video ->

Merge the video I already have preset with the new image-video ->

Result: 1 video made from the selected image and the preset video

My problem: my code is giving a blank space where the image-turned-video should be in the video. As in, the ImageConverter file I have does convert it to a video, but I only see the LAST frame as the image, while every other frame is transparent, as if the picture were not there. So if I convert the image to a 5-second video (say at 30 frames/sec), then I see blank space for (30*5)-1 frames, and only in the last frame does the picture finally appear. I am just looking for guidance on how to turn a single image into a video, OR how to combine a video and an image together WITHOUT converting the image into a video. Thanks!

The merge file:

func merge() {
    if let firstAsset = controller.firstAsset, secondAsset = self.asset {

        // 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
        let mixComposition = AVMutableComposition()

        let firstTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
                                                                     preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, CMTime(seconds: 8, preferredTimescale: 600)),
                                           ofTrack: firstAsset.tracksWithMediaType(AVMediaTypeVideo)[0] ,
                                           atTime: kCMTimeZero)
        } catch _ {
            print("Failed to load first track")
        }

        do {
            //HERE THE TIME IS 0.666667, BUT SHOULD BE 0
            print(CMTimeGetSeconds(secondAsset.duration), CMTimeGetSeconds(firstTrack.timeRange.duration))
            try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration),
                                            ofTrack: secondAsset.tracksWithMediaType(AVMediaTypeVideo)[0],
                                            atTime: firstTrack.timeRange.duration)
        } catch _ {
            print("Failed to load second track")
        }
        do {
            // `firstTrack` already contains both clips, so its current duration marks the insertion point
            try firstTrack.insertTimeRange(CMTimeRangeMake(CMTime(seconds: 8 + CMTimeGetSeconds(secondAsset.duration), preferredTimescale: 600), firstAsset.duration),
                                           ofTrack: firstAsset.tracksWithMediaType(AVMediaTypeVideo)[0],
                                           atTime: firstTrack.timeRange.duration)
        } catch _ {
            print("failed")
        }

        // 3 - Audio track
        if let loadedAudioAsset = controller.audioAsset {
            let audioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: 0)
            do {
                try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration),
                                               ofTrack: loadedAudioAsset.tracksWithMediaType(AVMediaTypeAudio)[0] ,
                                               atTime: kCMTimeZero)
            } catch _ {
                print("Failed to load Audio track")
            }
        }

        // 4 - Get path
        let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
        let dateFormatter = NSDateFormatter()
        dateFormatter.dateStyle = .LongStyle
        dateFormatter.timeStyle = .ShortStyle
        let date = dateFormatter.stringFromDate(NSDate())
        let savePath = (documentDirectory as NSString).stringByAppendingPathComponent("mergeVideo.mov")
        let url = NSURL(fileURLWithPath: savePath)
        _ = try? NSFileManager().removeItemAtURL(url)

        // 5 - Create Exporter
        print("exporting")
        guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
        exporter.outputURL = url
        exporter.outputFileType = AVFileTypeQuickTimeMovie
        exporter.shouldOptimizeForNetworkUse = false
        exporter.videoComposition = mainComposition // an AVMutableVideoComposition configured elsewhere in this file

        // 6 - Perform the Export
        controller.currentlyEditing = true
        exporter.exportAsynchronouslyWithCompletionHandler() {
            dispatch_async(dispatch_get_main_queue()) {
                print("done")
                self.controller.currentlyEditing = false
                self.controller.merged = true
                self.button.blurView.superview?.hidden = true
                self.controller.player.replaceCurrentItemWithPlayerItem(AVPlayerItem(URL: url))
                self.controller.firstAsset = AVAsset(URL: url)
            }
        }
    }
}
func exportDidFinish(session: AVAssetExportSession) {
    if session.status == AVAssetExportSessionStatus.Failed {
        print(session.error)
    }
    if session.status == AVAssetExportSessionStatus.Completed {
        print("succed")
    }
}
The image converter file:

class MyConverter: NSObject {

    var image:UIImage!

    convenience init(image:UIImage) {
        self.init()
        self.image = image
    }

    var outputURL: NSURL {
        let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
        let savePath = (documentDirectory as NSString).stringByAppendingPathComponent("mergeVideo-pic.mov")
        return getURL(savePath)
    }

    func getURL(path:String) -> NSURL {
        let movieDestinationUrl = NSURL(fileURLWithPath: path)
        _ = try? NSFileManager().removeItemAtURL(movieDestinationUrl)
        let url = NSURL(fileURLWithPath: path)
        return url
    }

    func build(completion:() -> Void) {
        guard let videoWriter = try? AVAssetWriter(URL: outputURL, fileType: AVFileTypeQuickTimeMovie) else {
            fatalError("AVAssetWriter error")
        }
        let outputSettings = [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : NSNumber(float: Float(image.size.width)), AVVideoHeightKey : NSNumber(float: Float(image.size.height))]

        guard videoWriter.canApplyOutputSettings(outputSettings, forMediaType: AVMediaTypeVideo) else {
            fatalError("Negative : Can't apply the Output settings...")
        }

        let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
        let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(unsignedInt: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(float: Float(image.size.width)), kCVPixelBufferHeightKey as String: NSNumber(float: Float(image.size.height))]
        let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)

        if videoWriter.canAddInput(videoWriterInput) {
            videoWriter.addInput(videoWriterInput)
        }

        if videoWriter.startWriting() {
            videoWriter.startSessionAtSourceTime(kCMTimeZero)
            assert(pixelBufferAdaptor.pixelBufferPool != nil)
        }

        let media_queue = dispatch_queue_create("mediaInputQueue", nil)

        videoWriterInput.requestMediaDataWhenReadyOnQueue(media_queue, usingBlock: { () -> Void in
            var appendSucceeded = true
            //Time HERE IS ZERO, but in Merge file, it is 0.66667
            let presentationTime = CMTimeMake(0, 600)

            var pixelBuffer: CVPixelBuffer? = nil
            let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)

            if let pixelBuffer = pixelBuffer where status == 0 {
                let managedPixelBuffer = pixelBuffer
                CVPixelBufferLockBaseAddress(managedPixelBuffer, 0)

                let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
                let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
                let context = CGBitmapContextCreate(data, Int(self.image.size.width), Int(self.image.size.height), 8, CVPixelBufferGetBytesPerRow(managedPixelBuffer), rgbColorSpace, CGImageAlphaInfo.PremultipliedFirst.rawValue)

                CGContextClearRect(context, CGRectMake(0, 0, CGFloat(self.image.size.width), CGFloat(self.image.size.height)))

                CGContextDrawImage(context, CGRectMake(0, 0, self.image.size.width, self.image.size.height), self.image.CGImage)

                CVPixelBufferUnlockBaseAddress(managedPixelBuffer, 0)

                appendSucceeded = pixelBufferAdaptor.appendPixelBuffer(pixelBuffer, withPresentationTime: presentationTime)
            } else {
                print("Failed to allocate pixel buffer")
                appendSucceeded = false
            }
            if !appendSucceeded {
                print("append failed")
            }
            videoWriterInput.markAsFinished()
            videoWriter.finishWritingWithCompletionHandler { () -> Void in
                print("FINISHED!!!!!")
                completion()
            }
        })
    }
}
NOTE: I found that if I do a print(presentationTime) inside the ImageConverter it prints 0, and then when I print the time during the merge, I get 0.666667.
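
One way to see where each clip actually landed is to dump the composition track's segments right after the inserts. A small debugging sketch, using the firstTrack from the merge code above:

// Log the target time range of every segment in the composition track
for segment in firstTrack.segments {
    CMTimeRangeShow(segment.timeMapping.target)
    print(CMTimeGetSeconds(segment.timeMapping.target.start),
          CMTimeGetSeconds(segment.timeMapping.target.duration))
}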


NOTE: There is no answer yet, but I will keep putting a bounty on this question until I figure it out or someone else helps me! Thanks!

Alright, so I dealt with this exact problem a while back. The issue is indeed in how you create the video from the picture. What you need to do is append the pixel buffer at time zero and then append it AGAIN at the end; otherwise you end up with an empty video until the very last frame, just like you are experiencing.

The code below is my best attempt at updating your code. At the very end I post my solution, which is in Objective-C, in case it helps anyone else.

func build(completion:() -> Void) {
    guard let videoWriter = try? AVAssetWriter(URL: outputURL, fileType: AVFileTypeQuickTimeMovie) else {
        fatalError("AVAssetWriter error")
    }

    // This might not be a problem for you but width HAS to be divisible by 16 or the movie will come out distorted... don't ask me why. So this is a safeguard
    let pixelsToRemove = fmod(image.size.width, 16)
    let pixelsToAdd = 16 - pixelsToRemove
    let size = CGSizeMake(image.size.width + pixelsToAdd, image.size.height)

    let outputSettings = [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : NSNumber(float: Float(size.width)), AVVideoHeightKey : NSNumber(float: Float(size.height))]

    guard videoWriter.canApplyOutputSettings(outputSettings, forMediaType: AVMediaTypeVideo) else {
        fatalError("Negative : Can't apply the Output settings...")
    }

    let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
    let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(unsignedInt: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(float: Float(size.width)), kCVPixelBufferHeightKey as String: NSNumber(float: Float(size.height))]
    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)

    if videoWriter.canAddInput(videoWriterInput) {
        videoWriter.addInput(videoWriterInput)
    }

    if videoWriter.startWriting() {
        videoWriter.startSessionAtSourceTime(kCMTimeZero)
        assert(pixelBufferAdaptor.pixelBufferPool != nil)
    }

    // For simplicity, I'm going to remove the media queue you created and instead explicitly wait until I can append since i am only writing one pixel buffer at two different times

    var pixelBufferCreated = true
    var pixelBuffer: CVPixelBuffer? = nil
    let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)

    if let pixelBuffer = pixelBuffer where status == 0 {
        let managedPixelBuffer = pixelBuffer
        CVPixelBufferLockBaseAddress(managedPixelBuffer, 0)

        let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let context = CGBitmapContextCreate(data, Int(size.width), Int(size.height), 8, CVPixelBufferGetBytesPerRow(managedPixelBuffer), rgbColorSpace, CGImageAlphaInfo.PremultipliedFirst.rawValue)

        CGContextClearRect(context, CGRectMake(0, 0, CGFloat(size.width), CGFloat(size.height)))

        CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), self.image.CGImage)

        CVPixelBufferUnlockBaseAddress(managedPixelBuffer, 0)
    } else {
        print("Failed to allocate pixel buffer")
        pixelBufferCreated = false
    }

    if (pixelBufferCreated) {
        // Here is where the magic happens, we have our pixelBuffer it's time to start writing

        // FIRST - add at time zero
        var appendSucceeded = pixelBufferAdaptor.appendPixelBuffer(pixelBuffer!, withPresentationTime: kCMTimeZero)
        if (!appendSucceeded) {
            // something went wrong, up to you to handle. Should probably return so the rest of the code is not executed though
        }
        // SECOND - wait until the writer is ready for more data with an empty while
        while !videoWriterInput.readyForMoreMediaData {}

        // THIRD - make a CMTime with the desired length of your picture-video. I am going to arbitrarily make it 5 seconds here
        let frameTime: CMTime = CMTimeMake(5, 1) // 5 seconds

        // FOURTH - add the same exact pixel to the end of the video you are creating
        appendSucceeded = pixelBufferAdaptor.appendPixelBuffer(pixelBuffer!, withPresentationTime: frameTime)
        if (!appendSucceeded) {
            // something went wrong, up to you to handle. Should probably return so the rest of the code is not executed though
        }

        videoWriterInput.markAsFinished()
        videoWriter.endSessionAtSourceTime(frameTime)
        videoWriter.finishWritingWithCompletionHandler { () -> Void in
            if videoWriter.status != .Completed {
                // Error writing the video... handle appropriately 
            } else {
                print("FINISHED!!!!!")
                completion()
            }
        }
    }
}
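
For reference, a hypothetical call site for this converter (the selectedImage name and the call to self.merge() are assumptions, not from the original code):

let converter = MyConverter(image: selectedImage) // selectedImage: the UIImage the user picked
converter.build {
    // The picture-video now exists at converter.outputURL,
    // so it is safe to start the AVMutableComposition merge here.
    self.merge()
}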

How I did it in Obj-C. NOTE: I had to make some edits to make this self-contained, so this method returns a string holding the path the video WILL be written to. It returns before the video writing finishes, so it is possible to access the file before it is ready if you are not careful:

-(NSString *)makeMovieFromImageData:(NSData *)imageData {
    NSError *error;
    UIImage *image = [UIImage imageWithData:imageData];


    // width has to be divisible by 16 or the movie comes out distorted... don't ask me why
    double pixelsToRemove = fmod(image.size.width, 16);

    double pixelsToAdd = 16 - pixelsToRemove; 

    CGSize size = CGSizeMake(image.size.width+pixelsToAdd, image.size.height);

    BOOL hasFoundValidPath = NO;
    NSURL *tempFileURL;
    NSString *outputFile;

    while (!hasFoundValidPath) {

        NSString *guid = [[NSUUID new] UUIDString];
        outputFile = [NSString stringWithFormat:@"picture_%@.mp4", guid];

        NSString *outputDirectory = NSTemporaryDirectory();

        NSString *tempPath = [outputDirectory stringByAppendingPathComponent:outputFile];

        // Will fail if destination already has a file
        if ([[NSFileManager defaultManager] fileExistsAtPath:tempPath]) {
            continue;
        } else {
            hasFoundValidPath = YES;
        }
        tempFileURL = [NSURL fileURLWithPath:tempPath];
    }


    // Start writing
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:tempFileURL
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];

    if (error) {
       // handle error
    }

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput* writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                         outputSettings:videoSettings];

    NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                     sourcePixelBufferAttributes:bufferAttributes];
    if ([videoWriter canAddInput:writerInput]) {
        [videoWriter addInput:writerInput];
    } else {
        // handle error
    }

    [videoWriter startWriting];

    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CGImageRef img = [image CGImage];

    // Now I am going to create the pixelBuffer
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                            [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                            [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                            nil];
    CVPixelBufferRef buffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
                                          size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
                                          &buffer);

    if ( !(status == kCVReturnSuccess && buffer != NULL) ) {
        NSLog(@"There be some issue. We didn't get a buffer from the image");
    }


    CVPixelBufferLockBaseAddress(buffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(buffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();

    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4*size.width, rgbColorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaPremultipliedFirst);
    CGContextSetRGBFillColor(context, 0, 0, 0, 0);

    CGContextConcatCTM(context, CGAffineTransformIdentity);

    CGContextDrawImage(context, CGRectMake(0, 0, size.width,
                                           size.height), img);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(buffer, 0);

    // At this point we have our buffer so we are going to start by adding to time zero

    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

    while (!writerInput.readyForMoreMediaData) {} // wait until ready

    CMTime frameTime = CMTimeMake(5, 1); // 5 second frame

    [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
    CFRelease(buffer);

    [writerInput markAsFinished];

    [videoWriter endSessionAtSourceTime:frameTime];

    [videoWriter finishWritingWithCompletionHandler:^{
        if (videoWriter.status != AVAssetWriterStatusCompleted) {
            // Error
        }
    }]; // end videoWriter finishWriting Block

    // NOTE: the URL is actually being returned before the videoWriter finishes writing so be careful to not access it until it's ready
    return outputFile;
}
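
If the early return worries you, the same two-frame trick can be wrapped so the URL is only handed out after writing actually finishes. A minimal Swift 3 sketch of that shape (all names here are hypothetical, not from the original code):

import AVFoundation
import UIKit

// Hypothetical completion-based variant: the caller only sees the URL
// once AVAssetWriter reports that writing completed.
func writeStillVideo(from image: UIImage, seconds: Int64 = 5, completion: @escaping (URL?) -> Void) {
    let url = URL(fileURLWithPath: NSTemporaryDirectory())
        .appendingPathComponent("picture_\(UUID().uuidString).mov")
    guard let cgImage = image.cgImage,
          let writer = try? AVAssetWriter(url: url, fileType: AVFileTypeQuickTimeMovie) else {
        completion(nil); return
    }
    // Pad the width up to a multiple of 16, as discussed above.
    let width = Int(ceil(image.size.width / 16) * 16)
    let height = Int(image.size.height)
    let input = AVAssetWriterInput(mediaType: AVMediaTypeVideo,
                                   outputSettings: [AVVideoCodecKey: AVVideoCodecH264,
                                                    AVVideoWidthKey: width,
                                                    AVVideoHeightKey: height])
    let adaptor = AVAssetWriterInputPixelBufferAdaptor(
        assetWriterInput: input,
        sourcePixelBufferAttributes: [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)])
    writer.add(input)
    writer.startWriting()
    writer.startSession(atSourceTime: kCMTimeZero)

    var buffer: CVPixelBuffer?
    guard let pool = adaptor.pixelBufferPool,
          CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &buffer) == kCVReturnSuccess,
          let pixelBuffer = buffer else { completion(nil); return }
    CVPixelBufferLockBaseAddress(pixelBuffer, [])
    // Draw the image into the buffer once; the same buffer is appended twice.
    if let context = CGContext(data: CVPixelBufferGetBaseAddress(pixelBuffer),
                               width: width, height: height, bitsPerComponent: 8,
                               bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
                               space: CGColorSpaceCreateDeviceRGB(),
                               bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue) {
        context.draw(cgImage, in: CGRect(x: 0, y: 0, width: width, height: height))
    }
    CVPixelBufferUnlockBaseAddress(pixelBuffer, [])

    _ = adaptor.append(pixelBuffer, withPresentationTime: kCMTimeZero)   // frame at time zero
    while !input.isReadyForMoreMediaData { usleep(100) }
    let endTime = CMTimeMake(seconds, 1)
    _ = adaptor.append(pixelBuffer, withPresentationTime: endTime)       // same frame at the end

    input.markAsFinished()
    writer.endSession(atSourceTime: endTime)
    writer.finishWriting {
        completion(writer.status == .completed ? url : nil)
    }
}

This mirrors the Objective-C version above, but the file path is never exposed before the writer is done with it.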

I hope this works for you :-) (This approach overlays the image on the video with a CALayer and exports with SDAVAssetExportSession, a third-party export session.)

-(void)MixVideo:(NSString *)vidioUrlString withImage:(UIImage *)img
{
    NSURL *videoUrl1 = [[NSURL alloc] initFileURLWithPath:vidioUrlString];
    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoUrl1 options:nil];

    AVMutableComposition* mixComposition = [AVMutableComposition composition];

    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

    AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];


    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];

    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];

    [compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];

    CGSize sizeOfVideo = CGSizeMake(320, 568);

    //Image of watermark
    UIImage *myImage=img;

    CALayer *layerCa = [CALayer layer];

    layerCa.contents = (id)myImage.CGImage;
    layerCa.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);

    layerCa.opacity = 1.0;

    CALayer *parentLayer=[CALayer layer];

    CALayer *videoLayer=[CALayer layer];

    parentLayer.frame=CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);

    videoLayer.frame=CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    [parentLayer addSublayer:videoLayer];

    [parentLayer addSublayer:layerCa];

    AVMutableVideoComposition *videoComposition=[AVMutableVideoComposition videoComposition] ;

    videoComposition.frameDuration=CMTimeMake(1, 30);

    videoComposition.renderSize=sizeOfVideo;

    videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];

    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);

    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];

    videoComposition.instructions = [NSArray arrayWithObject: instruction];

    NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)objectAtIndex:0];

    NSString *finalPath = [documentsDirectory stringByAppendingPathComponent:@"myVideo.mp4"];

    if ([[NSFileManager defaultManager] fileExistsAtPath:finalPath])
    {
        [[NSFileManager defaultManager] removeItemAtPath:finalPath error:nil];
    }

    SDAVAssetExportSession *encoder = [SDAVAssetExportSession.alloc initWithAsset:mixComposition];
    encoder.outputFileType = AVFileTypeMPEG4;
    encoder.outputURL = [NSURL fileURLWithPath:finalPath];
    encoder.videoComposition=videoComposition;
    encoder.videoSettings = @
    {
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: @320,
    AVVideoHeightKey: @568,
    AVVideoCompressionPropertiesKey: @
        {
        AVVideoAverageBitRateKey: @900000,
        AVVideoProfileLevelKey: AVVideoProfileLevelH264MainAutoLevel,
        },
    };
    encoder.audioSettings = @
    {
    AVFormatIDKey: @(kAudioFormatMPEG4AAC),
    AVNumberOfChannelsKey: @2,
    AVSampleRateKey: @44100,
    AVEncoderBitRateKey: @128000,
    };

    [encoder exportAsynchronouslyWithCompletionHandler:^
     {

         if (encoder.status == AVAssetExportSessionStatusCompleted)
         {

             NSLog(@"Video export succeeded");
             if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(finalPath))
             {

                 NSLog(@"Video exported successfully path = %@ ",finalPath);
             }

         }
         else if (encoder.status == AVAssetExportSessionStatusCancelled)
         {
             NSLog(@"Video export cancelled");
         }
         else
         {
             NSLog(@"Video export failed with error: %@ (%ld)", encoder.error.localizedDescription, (long)encoder.error.code);
         }
     }];

}

Here is what worked for me for exporting a single image to a video (the video pans across the image rather than being static). Swift 3:

//
//  CXEImageToAssetURL.swift
//  CXEngine
//
//  Created by wulei on 16/12/14.
//  Copyright © 2016年 wulei. All rights reserved.
//

import Foundation
import AVFoundation
import UIKit
import Photos

fileprivate extension UIImage{
    func normalizedImage() -> UIImage?{
//        if self.imageOrientation == .up{
//            return self
//        }
        let factor = CGFloat(0.8)
        UIGraphicsBeginImageContextWithOptions(CGSize(width:self.size.width * factor, height: self.size.height * factor), false, self.scale)
        self.draw(in: CGRect(x: 0, y: 0, width: self.size.width * factor, height: self.size.height * factor))
        let normalImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return normalImage
    }

//    func clipImage() -> UIImage {

//        var x = CGFloat(0)
//        var y = CGFloat(0)
//        let imageHeight = (self.size.width * 9) / 16
//        y = (self.size.height - imageHeight)/2
//        var rcTmp = CGRect(origin: CGPoint(x: x, y: y), size: self.size)
//        if self.scale > 1.0 {
//            rcTmp = CGRect(x: rcTmp.origin.x * self.scale, y: rcTmp.origin.y * self.scale, width: rcTmp.size.width * self.scale, height: rcTmp.size.height * self.scale)
//        }
//        rcTmp.size.height = imageHeight
//        let imageRef = self.cgImage!.cropping(to: rcTmp)
//        let result = UIImage(cgImage: imageRef!, scale: self.scale, orientation: self.imageOrientation)
//        return result
//        return self
//    }
}

public typealias CXEImageToVideoProgress = (Float) -> Void
typealias CXEMovieMakerUIImageExtractor = (AnyObject) -> UIImage?


public class CXEImageToVideo: NSObject{

    //MARK: Private Properties

    private var assetWriter:AVAssetWriter!
    private var writeInput:AVAssetWriterInput!
    private var bufferAdapter:AVAssetWriterInputPixelBufferAdaptor!
    private var videoSettings:[String : Any]!
    private var frameTime:CMTime!
    private var fileURL:URL!
    private var duration:Int = 0

    //MARK: Class Method

     private func videoSettingsFunc(width:Int, height:Int) -> [String: Any]{
        if(Int(width) % 16 != 0){
            print("warning: video settings width must be divisible by 16")
        }

        let videoSettings:[String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
                                           AVVideoWidthKey: width,
                                           AVVideoHeightKey: height]

        return videoSettings
    }

    //MARK: Public methods

    public init(fileURL: URL, videoWidth:Int, videoHeight:Int) {
        super.init()

        self.videoSettings = videoSettingsFunc(width: videoWidth, height: videoHeight)

        self.fileURL = fileURL
        self.assetWriter = try! AVAssetWriter(url: self.fileURL, fileType: AVFileTypeQuickTimeMovie)

        self.writeInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
        assert(self.assetWriter.canAdd(self.writeInput), "add failed")

        self.assetWriter.add(self.writeInput)
        let bufferAttributes:[String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)]
        self.bufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.writeInput, sourcePixelBufferAttributes: bufferAttributes)
        self.frameTime = CMTimeMake(1, 25)
    }

//    public func createMovieFrom(url: URL, duration:Int, progressExtractor: CXEImageToVideoProgress){
//        self.duration = duration
//        self.createMovieFromSource(image: url as AnyObject, extractor:{(inputObject:AnyObject) ->UIImage? in
//            return UIImage(data: try! Data(contentsOf: inputObject as! URL))}, progressExtractor: progressExtractor)
//    }

    public func createMovieFrom(imageData: Data, duration:Int, progressExtractor: CXEImageToVideoProgress){
        var image = UIImage(data: imageData)
        image = image?.normalizedImage()
        assert(image != nil)
        self.duration = duration

        self.createMovieFromSource(image: image!, extractor: {(inputObject:AnyObject) -> UIImage? in
            return inputObject as? UIImage}, progressExtractor: progressExtractor)
    }

    //MARK: Private methods

    private func createMovieFromSource(image: AnyObject, extractor: @escaping CXEMovieMakerUIImageExtractor, progressExtractor: CXEImageToVideoProgress){

        self.assetWriter.startWriting()
        let zeroTime = CMTimeMake(Int64(0),self.frameTime.timescale)
        self.assetWriter.startSession(atSourceTime: zeroTime)

        while !self.writeInput.isReadyForMoreMediaData {
            usleep(100)
        }

        var sampleBuffer:CVPixelBuffer?
        var pxDataBuffer:CVPixelBuffer?
        let img = extractor(image)
        assert(img != nil)

        let options:[String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true, kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
        let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int
        let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
        let originHeight = frameWidth * img!.cgImage!.height / img!.cgImage!.width
        let heightDifference = originHeight - frameHeight

        let frameCounts = self.duration * Int(self.frameTime.timescale)
        let spacingOfHeight = heightDifference / frameCounts

        sampleBuffer = self.newPixelBufferFrom(cgImage: img!.cgImage!)
        assert(sampleBuffer != nil)

        var presentTime = CMTimeMake(1, self.frameTime.timescale)
        var stepRows = 0

        for i in 0..<frameCounts {
            progressExtractor(Float(i) / Float(frameCounts))

            CVPixelBufferLockBaseAddress(sampleBuffer!, CVPixelBufferLockFlags(rawValue: 0))
            let pointer = CVPixelBufferGetBaseAddress(sampleBuffer!)
            var pxData = pointer?.assumingMemoryBound(to: UInt8.self)
            let bytes = CVPixelBufferGetBytesPerRow(sampleBuffer!) * stepRows
            pxData = pxData?.advanced(by: bytes)

            let status = CVPixelBufferCreateWithBytes(kCFAllocatorDefault, frameWidth, frameHeight, kCVPixelFormatType_32ARGB, pxData!, CVPixelBufferGetBytesPerRow(sampleBuffer!), nil, nil, options as CFDictionary?, &pxDataBuffer)
            assert(status == kCVReturnSuccess && pxDataBuffer != nil, "newPixelBuffer failed")
            CVPixelBufferUnlockBaseAddress(sampleBuffer!, CVPixelBufferLockFlags(rawValue: 0))

            while !self.writeInput.isReadyForMoreMediaData {
                usleep(100)
            }
            if (self.writeInput.isReadyForMoreMediaData){
                if i == 0{
                    self.bufferAdapter.append(pxDataBuffer!, withPresentationTime: zeroTime)
                }else{
                    self.bufferAdapter.append(pxDataBuffer!, withPresentationTime: presentTime)
                }
                presentTime = CMTimeAdd(presentTime, self.frameTime)
            }

            stepRows += spacingOfHeight
        }


        self.writeInput.markAsFinished()
        self.assetWriter.finishWriting {}

        var isSuccess:Bool = false
        while(!isSuccess){
            switch self.assetWriter.status {
            case .completed:
                isSuccess = true
                print("completed")
            case .writing:
                usleep(100)
                print("writing")
            case .failed:
                isSuccess = true
                print("failed")
            case .cancelled:
                isSuccess = true
                print("cancelled")
            default:
                isSuccess = true
                print("unknown")
            }
        }
    }

    private func newPixelBufferFrom(cgImage:CGImage) -> CVPixelBuffer?{
        let options:[String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true, kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
        var pxbuffer:CVPixelBuffer?
        let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
        let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int

        let originHeight = frameWidth * cgImage.height / cgImage.width

        let status = CVPixelBufferCreate(kCFAllocatorDefault, frameWidth, originHeight, kCVPixelFormatType_32ARGB, options as CFDictionary?, &pxbuffer)
        assert(status == kCVReturnSuccess && pxbuffer != nil, "newPixelBuffer failed")

        CVPixelBufferLockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        let pxdata = CVPixelBufferGetBaseAddress(pxbuffer!)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let context = CGContext(data: pxdata, width: frameWidth, height: originHeight, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pxbuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
        assert(context != nil, "context is nil")

        context!.concatenate(CGAffineTransform.identity)
        context!.draw(cgImage, in: CGRect(x: 0, y: 0, width: frameWidth, height: originHeight))
        CVPixelBufferUnlockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        return pxbuffer
    }
}
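
A hypothetical call site for this class (the output URL, dimensions, and someImage are assumptions; note the class itself warns that the width must be divisible by 16):

let outputURL = URL(fileURLWithPath: NSTemporaryDirectory())
    .appendingPathComponent("stillImage.mov")
let maker = CXEImageToVideo(fileURL: outputURL, videoWidth: 640, videoHeight: 360)
let imageData = UIImagePNGRepresentation(someImage)! // someImage: a UIImage you already have
maker.createMovieFrom(imageData: imageData, duration: 5) { progress in
    print("progress: \(progress)") // 0.0 ... 1.0
}
// The class busy-waits until the writer finishes, so outputURL is ready here.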
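Finally, here is a Swift 3 approach that skips converting the image to a video entirely and instead overlays it on the existing video with AVVideoCompositionCoreAnimationTool:
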
@discardableResult func merge(
    video videoPath: String,
    withForegroundImage foregroundImage: UIImage,
    completion: @escaping (AVAssetExportSession) -> Void) -> AVAssetExportSession {

    let videoUrl = URL(fileURLWithPath: videoPath)
    let videoUrlAsset = AVURLAsset(url: videoUrl, options: nil)

    // Setup `mutableComposition` from the existing video
    let mutableComposition = AVMutableComposition()
    let videoAssetTrack = videoUrlAsset.tracks(withMediaType: AVMediaTypeVideo).first!
    let videoCompositionTrack = mutableComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    videoCompositionTrack.preferredTransform = videoAssetTrack.preferredTransform
    try! videoCompositionTrack.insertTimeRange(CMTimeRange(start:kCMTimeZero, duration:videoAssetTrack.timeRange.duration), of: videoAssetTrack, at: kCMTimeZero)
    let audioAssetTrack = videoUrlAsset.tracks(withMediaType: AVMediaTypeAudio).first!
    let audioCompositionTrack = mutableComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    try! audioCompositionTrack.insertTimeRange(CMTimeRange(start: kCMTimeZero, duration:audioAssetTrack.timeRange.duration), of: audioAssetTrack, at: kCMTimeZero)

    // Create a `videoComposition` to represent the `foregroundImage`
    let videoSize: CGSize = videoCompositionTrack.naturalSize
    let frame = CGRect(x: 0.0, y: 0.0, width: videoSize.width, height: videoSize.height)
    let imageLayer = CALayer()
    imageLayer.contents = foregroundImage.cgImage
    imageLayer.frame = frame
    let videoLayer = CALayer()
    videoLayer.frame = frame
    let animationLayer = CALayer()
    animationLayer.frame = frame
    animationLayer.addSublayer(videoLayer)
    animationLayer.addSublayer(imageLayer)
    let videoComposition = AVMutableVideoComposition(propertiesOf: videoCompositionTrack.asset!)
    videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: animationLayer)

    // Export the video
    let documentDirectory = NSSearchPathForDirectoriesInDomains(FileManager.SearchPathDirectory.cachesDirectory, FileManager.SearchPathDomainMask.userDomainMask, true).first!
    let documentDirectoryUrl = URL(fileURLWithPath: documentDirectory)
    let destinationFilePath = documentDirectoryUrl.appendingPathComponent("video_\(NSUUID().uuidString).mov")
    let exportSession = AVAssetExportSession( asset: mutableComposition, presetName: AVAssetExportPresetHighestQuality)!
    exportSession.videoComposition = videoComposition
    exportSession.outputURL = destinationFilePath
    exportSession.outputFileType = AVFileTypeQuickTimeMovie
    exportSession.exportAsynchronously { [weak exportSession] in
        if let strongExportSession = exportSession {
            completion(strongExportSession)
        }
    }

    return exportSession
}
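
And a sketch of how it might be called (the path and image names are assumptions):

merge(video: "/path/to/preset.mov", withForegroundImage: overlayImage) { session in
    if session.status == .completed {
        print("merged video at \(session.outputURL!)")
    } else {
        print("export failed: \(String(describing: session.error))")
    }
}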