Ios 从[UIImage]创建电影,Swift

Ios 从[UIImage]创建电影,Swift,ios,swift,video,avfoundation,Ios,Swift,Video,Avfoundation,我找到了,但都在ObjectiveC中,我无法为Swift找到答案 我需要从[UIImage]创建一个视频 在上面的链接中处理Zoul的答案。第1部分)给作者打电话 到目前为止,我已经: let paths = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask) let documentsURL = paths[0] as! NSURL le

我找到了,但都在ObjectiveC中,我无法为Swift找到答案

我需要从[UIImage]创建一个视频

在上面的链接中参考Zoul的答案。第1部分）创建并调用writer（AVAssetWriter）

到目前为止,我已经:

let paths = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
    let documentsURL = paths[0] as! NSURL
    let videoWriter:AVAssetWriter = AVAssetWriter(URL: documentsURL, fileType: AVFileTypeQuickTimeMovie, error: nil)

    var videoSettings: NSDictionary = NSDictionary(
我想不出他的正确的Swift版本

NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:640], AVVideoWidthKey,
[NSNumber numberWithInt:480], AVVideoHeightKey,
nil];


构建这个字典字面量非常简单：

import AVFoundation

let videoSettings = [
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: 640,
    AVVideoHeightKey: 480
]

至于其他方面,我鼓励你先通读苹果的《建立基本原理》,而不是依赖于SO或教程,这些教程恰好涵盖了你想要做的事情。“教人钓鱼”,正如他们所说。

我将“@Cameron E”发布的objective-c代码转换为Swift 3,它开始工作了。答案链接:

以下是CXEImagesToVideo类:

//
//  CXEImagesToVideo.swift
//  VideoAPPTest
//
//  Created by Wulei on 16/12/14.
//  Copyright © 2016 wulei. All rights reserved.
//

import Foundation
import AVFoundation
import UIKit

typealias CXEMovieMakerCompletion = (URL) -> Void
typealias CXEMovieMakerUIImageExtractor = (AnyObject) -> UIImage?


public class CXEImagesToVideo: NSObject{
    // AVAssetWriter pipeline. Implicitly unwrapped because everything is
    // configured once in init and is required by every later call.
    var assetWriter:AVAssetWriter!
    var writeInput:AVAssetWriterInput!
    var bufferAdapter:AVAssetWriterInputPixelBufferAdaptor!
    var videoSettings:[String : Any]!
    var frameTime:CMTime!   // duration of one frame (1/10 s by default)
    var fileURL:URL!        // destination of the rendered movie

    var completionBlock: CXEMovieMakerCompletion?
    var movieMakerUIImageExtractor:CXEMovieMakerUIImageExtractor?

    /// Builds an output-settings dictionary for AVAssetWriterInput.
    ///
    /// - Note: H.264 expects the width to be a multiple of 16; other widths
    ///   may be padded by the encoder and render skewed.
    /// - Note: the original version ignored `codec` and always hard-coded
    ///   H.264; the parameter is now honored (callers passed AVVideoCodecH264,
    ///   so existing behavior is unchanged).
    public class func videoSettings(codec:String, width:Int, height:Int) -> [String: Any]{
        if width % 16 != 0 {
            print("warning: video settings width must be divisible by 16")
        }
        return [AVVideoCodecKey: codec,
                AVVideoWidthKey: width,
                AVVideoHeightKey: height]
    }

    /// Creates a writer that renders to Documents/exprotvideo.mp4 (filename
    /// kept as-is for compatibility), removing any previous output first.
    public init(videoSettings: [String: Any]) {
        super.init()

        let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
        let tempPath = paths[0] + "/exprotvideo.mp4"
        if FileManager.default.fileExists(atPath: tempPath) {
            guard (try? FileManager.default.removeItem(atPath: tempPath)) != nil else {
                print("remove path failed")
                return
            }
        }

        self.fileURL = URL(fileURLWithPath: tempPath)
        // Writing to a fresh file path should not fail; a failure here is a
        // programmer error, so crashing via try! is acceptable.
        self.assetWriter = try! AVAssetWriter(url: self.fileURL, fileType: AVFileTypeQuickTimeMovie)

        self.videoSettings = videoSettings
        self.writeInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
        assert(self.assetWriter.canAdd(self.writeInput), "add failed")
        self.assetWriter.add(self.writeInput)

        let bufferAttributes:[String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)]
        self.bufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.writeInput, sourcePixelBufferAttributes: bufferAttributes)
        self.frameTime = CMTimeMake(1, 10)   // 10 fps
    }

    /// Renders a movie from image file URLs, decoding each lazily per frame.
    /// URLs that cannot be read or decoded are skipped (the original crashed
    /// on a failed `try! Data(contentsOf:)`).
    func createMovieFrom(urls: [URL], withCompletion: @escaping CXEMovieMakerCompletion){
        self.createMovieFromSource(images: urls as [AnyObject], extractor: { (inputObject:AnyObject) -> UIImage? in
            guard let url = inputObject as? URL,
                  let data = try? Data(contentsOf: url) else { return nil }
            return UIImage(data: data)
        }, withCompletion: withCompletion)
    }

    /// Renders a movie from in-memory images.
    func createMovieFrom(images: [UIImage], withCompletion: @escaping CXEMovieMakerCompletion){
        self.createMovieFromSource(images: images, extractor: { (inputObject:AnyObject) -> UIImage? in
            return inputObject as? UIImage
        }, withCompletion: withCompletion)
    }

    /// Core render loop: extracts a UIImage for every source object, converts
    /// it to a pixel buffer and appends it at `frameTime` intervals. Sources
    /// that fail to produce an image are skipped instead of crashing (the
    /// original fell through to a force-unwrap of a nil image).
    func createMovieFromSource(images: [AnyObject], extractor: @escaping CXEMovieMakerUIImageExtractor, withCompletion: @escaping CXEMovieMakerCompletion){
        self.completionBlock = withCompletion

        self.assetWriter.startWriting()
        self.assetWriter.startSession(atSourceTime: kCMTimeZero)

        let mediaInputQueue = DispatchQueue(label: "mediaInputQueue")

        self.writeInput.requestMediaDataWhenReady(on: mediaInputQueue) {
            var frameIndex = 0   // index of the next frame actually appended
            for source in images {
                // Spin until the input accepts more data; mirrors the
                // original busy-wait, and rarely spins since this callback
                // only fires when the input is ready.
                while !self.writeInput.isReadyForMoreMediaData {}

                var sampleBuffer: CVPixelBuffer?
                autoreleasepool {
                    if let image = extractor(source), let cgImage = image.cgImage {
                        sampleBuffer = self.newPixelBufferFrom(cgImage: cgImage)
                    } else {
                        print("Warning: could not extract one of the frames")
                    }
                }

                if let buffer = sampleBuffer {
                    let presentTime: CMTime
                    if frameIndex == 0 {
                        presentTime = kCMTimeZero
                    } else {
                        // Frame N is shown at N * frameTime.
                        let lastTime = CMTimeMake(Int64(frameIndex - 1), self.frameTime.timescale)
                        presentTime = CMTimeAdd(lastTime, self.frameTime)
                    }
                    self.bufferAdapter.append(buffer, withPresentationTime: presentTime)
                    frameIndex += 1
                }
            }
            self.writeInput.markAsFinished()
            self.assetWriter.finishWriting {
                DispatchQueue.main.sync {
                    self.completionBlock!(self.fileURL)
                }
            }
        }
    }

    /// Draws `cgImage` into a new 32ARGB pixel buffer sized from the video
    /// settings. The image is drawn at its own pixel size anchored at the
    /// origin (no scaling), matching the original behavior.
    func newPixelBufferFrom(cgImage:CGImage) -> CVPixelBuffer?{
        let options:[String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true,
                                     kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
        var pxbuffer:CVPixelBuffer?
        let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
        let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int

        let status = CVPixelBufferCreate(kCFAllocatorDefault, frameWidth, frameHeight, kCVPixelFormatType_32ARGB, options as CFDictionary?, &pxbuffer)
        assert(status == kCVReturnSuccess && pxbuffer != nil, "newPixelBuffer failed")

        CVPixelBufferLockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        // Guarantee the buffer is unlocked on every exit path.
        defer { CVPixelBufferUnlockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0)) }

        let pxdata = CVPixelBufferGetBaseAddress(pxbuffer!)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        guard let context = CGContext(data: pxdata, width: frameWidth, height: frameHeight,
                                      bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pxbuffer!),
                                      space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue) else {
            assertionFailure("context is nil")
            return nil
        }

        context.draw(cgImage, in: CGRect(x: 0, y: 0, width: cgImage.width, height: cgImage.height))
        return pxbuffer
    }
}
用法:

        var uiImages = [UIImage]()

    /** add image to uiImages */

    let settings = CXEImagesToVideo.videoSettings(codec: AVVideoCodecH264, width: (uiImages[0].cgImage?.width)!, height: (uiImages[0].cgImage?.height)!)
    let movieMaker = CXEImagesToVideo(videoSettings: settings)
    movieMaker.createMovieFrom(images: uiImages){ (fileURL:URL) in
        let video = AVAsset(url: fileURL)
        let playerItem = AVPlayerItem(asset: video)
        let avPlayer = AVPlayer(playerItem: playerItem)
        let playerLayer = AVPlayerLayer(player: avPlayer)
        playerLayer.frame = CGRect(x: 0, y: 0, width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.width * 3.0 / 4.0)
        self.view.layer.addSublayer(playerLayer)
        avPlayer.play()
    }
使用fileURL导出或播放视频。 异步和同步有两种方式。要点:

**适用于swift 4.2**

  • 从图像生成视频并手动保存
  • 图片来自prev 控制器

（原答案此处插入了一段机器翻译后严重损坏、无法编译的代码副本——例如 "进口AVF基金会"、"一个作家" 等；已删去该损坏副本，完整的英文原版代码紧随其后。）

//  VideoMakerViewController.swift
// VideoMaker 
//Created by ISHA PATEL on 05/10/18. 
// Copyright © 2018 Isha Patel. All rights reserved.


import AVFoundation
import UIKit
import Photos
import AVKit
var tempurl=""

class VideoMakerViewController: UIViewController {

    /// Frames handed over by the previous controller.
    var images:[UIImage]=[]
    @IBOutlet weak var videoview: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()

        // Defer rendering one run-loop turn so the view hierarchy is in place
        // before the movie is built and displayed.
        DispatchQueue.main.async {
            let settings = RenderSettings()
            let imageAnimator = ImageAnimator(renderSettings: settings, imagearr: self.images)
            imageAnimator.render() {
                self.displayVideo()
            }
        }
    }

    /// Plays the rendered movie (path published through the global `tempurl`)
    /// inside `videoview` using an embedded AVPlayerViewController.
    func displayVideo()
    {
        let path: String = tempurl
        let player = AVPlayer(url: URL(fileURLWithPath: path))
        let playerController = AVPlayerViewController()
        playerController.player = player
        self.addChild(playerController)
        videoview.addSubview(playerController.view)
        playerController.view.frame.size = videoview.frame.size
        playerController.view.contentMode = .scaleAspectFit
        playerController.view.backgroundColor = UIColor.clear
        videoview.backgroundColor = UIColor.clear
        // Complete the containment handshake (was missing in the original).
        playerController.didMove(toParent: self)
        player.play()
    }

    /// Saves the rendered movie to the photo library after requesting
    /// authorization; silently does nothing if access is denied.
    @IBAction func save(_ sender: UIBarButtonItem) {
        PHPhotoLibrary.requestAuthorization { status in
            guard status == .authorized else { return }

            let path: String = tempurl
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: URL(fileURLWithPath: path))
            }) { success, error in
                if !success {
                    // Avoid the original force-unwrap: `error` is not
                    // guaranteed to be non-nil on failure.
                    print("Could not save video to photo library:", error?.localizedDescription ?? "unknown error")
                }
            }
        }
    }
}

/// Configuration for the movie renderer: output dimensions, frame rate,
/// codec and destination filename (written to the Caches directory).
struct RenderSettings {

    var width: CGFloat = 1500
    var height: CGFloat = 844
    var fps: Int32 = 2   // 2 frames per second
    var avCodecKey = AVVideoCodecType.h264
    var videoFilename = "renderExportVideo"
    var videoFilenameExt = "mp4"

    /// Output dimensions as a CGSize.
    var size: CGSize {
        return CGSize(width: width, height: height)
    }

    /// Full destination URL inside the user Caches directory.
    var outputURL: NSURL {
        guard let cachesDir = try? FileManager.default.url(for: .cachesDirectory,
                                                           in: .userDomainMask,
                                                           appropriateFor: nil,
                                                           create: true) else {
            fatalError("URLForDirectory() failed")
        }
        return cachesDir.appendingPathComponent(videoFilename)
                        .appendingPathExtension(videoFilenameExt) as NSURL
    }
}

/// Thin wrapper around AVAssetWriter that accepts UIImages and appends them
/// as pixel-buffer frames. Call start(), then render(appendPixelBuffers:completion:).
class VideoWriter {

    let renderSettings: RenderSettings

    // Configured by start(); implicitly unwrapped because every other method
    // requires start() to have run (enforced by preconditions below).
    var videoWriter: AVAssetWriter!
    var videoWriterInput: AVAssetWriterInput!
    var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor!

    /// True once start() has run and the input can accept another frame.
    var isReadyForData: Bool {
        return videoWriterInput?.isReadyForMoreMediaData ?? false
    }

    /// Draws `image` aspect-fit into a pooled 32ARGB pixel buffer of `size`,
    /// centered, with the background cleared to transparent.
    class func pixelBufferFromImage(image: UIImage, pixelBufferPool: CVPixelBufferPool, size: CGSize) -> CVPixelBuffer {

        var pixelBufferOut: CVPixelBuffer?
        let status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &pixelBufferOut)
        if status != kCVReturnSuccess {
            fatalError("CVPixelBufferPoolCreatePixelBuffer() failed")
        }

        let pixelBuffer = pixelBufferOut!

        CVPixelBufferLockBaseAddress(pixelBuffer, [])
        // Guarantee the buffer is unlocked on every exit path.
        defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, []) }

        let data = CVPixelBufferGetBaseAddress(pixelBuffer)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        // Replaces the original `context!` force-unwraps with a checked unwrap.
        guard let context = CGContext(data: data, width: Int(size.width), height: Int(size.height),
                                      bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
                                      space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue),
              let cgImage = image.cgImage else {
            fatalError("CGContext or CGImage creation failed")
        }

        context.clear(CGRect(x: 0, y: 0, width: size.width, height: size.height))

        // ScaleAspectFit: scale uniformly so the whole image fits, then center.
        let horizontalRatio = size.width / image.size.width
        let verticalRatio = size.height / image.size.height
        let aspectRatio = min(horizontalRatio, verticalRatio)

        let newSize = CGSize(width: image.size.width * aspectRatio, height: image.size.height * aspectRatio)

        let x = newSize.width < size.width ? (size.width - newSize.width) / 2 : 0
        let y = newSize.height < size.height ? (size.height - newSize.height) / 2 : 0

        context.draw(cgImage, in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))

        return pixelBuffer
    }

    init(renderSettings: RenderSettings) {
        self.renderSettings = renderSettings
    }

    /// Configures the writer, input and pixel-buffer adaptor, then starts the
    /// session at time zero. Must be called before render() or addImage().
    func start() {

        let avOutputSettings: [String: AnyObject] = [
            AVVideoCodecKey: renderSettings.avCodecKey as AnyObject,
            AVVideoWidthKey: NSNumber(value: Float(renderSettings.width)),
            AVVideoHeightKey: NSNumber(value: Float(renderSettings.height))
        ]

        // Attach an adaptor so frames can be supplied as CVPixelBuffers.
        func createPixelBufferAdaptor() {
            let sourcePixelBufferAttributesDictionary = [
                kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32ARGB),
                kCVPixelBufferWidthKey as String: NSNumber(value: Float(renderSettings.width)),
                kCVPixelBufferHeightKey as String: NSNumber(value: Float(renderSettings.height))
            ]
            pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput,
                                                                      sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
        }

        func createAssetWriter(outputURL: NSURL) -> AVAssetWriter {
            guard let assetWriter = try? AVAssetWriter(outputURL: outputURL as URL, fileType: AVFileType.mp4) else {
                fatalError("AVAssetWriter() failed")
            }

            guard assetWriter.canApply(outputSettings: avOutputSettings, forMediaType: AVMediaType.video) else {
                fatalError("canApplyOutputSettings() failed")
            }

            return assetWriter
        }

        videoWriter = createAssetWriter(outputURL: renderSettings.outputURL)
        videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: avOutputSettings)

        guard videoWriter.canAdd(videoWriterInput) else {
            fatalError("canAddInput() returned false")
        }
        videoWriter.add(videoWriterInput)

        createPixelBufferAdaptor()

        guard videoWriter.startWriting() else {
            fatalError("startWriting() failed")
        }

        videoWriter.startSession(atSourceTime: CMTime.zero)

        // The pool is created by startWriting(); nil here means misconfiguration.
        precondition(pixelBufferAdaptor.pixelBufferPool != nil, "nil pixelBufferPool")
    }

    /// Repeatedly invokes `appendPixelBuffers` whenever the input is ready;
    /// once it reports completion (returns true), finalizes the file and
    /// calls `completion` on the main queue.
    func render(appendPixelBuffers: @escaping (VideoWriter)->Bool, completion: @escaping ()->Void) {

        precondition(videoWriter != nil, "Call start() to initialize the writer")

        let queue = DispatchQueue(label: "mediaInputQueue")
        videoWriterInput.requestMediaDataWhenReady(on: queue) {
            let isFinished = appendPixelBuffers(self)
            if isFinished {
                self.videoWriterInput.markAsFinished()
                self.videoWriter.finishWriting() {
                    DispatchQueue.main.async {
                        completion()
                    }
                }
            }
            // Not finished: the system invokes this block again when the
            // input is ready for more media data. (Removed the original
            // empty else branch.)
        }
    }

    /// Appends one frame at `presentationTime`; returns false if the adaptor
    /// rejected the buffer.
    func addImage(image: UIImage, withPresentationTime presentationTime: CMTime) -> Bool {

        precondition(pixelBufferAdaptor != nil, "Call start() to initialize the writer")

        let pixelBuffer = VideoWriter.pixelBufferFromImage(image: image, pixelBufferPool: pixelBufferAdaptor.pixelBufferPool!, size: renderSettings.size)
        return pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
    }

}

/// Drives a VideoWriter over a queue of UIImages: clears any stale output
/// file, renders every frame at a fixed rate, and publishes the finished
/// file's path through the global `tempurl` before signalling completion.
class ImageAnimator{

    // Timescale used for all frame timestamps.
    static let kTimescale: Int32 = 600

    let settings: RenderSettings
    let videoWriter: VideoWriter
    var images: [UIImage]!

    // Index of the next frame to append; drives the presentation timestamp.
    var frameNum = 0

    /// Best-effort removal of a previous output file; a missing file is fine.
    class func removeFileAtURL(fileURL: NSURL) {
        do {
            try FileManager.default.removeItem(atPath: fileURL.path!)
        }
        catch _ as NSError {
            // Ignore: the file usually does not exist yet.
        }
    }

    init(renderSettings: RenderSettings,imagearr: [UIImage]) {
        settings = renderSettings
        videoWriter = VideoWriter(renderSettings: settings)
        images = imagearr
    }

    /// Renders all queued images into the output movie, then invokes
    /// `completion` (on the main queue, via VideoWriter.render).
    func render(completion: @escaping ()->Void) {

        // The VideoWriter will fail if a file exists at the URL, so clear it out first.
        ImageAnimator.removeFileAtURL(fileURL: settings.outputURL)

        videoWriter.start()
        videoWriter.render(appendPixelBuffers: appendPixelBuffers) {
            // Publish the finished movie's path for the rest of the app.
            tempurl = self.settings.outputURL.path!
            completion()
        }
    }

    /// Feeds frames to `writer` until the queue drains or the writer stalls.
    /// Returns true once every image has been appended.
    func appendPixelBuffers(writer: VideoWriter) -> Bool {

        let frameDuration = CMTimeMake(value: Int64(ImageAnimator.kTimescale / settings.fps),
                                       timescale: ImageAnimator.kTimescale)

        while images.isEmpty == false {

            guard writer.isReadyForData else {
                // Writer back-pressure: resume on the next ready callback.
                return false
            }

            let nextImage = images.removeFirst()
            let timestamp = CMTimeMultiply(frameDuration, multiplier: Int32(frameNum))
            guard videoWriter.addImage(image: nextImage, withPresentationTime: timestamp) else {
                fatalError("addImage() failed")
            }

            frameNum += 1
        }

        return true
    }

}
    /// Copies the rendered movie at `videoURL` into the user's photo library
    /// and logs the outcome. Assumes photo-library authorization has already
    /// been granted by the caller.
    func saveVideoToLibrary(videoURL: URL) {

        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: videoURL)
        }) { saved, error in

            if let error = error {
                // Fixed typo in the original message ("librayr").
                print("Error saving video to library: \(error.localizedDescription)")
            }
            if saved {
                print("Video saved to library")
            }
        }
    }
/// Encodes `framesArray` into an H.264 MP4 at 30 fps (Caches/OutputVideo.mp4),
/// drawing each frame aspect-fit and centered on a canvas sized from the
/// first image, then saves the result to the photo library.
func buildVideoFromImageArray(framesArray:[UIImage]) {
    var images = framesArray
    // Guard against empty input instead of crashing on images[0].
    guard let firstFrame = images.first else {
        print("buildVideoFromImageArray: no frames supplied")
        return
    }
    let outputSize = CGSize(width: firstFrame.size.width, height: firstFrame.size.height)

    let fileManager = FileManager.default
    guard let cachesDirectory = fileManager.urls(for: .cachesDirectory, in: .userDomainMask).first else {
        fatalError("documentDir Error")
    }

    let videoOutputURL = cachesDirectory.appendingPathComponent("OutputVideo.mp4")

    if fileManager.fileExists(atPath: videoOutputURL.path) {
        do {
            try fileManager.removeItem(atPath: videoOutputURL.path)
        } catch {
            fatalError("Unable to delete file: \(error) : \(#function).")
        }
    }

    guard let videoWriter = try? AVAssetWriter(outputURL: videoOutputURL, fileType: AVFileType.mp4) else {
        fatalError("AVAssetWriter error")
    }

    let outputSettings = [AVVideoCodecKey : AVVideoCodecType.h264,
                          AVVideoWidthKey : NSNumber(value: Float(outputSize.width)),
                          AVVideoHeightKey : NSNumber(value: Float(outputSize.height))] as [String : Any]

    guard videoWriter.canApply(outputSettings: outputSettings, forMediaType: AVMediaType.video) else {
        fatalError("Negative : Can't apply the Output settings...")
    }

    let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: outputSettings)
    let sourcePixelBufferAttributesDictionary = [
        kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32ARGB),
        kCVPixelBufferWidthKey as String: NSNumber(value: Float(outputSize.width)),
        kCVPixelBufferHeightKey as String: NSNumber(value: Float(outputSize.height))
    ]
    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput,
                                                                  sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)

    // Fail loudly instead of silently skipping the input (original fell
    // through and crashed later inside requestMediaDataWhenReady).
    guard videoWriter.canAdd(videoWriterInput) else {
        fatalError("Cannot add AVAssetWriterInput to the writer")
    }
    videoWriter.add(videoWriterInput)

    guard videoWriter.startWriting() else {
        return
    }
    videoWriter.startSession(atSourceTime: CMTime.zero)
    assert(pixelBufferAdaptor.pixelBufferPool != nil)

    // Replaced the deprecated DispatchQueue(__label:attr:) initializer.
    let mediaQueue = DispatchQueue(label: "mediaInputQueue")

    videoWriterInput.requestMediaDataWhenReady(on: mediaQueue, using: { () -> Void in
        let fps: Int32 = 30
        var frameCount: Int64 = 0
        var appendSucceeded = true

        while (!images.isEmpty) {
            if (videoWriterInput.isReadyForMoreMediaData) {
                let nextPhoto = images.remove(at: 0)
                // Frame N is presented at N/fps. The original computed
                // (N+1)/fps for N > 0, which gave the first frame a
                // double-length duration.
                let presentationTime = CMTimeMake(value: frameCount, timescale: fps)

                var pixelBuffer: CVPixelBuffer? = nil
                let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)

                // Also checks cgImage up front instead of force-unwrapping at draw time.
                if let pixelBuffer = pixelBuffer, status == kCVReturnSuccess, let cgImage = nextPhoto.cgImage {
                    CVPixelBufferLockBaseAddress(pixelBuffer, [])

                    let data = CVPixelBufferGetBaseAddress(pixelBuffer)
                    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
                    let context = CGContext(data: data, width: Int(outputSize.width), height: Int(outputSize.height),
                                            bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
                                            space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)

                    context?.clear(CGRect(x: 0, y: 0, width: outputSize.width, height: outputSize.height))

                    // ScaleAspectFit: uniform scale, centered on the canvas.
                    let horizontalRatio = CGFloat(outputSize.width) / nextPhoto.size.width
                    let verticalRatio = CGFloat(outputSize.height) / nextPhoto.size.height
                    let aspectRatio = min(horizontalRatio, verticalRatio)
                    let newSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
                    let x = newSize.width < outputSize.width ? (outputSize.width - newSize.width) / 2 : 0
                    let y = newSize.height < outputSize.height ? (outputSize.height - newSize.height) / 2 : 0

                    context?.draw(cgImage, in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))

                    CVPixelBufferUnlockBaseAddress(pixelBuffer, [])

                    appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
                    frameCount += 1
                } else {
                    print("Failed to allocate pixel buffer")
                    appendSucceeded = false
                }
            }
            if !appendSucceeded {
                break
            }
        }
        videoWriterInput.markAsFinished()
        videoWriter.finishWriting { () -> Void in
            print("Done saving")
            self.saveVideoToLibrary(videoURL: videoOutputURL)
        }
    })
}