Ios Swift |从图像阵列创建视频

Ios Swift |从图像阵列创建视频,ios,objective-c,swift,iphone,xcode,Ios,Objective C,Swift,Iphone,Xcode,我开发了一个应用程序,用户可以在其中制作一些图像的视频。 我从这篇文章中获取了代码: 一切都很好,但我有一个大问题,我不知道如何解决它 创建视频时,图像没有填充孔屏幕,有时会转到另一侧或将图像切成两半 第一个图像是原始图像,第二个图像是创建的视频与我的问题 我真的需要一些帮助 我的代码 func buildVideoFromImageArray() { for image in arrayOfImages { selectedPhotosArray.

我开发了一个应用程序,用户可以在其中制作一些图像的视频。 我从这篇文章中获取了代码:

一切都很好,但我有一个大问题,我不知道如何解决它

创建视频时，图像没有填满整个屏幕，有时会转到另一侧或将图像切成两半

第一个图像是原始图像,第二个图像是创建的视频与我的问题

我真的需要一些帮助

我的代码

/// Builds an H.264 MP4 (Documents/video1.MP4) from `arrayOfImages` — one image
/// per frame, aspect-fit letterboxed into `outputSize` — then saves the
/// finished file to the photo library.
///
/// NOTE(review): assumes `outputSize`, `imagesPerSecond`, `fps`,
/// `arrayOfImages`, `selectedPhotosArray` and `imageArrayToVideoURL` are
/// properties of the enclosing type — confirm against the full class.
func buildVideoFromImageArray() {

    // Queue up a working copy so the source array is left intact.
    for image in arrayOfImages {
        selectedPhotosArray.append(image)
    }

    imageArrayToVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/video1.MP4")
    removeFileAtURLIfExists(url: imageArrayToVideoURL)
    guard let videoWriter = try? AVAssetWriter(outputURL: imageArrayToVideoURL as URL, fileType: AVFileType.mp4) else {
        fatalError("AVAssetWriter error")
    }
    let outputSettings = [AVVideoCodecKey : AVVideoCodecType.h264, AVVideoWidthKey : NSNumber(value: Float(outputSize.width)), AVVideoHeightKey : NSNumber(value: Float(outputSize.height))] as [String : Any]
    guard videoWriter.canApply(outputSettings: outputSettings, forMediaType: AVMediaType.video) else {
        fatalError("Negative : Can't apply the Output settings...")
    }
    let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: outputSettings)
    // Pixel buffers must match the writer's dimensions; 32ARGB pairs with the
    // premultipliedFirst bitmap context created below.
    let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(value: Float(outputSize.width)), kCVPixelBufferHeightKey as String: NSNumber(value: Float(outputSize.height))]
    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
    if videoWriter.canAdd(videoWriterInput) {
        videoWriter.add(videoWriterInput)
    }

    if videoWriter.startWriting() {
        // The first frame below is stamped at one frame-duration in, so the
        // session starts there as well to avoid a blank lead-in.
        let zeroTime = CMTimeMake(value: Int64(imagesPerSecond), timescale: self.fps)
        videoWriter.startSession(atSourceTime: zeroTime)

        assert(pixelBufferAdaptor.pixelBufferPool != nil)
        let media_queue = DispatchQueue(label: "mediaInputQueue")
        videoWriterInput.requestMediaDataWhenReady(on: media_queue, using: { () -> Void in
            let framePerSecond: Int64 = Int64(self.imagesPerSecond)
            let frameDuration = CMTimeMake(value: Int64(self.imagesPerSecond), timescale: self.fps)
            var frameCount: Int64 = 0
            var appendSucceeded = true
            // Runs until the queue of pending photos is drained.
            while !self.selectedPhotosArray.isEmpty {
                if videoWriterInput.isReadyForMoreMediaData {
                    // Consume the next photo from the front of the queue.
                    let nextPhoto = self.selectedPhotosArray.remove(at: 0)

                    let lastFrameTime = CMTimeMake(value: frameCount * framePerSecond, timescale: self.fps)
                    let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)
                    var pixelBuffer: CVPixelBuffer? = nil
                    let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)
                    if let pixelBuffer = pixelBuffer, status == kCVReturnSuccess {
                        CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
                        let data = CVPixelBufferGetBaseAddress(pixelBuffer)
                        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
                        let context = CGContext(data: data, width: Int(self.outputSize.width), height: Int(self.outputSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)!
                        context.clear(CGRect(x: 0, y: 0, width: CGFloat(self.outputSize.width), height: CGFloat(self.outputSize.height)))

                        // Aspect-fit: scale so the whole photo is visible and
                        // centre it, letterboxing if needed. Use max(...) of
                        // the two ratios instead for aspect-fill.
                        let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
                        let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
                        let aspectRatio = min(horizontalRatio, verticalRatio)
                        let newSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
                        let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
                        let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0

                        // BUGFIX: draw through UIKit so UIImage.imageOrientation
                        // is honoured. Drawing nextPhoto.cgImage directly ignores
                        // orientation, producing the sideways / half-cut frames
                        // described in the question. The CTM is flipped first
                        // because UIKit's origin is top-left while this
                        // CoreGraphics context's is bottom-left.
                        UIGraphicsPushContext(context)
                        context.translateBy(x: 0, y: CGFloat(self.outputSize.height))
                        context.scaleBy(x: 1.0, y: -1.0)
                        nextPhoto.draw(in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))
                        UIGraphicsPopContext()

                        CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
                        appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
                        // BUGFIX: advance the frame counter only after a frame
                        // is actually appended. The original incremented it on
                        // every loop pass — including passes where the input
                        // was not ready — leaving gaps in the timestamps.
                        if appendSucceeded {
                            frameCount += 1
                        }
                    } else {
                        print("Failed to allocate pixel buffer")
                        appendSucceeded = false
                    }
                }
                if !appendSucceeded {
                    break
                }
            }
            videoWriterInput.markAsFinished()
            videoWriter.finishWriting { () -> Void in
                print("-----video1 url = \(self.imageArrayToVideoURL)")

                // Export the finished file into the user's photo library.
                PHPhotoLibrary.shared().performChanges({
                    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.imageArrayToVideoURL as URL)
                }) { saved, error in
                    if saved {
                        let fetchOptions = PHFetchOptions()
                        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]

                        // Latest video PHAsset (use .image to fetch the
                        // latest photo instead).
                        let fetchResult = PHAsset.fetchAssets(with: .video, options: fetchOptions).firstObject
                    }
                }
            }
        })
    }

}

    /// Deletes the file at `url` if one already exists, so a fresh file can be
    /// written to the same location. Best-effort: a failed removal is only
    /// logged, never thrown.
    ///
    /// - Parameter url: File URL of the file to remove; URLs with no `path`
    ///   are ignored.
    func removeFileAtURLIfExists(url: NSURL) {
        if let filePath = url.path {
            let fileManager = FileManager.default
            if fileManager.fileExists(atPath: filePath) {
                do {
                    try fileManager.removeItem(atPath: filePath)
                } catch let error as NSError {
                    print("Couldn't remove existing destination file: \(error)")
                }
            }
        }
    }
func buildVideoFromImageArray(){
用于阵列图像中的图像{
selectedPhotosArray.append(图像)
}
imageArrayToVideoURL=NSURL(fileURLWithPath:NSHomeDirectory()+“/Documents/video1.MP4”)
RemoveFileAturalLifeExists(url:imageArrayToVideoURL)
guard let videoWriter=try?AVAssetWriter(输出URL:imageArrayToVideoURL作为URL,文件类型:AVFileType.mp4)其他{
fatalError(“AVAssetWriter错误”)
}
让outputSettings=[AvVideoCodeKey:AvVideoCodeType.h264,AVVideoWidthKey:NSNumber(值:Float(outputSize.width)),AVVideoHeightKey:NSNumber(值:Float(outputSize.height))]作为[字符串:任意]
guard videoWriter.canApply(outputSettings:outputSettings,forMediaType:AVMediaType.video)else{
fatalError(“负:无法应用输出设置…”)
}
让videoWriterInput=AVAssetWriterInput(媒体类型:AVMediaType.video,输出设置:输出设置)
让sourcePixelBufferAttributesDictionary=[kCVPixelBufferPixelFormatTypeKey作为字符串:NSNumber(值:kCVPixelFormatType_32ARGB),kCVPixelBufferWidthKey作为字符串:NSNumber(值:Float(outputSize.width)),kCVPixelBufferHeightKey作为字符串:NSNumber(值:Float(outputSize.height))]
设PixelBufferAdapter=AvassetWriterInputPixelBufferAdapter(assetWriterInput:videoWriterInput,sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary)
如果videoWriter.canAdd(videoWriterInput){
videoWriter.add(videoWriterInput)
}
如果videoWriter.startWriting(){
设zeroTime=CMTimeMake(值:Int64(imagesPerSecond),时间刻度:self.fps)
videoWriter.startSession(atSourceTime:zeroTime)
断言(PixelBufferAdapter.pixelBufferPool!=nil)
让媒体队列=调度队列(标签:“mediaInputQueue”)
videoWriterInput.requestMediaDataWhenReady(在:media_队列上,在中使用:{()->Void
//设fps:Int32=1
设framePerSecond:Int64=Int64(self.imagesPerSecond)
设frameDuration=CMTimeMake(值:Int64(self.imagesPerSecond),时间刻度:self.fps)
var frameCount:Int64=0
var=true
而(!self.selectedPhotosArray.isEmpty){//wird so lange ausgeführt,bis noch etwas im Array steht
if(videoWriterInput.isReadyForMoreMediaData){
让nextPhoto=self.selectedPhotosArray.remove(at:0)//foto-wird-aus-dem-selectedPhotosArray-gelöscht
让lastFrameTime=CMTimeMake(值:frameCount*framePerSecond,时间刻度:self.fps)
让presentationTime=frameCount==0?lastFrameTime:CMTimeAdd(lastFrameTime,frameDuration)
var pixelBuffer:CVPixelBuffer?=nil
let状态:CVReturn=CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,PixelBufferAdapter.pixelBufferPool!,&pixelBuffer)
如果让pixelBuffer=pixelBuffer,状态==0{
让managedPixelBuffer=pixelBuffer
CVPixelBufferLockBaseAddress(managedPixelBuffer,CVPixelBufferLockFlags(原始值:CVOptionFlags(0)))
让数据=CVPixelBufferGetBaseAddress(managedPixelBuffer)
设rgbColorSpace=CGColorSpaceCreateDeviceRGB()
让context=CGContext(数据:data,宽度:Int(self.outputSize.width),高度:Int(self.outputSize.height),bitsPerComponent:8,bytesPerRow:CVPixelBufferGetBytesPerRow(managedPixelBuffer),空格:rgbColorSpace,bitmapInfo:cImageAlphaInfo.PremultipledFirst.rawValue)
context!.clear(CGRect(x:0,y:0,width:CGFloat(self.outputSize.width),height:CGFloat(self.outputSize.height)))
让水平比率=CGFloat(self.outputSize.width)/nextPhoto.size.width
让verticalRatio=CGFloat(self.outputSize.height)/nextPhoto.size.height
//设aspectRatio=max(水平比、垂直比)//ScaleAspectFill
设aspectRatio=min(水平比、垂直比)//ScaleAspectFit
let newSize:CGSize=CGSize(宽度:nextPhoto.size.width*aspectRatio,高度:nextPhoto.size.height*aspectRatio)
/// Builds an H.264 MP4 (Documents/video1.MP4) from `arrayOfImages` — one image
/// per frame, aspect-fit letterboxed into `outputSize` — then saves the
/// finished file to the photo library.
///
/// NOTE(review): assumes `outputSize`, `imagesPerSecond`, `fps`,
/// `arrayOfImages`, `selectedPhotosArray` and `imageArrayToVideoURL` are
/// properties of the enclosing type — confirm against the full class.
func buildVideoFromImageArray() {

    // Queue up a working copy so the source array is left intact.
    for image in arrayOfImages {
        selectedPhotosArray.append(image)
    }

    imageArrayToVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/video1.MP4")
    removeFileAtURLIfExists(url: imageArrayToVideoURL)
    guard let videoWriter = try? AVAssetWriter(outputURL: imageArrayToVideoURL as URL, fileType: AVFileType.mp4) else {
        fatalError("AVAssetWriter error")
    }
    let outputSettings = [AVVideoCodecKey : AVVideoCodecType.h264, AVVideoWidthKey : NSNumber(value: Float(outputSize.width)), AVVideoHeightKey : NSNumber(value: Float(outputSize.height))] as [String : Any]
    guard videoWriter.canApply(outputSettings: outputSettings, forMediaType: AVMediaType.video) else {
        fatalError("Negative : Can't apply the Output settings...")
    }
    let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: outputSettings)
    // Pixel buffers must match the writer's dimensions; 32ARGB pairs with the
    // premultipliedFirst bitmap context created below.
    let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(value: Float(outputSize.width)), kCVPixelBufferHeightKey as String: NSNumber(value: Float(outputSize.height))]
    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
    if videoWriter.canAdd(videoWriterInput) {
        videoWriter.add(videoWriterInput)
    }

    if videoWriter.startWriting() {
        // The first frame below is stamped at one frame-duration in, so the
        // session starts there as well to avoid a blank lead-in.
        let zeroTime = CMTimeMake(value: Int64(imagesPerSecond), timescale: self.fps)
        videoWriter.startSession(atSourceTime: zeroTime)

        assert(pixelBufferAdaptor.pixelBufferPool != nil)
        let media_queue = DispatchQueue(label: "mediaInputQueue")
        videoWriterInput.requestMediaDataWhenReady(on: media_queue, using: { () -> Void in
            let framePerSecond: Int64 = Int64(self.imagesPerSecond)
            let frameDuration = CMTimeMake(value: Int64(self.imagesPerSecond), timescale: self.fps)
            var frameCount: Int64 = 0
            var appendSucceeded = true
            // Runs until the queue of pending photos is drained.
            while !self.selectedPhotosArray.isEmpty {
                if videoWriterInput.isReadyForMoreMediaData {
                    // Consume the next photo from the front of the queue.
                    let nextPhoto = self.selectedPhotosArray.remove(at: 0)

                    let lastFrameTime = CMTimeMake(value: frameCount * framePerSecond, timescale: self.fps)
                    let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)
                    var pixelBuffer: CVPixelBuffer? = nil
                    let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)
                    if let pixelBuffer = pixelBuffer, status == kCVReturnSuccess {
                        CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
                        let data = CVPixelBufferGetBaseAddress(pixelBuffer)
                        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
                        let context = CGContext(data: data, width: Int(self.outputSize.width), height: Int(self.outputSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)!
                        context.clear(CGRect(x: 0, y: 0, width: CGFloat(self.outputSize.width), height: CGFloat(self.outputSize.height)))

                        // Aspect-fit: scale so the whole photo is visible and
                        // centre it, letterboxing if needed. Use max(...) of
                        // the two ratios instead for aspect-fill.
                        let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
                        let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
                        let aspectRatio = min(horizontalRatio, verticalRatio)
                        let newSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
                        let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
                        let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0

                        // BUGFIX: draw through UIKit so UIImage.imageOrientation
                        // is honoured. Drawing nextPhoto.cgImage directly ignores
                        // orientation, producing the sideways / half-cut frames
                        // described in the question. The CTM is flipped first
                        // because UIKit's origin is top-left while this
                        // CoreGraphics context's is bottom-left.
                        UIGraphicsPushContext(context)
                        context.translateBy(x: 0, y: CGFloat(self.outputSize.height))
                        context.scaleBy(x: 1.0, y: -1.0)
                        nextPhoto.draw(in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))
                        UIGraphicsPopContext()

                        CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
                        appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
                        // BUGFIX: advance the frame counter only after a frame
                        // is actually appended. The original incremented it on
                        // every loop pass — including passes where the input
                        // was not ready — leaving gaps in the timestamps.
                        if appendSucceeded {
                            frameCount += 1
                        }
                    } else {
                        print("Failed to allocate pixel buffer")
                        appendSucceeded = false
                    }
                }
                if !appendSucceeded {
                    break
                }
            }
            videoWriterInput.markAsFinished()
            videoWriter.finishWriting { () -> Void in
                print("-----video1 url = \(self.imageArrayToVideoURL)")

                // Export the finished file into the user's photo library.
                PHPhotoLibrary.shared().performChanges({
                    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.imageArrayToVideoURL as URL)
                }) { saved, error in
                    if saved {
                        let fetchOptions = PHFetchOptions()
                        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]

                        // Latest video PHAsset (use .image to fetch the
                        // latest photo instead).
                        let fetchResult = PHAsset.fetchAssets(with: .video, options: fetchOptions).firstObject
                    }
                }
            }
        })
    }

}

    /// Deletes the file at `url` if one already exists, so a fresh file can be
    /// written to the same location. Best-effort: a failed removal is only
    /// logged, never thrown.
    ///
    /// - Parameter url: File URL of the file to remove; URLs with no `path`
    ///   are ignored.
    func removeFileAtURLIfExists(url: NSURL) {
        // NSURL.path is optional (unlike URL.path), hence the guard.
        guard let filePath = url.path else { return }
        let fileManager = FileManager.default
        if fileManager.fileExists(atPath: filePath) {
            do {
                try fileManager.removeItem(atPath: filePath)
            } catch let error as NSError {
                print("Couldn't remove existing destination file: \(error)")
            }
        }
    }