Warning: file_get_contents(/data/phpspider/zhask/data//catemap/4/macos/9.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181

Warning: file_get_contents(/data/phpspider/zhask/data//catemap/8/swift/20.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Macos 我如何使用swift中的AVAssetWriter制作视频?_Macos_Swift_Avassetwriter - Fatal编程技术网

Macos 我如何使用swift中的AVAssetWriter制作视频?

Macos 我如何使用swift中的AVAssetWriter制作视频?,macos,swift,avassetwriter,Macos,Swift,Avassetwriter,我目前正在制作一个小应用程序,可以在我的mac电脑上对网络摄像头进行时间缩放,将捕获的帧保存为png格式,并且我正在考虑将捕获的帧导出为单个视频 我使用CGImage来处理原始图像,并将它们设置在一个数组中,但我不确定从那里开始。我从自己的研究中发现,我必须以某种方式使用AVAssetWriter和AVAssetWriterInput 我在这里浏览了一下,阅读了苹果的文档,搜索了谷歌。但是所有的指南等都是obj-c而不是swift,这使得它很难理解(因为我没有obj-c方面的经验) 任何帮助都将

我目前正在制作一个小应用程序,可以在我的mac电脑上对网络摄像头进行时间缩放,将捕获的帧保存为png格式,并且我正在考虑将捕获的帧导出为单个视频

我使用CGImage来处理原始图像,并将它们设置在一个数组中,但我不确定从那里开始。我从自己的研究中发现,我必须以某种方式使用AVAssetWriter和AVAssetWriterInput

我在这里浏览了一下,阅读了苹果的文档,搜索了谷歌。但是所有的指南等都是obj-c而不是swift,这使得它很难理解(因为我没有obj-c方面的经验)

任何帮助都将不胜感激

非常感谢，
卢克。

我在斯威夫特解决了同样的问题。从阵列图像开始,尝试以下方法(有点长:-),但有效):

var choosenPhotos:[UIImage]=[]***您的UIImage数组***
var outputSize=CGSizeMake(1280720)
func生成(outputSize outputSize:CGSize){
让fileManager=NSFileManager.defaultManager()
让URL=fileManager.URLsForDirectory(.DocumentDirectory,inDomains:.UserDomainMask)
guard let documentDirectory:NSURL=url.first-else{
fatalError(“documentDir错误”)
}
让videoOutputURL=documentDirectory.URLByAppendingPathComponent(“OutputVideo.mp4”)
如果NSFileManager.defaultManager().fileExistsAtPath(videoOutputURL.path!){
做{
请尝试NSFileManager.defaultManager().removitematpath(videoOutputURL.path!)
}抓住{
fatalError(“无法删除文件:\(错误):\(\uuu函数)。”)
}
}
guard let videoWriter=try?AVAssetWriter(URL:videoOutputURL,文件类型:AVFileTypeMPEG4)其他{
fatalError(“AVAssetWriter错误”)
}
let outputSettings=[AVVideoCodeKey:AVVideoCodecH264,AVVideoWidthKey:NSNumber(float:float(outputSize.width)),AVVideoHeightKey:NSNumber(float:float(outputSize.height))]
guard videoWriter.可以应用outputSettings(outputSettings,forMediaType:AVMediaTypeVideo)或其他{
fatalError(“负:无法应用输出设置…”)
}
让videoWriterInput=AVAssetWriterInput(媒体类型:AVMediaTypeVideo,输出设置:输出设置)
让sourcePixelBufferAttributesDictionary=[kCVPixelBufferPixelFormatTypeKey作为字符串:NSNumber(unsignedInt:kCVPixelFormatType_32ARGB),kCVPixelBufferWidthKey作为字符串:NSNumber(float:float(outputSize.width)),kCVPixelBufferHeightKey作为字符串:NSNumber(float:float(outputSize.height))]
设PixelBufferAdapter=AvassetWriterInputPixelBufferAdapter(assetWriterInput:videoWriterInput,sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary)
如果videoWriter.canAddInput(videoWriterInput){
videoWriter.addInput(videoWriterInput)
}
如果videoWriter.startWriting(){
videoWriter.startSessionAtSourceTime(kCMTimeZero)
断言(PixelBufferAdapter.pixelBufferPool!=nil)
让媒体队列=调度队列创建(“媒体输入队列”,无)
videoWriterInput.RequestMediaDataWhenRepayonQueue(媒体队列,使用block:{()->Void in
设fps:Int32=1
设frameDuration=CMTimeMake(1,fps)
var frameCount:Int64=0
var=true
而(!self.choosenPhotos.isEmpty){
if(videoWriterInput.readyForMoreMediaData){
让nextPhoto=self.choosenPhotos.removetIndex(0)
设lastFrameTime=CMTimeMake(帧数,fps)
让presentationTime=frameCount==0?lastFrameTime:CMTimeAdd(lastFrameTime,frameDuration)
var pixelBuffer:CVPixelBuffer?=nil
let状态:CVReturn=CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,PixelBufferAdapter.pixelBufferPool!,&pixelBuffer)
如果让pixelBuffer=pixelBuffer,其中状态==0{
让managedPixelBuffer=pixelBuffer
CVPixelBufferLockBaseAddress(managedPixelBuffer,0)
让数据=CVPixelBufferGetBaseAddress(managedPixelBuffer)
设rgbColorSpace=CGColorSpaceCreateDeviceRGB()
让context=CGBitmapContextCreate(数据,Int(self.outputSize.width),Int(self.outputSize.height),8,CVPixelBufferGetBytesPerRow(managedPixelBuffer),rgbColorSpace,CGImageAlphaInfo.PremultipledFirst.rawValue)
CGContextClearRect(上下文,CGRectMake(0,0,CGFloat(self.outputSize.width),CGFloat(self.outputSize.height)))
让水平比率=CGFloat(self.outputSize.width)/nextPhoto.size.width
让verticalRatio=CGFloat(self.outputSize.height)/nextPhoto.size.height
//aspectRatio=max(水平比、垂直比)//ScaleAspectFill
设aspectRatio=min(水平比、垂直比)//ScaleAspectFit
让newSize:CGSize=CGSizeMake(nextPhoto.size.width*aspectRatio,nextPhoto.size.height*aspectRatio)
设x=newSize.width
var choosenPhotos: [UIImage] = [] *** your array of UIImages ***
var outputSize = CGSizeMake(1280, 720)

// Builds "OutputVideo.mp4" in the app's Documents directory from the
// `self.choosenPhotos` array of UIImages, encoding one image per frame at
// 1 fps, each image aspect-fit (letterboxed) into `outputSize`.
// NOTE(review): this is the Swift 2 / pre-iOS 10 version of the answer
// (NSFileManager, CGBitmapContextCreate, `__FUNCTION__`, `++`); it will not
// compile on modern Swift — see the Swift 4 rewrite further down.
func build(outputSize outputSize: CGSize) {
    let fileManager = NSFileManager.defaultManager()
    let urls = fileManager.URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
    guard let documentDirectory: NSURL = urls.first else {
        fatalError("documentDir Error")
    }

    let videoOutputURL = documentDirectory.URLByAppendingPathComponent("OutputVideo.mp4")

    // AVAssetWriter cannot overwrite an existing file, so delete any previous output.
    if NSFileManager.defaultManager().fileExistsAtPath(videoOutputURL.path!) {
        do {
            try NSFileManager.defaultManager().removeItemAtPath(videoOutputURL.path!)
        } catch {
            fatalError("Unable to delete file: \(error) : \(__FUNCTION__).")
        }
    }

    guard let videoWriter = try? AVAssetWriter(URL: videoOutputURL, fileType: AVFileTypeMPEG4) else {
        fatalError("AVAssetWriter error")
    }

    // H.264 encoder settings at the requested frame size.
    let outputSettings = [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : NSNumber(float: Float(outputSize.width)), AVVideoHeightKey : NSNumber(float: Float(outputSize.height))]

    guard videoWriter.canApplyOutputSettings(outputSettings, forMediaType: AVMediaTypeVideo) else {
        fatalError("Negative : Can't apply the Output settings...")
    }

    let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
    // 32ARGB buffers at the output size, so the CGBitmapContext below can draw
    // straight into the pixel buffer's base address.
    let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(unsignedInt: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(float: Float(outputSize.width)), kCVPixelBufferHeightKey as String: NSNumber(float: Float(outputSize.height))]
    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)

    if videoWriter.canAddInput(videoWriterInput) {
        videoWriter.addInput(videoWriterInput)
    }

    if videoWriter.startWriting() {
        videoWriter.startSessionAtSourceTime(kCMTimeZero)
        // The adaptor only vends a pixel-buffer pool once writing has started.
        assert(pixelBufferAdaptor.pixelBufferPool != nil)

        let media_queue = dispatch_queue_create("mediaInputQueue", nil)

        // Pull-model: AVFoundation invokes this block on `media_queue` whenever
        // the input can accept more frames.
        videoWriterInput.requestMediaDataWhenReadyOnQueue(media_queue, usingBlock: { () -> Void in
            let fps: Int32 = 1
            let frameDuration = CMTimeMake(1, fps)

            var frameCount: Int64 = 0
            var appendSucceeded = true

            // Consume the photo array destructively, one frame per image.
            while (!self.choosenPhotos.isEmpty) {
                if (videoWriterInput.readyForMoreMediaData) {
                    let nextPhoto = self.choosenPhotos.removeAtIndex(0)
                    // Frame N is presented at N/fps (first frame at time zero).
                    let lastFrameTime = CMTimeMake(frameCount, fps)
                    let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)

                    var pixelBuffer: CVPixelBuffer? = nil
                    let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)

                    if let pixelBuffer = pixelBuffer where status == 0 {
                        let managedPixelBuffer = pixelBuffer

                        // Lock the buffer while CoreGraphics writes into its memory.
                        CVPixelBufferLockBaseAddress(managedPixelBuffer, 0)

                        let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
                        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
                        let context = CGBitmapContextCreate(data, Int(self.outputSize.width), Int(self.outputSize.height), 8, CVPixelBufferGetBytesPerRow(managedPixelBuffer), rgbColorSpace, CGImageAlphaInfo.PremultipliedFirst.rawValue)

                        // Clear first — pool buffers are recycled and may hold the previous frame.
                        CGContextClearRect(context, CGRectMake(0, 0, CGFloat(self.outputSize.width), CGFloat(self.outputSize.height)))

                        // Aspect-fit scaling: shrink to fit entirely, centered with letterboxing.
                        let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
                        let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
                        //aspectRatio = max(horizontalRatio, verticalRatio) // ScaleAspectFill
                        let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit

                        let newSize:CGSize = CGSizeMake(nextPhoto.size.width * aspectRatio, nextPhoto.size.height * aspectRatio)

                        // Center the scaled image inside the frame.
                        let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
                        let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0

                        CGContextDrawImage(context, CGRectMake(x, y, newSize.width, newSize.height), nextPhoto.CGImage)

                        CVPixelBufferUnlockBaseAddress(managedPixelBuffer, 0)

                        appendSucceeded = pixelBufferAdaptor.appendPixelBuffer(pixelBuffer, withPresentationTime: presentationTime)
                    } else {
                        print("Failed to allocate pixel buffer")
                        appendSucceeded = false
                    }
                }
                if !appendSucceeded {
                    break
                }
                frameCount++
            }
            // All frames delivered (or append failed): close the input and finalize the file.
            videoWriterInput.markAsFinished()
            videoWriter.finishWritingWithCompletionHandler { () -> Void in
                print("FINISHED!!!!!")
            }
        })
    }
}
/// Builds "OutputVideo.mp4" in the app's *Caches* directory from the
/// `self.images` array of UIImages, encoding one image per frame at `fps`
/// frames per second (each image aspect-fit / letterboxed into `outputSize`),
/// then hands the finished movie to `saveVideoToLibrary(videoURL:)`.
///
/// - Parameter outputSize: Pixel dimensions of the output video.
func build(outputSize: CGSize) {
    let fileManager = FileManager.default
    // NOTE: despite the question's wording this resolves the Caches directory.
    let urls = fileManager.urls(for: .cachesDirectory, in: .userDomainMask)
    guard let cacheDirectory = urls.first else {
        fatalError("documentDir Error")
    }

    let videoOutputURL = cacheDirectory.appendingPathComponent("OutputVideo.mp4")

    // AVAssetWriter cannot overwrite an existing file, so delete any previous output.
    if FileManager.default.fileExists(atPath: videoOutputURL.path) {
        do {
            try FileManager.default.removeItem(atPath: videoOutputURL.path)
        } catch {
            fatalError("Unable to delete file: \(error) : \(#function).")
        }
    }

    guard let videoWriter = try? AVAssetWriter(outputURL: videoOutputURL, fileType: AVFileType.mp4) else {
        fatalError("AVAssetWriter error")
    }

    // H.264 encoder settings at the requested frame size.
    let outputSettings = [AVVideoCodecKey : AVVideoCodecType.h264, AVVideoWidthKey : NSNumber(value: Float(outputSize.width)), AVVideoHeightKey : NSNumber(value: Float(outputSize.height))] as [String : Any]

    guard videoWriter.canApply(outputSettings: outputSettings, forMediaType: AVMediaType.video) else {
        fatalError("Negative : Can't apply the Output settings...")
    }

    let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: outputSettings)
    // 32ARGB buffers at the output size, so the CGContext below can draw
    // straight into the pixel buffer's base address.
    let sourcePixelBufferAttributesDictionary = [
        kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32ARGB),
        kCVPixelBufferWidthKey as String: NSNumber(value: Float(outputSize.width)),
        kCVPixelBufferHeightKey as String: NSNumber(value: Float(outputSize.height))
    ]
    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)

    if videoWriter.canAdd(videoWriterInput) {
        videoWriter.add(videoWriterInput)
    }

    if videoWriter.startWriting() {
        videoWriter.startSession(atSourceTime: CMTime.zero)
        // The adaptor only vends a pixel-buffer pool once writing has started.
        assert(pixelBufferAdaptor.pixelBufferPool != nil)

        // FIX: use the public DispatchQueue(label:) initializer; the original
        // called the private bridging initializer DispatchQueue(__label:attr:).
        let media_queue = DispatchQueue(label: "mediaInputQueue")

        // Pull-model: AVFoundation invokes this block on `media_queue` whenever
        // the input can accept more frames.
        videoWriterInput.requestMediaDataWhenReady(on: media_queue, using: { () -> Void in
            let fps: Int32 = 2
            let frameDuration = CMTimeMake(value: 1, timescale: fps)

            var frameCount: Int64 = 0
            var appendSucceeded = true

            // Consume the image array destructively, one frame per image.
            while (!self.images.isEmpty) {
                if (videoWriterInput.isReadyForMoreMediaData) {
                    let nextPhoto = self.images.remove(at: 0)
                    // Frame N is presented at N/fps (first frame at time zero).
                    let lastFrameTime = CMTimeMake(value: frameCount, timescale: fps)
                    let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)

                    var pixelBuffer: CVPixelBuffer? = nil
                    let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)

                    if let pixelBuffer = pixelBuffer, status == 0 {
                        let managedPixelBuffer = pixelBuffer

                        // Lock the buffer while CoreGraphics writes into its memory.
                        CVPixelBufferLockBaseAddress(managedPixelBuffer, [])

                        let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
                        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
                        let context = CGContext(data: data, width: Int(outputSize.width), height: Int(outputSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(managedPixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)

                        // Clear first — pool buffers are recycled and may hold the previous frame.
                        context?.clear(CGRect(x: 0, y: 0, width: outputSize.width, height: outputSize.height))

                        // Aspect-fit scaling: shrink to fit entirely, centered with letterboxing.
                        let horizontalRatio = CGFloat(outputSize.width) / nextPhoto.size.width
                        let verticalRatio = CGFloat(outputSize.height) / nextPhoto.size.height

                        let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit

                        let newSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)

                        // Center the scaled image inside the frame.
                        let x = newSize.width < outputSize.width ? (outputSize.width - newSize.width) / 2 : 0
                        let y = newSize.height < outputSize.height ? (outputSize.height - newSize.height) / 2 : 0

                        context?.draw(nextPhoto.cgImage!, in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))

                        CVPixelBufferUnlockBaseAddress(managedPixelBuffer, [])

                        appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
                    } else {
                        print("Failed to allocate pixel buffer")
                        appendSucceeded = false
                    }
                }
                if !appendSucceeded {
                    break
                }
                frameCount += 1
            }
            // All frames delivered (or append failed): close the input, finalize
            // the file, then push the result into the photo library.
            videoWriterInput.markAsFinished()
            videoWriter.finishWriting { () -> Void in
                print("FINISHED!!!!!")
                saveVideoToLibrary(videoURL: videoOutputURL)
            }
        })
    }
}
/// Saves the finished movie file at `videoURL` into the user's photo library.
///
/// Requires photo-library add authorization; success and failure are only
/// logged, never surfaced to the caller.
///
/// - Parameter videoURL: File URL of the movie to import.
func saveVideoToLibrary(videoURL: URL) {

    PHPhotoLibrary.shared().performChanges({
        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: videoURL)
    }) { saved, error in

        if let error = error {
            // FIX: corrected typo "librayr" -> "library" in the log message.
            print("Error saving video to library: \(error.localizedDescription)")
        }
        if saved {
            // FIX: corrected grammar "save" -> "saved" in the log message.
            print("Video saved to library")
        }
    }
}