iOS: merging audio and video and saving to the photo library, AVPlayer gives an error


Please help me. I am stuck here (

My audio and video come from the documents directory. When I save a video to Photos everything works fine, but then this error occurs, so the merged file cannot be saved:

failed Optional(Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={NSUnderlyingError=0x17044a2f0 {Error Domain=NSOSStatusErrorDomain Code=-12842 "(null)"}, NSLocalizedFailureReason=An unknown error occurred (-12842), NSLocalizedDescription=The operation could not be completed})

Here is my code that gets the files:

func getData(){
        let documentsUrl =  FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!

        do {
            // Get the directory contents urls (including subfolders urls)
            let directoryContents = try FileManager.default.contentsOfDirectory(at: documentsUrl, includingPropertiesForKeys: nil, options: [])
            print(directoryContents)

            // if you want to filter the directory contents you can do like this:
            videoUrlforMarge = directoryContents.filter{ $0.pathExtension == "mov" } as [AnyObject]
            //videoUrlforMarge.append(directoryContents[1] as AnyObject)
            print("this video \(videoUrlforMarge[0])")

            audioUrl = directoryContents.filter{ $0.pathExtension == "caf" } as [AnyObject]

        } catch let error as NSError {
            print(error.localizedDescription)
        }

    }
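
As an aside, contentsOfDirectory(at:) already returns [URL], so the casts to [AnyObject] above are not needed; keeping the arrays typed as [URL] would also avoid the absoluteString!! force-unwraps further down. A minimal sketch of the same lookup with typed arrays (variable names are mine, not from the question):

// Typed variant of the lookup above (sketch): keep [URL] instead of [AnyObject].
let documentsUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
if let contents = try? FileManager.default.contentsOfDirectory(at: documentsUrl, includingPropertiesForKeys: nil, options: []) {
    let movURLs = contents.filter { $0.pathExtension == "mov" }   // [URL]
    let cafURLs = contents.filter { $0.pathExtension == "caf" }   // [URL]
    print("found \(movURLs.count) videos and \(cafURLs.count) audio files")
}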
Here is my merge function:

func mergeFilesWithUrl(videoUrl: NSURL, audioUrl: NSURL) {
    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
    let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

    // start merge
    let aVideoAsset: AVAsset = AVAsset(url: videoUrl as URL)
    let aAudioAsset: AVAsset = AVAsset(url: audioUrl as URL)

    mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
    mutableCompositionAudioTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))

    guard aVideoAsset.tracks(withMediaType: AVMediaTypeVideo).count > 0 && aAudioAsset.tracks(withMediaType: AVMediaTypeAudio).count > 0 else {
        return
    }
    let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]

    do {
        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)
    } catch {
    }

    totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration)

    let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
    mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
    mutableVideoComposition.renderSize = CGSize(width: 1280, height: 720)

    let VideoFilePath = NSURL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("mergeVideo\(arc4random()%1000)d")!.appendingPathExtension("mp4").absoluteString
    if FileManager.default.fileExists(atPath: VideoFilePath) {
        do {
            try FileManager.default.removeItem(atPath: VideoFilePath)
        } catch { }
    }
    let tempfilemainurl = NSURL(string: VideoFilePath)!
    let sourceAsset = AVURLAsset(url: tempfilemainurl as URL, options: nil)
    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetMediumQuality)!
    assetExport.outputFileType = AVFileTypeQuickTimeMovie
    assetExport.outputURL = tempfilemainurl as URL

    assetExport.exportAsynchronously { () -> Void in
        switch assetExport.status {
        case AVAssetExportSessionStatus.completed:
            DispatchQueue.main.async(execute: {
                do {
                    self.userreponsevideoData = try NSData(contentsOf: tempfilemainurl as URL, options: NSData.ReadingOptions())
                    print("MB - \(self.userreponsevideoData.length) byte")
                    let assetsLib = ALAssetsLibrary()
                    assetsLib.writeVideoAtPath(toSavedPhotosAlbum: tempfilemainurl as URL!, completionBlock: nil)
                } catch {
                    print(error)
                }
            })
        case AVAssetExportSessionStatus.failed:
            print("failed \(String(describing: assetExport.error))")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(String(describing: assetExport.error))")
        default:
            print("complete")
        }
    }
}
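
One detail worth noting in the export setup above (an observation, not a confirmed fix for -12842): the output URL is given an .mp4 extension while outputFileType is AVFileTypeQuickTimeMovie, and the URL is rebuilt with NSURL(string:) instead of being kept as a file URL. AVAssetExportSession can be sensitive to the combination of preset, output file type and output URL, so keeping them consistent and checking supportedFileTypes is cheap to try. A hedged sketch, assuming it runs in the same scope as mixComposition:

// Sketch only: build the output URL as a file URL and keep the container type
// consistent with the path extension (.mov with AVFileTypeQuickTimeMovie here).
let outputURL = URL(fileURLWithPath: NSTemporaryDirectory())
    .appendingPathComponent("mergeVideo\(arc4random() % 1000).mov")
_ = try? FileManager.default.removeItem(at: outputURL)

let export = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetMediumQuality)!
print("supported file types: \(export.supportedFileTypes)")
export.outputFileType = AVFileTypeQuickTimeMovie   // matches the .mov extension
export.outputURL = outputURL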
And here is the other function where I call the merge function:

guard videoUrlforMarge.count > 0 && audioUrl.count > 0 else {
    return
}
let videoUrl = videoUrlforMarge[0]
let url = NSURL(fileURLWithPath: videoUrl.absoluteString!!)
let audio = audioUrl[0]
let urla = NSURL(fileURLWithPath: audio.absoluteString!!)

self.mergeFilesWithUrl(videoUrl: url as NSURL, audioUrl: urla as NSURL)
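
Separately, ALAssetsLibrary (used in the .completed branch above) has been deprecated since iOS 9. Saving through the Photos framework reports success or failure in its completion handler, which makes this kind of problem easier to see. A sketch of that variant (exportedURL is a placeholder for the file URL the export session wrote to, and photo-library permission is assumed to be granted):

import Photos

// Sketch: save the exported movie with the Photos framework instead of ALAssetsLibrary.
// exportedURL is a placeholder name for the export session's output file URL.
PHPhotoLibrary.shared().performChanges({
    _ = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exportedURL)
}, completionHandler: { success, error in
    print("saved to Photos: \(success), error: \(String(describing: error))")
})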

Comments on the question:

"On which line does the error occur?"

"I believe there is a problem when exporting the file, but I don't understand it. I think the problem is in the line case AVAssetExportSessionStatus.failed: print("failed \(String(describing: assetExport.error))")."

"If you remove that line and print something else, does the error still occur?"

"No... if I remove that line and print something else, the error disappears.. or if I remove the line completely, it goes to the default print."
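
For what it's worth, the case AVAssetExportSessionStatus.failed: line only reports the error; the export has already failed by the time that branch runs, so removing the print does not remove the problem. The underlying OSStatus code (-12842 above) can be pulled out of the export error for a closer look, roughly like this (a sketch for the .failed branch):

// Sketch: in the .failed branch, dig out the underlying OSStatus error code.
if let error = assetExport.error as NSError?, let underlying = error.userInfo[NSUnderlyingErrorKey] as? NSError {
    print("export failed: \(error.domain) \(error.code), underlying: \(underlying.domain) \(underlying.code)")
} else {
    print("export failed: \(String(describing: assetExport.error))")
}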