如何在不丢失视频原始声音的情况下将音频与视频合并-iOS Swift

如何在不丢失视频原始声音的情况下将音频与视频合并-iOS Swift,ios,swift,audio-video-sync,Ios,Swift,Audio Video Sync,我的目标是将音频(mp3音乐)与iPhone摄像头捕获的视频合并,我可以使用AVMutableComposition将音频与视频合并,但最终输出的视频声音不会出现 下面是我正在使用的代码: open func mergeVideoWithAudio(videoUrl: URL, audioUrl: URL){ let mixComposition: AVMutableComposition = AVMutableComposition() var mutableComp

我的目标是将音频(mp3音乐)与iPhone摄像头捕获的视频合并,我可以使用
AVMutableComposition
将音频与视频合并,但最终输出的视频声音不会出现

下面是我正在使用的代码:

    open func mergeVideoWithAudio(videoUrl: URL, audioUrl: URL){

    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
    let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

    let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
    let aAudioAsset: AVAsset = AVAsset(url: audioUrl)

    if let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid), let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
        mutableCompositionVideoTrack.append(videoTrack)
        mutableCompositionAudioTrack.append(audioTrack)
    }

    let time = CMTimeMakeWithSeconds(Float64(musicTrimmerController.currentPlayerPosition), 1000)

    if let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video).first,
        let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: .audio).first {
        do {
            try mutableCompositionVideoTrack.first?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)

            try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(time, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)

        } catch{
            print(error)
        }

        totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration)

        let compositionV = mixComposition.tracks(withMediaType: AVMediaType.video).last
        if ((aVideoAssetTrack != nil) && (compositionV != nil)) {
            compositionV?.preferredTransform = (aVideoAssetTrack.preferredTransform)
        }
    }
    if let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first {
        let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("movie.mov")
        do {
            if FileManager.default.fileExists(atPath: outputURL.path) {
                try FileManager.default.removeItem(at: outputURL)
            }
        } catch { }
        if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) {0
            exportSession.outputURL = outputURL
            exportSession.outputFileType = AVFileType.mp4
            exportSession.shouldOptimizeForNetworkUse = true

            /// try to export the file and handle the status cases
            exportSession.exportAsynchronously(completionHandler: {
                switch exportSession.status {
                case .failed:
                    print(exportSession.error as Any)
                case .cancelled:
                    print(exportSession.error as Any)
                default:
                    print("Save video output")
                }
            })
        }
    }
}