Warning: file_get_contents(/data/phpspider/zhask/data//catemap/8/swift/17.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
是什么导致AVMutableComposition大幅增加视频大小？- iOS、Swift、AVFoundation_Ios_Swift_Video_Swift3_Avmutablecomposition - Fatal编程技术网

是什么导致AVMutableComposition大幅增加视频大小？- iOS、Swift、AVFoundation

是什么导致AVMutableComposition大幅增加视频大小？- iOS、Swift、AVFoundation,ios,swift,video,swift3,avmutablecomposition,Ios,Swift,Video,Swift3,Avmutablecomposition,假设我们有两个视频资源（AVAsset对象），让我们称它们为blank和main，其中main是一个随机长度有限的视频，比如说2-5分钟，blank始终是一个4秒的视频，我们希望按照以下顺序合并视频： 空白-主-空白 // Create AVMutableComposition Object. This object will hold our multiple AVMutableCompositionTrack. let mixComposition = AVMutableCo

假设我们有两个视频资源（AVAsset对象），让我们称它们为blank和main，其中main是一个随机长度有限的视频，比如说2-5分钟，blank始终是一个4秒的视频，我们希望按照以下顺序合并视频：

空白-主-空白

    // Builds a blank–main–blank composition, applies orientation fix-ups via
    // layer instructions, and exports the result as MP4.
    // NOTE(review): this fragment references names defined elsewhere
    // (blank, main, completionHandler, mainVideoObject, Utils, ErrorType).

    // Create AVMutableComposition object; it will hold our multiple
    // AVMutableCompositionTracks.

    let mixComposition = AVMutableComposition()

    // Playback order on the composition timeline: blank -> main -> blank.
    let assets = [blank, main, blank]
    // NOTE(review): CMTimeMake(0, 0) has a timescale of 0, which produces an
    // *invalid* CMTime; kCMTimeZero (value 0, timescale 1) is the safe zero.
    var totalTime : CMTime = CMTimeMake(0, 0)
    var atTimeM: CMTime = CMTimeMake(0, 0)

    Utils.log([blank.duration, main.duration])

    // VIDEO TRACK — all three assets are appended into this ONE track.
    let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

    for (index,asset) in assets.enumerated() {

        do {

            // First asset starts at zero; each later asset starts where the
            // previously inserted ones ended.
            if index == 0 {
                atTimeM = kCMTimeZero
            } else {
                atTimeM = totalTime // <-- Use the total time for all the videos seen so far.
            }

            // Insert the asset's first video track for its full duration.
            try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: asset.tracks(withMediaType: AVMediaTypeVideo)[0], at: atTimeM)

        } catch let error as NSError {
            Utils.log("error: \(error)")
        }

        // Running end time is advanced even if the insert above failed.
        totalTime = CMTimeAdd(totalTime, asset.duration)
    }

    // AUDIO TRACK — only the main asset's audio, offset past the first blank.
    let audioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    do {
        try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, main.duration), of: main.tracks(withMediaType: AVMediaTypeAudio)[0], at: blank.duration)
    } catch _ {
        completionHandler(nil, ErrorType(rawValue: "Unable to add audio in composition."))
        return
    }

    let outputURL = mainVideoObject.getDirectoryURL()?.appendingPathComponent("video-with-blank.mp4")

    // NOTE(review): a fixed export preset re-encodes the video; if its bitrate
    // exceeds the source's, that alone can grow the output file substantially —
    // worth confirming as the cause of the size increase this page discusses.
    guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPreset1280x720) else {
        completionHandler(nil, ErrorType(rawValue: "Unable to create export session."))
        return
    }

    let mainInstruction = AVMutableVideoCompositionInstruction()

    // Instruction spans the whole composition: blank + main + blank.
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(blank.duration, CMTimeAdd(main.duration, blank.duration)))

    // Fixing orientation
    // NOTE(review): all three layer instructions below wrap the SAME
    // videoTrack, so their transforms/opacity ramps apply to one track and may
    // conflict — confirm this is intended.
    let firstLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    let firstAssetTrack = blank.tracks(withMediaType: AVMediaTypeVideo)[0]
    firstLayerInstruction.setTransform(firstAssetTrack.preferredTransform, at: kCMTimeZero)
    firstLayerInstruction.setOpacity(0.0, at: blank.duration)

    let secondLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    let secondAssetTrack = main.tracks(withMediaType: AVMediaTypeVideo)[0]
    // A preferred transform of (a:0 b:±1 c:∓1 d:0) is a 90° rotation, i.e.
    // the clip was recorded in portrait.
    var isSecondAssetPortrait = false
    let secondTransform = secondAssetTrack.preferredTransform
    if (secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0) {
        isSecondAssetPortrait = true
    }
    if (secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0) {
        isSecondAssetPortrait = true
    }
    secondLayerInstruction.setTransform(secondAssetTrack.preferredTransform, at: blank.duration)
    secondLayerInstruction.setOpacity(0.0, at: CMTimeAdd(blank.duration, main.duration))

    let thirdLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    let thirdAssetTrack = blank.tracks(withMediaType: AVMediaTypeVideo)[0]
    thirdLayerInstruction.setTransform(thirdAssetTrack.preferredTransform, at: CMTimeAdd(blank.duration, main.duration))

    mainInstruction.layerInstructions = [firstLayerInstruction, secondLayerInstruction, thirdLayerInstruction]

    // Render size follows the main asset; swap width/height when portrait.
    var naturalSize = CGSize()
    if(isSecondAssetPortrait) {
        naturalSize = CGSize(width: secondAssetTrack.naturalSize.height, height: secondAssetTrack.naturalSize.width)
    } else {
        naturalSize = secondAssetTrack.naturalSize
    }

    let renderWidth = naturalSize.width
    let renderHeight = naturalSize.height

    let mainCompositionInst = AVMutableVideoComposition()
    mainCompositionInst.instructions = [mainInstruction]
    mainCompositionInst.frameDuration = CMTimeMake(1, 30) // 30 fps
    mainCompositionInst.renderSize = CGSize(width: renderWidth, height: renderHeight)

    exporter.outputURL = outputURL
    exporter.outputFileType = AVFileTypeMPEG4
    exporter.videoComposition = mainCompositionInst
    //exporter.shouldOptimizeForNetworkUse = true

    // Export runs asynchronously; the result (or error) is handed back through
    // the caller-supplied completionHandler.
    exporter.exportAsynchronously {
        if exporter.status == .completed {
            completionHandler(AVAsset(url: outputURL!), nil)
        } else {
            completionHandler(nil, ErrorType(rawValue: "Unable to export video."))
            if let error = exporter.error {
                Utils.log("Unable to export video. \(error)")
            }
        }
    }
//创建AVMutableComposition对象。此对象将保存我们的多个AVMutableCompositionTrack。
设mixComposition=AVMutableComposition()
let assets=[空白、主要、空白]
var totalTime:CMTime=CMTimeMake(0,0)
var atTimeM:CMTime=CMTimeMake(0,0)
Utils.log([blank.duration,main.duration])
//录像带
让videoTrack=mixComposition.addMutableTrack(带MediaType:AVMediaTypeVideo,首选TrackID:Int32(kCMPersistentTrackID_无效))
对于assets.enumerated()中的(索引,资产){
做{
如果索引==0{
atTimeM=kCMTimeZero
}否则{

atTimeM = totalTime // 在这里使用到目前为止所有视频的总时长。

在这里，我准备了一个自定义类，您可以在其中传递视频的名称并将这些视频保存到捆绑包中。一旦您运行应用程序，它将根据您的要求生成一个新的视频文件，并将其放入应用程序文档目录路径中。

使用Swift 4,我已经准备好了这个演示

//
//  ViewController.swift
//  SOVideoMergingDemo
//
//  Created by iOS Test User on 03/01/18.
//  Copyright © 2018 Test User. Ltd. All rights reserved.
//

import UIKit
import AVFoundation
import MediaPlayer
import Photos
import AssetsLibrary
import AVKit


/// Demo view controller: merges two bundled clips in the order
/// video1 -> video2 -> video1 into a single MP4 in the Documents directory.
class ViewController : UIViewController {

    //--------------------------------------------------
    //MARK:
    //MARK: - IBOutlets
    //--------------------------------------------------




    //--------------------------------------------------
    //MARK:
    //MARK: - Properties
    //--------------------------------------------------

    var videoUrls : [URL]     = []
    var arrVideoAsset : [AVAsset] = []
    // Bundled resource names (without the ".mp4" extension).
    let video1 = "1"
    let video2 = "2"
    // File name of the merged output inside the Documents directory.
    let outPutVideo = "MergedVideo.mp4"

    let semaphore = DispatchSemaphore(value: 1)


    //--------------------------------------------------
    //MARK:
    //MARK: - Custom Methods
    //--------------------------------------------------

    /// Returns the file URL of a bundled .mp4 resource.
    /// Force-unwraps deliberately: a missing bundled resource is a
    /// programmer error and should crash during development.
    func getVideoURL(forVideo : String) -> URL {
        let videoPath = Bundle.main.path(forResource: forVideo, ofType:"mp4")
        let vidURL = URL(fileURLWithPath: videoPath!)
        return vidURL
    }

    //--------------------------------------------------

    /// Merges the first three assets back to back (asset0, asset1, asset0)
    /// and exports the result asynchronously to Documents/MergedVideo.mp4.
    /// - Parameter arrVideoAsset: must contain at least three assets, each
    ///   with at least one video track.
    func mergeVideos(arrVideoAsset : [AVAsset]) {

        // Robustness: the code below addresses assets [0], [1] and [2].
        guard arrVideoAsset.count >= 3 else {
            print("mergeVideos requires at least 3 assets, got \(arrVideoAsset.count)")
            return
        }

        let mixComposition = AVMutableComposition()

        //Tracks to insert in Composition for Merging
        // One video track per clip so each can carry its own layer instruction.
        guard
            let firstTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
            let secondTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
            let thirdTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
        else {
            print("Unable to create composition tracks")
            return
        }

        // Start offsets of the 2nd and 3rd clips on the composition timeline.
        let secondStart = arrVideoAsset[0].duration
        let thirdStart = CMTimeAdd(arrVideoAsset[0].duration, arrVideoAsset[1].duration)

        do {
            try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, arrVideoAsset[0].duration), of: arrVideoAsset[0].tracks(withMediaType: .video)[0], at: kCMTimeZero)
        } catch {
            print("Failed to load first track")
        }

        do {
            try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, arrVideoAsset[1].duration), of: arrVideoAsset[1].tracks(withMediaType: .video)[0], at: secondStart)
        } catch {
            print("Failed to load second track")
        }

        do {
            // BUG FIX: the third clip must start after clip0 + clip1; the
            // original inserted it at arrVideoAsset[1].duration alone, which
            // overlapped it with the second clip whenever the durations differ.
            try thirdTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, arrVideoAsset[0].duration), of: arrVideoAsset[0].tracks(withMediaType: .video)[0], at: thirdStart)
        } catch {
            // BUG FIX: message said "second track" (copy-paste error).
            print("Failed to load third track")
        }

        //This Instruciton is Created for Merging Video Tracks
        let compositionInstruction = AVMutableVideoCompositionInstruction()
        compositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,CMTimeAdd(arrVideoAsset[0].duration, CMTimeAdd(arrVideoAsset[1].duration, arrVideoAsset[2].duration)))

        //Creating Layer Instruction for Videos
        // Each clip is faded out (opacity 0) exactly when the next clip starts.
        let firstInstruction = videoCompositionInstructionForTrack(firstTrack, asset: arrVideoAsset[0])
        firstInstruction.setOpacity(0.0, at: secondStart)
        let secondInstruction = videoCompositionInstructionForTrack(secondTrack, asset: arrVideoAsset[1])
        // BUG FIX: the second clip ends at clip0 + clip1, not at clip1's
        // duration alone — the original hid it too early/late.
        secondInstruction.setOpacity(0.0, at: thirdStart)
        let thirdInstruction = videoCompositionInstructionForTrack(thirdTrack, asset: arrVideoAsset[2])

        compositionInstruction.layerInstructions = [firstInstruction, secondInstruction, thirdInstruction]

        //By Changing These Height and Width User can affect Size of Merged Video.
        // Render at the smaller of the two source sizes so neither clip upscales.
        let height = min(firstTrack.naturalSize.height, secondTrack.naturalSize.height)
        let width = min(firstTrack.naturalSize.width, secondTrack.naturalSize.width)

        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [compositionInstruction]
        mainComposition.frameDuration = CMTimeMake(1, 30) // 30 fps
        mainComposition.renderSize = CGSize(width: width, height: height)

        let outputURL = URL(fileURLWithPath: getDocumentDirectoryPath() + "/" + outPutVideo)
        // BUG FIX: AVAssetExportSession fails if the destination file already
        // exists, so remove any previous export first (best effort).
        try? FileManager.default.removeItem(at: outputURL)

        guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else {
            print("Unable to create export session")
            return
        }
        exporter.outputURL = outputURL
        exporter.outputFileType = AVFileType.mp4
        exporter.shouldOptimizeForNetworkUse = true
        exporter.videoComposition = mainComposition
        print(self.getDocumentDirectoryPath())

        exporter.exportAsynchronously(completionHandler: {
            DispatchQueue.main.async {
                if exporter.status == AVAssetExportSessionStatus.completed {
                    // The merged file is already at outputURL; the original
                    // redundantly re-read the data and wrote it back onto the
                    // same path, which has been removed.
                    print("Merged video saved at \(outputURL.path)")

                    //Uncomment This If you want to save video in Photos Library
//                    PHPhotoLibrary.shared().performChanges({
//                        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: (exporter.outputURL)!)
//                    }, completionHandler: { (success, error) in
//                        if success {
//                            let fetchOptions = PHFetchOptions()
//                            fetchOptions.sortDescriptors = [NSSortDescriptor.init(key:"creationDate", ascending: false)]
//                            _ = PHAsset.fetchAssets(with: .video, options:fetchOptions).firstObject
//                        } else {
//                            print("Error in Saving File in Photo Libaray -> \(String(describing: error?.localizedDescription))")
//                        }
//                    })
                } else {
                    print("Error -> \(String(describing: exporter.error?.localizedDescription))")
                }
            }
        })

    }

    //--------------------------------------------------

    /// Builds a layer instruction for `track` that applies the source asset's
    /// preferred transform, preserving the clip's recorded orientation.
    /// (The identity scale the original concatenated here was a no-op and has
    /// been removed.)
    func videoCompositionInstructionForTrack(_ track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracks(withMediaType: AVMediaType.video)[0]
        instruction.setTransform(assetTrack.preferredTransform, at: kCMTimeZero)
        return instruction
    }

    //--------------------------------------------------

    /// Returns the app's Documents directory path.
    func getDocumentDirectoryPath() -> String {
        let arrPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
        return arrPaths[0]
    }

    //--------------------------------------------------
    //MARK:
    //MARK: - View Life Cycle Methods
    //--------------------------------------------------

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.

        //Prepare Video Assets (order: video1, video2, video1)
        arrVideoAsset.append(AVAsset(url:getVideoURL(forVideo:video1)))
        arrVideoAsset.append(AVAsset(url:getVideoURL(forVideo:video2)))
        arrVideoAsset.append(AVAsset(url:getVideoURL(forVideo:video1)))

        //Merge this Videos
        mergeVideos(arrVideoAsset:arrVideoAsset)
    }
}
//
//ViewController.swift
//SOVideoMergingDemo
//
//由iOS测试用户于2018年1月3日创建。
//版权所有©2018测试用户有限公司。保留所有权利。
//
导入UIKit
//导入AVFoundation
导入MediaPlayer
导入照片
导入资产库
进口AVKit
类ViewController:UIViewController{
//--------------------------------------------------
//标记:
//标记:-IB出口
//--------------------------------------------------
//--------------------------------------------------
//标记:
//标记:-属性
//--------------------------------------------------
var videourl:[URL]=[]
var arrVideoAsset:[AVAsset]=[]
让video1=“1”
让video2=“2”
让outPutVideo=“MergedVideo.mp4”
让信号量=分派信号量(值:1)
//--------------------------------------------------
//标记:
//标记:-自定义方法
//--------------------------------------------------
func getVideoURL(forVideo:String)->URL{
让videoPath=Bundle.main.path(forResource:forVideo,类型为“mp4”)
让vidURL=URL(fileURLWithPath:videoPath!)
返回vidURL
}
//--------------------------------------------------
func合并视频(arrVideoAsset:[AVAsset]){
设mixComposition=AVMutableComposition()
//要在合成中插入以进行合并的曲目
//创建视频曲目
让firstTrack=mixComposition.addMutableTrack(使用MediaType:.video,preferredTrackID:kCMPersistentTrackID\u无效)
让secondTrack=mixComposition.addMutableTrack(使用MediaType:.video,preferredTrackID:kCMPersistentTrackID\u无效)
让thirdTrack=mixComposition.addMutableTrack(使用MediaType:.video,preferredTrackID:kCMPersistentTrackID\u无效)
做{
请尝试firstTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero,arrVideoAsset[0]。持续时间),共有:arrVideoAsset[0]。曲目(withMediaType:.video)[0],位于:kCMTimeZero)
}接住{
打印(“未能加载第一首曲目”)
}
做{
尝试secondTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero,arrVideoAsset[1]。持续时间),共有:arrVideoAsset[1]。曲目(withMediaType:.video)[0],位于:arrVideoAsset[0]。持续时间)
}接住{
打印(“未能加载第二个曲目”)
}
做{
请尝试第三个Track?.insertTimeRange(CMTimeRangeMake(kCMTimeZero,arrVideoAsset[0]。持续时间),共有:arrVideoAsset[0]。曲目(withMediaType:.video)[0],位于:arrVideoAsset[1]。持续时间)
}接住{
打印(“未能加载第二个曲目”)
}
//本说明用于合并视频曲目
let compositionInstruction=AVMutableVideoCompositionInstruction()
compositionInstruction.timeRange=CMTimeRangeMake(kCMTimeZero,CMTimeAdd(arrVideoAsset[0]。持续时间,CMTimeAdd(arrVideoAsset[1]。持续时间,arrVideoAsset[2]。持续时间)))
//为视频创建层指令
让firstInstruction=videoCompositionInstructionForTrack(firstTrack!,资产:arrVideoAsset[0])
firstInstruction.setOpacity(0.0,at:arrVideoAsset[0]。持续时间)
let secondInstruction=videoCompositionInstructionForTrack(secondTrack!,资产:arrVideoAsset[1])
secondInstruction.setOpacity(0.0,at:arrVideoAsset[1]。持续时间)
设thirdInstruction=videoCompositionInstructionForTrack(thirdTrack!,资产:arrVideoAsset[2])
compositionInstruction.layerInstructions=[firstInstruction,secondInstruction,thirdInstruction]
//通过更改这些高度和宽度,用户可以影响合并视频的大小。请根据需要仔细计算
设高度=(浮动((第一轨道?.naturalSize.height)!<浮动((第二轨道?.naturalSize.height)!)?第一轨道?.naturalSize.height:第二轨道?.naturalSize.height)
让宽度=(浮动((第一轨道?.naturalSize.width)!)