Ios Swift:方形视频合成

Ios Swift:方形视频合成,ios,swift,video,crop,Ios,Swift,Video,Crop,我遵循下面的方块视频合成代码 func completeWithVideoAtURL(input: NSURL) { let asset = AVAsset(url: input as URL) let output = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/Video.mp4") let session = AVAssetExportSession(asset: asset, presetName

我遵循下面的方块视频合成代码

/// Exports `input` as a square MP4 into Documents, then saves the result to the photo library.
/// - Parameter input: file URL of the source video to re-export.
func completeWithVideoAtURL(input: NSURL) {
    let asset = AVAsset(url: input as URL)
    let output = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/Video.mp4")

    // AVAssetExportSession fails if the destination already exists, so a second
    // run would silently break without this cleanup.
    try? FileManager.default.removeItem(at: output as URL)

    // Don't force-unwrap: the preset may be unavailable for this asset.
    guard let session = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetMediumQuality) else {
        print("could not create export session")
        return
    }
    session.videoComposition = self.squareVideoCompositionForAsset(asset: asset)
    session.outputURL = output as URL
    session.outputFileType = AVFileType.mp4
    session.shouldOptimizeForNetworkUse = true
    session.exportAsynchronously {
        DispatchQueue.main.async {
            // Only touch the photo library when the export actually succeeded;
            // previously a failed/cancelled export still attempted the save.
            guard session.status == .completed else {
                print("export failed: \(String(describing: session.error))")
                return
            }
            print("\(output)")
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: output as URL)
            }) { saved, error in
                if saved {
                    print("saved to gallery")
                } else if let error = error {
                    // Surface the failure instead of swallowing it.
                    print("save to gallery failed: \(error)")
                }
            }
        }
    }
}

/// Builds a square AVVideoComposition whose side equals the LONGER of the
/// track's natural dimensions, so no part of the frame is cropped
/// (the shorter dimension is padded instead).
/// NOTE(review): as reported in the question, portrait sources come out
/// left-aligned rather than centered — the translation below is applied in the
/// track's pre-rotation coordinate space, which presumably is why the offset
/// lands on the wrong axis for rotated (portrait) tracks; confirm before reuse.
func squareVideoCompositionForAsset(asset: AVAsset) -> AVVideoComposition {
    // Assumes the asset has at least one video track — index [0] crashes otherwise.
    let track = asset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let length = max(track.naturalSize.width, track.naturalSize.height)

    var transform = track.preferredTransform

    let size = track.naturalSize
    // Flip the translation sign when the transform is a 180° rotation
    // (a == d == -1 with no shear).
    let scale: CGFloat = (transform.a == -1 && transform.b == 0 && transform.c == 0 && transform.d == -1) ? -1 : 1 // check for inversion

    // Shift the frame so the natural size is centered inside the length×length square.
    transform = transform.translatedBy(x: scale * -(size.width - length) / 2, y: scale * -(size.height - length) / 2)

    let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    transformer.setTransform(transform, at: kCMTimeZero)

    // One instruction covering the whole timeline.
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: kCMTimeZero, duration: kCMTimePositiveInfinity)
    instruction.layerInstructions = [transformer]

    let composition = AVMutableVideoComposition()
    composition.frameDuration = CMTime(value: 1, timescale: 30)
    composition.renderSize = CGSize(width: length, height: length)
    composition.instructions = [instruction]

    return composition
}
在 squareVideoCompositionForAsset() 函数中,我为 length 取 track.naturalSize.width 和 track.naturalSize.height 之间的最大值(max),因为我不想裁剪视频的任何部分。如果我取最小值(min),对于纵向视频,它会剪切视频的上下部分;对于横向视频,它会剪切视频的左右部分。

  • 对于横向视频,输出正常

  • 但对于纵向视频,输出类似于下图

视频变为左侧。可以将视频居中吗?任何帮助都会很好,很抱歉解释得太长。

请不要用这行

let scale: CGFloat = (transform.a == -1 && transform.b == 0 && transform.c == 0 && transform.d == -1) ? -1 : 1

我刚用过这个

它像一个符咒一样发挥作用

Swift 4.2

/// Exports `videoURL` as a centered, square-cropped MP4 of side `sideLength`
/// into the temporary directory.
/// - Parameters:
///   - videoURL: file URL of the source video.
///   - sideLength: side (in pixels) of the square output.
///   - completion: called on the main queue with the exported file URL, or
///     `(nil, error)` on failure. Guaranteed to be called exactly once on
///     every path (previously it was never called when the asset had no
///     video track).
func suqareCropVideo(videoURL: URL, withSide sideLength: CGFloat, completion: @escaping (_ resultURL: URL?, _ error: Error?) -> ()) {

    let asset = AVAsset(url: videoURL)

    // Fix: an asset without a video track used to fall through silently,
    // leaving the caller waiting forever.
    guard let assetVideoTrack = asset.tracks(withMediaType: .video).last else {
        completion(nil, nil)
        return
    }

    let originalSize = assetVideoTrack.naturalSize
    // Scale so the SHORTER side exactly fills the square; the longer side
    // overflows the render box and is cropped.
    let scale: CGFloat
    if originalSize.width < originalSize.height {
        scale = sideLength / originalSize.width
    } else {
        scale = sideLength / originalSize.height
    }

    let scaledSize = CGSize(width: originalSize.width * scale, height: originalSize.height * scale)
    // Offset that centers the scaled frame inside the square render area
    // (negative along the overflowing axis).
    let topLeft = CGPoint(x: sideLength * 0.5 - scaledSize.width * 0.5, y: sideLength * 0.5 - scaledSize.height * 0.5)

    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: assetVideoTrack)

    var orientationTransform = assetVideoTrack.preferredTransform

    // Rotated (portrait) tracks carry a translation equal to a natural
    // dimension; remap it to the square's side so the rotation pivots inside
    // the render box.
    if orientationTransform.tx == originalSize.width || orientationTransform.tx == originalSize.height {
        orientationTransform.tx = sideLength
    }

    if orientationTransform.ty == originalSize.width || orientationTransform.ty == originalSize.height {
        orientationTransform.ty = sideLength
    }

    // Order matters: scale, then center, then apply the orientation transform.
    let transform = CGAffineTransform(scaleX: scale, y: scale)
        .concatenating(CGAffineTransform(translationX: topLeft.x, y: topLeft.y))
        .concatenating(orientationTransform)

    layerInstruction.setTransform(transform, at: .zero)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.layerInstructions = [layerInstruction]
    instruction.timeRange = assetVideoTrack.timeRange

    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = CGSize(width: sideLength, height: sideLength)
    videoComposition.renderScale = 1.0
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
    videoComposition.instructions = [instruction]

    guard let export = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {
        completion(nil, nil)
        return
    }

    export.videoComposition = videoComposition
    export.outputURL = URL(fileURLWithPath: "\(NSTemporaryDirectory())\(UUID().uuidString).mp4")
    export.outputFileType = AVFileType.mp4
    export.shouldOptimizeForNetworkUse = true
    export.exportAsynchronously {
        DispatchQueue.main.async {
            if export.status == .completed {
                completion(export.outputURL, nil)
            } else {
                completion(nil, export.error)
            }
        }
    }
}
func suqareCropVideo(videoURL: URL, withSide sideLength: CGFloat, completion: @escaping (_ resultURL: URL?, _ error: Error?) -> ()) {
    let asset = AVAsset(url: videoURL)
    if let assetVideoTrack = asset.tracks(withMediaType: .video).last {
        let originalSize = assetVideoTrack.naturalSize
        var scale: CGFloat
        if originalSize.width < originalSize.height {
谢谢分享!
/// Crops the video at `videoURL` to a centered square of side `sideLength`
/// and exports it as an MP4 in the temporary directory.
/// - Parameters:
///   - videoURL: file URL of the source video.
///   - sideLength: side (in pixels) of the square output.
///   - completion: invoked on the main queue with either the exported URL or
///     an error. Fix: now invoked on EVERY path — the original returned
///     without calling it when the asset had no video track.
func suqareCropVideo(videoURL: URL, withSide sideLength: CGFloat, completion: @escaping (_ resultURL: URL?, _ error: Error?) -> ()) {

    let asset = AVAsset(url: videoURL)

    guard let track = asset.tracks(withMediaType: .video).last else {
        // No video track: report completion instead of silently dropping it.
        completion(nil, nil)
        return
    }

    let naturalSize = track.naturalSize
    // Fill the square with the shorter side; the longer side gets cropped.
    let fillScale = sideLength / min(naturalSize.width, naturalSize.height)

    let scaledSize = CGSize(width: naturalSize.width * fillScale, height: naturalSize.height * fillScale)
    // Center the scaled frame inside the square render box.
    let centeringOffset = CGPoint(x: (sideLength - scaledSize.width) * 0.5, y: (sideLength - scaledSize.height) * 0.5)

    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)

    var orientationTransform = track.preferredTransform
    // Portrait/rotated tracks carry a translation equal to a natural
    // dimension; retarget it to the square's side so the rotation pivots
    // inside the render box.
    if orientationTransform.tx == naturalSize.width || orientationTransform.tx == naturalSize.height {
        orientationTransform.tx = sideLength
    }
    if orientationTransform.ty == naturalSize.width || orientationTransform.ty == naturalSize.height {
        orientationTransform.ty = sideLength
    }

    // scale → center → orientation; concatenation order is significant.
    let fullTransform = CGAffineTransform(scaleX: fillScale, y: fillScale)
        .concatenating(CGAffineTransform(translationX: centeringOffset.x, y: centeringOffset.y))
        .concatenating(orientationTransform)
    layerInstruction.setTransform(fullTransform, at: .zero)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.layerInstructions = [layerInstruction]
    instruction.timeRange = track.timeRange

    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = CGSize(width: sideLength, height: sideLength)
    videoComposition.renderScale = 1.0
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
    videoComposition.instructions = [instruction]

    guard let export = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {
        completion(nil, nil)
        return
    }

    export.videoComposition = videoComposition
    export.outputURL = URL(fileURLWithPath: "\(NSTemporaryDirectory())\(UUID().uuidString).mp4")
    export.outputFileType = AVFileType.mp4
    export.shouldOptimizeForNetworkUse = true
    export.exportAsynchronously {
        DispatchQueue.main.async {
            if export.status == .completed {
                completion(export.outputURL, nil)
            } else {
                completion(nil, export.error)
            }
        }
    }
}