在视频iOS上添加标签

在视频iOS上添加标签(ios, swift, video, uilabel, overlay)。我试图实现的是在视频上叠加一个自定义的 UILabel,使其按照 UILabel 的各种属性显示,从而导出一个新的视频。标签可以有不同的颜色、字体大小和阴影等,因此用纯字符串代替 UILabel 是行不通的。此外,由于 UILabel 可以被拖动平移,使用真实的标签就更加重要,因为它的位置需要与视频画面对应。

我试图实现的是在视频上添加一个自定义的 UILabel,这样它就会按照 UILabel 的各种属性显示,我们就会得到一个新的视频。标签可以有不同的颜色、不同的字体大小和阴影等。因此,使用字符串代替 UILabel 是没有用的。此外,由于 UILabel 可以被拖动平移,使用准确的标签就更加重要,因为它的位置需要与视频画面对应。

考虑一下我正在努力实现的目标:

这些方法帮助我完成了上述任务: 如果你面临问题,请告诉我。谢谢

func applyVideoEffects(to composition: AVMutableVideoComposition, size: CGSize, currentLabel: UILabel) {
    // Snapshot the label into a transparent image.
    // NOTE(review): the snapshot is a square of screen-width on each side,
    // presumably because the label was laid out over a square preview —
    // confirm this matches the player UI, otherwise the overlay is distorted
    // when stretched to the video's render size below.
    let screenWidth = UIScreen.main.bounds.width
    let labelImage = UIImage.createTransparentImageFrom(
        label: currentLabel,
        imageSize: CGSize(width: screenWidth, height: screenWidth)
    )

    // Layer carrying the rendered label, scaled to the full render size.
    let labelLayer = CALayer()
    labelLayer.contents = labelImage.cgImage
    labelLayer.frame = CGRect(origin: .zero, size: size)
    labelLayer.masksToBounds = true

    // Layer tree: parent -> [video, label overlay]; overlay added last so it
    // sits on top of the composited frames.
    let videoLayer = CALayer()
    videoLayer.frame = CGRect(origin: .zero, size: size)

    let parentLayer = CALayer()
    parentLayer.frame = CGRect(origin: .zero, size: size)
    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(labelLayer)

    // Hand the Core Animation tree to AVFoundation for per-frame compositing.
    composition.animationTool = AVVideoCompositionCoreAnimationTool(
        postProcessingAsVideoLayer: videoLayer,
        in: parentLayer
    )
}

/// Composites `label` on top of `videoAsset` and exports the result to
/// Documents/newVideoWithLabel.mp4, then hands the session to
/// `exportDidFinish(session:)`.
func videoOutput(videoAsset: AVAsset, label: UILabel) {

    // 1 - Build a mutable composition holding the source video track.
    let mixComposition = AVMutableComposition()

    // Bail out early if the asset has no video track instead of crashing on [0].
    guard let videoAssetTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first else {
        print("Error: asset contains no video track")
        return
    }

    let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    do {
        try videoTrack.insertTimeRange(CMTimeRange(start: kCMTimeZero, duration: videoAsset.duration), of: videoAssetTrack, at: kCMTimeZero)
    } catch {
        // Without the video track there is nothing meaningful to export.
        print("Error inserting video track: \(error)")
        return
    }

    // 2 - One composition instruction spanning the whole clip.
    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRange(start: kCMTimeZero, duration: videoAsset.duration)

    let videoLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    let videoTransform = videoAssetTrack.preferredTransform

    // The preferred transform tells us whether the clip was recorded in
    // portrait: a ±90° rotation swaps the natural width/height.
    // (The original code also derived a UIImageOrientation here, but it was
    // never read afterwards, so it has been removed.)
    let isVideoAssetPortrait =
        (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) ||
        (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0)

    // Apply the recording orientation, and hide the track past its end.
    videoLayerInstruction.setTransform(videoTransform, at: kCMTimeZero)
    videoLayerInstruction.setOpacity(0.0, at: videoAsset.duration)

    mainInstruction.layerInstructions = [videoLayerInstruction]

    // 3 - The render size must match the *displayed* orientation.
    let naturalSize: CGSize
    if isVideoAssetPortrait {
        naturalSize = CGSize(width: videoAssetTrack.naturalSize.height, height: videoAssetTrack.naturalSize.width)
    } else {
        naturalSize = videoAssetTrack.naturalSize
    }

    let mainCompositionInst = AVMutableVideoComposition()
    mainCompositionInst.renderSize = naturalSize
    mainCompositionInst.instructions = [mainInstruction]
    mainCompositionInst.frameDuration = CMTime(value: 1, timescale: 30)   // 30 fps

    self.applyVideoEffects(to: mainCompositionInst, size: naturalSize, currentLabel: label)

    // 4 - Export to Documents/newVideoWithLabel.mp4, replacing any older file.
    guard let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else {
        print("Error: could not locate the Documents directory")
        return
    }
    let outputURL = documentsURL.appendingPathComponent("newVideoWithLabel.mp4")
    if FileManager.default.fileExists(atPath: outputURL.path) {
        do {
            try FileManager.default.removeItem(at: outputURL)
        } catch {
            print("Error deleting existing file: \(error)")
        }
    }

    guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else {
        print("Error: could not create export session")
        return
    }
    exporter.outputURL = outputURL
    // The file is named .mp4, so export an MPEG-4 container. (The original
    // used AVFileTypeQuickTimeMovie, which mismatches the .mp4 extension and
    // can make the export fail.)
    exporter.outputFileType = AVFileTypeMPEG4
    exporter.shouldOptimizeForNetworkUse = true
    exporter.videoComposition = mainCompositionInst
    exporter.exportAsynchronously {
        self.exportDidFinish(session: exporter)
    }
}

/// Called when the export session completes. Saves the exported file to the
/// photo library and logs the URL of the newly created library asset.
func exportDidFinish(session: AVAssetExportSession) {
    guard session.status == .completed, let outputURL = session.outputURL else {
        // Surface the failure instead of silently ignoring it (the original
        // dropped both the non-completed status and session.error).
        print("Export did not complete: \(String(describing: session.error))")
        return
    }
    PHPhotoLibrary.shared().performChanges({
        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
    }) { saved, error in
        guard saved else {
            print("Saving to photo library failed: \(String(describing: error))")
            return
        }
        // Fetch the newest video asset — with creationDate ascending, the
        // last object is the one we just saved.
        let fetchOptions = PHFetchOptions()
        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: true)]
        guard let newestVideo = PHAsset.fetchAssets(with: .video, options: fetchOptions).lastObject else {
            print("Error: could not fetch the saved video asset")
            return
        }
        // Use the shared manager rather than allocating a throwaway one.
        PHImageManager.default().requestAVAsset(forVideo: newestVideo, options: nil, resultHandler: { avAsset, _, _ in
            guard let urlAsset = avAsset as? AVURLAsset else { return }
            print(urlAsset.url)
            DispatchQueue.main.async {
                print(urlAsset.url.absoluteString)
            }
        })
    }
}

import Foundation
import UIKit

extension UIImage {
    /// Renders `label` into a transparent image of `imageSize`, preserving the
    /// label's frame (position and size) inside the image.
    ///
    /// - Note: Unlike the original implementation, the label is NOT reparented
    ///   into a temporary view, so its place in the live view hierarchy is
    ///   untouched.
    class func createTransparentImageFrom(label: UILabel, imageSize: CGSize) -> UIImage {
        // Scale 0 renders at the device's screen scale instead of a
        // hard-coded 2x (wrong on 1x and 3x screens).
        UIGraphicsBeginImageContextWithOptions(imageSize, false, 0.0)
        defer { UIGraphicsEndImageContext() }
        guard let context = UIGraphicsGetCurrentContext() else { return UIImage() }

        // Draw the label at its own frame origin on a transparent background;
        // layer.render draws in the layer's local coordinates, so offset the
        // context by the frame origin first.
        context.translateBy(x: label.frame.origin.x, y: label.frame.origin.y)
        label.layer.render(in: context)

        return UIGraphicsGetImageFromCurrentImageContext() ?? UIImage()
    }
}

这些方法帮助我完成了上述任务: 如果你面临问题,请告诉我。谢谢

func applyVideoEffects(to composition: AVMutableVideoComposition, size: CGSize, currentLabel: UILabel) {
    // Snapshot the label into a transparent image.
    // NOTE(review): the snapshot is a square of screen-width on each side,
    // presumably because the label was laid out over a square preview —
    // confirm this matches the player UI, otherwise the overlay is distorted
    // when stretched to the video's render size below.
    let screenWidth = UIScreen.main.bounds.width
    let labelImage = UIImage.createTransparentImageFrom(
        label: currentLabel,
        imageSize: CGSize(width: screenWidth, height: screenWidth)
    )

    // Layer carrying the rendered label, scaled to the full render size.
    let labelLayer = CALayer()
    labelLayer.contents = labelImage.cgImage
    labelLayer.frame = CGRect(origin: .zero, size: size)
    labelLayer.masksToBounds = true

    // Layer tree: parent -> [video, label overlay]; overlay added last so it
    // sits on top of the composited frames.
    let videoLayer = CALayer()
    videoLayer.frame = CGRect(origin: .zero, size: size)

    let parentLayer = CALayer()
    parentLayer.frame = CGRect(origin: .zero, size: size)
    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(labelLayer)

    // Hand the Core Animation tree to AVFoundation for per-frame compositing.
    composition.animationTool = AVVideoCompositionCoreAnimationTool(
        postProcessingAsVideoLayer: videoLayer,
        in: parentLayer
    )
}

/// Composites `label` on top of `videoAsset` and exports the result to
/// Documents/newVideoWithLabel.mp4, then hands the session to
/// `exportDidFinish(session:)`.
func videoOutput(videoAsset: AVAsset, label: UILabel) {

    // 1 - Build a mutable composition holding the source video track.
    let mixComposition = AVMutableComposition()

    // Bail out early if the asset has no video track instead of crashing on [0].
    guard let videoAssetTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first else {
        print("Error: asset contains no video track")
        return
    }

    let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    do {
        try videoTrack.insertTimeRange(CMTimeRange(start: kCMTimeZero, duration: videoAsset.duration), of: videoAssetTrack, at: kCMTimeZero)
    } catch {
        // Without the video track there is nothing meaningful to export.
        print("Error inserting video track: \(error)")
        return
    }

    // 2 - One composition instruction spanning the whole clip.
    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRange(start: kCMTimeZero, duration: videoAsset.duration)

    let videoLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    let videoTransform = videoAssetTrack.preferredTransform

    // The preferred transform tells us whether the clip was recorded in
    // portrait: a ±90° rotation swaps the natural width/height.
    // (The original code also derived a UIImageOrientation here, but it was
    // never read afterwards, so it has been removed.)
    let isVideoAssetPortrait =
        (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) ||
        (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0)

    // Apply the recording orientation, and hide the track past its end.
    videoLayerInstruction.setTransform(videoTransform, at: kCMTimeZero)
    videoLayerInstruction.setOpacity(0.0, at: videoAsset.duration)

    mainInstruction.layerInstructions = [videoLayerInstruction]

    // 3 - The render size must match the *displayed* orientation.
    let naturalSize: CGSize
    if isVideoAssetPortrait {
        naturalSize = CGSize(width: videoAssetTrack.naturalSize.height, height: videoAssetTrack.naturalSize.width)
    } else {
        naturalSize = videoAssetTrack.naturalSize
    }

    let mainCompositionInst = AVMutableVideoComposition()
    mainCompositionInst.renderSize = naturalSize
    mainCompositionInst.instructions = [mainInstruction]
    mainCompositionInst.frameDuration = CMTime(value: 1, timescale: 30)   // 30 fps

    self.applyVideoEffects(to: mainCompositionInst, size: naturalSize, currentLabel: label)

    // 4 - Export to Documents/newVideoWithLabel.mp4, replacing any older file.
    guard let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else {
        print("Error: could not locate the Documents directory")
        return
    }
    let outputURL = documentsURL.appendingPathComponent("newVideoWithLabel.mp4")
    if FileManager.default.fileExists(atPath: outputURL.path) {
        do {
            try FileManager.default.removeItem(at: outputURL)
        } catch {
            print("Error deleting existing file: \(error)")
        }
    }

    guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else {
        print("Error: could not create export session")
        return
    }
    exporter.outputURL = outputURL
    // The file is named .mp4, so export an MPEG-4 container. (The original
    // used AVFileTypeQuickTimeMovie, which mismatches the .mp4 extension and
    // can make the export fail.)
    exporter.outputFileType = AVFileTypeMPEG4
    exporter.shouldOptimizeForNetworkUse = true
    exporter.videoComposition = mainCompositionInst
    exporter.exportAsynchronously {
        self.exportDidFinish(session: exporter)
    }
}

/// Called when the export session completes. Saves the exported file to the
/// photo library and logs the URL of the newly created library asset.
func exportDidFinish(session: AVAssetExportSession) {
    guard session.status == .completed, let outputURL = session.outputURL else {
        // Surface the failure instead of silently ignoring it (the original
        // dropped both the non-completed status and session.error).
        print("Export did not complete: \(String(describing: session.error))")
        return
    }
    PHPhotoLibrary.shared().performChanges({
        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
    }) { saved, error in
        guard saved else {
            print("Saving to photo library failed: \(String(describing: error))")
            return
        }
        // Fetch the newest video asset — with creationDate ascending, the
        // last object is the one we just saved.
        let fetchOptions = PHFetchOptions()
        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: true)]
        guard let newestVideo = PHAsset.fetchAssets(with: .video, options: fetchOptions).lastObject else {
            print("Error: could not fetch the saved video asset")
            return
        }
        // Use the shared manager rather than allocating a throwaway one.
        PHImageManager.default().requestAVAsset(forVideo: newestVideo, options: nil, resultHandler: { avAsset, _, _ in
            guard let urlAsset = avAsset as? AVURLAsset else { return }
            print(urlAsset.url)
            DispatchQueue.main.async {
                print(urlAsset.url.absoluteString)
            }
        })
    }
}

import Foundation
import UIKit

extension UIImage {
    /// Renders `label` into a transparent image of `imageSize`, preserving the
    /// label's frame (position and size) inside the image.
    ///
    /// - Note: Unlike the original implementation, the label is NOT reparented
    ///   into a temporary view, so its place in the live view hierarchy is
    ///   untouched.
    class func createTransparentImageFrom(label: UILabel, imageSize: CGSize) -> UIImage {
        // Scale 0 renders at the device's screen scale instead of a
        // hard-coded 2x (wrong on 1x and 3x screens).
        UIGraphicsBeginImageContextWithOptions(imageSize, false, 0.0)
        defer { UIGraphicsEndImageContext() }
        guard let context = UIGraphicsGetCurrentContext() else { return UIImage() }

        // Draw the label at its own frame origin on a transparent background;
        // layer.render draws in the layer's local coordinates, so offset the
        // context by the frame origin first.
        context.translateBy(x: label.frame.origin.x, y: label.frame.origin.y)
        label.layer.render(in: context)

        return UIGraphicsGetImageFromCurrentImageContext() ?? UIImage()
    }
}

请看 @SuryaSubenthiran,我只是在处理这个问题。但问题是,这个教程已经很老了,而且它也没有解释如何只添加标签。不过我一定会告诉你它是否有助于我实现预期目标。我也面临类似问题,请检查此并给我建议。