Swift AVAssetExportSession drastically reduces video quality


I have a local video URL and I am trying to put an overlay on it. Everything works, but the video quality drops dramatically after the export.

I narrowed it down and found that it happens when I set AVAssetExportSession.videoComposition (which I need in order to apply the overlay). If I set the export preset to Passthrough, the video loses no quality, but then the overlay does not show up.
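For reference, the export path that keeps the quality intact looks roughly like this. It is only a minimal sketch (exportPassthrough, localURL and outURL are illustrative names, not part of my project): the passthrough preset copies the original samples without re-encoding, which is also why it ignores the videoComposition and never draws the overlay.

import AVFoundation

// Minimal sketch: passthrough export copies the source samples without
// re-encoding, so quality is preserved, but any videoComposition (and
// therefore the overlay) is ignored.
func exportPassthrough(from localURL: URL, to outURL: URL) {
  let asset = AVAsset(url: localURL)
  guard let exporter = AVAssetExportSession(asset: asset,
                                            presetName: AVAssetExportPresetPassthrough) else { return }
  exporter.outputURL = outURL
  exporter.outputFileType = AVFileType.mov
  exporter.exportAsynchronously {
    print("Passthrough export finished with status \(exporter.status.rawValue)")
  }
}

The merge function that reproduces the quality loss is below.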

func merge3(url: URL) {
    activityMonitor.startAnimating()

    let firstAsset = AVAsset(url: url)

    // 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
    let mixComposition = AVMutableComposition()

    // 2 - Create two video tracks
    guard
      let firstTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
                                                      preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
      else {
        return
    }
    do {
      try firstTrack.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: firstAsset.duration),
                                     of: firstAsset.tracks(withMediaType: AVMediaType.video)[0],
                                     at: CMTime.zero)
    } catch {
      print("Failed to load first track")
      return
    }

    let s = UIScreen.main.bounds

    let imglogo = UIImage(named: "logo")
    let imglayer = CALayer()
    imglayer.contents = imglogo?.cgImage
    imglayer.frame = CGRect(x: s.width / 2 - 125, y: s.height / 2 - 125, width: 25, height: 25)
    imglayer.opacity = 1.0

    let videolayer = CALayer()
    videolayer.frame = CGRect(x: 0, y: 0, width: s.width, height: s.height)

    let parentlayer = CALayer()
    parentlayer.frame = CGRect(x: 0, y: 0, width: s.width, height: s.height)
    parentlayer.addSublayer(videolayer)
    parentlayer.addSublayer(imglayer)

    // 2.1
    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero,
                                                duration: firstAsset.duration)

    let layercomposition = AVMutableVideoComposition()
    layercomposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)
    layercomposition.renderSize = CGSize(width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)

    // instruction for watermark
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: firstAsset.duration)
    _ = mixComposition.tracks(withMediaType: AVMediaType.video)[0] as AVAssetTrack
    let layerinstruction = VideoHelper.videoCompositionInstruction1(firstTrack, asset: firstAsset)
    instruction.layerInstructions = [layerinstruction]
    layercomposition.instructions = [instruction]

    // 4 - Get path
    guard let documentDirectory = FileManager.default.urls(for: .documentDirectory,
                                                           in: .userDomainMask).first else {
                                                            return
    }
    let dateFormatter = DateFormatter()
    dateFormatter.dateStyle = .long
    dateFormatter.timeStyle = .short
    let date = dateFormatter.string(from: Date())
    let url = documentDirectory.appendingPathComponent("mergeVideo-\(date).mov")

    // 5 - Create Exporter
    guard let exporter = AVAssetExportSession(asset: mixComposition,
                                              presetName: AVAssetExportPresetHighestQuality) else {
                                                return
    }
    exporter.outputURL = url
    exporter.outputFileType = AVFileType.mov
    exporter.shouldOptimizeForNetworkUse = true
    exporter.videoComposition = layercomposition

    // 6 - Perform the Export
    exporter.exportAsynchronously() {
      DispatchQueue.main.async {
        self.exportDidFinish(exporter)
      }
    }
  }
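One thing worth calling out in the merge function above: layercomposition.renderSize is taken from UIScreen.main.bounds, which is measured in points (for example 390 x 844 on a recent iPhone), while the source video track is usually much larger in pixels (1080 x 1920 or more). renderSize is the resolution the composition is rendered and re-encoded at, so a render size smaller than the source downscales the export. As a rough sketch only (naturalRenderSize is an illustrative helper name, and I have not confirmed this is the whole story), the render size could instead be derived from the video track itself:

import AVFoundation
import CoreGraphics

// Sketch: derive the composition render size from the source track instead
// of the screen, accounting for rotated (portrait) footage via the
// preferred transform. Assumes the asset has at least one video track.
func naturalRenderSize(for asset: AVAsset) -> CGSize {
  guard let track = asset.tracks(withMediaType: .video).first else { return .zero }
  let size = track.naturalSize.applying(track.preferredTransform)
  return CGSize(width: abs(size.width), height: abs(size.height))
}

// Hypothetical usage inside merge3:
// layercomposition.renderSize = naturalRenderSize(for: firstAsset)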

The video quality depends on the current device.

@LeoDabus That can't be the problem, because on the exact same device the exact same video keeps its high quality as long as I don't add the overlay. Adding the overlay is what degrades the quality.

Are you saying that if you export the same video without adding the overlay, the quality doesn't change?

@LeoDabus Yup. But that's because if I don't want the overlay I don't need to set the export session's videoComposition at all, or I can use the AVAssetExportPresetPassthrough preset. In either of those cases the video exports in perfect quality.

@connorvo Running into the same issue here, did you ever find a solution?
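One way to put numbers on the quality drop discussed in these comments is to compare the source track against the exported file. A small diagnostic sketch (printVideoSpecs is just an illustrative name):

import AVFoundation

// Diagnostic sketch: print resolution, frame rate and estimated bitrate of a
// video file, so the source and the exported result can be compared.
func printVideoSpecs(for url: URL, label: String) {
  let asset = AVAsset(url: url)
  guard let track = asset.tracks(withMediaType: .video).first else {
    print("\(label): no video track")
    return
  }
  let size = track.naturalSize.applying(track.preferredTransform)
  print("\(label): \(abs(size.width)) x \(abs(size.height)) px, " +
        "\(track.nominalFrameRate) fps, " +
        "\(track.estimatedDataRate / 1_000_000) Mbit/s")
}

The VideoHelper.videoCompositionInstruction1 helper referenced in the merge function is shown below.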
static func videoCompositionInstruction1(_ track: AVCompositionTrack, asset: AVAsset)
    -> AVMutableVideoCompositionLayerInstruction {
      let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
      let assetTrack = asset.tracks(withMediaType: .video)[0]

      let transform = assetTrack.preferredTransform
      let assetInfo = orientationFromTransform(transform)

      var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
      if assetInfo.isPortrait { // not hit
        scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
        let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
        instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor), at: CMTime.zero)
      } else { // hit
        let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
        var concat = assetTrack.preferredTransform.concatenating(scaleFactor)
          .concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 4))
        if assetInfo.orientation == .down { // not hit
          let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
          let windowBounds = UIScreen.main.bounds
          let yFix = assetTrack.naturalSize.height + windowBounds.height
          let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
          concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
        }
        instruction.setTransform(concat, at: CMTime.zero)
      }

      return instruction
  }
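The orientationFromTransform helper used above is not shown in the question. The standard version of this helper, which the code appears to follow (an assumed reconstruction, so it may differ slightly from the actual VideoHelper method), reads the rotation out of the preferred transform:

// Assumed reconstruction of the missing helper; it would sit in VideoHelper
// next to videoCompositionInstruction1 and derives the capture orientation
// from the track's preferred transform.
static func orientationFromTransform(_ transform: CGAffineTransform)
  -> (orientation: UIImage.Orientation, isPortrait: Bool) {
    var assetOrientation = UIImage.Orientation.up
    var isPortrait = false
    if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
      assetOrientation = .right   // rotated 90 degrees, i.e. portrait
      isPortrait = true
    } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
      assetOrientation = .left    // rotated -90 degrees, also portrait
      isPortrait = true
    } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
      assetOrientation = .up      // identity transform, landscape
    } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
      assetOrientation = .down    // rotated 180 degrees, upside down
    }
    return (assetOrientation, isPortrait)
}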

func exportDidFinish(_ session: AVAssetExportSession) {
    guard
      session.status == AVAssetExportSession.Status.completed,
      let outputURL = session.outputURL
      else {
        return
    }

    let saveVideoToPhotos = {
      PHPhotoLibrary.shared().performChanges({
        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
      }) { saved, error in
        let success = saved && (error == nil)
        let title = success ? "Success" : "Error"
        let message = success ? "Video saved" : "Failed to save video"

        let alert = UIAlertController(title: title, message: message, preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "OK", style: UIAlertAction.Style.cancel, handler: nil))
        self.present(alert, animated: true, completion: nil)
      }
    }

    // Ensure permission to access Photo Library
    if PHPhotoLibrary.authorizationStatus() != .authorized {
      PHPhotoLibrary.requestAuthorization { status in
        if status == .authorized {
          saveVideoToPhotos()
        }
      }
    } else {
      saveVideoToPhotos()
    }
  }