Swift 4-在mac os上使用AVAssetWriter进行avfoundation屏幕和音频录制-视频冻结

Swift 4-在mac os上使用AVAssetWriter进行avfoundation屏幕和音频录制-视频冻结,swift,macos,avfoundation,recording,Swift,Macos,Avfoundation,Recording,我用光圈从屏幕上录制音频和视频。我们需要降低视频的比特率,所以我尝试重写它并用AVAssetWriter录制视频。我的实现基于CustomCamera项目,几乎可以正常工作。问题是在视频中,虽然音频工作正常,但几秒钟后它就被冻结了。你能帮我吗?我不知道问题出在哪里,可能是缓冲区或垃圾收集器收集某些变量的问题。谢谢 代码如下: // // ViewController.swift // CustomCamera // // Created by Taras Chernyshenko on 6

我用光圈从屏幕上录制音频和视频。我们需要降低视频的比特率,所以我尝试重写它并用AVAssetWriter录制视频。我的实现基于CustomCamera项目,几乎可以正常工作。问题是在视频中,虽然音频工作正常,但几秒钟后它就被冻结了。你能帮我吗?我不知道问题出在哪里,可能是缓冲区或垃圾收集器收集某些变量的问题。谢谢

代码如下:

//
//  ViewController.swift
//  CustomCamera
//
//  Created by Taras Chernyshenko on 6/27/17.
//  Copyright © 2017 Taras Chernyshenko. All rights reserved.
//
import AVFoundation
import Photos

class NewRecorder: NSObject,
  AVCaptureAudioDataOutputSampleBufferDelegate,
AVCaptureVideoDataOutputSampleBufferDelegate {

  private var session: AVCaptureSession = AVCaptureSession()
  private var deviceInput: AVCaptureScreenInput?
  private var previewLayer: AVCaptureVideoPreviewLayer?
  private var videoOutput: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
  private var audioOutput: AVCaptureAudioDataOutput = AVCaptureAudioDataOutput()

  //private var videoDevice: AVCaptureDevice = AVCaptureScreenInput(displayID: 69731840) //AVCaptureDevice.default(for: AVMediaType.video)!
  private var audioConnection: AVCaptureConnection?
  private var videoConnection: AVCaptureConnection?

  private var assetWriter: AVAssetWriter?
  private var audioInput: AVAssetWriterInput?
  private var videoInput: AVAssetWriterInput?

  private var fileManager: FileManager = FileManager()
  private var recordingURL: URL?

  private var isCameraRecording: Bool = false
  private var isRecordingSessionStarted: Bool = false

  private var recordingQueue = DispatchQueue(label: "recording.queue")


  func setup() {
    self.session.sessionPreset = AVCaptureSession.Preset.high

    self.recordingURL = URL(fileURLWithPath: "\(NSTemporaryDirectory() as String)/file.mp4")
    if self.fileManager.isDeletableFile(atPath: self.recordingURL!.path) {
      _ = try? self.fileManager.removeItem(atPath: self.recordingURL!.path)
    }

    self.assetWriter = try? AVAssetWriter(outputURL: self.recordingURL!,
                                          fileType: AVFileType.mp4)
    self.assetWriter!.movieFragmentInterval = kCMTimeInvalid
    self.assetWriter!.shouldOptimizeForNetworkUse = true

    let audioSettings = [
      AVFormatIDKey : kAudioFormatMPEG4AAC,
      AVNumberOfChannelsKey : 2,
      AVSampleRateKey : 44100.0,
      AVEncoderBitRateKey: 192000
      ] as [String : Any]

    let videoSettings = [
      AVVideoCodecKey : AVVideoCodecType.h264,
      AVVideoWidthKey : 1920,
      AVVideoHeightKey : 1080
      /*AVVideoCompressionPropertiesKey: [
        AVVideoAverageBitRateKey:  NSNumber(value: 5000000)
      ]*/
      ] as [String : Any]

    self.videoInput = AVAssetWriterInput(mediaType: AVMediaType.video,
                                         outputSettings: videoSettings)
    self.audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio,
                                         outputSettings: audioSettings)

    self.videoInput?.expectsMediaDataInRealTime = true
    self.audioInput?.expectsMediaDataInRealTime = true

    if self.assetWriter!.canAdd(self.videoInput!) {
      self.assetWriter?.add(self.videoInput!)
    }

    if self.assetWriter!.canAdd(self.audioInput!) {
      self.assetWriter?.add(self.audioInput!)
    }

    //self.deviceInput = try? AVCaptureDeviceInput(device: self.videoDevice)
    self.deviceInput = AVCaptureScreenInput(displayID: 724042646)
    self.deviceInput!.minFrameDuration = CMTimeMake(1, Int32(30))
    self.deviceInput!.capturesCursor = true
    self.deviceInput!.capturesMouseClicks = true

    if self.session.canAddInput(self.deviceInput!) {
      self.session.addInput(self.deviceInput!)
    }

    self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)

    //importent line of code what will did a trick
    //self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill

    //let rootLayer = self.view.layer
    //rootLayer.masksToBounds = true
    //self.previewLayer?.frame = CGRect(x: 0, y: 0, width: 1920, height: 1080)

    //rootLayer.insertSublayer(self.previewLayer!, at: 0)

    self.session.startRunning()

    DispatchQueue.main.async {
      self.session.beginConfiguration()

      if self.session.canAddOutput(self.videoOutput) {
        self.session.addOutput(self.videoOutput)
      }

      self.videoConnection = self.videoOutput.connection(with: AVMediaType.video)
      /*if self.videoConnection?.isVideoStabilizationSupported == true {
        self.videoConnection?.preferredVideoStabilizationMode = .auto
      }*/
      self.session.commitConfiguration()

      let audioDevice = AVCaptureDevice.default(for: AVMediaType.audio)
      let audioIn = try? AVCaptureDeviceInput(device: audioDevice!)

      if self.session.canAddInput(audioIn!) {
        self.session.addInput(audioIn!)
      }

      if self.session.canAddOutput(self.audioOutput) {
        self.session.addOutput(self.audioOutput)
      }

      self.audioConnection = self.audioOutput.connection(with: AVMediaType.audio)
    }
  }

  func startRecording() {
    if self.assetWriter?.startWriting() != true {
      print("error: \(self.assetWriter?.error.debugDescription ?? "")")
    }

    self.videoOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
    self.audioOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
  }

  func stopRecording() {
    self.videoOutput.setSampleBufferDelegate(nil, queue: nil)
    self.audioOutput.setSampleBufferDelegate(nil, queue: nil)

    self.assetWriter?.finishWriting {
      print("Saved in folder \(self.recordingURL!)")
      exit(0)
    }
  }
  func captureOutput(_ captureOutput: AVCaptureOutput, didOutput
    sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

    if !self.isRecordingSessionStarted {
      let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
      self.assetWriter?.startSession(atSourceTime: presentationTime)
      self.isRecordingSessionStarted = true
    }

    let description = CMSampleBufferGetFormatDescription(sampleBuffer)!

    if CMFormatDescriptionGetMediaType(description) == kCMMediaType_Audio {
      if self.audioInput!.isReadyForMoreMediaData {
        //print("appendSampleBuffer audio");
        self.audioInput?.append(sampleBuffer)
      }
    } else {
      if self.videoInput!.isReadyForMoreMediaData {
        //print("appendSampleBuffer video");
        if !self.videoInput!.append(sampleBuffer) {
          print("Error writing video buffer");
        }
      }
    }
  }
}

所以我通过移动这个代码来修复它

if self.session.canAddInput(self.deviceInput!) {
  self.session.addInput(self.deviceInput!)
}
在调用self.session.beginConfiguration()之后

self.session.beginConfiguration()
  if self.session.canAddInput(self.deviceInput!) {
    self.session.addInput(self.deviceInput!)
  }

  if self.session.canAddOutput(self.videoOutput) {
    self.session.addOutput(self.videoOutput)
  }

  self.videoConnection = self.videoOutput.connection(with: AVMediaType.video)

  self.session.commitConfiguration()