iOS AVAssetWriterInput append fails with error code -11800 (AVErrorUnknown, -12780)

I'm attempting to capture camera video in memory using AVCaptureSession so that I can later write the video data out to a movie file. While I've been able to successfully start a capture session, I have not been able to successfully write the captured CMSampleBuffers to a compressed movie file using AVAssetWriter.

Appending the sample buffers via AVAssetWriterInput's append method fails, and when I inspect the AVAssetWriter's error property, I get the following:

Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={NSUnderlyingError=0x17005d070 {Error Domain=NSOSStatusErrorDomain Code=-12780 "(null)"}, NSLocalizedFailureReason=An unknown error occurred (-12780), NSLocalizedDescription=The operation could not be completed}

As far as I can tell, -11800 indicates an unknown error, but I have not found any information on the -12780 error code, which appears to be undocumented. Below I have pasted the main files from a sample project I set up to demonstrate the issue.
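
For reference, here is a minimal sketch (not part of the sample project) of how the underlying OSStatus can be dug out of the writer's error for diagnosis; writer stands for the failing AVAssetWriter:

if let error = writer.error as NSError?,
   let underlying = error.userInfo[NSUnderlyingErrorKey] as? NSError {
    // -11800 is the generic AVErrorUnknown; the underlying code
    // (-12780 here) comes from the lower-level media stack.
    print("Top-level code: \(error.code), underlying code: \(underlying.code)")
}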

Any guidance would be greatly appreciated. Thanks!

ViewController.swift

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    private let recordingClipQueue = DispatchQueue(label: "com.example.recordingClipQueue")
    private let videoDataOutputQueue = DispatchQueue(label: "com.example.videoDataOutputQueue")
    private let session = AVCaptureSession()
    private var backfillSampleBufferList = [CMSampleBuffer]()

    override func viewDidLoad() {
        super.viewDidLoad()

        session.sessionPreset = AVCaptureSessionPreset640x480

        let videoDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        let videoDeviceInput: AVCaptureDeviceInput

        do {
            videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
        } catch {
            print("Error creating device input from video device: \(error).")
            return
        }

        guard session.canAddInput(videoDeviceInput) else {
            print("Could not add video device input to capture session.")
            return
        }

        session.addInput(videoDeviceInput)

        let videoDataOutput = AVCaptureVideoDataOutput()
        videoDataOutput.videoSettings = [ kCVPixelBufferPixelFormatTypeKey as NSString : Int(kCMPixelFormat_32BGRA) ]
        videoDataOutput.alwaysDiscardsLateVideoFrames = true
        videoDataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue)

        guard session.canAddOutput(videoDataOutput) else {
            print("Could not add video data output to capture session.")
            return
        }

        session.addOutput(videoDataOutput)
        videoDataOutput.connection(withMediaType: AVMediaTypeVideo).isEnabled = true

        session.startRunning()
    }

    private func backfillSizeInSeconds() -> Double {
        if backfillSampleBufferList.count < 1 {
            return 0.0
        }

        let earliestSampleBuffer = backfillSampleBufferList.first!
        let latestSampleBuffer = backfillSampleBufferList.last!

        let earliestSampleBufferPTS = CMSampleBufferGetOutputPresentationTimeStamp(earliestSampleBuffer).value
        let latestSampleBufferPTS = CMSampleBufferGetOutputPresentationTimeStamp(latestSampleBuffer).value
        let timescale = CMSampleBufferGetOutputPresentationTimeStamp(latestSampleBuffer).timescale
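        // NOTE: this assumes both sample buffers share the same timescale,
        // which holds for frames delivered by a single capture session.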

        return Double(latestSampleBufferPTS - earliestSampleBufferPTS) / Double(timescale)
    }

    private func createClipFromBackfill() {
        guard backfillSampleBufferList.count > 0 else {
            print("createClipFromBackfill() called before any samples were recorded.")
            return
        }

        let clipURL = URL(fileURLWithPath:
            NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] +
            "/recorded_clip.mp4")

        if FileManager.default.fileExists(atPath: clipURL.path) {
            do {
                try FileManager.default.removeItem(atPath: clipURL.path)
            } catch {
                print("Could not delete existing clip file: \(error).")
            }
        }

        var _videoFileWriter: AVAssetWriter?
        do {
            _videoFileWriter = try AVAssetWriter(url: clipURL, fileType: AVFileTypeQuickTimeMovie)
        } catch {
            print("Could not create video file writer: \(error).")
            return
        }

        guard let videoFileWriter = _videoFileWriter else {
            print("Video writer was nil.")
            return
        }

        let settingsAssistant = AVOutputSettingsAssistant(preset: AVOutputSettingsPreset640x480)!

        guard videoFileWriter.canApply(outputSettings: settingsAssistant.videoSettings, forMediaType: AVMediaTypeVideo) else {
            print("Video file writer could not apply video output settings.")
            return
        }

        let earliestRecordedSampleBuffer = backfillSampleBufferList.first!

        let _formatDescription = CMSampleBufferGetFormatDescription(earliestRecordedSampleBuffer)
        guard let formatDescription = _formatDescription else {
            print("Earliest recording pixel buffer format description was nil.")
            return
        }

        let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo,
                                                  outputSettings: settingsAssistant.videoSettings,
                                                  sourceFormatHint: formatDescription)

        guard videoFileWriter.canAdd(videoWriterInput) else {
            print("Could not add video writer input to video file writer.")
            return
        }

        videoFileWriter.add(videoWriterInput)

        guard videoFileWriter.startWriting() else {
            print("Video file writer not ready to write file.")
            return
        }

        videoFileWriter.startSession(atSourceTime: CMSampleBufferGetOutputPresentationTimeStamp(earliestRecordedSampleBuffer))

        videoWriterInput.requestMediaDataWhenReady(on: recordingClipQueue) {
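            // NOTE: backfillSampleBufferList is also appended to on
            // videoDataOutputQueue; a production implementation would need to
            // synchronize access to the array across the two queues.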
            while videoWriterInput.isReadyForMoreMediaData {
                if self.backfillSampleBufferList.count > 0 {
                    let sampleBufferToAppend = self.backfillSampleBufferList.first!.deepCopy()
                    let appendSampleBufferSucceeded = videoWriterInput.append(sampleBufferToAppend)
                    if !appendSampleBufferSucceeded {
                        print("Failed to append sample buffer to asset writer input: \(videoFileWriter.error!)")
                        print("Video file writer status: \(videoFileWriter.status.rawValue)")
                    }

                    self.backfillSampleBufferList.remove(at: 0)
                } else {
                    videoWriterInput.markAsFinished()
                    videoFileWriter.finishWriting {
                        print("Saved clip to \(clipURL)")
                    }

                    break
                }
            }
        }
    }

    // MARK: AVCaptureVideoDataOutputSampleBufferDelegate

    func captureOutput(_ captureOutput: AVCaptureOutput!,
                       didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
                       from connection: AVCaptureConnection!) {
        guard let buffer = sampleBuffer else {
            print("Captured sample buffer was nil.")
            return
        }

        let sampleBufferCopy = buffer.deepCopy()

        backfillSampleBufferList.append(sampleBufferCopy)

        if backfillSizeInSeconds() > 3.0 {
            session.stopRunning()
            createClipFromBackfill()
        }
    }

    func captureOutput(_ captureOutput: AVCaptureOutput!,
                       didDrop sampleBuffer: CMSampleBuffer!,
                       from connection: AVCaptureConnection!) {
        print("Sample buffer dropped.")
    }

}
CMSampleBuffer+Copy.swift

import CoreMedia
import CoreVideo

extension CVPixelBuffer {
    func deepCopy() -> CVPixelBuffer {
        precondition(CFGetTypeID(self) == CVPixelBufferGetTypeID(), "deepCopy() cannot copy a non-CVPixelBuffer")

        var _copy : CVPixelBuffer?
        CVPixelBufferCreate(
            nil,
            CVPixelBufferGetWidth(self),
            CVPixelBufferGetHeight(self),
            CVPixelBufferGetPixelFormatType(self),
            CVBufferGetAttachments(self, CVAttachmentMode.shouldPropagate),
            &_copy)

        guard let copy = _copy else {
            print("Pixel buffer copy was nil.")
            fatalError()
        }

        CVBufferPropagateAttachments(self, copy)
        CVPixelBufferLockBaseAddress(self, CVPixelBufferLockFlags.readOnly)
        CVPixelBufferLockBaseAddress(copy, CVPixelBufferLockFlags(rawValue: 0))

        let sourceBaseAddress = CVPixelBufferGetBaseAddress(self)
        let copyBaseAddress = CVPixelBufferGetBaseAddress(copy)
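        // NOTE: a single memcpy is only valid for non-planar pixel formats
        // such as 32BGRA; a planar format would require a per-plane copy.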
        memcpy(copyBaseAddress, sourceBaseAddress, CVPixelBufferGetHeight(self) * CVPixelBufferGetBytesPerRow(self))

        CVPixelBufferUnlockBaseAddress(copy, CVPixelBufferLockFlags(rawValue: 0))
        CVPixelBufferUnlockBaseAddress(self, CVPixelBufferLockFlags.readOnly)

        return copy
    }
}

extension CMSampleBuffer {
    func deepCopy() -> CMSampleBuffer {
        let _pixelBuffer = CMSampleBufferGetImageBuffer(self)
        guard let pixelBuffer = _pixelBuffer else {
            print("Pixel buffer to copy was nil.")
            fatalError()
        }
        let pixelBufferCopy = pixelBuffer.deepCopy()

        let _formatDescription = CMSampleBufferGetFormatDescription(self)
        guard let formatDescription = _formatDescription else {
            print("Format description to copy was nil.")
            fatalError()
        }

        var timingInfo = kCMTimingInfoInvalid
        let getTimingInfoResult = CMSampleBufferGetSampleTimingInfo(self, 0, &timingInfo)
        guard getTimingInfoResult == noErr else {
            print("Could not get timing info to copy: \(getTimingInfoResult).")
            fatalError()
        }

        timingInfo.presentationTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(self)

        var _copy : CMSampleBuffer?
        let createCopyResult = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault,
                                                                  pixelBufferCopy,
                                                                  true,
                                                                  nil,
                                                                  nil,
                                                                  formatDescription,
                                                                  &timingInfo,
                                                                  &_copy);

        guard createCopyResult == noErr else {
            print("Error creating copy of sample buffer: \(createCopyResult).")
            fatalError()
        }

        guard let copy = _copy else {
            print("Copied sample buffer was nil.")
            fatalError()
        }

        return copy
    }
}

After more research and experimentation, it appears that appending the CVPixelBuffers of my stored CMSampleBuffers to the AVAssetWriterInput through an AVAssetWriterInputPixelBufferAdaptor works without producing an error.

Below is a modified version of the ViewController.swift implementation that uses an AVAssetWriterInputPixelBufferAdaptor to append the pixel buffers.

ViewController.swift

import UIKit
import AVFoundation
import Photos

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    private let recordingClipQueue = DispatchQueue(label: "com.example.recordingClipQueue")
    private let videoDataOutputQueue = DispatchQueue(label: "com.example.videoDataOutputQueue")
    private let session = AVCaptureSession()
    private var backfillSampleBufferList = [CMSampleBuffer]()

    override func viewDidLoad() {
        super.viewDidLoad()

        session.sessionPreset = AVCaptureSessionPreset640x480

        let videoDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        let videoDeviceInput: AVCaptureDeviceInput

        do {
            videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
        } catch {
            print("Error creating device input from video device: \(error).")
            return
        }

        guard session.canAddInput(videoDeviceInput) else {
            print("Could not add video device input to capture session.")
            return
        }

        session.addInput(videoDeviceInput)

        let videoDataOutput = AVCaptureVideoDataOutput()
        videoDataOutput.videoSettings = [ kCVPixelBufferPixelFormatTypeKey as NSString : Int(kCMPixelFormat_32BGRA) ]
        videoDataOutput.alwaysDiscardsLateVideoFrames = true
        videoDataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue)

        guard session.canAddOutput(videoDataOutput) else {
            print("Could not add video data output to capture session.")
            return
        }

        session.addOutput(videoDataOutput)
        videoDataOutput.connection(withMediaType: AVMediaTypeVideo).isEnabled = true

        session.startRunning()
    }

    private func backfillSizeInSeconds() -> Double {
        if backfillSampleBufferList.count < 1 {
            return 0.0
        }

        let earliestSampleBuffer = backfillSampleBufferList.first!
        let latestSampleBuffer = backfillSampleBufferList.last!

        let earliestSampleBufferPTS = CMSampleBufferGetOutputPresentationTimeStamp(earliestSampleBuffer).value
        let latestSampleBufferPTS = CMSampleBufferGetOutputPresentationTimeStamp(latestSampleBuffer).value
        let timescale = CMSampleBufferGetOutputPresentationTimeStamp(latestSampleBuffer).timescale

        return Double(latestSampleBufferPTS - earliestSampleBufferPTS) / Double(timescale)
    }

    private func createClipFromBackfill() {
        guard backfillSampleBufferList.count > 0 else {
            print("createClipFromBackfill() called before any samples were recorded.")
            return
        }

        let clipURL = URL(fileURLWithPath:
            NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] +
            "/recorded_clip.mp4")

        if FileManager.default.fileExists(atPath: clipURL.path) {
            do {
                try FileManager.default.removeItem(atPath: clipURL.path)
            } catch {
                print("Could not delete existing clip file: \(error).")
            }
        }

        var _videoFileWriter: AVAssetWriter?
        do {
            _videoFileWriter = try AVAssetWriter(url: clipURL, fileType: AVFileTypeMPEG4)
        } catch {
            print("Could not create video file writer: \(error).")
            return
        }

        guard let videoFileWriter = _videoFileWriter else {
            print("Video writer was nil.")
            return
        }

        let settingsAssistant = AVOutputSettingsAssistant(preset: AVOutputSettingsPreset640x480)!

        guard videoFileWriter.canApply(outputSettings: settingsAssistant.videoSettings, forMediaType: AVMediaTypeVideo) else {
            print("Video file writer could not apply video output settings.")
            return
        }

        let earliestRecordedSampleBuffer = backfillSampleBufferList.first!

        let _formatDescription = CMSampleBufferGetFormatDescription(earliestRecordedSampleBuffer)
        guard let formatDescription = _formatDescription else {
            print("Earliest recording pixel buffer format description was nil.")
            return
        }

        let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo,
                                                  outputSettings: settingsAssistant.videoSettings,
                                                  sourceFormatHint: formatDescription)

        guard videoFileWriter.canAdd(videoWriterInput) else {
            print("Could not add video writer input to video file writer.")
            return
        }

        videoFileWriter.add(videoWriterInput)

        let pixelAdapterBufferAttributes = [ kCVPixelBufferPixelFormatTypeKey as String : Int(kCMPixelFormat_32BGRA) ]
        let pixelAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput,
                                                                sourcePixelBufferAttributes: pixelAdapterBufferAttributes)
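        // The adaptor above supplies raw CVPixelBuffers to the writer input;
        // appending these, rather than the re-created CMSampleBuffers, is what
        // avoids the -11800/-12780 error seen with AVAssetWriterInput.append(_:).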

        guard videoFileWriter.startWriting() else {
            print("Video file writer not ready to write file.")
            return
        }

        videoFileWriter.startSession(atSourceTime: CMSampleBufferGetOutputPresentationTimeStamp(earliestRecordedSampleBuffer))

        videoWriterInput.requestMediaDataWhenReady(on: recordingClipQueue) {
            while videoWriterInput.isReadyForMoreMediaData {
                if self.backfillSampleBufferList.count > 0 {
                    let sampleBufferToAppend = self.backfillSampleBufferList.first!.deepCopy()
                    let appendSampleBufferSucceeded = pixelAdapter.append(CMSampleBufferGetImageBuffer(sampleBufferToAppend)!,
                                                                          withPresentationTime: CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferToAppend))
                    if !appendSampleBufferSucceeded {
                        print("Failed to append sample buffer to asset writer input: \(videoFileWriter.error!)")
                        print("Video file writer status: \(videoFileWriter.status.rawValue)")
                    }

                    self.backfillSampleBufferList.remove(at: 0)
                } else {
                    videoWriterInput.markAsFinished()
                    videoFileWriter.finishWriting {
                        print("Saved clip to \(clipURL)")
                    }

                    break
                }
            }
        }
    }

    // MARK: AVCaptureVideoDataOutputSampleBufferDelegate

    func captureOutput(_ captureOutput: AVCaptureOutput!,
                       didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
                       from connection: AVCaptureConnection!) {
        guard let buffer = sampleBuffer else {
            print("Captured sample buffer was nil.")
            return
        }

        let sampleBufferCopy = buffer.deepCopy()

        backfillSampleBufferList.append(sampleBufferCopy)

        if backfillSizeInSeconds() > 3.0 {
            session.stopRunning()
            createClipFromBackfill()
        }
    }

    func captureOutput(_ captureOutput: AVCaptureOutput!,
                       didDrop sampleBuffer: CMSampleBuffer!,
                       from connection: AVCaptureConnection!) {
        print("Sample buffer dropped.")
    }

}
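
As a quick sanity check (my own addition, not part of the original listings), the finished file can be inspected by loading it with AVURLAsset and printing its duration once finishWriting's completion handler has run; clipURL is the same URL used in createClipFromBackfill():

let asset = AVURLAsset(url: clipURL)
// duration is computed synchronously here, which is fine for a debug check.
print("Recorded clip duration: \(CMTimeGetSeconds(asset.duration)) seconds")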