iOS: Distorted video output from CGBitmapContext and CVPixelBuffer


I have been trying (for days now) to debug this code, which generates a video from CGImages. The CGImages are actually created from a CGBitmapContext that I draw into in my application code. Here I simply draw a diagonal yellow line and render a number of frames of that static image. Every frame of the video that shows up at the write path, however, comes out distorted in the same way (each frame is identical).

import Foundation
import CoreGraphics
import CoreMedia
import QuartzCore
import UIKit // needed for UIColor, used below
import AVFoundation

func exportVideo(
    width: Int = 500,
    height: Int = 500,
    numberOfFrames: Int = 100
) {
    let vidURL = NSURL.fileURL(withPath: "/Users/me/Desktop/testVideo.mp4")

    try? FileManager.default.removeItem(at: vidURL)

    let settings: [String: Any] = [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoWidthKey: width,
        AVVideoHeightKey: height
    ]

    let assetWriter = try! AVAssetWriter(url: vidURL, fileType: .m4v)
    let writerInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: settings)
    assetWriter.add(writerInput)

    let queue = DispatchQueue.global(qos: .background)

    writerInput.expectsMediaDataInRealTime = false

    let inputAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: nil)

    assetWriter.startWriting()

    assetWriter.startSession(atSourceTime: CMTime.zero)

    writerInput.requestMediaDataWhenReady(on: queue) {

        // Draw one pixel buffer per frame index and hand it to the adaptor.
        for i in 0..<numberOfFrames where writerInput.isReadyForMoreMediaData {

            guard let buffer = newPixelBufferFrom(width: width, height: height) else {
                fatalError()
            }

            inputAdaptor.append(
                buffer,
                withPresentationTime: CMTime(seconds: Double(i), preferredTimescale: CMTimeScale(10))
            )
        }

        writerInput.markAsFinished()

        assetWriter.finishWriting { }
    }
}

private func newPixelBufferFrom(
    width: Int,
    height: Int
) -> CVPixelBuffer? {

    let options: [String: Any] = [
        kCVPixelBufferCGImageCompatibilityKey as String: true,
        kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
    ]

    var pxbuffer: CVPixelBuffer?
    let status = CVPixelBufferCreate(kCFAllocatorDefault,
                                     width,
                                     height,
                                     kCVPixelFormatType_32ARGB,
                                     options as CFDictionary?,
                                     &pxbuffer)

    assert(status == kCVReturnSuccess && pxbuffer != nil, "newPixelBuffer failed")

    CVPixelBufferLockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))

    let pxdata = CVPixelBufferGetBaseAddress(pxbuffer!)

    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()

    guard let context = CGContext(
        data: pxdata,
        width: width,
        height: height,
        bitsPerComponent: 8,
        bytesPerRow: 4 * width, // assumes no row padding; see the answer below
        space: rgbColorSpace,
        bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue
        ) else {
            fatalError()
    }

    // Diagonal yellow line across the frame.
    context.setStrokeColor(UIColor.yellow.cgColor)
    context.setLineWidth(5)
    context.move(to: .init(x: 0, y: 0))
    context.addLine(to: .init(x: width, y: height))
    context.strokePath()

    CVPixelBufferUnlockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
    return pxbuffer
}


So I stumbled upon the "answer" to the problem: it turns out that the width and height must be multiples of 4. I will just say that I hope this post helps some poor soul in the future, because the error codes, warnings, API members, and documentation were of absolutely no help to me.
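The "multiple of 4" behavior is consistent with Core Video padding each row of the pixel buffer for alignment: the actual row stride can be larger than 4 * width, while the CGContext above assumes exactly 4 * width, so each drawn row lands slightly offset from the buffer's rows. A stride-aware variant of newPixelBufferFrom sidesteps the dimension restriction entirely. A minimal sketch (the name newAlignedPixelBuffer is mine; the drawing code is unchanged) that asks the buffer for its real stride via CVPixelBufferGetBytesPerRow:

import CoreGraphics
import CoreVideo
import UIKit

// Hypothetical stride-safe variant of newPixelBufferFrom: instead of
// assuming the buffer is exactly 4 * width bytes per row, ask Core Video
// for the actual row stride and hand that to the CGContext.
private func newAlignedPixelBuffer(width: Int, height: Int) -> CVPixelBuffer? {
    let options: [String: Any] = [
        kCVPixelBufferCGImageCompatibilityKey as String: true,
        kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
    ]

    var pxbuffer: CVPixelBuffer?
    guard CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                              kCVPixelFormatType_32ARGB,
                              options as CFDictionary?, &pxbuffer) == kCVReturnSuccess,
          let buffer = pxbuffer else { return nil }

    CVPixelBufferLockBaseAddress(buffer, CVPixelBufferLockFlags(rawValue: 0))
    defer { CVPixelBufferUnlockBaseAddress(buffer, CVPixelBufferLockFlags(rawValue: 0)) }

    // The stride Core Video actually allocated; may be larger than 4 * width.
    let bytesPerRow = CVPixelBufferGetBytesPerRow(buffer)

    guard let context = CGContext(
        data: CVPixelBufferGetBaseAddress(buffer),
        width: width,
        height: height,
        bitsPerComponent: 8,
        bytesPerRow: bytesPerRow,
        space: CGColorSpaceCreateDeviceRGB(),
        bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue
    ) else { return nil }

    // Same diagonal yellow line as the original code.
    context.setStrokeColor(UIColor.yellow.cgColor)
    context.setLineWidth(5)
    context.move(to: .init(x: 0, y: 0))
    context.addLine(to: .init(x: width, y: height))
    context.strokePath()

    return buffer
}

With the stride handled, the writer side needs no changes: exportVideo can call this in place of newPixelBufferFrom with arbitrary dimensions.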

Thanks!!! In my case it was 16. I was assigning a buffer directly to the base address pointer, and that buffer needs to be sized so that the width/height are multiples of 16. I really wish Apple would document this!
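If the dimensions (or a hand-allocated backing buffer) do have to respect an alignment, a one-line helper that rounds up is enough. A sketch, with the helper name alignedUp being hypothetical and the factor of 16 taken from this comment:

// Round a pixel dimension (or byte stride) up to the next multiple of
// `alignment`: 4 per the answer above, 16 in this commenter's case.
func alignedUp(_ length: Int, to alignment: Int) -> Int {
    ((length + alignment - 1) / alignment) * alignment
}

// Example: alignedUp(500, to: 16) == 512, so a 500-pixel-wide frame
// would need a 512-pixel-wide (2048 bytes per row at 32 bpp) allocation.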