
iOS Swift ReplayKit: AVAssetWriter video and audio out of sync when converting to HLS


In iOS/Swift, I am using ReplayKit with AVAssetWriter to create a .mov or .mp4 video of the user's screen together with microphone audio.

When I create the video, it plays back fine locally and the audio and video are in sync. However, when I convert the video to HLS (HTTP Live Streaming) format using AWS MediaConvert, the audio falls out of sync with the video. Does anyone know why this happens? I have read about timecoding; perhaps I need to add a timecode to my video? Is there a simpler way to fix this, or has anyone run into a similar issue?

private func startRecordingVideo() {

    //Initialize MP4 output file for the screen-recorded video
    let fileManager = FileManager.default
    let urls = fileManager.urls(for: .documentDirectory, in: .userDomainMask)
    guard let documentDirectory: NSURL = urls.first as NSURL? else {
        fatalError("documentDir Error")
    }
    videoOutputURL = documentDirectory.appendingPathComponent("OutputVideo.mov")

    if FileManager.default.fileExists(atPath: videoOutputURL!.path) {
        do {
            try FileManager.default.removeItem(atPath: videoOutputURL!.path)
        } catch {
            fatalError("Unable to delete file: \(error) : \(#function).")
        }
    }

    //Initialize the asset writer that writes the video to the user's storage
    assetWriter = try! AVAssetWriter(outputURL: videoOutputURL!, fileType: AVFileType.mov)

    let videoOutputSettings: Dictionary<String, Any> = [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoWidthKey: UIScreen.main.bounds.size.width,
        AVVideoHeightKey: UIScreen.main.bounds.size.height,
    ]

    let audioSettings = [
        AVFormatIDKey: kAudioFormatMPEG4AAC,
        AVNumberOfChannelsKey: 1,
        AVSampleRateKey: 44100.0,
        AVEncoderBitRateKey: 96000,
    ] as [String: Any]

    videoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoOutputSettings)
    audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: audioSettings)

    videoInput?.expectsMediaDataInRealTime = true
    audioInput?.expectsMediaDataInRealTime = true

    assetWriter?.add(videoInput!)
    assetWriter?.add(audioInput!)

    let sharedRecorder = RPScreenRecorder.shared()
    sharedRecorder.isMicrophoneEnabled = true
    sharedRecorder.startCapture(handler: { (sample, bufferType, error) in

        //Audio/video buffer data returned from the screen recorder
        if CMSampleBufferDataIsReady(sample) {

            DispatchQueue.main.async { [weak self] in
                //Start the asset writer if it has not yet started
                if self?.assetWriter?.status == AVAssetWriter.Status.unknown {
                    if !(self?.assetWriter?.startWriting())! {
                        return
                    }
                    self?.assetWriter?.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sample))
                    self?.startSession = true
                }
            }

            //Handle errors
            if self.assetWriter?.status == AVAssetWriter.Status.failed {
                print("Error occurred, status = \(String(describing: self.assetWriter?.status.rawValue)), \(String(describing: self.assetWriter?.error!.localizedDescription)) \(String(describing: self.assetWriter?.error))")
                return
            }

            //Append video buffers to the AVAssetWriter video input
            if bufferType == .video {
                if self.videoInput!.isReadyForMoreMediaData && self.startSession {
                    self.videoInput?.append(sample)
                }
            }

            //Append microphone audio buffers to the AVAssetWriter audio input
            if bufferType == .audioMic {
                print("MIC BUFFER RECEIVED")
                if self.audioInput!.isReadyForMoreMediaData {
                    print("Audio Buffer Came")
                    self.audioInput?.append(sample)
                }
            }
        }

    }, completionHandler: { error in
        print("COMP HANDLER ERROR", error?.localizedDescription ?? "nil")
    })
}

private func stopRecordingVideo() {
    self.startSession = false
    RPScreenRecorder.shared().stopCapture { (error) in
        self.videoInput?.markAsFinished()
        self.audioInput?.markAsFinished()

        if error == nil {
            self.assetWriter?.finishWriting {
                self.startSession = false
                print("FINISHED WRITING!")
                DispatchQueue.main.async {
                    self.setUpVideoPreview()
                }
            }
        } else {
            //DELETE DIRECTORY
        }
    }
}

private func stopRecordingVideo(){
    self.startSession = false
    RPScreenRecorder.shared().stopCapture{ (error) in
        self.videoInput?.markAsFinished()
        self.audioInput?.markAsFinished()

        if error == nil{
            self.assetWriter?.finishWriting{
                self.startSession = false
                print("FINISHED WRITING!")
                DispatchQueue.main.async {
                    self.setUpVideoPreview()
                }
            }
        }else{
            //DELETE DIRECTORY
        }
    }

}

I believe you have either already solved this or moved on by now, but for all the Googlers out there: you basically have to set mediaTimeScale on the video input. You can see an example of this.

Here is the relevant portion of that code (this code uses an AVSampleBufferDisplayLayer, but the same concept applies):

// Presentation timestamp of the incoming sample, in seconds
double pts = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));

if (!timebaseSet && pts != 0)
{
    timebaseSet = true;

    // Drive the display layer from its own timebase, anchored at the
    // first sample's presentation time
    CMTimebaseRef controlTimebase;
    CMTimebaseCreateWithMasterClock(CFAllocatorGetDefault(), CMClockGetHostTimeClock(), &controlTimebase);

    displayLayer.controlTimebase = controlTimebase;
    // CMTimeMakeWithSeconds avoids truncating the fractional part of pts
    // (CMTimeMake(pts, 1) would round down to whole seconds)
    CMTimebaseSetTime(displayLayer.controlTimebase, CMTimeMakeWithSeconds(pts, 600));
    CMTimebaseSetRate(displayLayer.controlTimebase, 1.0);
}

if ([displayLayer isReadyForMoreMediaData])
{
    [displayLayer enqueueSampleBuffer:sampleBuffer];
}

Hey @PatPatchPatrick! Did you ever find a working example of this? Doesn't ReplayKit introduce a delay because of the user-permission prompt? How did you need to synchronize?

Hi. I ended up using AVCaptureSession to record the video/audio instead of ReplayKit. ReplayKit could record the video/audio locally, but had problems when the video was converted to other formats. I believe these can be resolved by timecoding the media or by setting the mediaTimeScale that @derickito mentioned in his reply, but I haven't had the chance to try it myself.

Hi @PatPatchPatrick! Thanks so much for your reply! I was wondering if there is an example of capturing video and audio with AVCaptureSession? You're right, at the moment I'm trying
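
For anyone with the same follow-up question, a minimal AVCaptureSession recording setup might look like the sketch below. This is an assumed generic camera-plus-microphone example, not PatPatchPatrick's actual code, and unlike ReplayKit it records the camera rather than the screen; AVCaptureMovieFileOutput handles the audio/video interleaving and timestamps internally.

    import AVFoundation

    final class CaptureRecorder: NSObject, AVCaptureFileOutputRecordingDelegate {
        private let session = AVCaptureSession()
        private let movieOutput = AVCaptureMovieFileOutput()

        func setUp() throws {
            session.beginConfiguration()

            // Video input: the default camera
            if let camera = AVCaptureDevice.default(for: .video) {
                let videoInput = try AVCaptureDeviceInput(device: camera)
                if session.canAddInput(videoInput) { session.addInput(videoInput) }
            }

            // Audio input: the default microphone
            if let mic = AVCaptureDevice.default(for: .audio) {
                let audioInput = try AVCaptureDeviceInput(device: mic)
                if session.canAddInput(audioInput) { session.addInput(audioInput) }
            }

            // File output: writes synchronized audio/video to disk
            if session.canAddOutput(movieOutput) { session.addOutput(movieOutput) }

            session.commitConfiguration()
            session.startRunning() // in production, start on a background queue
        }

        func startRecording(to url: URL) {
            movieOutput.startRecording(to: url, recordingDelegate: self)
        }

        func stopRecording() {
            movieOutput.stopRecording()
        }

        // Called when the movie file has finished writing
        func fileOutput(_ output: AVCaptureFileOutput,
                        didFinishRecordingTo outputFileURL: URL,
                        from connections: [AVCaptureConnection],
                        error: Error?) {
            print("Finished recording:", outputFileURL, error?.localizedDescription ?? "no error")
        }
    }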