iOS Swift 3:使用 AVCaptureAudioDataOutput 分析音频输入

我正试图使用 AVCaptureAudioDataOutput 来分析音频输入,如前所述。这不是我自己能弄明白的东西,所以我复制了那个例子;但在按照 Swift 3 中 Xcode 的提示做了一些更改之后,我遇到了困难。我在给 samples 赋值的那一行遇到编译错误,Xcode 提示:"无法使用类型为 '(UnsafeMutableRawPointer?)' 的参数列表调用类型为 'UnsafeMutablePointer' 的初始值设定项"(Cannot invoke initializer for type 'UnsafeMutablePointer<_>' with an argument list of type '(UnsafeMutableRawPointer?)')。
以下是我修改后的代码:
/// AVCaptureAudioDataOutputSampleBufferDelegate callback: extracts the audio
/// samples from each incoming sample buffer and prints the sum of squares.
/// NOTE(review): assumes the samples are 16-bit integer PCM — confirm against
/// the capture session's audio settings.
func captureOutput(_ captureOutput: AVCaptureOutput!,
                   didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
                   from connection: AVCaptureConnection!){
    // Retained block buffer keeps the audio data alive while we read it.
    var buffer: CMBlockBuffer? = nil
    var audioBufferList = AudioBufferList(mNumberBuffers: 1,
        mBuffers: AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil))
    // Fills audioBufferList so its mData pointers reference the sample data.
    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer,
        nil,
        &audioBufferList,
        MemoryLayout<AudioBufferList>.size, // changed for Swift 3 (sizeof() was removed)
        nil,
        nil,
        UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
        &buffer
    )
    let abl = UnsafeMutableAudioBufferListPointer(&audioBufferList)
    var sum: Int64 = 0
    var count: Int = 0
    var bufs: Int = 0
    for buf in abl {
        // Fixed for Swift 3: UnsafeMutablePointer has no initializer taking
        // UnsafeMutableRawPointer?, so route the raw pointer through OpaquePointer.
        let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(OpaquePointer(buf.mData)),
                                                        count: Int(buf.mDataByteSize) / MemoryLayout<Int16>.size)
        for sample in samples {
            let s = Int64(sample)
            sum = (sum + s*s)
            count += 1
        }
        bufs += 1
    }
    print( "found \(count) samples in \(bufs) buffers, sum is \(sum)" )
}
func captureOutput(_ captureOutput: AVCaptureOutput!,
                   didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
                   from connection: AVCaptureConnection!){
    var buffer: CMBlockBuffer? = nil
    var audioBufferList = AudioBufferList(mNumberBuffers: 1,
        mBuffers: AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil))
    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer,
        nil,
        &audioBufferList,
        MemoryLayout<AudioBufferList>.size, // changed for Swift 3
        nil,
        nil,
        UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
        &buffer
    )
    let abl = UnsafeMutableAudioBufferListPointer(&audioBufferList)
    var sum: Int64 = 0
    var count: Int = 0
    var bufs: Int = 0
    for buf in abl {
        let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(buf.mData), // Error here
                                                        count: Int(buf.mDataByteSize)/sizeof(Int16))
        for sample in samples {
            let s = Int64(sample)
            sum = (sum + s*s)
            count += 1
        }
        bufs += 1
    }
    print( "found \(count) samples in \(bufs) buffers, sum is \(sum)" )
}
有人能告诉我如何修复此代码吗?

答案:我需要把 buf.mData 包装在一个不透明指针(OpaquePointer)里。也就是说,把

start: UnsafeMutablePointer(buff.mData)

改为

start: UnsafeMutablePointer(OpaquePointer(buff.mData))

以下是为 Swift 3 更新的完整代码:
// AVCaptureAudioDataOutputSampleBufferDelegate callback, updated for Swift 3.
// Pulls the audio data out of the sample buffer and prints the RMS level.
// NOTE(review): assumes the samples are 16-bit integer PCM — confirm against
// the capture session's audio settings.
func captureOutput(_ captureOutput: AVCaptureOutput!,
didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
from connection: AVCaptureConnection!){
// Retained block buffer keeps the audio data alive while we read it.
var buffer: CMBlockBuffer? = nil
var audioBufferList = AudioBufferList(mNumberBuffers: 1,
mBuffers: AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil))
// Fills audioBufferList so its mData pointers reference the sample data.
CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
sampleBuffer,
nil,
&audioBufferList,
MemoryLayout<AudioBufferList>.size,
nil,
nil,
UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
&buffer
)
let abl = UnsafeMutableAudioBufferListPointer(&audioBufferList)
var sum:Int64 = 0
var count:Int = 0
var bufs:Int = 0
for buff in abl {
// Swift 3 fix: wrap the raw mData pointer in OpaquePointer before
// re-typing it as UnsafeMutablePointer<Int16>.
let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(OpaquePointer(buff.mData)),
count: Int(buff.mDataByteSize)/MemoryLayout<Int16>.size)
for sample in samples {
let s = Int64(sample)
// Accumulate the sum of squared samples for the RMS computation.
sum = (sum + s*s)
count += 1
}
bufs += 1
}
// NOTE(review): if count is 0 this prints NaN for the RMS (0.0/0.0).
print( "found \(count) samples in \(bufs) buffers, RMS is \(sqrt(Float(sum)/Float(count)))" )
}
func captureOutput(_ captureOutput: AVCaptureOutput!,
                   didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
                   from connection: AVCaptureConnection!){
    var buffer: CMBlockBuffer? = nil
    var audioBufferList = AudioBufferList(mNumberBuffers: 1,
        mBuffers: AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil))
    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer,
        nil,
        &audioBufferList,
        MemoryLayout<AudioBufferList>.size,
        nil,
        nil,
        UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
        &buffer
    )
    let abl = UnsafeMutableAudioBufferListPointer(&audioBufferList)
    var sum: Int64 = 0
    var count: Int = 0
    var bufs: Int = 0
    for buff in abl {
        let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(OpaquePointer(buff.mData)),
                                                        count: Int(buff.mDataByteSize)/MemoryLayout<Int16>.size)
        for sample in samples {
            let s = Int64(sample)
            sum = (sum + s*s)
            count += 1
        }
        bufs += 1
    }
    print( "found \(count) samples in \(bufs) buffers, RMS is \(sqrt(Float(sum)/Float(count)))" )
}
这使编译器感到满意,而且似乎生成了合理的数字。

评论:我正在录制视频,方法是使用 CMSampleBufferGetImageBuffer 从 sampleBuffer 获取图像,修改图像,然后将其写入视频。如何检查 sampleBuffer 是否为音频,然后将其写入 assetWriter? —— @ChewieTheChorkie 要检查 sampleBuffer 是否为音频,可以查看 connection 的 output 属性,像这样:if connection.output is AVCaptureAudioDataOutput { … } else { /* isVideo */ }
评论:也许我会试试,但我已经通过运行 AVAudioRecorder"修复"了它,然后在最后合并音频和视频。那样不好吗? —— @ChewieTheChorkie 真是个有用的提示!你找到更好的解决办法了吗?我也有同样的问题(在实时视频录制中尝试绘制多个上下文时没有音频输出)。
/// AVCaptureAudioDataOutputSampleBufferDelegate callback, updated for Swift 3.
/// Reads the 16-bit samples out of the incoming buffer and prints the RMS level.
/// NOTE(review): assumes 16-bit integer PCM — confirm the session's audio settings.
func captureOutput(_ captureOutput: AVCaptureOutput!,
                   didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
                   from connection: AVCaptureConnection!){
    // The retained block buffer keeps the sample data alive while we iterate it.
    var retainedBlock: CMBlockBuffer? = nil
    var bufferList = AudioBufferList(
        mNumberBuffers: 1,
        mBuffers: AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil))

    // Populate bufferList so each mData pointer references the audio payload.
    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer,
        nil,
        &bufferList,
        MemoryLayout<AudioBufferList>.size,
        nil,
        nil,
        UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
        &retainedBlock)

    var sum: Int64 = 0
    var count: Int = 0
    var bufs: Int = 0
    for audioBuffer in UnsafeMutableAudioBufferListPointer(&bufferList) {
        // Re-type the raw mData pointer as Int16 via OpaquePointer (Swift 3 idiom).
        let sampleCount = Int(audioBuffer.mDataByteSize) / MemoryLayout<Int16>.size
        let typedStart: UnsafeMutablePointer<Int16>? = UnsafeMutablePointer(OpaquePointer(audioBuffer.mData))
        let samples = UnsafeMutableBufferPointer<Int16>(start: typedStart, count: sampleCount)
        for sample in samples {
            let value = Int64(sample)
            sum += value * value   // accumulate sum of squares for the RMS
            count += 1
        }
        bufs += 1
    }
    print( "found \(count) samples in \(bufs) buffers, RMS is \(sqrt(Float(sum)/Float(count)))" )
}