Swift audio starts but no sound

The code below was written by following the Audio Queue Services Programming Guide.

I create an instance of this class and call its start() function. It compiles fine, but no sound is output. I have checked the code many times, but made no progress.
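
For context, the call site is roughly this (a minimal sketch; the question does not show it, so the surrounding code is assumed):

    // Assumed call site: create the player and start playback.
    let player = AudioPlay()
    player.start()      // blocks in its internal CFRunLoop loop until mIsRunning becomes false
    player.Dispose()    // afterwards, release the queue and close the file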

Is anyone familiar with AudioQueue able to help me? Any help would be appreciated.

Replacing "AudioFileReadPacketData" with "AudioFileReadPackets" solves this problem.

But sometimes I then hit a new problem, shown below! Other times it works fine.

Streaming Audio(20535,0x1085ac3c0) malloc: *** error for object 0x608001F6300: Invalid pointer dequeued from free list
*** set a breakpoint in malloc_error_break to debug
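
For what it's worth, one likely reason the original AudioFileReadPacketData call is silent is that its ioNumBytes argument is in/out: on input it must hold the capacity of the destination buffer, but the callback passes a zero-initialized numBytesReadFromFile, so zero bytes are read and the queue stops right away. Below is a minimal sketch of how the read step in the callback could prime that value; this is an assumption about the fix, not a tested patch, and it reuses the names from the full listing that follows.

    // Sketch (assumed fix): prime ioNumBytes with the buffer capacity before reading.
    var numBytesReadFromFile = inBuffer.pointee.mAudioDataBytesCapacity
    var numPackets = pAqData.mNumPacketsToRead
    AudioFileReadPacketData(pAqData.mAudioFile!,
                            false,
                            &numBytesReadFromFile,        // in: buffer capacity, out: bytes actually read
                            pAqData.mPacketDescs,
                            pAqData.mCurrentPacket!,
                            &numPackets,                  // in: packets requested, out: packets actually read
                            inBuffer.pointee.mAudioData)

As for the intermittent malloc crash, one thing worth double-checking is the mPacketDescs allocation: allocate(capacity:) takes an element count, but MemoryLayout.size(ofValue: AudioStreamPacketDescription()) is the byte size of a single description, so the buffer can end up smaller than mNumPacketsToRead descriptions and be overrun when the file reader fills it in. Allocating capacity: Int(aqData.mNumPacketsToRead) instead would match the C sample in the guide, which allocates one description per packet to be read.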

import Foundation
import AudioToolbox

class AudioPlay {

    //setting buffer num
  static  let knumberBuffers = 3 
    var aqData = AQPlayerState.init()
    //A custom structure for a playback audio queue
     class AQPlayerState {
        var mDataFormat = AudioStreamBasicDescription()
        var mQueue:AudioQueueRef?
        var mBuffers = [AudioQueueBufferRef?].init(repeating: nil, count: AudioPlay.knumberBuffers)
        var mAudioFile:AudioFileID?
        var bufferByteSize = UInt32()
        var mCurrentPacket:Int64?
        var mNumPacketsToRead = UInt32()
        var mPacketDescs:UnsafeMutablePointer<AudioStreamPacketDescription>?
        var mIsRunning = false
    }        

    //playbackAudioQueue callback
   static let HandleOutputBuffer:AudioQueueOutputCallback = { (aqData1, inAQ, inBuffer) in

        var pAqData = (aqData1?.assumingMemoryBound(to: AQPlayerState.self).pointee)!

        guard pAqData.mIsRunning || pAqData.mQueue != nil else{
            print("audioplay is not running exit callback func")
            return
        }
        var numBytesReadFromFile = UInt32()
        var numPackets = pAqData.mNumPacketsToRead

        AudioFileReadPacketData(pAqData.mAudioFile!, false, &numBytesReadFromFile, pAqData.mPacketDescs, pAqData.mCurrentPacket!, &numPackets, inBuffer.pointee.mAudioData)

        if numPackets > 0 {
            inBuffer.pointee.mAudioDataByteSize = numBytesReadFromFile

            AudioQueueEnqueueBuffer(pAqData.mQueue!, inBuffer, ((pAqData.mPacketDescs != nil)  ? numPackets : UInt32(0)), pAqData.mPacketDescs)

            pAqData.mCurrentPacket! += Int64(numPackets)
        }else{
            AudioQueueStop(pAqData.mQueue!, false)
            pAqData.mIsRunning = false
        }    
    }    

    //call func to set the property 
    //create new outputqueue
    //start the audio queue
    func start()  {

        let url = Bundle.main.url(forResource: "123", withExtension: "mp3")!

        let audioFileURL = url as CFURL      

        print(audioFileURL)           

        let result = AudioFileOpenURL(audioFileURL, .readPermission, 0, &aqData.mAudioFile)
        print(result)    

        var dataFormatSize = UInt32(MemoryLayout.size(ofValue: aqData.mDataFormat))

      let result1 =  AudioFileGetProperty(aqData.mAudioFile!, kAudioFilePropertyDataFormat,&dataFormatSize, &aqData.mDataFormat)    

        //get file property
        var maxPacketSize = UInt32()            
        var propertySize = UInt32(MemoryLayout.size(ofValue: maxPacketSize))            

        let result2 = AudioFileGetProperty(aqData.mAudioFile!, kAudioFilePropertyPacketSizeUpperBound, &propertySize, &maxPacketSize)        

        //calculate and setting buffer size
        DeriveBufferSize(ASBDesc: aqData.mDataFormat, maxPacketSize: maxPacketSize, seconds: 0.5, outBufferSize: &aqData.bufferByteSize, outNumPacketsToRead: &aqData.mNumPacketsToRead)

        //check the format is VBR or CBR
        let isFormatVBR = aqData.mDataFormat.mBytesPerPacket == 0 || aqData.mDataFormat.mFramesPerPacket == 0

        if isFormatVBR {
            aqData.mPacketDescs = UnsafeMutablePointer<AudioStreamPacketDescription>.allocate(capacity: MemoryLayout.size(ofValue: AudioStreamPacketDescription()))
        }else{
            aqData.mPacketDescs = nil
        }   

      //create new audio queue
        let result4 =  AudioQueueNewOutput(&aqData.mDataFormat,AudioPlay.HandleOutputBuffer, &aqData,CFRunLoopGetCurrent(),CFRunLoopMode.commonModes.rawValue, 0, &aqData.mQueue)      

        //queue start
        aqData.mIsRunning = true

        //alloc memory buffer
        aqData.mCurrentPacket = 0

        for i in 0..<AudioPlay.knumberBuffers {
            AudioQueueAllocateBuffer(aqData.mQueue!, aqData.bufferByteSize,&aqData.mBuffers[i])
            AudioPlay.HandleOutputBuffer(&aqData,aqData.mQueue!, (aqData.mBuffers[i])!)
        }

        //start audioqueue
        AudioQueueStart(aqData.mQueue!, nil)            

        repeat{
            CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 0.25, false)
        }while (aqData.mIsRunning)

        CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 1, false)          
    }       

    //calculate and setting buffer size
    func DeriveBufferSize(ASBDesc:AudioStreamBasicDescription,maxPacketSize:UInt32,seconds:Float64,outBufferSize:UnsafeMutablePointer<UInt32>,outNumPacketsToRead:UnsafeMutablePointer<UInt32>) {
        let maxBufferSize:UInt32 = 0x50000
        let minBufferSIze:UInt32 = 0x4000

        if ASBDesc.mFramesPerPacket != 0 {
            let numPacketsForTime = ASBDesc.mSampleRate / Float64(ASBDesc.mFramesPerPacket) * seconds
            outBufferSize.pointee = UInt32(numPacketsForTime) * maxPacketSize
        }else{
            outBufferSize.pointee = (maxBufferSize > maxPacketSize) ? maxBufferSize:maxPacketSize
        }

        if outBufferSize.pointee > maxBufferSize && outBufferSize.pointee > maxPacketSize {
            outBufferSize.pointee = maxBufferSize
        }else{
            if outBufferSize.pointee < minBufferSIze{
                outBufferSize.pointee = minBufferSIze
            }
        }
        outNumPacketsToRead.pointee = outBufferSize.pointee/maxPacketSize
    }    

    //dispose the audioqueue
    func Dispose()  {
        AudioQueueDispose(aqData.mQueue!, true)
        AudioFileClose(aqData.mAudioFile!)
        free(aqData.mPacketDescs)
    }        
}