Warning: file_get_contents(/data/phpspider/zhask/data//catemap/0/iphone/42.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Iphone 我可以从MPMusicPlayerController获取音频会话/应用音频单元进行播放吗?_Iphone_Audio_Avaudioplayer_Ipod_Mpmusicplayercontroller - Fatal编程技术网

Iphone 我可以从MPMusicPlayerController获取音频会话/应用音频单元进行播放吗?

Iphone 我可以从MPMusicPlayerController获取音频会话/应用音频单元进行播放吗?,iphone,audio,avaudioplayer,ipod,mpmusicplayercontroller,Iphone,Audio,Avaudioplayer,Ipod,Mpmusicplayercontroller,我想控制来自MPMusicPlayerController的音频（即从iPod资料库播放的音频）。例如，我想对它应用EQ，或者做DSP、混响之类的处理。 这可能吗？有没有我可以处理的音频会话？或者，是否有某种方法可以使用AVAudioPlayer播放iPod资料库中的文件？MPMusicPlayerController无法与AV框架“很好地”配合使用。 我设法通过MPMusicPlayerController获取正在播放的媒体项，再取得该项的URL，然后使用AVURLAsset和AVAssetReader来做一些DSP处理。 大概是这样的：

我想控制来自MPMusicPlayerController的音频（即从iPod资料库播放的音频）。例如，我想对它应用EQ，或者做DSP、混响之类的处理。


这可能吗?有没有音频会话我可以处理?或者,是否有某种方法可以使用AVAudioPlayer播放iPod库中的文件?

MPMusicPlayerController无法与AV框架“很好地”配合使用。 我设法通过MPMusicPlayerController获取正在播放的媒体项，再取得该项的URL，然后使用AVURLAsset和AVAssetReader来做一些DSP处理。 大概是这样的：

// Pull raw LPCM samples out of the track currently playing in the iPod
// library (via the asset URL of the now-playing MPMediaItem), so DSP/EQ can
// be applied to the data. `myMusicController` is an MPMusicPlayerController
// defined elsewhere.
MPMediaItem *currentSong = [myMusicController nowPlayingItem];
NSURL *currentSongURL = [currentSong valueForProperty:MPMediaItemPropertyAssetURL];
// NOTE(review): MPMediaItemPropertyAssetURL is nil for DRM-protected tracks;
// callers should check currentSongURL before proceeding — TODO confirm policy.
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:currentSongURL options:nil];
NSError *error = nil;
AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:songAsset error:&error];

AVAssetTrack *track = [[songAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

// Ask the reader to decode to linear PCM; all other output settings are left
// at their defaults.
NSMutableDictionary *audioReadSettings = [NSMutableDictionary dictionary];
[audioReadSettings setValue:[NSNumber numberWithInt:kAudioFormatLinearPCM]
                     forKey:AVFormatIDKey];

AVAssetReaderTrackOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track outputSettings:audioReadSettings];
[reader addOutput:readerOutput];
[reader startReading];

// FIX: the original fetched a first sample buffer, then immediately
// overwrote it at the top of the loop — the first buffer was skipped AND
// leaked. Process each buffer before fetching the next instead.
CMSampleBufferRef sample = [readerOutput copyNextSampleBuffer];
while (sample != NULL)
{
    CMItemCount numSamplesInBuffer = CMSampleBufferGetNumSamples(sample);

    AudioBufferList audioBufferList;
    // Filled in (and retained) by the call below; the original also called
    // CMSampleBufferGetDataBuffer first, but that result was clobbered here.
    CMBlockBufferRef buffer = NULL;

    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sample,
                                                            NULL,
                                                            &audioBufferList,
                                                            sizeof(audioBufferList),
                                                            NULL,
                                                            NULL,
                                                            kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
                                                            &buffer
                                                            );

    for (int bufferCount = 0; bufferCount < audioBufferList.mNumberBuffers; bufferCount++) {
        // Interpreting the data as 16-bit signed samples — valid only if the
        // decoded LPCM really is 16-bit; presumably true for the default
        // settings, but verify against the track's format description.
        SInt16 *samples = (SInt16 *)audioBufferList.mBuffers[bufferCount].mData;
        for (int i = 0; i < numSamplesInBuffer; i++) {
            NSLog(@"%i", samples[i]);
        }
    }

    // Release the block buffer retained for us by
    // CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer, then the
    // sample buffer from copyNextSampleBuffer (ARC does not manage CF types).
    CFRelease(buffer);
    CFRelease(sample);

    sample = [readerOutput copyNextSampleBuffer];
}
MPMediaItem*currentSong=[myMusicController nowPlayingItem];
NSURL*currentSongURL=[currentSong valueForProperty:MPMediaItemPropertyAssetURL];
AVURLAsset*songAsset=[AVURLAsset URLAssetWithURL:currentSongURL选项:nil];
n错误*错误=nil;
Avassetrader*reader=[[Avassetrader alloc]initWithAsset:songAsset错误:&error];
AVAssetTrack*track=[[songAsset tracksWithMediaType:AVMediaTypeAudio]对象索引:0];
NSMutableDictionary*audioReadSettings=[NSMutableDictionary];
[audioReadSettings设置值:[NSNumber numberWithInt:kAudioFormatLinearPCM]
forKey:AVFormatIDKey];
AVAssetReaderTrackOutput*readerOutput=[AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track outputSettings:audioReadSettings];
[读卡器添加输出:读卡器输出];
[读者开始阅读];
CMSampleBufferRef样本=[readerOutput copyNextSampleBuffer];
while(示例!=NULL)
{
样本=[readerOutput copyNextSampleBuffer];
if(sample==NULL)
继续;
CMBlockBufferRef buffer=CMSampleBufferGetDataBuffer(样本);
CMItemCount numSamplesInBuffer=CMSampleBufferGetNumSamples(样本);
AudioBufferList AudioBufferList;
CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(示例,
无效的
&音频缓冲列表,
sizeof(音频缓冲列表),
无效的
无效的
kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
&缓冲区
);
对于(int bufferCount=0;bufferCount
那么使用AVURLAsset您可以直接访问文件还是什么?是的,您可以完全访问声音数据。我将编辑代码其余部分的答案以查看实际数据。好的,这是可能的,但是有没有人找到更好的方法来做到这一点,可能是使用?