Warning: file_get_contents(/data/phpspider/zhask/data//catemap/0/iphone/37.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Iphone 播放和录制流式音频_Iphone - Fatal编程技术网

Iphone 播放和录制流式音频

Iphone 播放和录制流式音频,iphone,Iphone,我正在开发一款iPhone应用程序,它应该能够同时播放和录制音频流数据。真的有可能吗?我试图混合SpeakHere和录音机样本,得到一个没有音频数据的空文件 这是我的.m代码: #import "AzRadioViewController.h" @implementation azRadioViewController static const CFOptionFlags kNetworkEvents = kCFStreamEventOpenCompleted | kCFStreamEven

我正在开发一款iPhone应用程序,它应该能够同时播放和录制音频流数据。真的有可能吗?我试图混合SpeakHere和录音机样本,得到一个没有音频数据的空文件

这是我的.m代码:

#import "AzRadioViewController.h"

@implementation azRadioViewController

// CFStream events registered on the HTTP read stream via CFReadStreamSetClient
// in connectionStart; only HasBytesAvailable is acted upon in
// ReadStreamClientCallBack.
static const CFOptionFlags kNetworkEvents = kCFStreamEventOpenCompleted |
kCFStreamEventHasBytesAvailable |
kCFStreamEventEndEncountered |
kCFStreamEventErrorOccurred;

// Playback-queue callback: writes the packets of the just-consumed buffer to
// the recording file, then marks the ring-buffer slot free so MyPacketsProc
// can refill it.
// NOTE(review): this function is registered as the *output* callback by
// AudioQueueNewOutput in MyPropertyListenerProc, but is also (incorrectly)
// passed to AudioQueueNewInput in connectionStart — the input-callback
// signature is different; see the FIXME there.
void MyAudioQueueOutputCallback( void* inClientData, 
AudioQueueRef inAQ, 
AudioQueueBufferRef inBuffer,
const AudioTimeStamp *inStartTime, 
UInt32 inNumberPacketDescriptions, 
const AudioStreamPacketDescription* inPacketDesc
)
{
NSLog(@"start MyAudioQueueOutputCallback");

MyData* myData = (MyData*)inClientData;

NSLog(@"--- %i", inNumberPacketDescriptions);

// CBR formats deliver no packet descriptions; derive the count from the
// fixed packet size.
if(inNumberPacketDescriptions == 0 && myData->dataFormat.mBytesPerPacket != 0)
{
inNumberPacketDescriptions = inBuffer->mAudioDataByteSize / myData->dataFormat.mBytesPerPacket;
}

// Append the buffer's packets to the output file at currentPacket.
OSStatus status = AudioFileWritePackets(myData->audioFile, FALSE, inBuffer->mAudioDataByteSize,
inPacketDesc, myData->currentPacket, &inNumberPacketDescriptions, inBuffer->mAudioData);

if(status == 0)
{
myData->currentPacket += inNumberPacketDescriptions;
}

NSLog(@"status:%i curpac:%i pcdesct: %i", status, myData->currentPacket, inNumberPacketDescriptions);

// BUG FIX: MyFindQueueBuffer returns -1 for an unknown buffer; the old code
// stored that in an *unsigned* int and indexed inuse[] with it, writing far
// out of bounds. Keep it signed and bail out on the sentinel.
int bufIndex = MyFindQueueBuffer(myData, inBuffer);
if (bufIndex < 0) {
NSLog(@"MyAudioQueueOutputCallback: unknown buffer, ignoring");
return;
}

// Mark the slot free under the mutex and wake any producer blocked in
// WaitForFreeBuffer.
pthread_mutex_lock(&myData->mutex);
myData->inuse[bufIndex] = false;
pthread_cond_signal(&myData->cond);
pthread_mutex_unlock(&myData->mutex);
}


// Starts the audio queue exactly once; later calls are no-ops.
// Returns noErr on success (or if already started), otherwise the
// AudioQueueStart error, with myData->failed latched.
OSStatus StartQueueIfNeeded(MyData* myData)
{
NSLog(@"start StartQueueIfNeeded");

OSStatus err = noErr;
if (myData->started) {
    return err;
}

err = AudioQueueStart(myData->queue, NULL);
if (err) { PRINTERROR("AudioQueueStart"); myData->failed = true; return err; }

myData->started = true;
printf("started\n");
return err;
}



// Hands the currently-filled ring buffer to the audio queue for playback and
// starts the queue on first use. Returns the first error encountered.
OSStatus MyEnqueueBuffer(MyData* myData)
{
NSLog(@"start MyEnqueueBuffer");

OSStatus err = noErr;
// Mark the slot busy *before* enqueueing so the playback callback's
// "free" signal cannot race ahead of us.
myData->inuse[myData->fillBufferIndex] = true; 

AudioQueueBufferRef fillBuf = myData->audioQueueBuffer[myData->fillBufferIndex];
fillBuf->mAudioDataByteSize = myData->bytesFilled; 
err = AudioQueueEnqueueBuffer(myData->queue, fillBuf, myData->packetsFilled, myData->packetDescs);
if (err) { PRINTERROR("AudioQueueEnqueueBuffer"); myData->failed = true; return err; } 

// BUG FIX: the old code discarded StartQueueIfNeeded's result and reported
// success even when the queue failed to start. Propagate it to the caller.
err = StartQueueIfNeeded(myData);

return err;
}


// Advances fillBufferIndex to the next slot of the kNumAQBufs ring, resets the
// fill counters, then blocks until the playback callback clears that slot's
// inuse[] flag (signalled via cond under mutex).
void WaitForFreeBuffer(MyData* myData)
{
NSLog(@"start WaitForFreeBuffer");

// Move to the next buffer in the ring (wrap at kNumAQBufs) and start
// filling it from scratch.
if (++myData->fillBufferIndex >= kNumAQBufs) myData->fillBufferIndex = 0;
myData->bytesFilled = 0; 
myData->packetsFilled = 0; 

printf("->lock\n");
pthread_mutex_lock(&myData->mutex); 
// Standard condition-variable pattern: re-check the predicate after every
// wakeup to tolerate spurious signals.
while (myData->inuse[myData->fillBufferIndex]) {
printf("... WAITING ...\n");
pthread_cond_wait(&myData->cond, &myData->mutex);
}
pthread_mutex_unlock(&myData->mutex);
printf("<-unlock\n");
}


// Returns the ring-buffer slot index of inBuffer, or -1 if the buffer does not
// belong to this queue's audioQueueBuffer[] array.
int MyFindQueueBuffer(MyData* myData, AudioQueueBufferRef inBuffer)
{
NSLog(@"start MyFindQueueBuffer");

unsigned int slot = 0;
while (slot < kNumAQBufs) {
    if (myData->audioQueueBuffer[slot] == inBuffer) {
        return slot;
    }
    ++slot;
}
return -1;
}


// Property listener for kAudioQueueProperty_IsRunning: when the queue stops
// running, signals the `done` condition so a thread waiting for teardown can
// proceed.
void MyAudioQueueIsRunningCallback( void* inClientData, 
AudioQueueRef inAQ, 
AudioQueuePropertyID inID)
{
NSLog(@"start MyAudioQueueIsRunningCallback");

MyData* myData = (MyData*)inClientData;

UInt32 running;
// BUG FIX: `size` is an in/out parameter of AudioQueueGetProperty and was
// passed uninitialized; it must hold the size of the receiving buffer.
UInt32 size = sizeof(running);
OSStatus err = AudioQueueGetProperty(inAQ, kAudioQueueProperty_IsRunning, &running, &size);
if (err) { PRINTERROR("get kAudioQueueProperty_IsRunning"); return; }
if (!running) {
pthread_mutex_lock(&myData->mutex);
pthread_cond_signal(&myData->done);
pthread_mutex_unlock(&myData->mutex);
}
}


// AudioFileStream property listener: once the parser has seen enough data to
// describe the stream (ReadyToProducePackets), creates the playback queue for
// that format, allocates its ring buffers, forwards the magic cookie when one
// exists, and registers the is-running listener.
void MyPropertyListenerProc( void * inClientData,
AudioFileStreamID inAudioFileStream,
AudioFileStreamPropertyID inPropertyID,
UInt32 * ioFlags)
{ 
NSLog(@"start MyPropertyListenerProc");

MyData* myData = (MyData*)inClientData;
OSStatus err = noErr;

printf("found property '%c%c%c%c'\n", (inPropertyID>>24)&255, (inPropertyID>>16)&255, (inPropertyID>>8)&255, inPropertyID&255);

switch (inPropertyID) {
case kAudioFileStreamProperty_ReadyToProducePackets :
{
// Fetch the parsed stream format and build an output queue for it.
AudioStreamBasicDescription asbd;
UInt32 asbdSize = sizeof(asbd);

err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_DataFormat, &asbdSize, &asbd);
if (err) { PRINTERROR("get kAudioFileStreamProperty_DataFormat"); myData->failed = true; break; }

err = AudioQueueNewOutput(&asbd, MyAudioQueueOutputCallback, myData, NULL, NULL, 0, &myData->queue);
if (err) { PRINTERROR("AudioQueueNewOutput"); myData->failed = true; break; }

for (unsigned int i = 0; i < kNumAQBufs; ++i) {
err = AudioQueueAllocateBuffer(myData->queue, kAQBufSize, &myData->audioQueueBuffer[i]);
if (err) { PRINTERROR("AudioQueueAllocateBuffer"); myData->failed = true; break; }
}
// BUG FIX: the `break` above only exits the for loop; without this guard
// the code fell through and kept configuring a half-initialized queue.
if (myData->failed) break;

// BUG FIX: many formats legitimately have no magic cookie; the old code
// `break`ed out on the property-info error and never registered the
// is-running listener below. Treat the cookie as optional instead.
UInt32 cookieSize = 0;
Boolean writable;
err = AudioFileStreamGetPropertyInfo(inAudioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, &writable);
if (err == noErr && cookieSize > 0) {
printf("cookieSize %d\n", cookieSize);

void* cookieData = calloc(1, cookieSize);
err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, cookieData);
if (err == noErr) {
err = AudioQueueSetProperty(myData->queue, kAudioQueueProperty_MagicCookie, cookieData, cookieSize);
if (err) { PRINTERROR("set kAudioQueueProperty_MagicCookie"); }
} else {
PRINTERROR("get kAudioFileStreamProperty_MagicCookieData");
}
free(cookieData);
}

err = AudioQueueAddPropertyListener(myData->queue, kAudioQueueProperty_IsRunning, MyAudioQueueIsRunningCallback, myData);
if (err) { PRINTERROR("AudioQueueAddPropertyListener"); myData->failed = true; break; }

break;
}
}
}


// CFReadStream client callback for the HTTP audio stream: drains available
// bytes and feeds them to the AudioFileStream parser.
// NOTE(review): only kCFStreamEventHasBytesAvailable is handled although
// end/error events are registered in kNetworkEvents; also uses the global
// `globalMyData` instead of clientCallBackInfo — confirm that is intentional.
static void
ReadStreamClientCallBack(CFReadStreamRef stream, CFStreamEventType type, void *clientCallBackInfo) {

NSLog(@"start ReadStreamClientCallBack");

if(type == kCFStreamEventHasBytesAvailable) {

UInt8 buffer[2048];
CFIndex bytesRead = CFReadStreamRead(stream, buffer, sizeof(buffer));

if (bytesRead < 0) {
// BUG FIX: a stream read error was silently swallowed by an empty
// branch; at least report it so failures are visible.
NSLog(@"ReadStreamClientCallBack: CFReadStreamRead failed");
}

else if (bytesRead) {
OSStatus err = AudioFileStreamParseBytes(globalMyData->audioFileStream, bytesRead, buffer, 0);
if (err) { PRINTERROR("AudioFileStreamParseBytes"); }
}
}
}


// AudioFileStream packets callback: copies each parsed packet into the current
// ring buffer, enqueueing the buffer (and blocking for a free one) whenever it
// runs out of byte space or packet-description slots.
// NOTE(review): assumes inPacketDescriptions is non-NULL (VBR data) and that a
// single packet never exceeds kAQBufSize — confirm for the formats used.
void MyPacketsProc(void * inClientData,
UInt32 inNumberBytes,
UInt32 inNumberPackets,
const void * inInputData,
AudioStreamPacketDescription *inPacketDescriptions)
{
NSLog(@"start MyPacketsProc");
MyData* myData = (MyData*)inClientData;

printf("got data. bytes: %d packets: %d\n", inNumberBytes, inNumberPackets);

for (int i = 0; i < inNumberPackets; ++i) {
SInt64 packetOffset = inPacketDescriptions[i].mStartOffset;
SInt64 packetSize = inPacketDescriptions[i].mDataByteSize;

// If this packet does not fit in the remaining space, flush the buffer
// and wait for the playback callback to free the next slot.
size_t bufSpaceRemaining = kAQBufSize - myData->bytesFilled;
if (bufSpaceRemaining < packetSize) {
MyEnqueueBuffer(myData);
WaitForFreeBuffer(myData);
}

// Copy the packet's bytes and record its description, rebasing
// mStartOffset to the packet's position inside *our* buffer.
AudioQueueBufferRef fillBuf = myData->audioQueueBuffer[myData->fillBufferIndex];
memcpy((char*)fillBuf->mAudioData + myData->bytesFilled, (const char*)inInputData + packetOffset, packetSize);
myData->packetDescs[myData->packetsFilled] = inPacketDescriptions[i];
myData->packetDescs[myData->packetsFilled].mStartOffset = myData->bytesFilled;
myData->bytesFilled += packetSize;
myData->packetsFilled += 1;

// Also flush when the packet-description array is full.
size_t packetsDescsRemaining = kAQMaxPacketDescs - myData->packetsFilled;
if (packetsDescsRemaining == 0) {
MyEnqueueBuffer(myData);
WaitForFreeBuffer(myData);
}
} 
}

// IBAction: begins buffering and playing the radio stream.
// BUG FIX: the selector was garbled as `buttonPlayPressedid)sender` (the
// `:(` was lost), which does not compile; restored the IBAction signature.
- (IBAction)buttonPlayPressed:(id)sender
{
label.text = @"Buffering";
[self connectionStart];
}




// IBAction: finalizes the recording — closes the capture file and disposes
// the audio queue.
// BUG FIX: restored the garbled selector `buttonSavePressedid)sender` to
// `:(id)sender`.
// NOTE(review): `myData` here is accessed with dot syntax, so it is
// presumably a struct-typed ivar/property distinct from the local MyData*
// allocated in connectionStart — confirm they refer to the same state.
- (IBAction)buttonSavePressed:(id)sender
{
NSLog(@"save");

AudioFileClose(myData.audioFile);
AudioQueueDispose(myData.queue, TRUE);
}

// Writes the full path of the recording file ("<Documents>/rec.caf") into
// `buffer`. Returns true if the UTF-8 path fit within maxBufferLength bytes.
bool getFilename(char* buffer,int maxBufferLength)
{
NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
                                                           NSUserDomainMask, YES);
NSString *documentsDirectory = [searchPaths objectAtIndex:0];
NSString *recordingPath = [documentsDirectory stringByAppendingString:@"/rec.caf"];
return [recordingPath getCString:buffer
                       maxLength:maxBufferLength
                        encoding:NSUTF8StringEncoding];
}

// Sets up the whole play-while-record pipeline:
//  1. allocates the shared MyData state and its pthread primitives,
//  2. describes the recording format (16 kHz mono 16-bit signed PCM),
//  3. creates the input (recording) queue and the destination .caf file,
//  4. opens an AudioFileStream parser for the incoming MP3 data,
//  5. issues an async HTTP GET for RADIO_LOCATION and schedules the read
//     stream on the current run loop (ReadStreamClientCallBack feeds the
//     parser, which drives MyPropertyListenerProc / MyPacketsProc).
-(void)connectionStart {

@try {

MyData* myData = (MyData*)calloc(1, sizeof(MyData));

globalMyData = myData;

pthread_mutex_init(&myData->mutex, NULL);
pthread_cond_init(&myData->cond, NULL);
pthread_cond_init(&myData->done, NULL);

NSLog(@"Start");

// Recording format: 16 kHz, mono, 16-bit signed integer, packed PCM.
myData->dataFormat.mSampleRate = 16000.0f;
myData->dataFormat.mFormatID = kAudioFormatLinearPCM;
myData->dataFormat.mFramesPerPacket = 1;
myData->dataFormat.mChannelsPerFrame = 1;
myData->dataFormat.mBytesPerFrame = 2;
myData->dataFormat.mBytesPerPacket = 2;
myData->dataFormat.mBitsPerChannel = 16;
myData->dataFormat.mReserved = 0;
myData->dataFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;

// FIXME(review): MyAudioQueueOutputCallback has the *output*-callback
// signature; AudioQueueNewInput requires an AudioQueueInputCallback
// (different parameter list), so no recorded audio is ever written — a
// dedicated input callback calling AudioFileWritePackets is needed.
// Also, this queue handle is later overwritten by AudioQueueNewOutput in
// MyPropertyListenerProc; input and output need separate queue refs.
// BUG FIX: the user-data argument was `&myData` (a MyData**); the callback
// casts it straight to MyData*, so pass the pointer itself.
AudioQueueNewInput(
&myData->dataFormat,
(AudioQueueInputCallback)MyAudioQueueOutputCallback,
myData,
NULL /* run loop */, kCFRunLoopCommonModes /* run loop mode */,
0 /* flags */, &myData->queue);

// BUG FIX: was sizeof(&myData->dataFormat) — the size of a *pointer* —
// and &myData->queue (AudioQueueRef*) where the AudioQueueRef itself is
// expected. Pass the struct size and the queue handle.
UInt32 size = sizeof(myData->dataFormat);
AudioQueueGetProperty(myData->queue, kAudioQueueProperty_StreamDescription, 
&myData->dataFormat, &size);

// Create (or truncate) <Documents>/rec.caf as the recording destination.
char path[256];
memset(path,0,sizeof(path));
getFilename(path,256);

CFURLRef fileURL = CFURLCreateFromFileSystemRepresentation(NULL, (UInt8*)path, strlen(path), FALSE);

AudioFileCreateWithURL(fileURL,
kAudioFileCAFType,
&myData->dataFormat,
kAudioFileFlags_EraseFile,
&myData->audioFile);
CFRelease(fileURL); // BUG FIX: created CFURL was leaked

OSStatus err = AudioFileStreamOpen(myData, MyPropertyListenerProc, MyPacketsProc, 
kAudioFileMP3Type, &myData->audioFileStream);
// BUG FIX: `return 1;` is invalid in a method returning void.
if (err) { PRINTERROR("AudioFileStreamOpen"); return; }

CFStreamClientContext ctxt = {0, self, NULL, NULL, NULL};

CFStringRef bodyData = CFSTR(""); // Usually used for POST data
CFStringRef headerFieldName = CFSTR("X-My-Favorite-Field");
CFStringRef headerFieldValue = CFSTR("Dreams");

CFStringRef url = CFSTR(RADIO_LOCATION);
CFURLRef myURL = CFURLCreateWithString(kCFAllocatorDefault, url, NULL);
CFStringRef requestMethod = CFSTR("GET");
CFHTTPMessageRef myRequest = CFHTTPMessageCreateRequest(kCFAllocatorDefault, requestMethod, myURL, kCFHTTPVersion1_1);

CFHTTPMessageSetBody(myRequest, bodyData);
CFHTTPMessageSetHeaderFieldValue(myRequest, headerFieldName, headerFieldValue);

CFReadStreamRef stream = CFReadStreamCreateForHTTPRequest(kCFAllocatorDefault, myRequest);
// BUG FIX: the request and URL were leaked; the stream retains what it
// needs, so release our references now.
CFRelease(myRequest);
CFRelease(myURL);

if (!stream) {
NSLog(@"Creating the stream failed");
return;
}

if (!CFReadStreamSetClient(stream, kNetworkEvents, ReadStreamClientCallBack, &ctxt)) {
CFRelease(stream);
NSLog(@"Setting the stream's client failed.");
return;
}

CFReadStreamScheduleWithRunLoop(stream, CFRunLoopGetCurrent(), kCFRunLoopCommonModes);

if (!CFReadStreamOpen(stream)) {
CFReadStreamSetClient(stream, 0, NULL, NULL);
CFReadStreamUnscheduleFromRunLoop(stream, CFRunLoopGetCurrent(), kCFRunLoopCommonModes);
CFRelease(stream);
NSLog(@"Opening the stream failed.");
return;
}

}
@catch (NSException *exception) {
NSLog(@"main: Caught %@: %@", [exception name], [exception reason]); 
}
}


// Keeps the screen awake while the stream plays, then performs the standard
// view-controller setup.
- (void)viewDidLoad {
UIApplication *application = [UIApplication sharedApplication];
[application setIdleTimerDisabled:YES];
[super viewDidLoad];
}


// Default memory-warning handling; this controller has no caches to purge.
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
}

// BUG FIX: lifecycle overrides must invoke super; [super viewDidUnload]
// was missing.
- (void)viewDidUnload {
[super viewDidUnload];
}


// Manual-retain-release (pre-ARC) dealloc; nothing owned here beyond super.
- (void)dealloc {
[super dealloc];
}

@end
#导入“AzRadioViewController.h”
@RadioViewController的实现
静态常量CFOptionFlags kNetworkEvents=KCFStreameVentoCompleted|
kCFStreamEventHasBytesAvailable|
KCFStreamEventEndended反击|
KCFStreamEventErrorrocurred;
void MyAudioQueueOutputCallback(void*inClientData,
音频队列参考inAQ,
AudioQueueBufferRef inBuffer,
const AudioTimeStamp*inStartTime,
UInt32在数字包说明中,
const AudioStreamPacketDescription*inPacketDesc
)
{
NSLog(@“启动MyAudioQueueOutputCallback”);
MyData*MyData=(MyData*)inClientData;
NSLog(@“---%i”,在数字包描述中);
如果(inNumberPacketDescriptions==0&&myData->dataFormat.mBytesPerPacket!=0)
{
inNumberPacketDescriptions=inBuffer->mAudioDataByteSize/myData->dataFormat.mBytesPerPacket;
}
OSStatus status=AudioFileWritePackages(myData->audioFile,FALSE,inBuffer->mAudioDataByteSize,
在packetdesc,myData->currentpack,以及在numberpacketdescriptions,inBuffer->mAudioData);
如果(状态==0)
{
myData->currentPacket+=inNumberPacketDescriptions;
}
NSLog(@“状态:%i curpac:%i pcdesct:%i”,状态,myData->currentPacket,inNumberPacketDescriptions);
unsigned int bufIndex=MyFindQueueBuffer(myData,inBuffer);
pthread_mutex_lock(&myData->mutex);
myData->inuse[bufIndex]=false;
pthread_cond_信号(&myData->cond);
pthread_mutex_unlock(&myData->mutex);
}
OSStatus StartQueueIfNeeded(MyData*MyData)
{
NSLog(@“启动开始UEIfRequired”);
OSStatus err=noErr;
如果(!myData->started){
err=AudioQueueStart(myData->queue,NULL);
if(err){PRINTERROR(“AudioQueueStart”);myData->failed=true;返回err;}
myData->start=true;
printf(“已启动\n”);
}
返回错误;
}
OSStatus MyEnqueueBuffer(MyData*MyData)
{
NSLog(@“启动MyeQueueBuffer”);
OSStatus err=noErr;
myData->inuse[myData->fillBufferIndex]=true;
AudioQueueBufferRef fillBuf=myData->audioQueueBuffer[myData->fillBufferIndex];
fillBuf->mAudioDataByteSize=myData->bytesFilled;
err=audioqueuenqueuebuffer(myData->queue,fillBuf,myData->packetsFilled,myData->packetDescs);
if(err){PRINTERROR(“audioqueuenbuffer”);myData->failed=true;返回err;}
StartQueueIfNeeded(myData);
返回错误;
}
void WaitForFreeBuffer(MyData*MyData)
{
NSLog(@“启动WaitForFreeBuffer”);
如果(++myData->fillBufferIndex>=kNumAQBufs)myData->fillBufferIndex=0;
myData->bytesFilled=0;
myData->packetsFilled=0;
printf(“->lock\n”);
pthread_mutex_lock(&myData->mutex);
同时(myData->inuse[myData->fillBufferIndex]){
printf(“…等待…\n”);
pthread_cond_wait(&myData->cond,&myData->mutex);
}
pthread_mutex_unlock(&myData->mutex);

printf(“@Sergey hi,你找到录制liveStream音频的解决方案了吗?”?