iPhone: how do I keep data re-encoded with AVAssetWriterInput in memory?

As we know, AVAssetWriterInput can re-encode data and save it to a local file. But if I want to keep the re-encoded data in memory instead, how do I get at that data? How should I do this? Any suggestions?

AVAssetWriterInput cannot encode into memory; it only encodes data into a file. Try encoding the CMSampleBuffer with an x264 encoder and then use the result however you like.
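
The raw frames are in fact already in memory inside the capture delegate shown below, so the practical question is how to get at their bytes. Here is a minimal sketch (the method name planarDataFromSampleBuffer: is my own, and it assumes the kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange format configured in setUpSession); the NSData it produces is what you would hand to an in-memory encoder such as x264:

#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>

// Hypothetical helper: copies the Y plane and the interleaved CbCr plane of an
// NV12 sample buffer into an NSData that an in-memory encoder can consume.
// Note that each row's stride may include padding bytes beyond the visible width.
- (NSData *)planarDataFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL)
    {
        return nil;
    }

    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

    NSMutableData *data = [NSMutableData data];
    size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
    for (size_t plane = 0; plane < planeCount; plane++)
    {
        uint8_t *base   = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, plane);
        size_t   stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, plane);
        size_t   height = CVPixelBufferGetHeightOfPlane(pixelBuffer, plane);
        [data appendBytes:base length:stride * height];
    }

    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return data;
}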
- (void)setUpSession{
m_session = [[AVCaptureSession alloc] init];
m_session.sessionPreset = AVCaptureSessionPresetLow;  

// video Input
NSError *error = nil;
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (videoDevice != nil)
{
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
    if (videoInput != nil)
    {
        if ([m_session canAddInput:videoInput])
        {
             [m_session addInput:videoInput];
        }
    }
    else
    {
       // NSLog(@"%@", [error localizedFailureReason]);
    }
}

// audio input
if (![videoDevice hasMediaType:AVMediaTypeAudio] && 
    ![videoDevice hasMediaType:AVMediaTypeMuxed])
{
    AVCaptureDevice *audioDevice = [AVCaptureDevice 
                                    defaultDeviceWithMediaType:AVMediaTypeAudio];
    if (audioDevice != nil)
    {
        AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
        if (audioInput != nil)
        {
            if ([m_session canAddInput:audioInput])
            {
                [m_session addInput:audioInput];
            }
        }
        else
        {
            // NSLog(@"%@", [error localizedFailureReason]);
        }
    }

}
else
{
    NSLog(@"videoDevice has audioMedia or muxed");
}

// video Output
m_videoOut = [[AVCaptureVideoDataOutput alloc] init];
m_videoOut.alwaysDiscardsLateVideoFrames = NO; 
m_videoOut.videoSettings =
                [NSDictionary dictionaryWithObject:
                [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];     
//kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
if ([m_session canAddOutput:m_videoOut])
{
    [m_session addOutput:m_videoOut];
}
AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc]
                                    initWithSession:m_session];
layer.frame = m_videoView.frame;
layer.videoGravity = AVLayerVideoGravityResize;
[m_videoView.layer addSublayer:layer];

// audio output
m_audioOut = [[AVCaptureAudioDataOutput alloc] init];
if ([m_session canAddOutput:m_audioOut])
{
    [m_session addOutput:m_audioOut];
}

// Setup the queue
dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
[m_videoOut setSampleBufferDelegate:self queue:queue];
[m_audioOut setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);

 }
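
The question does not show how the session is actually started; something along these lines is assumed to drive it (startCapture is a made-up name, and m_bRecording is the flag checked in the delegate below):

- (void)startCapture
{
    // Assumed glue code, not part of the original question: run both setup
    // methods, start the capture session, and let the delegate begin
    // forwarding sample buffers to the writer.
    [self setUpSession];
    [self setUpWriter];
    [m_session startRunning];
    m_bRecording = YES;
}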
- (void)setUpWriter{
NSError *error = nil;
NSURL *url = [self recordFilePath];
m_writer = [[AVAssetWriter alloc] initWithURL:url fileType:AVFileTypeQuickTimeMovie
                                            error:&error];
NSParameterAssert(m_writer);

// Add video input
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:640], AVVideoWidthKey,
                               [NSNumber numberWithInt:480], AVVideoHeightKey,
                               nil];

m_videoWriteInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                        outputSettings:videoSettings] retain];
NSParameterAssert(m_videoWriteInput);
m_videoWriteInput.expectsMediaDataInRealTime = YES;

// Add the audio input
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary *audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                           [NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
                           [NSNumber numberWithInt: 1], AVNumberOfChannelsKey,
                           [NSNumber numberWithFloat: 16000.0], AVSampleRateKey,
                           [NSNumber numberWithInt: 32000], AVEncoderBitRateKey,
                           [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                           nil];    
m_audioWriterInput = [[AVAssetWriterInput 
                      assetWriterInputWithMediaType: AVMediaTypeAudio 
                      outputSettings: audioOutputSettings] retain];
m_audioWriterInput.expectsMediaDataInRealTime = YES;

// add input
[m_writer addInput:m_videoWriteInput];
[m_writer addInput:m_audioWriterInput];

 }
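
The teardown side is also missing from the posted code; a sketch of what would be needed to end up with a playable file (stopRecording is an assumed name, and finishWriting is the pre-iOS 6 API matching the era of this code):

- (void)stopRecording
{
    // Assumed teardown, not shown in the question: stop appending samples and
    // close the movie file so it can be read back.
    m_bRecording = NO;
    [m_videoWriteInput markAsFinished];
    [m_audioWriterInput markAsFinished];
    if (![m_writer finishWriting])
    {
        NSLog(@"ERROR: finishWriting failed: %@\r\n", m_writer.error);
    }
}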
- (void)captureOutput:(AVCaptureOutput *)captureOutput
       didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
        fromConnection:(AVCaptureConnection *)connection{
if( !CMSampleBufferDataIsReady(sampleBuffer) )
{
    NSLog( @"sample buffer is not ready. Skipping sample" );
    return;
}

if( m_bRecording == YES )
{
    CMTime lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    if( m_writer.status != AVAssetWriterStatusWriting  )
    {
        if (m_writer.status ==  AVAssetWriterStatusFailed)
        {
            NSLog(@"ERROR:%@\r\n", m_writer.error);
        }
        else
        {
            [m_writer startWriting];
            [m_writer startSessionAtSourceTime:lastSampleTime];
        }
    }

    if( captureOutput == m_videoOut )
    {
        [self newVideoSample:sampleBuffer];
        NSLog(@"i am video");
    }
    else 
    {
        NSParameterAssert(captureOutput == m_audioOut);
        [self newAudioSample:sampleBuffer];
        NSLog(@"i am audio");
    }
}

 }
- (void)newVideoSample:(CMSampleBufferRef)sampleBuffer{
if(m_bRecording)
{
    if( m_writer.status > AVAssetWriterStatusWriting )
    {
        NSLog(@"Warning: writer status is %d\r\n", m_writer.status);
        if(m_writer.status == AVAssetWriterStatusFailed)
        {
            NSLog(@"Error: %@\r\n", m_writer.error);
        }
        return;
    }

    if (m_videoWriteInput.readyForMoreMediaData)
    {
        if(![m_videoWriteInput appendSampleBuffer:sampleBuffer])
        {
            NSLog(@"Unable to write to video input\r\n");
        }
    }
    else
    {
        NSLog(@"video input readyForMoreMediaData is NO\r\n");
    }
 }
}
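
newAudioSample: is called from the delegate above but not shown in the question; mirroring newVideoSample:, it would presumably look like this:

- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer
{
    if (m_bRecording)
    {
        if (m_writer.status > AVAssetWriterStatusWriting)
        {
            if (m_writer.status == AVAssetWriterStatusFailed)
            {
                NSLog(@"Error: %@\r\n", m_writer.error);
            }
            return;
        }

        if (m_audioWriterInput.readyForMoreMediaData)
        {
            if (![m_audioWriterInput appendSampleBuffer:sampleBuffer])
            {
                NSLog(@"Unable to write to audio input\r\n");
            }
        }
        else
        {
            NSLog(@"audio input readyForMoreMediaData is NO\r\n");
        }
    }
}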