iOS: Changing the camera front/back position while recording video

My camera takes photos and records videos perfectly (using AVCaptureMovieFileOutput), and I can switch the camera between the front and back positions without any problem. However, like Instagram, Snapchat, and countless other apps, I also want to let the user switch the camera position while a video is being recorded.
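To illustrate what I mean by the switch itself, here is a minimal sketch of the usual removeInput:/addInput: swap on a running session (togglePosition is just a hypothetical name for this example; cameraWithPosition: is the helper my code below already uses):

// Sketch only: swap the video device input while the session keeps running.
- (void)togglePosition
{
    AVCaptureDevicePosition newPosition = (self.position == LLCameraPositionRear)
        ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
    AVCaptureDevice *newDevice = [self cameraWithPosition:newPosition];

    NSError *error = nil;
    AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:newDevice error:&error];
    if (!newInput) {
        return;
    }

    [self.session beginConfiguration];
    [self.session removeInput:_videoDeviceInput];
    if ([self.session canAddInput:newInput]) {
        [self.session addInput:newInput];
        _videoDeviceInput = newInput;
        // (the real code would also update _position here)
    } else {
        // fall back to the old input if the new one is rejected
        [self.session addInput:_videoDeviceInput];
    }
    [self.session commitConfiguration];
}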

It seems that to achieve this I need to use AVCaptureVideoDataOutput, since it hands me the individual frames, but I haven't really gotten it to work. Everything appears to go smoothly, but after I finish a video it won't play, and there seems to be no resulting URL coming from the captureOutput delegate method. Here is my code:

- (void)initialize {
    if(!_session) {
        _session = [[AVCaptureSession alloc] init];
        _session.sessionPreset = self.cameraQuality;

        // preview layer
        CGRect bounds = self.preview.layer.bounds;
        _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
        _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        _captureVideoPreviewLayer.bounds = bounds;
        _captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
        [self.preview.layer addSublayer:_captureVideoPreviewLayer];

        AVCaptureDevicePosition devicePosition;
        switch (self.position) {
            case LLCameraPositionRear:
                if([self.class isRearCameraAvailable]) {
                    devicePosition = AVCaptureDevicePositionBack;
                } else {
                    devicePosition = AVCaptureDevicePositionFront;
                    _position = LLCameraPositionFront;
                }
                break;
            case LLCameraPositionFront:
                if([self.class isFrontCameraAvailable]) {
                    devicePosition = AVCaptureDevicePositionFront;
                } else {
                    devicePosition = AVCaptureDevicePositionBack;
                    _position = LLCameraPositionRear;
                }
                break;
            default:
                devicePosition = AVCaptureDevicePositionUnspecified;
                break;
        }

        if(devicePosition == AVCaptureDevicePositionUnspecified) {
            _videoCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        } else {
            _videoCaptureDevice = [self cameraWithPosition:devicePosition];
        }

        NSError *error = nil;
        _videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_videoCaptureDevice error:&error];

        if (!_videoDeviceInput) {
            if(self.onError) {
                self.onError(self, error);
            }
            return;
        }

        if([self.session canAddInput:_videoDeviceInput]) {
            [self.session addInput:_videoDeviceInput];
            // self.captureVideoPreviewLayer.connection.videoOrientation = [self orientationForConnection];
        }

        // add audio if video is enabled
        if(self.videoEnabled) {
            _audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
            _audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_audioCaptureDevice error:&error];
            if (!_audioDeviceInput) {
                if(self.onError) {
                    self.onError(self, error);
                }
            }

            if([self.session canAddInput:_audioDeviceInput]) {
                [self.session addInput:_audioDeviceInput];
            }

            // Setup the video output
            _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
            _videoOutput.alwaysDiscardsLateVideoFrames = NO;
            _videoOutput.videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                          [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
                                          nil];
            //[NSDictionary dictionaryWithObject:
            // [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];

            // Setup the audio output
            _audioOutput = [[AVCaptureAudioDataOutput alloc] init];

            // Add the data outputs to the session
            [_session addOutput:_videoOutput];
            [_session addOutput:_audioOutput];

            // Setup the queue for the sample buffer delegates
            dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
            [_videoOutput setSampleBufferDelegate:self queue:queue];
            [_audioOutput setSampleBufferDelegate:self queue:queue];
        }

        // continuously adjust white balance
        self.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;

        // image output
        self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
        NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
        [self.stillImageOutput setOutputSettings:outputSettings];
        [self.session addOutput:self.stillImageOutput];
    }

    // if we had disabled the connection on capture, re-enable it
    if (![self.captureVideoPreviewLayer.connection isEnabled]) {
        [self.captureVideoPreviewLayer.connection setEnabled:YES];
    }
    // [_assetWriter startWriting];
    // [_assetWriter startSessionAtSourceTime:kCMTimeZero];
    [self.session startRunning];
}

- (void)stop
{
    [self.session stopRunning];
}

- (BOOL)setupWriter:(NSURL *)url
{
    NSError *error = nil;
    _videoWriter = [[AVAssetWriter alloc] initWithURL:url fileType:AVFileTypeQuickTimeMovie
                                                error:&error];
    NSParameterAssert(_videoWriter);

    // Add video input
    NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
                                           [NSNumber numberWithDouble:128.0*1024.0], AVVideoAverageBitRateKey,
                                           nil];

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:192], AVVideoWidthKey,
                                   [NSNumber numberWithInt:144], AVVideoHeightKey,
                                   videoCompressionProps, AVVideoCompressionPropertiesKey,
                                   nil];

    _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                           outputSettings:videoSettings];

    NSParameterAssert(_videoWriterInput);
    _videoWriterInput.expectsMediaDataInRealTime = YES;

    // Add the audio input
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary *audioOutputSettings = nil;
    // Both kinds of audio settings cause the output video file to be corrupted.
    if( NO ) {
        // should work from iPhone 3GS on and from iPod 3rd generation
        audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                               [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                               [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                               [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                               [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                               nil];
    } else {
        // should work on any device, but requires more space
        audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithInt:kAudioFormatAppleLossless], AVFormatIDKey,
                               [NSNumber numberWithInt:16], AVEncoderBitDepthHintKey,
                               [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                               [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                               [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                               nil];
    }

    _audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                           outputSettings:audioOutputSettings];

    _audioWriterInput.expectsMediaDataInRealTime = YES;

    // add the inputs to the writer
    [_videoWriter addInput:_videoWriterInput];
    [_videoWriter addInput:_audioWriterInput];

    return YES;
}
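As an aside, the 192x144 / 128 kbps values above are deliberately tiny test settings; a more conventional configuration (sketch only, the values here are just examples) would look something like:

// Sketch: more typical H.264 writer settings (example values only).
NSDictionary *betterVideoSettings = @{
    AVVideoCodecKey  : AVVideoCodecH264,
    AVVideoWidthKey  : @1280,
    AVVideoHeightKey : @720,
    AVVideoCompressionPropertiesKey : @{ AVVideoAverageBitRateKey : @(4000000) }
};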

- (void)startVideoRecording
{
    if( !self.recording )
    {
        NSURL *url = [[NSURL alloc] initFileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]]];

        //if(!debug){
        [[NSFileManager defaultManager] removeItemAtURL:url error:nil];
        //}

        NSLog(@"start video recording...");
        if( ![self setupWriter:url] ) {
            NSLog(@"Setup Writer Failed");
            return;
        }

        // [_session startRunning];
        self.recording = YES;
    }
}

- (void)stopVideoRecording:(void (^)(LLSimpleCamera *camera, NSURL *outputFileUrl, NSError *error))completionBlock
{
    NSLog(@"STOP RECORDING");
    if(!self.videoEnabled) {
        return;
    }

    if( self.recording )
    {
        self.recording = NO;
        self.didRecord = completionBlock;

        [_session stopRunning];
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
            if(![_videoWriter finishWriting]) {
                NSLog(@"finishWriting returned NO");
            }
        });

        //[_videoWriter endSessionAtSourceTime:lastSampleTime];
        //[_videoWriterInput markAsFinished];
        //[_audioWriterInput markAsFinished];

        NSLog(@"video recording stopped");
    }
}
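Based on the lines I commented out above, I suspect the teardown order matters. A sketch of the sequence I believe is expected: mark the inputs finished, end the session, then use the block-based finish instead of the synchronous finishWriting:

// Sketch: typical AVAssetWriter teardown order.
[_videoWriterInput markAsFinished];
[_audioWriterInput markAsFinished];
[_videoWriter endSessionAtSourceTime:_lastSampleTime];
[_videoWriter finishWritingWithCompletionHandler:^{
    if (_videoWriter.status == AVAssetWriterStatusCompleted) {
        NSLog(@"writer finished: %@", _videoWriter.outputURL);
    } else {
        NSLog(@"writer failed: %@", _videoWriter.error);
    }
}];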

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
        fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"CALLING CAPTUREOUTPUT");

    // NOTE: these two lines run for every sample buffer, so recording is
    // switched back off before the writer block below ever executes.
    self.recording = NO;
    [self enableTorch:NO];

    if( !CMSampleBufferDataIsReady(sampleBuffer) )
    {
        NSLog(@"sample buffer is not ready. Skipping sample");
        return;
    }

    /*if(self.didRecord) {
        NSLog(@"DID RECORD EXISTS !!!");
        self.didRecord(self, outputFileURL, error);
    }*/

    // The code above would get the outputFileURL from the captureOutput
    // delegate method if I used AVCaptureMovieFileOutput

    if( self.recording == YES )
    {
        _lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        if( _videoWriter.status != AVAssetWriterStatusWriting )
        {
            [_videoWriter startWriting];
            [_videoWriter startSessionAtSourceTime:_lastSampleTime];
        }

        if( captureOutput == _videoOutput )
            [self newVideoSample:sampleBuffer];
        else if( captureOutput == _audioOutput ) {
            [self newAudioSample:sampleBuffer];
        }
        /*
         // If I add audio to the video, then the output file gets corrupted and it cannot be played back
         else
             [self newAudioSample:sampleBuffer];
         */
    }
}

- (void)newVideoSample:(CMSampleBufferRef)sampleBuffer
{
    if( self.recording )
    {
        if( _videoWriter.status > AVAssetWriterStatusWriting )
        {
            NSLog(@"Warning: writer status is %ld", (long)_videoWriter.status);
            if( _videoWriter.status == AVAssetWriterStatusFailed )
                NSLog(@"Error: %@", _videoWriter.error);
            return;
        }

        if( ![_videoWriterInput appendSampleBuffer:sampleBuffer] )
            NSLog(@"Unable to write to video input");
    }
}


- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer
{
    if( self.recording )
    {
        if( _videoWriter.status > AVAssetWriterStatusWriting )
        {
            NSLog(@"Warning: writer status is %ld", (long)_videoWriter.status);
            if( _videoWriter.status == AVAssetWriterStatusFailed )
                NSLog(@"Error: %@", _videoWriter.error);
            return;
        }

        if( ![_audioWriterInput appendSampleBuffer:sampleBuffer] )
            NSLog(@"Unable to write to audio input");
    }
}
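On the missing URL: with AVCaptureVideoDataOutput the delegate callback never carries a file URL; the only URL is the one I passed to AVAssetWriter. So presumably the didRecord block has to be fired from the writer's completion handler, something like:

// Sketch: deliver the result from the writer, not from captureOutput.
[_videoWriter finishWritingWithCompletionHandler:^{
    if (self.didRecord) {
        self.didRecord(self, _videoWriter.outputURL, _videoWriter.error);
    }
}];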
PS1: There are references to similar questions here.


PS2: Sorry about the broken indentation in the code above. The code is fully indented in my project, but somehow it lost its indentation when I pasted this much code here.

Any ideas on this?