
iOS 8: GPUImageStillCamera image preview jumps when taking a photo

Tags: ios8, avfoundation, gpuimage, gpuimagestillcamera


I am using GPUImageStillCamera to take a square-cropped photo, and I allow the user to zoom the camera. When the user taps to take the photo, the preview jumps forward for a second (as if the camera zooms further into the region the user had zoomed to, then snaps back to the correct crop as soon as the image is returned to the screen). This only happens when the user has zoomed the camera; if they have not zoomed, there is no flicker/jump. (The returned image has the correct crop whether or not the user zoomed.)

Any ideas?

Creating the camera and adding the square crop:

//Add in filters
stillCamera = [[GPUImageStillCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720 cameraPosition:AVCaptureDevicePositionBack];
stillCamera.outputImageOrientation = UIInterfaceOrientationPortrait;

//Creating a square crop filter
cropFilter = [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0.f, (720.0f/1280.0f)/2.0f, 1.f, (720.0f/1280.0f))];
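
As an aside, if the square is meant to be vertically centered in the 720x1280 portrait frame, the y-offset above ((720/1280)/2 = 0.28125) places it slightly below center; a centered crop would use (1 - 720/1280)/2 = 0.21875. A minimal sketch of that variant (an assumption about intent, not a fix from the question):

//Hypothetical centered variant of the crop region (normalized coordinates)
CGFloat squareHeight = 720.0f / 1280.0f;             //square side as a fraction of frame height = 0.5625
CGFloat centeredY = (1.0f - squareHeight) / 2.0f;    //0.21875 centers the square vertically
cropFilter = [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0.0f, centeredY, 1.0f, squareHeight)];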
The pinch-to-zoom method:

- (void)imagePinch:(UIPinchGestureRecognizer *)recognizer { //Controlling the zoom scale as the user pinches the live preview

    if (recognizer.state == UIGestureRecognizerStateBegan) {
        zoomOutAdder = 0.0f;
        if (currentScale > 2) {
            zoomOutAdder = currentScale;
        }
    }

    //Accelerate the pinch; zooming out gets faster the further in we are
    float addition = (recognizer.scale - lastScale);
    if (addition > 0) {
        addition = addition * 1.7;
    }
    if (addition < 0) {
        addition = addition * (1.7 + zoomOutAdder);
    }

    currentScale = currentScale + addition;
    lastScale = recognizer.scale;

    //Clamp the zoom between 1x and 4x
    if (currentScale < 1) {
        currentScale = 1;
    }
    if (currentScale > 4) {
        currentScale = 4;
    }
    if (currentScale == 1) {
        zoomOutAdder = 0.0f;
    }

    cameraImagePreview.transform = CGAffineTransformMakeScale(currentScale, currentScale);

    if (recognizer.state == UIGestureRecognizerStateEnded) {
        lastScale = 1.0f;
    }
}
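
Worth noting: the pinch above zooms only the preview view with a CGAffineTransform; the capture session itself never changes, which matches the symptom appearing only while zoomed. A hedged alternative (my assumption, not something confirmed in the thread) is to zoom the capture device itself via AVCaptureDevice's videoZoomFactor (available since iOS 7), so the preview and the captured frames always agree:

//Sketch: zoom the hardware instead of transforming the preview view.
//applyHardwareZoom: is a hypothetical helper; stillCamera.inputCamera is the active AVCaptureDevice.
- (void)applyHardwareZoom:(CGFloat)scale {
    AVCaptureDevice *device = stillCamera.inputCamera;
    if ([device lockForConfiguration:nil]) {
        //Clamp to what the current format supports
        device.videoZoomFactor = MAX(1.0, MIN(scale, device.activeFormat.videoMaxZoomFactor));
        [device unlockForConfiguration];
    }
}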
Additional methods

Some additional code showing the creation of the various camera elements used to capture the photo:

centerPoint = CGPointMake(self.view.frame.size.width/2, (cameraHolder.frame.size.height+50+self.view.frame.size.height)/2);
cameraImagePreview = [[GPUImageView alloc] initWithFrame:CGRectMake(0, 0, cameraHolder.frame.size.width, cameraHolder.frame.size.width)];
[cameraHolder addSubview:cameraImagePreview];
UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(imageTouch:)];
[cameraImagePreview addGestureRecognizer:tapGesture];
UIPinchGestureRecognizer *pinchGesture = [[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(imagePinch:)];
[cameraImagePreview addGestureRecognizer:pinchGesture];


float scaleForView = self.view.frame.size.width/720.0;
fullCameraFocusPoint = [[UIView alloc]initWithFrame:CGRectMake(0, 0, self.view.frame.size.width, 1280*scaleForView)];
fullCameraFocusPoint.center = CGPointMake(cameraHolder.frame.size.width/2, (cameraHolder.frame.size.width/2)+50);
[self.view insertSubview:fullCameraFocusPoint atIndex:0];

takenPhoto = [[UIImageView alloc]initWithFrame:cameraHolder.frame];
takenPhoto.alpha = 0;
[self.view addSubview:takenPhoto];


//Add in filters
stillCamera = [[GPUImageStillCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720 cameraPosition:AVCaptureDevicePositionBack];
stillCamera.outputImageOrientation = UIInterfaceOrientationPortrait;

//Creating a square crop filter
cropFilter = [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0.f, (720.0f/1280.0f)/2.0f, 1.f, (720.0f/1280.0f))];

//Create standard vignette filter
vignetteFilter = [[GPUImageVignetteFilter alloc] init]; //1
vignetteFilter.vignetteCenter = CGPointMake(.5, .5);
vignetteFilter.vignetteStart = 0.4f;
vignetteFilter.vignetteEnd = 1.08f;

//Add filters to photo
[cropFilter addTarget:vignetteFilter];
[stillCamera addTarget:cropFilter];
[vignetteFilter addTarget:cameraImagePreview];
[stillCamera startCameraCapture];
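
For context, the capture mentioned in the comments below presumably runs through the end of this chain with GPUImageStillCamera's capturePhotoAsImageProcessedUpToFilter:withCompletionHandler:. A minimal sketch of what that might look like (takePhoto is a hypothetical name; only the GPUImage call and the filter chain come from the question and thread):

//Sketch: capture a still processed through the same crop + vignette chain as the live preview
- (void)takePhoto {
    [stillCamera capturePhotoAsImageProcessedUpToFilter:vignetteFilter
                                  withCompletionHandler:^(UIImage *processedImage, NSError *error) {
        takenPhoto.image = processedImage;
        [UIView animateWithDuration:.2 animations:^{
            takenPhoto.alpha = 1; //fade the captured frame in over the preview
        }];
    }];
}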

Comments:

- Quick comment... please edit your question and reformat your code.
- @jesses.co.tt - fixed it... thanks for the heads-up.
- So you have two different gesture recognizers, one detecting taps and the other pinch + zoom? My guess is some view is intercepting touches and this actually has nothing to do with GPUImage... Can you post the interface code with logging?
- Yes, one detects taps on the button below the image, and the other detects pinches on the actual live image preview and zooms it. The tap triggers the capturePhotoAsImageProcessedUpToFilter method, which is when the jump happens. Is there a specific snippet you want logged from the interface? Still new to iOS, so not quite sure what you're looking for.
- I just want to see whether one UI interaction is triggering, or accepting, another gesture's interaction...
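
One way to produce the logging asked for above (an assumption; no delegate wiring appears in the question) is to adopt UIGestureRecognizerDelegate, assign the controller as pinchGesture.delegate, and log whenever the recognizers try to run together:

//Sketch: logging gesture interplay; assumes the controller was set as the recognizers' delegate.
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer
    shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer {
    NSLog(@"%@ asked to recognize alongside %@", gestureRecognizer, otherGestureRecognizer);
    return NO;
}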
- (void)flipCamera {

    //Mirror the flip button while the camera turns around
    if (stillCamera.cameraPosition != AVCaptureDevicePositionFront) {
        [UIView animateWithDuration:.65 animations:^{
            flipCamera.transform = CGAffineTransformMakeScale(-1, 1);
        }];
    } else {
        [UIView animateWithDuration:.65 animations:^{
            flipCamera.transform = CGAffineTransformMakeScale(1, 1);
        }];
    }
    [self performSelector:@selector(rotateCamera) withObject:nil afterDelay:.2];
}


- (void)rotateCamera {

    [stillCamera rotateCamera];

    //Adjust flash settings as needed
    [stillCamera.inputCamera lockForConfiguration:nil];
    if (stillCamera.cameraPosition != AVCaptureDevicePositionFront) {
        [stillCamera.inputCamera setFlashMode:AVCaptureFlashModeOff];
    }

    NSAttributedString *attributedFlash =
    [[NSAttributedString alloc]
     initWithString:@"off"
     attributes:@{
                  NSFontAttributeName : [UIFont fontWithName:@"Roboto-Regular" size:13.0f],
                  NSForegroundColorAttributeName : [UIColor colorWithWhite:1 alpha:.55],
                  NSKernAttributeName : @(.25f)
                  }];
    flashLabel.attributedText = attributedFlash;

    [UIView animateWithDuration:.2 animations:^{
        [flash setTintColor:[UIColor colorWithWhite:1 alpha:.55]];
    }];

    [stillCamera.inputCamera unlockForConfiguration];
}
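
A small hardening worth mentioning (my addition, not in the question): AVCaptureDevice recommends checking the lockForConfiguration: result and flash support before setting flashMode:

//Sketch: guarded flash configuration
NSError *error = nil;
if ([stillCamera.inputCamera lockForConfiguration:&error]) {
    if (stillCamera.inputCamera.hasFlash &&
        [stillCamera.inputCamera isFlashModeSupported:AVCaptureFlashModeOff]) {
        stillCamera.inputCamera.flashMode = AVCaptureFlashModeOff;
    }
    [stillCamera.inputCamera unlockForConfiguration];
}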



- (void)changeFlash {

    if (stillCamera.cameraPosition == AVCaptureDevicePositionFront) { //no flash available on the front camera
        return;
    }

    [stillCamera.inputCamera lockForConfiguration:nil];
    if (stillCamera.inputCamera.flashMode == AVCaptureFlashModeOff) {
        [stillCamera.inputCamera setFlashMode:AVCaptureFlashModeOn];
        [self animateFlashWithTintColor:[UIColor colorWithWhite:1 alpha:1] andString:@"on"];
    } else if (stillCamera.inputCamera.flashMode == AVCaptureFlashModeOn) {
        [stillCamera.inputCamera setFlashMode:AVCaptureFlashModeOff];
        [self animateFlashWithTintColor:[UIColor colorWithWhite:1 alpha:.55] andString:@"off"];
    }

    [stillCamera.inputCamera unlockForConfiguration];
}



- (void)animateFlashWithTintColor:(UIColor *)color andString:(NSString *)text {

    //Set new text
    NSAttributedString *attributedFlash =
    [[NSAttributedString alloc]
     initWithString:text
     attributes:@{
                  NSFontAttributeName : [UIFont fontWithName:@"Roboto-Regular" size:13.0f],
                  NSForegroundColorAttributeName : [UIColor colorWithWhite:1 alpha:.55],
                  NSKernAttributeName : @(.25f)
                  }];
    flashLabel.attributedText = attributedFlash;

    float duration = .7;

    [UIView animateKeyframesWithDuration:duration delay:0 options:0 animations:^{
        [UIView addKeyframeWithRelativeStartTime:0 relativeDuration:duration animations:^{
            [flash setTintColor:color];
        }];
        [UIView addKeyframeWithRelativeStartTime:0 relativeDuration:.7/duration animations:^{
            flash.transform = CGAffineTransformMakeRotation(M_PI);
        }];
    } completion:^(BOOL finished){
        flash.transform = CGAffineTransformIdentity;
    }];
}


- (void)usePhoto {

    if ([ALAssetsLibrary authorizationStatus] != ALAuthorizationStatusAuthorized) {
        NSLog(@"Do Not Have Right To Save to Photo Library");
    }

    //Save image to the phone's photo album
    UIImageWriteToSavedPhotosAlbum(takenPhoto.image, nil, nil, nil);

    //Hand the image to the delegate
    [self.delegate saveImageToDatabase:takenPhoto.image];
    [self performSelector:@selector(dismissCamera) withObject:nil afterDelay:.4];
}
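
Since the UIImageWriteToSavedPhotosAlbum call above passes nil for its completion target, save failures go unnoticed; UIKit's documented callback signature can surface them (the method body below is a sketch):

//In usePhoto, the save call would become:
//  UIImageWriteToSavedPhotosAlbum(takenPhoto.image, self, @selector(image:didFinishSavingWithError:contextInfo:), NULL);

- (void)image:(UIImage *)image didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    if (error) {
        NSLog(@"Saving to the photo library failed: %@", error);
    }
}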