Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/ios/115.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Ios 使用AVCaptureSessionPreset时如何更改图像质量_Ios_Objective C_Image_Avfoundation_Avcapturesession - Fatal编程技术网

Ios 使用AVCaptureSessionPreset时如何更改图像质量

Ios 使用AVCaptureSessionPreset时如何更改图像质量,ios,objective-c,image,avfoundation,avcapturesession,Ios,Objective C,Image,Avfoundation,Avcapturesession,我在寻找一些关于AVFoundation的帮助。目前,我有以下关于如何从本主题中捕获静态图像的分步指南: 我的问题是,如何降低保存图像的质量以及使用AVCaptureSessionPreset640x480。我想将图像质量减半,或者如果有其他方法使保存的图像尽可能小(可能是320x280),那么这可能比调整实际质量更好 我不知道以前是否有人问过这个问题,但我已经在网上搜索了几天,找不到答案。下面是我的代码 ` #导入“ViewController.h” #进口 @界面视图控制器() @结束 @实

我在寻找一些关于AVFoundation的帮助。目前,我有以下关于如何从本主题中捕获静态图像的分步指南:

我的问题是,如何降低保存图像的质量以及使用AVCaptureSessionPreset640x480。我想将图像质量减半,或者如果有其他方法使保存的图像尽可能小(可能是320x280),那么这可能比调整实际质量更好

我不知道以前是否有人问过这个问题,但我已经在网上搜索了几天,找不到答案。下面是我的代码

`

#导入“ViewController.h”
#进口
@界面视图控制器()
@结束
@实现视图控制器
@综合图像预览;
@合成图像;
@综合图像输出;
-(IBAction)捕获现在{
AVCaptureConnection*视频连接=nil;
for(stillImageOutput.connections中的AVCaptureConnection*连接)
{
用于(AVCaptureInputPort*在[connection inputPorts]中的端口)
{
如果([[port mediaType]isEqual:AVMediaTypeVideo])
{
视频连接=连接;
打破
}
}
if(视频连接)
{
打破
}
}
NSLog(@“即将从%@请求捕获”,stillImageOutput);
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer,NSError*错误)
{
CFDictionaryRef ExifTachments=CMGetAttachment(imageSampleBuffer,kCGImagePropertyExifDictionary,NULL);
如果(出口)
{
//对附件做些什么。
NSLog(@“附件:%@”,附件);
}否则{
NSLog(“无附件”);
}
NSData*imageData=[AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
//NSData*data2=[NSData dataWithData:uiimagejpegresentation(图像,0.5f)];
UIImage*image=[[UIImage alloc]initWithData:imageData];
self.iImage.image=图像;
UIImageWriteToSavedPhotosAlbum(图像,无,无,无);
}];
}
-(无效)视图显示:(BOOL)动画
{
AVCaptureSession*会话=[[AVCaptureSession alloc]init];
session.sessionPreset=AVCaptureSessionPreset640x480;
CALayer*viewLayer=self.imagePreview.layer;
NSLog(@“viewLayer=%@”,viewLayer);
AVCaptureVideoPreviewLayer*captureVideoPreviewLayer=[[AVCaptureVideoPreviewLayer alloc]初始化与会话:会话];
captureVideoPreviewLayer.frame=self.imagePreview.bounds;
[self.imagePreview.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice*device=[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
n错误*错误=nil;
AVCaptureDeviceInput*输入=[AVCaptureDeviceInput deviceInputWithDevice:设备 error:&错误];
如果(!输入){
//适当地处理错误。
NSLog(@“错误:尝试打开相机:%@”,错误);
}
[会话附加输入:输入];
stillImageOutput=[[AVCaptureStillImageOutput alloc]init];
NSDictionary*outputSettings=[[NSDictionary alloc]initWithObjectsAndKeys:AVVideoCodecJPEG,AVVideoCodecKey,nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[会议开始和结束];
}
-(无效)viewDidLoad
{
[超级视图下载];
//加载视图后,通常从nib执行任何其他设置。
}
-(无效)未收到记忆警告
{
[超级记忆警告];
//处置所有可以重新创建的资源。
}
@结束

`

尝试使用不同的预设,以获得不同分辨率的图像

根据Apple文档,对于iPhone4(背面),您将获得该会话的以下预设的以下分辨率图像

AVCaptureSessionPresetHigh:1280x720

AVCaptureSessionPresetMedium:480x360

AVCaptureSessionPresetLow:192x144

AVCaptureSessionPreset640x480:640x480

AVCaptureSessionPreset1280x720:1280x720

AVCaptureSessionPresetPhoto:2592x1936。视频输出不支持此选项


希望这会有所帮助。

尝试使用不同的预设,以获得不同分辨率的图像

根据Apple文档,对于iPhone4(背面),您将获得该会话的以下预设的以下分辨率图像

AVCaptureSessionPresetHigh:1280x720

AVCaptureSessionPresetMedium:480x360

AVCaptureSessionPresetLow:192x144

AVCaptureSessionPreset640x480:640x480

AVCaptureSessionPreset1280x720:1280x720

AVCaptureSessionPresetPhoto:2592x1936。视频输出不支持此选项


希望这会有所帮助。

您需要设置
kCVPixelBufferWidthKey
kCVPixelBufferHeightKey
对象上的
选项来设置您选择的分辨率。此宽度/高度将覆盖会话预设的宽度/高度。最小样本如下(添加错误检查)


注意:我只在mac电脑上试用过。理想情况下,它也应该适用于iOS。还可以尝试保持一些纵横比。

您需要设置
kCVPixelBufferWidthKey
kCVPixelBufferHeightKey
对象上的选项来设置您选择的分辨率。此宽度/高度将覆盖会话预设的宽度/高度。最小样本如下(添加错误检查)


注意:我只在mac电脑上试用过。理想情况下,它也应该适用于iOS。另外,请尝试保持一些纵横比。

感谢您的帮助，我已经在使用AVCaptureSessionPreset640x480
#import "ViewController.h"
#import <ImageIO/ImageIO.h>

@interface ViewController ()

@end

@implementation ViewController

@synthesize imagePreview;
@synthesize iImage;
@synthesize stillImageOutput;

#pragma mark - Actions

/// Captures one still frame from the active session as JPEG, shows it in
/// `iImage`, and saves it to the photo album.
-(IBAction)captureNow {
    // Locate the connection that carries video into the still-image output.
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in stillImageOutput.connections)
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo] )
            {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection)
        {
            break;
        }
    }

    // BUG FIX: capturing with a nil connection raises an exception — bail out
    // instead (e.g. when the session failed to start or has no video input).
    if (!videoConnection)
    {
        NSLog(@"ERROR: no video connection available for still capture");
        return;
    }

    NSLog(@"about to request a capture from: %@", stillImageOutput);
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
     {
         // Guard: on failure the sample buffer may be NULL.
         if (error || imageSampleBuffer == NULL)
         {
             NSLog(@"ERROR: still image capture failed: %@", error);
             return;
         }

         // EXIF metadata travels as an attachment on the sample buffer
         // (CMGetAttachment follows the Get rule — no release needed).
         CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
         if (exifAttachments)
         {
             // Do something with the attachments.
             NSLog(@"attachements: %@", exifAttachments);
         } else {
             NSLog(@"no attachments");
         }

         // Class method: wraps the buffer's JPEG payload without re-encoding.
         NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
         UIImage *image = [[UIImage alloc] initWithData:imageData];

         self.iImage.image = image;

         UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil);
     }];
}

#pragma mark - View lifecycle

/// Builds the capture pipeline (preview layer + JPEG still-image output) the
/// first time the view appears.
-(void)viewDidAppear:(BOOL)animated
{
    // BUG FIX: the super call was missing from this lifecycle override.
    [super viewDidAppear:animated];

    // BUG FIX: viewDidAppear: can fire many times (tab switches, modals);
    // only build the session once, or we stack duplicate sessions/layers.
    if (stillImageOutput)
    {
        return;
    }

    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPreset640x480;

    CALayer *viewLayer = self.imagePreview.layer;
    NSLog(@"viewLayer = %@", viewLayer);

    // The preview layer retains the session, so the local variable is safe.
    AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];

    captureVideoPreviewLayer.frame = self.imagePreview.bounds;
    [self.imagePreview.layer addSublayer:captureVideoPreviewLayer];

    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // BUG FIX: previously fell through and passed nil to addInput:,
        // which raises NSInvalidArgumentException.
        NSLog(@"ERROR: trying to open camera: %@", error);
        return;
    }
    if ([session canAddInput:input]) {
        [session addInput:input];
    }

    stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    // JPEG output; quality/size is governed by the session preset above.
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [stillImageOutput setOutputSettings:outputSettings];
    if ([session canAddOutput:stillImageOutput]) {
        [session addOutput:stillImageOutput];
    }

    [session startRunning];
}


- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end
    // Build the capture pipeline with an explicit pixel-buffer size on the
    // still-image output; the kCVPixelBufferWidth/HeightKey values override
    // the dimensions implied by the session preset.
    _session = [[AVCaptureSession alloc] init];
    _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    NSError * error;
    // NOTE(review): error is never checked — _sessionInput may be nil here
    // (e.g. no camera permission); confirm callers tolerate that.
    _sessionInput = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
    _stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    // Request 320x280 BGRA pixel buffers instead of the preset's native size.
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                              [NSNumber numberWithDouble:320.0], (id)kCVPixelBufferWidthKey,
                              [NSNumber numberWithDouble:280.0], (id)kCVPixelBufferHeightKey,
                              [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], (id)kCVPixelBufferPixelFormatTypeKey,
                              nil];

    [_stillImageOutput setOutputSettings:options];

    // Batch the graph changes so they are applied atomically on commit.
    [_session beginConfiguration ];
    [_session addInput:_sessionInput];
    [_session addOutput:_stillImageOutput];
    [_session setSessionPreset:AVCaptureSessionPresetPhoto];
    // Cache the video connection for later capture calls.
     _avConnection = [_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    [ _session commitConfiguration ];

.............

/// Starts the capture session so the preview and capture become live.
- (void)start {
    [self.session startRunning];
}

.............

// Capture one frame and log the delivered pixel-buffer dimensions.
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:self.avConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
 {
     // Guard: on failure the sample buffer may be NULL (was unchecked).
     if (error || imageSampleBuffer == NULL)
     {
         NSLog(@"ERROR: still image capture failed: %@", error);
         return;
     }
     CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer);
     CVPixelBufferLockBaseAddress(imageBuffer, 0);
     size_t width = CVPixelBufferGetWidth(imageBuffer);
     size_t height = CVPixelBufferGetHeight(imageBuffer);
     // BUG FIX: %d with size_t is undefined behavior on 64-bit; use %zu.
     NSLog(@"%zu : %zu", height, width);
     // BUG FIX: the base address was locked but never unlocked, which pins
     // the buffer and can starve the capture pipeline of buffers.
     CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
 }];