Objective-C: iOS 8 time-lapse feature


I'm trying to add a time-lapse feature to my iOS app. Can anyone point me to resources or explain how to implement it? I want to capture video from my app using the time-lapse mode that is new in iOS 8, but I haven't found any developer resources. It would be great to get this working.

Comment: Hi @Mehedi Hasan, did you find any solution? I need the same functionality in my app.
Comment: No, I haven't found a solution yet, still trying :(
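(The answer below only covers assembling captured images into a movie. For the capture half of the question, here is a hedged sketch, not from the original thread: grab a still every couple of seconds with AVCaptureStillImageOutput, the iOS 8-era API, and collect the UIImages for the assembly code below. The class name, timer interval, and properties such as stillOutput and capturedImages are illustrative assumptions.)

#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>

@interface TimeLapseCapture : NSObject
@property (nonatomic, strong) AVCaptureSession *session;
@property (nonatomic, strong) AVCaptureStillImageOutput *stillOutput;
@property (nonatomic, strong) NSMutableArray *capturedImages;
@end

@implementation TimeLapseCapture

- (void)start {
    self.session = [[AVCaptureSession alloc] init];
    self.session.sessionPreset = AVCaptureSessionPresetPhoto;

    AVCaptureDevice *camera =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *input =
        [AVCaptureDeviceInput deviceInputWithDevice:camera error:nil];
    if (input && [self.session canAddInput:input]) [self.session addInput:input];

    self.stillOutput = [[AVCaptureStillImageOutput alloc] init];
    if ([self.session canAddOutput:self.stillOutput])
        [self.session addOutput:self.stillOutput];

    self.capturedImages = [NSMutableArray array];
    [self.session startRunning];

    // One still every 2 seconds; the interval sets the time-lapse speed.
    [NSTimer scheduledTimerWithTimeInterval:2.0
                                     target:self
                                   selector:@selector(captureFrame)
                                   userInfo:nil
                                    repeats:YES];
}

- (void)captureFrame {
    AVCaptureConnection *connection =
        [self.stillOutput connectionWithMediaType:AVMediaTypeVideo];
    [self.stillOutput captureStillImageAsynchronouslyFromConnection:connection
        completionHandler:^(CMSampleBufferRef sampleBuffer, NSError *error) {
            if (!sampleBuffer) return;
            NSData *jpeg = [AVCaptureStillImageOutput
                jpegStillImageNSDataRepresentation:sampleBuffer];
            UIImage *image = [UIImage imageWithData:jpeg];
            if (image) [self.capturedImages addObject:image];
        }];
}

@end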

You can use AVFoundation to combine the photos into a video. You can do this with AVAssetWriter:

// videoOutputPath is the destination file path for the assembled movie.
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                              [NSURL fileURLWithPath:videoOutputPath]
                                                       fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];
NSParameterAssert(videoWriter);

NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                               [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                               nil];
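// Optional (not in the original answer): compression options such as an
// average bit rate can be supplied under AVVideoCompressionPropertiesKey,
// e.g. @{AVVideoAverageBitRateKey: @(2000000)}. On newer SDKs the codec
// constant is spelled AVVideoCodecTypeH264.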

AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings];

AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                 assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                 sourcePixelBufferAttributes:nil];

NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
// Writing from stored images, not a live source, so no real-time requirement.
videoWriterInput.expectsMediaDataInRealTime = NO;
[videoWriter addInput:videoWriterInput];

//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];

CVPixelBufferRef buffer = NULL;

// Timing: each source image is shown for numberOfSecondsPerFrame seconds.
int32_t fps = 30; // timescale; this was left undefined in the original snippet
int frameCount = 0;
double numberOfSecondsPerFrame = 2;
double frameDuration = fps * numberOfSecondsPerFrame; // in timescale units
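// Worked example of the timing (added for clarity): with fps = 30 and
// numberOfSecondsPerFrame = 2, frameDuration is 60 timescale units, so
// frame 0 is stamped CMTimeMake(0, 30)  = 0.0 s,
// frame 1 is stamped CMTimeMake(60, 30) = 2.0 s, and so on.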
Then append the image buffers one after another:
for(UIImage * img in imageArray)
{
    // Assumes an NSProgress (`progress`) configured elsewhere in the original code.
    progress.completedUnitCount++;

    buffer = [self pixelBufferFromCGImage:[img CGImage] ImageSize:imageSize];

    BOOL append_ok = NO;
    int j = 0;
    while (!append_ok && j < 30) {
        if (adaptor.assetWriterInput.readyForMoreMediaData)  {
            //print out status:
            NSLog(@"Processing video frame (%d,%lu)",frameCount,(unsigned long)[imageArray count]);

            CMTime frameTime = CMTimeMake((int64_t)(frameCount * frameDuration), fps);
            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            if(!append_ok){
                NSError *error = videoWriter.error;
                if(error!=nil) {
                    NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                }
            }
        }
        else {
            printf("adaptor not ready %d, %d\n", frameCount, j);
            [NSThread sleepForTimeInterval:0.1];
        }
        j++;
    }
    if (!append_ok) {
        printf("error appending image %d after %d attempts\n", frameCount, j);
    }
    CVPixelBufferRelease(buffer); // release each frame's buffer to avoid leaking
    frameCount++;
}
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
    // Completion is asynchronous; check videoWriter.status and .error here.
}];
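Note that finishWritingWithCompletionHandler: returns immediately; the movie file is only complete once the handler runs. If the surrounding code needs to block until then (off the main thread), a semaphore is one hedged option, replacing the empty handler above:

dispatch_semaphore_t done = dispatch_semaphore_create(0);
[videoWriter finishWritingWithCompletionHandler:^{
    if (videoWriter.status == AVAssetWriterStatusFailed) {
        NSLog(@"Writing failed: %@", videoWriter.error);
    }
    dispatch_semaphore_signal(done);
}];
// Do not block the main thread with this wait.
dispatch_semaphore_wait(done, DISPATCH_TIME_FOREVER);

The pixelBufferFromCGImage:ImageSize: helper used above converts a UIImage's CGImage into a CVPixelBufferRef: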
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image ImageSize:(CGSize)size {

NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                         [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                         [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                         nil];
CVPixelBufferRef pxbuffer = NULL;

CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                      size.width,
                                      size.height,
                                      kCVPixelFormatType_32ARGB,
                                      (__bridge CFDictionaryRef) options,
                                      &pxbuffer);
if (status != kCVReturnSuccess) {
    NSLog(@"Failed to create pixel buffer");
    return NULL; // bail out instead of drawing into a NULL buffer below
}

CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata,
                                             (size_t)size.width,
                                             (size_t)size.height,
                                             8,
                                             CVPixelBufferGetBytesPerRow(pxbuffer),
                                             rgbColorSpace,
                                             kCGImageAlphaPremultipliedFirst);
// Draw the image into the pixel buffer's backing memory.
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                       CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);

CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

return pxbuffer;
}
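A further note, not from the original answer: if you pass pixel buffer attributes (rather than nil) when creating the adaptor, it exposes a pixel buffer pool once writing has started, which recycles buffers instead of allocating a fresh one per frame. A hedged sketch:

// Hedged alternative to CVPixelBufferCreate: reuse buffers from the
// adaptor's pool (requires non-nil sourcePixelBufferAttributes and a
// started writing session, otherwise pixelBufferPool is nil).
CVPixelBufferRef pooledBuffer = NULL;
CVReturn result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,
                                                     adaptor.pixelBufferPool,
                                                     &pooledBuffer);
if (result == kCVReturnSuccess) {
    // Lock, draw the CGImage as in pixelBufferFromCGImage:ImageSize:,
    // unlock, append via the adaptor, then CVPixelBufferRelease(pooledBuffer).
}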