
Objective-C: screen recording in an iPad app


I have been stuck on a problem for the last two weeks. I am developing an iPad app in which the user can annotate (draw) on the screen while the annotation session is recorded as a screen capture. The annotation works fine on its own, but as soon as recording starts the drawing loses its smoothness and lags. For the screen recording I use AVAssetWriter. The code for both the annotation and the recording is shown below... but I cannot figure out where the problem is.

My screenshot size is (1050, 650).

Should I solve this with Grand Central Dispatch? Can anyone help me figure out what is going on?

Please help.

My code:

    // For Annotation

    - (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
    {
        mouseSwiped = NO;
        UITouch *touch = [touches anyObject];

        if ([touch tapCount] == 2)
        {
            drawImage.image = nil;  // Double-tap to undo the drawing.
            return;
        }

        lastPoint = [touch locationInView:self.view];
        lastPoint.y -= 20;  // compensate for the 20 pt status bar
    }
    - (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
    {
        mouseSwiped = YES;
        UITouch *touch = [touches anyObject];
        CGPoint currentPoint = [touch locationInView:self.view];
        currentPoint.y -= 20;

        // Redraw the accumulated image, then stroke the new segment onto it.
        UIGraphicsBeginImageContext(drawImage.frame.size);
        [drawImage.image drawInRect:CGRectMake(0, 0, drawImage.frame.size.width, drawImage.frame.size.height)];
        CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
        CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 10.0);
        CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0);
        CGContextBeginPath(UIGraphicsGetCurrentContext());
        CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
        CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), currentPoint.x, currentPoint.y);
        CGContextStrokePath(UIGraphicsGetCurrentContext());
        drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();

        lastPoint = currentPoint;
    }
    - (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
    {
        UITouch *touch = [touches anyObject];
        if ([touch tapCount] == 2)
        {
            drawImage.image = nil;
            return;
        }

        // A tap with no movement draws a single dot.
        if (!mouseSwiped)
        {
            UIGraphicsBeginImageContext(drawImage.frame.size);
            [drawImage.image drawInRect:CGRectMake(0, 0, drawImage.frame.size.width, drawImage.frame.size.height)];
            CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
            CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 5.0);
            CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0);
            CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
            CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
            CGContextStrokePath(UIGraphicsGetCurrentContext());
            CGContextFlush(UIGraphicsGetCurrentContext());
            drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();
        }
    }



    // For Screen Recording

    #define FRAME_WIDTH  1024
    #define FRAME_HEIGHT 650
    #define TIME_SCALE   600

    - (UIImage*)screenshot
    {
        // Render the whole view hierarchy into an image. Note that
        // -renderInContext: is expensive and runs on the main thread, so it
        // competes with the touch handling that drives the annotation.
        UIGraphicsBeginImageContext(drawImage.frame.size);
        [self.view.layer renderInContext:UIGraphicsGetCurrentContext()];
        UIImage *viewImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        return viewImage;
    }


    -(NSURL*) pathToDocumentsDirectory
    {
        NSString* outputPath = [[NSString alloc] initWithFormat:@"%@/%@",
                                [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0],
                                @"output.mov"];
        outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];

        // Delete any recording left over from a previous run.
        NSFileManager* fileManager = [NSFileManager defaultManager];
        if ([fileManager fileExistsAtPath:outputPath]) {
            NSError* error;
            if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
                NSLog(@"Could not delete old recording file at path: %@", outputPath);
            }
        }

        [outputPath release];
        return [outputURL autorelease];
    }


    // Called by Core Video once it is finished with the frame's bytes.
    // This balances the CGDataProviderCopyData() below; without it the copied
    // data is leaked on every frame and memory grows for as long as the
    // recording runs.
    static void ReleaseScreenshotData(void *releaseRefCon, const void *baseAddress)
    {
        CFRelease((CFDataRef)releaseRefCon);
    }

    -(void) writeSample: (NSTimer*) _timer
    {
        if (assetWriterInput.readyForMoreMediaData) {
            CVReturn cvErr = kCVReturnSuccess;

            // Grab a screenshot of the current screen contents.
            CGImageRef image = (CGImageRef) [[self screenshot] CGImage];

            // Wrap the raw image bytes in a pixel buffer. Note the size
            // mismatch: the screenshot is 1050 px wide but the buffer is
            // declared FRAME_WIDTH (1024) px wide; the bytes-per-row argument
            // keeps the rows aligned, so the right edge is simply cropped.
            CVPixelBufferRef pixelBuffer = NULL;
            CFDataRef imageData = CGDataProviderCopyData(CGImageGetDataProvider(image));
            cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                                 FRAME_WIDTH,
                                                 FRAME_HEIGHT,
                                                 kCVPixelFormatType_32BGRA,
                                                 (void*)CFDataGetBytePtr(imageData),
                                                 CGImageGetBytesPerRow(image),
                                                 ReleaseScreenshotData, // frees imageData when done
                                                 (void*)imageData,
                                                 NULL,
                                                 &pixelBuffer);
            if (cvErr != kCVReturnSuccess) {
                NSLog (@"CVPixelBufferCreateWithBytes returned %d", cvErr);
            }

            // Derive the presentation time from the wall clock.
            CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();
            CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;
            CMTime presentationTime = CMTimeMake (elapsedTime * TIME_SCALE, TIME_SCALE);

            // Write the sample, then release our reference to the buffer; the
            // adaptor retains whatever it still needs.
            BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                        withPresentationTime:presentationTime];
            if (pixelBuffer) CVPixelBufferRelease(pixelBuffer);

            if (appended) {
                NSLog (@"appended sample at time %lf", CMTimeGetSeconds(presentationTime));
            } else {
                NSLog (@"failed to append");
                [self stopRecording];
            }
        }
    }


    -(void) startRecording
    {
        movieURL = [self pathToDocumentsDirectory];
        NSLog(@"path=%@", movieURL);
        movieError = nil;
        [assetWriter release];

        assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL
                                                fileType:AVFileTypeQuickTimeMovie
                                                   error:&movieError];
        [self writer];

        // Start pulling frames on a repeating timer at the target frame rate.
        frameRate = 40.0f;
        [assetWriterTimer invalidate];  // stop any timer from a previous recording
        assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:(1.0 / frameRate)
                                                            target:self
                                                          selector:@selector(writeSample:)
                                                          userInfo:nil
                                                           repeats:YES];
    }


    -(void) writer
    {
        NSDictionary *assetWriterInputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                  AVVideoCodecH264, AVVideoCodecKey,
                                                  [NSNumber numberWithInt:FRAME_WIDTH], AVVideoWidthKey,
                                                  [NSNumber numberWithInt:FRAME_HEIGHT], AVVideoHeightKey,
                                                  nil];

        assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                              outputSettings:assetWriterInputSettings];
        assetWriterInput.expectsMediaDataInRealTime = YES;
        [assetWriter addInput:assetWriterInput];

        [assetWriterPixelBufferAdaptor release];
        assetWriterPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
                                         initWithAssetWriterInput:assetWriterInput
                                         sourcePixelBufferAttributes:nil];
        [assetWriter startWriting];

        firstFrameWallClockTime = CFAbsoluteTimeGetCurrent();
        [assetWriter startSessionAtSourceTime:CMTimeMake(0, TIME_SCALE)];
    }


    -(void) stopRecording
    {
        [assetWriterTimer invalidate];
        assetWriterTimer = nil;

        [assetWriter finishWriting];
        NSLog (@"finished writing");
    }

The simplest thing to try is a lower frame rate.
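
As a rough illustration (a sketch under assumptions, not tested code: it reuses the names from the question, introduces a hypothetical new ivar `captureQueue` of type `dispatch_queue_t`, and calls the `ReleaseScreenshotData` byte-release callback shown with `writeSample:` above), you could combine a 10 fps capture rate with Grand Central Dispatch, which the asker already suspected would help: take the screenshot on the main thread, since UIKit requires it, and push the pixel-buffer wrapping and the append onto a background serial queue so the drawing code is never blocked by the encoder:

    -(void) startRecording
    {
        // ... asset writer setup exactly as in the question ...
        frameRate = 10.0f;  // 10 fps is usually plenty for annotation capture
        captureQueue = dispatch_queue_create("screen.capture", DISPATCH_QUEUE_SERIAL);
        assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:(1.0 / frameRate)
                                                            target:self
                                                          selector:@selector(writeSample:)
                                                          userInfo:nil
                                                           repeats:YES];
    }

    -(void) writeSample: (NSTimer*) _timer
    {
        // UIKit work stays on the main thread...
        UIImage *shot = [self screenshot];

        // ...everything else moves to the background queue. The block
        // retains `shot` until the frame has been handed to the writer.
        dispatch_async(captureQueue, ^{
            if (!assetWriterInput.readyForMoreMediaData) return;

            CGImageRef image = shot.CGImage;
            CFDataRef imageData = CGDataProviderCopyData(CGImageGetDataProvider(image));
            CVPixelBufferRef pixelBuffer = NULL;
            CVReturn cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                                          FRAME_WIDTH,
                                                          FRAME_HEIGHT,
                                                          kCVPixelFormatType_32BGRA,
                                                          (void*)CFDataGetBytePtr(imageData),
                                                          CGImageGetBytesPerRow(image),
                                                          ReleaseScreenshotData,
                                                          (void*)imageData,
                                                          NULL,
                                                          &pixelBuffer);
            if (cvErr != kCVReturnSuccess) {
                CFRelease(imageData);  // creation failed, so the callback never fires
                return;
            }

            CFTimeInterval elapsed = CFAbsoluteTimeGetCurrent() - firstFrameWallClockTime;
            CMTime pts = CMTimeMake(elapsed * TIME_SCALE, TIME_SCALE);
            BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                        withPresentationTime:pts];
            CVPixelBufferRelease(pixelBuffer);

            if (!appended) {
                dispatch_async(dispatch_get_main_queue(), ^{ [self stopRecording]; });
            }
        });
    }

The serial queue guarantees frames are appended in order, and if you are building without ARC (as the release/autorelease calls in the question suggest), remember to dispatch_release(captureQueue) in stopRecording on iOS 5.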