Warning: file_get_contents(/data/phpspider/zhask/data//catemap/5/objective-c/25.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181

Warning: file_get_contents(/data/phpspider/zhask/data//catemap/2/ionic-framework/2.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Objective c 具有屏幕截图比较的视频帧_Objective C_Xcode_Xcode7 - Fatal编程技术网

Objective c 具有屏幕截图比较的视频帧

Objective c 具有屏幕截图比较的视频帧,objective-c,xcode,xcode7,Objective C,Xcode,Xcode7,我有一个视频处理应用程序,它使用AVFoundation.h头文件保存视频截图。使用UIImagepicker上传屏幕截图后,我想播放该特定帧的视频。为此,我认为我应该将视频的每一帧与拍摄的视频截图进行比较 如何比较两个图像(不只是相等,如果图像几乎相同,则应检测) 如何播放特定帧中的视频 请通读代码,并建议我修改要做的比较视频帧和截图。我已成功地将视频的帧提取到tableview中。但无法将图像与截图进行比较 我浏览了互联网上可用的大多数方法,但找不到任何符合我需要的方法。提前谢谢。快乐编码

我有一个视频处理应用程序,它使用AVFoundation.h头文件保存视频截图。使用UIImagepicker上传屏幕截图后,我想播放该特定帧的视频。为此,我认为我应该将视频的每一帧与拍摄的视频截图进行比较

  • 如何比较两个图像(不只是相等,如果图像几乎相同,则应检测)

  • 如何播放特定帧中的视频

  • 请通读代码,并建议我修改要做的比较视频帧和截图。我已成功地将视频的帧提取到tableview中。但无法将图像与截图进行比较

  • 我浏览了互联网上可用的大多数方法,但找不到任何符合我需要的方法。提前谢谢。快乐编码

        // Generates one still frame per second of a video with AVAssetImageGenerator,
        // stores each frame in _imageArray, writes it to Documents as a PNG, and —
        // when generation starts failing (i.e. requests past the end of the asset) —
        // displays the collected frames in a table view.
        - (void)viewDidLoad {
        [super viewDidLoad];
        _imageArray = [[NSMutableArray alloc] init];
        _framesArray = [[NSMutableArray alloc] init];

          // NOTE(review): hard-coded development path; on a device this must point
          // at the app bundle or Documents directory instead.
          NSURL *url = [NSURL fileURLWithPath:@"/****Image path***/Desktop/ImageExtract/ImageExtract/The          Avengers-Iron-man Vs Nuke scene.mp4"];

         // Precise duration/timing so requested and actual frame times align closely.
         NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES };

         AVAsset *myAsset = [AVURLAsset URLAssetWithURL:url options:options];

         self.imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:myAsset];

         Float64 durationSeconds = CMTimeGetSeconds([myAsset duration]);

        // One requested time per whole second of video; timescale 60 is adequate
        // for integral-second values.
        _times = [[NSMutableArray alloc] init];
         for (Float64 i = 0; i < durationSeconds; i++)
        {
        [_times addObject:[NSValue valueWithCMTime:CMTimeMakeWithSeconds(i, 60)]];
        }

        // FIX: frameCount was referenced but never declared in the original; it
        // must be __block so the asynchronous handler can increment it across calls.
        __block NSUInteger frameCount = 0;

        [_imageGenerator generateCGImagesAsynchronouslyForTimes:_times
                                              completionHandler:^(CMTime requestedTime, CGImageRef image, CMTime actualTime,
                                                                  AVAssetImageGeneratorResult result, NSError *error) {

            // FIX: the original lost the '*' on both NSString declarations/casts.
            NSString *requestedTimeString = (NSString *)CFBridgingRelease(CMTimeCopyDescription(NULL, requestedTime));
            NSString *actualTimeString = (NSString *)CFBridgingRelease(CMTimeCopyDescription(NULL, actualTime));
            NSLog(@"Requested: %@; actual %@", requestedTimeString, actualTimeString);

            if (result == AVAssetImageGeneratorSucceeded) {

                NSLog(@"image generated");

                // CGImageRef is only valid inside this handler; wrap it immediately.
                UIImage *generatedImage = [UIImage imageWithCGImage:image];
                [_imageArray addObject:generatedImage];

                NSString *filename = [NSString stringWithFormat:@"Documents/frame_%lu.png", (unsigned long)frameCount];
                NSString *pngPath = [NSHomeDirectory() stringByAppendingPathComponent:filename];

                [_framesArray addObject:filename];

                [UIImagePNGRepresentation(generatedImage) writeToFile:pngPath atomically:YES];

                NSLog(@"Frame name %@", filename);
                NSLog(@"Time of Frame %f", CMTimeGetSeconds(requestedTime));
                NSLog(@"Frames Array %@", _framesArray);

                frameCount++;

            } else if (result == AVAssetImageGeneratorFailed) {
                // Failure fires for times at/past the asset's end, which is what
                // terminates this run — use it to present the results.
                NSLog(@"Failed with error: %@", [error localizedDescription]);
                NSLog(@"image Array %@", _imageArray);

                // FIX: %d with NSUInteger count; use %lu with an explicit cast.
                NSLog(@"Count %lu", (unsigned long)_imageArray.count);

                // FIX: this handler runs on a background queue; UIKit objects may
                // only be created and attached on the main thread.
                dispatch_async(dispatch_get_main_queue(), ^{
                    _imageTable = [[UITableView alloc] init];
                    _imageTable.frame = CGRectMake(0, 100, 320, 400);
                    _imageTable.dataSource = self;
                    _imageTable.delegate = self;
                    [self.view addSubview:_imageTable];
                });

            } else if (result == AVAssetImageGeneratorCancelled) {
                NSLog(@"Canceled");
            }

        }];

        }
        // One table row per extracted frame image.
        - (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section
        {
         return (NSInteger)[_imageArray count];
        }
        // Returns a cell showing the frame image for this row.
        // FIX: the original signature lost its '*'s ("(UITableViewCell )tableView:(UITableView )tableView").
        // FIX: the original added a fresh UIImageView to the cell on EVERY call, so
        // reused cells accumulated stacked image views; create the image view once
        // (tagged, on contentView) and only update its image on reuse.
        - (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath {

         static NSString *CellIdentifier = @"Cell";
         static const NSInteger kFrameImageViewTag = 1001;

         UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:CellIdentifier];

         if (cell == nil) {

         cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:CellIdentifier];

         UIImageView *frameView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 100, 100)];
         frameView.tag = kFrameImageViewTag;
         // Cell subviews belong on contentView, not the cell itself.
         [cell.contentView addSubview:frameView];

        }

        // Configure the cell...
         UIImageView *frameView = (UIImageView *)[cell.contentView viewWithTag:kFrameImageViewTag];
         frameView.image = _imageArray[indexPath.row];

         return cell;

         }
    
    - (void)viewDidLoad {
    [super viewDidLoad];
    _imageArray = [[NSMutableArray alloc] init];
    _framesArray = [[NSMutableArray alloc] init];
    // Do any additional setup after loading the view.
    NSURL *url = [NSURL fileURLWithPath:@"/****Image path***/Desktop/ImageExtract/ImageExtract/The Avengers-Iron-man Vs Nuke scene.mp4"];
    NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES };
    AVAsset *myAsset = [AVURLAsset URLAssetWithURL:url options:options];
    self.imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:myAsset];
    Float64 durationSeconds = CMTimeGetSeconds([myAsset duration]);
    _times = [[NSMutableArray alloc] init];
    for (Float64 i = 0; i < durationSeconds; i++)
    计算机视觉是出了名的困难,比较帧并不是最好的方法——如果有一个帧可以匹配源视频中任意数量的时间戳,该怎么办?最好将屏幕截图和源时间戳存储在一起,并使用它们来恢复播放。谢谢你的想法。我没有那样想过