Warning: file_get_contents(/data/phpspider/zhask/data//catemap/0/iphone/38.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
获取*.mov视频IOS的缩略图_Ios_Iphone_Video_Avfoundation_Video Thumbnails - Fatal编程技术网

获取*.mov视频IOS的缩略图

获取*.mov视频IOS的缩略图,ios,iphone,video,avfoundation,video-thumbnails,Ios,Iphone,Video,Avfoundation,Video Thumbnails,我想获取iPhone/iPAD拍摄的视频(*.mov)的缩略图。我正在尝试使用AVFoundation库执行此操作,但出现以下错误: couldn't generate thumbnail, error:Error Domain=AVFoundationErrorDomain Code=-11822 "Cannot Open" UserInfo=0x15d90a30 {NSLocalizedDescription=Cannot Open, NSLocalizedFailureReason=This media format is not supported.}

我想获取iPhone/iPAD拍摄的视频(*.mov)的缩略图。我正在尝试使用AVFoundation库执行此操作,但出现以下错误:

couldn't generate thumbnail, error:Error Domain=AVFoundationErrorDomain Code=-11822 "Cannot Open" UserInfo=0x15d90a30 {NSLocalizedDescription=Cannot Open, NSLocalizedFailureReason=This media format is not supported.}
代码:

我用我的应用程序录制了视频,想显示它们的缩略图。

在这里。。。(从该链接获取-)


我没有足够的声誉来评论,但上面的例子缺少
CGImageRelease(imageRef)

最后的方法应该如下所示:

- (UIImage*)loadImage {

    // Generate a still-frame thumbnail from the recorded video at `vidURL`
    // (an NSURL defined elsewhere in this class).
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:vidURL options:nil];
    AVAssetImageGenerator *generate = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    // NOTE(review): consider generate.appliesPreferredTrackTransform = YES
    // so portrait recordings come out upright.
    NSError *err = NULL;
    // Request the frame at 1/60 of a second into the movie.
    CMTime time = CMTimeMake(1, 60);
    //Grab image
    CGImageRef imgRef = [generate copyCGImageAtTime:time actualTime:NULL error:&err];
    NSLog(@"err==%@, imageRef==%@", err, imgRef);

    UIImage* finalImage = [[UIImage alloc] initWithCGImage:imgRef];
    // BUG FIX: the original released `imageRef`, an undeclared identifier —
    // the local variable is `imgRef`. Releasing it balances the Create-rule
    // ownership from copyCGImageAtTime: (ARC does not manage CGImageRef).
    CGImageRelease(imgRef);
    return finalImage;
}

我花了一段时间才意识到。。。因此,它可能会帮助某些人。

您可以通过两种方式在 Swift 中生成缩略图：1. AVAssetImageGenerator；2. MPMoviePlayerController

1. 

    /// Generates a thumbnail for the video at `url` via AVAssetImageGenerator.
    /// (Swift 1-era syntax: NSError out-parameter API, raw CGImageRef.)
    func generateThumnail(url : NSURL) -> UIImage{
            var asset : AVAsset = AVAsset.assetWithURL(url) as AVAsset
            var assetImgGenerate : AVAssetImageGenerator = AVAssetImageGenerator(asset: asset)
            // Honor the track's preferred transform so portrait video is upright.
            assetImgGenerate.appliesPreferredTrackTransform = true
            var error       : NSError? = nil
            // Frame at 1/30 of a second into the movie.
            var time        : CMTime = CMTimeMake(1, 30)
            var img         : CGImageRef = assetImgGenerate.copyCGImageAtTime(time, actualTime: nil, error: &error)
            // NOTE(review): the force-unwrap below crashes if frame extraction
            // failed (img nil) — exactly the "unexpectedly found nil" error
            // reported in the comments further down this page.
            var frameImg    : UIImage = UIImage(CGImage: img)!

            return frameImg
        }

2. 

    /// Requests video thumbnails via MPMoviePlayerController
    /// (API deprecated as of iOS 9; shown here as the second approach).
    override func viewDidLoad() {
            super.viewDidLoad()
            var moviePlayer         : MPMoviePlayerController!  = MPMoviePlayerController(contentURL: moviePlayManager.movieURL)
            moviePlayer.view.frame   = CGRect(x: self.view.frame.origin.x, y: self.view.frame.origin.y, width:
                                       self.view.frame.size.width, height: self.view.frame.height)
            moviePlayer.fullscreen   = true
            moviePlayer.controlStyle = MPMovieControlStyle.None
            // Thumbnail delivery is asynchronous: observe the did-finish
            // notification; the selector must match a handler method declared
            // elsewhere in this class.
            NSNotificationCenter.defaultCenter().addObserver(self,
        selector: "videoThumbnailIsAvailable:",
        name: MPMoviePlayerThumbnailImageRequestDidFinishNotification,
        object: nil)


        // Ask for the keyframe nearest the 3-second mark.
        let thumbnailTimes = 3.0
        moviePlayer.requestThumbnailImagesAtTimes([thumbnailTimes],
                timeOption: .NearestKeyFrame)
        }

    /// Handler for MPMoviePlayerThumbnailImageRequestDidFinishNotification:
    /// pulls the generated image out of the notification's userInfo.
    func videoThumbnailIsAvailable(notification: NSNotification){

        if let player = moviePlayer{
            // On success the request puts the image under
            // MPMoviePlayerThumbnailImageKey; the conditional cast is nil on failure.
            let thumbnail =
            notification.userInfo![MPMoviePlayerThumbnailImageKey] as? UIImage

            if let image = thumbnail{

                /* We got the thumbnail image. You can now use it here */
                println("Thumbnail image = \(image)")
            }
        }
    } // BUG FIX: the func's closing brace was missing in the original listing.
您需要检查视频曲目中的每个视频片段是否为空

+ (UIImage*)getVideoPreViewImage:(AVAsset *)asset atTimeSec:(double)timeSec
{
    // Generates a thumbnail for `asset` at `timeSec` seconds, skipping any
    // empty leading track segments (e.g. gaps in a composition).
    // Returns nil when the asset is nil, has no video segments, or the frame
    // cannot be decoded.
    if (!asset) {
        return nil;
    }


    // First video track; if it is nil, messaging it below simply yields
    // nil/zero and we bail out on the empty segment list.
    AVAssetTrack *videoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    NSArray<AVAssetTrackSegment *> *segs = videoTrack.segments;
    if (!segs.count) {
        return nil;
    }

    // Find the start of the first non-empty segment: an empty segment maps no
    // media and cannot produce a frame.
    CMTime currentStartTime = kCMTimeZero;
    for (AVAssetTrackSegment *seg in segs) {
        if (!seg.isEmpty) {
            currentStartTime = seg.timeMapping.target.start;
            break;
        }
    }

    // Clamp the requested time into [currentStartTime, duration].
    CMTime coverAtTimeSec = CMTimeMakeWithSeconds(timeSec, asset.duration.timescale);

    if (CMTimeCompare(coverAtTimeSec, asset.duration) == 1 || CMTimeCompare(coverAtTimeSec, currentStartTime) == -1) {
        coverAtTimeSec = currentStartTime;
    }

    AVAssetImageGenerator *assetGen = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
    // Zero tolerance on both sides: extract exactly the requested frame.
    assetGen.requestedTimeToleranceBefore = kCMTimeZero;
    assetGen.requestedTimeToleranceAfter = kCMTimeZero;
    // Honor the track's preferred transform so portrait video is upright.
    assetGen.appliesPreferredTrackTransform = YES;


    NSError *error = nil;
    CGImageRef image = [assetGen copyCGImageAtTime:coverAtTimeSec actualTime:NULL error:&error];
    // BUG FIX: check the returned image, not `error` — per Cocoa convention
    // the error out-parameter is only meaningful when the call fails, and the
    // original also leaked `image` when both happened to be non-NULL.
    if (!image) {
        return nil;
    }
    UIImage *videoImage = [UIImage imageWithCGImage:image];


    // Balance the Create-rule ownership from copyCGImageAtTime:.
    CGImageRelease(image);
    return videoImage;
}
+ (UIImage*)getVideoPreViewImage:(AVAsset *)asset atTimeSec:(double)timeSec
{
    if (!asset) {
        return nil;
    }
    AVAssetTrack *videoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    NSArray<AVAssetTrackSegment *> *segs = videoTrack.segments;
    if (!segs.count) {
        return nil;
    }
    CMTime currentStartTime = kCMTimeZero;
    for (NSInteger i = 0; i < segs.count; i++) {
谢谢@Ashok它工作得很好,修改很少。节省了我的时间。放弃你的答案。看看下面这个@jr.root.cs的答案。别忘了发布imgRef。太好了,有没有办法让CMTimeMake作为播放视频的暂停时间?不要忘记添加生成器。appliesPreferredTrackTransform=是;要修复图像方向HI,此代码工作正常。但是当我有更多的视频URL时,速度会变慢。我确实尝试了第一种方法…但它不起作用…并给出了
致命错误:在展开可选值时意外地发现了nil
,因为img没有任何值。请让我知道哪个值是nil,因为这是由于传递了nil值而发生的。否,可能b资产(assetimgenerate)nil
1. 

    /// Generates a thumbnail for the video at `url` via AVAssetImageGenerator.
    /// (Swift 1-era syntax: NSError out-parameter API, raw CGImageRef.)
    func generateThumnail(url : NSURL) -> UIImage{
            var asset : AVAsset = AVAsset.assetWithURL(url) as AVAsset
            var assetImgGenerate : AVAssetImageGenerator = AVAssetImageGenerator(asset: asset)
            // Honor the track's preferred transform so portrait video is upright.
            assetImgGenerate.appliesPreferredTrackTransform = true
            var error       : NSError? = nil
            // Frame at 1/30 of a second into the movie.
            var time        : CMTime = CMTimeMake(1, 30)
            var img         : CGImageRef = assetImgGenerate.copyCGImageAtTime(time, actualTime: nil, error: &error)
            // NOTE(review): the force-unwrap below crashes if frame extraction
            // failed (img nil) — exactly the "unexpectedly found nil" error
            // reported in the comments on this page.
            var frameImg    : UIImage = UIImage(CGImage: img)!

            return frameImg
        }

2. 

    /// Requests video thumbnails via MPMoviePlayerController
    /// (API deprecated as of iOS 9; shown here as the second approach).
    override func viewDidLoad() {
            super.viewDidLoad()
            var moviePlayer         : MPMoviePlayerController!  = MPMoviePlayerController(contentURL: moviePlayManager.movieURL)
            moviePlayer.view.frame   = CGRect(x: self.view.frame.origin.x, y: self.view.frame.origin.y, width:
                                       self.view.frame.size.width, height: self.view.frame.height)
            moviePlayer.fullscreen   = true
            moviePlayer.controlStyle = MPMovieControlStyle.None
            // Thumbnail delivery is asynchronous: observe the did-finish
            // notification; the selector must match a handler method declared
            // elsewhere in this class.
            NSNotificationCenter.defaultCenter().addObserver(self,
        selector: "videoThumbnailIsAvailable:",
        name: MPMoviePlayerThumbnailImageRequestDidFinishNotification,
        object: nil)


        // Ask for the keyframe nearest the 3-second mark.
        let thumbnailTimes = 3.0
        moviePlayer.requestThumbnailImagesAtTimes([thumbnailTimes],
                timeOption: .NearestKeyFrame)
        }

    /// Handler for MPMoviePlayerThumbnailImageRequestDidFinishNotification:
    /// pulls the generated image out of the notification's userInfo.
    func videoThumbnailIsAvailable(notification: NSNotification){

        if let player = moviePlayer{
            // On success the request puts the image under
            // MPMoviePlayerThumbnailImageKey; the conditional cast is nil on failure.
            let thumbnail =
            notification.userInfo![MPMoviePlayerThumbnailImageKey] as? UIImage

            if let image = thumbnail{

                /* We got the thumbnail image. You can now use it here */
                println("Thumbnail image = \(image)")
            }
        }
    } // BUG FIX: the func's closing brace was missing in the original listing.
+ (UIImage*)getVideoPreViewImage:(AVAsset *)asset atTimeSec:(double)timeSec
{
    // Generates a thumbnail for `asset` at `timeSec` seconds, skipping any
    // empty leading track segments (e.g. gaps in a composition).
    // Returns nil when the asset is nil, has no video segments, or the frame
    // cannot be decoded.
    if (!asset) {
        return nil;
    }


    // First video track; if it is nil, messaging it below simply yields
    // nil/zero and we bail out on the empty segment list.
    AVAssetTrack *videoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    NSArray<AVAssetTrackSegment *> *segs = videoTrack.segments;
    if (!segs.count) {
        return nil;
    }

    // Find the start of the first non-empty segment: an empty segment maps no
    // media and cannot produce a frame.
    CMTime currentStartTime = kCMTimeZero;
    for (AVAssetTrackSegment *seg in segs) {
        if (!seg.isEmpty) {
            currentStartTime = seg.timeMapping.target.start;
            break;
        }
    }

    // Clamp the requested time into [currentStartTime, duration].
    CMTime coverAtTimeSec = CMTimeMakeWithSeconds(timeSec, asset.duration.timescale);

    if (CMTimeCompare(coverAtTimeSec, asset.duration) == 1 || CMTimeCompare(coverAtTimeSec, currentStartTime) == -1) {
        coverAtTimeSec = currentStartTime;
    }

    AVAssetImageGenerator *assetGen = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
    // Zero tolerance on both sides: extract exactly the requested frame.
    assetGen.requestedTimeToleranceBefore = kCMTimeZero;
    assetGen.requestedTimeToleranceAfter = kCMTimeZero;
    // Honor the track's preferred transform so portrait video is upright.
    assetGen.appliesPreferredTrackTransform = YES;


    NSError *error = nil;
    CGImageRef image = [assetGen copyCGImageAtTime:coverAtTimeSec actualTime:NULL error:&error];
    // BUG FIX: check the returned image, not `error` — per Cocoa convention
    // the error out-parameter is only meaningful when the call fails, and the
    // original also leaked `image` when both happened to be non-NULL.
    if (!image) {
        return nil;
    }
    UIImage *videoImage = [UIImage imageWithCGImage:image];


    // Balance the Create-rule ownership from copyCGImageAtTime:.
    CGImageRelease(image);
    return videoImage;
}