Ios 如何创建具有真实持续时间的虚拟AVPlayerItem?
我正在使用AVPlayer在AVSynchronizedLayer上播放CAKeyFrameAnimations。为了在动画期间不播放AVAsset时保持播放器播放,我将AVPlayerItem的forwardPlaybackEndTime设置为所需动画的持续时间。不幸的是,在这个forwardPlaybackEndTime期间,似乎不可能seekToTime:,因为AVPlayer总是回到开头,可能是因为它试图在AVPlayerItem的持续时间内寻找。 我怎样才能创建一个具有真实持续时间的虚拟AV
AVPlayer
在AVSynchronizedLayer
上播放CAKeyFrameAnimations
。为了在动画期间不播放AVAsset
时保持播放器播放,我将AVPlayerItem
的forwardPlaybackEndTime
设置为所需动画的持续时间。不幸的是。在这个forwardPlaybackEndTime
期间,似乎不可能seekToTime:
,因为AVPlayer
总是回到开头。可能是因为它试图寻找AVPlayerItem
的持续时间
我怎样才能创建一个具有真实持续时间的虚拟AVPlayerItem来欺骗AVPlayer播放一些空的
AVPlayerItem
,并让我seekToTime
?不幸的是,seekToTime
只会搜索AVPlayerItem的持续时间。因此,需要创建一个虚拟播放器项目(player item)来生成可搜索的持续时间。为了快速执行此操作,需要创建一个虚拟AVPlayerItem。下面是生成此类项的实现示例。它很长,但它是必需的。祝你好运
// Private class extension for FakeAsset: declares the helper that renders
// the single black frame used as dummy video content.
@interface FakeAsset ()
// Creates a 120x80 solid-black ARGB pixel buffer. Caller owns the returned
// buffer and must release it with CVBufferRelease().
+ (CVPixelBufferRef)blackImagePixelBuffer;
@end
@implementation FakeAsset
/// Asynchronously writes a tiny two-frame black H.264 movie of the given
/// duration to a temporary file and hands the resulting AVAsset to the
/// callback on the main queue. The asset exists only to give AVPlayer a
/// real, seekable duration.
///
/// @param duration Desired duration of the dummy asset.
/// @param callBack Invoked on the main queue with the finished asset.
///                 (The selector keeps its historical "completitionBlock"
///                 spelling so existing callers keep compiling.)
+ (void)assetWithDuration:(CMTime)duration
        completitionBlock:(void (^)(AVAsset *))callBack
{
    NSError *error = nil;
    NSString *assetPath = nil;
    NSUInteger i = 0;
    // Find an unused temp file name (dummyAsset0.m4v, dummyAsset1.m4v, ...).
    // FIX: %lu with a cast — the original used %i for an NSUInteger.
    do
    {
        assetPath =
        [NSTemporaryDirectory() stringByAppendingPathComponent:
         [NSString stringWithFormat:@"dummyAsset%lu.m4v", (unsigned long)i]];
        i++;
    }
    // FIX: the original passed NO where fileExistsAtPath:isDirectory: expects
    // a BOOL * out-parameter; use the plain existence check instead.
    while ([[NSFileManager defaultManager] fileExistsAtPath:assetPath]);
    NSURL *fileURL = [NSURL fileURLWithPath:assetPath];
    NSParameterAssert(fileURL);
    AVAssetWriter *videoWriter =
    [[AVAssetWriter alloc] initWithURL:fileURL
                              fileType:AVFileTypeAppleM4V
                                 error:&error];
    NSParameterAssert(videoWriter);
    // Lowest practical bitrate: the frames are solid black and never shown.
    NSDictionary *compression =
    @{
      AVVideoAverageBitRateKey : @10,
      AVVideoProfileLevelKey : AVVideoProfileLevelH264Main31,
      AVVideoMaxKeyFrameIntervalKey : @300
      };
    NSDictionary *outputSettings =
    @{
      AVVideoCodecKey : AVVideoCodecH264,
      AVVideoCompressionPropertiesKey : compression,
      AVVideoWidthKey : @120,
      AVVideoHeightKey : @80
      };
    AVAssetWriterInput *videoWriterInput =
    [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                       outputSettings:outputSettings];
    NSParameterAssert(videoWriterInput);
    NSDictionary *parameters =
    @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB),
      (NSString *)kCVPixelBufferWidthKey : @120,
      (NSString *)kCVPixelBufferHeightKey : @80
      };
    AVAssetWriterInputPixelBufferAdaptor *adaptor =
    [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                     sourcePixelBufferAttributes:parameters];
    NSParameterAssert(adaptor);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = NO;
    [videoWriter addInput:videoWriterInput];
    // FIX: the original called -startWriting inside NSParameterAssert, so the
    // call was compiled out in release builds (NS_BLOCK_ASSERTIONS) and the
    // writer never started. Keep the side effect outside the assert.
    BOOL started = [videoWriter startWriting];
    NSParameterAssert(started);
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    dispatch_queue_t dispatchQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
    [videoWriterInput requestMediaDataWhenReadyOnQueue:dispatchQueue
                                            usingBlock:^
    {
        int frame = 0;
        while (videoWriterInput.isReadyForMoreMediaData)
        {
            if (frame < 2)
            {
                // Exactly two frames: one at t = 0 and one at t = duration.
                // The second frame is what gives the asset its seekable length.
                CMTime frameTime = frame ? duration : kCMTimeZero;
                CVPixelBufferRef buffer = [self blackImagePixelBuffer];
                // FIX: check the append result instead of ignoring it.
                BOOL appended = [adaptor appendPixelBuffer:buffer
                                      withPresentationTime:frameTime];
                CVBufferRelease(buffer);
                NSParameterAssert(appended);
                ++frame;
            }
            else
            {
                [videoWriterInput markAsFinished];
                [videoWriter endSessionAtSourceTime:duration];
                dispatch_async(dispatch_get_main_queue(), ^
                {
                    [videoWriter finishWritingWithCompletionHandler:^()
                    {
                        NSLog(@"did finish writing the video!");
                        AVURLAsset *asset =
                        [AVURLAsset assetWithURL:videoWriter.outputURL];
                        // FIX: guard against a nil block before invoking it.
                        if (callBack)
                        {
                            callBack(asset);
                        }
                    }];
                });
                break;
            }
        }
    }];
}
/// Creates a 120x80 solid-black ARGB pixel buffer.
/// Despite the name, this follows the CF Create Rule: the caller owns the
/// returned buffer and must release it with CVBufferRelease().
+ (CVPixelBufferRef)blackImagePixelBuffer
{
    NSDictionary *options =
    @{
      (id)kCVPixelBufferCGImageCompatibilityKey : @YES,
      (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES
      };
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status =
    CVPixelBufferCreate(kCFAllocatorDefault, 120, 80, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // FIX: use the buffer's actual bytes-per-row instead of hard-coding
    // 4 * 120 — CoreVideo may pad rows for alignment, and a mismatched
    // stride would skew every row of the bitmap.
    CGContextRef context = CGBitmapContextCreate(pxdata, 120, 80, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextSetFillColorWithColor(context, [UIColor blackColor].CGColor);
    CGContextFillRect(context, CGRectMake(0.f, 0.f, 120.f, 80.f));
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
@接口伪造资产()
+(CVPixelBufferRef)blackImagePixelBuffer;
@结束
@实施伪造资产
+(作废)资产持续时间:(CMTime)持续时间
completionblock:(void(^)(AVAsset*)回调
{
n错误*错误=nil;
NSString*assetPath=nil;
整数i=0;
做
{
资产路径=
[NSTemporaryDirectory()stringByAppendingPathComponent:[NSString stringWithFormat:@“dummyAsset%i.m4v”,i]];
i++;
}
而([[NSFileManager defaultManager]文件存在路径:assetPath
董事会:否);
NSURL*fileURL=[NSURL fileURLWithPath:assetPath];
NSParameterAssert(fileURL);
视频写手=
[[AVAssetWriter alloc]initWithURL:fileURL
文件类型:AVFileTypeAppleM4V
错误:&错误];
NSParameterAssert(视频编写器);
NSDictionary*压缩=
@{
AVVideoAverageBitRateKey:@10,
AVVideoProfileLevelKey:AVVideoProfileLevelH264Main31,
AVVideoMaxKeyFrameIntervalKey:@300
};
NSDictionary*输出设置=
@{
AVVideoCodeKey:AVVideoCodecH264,
AVVideoCompressionProperties键:压缩,
AVVideoWidthKey:@120,
AVVideoHeightKey:@80
};
AVAssetWriterInput*视频写入器Input=
[AvassetWriteInput AssetWriteInputWithMediaType:AVMediaTypeVideo
outputSettings:outputSettings];
NSParameterAssert(videoWriterInput);
NSDictionary*参数=
@{(NSString*)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32ARGB),
(NSString*)kCVPixelBufferWidthKey:@120,
(NSString*)kCVPixelBufferHeightKey:@80
};
AvassetWriterInputPixelBufferAdapter*适配器=
[AvassetWriterInputPixelBufferAdapter AssetWriterInputPixelBufferAdapter WithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:参数];
NSParameterAssert(适配器);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime=否;
[videoWriter附加输入:videoWriterInput];
NSParameterAssert([videoWriter startWriting]);
[videoWriter startSessionAtSourceTime:kCMTimeZero];
调度队列调度队列=调度获取全局队列(调度队列优先级默认为0);
[VideoWriterInputRequestMediaDataWhenRepayonQueue:dispatchQueue
使用块:^
{
int帧=0;
while(videoWriterInput.isReadyForMoreMediaData)
{
如果(帧<2)
{
CMTime frameTime=帧?持续时间:KCMTIME0;
CVPixelBufferRef缓冲区=[self blackImagePixelBuffer];
[像素缓冲区:缓冲区]
withPresentationTime:frameTime];
CVBufferRelease(缓冲区);
++框架;
}
其他的
{
[videoWriterInput markAsFinished];
[videoWriter EndSessionSourceTime:持续时间];
dispatch\u async(dispatch\u get\u main\u queue()^
{
[videoWriter使用CompletionHandler完成写入:^()
{
NSLog(@“视频写完了!”);
AVURLAsset*资产=
[avurlaste assetwithur:videoWriter.outputURL];
收回(资产);
}];
});
打破
}
}
}];
}
+(CVPixelBufferRef)blackImagePixelBuffer
{
NSDictionary*选项=
@{
(id)kCVPixelBufferCGImageCompatibilityKey:@是,
(id)kCVPixelBufferCGBitmapContextCompatibilityKey:@YES
};
CVPixelBufferRef pxbuffer=NULL;
返回状态=
CVPixelBufferCreate(kCFAllocatorDefault、120、80、KCVpixelFormattType_32ARGB、(__桥CFDictionaryRef)选项和pxbuffer);
NSParameterAssert(状态==kCVReturnSuccess&&pxbuffer!=NULL);
CVPixelBufferLockBaseAddress(pxbuffer,0);
void*pxdata=CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata!=NULL);
CGColorSpaceRef rgbColorSpace=CGColorSpaceCreateDeviceRGB();
//KCGIMAGEAlphaPremultipledFirst
CGContextRef context=CGBitmapContextCreate(pxdata,120,80,8,4*120,rgbColorSpace,(CGBitmapInfo)kCGImageAlphaNoneSkipFirst);
NSParameterAssert(上下文);
CGContextSetFillColorWithColor(上下文[UIColor blackColor].CGColor);
CGContextFillRect(上下文,CGRectMake(0.f,0.f,120.f,80.f));
CGBColorSpace;
CGC