I'm using an AVPlayer to play CAKeyframeAnimations on an AVSynchronizedLayer. Since I don't play an actual AVAsset during the animation, I keep the player running by setting the forwardPlaybackEndTime of the AVPlayerItem to the desired animation duration. Unfortunately, it seems impossible to seekToTime: during this forwardPlaybackEndTime: the AVPlayer always snaps back to the beginning, probably because it can only seek within the AVPlayerItem's actual duration.
How could I create a dummy AVPlayerItem with a real duration to trick the AVPlayer into playing an empty AVPlayerItem while letting me seekToTime:?
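Roughly, my setup looks like this (a simplified sketch; animatedLayer and animationDuration stand in for my real values):

// Simplified sketch of the setup described above (illustrative names):
AVPlayerItem * item = [AVPlayerItem playerItemWithAsset:someAsset];
item.forwardPlaybackEndTime = CMTimeMakeWithSeconds(animationDuration, 600);
AVPlayer * player = [AVPlayer playerWithPlayerItem:item];

AVSynchronizedLayer * syncLayer =
[AVSynchronizedLayer synchronizedLayerWithPlayerItem:item];
[syncLayer addSublayer:animatedLayer]; // layer carrying the CAKeyframeAnimations
[player play];

// Seeking during playback snaps back to the beginning:
[player seekToTime:CMTimeMakeWithSeconds(2.0, 600)];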
Unfortunately, seekToTime: will only seek within the AVPlayerItem's duration, so you need a dummy asset with a real duration to get a seekable timeline. Here's an example implementation that generates such an asset quickly. It's long but it's required. Good luck!
#import <AVFoundation/AVFoundation.h>
#import <CoreVideo/CoreVideo.h>
#import <UIKit/UIKit.h>

@interface FakeAsset : NSObject
+ (void)assetWithDuration:(CMTime)duration
          completionBlock:(void (^)(AVAsset *))callBack;
@end

@interface FakeAsset ()
+ (CVPixelBufferRef)blackImagePixelBuffer;
@end
@implementation FakeAsset
+ (void)assetWithDuration:(CMTime)duration
          completionBlock:(void (^)(AVAsset *))callBack
{
    NSError * error = nil;
    NSString * assetPath = nil;
    NSUInteger i = 0;
    // Find an unused temporary file name for the dummy movie.
    do
    {
        assetPath =
        [NSTemporaryDirectory() stringByAppendingPathComponent:
         [NSString stringWithFormat:@"dummyAsset%lu.m4v", (unsigned long)i]];
        i++;
    }
    while ([[NSFileManager defaultManager] fileExistsAtPath:assetPath]);
    NSURL * fileURL = [NSURL fileURLWithPath:assetPath];
    NSParameterAssert(fileURL);
    AVAssetWriter * videoWriter =
    [[AVAssetWriter alloc] initWithURL:fileURL
                              fileType:AVFileTypeAppleM4V
                                 error:&error];
    NSParameterAssert(videoWriter);
    // Tiny bitrate and frame size: the content of the dummy movie
    // doesn't matter, only its duration does.
    NSDictionary * compression =
    @{
        AVVideoAverageBitRateKey : @10,
        AVVideoProfileLevelKey : AVVideoProfileLevelH264Main31,
        AVVideoMaxKeyFrameIntervalKey : @300
    };
    NSDictionary * outputSettings =
    @{
        AVVideoCodecKey : AVVideoCodecH264,
        AVVideoCompressionPropertiesKey : compression,
        AVVideoWidthKey : @120,
        AVVideoHeightKey : @80
    };
    AVAssetWriterInput * videoWriterInput =
    [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                       outputSettings:outputSettings];
    NSParameterAssert(videoWriterInput);
    NSDictionary * parameters =
    @{
        (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB),
        (NSString *)kCVPixelBufferWidthKey : @120,
        (NSString *)kCVPixelBufferHeightKey : @80
    };
    AVAssetWriterInputPixelBufferAdaptor * adaptor =
    [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                     sourcePixelBufferAttributes:parameters];
    NSParameterAssert(adaptor);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = NO;
    [videoWriter addInput:videoWriterInput];
    // Don't wrap startWriting in the assert: NSParameterAssert compiles
    // away in release builds and the session would never start.
    BOOL didStart = [videoWriter startWriting];
    NSParameterAssert(didStart);
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    dispatch_queue_t dispatchQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
    [videoWriterInput requestMediaDataWhenReadyOnQueue:dispatchQueue
                                            usingBlock:^
    {
        int frame = 0;
        while (videoWriterInput.isReadyForMoreMediaData)
        {
            if (frame < 2)
            {
                // Two black frames: one at time zero and one at the
                // requested duration, which defines the asset's length.
                CMTime frameTime = frame ? duration : kCMTimeZero;
                CVPixelBufferRef buffer = [self blackImagePixelBuffer];
                [adaptor appendPixelBuffer:buffer
                      withPresentationTime:frameTime];
                CVBufferRelease(buffer);
                ++frame;
            }
            else
            {
                [videoWriterInput markAsFinished];
                [videoWriter endSessionAtSourceTime:duration];
                dispatch_async(dispatch_get_main_queue(), ^
                {
                    [videoWriter finishWritingWithCompletionHandler:^()
                    {
                        NSLog(@"did finish writing the video!");
                        AVURLAsset * asset =
                        [AVURLAsset assetWithURL:videoWriter.outputURL];
                        callBack(asset);
                    }];
                });
                break;
            }
        }
    }];
}
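And the helper that renders a single black pixel buffer for the two frames above: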
+ (CVPixelBufferRef)blackImagePixelBuffer
{
    NSDictionary * options =
    @{
        (id)kCVPixelBufferCGImageCompatibilityKey : @YES,
        (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES
    };
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status =
    CVPixelBufferCreate(kCFAllocatorDefault, 120, 80, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void * pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // kCGImageAlphaNoneSkipFirst matches the 32ARGB pixel format above.
    CGContextRef context = CGBitmapContextCreate(pxdata, 120, 80, 8, 4 * 120, rgbColorSpace, (CGBitmapInfo)kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextSetFillColorWithColor(context, [UIColor blackColor].CGColor);
    CGContextFillRect(context, CGRectMake(0.f, 0.f, 120.f, 80.f));
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    // Caller is responsible for releasing the returned buffer.
    return pxbuffer;
}

@end
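Usage might then look like this (a sketch, assuming a ten-second seekable timeline; the AVSynchronizedLayer wiring is as in the question):

[FakeAsset assetWithDuration:CMTimeMakeWithSeconds(10.0, 600)
             completionBlock:^(AVAsset * asset)
{
    AVPlayerItem * item = [AVPlayerItem playerItemWithAsset:asset];
    AVPlayer * player = [AVPlayer playerWithPlayerItem:item];
    // Attach the AVSynchronizedLayer to this item, then play.
    // seekToTime: now works anywhere within the 10 s duration:
    [player seekToTime:CMTimeMakeWithSeconds(4.0, 600)];
}];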