Logo Questions Linux Laravel Mysql Ubuntu Git Menu
 

How to use CVPixelBufferPool in conjunction with AVAssetWriterInputPixelBufferAdaptor in iPhone?

I have successfully created video from images using the following code

-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString*)path size:(CGSize)size duration:(int)duration 
{
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput* writerInput = [[AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings] retain];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];


    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;
    buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

    //Write samples:
    for (int i = 0;i<[array count]; i++)
    {
        if([writerInput isReadyForMoreMediaData])
        {
            NSLog(@"inside for loop %d",i);
            CMTime frameTime = CMTimeMake(1, 20);

            CMTime lastTime=CMTimeMake(i, 20); //i is from 0 to 24 of the loop above

            CMTime presentTime=CMTimeAdd(lastTime, frameTime);

            buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage]];

            [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];

        }
        else
        {
            NSLog(@"error");
            i--;
        }
    }
    NSLog(@"outside for loop");

    //Finish the session:
    [writerInput markAsFinished];
    [videoWriter finishWriting];
}

Here I have used CVPixelBufferRef. Instead of this, I want to use the CVPixelBufferPoolRef in conjunction with AVAssetWriterInputPixelBufferAdaptor.

Can anybody provide an example which I can debug and use?

like image 679
Atulkumar V. Jain Avatar asked Oct 26 '10 12:10

Atulkumar V. Jain


2 Answers

You are passing nil 'sourcePixelBufferAttributes', because of which the pixel buffer pool will not get created:

AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];

Instead pass some attributes, for example:

NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

Then you can use the pool to create the pixel buffers, like:

CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor.pixelBufferPool, &pixelBuffer);
like image 131
radical Avatar answered Sep 19 '22 02:09

radical


@Atulkumar V. Jain : great! good luck ^^ @Brian : you are right thanks, I correct it and I am getting it work now here is the working code (if someone else need it :-) )

CVPixelBufferRef buffer = NULL;
buffer = [self pixelBufferFromCGImage:[[imagesArray objectAtIndex:0] CGImage]];
CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor_.pixelBufferPool, &buffer);

[adaptor_ appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

__block UInt64 convertedByteCount = 0;
dispatch_queue_t mediaInputQueue =  dispatch_queue_create("mediaInputQueue", NULL);
static int i = 1;
int frameNumber = [imagesArray count];

[writerInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
    while (1){
        if (i == frameNumber) {
            break;
        }
        if ([writerInput isReadyForMoreMediaData]) {

            CVPixelBufferRef sampleBuffer = [self pixelBufferFromCGImage:[[imagesArray objectAtIndex:i] CGImage]];
            NSLog(@"inside for loop %d",i);
            CMTime frameTime = CMTimeMake(1, 20);

           CMTime lastTime=CMTimeMake(i, 20); //i is from 0 to 19 of the loop above

           CMTime presentTime=CMTimeAdd(lastTime, frameTime);       

        if (sampleBuffer) {
                [adaptor_ appendPixelBuffer:sampleBuffer withPresentationTime:presentTime];
                i++;
                CFRelease(sampleBuffer);
            } else {
                break;
            }
        }
    }
    NSLog (@"done");
    [writerInput markAsFinished];
    [videoWriter finishWriting];     

    CVPixelBufferPoolRelease(adaptor_.pixelBufferPool);
    [videoWriter release];
    [writerInput release];      
    [imagesArray removeAllObjects];


}];
like image 43
pink Avatar answered Sep 20 '22 02:09

pink