Logo Questions Linux Laravel Mysql Ubuntu Git Menu
 

iPhone - AVAssetWriter - Error creating movie from photos at 1920×1080 pixels

I am trying to create a movie from some pictures. It works just fine with hd pictures ({720, 1280}) or lower resolutions . But when i try to create the movie with full hd pictures {1080, 1920} , the video is scrambled. Here is a link to see how it looks http://www.youtube.com/watch?v=BfYldb8e_18 . Do you have any ideas what i may be doing wrong?

- (void) createMovieWithOptions:(NSDictionary *) options
{
@autoreleasepool {
    NSString *path = [options valueForKey:@"path"];
    CGSize size =  [(NSValue *)[options valueForKey:@"size"] CGSizeValue];
    NSArray *imageArray = [options valueForKey:@"pictures"];
    NSInteger recordingFPS = [[options valueForKey:@"fps"] integerValue];
    BOOL success=YES;
    NSError *error = nil;

    AVAssetWriter *assetWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(assetWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithFloat:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithFloat:size.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                              outputSettings:videoSettings];

    // Configure settings for the pixel buffer adaptor.
    NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                                                                     sourcePixelBufferAttributes:bufferAttributes];

    NSParameterAssert(videoWriterInput);
    NSParameterAssert([assetWriter canAddInput:videoWriterInput]);

    videoWriterInput.expectsMediaDataInRealTime = NO;
    [assetWriter addInput:videoWriterInput];

    //Start a session:
    [assetWriter startWriting];
    [assetWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;

    //convert uiimage to CGImage.

    int frameCount = 0;
    float progress = 0;
    float progressFromFrames = _progressView.progress; //only for create iflipbook movie

    for(UIImage * img in imageArray)
    {
        if([[NSThread currentThread] isCancelled])
        {
            [NSThread exit];
        }

        [condCreateMovie lock];
        if(isCreateMoviePaused)
        {
            [condCreateMovie wait];
        }

        uint64_t totalFreeSpace=[Utils getFreeDiskspace];
        if(((totalFreeSpace/1024ll)/1024ll)<50)
        {
            success=NO;
            break;
        }

        //        @autoreleasepool {
        NSLog(@"size:%@",NSStringFromCGSize(img.size));

        buffer = [[MovieWritter sharedMovieWritter] pixelBufferFromCGImage:[img CGImage] andSize:size];

        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 60)
        {
            if(adaptor.assetWriterInput.readyForMoreMediaData)
            {
                CMTime frameTime = CMTimeMake(frameCount, recordingFPS);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];

                CVPixelBufferRelease(buffer);

                [NSThread sleepForTimeInterval:0.1];


                if(isCreatingiFlipBookFromImported)
                    progress = (float)frameCount/(float)[imageArray count]/2.0 + progressFromFrames;
                else
                    progress = (float)frameCount/(float)[imageArray count];

                [[NSNotificationCenter defaultCenter] postNotificationName:@"movieCreationProgress" object:[NSNumber numberWithFloat:progress]];
            }
            else
            {
                [NSThread sleepForTimeInterval:0.5];
            }
            j++;
        }
        if (!append_ok)
        {
            NSLog(@"error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;

        [condCreateMovie unlock];
    }

    //Finish the session:
    [videoWriterInput markAsFinished];
    [assetWriter finishWriting];

    NSDictionary *dict = [NSDictionary dictionaryWithObjectsAndKeys:
                          [NSNumber numberWithBool:success], @"success",
                          path, @"path", nil];

    [[NSNotificationCenter defaultCenter] postNotificationName:@"movieCreationFinished" object:dict];
}
}

*Edit . Here is the code for [[MovieWritter sharedMovieWritter] pixelBufferFromCGImage:]

- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image andSize:(CGSize) size
{
@autoreleasepool {
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
                                          size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4*size.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

     return pxbuffer;
}
}
like image 760
flaviusilaghi Avatar asked Dec 07 '12 13:12

flaviusilaghi


3 Answers

I had the same problem and this answer resolved it: the size of the video must be a multiple of 16.

like image 149
Remy Cilia Avatar answered Nov 19 '22 05:11

Remy Cilia


Pretty sure that this is either a HW limitation or a bug. Please file a Radar.

like image 39
Cocoanetics Avatar answered Nov 19 '22 04:11

Cocoanetics


how about something like this to get pixel buffer

    //you could use a cgiimageref here instead
    CFDataRef imageData= CGDataProviderCopyData(CGImageGetDataProvider(imageView.image.CGImage));
    NSLog (@"copied image data");
    cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                         FRAME_WIDTH,
                                         FRAME_HEIGHT,
                                         kCVPixelFormatType_32BGRA,
                                         (void*)CFDataGetBytePtr(imageData),
                                         CGImageGetBytesPerRow(imageView.image.CGImage),
                                         NULL,
                                         NULL,
                                         NULL,
                                         &pixelBuffer);
    NSLog (@"CVPixelBufferCreateWithBytes returned %d", cvErr);

    CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();  
    CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;  
    NSLog (@"elapsedTime: %f", elapsedTime);
    CMTime presentationTime =  CMTimeMake(elapsedTime * TIME_SCALE, TIME_SCALE);

    // write the sample
    BOOL appended = [assetWriterPixelBufferAdaptor  appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
    CVPixelBufferRelease(pixelBuffer);
    CFRelease(imageData);
    if (appended) {
        NSLog (@"appended sample at time %lf", CMTimeGetSeconds(presentationTime));
    } else {
        NSLog (@"failed to append");
        [self stopRecording];
        self.startStopButton.selected = NO;
    }
like image 2
Michelle Cannon Avatar answered Nov 19 '22 04:11

Michelle Cannon