How to make a movie from a set of images using UIGetScreenImage

I use the method below to capture a series of screen images. I can successfully create a movie from them, but when I play it back it runs too fast, i.e. the movie does not seem to contain all the frames. Here is my code.

-(UIImage *)uiImageScreen
{
  CGImageRef screen = UIGetScreenImage();
  UIImage* image = [UIImage imageWithCGImage:screen];
  CGImageRelease(screen);
  UIImageWriteToSavedPhotosAlbum(image, self,nil, nil);
  return image;
}

-(void)writeSample:(NSTimer *)_timer
{
    if (assetWriterInput.readyForMoreMediaData) {
        // CMSampleBufferRef sample = nil;

        CVReturn cvErr = kCVReturnSuccess;

        // get screenshot image
        CGImageRef image = (CGImageRef)[[self uiImageScreen] CGImage];
        NSLog(@"made screenshot");

        // prepare the pixel buffer
        CVPixelBufferRef pixelBuffer = NULL;
        CFDataRef imageData = CGDataProviderCopyData(CGImageGetDataProvider(image));
        NSLog(@"copied image data");
        cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                             FRAME_WIDTH,
                                             FRAME_HEIGHT,
                                             kCVPixelFormatType_32BGRA,
                                             (void *)CFDataGetBytePtr(imageData),
                                             CGImageGetBytesPerRow(image),
                                             NULL,
                                             NULL,
                                             NULL,
                                             &pixelBuffer);
        NSLog(@"CVPixelBufferCreateWithBytes returned %d", cvErr);

        // calculate the presentation time from the elapsed wall-clock time
        CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();
        CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;
        NSLog(@"elapsedTime: %f", elapsedTime);
        CMTime presentationTime = CMTimeMake(elapsedTime * 600, 600);

        // write the sample
        BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                    withPresentationTime:presentationTime];
        if (appended) {
            NSLog(@"appended sample at time %lf", CMTimeGetSeconds(presentationTime));
        } else {
            NSLog(@"failed to append");
        }
    }
}

Then I call this method to create the movie:

-(void)StartRecording
{
    NSString *moviePath = [[self pathToDocumentsDirectory] stringByAppendingPathComponent:OUTPUT_FILE_NAME];
    if ([[NSFileManager defaultManager] fileExistsAtPath:moviePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:moviePath error:nil];
    }

    NSURL *movieURL = [NSURL fileURLWithPath:moviePath];
    NSLog(@"path=%@", movieURL);
    NSError *movieError = nil;
    [assetWriter release];
    assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL
                                            fileType:AVFileTypeQuickTimeMovie
                                               error:&movieError];
    NSDictionary *assetWriterInputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              AVVideoCodecH264, AVVideoCodecKey,
                                              [NSNumber numberWithInt:320], AVVideoWidthKey,
                                              [NSNumber numberWithInt:480], AVVideoHeightKey,
                                              nil];
    assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                          outputSettings:assetWriterInputSettings];
    assetWriterInput.expectsMediaDataInRealTime = YES;
    [assetWriter addInput:assetWriterInput];

    [assetWriterPixelBufferAdaptor release];
    assetWriterPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
                                     initWithAssetWriterInput:assetWriterInput
                                     sourcePixelBufferAttributes:nil];
    [assetWriter startWriting];

    firstFrameWallClockTime = CFAbsoluteTimeGetCurrent();
    [assetWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];

    // start writing samples to it
    [assetWriterTimer release];
    assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:0.1
                                                        target:self
                                                      selector:@selector(writeSample:)
                                                      userInfo:nil
                                                       repeats:YES];
}
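
The question does not show how the recording is stopped. For completeness, a minimal sketch of a matching stop method, assuming the same ivars (assetWriterTimer, assetWriterInput, assetWriter) and the synchronous pre-iOS 6 finishWriting API, might look like this (the method name StopRecording is hypothetical, not part of the original code):

-(void)StopRecording
{
    // stop producing frames
    [assetWriterTimer invalidate];
    assetWriterTimer = nil;

    // signal that no more samples are coming, then finalize the movie file
    [assetWriterInput markAsFinished];
    BOOL success = [assetWriter finishWriting];
    NSLog(@"finishWriting %@", success ? @"succeeded" : @"failed");
}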
Asked Jul 12 '12 by Ajay Chaudhary
1 Answer

Try this approach: instead of creating the pixel buffer by hand, get one from the adaptor's pixel buffer pool and copy the image bytes into it.

if (![videoWriterInput isReadyForMoreMediaData]) {
    NSLog(@"Not ready for video data");
}
else {
    @synchronized (self) {
        UIImage *newFrame = [self.currentScreen retain];
        CVPixelBufferRef pixelBuffer = NULL;
        CGImageRef cgImage = CGImageCreateCopy([newFrame CGImage]);
        CFDataRef image = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));

        // grab a buffer from the adaptor's pixel buffer pool rather than creating one by hand
        CVReturn status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, avAdaptor.pixelBufferPool, &pixelBuffer);
        if (status != kCVReturnSuccess) {
            // could not get a buffer from the pool
            NSLog(@"Error creating pixel buffer: status=%d", status);
        }
        else {
            // copy the image data into the pixel buffer
            CVPixelBufferLockBaseAddress(pixelBuffer, 0);
            uint8_t *destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);
            CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels);  // XXX: works only if the pixel buffer is contiguous and has the same bytesPerRow as the input data

            BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
            if (!success)
                NSLog(@"Warning: Unable to write buffer to video");

            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
            CVPixelBufferRelease(pixelBuffer);
        }

        // clean up
        [newFrame release];
        CFRelease(image);
        CGImageRelease(cgImage);
    }
}
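
In this snippet videoWriterInput, avAdaptor, self.currentScreen and time come from the answerer's own recorder; they play the same roles as assetWriterInput, assetWriterPixelBufferAdaptor, the captured screenshot and the frame's presentation time in the question. The answer does not show how the adaptor or the presentation time are set up; a minimal sketch under those assumptions (the attribute dictionary and the names elapsed and time are illustrative, not from the answer) could look like this:

// Assumed setup, not shown above: create the adaptor with explicit BGRA
// attributes so the pool's buffers match the screenshot data being copied in.
NSDictionary *pixelBufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                       [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], (id)kCVPixelBufferPixelFormatTypeKey,
                                       [NSNumber numberWithInt:320], (id)kCVPixelBufferWidthKey,
                                       [NSNumber numberWithInt:480], (id)kCVPixelBufferHeightKey,
                                       nil];
avAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
             initWithAssetWriterInput:videoWriterInput
             sourcePixelBufferAttributes:pixelBufferAttributes];

// Presentation time derived from the elapsed wall-clock time, as in the question.
CFTimeInterval elapsed = CFAbsoluteTimeGetCurrent() - firstFrameWallClockTime;
CMTime time = CMTimeMakeWithSeconds(elapsed, 600);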
Answered Nov 15 '22 by Sam