
AVFoundation + AssetWriter: Generate Movie With Images and Audio

I have to export a movie from my iPhone application: the video is built from UIImages stored in an NSArray, and several audio files in .caf format have to start at pre-specified times. After going through many questions and answers on this and other sites I have been able to use AVAssetWriter to export the video portion containing the images, but I can't seem to find a way to add the audio files to complete the movie.

Here is what I have so far:

-(void) writeImagesToMovieAtPath:(NSString *)path withSize:(CGSize)size
{
    NSLog(@"Write Started");

    NSError *error = nil;

    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *videoWriterInput = [[AVAssetWriterInput
                                             assetWriterInputWithMediaType:AVMediaTypeVideo
                                             outputSettings:videoSettings] retain];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];

    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;

    // Convert each UIImage to a CGImage and append it as a pixel buffer.
    int frameCount = 0;

    for (UIImage *img in imageArray)
    {
        buffer = [self pixelBufferFromCGImage:[img CGImage] andSize:size];

        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30)
        {
            if (adaptor.assetWriterInput.readyForMoreMediaData)
            {
                printf("appending %d attempt %d\n", frameCount, j);

                CMTime frameTime = CMTimeMake(frameCount, (int32_t) kRecordingFPS);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];

                if (buffer)
                    CVBufferRelease(buffer);
                [NSThread sleepForTimeInterval:0.05];
            }
            else
            {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;
    }

    // Finish the session:
    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];
    NSLog(@"Write Ended");
}
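For context, a minimal call site might look like the sketch below. The output file name and the 480x320 size are assumptions for illustration; imageArray and kRecordingFPS are the instance variable and frame-rate constant referenced in the method above.

// Hypothetical driver: build a .mov in the Documents directory from imageArray.
NSString *documentsDir = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *moviePath = [documentsDir stringByAppendingPathComponent:@"images.mov"];
// AVAssetWriter will not overwrite an existing file, so remove any previous output first.
[[NSFileManager defaultManager] removeItemAtPath:moviePath error:nil];
[self writeImagesToMovieAtPath:moviePath withSize:CGSizeMake(480, 320)];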

And here is the code for pixelBufferFromCGImage:

- (CVPixelBufferRef) pixelBufferFromCGImage:(CGImageRef)image andSize:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
                                          size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4 * size.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
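In case it helps to see the conversion in isolation, here is a hedged sketch of a one-off call; the image name and size are hypothetical, and the caller owns the returned buffer, so it must be released (the writer loop above does this with CVBufferRelease after appending).

// Hypothetical one-off conversion and cleanup.
UIImage *frameImage = [UIImage imageNamed:@"frame0.png"]; // hypothetical image
CVPixelBufferRef pixelBuffer = [self pixelBufferFromCGImage:[frameImage CGImage] andSize:CGSizeMake(480, 320)];
if (pixelBuffer) {
    // ... append the buffer to the pixel buffer adaptor here ...
    CVPixelBufferRelease(pixelBuffer);
}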

Can you help me with how to add the audio files: how to create buffers for them, and what the corresponding adaptor and input settings should be?

If this approach is likely to cause problems, please point me toward using an AVMutableComposition with the image array for the video export instead. (A sketch of the writer-only alternative follows.)
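For what it is worth, one way to stay entirely inside AVAssetWriter (instead of switching to AVMutableComposition) is to add a second AVAssetWriterInput for audio and feed it sample buffers read from the .caf file with an AVAssetReader. The sketch below is untested and simplified: it assumes a single audio file that starts at time zero, audioURL is a hypothetical NSURL, and videoWriter is the writer created in the method above. To start clips at pre-specified times you would additionally have to retime each sample buffer (for example with CMSampleBufferCreateCopyWithNewTiming), which this sketch does not do.

// Hedged sketch: read audio samples from a .caf and append them to a second
// AVAssetWriterInput on the same AVAssetWriter. Error handling is omitted.
AVURLAsset *audioAsset = [AVURLAsset URLAssetWithURL:audioURL options:nil]; // audioURL is hypothetical
AVAssetTrack *audioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:audioAsset error:nil];
AVAssetReaderTrackOutput *audioOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack
                                                                                    outputSettings:nil];
[audioReader addOutput:audioOutput];

// nil outputSettings passes samples through in their stored format.
AVAssetWriterInput *audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                           outputSettings:nil];
audioWriterInput.expectsMediaDataInRealTime = NO;
[videoWriter addInput:audioWriterInput]; // same writer that already has the video input

[audioReader startReading];
[audioWriterInput requestMediaDataWhenReadyOnQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)
                                        usingBlock:^{
    while (audioWriterInput.readyForMoreMediaData) {
        CMSampleBufferRef sampleBuffer = [audioOutput copyNextSampleBuffer];
        if (sampleBuffer) {
            [audioWriterInput appendSampleBuffer:sampleBuffer];
            CFRelease(sampleBuffer);
        } else {
            // No more samples: close out this input.
            [audioWriterInput markAsFinished];
            break;
        }
    }
}];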

asked Apr 12 '11 by MuTaTeD

1 Answer

I ended up exporting the video separately using the code above, and then added the audio files using an AVMutableComposition and an AVAssetExportSession. Here is the code:

-(void) addAudioToFileAtPath:(NSString *)filePath toPath:(NSString *)outFilePath
{
    NSError *error = nil;

    AVMutableComposition *composition = [AVMutableComposition composition];

    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:filePath] options:nil];
    AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:videoAssetTrack
                                    atTime:kCMTimeZero
                                     error:&error];

    // Each entry in audioInfoArray describes one audio file (its path and duration);
    // the clips are laid end to end on the timeline, each on its own audio track.
    CMTime audioStartTime = kCMTimeZero;
    for (NSDictionary *audioInfo in audioInfoArray)
    {
        NSString *pathString = [audioInfo objectForKey:audioFilePath];
        AVURLAsset *urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:pathString] options:nil];
        AVAssetTrack *audioAssetTrack = [[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

        AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, urlAsset.duration)
                                       ofTrack:audioAssetTrack
                                        atTime:audioStartTime
                                         error:&error];

        audioStartTime = CMTimeAdd(audioStartTime,
                                   CMTimeMake((int)(([[audioInfo objectForKey:audioDuration] floatValue] * kRecordingFPS) + 0.5),
                                              kRecordingFPS));
    }

    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                          presetName:AVAssetExportPresetMediumQuality];

    assetExport.videoComposition = mutableVideoComposition; // only needed if you have built a video composition
    assetExport.outputFileType = AVFileTypeQuickTimeMovie;  // @"com.apple.quicktime-movie"
    assetExport.outputURL = [NSURL fileURLWithPath:outFilePath];

    [assetExport exportAsynchronouslyWithCompletionHandler:^(void)
     {
         switch (assetExport.status)
         {
             case AVAssetExportSessionStatusCompleted:
                 // Export complete
                 NSLog(@"Export Complete");
                 break;
             case AVAssetExportSessionStatusFailed:
                 // Export failed (see assetExport.error)
                 NSLog(@"Export Failed");
                 NSLog(@"ExportSessionError: %@", [assetExport.error localizedDescription]);
                 break;
             case AVAssetExportSessionStatusCancelled:
                 // Export cancelled
                 NSLog(@"Export Cancelled");
                 NSLog(@"ExportSessionError: %@", [assetExport.error localizedDescription]);
                 break;
         }
     }];
}
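To tie the two steps together, a hypothetical driver might look like the sketch below. The resource name, durations, and the videoOnlyPath / finalMoviePath variables are assumptions for illustration; audioInfoArray, audioFilePath, audioDuration, and kRecordingFPS are the ivar and key/frame-rate constants referenced in the method above.

// Hypothetical usage: describe one 2.5-second clip, write the silent movie,
// then mux the audio into the final file.
NSDictionary *clip = [NSDictionary dictionaryWithObjectsAndKeys:
                      [[NSBundle mainBundle] pathForResource:@"beep" ofType:@"caf"], audioFilePath,
                      [NSNumber numberWithFloat:2.5f], audioDuration,
                      nil];
audioInfoArray = [NSArray arrayWithObject:clip];

[self writeImagesToMovieAtPath:videoOnlyPath withSize:CGSizeMake(480, 320)];
[self addAudioToFileAtPath:videoOnlyPath toPath:finalMoviePath];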
answered Sep 29 '22 by MuTaTeD