Record video with AVCaptureSession, add CIFilter to it and save it to photos album

I want to make a custom video recorder in my app. Right now I can record video and save it, but I want to apply filters to the video while it is recording and save the filtered video to the photos album. This is my code to record video and save it.

let captureSession = AVCaptureSession()
let fileOutput = AVCaptureMovieFileOutput()

func initVideoRecording() {

    // Configure the shared audio session for recording.
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print("error in audio")
    }

    let session = AVCaptureSession()

    session.beginConfiguration()

    session.sessionPreset = AVCaptureSessionPresetMedium

    let videoLayer = AVCaptureVideoPreviewLayer(session: session)
    videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    videoLayer.frame = myImage.bounds
    myImage.layer.addSublayer(videoLayer)

    let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    do
    {
        let input = try AVCaptureDeviceInput(device: backCamera)
        let audioInput = try AVCaptureDeviceInput(device: audio)

        session.addInput(input)
        session.addInput(audioInput)

    }
    catch
    {
        print("can't access camera")
        return
    }

    session.addOutput(fileOutput)

    session.commitConfiguration()

    session.startRunning()

}

@IBAction func recordFunc() {
    if fileOutput.recording {
        myButton.setTitle("record", forState: .Normal)
        fileOutput.stopRecording()
    } else {
        let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("\(getCurrentDate())-capturedvideo.mp4")
        fileOutput.startRecordingToOutputFileURL(fileUrl, recordingDelegate: self)

        myButton.setTitle("stop", forState: .Normal)
    }
}

func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
    // Save the recorded video to the photos album.
    UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, self, "video:didFinishSavingWithError:contextInfo:", nil)
}

I tried to use AVCaptureVideoDataOutput, and in its delegate I use this code:

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    connection.videoOrientation = AVCaptureVideoOrientation.Portrait

    // Wrap the camera frame in a CIImage and run it through the filter.
    let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

    let comicEffect = CIFilter(name: "CIComicEffect")
    comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)

    let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)

    // UI updates must happen on the main queue.
    dispatch_async(dispatch_get_main_queue()) {
        self.myImage.image = filteredImage
    }
}

With this code the filtered image is displayed, but the video is not recorded.

=======================/ This is the solution for my question \================ Please note that this code uses Swift 2 and Xcode 7.3.

    let captureSession = AVCaptureSession()
    let videoOutput = AVCaptureVideoDataOutput()
    let audioOutput = AVCaptureAudioDataOutput()

    // One serial queue, created once, for appending filtered frames.
    let appendQueue = dispatch_queue_create("sample buffer append", DISPATCH_QUEUE_SERIAL)

    var adapter: AVAssetWriterInputPixelBufferAdaptor!
    var record = false
    var videoWriter: AVAssetWriter!
    var writerInput: AVAssetWriterInput!
    var audioWriterInput: AVAssetWriterInput!
    var lastPath = ""
    var startTime = kCMTimeZero

    var outputSize = CGSizeMake(UIScreen.mainScreen().bounds.width, UIScreen.mainScreen().bounds.height)

override func viewDidAppear(animated: Bool) {
        super.viewDidAppear(animated)

        video()
    }

    func video() {

        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
            try AVAudioSession.sharedInstance().setActive(true)
        } catch {
            print("error in audio")
        }

        captureSession.beginConfiguration()

        captureSession.sessionPreset = AVCaptureSessionPresetMedium

        let videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        //videoLayer.frame = myImage.bounds
        //myImage.layer.addSublayer(videoLayer)

        view.layer.addSublayer(videoLayer)

        let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
        do
        {
            let input = try AVCaptureDeviceInput(device: backCamera)
            let audioInput = try AVCaptureDeviceInput(device: audio)

            captureSession.addInput(input)
            captureSession.addInput(audioInput)

        }
        catch
        {
            print("can't access camera")
            return
        }

        let queue = dispatch_queue_create("sample buffer delegate", DISPATCH_QUEUE_SERIAL)

        videoOutput.setSampleBufferDelegate(self,queue: queue)
        audioOutput.setSampleBufferDelegate(self, queue: queue)

        captureSession.addOutput(videoOutput)
        captureSession.addOutput(audioOutput)
        captureSession.commitConfiguration()

        captureSession.startRunning()

    }


    @IBAction func recordFunc() {

        if record {
            myButton.setTitle("record", forState: .Normal)
            record = false
            self.writerInput.markAsFinished()
            audioWriterInput.markAsFinished()
            self.videoWriter.finishWritingWithCompletionHandler { () -> Void in
                print("FINISHED!!!!!")
                UISaveVideoAtPathToSavedPhotosAlbum(self.lastPath, self, "video:didFinishSavingWithError:contextInfo:", nil)
            }


        } else {

            let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("\(getCurrentDate())-capturedvideo.MP4")

            lastPath = fileUrl.path!
            videoWriter = try? AVAssetWriter(URL: fileUrl, fileType: AVFileTypeMPEG4)



            let outputSettings = [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : NSNumber(float: Float(outputSize.width)), AVVideoHeightKey : NSNumber(float: Float(outputSize.height))]

            writerInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
            writerInput.expectsMediaDataInRealTime = true
            audioWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: DejalActivityView.getAudioDictionary() as? [String:AnyObject])

            videoWriter.addInput(writerInput)
            videoWriter.addInput(audioWriterInput)

            adapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: DejalActivityView.getAdapterDictionary() as? [String:AnyObject])

            videoWriter.startWriting()
            videoWriter.startSessionAtSourceTime(startTime)

            record = true
            myButton.setTitle("stop", forState: .Normal)

        }


    }

    func getCurrentDate()->String{
        let format = NSDateFormatter()
        format.dateFormat = "dd-MM-yyyy hh:mm:ss"
        format.locale = NSLocale(localeIdentifier: "en")
        let date = format.stringFromDate(NSDate())
        return date
    }


extension newCustomCameraViewController:AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate{


    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        // Remember the latest presentation timestamp; it is used when appending frames.
        startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        if captureOutput == videoOutput {
            connection.videoOrientation = AVCaptureVideoOrientation.Portrait

            let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
            let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

            // The variable is still called comicEffect, but the filter used here is CIHexagonalPixellate.
            let comicEffect = CIFilter(name: "CIHexagonalPixellate")

            comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)

            let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)

            if self.record == true {
                // Append the filtered frame on the shared serial queue
                // (creating a new queue per frame would not serialize anything).
                dispatch_sync(appendQueue, {
                    if self.record == true {
                        if self.writerInput.readyForMoreMediaData {
                            // pixelBufferFromCGImage:size: needs a target size; pass the output size.
                            let bo = self.adapter.appendPixelBuffer(DejalActivityView.pixelBufferFromCGImage(self.convertCIImageToCGImage(comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage), size: self.outputSize).takeRetainedValue(), withPresentationTime: self.startTime)
                            print("video is \(bo)")
                        }
                    }
                })
            }
            // Show the filtered frame in the preview image view.
            dispatch_async(dispatch_get_main_queue()) {
                self.myImage.image = filteredImage
            }
        } else if captureOutput == audioOutput {
            if self.record == true {
                let bo = audioWriterInput.appendSampleBuffer(sampleBuffer)
                print("audio is \(bo)")
            }
        }
    }


    func convertCIImageToCGImage(inputImage: CIImage) -> CGImage! {
        // CIContext(options:) does not return nil, so no optional check is needed.
        let context = CIContext(options: nil)
        return context.createCGImage(inputImage, fromRect: inputImage.extent)
    }

    func video(videoPath: NSString, didFinishSavingWithError error: NSError?, contextInfo info: AnyObject) {
        var title = "Success"
        var message = "Video was saved"

        if error != nil {
            title = "Error"
            message = "Video failed to save"
        }

        let alert = UIAlertController(title: title, message: message, preferredStyle: .Alert)
        alert.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Cancel, handler: nil))
        presentViewController(alert, animated: true, completion: nil)
    }
}

These methods from DejalActivityView are in Objective-C, and I couldn't convert them to Swift, so if anyone can convert them, please edit my code. (A rough Swift 2 translation attempt is sketched after the Objective-C listing below.)

+ (CVPixelBufferRef )pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options, &pxbuffer);
    // CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4*size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

+(NSDictionary *)getAdapterDictionary{
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    return sourcePixelBufferAttributesDictionary;
}

+(NSDictionary *) getAudioDictionary{
    AudioChannelLayout acl;
    bzero( &acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;


    NSDictionary* audioOutputSettings = nil;
    audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
                           [ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
                           //[ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
                           [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                           [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
                           [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                           nil ];
//    NSDictionary* audioOutputSettings = nil;
//        audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
//                               [ NSNumber numberWithInt: kAudioFormatMPEG4AAC_HE_V2 ], AVFormatIDKey,
//                               [ NSNumber numberWithFloat: 44100.0], AVSampleRateKey,
//                               [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
//                               nil ];

    return audioOutputSettings;
}
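For reference, here is a rough, untested Swift 2 sketch of what those three helpers could look like. It mirrors the Objective-C above one-to-one; note that if you move them into Swift, the call sites no longer need takeRetainedValue(), since the returned buffer is already managed:

func pixelBufferFromCGImage(image: CGImage, size: CGSize) -> CVPixelBuffer? {
    let options = [kCVPixelBufferCGImageCompatibilityKey as String: true,
                   kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]

    var pxbuffer: CVPixelBuffer? = nil
    let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(size.width), Int(size.height),
                                     kCVPixelFormatType_32ARGB, options, &pxbuffer)
    guard let buffer = pxbuffer where status == kCVReturnSuccess else { return nil }

    CVPixelBufferLockBaseAddress(buffer, 0)

    // Draw the CGImage into the pixel buffer's backing memory.
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    let context = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(buffer),
                                        Int(size.width), Int(size.height), 8,
                                        CVPixelBufferGetBytesPerRow(buffer), rgbColorSpace,
                                        CGImageAlphaInfo.PremultipliedFirst.rawValue)
    CGContextDrawImage(context, CGRectMake(0, 0, CGFloat(CGImageGetWidth(image)), CGFloat(CGImageGetHeight(image))), image)

    CVPixelBufferUnlockBaseAddress(buffer, 0)
    return buffer
}

func getAdapterDictionary() -> [String: AnyObject] {
    return [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(unsignedInt: kCVPixelFormatType_32ARGB)]
}

func getAudioDictionary() -> [String: AnyObject] {
    // Mono AAC at 44.1 kHz, matching the Objective-C version.
    var acl = AudioChannelLayout()
    memset(&acl, 0, sizeof(AudioChannelLayout))
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono

    return [AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
            AVSampleRateKey: 44100.0,
            AVNumberOfChannelsKey: 1,
            AVChannelLayoutKey: NSData(bytes: &acl, length: sizeof(AudioChannelLayout))]
}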
asked Apr 13 '16 by Eslam Hanafy

1 Answer

You need to add an AVAssetWriter:

var videoRecorder: AVAssetWriter?

Then in your delegate callback:

let timeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

// Start writing on the first buffer; its timestamp becomes the session start time.
if videoRecorder?.status == .Unknown {
    startRecordingTime = timeStamp
    videoRecorder?.startWriting()
    videoRecorder?.startSessionAtSourceTime(timeStamp)
}

You will need to configure the recorder for each recording you wish to do, and you will also need to add your inputs to the recorder (see the sketch below).

You may start to encounter issues because you don't seem to have any queues set up yet, which you will need. For reference, this GitHub repository is a very good resource:

https://github.com/waleedka/rosywriterswift
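
As a minimal sketch of the queue setup (the queue label is arbitrary; videoOutput and audioOutput are the capture outputs from the question):

// One serial queue for both sample buffer delegates keeps the callbacks ordered.
let bufferQueue = dispatch_queue_create("sample buffer delegate", DISPATCH_QUEUE_SERIAL)

videoOutput.setSampleBufferDelegate(self, queue: bufferQueue)
audioOutput.setSampleBufferDelegate(self, queue: bufferQueue)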

EDIT: Additional Info

You need to init() the writer, then add AVAssetWriterInput inputs for video and audio.
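
A minimal Swift 2 sketch of that setup, assuming an MPEG-4 output file with H.264 video and AAC audio (the concrete settings values are illustrative, not the only valid choices):

let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("capture.mp4")
let videoRecorder = try? AVAssetWriter(URL: fileUrl, fileType: AVFileTypeMPEG4)

// Video input: H.264 at the desired output dimensions.
let videoSettings: [String: AnyObject] = [AVVideoCodecKey: AVVideoCodecH264,
                                          AVVideoWidthKey: 720,
                                          AVVideoHeightKey: 1280]
let videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
videoInput.expectsMediaDataInRealTime = true

// Audio input: mono AAC at 44.1 kHz.
let audioSettings: [String: AnyObject] = [AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
                                          AVSampleRateKey: 44100.0,
                                          AVNumberOfChannelsKey: 1]
let audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
audioInput.expectsMediaDataInRealTime = true

videoRecorder?.addInput(videoInput)
videoRecorder?.addInput(audioInput)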

answered by Sean Lintern