I am using AVCaptureSession to get the camera output and have successfully added the audio and video inputs and outputs.
{
    do {
        cameraSession.beginConfiguration()

        // Remove the previous inputs before switching cameras
        let inputs = cameraSession.inputs as! [AVCaptureDeviceInput]
        for oldInput in inputs {
            cameraSession.removeInput(oldInput)
        }

        // Pick the requested camera
        let captureDevice: AVCaptureDevice
        if cameraPosition.isEqualToString("Front") {
            captureDevice = cameraWithPosition(.Front)!
        } else {
            captureDevice = cameraWithPosition(.Back)!
        }
        let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
        if cameraSession.canAddInput(deviceInput) {
            cameraSession.addInput(deviceInput)
        }

        // Video data output delivering bi-planar YCbCr frames
        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(unsignedInt: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)]
        dataOutput.alwaysDiscardsLateVideoFrames = true
        if cameraSession.canAddOutput(dataOutput) {
            cameraSession.addOutput(dataOutput)
        }

        // Audio input
        let audioCheck = AVCaptureDevice.devicesWithMediaType(AVMediaTypeAudio)
        guard let audioDevice = audioCheck.first as? AVCaptureDevice else {
            print("no audio device")
            cameraSession.commitConfiguration()
            return
        }
        do {
            let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
            if cameraSession.canAddInput(audioDeviceInput) {
                cameraSession.addInput(audioDeviceInput)
            }
        } catch let audioError as NSError {
            print(audioError)
            let alert = UIAlertController(title: "Error", message: audioError.localizedDescription, preferredStyle: .Alert)
            alert.addAction(UIAlertAction(title: "OK", style: .Default, handler: nil))
            self.presentViewController(alert, animated: true, completion: nil)
        }

        cameraSession.commitConfiguration()

        // Deliver sample buffers on a serial background queue
        let queue = dispatch_queue_create("com.invasivecode.videoQueue", DISPATCH_QUEUE_SERIAL)
        dataOutput.setSampleBufferDelegate(self, queue: queue)
    } catch let error as NSError {
        NSLog("\(error), \(error.localizedDescription)")
    }
}
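The frames then arrive in the sample buffer delegate, where I do the extra per-frame work (a minimal sketch of the Swift 2 delegate callback; the body is illustrative):

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    // Each video frame is delivered here as a CMSampleBuffer on the serial queue
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
    let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    // ...per-frame processing goes here...
    print("frame at \(CMTimeGetSeconds(timestamp)), \(CVPixelBufferGetWidth(pixelBuffer))x\(CVPixelBufferGetHeight(pixelBuffer))")
}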
Using AVCaptureMovieFileOutput, I am able to save the output video to the photo library with
movieFileOutput.startRecordingToOutputFileURL(outputFilePath, recordingDelegate: self)
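Stopping and receiving the finished file work through AVCaptureFileOutputRecordingDelegate (a minimal sketch for context):

// Stop the movie-file recording; the delegate fires when the file is ready
movieFileOutput.stopRecording()

func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
    // outputFileURL can be saved to the photo library here
}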
However, I use AVCaptureVideoDataOutput as the output so I can do extra work on the metadata I get from its delegate callbacks, and when I try to record the video I cannot find any methods on it to start and stop recording.
How can I record video while using AVCaptureVideoDataOutput?
You need an AVCaptureSession to do it:
//First add an AVCaptureVideoDataOutput to the AVCaptureSession
AVCaptureSession *_captureSession;
_captureSession = [[AVCaptureSession alloc] init];
......Configuration......
AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];
......Configuration......
if ([_captureSession canAddOutput:videoOut]) {
    [_captureSession addOutput:videoOut];
}
//Then use the session to start and stop the flow of sample buffers.
//Note: this starts frame delivery to the delegate, not a file recording;
//with AVCaptureVideoDataOutput you write the delivered buffers to a file
//yourself (RosyWriter does this with AVAssetWriter).
[_captureSession startRunning];
[_captureSession stopRunning];
Please look through RosyWriterCapturePipeline.m; it is a very good example:
RosyWriter
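RosyWriter's approach, in short, is to feed the sample buffers from the data-output delegate into an AVAssetWriter. A minimal Swift sketch of that idea (Swift 2 syntax to match the question; the Recorder name and the 1280x720 settings are illustrative, and audio writing and error handling are omitted):

import AVFoundation

class Recorder: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    var assetWriter: AVAssetWriter?
    var writerInput: AVAssetWriterInput?
    var isRecording = false

    func startRecording(outputURL: NSURL) throws {
        let writer = try AVAssetWriter(URL: outputURL, fileType: AVFileTypeQuickTimeMovie)
        let settings: [String: AnyObject] = [
            AVVideoCodecKey: AVVideoCodecH264,
            AVVideoWidthKey: 1280,
            AVVideoHeightKey: 720
        ]
        let input = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: settings)
        input.expectsMediaDataInRealTime = true
        writer.addInput(input)
        assetWriter = writer
        writerInput = input
        isRecording = true
    }

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        guard let writer = assetWriter, input = writerInput where isRecording else { return }
        if writer.status == .Unknown {
            // Start the writer on the first frame, anchored to its timestamp
            writer.startWriting()
            writer.startSessionAtSourceTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
        }
        if writer.status == .Writing && input.readyForMoreMediaData {
            input.appendSampleBuffer(sampleBuffer)
        }
    }

    func stopRecording(completion: () -> Void) {
        isRecording = false
        writerInput?.markAsFinished()
        assetWriter?.finishWritingWithCompletionHandler(completion)
    }
}

So "start recording" just means starting to append buffers, and "stop recording" means finishing the writer; the capture session itself keeps running either way.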
I found that RosyWriter is a good example of how AVCaptureSession works, so here is the RosyWriter 2.1 Swift version. It should help those who struggle with AVCaptureSession.