In Swift 2.3 I used this code to take a picture with a custom camera:
func didPressTakePhoto() {
    if let videoConnection = stillImageOutput!.connection(withMediaType: AVMediaTypeVideo) {
        stillImageOutput?.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { (sampleBuffer, error) -> Void in
            if sampleBuffer != nil {
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                let dataProvider = CGDataProviderCreateWithCFData(imageData)
                let cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, CGColorRenderingIntent.RenderingIntentDefault)
                let image = UIImage(CGImage: cgImageRef!, scale: 1.0, orientation: UIImageOrientation.Right)
                self.captureImageView.image = image
            }
        })
    }
}
But this line:
stillImageOutput?.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { (sampleBuffer, error) -> Void in
shows this error:
Value of type 'AVCapturePhotoOutput' has no member 'captureStillImageAsynchronouslyFromConnection'
I tried to solve the problem myself, but I kept getting more and more errors, which is why I'm posting my original code.
Does anybody know how to make my code work again?
Thank you.
You can use AVCapturePhotoOutput like this in Swift 3. You need to conform to AVCapturePhotoCaptureDelegate, which returns the CMSampleBuffer. You can also get a preview image if you give the AVCapturePhotoSettings a previewPhotoFormat:
import AVFoundation
import UIKit

class CameraCaptureOutput: NSObject, AVCapturePhotoCaptureDelegate {

    let cameraOutput = AVCapturePhotoOutput()

    func capturePhoto() {
        let settings = AVCapturePhotoSettings()
        // Ask for a small preview image alongside the full-size photo.
        let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
        let previewFormat: [String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
                                            kCVPixelBufferWidthKey as String: 160,
                                            kCVPixelBufferHeightKey as String: 160]
        settings.previewPhotoFormat = previewFormat
        self.cameraOutput.capturePhoto(with: settings, delegate: self)
    }

    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        if let error = error {
            print(error.localizedDescription)
        }

        if let sampleBuffer = photoSampleBuffer,
           let previewBuffer = previewPhotoSampleBuffer,
           let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer),
           let image = UIImage(data: dataImage) {
            print(image.size)
        }
    }
}
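The class above assumes its cameraOutput has already been added to a configured, running AVCaptureSession, which the answer doesn't show. A minimal sketch of that setup in Swift 3 (the CameraController name and its properties are illustrative, not from the original post):

import AVFoundation
import UIKit

final class CameraController {

    let session = AVCaptureSession()
    let captureOutput = CameraCaptureOutput()

    func configureSession() throws {
        session.beginConfiguration()
        session.sessionPreset = AVCaptureSessionPresetPhoto

        // Use the default video (back) camera as input.
        guard let camera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) else { return }
        let input = try AVCaptureDeviceInput(device: camera)
        if session.canAddInput(input) {
            session.addInput(input)
        }

        // Attach the AVCapturePhotoOutput owned by the delegate class above.
        if session.canAddOutput(captureOutput.cameraOutput) {
            session.addOutput(captureOutput.cameraOutput)
        }

        session.commitConfiguration()
        session.startRunning()
    }
}

Once the session is running, calling captureOutput.capturePhoto() triggers the delegate callback shown above.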
Thanks to Sharpkits I found my solution (this code works for me):
func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
    if let error = error {
        print(error.localizedDescription)
    }

    if let sampleBuffer = photoSampleBuffer, let previewBuffer = previewPhotoSampleBuffer,
       let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {

        // Re-encode the JPEG without the preview buffer and build a CGImage from it.
        let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: nil)
        let dataProvider = CGDataProvider(data: imageData! as CFData)
        let cgImageRef = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .absoluteColorimetric)
        let image = UIImage(cgImage: cgImageRef!, scale: 1.0, orientation: .right)

        // Crop to a square, scale down, and show the result.
        let croppedImage = self.cropToSquare(image: image)
        let newImage = self.scaleImageWith(croppedImage, and: CGSize(width: 600, height: 600))
        print(UIScreen.main.bounds.width)
        self.tempImageView.image = newImage
        self.tempImageView.isHidden = false
    }
}
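The cropToSquare(image:) and scaleImageWith(_:and:) helpers referenced above aren't included in the post. One possible implementation, assuming a centered square crop and a plain UIGraphics redraw (these bodies are a sketch, not the original author's code):

// Hypothetical helpers matching the names used above; not from the original post.
func cropToSquare(image: UIImage) -> UIImage {
    guard let cgImage = image.cgImage else { return image }
    // Crop a centered square from the underlying CGImage.
    let side = min(cgImage.width, cgImage.height)
    let x = (cgImage.width - side) / 2
    let y = (cgImage.height - side) / 2
    guard let cropped = cgImage.cropping(to: CGRect(x: x, y: y, width: side, height: side)) else { return image }
    return UIImage(cgImage: cropped, scale: image.scale, orientation: image.imageOrientation)
}

func scaleImageWith(_ image: UIImage, and size: CGSize) -> UIImage {
    // Redraw the image into a context of the target size; scale 0.0 uses the screen scale.
    UIGraphicsBeginImageContextWithOptions(size, false, 0.0)
    image.draw(in: CGRect(origin: .zero, size: size))
    let scaled = UIGraphicsGetImageFromCurrentImageContext() ?? image
    UIGraphicsEndImageContext()
    return scaled
}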