
Cannot capture video data on Swift Playgrounds, captureOutput AVCaptureVideoDataOutputSampleBufferDelegate delegate method not called

I want to access the iPad's camera from the Swift Playgrounds iPad app. I have found that it is not possible to capture video data, even though the playground itself runs without errors.

captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!), a delegate method of the AVCaptureVideoDataOutputSampleBufferDelegate protocol, is never called (probably because no video data is coming in), whereas it is called in my iOS app.

The view in my playground is supposed to display the FaceTime camera feed. Why can't I display the camera output, even though Apple explicitly says this is allowed? Also, the Playgrounds app asks for camera permission as soon as I open my playground, so access should be granted in some way.
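One thing worth ruling out is the permission state itself. A minimal sketch using the standard AVFoundation authorization API to log the current camera permission state (if this prints "authorized", the missing delegate callback is not a permissions problem):

import AVFoundation

// Log the current camera authorization status.
let status = AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo)
switch status {
case .authorized:
    print("camera access authorized")
case .notDetermined:
    // Trigger the system permission prompt.
    AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo) { granted in
        print("camera access granted: \(granted)")
    }
case .denied, .restricted:
    print("camera access denied or restricted")
}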

import UIKit
import CoreImage
import AVFoundation
import ImageIO
import PlaygroundSupport

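// Wraps an AVCaptureSession: finds a camera, streams sample buffers to a
// delegate for face detection, and exposes a preview view (visageCameraView).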
class Visage: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    var visageCameraView : UIView = UIView()
    fileprivate var faceDetector : CIDetector?
    fileprivate var videoDataOutput : AVCaptureVideoDataOutput?
    fileprivate var videoDataOutputQueue : DispatchQueue?
    fileprivate var cameraPreviewLayer : AVCaptureVideoPreviewLayer?
    fileprivate var captureSession : AVCaptureSession = AVCaptureSession()
    fileprivate let notificationCenter : NotificationCenter = NotificationCenter.default

    override init() {
        super.init()

        self.captureSetup(AVCaptureDevicePosition.front)
        var faceDetectorOptions : [String : AnyObject]?
        faceDetectorOptions = [CIDetectorAccuracy : CIDetectorAccuracyHigh as AnyObject]
        self.faceDetector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: faceDetectorOptions)
    }

    func beginFaceDetection() {
        self.captureSession.startRunning()
    }

    func endFaceDetection() {
        self.captureSession.stopRunning()
    }

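    // Builds the capture pipeline for the camera at the given position:
    // device input, BGRA video data output with delegate, and preview layer.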
    fileprivate func captureSetup (_ position : AVCaptureDevicePosition) {
        var captureError : NSError?
        var captureDevice : AVCaptureDevice!

        // Pick the capture device at the requested position (front or back camera).
        for testedDevice in AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) {
            if ((testedDevice as AnyObject).position == position) {
                captureDevice = testedDevice as! AVCaptureDevice
            }
        }

        // Fall back to the default video device if no camera matched the position.
        if (captureDevice == nil) {
            captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        }

        var deviceInput : AVCaptureDeviceInput?
        do {
            deviceInput = try AVCaptureDeviceInput(device: captureDevice)
        } catch let error as NSError {
            captureError = error
            deviceInput = nil
        }
        captureSession.sessionPreset = AVCaptureSessionPresetHigh

        if (captureError == nil) {
            if (captureSession.canAddInput(deviceInput)) {
                captureSession.addInput(deviceInput)
            }

            // Deliver 32BGRA frames to this object on a dedicated serial queue;
            // frames that arrive while the delegate is still busy are dropped.
            self.videoDataOutput = AVCaptureVideoDataOutput()
            self.videoDataOutput!.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: Int(kCVPixelFormatType_32BGRA)]
            self.videoDataOutput!.alwaysDiscardsLateVideoFrames = true
            self.videoDataOutputQueue = DispatchQueue(label: "VideoDataOutputQueue", attributes: [])
            self.videoDataOutput!.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue!)

            if (captureSession.canAddOutput(self.videoDataOutput)) {
                captureSession.addOutput(self.videoDataOutput)
            }
        }

        // Full-screen preview layer so the camera feed is visible in the view.
        visageCameraView.frame = UIScreen.main.bounds
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer?.frame = UIScreen.main.bounds
        previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
        visageCameraView.layer.addSublayer(previewLayer!)
    }

    // NOT CALLED in the playground (the equivalent iOS app does receive this callback)
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {            
        print("delegate method called!")
    }
}

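// Hosts the camera preview view so the playground can show it as its live view.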
class SmileView: UIView {
    let smileView = UIView()
    var smileRec: Visage!

    override init(frame: CGRect) {
        super.init(frame: frame)
        self.addSubview(smileView)
        self.translatesAutoresizingMaskIntoConstraints = false
        smileRec = Visage()
        smileRec.beginFaceDetection()
        let cameraView = smileRec.visageCameraView
        self.addSubview(cameraView)
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}

let frame = CGRect(x: 0, y: 0, width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)
let sView = SmileView(frame: frame)
PlaygroundPage.current.liveView = sView
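One detail worth noting about the code above: startRunning() is a blocking call, and Apple's documentation recommends invoking it off the main thread so the UI does not stall while the session starts. A minimal variant of beginFaceDetection along those lines (a sketch only):

func beginFaceDetection() {
    // startRunning() blocks until the session has started or failed,
    // so hop off the main thread as Apple's documentation suggests.
    DispatchQueue.global(qos: .userInitiated).async { [weak self] in
        self?.captureSession.startRunning()
    }
}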
Asked by Cesare

1 Answer

Edit: this should now be fixed :)

--

Edit: this was confirmed to be a bug by Apple.

I have filed a bug report and I will update this answer when new official information comes in.
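Note for anyone seeing the same symptom in a regular iOS app rather than in Playgrounds: AVFoundation looks the delegate method up by its exact selector, and Swift 4 renamed it to captureOutput(_:didOutput:from:), so an old-style didOutputSampleBuffer implementation compiles but is silently never called. The Swift 4+ spelling is:

// Swift 4+ signature; the older didOutputSampleBuffer variant
// compiles but is never invoked by the framework.
func captureOutput(_ output: AVCaptureOutput,
                   didOutput sampleBuffer: CMSampleBuffer,
                   from connection: AVCaptureConnection) {
    print("delegate method called!")
}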

Answered by Cesare