AVCaptureVideoDataOutput captureOutput not being called

I'm trying to capture the screen on a Mac using AVCaptureScreenInput, but the AVCaptureVideoDataOutput delegate method captureOutput is never called, and I'm not sure why. I do get a notification saying that the capture session has started running.

import Cocoa
import AVFoundation

class ViewController: NSViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    var captureSession: AVCaptureSession!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
    }

    override func viewWillAppear() {
        super.viewWillAppear()
        NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(ViewController.errorNotif), name: AVCaptureSessionRuntimeErrorNotification, object: nil)
        NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(ViewController.startedNotif), name: AVCaptureSessionDidStartRunningNotification, object: nil)
        startScreenCapture()
    }

    override func viewWillDisappear() {
        super.viewWillDisappear()
        NSNotificationCenter.defaultCenter().removeObserver(self)
    }

    func captureOutput(captureOutput: AVCaptureOutput!, didDropSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        print("ignore frame, add code to handle later")
    }

    func startScreenCapture() {

        let displayId = CGMainDisplayID()
        captureSession = AVCaptureSession()
        if captureSession.canSetSessionPreset(AVCaptureSessionPresetHigh) {
            captureSession.sessionPreset = AVCaptureSessionPresetHigh
        }
        let captureScreenInput = AVCaptureScreenInput(displayID: displayId)
        if captureSession.canAddInput(captureScreenInput) {
            captureSession.addInput(captureScreenInput)
        } else {
            print("Could not add main display to capture input")
        }

        let output = AVCaptureVideoDataOutput()

        let queue = dispatch_queue_create("myQueue", DISPATCH_QUEUE_SERIAL)
        output.setSampleBufferDelegate(self, queue: queue)
        output.alwaysDiscardsLateVideoFrames = true

        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as NSString: NSNumber(unsignedInt: kCVPixelFormatType_32BGRA)]
        captureSession.addOutput(output)
        captureSession.startRunning()
    }

    func errorNotif() {
        print("error starting capture")
    }
    func startedNotif() {
        print("started screen capture")
    }
}

You need to define the didOutputSampleBuffer delegate callback to actually receive the captured frames:

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    print("captured \(sampleBuffer)")
}
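
Once that callback is firing, you can pull the pixel buffer out of each sample buffer and work with it. A minimal sketch using the same (Swift 2-era) signature as above; the width/height logging is only illustrative:

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    // Grab the CVPixelBuffer backing this frame, if there is one
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
    let width = CVPixelBufferGetWidth(pixelBuffer)
    let height = CVPixelBufferGetHeight(pixelBuffer)
    print("captured \(width)x\(height) frame")
}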

P.S. I'm not sure about macOS, but viewWillAppear may not be a good place to do this kind of initialization, since on iOS at least it can be called multiple times.
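
If you do keep the setup in viewWillAppear, one option is to guard it so it only runs once. A minimal sketch, where hasStartedCapture is a hypothetical flag that is not in the original code:

private var hasStartedCapture = false

override func viewWillAppear() {
    super.viewWillAppear()
    // Register observers / start capture only the first time the view appears
    guard !hasStartedCapture else { return }
    hasStartedCapture = true
    startScreenCapture()
}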


I added AVCaptureVideoDataOutputSampleBufferDelegate for a basic example:

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    let videoQueue = DispatchQueue(label: "VIDEO_QUEUE")

    override func viewDidLoad() {
        super.viewDidLoad()

        let captureSession = AVCaptureSession()

        guard let captureDevice = AVCaptureDevice.default(for: .video) else { return }

        guard let input = try? AVCaptureDeviceInput(device: captureDevice) else { return }

        captureSession.addInput(input)

        captureSession.startRunning()

        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        view.layer.addSublayer(previewLayer)
        previewLayer.frame = view.frame

        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.setSampleBufferDelegate(self, queue: videoQueue)
        captureSession.addOutput(dataOutput)
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        print("Camera was able to capture a frame:", Date())
    }
}
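
One small defensive addition, mirroring the canAddInput check in the question's code: confirm the session will actually accept the output before adding it. A minimal sketch:

let dataOutput = AVCaptureVideoDataOutput()
dataOutput.setSampleBufferDelegate(self, queue: videoQueue)
if captureSession.canAddOutput(dataOutput) {
    captureSession.addOutput(dataOutput)
} else {
    print("Could not add the video data output to the capture session")
}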