为什么视频输出始终为 30 fps,尽管它设置为其他方式?

Why is the video output ALWAYS at 30 fps although it's set otherwise?

我将后置摄像头配置为 120 fps。但是,当我通过打印调用此类函数的时间来使用 captureOutput() 检查样本输出时(见下文),差异大约为 33 毫秒(30 帧/秒)。无论我使用 activeVideoMinFrameDuration 和 activeVideoMaxFrameDuration 设置什么 fps,在 captureOutput() 中观察到的结果 fps 始终是 30 fps。

我已经在可以处理慢动作视频的 iPhone 6 上对此进行了测试。我在 https://developer.apple.com/library/ios/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/04_MediaCapture.html 阅读了 Apple 官方文档。有什么线索吗?

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate
{
    /// Back camera that supports a >= 120 fps format, found in viewDidLoad().
    var captureDevice: AVCaptureDevice?
    let captureSession = AVCaptureSession()
    let videoCaptureOutput = AVCaptureVideoDataOutput()

    // Reference timestamp used to measure inter-frame intervals in captureOutput().
    var startTime = NSDate.timeIntervalSinceReferenceDate()

    // press button to start the video session
    @IBAction func startPressed() {
        if captureSession.inputs.count > 0 && captureSession.outputs.count > 0 {
            startTime = NSDate.timeIntervalSinceReferenceDate()
            captureSession.startRunning()
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()

        // set capture session resolution
        captureSession.sessionPreset = AVCaptureSessionPresetLow

        // Find a back camera plus a format whose max frame rate is >= 119 fps.
        let devices = AVCaptureDevice.devices()
        var avFormat: AVCaptureDeviceFormat? = nil
        for device in devices {
            if (device.hasMediaType(AVMediaTypeVideo)) {
                if (device.position == AVCaptureDevicePosition.Back) {
                    for vFormat in device.formats {
                        let ranges = vFormat.videoSupportedFrameRateRanges as! [AVFrameRateRange]
                        // NOTE: the original post rendered "$0" as the garbled token
                        // "[=10=]"; restored to the shorthand closure argument.
                        let filtered: Array<Double> = ranges.map({ $0.maxFrameRate }).filter({ $0 >= 119.0 })
                        if !filtered.isEmpty {
                            // found a good device with good format!
                            captureDevice = device as? AVCaptureDevice
                            avFormat = vFormat as? AVCaptureDeviceFormat
                        }
                    }
                }
            }
        }

        // use the found capture device and format to set things up
        if let dv = captureDevice {
            // FIX (the 30 fps bug): the input must be added to the session BEFORE
            // configuring the device. Adding an input applies the session preset,
            // which resets the device's activeFormat — so when the format was set
            // first, it was silently reverted and the output stayed at ~30 fps.

            // input -> session
            do {
                let input = try AVCaptureDeviceInput(device: dv)
                if captureSession.canAddInput(input) {
                    captureSession.addInput(input)
                }
            } catch _ {
                print("failed adding capture device as input to capture session")
            }

            // configure the 120 fps format AFTER the input is attached
            do {
                try dv.lockForConfiguration()
            } catch _ {
                print("failed locking device")
            }

            dv.activeFormat = avFormat
            dv.activeVideoMinFrameDuration = CMTimeMake(1, 120)
            dv.activeVideoMaxFrameDuration = CMTimeMake(1, 120)
            dv.unlockForConfiguration()
        }

        // output -> session: deliver BGRA sample buffers on a serial queue
        let videoQueue = dispatch_queue_create("videoQueue", DISPATCH_QUEUE_SERIAL)
        videoCaptureOutput.setSampleBufferDelegate(self, queue: videoQueue)
        videoCaptureOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_32BGRA)]
        videoCaptureOutput.alwaysDiscardsLateVideoFrames = true
        if captureSession.canAddOutput(videoCaptureOutput) {
            captureSession.addOutput(videoCaptureOutput)
        }
    }

    /// Delegate callback for each captured frame; prints elapsed time since
    /// startPressed() so the effective frame rate can be observed.
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!)
    {
        print( "\(NSDate.timeIntervalSinceReferenceDate() - startTime)" )

        // More pixel/frame processing here
    }
}

已找到答案。交换 "configure" 和 "input -> session" 这两个块的顺序:必须先把输入添加到会话,再设置 activeFormat。因为向会话添加输入(应用 sessionPreset)会把设备的 activeFormat 重置为默认值,之前设置的 120 fps 格式被悄悄还原,所以输出始终是 30 fps。