How To Keep Two AVCaptureMovieFileOutput In Sync
I have two camera inputs coming into an OSX application, and I am trying to save them using AVCaptureMovieFileOutput. It doesn't take long before their videos are out of sync. After a short test they can be off by 1 to 5 seconds; after an hour-long test they were off by 20 seconds. I feel like there has to be some simple solution for keeping the two outputs in sync. We have tried using the same device for both sessions and outputs and we run into the same problem. We tried forcing the fps down to 15 and still had no luck.
Setting up the outputs
func assignDeviceToPreview(captureSession: AVCaptureSession, device: AVCaptureDevice, previewView: NSView, index: Int){
    captureSession.stopRunning()
    captureSession.beginConfiguration()
    //clearing out old inputs
    for input in captureSession.inputs {
        let i = input as! AVCaptureInput
        captureSession.removeInput(i)
    }
    let output = self.outputs[index]
    output.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    //removing old outputs
    for o in captureSession.outputs {
        if let oc = o as? AVCaptureStillImageOutput {
            captureSession.removeOutput(oc)
            print("removed image out")
        }
    }
    //Adding input
    do {
        try captureSession.addInput(AVCaptureDeviceInput(device: device))

        let camViewLayer = previewView.layer!
        camViewLayer.backgroundColor = CGColorGetConstantColor(kCGColorBlack)

        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = camViewLayer.bounds
        previewLayer.autoresizingMask = [.LayerWidthSizable, .LayerHeightSizable]
        camViewLayer.addSublayer(previewLayer)

        let overlayPreview = overlayPreviews[index]
        overlayPreview.frame.origin = CGPoint.zero
        previewView.addSubview(overlayPreview)

        //adding output
        captureSession.addOutput(output)

        if captureSession == session2 {
            let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
            do {
                let input = try AVCaptureDeviceInput(device: audio)
                captureSession.addInput(input)
            }
        }
    } catch {
        print("Failed to add webcam as AV input")
    }
    captureSession.commitConfiguration()
    captureSession.startRunning()
}
Starting the recording
func startRecording(){
    startRecordingTimer()
    let base = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
    let appFolder = "Sessions"
    let sessionFolder = "session_" + session.UUID
    let path = base+"/"+appFolder+"/"+sessionFolder
    do {
        try NSFileManager.defaultManager().createDirectoryAtPath(path, withIntermediateDirectories: true, attributes: nil)
    } catch {
        print("issue creating folder")
    }
    for fileOutput in fileOutputs {
        let fileName = "cam\(String(fileOutputs.indexOf(fileOutput)!))" + ".mov"
        let fileURL = NSURL.fileURLWithPathComponents([path, fileName])
        fileURLs.append(fileURL!)
        print(fileURL?.absoluteString)

        var captureConnection = fileOutput.connections.first as? AVCaptureConnection
        captureConnection!.videoMinFrameDuration = CMTimeMake(1, 15)
        captureConnection!.videoMaxFrameDuration = CMTimeMake(1, 15)

        if fileOutput == movieFileOutput1 {
            fileOutput.setOutputSettings([AVVideoScalingModeKey: AVVideoScalingModeResize, AVVideoCodecKey: AVVideoCodecH264, AVVideoWidthKey: 1280, AVVideoHeightKey: 720], forConnection: captureConnection)
        } else {
            fileOutput.setOutputSettings([AVVideoScalingModeKey: AVVideoScalingModeResizeAspect, AVVideoCodecKey: AVVideoCodecH264, AVVideoWidthKey: 640, AVVideoHeightKey: 360], forConnection: captureConnection)
        }
        captureConnection = fileOutput.connections.first as? AVCaptureConnection
        print(fileOutput.outputSettingsForConnection(captureConnection))

        fileOutput.startRecordingToOutputFileURL(fileURL, recordingDelegate: self)
        print("start recording")
    }
}
For precise timing control, I think you would need to look into using the lower-level AVAssetWriter framework. It lets you control the writing and timing of individual frames.

Using AVAssetWriter.startSession(atSourceTime: CMTime), you can precisely control when each camera's recording begins.
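As a rough illustration of the idea, here is a minimal, untested sketch of two writers whose sessions are started at the same host-clock time, so the frame timestamps of both cameras are measured from a common origin. The helper names (makeWriter, startBothWriters), the H.264 settings, and the use of the host clock are my own placeholders, not working code from this project; on macOS you would want to confirm that each capture session's masterClock matches the clock you start the sessions against. Written with modern Swift API names:

import AVFoundation
import CoreMedia

// Hypothetical sketch (untested): one H.264 writer per camera.
func makeWriter(url: URL) throws -> (AVAssetWriter, AVAssetWriterInput) {
    let writer = try AVAssetWriter(outputURL: url, fileType: .mov)
    let input = AVAssetWriterInput(mediaType: .video, outputSettings: [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoWidthKey: 1280,
        AVVideoHeightKey: 720
    ])
    input.expectsMediaDataInRealTime = true // live capture; don't stall the pipeline
    if writer.canAdd(input) {
        writer.add(input)
    }
    return (writer, input)
}

// Start both sessions at a single instant read from the host clock, so
// both files interpret incoming frame timestamps against the same origin.
func startBothWriters(_ writerA: AVAssetWriter, _ writerB: AVAssetWriter) {
    let sharedStart = CMClockGetTime(CMClockGetHostTimeClock())
    guard writerA.startWriting(), writerB.startWriting() else { return }
    writerA.startSession(atSourceTime: sharedStart)
    writerB.startSession(atSourceTime: sharedStart)
}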
During writing, you can use an AVCaptureVideoDataOutputSampleBufferDelegate to manipulate the resulting CMSampleBuffers, adjusting their timing information to keep the two videos in sync. See https://developer.apple.com/reference/coremedia/1669345-cmsamplebuffer for a reference on the timing-adjustment parts of the CMSampleBuffer API.
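For the adjustment itself, something along these lines could run in the delegate callback before a buffer is appended to its AVAssetWriterInput. Again an untested sketch: clockOffset (the measured drift between the two streams) is a placeholder you would have to compute yourself.

import CoreMedia

// Hypothetical sketch (untested): shift one stream's timestamps by the
// measured drift before appending the buffer to its AVAssetWriterInput.
func retimed(_ sampleBuffer: CMSampleBuffer, by clockOffset: CMTime) -> CMSampleBuffer? {
    var timing = CMSampleTimingInfo()
    guard CMSampleBufferGetSampleTimingInfo(sampleBuffer, at: 0, timingInfoOut: &timing) == noErr else {
        return nil
    }
    // Pull the presentation (and decode, if valid) timestamps back by the offset.
    timing.presentationTimeStamp = CMTimeSubtract(timing.presentationTimeStamp, clockOffset)
    if timing.decodeTimeStamp.isValid {
        timing.decodeTimeStamp = CMTimeSubtract(timing.decodeTimeStamp, clockOffset)
    }
    var adjusted: CMSampleBuffer?
    let status = CMSampleBufferCreateCopyWithNewTiming(allocator: kCFAllocatorDefault,
                                                       sampleBuffer: sampleBuffer,
                                                       sampleTimingEntryCount: 1,
                                                       sampleTimingArray: &timing,
                                                       sampleBufferOut: &adjusted)
    return status == noErr ? adjusted : nil
}

The callback would then append the result with something like: if input.isReadyForMoreMediaData, let buffer = retimed(sampleBuffer, by: offset) { input.append(buffer) }.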
That said, I have never tried this and can't be sure it will work, but I believe that if you go down this road, you will get close to what you are trying to achieve.