使用 AVFoundation 保存视频 Swift

Save Video Using AVFoundation Swift

您好,我学习了 Jared Davidson 的课程,使用 AVFoundation 创建自定义相机视图并保存图片。 https://www.youtube.com/watch?v=w0O3ZGUS3pk

但是我想录制和保存视频而不是图像。有人可以帮我吗?我敢肯定它很简单,但是 apple 的文档是用 Objective-C 写的,我无法破译它。

这是我的代码。谢谢

import UIKit
import AVFoundation

// Question code (Swift 2 era): shows a live front-camera preview and saves a
// still JPEG to the photo library when the button is tapped.
class ViewController: UIViewController {

    var captureSession = AVCaptureSession()
    // NOTE(review): AVCaptureStillImageOutput is a *photo* output (deprecated
    // since iOS 10 in favor of AVCapturePhotoOutput) — it cannot record video,
    // which is the root of the question.
    var sessionOutput = AVCaptureStillImageOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()


    @IBOutlet var cameraView: UIView!

    // Builds the whole capture pipeline every time the view is about to appear.
    // NOTE(review): super.viewWillAppear(animated) is never called, and the
    // session is re-configured on each appearance — both worth fixing.
    override func viewWillAppear(animated: Bool) {

        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in devices {
            // Only the front-facing camera is wired up.
            if device.position == AVCaptureDevicePosition.Front{


                do{

                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)

                    if captureSession.canAddInput(input){

                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]

                        if captureSession.canAddOutput(sessionOutput){

                            captureSession.addOutput(sessionOutput)
                            captureSession.startRunning()

                            // Attach the live preview layer, filling cameraView.
                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
                            cameraView.layer.addSublayer(previewLayer)

                            previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                            previewLayer.bounds = cameraView.frame


                        }

                    }

                }
                catch{
                    // NOTE(review): swallows the actual error; print(error)
                    // would be far more useful when debugging.
                    print("Error")
                }

            }
        }    

    }


    // Captures a single still frame and writes it to the photo library.
    @IBAction func TakePhoto(sender: AnyObject) {

        if let videoConnection = sessionOutput.connectionWithMediaType(AVMediaTypeVideo){

            sessionOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: {
                buffer, error in

                // NOTE(review): buffer/imageData are force-unwrapped implicitly;
                // a failed capture would crash here.
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
                UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData)!, nil, nil, nil)

            })

        }

    }

}

您可以通过创建 AVCaptureMovieFileOutput、将其添加到您的捕获会话,并使您的 ViewController 符合 AVCaptureFileOutputRecordingDelegate,来将您的视频保存到文件中。

此示例将 5 秒的视频录制到应用的文档目录中名为 "output.mov" 的文件中。

// Answer code (Swift 2 era): extends the question's setup with an
// AVCaptureMovieFileOutput, records 5 seconds of video to
// Documents/output.mov, then saves it to the camera roll via the
// AVCaptureFileOutputRecordingDelegate callback.
class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    var captureSession = AVCaptureSession()
    // Still-image output kept from the original question; not needed for video.
    var sessionOutput = AVCaptureStillImageOutput()
    // Movie output is what actually records video to a file.
    var movieOutput = AVCaptureMovieFileOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    @IBOutlet var cameraView: UIView!

    // NOTE(review): recording is kicked off directly from viewWillAppear for
    // demo purposes; super.viewWillAppear(animated) is still not called.
    override func viewWillAppear(animated: Bool) {
        self.cameraView = self.view

        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in devices {
            if device.position == AVCaptureDevicePosition.Front{


                do{

                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)

                    if captureSession.canAddInput(input){

                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]

                        if captureSession.canAddOutput(sessionOutput){

                            captureSession.addOutput(sessionOutput)

                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
                            cameraView.layer.addSublayer(previewLayer)

                            previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                            previewLayer.bounds = cameraView.frame


                        }

                        // The movie output must be on the session BEFORE
                        // startRecordingToOutputFileURL is called.
                        captureSession.addOutput(movieOutput)

                        captureSession.startRunning()

                        // Record into Documents/output.mov, deleting any
                        // leftover file first (startRecording fails if the
                        // destination already exists).
                        let paths = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
                        let fileUrl = paths[0].URLByAppendingPathComponent("output.mov")
                        try? NSFileManager.defaultManager().removeItemAtURL(fileUrl)
                        movieOutput.startRecordingToOutputFileURL(fileUrl, recordingDelegate: self)

                        // Stop automatically after 5 seconds.
                        let delayTime = dispatch_time(DISPATCH_TIME_NOW, Int64(5 * Double(NSEC_PER_SEC)))
                        dispatch_after(delayTime, dispatch_get_main_queue()) {
                            print("stopping")
                            self.movieOutput.stopRecording()
                        }
                    }

                }
                catch{
                    // NOTE(review): discards the underlying error detail.
                    print("Error")
                }

            }
        }

    }

    // AVCaptureFileOutputRecordingDelegate: fires when stopRecording()
    // completes (or recording fails); on success the finished movie is
    // exported to the camera roll.
    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        print("FINISHED \(error)")
        // save video to camera roll
        if error == nil {
            UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, nil, nil, nil)
        }
    }

}

谢谢你。这对我很有帮助。这是移植到 Swift 3 的 Rhythmic Fistman 答案的一个版本,其中包含所需的导入语句和委托方法。

import UIKit
import AVFoundation

// Swift 3 port of the answer above: same 5-second recording flow with the
// Swift 3 renamed APIs (FileManager, DispatchQueue, capture(_:...) delegate
// methods). NOTE(review): the original post lost its indentation — the class
// body below sits at column 0, but braces still balance (the class closes on
// the final line, so the delegate methods ARE inside the class).
class ViewController: UIViewController,
AVCaptureFileOutputRecordingDelegate {

var captureSession = AVCaptureSession()
// Still-image output retained from the question; unused for video recording.
var sessionOutput = AVCaptureStillImageOutput()
var movieOutput = AVCaptureMovieFileOutput()
var previewLayer = AVCaptureVideoPreviewLayer()

@IBOutlet var cameraView: UIView!

override func viewWillAppear(_ animated: Bool) {
    self.cameraView = self.view

    let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
    for device in devices! {
        // Front camera only, as in the original question.
        if (device as AnyObject).position == AVCaptureDevicePosition.front{


            do{

                let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)

                if captureSession.canAddInput(input){

                    captureSession.addInput(input)
                    sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]

                    if captureSession.canAddOutput(sessionOutput){

                        captureSession.addOutput(sessionOutput)

                        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                        previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                        cameraView.layer.addSublayer(previewLayer)

                        previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                        previewLayer.bounds = cameraView.frame


                    }

                    // Movie output must be attached before recording starts.
                    captureSession.addOutput(movieOutput)

                    captureSession.startRunning()

                    // Record into Documents/output.mov; remove any stale file
                    // first so startRecording does not fail.
                    let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
                    let fileUrl = paths[0].appendingPathComponent("output.mov")
                    try? FileManager.default.removeItem(at: fileUrl)
                    movieOutput.startRecording(toOutputFileURL: fileUrl, recordingDelegate: self)

                    // Auto-stop after 5 seconds.
                    let delayTime = DispatchTime.now() + 5
                    DispatchQueue.main.asyncAfter(deadline: delayTime) {
                        print("stopping")
                        self.movieOutput.stopRecording()
                    }
                }

            }
            catch{
                // NOTE(review): discards the underlying error detail.
                print("Error")
            }

        }
    }

}


//MARK: AVCaptureFileOutputRecordingDelegate Methods

// Called when recording actually begins; nothing to do here.
func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {

}

// Called when recording finishes; on success, export to the camera roll.
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
    print("FINISHED \(error)")
    // save video to camera roll
    if error == nil {
        UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
    }
}

}

if (device as AnyObject).position == AVCaptureDevicePosition.front{

之后

添加

// Audio Input — snippet to insert after the front-camera check so that
// recordings include sound (the movie output records audio only when the
// session has an audio input).
                let audioInputDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)

                do
                {
                    // AVCaptureDeviceInput(device:) throws when the device is
                    // unavailable or access is denied.
                    let audioInput = try AVCaptureDeviceInput(device: audioInputDevice)

                    // Add Audio Input
                    if captureSession.canAddInput(audioInput)
                    {
                        captureSession.addInput(audioInput)
                    }
                    else
                    {
                        NSLog("Can't Add Audio Input")
                    }
                }
                catch let error
                {
                    NSLog("Error Getting Input Device: \(error)")
                }

谢谢

关于录音问题,

创建captureSession时添加这段代码

// Ask for microphone permission before attaching the audio input; without it
// recordings are silent. (The original line was corrupted by machine
// translation — "completion:" / "in" had been rendered as Chinese.)
askMicroPhonePermission(completion: { (isMicrophonePermissionGiven) in
    if isMicrophonePermissionGiven {
        do {
            // Attach the default microphone to the same capture session.
            try self.captureSession.addInput(AVCaptureDeviceInput(device: self.captureAudio))
        } catch {
            // Fixed copy-pasted message (was "Error creating the database").
            print("Error adding audio input: \(error)")
        }
    }
})

/////////////////////////////////////////////////////////////

askMicroPhonePermission函数如下

/// Checks — and, when undetermined, requests — microphone record permission,
/// reporting the outcome through `completion`.
/// - Parameter completion: Receives `true` when permission is granted,
///   `false` otherwise (caller may show an alert on `false`).
func askMicroPhonePermission(completion: @escaping (_ success: Bool)-> Void) {
    let session = AVAudioSession.sharedInstance()
    switch session.recordPermission() {
    case AVAudioSessionRecordPermission.granted:
        // Already authorized.
        completion(true)
    case AVAudioSessionRecordPermission.denied:
        // Previously refused; only Settings can change this now.
        completion(false)
    case AVAudioSessionRecordPermission.undetermined:
        // First ask: prompt the user and forward the answer as-is.
        session.requestRecordPermission { granted in
            completion(granted)
        }
    default:
        completion(false)
    }
}

并且您必须在 info.plist 文件中添加 NSMicrophoneUsageDescription 键值。

您可以使用此代码将视频保存到照片库中。您必须提供以下参数,其中最重要的是 outputFileURL.path,即要保存到相机胶卷相册的电影文件的文件系统路径;至于其余参数,您可以传入相应的值,也可以根据需要传 nil。

// Modern (Swift 4+) AVCaptureFileOutputRecordingDelegate callback: saves the
// finished recording to the camera roll with a completion selector.
// NOTE(review): `outputURL` and `CameraController.video(_:didFinishSavingWithError:contextInfo:)`
// are defined elsewhere in the poster's project — not shown here.
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {

// NOTE(review): indentation is mangled in the original post; the body below
// is exactly as posted.
if (error != nil) {
        print("Error recording movie: \(error!.localizedDescription)")
    } else {


        // Export to the photo library; the selector is notified when the
        // save finishes (or fails).
        UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, #selector(CameraController.video(_:didFinishSavingWithError:contextInfo:)), nil)

    }
    // Clear the stored destination so the next recording gets a fresh URL.
    outputURL = nil

}
/// Extracts a thumbnail frame (at the 1-second mark) from the video at `url`.
/// - Parameter url: A string convertible to a `URL` (e.g. a "file://..." URL).
/// - Returns: The frame as a `UIImage`, or `nil` when the string is not a
///   valid URL or the frame cannot be generated.
func getCurrentFrame(url:String) -> UIImage? {
    // Fixed: the original force-unwrapped URL(string:)! and crashed on a
    // malformed string; the signature already allows returning nil instead.
    guard let assetURL = URL(string: url) else { return nil }
    let asset = AVAsset(url: assetURL)
    let assetImgGenerate = AVAssetImageGenerator(asset: asset)
    // Respect the track's preferred transform so the frame is upright.
    assetImgGenerate.appliesPreferredTrackTransform = true
    //Can set this to improve performance if target size is known before hand
    //assetImgGenerate.maximumSize = CGSize(width,height)
    let time = CMTimeMakeWithSeconds(1.0, preferredTimescale: 600)
    do {
        let img = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
        let thumbnail = UIImage(cgImage: img)
        return thumbnail
    } catch {
      print(error.localizedDescription)
      return nil
    }
}

给那些代理(delegate)方法没有被调用的人一个小提示:(我使用的是 Xcode 12.x / iOS 14.5)

假设你有一个指向本地目标的文件 URL(见前面示例中的代码……文档目录……)。

如果你这样做:

// Working order: start the session FIRST, then attach the recording
// destination — with this sequence the delegate IS called.
DispatchQueue.global(qos: .userInitiated).async { //[weak self] in
                    self.captureSession.startRunning()
                    self.addDest()    
                }

有效,

但如果你交换:

DispatchQueue.global(qos: .userInitiated).async { //[weak self] in
                    self.addDest()
                    self.captureSession.startRunning()

                }

代理未被调用。

设置输出后调用“startRunning”:

if captureSession.canAddInput(input!) && captureSession.canAddOutput(stillImageOutput) {
            captureSession.addInput(input!)
            captureSession.addOutput(stillImageOutput)
            setupLivePreview()
            self.videoPreviewLayer.frame = self.previewView.bounds
            
            if addVideoOutput(){
                DispatchQueue.global(qos: .userInitiated).async { //[weak self] in
                    self.captureSession.startRunning()
                    
                    self.addDest()

                }
                
            }

...

哪里

 /// Creates a fresh movie-file output and attaches it to the capture session.
 /// - Returns: `true` only when the session accepted the new output.
 func addVideoOutput() -> Bool{
        movieOutput = AVCaptureMovieFileOutput()
        // Bail out early when the session cannot take another output.
        guard captureSession.canAddOutput(movieOutput) else {
            return false
        }
        captureSession.addOutput(movieOutput)
        return true
    }