AVCaptureFileOutputRecordingDelegate not writing to file, Swift 2

My view controller contains a preview layer that shows a live feed from the camera. While a button is held down, my code is supposed to record video and write it to a local temporary file. This worked with Swift 1.2 and Xcode 6, but it stopped working after I converted the code to Swift 2 when updating to Xcode 7.

When I release the button, captureOutput is never called and no file is written to the given path.

Some of the relevant code is below.

Any help would be greatly appreciated!

import UIKit
import MobileCoreServices
import AVFoundation
import AVKit




class ViewControllerPhoto: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate, UIPickerViewDelegate, UIGestureRecognizerDelegate, ACEDrawingViewDelegate, UITextViewDelegate, AVCaptureFileOutputRecordingDelegate, UITableViewDelegate, UITableViewDataSource {

    @IBOutlet weak var captureButton: UIButton!

    var videoCheck: Bool = false

    let captureSession = AVCaptureSession()
    var previewLayer : AVCaptureVideoPreviewLayer?
    var captureDevice : AVCaptureDevice?

    var movieFileOutput = AVCaptureMovieFileOutput()
    var imageData: NSData!
    var outputPath: NSString!
    var outputURL: NSURL!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.


        if captureSession.canSetSessionPreset(AVCaptureSessionPresetMedium) {
            captureSession.sessionPreset = AVCaptureSessionPresetMedium
        }

        let devices = AVCaptureDevice.devices()

        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the back camera
                if(device.position == AVCaptureDevicePosition.Back) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        print("Capture device found")


                        beginSession() 
                    }
                }
            }

        }

        self.videoCheck = false

    }

    func beginSession() {

        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession.canAddOutput(stillImageOutput) {
            captureSession.addOutput(stillImageOutput)
        }
        configureDevice()

        // In Swift 2 the AVCaptureDeviceInput initializer throws instead of taking an NSError pointer
        do {
            try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
        }
        catch {
            print("error: \(error)")
        }

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.view.layer.addSublayer(previewLayer!)
        previewLayer?.frame = self.view.layer.frame

        previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill

        if captureSession.canAddOutput(movieFileOutput) {

            self.captureSession.addOutput(movieFileOutput)

        }

        // SET CONNECTION PROPERTIES

        let captureConnection: AVCaptureConnection = movieFileOutput.connectionWithMediaType(AVMediaTypeVideo)
        if captureConnection.supportsVideoOrientation {
            captureConnection.videoOrientation = AVCaptureVideoOrientation.Portrait
        }

        let audioDevice: AVCaptureDevice = AVCaptureDevice.devicesWithMediaType(AVMediaTypeAudio)[0] as! AVCaptureDevice
        do {
            let audioDeviceInput: AVCaptureDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
            if captureSession.canAddInput(audioDeviceInput) {
                captureSession.addInput(audioDeviceInput)
            }
        }
        catch {
            print("error adding audio input: \(error)")
        }

        captureSession.startRunning()
    }



    func captureVideo() {
        outputPath = (NSURL(fileURLWithPath: NSTemporaryDirectory())).URLByAppendingPathComponent("movie.mov").absoluteString as NSString

        outputURL = NSURL(fileURLWithPath: outputPath as String)

        let fileManager: NSFileManager = NSFileManager.defaultManager()

        if outputURL.path != nil{

            if fileManager.fileExistsAtPath(outputURL.path!) {

                do{
                    try fileManager.removeItemAtPath(outputPath as String)

                }
                catch{
                print(error)
                }

            }
        }
        self.movieFileOutput.startRecordingToOutputFileURL(outputURL, recordingDelegate: self)

    }


    func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice {
        let devices: NSArray = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in devices {
            if(device.position == position){
                return device as! AVCaptureDevice
            }
        }
        return AVCaptureDevice()
    }

    @IBAction func captureButtonIsLongPressed(sender: UILongPressGestureRecognizer) {

        if sender.state == UIGestureRecognizerState.Began {

            videoCheck = true
            captureVideo()

        }

        else if sender.state == UIGestureRecognizerState.Ended{

            self.movieFileOutput.stopRecording()

        }

    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        print("Output")

        playVideo()

    }
    func playVideo() {

        let url = outputURL

        let player = AVPlayer(URL: url)
        let playerLayer = AVPlayerLayer(player: player)
        playerLayer.frame = self.view.bounds
        // The player layer has to be added to the view hierarchy, otherwise nothing shows up
        self.view.layer.addSublayer(playerLayer)

        player.play()

    }

}

After some fiddling around I finally figured it out. My mistake was in the line

outputPath = (NSURL(fileURLWithPath: NSTemporaryDirectory())).URLByAppendingPathComponent("movie.mov").absoluteString as NSString

Of course that gives an NSString holding a URL string (with a file:// scheme), not a file system path, so the NSURL built from it in the next line pointed somewhere that could never be written to and the recording never started. Changing it to these lines solved my problem:

outputURL = NSURL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true).URLByAppendingPathComponent("movie.mov")
outputPath = outputURL.path
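
For completeness, here is a minimal sketch of how the corrected lines slot into captureVideo(). It uses the same property names as the question; the only other change is that it checks outputURL.path directly for the file-exists test instead of keeping a separate outputPath string:

    func captureVideo() {
        // Build a real file URL first, instead of round-tripping through absoluteString
        outputURL = NSURL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true).URLByAppendingPathComponent("movie.mov")

        // Remove any leftover movie from a previous recording
        let fileManager = NSFileManager.defaultManager()
        if let path = outputURL.path where fileManager.fileExistsAtPath(path) {
            do {
                try fileManager.removeItemAtPath(path)
            }
            catch {
                print(error)
            }
        }

        self.movieFileOutput.startRecordingToOutputFileURL(outputURL, recordingDelegate: self)
    }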

Hope this helps somebody!
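
One more note for anyone debugging a similar symptom: in my case the delegate never fired at all because the recording never started, but when the callback does fire, checking the error parameter and confirming the file is actually on disk narrows things down quickly. A minimal sketch against the same Swift 2 delegate signature used above:

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        // A non-nil error means recording stopped for some reason other than stopRecording()
        if error != nil {
            print("Recording finished with error: \(error.localizedDescription)")
        }
        // Confirm the movie actually landed on disk before trying to play it
        if let path = outputFileURL.path where NSFileManager.defaultManager().fileExistsAtPath(path) {
            print("Movie written to \(path)")
            playVideo()
        }
    }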