AVAudioEngine recording sounds played in AVAudioPlayerNode

I need to play sounds and be able to record the resulting melody from a buffer, but I don't understand how to set up the AVAudioSession category and/or the AVAudioPlayerNode to achieve this. The sounds are scheduled on a player node. If I understand correctly, AVAudioRecorder records only from the microphone, not the music played with an AVAudioPlayerNode. So here is my attempt. First, I set up a session:

        NSError *error = nil;
        AVAudioSession *audioSession = [AVAudioSession sharedInstance];
        [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord
                      withOptions:AVAudioSessionCategoryOptionMixWithOthers
                            error:&error];
        if (error) {
            NSLog(@"AVAudioSession error %ld, %@", error.code, error.localizedDescription);
        }

        [audioSession setActive:YES error:&error];
        if (error) {
            NSLog(@"AVAudioSession error %ld, %@", error.code, error.localizedDescription);
        }

Setting up the recording file:

    NSString* docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject stringByAppendingPathComponent:@"Recording.caf"];
    NSURL* url = [NSURL fileURLWithPath:docs];

    NSError* error = nil;
    self.fileForRecording = [[AVAudioFile alloc] initForWriting:url
                                                       settings:[self.engine.inputNode inputFormatForBus:0].settings
                                                          error:&error];
    if (error) {
        NSLog(@"CREATE FILE ERROR %@", error);
    }

Then, the engine:

        self.engine = [AVAudioEngine new];
        self.player = [AVAudioPlayerNode new];

        AVAudioOutputNode *output = self.engine.outputNode;

        [self.engine attachNode:self.player];

        [self.engine connect:self.player to:output fromBus:0 toBus:0 format:format];
        [self.engine prepare];

And the recording method:

    - (void)startRecording {
        AVAudioFormat *recordingFormat = [self.engine.outputNode outputFormatForBus:0];

        if (recordingFormat.sampleRate > 0) {
            __weak typeof(self) weakSelf = self;
            [self.engine.inputNode installTapOnBus:0
                                        bufferSize:1024
                                            format:recordingFormat
                                             block:^(AVAudioPCMBuffer * _Nonnull buffer, AVAudioTime * _Nonnull when) {
                                                 NSError *error = nil;
                                                 [weakSelf.fileForRecording writeFromBuffer:buffer error:&error];
                                                 if (error) {
                                                     NSLog(@"WRITE ERROR %@", error);
                                                 }
                                             }];
        }
    }

I tried passing nil as the recording format when installing the tap on the bus; in that case the block is never called. I tried [self.engine.mainMixerNode outputFormatForBus:0], which leads to a crash. Using self.engine.outputNode also crashes. Please help :)

I created an empty project in Swift, and my engine graph looks like this: I have 2 sounds and 2 player nodes, one per sound. The players are connected to the engine's mainMixerNode. When I want to record the music coming from both players, I take the buffers from the mainMixerNode's output. This works!
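
Before the full project, here is a condensed sketch of just the essential part: every player feeds mainMixerNode, and a tap installed on mainMixerNode receives the mixed output, which gets written to an AVAudioFile. The MixRecorder class and the single player in it are my own simplification, not part of the project below.

import AVFoundation

// Condensed sketch: play a looping buffer through mainMixerNode and record the mixer's output.
final class MixRecorder {
    let engine = AVAudioEngine()
    let player = AVAudioPlayerNode()
    private var file: AVAudioFile?

    func start(buffer: AVAudioPCMBuffer, writingTo url: URL) throws {
        engine.attach(player)
        engine.connect(player, to: engine.mainMixerNode, format: buffer.format)

        // Use the mixer's own output format for both the tap and the file,
        // so the buffers delivered to the tap can be written without conversion.
        let format = engine.mainMixerNode.outputFormat(forBus: 0)
        file = try AVAudioFile(forWriting: url, settings: format.settings)

        engine.mainMixerNode.installTap(onBus: 0, bufferSize: 1024, format: format) { [weak self] tapBuffer, _ in
            try? self?.file?.write(from: tapBuffer)
        }

        try engine.start()
        player.scheduleBuffer(buffer, at: nil, options: .loops, completionHandler: nil)
        player.play()
    }

    func stop() {
        engine.mainMixerNode.removeTap(onBus: 0)
        player.stop()
        engine.stop()
        file = nil // releasing the AVAudioFile finalizes the recording
    }
}

The full view controller from the project follows.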

import UIKit
import AVFoundation

class ViewController: UIViewController {
    var engine = AVAudioEngine()
    var recordingFile: AVAudioFile?
    var audioPlayer: AVAudioPlayer?
    let playerSaw = AVAudioPlayerNode()
    let playerDk = AVAudioPlayerNode()
    var bufferSaw: AVAudioPCMBuffer?
    var bufferDk: AVAudioPCMBuffer?

    override func viewDidLoad() {
        super.viewDidLoad()

        let audioSession = AVAudioSession.sharedInstance()

        do {
            try audioSession.setCategory(
                AVAudioSessionCategoryPlayAndRecord)
        } catch let error as NSError {
            print("audioSession error: \(error.localizedDescription)")
        }

        self.bufferSaw = self.createBuffer(forFileNamed: "16_saw")
        self.bufferDk = self.createBuffer(forFileNamed: "23_dk")

        if self.bufferSaw != nil &&
            self.bufferDk != nil {
            self.engine.attach(self.playerSaw)
            self.engine.attach(self.playerDk)

            let mainMixerNode = self.engine.mainMixerNode
            self.engine.connect(self.playerSaw, to:mainMixerNode, format:self.bufferSaw!.format)
            self.engine.connect(self.playerDk, to:mainMixerNode, format:self.bufferDk!.format)
            self.engine.prepare()

            do {
                try self.engine.start()
            } catch (let error) {
                print("START FAILED", error)
            }
        }
    }

    @IBAction func record(sender: AnyObject) {
        self.createRecordingFile()

        // Tap the main mixer: its output carries the mixed audio of both players.
        self.engine.mainMixerNode.installTap(onBus: 0,
                                             bufferSize: 1024,
                                             format: self.engine.mainMixerNode.outputFormat(forBus: 0)) { [weak self] (buffer, time) -> Void in
            do {
                try self?.recordingFile?.write(from: buffer)
            } catch (let error) {
                print("RECORD ERROR", error)
            }
        }
    }

    @IBAction func stop(sender: AnyObject) {
        self.engine.mainMixerNode.removeTap(onBus: 0)
    }

    fileprivate func startEngineIfNotRunning() {
        if (!self.engine.isRunning) {
            do {
                try self.engine.start()
            } catch (let error) {
                print("RESTART FAILED", error)
            }
        }
    }

    @IBAction func playSaw(sender: UIButton) {
        if let buffer = self.bufferSaw {
            self.startEngineIfNotRunning()

            sender.isSelected = !sender.isSelected
            if (sender.isSelected) {
                self.playerSaw.scheduleBuffer(buffer,
                                              at: nil,
                                              options: .loops,
                                              completionHandler: nil)
                self.playerSaw.play()
            } else {
                self.playerSaw.pause()
            }
        }
    }

    @IBAction func playDk(sender: UIButton) {
        if let buffer = self.bufferDk {
            self.startEngineIfNotRunning()

            sender.isSelected = !sender.isSelected
            if (sender.isSelected) {
                self.playerDk.scheduleBuffer(buffer,
                                             at: nil,
                                             options: .loops,
                                             completionHandler: nil)
                self.playerDk.play()
            } else {
                self.playerDk.pause()
            }
        }
    }

    @IBAction func playAudio(_ sender: AnyObject) {
        if let url = self.recordingFile?.url {
            do {
                self.audioPlayer = try AVAudioPlayer(contentsOf:
                    url)
                self.audioPlayer?.prepareToPlay()
                self.audioPlayer?.play()
            } catch let error as NSError {
                print("audioPlayer error: \(error.localizedDescription)")
            }
        }
    }

    fileprivate func createRecordingFile() {
        if let dir = NSSearchPathForDirectoriesInDomains(FileManager.SearchPathDirectory.documentDirectory, FileManager.SearchPathDomainMask.allDomainsMask, true).first {
            var url = URL(fileURLWithPath: dir)
            url.appendPathComponent("my_file.caf")
            let format = self.engine.outputNode.inputFormat(forBus: 0)

            do {
                self.recordingFile = try AVAudioFile(forWriting: url, settings:format.settings)
            } catch (let error) {
                print("CREATE RECORDING FILE ERROR", error);
            }
        }
    }

    fileprivate func createBuffer(forFileNamed fileName: String) -> AVAudioPCMBuffer? {
        var res: AVAudioPCMBuffer?

        if let fileURL = Bundle.main.url(forResource: fileName, withExtension: "caf") {
            do {
                let file = try AVAudioFile(forReading: fileURL)
                res = AVAudioPCMBuffer(pcmFormat: file.processingFormat, frameCapacity:AVAudioFrameCount(file.length))
                if let buffer = res {
                    do {
                        try file.read(into: buffer)
                    } catch (let error) {
                        print("ERROR read file", error)
                    }
                }
            } catch (let error) {
                print("ERROR file creation", error)
            }
        }
        return res
    }
}
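
Compared with my original Objective-C attempt, two things changed: where the tap is installed and which format is used. The tap now sits on mainMixerNode, which carries the mixed output of both players, instead of on inputNode, which only ever carries the microphone signal. The tap format and the AVAudioFile settings both describe the mixer-to-output connection (mainMixerNode.outputFormat(forBus: 0) and outputNode.inputFormat(forBus: 0) refer to the same format here), so the buffers delivered to the tap can be written straight to the file; the crashes in the original version were most likely caused by installing a tap on inputNode with a format that did not match the actual input hardware.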