AudioKit playback crackling

Problem description · votes: 0 · answers: 1

I want to analyze the frequency of the microphone input and then play the correct note near the detected frequency. I'm doing this with AudioKit.

This approach works, but ever since I started using AudioKit to get the frequency, the sound played after frequency detection sometimes crackles during playback. This only started happening after I added AudioKit; before that, everything was fine...

import AudioKit   // AudioKit 4.x API (AKMicrophone, AKFrequencyTracker, AKBooster)
import UIKit

var mic: AKMicrophone!
var tracker: AKFrequencyTracker!
var silence: AKBooster!   // gain-0 booster keeps the tracker in the signal chain without audible passthrough
var plotTimer: Timer!     // drives updatePlotUI(); invalidated in deinitFrequencyTracker()

    func initFrequencyTracker() {
        AKSettings.channelCount = 2
        AKSettings.audioInputEnabled = true
        AKSettings.defaultToSpeaker = true
        AKSettings.allowAirPlay = true
        AKSettings.useBluetooth = true
        AKSettings.allowHapticsAndSystemSoundsDuringRecording = true
        mic = AKMicrophone()
        tracker = AKFrequencyTracker(mic)
        silence = AKBooster(tracker, gain: 0)
    }

    func deinitFrequencyTracker() {
        AKSettings.audioInputEnabled = false
        plotTimer.invalidate()
        do {
            try AudioKit.stop()
            AudioKit.output = nil
        } catch {
            print(error)
        }
    }

    func initPlotTimer() {
        AudioKit.output = silence
        do {
            try AKSettings.setSession(category: .playAndRecord, with: [.defaultToSpeaker, .allowBluetooth, .allowAirPlay, .allowBluetoothA2DP])
            try AudioKit.start()
        } catch {
            AKLog("AudioKit did not start!")
        }
        setupPlot()
        plotTimer = Timer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(updatePlotUI), userInfo: nil, repeats: true)
    }

    func setupPlot() {
        let plot = AKNodeOutputPlot(mic, frame: audioInputPlot.bounds)
        plot.translatesAutoresizingMaskIntoConstraints = false
        plot.alpha = 0.3
        plot.plotType = .rolling
        plot.shouldFill = true
        plot.shouldCenterYAxis = false
        plot.shouldMirror = true
        plot.color = UIColor(named: uiFarbe)
        audioInputPlot.addSubview(plot)

        // Pin the AKNodeOutputPlot to the audioInputPlot
        var constraints = [plot.leadingAnchor.constraint(equalTo: audioInputPlot.leadingAnchor)]
        constraints.append(plot.trailingAnchor.constraint(equalTo: audioInputPlot.trailingAnchor))
        constraints.append(plot.topAnchor.constraint(equalTo: audioInputPlot.topAnchor))
        constraints.append(plot.bottomAnchor.constraint(equalTo: audioInputPlot.bottomAnchor))
        constraints.forEach { $0.isActive = true }
    }

    @objc func updatePlotUI() {
        if tracker.amplitude > 0.3 {
            let trackerFrequency = Float(tracker.frequency)

            guard trackerFrequency < 7_000 else {
                // This is a bit of hack because of modern Macbooks giving super high frequencies
                return
            }

            var frequency = trackerFrequency
            while frequency > Float(noteFrequencies[noteFrequencies.count - 1]) {
                frequency /= 2.0
            }
            while frequency < Float(noteFrequencies[0]) {
                frequency *= 2.0
            }

            var minDistance: Float = 10_000.0
            var index = 0

            for i in 0..<noteFrequencies.count {
                let distance = fabsf(Float(noteFrequencies[i]) - frequency)
                if distance < minDistance {
                    index = i
                    minDistance = distance
                }
                print(minDistance, distance)
            }
            //                let octave = Int(log2f(trackerFrequency / frequency))

            frequencyLabel.text = String(format: "%0.1f", tracker.frequency)

            if frequencyTranspose(note: notesToTanspose[index]) != droneLabel.text {
                momentaneNote = frequencyTranspose(note: notesToTanspose[index])
                droneLabel.text = momentaneNote
                stopSinglePlayer()
                DispatchQueue.main.asyncAfter(deadline: .now() + 0.03, execute: {
                    self.prepareSinglePlayerFirstForStart(note: self.momentaneNote)
                    self.startSinglePlayer()
                })
            }

        }
    }

    func frequencyTranspose(note: String) -> String {
        var indexNote = notesToTanspose.firstIndex(of: note)!
        let chosenInstrument = UserDefaults.standard.object(forKey: "whichInstrument") as! String
        if chosenInstrument == "Bb" {
            if indexNote + 2 >= notesToTanspose.count {
                indexNote -= 12
            }
            return notesToTanspose[indexNote + 2]
        } else if chosenInstrument == "Eb" {
            if indexNote - 3 < 0 {
                indexNote += 12
            }
            return notesToTanspose[indexNote - 3]
        } else {
            return note
        }
    }
audiokit
1 Answer
0 votes

It seems your implementation could be improved somewhat by putting iOS multithreading principles into practice. Now, I'm not an expert on this, but if we look at the statement: "the sound played after frequency detection sometimes crackles during playback"

I would point out that the "frequency" of the crackles is random, or rather unpredictable, and that it happens while computations are running.

So, move the code that doesn't need to be computed on the main thread onto a background thread (https://developer.apple.com/documentation/DISPATCH).
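To illustrate, here is a minimal sketch of that idea applied to the question's updatePlotUI(): the octave folding and nearest-note search run on a background queue, and only the label updates hop back to the main queue. It reuses the tracker, noteFrequencies, notesToTanspose, frequencyLabel, droneLabel and frequencyTranspose names from the question; the queue choice and the structure are my assumptions, not the answer's actual code.

    // Sketch only (assumes the same outlets and arrays as in the question).
    // Heavy work runs on a background queue; UIKit calls return to the main queue.
    @objc func updatePlotUI() {
        guard tracker.amplitude > 0.3 else { return }
        let trackerFrequency = Float(tracker.frequency)
        guard trackerFrequency < 7_000 else { return }

        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            guard let self = self else { return }

            // Fold the detected pitch into the reference octave (as in the question).
            var frequency = trackerFrequency
            while frequency > Float(self.noteFrequencies[self.noteFrequencies.count - 1]) {
                frequency /= 2.0
            }
            while frequency < Float(self.noteFrequencies[0]) {
                frequency *= 2.0
            }

            // Nearest-note search, now off the main thread.
            var minDistance: Float = 10_000.0
            var index = 0
            for i in 0..<self.noteFrequencies.count {
                let distance = fabsf(Float(self.noteFrequencies[i]) - frequency)
                if distance < minDistance {
                    index = i
                    minDistance = distance
                }
            }

            // UI and player control must stay on the main queue.
            DispatchQueue.main.async {
                self.frequencyLabel.text = String(format: "%0.1f", trackerFrequency)
                self.droneLabel.text = self.frequencyTranspose(note: self.notesToTanspose[index])
                // ...stop/restart the single player here, as in the original code...
            }
        }
    }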


While refactoring, you can also test your implementation by changing how often the Timer invokes your callback calculation. Lowering the interval to, say, 0.05 fires it more often; raising it to, say, 0.2 fires it less often, and you would probably hear fewer random crackles.
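For example, with the question's plotTimer only the interval changes (the 0.2 here is just the suggested test value):

    // Fire the callback less often while testing (every 0.2 s instead of 0.1 s).
    plotTimer = Timer.scheduledTimer(timeInterval: 0.2,
                                     target: self,
                                     selector: #selector(updatePlotUI),
                                     userInfo: nil,
                                     repeats: true)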

Now, when it comes to concurrency, this is easier said than done, but that is where your implementation needs improving.
