我使用下面的函数通过使用 avaudiomixer 和 avaudioengine 来改变音频文件的效果。问题是我需要听完整的音频来保存新的音频。该功能帮助我保存扩展名为 .caf 的文件,我也希望它位于 .m4a 中。我也尝试替换新音频文件的 kAudioFormatLinearPCM 和 .caf 扩展名,但是文档中保存的音频文件没有播放。
函数中使用的参数是:
var audioEngine = AVAudioEngine()
var audioPlayerNode = AVAudioPlayerNode()
var reverb = AVAudioUnitReverb()
var audioFile = AVAudioFile()
var format = AVAudioFormat()
var audioMixer = AVAudioMixerNode()
var player = AVAudioPlayer()
var newAudio = AVAudioFile()
以下功能将效果添加到音频文件。
private func playAudio(pitch : Float, rate: Float, reverb: Float, echo: Float) {
do{
let url = URL(fileURLWithPath: Bundle.main.path(forResource: "audio_File_Name", ofType: "mp3")!)
audioFile = try AVAudioFile(forReading: url)
}catch{
print("error")
}
// Initialize variables
audioEngine.attach(audioPlayerNode)
// Setting the pitch
let pitchEffect = AVAudioUnitTimePitch()
pitchEffect.pitch = pitch
audioEngine.attach(pitchEffect)
// Setting the playback-rate
let playbackRateEffect = AVAudioUnitVarispeed()
playbackRateEffect.rate = rate
audioEngine.attach(playbackRateEffect)
// Setting the reverb effect
let reverbEffect = AVAudioUnitReverb()
reverbEffect.loadFactoryPreset(AVAudioUnitReverbPreset.cathedral)
reverbEffect.wetDryMix = reverb
audioEngine.attach(reverbEffect)
// Setting the echo effect on a specific interval
let echoEffect = AVAudioUnitDelay()
echoEffect.delayTime = TimeInterval(echo)
audioEngine.attach(echoEffect)
// Set up a mixer node
audioEngine.attach(audioMixer)
// Chain all these up, ending with the output
audioEngine.connect(audioPlayerNode, to: playbackRateEffect, format: audioFile.processingFormat)
audioEngine.connect(playbackRateEffect, to: pitchEffect, format: audioFile.processingFormat)
audioEngine.connect(pitchEffect, to: reverbEffect, format: audioFile.processingFormat)
audioEngine.connect(reverbEffect, to: echoEffect, format: audioFile.processingFormat)
audioEngine.connect(echoEffect, to: audioMixer, format: audioFile.processingFormat)
audioEngine.connect(audioMixer, to: audioEngine.mainMixerNode , format: audioFile.processingFormat)
audioPlayerNode.scheduleFile(audioFile, at: nil)
try! audioEngine.start()
let dirPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
let tmpFileUrl = URL(fileURLWithPath: dirPaths.appending("/effectedSound\(Int(Date().timeIntervalSince1970)).caf"))
print(tmpFileUrl)
do {
var settings: [String : Any] = [:]
settings[AVFormatIDKey] = kAudioFormatLinearPCM
settings[AVAudioFileTypeKey] = kAudioFileCAFType
settings[AVSampleRateKey] = audioFile.fileFormat.sampleRate //buffer.format.sampleRate
settings[AVNumberOfChannelsKey] = 2
settings[AVLinearPCMIsFloatKey] = (audioFile.fileFormat.commonFormat == .pcmFormatInt32)
newAudio = try AVAudioFile(forWriting: tmpFileUrl, settings: settings)
} catch {
print("Unable to open output audio file: \(error).")
}
audioMixer.installTap(onBus: 0, bufferSize: AVAudioFrameCount(audioMixer.outputFormat(forBus: 0).sampleRate), format: self.audioMixer.outputFormat(forBus: 0)) { buffer, when in
do {
if self.audioFile.length > self.newAudio.length{
try self.newAudio.write(from: buffer)
}
} catch let error {
print(error)
}
}
audioPlayerNode.play()
}
我尝试使用手动渲染模式。该应用程序可能在“尝试 outputFile.write(from: buffer)”时崩溃
var sourceFile = AVAudioFile()
var format = AVAudioFormat()
var engine = AVAudioEngine()
var player = AVAudioPlayerNode()
var reverb = AVAudioUnitReverb()
var outputFile = AVAudioFile()
override func viewDidLoad() {
super.viewDidLoad()
play()
}
func play(){
do {
let sourceFileURL = Bundle.main.url(forResource: "Audio_File_Name", withExtension: "mp3")!
sourceFile = try AVAudioFile(forReading: sourceFileURL)
format = sourceFile.processingFormat
} catch {
print("Unable to load the source audio file: \(error.localizedDescription).")
}
engine.attach(player)
engine.attach(reverb)
// Set the desired reverb parameters.
reverb.loadFactoryPreset(.mediumHall)
reverb.wetDryMix = 50
// Connect the nodes.
engine.connect(player, to: reverb, format: format)
engine.connect(reverb, to: engine.mainMixerNode, format: format)
// Schedule the source file.
player.scheduleFile(sourceFile, at: nil)
do {
// The maximum number of frames the engine renders in any single render call.
let maxFrames: AVAudioFrameCount = 4096
try engine.enableManualRenderingMode(.offline, format: format,
maximumFrameCount: maxFrames)
} catch {
print("Enabling manual rendering mode failed.")
}
do {
try engine.start()
player.play()
} catch {
fatalError("Unable to start audio engine: \(error).")
}
let buffer = AVAudioPCMBuffer(pcmFormat: engine.manualRenderingFormat,
frameCapacity: engine.manualRenderingMaximumFrameCount)!
do {
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let outputURL = documentsURL.appendingPathComponent("Rhythm-processed.caf")
outputFile = try AVAudioFile(forWriting: outputURL, settings: sourceFile.fileFormat.settings)
} catch {
print("Unable to open output audio file")
}
while engine.manualRenderingSampleTime < sourceFile.length {
do {
let frameCount = sourceFile.length - engine.manualRenderingSampleTime
let framesToRender = min(AVAudioFrameCount(frameCount), buffer.frameCapacity)
let status = try engine.renderOffline(framesToRender, to: buffer)
switch status {
case .success:
// The data rendered successfully. Write it to the output file.
try outputFile.write(from: buffer)
case .insufficientDataFromInputNode:
// Applicable only when using the input node as one of the sources.
break
case .cannotDoInCurrentContext:
// The engine couldn't render in the current render call.
// Retry in the next iteration.
break
case .error:
// An error occurred while rendering the audio.
fatalError("The manual rendering failed.")
}
} catch {
fatalError("The manual rendering failed: \(error).")
}
}
// Stop the player node and engine.
player.stop()
engine.stop()
}
}