在带有音频单元的 iOS 中同时录制和添加音频效果

问题描述 投票:0回答:0

我在以下上下文中从 AudioUnitRender 收到 OSStatus 错误 -1。

我的项目唯一的主要区别是我还使用远程 I/O 单元进行音频输出。音频输出工作正常。这是我的输入回调和我的初始化代码(为简洁起见删除了错误检查)。

我有一个 AudioUnit,对应的回调正常工作,但是现在,我需要将它发送到 RemoteIO,因为我正在实现一些需要 RemoteIO AudioUnit 才能工作的框架。 我有两个音频单元。

然后...我需要与此 audiounit Reverb2(或混音器)获得相同的输出,但使用类型为 kAudioUnitSubType_RemoteIO 的另一个 audiounit。

我在没有 AU Graph 或 AVEngine 的情况下将音频单元连接在一起。

我想在带有音频单元的 iOS 中同时录制和添加音频效果。

AudioUnitRender() 函数出错

throwing -10877 ("kAudioUnitErr_InvalidElement")

from AU (0x7fb6de70af30): aufx/rvb2/appl, render err: -1

初始化:

// --- RemoteIO unit setup ---
// Describe the RemoteIO audio unit (handles both hardware input and output).
var desc = AudioComponentDescription()
        desc.componentType = kAudioUnitType_Output
        desc.componentSubType = kAudioUnitSubType_RemoteIO
        desc.componentFlags = 0
        desc.componentFlagsMask = 0
        desc.componentManufacturer = kAudioUnitManufacturer_Apple
        guard let inputComponent = AudioComponentFindNext(nil, &desc) else {
            return
        }
        var maybeAudioUnit: AudioUnit? = nil
        AudioComponentInstanceNew(inputComponent, &maybeAudioUnit)
        
        guard let audioUnit = maybeAudioUnit else {
            return
        }
        
        // Enable recording on the input element (bus 1 = microphone side of RemoteIO).
        var one: UInt32 = 1
        guard AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, 4) == noErr else {
            AudioComponentInstanceDispose(audioUnit)
            return
        }

        // Set the client-side stream format (48 kHz) on both sides we touch:
        // output scope of bus 1 (data coming out of the mic element) and
        // input scope of bus 0 (data going into the speaker element).
        var audioStreamDescription = audioRecorderNativeStreamDescription(sampleRate: 48000)
        guard AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &audioStreamDescription, UInt32(MemoryLayout<AudioStreamBasicDescription>.size)) == noErr else {
            AudioComponentInstanceDispose(audioUnit)
            return
        }
        
        guard AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &audioStreamDescription, UInt32(MemoryLayout<AudioStreamBasicDescription>.size)) == noErr else {
            AudioComponentInstanceDispose(audioUnit)
            return
        }
        
        // Install the input callback; refCon smuggles the recorder id as a pointer.
        var callbackStruct = AURenderCallbackStruct()
        callbackStruct.inputProc = rendererInputProc
        callbackStruct.inputProcRefCon = UnsafeMutableRawPointer(bitPattern: intptr_t(self.id))
        guard AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, 0, &callbackStruct, UInt32(MemoryLayout<AURenderCallbackStruct>.size)) == noErr else {
            AudioComponentInstanceDispose(audioUnit)
            return
        }
        
        // NOTE(review): this variable is named `zero` but holds 1, so the unit
        // WILL allocate its own render buffers. If the intent was to disable
        // allocation (because the callback supplies its own malloc'd buffer),
        // the value should actually be 0 — confirm which behavior is wanted.
        var zero: UInt32 = 1
        guard AudioUnitSetProperty(audioUnit, kAudioUnitProperty_ShouldAllocateBuffer, kAudioUnitScope_Output, 0, &zero, 4) == noErr else {
            AudioComponentInstanceDispose(audioUnit)
            return
        }
        
        guard AudioUnitInitialize(audioUnit) == noErr else {
            AudioComponentInstanceDispose(audioUnit)
            return
        }
        
        // --- Reverb2 effect unit setup ---
        var reverbcd = AudioComponentDescription()
        reverbcd.componentType = kAudioUnitType_Effect
        reverbcd.componentSubType = kAudioUnitSubType_Reverb2
        reverbcd.componentManufacturer = kAudioUnitManufacturer_Apple
        
        guard let inputComponent2 = AudioComponentFindNext(nil, &reverbcd) else {
            return
        }
        var maybeAudioUnit2: AudioUnit? = nil
        AudioComponentInstanceNew(inputComponent2, &maybeAudioUnit2)
        
        guard let audioUnitEffect = maybeAudioUnit2 else {
            return
        }

        let err = AudioUnitSetParameter(audioUnitEffect, kReverb2Param_Gain, kAudioUnitScope_Global, 0, 20, 0);
        guard err == noErr else {
            print(err)
            AudioComponentInstanceDispose(audioUnitEffect)
            return
        }
        
        let err1 = AudioUnitInitialize(audioUnitEffect)
        guard err1 == noErr else {
            print(err1)
            AudioComponentInstanceDispose(audioUnitEffect)
            return
        }
        
        // Wire the effect's output (element 0) into the RemoteIO input (bus 0).
        // NOTE(review): this connection is made AFTER AudioUnitInitialize(audioUnit);
        // connections are normally established before initializing the destination
        // unit (or the unit must be re-initialized afterwards) — verify.
        // NOTE(review): nothing feeds the Reverb2 unit's input — it has neither a
        // render callback nor an upstream connection, so rendering from it will
        // fail. Presumably the mic samples should be its input; confirm the intended
        // pull chain (mic → reverb → output).
        var connection = AudioUnitConnection()
        connection.destInputNumber = 0
        connection.sourceAudioUnit = audioUnitEffect
        connection.sourceOutputNumber = 0
        let osstatus = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_MakeConnection, kAudioUnitScope_Input, 0, &connection, UInt32(MemoryLayout<AudioUnitConnection>.size))
        guard  osstatus == noErr else {
            print(osstatus)
            AudioComponentInstanceDispose(audioUnitEffect)
            return
        }

并使用 AudioOutputUnitStart(audioUnit) 开始录制。这是回调方法。

输入回调:

/// Input render callback installed on the RemoteIO unit.
///
/// Pulls the captured microphone frames from the I/O unit, then renders the
/// reverb effect unit, and finally hands the buffer off to the recorder
/// context on a background queue.
///
/// - Parameters:
///   - refCon: Recorder id smuggled in as a pointer bit pattern.
///   - ioActionFlags: Render action flags forwarded from Core Audio.
///   - inTimeStamp: Timestamp of this render cycle.
///   - inBusNumber: Bus the callback fired for (input element 1 of RemoteIO).
///   - inNumberFrames: Number of frames available this cycle.
///   - ioData: Unused; we render into our own buffer list.
/// - Returns: Always `noErr`; per-unit render errors are logged instead.
private func rendererInputProc(refCon: UnsafeMutableRawPointer, ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>, inTimeStamp: UnsafePointer<AudioTimeStamp>, inBusNumber: UInt32, inNumberFrames: UInt32, ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
    let id = Int32(intptr_t(bitPattern: refCon))
    
    withAudioUnitHolder(id, { (holder, queue, holderEffect) in
        // Mono, 16-bit samples: 2 bytes per frame. Ownership of mData passes to
        // the recorder context (or is freed below on failure).
        var buffer = AudioBuffer()
        buffer.mNumberChannels = 1;
        buffer.mDataByteSize = inNumberFrames * 2;
        buffer.mData = malloc(Int(inNumberFrames) * 2)
        
        var bufferList = AudioBufferList(mNumberBuffers: 1, mBuffers: buffer)
        
        var status = noErr
        holder.with { audioUnit in
            if let audioUnit = audioUnit {
                // Pull mic samples from the I/O unit on the bus the callback fired for.
                status = AudioUnitRender(audioUnit, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, &bufferList)
            } else {
                status = kAudioUnitErr_FailedInitialization
            }
        }
        var status2 = noErr
        holderEffect.with { audioUnitEffect in
            if let audioUnitEffect = audioUnitEffect {
                // FIX: Reverb2 is an effect unit with a single element (0).
                // Forwarding the I/O unit's input bus number (1) here is what
                // produced kAudioUnitErr_InvalidElement (-10877); an effect
                // must always be rendered on its own output element 0.
                status2 = AudioUnitRender(audioUnitEffect, ioActionFlags, inTimeStamp, 0, inNumberFrames, &bufferList)
            } else {
                status2 = kAudioUnitErr_FailedInitialization
            }
        }
        print(status2)
        if status == noErr {
            queue.async {
                withAudioRecorderContext(id, { context in
                    if let context = context {
                        // Context takes ownership and frees mData when done.
                        context.processAndDisposeAudioBuffer(buffer)
                    } else {
                        free(buffer.mData)
                    }
                })
            }
        } else {
            free(buffer.mData)
            Logger.shared.log("ManagedAudioRecorder", "AudioUnitRender returned \(status)")
        }
    })
    
    return noErr
}

原始源代码

有人可以指导我如何将 RemoteIO (kAudioUnitType_Output) 单元与效果器 (kAudioUnitType_Effect) 连接起来,以播放具有效果的音频文件。

ios iphone core-audio audiounit
© www.soinside.com 2019 - 2024. All rights reserved.