Playing WAV data with AVAudioEngine

Problem description

At the moment I'm getting an EXC_BAD_ACCESS error on the audio thread, and I'm trying to work out what is going wrong.

When converting .wav file data from Data to an AVAudioPCMBuffer, do I need to strip the RIFF header first?

import AVFoundation

public class Player : NSObject {
  let engine = AVAudioEngine()

  public override init() {
    super.init()
    do {
      let _ = engine.mainMixerNode
      try engine.start()
    } catch {
      print("Player error: \(error)")
    }
  }

  @objc public func play(_ data: Data) {
    let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 48000, channels: 2, interleaved: true)!
    let buffer = data.toPCMBuffer(format: format)!

    let player = AVAudioPlayerNode()
    engine.attach(player)
    engine.connect(player, to: engine.mainMixerNode, format: nil)

    player.scheduleBuffer(buffer, at: nil, completionCallbackType: .dataPlayedBack) {
      callbackType in
      // Nothing in here.
    }
    player.play()
  }
}

Here is the toPCMBuffer extension:

// Taken from: https://stackoverflow.com/a/52731480/2228559
extension Data {
  func toPCMBuffer(format: AVAudioFormat) -> AVAudioPCMBuffer? {
    let streamDesc = format.streamDescription.pointee
    let frameCapacity = UInt32(count) / streamDesc.mBytesPerFrame
    guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCapacity) else { return nil }

    buffer.frameLength = buffer.frameCapacity
    let audioBuffer = buffer.audioBufferList.pointee.mBuffers

    withUnsafeBytes { (addr: UnsafeRawBufferPointer) in
      // Copy the raw bytes into the buffer's audio memory.
      audioBuffer.mData?.copyMemory(from: addr.baseAddress!, byteCount: Int(audioBuffer.mDataByteSize))
    }

    return buffer
  }
}

Note: I can't use AVAudioFile because the .wav file data is loaded over the wire.
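
If the bytes really are a complete .wav file rather than bare PCM, the RIFF header (and any other chunk before the "data" chunk) would otherwise be copied into the buffer as if it were sample data. A minimal sketch of locating the "data" chunk payload, assuming a canonical little-endian RIFF/WAVE layout and zero-based indices (wavPCMPayload is a hypothetical helper, not part of the code above):

extension Data {
  // Hypothetical helper: walks the RIFF chunks of a canonical little-endian
  // WAV blob and returns only the "data" chunk payload, or nil if not found.
  // Assumes this Data is not a slice (indices start at 0).
  func wavPCMPayload() -> Data? {
    // Read a little-endian UInt32 without relying on alignment.
    func uint32LE(at offset: Int) -> UInt32 {
      UInt32(self[offset])
        | UInt32(self[offset + 1]) << 8
        | UInt32(self[offset + 2]) << 16
        | UInt32(self[offset + 3]) << 24
    }

    // 12-byte RIFF header: "RIFF" + overall size + "WAVE".
    guard count >= 12,
          self[0...3].elementsEqual("RIFF".utf8),
          self[8...11].elementsEqual("WAVE".utf8) else { return nil }

    var offset = 12
    while offset + 8 <= count {
      let chunkID = self[offset..<offset + 4]
      let chunkSize = Int(uint32LE(at: offset + 4))
      let body = offset + 8
      if chunkID.elementsEqual("data".utf8), body + chunkSize <= count {
        return self[body..<body + chunkSize]
      }
      // Chunks are word-aligned; odd-sized chunks carry one padding byte.
      offset = body + chunkSize + (chunkSize & 1)
    }
    return nil
  }
}

The result (or the original Data, if parsing fails) could then be handed to toPCMBuffer in place of the raw download.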

objective-c swift macos avfoundation core-audio
1 Answer

IDK, but my Mac crashes if I play interleaved AVAudioPCMBuffers, and the output is garbled if they aren't float data, so you can convert to non-interleaved float data:

@objc public func play(_ data: Data) {
    let sampleRate: Double = 48000

    let interleavedFormat = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: sampleRate, channels: 2, interleaved: true)!
    let interleavedBuffer = data.toPCMBuffer(format: interleavedFormat)!

    let nonInterleavedFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: sampleRate, channels: 2, interleaved: false)!
    let nonInterleavedBuffer = AVAudioPCMBuffer(pcmFormat: nonInterleavedFormat, frameCapacity: interleavedBuffer.frameCapacity)!
    nonInterleavedBuffer.frameLength = interleavedBuffer.frameLength

    let converter = AVAudioConverter(from: interleavedFormat, to: nonInterleavedFormat)!
    try! converter.convert(to: nonInterleavedBuffer, from: interleavedBuffer)

    let player = AVAudioPlayerNode()

    engine.attach(player)
    engine.connect(player, to: engine.mainMixerNode, format: nil)

    player.scheduleBuffer(nonInterleavedBuffer, at: nil, completionCallbackType: .dataPlayedBack) {
        callbackType in
        // Nothing in here.
    }

    player.play()
}

extension Data {
    func toPCMBuffer(format: AVAudioFormat) -> AVAudioPCMBuffer? {
        assert(format.isInterleaved)

        let streamDesc = format.streamDescription.pointee
        let frameCapacity = UInt32(count) / streamDesc.mBytesPerFrame
        guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCapacity) else { return nil }

        buffer.frameLength = buffer.frameCapacity

        let b = UnsafeMutableBufferPointer(start: buffer.int16ChannelData![0], count: buffer.stride * Int(frameCapacity))
        let bytesCopied = self.copyBytes(to: b)
        assert(bytesCopied == count)

        return buffer
    }
}
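
For completeness, a hedged usage sketch of the over-the-wire case, assuming the bytes arrive via URLSession (the URL is a placeholder):

let player = Player()
let url = URL(string: "https://example.com/audio.wav")! // placeholder URL

URLSession.shared.dataTask(with: url) { data, _, error in
    guard let data = data, error == nil else { return }
    DispatchQueue.main.async {
        player.play(data) // schedules the buffer on the already-running engine
    }
}.resume()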