//Sending Audio
func sendAudio {
audioEngine = AVAudioEngine()
//var socket: GCDAsyncUdpSocket!
let inputNode = audioEngine.inputNode
var error: NSError?
let bus = 0
DispatchQueue.global(qos: .background).async { [weak self] in
guard let self = self else { return }
do {
self.socket.setIPv4Enabled(true)
self.socket.setIPv6Enabled(false)
try self.socket.connect(toHost:"239.10.10.100" ?? "", onPort: 4545 ?? 0)
try self.socket.beginReceiving()
print("Socket started")
} catch {
print("Socket Started Error: \(error)")
}
}
inputNode.installTap(onBus: bus, bufferSize: 2048, format: inputNode.inputFormat(forBus: bus)) {
(buffer: AVAudioPCMBuffer!, time: AVAudioTime!) -> Void in
DispatchQueue.global(qos: .background).async { [weak self] in
guard let self = self else { return }
do {
let data = self.toNSData(PCMBuffer: buffer)
print(data)
self.socket.send(data, withTimeout: 0, tag: 0)
} catch {
print("Socket send Error: \(error)")
}
}
}
audioEngine.prepare()
do {
try audioEngine.start()
} catch {
print("Can't start the engine: \(error)")
}
}
func toNSData(PCMBuffer: AVAudioPCMBuffer) -> Data {
let channelCount = 1 // given PCMBuffer channel count is 1
let channels = UnsafeBufferPointer(start: PCMBuffer.floatChannelData, count: channelCount)
let ch0Data = NSData(bytes: channels[0], length:Int(PCMBuffer.frameCapacity * PCMBuffer.format.streamDescription.pointee.mBytesPerFrame) / 2)
return ch0Data as Data
}
// 收听音频
func udpSocket(_ sock: GCDAsyncUdpSocket, didReceive data: Data, fromAddress address: Data, withFilterContext filterContext: Any?) {
audioPlayer.scheduleBuffer(getComingAudio(with: data), completionHandler: nil)
}
func getComingAudio(with data: Data) -> AVAudioPCMBuffer {
let audioBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: UInt32(data.count) / 2)!
data.withUnsafeBytes { (bufferPointer: UnsafeRawBufferPointer) in
let int16Array = Array(bufferPointer.bindMemory(to: Int16.self))
let floatArray = int16Array.map { Float($0) / Float(Int16.max) }
floatArray.withUnsafeBufferPointer { audioBuffer.floatChannelData!.pointee.assign(from: $0.baseAddress!, count: floatArray.count) }
}
audioBuffer.frameLength = audioBuffer.frameCapacity
return audioBuffer
}
let channels = UnsafeBufferPointer(start: PCMBuffer.floatChannelData, count: channelCount)
但是在接收端,你将其解释为 Int16 数据:
let int16Array = Array(bufferPointer.bindMemory(to: Int16.self))
这至少是你问题的一部分。您还需要更广泛地确保两种格式相同。特别是具有不同的采样率并不罕见。但“静态”声音很可能是由于您的 float/int16 转换所致。我会更简单地开始您的项目,并将数据写入内存,然后在同一设备上播放。这将确保您的基本逻辑有效。