I'm trying to compile and run volume-level metering using AVCaptureDevice and friends, but the values look random and I keep hitting overflow errors.
Edit:
Is it also normal for the RMS to range from 0 to around 20000?
if let audioCaptureDevice : AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio) {
    try audioCaptureDevice.lockForConfiguration()
    let audioInput = try AVCaptureDeviceInput(device: audioCaptureDevice)
    audioCaptureDevice.unlockForConfiguration()

    if captureSession.canAddInput(audioInput) {
        captureSession.addInput(audioInput)
        print("added input")
    }

    let audioOutput = AVCaptureAudioDataOutput()
    audioOutput.setSampleBufferDelegate(self, queue: GlobalUserInitiatedQueue)

    if captureSession.canAddOutput(audioOutput) {
        captureSession.addOutput(audioOutput)
        print("added output")
    }

    // supposed to start session not on UI queue because it takes a while
    dispatch_async(GlobalUserInitiatedQueue) {
        print("starting captureSession")
        self.captureSession.startRunning()
    }
}
...
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    // Needs to be initialized somehow, even if we take only the address
    var audioBufferList = AudioBufferList(mNumberBuffers: 1,
        mBuffers: AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil))
    // this needs to be declared in the method, otherwise it only runs 125 times?
    var blockBuffer: CMBlockBuffer?

    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer,
        nil,
        &audioBufferList,
        sizeof(audioBufferList.dynamicType),
        nil,
        nil,
        UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
        &blockBuffer
    )

    let abl = UnsafeMutableAudioBufferListPointer(&audioBufferList)
    for buffer in abl {
        let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(buffer.mData),
            count: Int(buffer.mDataByteSize) / sizeof(Int16))
        var sum: Int = 0
        for sample in samples {
            sum = sum + Int(sample * sample)
        }
        let rms = sqrt(Double(sum) / Double(samples.count))
    }
}
It looks like I've got it working. I cast each sample to Int64 before doing any arithmetic.
for buffer in abl {
    let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(buffer.mData),
        count: Int(buffer.mDataByteSize) / sizeof(Int16))
    var sum: Int64 = 0
    for sample in samples {
        let s = Int64(sample)
        sum += s * s
    }
    dispatch_async(dispatch_get_main_queue()) {
        self.volLevel.text = String(sqrt(Float(sum / Int64(samples.count))))
    }
}
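A note on the RMS range from the edit above (my own back-of-the-envelope reasoning, not from the original post): samples are Int16, so full scale is 32767, and a full-scale sine has an RMS of roughly 32767/√2 ≈ 23170. Raw RMS values between 0 and ~20000 are therefore expected. If a normalized reading is more convenient, the RMS can be converted to dBFS; a minimal sketch, assuming rms is the value computed in the loop above and rmsToDecibels is just an illustrative helper name:
import Foundation

// Convert a raw RMS of Int16 samples to dBFS (0 dB = full scale, negative = quieter).
// Hypothetical helper for illustration; returns a floor value for silence to avoid log10(0).
func rmsToDecibels(rms: Float) -> Float {
    guard rms > 0 else { return -160.0 }
    let fullScale = Float(Int16.max)        // 32767 for 16-bit audio
    return 20.0 * log10(rms / fullScale)
}
With this scaling a full-scale sine reads about -3 dB and silence returns the floor value.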
I played around with your example. Here is a complete Swift 2 snippet:
// also define a variable in class scope, otherwise captureOutput will not be called
var session : AVCaptureSession!

func startCapture() {
    if let device : AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio) {
        do {
            self.session = AVCaptureSession()

            try device.lockForConfiguration()
            let audioInput = try AVCaptureDeviceInput(device: device)
            device.unlockForConfiguration()

            if self.session.canAddInput(audioInput) {
                self.session.addInput(audioInput)
                print("added input")
            }

            let audioOutput = AVCaptureAudioDataOutput()
            audioOutput.setSampleBufferDelegate(self, queue: dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0))

            if self.session.canAddOutput(audioOutput) {
                self.session.addOutput(audioOutput)
                print("added output")
            }

            // supposed to start session not on UI queue because it takes a while
            dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)) {
                print("starting captureSession")
                self.session.startRunning()
            }
        } catch {
        }
    }
}
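For context (not part of the original answer), a hedged sketch of how startCapture might be driven, assuming the class above is a UIViewController subclass that conforms to AVCaptureAudioDataOutputSampleBufferDelegate:
override func viewDidLoad() {
    super.viewDidLoad()
    startCapture()                 // build the session and start it off the main queue
}

override func viewWillDisappear(animated: Bool) {
    super.viewWillDisappear(animated)
    session?.stopRunning()         // stop delivering sample buffers when the view goes away
}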
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    var buffer: CMBlockBuffer? = nil

    // Needs to be initialized somehow, even if we take only the address
    var audioBufferList = AudioBufferList(mNumberBuffers: 1,
        mBuffers: AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil))

    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer,
        nil,
        &audioBufferList,
        sizeof(audioBufferList.dynamicType),
        nil,
        nil,
        UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
        &buffer
    )

    let abl = UnsafeMutableAudioBufferListPointer(&audioBufferList)
    for buffer in abl {
        let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(buffer.mData),
            count: Int(buffer.mDataByteSize) / sizeof(Int16))
        var sum: Int64 = 0
        for sample in samples {
            let s = Int64(sample)
            sum = sum + s * s
        }
        dispatch_async(dispatch_get_main_queue()) {
            print(String(sqrt(Float(sum / Int64(samples.count)))))
        }
    }
}
The AVCaptureAudioDataOutputSampleBufferDelegate method
captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!)
hands you an AVCaptureConnection as its last parameter. From connection.audioChannels you get the AVCaptureAudioChannel objects, and each of them exposes the volume levels directly:
audioChannel.averagePowerLevel
audioChannel.peakHoldLevel
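A minimal sketch of that approach (averagePowerLevel and peakHoldLevel are the AVCaptureAudioChannel properties named above; the defensive cast and the print are only illustration):
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    for channel in connection.audioChannels {
        if let audioChannel = channel as? AVCaptureAudioChannel {
            // Both values are in decibels: 0 dB is full scale, quieter audio is negative.
            print("average: \(audioChannel.averagePowerLevel) dB, peak hold: \(audioChannel.peakHoldLevel) dB")
        }
    }
}
No manual walking of the audio buffers is needed for a simple level meter with this approach.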