How to correctly convert an .m4a file to a .caf file in Swift


I have an audio file with the .m4a extension that I want to convert to a .caf file. I have a working function ready for this, and it works fine. However, when the audio is edited in code (changing speed, pitch, echo and reverb), it returns an error stating:

"failed Optional(Error Domain=AVFoundationErrorDomain Code=-11800 "操作无法完成" UserInfo={NSLocalizedFailureReason=发生未知错误(-12780),NSLocalizedDescription=操作无法完成,NSUnderlyingError=0x600001878a80 {Error Domain =NSOSStatusErrorDomain Code=-12780 "(null)"}})"

The function used for the conversion is:

import AVFoundation

func convertAudioFileToCaf(inputURL: URL, outputURL: URL, completion: @escaping (Bool) -> Void) {
    let asset = AVURLAsset(url: inputURL, options: nil)
    // Pass-through export: keep the original audio data and only change the container to .caf.
    guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetPassthrough) else {
        completion(false)
        return
    }
    exportSession.outputURL = outputURL
    exportSession.outputFileType = AVFileType.caf
    exportSession.exportAsynchronously {
        switch exportSession.status {
        case .completed:
            completion(true)
        case .failed:
            print("failed \(String(describing: exportSession.error))")
            completion(false)
        case .cancelled:
            print("cancelled \(String(describing: exportSession.error))")
            completion(false)
        default:
            break
        }
    }
}
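
For context, this is roughly how the function is called. The snippet below is only a minimal sketch: documentsURL, the source m4aURL and the "converted.caf" file name stand in for values that come from elsewhere in my code.

// Minimal sketch of a call site (documentsURL, m4aURL and the output name are placeholders).
let cafURL = documentsURL.appendingPathComponent("converted.caf")
convertAudioFileToCaf(inputURL: m4aURL, outputURL: cafURL) { success in
    guard success else { return }
    self.urlSong = cafURL   // play() below reads the .caf file from urlSong
    self.play()
}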

The file plays fine, but the error appears when it is edited. The function that adds the effects:

func play() {
    do {
        guard let url = urlSong else { return }
        // URL of the .caf file produced by convertAudioFileToCaf.
        let sourceFileURL = url
        sourceFile = try AVAudioFile(forReading: sourceFileURL)
        format = sourceFile.processingFormat
    } catch {
        print("Unable to load the source audio file: \(error.localizedDescription).")
    }
    // Attach the player and effect nodes to the engine.
    engine.attach(player)
    engine.attach(reverb)
    engine.attach(playbackRateEffect)
    engine.attach(pitchEffect)
    // Set the desired reverb parameters.
    reverb.loadFactoryPreset(.cathedral)
    reverb.wetDryMix = 50
    // Playback rate (speed).
    playbackRateEffect.rate = 2
    // Pitch.
    pitchEffect.rate = 1
    pitchEffect.pitch = -1500
    // Echo
//    echoEffect.delayTime = TimeInterval(60)
//    engine.attach(echoEffect)
    // Connect the nodes: player -> rate -> pitch -> reverb -> main mixer.
    engine.connect(player, to: playbackRateEffect, format: sourceFile.processingFormat)
    engine.connect(playbackRateEffect, to: pitchEffect, format: sourceFile.processingFormat)
//    engine.connect(pitchEffect, to: echoEffect, format: sourceFile.processingFormat)
    engine.connect(pitchEffect, to: reverb, format: sourceFile.processingFormat)
    engine.connect(reverb, to: engine.mainMixerNode, format: sourceFile.processingFormat)
    player.scheduleFile(sourceFile, at: nil)
    do {
        // Render offline instead of to the hardware.
        let maxFrames: AVAudioFrameCount = 4096
        try engine.enableManualRenderingMode(.offline, format: format,
                                             maximumFrameCount: maxFrames)
    } catch {
        print("Enabling manual rendering mode failed.")
    }
    do {
        try engine.start()
        player.play()
    } catch {
        fatalError("Unable to start audio engine: \(error).")
    }
    // Buffer the engine renders into on each pass.
    let buffer = AVAudioPCMBuffer(pcmFormat: engine.manualRenderingFormat,
                                  frameCapacity: engine.manualRenderingMaximumFrameCount)!
    do {
        let outputURL = documentsURL.appendingPathComponent("effectedSound\(Int(Date().timeIntervalSince1970)).caf")
        outputFile = try AVAudioFile(forWriting: outputURL, settings: sourceFile.fileFormat.settings)
        newCafUrl = outputURL
        print(outputURL)
    } catch {
        print("Unable to open output audio file")
    }
    // Pull rendered audio from the engine until the whole source file has been processed.
    while engine.manualRenderingSampleTime < sourceFile.length {
        do {
            let frameCount = sourceFile.length - engine.manualRenderingSampleTime
            let framesToRender = min(AVAudioFrameCount(frameCount), buffer.frameCapacity)
            let status = try engine.renderOffline(framesToRender, to: buffer)
            if status == .success {
                try outputFile.write(from: buffer)
                // Peek at the last rendered sample and stop once the tail is effectively silent.
                let dataptrptr = buffer.floatChannelData!
                let dataptr = dataptrptr.pointee
                let datum = abs(dataptr[Int(buffer.frameLength) - 1])
                print(datum)
                if datum < 0.00001 {
                    break
                }
            } else {
                print("Error while writing file")
            }
        } catch {
            fatalError("The manual rendering failed: \(error).")
        }
    }
    // Convert the rendered .caf back to .m4a.
    let outM4aURL = documentsURL.appendingPathComponent("equalized\(Int(Date().timeIntervalSince1970)).m4a")
    convertAudioFileToM4a(inputURL: newCafUrl!, outputURL: outM4aURL) { success in
        print(outM4aURL)
    }
    // Stop the player node and engine.
    player.stop()
    engine.stop()
}
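
The convertAudioFileToM4a function called at the end is the counterpart of convertAudioFileToCaf. A minimal sketch, assuming it simply exports with the Apple M4A preset (the preset and output file type here are assumptions, not code from the project):

// Sketch of convertAudioFileToM4a, assuming it mirrors convertAudioFileToCaf
// but re-encodes the audio as AAC in an .m4a container.
func convertAudioFileToM4a(inputURL: URL, outputURL: URL, completion: @escaping (Bool) -> Void) {
    let asset = AVURLAsset(url: inputURL, options: nil)
    guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A) else {
        completion(false)
        return
    }
    exportSession.outputURL = outputURL
    exportSession.outputFileType = .m4a
    exportSession.exportAsynchronously {
        completion(exportSession.status == .completed)
    }
}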
ios swift core-audio file-conversion avaudiofile