Audio processing with AVAudioEngine: the captured audio is converted a second time so that its format meets the requirements of the downstream data-processing stage. The MicrophoneAudioStream class below taps the microphone input and resamples each buffer to 16 kHz mono Float32.
import AVFoundation
import Foundation

class MicrophoneAudioStream {

    private var audioEngine: AVAudioEngine!
    var audioInputNode: AVAudioInputNode!
    private var audioFile: AVAudioFile?

    // Closure type for handling converted audio buffer data.
    typealias AudioBufferHandler = (AVAudioPCMBuffer, AVAudioTime) -> Void

    init() {
        setupAudioEngine()
    }
    private func setupAudioEngine() {
        audioEngine = AVAudioEngine()
        audioInputNode = audioEngine.inputNode
        let audioFormat = audioInputNode.inputFormat(forBus: 0)
        guard let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else {
            return
        }
        let audioFileURL = documentsDirectory.appendingPathComponent("audio.wav")
        // Create the AVAudioFile object; it uses the input node's recording format.
        do {
            audioFile = try AVAudioFile(forWriting: audioFileURL,
                                        settings: audioFormat.settings,
                                        commonFormat: audioFormat.commonFormat,
                                        interleaved: audioFormat.isInterleaved)
        } catch {
            print("Error creating audio file: \(error.localizedDescription)")
        }
    }
    func start() {
        // Start the audio engine.
        do {
            try audioEngine.start()
        } catch {
            print("Error starting audio engine: \(error.localizedDescription)")
        }
    }
    // Public entry point: takes a closure that receives each converted buffer.
    func startAudioStream(withBufferHandler handler: @escaping AudioBufferHandler) {
        audioInputNode = audioEngine.inputNode
        // Stop the engine and remove any existing tap before changing the audio format.
        audioEngine.stop()
        audioInputNode.removeTap(onBus: 0)
        let inputNodeOutputFormat = audioInputNode.outputFormat(forBus: 0)
        // Target format for downstream processing: 16 kHz, mono, Float32.
        guard let targetFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                                               sampleRate: 16000,
                                               channels: 1,
                                               interleaved: false),
              let formatConverter = AVAudioConverter(from: inputNodeOutputFormat, to: targetFormat) else {
            print("Error creating target format or format converter")
            return
        }
        audioInputNode.installTap(onBus: 0, bufferSize: 1024, format: inputNodeOutputFormat) { (buffer, time) in
            // Output buffer sized for up to 100 ms at the target sample rate.
            guard let pcmBuffer = AVAudioPCMBuffer(pcmFormat: targetFormat,
                                                   frameCapacity: AVAudioFrameCount(targetFormat.sampleRate) / 10) else {
                return
            }
            var error: NSError? = nil
            let inputBlock: AVAudioConverterInputBlock = { inNumPackets, outStatus in
                outStatus.pointee = .haveData
                return buffer
            }
            let status = formatConverter.convert(to: pcmBuffer, error: &error, withInputFrom: inputBlock)
            if status == .error || error != nil {
                print("Error converting audio buffer: \(error?.localizedDescription ?? "unknown error")")
                return
            }
            handler(pcmBuffer, time)
        }
        // Start the audio engine.
        do {
            audioEngine.prepare()
            try audioEngine.start()
        } catch {
            print("Error starting audio engine: \(error.localizedDescription)")
        }
    }
    func stopAudioEngine() {
        audioEngine.stop()
        audioInputNode.removeTap(onBus: 0)
    }
    private func writeAudioBuffer(_ audioBuffer: AVAudioPCMBuffer) {
        // Write the audio data to the file (the buffer must match the file's input format).
        if let audioFile = audioFile {
            do {
                try audioFile.write(from: audioBuffer)
            } catch {
                print("Error writing to audio file: \(error.localizedDescription)")
            }
        }
    }
}
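A minimal usage sketch follows, assuming microphone permission has already been granted; the RecorderViewModel type and its method names are illustrative and not part of the class above. It starts the stream with a handler that receives the converted 16 kHz mono Float32 buffers and reads the raw samples for downstream processing.

import AVFoundation

// Illustrative wrapper type; only MicrophoneAudioStream comes from the listing above.
final class RecorderViewModel {

    private let microphoneStream = MicrophoneAudioStream()

    func beginRecording() {
        microphoneStream.startAudioStream { convertedBuffer, time in
            // Each buffer arrives as 16 kHz, mono, Float32.
            let frameCount = Int(convertedBuffer.frameLength)
            guard let channelData = convertedBuffer.floatChannelData?[0] else { return }
            let samples = Array(UnsafeBufferPointer(start: channelData, count: frameCount))
            // Hand the samples to the data-processing pipeline here.
            print("Received \(samples.count) samples at host time \(time.hostTime)")
        }
    }

    func endRecording() {
        microphoneStream.stopAudioEngine()
    }
}

Converting to a fixed 16 kHz mono format inside the tap keeps the handler independent of whatever hardware sample rate the input node reports.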