使用 Audio Unit 采集音频主要有以下三个步骤
1 配置AudioStreamBasicDescription
// Core Audio's universal audio format descriptor (ASBD).
// Note the trailing semicolon — the original listing omitted it,
// which would not compile as C.
struct AudioStreamBasicDescription
{
    Float64          mSampleRate;       // frames per second, e.g. 44100.0
    AudioFormatID    mFormatID;         // codec identifier, e.g. kAudioFormatLinearPCM
    AudioFormatFlags mFormatFlags;      // format-specific flags (endianness, packing, ...)
    UInt32           mBytesPerPacket;   // bytes in one packet (0 if variable)
    UInt32           mFramesPerPacket;  // frames in one packet (1 for uncompressed PCM)
    UInt32           mBytesPerFrame;    // bytes in one frame (0 if variable/compressed)
    UInt32           mChannelsPerFrame; // number of interleaved channels
    UInt32           mBitsPerChannel;   // bit depth per channel, e.g. 16
    UInt32           mReserved;         // pads the struct to an 8-byte boundary; always 0
};
#pragma mark - ASBD Audio Format
/// Fills `audioFormat` with an AudioStreamBasicDescription built from the
/// supplied parameters. For linear PCM the packing fields are derived from
/// the bit depth and channel count.
/// @param audioFormat  Out parameter receiving the configured ASBD.
/// @param formatID     Audio format identifier, e.g. kAudioFormatLinearPCM.
/// @param sampleRate   Desired sample rate in Hz (applied as-is).
/// @param channelCount Desired number of channels per frame (applied as-is).
-(void)configureAudioToAudioFormat:(AudioStreamBasicDescription *)audioFormat
                   byParamFormatID:(UInt32)formatID
                        sampleRate:(Float64)sampleRate
                      channelCount:(UInt32)channelCount {
    AudioStreamBasicDescription dataFormat = {0};
    UInt32 size = sizeof(dataFormat.mSampleRate);
    // Query the current hardware sample rate for reference. The result is
    // currently unused — the caller-supplied rate is applied below.
    // NOTE(review): AudioSessionGetProperty is deprecated; consider
    // [AVAudioSession sharedInstance].sampleRate instead.
    Float64 hardwareSampleRate = 0;
    AudioSessionGetProperty(kAudioSessionProperty_CurrentHardwareSampleRate,
                            &size,
                            &hardwareSampleRate);
    // Manually set the sample rate requested by the caller.
    dataFormat.mSampleRate = sampleRate;

    size = sizeof(dataFormat.mChannelsPerFrame);
    // Query the hardware input channel count for reference; also unused here —
    // the caller-supplied channel count is applied below.
    UInt32 hardwareNumberChannels = 0;
    AudioSessionGetProperty(kAudioSessionProperty_CurrentHardwareInputNumberChannels,
                            &size,
                            &hardwareNumberChannels);
    dataFormat.mChannelsPerFrame = channelCount;

    dataFormat.mFormatID = formatID;
    if (formatID == kAudioFormatLinearPCM) {
        // Signed-integer, packed samples; derive byte layout from bit depth.
        dataFormat.mFormatFlags    = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
        dataFormat.mBitsPerChannel = KXDXAudioBitsPerChannel;
        // For packed interleaved PCM: bytes/frame = (bits/8) * channels,
        // and one packet holds kXDXAudioPCMFramesPerPacket frames.
        dataFormat.mBytesPerPacket = dataFormat.mBytesPerFrame = (dataFormat.mBitsPerChannel / 8) * dataFormat.mChannelsPerFrame;
        dataFormat.mFramesPerPacket = kXDXAudioPCMFramesPerPacket;
    }

    // Copy the completed description into the caller's out parameter.
    memcpy(audioFormat, &dataFormat, sizeof(dataFormat));
    // %u, not %d: channelCount is an unsigned 32-bit value.
    NSLog(@"%@: %s - sample rate:%f, channel count:%u",kModuleName, __func__,sampleRate,channelCount);
}
步骤2 设置 I/O Buffer Duration(每次采集回调的时间间隔)
The typical maximum I/O buffer duration is 0.93 s (corresponding to 4096 sample frames at a sample rate of 44.1 kHz). The minimum I/O buffer duration is at least 0.005 s (256 frames) but might be lower depending on the hardware in use.
You can set a preferred I/O buffer duration before or after activating the audio session.
范围在0.005s 至0.93s之间
// Set the preferred I/O buffer duration (the capture callback interval),
// valid range roughly 0.005 s – 0.93 s depending on hardware.
[[AVAudioSession sharedInstance] setPreferredIOBufferDuration:durationSec error:NULL];
步骤3 配置Audio Unit
3.1 创建Audio Unit
/// Creates and returns a new VoiceProcessingIO audio unit instance,
/// or NULL if the component could not be found or instantiated.
- (AudioUnit)createAudioUnitObject {
    AudioUnit audioUnit = NULL;  // initialized so a failure path never returns garbage
    AudioComponentDescription audioDesc;
    audioDesc.componentType         = kAudioUnitType_Output;
    // VoiceProcessingIO adds echo cancellation / noise suppression;
    // use kAudioUnitSubType_RemoteIO for plain I/O without processing.
    audioDesc.componentSubType      = kAudioUnitSubType_VoiceProcessingIO;
    audioDesc.componentManufacturer = kAudioUnitManufacturer_Apple;
    audioDesc.componentFlags        = 0;
    audioDesc.componentFlagsMask    = 0;

    AudioComponent inputComponent = AudioComponentFindNext(NULL, &audioDesc);
    // AudioComponentFindNext returns NULL when no matching component exists;
    // passing NULL on to AudioComponentInstanceNew would be an error.
    if (inputComponent == NULL) {
        NSLog(@"%@: %s - find audio component failed \n", kModuleName, __func__);
        return NULL;
    }

    OSStatus status = AudioComponentInstanceNew(inputComponent, &audioUnit);
    if (status != noErr) {
        NSLog(@"%@: %s - create audio unit failed, status : %d \n", kModuleName, __func__, status);
        return NULL;
    }
    return audioUnit;
}
3.2 设置AudioBufferList
/// Disables the audio unit's own input-bus buffer allocation and allocates
/// our own single-buffer AudioBufferList (stored in m_buffList) that the
/// render callback fills on every capture cycle.
/// @param audioUnit    The capture audio unit to configure.
/// @param channelCount Interleaved channel count for the buffer, e.g. 1.
/// @param dataByteSize Size of the capture buffer in bytes, e.g. 2048.
- (void)initCaptureAudioBufferWithAudioUnit:(AudioUnit)audioUnit channelCount:(int)channelCount dataByteSize:(int)dataByteSize {
    // Disable AU buffer allocation for the recorder; we allocate our own.
    UInt32 flag = 0;
    OSStatus status = AudioUnitSetProperty(audioUnit,
                                           kAudioUnitProperty_ShouldAllocateBuffer,
                                           kAudioUnitScope_Output,
                                           INPUT_BUS,
                                           &flag,
                                           sizeof(flag));
    if (status != noErr) {
        // Original log text said "couldn't allocate buffer" — this call
        // actually *disables* the AU's allocation.
        NSLog(@"%@: %s - couldn't disable AU buffer allocation, status : %d \n", kModuleName, __func__, status);
    }

    // Release any buffer from a previous call so re-initialization doesn't leak.
    if (m_buffList != NULL) {
        if (m_buffList->mBuffers[0].mData != NULL) {
            free(m_buffList->mBuffers[0].mData);
        }
        free(m_buffList);
        m_buffList = NULL;
    }

    AudioBufferList *buffList = (AudioBufferList *)malloc(sizeof(AudioBufferList));
    buffList->mNumberBuffers              = 1;
    buffList->mBuffers[0].mNumberChannels = channelCount;   // e.g. 1
    buffList->mBuffers[0].mDataByteSize   = dataByteSize;   // e.g. 2048
    buffList->mBuffers[0].mData           = malloc(dataByteSize);
    m_buffList = buffList;
}
3.3 设置Audio Unit property
/// Configures the capture audio unit: applies the stream format to the data
/// coming out of the input bus, enables input on the input bus, and disables
/// output on the output bus (this unit is used for recording only).
/// @param audioUnit  The audio unit to configure.
/// @param dataFormat The ASBD describing the desired capture format.
- (void)setAudioUnitPropertyWithAudioUnit:(AudioUnit)audioUnit dataFormat:(AudioStreamBasicDescription)dataFormat {
    OSStatus status;
    // Output scope of the input bus = the format of captured data handed to the app.
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Output,
                                  INPUT_BUS,
                                  &dataFormat,
                                  sizeof(dataFormat));
    if (status != noErr) {
        NSLog(@"%@: %s - set audio unit stream format failed, status : %d \n",kModuleName, __func__,status);
    }

    /*
    // Attempt to remove echo — had no observable effect in testing, kept for reference.
    UInt32 echoCancellation = 0;
    AudioUnitSetProperty(m_audioUnit,
                         kAUVoiceIOProperty_BypassVoiceProcessing,
                         kAudioUnitScope_Global,
                         0,
                         &echoCancellation,
                         sizeof(echoCancellation));
    */

    // Enable input on the input bus so the mic feeds the unit.
    UInt32 enableFlag = 1;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input,
                                  INPUT_BUS,
                                  &enableFlag,
                                  sizeof(enableFlag));
    if (status != noErr) {
        NSLog(@"%@: %s - could not enable input on AURemoteIO, status : %d \n",kModuleName, __func__, status);
    }

    // Disable output on the output bus — this unit only records.
    UInt32 disableFlag = 0;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Output,
                                  OUTPUT_BUS,
                                  &disableFlag,
                                  sizeof(disableFlag));
    if (status != noErr) {
        // Original message said "enable output"; this call disables it.
        NSLog(@"%@: %s - could not disable output on AURemoteIO, status : %d \n",kModuleName, __func__,status);
    }
}
3.4 设置录音回调
/// Registers `callBack` as the input-capture render callback on the audio
/// unit's input bus; `self` is passed through as the callback's refCon.
- (void)initCaptureCallbackWithAudioUnit:(AudioUnit)audioUnit callBack:(AURenderCallback)callBack {
    AURenderCallbackStruct callbackInfo = {
        .inputProc       = callBack,
        .inputProcRefCon = (__bridge void *)self,
    };
    OSStatus result = AudioUnitSetProperty(audioUnit,
                                           kAudioOutputUnitProperty_SetInputCallback,
                                           kAudioUnitScope_Global,
                                           INPUT_BUS,
                                           &callbackInfo,
                                           sizeof(callbackInfo));
    if (result != noErr) {
        NSLog(@"%@: %s - Audio Unit set capture callback failed, status : %d \n",kModuleName, __func__,result);
    }
}
3.5 在录音回调中我们即可拿到从音频设备采集到的音频数据
// 音频录制回调
static OSStatus AudioCaptureCallback(void *inRefCon,
AudioUnitRenderActionFlags *ioActionFlags,
const AudioTimeStamp *inTimeStamp,
UInt32 inBusNumber,
UInt32 inNumberFrames,
AudioBufferList *ioData) {
//这个函数的作用是播放声音
AudioUnitRender(m_audioUnit, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, m_buffList);
if (g_av_base_time == 0) {
return noErr;
}
XDXAudioCaptureManager *manager = (__bridge XDXAudioCaptureManager *)inRefCon;
Float64 currentTime = CMTimeGetSeconds(CMClockMakeHostTimeFromSystemUnits(inTimeStamp->mHostTime));
int64_t pts = (int64_t)((currentTime - g_av_base_time) * 1000);
/* Test audio fps
static Float64 lastTime = 0;
Float64 currentTime = CMTimeGetSeconds(CMClockMakeHostTimeFromSystemUnits(inTimeStamp->mHostTime))*1000;
NSLog(@"Test duration - %f",currentTime - lastTime);
lastTime = currentTime;
*/
void *bufferData = m_buffList->mBuffers[0].mData;
UInt32 bufferSize = m_buffList->mBuffers[0].mDataByteSize;
// NSLog(@"demon = %d",bufferSize);
struct XDXCaptureAudioData audioData = {
.data = bufferData,
.size = bufferSize,
.inNumberFrames = inNumberFrames,
.pts = pts,
};
XDXCaptureAudioDataRef audioDataRef = &audioData;
if ([manager.delegate respondsToSelector:@selector(receiveAudioDataByDevice:)]) {
[manager.delegate receiveAudioDataByDevice:audioDataRef];
}
return noErr;
}
网友评论