Playing audio with an Audio Unit involves three main steps.
1 AVAudioSession setup
AVAudioSession *session = [AVAudioSession sharedInstance];
NSError *rawError = nil;
if (![session setCategory:AVAudioSessionCategoryPlayback error:&rawError]) {
NSLog(@"setCategory AVAudioSessionCategoryPlayback error");
return NO;
}
NSTimeInterval preferredIOBufferDuration = 0.023;
// Other options can be configured as well. At a 44.1 kHz sample rate the default I/O buffer duration is roughly 23 ms, i.e. about 1024 samples per cycle. If your app needs very low latency, it can be set as low as 0.005 s (roughly 256 samples).
// Set the preferred I/O buffer duration; the valid range is typically 0.005 s to about 0.093 s.
if (![session setPreferredIOBufferDuration:preferredIOBufferDuration error:&rawError]) {
NSLog(@"setPreferredIOBufferDuration error");
}
double preferredSampleRate = 44100;
// Set the preferred sample rate to 44100 Hz, i.e. 44100 samples per second.
if (![session setPreferredSampleRate:preferredSampleRate error:&rawError]) {
NSLog(@"setPreferredSampleRate: %.4f, error: %@", preferredSampleRate, rawError);
}
// Activate the session
if (![session setActive:YES error:&rawError]) {
NSLog(@"setActive error");
return NO;
}
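Note that the "preferred" settings are only requests; the system may grant different values. As a small sketch (reusing the same session object from above), the effective values can be read back after activation:

// The hardware may adjust the requested values; log what was actually granted.
NSLog(@"actual sample rate: %.0f Hz, actual I/O buffer duration: %.4f s",
      session.sampleRate, session.IOBufferDuration);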
2 AudioStreamBasicDescription setup
This struct is especially important when working with Audio Unit: it describes the general settings of an audio format.
The following values are usually sufficient:
Sample Rate: 44100
Format ID: lpcm
Format Flags: 4
Bytes per Packet: 2
Frames per Packet: 1
Bytes per Frame: 2
Channels per Frame: 1
Bits per Channel: 16
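As a sketch, the table above maps onto the struct fields like this (interleaved 16-bit signed-integer mono PCM; kAudioFormatFlagIsPacked is often OR'd into the flags as well):

AudioStreamBasicDescription asbd = {0};
asbd.mSampleRate       = 44100;                           // Sample Rate
asbd.mFormatID         = kAudioFormatLinearPCM;           // Format ID: lpcm
asbd.mFormatFlags      = kAudioFormatFlagIsSignedInteger; // Format Flags: 4
asbd.mBytesPerPacket   = 2;                               // Bytes per Packet
asbd.mFramesPerPacket  = 1;                               // Frames per Packet
asbd.mBytesPerFrame    = 2;                               // Bytes per Frame
asbd.mChannelsPerFrame = 1;                               // Channels per Frame
asbd.mBitsPerChannel   = 16;                              // Bits per Channel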
3 Initialize and configure the Audio Unit
// 1. Describe the audio component (RemoteIO output)
AudioComponentDescription descr = {0};
descr.componentType = kAudioUnitType_Output;
descr.componentSubType = kAudioUnitSubType_RemoteIO;
descr.componentManufacturer = kAudioUnitManufacturer_Apple;
descr.componentFlags = 0;
descr.componentFlagsMask = 0;
// Find the audio component that matches the description
AudioUnit audioUnit = NULL;
AudioComponent component = AudioComponentFindNext(NULL, &descr);
// Create an instance of the audio unit
OSStatus status = AudioComponentInstanceNew(component, &audioUnit);
if (status != noErr) {
NSLog(@"AudioComponentInstanceNew error");
return NO;
}
// Fetch the unit's current input-scope stream format, then override the sample rate
AudioStreamBasicDescription streamDescr = {0};
UInt32 size = sizeof(AudioStreamBasicDescription);
status = AudioUnitGetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input,
0, &streamDescr, &size);
if (status != noErr) {
NSLog(@"AudioUnitGetProperty error");
return NO;
}
streamDescr.mSampleRate = sampleRate; // e.g. 44100, matching the sample rate configured on the AVAudioSession in step 1
status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input,
0, &streamDescr, size);
if (status != noErr) {
NSLog(@"AudioUnitSetProperty error");
return NO;
}
_bitsPerChannel = streamDescr.mBitsPerChannel;
_channelsPerFrame = streamDescr.mChannelsPerFrame;
AURenderCallbackStruct renderCallbackStruct;
renderCallbackStruct.inputProc = renderCallback;
renderCallbackStruct.inputProcRefCon = (__bridge void *)(self);
status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &renderCallbackStruct, sizeof(AURenderCallbackStruct));
if (status != noErr) {
NSLog(@"AudioUnitSetProperty error");
return NO;
}
status = AudioUnitInitialize(audioUnit);
if (status != noErr) {
NSLog(@"AudioUnitInitialize error");
return NO;
}
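The renderCallback referenced above is a C function you provide; the output unit calls it whenever it needs more samples, and you fill ioData with PCM in the stream format configured above. A minimal sketch (it only writes silence; a real player would copy data from its own buffer here):

static OSStatus renderCallback(void *inRefCon,
                               AudioUnitRenderActionFlags *ioActionFlags,
                               const AudioTimeStamp *inTimeStamp,
                               UInt32 inBusNumber,
                               UInt32 inNumberFrames,
                               AudioBufferList *ioData) {
    // inRefCon is the object passed via inputProcRefCon (self in this example).
    // Fill every output buffer; this sketch just outputs silence.
    for (UInt32 i = 0; i < ioData->mNumberBuffers; i++) {
        memset(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
    }
    return noErr;
}

Once AudioUnitInitialize succeeds, playback is started with AudioOutputUnitStart(audioUnit) and stopped with AudioOutputUnitStop(audioUnit).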