Contents
1. Audio Framework Overview
1.1 Low-Level
1.2 Mid-Level
1.3 High-Level
2. AudioUnit
2.1 AudioUnit Types
2.2 Two Ways to Create an AudioUnit
1.1 Low-Level
This layer is mainly used for audio apps on the Mac that need the highest possible real-time performance; most audio apps never need to touch it. On iOS, the higher-level APIs already provide good real-time performance for most needs. OpenAL, for example, gives games real-time audio processing with direct access to I/O.
I/O Kit: interacts with hardware drivers
Audio HAL: the audio hardware abstraction layer, which decouples API calls from the actual hardware
Core MIDI: provides a software abstraction layer for MIDI streams and devices
Host Time Services: provides access to the computer's hardware clock
1.2 Mid-Level
This layer offers fairly complete functionality, including audio data format conversion, reading and writing audio files, parsing audio streams, and plug-in support.
Audio Converter Services: converts between audio data formats
Audio File Services: reads and writes audio data
Audio Unit Services and Audio Processing Graph Services: support digital signal processing plug-ins such as equalizers and mixers
Audio File Stream Services: parses audio streams
Core Audio Clock Services: handles audio clock synchronization
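As a small illustration of this layer, the sketch below uses Audio File Services to open a file and read its data format; the file path is a placeholder and error handling is reduced to a single check.
#import <AudioToolbox/AudioToolbox.h>
// Sketch: open an audio file and query its format with Audio File Services.
CFURLRef fileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault,
                                                 CFSTR("/path/to/audio.m4a"), // placeholder path
                                                 kCFURLPOSIXPathStyle, false);
AudioFileID audioFile = NULL;
OSStatus status = AudioFileOpenURL(fileURL, kAudioFileReadPermission, 0, &audioFile);
if (status == noErr) {
    AudioStreamBasicDescription fileFormat = {0};
    UInt32 size = sizeof(fileFormat);
    AudioFileGetProperty(audioFile, kAudioFilePropertyDataFormat, &size, &fileFormat);
    NSLog(@"sample rate: %.0f Hz, channels: %u",
          fileFormat.mSampleRate, (unsigned int)fileFormat.mChannelsPerFrame);
    AudioFileClose(audioFile);
}
CFRelease(fileURL);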
1.3 High-Level
This is a set of high-level services built on top of the lower-level interfaces; most everyday audio development can be done entirely at this layer.
Audio Queue Services: provides recording, playback, pause, looping, and synchronization of audio; it automatically uses the necessary codecs to handle compressed audio formats
AVAudioPlayer: an Objective-C audio playback class provided specifically for the iOS platform, capable of playing any audio format that iOS supports
Extended Audio File Services: combines Audio File Services and Audio Converter Services, providing read and write access to both compressed and uncompressed audio files
OpenAL: Core Audio's implementation of the OpenAL standard, capable of 3D (positional) audio playback
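For example, playing a bundled file with AVAudioPlayer takes only a few lines. This is a minimal sketch; the resource name "demo.mp3" is a placeholder, and interruption/route-change handling is omitted.
#import <AVFoundation/AVFoundation.h>
// Sketch: play a bundled file with AVAudioPlayer.
NSURL *url = [[NSBundle mainBundle] URLForResource:@"demo" withExtension:@"mp3"]; // placeholder resource
NSError *error = nil;
AVAudioPlayer *player = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:&error];
if (player) {
    [player prepareToPlay];  // preload buffers to reduce start-up latency
    [player play];
    // keep a strong reference to player (e.g. a property), otherwise ARC
    // deallocates it and playback stops immediately
} else {
    NSLog(@"failed to create player: %@", error);
}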
2.1 AudioUnit Types
I/O: Remote I/O, Voice-Processing I/O, Generic Output
Mixing: 3D Mixer, Multichannel Mixer
Effect: iPod Equalizer
Format Conversion: Format Converter
Every AudioUnit has three scopes: Input, Output, and Global. The input scope is where an audio stream enters the unit, the output scope is where it leaves the unit, and the global scope refers to the unit as a whole. The input and output scopes each contain one or more buses (also called elements); a mixer unit, for example, has several input buses and a single output bus, while a splitter unit has one input bus and several output buses.
As for the types above: I/O units talk to the hardware (capture and playback); Mixing units blend audio from different sources; Effect units apply effects such as pitch shifting or reverb; and Format Conversion units handle format conversion such as resampling. One useful optimization: the Multichannel Mixer can perform format conversion itself, since its input and output formats are allowed to differ, which can save you a dedicated format-converter unit.
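The code in the next section uses kInputBus and kOutputBus without defining them. They are not framework constants, just conventional names for the Remote I/O unit's two elements, for example:
// Conventional names for the Remote I/O unit's elements (not framework constants):
// element 1 ("I" looks like 1) is the input/microphone side,
// element 0 ("O" looks like 0) is the output/speaker side.
#define kInputBus  1
#define kOutputBus 0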
2.2 Two Ways to Create an AudioUnit
I/O Unit
First, let's look at the struct types used in this first approach, creating an I/O unit directly.
// Describes the basic format of an audio stream
struct AudioStreamBasicDescription
{
Float64 mSampleRate;
AudioFormatID mFormatID;
AudioFormatFlags mFormatFlags;
UInt32 mBytesPerPacket;
UInt32 mFramesPerPacket;
UInt32 mBytesPerFrame;
UInt32 mChannelsPerFrame;
UInt32 mBitsPerChannel;
UInt32 mReserved;
};
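For reference, here is how the structure might be filled in for 16-bit, interleaved, two-channel linear PCM at 44.1 kHz; the exact values are an assumption for illustration, not taken from the article's code.
// Sketch: 44.1 kHz, 16-bit signed integer, interleaved stereo PCM
AudioStreamBasicDescription asbd = {0};
asbd.mSampleRate       = 44100.0;
asbd.mFormatID         = kAudioFormatLinearPCM;
asbd.mFormatFlags      = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
asbd.mChannelsPerFrame = 2;
asbd.mBitsPerChannel   = 16;
asbd.mFramesPerPacket  = 1;  // always 1 for uncompressed PCM
asbd.mBytesPerFrame    = asbd.mChannelsPerFrame * (asbd.mBitsPerChannel / 8); // 4 bytes
asbd.mBytesPerPacket   = asbd.mBytesPerFrame * asbd.mFramesPerPacket;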
// A single buffer of audio data
struct AudioBuffer
{
UInt32 mNumberChannels;
UInt32 mDataByteSize;
void* __nullable mData;
};
// A list of audio buffers
struct AudioBufferList
{
UInt32 mNumberBuffers;
AudioBuffer mBuffers[1]; // this is a variable length array of mNumberBuffers elements
#if defined(__cplusplus) && defined(CA_STRICT) && CA_STRICT
public:
AudioBufferList() {}
private:
// Copying and assigning a variable length struct is problematic; generate a compile error.
AudioBufferList(const AudioBufferList&);
AudioBufferList& operator=(const AudioBufferList&);
#endif
};
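Because mBuffers is declared as a variable-length array, an AudioBufferList is normally allocated on the heap with room for as many buffers as you need. A minimal sketch for a single interleaved buffer (the frame count and the 16-bit mono sample size are assumptions):
// Sketch: allocate an AudioBufferList holding one interleaved 16-bit mono buffer.
UInt32 frameCount = 1024;                         // assumed frames per callback
UInt32 byteSize   = frameCount * sizeof(SInt16);  // mono, 16-bit samples
AudioBufferList *bufferList = (AudioBufferList *)malloc(sizeof(AudioBufferList));
bufferList->mNumberBuffers = 1;
bufferList->mBuffers[0].mNumberChannels = 1;
bufferList->mBuffers[0].mDataByteSize   = byteSize;
bufferList->mBuffers[0].mData           = malloc(byteSize);
// For N non-interleaved buffers, allocate
// offsetof(AudioBufferList, mBuffers) + N * sizeof(AudioBuffer) bytes instead.
// When finished: free(bufferList->mBuffers[0].mData); free(bufferList);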
// Describes an audio component
typedef struct AudioComponentDescription {
OSType componentType;
OSType componentSubType;
OSType componentManufacturer;
UInt32 componentFlags;
UInt32 componentFlagsMask;
} AudioComponentDescription;
Concepts that are easy to confuse:
kAudioUnitProperty_SetRenderCallback: the audio unit needs data to play and asks the host for it through this callback;
kAudioOutputUnitProperty_SetInputCallback: the audio unit notifies the host that captured data is ready, and the host then pulls it with AudioUnitRender;
MaximumFramesPerSlice: the maximum number of audio frames delivered or requested in a single callback.
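To make the distinction concrete, here is a rough sketch of what the two callbacks registered in the code below might look like. The function names match those used later (recordCallbackFunc, playbackCallbackFunc), but the file-scope variables and the silence-filling playback body are assumptions for illustration only.
// Assumed file-scope state for the sketch; in the article's code this state
// would live on the controller object passed through inRefCon.
static AudioUnit sVoipUnit;               // the Remote I/O unit created below
static SInt16    sRecordBuffer[4096];     // pre-allocated, since ShouldAllocateBuffer is set to 0

// Input callback: the unit reports that captured data is ready; the host must
// pull it with AudioUnitRender. ioData is not filled in for this callback.
static OSStatus recordCallbackFunc(void                       *inRefCon,
                                   AudioUnitRenderActionFlags *ioActionFlags,
                                   const AudioTimeStamp       *inTimeStamp,
                                   UInt32                      inBusNumber,
                                   UInt32                      inNumberFrames,
                                   AudioBufferList            *ioData)
{
    AudioBufferList bufferList;
    bufferList.mNumberBuffers = 1;
    bufferList.mBuffers[0].mNumberChannels = 1;                            // assumes 16-bit mono input format
    bufferList.mBuffers[0].mDataByteSize   = inNumberFrames * sizeof(SInt16);
    bufferList.mBuffers[0].mData           = sRecordBuffer;
    OSStatus status = AudioUnitRender(sVoipUnit, ioActionFlags, inTimeStamp,
                                      inBusNumber, inNumberFrames, &bufferList);
    // hand bufferList.mBuffers[0].mData to the encoder / network here
    return status;
}

// Render callback: the unit needs data to play; the host must fill ioData with
// inNumberFrames frames of PCM (zeroed here, i.e. silence).
static OSStatus playbackCallbackFunc(void                       *inRefCon,
                                     AudioUnitRenderActionFlags *ioActionFlags,
                                     const AudioTimeStamp       *inTimeStamp,
                                     UInt32                      inBusNumber,
                                     UInt32                      inNumberFrames,
                                     AudioBufferList            *ioData)
{
    for (UInt32 i = 0; i < ioData->mNumberBuffers; i++) {
        memset(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
    }
    return noErr;
}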
Here is the code for the first approach:
// create IO Unit
BOOL result = NO;
AudioComponentDescription outputDescription = {0};
outputDescription.componentType = kAudioUnitType_Output;
outputDescription.componentSubType = kAudioUnitSubType_RemoteIO;
outputDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
outputDescription.componentFlags = 0;
outputDescription.componentFlagsMask = 0;
AudioComponent comp = AudioComponentFindNext(NULL, &outputDescription);
result = CheckOSStatus(AudioComponentInstanceNew(comp, &mVoipUnit), @"couldn't create a new instance of RemoteIO");
if (!result) return result;
// config IO Enable status
UInt32 flag = 1;
result = CheckOSStatus(AudioUnitSetProperty(mVoipUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &flag, sizeof(flag)), @"could not enable output on RemoteIO");
if (!result) return result;
/// The call below can be read as: set the EnableIO property on the input scope of element 1 (the input bus) to the value pointed to by flag
result = CheckOSStatus(AudioUnitSetProperty(mVoipUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, kInputBus, &flag, sizeof(flag)), @"AudioUnitSetProperty EnableIO");
if (!result) return result;
// Config default format
result = CheckOSStatus(AudioUnitSetProperty(mVoipUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, kInputBus, &inputAudioDescription, sizeof(inputAudioDescription)), @"couldn't set the input client format on RemoteIO");
if (!result) return result;
result = CheckOSStatus(AudioUnitSetProperty(mVoipUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, kOutputBus, &outputAudioDescription, sizeof(outputAudioDescription)), @"couldn't set the output client format on RemoteIO");
if (!result) return result;
// Set the MaximumFramesPerSlice property. This property is used to describe to an audio unit the maximum number
// of samples it will be asked to produce on any single given call to AudioUnitRender
UInt32 maxFramesPerSlice = 4096;
result = CheckOSStatus(AudioUnitSetProperty(mVoipUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maxFramesPerSlice, sizeof(UInt32)), @"couldn't set max frames per slice on RemoteIO");
if (!result) return result;
// Set the record callback
AURenderCallbackStruct recordCallback;
recordCallback.inputProc = recordCallbackFunc;
recordCallback.inputProcRefCon = (__bridge void * _Nullable)(self);
result = CheckOSStatus(AudioUnitSetProperty(mVoipUnit, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, kInputBus, &recordCallback, sizeof(recordCallback)), @"couldn't set record callback on RemoteIO");
if (!result) return result;
// Set the playback callback
AURenderCallbackStruct playbackCallback;
playbackCallback.inputProc = playbackCallbackFunc;
playbackCallback.inputProcRefCon = (__bridge void * _Nullable)(self);
result = CheckOSStatus(AudioUnitSetProperty(mVoipUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, kOutputBus, &playbackCallback, sizeof(playbackCallback)), @"couldn't set playback callback on RemoteIO");
if (!result) return result;
// set buffer allocate
flag = 0;
result = CheckOSStatus(AudioUnitSetProperty(mVoipUnit,
kAudioUnitProperty_ShouldAllocateBuffer,
kAudioUnitScope_Output,
kInputBus,
&flag,
sizeof(flag)), @"couldn't set property for ShouldAllocateBuffer");
if (!result) return result;
// Initialize the output IO instance
result = CheckOSStatus(AudioUnitInitialize(mVoipUnit), @"couldn't initialize RemoteIO instance");
if (!result) return result;
return YES;
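The code above only creates and initializes the unit; audio does not flow until it is started. A possible start/stop pair, assuming the same mVoipUnit member and CheckOSStatus helper used above (the method names are placeholders):
// Start capturing and rendering on the initialized Remote I/O unit.
- (BOOL)startAudioUnit {
    return CheckOSStatus(AudioOutputUnitStart(mVoipUnit), @"couldn't start RemoteIO");
}
// Stop the unit; AudioUnitUninitialize and AudioComponentInstanceDispose can
// follow once the unit is no longer needed.
- (BOOL)stopAudioUnit {
    return CheckOSStatus(AudioOutputUnitStop(mVoipUnit), @"couldn't stop RemoteIO");
}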
AUGraph
The AUGraph workflow is as follows:
1. Initialize the file stream and the AVAudioSession, and allocate buffers.
2. Create a new AUGraph and add two AUNodes: one for the RemoteI/O Unit and one for the Mixer Unit. Adding a node takes two steps: first add it with AUGraphAddNode, then obtain the node's AudioUnit with AUGraphNodeInfo.
3. Connect the two AUNodes: AUGraphConnectNodeInput feeds the output of the Mixer Unit's output bus into the RemoteI/O Unit's output bus. (Note that this is not the RemoteI/O Unit's input bus, because the input bus is fed by the microphone.) At the same time, set the RemoteI/O Unit's input and output formats and the record callback.
4. Call AUGraphInitialize to initialize the AUGraph, then start it with AUGraphStart. Once the graph is running, whenever the microphone has captured data, the callback registered with kAudioOutputUnitProperty_SetInputCallback fires and hands the microphone data to the app; the render callbacks set on the Mixer Unit (kAudioUnitProperty_SetRenderCallback) ask the app to fill its two input buses; after the Mixer Unit has processed the data, it is passed to the Remote I/O Unit as configured by AUGraphConnectNodeInput, and the Remote I/O Unit sends it to the speaker.
// create AUGraph
BOOL result = NO;
result = CheckOSStatus(NewAUGraph (&processingGraph), @"couldn't create a new instance of AUGraph");
if (!result) return result;
// I/O unit
AudioComponentDescription iOUnitDescription;
iOUnitDescription.componentType = kAudioUnitType_Output;
iOUnitDescription.componentSubType = kAudioUnitSubType_RemoteIO;
iOUnitDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
iOUnitDescription.componentFlags = 0;
iOUnitDescription.componentFlagsMask = 0;
// Multichannel mixer unit
AudioComponentDescription MixerUnitDescription;
MixerUnitDescription.componentType = kAudioUnitType_Mixer;
MixerUnitDescription.componentSubType = kAudioUnitSubType_MultiChannelMixer;
MixerUnitDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
MixerUnitDescription.componentFlags = 0;
MixerUnitDescription.componentFlagsMask = 0;
AUNode iONode; // node for I/O unit
AUNode mixerNode; // node for Multichannel Mixer unit
result = CheckOSStatus(AUGraphAddNode (
processingGraph,
&iOUnitDescription,
&iONode), @"couldn't add a node instance of kAudioUnitSubType_RemoteIO");
if (!result) return result;
result = CheckOSStatus(AUGraphAddNode (
processingGraph,
&MixerUnitDescription,
&mixerNode), @"couldn't add a node instance of mixer unit");
if (!result) return result;
// open the AUGraph
result = CheckOSStatus(AUGraphOpen (processingGraph), @"couldn't open the AUGraph");
if (!result) return result;
// Obtain unit instance
result = CheckOSStatus(AUGraphNodeInfo (
processingGraph,
mixerNode,
NULL,
&mMixerUnit
), @"couldn't get instance of mixer unit");
if (!result) return result;
result = CheckOSStatus(AUGraphNodeInfo (
processingGraph,
iONode,
NULL,
&mVoipUnit
), @"couldn't get a new instance of remoteio unit");
if (!result) return result;
UInt32 busCount = 2; // bus count for mixer unit input
UInt32 guitarBus = 0; // mixer unit bus 0 will be stereo and will take the guitar sound
UInt32 beatsBus = 1; // mixer unit bus 1 will be mono and will take the beats sound
result = CheckOSStatus(AudioUnitSetProperty (
mMixerUnit,
kAudioUnitProperty_ElementCount,
kAudioUnitScope_Input,
0,
&busCount,
sizeof (busCount)
), @"could not set mixer unit input bus count");
if (!result) return result;
UInt32 maximumFramesPerSlice = 4096;
result = CheckOSStatus(AudioUnitSetProperty (
mMixerUnit,
kAudioUnitProperty_MaximumFramesPerSlice,
kAudioUnitScope_Global,
0,
&maximumFramesPerSlice,
sizeof (maximumFramesPerSlice)
), @"could not set mixer unit maximum frame per slice");
if (!result) return result;
// Attach the input render callback and context to each input bus
for (UInt16 busNumber = 0; busNumber < busCount; ++busNumber) {
// Set up the structure that contains the input render callback
AURenderCallbackStruct playbackCallback;
playbackCallback.inputProc = playbackCallbackFunc;
playbackCallback.inputProcRefCon = (__bridge void * _Nullable)(self);
NSLog (@"Registering the render callback with mixer unit input bus %u", busNumber);
// Set a callback for the specified node's specified input
result = CheckOSStatus(AUGraphSetNodeInputCallback (
processingGraph,
mixerNode,
busNumber,
&playbackCallback
), @"couldn't set playback callback on mixer unit");
if (!result) return result;
}
// Config mixer unit input default format
result = CheckOSStatus(AudioUnitSetProperty (
mMixerUnit,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input,
guitarBus,
&outputAudioDescription,
sizeof (outputAudioDescription)
), @"couldn't set the input 0 client format on mixer unit");
if (!result) return result;
result = CheckOSStatus(AudioUnitSetProperty (
mMixerUnit,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input,
beatsBus,
&outputAudioDescription,
sizeof (outputAudioDescription)
), @"couldn't set the input 1 client format on mixer unit");
if (!result) return result;
Float64 graphSampleRate = 44100.0; // Hertz;
result = CheckOSStatus(AudioUnitSetProperty (
mMixerUnit,
kAudioUnitProperty_SampleRate,
kAudioUnitScope_Output,
0,
&graphSampleRate,
sizeof (graphSampleRate)
), @"couldn't set the output client format on mixer unit");
if (!result) return result;
////////////////////////////////////////////////////////////////////////////////////////////
// config voip unit IO Enable status
UInt32 flag = 1;
result = CheckOSStatus(AudioUnitSetProperty(mVoipUnit,
kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Output,
kOutputBus,
&flag,
sizeof(flag)
), @"could not enable output on kAudioUnitSubType_RemoteIO");
if (!result) return result;
result = CheckOSStatus(AudioUnitSetProperty(mVoipUnit,
kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Input,
kInputBus,
&flag,
sizeof(flag)
), @"could not enable input on kAudioUnitSubType_RemoteIO");
if (!result) return result;
// config voip unit default format
result = CheckOSStatus(AudioUnitSetProperty(mVoipUnit,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output,
kInputBus,
&inputAudioDescription,
sizeof(inputAudioDescription)
), @"couldn't set the input client format on kAudioUnitSubType_RemoteIO");
if (!result) return result;
UInt32 maxFramesPerSlice = 4096;
result = CheckOSStatus(AudioUnitSetProperty(mVoipUnit,
kAudioUnitProperty_MaximumFramesPerSlice,
kAudioUnitScope_Global,
0,
&maxFramesPerSlice,
sizeof(UInt32)
), @"couldn't set max frames per slice on kAudioUnitSubType_RemoteIO");
if (!result) return result;
// Set the record callback
AURenderCallbackStruct recordCallback;
recordCallback.inputProc = recordCallbackFunc;
recordCallback.inputProcRefCon = (__bridge void * _Nullable)(self);
result = CheckOSStatus(AudioUnitSetProperty(mVoipUnit,
kAudioOutputUnitProperty_SetInputCallback,
kAudioUnitScope_Global,
kInputBus,
&recordCallback,
sizeof(recordCallback)
), @"couldn't set record callback on kAudioUnitSubType_RemoteIO");
if (!result) return result;
// set buffer allocate
flag = 0;
result = CheckOSStatus(AudioUnitSetProperty(mVoipUnit,
kAudioUnitProperty_ShouldAllocateBuffer,
kAudioUnitScope_Output,
kInputBus,
&flag,
sizeof(flag)), @"couldn't set property for ShouldAllocateBuffer");
if (!result) return result;
/////////////////////////////////////////////////////////////////////////////////////////////
// Connect the mixer node's output to the I/O node's input
result = CheckOSStatus(AUGraphConnectNodeInput (
processingGraph,
mixerNode, // source node
0, // source node output bus number
iONode, // destination node
0 // destination node input bus number
), @"couldn't connect ionode to mixernode");
if (!result) return result;
result = CheckOSStatus(AUGraphInitialize (processingGraph), @"AUGraphInitialize failed");
if (!result) return result;
return YES;
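As with the single-unit version, the graph built above still has to be started before any callbacks fire. A minimal sketch of running and tearing it down (the CAShow call is optional and only prints the graph layout to the console):
// Start the graph; the record and render callbacks begin firing after this call.
CAShow(processingGraph);   // optional: dump the graph description for debugging
result = CheckOSStatus(AUGraphStart(processingGraph), @"AUGraphStart failed");
// ... later, when audio should stop:
Boolean isRunning = false;
AUGraphIsRunning(processingGraph, &isRunning);
if (isRunning) {
    CheckOSStatus(AUGraphStop(processingGraph), @"AUGraphStop failed");
}
AUGraphUninitialize(processingGraph);
DisposeAUGraph(processingGraph);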