Using AudioUnit

Author: Hayde | Published 2018-09-12 09:44

    On iOS, Audio Unit is a fairly low-level interface that can be thought of as a wrapper around the audio hardware driver. It is used for low-latency audio capture and playback, for example in real-time voice and VoIP scenarios.

    iOS provides two ways to create an AudioUnit: creating it directly, or creating it through an AUGraph.

    In day-to-day development the AudioUnit is often created through an AUGraph, which reduces the amount of work needed for specific feature requirements.
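
    This article creates the RemoteIO unit directly. For comparison, a minimal sketch of the AUGraph route might look like the following (error handling omitted; the function name is illustrative):

    static AudioUnit CreateRemoteIOUnitViaGraph(AUGraph *outGraph) {
        // Describe the RemoteIO unit we want the graph to host.
        AudioComponentDescription ioDesc = {0};
        ioDesc.componentType          = kAudioUnitType_Output;
        ioDesc.componentSubType       = kAudioUnitSubType_RemoteIO;
        ioDesc.componentManufacturer  = kAudioUnitManufacturer_Apple;

        AUGraph   graph  = NULL;
        AUNode    ioNode = 0;
        AudioUnit ioUnit = NULL;

        NewAUGraph(&graph);                            // create an empty graph
        AUGraphAddNode(graph, &ioDesc, &ioNode);       // add a RemoteIO node
        AUGraphOpen(graph);                            // instantiate the units in the graph
        AUGraphNodeInfo(graph, ioNode, NULL, &ioUnit); // fetch the underlying AudioUnit

        // The unit can now be configured just like in setupAudioUnit below;
        // the caller later invokes AUGraphInitialize() and AUGraphStart().
        *outGraph = graph;
        return ioUnit;
    }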

    1. AVAudioSession: configuring the session context

    When configuring the session, setting a preferred sample rate means the number of bytes recorded per 20 ms is a constant value across different recording devices. If the project does not set it, the recording byte count has to be configured dynamically according to the device.
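
    As a rough illustration, assuming kSampleRate is 16000 Hz and the 16-bit mono PCM format configured later, each 20 ms callback carries 16000 × 0.02 = 320 frames, i.e. 640 bytes (a sketch of the calculation, not part of the original code):

    // Sketch: expected data size per record callback when the preferred
    // sample rate is honoured (assumes 16-bit mono PCM as configured below).
    static UInt32 BytesPer20msCallback(Float64 sampleRate) {
        UInt32 framesPer20ms = (UInt32)(sampleRate * 0.02);   // e.g. 16000 * 0.02 = 320 frames
        return framesPer20ms * (UInt32)sizeof(SInt16);        // 320 * 2 = 640 bytes
    }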

    - (Boolean)setupAuidoSession {
        NSError *error = nil;
        AVAudioSession *session = [AVAudioSession sharedInstance];
    
        [session setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:AVAudioSessionCategoryOptionAllowBluetooth error:&error];
        if (error != nil) {
            NSLog(@"setupAudioSession : Error set AVAudioSessionCategoryOptionAllowBluetooth(%@).", error.localizedDescription);
            return false;
        }
    
        // Prefer a 20 ms I/O buffer so each record callback delivers roughly 20 ms of audio
        float aBufferLength = 0.02;
        [session setPreferredIOBufferDuration:aBufferLength error:&error];
        if (error != nil) {
            NSLog(@"setupAudioSession : Error setPreferredIOBufferDuration(%@).", error.localizedDescription);
            return false;
        }
    
        // Set a preferred sample rate on the session so the rate is the same on every device.
        // Without it, inNumberFrames in the record callback varies by device, which would mean
        // processing the data in a loop.
        [session setPreferredSampleRate:kSampleRate error:&error];
        if (error != nil) {
            NSLog(@"setupAudioSession : Error setPreferredSampleRate(%@).", error.localizedDescription);
            return false;
        }
    
        // Register for audio session interruption notifications (handled in handleInterruption:)
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(handleInterruption:)
                                                     name:AVAudioSessionInterruptionNotification
                                                   object:session];
    
        [session setActive:YES withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation error: &error];
        if (nil != error) {
            NSLog(@"AudioSession setActive error:%@", error.localizedDescription);
            return false;
        }
    
        return true;
    }
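
    The handleInterruption: selector registered above is not shown in the original; a minimal sketch (assuming the ioUnit instance variable and the start/stop calls used later in this article) could look like this:

    - (void)handleInterruption:(NSNotification *)notification {
        // Stop the unit when an interruption begins; restart it when the
        // system allows resumption.
        NSUInteger type = [notification.userInfo[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue];
        if (type == AVAudioSessionInterruptionTypeBegan) {
            AudioOutputUnitStop(ioUnit);
        } else if (type == AVAudioSessionInterruptionTypeEnded) {
            NSUInteger options = [notification.userInfo[AVAudioSessionInterruptionOptionKey] unsignedIntegerValue];
            if (options & AVAudioSessionInterruptionOptionShouldResume) {
                [[AVAudioSession sharedInstance] setActive:YES error:nil];
                AudioOutputUnitStart(ioUnit);
            }
        }
    }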
    

    2. Configuring the AudioUnit

    - (Boolean)setupAudioUnit {
        // Describe the RemoteIO unit and create the AudioComponentInstance
        AudioComponentDescription audioDes;
        audioDes.componentType          = kAudioUnitType_Output;
        audioDes.componentSubType       = kAudioUnitSubType_RemoteIO;
        audioDes.componentManufacturer  = kAudioUnitManufacturer_Apple;
        audioDes.componentFlags         = 0;
        audioDes.componentFlagsMask     = 0;
        AudioComponent inputComponent = AudioComponentFindNext(NULL, &audioDes);
        
        CheckOSStatus(AudioComponentInstanceNew(inputComponent, &ioUnit), "New ComponentInstance Fail");
        
        // Tell the unit not to allocate its own buffers on the record bus; we supply our own below
        UInt32 flag = 0;
        CheckOSStatus(AudioUnitSetProperty(ioUnit,
                                            kAudioUnitProperty_ShouldAllocateBuffer,
                                            kAudioUnitScope_Output,
                                            1,
                                            &flag,
                                            sizeof(flag)), "could not set ShouldAllocateBuffer");
    
        self->recordAudioBufferList = malloc(sizeof(AudioBufferList));
        self->recordAudioBufferList->mNumberBuffers = 1;
        self->recordAudioBufferList->mBuffers[0].mNumberChannels = 1;
        self->recordAudioBufferList->mBuffers[0].mDataByteSize = 4096;
        self->recordAudioBufferList->mBuffers[0].mData = malloc(4096);
        
        // Configure the audio stream format: 16-bit signed integer, mono, packed PCM
        AudioStreamBasicDescription audioFormat;
        audioFormat.mSampleRate = kSampleRate;
        audioFormat.mFormatID = kAudioFormatLinearPCM;
        audioFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
        audioFormat.mFramesPerPacket = 1;
        audioFormat.mChannelsPerFrame = 1;
        audioFormat.mBitsPerChannel = 16;
        audioFormat.mBytesPerFrame = (audioFormat.mChannelsPerFrame * audioFormat.mBitsPerChannel) / 8;
        audioFormat.mBytesPerPacket = audioFormat.mBytesPerFrame * audioFormat.mFramesPerPacket;
        
        CheckOSStatus(AudioUnitSetProperty(ioUnit,
                                           kAudioUnitProperty_StreamFormat,
                                           kAudioUnitScope_Output,
                                           1,
                                           &audioFormat,
                                           sizeof(audioFormat)), "could not set Output StreamFormat");
        
        
        
        CheckOSStatus(AudioUnitSetProperty(ioUnit,
                                           kAudioUnitProperty_StreamFormat,
                                           kAudioUnitScope_Input,
                                           0,
                                           &audioFormat,
                                           sizeof(audioFormat)), "could not set Input StreamFormat");
        
        
        // Install the recording (input) callback
        AURenderCallbackStruct recordCallback;
        recordCallback.inputProc = recordCallbackFunc;
        recordCallback.inputProcRefCon = (__bridge void *)self;
        
        CheckOSStatus(AudioUnitSetProperty(ioUnit,
                                           kAudioOutputUnitProperty_SetInputCallback,
                                           kAudioUnitScope_Global,
                                           1,
                                           &recordCallback,
                                           sizeof(recordCallback)), "recordCallback failure");
        // Enable recording (input) on bus 1 of the RemoteIO unit
        flag = 1;
        CheckOSStatus(AudioUnitSetProperty(ioUnit,
                                           kAudioOutputUnitProperty_EnableIO,
                                           kAudioUnitScope_Input,
                                           1,
                                           &flag,
                                           sizeof(flag)), "enable input failure");
        
        return true;
    }
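
    For a record-only use case, output on bus 0 can optionally be disabled as well (this is not in the original code; the RemoteIO unit has output enabled by default):

        UInt32 disableOutput = 0;
        CheckOSStatus(AudioUnitSetProperty(ioUnit,
                                           kAudioOutputUnitProperty_EnableIO,
                                           kAudioUnitScope_Output,
                                           0,                        // bus 0 = output (speaker)
                                           &disableOutput,
                                           sizeof(disableOutput)), "disable output failure");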
    

    3. The record callback

    - (AudioBufferList)getBufferList:(UInt32)inNumberFrames {
        // Build a single-buffer AudioBufferList sized for 16-bit mono PCM
        // (2 bytes per frame); the caller is responsible for freeing mData.
        AudioBuffer buffer;
        buffer.mDataByteSize = inNumberFrames * 2;
        buffer.mNumberChannels = 1;
        
        buffer.mData = malloc(inNumberFrames * 2);
        AudioBufferList bufferList;
        bufferList.mNumberBuffers = 1;
        bufferList.mBuffers[0] = buffer;
        return bufferList;
    }
    
    static OSStatus recordCallbackFunc(void *inRefCon,
                                       AudioUnitRenderActionFlags *ioActionFlags,
                                       const AudioTimeStamp *inTimeStamp,
                                       UInt32 inBusNumber,
                                       UInt32 inNumberFrames,
                                       AudioBufferList *ioData){
        
        YYAudioRecordManager *this = (__bridge YYAudioRecordManager *)inRefCon;
        OSStatus err = noErr;
        if (this.isRecording){
            @autoreleasepool {
                AudioBufferList bufList = [this getBufferList:inNumberFrames];
                err = AudioUnitRender(this->ioUnit, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, &bufList);
                if (err) {
                    printf("AudioUnitRender error code = %d\n", (int)err);
                    free(bufList.mBuffers[0].mData);   // avoid leaking the buffer on failure
                } else {
                    AudioBuffer buffer = bufList.mBuffers[0];
                    this.levels = computeLevel(buffer.mData, buffer.mDataByteSize);
                    NSData *pcmBlock = [NSData dataWithBytes:buffer.mData length:buffer.mDataByteSize];
                    [this processAudioData:pcmBlock];
                    free(buffer.mData);
                }
            }
        }
        return err;
    }
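
    computeLevel is referenced in the callback but not defined in the original; a minimal sketch that derives an RMS level from a block of 16-bit signed mono PCM (the signature is assumed) might be:

    #include <math.h>
    
    static float computeLevel(const void *data, UInt32 byteSize) {
        const SInt16 *samples = (const SInt16 *)data;
        UInt32 count = byteSize / sizeof(SInt16);
        if (count == 0) return 0.0f;
    
        double sum = 0.0;
        for (UInt32 i = 0; i < count; i++) {
            double s = samples[i] / 32768.0;   // normalise each sample to -1.0 .. 1.0
            sum += s * s;
        }
        return (float)sqrt(sum / count);       // RMS level of the block, 0.0 .. 1.0
    }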
    

    4. Checking OSStatus results

    static bool CheckOSStatus(OSStatus result, const char *operation) {
        if (result == noErr) {
            return true;
        }
        
        // If the status is a printable four-character code, format it as 'code';
        // otherwise fall back to printing it as a decimal integer.
        char errorString[20];
        *(UInt32 *)(errorString + 1) = CFSwapInt32HostToBig(result);
        if (isprint(errorString[1]) &&
            isprint(errorString[2]) &&
            isprint(errorString[3]) &&
            isprint(errorString[4])) {
            
            errorString[0] = errorString[5] = '\'';
            errorString[6] = '\0';
        } else {
            sprintf(errorString,"%d",(int)result);
        }
        
        fprintf(stderr, "Error: %s (%s)\n", operation, errorString);
        return false;
    }
    

    5. Start recording

    BOOL status = [self setupAuidoSession];
    if (status == false) {
       return;
    }
    
    [self setupAudioUnit];
    
    // CheckOSStatus returns true on success, so test the boolean result directly
    bool result = CheckOSStatus(AudioUnitInitialize(ioUnit), "init unit failure");
    if (!result) {
       return;
    }
    
    result = CheckOSStatus(AudioOutputUnitStart(ioUnit), "start unit failure");
    if (!result) {
       return;
    }
    

    6. Stop recording

    AudioOutputUnitStop(ioUnit);
    AudioComponentInstanceDispose(ioUnit);
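
    If the unit will not be reused, it is also common (though not shown in the original) to uninitialize it before disposing of it:

    AudioOutputUnitStop(ioUnit);
    AudioUnitUninitialize(ioUnit);         // counterpart of AudioUnitInitialize
    AudioComponentInstanceDispose(ioUnit);
    ioUnit = NULL;                         // avoid reusing the dangling handle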
    

    7. Pause recording

    AudioOutputUnitStop(ioUnit);
    

    8. Resume recording

    AudioOutputUnitStart(ioUnit);
    
