1. Headers:
#import <AVFoundation/AVFoundation.h>
Adopt the delegate protocol <AVCaptureAudioDataOutputSampleBufferDelegate>.
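A minimal sketch of the class declaration (the class name ViewController is an assumption; the original post does not show it):
// Sketch only: the class name is assumed, the protocol is the one named above.
@interface ViewController : UIViewController <AVCaptureAudioDataOutputSampleBufferDelegate>
@end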
2. Declare the properties:
@property (strong, nonatomic) AVCaptureSession *captureSession;                 // capture session that moves the data
@property (strong, nonatomic) AVCaptureDeviceInput *captureDeviceInput;         // microphone input
@property (strong, nonatomic) AVCaptureAudioDataOutput *captureAudioDataOutput; // audio data output
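The later steps also reference a few members that the post never declares; roughly, they would look like this (names taken from the code below, types assumed):
// Assumed declarations, inferred from the later steps (not shown in the original post):
@property (strong, nonatomic) UIView *showView;          // view whose size scales the waveform points (step 4)
@property (strong, nonatomic) NSMutableArray *pointArr;  // latest points produced in the delegate callback (step 4)
// Steps 5 and 6 also use two instance variables of the controller:
//   WaveView *sView;      (the custom waveform view, step 5; class name assumed)
//   NSTimer *reloadTime;  (the redraw timer, step 6)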
3. Set up the capture session:
- (void)initAudio {
    _captureSession = [[AVCaptureSession alloc] init];

    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    [audioSession setActive:YES error:nil];

    NSError *error = nil;
    AVCaptureDevice *audioDev = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    _captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDev error:&error];
    if ([_captureSession canAddInput:_captureDeviceInput]) {
        [_captureSession addInput:_captureDeviceInput];
    }

    _captureAudioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
    if ([_captureSession canAddOutput:_captureAudioDataOutput]) {
        [_captureSession addOutput:_captureAudioDataOutput];
        // Set the delegate; callbacks are delivered on a background serial queue
        dispatch_queue_t queue = dispatch_queue_create("myQueue", DISPATCH_QUEUE_SERIAL);
        [_captureAudioDataOutput setSampleBufferDelegate:self queue:queue];
    }

    [_captureSession startRunning]; // without this, no sample buffers are delivered
}
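Note (not mentioned in the original post, but required for the demo to run on iOS 10 and later): add an NSMicrophoneUsageDescription entry to the app's Info.plist, otherwise the process is terminated the first time the microphone is accessed.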
4. Handle the delegate callback:
// Called on the serial queue created in step 3 (a background thread), not on the main thread.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    NSMutableArray *dataArr = [[NSMutableArray alloc] init];
    AudioBufferList audioBufferList;
    CMBlockBufferRef blockBuffer;
    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, NULL, &audioBufferList, sizeof(audioBufferList), NULL, NULL, 0, &blockBuffer);
    // Convert the raw sample bytes into points that can be drawn
    for (int b = 0; b < audioBufferList.mNumberBuffers; b++) {
        AudioBuffer audioBuffer = audioBufferList.mBuffers[b];
        Byte *frame = (Byte *)audioBuffer.mData;
        int d = audioBuffer.mDataByteSize / 2;    // number of 16-bit samples in this buffer
        for (long i = 0; i < d; i++) {
            long x1 = frame[i * 2 + 1] << 8;      // high byte
            long x2 = frame[i * 2];               // low byte
            short int w = x1 | x2;                // reassemble the little-endian 16-bit sample
            float x = 10.0 + i * (self.showView.frame.size.width - 20) / d;
            float y = self.showView.frame.size.height / 2.0f + (self.showView.frame.size.height * 0.5) * (w > 32767.0 ? 32767.0 : w) / 32767.0;
            NSValue *pValue = [NSValue valueWithCGPoint:CGPointMake(x, y)];
            [dataArr addObject:pValue];
        }
    }
    CFRelease(blockBuffer);
    self.pointArr = [NSMutableArray arrayWithArray:dataArr];
}
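The byte-pair decoding above treats the buffer as little-endian 16-bit linear PCM. To guard against a different format, a small check could be placed at the top of the callback; this is a sketch, not part of the original demo:
// Optional sketch: bail out unless the stream really is 16-bit PCM.
CMFormatDescriptionRef fmt = CMSampleBufferGetFormatDescription(sampleBuffer);
const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(fmt);
if (asbd == NULL || asbd->mBitsPerChannel != 16) {
    return; // unexpected sample format; skip this buffer
}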
5. Draw the waveform (in a UIView subclass):
- (void)drawRect:(CGRect)rect {
    [super drawRect:rect];
    if (!self.mpointArr) {
        return;
    }
    CGContextRef context = UIGraphicsGetCurrentContext();
    [[UIColor blueColor] setStroke];
    CGContextSetLineWidth(context, 1.0);
    CGContextBeginPath(context);
    CGContextMoveToPoint(context, 0, self.frame.size.height / 2.0);
    for (int i = 0; i < [self.mpointArr count]; i++) {
        CGPoint point = [self.mpointArr[i] CGPointValue];
        CGContextAddLineToPoint(context, point.x, point.y);
    }
    CGContextAddLineToPoint(context, self.frame.size.width, self.frame.size.height / 2.0);
    CGContextStrokePath(context);
}
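The drawing code lives in a custom UIView subclass whose interface the original post does not show; judging from the properties used above and in step 6, it would be roughly:
// Assumed interface for the waveform view; the class name WaveView is hypothetical.
@interface WaveView : UIView
@property (strong, nonatomic) NSMutableArray *mpointArr; // points copied in by the timer in step 6
@end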
6. Add a timer that redraws every 0.05 s:
reloadTime = [NSTimer scheduledTimerWithTimeInterval:0.05 target:self selector:@selector(reloadState:) userInfo:nil repeats:YES];
[[NSRunLoop mainRunLoop] addTimer:reloadTime forMode:NSRunLoopCommonModes]; // keep firing during scrolling

- (void)reloadState:(NSTimer *)t {
    if (self.pointArr) {
        sView.mpointArr = [NSMutableArray arrayWithArray:self.pointArr];
        [sView setNeedsDisplay];
    }
}
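The original post does not show any teardown. A minimal sketch, assuming the controller owns captureSession and reloadTime as declared above:
// Sketch only: stop capture and the timer when leaving the screen.
- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    [self.captureSession stopRunning];
    [reloadTime invalidate];
    reloadTime = nil;
}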
It can be a bit laggy when running in debug mode; just run it normally and it works fine.
https://github.com/huasali/VoiceDemo