Be sure to import AVFoundation: #import <AVFoundation/AVFoundation.h>
The AVFoundation framework => know which classes it provides and what each one is for:
AVCaptureDevice : a capture device (camera, microphone)
AVCaptureInput : an input port
AVCaptureOutput : a device output
AVCaptureSession : manages the data flow from inputs to outputs
AVCaptureVideoPreviewLayer : a layer for displaying a live preview of the capture
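Before any of these classes will deliver frames, the app needs camera permission. A minimal sketch using the standard AVCaptureDevice authorization API (on iOS 10+ the Info.plist must also contain the NSCameraUsageDescription key):

// Ask for camera access before configuring the session
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    if (!granted)
    {
        NSLog(@"Camera access denied; the session will not produce frames");
    }
}];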
Basic usage
//1. Create the capture session; the session is also where the resolution is configured
AVCaptureSession *session = [[AVCaptureSession alloc] init];
// Set the resolution to 720p
session.sessionPreset = AVCaptureSessionPreset1280x720;
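// Defensive sketch (assumption: this check is mine, not from the original post):
// some devices may not support 720p, so verify the preset before relying on it.
if (![session canSetSessionPreset:AVCaptureSessionPreset1280x720])
{
    // Fall back to the highest-quality preset the device supports
    session.sessionPreset = AVCaptureSessionPresetHigh;
}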
//2. Set up the input and the output
//2.1 Get the capture device
// The device exposes many configurable properties, e.g. activeVideoMinFrameDuration,
// the minimum frame duration, which caps the capture frame rate (a sketch follows the loop below)
AVCaptureDevice *device;
// Note: [AVCaptureDevice devices] has been deprecated since iOS 10 in favor of
// AVCaptureDeviceDiscoverySession, but it still works for a quick demo.
for (AVCaptureDevice *deviceNow in [AVCaptureDevice devices])
{
    NSLog(@"---%@", deviceNow); // [Back Camera] [Front Camera] [iPhone Microphone]
    if (deviceNow.position == AVCaptureDevicePositionFront)
    {
        device = deviceNow;
    }
}
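// Frame-rate sketch (assumption: the 10 fps value is illustrative, not from the original post).
// activeVideoMinFrameDuration must be changed between lockForConfiguration / unlockForConfiguration;
// CMTimeMake(1, 10) sets a minimum frame duration of 1/10 s, i.e. at most 10 frames per second.
NSError *configError = nil;
if ([device lockForConfiguration:&configError])
{
    device.activeVideoMinFrameDuration = CMTimeMake(1, 10);
    [device unlockForConfiguration];
}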
//2.2 Create the device input object
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
//2.3 Add the input to the capture session
if (input != nil)
{
    if ([session canAddInput:input])
    {
        [session addInput:input];
    }
}
//3. Video output: choose the raw video data format (YUV or RGB)
// Apple renders RGB, not YUV, so YUV frames must eventually be converted: YUV => RGB
//3.1 Create the video data output
AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
//3.2 Set the raw video format via videoSettings: YUV full-range
videoOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
//3.3 Set the output delegate to capture every frame, on a serial queue
[videoOutput setSampleBufferDelegate:self queue:dispatch_queue_create("SERIAL", DISPATCH_QUEUE_SERIAL)];
//3.4 The connection between the capture device and the output (still nil here: the output has not been added to the session yet)
AVCaptureConnection *connection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
// NSLog(@"---1%@",connection);
//3.5 Add the output to the session
if ([session canAddOutput:videoOutput])
{
    [session addOutput:videoOutput];
}
//4. Start the session
// The connection between the input and output objects can only be obtained after both have been added to the session
[session startRunning];
connection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
// NSLog(@"---2%@",connection);
// Set up the preview layer
AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
previewLayer.frame = self.view.bounds;
[self.view.layer addSublayer:previewLayer];
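// Optional (assumption: this line is mine, not from the original post):
// control how the video fills the layer instead of the default aspect-fit behavior.
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;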
// Delegate callback
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate -
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Called once per captured frame, on the serial queue set above
    NSLog(@"Frame %@", (__bridge id)sampleBuffer);
}
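Inside this callback the raw YUV frame can be inspected. A minimal sketch using the standard CoreMedia/CoreVideo accessors (the logging is illustrative); this would go in the method body above:

// Get the pixel buffer that backs this frame
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the base address before reading pixel data, and unlock afterwards
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
// For kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, plane 0 is Y (luma), plane 1 is interleaved CbCr (chroma)
uint8_t *yPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
NSLog(@"Frame %zux%zu, Y plane at %p", width, height, yPlane);
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);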