[木木方文 Technical Sharing on Audio & Video, Part 2] Capturing the Camera Video Stream

Author: 丿沧海一粟丿 | Published 2017-07-28 09:12

Add the framework dependency:


#import <AVFoundation/AVFoundation.h>
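Besides linking AVFoundation, on iOS 10 and later the app also needs an NSCameraUsageDescription entry in Info.plist, otherwise it is terminated the first time it touches the camera. It is also good practice to request camera access up front; a minimal sketch (the completion handling below is just example code, not part of the original controller):

[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                         completionHandler:^(BOOL granted) {
    if (!granted) {
        NSLog(@"Camera access was denied by the user");
    }
}];

With the dependency in place, the full capture controller looks like this: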
//
//  LiveTelecastController.m
//  FFmpegDemo
//
//  Created by huoliquankai on 2017/7/20.
//  Copyright © 2017 火力全开. All rights reserved.
//

#import "LiveTelecastController.h"
#import <AVFoundation/AVFoundation.h>

@interface LiveTelecastController () <AVCaptureVideoDataOutputSampleBufferDelegate>
{
   
}
@property (nonatomic, strong)AVCaptureSession *session;
@property (nonatomic, strong)AVCaptureDeviceInput *videoInput;
@property (nonatomic, strong)AVCaptureVideoDataOutput *videoDataOutput;
@property (nonatomic, strong)UIView *cameraShowView;
@property (nonatomic, strong)AVCaptureVideoPreviewLayer *previewLayer;
@end

@implementation LiveTelecastController

- (instancetype)init
{
    self = [super init];
    if (self) {
        [self initialSession];
        [self initialCameraShowView];
    }
    return self;
}

- (void)initialSession {
    self.session = [[AVCaptureSession alloc] init];
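    // Note: the NSError out-parameter is ignored below; production code should
    // pass an NSError ** and handle a nil input (e.g. no camera, no permission).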
    self.videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backCamera] error:nil];
    self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange asks the camera to return
    // frames as bi-planar YUV 4:2:0 (YUV420SP), video-range data.
    NSDictionary *outputSettings = [NSDictionary dictionaryWithObject:
                                    [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
                                                             forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    [self.videoDataOutput setVideoSettings:outputSettings];
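    // Sample buffers are delivered on a dedicated serial queue (a NULL attribute
    // creates a serial queue), keeping the delegate callback off the main thread.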
    dispatch_queue_t queue = dispatch_queue_create("linlinqi", NULL);
    [self.videoDataOutput setSampleBufferDelegate:self queue:queue];
    if ([self.session canAddInput:self.videoInput]) {
        [self.session addInput:self.videoInput];
    }
    if ([self.session canAddOutput:self.videoDataOutput]) {
        [self.session addOutput:self.videoDataOutput];
    } else {
        NSLog(@"failed get output");
    }
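    // (Added sketch, not in the original post) The capture resolution is governed
    // by the session preset; the code above relies on the default. To pin it
    // explicitly, something like the following could be added; 720p is only an
    // example choice.
    if ([self.session canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
        self.session.sessionPreset = AVCaptureSessionPreset1280x720;
    }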
}

- (void)initialCameraShowView {
    self.cameraShowView = [[UIView alloc] initWithFrame:self.view.frame];
    [self.view addSubview:self.cameraShowView];
}

- (AVCaptureDevice *)backCamera {
    return [self cameraWithPosition:AVCaptureDevicePositionBack];
}

- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if (device.position == position) {
            return device;
        }
    }
    return nil;
}
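
// (Added sketch, not in the original post) -devicesWithMediaType: is deprecated
// as of iOS 10; on newer SDKs the same lookup can be done with
// AVCaptureDeviceDiscoverySession. The method name here is hypothetical.
- (AVCaptureDevice *)discoveredCameraWithPosition:(AVCaptureDevicePosition)position {
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                               mediaType:AVMediaTypeVideo
                                                                position:position];
    return discovery.devices.firstObject;
}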

- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    [self setUpCameraLayer];
}
// Start the camera capture session once the view is on screen
- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
    if (self.session) {
        [self.session startRunning];
    }
}
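
// (Added sketch, not in the original post) Stop capturing when the view goes
// away, so the camera is released while the controller is off screen.
- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    if (self.session.isRunning) {
        [self.session stopRunning];
    }
}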

- (void)setUpCameraLayer {
    if (self.previewLayer == nil) {
        self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
        UIView *view = self.cameraShowView;
        CALayer *viewLayer = [view layer];
        [viewLayer setMasksToBounds:YES];
        CGRect bounds = [view bounds];
        [self.previewLayer setFrame:bounds];
        [self.previewLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
        [viewLayer addSublayer:self.previewLayer];
    }
}

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    NSLog(@"get data success");
}

@end

The format of the data we get back (a dump of one CMSampleBuffer):
CMSampleBuffer 0x15be22d20 retainCount: 1 allocator: 0x1b8606bb8
invalid = NO
dataReady = YES
makeDataReadyCallback = 0x0
makeDataReadyRefcon = 0x0
buffer-level attachments:
Orientation(P) = 1
{Exif}    (P) = <CFBasicHash 0x17427bc00 [0x1b8606bb8]>{type = mutable dict, count = 24,
entries =>
0 : <CFString 0x17404bcd0 [0x1b8606bb8]>{contents = "DateTimeOriginal"} = <CFString 0x17404baf0 [0x1b8606bb8]>{contents = "2017:07:20 16:47:20"}
1 : <CFString 0x1b278f930 [0x1b8606bb8]>{contents = "MeteringMode"} = <CFNumber 0xb000000000000052 [0x1b8606bb8]>{value = +5, type = kCFNumberSInt32Type}
2 : <CFString 0x1b278fe70 [0x1b8606bb8]>{contents = "LensMake"} = Apple
3 : <CFString 0x1b287c480 [0x1b8606bb8]>{contents = "BrightnessValue"} = <CFNumber 0x174035260 [0x1b8606bb8]>{value = +3.32548037588890554872, type = kCFNumberFloat64Type}
6 : <CFString 0x1b278f670 [0x1b8606bb8]>{contents = "FNumber"} = <CFNumber 0x174037e20 [0x1b8606bb8]>{value = +2.20000000000000017764, type = kCFNumberFloat64Type}
7 : <CFString 0x1b278f990 [0x1b8606bb8]>{contents = "FocalLength"} = <CFNumber 0x174037dc0 [0x1b8606bb8]>{value = +4.15000000000000035527, type = kCFNumberFloat64Type}
8 : <CFString 0x1b278f870 [0x1b8606bb8]>{contents = "ShutterSpeedValue"} = <CFNumber 0x174035d00 [0x1b8606bb8]>{value = +6.64385618977472436342, type = kCFNumberFloat64Type}
9 : <CFString 0x1b278fc30 [0x1b8606bb8]>{contents = "SceneType"} = <CFNumber 0xb000000000000012 [0x1b8606bb8]>{value = +1, type = kCFNumberSInt32Type}
10 : <CFString 0x1b278f890 [0x1b8606bb8]>{contents = "ApertureValue"} = <CFNumber 0x174036b20 [0x1b8606bb8]>{value = +2.27500704749987026076, type = kCFNumberFloat64Type}
13 : <CFString 0x1b278fa90 [0x1b8606bb8]>{contents = "ColorSpace"} = <CFNumber 0xb000000000000012 [0x1b8606bb8]>{value = +1, type = kCFNumberSInt32Type}
17 : <CFString 0x1b278fe50 [0x1b8606bb8]>{contents = "LensSpecification"} = (
    "4.15",
    "4.15",
    "2.2",
    "2.2"
)
18 : <CFString 0x1b278fad0 [0x1b8606bb8]>{contents = "PixelYDimension"} = <CFNumber 0xb000000000004383 [0x1b8606bb8]>{value = +1080, type = kCFNumberSInt64Type}
19 : <CFString 0x1b278fcb0 [0x1b8606bb8]>{contents = "WhiteBalance"} = <CFNumber 0xb000000000000002 [0x1b8606bb8]>{value = +0, type = kCFNumberSInt32Type}
23 : <CFString 0x17404e910 [0x1b8606bb8]>{contents = "DateTimeDigitized"} = <CFString 0x17404bca0 [0x1b8606bb8]>{contents = "2017:07:20 16:47:20"}
28 : <CFString 0x1b278f6d0 [0x1b8606bb8]>{contents = "ISOSpeedRatings"} = (
    32
)
31 : <CFString 0x1b278fab0 [0x1b8606bb8]>{contents = "PixelXDimension"} = <CFNumber 0xb000000000007803 [0x1b8606bb8]>{value = +1920, type = kCFNumberSInt64Type}
32 : <CFString 0x1b278fe90 [0x1b8606bb8]>{contents = "LensModel"} = <CFString 0x17427ac80 [0x1b8606bb8]>{contents = "iPhone 6 back camera 4.15mm f/2.2"}
34 : <CFString 0x17404bdc0 [0x1b8606bb8]>{contents = "SubsecTimeOriginal"} = 408
35 : <CFString 0x1b287a4c0 [0x1b8606bb8]>{contents = "ExposureTime"} = <CFNumber 0x174038060 [0x1b8606bb8]>{value = +0.01000000000000000021, type = kCFNumberFloat64Type}
36 : <CFString 0x17404bb20 [0x1b8606bb8]>{contents = "SubsecTimeDigitized"} = 408
37 : <CFString 0x1b278fcf0 [0x1b8606bb8]>{contents = "FocalLenIn35mmFilm"} = <CFNumber 0xb000000000000202 [0x1b8606bb8]>{value = +32, type = kCFNumberSInt32Type}
38 : <CFString 0x1b278f970 [0x1b8606bb8]>{contents = "Flash"} = <CFNumber 0xb000000000000002 [0x1b8606bb8]>{value = +0, type = kCFNumberSInt32Type}
39 : <CFString 0x1b278fbf0 [0x1b8606bb8]>{contents = "SensingMethod"} = <CFNumber 0xb000000000000022 [0x1b8606bb8]>{value = +2, type = kCFNumberSInt32Type}
40 : <CFString 0x1b278f8d0 [0x1b8606bb8]>{contents = "ExposureBiasValue"} = <CFNumber 0xb000000000000005 [0x1b8606bb8]>{value = +0.0, type = kCFNumberFloat64Type}
}

DPIWidth  (P) = 72
{TIFF}    (P) = <CFBasicHash 0x17427a940 [0x1b8606bb8]>{type = mutable dict, count = 7,
entries =>
1 : <CFString 0x1b278f490 [0x1b8606bb8]>{contents = "ResolutionUnit"} = <CFNumber 0xb000000000000022 [0x1b8606bb8]>{value = +2, type = kCFNumberSInt32Type}
2 : Software = 10.3.2
5 : DateTime = <CFString 0x17404ed00 [0x1b8606bb8]>{contents = "2017:07:20 16:47:20"}
6 : <CFString 0x1b278f450 [0x1b8606bb8]>{contents = "XResolution"} = <CFNumber 0xb000000000000482 [0x1b8606bb8]>{value = +72, type = kCFNumberSInt32Type}
9 : Model = iPhone 6
10 : <CFString 0x1b278f470 [0x1b8606bb8]>{contents = "YResolution"} = <CFNumber 0xb000000000000482 [0x1b8606bb8]>{value = +72, type = kCFNumberSInt32Type}
11 : Make = Apple
}

DPIHeight (P) = 72
{MakerApple}(P) = {
    1 = 3;
    10 = 0;
    14 = 0;
    3 =     {
        epoch = 0;
        flags = 1;
        timescale = 1000000000;
        value = 54041253161083;
    };
    4 = 0;
    5 = 193;
    6 = 25;
    7 = 1;
    8 =     (
        "-0.01732243",
        "-0.7562239",
        "-0.6610191"
    );
    9 = 0;
}
formatDescription = <CMVideoFormatDescription 0x17404b640 [0x1b8606bb8]> {
mediaType:'vide'
mediaSubType:'BGRA'
mediaSpecific: {
codecType: 'BGRA'dimensions: 1920 x 1080
}
extensions: {<CFBasicHash 0x1744603c0 [0x1b8606bb8]>{type = immutable dict, count = 6,
entries =>
0 : <CFString 0x1b283b7c8 [0x1b8606bb8]>{contents = "CVImageBufferYCbCrMatrix"} = <CFString 0x1b283b808 [0x1b8606bb8]>{contents = "ITU_R_601_4"}
1 : <CFString 0x1b283b928 [0x1b8606bb8]>{contents = "CVImageBufferTransferFunction"} = <CFString 0x1b283b7e8 [0x1b8606bb8]>{contents = "ITU_R_709_2"}
2 : <CFString 0x1b286e8a0 [0x1b8606bb8]>{contents = "CVBytesPerRow"} = <CFNumber 0xb00000000001e002 [0x1b8606bb8]>{value = +7680, type = kCFNumberSInt32Type}
3 : <CFString 0x1b286e920 [0x1b8606bb8]>{contents = "Version"} = <CFNumber 0xb000000000000022 [0x1b8606bb8]>{value = +2, type = kCFNumberSInt32Type}
5 : <CFString 0x1b283b8a8 [0x1b8606bb8]>{contents = "CVImageBufferColorPrimaries"} = <CFString 0x1b283b7e8 [0x1b8606bb8]>{contents = "ITU_R_709_2"}
6 : <CFString 0x1b283b9a8 [0x1b8606bb8]>{contents = "CVImageBufferChromaLocationTopField"} = <CFString 0x1b283ba08 [0x1b8606bb8]>{contents = "Center"}
}
}
}
sbufToTrackReadiness = 0x0
numSamples = 1
sampleTimingArray[1] = {
{PTS = {54041253161083/1000000000 = 54041.253}, DTS = {INVALID}, duration = {INVALID}},
}
imageBuffer = 0x174124ce0
