美文网首页
iOS 获取视频源数据

iOS 获取视频源数据

作者: 索性流年 | 来源:发表于2019-01-07 14:23 被阅读0次

使用系统框架AVFoundation下的:


<AVCaptureVideoDataOutputSampleBufferDelegate>

进行视频源数据捕捉

创建 LyPhotoCollection,继承自 UIView

.h

#import <AVFoundation/AVFoundation.h>
#import <Photos/Photos.h>
/// Callback invoked with every captured video frame (see the
/// AVCaptureVideoDataOutputSampleBufferDelegate method in the .m).
/// NOTE(review): "photoComesBlack" looks like a typo for "photoComesBlock";
/// renaming would break existing callers, so the name is kept.
@property(nonatomic,copy)void(^photoComesBlack)(CMSampleBufferRef buffer);
// Toggle the stored flash-mode flag between on and off.
-(void)lightAction;
// Stop the capture session (asynchronously, on a background queue).
-(void)stopRunning;
// Start the capture session (asynchronously, on a background queue).
-(void)startRunning;
/**
 Switch between the front and back camera.
 @param camera @"前置" selects the front camera, @"后置" the back camera.
*/
- (void)cameraPosition:(NSString *)camera;
// Zoom the preview in/out.
// NOTE(review): implementation is not visible in this excerpt — TODO confirm.
- (void)viewScaleChnageValue:(CGFloat )sender;

.m

@interface LyPhotoCollection () <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, strong) AVCaptureSession *captureSession;         // capture session
@property (nonatomic, strong) AVCaptureDevice *captureDevice;           // camera device
@property (nonatomic, strong) AVCaptureDeviceInput *captureDeviceInput; // input wrapping the device
// Layer rendering the live camera preview.
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@property (nonatomic, assign) AVCaptureFlashMode mode;                  // flash mode flag toggled by -lightAction
@property (nonatomic, assign) AVCaptureDevicePosition position;         // current camera position (front/back)
// FIX: was `(nonatomic, assign)` — an Objective-C object property must be
// `strong` under ARC; with `assign` the output is released immediately and
// the pointer dangles.
@property (nonatomic, strong) AVCaptureVideoDataOutput *output;         // video-frame output
@property (nonatomic, strong) dispatch_queue_t captureQueue;            // serial queue for sample-buffer callbacks
@end

/// Designated initializer: installs the preview layer and follows
/// interface-orientation changes so the preview stays upright.
- (instancetype)initWithFrame:(CGRect)frame
{
  self = [super initWithFrame:frame];
  if (self) {
    [self.layer insertSublayer:self.previewLayer atIndex:0];
    // FIX: the original declared weakSelf but then called
    // [self getCaptureVideoOrientation] inside the block — capturing self
    // strongly. The notification center retains the block until the observer
    // is removed, so that strong capture leaks this view. Use weakSelf
    // consistently.
    __weak typeof(self) weakSelf = self;
    [[NSNotificationCenter defaultCenter] addObserverForName:UIApplicationDidChangeStatusBarOrientationNotification
                                                      object:nil
                                                       queue:[NSOperationQueue mainQueue]
                                                  usingBlock:^(NSNotification * _Nonnull note) {
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (!strongSelf) return;
        strongSelf.previewLayer.connection.videoOrientation = [strongSelf getCaptureVideoOrientation];
    }];
    // NOTE(review): the observer token returned by addObserverForName: should
    // be stored and removed (e.g. in dealloc) — TODO confirm teardown path.
  }
  return self;
}

#pragma mark - Layout
- (void)layoutSubviews {
  [super layoutSubviews];
  // Keep the preview layer covering the whole view; then (re)start capture.
  // NOTE(review): this fires on every layout pass — AVCaptureSession ignores
  // startRunning when already running, but confirm this is intentional.
  self.previewLayer.frame = self.bounds;
  [self startRunning];
}


#pragma mark - Start capture
- (void)startRunning {
  // -[AVCaptureSession startRunning] blocks until capture is up, so hop off
  // the main thread.
  dispatch_queue_t workQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
  dispatch_async(workQueue, ^{
    [self.captureSession startRunning];
  });
}


#pragma mark - Stop capture
- (void)stopRunning {
  // Tearing the session down can also block; do it on a background queue.
  dispatch_queue_t workQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
  dispatch_async(workQueue, ^{
    [self.captureSession stopRunning];
  });
}


#pragma mark - Adjust focus
/// Locks the camera lens at the given position.
/// @param focus Lens position in [0.0, 1.0] (0 = closest focus).
- (void)cameraBackgroundDidChangeFocus:(CGFloat)focus {
    AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
    NSError *error = nil;
    if ([captureDevice lockForConfiguration:&error]) {
        // FIX: setFocusModeLockedWithLensPosition: puts the device into the
        // *locked* focus mode, so that is the mode whose support must be
        // checked — the original checked ContinuousAutoFocus instead.
        if ([captureDevice isFocusModeSupported:AVCaptureFocusModeLocked]) {
            [captureDevice setFocusModeLockedWithLensPosition:focus completionHandler:nil];
        }
        // FIX: the original never called unlockForConfiguration, leaving the
        // device configuration lock held and blocking all later changes.
        [captureDevice unlockForConfiguration];
    } else {
        // Surface the failure instead of silently ignoring it.
        NSLog(@"LyPhotoCollection: lockForConfiguration failed: %@", error);
    }
}


#pragma mark - Flash toggle
- (void)lightAction {
    // Flip the stored flash-mode flag between on and off.
    // NOTE(review): only the `mode` property changes here; nothing visible in
    // this file applies it to the device — TODO confirm where it is consumed.
    self.mode = (self.mode == AVCaptureFlashModeOn) ? AVCaptureFlashModeOff
                                                    : AVCaptureFlashModeOn;
}


#pragma mark - Switch front/back camera
/// Switches the session's input to the requested camera.
/// @param camera @"前置" selects the front camera, @"后置" the back camera.
- (void)cameraPosition:(NSString *)camera {
  // FIX: the back-camera branch read `f (...)` — missing `i` in `if` — which
  // does not compile.
  if ([camera isEqualToString:@"前置"]) {
      if (self.captureDevice.position != AVCaptureDevicePositionFront) {
          self.position = AVCaptureDevicePositionFront;
      }
  } else if ([camera isEqualToString:@"后置"]) {
      if (self.captureDevice.position != AVCaptureDevicePositionBack) {
          self.position = AVCaptureDevicePositionBack;
      }
  }

  AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera
                                                                mediaType:AVMediaTypeVideo
                                                                 position:self.position];
  if (!device) {
      return;
  }
  self.captureDevice = device;

  // FIX: surface input-creation failures instead of passing error:nil.
  NSError *error = nil;
  AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:&error];
  if (!input) {
      NSLog(@"LyPhotoCollection: failed to create device input: %@", error);
      return;
  }

  AVCaptureDeviceInput *previousInput = self.captureDeviceInput;
  [self.captureSession beginConfiguration];
  [self.captureSession removeInput:previousInput];
  if ([self.captureSession canAddInput:input]) {
      [self.captureSession addInput:input];
      self.captureDeviceInput = input;
  } else {
      // FIX: the original left the session with no input at all when the new
      // input could not be added; restore the previous input instead.
      [self.captureSession addInput:previousInput];
  }
  // FIX: commitConfiguration must balance beginConfiguration on every path —
  // the original only committed inside the success branch.
  [self.captureSession commitConfiguration];
}


#pragma mark - <AVCaptureVideoDataOutputSampleBufferDelegate>
// Forwards each captured frame to the registered callback, if any.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
  if (!self.photoComesBlack) {
      return;
  }
  self.photoComesBlack(sampleBuffer);
}


#pragma mark - Capture session (lazy)
// Builds the session on first access, wiring in the camera input and the
// video-data output.
- (AVCaptureSession *)captureSession {
  if (_captureSession) {
      return _captureSession;
  }
  AVCaptureSession *session = [[AVCaptureSession alloc] init];
  session.sessionPreset = AVCaptureSessionPreset1280x720; // 720p quality
  _captureSession = session;
  // Attach the camera input.
  if ([session canAddInput:self.captureDeviceInput]) {
      [session addInput:self.captureDeviceInput];
  }
  // Attach the sample-buffer output.
  if ([session canAddOutput:self.output]) {
      [session addOutput:self.output];
  }
  return _captureSession;
}

#pragma mark - Camera device (lazy)
// Defaults to the front-facing wide-angle camera.
- (AVCaptureDevice *)captureDevice {
  if (_captureDevice == nil) {
      _captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera
                                                          mediaType:AVMediaTypeVideo
                                                           position:AVCaptureDevicePositionFront];
  }
  return _captureDevice;
}

#pragma mark - Device input (lazy)
// Wraps the camera device in an AVCaptureDeviceInput for the session.
- (AVCaptureDeviceInput *)captureDeviceInput {
  if (!_captureDeviceInput) {
      // FIX: the original passed error:nil, silently swallowing failures
      // (e.g. camera permission denied). Capture and report the error.
      NSError *error = nil;
      _captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:&error];
      if (!_captureDeviceInput) {
          NSLog(@"LyPhotoCollection: failed to create device input: %@", error);
      }
  }
  return _captureDeviceInput;
}

#pragma mark - Preview layer (lazy)
// Layer displaying the live camera feed, scaled to fill the view.
- (AVCaptureVideoPreviewLayer *)previewLayer {
  if (_previewLayer == nil) {
      AVCaptureVideoPreviewLayer *layer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
      layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
      _previewLayer = layer;
  }
  return _previewLayer;
}

#pragma mark - Video data output (lazy)
// Delivers NV12 (420f video-range) sample buffers to this object on a
// private serial queue.
- (AVCaptureVideoDataOutput *)output {
    if (!_output) {
        _output = [[AVCaptureVideoDataOutput alloc] init];
        // Serial queue so frames are delivered in capture order.
        self.captureQueue = dispatch_queue_create("uk.co.gdcl.avencoder.capture", DISPATCH_QUEUE_SERIAL);
        [_output setSampleBufferDelegate:self queue:self.captureQueue];
        // FIX(idiom): modern dictionary/number literals instead of
        // dictionaryWithObjectsAndKeys:; the CF key needs an (id) cast
        // inside an @{} literal.
        _output.videoSettings = @{
            (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
        };
    }
    return _output;
}


#pragma mark - Camera position (lazy default)
// Returns the selected camera position, defaulting to the front camera
// (matching -captureDevice) when unset.
- (AVCaptureDevicePosition)position {
    // FIX: the original read `f (!_position)` — missing `i` in `if` — which
    // does not compile. Compare explicitly against Unspecified (== 0) for
    // clarity instead of relying on `!`.
    if (_position == AVCaptureDevicePositionUnspecified) {
        _position = AVCaptureDevicePositionFront;
    }
    return _position;
}

相关文章

网友评论

      本文标题:iOS 获取视频源数据

      本文链接:https://www.haomeiwen.com/subject/nhdurqtx.html