//
// getLiangduViewController.m
// ocQRCode
//
// Created by 雷王 on 2019/11/6.
// Copyright © 2019 WL. All rights reserved.
//
import "getLiangduViewController.h"
import <AVFoundation/AVFoundation.h>
import <ImageIO/ImageIO.h>
import <UIKit/UIKit.h>
@interface getLiangduViewController ()<AVCaptureMetadataOutputObjectsDelegate,AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic,strong) AVCaptureDevice *device;
@property(nonatomic,strong) AVCaptureDeviceInput *input;
@property(nonatomic,strong) AVCaptureMetadataOutput *output;
@property(nonatomic,strong) AVCaptureSession *session;
@property(nonatomic,strong) AVCaptureVideoPreviewLayer *previewLayer;
@property(nonatomic,strong) AVCaptureStillImageOutput *stillImageOutput;
@property(nonatomic,strong) AVCaptureVideoDataOutput *videoOutput;
@property(nonatomic,strong) NSMutableArray *arrayResult;
@property(nonatomic) BOOL isNeedCaptureImage;
@property(nonatomic) BOOL isNeedScanResult;
@end
@implementation getLiangduViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.
    [self create];
}
- (void)create {
    self.session = [[AVCaptureSession alloc] init];
    self.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    self.input = [[AVCaptureDeviceInput alloc] initWithDevice:self.device error:nil];
    self.output = [[AVCaptureMetadataOutput alloc] init];
    self.isNeedCaptureImage = YES;
    self.isNeedScanResult = YES;
    self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    // The brightness value is obtained mainly through this output's delegate callback.
    self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [self.videoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
    if ([self.session canAddInput:self.input]) {
        [self.session addInput:self.input];
    }
    if ([self.session canAddOutput:self.output]) {
        [self.session addOutput:self.output];
    }
    if ([self.session canAddOutput:self.videoOutput]) {
        [self.session addOutput:self.videoOutput];
    }
    if ([self.session canAddOutput:self.stillImageOutput]) {
        [self.session addOutput:self.stillImageOutput];
    }
    // AVVideoCodecKey is the dictionary key, AVVideoCodecJPEG the value.
    NSDictionary *outputSettings = @{AVVideoCodecKey: AVVideoCodecJPEG};
    self.stillImageOutput.outputSettings = outputSettings;
    self.session.sessionPreset = AVCaptureSessionPresetHigh;
    [self.output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    NSArray *objType = @[AVMetadataObjectTypeQRCode, AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeCode128Code];
    self.output.metadataObjectTypes = objType;
    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    CGRect frame = self.view.frame;
    frame.origin = CGPointZero;
    self.previewLayer.frame = frame;
    [self.view.layer insertSublayer:self.previewLayer atIndex:0];
    if ([self.device isFocusPointOfInterestSupported] && [self.device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
        [self.input.device lockForConfiguration:nil];
        self.input.device.focusMode = AVCaptureFocusModeAutoFocus;
        [self.input.device unlockForConfiguration];
    }
    [self start];
}
- (void)start {
    if (!self.session.isRunning) {
        self.isNeedScanResult = YES;
        [self.session startRunning];
    }
}
- (void)stop {
    if (self.session.isRunning) {
        self.isNeedScanResult = NO;
        [self.session stopRunning];
    }
}
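// NOTE: a minimal sketch, not part of the original listing. The class conforms to
// AVCaptureMetadataOutputObjectsDelegate and registers itself on self.output, so a
// callback along these lines is assumed to handle scan results; the exact use of
// arrayResult and isNeedScanResult is an assumption based on the declared properties.
- (void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    if (!self.isNeedScanResult) {
        return; // scanning has been stopped
    }
    if (!self.arrayResult) {
        self.arrayResult = [NSMutableArray array];
    }
    for (AVMetadataObject *obj in metadataObjects) {
        if ([obj isKindOfClass:[AVMetadataMachineReadableCodeObject class]]) {
            NSString *stringValue = [(AVMetadataMachineReadableCodeObject *)obj stringValue];
            if (stringValue) {
                [self.arrayResult addObject:stringValue];
                NSLog(@"Scan result: %@", stringValue);
            }
        }
    }
}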
// Sample-buffer callback: read the EXIF brightness value from each video frame.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    CFDictionaryRef metadataDict = CMCopyDictionaryOfAttachments(NULL, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
    NSDictionary *metadata = [[NSMutableDictionary alloc] initWithDictionary:(__bridge NSDictionary *)metadataDict];
    if (metadataDict) {
        CFRelease(metadataDict);
    }
    NSDictionary *exifMetadata = [[metadata objectForKey:(NSString *)kCGImagePropertyExifDictionary] mutableCopy];
    float brightnessValue = [[exifMetadata objectForKey:(NSString *)kCGImagePropertyExifBrightnessValue] floatValue];
    NSLog(@"Current brightness: %f", brightnessValue);
}
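// NOTE: a minimal sketch, not part of the original listing. A typical use of the EXIF
// brightness value in a scanner is to toggle the torch in dark environments; the helper
// name and the threshold of 0 below are assumptions. It could be called from the
// sample-buffer callback above once brightnessValue has been read.
- (void)updateTorchWithBrightness:(float)brightnessValue {
    if (![self.device hasTorch]) {
        return; // no torch on this device (e.g. the Simulator or a front camera)
    }
    NSError *error = nil;
    if (![self.device lockForConfiguration:&error]) {
        NSLog(@"Failed to lock device for torch configuration: %@", error);
        return;
    }
    if (brightnessValue < 0 && self.device.torchMode != AVCaptureTorchModeOn) {
        self.device.torchMode = AVCaptureTorchModeOn;
    } else if (brightnessValue >= 0 && self.device.torchMode != AVCaptureTorchModeOff) {
        self.device.torchMode = AVCaptureTorchModeOff;
    }
    [self.device unlockForConfiguration];
}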
@end