Without further ado, here is the code:
#import "LightSensitiveViewController.h"
@import AVFoundation;
#import <ImageIO/ImageIO.h>
@interface LightSensitiveViewController ()< AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, strong) AVCaptureSession *session;
@end
@implementation LightSensitiveViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view.
self.view.backgroundColor = [UIColor whiteColor];
self.navigationItem.title = @"光感";
[self lightSensitive];
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
#pragma mark- 光感
- (void)lightSensitive {
// 1.获取硬件设备
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
// 2.创建输入流
AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc]initWithDevice:device error:nil];
// 3.创建设备输出流
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
[output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
// AVCaptureSession属性
self.session = [[AVCaptureSession alloc]init];
// 设置为高质量采集率
[self.session setSessionPreset:AVCaptureSessionPresetHigh];
// 添加会话输入和输出
if ([self.session canAddInput:input]) {
[self.session addInput:input];
}
if ([self.session canAddOutput:output]) {
[self.session addOutput:output];
}
// 9.启动会话
[self.session startRunning];
}
#pragma mark- AVCaptureVideoDataOutputSampleBufferDelegate的方法
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
CFDictionaryRef metadataDict = CMCopyDictionaryOfAttachments(NULL,sampleBuffer, kCMAttachmentMode_ShouldPropagate);
NSDictionary *metadata = [[NSMutableDictionary alloc] initWithDictionary:(__bridge NSDictionary*)metadataDict];
CFRelease(metadataDict);
NSDictionary *exifMetadata = [[metadata objectForKey:(NSString *)kCGImagePropertyExifDictionary] mutableCopy];
float brightnessValue = [[exifMetadata objectForKey:(NSString *)kCGImagePropertyExifBrightnessValue] floatValue];
NSLog(@"%f",brightnessValue);
// 根据brightnessValue的值来打开和关闭闪光灯
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
BOOL result = [device hasTorch];// 判断设备是否有闪光灯
if ((brightnessValue < 0) && result) {// 打开闪光灯
[device lockForConfiguration:nil];
[device setTorchMode: AVCaptureTorchModeOn];//开
[device unlockForConfiguration];
}else if((brightnessValue > 0) && result) {// 关闭闪光灯
[device lockForConfiguration:nil];
[device setTorchMode: AVCaptureTorchModeOff];//关
[device unlockForConfiguration];
}
}
@end
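One thing the listing never does is stop the session. In a real view controller you would likely shut it down, and make sure the torch is off, when the screen goes away. A minimal sketch, assuming the same session property as above:

- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    // Stop delivering sample buffers once the screen is gone
    [self.session stopRunning];
    // Make sure the torch is not left burning
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if ([device hasTorch] && [device lockForConfiguration:nil]) {
        [device setTorchMode:AVCaptureTorchModeOff];
        [device unlockForConfiguration];
    }
}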
Notes:
- First import the AVFoundation framework and the ImageIO/ImageIO.h header.
- Conform to the AVCaptureVideoDataOutputSampleBufferDelegate protocol.
- Declare the AVCaptureSession object as a property, so that something holds a strong reference to it for as long as capture runs. If you instead create and initialize the AVCaptureSession as a local variable inside the lightSensitive method, the object is released prematurely and [self.session startRunning]; has no effect.
- Implement the AVCaptureVideoDataOutputSampleBufferDelegate callback. Its brightnessValue is the ambient brightness reading, roughly in the range -5 to 12; the larger the value, the brighter the surroundings (see the normalization sketch after this list).
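If you want the reading as something smoother than the hard on/off threshold used in the listing, one way is to map it onto 0–1. A minimal sketch: the -5 and 12 bounds are only the approximate range quoted in the last note, not API constants, and LSNormalizedBrightness is a hypothetical helper name:

static float LSNormalizedBrightness(float brightnessValue) {
    // Approximate observed range of kCGImagePropertyExifBrightnessValue
    // (an assumption based on the note above, not an API guarantee)
    const float kMinBrightness = -5.0f;
    const float kMaxBrightness = 12.0f;
    // Clamp to the assumed range, then rescale to 0..1
    float clamped = MAX(kMinBrightness, MIN(kMaxBrightness, brightnessValue));
    return (clamped - kMinBrightness) / (kMaxBrightness - kMinBrightness);
}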
Reference
iOS开发 读取环境光亮度 ("Reading ambient light levels in iOS development")
Reader comments
NSDictionary *metadata = [[NSMutableDictionary alloc] initWithDictionary:(__bridge NSDictionary*)metadataDict];
CFRelease(metadataDict);
NSDictionary *exifMetadata = [[metadata objectForKey:(NSString *)kCGImagePropertyExifDictionary] mutableCopy];
float brightnessValue = [[exifMetadata objectForKey:(NSString *)kCGImagePropertyExifBrightnessValue] floatValue];
Could you explain the logic of this code?
NSDictionary *metadata = [[NSMutableDictionary alloc] initWithDictionary:(__bridge NSDictionary*)metadataDict]; --> Converts the CFDictionaryRef into an NSDictionary, which is easier to work with and read values from.
CFRelease(metadataDict); --> Releases the Core Foundation reference to prevent a memory leak (CMCopyDictionaryOfAttachments follows the Create/Copy rule, so the caller owns the returned dictionary).
NSDictionary *exifMetadata = [[metadata objectForKey:(NSString *)kCGImagePropertyExifDictionary] mutableCopy];
float brightnessValue = [[exifMetadata objectForKey:(NSString *)kCGImagePropertyExifBrightnessValue] floatValue];
--> These two lines need little explanation: they just fetch values by key. You can log the metadata dictionary to see which keys are available, or use the constants the SDK defines (kCGImagePropertyExifDictionary, for example, actually corresponds to the {Exif} key).
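As the reply suggests, the quickest way to see which keys exist is simply to log the dictionaries from inside the captureOutput:didOutputSampleBuffer:fromConnection: method shown above, for example:

// Dump the full attachments dictionary; the EXIF entries show up
// under the {Exif} key (kCGImagePropertyExifDictionary)
NSLog(@"attachments: %@", metadata);
NSLog(@"EXIF keys: %@", [exifMetadata allKeys]);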