iOS Development: A Custom Camera (WeChat-Style)

Author: Kegem | Published 2017-03-17 13:40

    There are plenty of custom-camera examples online; this is just a small demo I put together quickly, for reference only.
    It uses the following frameworks:
    #import <AVFoundation/AVFoundation.h>
    #import <AssetsLibrary/AssetsLibrary.h>

    Before using it, the relevant privacy permissions have to be added to Info.plist (a sketch of the raw entries follows the list):
    Privacy - Microphone Usage Description
    Privacy - Photo Library Usage Description
    Privacy - Camera Usage Description
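
    For reference, those display names correspond to the following keys in the Info.plist source. The description strings below are placeholders of my own, so word them for your own app:

    <key>NSMicrophoneUsageDescription</key>
    <string>The microphone is used to record sound for videos.</string>
    <key>NSPhotoLibraryUsageDescription</key>
    <string>The photo library is used to save the photos and videos you take.</string>
    <key>NSCameraUsageDescription</key>
    <string>The camera is used to take photos and record videos.</string>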

    I modeled this demo on WeChat: tap the button to take a photo, press and hold to record a video, and the video plays back immediately after recording finishes. For playback I wrapped a simple player:

    HAVPlayer.m:

    #import "HAVPlayer.h"
    #import <AVFoundation/AVFoundation.h>
    
    @interface HAVPlayer ()
    
    @property (nonatomic,strong) AVPlayer *player;//the player object
    
    @end
    
    @implementation HAVPlayer
    
    /*
    // Only override drawRect: if you perform custom drawing.
    // An empty implementation adversely affects performance during animation.
    - (void)drawRect:(CGRect)rect {
        // Drawing code
    }
    */
    
    - (instancetype)initWithFrame:(CGRect)frame withShowInView:(UIView *)bgView url:(NSURL *)url {
        if (self = [self initWithFrame:frame]) {
            //create the player layer
            AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
            playerLayer.frame = self.bounds;
            
            [self.layer addSublayer:playerLayer];
            if (url) {
                self.videoUrl = url;
            }
            
            [bgView addSubview:self];
        }
        return self;
    }
    
    - (void)dealloc {
        [self removeAvPlayerNtf];
        [self stopPlayer];
        self.player = nil;
    }
    
    - (AVPlayer *)player {
        if (!_player) {
            _player = [AVPlayer playerWithPlayerItem:[self getAVPlayerItem]];
            [self addAVPlayerNtf:_player.currentItem];
            
        }
        
        return _player;
    }
    
    - (AVPlayerItem *)getAVPlayerItem {
        AVPlayerItem *playerItem=[AVPlayerItem playerItemWithURL:self.videoUrl];
        return playerItem;
    }
    
    - (void)setVideoUrl:(NSURL *)videoUrl {
        _videoUrl = videoUrl;
        [self removeAvPlayerNtf];
        [self nextPlayer];
    }
    
    - (void)nextPlayer {
        [self.player seekToTime:CMTimeMakeWithSeconds(0, _player.currentItem.duration.timescale)];
        [self.player replaceCurrentItemWithPlayerItem:[self getAVPlayerItem]];
        [self addAVPlayerNtf:self.player.currentItem];
        if (self.player.rate == 0) {
            [self.player play];
        }
    }
    
    - (void) addAVPlayerNtf:(AVPlayerItem *)playerItem {
        //observe the playback status
        [playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
        //observe the buffering progress
        [playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil];
        
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(playbackFinished:) name:AVPlayerItemDidPlayToEndTimeNotification object:self.player.currentItem];
    }
    
    - (void)removeAvPlayerNtf {
        AVPlayerItem *playerItem = self.player.currentItem;
        [playerItem removeObserver:self forKeyPath:@"status"];
        [playerItem removeObserver:self forKeyPath:@"loadedTimeRanges"];
        [[NSNotificationCenter defaultCenter] removeObserver:self];
    }
    
    - (void)stopPlayer {
        if (self.player.rate == 1) {
            [self.player pause];//pause only if currently playing
        }
    }
    
    /**
     *  Observe the player state via KVO
     *
     *  @param keyPath the observed key path
     *  @param object  the observed object
     *  @param change  the change dictionary
     *  @param context context
     */
    -(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context{
        AVPlayerItem *playerItem = object;
        if ([keyPath isEqualToString:@"status"]) {
            AVPlayerStatus status= [[change objectForKey:@"new"] intValue];
            if(status==AVPlayerStatusReadyToPlay){
                NSLog(@"正在播放...,视频总长度:%.2f",CMTimeGetSeconds(playerItem.duration));
            }
        }else if([keyPath isEqualToString:@"loadedTimeRanges"]){
            NSArray *array=playerItem.loadedTimeRanges;
            CMTimeRange timeRange = [array.firstObject CMTimeRangeValue];//the time range buffered in this pass
            float startSeconds = CMTimeGetSeconds(timeRange.start);
            float durationSeconds = CMTimeGetSeconds(timeRange.duration);
            NSTimeInterval totalBuffer = startSeconds + durationSeconds;//total buffered duration
            NSLog(@"Total buffered: %.2f",totalBuffer);
        }
    }
    
    - (void)playbackFinished:(NSNotification *)ntf {
        Plog(@"视频播放完成");
        [self.player seekToTime:CMTimeMake(0, 1)];
        [self.player play];
    }
    
    @end
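
    The post only shows the implementation; the matching HAVPlayer.h is roughly the sketch below, inferred from the properties and methods used above (the header in the original project may differ slightly):

    #import <UIKit/UIKit.h>
    
    @interface HAVPlayer : UIView
    
    //URL of the video to play; setting it switches playback to the new item
    @property (nonatomic, strong) NSURL *videoUrl;
    
    //create the player, add it to bgView, and start playing url
    - (instancetype)initWithFrame:(CGRect)frame withShowInView:(UIView *)bgView url:(NSURL *)url;
    
    //pause playback if the player is currently playing
    - (void)stopPlayer;
    
    @end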
    

    WeChat also draws a circular progress arc around the bottom button while it is held down; here is the view that draws it:

    HProgressView.m:

    #import "HProgressView.h"
    
    @interface HProgressView ()
    
    /**
     *  Progress value, between 0 and 1.0
     */
    @property (nonatomic,assign)CGFloat progressValue;
    
    @property (nonatomic, assign) CGFloat currentTime;
    
    @end
    
    @implementation HProgressView
    
    
    // Only override drawRect: if you perform custom drawing.
    // An empty implementation adversely affects performance during animation.
    - (void)drawRect:(CGRect)rect {
        // Drawing code
        CGContextRef ctx = UIGraphicsGetCurrentContext();//get the current context
        Plog(@"width = %f",self.frame.size.width);
        CGPoint center = CGPointMake(self.frame.size.width/2.0, self.frame.size.width/2.0);  //center of the circle
        CGFloat radius = self.frame.size.width/2.0-5;  //radius
        CGFloat startA = - M_PI_2;  //start angle (12 o'clock)
        CGFloat endA = -M_PI_2 + M_PI * 2 * _progressValue;  //end angle, proportional to the progress
        
        UIBezierPath *path = [UIBezierPath bezierPathWithArcCenter:center radius:radius startAngle:startA endAngle:endA clockwise:YES];
        
        CGContextSetLineWidth(ctx, 10); //line width
        [[UIColor whiteColor] setStroke]; //stroke color
        
        CGContextAddPath(ctx, path.CGPath); //add the path to the context
        
        CGContextStrokePath(ctx);  //stroke (render) the path
    }
    
    - (void)setTimeMax:(NSInteger)timeMax {
        _timeMax = timeMax;
        self.currentTime = 0;
        self.progressValue = 0;
        [self setNeedsDisplay];
        self.hidden = NO;
        [self performSelector:@selector(startProgress) withObject:nil afterDelay:0.1];
    }
    
    - (void)clearProgress {
        _currentTime = _timeMax;
        self.hidden = YES;
    }
    
    - (void)startProgress {
        _currentTime += 0.1;
        if (_timeMax > _currentTime) {
            _progressValue = _currentTime/_timeMax;
            Plog(@"progress = %f",_progressValue);
            [self setNeedsDisplay];
            [self performSelector:@selector(startProgress) withObject:nil afterDelay:0.1];
        }
        
        if (_timeMax <= _currentTime) {
            [self clearProgress];
            
        }
    }
    
    @end
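
    Likewise, the implied HProgressView.h is roughly the following sketch (again, the original header may differ slightly):

    #import <UIKit/UIKit.h>
    
    @interface HProgressView : UIView
    
    //total duration in seconds; setting it resets the progress and starts the countdown animation
    @property (nonatomic, assign) NSInteger timeMax;
    
    //hide the view and stop updating the progress
    - (void)clearProgress;
    
    @end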
    
    

    Next comes the camera view controller. Since this was written in a hurry it uses a xib, so please don't use it directly; here is the .m file:
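
    One note before the code: these snippets (including the two classes above) rely on a few project-level helpers that the post doesn't include, namely the Plog logging macro, SCREEN_WIDTH, the WS() weak-self macro, and the author's Utility HUD class and KAppDelegate macro. The definitions below are my own assumptions, added only so the code compiles; substitute whatever equivalents your project already has:

    //assumed logging macro, same usage as NSLog
    #define Plog(...) NSLog(__VA_ARGS__)
    //assumed screen-width shorthand
    #define SCREEN_WIDTH ([UIScreen mainScreen].bounds.size.width)
    //assumed weak-self macro, used as WS(weakSelf)
    #define WS(weakSelf) __weak typeof(self) weakSelf = self;
    //Utility (showProgressDialogText:, hideProgressDialog, showAllTextDialog:Text:) and KAppDelegate
    //are the author's own HUD/alert helpers; replace those calls with your own HUD code.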

    #import "HVideoViewController.h"
    #import <AVFoundation/AVFoundation.h>
    #import "HAVPlayer.h"
    #import "HProgressView.h"
    #import <Foundation/Foundation.h>
    #import <AssetsLibrary/AssetsLibrary.h>
    
    typedef void(^PropertyChangeBlock)(AVCaptureDevice *captureDevice);
    @interface HVideoViewController ()<AVCaptureFileOutputRecordingDelegate>
    
    //Tap to take a photo, press and hold to record video
    @property (strong, nonatomic) IBOutlet UILabel *labelTipTitle;
    
    //movie file output
    @property (strong,nonatomic) AVCaptureMovieFileOutput *captureMovieFileOutput;
    //still image output
    //@property (strong,nonatomic) AVCaptureStillImageOutput *captureStillImageOutput;//photo output
    //provides input data from the AVCaptureDevice
    @property (strong,nonatomic) AVCaptureDeviceInput *captureDeviceInput;
    //background task identifier
    @property (assign,nonatomic) UIBackgroundTaskIdentifier backgroundTaskIdentifier;
    
    @property (assign,nonatomic) UIBackgroundTaskIdentifier lastBackgroundTaskIdentifier;
    
    @property (weak, nonatomic) IBOutlet UIImageView *focusCursor; //focus indicator
    
    //passes data between the input and output devices
    @property(nonatomic)AVCaptureSession *session;
    
    //preview layer that shows the live camera feed
    @property(nonatomic)AVCaptureVideoPreviewLayer *previewLayer;
    
    @property (strong, nonatomic) IBOutlet UIButton *btnBack;
    //re-record
    @property (strong, nonatomic) IBOutlet UIButton *btnAfresh;
    //confirm
    @property (strong, nonatomic) IBOutlet UIButton *btnEnsure;
    //switch camera
    @property (strong, nonatomic) IBOutlet UIButton *btnCamera;
    
    @property (strong, nonatomic) IBOutlet UIImageView *bgView;
    //countdown of the recording time; the maximum defaults to 60 seconds
    @property (assign, nonatomic) NSInteger seconds;
    
    //URL of the video to be saved
    @property (strong, nonatomic) NSURL *saveVideoUrl;
    
    //whether a focus animation is in progress
    @property (assign, nonatomic) BOOL isFocus;
    @property (strong, nonatomic) IBOutlet NSLayoutConstraint *afreshCenterX;
    @property (strong, nonatomic) IBOutlet NSLayoutConstraint *ensureCenterX;
    @property (strong, nonatomic) IBOutlet NSLayoutConstraint *backCenterX;
    
    //video playback
    @property (strong, nonatomic) HAVPlayer *player;
    
    @property (strong, nonatomic) IBOutlet HProgressView *progressView;
    
    //YES means video recording, NO means taking a photo
    @property (assign, nonatomic) BOOL isVideo;
    
    @property (strong, nonatomic) UIImage *takeImage;
    @property (strong, nonatomic) UIImageView *takeImageView;
    @property (strong, nonatomic) IBOutlet UIImageView *imgRecord;
    
    
    @end
    
    //a press longer than this is treated as video recording; otherwise it is a photo
    #define TimeMax 1
    
    @implementation HVideoViewController
    
    
    -(void)dealloc{
        [self removeNotification];
        
        
    }
    
    - (void)viewDidLoad {
        [super viewDidLoad];
        // Do any additional setup after loading the view.
    
        UIImage *image = [UIImage imageNamed:@"sc_btn_take.png"];
        self.backCenterX.constant = -(SCREEN_WIDTH/2/2)-image.size.width/2/2;
        
        self.progressView.layer.cornerRadius = self.progressView.frame.size.width/2;
        
        if (self.HSeconds == 0) {
            self.HSeconds = 60;
        }
        
        [self performSelector:@selector(hiddenTipsLabel) withObject:nil afterDelay:4];
    }
    
    - (void)hiddenTipsLabel {
        self.labelTipTitle.hidden = YES;
    }
    
    - (void)didReceiveMemoryWarning {
        [super didReceiveMemoryWarning];
        // Dispose of any resources that can be recreated.
    }
    
    - (void)viewWillAppear:(BOOL)animated {
        [super viewWillAppear:animated];
        [[UIApplication sharedApplication] setStatusBarHidden:YES];
        [self customCamera];
        [self.session startRunning];
    }
    
    
    -(void)viewDidAppear:(BOOL)animated{
        [super viewDidAppear:animated];
    }
    
    -(void)viewDidDisappear:(BOOL)animated{
        [super viewDidDisappear:animated];
        [self.session stopRunning];
    }
    
    - (void)viewWillDisappear:(BOOL)animated {
        [super viewWillDisappear:animated];
        [[UIApplication sharedApplication] setStatusBarHidden:NO];
    }
    
    - (void)customCamera {
        
        //create the session that ties the inputs and the output together
        self.session = [[AVCaptureSession alloc] init];
        //set the resolution (the highest preset the device supports)
        if ([self.session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
            self.session.sessionPreset = AVCaptureSessionPresetHigh;
        }
        //get the back camera
        AVCaptureDevice *captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
        //get an audio capture device
        AVCaptureDevice *audioCaptureDevice=[[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
        
        //create the video input
        NSError *error = nil;
        self.captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
        if (error) {
            Plog(@"Failed to create the video input, error: %@",error.localizedDescription);
            return;
        }
        
        //create the audio input
        error = nil;
        AVCaptureDeviceInput *audioCaptureDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:audioCaptureDevice error:&error];
        if (error) {
            NSLog(@"Failed to create the audio input, error: %@",error.localizedDescription);
            return;
        }
        
        //movie file output
        self.captureMovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];//video output
        
        //add the inputs to the session
        if ([self.session canAddInput:self.captureDeviceInput]) {
            [self.session addInput:self.captureDeviceInput];
            [self.session addInput:audioCaptureDeviceInput];
        }
        
        //add the movie output to the session
        if ([self.session canAddOutput:self.captureMovieFileOutput]) {
            [self.session addOutput:self.captureMovieFileOutput];
            //enable video stabilization; the connection only exists once the output has been added to the session
            AVCaptureConnection *connection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
            if ([connection isVideoStabilizationSupported]) {
                connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeCinematic;
            }
        }
        
        //create the preview layer that shows the live camera feed
        self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
        self.previewLayer.frame = self.view.bounds;//CGRectMake(0, 0, self.view.width, self.view.height);
        self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;//fill mode
        [self.bgView.layer addSublayer:self.previewLayer];
        
        [self addNotificationToCaptureDevice:captureDevice];
        [self addGenstureRecognizer];
    }
    
    
    
    - (IBAction)onCancelAction:(UIButton *)sender {
        [self dismissViewControllerAnimated:YES completion:^{
            [Utility hideProgressDialog];
        }];
    }
    
    
    - (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
        if ([[touches anyObject] view] == self.imgRecord) {
            Plog(@"开始录制");
            //根据设备输出获得连接
            AVCaptureConnection *connection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeAudio];
            //根据连接取得设备输出的数据
            if (![self.captureMovieFileOutput isRecording]) {
                //start a background task if multitasking is supported
                if ([[UIDevice currentDevice] isMultitaskingSupported]) {
                    self.backgroundTaskIdentifier = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil];
                }
                if (self.saveVideoUrl) {
                    [[NSFileManager defaultManager] removeItemAtURL:self.saveVideoUrl error:nil];
                }
                //keep the recording orientation in sync with the preview layer
                connection.videoOrientation = [self.previewLayer connection].videoOrientation;
                NSString *outputFilePath=[NSTemporaryDirectory() stringByAppendingString:@"myMovie.mov"];
                NSLog(@"save path is :%@",outputFilePath);
                NSURL *fileUrl=[NSURL fileURLWithPath:outputFilePath];
                NSLog(@"fileUrl:%@",fileUrl);
                [self.captureMovieFileOutput startRecordingToOutputFileURL:fileUrl recordingDelegate:self];
            } else {
                [self.captureMovieFileOutput stopRecording];
            }
        }
    }
    
    
    - (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
        if ([[touches anyObject] view] == self.imgRecord) {
            Plog(@"结束触摸");
            if (!self.isVideo) {
                [self performSelector:@selector(endRecord) withObject:nil afterDelay:0.3];
            } else {
                [self endRecord];
            }
        }
    }
    
    - (void)endRecord {
        [self.captureMovieFileOutput stopRecording];//stop recording
    }
    
    - (IBAction)onAfreshAction:(UIButton *)sender {
        Plog(@"重新录制");
        [self recoverLayout];
    }
    
    - (IBAction)onEnsureAction:(UIButton *)sender {
        Plog(@"确定 这里进行保存或者发送出去");
        if (self.saveVideoUrl) {
            WS(weakSelf)
            [Utility showProgressDialogText:@"Processing video..."];
            ALAssetsLibrary *assetsLibrary=[[ALAssetsLibrary alloc]init];
            [assetsLibrary writeVideoAtPathToSavedPhotosAlbum:self.saveVideoUrl completionBlock:^(NSURL *assetURL, NSError *error) {
                Plog(@"outputUrl:%@",weakSelf.saveVideoUrl);
                [[NSFileManager defaultManager] removeItemAtURL:weakSelf.saveVideoUrl error:nil];
                if (weakSelf.lastBackgroundTaskIdentifier!= UIBackgroundTaskInvalid) {
                    [[UIApplication sharedApplication] endBackgroundTask:weakSelf.lastBackgroundTaskIdentifier];
                }
                if (error) {
                    Plog(@"保存视频到相簿过程中发生错误,错误信息:%@",error.localizedDescription);
                    [Utility showAllTextDialog:KAppDelegate.window Text:@"保存视频到相册发生错误"];
                } else {
                    if (weakSelf.takeBlock) {
                        weakSelf.takeBlock(assetURL);
                    }
                    Plog(@"成功保存视频到相簿.");
                    [weakSelf onCancelAction:nil];
                }
            }];
        } else {
            //photo
            UIImageWriteToSavedPhotosAlbum(self.takeImage, self, nil, nil);
            if (self.takeBlock) {
                self.takeBlock(self.takeImage);
            }
    
            [self onCancelAction:nil];
        }
    }
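    
    //Note: ALAssetsLibrary (used in onEnsureAction above) has been deprecated since iOS 9. On newer
    //systems the same save can be done with the Photos framework (#import <Photos/Photos.h>); a rough,
    //assumed sketch rather than code from the original post:
    //
    //    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
    //        [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:weakSelf.saveVideoUrl];
    //    } completionHandler:^(BOOL success, NSError *error) {
    //        //clean up the temporary file and report the result, as above
    //    }];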
    
    //switch between the front and back cameras
    - (IBAction)onCameraAction:(UIButton *)sender {
        Plog(@"Switch camera");
        AVCaptureDevice *currentDevice=[self.captureDeviceInput device];
        AVCaptureDevicePosition currentPosition=[currentDevice position];
        [self removeNotificationFromCaptureDevice:currentDevice];
        AVCaptureDevice *toChangeDevice;
        AVCaptureDevicePosition toChangePosition = AVCaptureDevicePositionFront;//front
        if (currentPosition == AVCaptureDevicePositionUnspecified || currentPosition == AVCaptureDevicePositionFront) {
            toChangePosition = AVCaptureDevicePositionBack;//back
        }
        toChangeDevice=[self getCameraDeviceWithPosition:toChangePosition];
        [self addNotificationToCaptureDevice:toChangeDevice];
        //create the input object for the new device
        AVCaptureDeviceInput *toChangeDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:toChangeDevice error:nil];
        
        //wrap the change in beginConfiguration/commitConfiguration
        [self.session beginConfiguration];
        //remove the old input
        [self.session removeInput:self.captureDeviceInput];
        //add the new input
        if ([self.session canAddInput:toChangeDeviceInput]) {
            [self.session addInput:toChangeDeviceInput];
            self.captureDeviceInput = toChangeDeviceInput;
        }
        //commit the session configuration
        [self.session commitConfiguration];
    }
    
    - (void)onStartTranscribe:(NSURL *)fileURL {
        if ([self.captureMovieFileOutput isRecording]) {
            -- self.seconds;
            if (self.seconds > 0) {
                if (self.HSeconds - self.seconds >= TimeMax && !self.isVideo) {
                    self.isVideo = YES;//held longer than TimeMax, so treat this as video recording
                    self.progressView.timeMax = self.seconds;
                }
                [self performSelector:@selector(onStartTranscribe:) withObject:fileURL afterDelay:1.0];
            } else {
                if ([self.captureMovieFileOutput isRecording]) {
                    [self.captureMovieFileOutput stopRecording];
                }
            }
        }
    }
    
    
    #pragma mark - AVCaptureFileOutputRecordingDelegate
    -(void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections{
        Plog(@"开始录制...");
        self.seconds = self.HSeconds;
        [self performSelector:@selector(onStartTranscribe:) withObject:fileURL afterDelay:1.0];
    }
    
    
    -(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error{
        Plog(@"视频录制完成.");
        [self changeLayout];
        if (self.isVideo) {
            self.saveVideoUrl = outputFileURL;
            if (!self.player) {
                self.player = [[HAVPlayer alloc] initWithFrame:self.bgView.bounds withShowInView:self.bgView url:outputFileURL];
            } else {
                if (outputFileURL) {
                    self.player.videoUrl = outputFileURL;
                    self.player.hidden = NO;
                }
            }
        } else {
            //photo
            self.saveVideoUrl = nil;
            [self videoHandlePhoto:outputFileURL];
        }
        
    }
    
    - (void)videoHandlePhoto:(NSURL *)url {
        AVURLAsset *urlSet = [AVURLAsset assetWithURL:url];
        AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:urlSet];
        imageGenerator.appliesPreferredTrackTransform = YES;    // rotate the snapshot to the correct orientation
        NSError *error = nil;
        CMTime time = CMTimeMake(0,30);//time of the thumbnail; CMTime is AVFoundation's time struct, value/timescale gives seconds, so (0,30) is frame 0 at a timescale of 30
        CMTime actualTime; //the time at which the image was actually generated
        CGImageRef cgImage = [imageGenerator copyCGImageAtTime:time actualTime:&actualTime error:&error];
        if (error) {
            Plog(@"Failed to grab a frame from the video: %@",error.localizedDescription);
        }
        CMTimeShow(actualTime);
        UIImage *image = [UIImage imageWithCGImage:cgImage];
        
        CGImageRelease(cgImage);
        if (image) {
            Plog(@"视频截取成功");
        } else {
            Plog(@"视频截取失败");
        }
        
      
        self.takeImage = image;//[UIImage imageWithCGImage:cgImage];
        
        [[NSFileManager defaultManager] removeItemAtURL:url error:nil];
        
        if (!self.takeImageView) {
            self.takeImageView = [[UIImageView alloc] initWithFrame:self.view.frame];
            [self.bgView addSubview:self.takeImageView];
        }
        self.takeImageView.hidden = NO;
        self.takeImageView.image = self.takeImage;
    }
    
    #pragma mark - Notifications
    
    //register for notifications
    - (void)setupObservers
    {
        NSNotificationCenter *notification = [NSNotificationCenter defaultCenter];
        [notification addObserver:self selector:@selector(applicationDidEnterBackground:) name:UIApplicationWillResignActiveNotification object:[UIApplication sharedApplication]];
    }
    
    //dismiss the recorder when the app resigns active / goes to the background
    - (void)applicationDidEnterBackground:(NSNotification *)notification {
        [self onCancelAction:nil];
    }
    
    /**
     *  Add notifications for the capture device
     */
    -(void)addNotificationToCaptureDevice:(AVCaptureDevice *)captureDevice{
        //note: subject-area-change monitoring must be enabled on the device before the notification will fire
        [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
            captureDevice.subjectAreaChangeMonitoringEnabled=YES;
        }];
        NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
        //the subject area changed
        [notificationCenter addObserver:self selector:@selector(areaChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
    }
    -(void)removeNotificationFromCaptureDevice:(AVCaptureDevice *)captureDevice{
        NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
        [notificationCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
    }
    /**
     *  Remove all notifications
     */
    -(void)removeNotification{
        NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
        [notificationCenter removeObserver:self];
    }
    
    -(void)addNotificationToCaptureSession:(AVCaptureSession *)captureSession{
        NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
        //session runtime error
        [notificationCenter addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:captureSession];
    }
    
    /**
     *  A capture device was connected
     *
     *  @param notification the notification object
     */
    -(void)deviceConnected:(NSNotification *)notification{
        NSLog(@"Device connected...");
    }
    /**
     *  A capture device was disconnected
     *
     *  @param notification the notification object
     */
    -(void)deviceDisconnected:(NSNotification *)notification{
        NSLog(@"Device disconnected.");
    }
    /**
     *  The subject area changed
     *
     *  @param notification the notification object
     */
    -(void)areaChange:(NSNotification *)notification{
        NSLog(@"Subject area changed...");
    }
    
    /**
     *  Session runtime error
     *
     *  @param notification the notification object
     */
    -(void)sessionRuntimeError:(NSNotification *)notification{
        NSLog(@"A session runtime error occurred.");
    }
    
    
    
    /**
     *  Get the camera at the specified position
     *
     *  @param position camera position
     *
     *  @return the camera device
     */
    -(AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition )position{
        NSArray *cameras= [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
        for (AVCaptureDevice *camera in cameras) {
            if ([camera position] == position) {
                return camera;
            }
        }
        return nil;
    }
    
    /**
     *  Common wrapper for changing capture-device properties
     *
     *  @param propertyChange block that performs the property change
     */
    -(void)changeDeviceProperty:(PropertyChangeBlock)propertyChange{
        AVCaptureDevice *captureDevice= [self.captureDeviceInput device];
        NSError *error;
        //note: call lockForConfiguration: before changing device properties and unlockForConfiguration when done
        if ([captureDevice lockForConfiguration:&error]) {
            //continuous auto white balance
            if ([captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
                [captureDevice setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
            }
            //fire the flash automatically based on lighting conditions
            if ([captureDevice isFlashModeSupported:AVCaptureFlashModeAuto]) {
                [captureDevice setFlashMode:AVCaptureFlashModeAuto];
            }
            
            propertyChange(captureDevice);
            [captureDevice unlockForConfiguration];
        }else{
            NSLog(@"Error while setting device properties: %@",error.localizedDescription);
        }
    }
    
    /**
     *  Set the flash mode
     *
     *  @param flashMode flash mode
     */
    -(void)setFlashMode:(AVCaptureFlashMode )flashMode{
        [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
            if ([captureDevice isFlashModeSupported:flashMode]) {
                [captureDevice setFlashMode:flashMode];
            }
        }];
    }
    /**
     *  Set the focus mode
     *
     *  @param focusMode focus mode
     */
    -(void)setFocusMode:(AVCaptureFocusMode )focusMode{
        [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
            if ([captureDevice isFocusModeSupported:focusMode]) {
                [captureDevice setFocusMode:focusMode];
            }
        }];
    }
    /**
     *  Set the exposure mode
     *
     *  @param exposureMode exposure mode
     */
    -(void)setExposureMode:(AVCaptureExposureMode)exposureMode{
        [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
            if ([captureDevice isExposureModeSupported:exposureMode]) {
                [captureDevice setExposureMode:exposureMode];
            }
        }];
    }
    /**
     *  Set the focus and exposure point
     *
     *  @param point the point of interest
     */
    -(void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point{
        [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
    //        if ([captureDevice isFocusPointOfInterestSupported]) {
    //            [captureDevice setFocusPointOfInterest:point];
    //        }
    //        if ([captureDevice isExposurePointOfInterestSupported]) {
    //            [captureDevice setExposurePointOfInterest:point];
    //        }
            if ([captureDevice isExposureModeSupported:exposureMode]) {
                [captureDevice setExposureMode:exposureMode];
            }
            if ([captureDevice isFocusModeSupported:focusMode]) {
                [captureDevice setFocusMode:focusMode];
            }
        }];
    }
    
    /**
     *  Add a tap gesture; tapping focuses at the tapped point
     */
    -(void)addGenstureRecognizer{
        UITapGestureRecognizer *tapGesture=[[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(tapScreen:)];
        [self.bgView addGestureRecognizer:tapGesture];
    }
    
    -(void)tapScreen:(UITapGestureRecognizer *)tapGesture{
        if ([self.session isRunning]) {
            CGPoint point= [tapGesture locationInView:self.bgView];
            //convert the UI coordinate to a camera coordinate
            CGPoint cameraPoint= [self.previewLayer captureDevicePointOfInterestForPoint:point];
            [self setFocusCursorWithPoint:point];
            [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposureMode:AVCaptureExposureModeContinuousAutoExposure atPoint:cameraPoint];
        }
    }
    
    /**
     *  Position the focus indicator
     *
     *  @param point the indicator position
     */
    -(void)setFocusCursorWithPoint:(CGPoint)point{
        if (!self.isFocus) {
            self.isFocus = YES;
            self.focusCursor.center=point;
            self.focusCursor.transform = CGAffineTransformMakeScale(1.25, 1.25);
            self.focusCursor.alpha = 1.0;
            [UIView animateWithDuration:0.5 animations:^{
                self.focusCursor.transform = CGAffineTransformIdentity;
            } completion:^(BOOL finished) {
                [self performSelector:@selector(onHiddenFocusCurSorAction) withObject:nil afterDelay:0.5];
            }];
        }
    }
    
    - (void)onHiddenFocusCurSorAction {
        self.focusCursor.alpha=0;
        self.isFocus = NO;
    }
    
    //called when capturing finishes
    - (void)changeLayout {
        self.imgRecord.hidden = YES;
        self.btnCamera.hidden = YES;
        self.btnAfresh.hidden = NO;
        self.btnEnsure.hidden = NO;
        self.btnBack.hidden = YES;
        if (self.isVideo) {
            [self.progressView clearProgress];
        }
        self.afreshCenterX.constant = -(SCREEN_WIDTH/2/2);
        self.ensureCenterX.constant = SCREEN_WIDTH/2/2;
        [UIView animateWithDuration:0.25 animations:^{
            [self.view layoutIfNeeded];
        }];
        
        self.lastBackgroundTaskIdentifier = self.backgroundTaskIdentifier;
        self.backgroundTaskIdentifier = UIBackgroundTaskInvalid;
        [self.session stopRunning];
    }
    
    
    //called when re-shooting
    - (void)recoverLayout {
        if (self.isVideo) {
            self.isVideo = NO;
            [self.player stopPlayer];
            self.player.hidden = YES;
        }
        [self.session startRunning];
    
        if (!self.takeImageView.hidden) {
            self.takeImageView.hidden = YES;
        }
    //    self.saveVideoUrl = nil;
        self.afreshCenterX.constant = 0;
        self.ensureCenterX.constant = 0;
        self.imgRecord.hidden = NO;
        self.btnCamera.hidden = NO;
        self.btnAfresh.hidden = YES;
        self.btnEnsure.hidden = YES;
        self.btnBack.hidden = NO;
        [UIView animateWithDuration:0.25 animations:^{
            [self.view layoutIfNeeded];
        }];
    }
    
    /*
    #pragma mark - Navigation
    
    // In a storyboard-based application, you will often want to do a little preparation before navigation
    - (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
        // Get the new view controller using [segue destinationViewController].
        // Pass the selected object to the new view controller.
    }
    */
    
    @end
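
    The controller's header isn't shown in the post either; based on the references to HSeconds and takeBlock above and the usage below, it is roughly this sketch (the exact declarations may differ in the original project):

    #import <UIKit/UIKit.h>
    
    @interface HVideoViewController : UIViewController
    
    //maximum recording duration in seconds (treated as 60 when left at 0)
    @property (assign, nonatomic) NSInteger HSeconds;
    
    //called with an NSURL for a recorded video, or a UIImage for a photo
    @property (copy, nonatomic) void (^takeBlock)(id item);
    
    @end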
    

    Using it is straightforward:

    
    - (IBAction)onCameraAction:(UIButton *)sender {
        //since this is a demo it uses a xib; adapt it to your own needs. It is only meant to show one approach, so don't drag it into a project as-is
        HVideoViewController *ctrl = [[NSBundle mainBundle] loadNibNamed:@"HVideoViewController" owner:nil options:nil].lastObject;
        ctrl.HSeconds = 30;//maximum recording duration in seconds
        ctrl.takeBlock = ^(id item) {
            if ([item isKindOfClass:[NSURL class]]) {
                NSURL *videoURL = item;
                //the URL of the recorded video
                
            } else {
                //a UIImage (the photo)
                
            }
        };
        [self presentViewController:ctrl animated:YES completion:nil];
    }
    

    Here is the demo's address as well (if you don't like it, just skip it -_-):
    https://github.com/hkjin/KJCamera

    That's all. It's fairly simple, but I hope it helps. Thanks!

    Here is a GIF of the result:

    KJCamera.gif
