GPUImage Source Code Analysis and Usage (Part 3)

Author: 紫水依 | Published 2020-02-12 16:07

    Using GPUImage to take a photo with a filter applied

    - (void)viewDidLoad {
        [super viewDidLoad];
        // Do any additional setup after loading the view from its nib.
        
        // Take a photo and apply a filter
        // 1. Add a button that triggers the capture
        UIButton *button = [UIButton buttonWithType:UIButtonTypeCustom];
        button.frame = CGRectMake((self.view.bounds.size.width-80)*0.5, self.view.bounds.size.height-120, 80, 40);
        button.backgroundColor = [UIColor blueColor];
        [button setTitle:@"拍照" forState:UIControlStateNormal];
        [button addTarget:self action:@selector(takePhoto) forControlEvents:UIControlEventTouchUpInside];
        [self.view addSubview:button];
        [self.view bringSubviewToFront:button];
        
        // 2. Set up the camera with a filter
        [self addFilterCamera];
    }
    
    - (void)addFilterCamera {
        // 1. Create the still camera: AVCaptureSessionPreset640x480 sets the capture size, AVCaptureDevicePositionFront selects the front camera
        _sCamera = [[GPUImageStillCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionFront];
        
        // 2. Switch to the other camera (front/back)
        [_sCamera rotateCamera];
        
        // 3. Portrait orientation
        _sCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
        
        // 4. Create the filter: a grayscale filter
        _grayFilter = [[GPUImageGrayscaleFilter alloc] init];
        [_sCamera addTarget:_grayFilter];
        
        // 5. Render into a GPUImageView
        _gpuImageView = [[GPUImageView alloc] initWithFrame:self.jingImageView.frame];
        [_grayFilter addTarget:_gpuImageView];
        
        [self.view addSubview:_gpuImageView];
        
        // 6. Start the camera capture (live preview)
        [_sCamera startCameraCapture];
        
        
    }
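
    The photo demo above reads from several ivars and an outlet (_sCamera, _grayFilter, _gpuImageView, jingImageView) that the article never declares. A minimal sketch of the class extension they imply is given below, assuming the controller class is named ViewController, jingImageView is a UIImageView outlet from the nib, and GPUImage is imported via its CocoaPods umbrella header.

    #import <GPUImage/GPUImage.h>
    #import <Photos/Photos.h>

    @interface ViewController ()
    {
        GPUImageStillCamera *_sCamera;        // still camera feeding the filter chain
        GPUImageGrayscaleFilter *_grayFilter; // grayscale filter applied to the live feed
        GPUImageView *_gpuImageView;          // on-screen preview of the filtered feed
    }
    // Assumed outlet used to show the captured photo
    @property (nonatomic, weak) IBOutlet UIImageView *jingImageView;
    @end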
    
    - (void)takePhoto {
        // 7. Capture the photo through the filter and save it to the photo library
        [_sCamera capturePhotoAsJPEGProcessedUpToFilter:_grayFilter withCompletionHandler:^(NSData *processedJPEG, NSError *error) {
            
            [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
                [[PHAssetCreationRequest creationRequestForAsset] addResourceWithType:PHAssetResourceTypePhoto data:processedJPEG options:nil];
            } completionHandler:^(BOOL success, NSError * _Nullable error) {
                // The photo library save result could be checked here
            }];
            
            // Preview the captured image (UI work must run on the main thread)
            UIImage *newImage = [UIImage imageWithData:processedJPEG];
            dispatch_async(dispatch_get_main_queue(), ^{
                [self.view bringSubviewToFront:self.jingImageView];
                self.jingImageView.image = newImage;
            });
        }];
        
    }
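
    Saving fails silently if the app has no photo library permission. Below is a minimal sketch of an authorization check that could run before the first save; the helper name checkPhotoAuthorizationThen: is hypothetical and not part of the original code, and the NSPhotoLibraryUsageDescription key must be present in Info.plist.

    // Hypothetical helper: request photo library access, then run the save block on success.
    - (void)checkPhotoAuthorizationThen:(void (^)(void))saveBlock {
        PHAuthorizationStatus status = [PHPhotoLibrary authorizationStatus];
        if (status == PHAuthorizationStatusAuthorized) {
            saveBlock();
        } else if (status == PHAuthorizationStatusNotDetermined) {
            [PHPhotoLibrary requestAuthorization:^(PHAuthorizationStatus newStatus) {
                if (newStatus == PHAuthorizationStatusAuthorized) {
                    saveBlock();
                }
            }];
        } else {
            NSLog(@"Photo library access denied; the filtered photo cannot be saved.");
        }
    }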
    

    Using GPUImage to record video with a filter applied

    VideoManager.h

    @protocol videoDelegate <NSObject>
    
    - (void)didStartRecordVideo;
    
    - (void)didCompressingVideo;
    
    - (void)didEndRecordVideoWithTime:(CGFloat)totalTime outputFile:(NSString *)filePath;
    
    @end
    
    @interface VideoManager : NSObject
    
    @property (nonatomic, assign) CGRect frame;
    @property (nonatomic, weak) id<videoDelegate> delegate;
    
    + (instancetype)manager;
    
    - (void)showWithFrame:(CGRect)frame superView:(UIView *)view;
    
    - (void)startRecording;
    - (void)stopRecording;
    
    @end
    

    VideoManager.m
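
    The implementation below reads a number of private properties (videoCamera, saturationFilter, displayView, movieWriter, moviePath, movieUrl, timer, recordSecond, maxTime) that the excerpt never declares. A sketch of the class extension they imply is shown here; the concrete types and the delegate conformance are assumptions inferred from how the properties are used.

    // Assumed private class extension at the top of VideoManager.m
    @interface VideoManager () <GPUImageVideoCameraDelegate>

    @property (nonatomic, strong) GPUImageVideoCamera *videoCamera;           // live camera feed
    @property (nonatomic, strong) GPUImageSaturationFilter *saturationFilter; // filter applied to every frame
    @property (nonatomic, strong) GPUImageView *displayView;                  // on-screen preview
    @property (nonatomic, strong) GPUImageMovieWriter *movieWriter;           // writes filtered frames to disk
    @property (nonatomic, copy)   NSString *moviePath;                        // path of the raw recording
    @property (nonatomic, strong) NSURL *movieUrl;                            // file URL of the raw recording
    @property (nonatomic, strong) NSTimer *timer;                             // tracks elapsed recording time
    @property (nonatomic, assign) CGFloat recordSecond;                       // seconds recorded so far
    @property (nonatomic, assign) CGFloat maxTime;                            // maximum allowed duration

    @end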

    // Shared video-recording manager (singleton)
    + (instancetype)manager {
        static VideoManager *_manager = nil;
        static dispatch_once_t onceToken;
        dispatch_once(&onceToken, ^{
            _manager = [[VideoManager alloc] init];
        });
        return _manager;
    }
    
    // Start recording
    - (void)startRecording {
        // Output path for the recorded movie
        NSString *defaultPath = [self getVideoPathCache];
        self.moviePath = [defaultPath stringByAppendingPathComponent:[self getVideoNameWithType:@"mp4"]];
        self.movieUrl = [NSURL fileURLWithPath:self.moviePath];
        
        self.movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:self.movieUrl size:CGSizeMake(480, 640)];
        self.movieWriter.encodingLiveVideo = YES;
        self.movieWriter.shouldPassthroughAudio = YES;
        
        [self.saturationFilter addTarget:self.movieWriter];
        
        self.videoCamera.audioEncodingTarget = self.movieWriter;
        
        // Begin writing
        [self.movieWriter startRecording];
        
        if (self.delegate && [self.delegate respondsToSelector:@selector(didStartRecordVideo)]) {
            [self.delegate didStartRecordVideo];
        }
        [self.timer setFireDate:[NSDate distantPast]];
        [self.timer fire];
    }
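
    startRecording builds its output path from two helpers, getVideoPathCache and getVideoNameWithType:, that the article does not show. One plausible implementation, writing into a "videos" folder under Caches, is sketched below; the folder name and the timestamp-based file name are assumptions.

    // Assumed helpers used by startRecording to build the recording path.
    - (NSString *)getVideoPathCache {
        NSString *cacheDir = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES).firstObject;
        NSString *videoDir = [cacheDir stringByAppendingPathComponent:@"videos"];
        if (![[NSFileManager defaultManager] fileExistsAtPath:videoDir]) {
            [[NSFileManager defaultManager] createDirectoryAtPath:videoDir withIntermediateDirectories:YES attributes:nil error:nil];
        }
        return videoDir;
    }

    - (NSString *)getVideoNameWithType:(NSString *)fileType {
        // Timestamp-based name so successive recordings do not overwrite each other
        NSTimeInterval now = [[NSDate date] timeIntervalSince1970];
        return [NSString stringWithFormat:@"video_%.0f.%@", now, fileType];
    }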
    
    // Stop recording
    - (void)stopRecording {
        if (self.videoCamera) {
            
            [self.timer invalidate];
            self.timer = nil;
            
            [self.movieWriter finishRecording];
            
            // Remove the movie writer from the filter chain
            [self.saturationFilter removeTarget:self.movieWriter];
            
            self.videoCamera.audioEncodingTarget = nil;
            
            if (self.recordSecond > self.maxTime) {
                // Recording exceeded the maximum duration; the clip could be discarded here
            } else {
                // Compress the video
                if (self.delegate && [self.delegate respondsToSelector:@selector(didCompressingVideo)]) {
                    [self.delegate didCompressingVideo]; // Notify that compression is in progress
                }
                
                // Start compressing
                __weak typeof(self) weakSelf = self;
                [self compressVideoWithUrl:self.movieUrl compressionType:AVAssetExportPresetMediumQuality filePath:^(NSString *resultPath, float memorySize, NSString *videoImagePath, int seconds){
                    
                    // Report the recording duration (in seconds) and the compressed file path
                    if (weakSelf.delegate && [weakSelf.delegate respondsToSelector:@selector(didEndRecordVideoWithTime:outputFile:)]) {
                        [weakSelf.delegate didEndRecordVideoWithTime:(CGFloat)seconds outputFile:resultPath];
                    }
                }];
            }
        }
    }
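
    The recordSecond / maxTime check above implies a timer that counts elapsed recording time, but its creation never appears in the excerpt. A sketch of a lazily created one-second timer and its tick handler is given below; the selector name recordTimerTick is hypothetical, while the property names match the ones already used.

    // Assumed lazy timer plus tick handler keeping recordSecond up to date.
    - (NSTimer *)timer {
        if (!_timer) {
            _timer = [NSTimer scheduledTimerWithTimeInterval:1.0
                                                      target:self
                                                    selector:@selector(recordTimerTick)
                                                    userInfo:nil
                                                     repeats:YES];
            // Paused until startRecording moves the fire date to distantPast
            [_timer setFireDate:[NSDate distantFuture]];
        }
        return _timer;
    }

    - (void)recordTimerTick {
        // Accumulated time is compared against maxTime when recording stops
        self.recordSecond += 1.0;
    }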
    
    - (void)compressVideoWithUrl:(NSURL *)url compressionType:(NSString *)type filePath:(void(^)(NSString *resultPath, float memorySize, NSString *videoImagePath, int seconds))resultBlock {
        
        NSString *resultPath;
        
        // Size before compression
        NSData *data = [NSData dataWithContentsOfURL:url];
        CGFloat totalSize = (float)data.length / 1024 / 1024;
        NSLog(@"Size before compression: %.2f MB", totalSize);
        AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
        
        CMTime time = [avAsset duration];
        
        // Video duration in seconds
        int seconds = (int)ceil(CMTimeGetSeconds(time));
        
        NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:avAsset];
        if ([compatiblePresets containsObject:type]) {
            
            // Create the export session with the requested preset
            AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:avAsset presetName:type];
            
            // Name the file with a timestamp so existing recordings are not overwritten
            NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
            [formatter setDateFormat:@"yyyy-MM-dd-HH-mm-ss"];
            
            // Create the output directory if it does not exist
            NSFileManager *manager = [NSFileManager defaultManager];
            BOOL isExist = [manager fileExistsAtPath:COMPRESSEDVIDEOPATH];
            if (!isExist) {
                [manager createDirectoryAtPath:COMPRESSEDVIDEOPATH withIntermediateDirectories:YES attributes:nil error:nil];
            }
            resultPath = [COMPRESSEDVIDEOPATH stringByAppendingPathComponent:[NSString stringWithFormat:@"user%u-outputVideo-%@.mp4", arc4random_uniform(10000), [formatter stringFromDate:[NSDate date]]]];
            
            session.outputURL = [NSURL fileURLWithPath:resultPath];
            session.outputFileType = AVFileTypeMPEG4;
            session.shouldOptimizeForNetworkUse = YES;
            [session exportAsynchronouslyWithCompletionHandler:^{
                
                switch (session.status) {
                    case AVAssetExportSessionStatusUnknown:
                        break;
                    case AVAssetExportSessionStatusWaiting:
                        break;
                    case AVAssetExportSessionStatusExporting:
                        break;
                    case AVAssetExportSessionStatusCancelled:
                        break;
                    case AVAssetExportSessionStatusFailed:
                        break;
                    case AVAssetExportSessionStatusCompleted: {
                        
                        NSData *data = [NSData dataWithContentsOfFile:resultPath];
                        // Size after compression
                        float compressedSize = (float)data.length / 1024 / 1024;
                        resultBlock(resultPath, compressedSize, @"", seconds);
                        NSLog(@"Size after compression: %.2f MB", compressedSize);
                        break;
                    }
                    default:
                        break;
                }
            }];
        }
    }
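
    COMPRESSEDVIDEOPATH is a macro the article never defines; it only needs to expand to a writable directory for the compressed output. One possible definition is shown below as an assumption, together with an optional helper (also not in the original code, name hypothetical) that could fill the currently empty videoImagePath argument with a cover image via AVAssetImageGenerator.

    // Assumed definition of the output directory for compressed videos.
    #define COMPRESSEDVIDEOPATH [NSHomeDirectory() stringByAppendingPathComponent:@"Library/Caches/CompressedVideos"]

    // Optional helper (hypothetical): grab a cover frame for a video file and return its path.
    - (NSString *)saveCoverImageForAsset:(AVAsset *)asset toPathBase:(NSString *)basePath {
        AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
        generator.appliesPreferredTrackTransform = YES;
        CGImageRef cgImage = [generator copyCGImageAtTime:CMTimeMake(0, 600) actualTime:NULL error:NULL];
        if (!cgImage) {
            return @"";
        }
        NSString *imagePath = [basePath stringByAppendingString:@".jpg"];
        [UIImageJPEGRepresentation([UIImage imageWithCGImage:cgImage], 0.8) writeToFile:imagePath atomically:YES];
        CGImageRelease(cgImage);
        return imagePath;
    }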
    
    - (void)showWithFrame:(CGRect)frame superView:(UIView *)superView {
        _frame = frame;
        [self.saturationFilter addTarget:self.displayView];
        [self.videoCamera addTarget:self.saturationFilter];
        
        [superView addSubview:self.displayView];
        [self.videoCamera startCameraCapture];
    }
    
    #pragma mark - Camera output
    - (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer {
        // GPUImageVideoCameraDelegate callback, invoked for each video sampleBuffer captured by the camera
    }
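
    showWithFrame:superView: relies on videoCamera, saturationFilter, and displayView being ready, but their creation is not part of the excerpt. A sketch of lazy getters that would fit the usage above is shown here; the session preset, camera position, and saturation value are assumptions.

    // Assumed lazy initializers for the camera, filter, and preview view.
    - (GPUImageVideoCamera *)videoCamera {
        if (!_videoCamera) {
            _videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480
                                                               cameraPosition:AVCaptureDevicePositionBack];
            _videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
            _videoCamera.delegate = self;            // receive willOutputSampleBuffer:
            [_videoCamera addAudioInputsAndOutputs]; // needed so audio can be written to the movie
        }
        return _videoCamera;
    }

    - (GPUImageSaturationFilter *)saturationFilter {
        if (!_saturationFilter) {
            _saturationFilter = [[GPUImageSaturationFilter alloc] init];
            _saturationFilter.saturation = 0.0; // 0 gives a desaturated look (assumed value)
        }
        return _saturationFilter;
    }

    - (GPUImageView *)displayView {
        if (!_displayView) {
            _displayView = [[GPUImageView alloc] initWithFrame:self.frame];
        }
        return _displayView;
    }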
    

    VideoViewController.m

    - (void)setupVideoManager {
        _manager = [[VideoManager alloc] init];
        _manager.delegate = self;
        [_manager showWithFrame:CGRectMake(20, 120, LMW, LMH) superView:self.view];
        
    }
    
    - (void)shootVideo {
        [_manager startRecording];
    }
    
    - (void)stopVideo {
        [_manager stopRecording];
    }
    
    - (void)playVideo {
        _player = [[AVPlayerViewController alloc] init];
        _player.player = [[AVPlayer alloc] initWithURL:[NSURL fileURLWithPath:_filePath]];
        _player.videoGravity = AVLayerVideoGravityResizeAspect;
        [self presentViewController:_player animated:NO completion:nil];
    }
    
    - (void)didStartRecordVideo {
        NSLog(@"开始录制");
    }
    
    - (void)didCompressingVideo {
        NSLog(@"视频压缩中");
    }
    
    - (void)didEndRecordVideoWithTime:(CGFloat)totalTime outputFile:(NSString *)filePath {
        NSLog(@"录制完毕---%f秒,%@", totalTime, filePath);
        _filePath = filePath;
    }
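
    The controller above keeps _manager, _player, and _filePath around and implements the videoDelegate callbacks; a sketch of the declarations this implies is given below. LMW and LMH are layout macros from the original project and are left undefined here.

    // Assumed declarations backing VideoViewController.m
    #import <AVKit/AVKit.h>

    @interface VideoViewController () <videoDelegate>

    @property (nonatomic, strong) VideoManager *manager;          // keeps the recorder alive
    @property (nonatomic, strong) AVPlayerViewController *player; // plays back the finished file
    @property (nonatomic, copy)   NSString *filePath;             // output path delivered by the delegate

    @end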
    
