简介
本文章不讲解美颜和更多滤镜功能,只讲解我在真实项目需求中用到的功能封装。功能用 OC 编写——主要因为公司项目较多,OC 能兼容各种系统版本,兼容性强。
第一步导入库文件,可以使用pod 导入第三方GPUImage即可。
第二步那就是搭建布局了,这里省略.........(截个图吧,xib的,参考就好了,别太认真,哈哈哈哈)
第三步,开始我们正题吧,先说思路。
思路很简单,先打开摄像头,打开麦克风,显示摄像头内容在界面上,在界面内容里面添加水印显示,点击按钮进行录制,录制完成后,进行压缩转码(转MP4,这是需求=,=),获取第一帧显示在查看页面里面,然后播放内容。
代码走起。。
先简单介绍一下基本用法。。
/// Preview view that renders the camera feed on screen.
GPUImageView *cameraView = [[GPUImageView alloc] init];
/// Camera manager: 640x480 session preset, rear camera.
GPUImageVideoCamera *videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
/// Add the audio input/output up front (works around a black first-frame
/// thumbnail; capture works without it too).
[videoCamera addAudioInputsAndOutputs];
/// Base render filter.
GPUImageFilter *filter = [[GPUImageFilter alloc] init];
/// Feed camera frames into the filter.
[videoCamera addTarget:filter];
/// Render the filter output into the on-screen view.
[filter addTarget:cameraView];
/// Start capturing.
[videoCamera startCameraCapture];
录制
/// Recording streams straight to disk, so a local (sandbox) path is required.
NSURL * fileUrl = ......;// concrete path omitted.
/// Movie writer (output sink) at the recording resolution.
GPUImageMovieWriter *movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:fileUrl size:CGSizeMake(480, 640)];
movieWriter.encodingLiveVideo =YES;
movieWriter.shouldPassthroughAudio =YES;
/// Route the microphone audio into the writer.
videoCamera.audioEncodingTarget = movieWriter;
/// Key step: attach the on-screen filter to the writer.
/// If a watermark is used, attach the watermark-composited filter instead.
[filter addTarget:movieWriter];
/// Start recording.
[movieWriter startRecording];
完成录制
[movieWriter finishRecordingWithCompletionHandler:^{
    /// This handler runs on a background queue.
    dispatch_async(dispatch_get_main_queue(), ^{
        /// Recording finished — transcode/compress here.
    });
}];
开始我们封装大业了
先说配置文件吧。
VideoCameraConfig
#import <UIKit/UIKit.h>
#define kScreenBounds ([UIScreen mainScreen].bounds)
#define kScreenWidth (kScreenBounds.size.width)
#define kScreenHeight (kScreenBounds.size.height)
/// Heuristic "notched device" check (width >= 375 and height >= 812 points).
/// FIX: the whole expression is now parenthesized — the original unwrapped
/// `a && b` broke operator precedence when used inside larger expressions
/// (e.g. `IS_iPhoneX ? x : y` or `cond && IS_iPhoneX`).
#define IS_iPhoneX ([[UIScreen mainScreen] bounds].size.width >=375.0f && [[UIScreen mainScreen] bounds].size.height >=812.0f)
#define kSessionPresetWidth kScreenWidth
// Flash/torch mode cycled by the flash button.
typedef NS_ENUM(NSInteger, CameraCaptureMode) {
    /// Flash off
    CameraCaptureModeOff,
    /// Flash fires when capturing
    CameraCaptureModeOn,
    /// Automatic flash
    CameraCaptureModeAuto,
    /// Torch (continuous light)
    CameraCaptureModeTorch
};
/** Aspect ratio of the captured video. */
typedef NS_ENUM(NSInteger, CameraConfigLen2wid) {
    // NOTE(review): with the default value the capture preset falls back to
    // 1280x720 while the writer size falls back to 480x640 — confirm intended.
    CameraConfigLen2wid_default = 0, // default
    CameraConfigLen2wid_4_3 = 1, // 4:3 (640x480)
    CameraConfigLen2wid_16_9 = 2 // 16:9 (1280x720)
};
/// Camera configuration handed to +videoCamera:show:.
@interface VideoCameraConfig : NSObject
/** Aspect ratio: default, 4:3 or 16:9. */
@property (assign, nonatomic) CameraConfigLen2wid len2wid;
/** YES to render a timestamp watermark. */
@property (assign, nonatomic, getter=isTimestamp) BOOL timestamp;
/** YES to allow switching to the front camera. */
@property (assign, nonatomic, getter=isCamera) BOOL camera;
/** YES to render the tiled text watermark. */
@property (assign, nonatomic, getter=isWatermark) BOOL watermark;
/** Text used for the tiled watermark. */
@property (copy, nonatomic) NSString *watermarkFormat;
// ---------- Other ----------
/** Watermark font size. */
@property (assign, nonatomic) CGFloat watermarkSize;
/// Recording mode flag; kept around in case photo capture is added later.
@property (assign, nonatomic) BOOL isRecording;
@end
/// Result model delivered through SaveAllVideoBlock.
@interface VideoModel : NSObject
/// First-frame preview image
@property (strong, nonatomic) UIImage *photo;
/// Camera preview view height
@property (assign, nonatomic) CGFloat drawViewHeight;
/// URL of the (converted) video file
@property (strong, nonatomic) NSURL *videoURL;
/// Duration in seconds
@property (assign, nonatomic) int videoTime;
/// File size in bytes
@property (assign, nonatomic) NSInteger fileSize;
+ (instancetype)videoModelWithVideoURL:(NSURL *)url;
@end
GPUVideoCamera
#import <UIKit/UIKit.h>
@class VideoCameraConfig;
@class VideoModel;
// Invoked when the user confirms the finished recording.
typedef void (^SaveAllVideoBlock)(VideoModel *video);
@interface GPUVideoCamera : UIViewController
/// Maximum recording length in seconds; values <= 0 disable the limit.
@property (assign, nonatomic) NSInteger videoMaxSecond;
/** Registers the callback that receives the finished VideoModel. */
- (void)fetchSaveAllVideoWithCallBack:(SaveAllVideoBlock)callBack;
/// Creates the controller from its xib and presents it modally from viewContoller.
+ (instancetype)videoCamera:(VideoCameraConfig *)config
                       show:(UIViewController *)viewContoller;
@end
#import "GPUVideoCamera.h"
#import "GPUImage.h"
#import "VideoModel.h"
#import "VideoCameraConfig.h"
#import <AVFoundation/AVFoundation.h>
@interface GPUVideoCamera () <GPUImageMovieWriterDelegate>
/// Camera / capture-session manager
@property (strong, nonatomic) GPUImageVideoCamera * videoCamera;
/// Base pass-through filter fed by the camera
@property (strong, nonatomic) GPUImageFilter *filter;
/// Movie writer; non-nil only while a recording is in progress
@property (strong, nonatomic) GPUImageMovieWriter *movieWriter;
/// Watermark UI element rendered into the video stream
@property (strong, nonatomic) GPUImageUIElement *dissolveElement;
/// Blend filter compositing the watermark over the camera frames (display + writer)
@property (strong, nonatomic) GPUImageAlphaBlendFilter *dissolveFilter;
// Whether a focus operation is in progress
@property (assign, nonatomic) BOOL isFocus;
/// Whether the front camera is currently active
@property (assign, nonatomic) BOOL isDevicePositionFront;
/// Current flash/torch mode
@property (assign, nonatomic) CameraCaptureMode captureMode;
/// Configuration supplied at presentation time
@property (strong, nonatomic) VideoCameraConfig *config;
/// Timestamp watermark layer (owned by watermarkView's layer tree)
@property (weak, nonatomic) CALayer *timeLayer;
/// View hosting the watermark layers
@property (strong, nonatomic) UIView *watermarkView;
///////////////////// Internal controls (xib outlets) /////////////////////
/// Flash toggle button
@property (weak, nonatomic) IBOutlet UIButton *flashButton;
/// Front/rear camera switch button
@property (weak, nonatomic) IBOutlet UIButton *toggleButton;
/// Camera preview view
@property (weak, nonatomic) IBOutlet GPUImageView *cameraView;
/// Focus cursor image
@property (weak, nonatomic) IBOutlet UIImageView *focusCursor;
/// Photo-library button
@property (weak, nonatomic) IBOutlet UIButton *photoLibButton;
/// Record button
@property (weak, nonatomic) IBOutlet UIButton *recordingButton;
/// Confirm button
@property (weak, nonatomic) IBOutlet UIButton *saveButton;
/// Bottom panel height constraint
@property (weak, nonatomic) IBOutlet NSLayoutConstraint *panelViewHeight;
/// "Processing video" loading overlay
@property (weak, nonatomic) IBOutlet UIView *loadCDView;
/// Elapsed-time label (HH:MM:SS)
@property (weak, nonatomic) IBOutlet UILabel *timeLab;
/// One-second recording tick timer
@property (strong, nonatomic) NSTimer *recordTimer;
/// Elapsed recording time, in seconds
@property (assign, nonatomic) NSUInteger recordSecond;
/// Callback delivering the finished VideoModel
@property (copy, nonatomic) SaveAllVideoBlock callSaveAllVideoBlock;
@end
@implementation GPUVideoCamera
/// Creates the controller from its xib and presents it modally (wrapped in a
/// navigation controller with a hidden bar) from `viewContoller`.
+ (instancetype)videoCamera:(VideoCameraConfig *)config
                       show:(UIViewController *)viewContoller {
    // Load the xib from this class's own bundle so the controller still works
    // when shipped inside a private pod/framework (mainBundle lookup would fail).
    NSString *nibName = NSStringFromClass([self class]);
    NSBundle *nibBundle = [NSBundle bundleForClass:self];
    GPUVideoCamera *camera = [[self alloc] initWithNibName:nibName bundle:nibBundle];
    camera.config = config;
    UINavigationController *container = [[UINavigationController alloc] initWithRootViewController:camera];
    container.navigationBar.hidden = YES;
    [viewContoller presentViewController:container animated:YES completion:nil];
    return camera;
}
- (void)viewDidLoad {
    [super viewDidLoad];
    // Build the static UI first, then wire up the GPUImage capture pipeline.
    [self setupUI];
    [self loadCamera];
}
- (void)dealloc {
    // NOTE(review): log tag says "EvergrandeCamera" though the class is
    // GPUVideoCamera — presumably a leftover name; confirm before shipping.
    NSLog(@"EvergrandeCamera - dealloc");
    // Tear the capture session down with the controller.
    [self.videoCamera stopCameraCapture];
}
- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    // Re-enable the torch if "always on" mode was chosen before disappearing.
    // FIX: check the mode BEFORE taking the configuration lock — the original
    // acquired the lock first and, when the mode was not Torch, short-circuited
    // out without ever calling unlockForConfiguration.
    if (self.captureMode == CameraCaptureModeTorch &&
        [self.videoCamera.inputCamera lockForConfiguration:nil]) {
        [self.videoCamera.inputCamera setTorchMode:AVCaptureTorchModeOn];
        [self.videoCamera.inputCamera unlockForConfiguration];
    }
    // Resume capture when coming back on screen.
    // FIX: the original called stopCameraCapture on a non-running session,
    // which never (re)started the preview.
    if (!self.videoCamera.captureSession.isRunning) {
        [self.videoCamera startCameraCapture];
    }
    self.navigationController.interactivePopGestureRecognizer.enabled = NO;
    [[UIApplication sharedApplication] setStatusBarHidden:YES];
}
- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];
    // Stop capture while off screen.
    // FIX: the condition was inverted — the original only called stop when the
    // session was already NOT running, so capture kept going in the background.
    if (self.videoCamera.captureSession.isRunning) {
        [self.videoCamera stopCameraCapture];
    }
}
/// Output resolution of the recorded video, derived from the configured
/// aspect ratio (720x1280 for 16:9, otherwise 480x640).
- (CGSize)getVideoSize {
    BOOL wide = (self.config.len2wid == CameraConfigLen2wid_16_9);
    return wide ? CGSizeMake(720, 1280) : CGSizeMake(480, 640);
}
/// Lazily-created base filter. A beauty/retro/etc. effect could be substituted
/// here later; the current requirements only need a pass-through filter.
- (GPUImageFilter *)filter {
    if (_filter == nil) {
        _filter = [[GPUImageFilter alloc] init];
    }
    return _filter;
}
// Lazily-created camera manager: rear camera, portrait output, preset chosen
// from the configured aspect ratio.
- (GPUImageVideoCamera *)videoCamera {
    if (_videoCamera != nil) {
        return _videoCamera;
    }
    AVCaptureSessionPreset preset =
        (self.config.len2wid == CameraConfigLen2wid_4_3) ? AVCaptureSessionPreset640x480
                                                         : AVCaptureSessionPreset1280x720;
    _videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:preset
                                                       cameraPosition:AVCaptureDevicePositionBack];
    _videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
    // Mirror the front camera so its preview is not left/right flipped.
    _videoCamera.horizontallyMirrorFrontFacingCamera = YES;
    // Adding the audio IO up front works around a black first-frame thumbnail.
    [_videoCamera addAudioInputsAndOutputs];
    return _videoCamera;
}
/** Static UI setup. */
- (void)setupUI {
    // Focus cursor: 60x60 points, hidden until the user taps to focus.
    self.focusCursor.bounds = CGRectMake(0, 0, 60, 60);
    self.focusCursor.hidden = YES;
    // Remaining layout configuration omitted from the article.
    ///.......
}
/// Builds the GPUImage render chain:
/// camera -> base filter -> alpha-blend(watermark element) -> on-screen view.
- (void)loadCamera {
    // Off-screen view hosting the watermark layers; rendered into the video
    // stream through a GPUImageUIElement.
    UIView * contentView = [[UIView alloc] initWithFrame:(CGRect){{0,0},self.cameraView.bounds.size}];
    // Optional tiled text watermark.
    if (self.config.isWatermark) {
        CALayer *watermarkFormatLayer = [Tools watermarkFormatLayer:self.config.watermarkFormat size:[self getVideoSize] fontSize:30];
        if (watermarkFormatLayer != nil) {
            [contentView.layer addSublayer:watermarkFormatLayer];
        }
    }
    // Optional timestamp watermark.
    if (self.config.isTimestamp) {
        CALayer *timestampLayer = [Tools timeLayerSize:self.cameraView.frame.size fontSize:15];
        if (timestampLayer != nil) {
            [contentView.layer addSublayer:timestampLayer];
            self.timeLayer = timestampLayer;
        }
    }
    self.watermarkView = contentView;
    self.dissolveElement = [[GPUImageUIElement alloc] initWithView:contentView];
    // Blend filter compositing the watermark over the camera frames.
    GPUImageAlphaBlendFilter *dissolveFilter = [[GPUImageAlphaBlendFilter alloc] init];
    dissolveFilter.mix = 1.0;
    self.dissolveFilter = dissolveFilter;
    // Wire the chain in render order.
    [self.videoCamera addTarget:self.filter];
    [self.filter addTarget:dissolveFilter];
    [self.dissolveElement addTarget:dissolveFilter];
    // Final step: the composited output is what the user sees on screen.
    [dissolveFilter addTarget:self.cameraView];
    // Refresh the UI element every frame so layer changes (e.g. the clock)
    // reach the output. FIX: use __weak instead of __unsafe_unretained so the
    // capture cannot become a dangling pointer if the element is released
    // while frames are still being processed.
    __weak GPUImageUIElement *weakOverlay = self.dissolveElement;
    [self.filter setFrameProcessingCompletionBlock:^(GPUImageOutput *output, CMTime time) {
        [weakOverlay update];
    }];
    [self.videoCamera startCameraCapture];
}
/// Builds a unique-ish output name: Video_<yyyyMMddHHmmss>_<000-998>.mov
- (NSString *)getFileName {
    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    formatter.dateFormat = @"yyyyMMddHHmmss";
    NSString *stamp = [formatter stringFromDate:[NSDate date]];
    return [NSString stringWithFormat:@"Video_%@_%03d.mov", stamp, arc4random_uniform(999)];
}
/// Removes the item at `path`. Returns NO when it does not exist or removal fails.
- (BOOL)deleteFilePath:(NSString*)path {
    NSFileManager *manager = [NSFileManager defaultManager];
    return [manager removeItemAtPath:path error:nil];
}
/// Ensures the parent directory of `filePath` exists.
/// Returns YES only when the directory had to be (and was) created; NO when it
/// already existed. Callers currently ignore the result.
- (BOOL)createFileDirFilePath:(NSString *)filePath {
    NSString *directory = [filePath stringByDeletingLastPathComponent];
    NSFileManager *manager = [NSFileManager defaultManager];
    if ([manager fileExistsAtPath:directory]) {
        return NO;
    }
    return [manager createDirectoryAtPath:directory
              withIntermediateDirectories:YES
                               attributes:nil
                                    error:nil];
}
#pragma mark - action
/// Cycles the flash mode: Off -> On (or straight to Torch while recording)
/// -> Auto -> Torch -> Off. Applies flash/torch settings under the device
/// configuration lock.
- (IBAction)flashOnAction:(UIButton *)sender {
    AVCaptureDevice *camera = self.videoCamera.inputCamera;
    if (![camera lockForConfiguration:nil]) {
        return;
    }
    // Always drop the torch first; it is re-enabled below where appropriate.
    if (camera.torchMode == AVCaptureTorchModeOn) {
        [camera setTorchMode:AVCaptureTorchModeOff];
    }
    switch (self.captureMode) {
        case CameraCaptureModeOff: {
            if (self.config.isRecording) {
                // While recording, "On" means continuous light.
                self.captureMode = CameraCaptureModeTorch;
                [self applyFlashMode:AVCaptureFlashModeOn toCamera:camera];
                [self turnTorchOnIfSupported:camera];
            } else {
                self.captureMode = CameraCaptureModeOn;
                [self applyFlashMode:AVCaptureFlashModeOn toCamera:camera];
            }
            break;
        }
        case CameraCaptureModeOn: {
            self.captureMode = CameraCaptureModeAuto;
            [self applyFlashMode:AVCaptureFlashModeAuto toCamera:camera];
            break;
        }
        case CameraCaptureModeAuto: {
            self.captureMode = CameraCaptureModeTorch;
            [self applyFlashMode:AVCaptureFlashModeOn toCamera:camera];
            // FIX: the original tested `CaptureTorchModeOn`, an undeclared
            // symbol (missing AV prefix) that did not compile; the AVFoundation
            // constant is AVCaptureTorchModeOn.
            [self turnTorchOnIfSupported:camera];
            break;
        }
        case CameraCaptureModeTorch: {
            self.captureMode = CameraCaptureModeOff;
            [self applyFlashMode:AVCaptureFlashModeOff toCamera:camera];
            break;
        }
        default:
            break;
    }
    [camera unlockForConfiguration];
}
/// Sets `mode` on `camera` when supported. Caller must hold the configuration lock.
- (void)applyFlashMode:(AVCaptureFlashMode)mode toCamera:(AVCaptureDevice *)camera {
    if ([camera isFlashModeSupported:mode]) {
        [camera setFlashMode:mode];
    }
}
/// Turns the torch on when supported. Caller must hold the configuration lock.
- (void)turnTorchOnIfSupported:(AVCaptureDevice *)camera {
    if ([camera isTorchModeSupported:AVCaptureTorchModeOn]) {
        [camera setTorchMode:AVCaptureTorchModeOn];
    }
}
/// Toggles between the front and rear camera.
- (IBAction)changeCameraAction:(UIButton *)sender {
    [self.videoCamera rotateCamera];
}
/// Tap-to-focus gesture handler.
- (IBAction)focusGesture:(UITapGestureRecognizer *)gesture {
    CGPoint point = [gesture locationInView:gesture.view];
    [self focusAtPoint:point];
}
// Sets the focus/exposure point of interest and animates the focus cursor.
- (void)focusAtPoint:(CGPoint)point {
    CGSize size = self.view.bounds.size;
    // Shift the tap from preview-view coordinates into self.view coordinates.
    point.y += self.cameraView.frame.origin.y;
    // Map to AVFoundation's 0..1 point-of-interest space. The x/y swap and the
    // 1-x flip appear to assume a portrait UI over a landscape-oriented
    // sensor — TODO confirm for other orientations.
    CGPoint focusPoint = CGPointMake( point.y /size.height ,1-point.x/size.width );
    if([self.videoCamera.inputCamera isExposurePointOfInterestSupported] && [self.videoCamera.inputCamera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure])
    {
        NSError *error;
        if ([self.videoCamera.inputCamera lockForConfiguration:&error]) {
            [self.videoCamera.inputCamera setExposurePointOfInterest:focusPoint];
            [self.videoCamera.inputCamera setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
            // Also move the focus point when the device supports it.
            if ([self.videoCamera.inputCamera isFocusPointOfInterestSupported] && [self.videoCamera.inputCamera isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
                [self.videoCamera.inputCamera setFocusPointOfInterest:focusPoint];
                [self.videoCamera.inputCamera setFocusMode:AVCaptureFocusModeAutoFocus];
            }
            [self.videoCamera.inputCamera unlockForConfiguration];
        } else {
            NSLog(@"ERROR = %@", error);
        }
    }
    // Pulse the focus cursor at the tapped point, then hide it.
    self.focusCursor.center = point;
    self.focusCursor.hidden = NO;
    [UIView animateWithDuration:0.3 animations:^{
        self.focusCursor.transform = CGAffineTransformMakeScale(1.25, 1.25);
    }completion:^(BOOL finished) {
        [UIView animateWithDuration:0.5 animations:^{
            self.focusCursor.transform = CGAffineTransformIdentity;
        } completion:^(BOOL finished) {
            self.focusCursor.hidden = YES;
        }];
    }];
}
/// Record button tap: starts a new recording when idle, otherwise finishes the
/// current one and hands the file off to -movieRecordingCompleted.
- (IBAction)clickVideoButton:(UIButton *)sender{
    // Reset the one-second tick timer in either direction.
    if (self.recordTimer) {
        [self.recordTimer invalidate];
        self.recordTimer = nil;
    }
    if (!self.movieWriter) {
        // ---- start recording ----
        // Refresh the timestamp watermark layer so it shows the start time.
        // (FIX: removed an NSDateFormatter and formatted string the original
        // built here but never used.)
        [self.timeLayer removeFromSuperlayer];
        CALayer * timeLayer = [Tools timeLayerSize:self.cameraView.frame.size fontSize:15];
        [self.watermarkView.layer addSublayer:timeLayer];
        self.timeLayer = timeLayer;
        // Re-install the per-frame refresh so the new layer reaches the output.
        // (FIX: __weak instead of __unsafe_unretained — no dangling pointer.)
        __weak GPUImageUIElement *weakOverlay = self.dissolveElement;
        [self.filter setFrameProcessingCompletionBlock:^(GPUImageOutput *output, CMTime time) {
            [weakOverlay update];
        }];
        self.recordingButton.selected = YES;
        NSString *outputFilePath = [ThumbnailPath stringByAppendingPathComponent:[self getFileName]];
        [self createFileDirFilePath:outputFilePath];
        NSURL *fileUrl = [NSURL fileURLWithPath:outputFilePath];
        self.movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:fileUrl size:[self getVideoSize]];
        self.movieWriter.encodingLiveVideo = YES;
        self.movieWriter.shouldPassthroughAudio = YES;
        self.videoCamera.audioEncodingTarget = self.movieWriter;
        // The blended (watermarked) output is what gets written to disk.
        [self.dissolveFilter addTarget:self.movieWriter];
        [self.movieWriter startRecording];
        self.recordTimer = [NSTimer scheduledTimerWithTimeInterval:1 target:self selector:@selector(recordTimer:) userInfo:nil repeats:YES];
    } else {
        // ---- stop recording ----
        self.recordingButton.selected = NO;
        __weak typeof(self) weakSelf = self;
        [self.movieWriter finishRecordingWithCompletionHandler:^{
            // GPUImage invokes this on a background queue; hop to main for UI work.
            dispatch_async(dispatch_get_main_queue(), ^{
                [weakSelf movieRecordingCompleted];
            });
        }];
    }
}
/// One-second tick: advances the HH:MM:SS label and auto-stops the recording
/// once videoMaxSecond (when positive) is reached.
- (void)recordTimer:(NSTimer *)timer {
    self.recordSecond ++;
    NSUInteger elapsed = self.recordSecond;
    self.timeLab.text = [NSString stringWithFormat:@"%02lu:%02lu:%02lu",
                         (unsigned long)(elapsed / 3600),
                         (unsigned long)((elapsed % 3600) / 60),
                         (unsigned long)(elapsed % 60)];
    if (self.videoMaxSecond > 0 && elapsed >= self.videoMaxSecond) {
        [self clickVideoButton:nil];
    }
}
#pragma mark - GPUImageMovieWriter
/// Stores the callback invoked with the finished VideoModel.
/// (FIX: declared in the header but missing from the implementation.)
- (void)fetchSaveAllVideoWithCallBack:(SaveAllVideoBlock)callBack {
    self.callSaveAllVideoBlock = callBack;
}
/// Detaches the writer, validates the clip, converts it to MP4 and delivers
/// the resulting VideoModel through the saved callback. Runs on main.
- (void)movieRecordingCompleted {
    NSURL * outputURL = self.movieWriter.assetWriter.outputURL;
    // Detach the writer from the render chain.
    [self.dissolveFilter removeTarget:self.movieWriter];
    self.videoCamera.audioEncodingTarget = nil;
    NSLog(@"录制成功");
    // Reject clips shorter than one second.
    if (self.recordSecond < 1) {
        self.loadCDView.hidden = YES;
        UIAlertController * alert = [UIAlertController alertControllerWithTitle:@"提示" message:@"视频制作失败,请录制大于1秒的视频。" preferredStyle:UIAlertControllerStyleAlert];
        [alert addAction:[UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleCancel handler:nil]];
        [self presentViewController:alert animated:YES completion:nil];
        self.recordSecond = 0;
        self.timeLab.text = @"00:00:00";
        // FIX: the original early-returned without clearing movieWriter, so the
        // next record tap landed in the "stop recording" branch.
        self.movieWriter = nil;
        return ;
    }
    self.recordSecond = 0;
    self.timeLab.text = @"00:00:00";
    self.loadCDView.hidden = NO;
    // Compress/convert the .mov into .mp4.
    [outputURL toMp4:^(BOOL success, NSURL *mp4) {
        dispatch_async(dispatch_get_main_queue(), ^{
            self.loadCDView.hidden = YES;
            if (success) {
                // Drop the intermediate .mov.
                // FIX: use .path — .resourceSpecifier is not a file-system path.
                [self deleteFilePath:outputURL.path];
                VideoModel *videoModel = [VideoModel videoModelWithVideoURL:mp4];
                // First frame becomes the preview image.
                videoModel.photo = [mp4 getScreenShotImage];
                // Deliver the result and dismiss.
                if (self.callSaveAllVideoBlock) {
                    self.callSaveAllVideoBlock(videoModel);
                }
                [[UIApplication sharedApplication] setStatusBarHidden:NO];
                [self dismissViewControllerAnimated:YES completion:nil];
            } else {
                UIAlertController * alert = [UIAlertController alertControllerWithTitle:@"提示" message:@"视频制作失败" preferredStyle:UIAlertControllerStyleAlert];
                [alert addAction:[UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleCancel handler:nil]];
                [self presentViewController:alert animated:YES completion:nil];
            }
        });
    }];
    self.movieWriter = nil;
}
@end
/// Key for the file size (bytes) in -getVideoInfo results.
extern NSString *const FileSize;
/// Key for the video duration (seconds) in -getVideoInfo results.
extern NSString *const VideoDuration;
/// Video helpers on NSURL: thumbnailing, MP4 conversion and metadata.
@interface NSURL (Video)
/// First-frame snapshot of the video at this URL.
- (UIImage *)getScreenShotImage;
/// Converts the file to MP4; block receives (success, output URL or nil).
- (void)toMp4:(void(^)(BOOL,NSURL*))block;
/// Returns @{FileSize: bytes, VideoDuration: seconds}.
- (NSDictionary <NSString *, NSNumber *>*)getVideoInfo;
/// File size in bytes.
- (NSInteger)fileSize;
@end
NSString *const FileSize = @"size";
/// Video duration key.
NSString *const VideoDuration = @"duration";
/// The implementations below are adapted from common online samples.
@implementation NSURL (Video)
/// Returns the first frame of the video at this URL, or nil when a frame
/// cannot be decoded. (FIX: on failure the original passed a NULL CGImageRef
/// into -[UIImage initWithCGImage:] instead of bailing out.)
- (UIImage *)getScreenShotImage {
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:self options:nil];
    AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    generator.appliesPreferredTrackTransform = YES;
    NSError *error = nil;
    CMTime actualTime;
    CGImageRef image = [generator copyCGImageAtTime:CMTimeMakeWithSeconds(0.0, 600)
                                         actualTime:&actualTime
                                              error:&error];
    if (image == NULL) {
        return nil;
    }
    // flipHorizontal is a project-local UIImage category; presumably it
    // compensates for mirrored capture — TODO confirm.
    UIImage *shotImage = [[[UIImage alloc] initWithCGImage:image] flipHorizontal];
    CGImageRelease(image);
    return shotImage;
}
/// Converts the media at this URL to an MP4 file under ThumbnailPath.
/// `block` is invoked exactly once with (success, output URL or nil).
- (void)toMp4:(void (^)(BOOL,NSURL *))block {
    // Already MP4: nothing to do. (FIX: compare case-insensitively so "MP4"
    // files are not pointlessly re-exported.)
    if ([self.pathExtension.lowercaseString isEqualToString:@"mp4"]) {
        block(YES,self);
        return;
    }
    NSString *lastPath = [[self.absoluteString lastPathComponent] stringByDeletingPathExtension];
    AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:self options:nil];
    NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:avAsset];
    // FIX: test the preset that is actually used — the original checked
    // HighestQuality but exported with MediumQuality.
    if (![compatiblePresets containsObject:AVAssetExportPresetMediumQuality]) {
        block(NO,nil);
        return;
    }
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:avAsset presetName:AVAssetExportPresetMediumQuality];
    NSString * resultPath = [ThumbnailPath stringByAppendingPathComponent:[NSString stringWithFormat: @"%@.mp4", lastPath]];
    // Make sure the destination directory exists.
    [Tools createFileDirFilePath:resultPath];
    exportSession.outputURL = [NSURL fileURLWithPath:resultPath];
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.shouldOptimizeForNetworkUse = YES;
    [exportSession exportAsynchronouslyWithCompletionHandler:^(void)
    {
        switch (exportSession.status) {
            case AVAssetExportSessionStatusCompleted:
                block(YES,exportSession.outputURL);
                break;
            // Transient states: do NOT invoke the block here.
            // (FIX: the original reported failure on Exporting, which risked
            // invoking the completion block twice.)
            case AVAssetExportSessionStatusWaiting:
            case AVAssetExportSessionStatusExporting:
                break;
            // Unknown / Failed / Cancelled all count as failure.
            default:
                block(NO,nil);
                break;
        }
    }];
}
/// Returns @{FileSize: bytes, VideoDuration: whole seconds, rounded up}.
- (NSDictionary *)getVideoInfo {
    AVURLAsset * asset = [AVURLAsset assetWithURL:self];
    // FIX: the original divided two integers before calling ceil(), which
    // truncated the fractional part first (ceil of an int is a no-op);
    // CMTimeGetSeconds keeps the precision.
    int seconds = (int)ceil(CMTimeGetSeconds(asset.duration));
    NSInteger fileSize = [self fileSize];
    return @{FileSize : @(fileSize),
             VideoDuration : @(seconds)};
}
/// File size in bytes of the item at this file URL; 0 when unavailable.
- (NSInteger)fileSize {
    // FIX: use .path — .resourceSpecifier keeps the "//" authority prefix of a
    // file URL and is not a valid file-system path.
    NSDictionary *attributes = [[NSFileManager defaultManager] attributesOfItemAtPath:self.path error:nil];
    return (NSInteger)attributes.fileSize;
}
@end
上面都是重要的逻辑 就这样吧。
还有一些复杂的布局我这边都过滤掉了,上面都是比较常见的操作及布局,根据自己的需求然后自定义吧。
大体封装就这样子,demo的话目前没有上传,以后再说哈哈哈哈。
总结
GPUImage
这个库还是有坑的:获取视频第一帧图片时,就算加了 addAudioInputsAndOutputs
这个方法,取到的第一帧仍然可能是黑屏。我查阅的很多资料都是通过重写作者 GPUImageMovieWriter
里面的代码来解决这个问题的。
网友评论