实现视频自定义截取时间这个需求还是很简单的,AVFoundation 提供的 AVAssetExportSession 这个类可以很方便地实现这个功能。效果图如下:
IMG_5333.PNG
左右两边的拖拽框添加手势事件,通过触摸点的变化算出要 seek 到的时间,再调用 playerView 的 seekToTime: 方法即可。
下面显示视频画面的区域可以用 UIScrollView 实现:取得视频每一秒的缩略图,依次放在 UIScrollView 上即可。取视频缩略图的代码如下:
// Build one thumbnail per second of the video and lay them out horizontally
// in scrollView. Reads _filePath; writes videoTotalTime, imgWidth,
// self.framesArray; populates scrollView.
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:_filePath] options:nil];
// CMTimeGetSeconds avoids the integer truncation of value/timescale
// (both are integer fields, so the original division dropped fractions).
videoTotalTime = CMTimeGetSeconds(asset.duration);
NSLog(@"视频总时长:_____%f", videoTotalTime);
// Frame rate of the first (primary) video track.
CGFloat fps = [[[asset tracksWithMediaType:AVMediaTypeVideo] firstObject] nominalFrameRate];
NSLog(@"视频帧率_____%f", fps);
AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
// Honor the track's preferred transform so rotated footage comes out upright.
generator.appliesPreferredTrackTransform = YES;
// Uncomment for frame-exact thumbnails (noticeably slower):
// generator.requestedTimeToleranceBefore = kCMTimeZero;
// generator.requestedTimeToleranceAfter = kCMTimeZero;
// Request a thumbnail at each whole second of playback.
// 600 is the conventional timescale (divides 24/25/30/60 fps evenly);
// the original passed the float fps as a timescale, which CMTime truncates.
for (int i = 1; i <= videoTotalTime; i++) {
    CMTime time = CMTimeMakeWithSeconds(i, 600);
    [self.framesArray addObject:[NSValue valueWithCMTime:time]];
}
// Size each thumbnail so short videos still fill the scroll view's width.
if (self.framesArray.count <= 5) {
    imgWidth = scrollView.frame.size.width / self.framesArray.count;
} else {
    imgWidth = 50;
}
NSLog(@"imgWidth:%f___%@", imgWidth, self.framesArray);
// The total content width is known up front — set it once, on the main
// thread, instead of growing it from racing background callbacks.
CGFloat totalWidth = imgWidth * self.framesArray.count;
dispatch_async(dispatch_get_main_queue(), ^{
    scrollView.contentSize = CGSizeMake(totalWidth, 0.0);
});
[generator generateCGImagesAsynchronouslyForTimes:self.framesArray completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
    if (result == AVAssetImageGeneratorSucceeded) {
        // Derive the slot from the requested second so callbacks that arrive
        // out of order land in the right place (no shared counter, no race).
        NSInteger index = (NSInteger)llround(CMTimeGetSeconds(requestedTime)) - 1;
        // Wrap the CGImageRef immediately — it is not valid after the handler returns.
        UIImage *thumbnail = [UIImage imageWithCGImage:image];
        dispatch_async(dispatch_get_main_queue(), ^{
            // UIKit is main-thread-only: create AND add the image view here,
            // not on the generator's background callback queue.
            UIImageView *imageView = [[UIImageView alloc] initWithFrame:CGRectMake(imgWidth * index, 0, imgWidth, 66)];
            imageView.image = thumbnail;
            [scrollView addSubview:imageView];
        });
    } else if (result == AVAssetImageGeneratorFailed) {
        NSLog(@"出错了:%@", error.localizedDescription);
    } else if (result == AVAssetImageGeneratorCancelled) {
        // No NSError is delivered on cancellation — don't log a nil "error".
        NSLog(@"缩略图生成已取消");
    }
}];
UI层面的东西就不再多说,大家自己实现即可。核心还是使用AVAssetExportSession
导出视频时设定自定义的时间,代码如下:
// Trim the [startTime, endTime] range (in seconds) out of the video at
// videoPath and export it asynchronously to compressionFileURL, reporting
// the outcome through completeBlock(error, outputURL).
//
// Use the player's current timescale so the trim points line up exactly
// with the times the user scrubbed to during playback.
CMTime start = CMTimeMakeWithSeconds(startTime, playerView.player.currentTime.timescale);
CMTime duration = CMTimeMakeWithSeconds((endTime - startTime), playerView.player.currentTime.timescale);
CMTimeRange timeRange = CMTimeRangeMake(start, duration);
// Source asset to export from.
AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:videoPath]];
// presetName (e.g. AVAssetExportPresetMediumQuality) controls quality/size.
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:presetName];
// Write a streaming-friendly file (moov atom placed at the front).
exporter.shouldOptimizeForNetworkUse = YES;
// Destination URL — the file must not already exist or the export fails.
exporter.outputURL = compressionFileURL;
// Container format, e.g. AVFileTypeMPEG4.
exporter.outputFileType = outputFileType;
// Restrict the export to the user-selected range.
exporter.timeRange = timeRange;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    // The handler runs on an arbitrary background queue; hop to the main
    // queue before invoking caller code that may touch UI.
    dispatch_async(dispatch_get_main_queue(), ^{
        switch (exporter.status) {
            case AVAssetExportSessionStatusFailed: {
                if (completeBlock) {
                    completeBlock(exporter.error, compressionFileURL);
                }
                break;
            }
            case AVAssetExportSessionStatusCancelled: {
                // NOTE(review): the caller is not notified on cancellation —
                // confirm whether completeBlock should fire here too.
                NSLog(@"Export Status: Cancelled");
                break;
            }
            case AVAssetExportSessionStatusCompleted: {
                if (completeBlock) {
                    completeBlock(nil, compressionFileURL);
                }
                break;
            }
            case AVAssetExportSessionStatusUnknown: {
                NSLog(@"Export Status: Unknown");
                break;
            }
            case AVAssetExportSessionStatusExporting: {
                NSLog(@"Export Status: Exporting");
                break;
            }
            case AVAssetExportSessionStatusWaiting: {
                NSLog(@"Export Status: Waiting");
                break;
            }
        };
    });
}];
有什么问题,欢迎大家留言讨论。
网友评论