自己遇到的坑:
1.视频和音频需要分开合成;
2.竖屏拍摄的视频,合成后改变了方向, 只好自己更改方向
实现步骤:
在.h中添加合成时需要调用的方法
//多个分段视频合成
/*
array为本地保存的短视频数组,元素为NSURL类型 比如:
file:///var/mobile/Containers/Data/Application/BBF38CEF-BB5D-40BA-82D5-4A9508BEA4F9/Library/Caches/我的短视频/video_0.mov,
file:///var/mobile/Containers/Data/Application/BBF38CEF-BB5D-40BA-82D5-4A9508BEA4F9/Library/Caches/我的短视频/video_1.mov
*/
- (void)synthesisMedia:(NSMutableArray *)array;
在.m中添加
#import <AVFoundation/AVFoundation.h>
#define ShootVideoPath @"录制视频"
#define ShootVideoName @"我的短视频"
- 1.创建一个 AVMutableComposition 合成媒体数据,并依次插入各段视频/音频轨道
// Merges multiple local video clips, in order, into one AVMutableComposition.
// Video and audio must be inserted into separate composition tracks.
// array: elements are NSURLs of local video files.
// Returns the composed (not yet exported) AVMutableComposition.
- (AVMutableComposition *)mergeVideostoOnevideo:(NSArray *)array {
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    // Video track
    AVMutableCompositionTrack *compositionVideoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    // Audio track
    AVMutableCompositionTrack *compositionAudioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];

    // Accumulate the insertion point as a CMTime. The previous version used
    // CMTimeMakeWithSeconds(tmpDuration, 0): a timescale of 0 yields an
    // invalid CMTime, and round-tripping through Float64 seconds loses
    // precision at clip boundaries.
    CMTime insertTime = kCMTimeZero;
    AVAssetTrack *firstVideoTrack = nil;

    for (NSUInteger i = 0; i < array.count; i++) {
        AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:array[i] options:nil];
        CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);

        // firstObject is nil-safe; objectAtIndex:0 would crash on a clip
        // that is missing the expected track.
        AVAssetTrack *videoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
        AVAssetTrack *audioTrack = [videoAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
        if (videoTrack == nil) {
            NSLog(@"Skipping clip %lu: no video track", (unsigned long)i);
            continue;
        }
        if (firstVideoTrack == nil) {
            firstVideoTrack = videoTrack;
        }

        NSError *error = nil;
        // Insert the video segment
        if (![compositionVideoTrack insertTimeRange:timeRange
                                            ofTrack:videoTrack
                                             atTime:insertTime
                                              error:&error]) {
            NSLog(@"Failed to insert video for clip %lu: %@", (unsigned long)i, error);
        }
        // Insert the audio segment (a clip may legitimately have no audio)
        if (audioTrack != nil) {
            error = nil;
            if (![compositionAudioTrack insertTimeRange:timeRange
                                                ofTrack:audioTrack
                                                 atTime:insertTime
                                                  error:&error]) {
                NSLog(@"Failed to insert audio for clip %lu: %@", (unsigned long)i, error);
            }
        }
        insertTime = CMTimeAdd(insertTime, videoAsset.duration);
    }

    // Portrait-recorded clips lose their orientation after composition.
    // Carry over the source track's transform instead of hard-coding a
    // M_PI/2 rotation, so landscape clips are not rotated incorrectly.
    if (firstVideoTrack != nil) {
        compositionVideoTrack.preferredTransform = firstVideoTrack.preferredTransform;
    }
    return mixComposition;
}
- 2.创建一个AVMutableComposition并调用上一步的方法,以此合成视频
//分段多个视频 进行合成
// Merges the segment videos in `array` (NSURL elements) and exports the
// result asynchronously as a single MP4 to a custom path in Caches.
- (void)synthesisMedia:(NSMutableArray *)array {
    AVMutableComposition *mixComposition = [self mergeVideostoOnevideo:array];
    AVAssetExportSession *assetExport =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetMediumQuality];
    assetExport.outputFileType = AVFileTypeMPEG4;
    NSURL *filUrl = [self getVideosURLPath:ShootVideoName]; // custom output path
    // AVAssetExportSession fails when a file already exists at outputURL,
    // so remove any leftover from a previous export first.
    [[NSFileManager defaultManager] removeItemAtURL:filUrl error:nil];
    assetExport.outputURL = filUrl; // output path of the merged video
    assetExport.shouldOptimizeForNetworkUse = YES;
    // Start the export
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        // Hop back to the main thread for any UI / album work
        dispatch_async(dispatch_get_main_queue(), ^{
            // The completion handler also fires on failure/cancel — check status.
            if (assetExport.status != AVAssetExportSessionStatusCompleted) {
                NSLog(@"Video export failed: %@", assetExport.error);
                return;
            }
            AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:filUrl options:nil];
            // CMTimeGetSeconds avoids the integer division the old
            // time.value/time.timescale expression performed.
            NSInteger seconds = (NSInteger)ceil(CMTimeGetSeconds(videoAsset.duration));
            NSLog(@"Merged video duration = %ld s", (long)seconds);
            // Optionally keep a copy in the system photo album (requires
            // photo-library permission, otherwise the app crashes):
            //UISaveVideoAtPathToSavedPhotosAlbum([filUrl path], nil, nil, nil);
            //!!filUrl为视频的输出路径!!
        });
    }];
}
- 3.自定义输出路径
//拼接输出路径
// Builds the output file URL: <Caches>/<ShootVideoPath>/<videoName>.mp4,
// creating the directory on the way if needed.
- (NSURL *)getVideosURLPath:(NSString *)videoName {
    NSArray<NSString *> *cachePaths =
        NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
    NSString *directory = [cachePaths[0] stringByAppendingPathComponent:ShootVideoPath];
    directory = [self action_addFiles:directory]; // ensure the folder exists
    NSString *fileName = [NSString stringWithFormat:@"%@.mp4", videoName];
    NSString *fullPath = [directory stringByAppendingPathComponent:fileName];
    return [NSURL fileURLWithPath:fullPath];
}
//新建文件并返回地址
// Ensures the directory at `path` exists (creating intermediate folders
// if necessary) and returns `path` unchanged.
- (NSString *)action_addFiles:(NSString *)path {
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if (![fileManager fileExistsAtPath:path]) {
        // Directory missing — create it (errors deliberately ignored,
        // matching the original best-effort behavior)
        [fileManager createDirectoryAtPath:path
               withIntermediateDirectories:YES
                                attributes:nil
                                     error:nil];
    }
    return path;
}
网友评论