I recently worked on an English-learning app: users read sentences along with a video to complete a dubbing exercise, and then their multiple recordings are merged with the original video into a new one.
I had read several blog posts on similar topics, but they all cover merging one video with one audio file, stripping the audio from a video, or extracting the audio. None of them show how to splice multiple audio clips into an existing video at given time points, which you can also think of as replacing the original audio over those segments.
⚠️⚠️ Heads-up: I recently found that merging audio and video fails on iPhone XR/XS. The failure happens at the export step:
Error Domain=AVFoundationErrorDomain Code=-11821 “Cannot Decode” UserInfo={NSLocalizedFailureReason=The media data could not be decoded. It may be damaged., NSLocalizedDescription=Cannot Decode, NSUnderlyingError=0x28338c4e0 {Error Domain=NSOSStatusErrorDomain Code=-16977 “(null)“}}
I found others hitting this while encoding with ffmpeg in C++, and one report on Stack Overflow, but none of them had a solution!!! After testing many approaches and all kinds of parameter settings, it turned out to be caused by the "presetName": only AVAssetExportPresetPassthrough succeeds: AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough]; With that, the export goes through.
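To make the fix concrete, here is a minimal sketch of the export setup that worked for me, assuming mixComposition is the finished AVMutableComposition and outputFileUrl the destination (both are illustrative names, not fixed API):

    // Passthrough skips re-encoding, which is what avoids the -11821 "Cannot Decode" error here.
    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                         presetName:AVAssetExportPresetPassthrough];
    // Sanity check: passthrough only supports file types compatible with the source tracks.
    if (![assetExport.supportedFileTypes containsObject:AVFileTypeQuickTimeMovie]) {
        NSLog(@"Passthrough cannot write a QuickTime movie for this composition");
    }
    assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    assetExport.outputURL = outputFileUrl;
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"status: %ld, error: %@", (long)assetExport.status, assetExport.error);
    }];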
If anything is unclear, feel free to add me on QQ: 3376512101
1. Approach
Step 1: extract the audio (Ymp3) and the video (Ymp4) from the original video separately;
Step 2: merge your multiple recordings into the extracted original audio (Ymp3) at their time points, producing a new audio file (Xmp3);
Step 3: merge the new audio (Xmp3) with the extracted video (Ymp4) back into a new video (Perfect.mp4).
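For step 1, the extraction boils down to reading the two tracks off the source asset. A minimal sketch, assuming originalVideoPath (a placeholder) points at the source video:

    AVURLAsset *sourceAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:originalVideoPath] options:nil];
    // The original's video track (Ymp4) and audio track (Ymp3)
    AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    AVAssetTrack *sourceAudioTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];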
⚠️⚠️ ------- Part of the earlier write-up was wrong, so let me walk through the approach again; the earlier explanation, especially of step 2, wasn't very clear.
There are three ways to merge multiple recordings with the original audio:
- Cut each scheduled segment out of the original audio track up front, then insert each recording into the track at its time point;
- Before inserting each recording, remove that time range from the original audio track, then insert the recording at that time (effectively the same as the first option);
- A shortcut: create a new audio track for each recording, turn the original track's volume down to 0 over that range, and then mix the new tracks with the original track (see the sketch after this list); AVMutableCompositionTrack * audioTrack = [_composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
- [The reference code follows below.]
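For completeness, a minimal sketch of the third (volume-based) approach, which the reference code below does not use (it uses remove-then-insert). recordingPath, _composition, originalAudioTrack, and audioMixParams are illustrative names:

    // One extra track per recording; the original audio is ducked to 0 over that range.
    AVURLAsset *recordingAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:recordingPath] options:nil];
    CMTime insertTime = CMTimeMakeWithSeconds(3.5, 600); // where this recording replaces the original audio
    AVMutableCompositionTrack *recordingTrack = [_composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                          preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *recordingSource = [[recordingAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    [recordingTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, recordingAsset.duration)
                            ofTrack:recordingSource
                             atTime:insertTime
                              error:nil];
    // Silence the original track for the duration of the recording, then restore it.
    AVMutableAudioMixInputParameters *params = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:originalAudioTrack];
    [params setVolume:0.0f atTime:insertTime];
    [params setVolume:1.0f atTime:CMTimeAdd(insertTime, recordingAsset.duration)];
    [audioMixParams addObject:params];
    // Before exporting, set exporter.audioMix.inputParameters from audioMixParams.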
2. Code
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>

@interface HJMergeVideoWithMusic : NSObject {
    NSInteger mergeIndex;                       // index of the recording currently being merged
    NSArray *musicUrlArray;                     // all the audio recordings
    NSArray *_musicStartTimes;                  // the time ranges of the recordings
    NSString *_videoPath;                       // path of the video
    AVMutableCompositionTrack *_audio_track;    // the audio track extracted from the video
    AVMutableComposition *_composition;
    NSMutableArray *_audioMixParams;
}
@property (nonatomic, strong) UIView *baseView;
+ (instancetype)shared;
/**
 Add background music to a video that has none

 @param musicPath path of the background music
 @param videoPath path of the video
 @param savePath path to save the merged video
 @param successBlock called on success
 */
+ (void)mergeVideoWithMusic:(NSString *)musicPath noBgMusicVideo:(NSString *)videoPath saveVideoPath:(NSString *)savePath success:(void(^)(NSURL *recordPath))successBlock;
// Extract the original video's audio and mix it with the desired recordings
/**
 Merge audio into a video

 @param musicArrayPath audio files (can be multiple; with multiple files, segments of the original audio can be replaced)
 @param musicStartTimes time ranges of the recordings (start times)
 @param videoPath the video
 @param successBlock called on success
 */
- (void)mergeVideoWithMusicArray:(NSArray *)musicArrayPath musicStartTime:(NSArray *)musicStartTimes video:(NSString *)videoPath success:(void(^)(NSURL *recordPath))successBlock;
@end
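An illustrative call site. The paths are placeholders, and the dictionary shape follows the schedule/enterTime keys that the implementation below reads:

    NSString *recordingPath1 = @"/path/to/recording1.m4a";   // placeholder
    NSString *recordingPath2 = @"/path/to/recording2.m4a";   // placeholder
    NSString *originalVideoPath = @"/path/to/original.mp4";  // placeholder
    NSArray *recordings = @[recordingPath1, recordingPath2];
    NSArray *startTimes = @[
        @{ @"schedule": @{ @"enterTime": @"3.5" } },   // first recording starts at 3.5s
        @{ @"schedule": @{ @"enterTime": @"12.0" } },  // second recording starts at 12.0s
    ];
    [[HJMergeVideoWithMusic shared] mergeVideoWithMusicArray:recordings
                                              musicStartTime:startTimes
                                                       video:originalVideoPath
                                                     success:^(NSURL *recordPath) {
        NSLog(@"merged video at %@", recordPath);
    }];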
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [.m file] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@implementation HJMergeVideoWithMusic
static HJMergeVideoWithMusic *manager;
/*
 Singleton
 */
+ (instancetype)shared {
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        manager = [HJMergeVideoWithMusic new];
    });
    return manager;
}
/**
 Add background music to a video that has none

 @param musicPath path of the background music
 @param videoPath path of the video
 @param savePath path to save the merged video
 @param successBlock called on success
 */
+ (void)mergeVideoWithMusic:(NSString *)musicPath noBgMusicVideo:(NSString *)videoPath saveVideoPath:(NSString *)savePath success:(void(^)(NSURL *recordPath))successBlock {
    // Audio source
    NSURL *audioInputUrl = [NSURL fileURLWithPath:musicPath];
    // Video source
    NSURL *videoInputUrl = [NSURL fileURLWithPath:videoPath];
    // Output path for the merged file
    NSURL *outputFileUrl = [NSURL fileURLWithPath:savePath];
    // Insertion start time
    CMTime nextClipStartTime = kCMTimeZero;
    // Create a mutable audio/video composition
    AVMutableComposition *composition = [AVMutableComposition composition];
    // Load the video
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoInputUrl options:nil];
    // Time range of the video
    CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    // Video track (kCMPersistentTrackID_Invalid = 0 lets the composition pick an ID)
    AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    // Source video track
    AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    // Insert the source video track into the mutable track
    [videoTrack insertTimeRange:videoTimeRange ofTrack:videoAssetTrack atTime:nextClipStartTime error:nil];
    // Load the audio
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioInputUrl options:nil];
    // The video is shorter here, so its duration is used directly; compare the two lengths if this needs to be automatic
    CMTimeRange audioTimeRange = videoTimeRange;
    // Audio track
    AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    // Source audio track
    AVAssetTrack *audioAssetTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    // Insert it into the composition
    [audioTrack insertTimeRange:audioTimeRange ofTrack:audioAssetTrack atTime:nextClipStartTime error:nil];
    // Create an export session
    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
    // Output type
    assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    // Remove any stale file at the destination first
    if ([[NSFileManager defaultManager] fileExistsAtPath:savePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:savePath error:nil];
    }
    // Output destination (the savePath passed in)
    assetExport.outputURL = outputFileUrl;
    // Optimize for network playback
    assetExport.shouldOptimizeForNetworkUse = YES;
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        // Call back on the main thread once the export finishes
        dispatch_async(dispatch_get_main_queue(), ^{
            if (assetExport.status == AVAssetExportSessionStatusCompleted) {
                successBlock(outputFileUrl);
            } else {
                NSLog(@"Export failed: %@", assetExport.error);
            }
        });
    }];
}
/**
 Merge audio into a video

 @param musicArrayPath the recordings (audio array)
 @param musicStartTimes time ranges of the recordings (start times)
 @param videoPath the video
 @param successBlock called on success
 */
- (void)mergeVideoWithMusicArray:(NSArray *)musicArrayPath musicStartTime:(NSArray *)musicStartTimes video:(NSString *)videoPath success:(void(^)(NSURL *recordPath))successBlock {
    musicUrlArray = [[NSArray alloc] initWithArray:musicArrayPath];
    _musicStartTimes = [[NSArray alloc] initWithArray:musicStartTimes];
    _videoPath = videoPath;
    // Step 1: mix the recordings with the video's audio, producing one mixed audio file
    // Create a mutable audio/video composition
    _composition = [AVMutableComposition composition];
    _audioMixParams = [[NSMutableArray alloc] init];
    // The main audio track
    _audio_track = [_composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    // The recorded video
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoPath];
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:video_inputFileUrl options:nil];
    // The video's audio track
    AVAssetTrack *sourceAudioTrack = [[songAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    NSError *error = nil;
    CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, songAsset.duration);
    // First, insert the video's entire audio track at the start of the (empty) main audio track
    BOOL isFinish = [_audio_track insertTimeRange:videoTimeRange ofTrack:sourceAudioTrack atTime:kCMTimeZero error:&error];
    if (isFinish) {
        mergeIndex = 0;
        // Edit the extracted audio (multiple dubbed segments); this method replaces one segment of the original audio per call
        [self setUpAndAddAudioArrayAtPath:musicUrlArray[0] andAVAssetTrack:sourceAudioTrack success:successBlock];
    } else {
        NSURL *url = [NSURL URLWithString:@""];
        successBlock(url);
        [MBProgressHUD show:@"Merge failed!" view:nil];
        return;
    }
    // If only one background track is needed, this path also works, and supports keeping the original audio and the dubbing together:
    // NSURL *assetURL2 = [NSURL fileURLWithPath:musicPath];
    // [self setUpAndAddAudioAtPath:assetURL2 toComposition:composition start:startTime dura:trackDuration offset:CMTimeMake(0, 44100) addAudioParams:audioMixParams setVolume:0.0f];
}
// Build and add an audio asset from a file path (multiple recordings, one per call)
- (void)setUpAndAddAudioArrayAtPath:(NSString *)audioPath andAVAssetTrack:(AVAssetTrack *)videoTrack success:(void(^)(NSURL *recordPath))successBlock {
    AVURLAsset *audioAsset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:audioPath]];
    if ([audioAsset tracksWithMediaType:AVMediaTypeAudio].count > 0) {
        AVAssetTrack *sourceAudioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        NSError *error = nil;
        // Timing data for this recording
        NSDictionary *audioDic = _musicStartTimes[mergeIndex];
        NSDictionary *schedule = [audioDic dictValueWithKey:@"schedule"];
        NSString *enterTime = [schedule stringValueWithKey:@"enterTime"];
        // For the volume-mixing approach, build the parameters here and collect them instead:
        // AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:sourceAudioTrack];
        // [trackMix setVolume:1.0f atTime:CMTimeMakeWithSeconds(enterTime.floatValue, audioAsset.duration.timescale)];
        // [_audioMixParams addObject:trackMix];
        CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration); // the part of the recording to insert
        CMTime insertTime = CMTimeMakeWithSeconds(enterTime.floatValue, audioAsset.duration.timescale); // insertion point
        // First remove the original audio over the range the recording will occupy
        [_audio_track removeTimeRange:CMTimeRangeMake(insertTime, audioAsset.duration)];
        // For the volume-adjustment approach, a new audio track would be added here instead (not used in this version):
        // AVMutableCompositionTrack *audioTrack = [_composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        // Insert this recording into the main track
        BOOL isFinish = [_audio_track insertTimeRange:audio_timeRange ofTrack:sourceAudioTrack atTime:insertTime error:&error];
        if (isFinish) {
            mergeIndex += 1;
            if (mergeIndex < musicUrlArray.count) {
                // Recurse into the next recording
                [self setUpAndAddAudioArrayAtPath:musicUrlArray[mergeIndex] andAVAssetTrack:videoTrack success:successBlock];
            } else {
                [self mergeVideoOrAudio:successBlock]; // all recordings inserted; do the final merge
            }
        } else {
            NSURL *url = [NSURL URLWithString:@""];
            successBlock(url);
            [MBProgressHUD show:@"Merge failed!" view:nil];
            return;
        }
    } else {
        NSURL *url = [NSURL URLWithString:@""];
        successBlock(url);
        [MBProgressHUD show:@"Merge failed!" view:nil];
        return;
    }
}
// Finally, export the mixed audio
- (void)mergeVideoOrAudio:(void(^)(NSURL *recordPath))successBlock {
    // Create a mutable audio mix
    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
    audioMix.inputParameters = [NSArray arrayWithArray:_audioMixParams]; // the processed track parameters collected earlier
    // Create an export session
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc]
                                      initWithAsset:_composition
                                      presetName:AVAssetExportPresetAppleM4A];
    exporter.audioMix = audioMix;
    // To keep both the original audio and the dubbing, set the two tracks' volumes instead:
    // exporter.audioMix = [self buildAudioMixWithVideoTrack:video_track VideoVolume:0.0f BGMTrack:audio_track BGMVolume:0.8f controlVolumeRange:kCMTimeZero];
    exporter.outputFileType = AVFileTypeAppleM4A; // the output is an .m4a file
    // Output path for the mixed audio
    NSString *exportFile = [videoCachePath stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.m4a", [AppDelegate uuidString]]];
    NSURL *exportURL = [NSURL fileURLWithPath:exportFile];
    exporter.outputURL = exportURL;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"Audio mixing finished; now merging audio and video");
        if (exporter.status == AVAssetExportSessionStatusCompleted && [[NSFileManager defaultManager] fileExistsAtPath:exportFile]) {
            // Step 2: merge the mixed audio with the video
            [self theVideoWithMixMusic:exportFile videoPath:self->_videoPath success:successBlock];
        }
    }];
}
/**
 Merge the mixed audio with the video

 @param mixURLPath the mixed audio
 @param videoPath the video
 @param successBlock called on success
 */
- (void)theVideoWithMixMusic:(NSString *)mixURLPath videoPath:(NSString *)videoPath success:(void(^)(NSURL *recordPath))successBlock
{
    // Audio source path (the final mixed audio)
    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:mixURLPath];
    // Video source path
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoPath];
    // Final output path
    NSString *exportFile = [videoCachePath stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mp4", [AppDelegate uuidString]]];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    [fileManager removeItemAtPath:exportFile error:nil]; // remove any stale file at that path first
    NSURL *outputFileUrl = [NSURL fileURLWithPath:exportFile];
    CMTime nextClipStartTime = kCMTimeZero;
    // Create a mutable audio/video composition
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    // Load the video
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    // Video composition instructions (not needed for a plain merge):
    // AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    // AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    // videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration);
    // videoComposition.instructions = @[videoCompositionInstruction];
    // videoComposition.renderSize = CGSizeMake(960, 540);
    // videoComposition.frameDuration = CMTimeMake(1, 600);
    // Load the audio
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration); // trim the audio to the video's length
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    // Create the export session; AVAssetExportPresetPassthrough is the preset that also works on iPhone XR/XS
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = outputFileUrl;
    // _assetExport.videoComposition = videoComposition;
    _assetExport.shouldOptimizeForNetworkUse = YES;
    __weak typeof(self) wSelf = self;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        __strong typeof(wSelf) sSelf = wSelf;
        // Handle the result on the main thread
        dispatch_async(dispatch_get_main_queue(), ^{
            NSLog(@"Done! AVAssetExportSession: [%@]", _assetExport);
            if ([_assetExport status] == AVAssetExportSessionStatusCompleted) {
                // Export succeeded
                NSLog(@"Merge finished: %@", outputFileUrl);
                if ([[NSFileManager defaultManager] fileExistsAtPath:exportFile]) {
                    successBlock(outputFileUrl);
                } else {
                    [MBProgressHUD hideHUDForView:sSelf.baseView animated:YES];
                    [MBProgressHUD show:@"Output error..." view:nil afterDelay:1.5];
                }
            } else if ([_assetExport status] == AVAssetExportSessionStatusCancelled) {
                // Export cancelled: reset the progress UI and show a failure message
                [MBProgressHUD hideHUDForView:sSelf.baseView animated:YES];
                [MBProgressHUD show:@"Merge failed..." view:nil afterDelay:1.5];
            } else if ([_assetExport status] == AVAssetExportSessionStatusFailed) {
                // Export failed: reset the progress UI and show a failure message
                [MBProgressHUD hideHUDForView:sSelf.baseView animated:YES];
                [MBProgressHUD show:@"Merge failed..." view:nil afterDelay:1.5];
                NSLog(@"Merge failed: %@", _assetExport.error);
            }
        });
    }];
}
#pragma mark - Adjusting the mixed volumes
- (AVAudioMix *)buildAudioMixWithVideoTrack:(AVCompositionTrack *)videoTrack VideoVolume:(float)videoVolume BGMTrack:(AVCompositionTrack *)BGMTrack BGMVolume:(float)BGMVolume controlVolumeRange:(CMTime)volumeRange {
    // Create the audio mix
    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
    // Set the volume on the video's own audio track
    /* CMTime CMTimeMake(
         int64_t value,     // the current frame number
         int32_t timescale  // frames per second
       );
     */
    AVMutableAudioMixInputParameters *Videoparameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:videoTrack];
    [Videoparameters setVolume:videoVolume atTime:CMTimeMakeWithSeconds(5, 600)];
    // Set the background music volume
    AVMutableAudioMixInputParameters *BGMparameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:BGMTrack];
    [BGMparameters setVolume:BGMVolume atTime:volumeRange];
    // Add both parameter sets to the mix
    audioMix.inputParameters = @[Videoparameters, BGMparameters];
    return audioMix;
}
@end