项目中涉及视频上传需求的, 为了上传和拉取速度体验, 节省存储费用, 都会在上传前进行视频压缩. 下面提供两种视频压缩实现, 仅供参考
第一种, 是采用 AVAssetExportSession 导出的方式, 优点是压缩快, 能有效地减少视频的文件大小, 但是定制化不友好, 压缩之后的视频画质也不太令人满意, 实现代码如下
// Configure the export preset (medium quality trades file size against fidelity).
AVAssetExportSession *session = [AVAssetExportSession exportSessionWithAsset:asset presetName:AVAssetExportPresetMediumQuality];
// Fix render size and orientation before exporting.
session.videoComposition = [self getVideoComposition:asset];
// Build a unique output path under Documents. time_t must be cast for %ld.
NSString *path = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, true) lastObject] stringByAppendingPathComponent:[NSString stringWithFormat:@"%ld_compressedVideo.mp4", (long)time(NULL)]];
// Set the export destination.
session.outputURL = [NSURL fileURLWithPath:path];
// Set the output container format.
session.outputFileType = AVFileTypeMPEG4;
// The completion handler runs on a background queue and is invoked on
// success, failure AND cancellation — always inspect `status` before
// treating the export as done.
[session exportAsynchronouslyWithCompletionHandler:^{
    if (session.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"导出完成! 路径:%@",path);
    } else {
        NSLog(@"导出失败: %@", session.error);
    }
}];
第二种, 是采用 AVAssetReader和AVAssetWriter 实现逐帧压缩的方式, 优点是压缩的定制化比较好, 可以设置导出视频的编码格式,比特率, 帧率, 尺寸及视频质量, 压缩之后的画质令人满意, 缺点是视频时长较长时, 压缩时间较长, 实现代码如下:
/// Video decode settings: have the reader vend 4:2:2 ('2vuy') pixel buffers.
+ (NSDictionary *)configVideoOutput{
    NSString *pixelFormatKey = (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey;
    return @{pixelFormatKey : @(kCVPixelFormatType_422YpCbCr8)};
}
/// Audio decode settings: decompress the source to linear PCM for re-encoding.
+ (NSDictionary *)configAudioOutput{
    NSDictionary *settings = @{AVFormatIDKey : @(kAudioFormatLinearPCM)};
    return settings;
}
/// Builds the H.264 video encoder settings for an AVAssetWriterInput.
/// @param videoSize output dimensions in pixels
/// @param frameRate expected source frame rate in fps
/// @param estimatedDataRate the source track's estimated data rate in bits/sec
+ (NSDictionary *)videoCompressSettting:(CGSize)videoSize frameRate:(NSInteger)frameRate estimatedDataRate:(float)estimatedDataRate{
    // Total pixels per frame drives the target bitrate.
    float numPixels = videoSize.width * videoSize.height;
    // Bits per pixel: cap at 1.0; otherwise shave 0.1 off the source's ratio.
    // Clamp to a small positive floor so a low-rate source can never produce a
    // zero or negative average bitrate (the encoder rejects those).
    CGFloat bitsPerPixel = 0.0;
    if (estimatedDataRate / numPixels > 1) {
        bitsPerPixel = 1.0;
    } else {
        bitsPerPixel = MAX(estimatedDataRate / numPixels - 0.1, 0.05);
    }
    NSInteger bitsPerSecond = numPixels * bitsPerPixel;
    NSDictionary *compressProperties = @{
        AVVideoAverageBitRateKey : @(bitsPerSecond),
        AVVideoExpectedSourceFrameRateKey : @(frameRate),
        AVVideoProfileLevelKey : AVVideoProfileLevelH264HighAutoLevel
    };
    // The two OS branches differ only in the codec identifier constant
    // (AVVideoCodecTypeH264 supersedes the deprecated AVVideoCodecH264 on
    // iOS 11+), so build the shared keys once and patch in the codec after.
    NSMutableDictionary *compressSetting = [@{
        AVVideoWidthKey : @(videoSize.width),
        AVVideoHeightKey : @(videoSize.height),
        AVVideoCompressionPropertiesKey : compressProperties,
        AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill
    } mutableCopy];
    if (@available(iOS 11.0, *)) {
        compressSetting[AVVideoCodecKey] = AVVideoCodecTypeH264;
    } else {
        compressSetting[AVVideoCodecKey] = AVVideoCodecH264;
    }
    return compressSetting;
}
/// Audio encoder settings: stereo AAC at 96 kbps / 44.1 kHz.
+ (NSDictionary *)audioCompressSetting{
    // Zero the layout, then set only the stereo tag; the data passed to
    // AVChannelLayoutKey excludes the variable-length descriptions tail.
    AudioChannelLayout stereoLayout;
    memset(&stereoLayout, 0, sizeof(stereoLayout));
    stereoLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
    NSData *layoutData = [NSData dataWithBytes:&stereoLayout length:offsetof(AudioChannelLayout, mChannelDescriptions)];
    NSDictionary *settings = @{AVFormatIDKey : @(kAudioFormatMPEG4AAC),
                               AVEncoderBitRateKey : @(96000),
                               AVSampleRateKey : @(44100),
                               AVChannelLayoutKey : layoutData,
                               AVNumberOfChannelsKey : @(2)};
    return settings;
}
/// Corrects render size and orientation for export.
/// @param asset the source asset; must contain at least one video track.
/// @return a configured video composition, or nil if the asset has no video track.
+ (AVMutableVideoComposition *)getVideoComposition:(AVAsset *)asset {
    // firstObject is nil-safe, unlike objectAtIndex:0 which throws on an
    // asset with no video track.
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (!videoTrack) {
        return nil;
    }
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    CGSize videoSize = videoTrack.naturalSize;
    // A 90°/270° preferredTransform means portrait footage: swap width/height.
    CGAffineTransform t = videoTrack.preferredTransform;
    if ((t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) ||
        (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0)) {
        videoSize = CGSizeMake(videoSize.height, videoSize.width);
    }
    composition.naturalSize = videoSize;
    videoComposition.renderSize = videoSize;
    // nominalFrameRate is a float and can be 0 for some assets; round it and
    // fall back to 30 fps so frameDuration never gets a zero timescale.
    int32_t fps = videoTrack.nominalFrameRate > 0 ? (int32_t)roundf(videoTrack.nominalFrameRate) : 30;
    videoComposition.frameDuration = CMTimeMake(1, fps);
    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    // Check the BOOL return rather than passing error:nil and ignoring failure.
    NSError *insertError = nil;
    if (![compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:&insertError]) {
        NSLog(@"insertTimeRange failed: %@", insertError);
    }
    AVMutableVideoCompositionLayerInstruction *layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    [layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];
    AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    inst.layerInstructions = @[layerInst];
    videoComposition.instructions = @[inst];
    return videoComposition;
}
/// Compression driver code: AVAssetReader -> AVAssetWriter pipeline.
// NOTE(review): reader/writer creation passes error:nil — failures are silent;
// surface these errors in production code.
AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:asset error:nil];
NSString *outPath = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, true) lastObject] stringByAppendingPathComponent:[NSString stringWithFormat:@"%ld_compressedVideo.mp4",time(NULL)]];
AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:outPath] fileType:AVFileTypeMPEG4 error:nil];
AVAssetTrack *track;
NSArray *array = asset.tracks;
// Get the display size of the first video track; applying preferredTransform
// to naturalSize accounts for rotated (portrait) footage. (The asset-level
// naturalSize API is not used because it is deprecated.)
CGSize videoSize = CGSizeZero;
for (AVAssetTrack *atrack in array) {
if ([atrack.mediaType isEqualToString:AVMediaTypeVideo]) {
CGSize realSize = CGSizeApplyAffineTransform(atrack.naturalSize, atrack.preferredTransform);
videoSize = CGSizeMake(fabs(realSize.width), fabs(realSize.height));
track = atrack;
break;
}
}
NSLog(@"width = %f,height = %f",videoSize.width, videoSize.height);
// Skip compression when the source is already below ~1 bit/pixel/second.
// NOTE(review): if the asset has no video track, `track` stays nil, videoSize
// is zero and this ratio is NaN, so execution falls through and @[track]
// below would throw — confirm callers only pass assets with video.
if (track.estimatedDataRate / (videoSize.width * videoSize.height) < 1) {
return;
}
AVAssetReaderVideoCompositionOutput *output = [AVAssetReaderVideoCompositionOutput assetReaderVideoCompositionOutputWithVideoTracks:@[track] videoSettings:[self configVideoOutput]];
// NOTE(review): `composition` is not defined in this snippet — presumably the
// result of getVideoComposition:; verify in the integrating code.
output.videoComposition = composition;
// Target frame rate: cap at 30 fps, otherwise drop 5 fps below the source —
// but never below 10 fps, since a zero or negative rate (possible when the
// source runs at 5 fps or less) would be rejected by the encoder settings.
NSInteger frameRate = 20;
if (track.nominalFrameRate > 30) {
    frameRate = 30;
} else {
    frameRate = MAX((NSInteger)track.nominalFrameRate - 5, 10);
}
// Video writer input configured with the H.264 compression settings above.
AVAssetWriterInput *videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:[self videoCompressSettting:videoSize frameRate:frameRate estimatedDataRate:track.estimatedDataRate]];
if ([reader canAddOutput:output]) {
[reader addOutput:output];
}
if ([writer canAddInput:videoInput]) {
[writer addInput:videoInput];
}
// NOTE(review): assumes the asset has an audio track; if audioTrack is nil,
// assetReaderTrackOutputWithTrack: raises — confirm behavior for video-only
// sources before shipping.
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
AVAssetReaderTrackOutput *audioOutPut = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:[self configAudioOutput]];
AVAssetWriterInput *audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:[self audioCompressSetting]];
if ([reader canAddOutput:audioOutPut]) {
[reader addOutput:audioOutPut];
}
if ([writer canAddInput:audioInput]) {
[writer addInput:audioInput];
}
// Start both ends of the pipeline; output samples are timed from zero.
[reader startReading];
[writer startWriting];
[writer startSessionAtSourceTime:kCMTimeZero];
// A dispatch group joins the independent audio and video pump loops; the
// writer is only finalized (below) once both inputs have been marked finished.
dispatch_group_t group = dispatch_group_create();
dispatch_group_enter(group);
dispatch_queue_t videoCompressQueue = dispatch_queue_create("videoCompressQueue", NULL);
[videoInput requestMediaDataWhenReadyOnQueue:videoCompressQueue usingBlock:^{
while ([videoInput isReadyForMoreMediaData]) {
CMSampleBufferRef sampleBufferRef;
// copyNextSampleBuffer follows the CF Create rule, hence the CFRelease below.
if ([reader status] == AVAssetReaderStatusReading && (sampleBufferRef = [output copyNextSampleBuffer])) {
BOOL result = [videoInput appendSampleBuffer:sampleBufferRef];
CFRelease(sampleBufferRef);
if (!result) {
// Append failed: abort reading.
// NOTE(review): the group is not left on this path; it relies on the block
// being re-invoked so the else-branch runs markAsFinished/leave — verify.
[reader cancelReading];
break;
}
}else{
// Source exhausted (or reader cancelled/failed): close this input and
// release our hold on the group.
[videoInput markAsFinished];
dispatch_group_leave(group);
break;
}
}
}];
// Audio pump loop, mirroring the video loop above on its own serial queue.
dispatch_group_enter(group);
dispatch_queue_t audioCompressQueue = dispatch_queue_create("audioCompressQueue", NULL);
[audioInput requestMediaDataWhenReadyOnQueue:audioCompressQueue usingBlock:^{
while ([audioInput isReadyForMoreMediaData]) {
CMSampleBufferRef sampleBufferRef;
if ([reader status] == AVAssetReaderStatusReading && (sampleBufferRef = [audioOutPut copyNextSampleBuffer])) {
BOOL result = [audioInput appendSampleBuffer:sampleBufferRef];
// Balance the Create-rule ownership from copyNextSampleBuffer.
CFRelease(sampleBufferRef);
if (!result) {
[reader cancelReading];
break;
}
}else{
// No more audio samples: finish this input and leave the group.
[audioInput markAsFinished];
dispatch_group_leave(group);
break;
}
}
}];
// Runs once both pump loops have left the group: tear down the reader and
// finalize the output file.
dispatch_group_notify(group, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
if ([reader status] == AVAssetReaderStatusReading) {
[reader cancelReading];
}
// NOTE(review): only the Writing state is handled; AVAssetWriterStatusFailed
// is silently ignored — consider surfacing writer.error to the caller.
switch (writer.status) {
case AVAssetWriterStatusWriting:
{
[writer finishWritingWithCompletionHandler:^{
NSLog(@"压缩完成");
}];
}
break;
default:
break;
}
});
另外在压缩时, 需要判断视频的质量, 有些是已经压缩过的视频, 再度上传时, 就没有必要再压缩.
网友评论