I recently hit a bug uploading video in a project. For a locally shot video, the normal flow should be: compress, then upload.
But after I compressed and uploaded, the video I checked in the backend was sideways, rotated 90 degrees!
The reason is that the iPhone camera records the frames unrotated and only stores the orientation in the video track's preferredTransform; a player or server that ignores that flag shows the video sideways. Since it comes out rotated 90 degrees, we just rotate it back during compression by applying that transform through a video composition.
Straight to the code:
+ (void)lowQuailtyWithInputURL:(NSURL *)inputURL blockHandler:(void (^)(BOOL success, AVAssetExportSession *session, NSURL *videoUrl))handler
{
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
    AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetMediumQuality];

    // Temporary directory for the compressed file; create it only once.
    NSString *path = [NSString stringWithFormat:@"%@VideoCompression/", NSTemporaryDirectory()];
    NSFileManager *fileManager = [[NSFileManager alloc] init];
    static dispatch_once_t predicate;
    dispatch_once(&predicate, ^{
        if (![fileManager fileExistsAtPath:path]) {
            [fileManager createDirectoryAtPath:path withIntermediateDirectories:YES attributes:nil error:nil];
        }
    });

    // Remove any leftover output from a previous export; the export fails if the file already exists.
    NSString *outputPath = [NSString stringWithFormat:@"%@VideoCompressionTemp.mp4", path];
    if ([fileManager fileExistsAtPath:outputPath]) {
        [fileManager removeItemAtPath:outputPath error:nil];
    }
    NSURL *compressionVideoURL = [NSURL fileURLWithPath:outputPath];

    session.outputURL = compressionVideoURL;
    session.outputFileType = AVFileTypeMPEG4;
    session.shouldOptimizeForNetworkUse = YES;
    // The video composition bakes the rotation into the exported frames.
    session.videoComposition = [self getVideoComposition:asset];

    [session exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            switch ([session status]) {
                case AVAssetExportSessionStatusFailed: {
                    NSLog(@"Export failed: %@ : %@", [[session error] localizedDescription], [session error]);
                    handler(NO, nil, nil);
                    break;
                }
                case AVAssetExportSessionStatusCancelled: {
                    NSLog(@"Export canceled");
                    handler(NO, nil, nil);
                    break;
                }
                default:
                    handler(YES, session, compressionVideoURL);
                    break;
            }
        });
    }];
}
+ (AVMutableVideoComposition *)getVideoComposition:(AVAsset *)asset
{
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    CGSize videoSize = videoTrack.naturalSize;

    // For portrait footage the track's preferredTransform is a 90° rotation,
    // so the render size has to swap width and height.
    CGAffineTransform t = videoTrack.preferredTransform;
    if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
        videoSize = CGSizeMake(videoSize.height, videoSize.width);
    }

    composition.naturalSize = videoSize;
    videoComposition.renderSize = videoSize;
    videoComposition.frameDuration = CMTimeMakeWithSeconds(1 / videoTrack.nominalFrameRate, 600);

    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:nil];

    // Apply the track's preferredTransform so the rotation is rendered into the pixels
    // instead of being left as a metadata flag that the backend may ignore.
    AVMutableVideoCompositionLayerInstruction *layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    [layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];

    AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    inst.layerInstructions = [NSArray arrayWithObject:layerInst];
    videoComposition.instructions = [NSArray arrayWithObject:inst];
    return videoComposition;
}
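For completeness, here is a minimal caller sketch. The class name VideoCompressTool, the method compressAndUploadVideoAtURL:, and the logging are my own placeholders, not part of the original code; only the lowQuailtyWithInputURL:blockHandler: call matches the method above.
- (void)compressAndUploadVideoAtURL:(NSURL *)recordedURL
{
    [VideoCompressTool lowQuailtyWithInputURL:recordedURL
                                 blockHandler:^(BOOL success, AVAssetExportSession *session, NSURL *videoUrl) {
        if (success) {
            // The completion handler is already dispatched to the main queue,
            // so it is safe to update UI or start the upload here.
            NSData *videoData = [NSData dataWithContentsOfURL:videoUrl];
            NSLog(@"Compressed size: %lu bytes", (unsigned long)videoData.length);
            // Hand videoData (or videoUrl) to your upload code.
        } else {
            NSLog(@"Compression failed or was cancelled");
        }
    }];
}
Because the exported frames already carry the rotation, the backend can play the file correctly even if it strips or ignores the orientation metadata.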