Fetching the video asset with PHImageManager
[[PHImageManager defaultManager] requestAVAssetForVideo:asset options:self.videoOptions resultHandler:^(AVAsset * _Nullable avasset, AVAudioMix * _Nullable audioMix, NSDictionary * _Nullable info) {
    // For slow-motion clips Photos may return an AVComposition rather than
    // an AVURLAsset, so guard the cast before reading the URL.
    if (![avasset isKindOfClass:[AVURLAsset class]]) return;
    NSURL *url = ((AVURLAsset *)avasset).URL;
    UIImage *firstFrame = [self getVideoFirstImageWithUrl:url];
    // Use firstFrame here; note this handler is not guaranteed to run on the main queue.
}];
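The asset passed in above is a PHAsset obtained elsewhere (from a picker, an album grid, and so on). As a minimal, hedged sketch of one way to get one, this fetches the most recent video in the photo library; the sort descriptor and the firstObject choice are assumptions, not part of the original:

#import <Photos/Photos.h>

PHFetchOptions *fetchOptions = [[PHFetchOptions alloc] init];
fetchOptions.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:NO]];
PHFetchResult<PHAsset *> *videos = [PHAsset fetchAssetsWithMediaType:PHAssetMediaTypeVideo options:fetchOptions];
PHAsset *asset = videos.firstObject; // nil if the library contains no videos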
- (PHVideoRequestOptions *)videoOptions {
    if (!_videoOptions) {
        _videoOptions = [[PHVideoRequestOptions alloc] init];
        _videoOptions.version = PHVideoRequestOptionsVersionOriginal;
        _videoOptions.deliveryMode = PHVideoRequestOptionsDeliveryModeAutomatic;
        _videoOptions.networkAccessAllowed = YES;
    }
    return _videoOptions;
}
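Because networkAccessAllowed is YES, the request may first have to download the original video from iCloud. PHVideoRequestOptions also accepts a progressHandler for surfacing that download; a minimal sketch (the logging is an assumption):

_videoOptions.progressHandler = ^(double progress, NSError * _Nullable error, BOOL * _Nonnull stop, NSDictionary * _Nullable info) {
    // Invoked on an arbitrary queue while the video streams down from iCloud.
    NSLog(@"iCloud download progress: %.0f%%", progress * 100);
};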
The two helper methods below are taken from the YBImageBrowser framework.
#pragma mark - Grab the first frame of a video and return it as a UIImage (requires importing <AVFoundation/AVFoundation.h>)
- (UIImage *)getVideoFirstImageWithUrl:(NSURL *)url {
    // An AVURLAsset can also be passed in directly: AVURLAsset is a subclass of AVAsset.
    AVAsset *asset = [AVAsset assetWithURL:url];
    AVAssetImageGenerator *generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
    generator.appliesPreferredTrackTransform = YES; // respect the track's rotation metadata
    generator.maximumSize = CGSizeMake(500, 500);
    NSError *error = nil;
    CGImageRef cgImage = [generator copyCGImageAtTime:CMTimeMake(0, 1) actualTime:NULL error:&error];
    if (!cgImage) return nil; // frame extraction failed; `error` describes why
    CGImageRef decodedImage = YYCGImageCreateDecodedCopy(cgImage, YES);
    UIImage *resultImage = decodedImage ? [UIImage imageWithCGImage:decodedImage] : [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    if (decodedImage) CGImageRelease(decodedImage);
    return resultImage;
}
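Note that copyCGImageAtTime:actualTime:error: blocks the calling thread until the frame is decoded, so it is best kept off the main queue. AVAssetImageGenerator also offers an asynchronous API; a hedged sketch using the same generator setup as above:

[generator generateCGImagesAsynchronouslyForTimes:@[[NSValue valueWithCMTime:CMTimeMake(0, 1)]]
                                 completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
    if (result == AVAssetImageGeneratorSucceeded && image) {
        UIImage *frame = [UIImage imageWithCGImage:image]; // UIImage retains the CGImage
        dispatch_async(dispatch_get_main_queue(), ^{
            // hand `frame` to the UI here
        });
    }
}];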
CGImageRef YYCGImageCreateDecodedCopy(CGImageRef imageRef, BOOL decodeForDisplay) {
    if (!imageRef) return NULL;
    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    if (width == 0 || height == 0) return NULL;

    if (decodeForDisplay) { // decode with redraw (may lose some precision)
        CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef) & kCGBitmapAlphaInfoMask;
        BOOL hasAlpha = NO;
        if (alphaInfo == kCGImageAlphaPremultipliedLast ||
            alphaInfo == kCGImageAlphaPremultipliedFirst ||
            alphaInfo == kCGImageAlphaLast ||
            alphaInfo == kCGImageAlphaFirst) {
            hasAlpha = YES;
        }
        // BGRA8888 (premultiplied) or BGRX8888
        // same as UIGraphicsBeginImageContext() and -[UIView drawRect:]
        CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
        bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
        CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, YYCGColorSpaceGetDeviceRGB(), bitmapInfo);
        if (!context) return NULL;
        CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef); // decode
        CGImageRef newImage = CGBitmapContextCreateImage(context);
        CFRelease(context);
        return newImage;
    } else {
        CGColorSpaceRef space = CGImageGetColorSpace(imageRef);
        size_t bitsPerComponent = CGImageGetBitsPerComponent(imageRef);
        size_t bitsPerPixel = CGImageGetBitsPerPixel(imageRef);
        size_t bytesPerRow = CGImageGetBytesPerRow(imageRef);
        CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
        if (bytesPerRow == 0 || width == 0 || height == 0) return NULL;
        CGDataProviderRef dataProvider = CGImageGetDataProvider(imageRef);
        if (!dataProvider) return NULL;
        CFDataRef data = CGDataProviderCopyData(dataProvider); // decode
        if (!data) return NULL;
        CGDataProviderRef newProvider = CGDataProviderCreateWithCFData(data);
        CFRelease(data);
        if (!newProvider) return NULL;
        CGImageRef newImage = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, space, bitmapInfo, newProvider, NULL, false, kCGRenderingIntentDefault);
        CFRelease(newProvider);
        return newImage;
    }
}
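A note on the design choice above: with decodeForDisplay == YES the image is redrawn into a BGRA8888 bitmap context, the same pixel layout UIKit composites natively, so Core Animation can blit the result without any further conversion; with NO, the bytes are merely copied out of the data provider, which still forces decompression but preserves the original pixel format exactly.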
CGColorSpaceRef YYCGColorSpaceGetDeviceRGB() {
    static CGColorSpaceRef space;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        space = CGColorSpaceCreateDeviceRGB();
    });
    return space;
}
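Putting it together, the whole decode pipeline can live on a background queue so the main thread only receives the finished bitmap; a usage sketch (the queue choice and coverImageView are assumptions, not part of the original):

dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
    UIImage *frame = [self getVideoFirstImageWithUrl:url]; // url from the Photos callback above
    dispatch_async(dispatch_get_main_queue(), ^{
        self.coverImageView.image = frame; // hypothetical UIImageView
    });
});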