- YYKit__YYModel_源码分析:http://www.jianshu.com/p/e1f477dddb47
- YYKit__YYWebImage__源码分析:http://www.jianshu.com/p/54e83f44a268
- AFNetworking源码分析:http://www.jianshu.com/p/723533b2f1ed
- YYImage继承UIImage:分成三种情况对图片进行解码:WebP 、APNG/PNG、其他(gif、jpeg、png...)
webp(Google 开发的图片格式,体积大约只有 JPEG 的 2/3)、apng、帧图片、精灵图(sprite sheet,矩阵式的多张图片集合)等格式!
这是普通播放gif的代码,看起来很繁琐,也可以用webView来播放gif:
// Plays a frame sequence with a plain UIImageView by supplying every frame
// up front — simple but memory-hungry (all frames decoded at once), which is
// exactly what YYImage avoids. A web view can also play a gif directly.
UIImageView *gifImageView = [[UIImageView alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
// Build the frame array in a loop instead of 22 hand-written entries.
NSMutableArray<UIImage *> *gifFrames = [NSMutableArray arrayWithCapacity:22];
for (NSUInteger i = 1; i <= 22; i++) {
    UIImage *frame = [UIImage imageNamed:[NSString stringWithFormat:@"%lu", (unsigned long)i]];
    // imageNamed: may return nil for a missing resource; adding nil would throw.
    if (frame) [gifFrames addObject:frame];
}
gifImageView.animationImages = gifFrames; // frames of the animation
gifImageView.animationDuration = 5;       // duration of one full pass
gifImageView.animationRepeatCount = 1;    // number of repeats (0 = repeat forever)
[gifImageView startAnimating];
[self.view addSubview:gifImageView];
// NOTE(review): the original snippet ended with [gifImageView release], which
// does not compile under ARC; the superview retains the view, so no manual
// release is needed.
// Basic CoreGraphics/ImageIO operations used by the decoder.
// Create an image source from raw data.
CGImageSourceRef _source = CGImageSourceCreateWithData((__bridge CFDataRef)_data, NULL);
// `_data` must contain all bytes fed so far; pass false while more data may arrive.
CGImageSourceUpdateData(_source, (__bridge CFDataRef)_data, false);
// Number of frames in the image (a psd reports 1).
NSUInteger frameCount = CGImageSourceGetCount(_source);
// Container-level properties of the image source, e.g. for a gif:
// {
//     FileSize = 5096436;
//     "{GIF}" = {
//         HasGlobalColorMap = 1;
//         LoopCount = 0;
//     };
// }
CFDictionaryRef properties = CGImageSourceCopyProperties(_source, NULL);
// Per-frame properties (example: frame i of a 97-frame gif).
// FIX(review): the original declared a second `properties` variable in the
// same scope, which does not compile; renamed to frameProperties.
CFDictionaryRef frameProperties = CGImageSourceCopyPropertiesAtIndex(_source, i, NULL);
// Both come from Copy-rule APIs and must be released by the caller.
if (properties) CFRelease(properties);
if (frameProperties) CFRelease(frameProperties);
// Usage: almost identical to a plain UIImage.
YYImage *image = [YYImage imageNamed:name];
YYAnimatedImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
下面是几个核心的类型:YYImageDecoder、_YYImageDecoderFrame、yy_png_info:
// Instance variables of the decoder (implementation continues below this excerpt).
@implementation YYImageDecoder {
pthread_mutex_t _lock; // recursive lock
BOOL _sourceTypeDetected;// whether the source's image type has been detected
CGImageSourceRef _source;// ImageIO data source
yy_png_info *_apngSource;// parsed APNG structure
#if YYIMAGE_WEBP_ENABLED
WebPDemuxer *_webpSource;// WebP demuxer source
#endif
UIImageOrientation _orientation;// image orientation
dispatch_semaphore_t _framesLock;
NSArray *_frames; ///< Array<_YYImageDecoderFrame>, without image
BOOL _needBlend;
NSUInteger _blendFrameIndex;
CGContextRef _blendCanvas;
}
// Metadata of one decoded frame (the image itself is attached elsewhere).
@interface _YYImageDecoderFrame : YYImageFrame
@property (nonatomic, assign) BOOL hasAlpha; ///< Whether frame has alpha.
@property (nonatomic, assign) BOOL isFullSize; ///< Whether frame fill the canvas.
@property (nonatomic, assign) NSUInteger blendFromIndex; ///< Blend from frame index to current frame.
@end
// Payload of an APNG frame control (fcTL) chunk.
typedef struct {
uint32_t sequence_number; ///< sequence number of the animation chunk, starting from 0
uint32_t width; ///< width of the following frame
uint32_t height; ///< height of the following frame
uint32_t x_offset; ///< x position at which to render the following frame
uint32_t y_offset; ///< y position at which to render the following frame
uint16_t delay_num; ///< frame delay fraction numerator
uint16_t delay_den; ///< frame delay fraction denominator
uint8_t dispose_op; ///< see yy_png_dispose_op
uint8_t blend_op; ///< see yy_png_blend_op
} yy_png_chunk_fcTL;
// One raw PNG chunk record as found in the file data.
typedef struct {
uint32_t offset; ///< chunk offset in PNG data
uint32_t fourcc; ///< chunk fourcc
uint32_t length; ///< chunk data length
uint32_t crc32; ///< chunk crc32
} yy_png_chunk_info;
// Per-frame information of an APNG; one frame may span several chunks.
typedef struct {
uint32_t chunk_index; ///< the first `fdAT`/`IDAT` chunk index
uint32_t chunk_num; ///< the `fdAT`/`IDAT` chunk count
uint32_t chunk_size; ///< the `fdAT`/`IDAT` chunk bytes
yy_png_chunk_fcTL frame_control;
} yy_png_frame_info;
// Parsed (A)PNG file information.
typedef struct {
yy_png_chunk_IHDR header; ///< png header
yy_png_chunk_info *chunks; ///< raw chunk records extracted from the data
uint32_t chunk_num; ///< chunk count
yy_png_frame_info *apng_frames; ///< frame infos; each frame may span several chunks
uint32_t apng_frame_num; ///< frame count
uint32_t apng_loop_num; ///< loop count
uint32_t *apng_shared_chunk_indexs; ///< shared chunk index
uint32_t apng_shared_chunk_num; ///< shared chunk count
uint32_t apng_shared_chunk_size; ///< shared chunk bytes
uint32_t apng_shared_insert_index; ///< shared chunk insert index
bool apng_first_frame_is_cover; ///< the first frame is same as png (cover)
} yy_png_info;
/// Looks up an image resource in the main bundle, trying scale-suffixed names
/// (e.g. "name@2x") in the device's preferred order and — when the name has no
/// extension — a list of common image extensions.
/// Returns nil when the name is invalid or no matching file exists.
+ (YYImage *)imageNamed:(NSString *)name {
    // Reject empty names and directory-like names.
    if (name.length == 0 || [name hasSuffix:@"/"]) return nil;

    NSString *baseName = name.stringByDeletingPathExtension;
    NSString *extension = name.pathExtension;
    // With no explicit extension, probe the common candidates in order.
    NSArray *candidateExts = extension.length > 0
        ? @[extension]
        : @[@"", @"png", @"jpeg", @"jpg", @"gif", @"webp", @"apng"];

    NSString *foundPath = nil;
    CGFloat foundScale = 1;
    // Scales come pre-sorted per device, e.g. @3x first on a Plus-class phone,
    // since a project may ship the same image at several resolutions.
    for (NSNumber *scaleNumber in [NSBundle preferredScales]) {
        foundScale = scaleNumber.floatValue;
        NSString *scaledName = [baseName stringByAppendingNameScale:foundScale];
        // Try every candidate extension; stop at the first hit.
        for (NSString *candidate in candidateExts) {
            foundPath = [[NSBundle mainBundle] pathForResource:scaledName ofType:candidate];
            if (foundPath) break;
        }
        if (foundPath) break;
    }
    if (foundPath.length == 0) return nil;

    NSData *fileData = [NSData dataWithContentsOfFile:foundPath];
    if (fileData.length == 0) return nil;
    return [[self alloc] initWithData:fileData scale:foundScale];
}
/// Builds a decoder for `data`, decodes the first frame for display and
/// initializes the UIImage part from it. When the image is animated
/// (frame count > 1), the decoder is kept for later frame-by-frame decoding.
- (instancetype)initWithData:(NSData *)data scale:(CGFloat)scale {
    if (data.length == 0) return nil;
    if (scale <= 0) scale = [UIScreen mainScreen].scale;
    _preloadedLock = dispatch_semaphore_create(1);
    @autoreleasepool {
        // Create the decoder and pull frame 0, fully decoded for display.
        YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:scale];
        UIImage *firstImage = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
        if (!firstImage) return nil;
        self = [self initWithCGImage:firstImage.CGImage
                               scale:decoder.scale
                         orientation:firstImage.imageOrientation];
        if (!self) return nil;
        _animatedImageType = decoder.type; // remember the detected format
        if (decoder.frameCount > 1) {
            // Animated: retain the decoder and estimate the memory footprint.
            _decoder = decoder;
            _bytesPerFrame = CGImageGetBytesPerRow(firstImage.CGImage) * CGImageGetHeight(firstImage.CGImage);
            _animatedImageMemorySize = _bytesPerFrame * decoder.frameCount;
        }
        self.isDecodedForDisplay = YES;
    }
    return self;
}
/// Dispatches decoding to the format-specific routine: WebP and (A)PNG have
/// dedicated decoders; every other format goes through ImageIO.
- (void)_updateSource {
    if (_type == YYImageTypeWebP) {
        [self _updateSourceWebP];
    } else if (_type == YYImageTypePNG) {
        // Covers both plain PNG and APNG.
        [self _updateSourceAPNG];
    } else {
        [self _updateSourceImageIO];
    }
}
// Decoder path 1: WebP.
// Progressive WebP decoding is unusable (see the note below), so the data is
// unpacked with Google's WebPDemuxer instead.
- (void)_updateSourceWebP {
#if YYIMAGE_WEBP_ENABLED
    // Reset any previous state before re-parsing.
    _width = 0;
    _height = 0;
    _loopCount = 0;
    if (_webpSource) WebPDemuxDelete(_webpSource);
    _webpSource = NULL;
    dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
    _frames = nil;
    dispatch_semaphore_signal(_framesLock);
    /*
     https://developers.google.com/speed/webp/docs/api
     The documentation said we can use WebPIDecoder to decode webp progressively,
     but currently it can only returns an empty image (not same as progressive jpegs),
     so we don't use progressive decoding.
     When using WebPDecode() to decode multi-frame webp, we will get the error
     "VP8_STATUS_UNSUPPORTED_FEATURE", so we first use WebPDemuxer to unpack it.
     */
    WebPData webPData = {0};
    webPData.bytes = _data.bytes;
    webPData.size = _data.length;
    WebPDemuxer *demuxer = WebPDemux(&webPData);
    if (!demuxer) return;

    // Global info: frame count, loop count, canvas size.
    uint32_t webpFrameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
    uint32_t webpLoopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
    uint32_t canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
    uint32_t canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
    if (webpFrameCount == 0 || canvasWidth < 1 || canvasHeight < 1) {
        WebPDemuxDelete(demuxer);
        return;
    }

    NSMutableArray *frames = [NSMutableArray new];
    BOOL needBlend = NO;
    uint32_t iterIndex = 0;
    uint32_t lastBlendIndex = 0;
    // Iterator that walks every frame in the file.
    WebPIterator iter = {0};
    if (WebPDemuxGetFrame(demuxer, 1, &iter)) { // one-based index...
        do {
            _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new];
            [frames addObject:frame];
            if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
                frame.dispose = YYImageDisposeBackground;
            }
            // FIX(review): the original re-queried the canvas size here on
            // every iteration into `int` locals that shadowed the outer
            // uint32_t values, and assigned frame.blend twice (the later
            // assignment always won). Both redundancies removed.
            frame.index = iterIndex;
            frame.duration = iter.duration / 1000.0;
            frame.width = iter.width;
            frame.height = iter.height;
            frame.hasAlpha = iter.has_alpha;
            frame.blend = iter.blend_method == WEBP_MUX_BLEND;
            frame.offsetX = iter.x_offset;
            // Convert the top-left based WebP y offset to bottom-left.
            frame.offsetY = canvasHeight - iter.y_offset - iter.height;
            BOOL sizeEqualsToCanvas = (iter.width == canvasWidth && iter.height == canvasHeight);
            BOOL offsetIsZero = (iter.x_offset == 0 && iter.y_offset == 0);
            frame.isFullSize = (sizeEqualsToCanvas && offsetIsZero); // fills the whole canvas?
            if ((!frame.blend || !frame.hasAlpha) && frame.isFullSize) {
                frame.blendFromIndex = lastBlendIndex = iterIndex;
            } else {
                if (frame.dispose && frame.isFullSize) {
                    frame.blendFromIndex = lastBlendIndex;
                    lastBlendIndex = iterIndex + 1;
                } else {
                    frame.blendFromIndex = lastBlendIndex;
                }
            }
            if (frame.index != frame.blendFromIndex) needBlend = YES;
            iterIndex++;
        } while (WebPDemuxNextFrame(&iter)); // advance; returns false after the last frame
        WebPDemuxReleaseIterator(&iter);
    }
    if (frames.count != webpFrameCount) {
        WebPDemuxDelete(demuxer);
        return;
    }

    _width = canvasWidth;
    _height = canvasHeight;
    _frameCount = frames.count;
    _loopCount = webpLoopCount;
    _needBlend = needBlend;
    _webpSource = demuxer;
    dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
    _frames = frames; // assignment protected by _framesLock
    dispatch_semaphore_signal(_framesLock);
#else
    static const char *func = __FUNCTION__;
    static const int line = __LINE__;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        NSLog(@"[%s: %d] WebP is not available, check the documentation to see how to install WebP component: https://github.com/ibireme/YYImage#installation", func, line);
    });
#endif
}
// Decoder path 2: APNG/PNG decoding.
- (void)_updateSourceAPNG {
// APNG extends PNG; ImageIO decodes the static part of both.
yy_png_info_release(_apngSource);
_apngSource = nil;
[self _updateSourceImageIO]; // decode first frame
if (_frameCount == 0) return; // png decode failed
if (!_finalized) return; // ignore multi-frame before finalized
yy_png_info *apng = yy_png_info_create(_data.bytes, (uint32_t)_data.length);
if (!apng) return; // apng decode failed
if (apng->apng_frame_num == 0 ||
(apng->apng_frame_num == 1 && apng->apng_first_frame_is_cover)) {
yy_png_info_release(apng);
return; // no animation
}
if (_source) { // decoding done; release the ImageIO source
CFRelease(_source);
_source = NULL;
}
uint32_t canvasWidth = apng->header.width;
uint32_t canvasHeight = apng->header.height;
NSMutableArray *frames = [NSMutableArray new];
BOOL needBlend = NO;
uint32_t lastBlendIndex = 0;
for (uint32_t i = 0; i < apng->apng_frame_num; i++) {
_YYImageDecoderFrame *frame = [_YYImageDecoderFrame new];
[frames addObject:frame];
// copy each APNG frame's control data into a decoder frame
yy_png_frame_info *fi = apng->apng_frames + i;
frame.index = i;
frame.duration = yy_png_delay_to_seconds(fi->frame_control.delay_num, fi->frame_control.delay_den);
frame.hasAlpha = YES;
frame.width = fi->frame_control.width;
frame.height = fi->frame_control.height;
frame.offsetX = fi->frame_control.x_offset;
frame.offsetY = canvasHeight - fi->frame_control.y_offset - fi->frame_control.height;
BOOL sizeEqualsToCanvas = (frame.width == canvasWidth && frame.height == canvasHeight);
BOOL offsetIsZero = (fi->frame_control.x_offset == 0 && fi->frame_control.y_offset == 0);
frame.isFullSize = (sizeEqualsToCanvas && offsetIsZero);// whether the frame covers the whole canvas
switch (fi->frame_control.dispose_op) {
case YY_PNG_DISPOSE_OP_BACKGROUND: {
frame.dispose = YYImageDisposeBackground;
} break;
case YY_PNG_DISPOSE_OP_PREVIOUS: {
frame.dispose = YYImageDisposePrevious;
} break;
default: {
frame.dispose = YYImageDisposeNone;
} break;
}
switch (fi->frame_control.blend_op) {
case YY_PNG_BLEND_OP_OVER: {
frame.blend = YYImageBlendOver;
} break;
default: {
frame.blend = YYImageBlendNone;
} break;
}
if (frame.blend == YYImageBlendNone && frame.isFullSize) {
frame.blendFromIndex = i;
if (frame.dispose != YYImageDisposePrevious) lastBlendIndex = i;
} else {
if (frame.dispose == YYImageDisposeBackground && frame.isFullSize) {
frame.blendFromIndex = lastBlendIndex;
lastBlendIndex = i + 1;
} else {
frame.blendFromIndex = lastBlendIndex;
}
}
if (frame.index != frame.blendFromIndex) needBlend = YES;
}
_width = canvasWidth;
_height = canvasHeight;
_frameCount = frames.count;
_loopCount = apng->apng_loop_num;
_needBlend = needBlend;
_apngSource = apng;
dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
_frames = frames;
dispatch_semaphore_signal(_framesLock);
}
下面的 yy_png_info_create 是整个解码流程中最复杂的一个方法:
/**
 Create a png info from a png file. See struct png_info for more information.
 @param data png/apng file data.
 @param length the data's length in bytes.
 @return A png info object, you may call yy_png_info_release() to release it.
 Returns NULL if an error occurs.
 */
static yy_png_info *yy_png_info_create(const uint8_t *data, uint32_t length) {
if (length < 32) return NULL;
/*
 A PNG file always begins with the fixed 8-byte signature
     89 50 4E 47 0D 0A 1A 0A   ("\x89PNG\r\n\x1a\n")
 (0x89 is outside the ASCII range so the file is never mistaken for text).
 After the signature the file is a sequence of chunks; each chunk is:
     Length     4 bytes    length of the data field (big-endian)
     Type code  4 bytes    ASCII letters (A-Z, a-z)
     Data       variable   payload defined by the chunk type
     CRC        4 bytes    CRC-32 over type + data
 The first 8 bytes of `data` are checked against the signature:
 YY_FOUR_CC(0x89, 0x50, 0x4E, 0x47) is "\x89PNG" and
 YY_FOUR_CC(0x0D, 0x0A, 0x1A, 0x0A) is "\r\n\x1a\n".
*/
if (*((uint32_t *)data) != YY_FOUR_CC(0x89, 0x50, 0x4E, 0x47)) return NULL;
if (*((uint32_t *)(data + 4)) != YY_FOUR_CC(0x0D, 0x0A, 0x1A, 0x0A)) return NULL;
uint32_t chunk_realloc_num = 16;
// initial capacity for the chunk records (one yy_png_chunk_info each)
yy_png_chunk_info *chunks = malloc(sizeof(yy_png_chunk_info) * chunk_realloc_num);
if (!chunks) return NULL;
// parse png chunks
uint32_t offset = 8;// skip the 8-byte signature
uint32_t chunk_num = 0; // chunks parsed so far
uint32_t chunk_capacity = chunk_realloc_num;
uint32_t apng_loop_num = 0;
int32_t apng_sequence_index = -1;
int32_t apng_frame_index = 0;
int32_t apng_frame_number = -1;
bool apng_chunk_error = false;
/*
 Walk every chunk, recording its basic info (offset, type, length, crc).
*/
do {
if (chunk_num >= chunk_capacity) {
// grow the chunk array; realloc preserves the existing records
yy_png_chunk_info *new_chunks = realloc(chunks, sizeof(yy_png_chunk_info) * (chunk_capacity + chunk_realloc_num));
if (!new_chunks) {
free(chunks);
return NULL;
}
chunks = new_chunks;
chunk_capacity += chunk_realloc_num;
}
yy_png_chunk_info *chunk = chunks + chunk_num;
const uint8_t *chunk_data = data + offset;
chunk->offset = offset;
chunk->length = yy_swap_endian_uint32(*((uint32_t *)chunk_data));
if ((uint64_t)chunk->offset + (uint64_t)chunk->length + 12 > length) {
free(chunks);
return NULL;
}
// the 4-byte type code sits right after the length field
chunk->fourcc = *((uint32_t *)(chunk_data + 4));
if ((uint64_t)chunk->offset + 4 + chunk->length + 4 > (uint64_t)length) break; // 4-byte length + 4-byte type; stop on truncation
chunk->crc32 = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 8 + chunk->length)));
chunk_num++;
offset += 12 + chunk->length;// 4-byte length + 4-byte type + 4-byte CRC plus the payload
/*
 APNG ordering rules:
 1. Exactly one IHDR, and it must come first.
 2. Exactly one acTL.
 3. sequence_number in fcTL/fdAT must increase by 1, with no gaps or repeats.
 4. The number of fcTL chunks must equal num_frames in acTL.
 5. An fcTL must be followed by IDAT or fdAT.
*/
switch (chunk->fourcc) {
// acTL: 8-byte payload holding the frame count and the play count
case YY_FOUR_CC('a', 'c', 'T', 'L') : {
if (chunk->length == 8) {
apng_frame_number = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 8)));
apng_loop_num = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 12)));
} else {
apng_chunk_error = true;
}
} break;
/*
 fcTL: the header chunk of each frame; its 26-byte payload matches
 yy_png_chunk_fcTL (sequence number, size, offsets, delay, dispose/blend ops).
 fdAT: frame data; its first 4 payload bytes are a sequence number.
*/
case YY_FOUR_CC('f', 'c', 'T', 'L') :
case YY_FOUR_CC('f', 'd', 'A', 'T') : {
if (chunk->fourcc == YY_FOUR_CC('f', 'c', 'T', 'L')) {
if (chunk->length != 26) {
apng_chunk_error = true;
} else {
apng_frame_index++;// a new frame begins
}
}
if (chunk->length > 4) {
uint32_t sequence = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 8)));
if (apng_sequence_index + 1 == sequence) {
apng_sequence_index++;
} else {
apng_chunk_error = true;
}
} else {
apng_chunk_error = true;
}
} break;
// IEND marks the final chunk of the file
case YY_FOUR_CC('I', 'E', 'N', 'D') : {
offset = length; // end, break do-while loop
} break;
}
} while (offset + 12 <= length);// continue while at least one more chunk header (12 bytes) fits
// the file must begin with a 13-byte IHDR and contain at least 3 chunks
if (chunk_num < 3 ||
chunks->fourcc != YY_FOUR_CC('I', 'H', 'D', 'R') ||
chunks->length != 13) {
free(chunks);
return NULL;
}
// png info: one zero-initialized yy_png_info
yy_png_info *info = calloc(1, sizeof(yy_png_info));
if (!info) {
free(chunks);
return NULL;
}
// store the chunk records and parse the IHDR payload (8 bytes into its chunk)
info->chunks = chunks;
info->chunk_num = chunk_num;
yy_png_chunk_IHDR_read(&info->header, data + chunks->offset + 8);
// apng info
if (!apng_chunk_error && apng_frame_number == apng_frame_index && apng_frame_number >= 1) {
bool first_frame_is_cover = false;
uint32_t first_IDAT_index = 0;
if (!yy_png_validate_animation_chunk_order(info->chunks, info->chunk_num, &first_IDAT_index, &first_frame_is_cover)) {
return info; // ignore apng chunk
}
info->apng_loop_num = apng_loop_num;
info->apng_frame_num = apng_frame_number;
info->apng_first_frame_is_cover = first_frame_is_cover;
info->apng_shared_insert_index = first_IDAT_index;
info->apng_frames = calloc(apng_frame_number, sizeof(yy_png_frame_info));
if (!info->apng_frames) {
yy_png_info_release(info);
return NULL;
}
info->apng_shared_chunk_indexs = calloc(info->chunk_num, sizeof(uint32_t));
if (!info->apng_shared_chunk_indexs) {
yy_png_info_release(info);
return NULL;
}
int32_t frame_index = -1;
uint32_t *shared_chunk_index = info->apng_shared_chunk_indexs;
for (int32_t i = 0; i < info->chunk_num; i++) {
// i-th chunk record
yy_png_chunk_info *chunk = info->chunks + i;
/*
 Distribute chunks to frames by type. The first frame may reuse the
 IDAT chunks: APNG keeps a "default image" named IDAT (not fdAT) so
 decoders that do not understand APNG can still show one frame.
 The same ordering rules listed above apply here.
*/
switch (chunk->fourcc) {
/*
 IDAT: encoded image data; there may be several per image.
*/
case YY_FOUR_CC('I', 'D', 'A', 'T'): {
if (info->apng_shared_insert_index == 0) {
info->apng_shared_insert_index = i;
}
if (first_frame_is_cover) {
yy_png_frame_info *frame = info->apng_frames + frame_index;
frame->chunk_num++;
frame->chunk_size += chunk->length + 12;
}
} break;
/*
 acTL: animation control chunk (num_frames / num_plays);
 already consumed during the first pass, nothing to do here.
*/
case YY_FOUR_CC('a', 'c', 'T', 'L'): {
} break;
/*
 fcTL: per-frame header chunk; starts a new frame and carries its
 control data (see yy_png_chunk_fcTL).
*/
case YY_FOUR_CC('f', 'c', 'T', 'L'): {
frame_index++;
yy_png_frame_info *frame = info->apng_frames + frame_index;
frame->chunk_index = i + 1;
yy_png_chunk_fcTL_read(&frame->frame_control, data + chunk->offset + 8);
} break;
// fdAT: frame data; accounted to the current frame like IDAT
case YY_FOUR_CC('f', 'd', 'A', 'T'): {
yy_png_frame_info *frame = info->apng_frames + frame_index;
frame->chunk_num++;
frame->chunk_size += chunk->length + 12;
} break;
// any other chunk is shared by all frames
default: {
*shared_chunk_index = i;
shared_chunk_index++;
info->apng_shared_chunk_size += chunk->length + 12;
info->apng_shared_chunk_num++;
} break;
}
}
}
return info;
}
/*
 Generic decoding path (every format except apng/webp), using ImageIO.
*/
- (void)_updateSourceImageIO {
_width = 0;
_height = 0;
_orientation = UIImageOrientationUp;// default orientation
_loopCount = 0;
dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
_frames = nil;
dispatch_semaphore_signal(_framesLock);
if (!_source) {
if (_finalized) {
// all data present: create the image source in one shot
_source = CGImageSourceCreateWithData((__bridge CFDataRef)_data, NULL);
} else {
// create an incremental image source for progressive data
_source = CGImageSourceCreateIncremental(NULL);
// feed the bytes gathered so far into the incremental source
if (_source) CGImageSourceUpdateData(_source, (__bridge CFDataRef)_data, false);
}
} else {
CGImageSourceUpdateData(_source, (__bridge CFDataRef)_data, _finalized);
}
if (!_source) return;
// CGImageSourceGetCount returns the frame count (a psd reports 1)
_frameCount = CGImageSourceGetCount(_source);
if (_frameCount == 0) return;
if (!_finalized) { // ignore multi-frame before finalized
_frameCount = 1;
} else {
if (_type == YYImageTypePNG) { // use custom apng decoder and ignore multi-frame
_frameCount = 1;
}
if (_type == YYImageTypeGIF) { // get gif loop count
// properties :
// {
// FileSize = 5096436;
// "{GIF}" = {
// HasGlobalColorMap = 1;
// LoopCount = 0;
// };
// }
CFDictionaryRef properties = CGImageSourceCopyProperties(_source, NULL);
if (properties) {
/* gif
{
HasGlobalColorMap = 1;
LoopCount = 0;
}
**/
CFDictionaryRef gif = CFDictionaryGetValue(properties, kCGImagePropertyGIFDictionary);
if (gif) {
// "All other Core Foundation opaque types derive from CFType", so an
// untyped CF value can be held as a CFTypeRef.
CFTypeRef loop = CFDictionaryGetValue(gif, kCGImagePropertyGIFLoopCount);
// read the loop value as an integer into _loopCount
if (loop) CFNumberGetValue(loop, kCFNumberNSIntegerType, &_loopCount);
}
CFRelease(properties);
}
}
}
/*
 Handle the multi-frame formats: ICO, GIF, APNG.
*/
NSMutableArray *frames = [NSMutableArray new];
// walk every frame and record its metadata
for (NSUInteger i = 0; i < _frameCount; i++) {
_YYImageDecoderFrame *frame = [_YYImageDecoderFrame new];
frame.index = i;// which frame this record describes
frame.blendFromIndex = i;
frame.hasAlpha = YES;
frame.isFullSize = YES;
[frames addObject:frame];
// per-frame properties (example taken from a 97-frame gif):
/*
description of properties:
{
ColorModel = RGB;
Depth = 8;
PixelHeight = 270;
PixelWidth = 480;
ProfileName = "sRGB IEC61966-2.1";
"{GIF}" = {
DelayTime = "0.09";
UnclampedDelayTime = "0.09";
};
}
**/
CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(_source, i, NULL);
if (properties) {
NSTimeInterval duration = 0;
NSInteger orientationValue = 0, width = 0, height = 0;
CFTypeRef value = NULL;
// pixel width
value = CFDictionaryGetValue(properties, kCGImagePropertyPixelWidth);
// convert the CFNumber into `width`
if (value) CFNumberGetValue(value, kCFNumberNSIntegerType, &width);
// pixel height, same pattern
value = CFDictionaryGetValue(properties, kCGImagePropertyPixelHeight);
if (value) CFNumberGetValue(value, kCFNumberNSIntegerType, &height);
if (_type == YYImageTypeGIF) {
CFDictionaryRef gif = CFDictionaryGetValue(properties, kCGImagePropertyGIFDictionary);
/*
"{GIF}" = {
DelayTime = "0.09";
UnclampedDelayTime = "0.09";
};
**/
if (gif) {
// Use the unclamped frame delay if it exists.
value = CFDictionaryGetValue(gif, kCGImagePropertyGIFUnclampedDelayTime);
if (!value) {
// Fall back to the clamped frame delay if the unclamped frame delay does not exist.
value = CFDictionaryGetValue(gif, kCGImagePropertyGIFDelayTime);
}
if (value) CFNumberGetValue(value, kCFNumberDoubleType, &duration);
}
}
frame.width = width;
frame.height = height;
frame.duration = duration;
// use the first frame's size as the whole image's size,
// and record the EXIF orientation
if (i == 0 && _width + _height == 0) { // init first frame
_width = width;
_height = height;
/* EXIF orientation values:
 * 1 = 0th row is at the top, and 0th column is on the left.
 * 2 = 0th row is at the top, and 0th column is on the right.
 * 3 = 0th row is at the bottom, and 0th column is on the right.
 * 4 = 0th row is at the bottom, and 0th column is on the left.
 * 5 = 0th row is on the left, and 0th column is the top.
 * 6 = 0th row is on the right, and 0th column is the top.
 * 7 = 0th row is on the right, and 0th column is the bottom.
 * 8 = 0th row is on the left, and 0th column is the bottom.
 * If not present, a value of 1 is assumed. */
value = CFDictionaryGetValue(properties, kCGImagePropertyOrientation);
if (value) {
CFNumberGetValue(value, kCFNumberNSIntegerType, &orientationValue);
_orientation = YYUIImageOrientationFromEXIFValue(orientationValue);
}
}
CFRelease(properties);
}
}
dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
_frames = frames;
dispatch_semaphore_signal(_framesLock);
}
// After decoding, the view that plays the animation is created like this:
//   YYAnimatedImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];

/// Initializes the view sized to the image and installs the image via the
/// overridden setter, which configures animation playback when needed.
- (instancetype)initWithImage:(UIImage *)image {
    self = [super init];
    if (!self) return nil; // FIX(review): original ignored a nil result from super
    _runloopMode = NSRunLoopCommonModes;
    _autoPlayAnimatedImage = YES; // animation starts automatically by default
    self.frame = (CGRect) {CGPointZero, image.size };
    self.image = image; // the setter is overridden below
    return self;
}
/// Overridden so every image change routes through setImage:withType: and the
/// animation state is reset accordingly.
- (void)setImage:(UIImage *)image {
    if (self.image != image) {
        [self setImage:image withType:YYAnimatedImageTypeImage];
    }
}
/// Installs `image` into the slot selected by `type`, tearing down any running
/// animation first, then recomputes the animation state.
- (void)setImage:(id)image withType:(YYAnimatedImageType)type {
    [self stopAnimating];
    if (_link) [self resetAnimated];
    _curFrame = nil;
    if (type == YYAnimatedImageTypeImage) {
        super.image = image;
    } else if (type == YYAnimatedImageTypeHighlightedImage) {
        super.highlightedImage = image;
    } else if (type == YYAnimatedImageTypeImages) {
        super.animationImages = image;
    } else if (type == YYAnimatedImageTypeHighlightedImages) {
        super.highlightedAnimationImages = image;
    }
    // YYAnimatedImageTypeNone leaves every slot untouched.
    [self imageChanged];
}
// Recomputes the animation state after any image slot has changed.
- (void)imageChanged {
// which slot is currently visible: animated / highlighted / normal
YYAnimatedImageType newType = [self currentImageType];
// the image that should be displayed now
id newVisibleImage = [self imageForType:newType];
NSUInteger newImageFrameCount = 0;
BOOL hasContentsRect = NO;
if ([newVisibleImage isKindOfClass:[UIImage class]] && [newVisibleImage conformsToProtocol:@protocol(YYAnimatedImage)]) {
// frame count of the animated image
newImageFrameCount = ((UIImage<YYAnimatedImage> *) newVisibleImage).animatedImageFrameCount;
if (newImageFrameCount > 1) {
hasContentsRect = [((UIImage<YYAnimatedImage> *) newVisibleImage) respondsToSelector:@selector(animatedImageContentsRectAtIndex:)];
}
}
// restore the full contents rect when leaving a sprite-sheet style image
if (!hasContentsRect && _curImageHasContentsRect) {
if (!CGRectEqualToRect(self.layer.contentsRect, CGRectMake(0, 0, 1, 1)) ) {
[CATransaction begin];
[CATransaction setDisableActions:YES];
self.layer.contentsRect = CGRectMake(0, 0, 1, 1);
[CATransaction commit];
}
}
_curImageHasContentsRect = hasContentsRect;
if (hasContentsRect) {
CGRect rect = [((UIImage<YYAnimatedImage> *) newVisibleImage) animatedImageContentsRectAtIndex:0];
[self setContentsRect:rect forImage:newVisibleImage];
}
// multiple frames: prepare for playback
if (newImageFrameCount > 1) {
[self resetAnimated];
// remember the current image, its loop count and total frame count
_curAnimatedImage = newVisibleImage;
_curFrame = newVisibleImage;
_totalLoop = _curAnimatedImage.animatedImageLoopCount;
_totalFrameCount = _curAnimatedImage.animatedImageFrameCount;
[self calcMaxBufferCount];
}
[self setNeedsDisplay];
[self didMoved];
}
// init the animated params.
- (void)resetAnimated {
// one-time setup of buffer, request queue, display link and notifications
dispatch_once(&_onceToken, ^{
_lock = dispatch_semaphore_create(1);
_buffer = [NSMutableDictionary new];
_requestQueue = [[NSOperationQueue alloc] init];
_requestQueue.maxConcurrentOperationCount = 1;
/*
 CADisplayLink is a timer that fires at the screen refresh rate. It is
 created here with a weak proxy as its target (avoids the retain cycle a
 display link would otherwise create) and added to a runloop so that
 step: is invoked on every screen refresh.
*/
_link = [CADisplayLink displayLinkWithTarget:[YYWeakProxy proxyWithTarget:self] selector:@selector(step:)];
if (_runloopMode) {// attach to the main thread's runloop
[_link addToRunLoop:[NSRunLoop mainRunLoop] forMode:_runloopMode];
}
_link.paused = YES;
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveMemoryWarning:) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didEnterBackground:) name:UIApplicationDidEnterBackgroundNotification object:nil];
});
[_requestQueue cancelAllOperations];
LOCK(
if (_buffer.count) {
NSMutableDictionary *holder = _buffer;
_buffer = [NSMutableDictionary new];
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^{
// Capture the dictionary to global queue,
// release these images in background to avoid blocking UI thread.
[holder class];
});
}
);
// reset all playback state back to frame 0
_link.paused = YES;
_time = 0;
if (_curIndex != 0) {
[self willChangeValueForKey:@"currentAnimatedImageIndex"];
_curIndex = 0;
[self didChangeValueForKey:@"currentAnimatedImageIndex"];
}
_curAnimatedImage = nil;
_curFrame = nil;
_curLoop = 0;
_totalLoop = 0;
_totalFrameCount = 1;
_loopEnd = NO;
_bufferMiss = NO;
_incrBufferCount = 0;
}
/*
 Display-link callback: advances the animation to the next frame.
*/
- (void)step:(CADisplayLink *)link {
UIImage <YYAnimatedImage> *image = _curAnimatedImage;
NSMutableDictionary *buffer = _buffer;
UIImage *bufferedImage = nil;
NSUInteger nextIndex = (_curIndex + 1) % _totalFrameCount;
BOOL bufferIsFull = NO;
if (!image) return;
if (_loopEnd) { // view will keep in last frame
[self stopAnimating];
return;
}
NSTimeInterval delay = 0;
// after a buffer hit, decide whether the current frame's display time is up
if (!_bufferMiss) {
_time += link.duration;
// how long the current frame should stay on screen
delay = [image animatedImageDurationAtIndex:_curIndex];
if (_time < delay) return;
_time -= delay;
// at the last frame, decide whether another loop should run
if (nextIndex == 0) {
_curLoop++;
if (_curLoop >= _totalLoop && _totalLoop != 0) {// loop budget exhausted
_loopEnd = YES;
[self stopAnimating];
[self.layer setNeedsDisplay]; // let system call `displayLayer:` before runloop sleep
return; // stop at last frame
}
}
delay = [image animatedImageDurationAtIndex:nextIndex];
if (_time > delay) _time = delay; // do not jump over frame
}
LOCK(
// fetch the next frame from the buffer
bufferedImage = buffer[@(nextIndex)];
if (bufferedImage) {
/* When the buffered frame count is below the total, evict the frame
 after use; a later tick then misses the buffer (_bufferMiss = YES)
 and skips the [self.layer setNeedsDisplay] redraw until the fetch
 operation refills it. */
if ((int)_incrBufferCount < _totalFrameCount) {
[buffer removeObjectForKey:@(nextIndex)];
}
[self willChangeValueForKey:@"currentAnimatedImageIndex"];
_curIndex = nextIndex;
[self didChangeValueForKey:@"currentAnimatedImageIndex"];
_curFrame = bufferedImage == (id)[NSNull null] ? nil : bufferedImage;
if (_curImageHasContentsRect) {
_curContentsRect = [image animatedImageContentsRectAtIndex:_curIndex];
[self setContentsRect:_curContentsRect forImage:_curFrame];
}
// wrap around for looping
nextIndex = (_curIndex + 1) % _totalFrameCount;
_bufferMiss = NO;
if (buffer.count == _totalFrameCount) {
bufferIsFull = YES;
}
} else {
_bufferMiss = YES;
}
)
// on a buffer hit, redraw the layer with the new frame
if (!_bufferMiss) {
[self.layer setNeedsDisplay]; // let system call `displayLayer:` before runloop sleep
}
if (!bufferIsFull && _requestQueue.operationCount == 0) { // if some work not finished, wait for next opportunity
_YYAnimatedImageViewFetchOperation *operation = [_YYAnimatedImageViewFetchOperation new];
operation.view = self;
operation.nextIndex = nextIndex;
operation.curImage = image;
[_requestQueue addOperation:operation];
}
}
网友评论