
A Brief Analysis of the YYImage Source Code

Author: 好有魔力 | Published 2019-11-07 13:27

    Introduction

    YYImage is a subclass fully compatible with UIImage. It extends UIImage with support for decoding WebP, GIF, and APNG image data, and it also conforms to NSCoding for archiving and unarchiving.
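
    A minimal usage sketch, following the pattern shown in the YYImage README (the asset name "rocket.gif" is a placeholder; YYAnimatedImageView is the companion view class in the same repo that drives frame playback):

    #import <YYImage/YYImage.h>
    
    // Load an animated GIF; unlike +[UIImage imageNamed:], this does not cache the image.
    YYImage *image = [YYImage imageNamed:@"rocket.gif"];
    
    // YYAnimatedImageView consumes the YYAnimatedImage protocol and plays the frames.
    YYAnimatedImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
    [self.view addSubview:imageView];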

    The YYImage Interface

    @interface YYImage : UIImage <YYAnimatedImage>
    
    // Load an image by name; this method does not cache the image
    + (nullable YYImage *)imageNamed:(NSString *)name;
    // Load an image from a file path
    + (nullable YYImage *)imageWithContentsOfFile:(NSString *)path;
    // Load an image from NSData
    + (nullable YYImage *)imageWithData:(NSData *)data;
    // Load an image from NSData with the given scale
    + (nullable YYImage *)imageWithData:(NSData *)data scale:(CGFloat)scale;
    // The image type
    @property (nonatomic, readonly) YYImageType animatedImageType;
    // The original animated image data
    @property (nullable, nonatomic, readonly) NSData *animatedImageData;
    // Memory occupied by the multi-frame image data; returns 0 if the image was not created from multi-frame data
    @property (nonatomic, readonly) NSUInteger animatedImageMemorySize;
    // Whether to preload all animated image frames
    @property (nonatomic) BOOL preloadAllAnimatedImageFrames;
    @end
    

    Creation Method

    - (instancetype)initWithData:(NSData *)data scale:(CGFloat)scale {
        // Sanity-check the data
        if (data.length == 0) return nil;
        if (scale <= 0) scale = [UIScreen mainScreen].scale;
        // Create the lock (a semaphore used as a mutex)
        _preloadedLock = dispatch_semaphore_create(1);
        @autoreleasepool {
            // Create the image decoder
            YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:scale];
            // Decode the first frame
            YYImageFrame *frame = [decoder frameAtIndex:0 decodeForDisplay:YES];
            UIImage *image = frame.image;
            if (!image) return nil;
            // Initialize self with the decoded CGImage
            self = [self initWithCGImage:image.CGImage scale:decoder.scale orientation:image.imageOrientation];
            if (!self) return nil;
            // Store the image type
            _animatedImageType = decoder.type;
            if (decoder.frameCount > 1) {
                // Keep the decoder around when there is more than one frame
                _decoder = decoder;
                // Size of a single frame
                _bytesPerFrame = CGImageGetBytesPerRow(image.CGImage) * CGImageGetHeight(image.CGImage);
                // Total size of all frames
                _animatedImageMemorySize = _bytesPerFrame * decoder.frameCount;
            }
            // Mark the image as decoded for display
            self.isDecodedForDisplay = YES;
        }
        return self;
    }
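
    As a rough worked example of the memory bookkeeping above (the numbers are hypothetical; real bitmaps may add row padding):

    // A 100x100 pt, @2x, 32-bit BGRA animated image with 20 frames:
    // bytesPerRow              = 200 * 4       =       800 bytes
    // _bytesPerFrame           = 800 * 200     =   160,000 bytes (~156 KB)
    // _animatedImageMemorySize = 160,000 * 20  = 3,200,000 bytes (~3 MB)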
    

    As the source shows, YYImage mainly relies on YYImageDecoder to obtain the image information.

    YYImageDecoder

    YYImageDecoder is a thread-safe image decoder. It supports PNG, JPG, JP2, BMP, TIFF, PIC, ICNS, and ICO, and it can decode a complete image as well as decode data incrementally.
    The YYImageDecoder interface:

    @interface YYImageDecoder : NSObject
    // The image's binary data
    @property (nullable, nonatomic, readonly) NSData *data;
    // The image type: png, jpeg, webP, gif...
    @property (nonatomic, readonly) YYImageType type;
    // The image scale
    @property (nonatomic, readonly) CGFloat scale;
    // The number of frames
    @property (nonatomic, readonly) NSUInteger frameCount;
    // The loop count; 0 means loop forever
    @property (nonatomic, readonly) NSUInteger loopCount;
    // The image width
    @property (nonatomic, readonly) NSUInteger width;
    // The image height
    @property (nonatomic, readonly) NSUInteger height;
    // Whether decoding has been finalized
    @property (nonatomic, readonly, getter=isFinalized) BOOL finalized;
    // Initialize with an image scale
    - (instancetype)initWithScale:(CGFloat)scale;
    // Update the decoder with the accumulated data
    - (BOOL)updateData:(nullable NSData *)data final:(BOOL)final;
    // Create a decoder from image data and a scale
    + (nullable instancetype)decoderWithData:(NSData *)data scale:(CGFloat)scale;
    // Decode a single frame; when decodeForDisplay is YES the frame is decoded into a bitmap ready for display
    - (nullable YYImageFrame *)frameAtIndex:(NSUInteger)index decodeForDisplay:(BOOL)decodeForDisplay;
    // Get the duration of a given frame
    - (NSTimeInterval)frameDurationAtIndex:(NSUInteger)index;
    // Get the image properties
    - (nullable NSDictionary *)imageProperties;
    @end
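
    Taken together, this interface already supports straightforward frame extraction. A minimal sketch, assuming `data` holds the bytes of an animated image:

    YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:2.0];
    if (decoder) {
        NSLog(@"type %lu, %lu frames, loops %lu times",
              (unsigned long)decoder.type,
              (unsigned long)decoder.frameCount,
              (unsigned long)decoder.loopCount);
        for (NSUInteger i = 0; i < decoder.frameCount; i++) {
            // frame.image is ready for display; the duration drives the animation timing.
            YYImageFrame *frame = [decoder frameAtIndex:i decodeForDisplay:YES];
            NSTimeInterval duration = [decoder frameDurationAtIndex:i];
            NSLog(@"frame %lu: %@ shown for %.3fs", (unsigned long)i, frame.image, duration);
        }
    }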
    

    Initialization

    - (instancetype)initWithScale:(CGFloat)scale {
        self = [super init];
        if (scale <= 0) scale = 1;
        // Save the scale
        _scale = scale;
        // Create the two locks
        _framesLock = dispatch_semaphore_create(1);
        pthread_mutex_init_recursive(&_lock, true);
        return self;
    }
    
    + (instancetype)decoderWithData:(NSData *)data scale:(CGFloat)scale {
        if (!data) return nil;
        // Call the instance initializer
        YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:scale];
        // Feed it the data to decode
        [decoder updateData:data final:YES];
        if (decoder.frameCount == 0) return nil;
        return decoder;
    }
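
    The same decoder also supports progressive (incremental) decoding: create it with a scale, then keep feeding it the accumulated data as bytes arrive. A minimal sketch, where the `receivedData` buffer and the `isLastChunk` flag are assumptions for illustration:

    YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:[UIScreen mainScreen].scale];
    NSMutableData *receivedData = [NSMutableData data];
    
    // Call this for every chunk of a download; pass isLastChunk == YES with the final chunk.
    void (^appendChunk)(NSData *, BOOL) = ^(NSData *chunk, BOOL isLastChunk) {
        [receivedData appendData:chunk];
        // The decoder expects the *accumulated* data, not just the new chunk.
        [decoder updateData:receivedData final:isLastChunk];
        if (decoder.frameCount > 0) {
            // Progressive preview: decode whatever is currently available.
            UIImage *preview = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
            NSLog(@"partial preview: %@", preview);
        }
    };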
    

    Adding Data and Decoding

    // Update the data to decode
    - (BOOL)updateData:(NSData *)data final:(BOOL)final {
        BOOL result = NO;
        // Lock
        pthread_mutex_lock(&_lock);
        // Call the private implementation
        result = [self _updateData:data final:final];
        pthread_mutex_unlock(&_lock);
        return result;
    }
    
    // Private implementation for updating the data to decode
    - (BOOL)_updateData:(NSData *)data final:(BOOL)final {
        // If decoding has already been finalized, bail out
        if (_finalized) return NO;
        // The new data must contain the previously received data (it is accumulated), otherwise bail out
        if (data.length < _data.length) return NO;
        _finalized = final;
        _data = data;
        // Detect the image type
        YYImageType type = YYImageDetectType((__bridge CFDataRef)data);
        // If the type has already been detected
        if (_sourceTypeDetected) {
            if (_type != type) {
                // It no longer matches the previously detected type; fail
                return NO;
            } else {
                // Call the private source-updating method
                [self _updateSource];
            }
        } else {
            // The image type can only be detected once more than 16 bytes are available
            if (_data.length > 16) {
                _type = type;
                // Mark the type as detected
                _sourceTypeDetected = YES;
                // Call the private source-updating method
                [self _updateSource];
            }
        }
        return YES;
    }
    
    // Private source-updating method
    - (void)_updateSource {
        // Dispatch to the type-specific update method
        switch (_type) {
            case YYImageTypeWebP: {
                // Update the WebP source
                [self _updateSourceWebP];
            } break;
                
            case YYImageTypePNG: {
                // Update the (A)PNG source
                [self _updateSourceAPNG];
            } break;
                
            default: {
                // Update any other type via ImageIO
                [self _updateSourceImageIO];
            } break;
        }
    }
    // Update the WebP source
    - (void)_updateSourceWebP {
    #if YYIMAGE_WEBP_ENABLED
        // Reset the state
        _width = 0;
        _height = 0;
        _loopCount = 0;
        // Release any previously parsed WebP data
        if (_webpSource) WebPDemuxDelete(_webpSource);
        _webpSource = NULL;
        
        // Clear the previously decoded frames
        dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
        _frames = nil;
        dispatch_semaphore_signal(_framesLock);
        
        // Everything prefixed with WebP comes from Google's WebP.framework
        // Build a WebPData struct
        WebPData webPData = {0};
        webPData.bytes = _data.bytes;
        webPData.size = _data.length;
        // Create the WebP demuxer
        WebPDemuxer *demuxer = WebPDemux(&webPData);
        // Demuxing failed, just return
        if (!demuxer) return;
        // Get the frame count
        uint32_t webpFrameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
        // Get the loop count
        uint32_t webpLoopCount =  WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
        // Get the canvas width
        uint32_t canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
        // Get the canvas height
        uint32_t canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
        // Sanity-check the image information
        if (webpFrameCount == 0 || canvasWidth < 1 || canvasHeight < 1) {
            // Release the demuxer
            WebPDemuxDelete(demuxer);
            return;
        }
        // Create the frame array
        NSMutableArray *frames = [NSMutableArray new];
        BOOL needBlend = NO;
        uint32_t iterIndex = 0;
        uint32_t lastBlendIndex = 0;
        // Create the iterator
        WebPIterator iter = {0};
        // Successfully got the first frame
        if (WebPDemuxGetFrame(demuxer, 1, &iter)) { // one-based index...
            do {
                // Create a frame object
                _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new];
                // Add it to the frame array
                [frames addObject:frame];
                // Set the frame's dispose method
                if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
                    frame.dispose = YYImageDisposeBackground;
                }
                // Set the blend mode
                if (iter.blend_method == WEBP_MUX_BLEND) {
                    frame.blend = YYImageBlendOver;
                }
                // Get the canvas width
                int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
                // Get the canvas height
                int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
                // Copy the iterator's frame information into the _YYImageDecoderFrame
                frame.index = iterIndex;
                frame.duration = iter.duration / 1000.0;
                frame.width = iter.width;
                frame.height = iter.height;
                frame.hasAlpha = iter.has_alpha;
                frame.blend = iter.blend_method == WEBP_MUX_BLEND;
                frame.offsetX = iter.x_offset;
                frame.offsetY = canvasHeight - iter.y_offset - iter.height;
                
                BOOL sizeEqualsToCanvas = (iter.width == canvasWidth && iter.height == canvasHeight);
                // Check the offset
                BOOL offsetIsZero = (iter.x_offset == 0 && iter.y_offset == 0);
                // A frame is "full size" when it covers the whole canvas with no offset
                frame.isFullSize = (sizeEqualsToCanvas && offsetIsZero);
                
                if ((!frame.blend || !frame.hasAlpha) && frame.isFullSize) {
                    frame.blendFromIndex = lastBlendIndex = iterIndex;
                } else {
                    if (frame.dispose && frame.isFullSize) {
                        // Record the index of the frame to blend from
                        frame.blendFromIndex = lastBlendIndex;
                        lastBlendIndex = iterIndex + 1;
                    } else {
                        frame.blendFromIndex = lastBlendIndex;
                    }
                }
                if (frame.index != frame.blendFromIndex) needBlend = YES;
                iterIndex++;
            } while (WebPDemuxNextFrame(&iter));
            // All frames consumed, release the iterator
            WebPDemuxReleaseIterator(&iter);
        }
        
        if (frames.count != webpFrameCount) {
            // The parsed frame count does not match, bail out
            WebPDemuxDelete(demuxer);
            return;
        }
        // Update the decoder's state
        _width = canvasWidth;
        _height = canvasHeight;
        _frameCount = frames.count;
        _loopCount = webpLoopCount;
        _needBlend = needBlend;
        // Keep the WebP demuxer
        _webpSource = demuxer;
        // Lock
        dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
        // Hold on to all of the decoded frame descriptions
        _frames = frames;
        dispatch_semaphore_signal(_framesLock);
    #else
        static const char *func = __FUNCTION__;
        static const int line = __LINE__;
        static dispatch_once_t onceToken;
        dispatch_once(&onceToken, ^{
            NSLog(@"[%s: %d] WebP is not available, check the documentation to see how to install WebP component: https://github.com/ibireme/YYImage#installation", func, line);
        });
    #endif
    }
    // Parse an animated APNG image
    - (void)_updateSourceAPNG {
        // Release any previously parsed PNG data
        yy_png_info_release(_apngSource);
        _apngSource = nil;
        // Decode the first frame with ImageIO
        [self _updateSourceImageIO];
        // If that failed, return
        if (_frameCount == 0) return;
        if (!_finalized) return; // ignore multi-frame before finalized
        // Parse the PNG data
        yy_png_info *apng = yy_png_info_create(_data.bytes, (uint32_t)_data.length);
        if (!apng) return; // apng decode failed
        if (apng->apng_frame_num == 0 ||
            (apng->apng_frame_num == 1 && apng->apng_first_frame_is_cover)) {
            yy_png_info_release(apng);
            // There is no animation, return
            return;
        }
    
        if (_source) { // apng decode succeeded, the image source is no longer needed
            CFRelease(_source);
            _source = NULL;
        }
        // Get the canvas width
        uint32_t canvasWidth = apng->header.width;
        // Get the canvas height
        uint32_t canvasHeight = apng->header.height;
        // Create the frame array
        NSMutableArray *frames = [NSMutableArray new];
        BOOL needBlend = NO;
        uint32_t lastBlendIndex = 0;
        // Iterate over every APNG frame
        for (uint32_t i = 0; i < apng->apng_frame_num; i++) {
            // Create a _YYImageDecoderFrame object and add it to the array
            _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new];
            [frames addObject:frame];
            // Take one frame's control data
            yy_png_frame_info *fi = apng->apng_frames + i;
            // Fill in the _YYImageDecoderFrame
            frame.index = i;
            frame.duration = yy_png_delay_to_seconds(fi->frame_control.delay_num, fi->frame_control.delay_den);
            frame.hasAlpha = YES;
            frame.width = fi->frame_control.width;
            frame.height = fi->frame_control.height;
            frame.offsetX = fi->frame_control.x_offset;
            frame.offsetY = canvasHeight - fi->frame_control.y_offset - fi->frame_control.height;
            
            BOOL sizeEqualsToCanvas = (frame.width == canvasWidth && frame.height == canvasHeight);
            BOOL offsetIsZero = (fi->frame_control.x_offset == 0 && fi->frame_control.y_offset == 0);
            frame.isFullSize = (sizeEqualsToCanvas && offsetIsZero);
            // Set the frame's dispose method
            switch (fi->frame_control.dispose_op) {
                case YY_PNG_DISPOSE_OP_BACKGROUND: {
                    // Clear the frame's region of the canvas before the next frame is drawn
                    frame.dispose = YYImageDisposeBackground;
                } break;
                case YY_PNG_DISPOSE_OP_PREVIOUS: {
                    // Revert the canvas to its previous state before the next frame is drawn
                    frame.dispose = YYImageDisposePrevious;
                } break;
                default: {
                    // Do nothing to the canvas before the next frame is drawn
                    frame.dispose = YYImageDisposeNone;
                } break;
            }
            // Set the blend mode
            switch (fi->frame_control.blend_op) {
                case YY_PNG_BLEND_OP_OVER: {
                    // Alpha-composite the frame over the canvas
                    frame.blend = YYImageBlendOver;
                } break;
                    
                default: {
                    // Overwrite the canvas region
                    frame.blend = YYImageBlendNone;
                } break;
            }
            
            if (frame.blend == YYImageBlendNone && frame.isFullSize) {
                frame.blendFromIndex  = i;
                if (frame.dispose != YYImageDisposePrevious) lastBlendIndex = i;
            } else {
                // Record the index of the frame to blend from
                if (frame.dispose == YYImageDisposeBackground && frame.isFullSize) {
                    frame.blendFromIndex = lastBlendIndex;
                    lastBlendIndex = i + 1;
                } else {
                    frame.blendFromIndex = lastBlendIndex;
                }
            }
            // Whether blending is needed at all
            if (frame.index != frame.blendFromIndex) needBlend = YES;
        }
        
        // Save the decoder's state
        _width = canvasWidth;
        _height = canvasHeight;
        _frameCount = frames.count;
        _loopCount = apng->apng_loop_num;
        _needBlend = needBlend;
        _apngSource = apng;
        dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
        _frames = frames;
        dispatch_semaphore_signal(_framesLock);
    }
    // Decode with ImageIO
    - (void)_updateSourceImageIO {
        // Reset the previous state
        _width = 0;
        _height = 0;
        _orientation = UIImageOrientationUp;
        _loopCount = 0;
        dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
        _frames = nil;
        dispatch_semaphore_signal(_framesLock);
        
        // Create the _source object if it does not exist yet
        if (!_source) {
            if (_finalized) {
                // Create a CGImageSourceRef from the complete data
                _source = CGImageSourceCreateWithData((__bridge CFDataRef)_data, NULL);
            } else {
                // Create an incremental CGImageSourceRef for progressive decoding
                _source = CGImageSourceCreateIncremental(NULL);
                if (_source) CGImageSourceUpdateData(_source, (__bridge CFDataRef)_data, false);
            }
        } else {
            // _source already exists, just feed it the new data
            CGImageSourceUpdateData(_source, (__bridge CFDataRef)_data, _finalized);
        }
        // Creating _source failed, return
        if (!_source) return;
        
        // Get the frame count
        _frameCount = CGImageSourceGetCount(_source);
        if (_frameCount == 0) return;
        
        // Handle the cases where multiple frames should be ignored
        if (!_finalized) { // ignore multi-frame before finalized
            _frameCount = 1;
        } else {
            if (_type == YYImageTypePNG) { // use the custom apng decoder and ignore multi-frame
                _frameCount = 1;
            }
            
            if (_type == YYImageTypeGIF) { // get gif loop count
                CFDictionaryRef properties = CGImageSourceCopyProperties(_source, NULL);
                if (properties) {
                    // Get the GIF property dictionary
                    CFDictionaryRef gif = CFDictionaryGetValue(properties, kCGImagePropertyGIFDictionary);
                    if (gif) {
                        CFTypeRef loop = CFDictionaryGetValue(gif, kCGImagePropertyGIFLoopCount);
                        // Read the GIF loop count
                        if (loop) CFNumberGetValue(loop, kCFNumberNSIntegerType, &_loopCount);
                    }
                    // Release the properties
                    CFRelease(properties);
                }
            }
        }
        
        /*
         ICO, GIF, APNG may contain multiple frames.
         */
        // Create the frame array
        NSMutableArray *frames = [NSMutableArray new];
        // Iterate over every frame
        for (NSUInteger i = 0; i < _frameCount; i++) {
            _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new];
            frame.index = i;
            frame.blendFromIndex = i;
            frame.hasAlpha = YES;
            frame.isFullSize = YES;
            [frames addObject:frame];
            // Get the frame's properties
            CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(_source, i, NULL);
            if (properties) {
                NSTimeInterval duration = 0;
                NSInteger orientationValue = 0, width = 0, height = 0;
                CFTypeRef value = NULL;
                // Get the pixel width
                value = CFDictionaryGetValue(properties, kCGImagePropertyPixelWidth);
                if (value) CFNumberGetValue(value, kCFNumberNSIntegerType, &width);
    
                // Get the pixel height
                value = CFDictionaryGetValue(properties, kCGImagePropertyPixelHeight);
                if (value) CFNumberGetValue(value, kCFNumberNSIntegerType, &height);
                // When the image type is GIF
                if (_type == YYImageTypeGIF) {
                    // Get the GIF-specific properties
                    CFDictionaryRef gif = CFDictionaryGetValue(properties, kCGImagePropertyGIFDictionary);
                    if (gif) {
                        // Use the unclamped frame delay if it exists.
                        value = CFDictionaryGetValue(gif, kCGImagePropertyGIFUnclampedDelayTime);
                        if (!value) {
                            // Fall back to the clamped frame delay if the unclamped frame delay does not exist.
                            value = CFDictionaryGetValue(gif, kCGImagePropertyGIFDelayTime);
                        }
                        // Set the duration
                        if (value) CFNumberGetValue(value, kCFNumberDoubleType, &duration);
                    }
                }
                // Store the data in the _YYImageDecoderFrame
                frame.width = width;
                frame.height = height;
                frame.duration = duration;
                // For the first frame
                if (i == 0 && _width + _height == 0) { // init first frame
                    // Save the width and height
                    _width = width;
                    _height = height;
                    value = CFDictionaryGetValue(properties, kCGImagePropertyOrientation);
                    if (value) {
                        // Read the image orientation
                        CFNumberGetValue(value, kCFNumberNSIntegerType, &orientationValue);
                        _orientation = YYUIImageOrientationFromEXIFValue(orientationValue);
                    }
                }
                // Release the properties
                CFRelease(properties);
            }
        }
        // Save the frame array
        dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
        _frames = frames;
        dispatch_semaphore_signal(_framesLock);
    }
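
    The `YYImageDetectType` call used in `_updateData:final:` is not listed in this article, but conceptually it sniffs the magic bytes at the start of the buffer. A simplified sketch of the idea (not YYImage's actual implementation, which covers many more formats):

    static YYImageType MyDetectImageType(NSData *data) {
        if (data.length < 16) return YYImageTypeUnknown; // matches the 16-byte check above
        const uint8_t *bytes = data.bytes;
        if (memcmp(bytes, "\x89PNG\r\n\x1a\n", 8) == 0) return YYImageTypePNG;   // PNG (and APNG)
        if (memcmp(bytes, "GIF8", 4) == 0) return YYImageTypeGIF;                // GIF87a / GIF89a
        if (bytes[0] == 0xFF && bytes[1] == 0xD8 && bytes[2] == 0xFF) return YYImageTypeJPEG;
        if (memcmp(bytes, "RIFF", 4) == 0 && memcmp(bytes + 8, "WEBP", 4) == 0) return YYImageTypeWebP;
        return YYImageTypeUnknown;
    }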
    
    

    Getting a Frame Image

    // Get one frame's image
    - (YYImageFrame *)frameAtIndex:(NSUInteger)index decodeForDisplay:(BOOL)decodeForDisplay {
        YYImageFrame *result = nil;
        pthread_mutex_lock(&_lock);
        // Call the private implementation
        result = [self _frameAtIndex:index decodeForDisplay:decodeForDisplay];
        pthread_mutex_unlock(&_lock);
        return result;
    }
    // Private frame-fetching method
    - (YYImageFrame *)_frameAtIndex:(NSUInteger)index decodeForDisplay:(BOOL)decodeForDisplay {
        if (index >= _frames.count) return 0;
        // Copy the _YYImageDecoderFrame object
        _YYImageDecoderFrame *frame = [(_YYImageDecoderFrame *)_frames[index] copy];
        BOOL decoded = NO;
        BOOL extendToCanvas = NO;
        // ICO contains multi-size frames and should not be extended to the canvas
        if (_type != YYImageTypeICO && decodeForDisplay) { // ICO contains multi-size frame and should not extend to canvas.
            extendToCanvas = YES;
        }
        // When no blending is needed
        if (!_needBlend) {
            // Get the unblended image; it may not be decoded for display yet
            CGImageRef imageRef = [self _newUnblendedImageAtIndex:index extendToCanvas:extendToCanvas decoded:&decoded];
            if (!imageRef) return nil;
            if (decodeForDisplay && !decoded) {
                CGImageRef imageRefDecoded = YYCGImageCreateDecodedCopy(imageRef, YES);
                if (imageRefDecoded) {
                    CFRelease(imageRef);
                    // Use the decoded CGImageRef
                    imageRef = imageRefDecoded;
                    decoded = YES;
                }
            }
            // Wrap it in a UIImage
            UIImage *image = [UIImage imageWithCGImage:imageRef scale:_scale orientation:_orientation];
            CFRelease(imageRef);
            if (!image) return nil;
            image.isDecodedForDisplay = decoded;
            // Attach it to the _YYImageDecoderFrame
            frame.image = image;
            return frame;
        }
        
        // Blending is needed
        // Create the blend context
        if (![self _createBlendContextIfNeeded]) return nil;
        CGImageRef imageRef = NULL;
        
        if (_blendFrameIndex + 1 == frame.index) {
            // The canvas already holds the previous frame; blend this frame on top of it
            imageRef = [self _newBlendedImageWithFrame:frame];
            _blendFrameIndex = index;
        } else { // should draw canvas from previous frame
            _blendFrameIndex = NSNotFound;
            // Clear the CGContext
            CGContextClearRect(_blendCanvas, CGRectMake(0, 0, _width, _height));
            
            if (frame.blendFromIndex == frame.index) {
                // This frame only depends on itself
                CGImageRef unblendedImage = [self _newUnblendedImageAtIndex:index extendToCanvas:NO decoded:NULL];
                if (unblendedImage) {
                    // Draw it into the CGContext
                    CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendedImage);
                    CFRelease(unblendedImage);
                }
                // Read the decoded image back from the canvas
                imageRef = CGBitmapContextCreateImage(_blendCanvas);
                // If the canvas has to be cleared afterwards
                if (frame.dispose == YYImageDisposeBackground) {
                    // Clear the frame's region of the CGContext
                    CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
                }
                _blendFrameIndex = index;
            } else { // canvas is not ready
                // Rebuild the canvas from blendFromIndex up to this frame
                for (uint32_t i = (uint32_t)frame.blendFromIndex; i <= (uint32_t)frame.index; i++) {
                    if (i == frame.index) {
                        if (!imageRef) imageRef = [self _newBlendedImageWithFrame:frame];
                    } else {
                        [self _blendImageWithFrame:_frames[i]];
                    }
                }
                _blendFrameIndex = index;
            }
        }
        
        if (!imageRef) return nil;
        // Create the UIImage
        UIImage *image = [UIImage imageWithCGImage:imageRef scale:_scale orientation:_orientation];
        CFRelease(imageRef);
        if (!image) return nil;
        
        image.isDecodedForDisplay = YES;
        // Attach it to the frame object
        frame.image = image;
        // When extended to the canvas, update the frame geometry to match
        if (extendToCanvas) {
            frame.width = _width;
            frame.height = _height;
            frame.offsetX = 0;
            frame.offsetY = 0;
            frame.dispose = YYImageDisposeNone;
            frame.blend = YYImageBlendNone;
        }
        return frame;
    }
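
    `YYCGImageCreateDecodedCopy`, called above when `decodeForDisplay` is YES, is not listed here, but the core idea of "decoding for display" is to redraw the CGImage into a bitmap context so the pixel buffer is fully decompressed before it reaches the render server. A simplified sketch of that idea (not the library's exact implementation):

    // Caller is responsible for releasing the returned image.
    static CGImageRef MyCreateDecodedCopy(CGImageRef imageRef) {
        if (!imageRef) return NULL;
        size_t width = CGImageGetWidth(imageRef);
        size_t height = CGImageGetHeight(imageRef);
        if (width == 0 || height == 0) return NULL;
        // BGRA with premultiplied alpha: a format the iOS render server handles directly.
        CGColorSpaceRef space = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, space,
                                                     kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
        CGColorSpaceRelease(space);
        if (!context) return NULL;
        // Drawing forces the (possibly compressed) image data to be decoded right now.
        CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
        CGImageRef decoded = CGBitmapContextCreateImage(context);
        CGContextRelease(context);
        return decoded;
    }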
    
    

    Blending-Related Methods
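
    Before reading these methods it helps to keep the dispose and blend semantics in mind; they follow the APNG specification, and the WebP demuxer exposes equivalent flags:

    // Dispose: what happens to the frame's region of the canvas *before* the next frame is drawn.
    //   YYImageDisposeNone       - leave the canvas as it is
    //   YYImageDisposeBackground - clear the region to fully transparent
    //   YYImageDisposePrevious   - revert the region to what it was before this frame was drawn
    // Blend: how the frame's pixels are combined with the canvas when the frame is drawn.
    //   YYImageBlendNone         - overwrite the region with the frame's pixels (including alpha)
    //   YYImageBlendOver         - alpha-composite the frame over the existing canvas contents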

    // Create the blend context if needed
    - (BOOL)_createBlendContextIfNeeded {
        if (!_blendCanvas) {
            // Reset the cached frame index
            _blendFrameIndex = NSNotFound;
            // Create a bitmap context
            _blendCanvas = CGBitmapContextCreate(NULL, _width, _height, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
        }
        // Report whether the context exists
        BOOL suc = _blendCanvas != NULL;
        return suc;
    }
    
    // Create a blended image for the frame
    - (CGImageRef)_newBlendedImageWithFrame:(_YYImageDecoderFrame *)frame CF_RETURNS_RETAINED {
        CGImageRef imageRef = NULL;
        // YYImageDisposePrevious: the frame's region must be reverted to its previous contents before the next frame is drawn
        if (frame.dispose == YYImageDisposePrevious) {
            // The frame is alpha-composited over the output buffer
            if (frame.blend == YYImageBlendOver) {
                // Snapshot the canvas's current contents
                CGImageRef previousImage = CGBitmapContextCreateImage(_blendCanvas);
                // Build an image from the current frame
                CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
                if (unblendImage) {
                    // Draw the current frame onto the blend canvas
                    CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
                    CFRelease(unblendImage);
                }
                // Read the composited image back from the blend canvas
                imageRef = CGBitmapContextCreateImage(_blendCanvas);
                // Clear the canvas
                CGContextClearRect(_blendCanvas, CGRectMake(0, 0, _width, _height));
                if (previousImage) {
                    // Restore the previous contents onto the blend canvas
                    CGContextDrawImage(_blendCanvas, CGRectMake(0, 0, _width, _height), previousImage);
                    CFRelease(previousImage);
                }
            } else { // Overwrite the region with all of the frame's color components
                // Snapshot the canvas's current contents
                CGImageRef previousImage = CGBitmapContextCreateImage(_blendCanvas);
                // Build an image from the current frame
                CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
                if (unblendImage) {
                    // Clear the frame's region of the canvas
                    CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
                    // Draw the current frame
                    CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
                    CFRelease(unblendImage);
                }
                // Read the composited frame back from the blend canvas
                imageRef = CGBitmapContextCreateImage(_blendCanvas);
                // Clear the canvas
                CGContextClearRect(_blendCanvas, CGRectMake(0, 0, _width, _height));
                if (previousImage) {
                    // Restore the previous contents onto the canvas
                    CGContextDrawImage(_blendCanvas, CGRectMake(0, 0, _width, _height), previousImage);
                    CFRelease(previousImage);
                }
            }
        } else if (frame.dispose == YYImageDisposeBackground) { // The frame's region must be cleared to fully transparent before the next frame is drawn
            // The frame is alpha-composited over the output buffer
            if (frame.blend == YYImageBlendOver) {
                // Build an image from the current frame
                CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
                if (unblendImage) {
                    // Draw the current frame onto the canvas
                    CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
                    CFRelease(unblendImage);
                }
                // Read the composited frame back from the canvas
                imageRef = CGBitmapContextCreateImage(_blendCanvas);
                // Clear the frame's region of the canvas
                CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
            } else { // Overwrite the region with all of the frame's color components
                // Build an image from the current frame
                CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
                if (unblendImage) {
                    // Clear the frame's region of the canvas
                    CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
                    // Draw the current frame
                    CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
                    CFRelease(unblendImage);
                }
                // Read the composited frame back from the canvas
                imageRef = CGBitmapContextCreateImage(_blendCanvas);
                // Clear the frame's region of the canvas
                CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
            }
        } else { // no dispose
            // The canvas is left untouched before the next frame is drawn
            // Alpha-composited blending
            if (frame.blend == YYImageBlendOver) {
                // Build an image from the current frame
                CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
                if (unblendImage) {
                    // Draw the current frame directly onto the canvas
                    CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
                    CFRelease(unblendImage);
                }
                // Read the image back from the canvas
                imageRef = CGBitmapContextCreateImage(_blendCanvas);
            } else { // Overwrite the region with all of the frame's color components
                // Build an image from the current frame
                CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
                if (unblendImage) {
                    // Clear the frame's region of the canvas
                    CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
                    // Draw the current frame
                    CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
                    CFRelease(unblendImage);
                }
                // Read the image back from the canvas
                imageRef = CGBitmapContextCreateImage(_blendCanvas);
            }
        }
        return imageRef;
    }
    
    // Return the unblended image for a single frame (without compositing it onto the canvas)
    - (CGImageRef)_newUnblendedImageAtIndex:(NSUInteger)index
                             extendToCanvas:(BOOL)extendToCanvas
                                    decoded:(BOOL *)decoded CF_RETURNS_RETAINED {
        
        // Before decoding is finalized only the first frame is available
        if (!_finalized && index > 0) return NULL;
        // The frame index is out of bounds, fail
        if (_frames.count <= index) return NULL;
        
        _YYImageDecoderFrame *frame = _frames[index];
        
        // png, jpeg, ico... (ImageIO-backed source)
        if (_source) {
            // Get the image from the CGImageSourceRef
            CGImageRef imageRef = CGImageSourceCreateImageAtIndex(_source, index, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(YES)});
            // When the frame has to be extended to the canvas size
            if (imageRef && extendToCanvas) {
                size_t width = CGImageGetWidth(imageRef);
                size_t height = CGImageGetHeight(imageRef);
                // Same size as the canvas
                if (width == _width && height == _height) {
                    // Just decode the image
                    CGImageRef imageRefExtended = YYCGImageCreateDecodedCopy(imageRef, YES);
                    if (imageRefExtended) {
                        CFRelease(imageRef);
                        imageRef = imageRefExtended;
                        if (decoded) *decoded = YES;
                    }
                } else { // Different size than the canvas
                    // Create a canvas-sized bitmap context
                    CGContextRef context = CGBitmapContextCreate(NULL, _width, _height, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
                    if (context) {
                        // Draw the frame, offsetting vertically to account for the size difference
                        CGContextDrawImage(context, CGRectMake(0, _height - height, width, height), imageRef);
                        CGImageRef imageRefExtended = CGBitmapContextCreateImage(context);
                        CFRelease(context);
                        if (imageRefExtended) {
                            CFRelease(imageRef);
                            imageRef = imageRefExtended;
                            if (decoded) *decoded = YES;
                        }
                    }
                }
            }
            return imageRef;
        }
        
        // The image is in APNG format
        if (_apngSource) {
            uint32_t size = 0;
            // Copy one frame's data
            uint8_t *bytes = yy_png_copy_frame_data_at_index(_data.bytes, _apngSource, (uint32_t)index, &size);
            if (!bytes) return NULL;
            // Hand the frame data to a CGDataProviderRef
            CGDataProviderRef provider = CGDataProviderCreateWithData(bytes, bytes, size, YYCGDataProviderReleaseDataCallback);
            if (!provider) {
                free(bytes);
                return NULL;
            }
            bytes = NULL; // held by the provider
            // Create a CGImageSourceRef from the CGDataProviderRef
            CGImageSourceRef source = CGImageSourceCreateWithDataProvider(provider, NULL);
            if (!source) {
                CFRelease(provider);
                return NULL;
            }
            CFRelease(provider);
            // Fewer than one frame means the data is broken, fail
            if(CGImageSourceGetCount(source) < 1) {
                CFRelease(source);
                return NULL;
            }
            // Get the CGImageRef
            CGImageRef imageRef = CGImageSourceCreateImageAtIndex(source, 0, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(YES)});
            CFRelease(source);
            if (!imageRef) return NULL;
            // The frame has to be extended to the canvas size
            if (extendToCanvas) {
                // Create a canvas-sized bitmap context
                CGContextRef context = CGBitmapContextCreate(NULL, _width, _height, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst); //bgrA
                if (context) {
                    // Draw (and thereby decode) the current frame at its offset
                    CGContextDrawImage(context, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), imageRef);
                    CFRelease(imageRef);
                    // Read the decoded image back from the context
                    imageRef = CGBitmapContextCreateImage(context);
                    CFRelease(context);
                    if (decoded) *decoded = YES;
                }
            }
            return imageRef;
        }
       
    #if YYIMAGE_WEBP_ENABLED
        // The image is in WebP format
        if (_webpSource) {
            WebPIterator iter;
            // Get the WebP frame iterator
            if (!WebPDemuxGetFrame(_webpSource, (int)(index + 1), &iter)) return NULL; // demux webp frame data
            // frame numbers are one-based in webp -----------^
            
            // Get the frame size
            int frameWidth = iter.width;
            int frameHeight = iter.height;
            if (frameWidth < 1 || frameHeight < 1) return NULL;
            
            int width = extendToCanvas ? (int)_width : frameWidth;
            int height = extendToCanvas ? (int)_height : frameHeight;
            if (width > _width || height > _height) return NULL;
            
            // Pointer to the frame's payload
            const uint8_t *payload = iter.fragment.bytes;
            // Size of the payload
            size_t payloadSize = iter.fragment.size;
            
            // Create a WebP decoder configuration
            WebPDecoderConfig config;
            if (!WebPInitDecoderConfig(&config)) {
                // Configuration failed, release the iterator
                WebPDemuxReleaseIterator(&iter);
                return NULL;
            }
            // Validate the WebP frame data
            if (WebPGetFeatures(payload , payloadSize, &config.input) != VP8_STATUS_OK) {
                WebPDemuxReleaseIterator(&iter);
                return NULL;
            }
            
            size_t bitsPerComponent = 8;
            size_t bitsPerPixel = 32;
            // Byte-align bytesPerRow up to the nearest multiple of 32
            size_t bytesPerRow = YYImageByteAlign(bitsPerPixel / 8 * width, 32);
            // Size of the pixel buffer
            size_t length = bytesPerRow * height;
            CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst; //bgrA
            // Allocate the pixel buffer
            void *pixels = calloc(1, length);
            if (!pixels) {
                WebPDemuxReleaseIterator(&iter);
                return NULL;
            }
            // Set the WebP output colorspace to premultiplied BGRA
            config.output.colorspace = MODE_bgrA;
            // Use external (caller-provided) memory
            config.output.is_external_memory = 1;
            // Point the output at the pixel buffer
            config.output.u.RGBA.rgba = pixels;
            // Set the stride (bytesPerRow)
            config.output.u.RGBA.stride = (int)bytesPerRow;
            // Set the buffer size
            config.output.u.RGBA.size = length;
            // Decode the WebP data
            VP8StatusCode result = WebPDecode(payload, payloadSize, &config); // decode
            // Decoding failed, release resources and fail
            if ((result != VP8_STATUS_OK) && (result != VP8_STATUS_NOT_ENOUGH_DATA)) {
                WebPDemuxReleaseIterator(&iter);
                free(pixels);
                return NULL;
            }
            WebPDemuxReleaseIterator(&iter);
            
            // Shift the frame into place when it has to be extended to the canvas size
            if (extendToCanvas && (iter.x_offset != 0 || iter.y_offset != 0)) {
                void *tmp = calloc(1, length);
                if (tmp) {
                    // Source buffer
                    vImage_Buffer src = {pixels, height, width, bytesPerRow};
                    // Destination buffer
                    vImage_Buffer dest = {tmp, height, width, bytesPerRow};
                    // Translation transform for the frame's offset
                    vImage_CGAffineTransform transform = {1, 0, 0, 1, iter.x_offset, -iter.y_offset};
                    uint8_t backColor[4] = {0};
                    vImage_Error error = vImageAffineWarpCG_ARGB8888(&src, &dest, NULL, &transform, backColor, kvImageBackgroundColorFill);
                    if (error == kvImageNoError) {
                        // Copy the shifted pixels back into the original buffer
                        memcpy(pixels, tmp, length);
                    }
                    free(tmp);
                }
            }
            
            // Create a CGDataProvider that takes over the pixel buffer
            CGDataProviderRef provider = CGDataProviderCreateWithData(pixels, pixels, length, YYCGDataProviderReleaseDataCallback);
            if (!provider) {
                free(pixels);
                return NULL;
            }
            pixels = NULL; // held by the provider
            // Create the CGImageRef
            CGImageRef image = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, YYCGColorSpaceGetDeviceRGB(), bitmapInfo, provider, NULL, false, kCGRenderingIntentDefault);
            CFRelease(provider);
            if (decoded) *decoded = YES;
            return image;
        }
    #endif
        
        return NULL;
    }
    
    
    // Draw a frame onto the blend canvas according to its dispose mode
    - (void)_blendImageWithFrame:(_YYImageDecoderFrame *)frame {
        // YYImageDisposePrevious: this frame's contribution is reverted afterwards, so there is nothing to draw
        if (frame.dispose == YYImageDisposePrevious) {
            // nothing
        } else if (frame.dispose == YYImageDisposeBackground) {
            // YYImageDisposeBackground: the frame's region ends up cleared, so just clear it on the blend canvas
            CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
        } else { // no dispose
            // YYImageDisposeNone: look at the blend mode
            if (frame.blend == YYImageBlendOver) { // alpha-composited blending
                // Get the frame's image
                CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
                if (unblendImage) {
                    // Draw it onto the canvas
                    CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
                    CFRelease(unblendImage);
                }
            } else {
                // Clear the frame's region of the canvas first
                CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
                // Get the frame's image
                CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
                if (unblendImage) {
                    // Draw it onto the canvas
                    CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
                    CFRelease(unblendImage);
                }
            }
        }
    }
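
    One small helper worth noting from the WebP path above: `YYImageByteAlign` rounds `bytesPerRow` up to the next multiple of 32 bytes so each bitmap row stays aligned. A sketch of the likely shape of such a helper (an assumption, not the verified source):

    // Round `size` up to the nearest multiple of `alignment` (e.g. 32-byte-aligned rows).
    static inline size_t MyByteAlign(size_t size, size_t alignment) {
        return ((size + (alignment - 1)) / alignment) * alignment;
    }
    
    // Example: a 30-pixel-wide BGRA row needs 30 * 4 = 120 bytes, which aligns up to 128.
    // MyByteAlign(120, 32) == 128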
    
