Add an SDWebImageCoderDecodeScaleFactor option to specify the scale factor used by the decoder. The scale information is not stored in the image data itself; it is derived from the image file name and metadata.

DreamPiggy 2018-04-07 23:21:16 +08:00
parent 01e75cfa7c
commit fea3a56d71
6 changed files with 89 additions and 37 deletions
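
For context, a caller passes the new option through the coder options dictionary. A hypothetical usage sketch, not part of this commit, assuming the 5.x-dev coders manager still exposes sharedInstance and a decodedImageWithData:options: entry point:

    // Request a 2x-scaled decode (hypothetical call site).
    NSData *data = [NSData dataWithContentsOfFile:@"/path/to/image@2x.png"];
    SDWebImageCoderOptions *options = @{SDWebImageCoderDecodeScaleFactor : @2.0};
    UIImage *image = [[SDWebImageCodersManager sharedInstance] decodedImageWithData:data options:options];
    // Each coder clamps values below 1.0 to 1.0 (see the MAX(1.0, scale) blocks below).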

View File

@@ -95,11 +95,15 @@ const CFStringRef kCGImagePropertyAPNGUnclampedDelayTime = (__bridge CFStringRef
         return nil;
     }
     size_t count = CGImageSourceGetCount(source);
+    CGFloat scale = 1.0;
+    if ([options valueForKey:SDWebImageCoderDecodeScaleFactor]) {
+        scale = [[options valueForKey:SDWebImageCoderDecodeScaleFactor] doubleValue];
+        scale = MAX(1.0, scale);
+    }
     UIImage *animatedImage;
     if (count <= 1) {
-        animatedImage = [[UIImage alloc] initWithData:data];
+        animatedImage = [[UIImage alloc] initWithData:data scale:scale];
     } else {
         NSMutableArray<SDWebImageFrame *> *frames = [NSMutableArray array];
@@ -110,7 +114,7 @@ const CFStringRef kCGImagePropertyAPNGUnclampedDelayTime = (__bridge CFStringRef
             }
             float duration = [self sd_frameDurationAtIndex:i source:source];
-            UIImage *image = [[UIImage alloc] initWithCGImage:imageRef];
+            UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
             CGImageRelease(imageRef);
             SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:image duration:duration];

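For reference, UIKit treats scale as the pixel-to-point ratio, which is why the decoder above threads it into every UIImage initializer. A quick illustration of the effect (plain UIKit behavior, not part of this diff):

    NSData *data = ...; // bytes of a 200x200-pixel image
    UIImage *image = [[UIImage alloc] initWithData:data scale:2.0];
    // image.scale == 2.0; image.size == {100, 100} in points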
View File

@@ -17,6 +17,10 @@ typedef NSDictionary<SDWebImageCoderOption, id> SDWebImageCoderOptions;
  A Boolean value indicating whether to decode only the first frame of an animated image. (NSNumber)
  */
 FOUNDATION_EXPORT SDWebImageCoderOption _Nonnull const SDWebImageCoderDecodeFirstFrameOnly;
+/**
+ A CGFloat value greater than or equal to 1.0 that specifies the image scale factor for decoding. If not provided, 1.0 is used. (NSNumber)
+ */
+FOUNDATION_EXPORT SDWebImageCoderOption _Nonnull const SDWebImageCoderDecodeScaleFactor;
 /**
  A double value between 0.0 and 1.0 indicating the compression quality to use when producing the image data. If not provided, 1.0 is used. (NSNumber)
  */

View File

@@ -9,4 +9,5 @@
 #import "SDWebImageCoder.h"
 SDWebImageCoderOption const SDWebImageCoderDecodeFirstFrameOnly = @"decodeFirstFrameOnly";
+SDWebImageCoderOption const SDWebImageCoderDecodeScaleFactor = @"decodeScaleFactor";
 SDWebImageCoderOption const SDWebImageCoderEncodeCompressionQuality = @"encodeCompressionQuality";

View File

@@ -85,7 +85,11 @@
         return nil;
     }
     size_t count = CGImageSourceGetCount(source);
+    CGFloat scale = 1.0;
+    if ([options valueForKey:SDWebImageCoderDecodeScaleFactor]) {
+        scale = [[options valueForKey:SDWebImageCoderDecodeScaleFactor] doubleValue];
+        scale = MAX(1.0, scale);
+    }
     UIImage *animatedImage;
     BOOL decodeFirstFrame = [options[SDWebImageCoderDecodeFirstFrameOnly] boolValue];
@@ -101,7 +105,7 @@
             }
             float duration = [self sd_frameDurationAtIndex:i source:source];
-            UIImage *image = [[UIImage alloc] initWithCGImage:imageRef];
+            UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
             CGImageRelease(imageRef);
             SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:image duration:duration];

View File

@@ -82,11 +82,16 @@
         return nil;
     }
-    UIImage *image = [[UIImage alloc] initWithData:data];
+#if SD_MAC
+    UIImage *image = [[UIImage alloc] initWithData:data];
+    return image;
+#else
+    CGFloat scale = 1.0;
+    if ([options valueForKey:SDWebImageCoderDecodeScaleFactor]) {
+        scale = [[options valueForKey:SDWebImageCoderDecodeScaleFactor] doubleValue];
+        scale = MAX(1.0, scale);
+    }
+    UIImage *image = [[UIImage alloc] initWithData:data scale:scale];
     if (!image) {
         return nil;
     }

View File

@@ -114,9 +114,25 @@ dispatch_semaphore_signal(self->_lock);
     uint32_t flags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
     BOOL hasAnimation = flags & ANIMATION_FLAG;
     BOOL decodeFirstFrame = [[options valueForKey:SDWebImageCoderDecodeFirstFrameOnly] boolValue];
+#if SD_UIKIT || SD_WATCH
+    CGFloat scale = 1.0;
+    if ([options valueForKey:SDWebImageCoderDecodeScaleFactor]) {
+        scale = [[options valueForKey:SDWebImageCoderDecodeScaleFactor] doubleValue];
+        scale = MAX(1.0, scale);
+    }
+#endif
     if (!hasAnimation) {
         // for static single webp image
-        UIImage *staticImage = [self sd_rawWebpImageWithData:webpData];
+        CGImageRef imageRef = [self sd_createWebpImageWithData:webpData];
+        if (!imageRef) {
+            return nil;
+        }
+#if SD_UIKIT || SD_WATCH
+        UIImage *staticImage = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
+#else
+        UIImage *staticImage = [[UIImage alloc] initWithCGImage:imageRef size:NSZeroSize];
+#endif
+        CGImageRelease(imageRef);
         WebPDemuxDelete(demuxer);
         return staticImage;
     }
@@ -132,7 +148,13 @@ dispatch_semaphore_signal(self->_lock);
         if (decodeFirstFrame) {
             // first frame for animated webp image
-            UIImage *firstFrameImage = [self sd_rawWebpImageWithData:iter.fragment];
+            CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment];
+#if SD_UIKIT || SD_WATCH
+            UIImage *firstFrameImage = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
+#else
+            UIImage *firstFrameImage = [[UIImage alloc] initWithCGImage:imageRef size:NSZeroSize];
+#endif
+            CGImageRelease(imageRef);
             WebPDemuxReleaseIterator(&iter);
             WebPDemuxDelete(demuxer);
             return firstFrameImage;
@@ -154,10 +176,16 @@ dispatch_semaphore_signal(self->_lock);
     do {
         @autoreleasepool {
-            UIImage *image = [self sd_drawnWebpImageWithCanvas:canvas iterator:iter];
-            if (!image) {
+            CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:canvas iterator:iter];
+            if (!imageRef) {
                 continue;
             }
+#if SD_UIKIT || SD_WATCH
+            UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
+#else
+            UIImage *image = [[UIImage alloc] initWithCGImage:imageRef size:NSZeroSize];
+#endif
+            CGImageRelease(imageRef);
             NSTimeInterval duration = [self sd_frameDurationWithIterator:iter];
             SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:image duration:duration];
@@ -271,8 +299,8 @@ dispatch_semaphore_signal(self->_lock);
     if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
         CGContextClearRect(canvas, imageRect);
     } else {
-        UIImage *image = [self sd_rawWebpImageWithData:iter.fragment];
-        if (!image) {
+        CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment];
+        if (!imageRef) {
             return;
         }
         BOOL shouldBlend = iter.blend_method == WEBP_MUX_BLEND;
@@ -280,13 +308,14 @@ dispatch_semaphore_signal(self->_lock);
         if (!shouldBlend) {
             CGContextClearRect(canvas, imageRect);
         }
-        CGContextDrawImage(canvas, imageRect, image.CGImage);
+        CGContextDrawImage(canvas, imageRect, imageRef);
+        CGImageRelease(imageRef);
     }
 }
-- (nullable UIImage *)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter {
-    UIImage *image = [self sd_rawWebpImageWithData:iter.fragment];
-    if (!image) {
+- (nullable CGImageRef)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter CF_RETURNS_RETAINED {
+    CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment];
+    if (!imageRef) {
         return nil;
     }
@@ -300,25 +329,19 @@ dispatch_semaphore_signal(self->_lock);
     if (!shouldBlend) {
         CGContextClearRect(canvas, imageRect);
     }
-    CGContextDrawImage(canvas, imageRect, image.CGImage);
+    CGContextDrawImage(canvas, imageRect, imageRef);
     CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
-#if SD_UIKIT || SD_WATCH
-    image = [[UIImage alloc] initWithCGImage:newImageRef];
-#elif SD_MAC
-    image = [[UIImage alloc] initWithCGImage:newImageRef size:NSZeroSize];
-#endif
-    CGImageRelease(newImageRef);
+    CGImageRelease(imageRef);
     if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
         CGContextClearRect(canvas, imageRect);
     }
-    return image;
+    return newImageRef;
 }
-- (nullable UIImage *)sd_rawWebpImageWithData:(WebPData)webpData {
+- (nullable CGImageRef)sd_createWebpImageWithData:(WebPData)webpData CF_RETURNS_RETAINED {
     WebPDecoderConfig config;
     if (!WebPInitDecoderConfig(&config)) {
         return nil;
@@ -361,14 +384,7 @@ dispatch_semaphore_signal(self->_lock);
     CGDataProviderRelease(provider);
-#if SD_UIKIT || SD_WATCH
-    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef];
-#else
-    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef size:NSZeroSize];
-#endif
-    CGImageRelease(imageRef);
-    return image;
+    return imageRef;
 }
 - (NSTimeInterval)sd_frameDurationWithIterator:(WebPIterator)iter {
@@ -639,7 +655,16 @@ static void FreeImageData(void *info, const void *data, size_t size) {
             WebPDemuxReleaseIterator(&iter);
             return nil;
         }
-        image = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter];
+        CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter];
+        if (!imageRef) {
+            return nil;
+        }
+#if SD_UIKIT || SD_WATCH
+        image = [[UIImage alloc] initWithCGImage:imageRef];
+#else
+        image = [[UIImage alloc] initWithCGImage:imageRef size:NSZeroSize];
+#endif
+        CGImageRelease(imageRef);
     } else {
         // Else, this can happen when one image is set on different imageViews or one loop ends, so we should clear the shared canvas.
        if (_currentBlendIndex != NSNotFound) {
@@ -660,7 +685,16 @@ static void FreeImageData(void *info, const void *data, size_t size) {
             if ((size_t)iter.frame_num == endIndex) {
                 [self sd_blendWebpImageWithCanvas:_canvas iterator:iter];
             } else {
-                image = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter];
+                CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter];
+                if (!imageRef) {
+                    return nil;
+                }
+#if SD_UIKIT || SD_WATCH
+                image = [[UIImage alloc] initWithCGImage:imageRef];
+#else
+                image = [[UIImage alloc] initWithCGImage:imageRef size:NSZeroSize];
+#endif
+                CGImageRelease(imageRef);
             }
         }
     } while ((size_t)iter.frame_num < (endIndex + 1) && WebPDemuxNextFrame(&iter));
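
The WebP refactor above also switches the internal helpers from returning UIImage objects to returning retained CGImageRefs, so each call site can wrap the bitmap in a platform-appropriate image (and apply the scale factor) exactly once. A minimal sketch of the resulting ownership contract, using names from this diff:

    // CF_RETURNS_RETAINED documents that the method follows the Core Foundation
    // "create rule": the returned CGImageRef has a +1 retain count and the
    // caller is responsible for releasing it.
    CGImageRef imageRef = [self sd_createWebpImageWithData:webpData]; // +1
    if (imageRef) {
        UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
        CGImageRelease(imageRef); // balance the create; UIImage retains the bitmap itself
        // use image...
    }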