Merge pull request #2266 from dreampiggy/feature_scale_factor
Feature scale factor
commit 02dfed984e
@@ -8,16 +8,50 @@
#import "SDWebImageCompat.h"

// This category is provided to easily write cross-platform code. For common usage, see `UIImage+WebCache`.
// This category is provided to easily write cross-platform (AppKit/UIKit) code. For common usage, see `UIImage+WebCache`.

#if SD_MAC

@interface NSImage (Additions)

/**
 The underlying Core Graphics image object. This will actually call `CGImageForProposedRect` with the image size.
 */
@property (nonatomic, readonly, nullable) CGImageRef CGImage;
@property (nonatomic, readonly, nullable) NSArray<NSImage *> *images;
/**
 The scale factor of the image. This will actually use the image size and its `CGImage`'s pixel size to calculate the scale factor. Should be greater than or equal to 1.0.
 */
@property (nonatomic, readonly) CGFloat scale;
@property (nonatomic, readonly, nullable) NSBitmapImageRep *bitmapImageRep;

// These are convenience methods to make AppKit's `NSImage` match UIKit's `UIImage` behavior. The scale factor should be greater than or equal to 1.0.

/**
 Returns an image object with the scale factor. The representation is created from the Core Graphics image object.
 @note The difference between this and `initWithCGImage:size:` is that `initWithCGImage:size:` will create a `NSCGImageSnapshotRep` instance rather than a `NSBitmapImageRep` instance, and it will always use `backingScaleFactor` as the scale factor.

 @param cgImage A Core Graphics image object
 @param scale The image scale factor
 @return The image object
 */
- (nonnull instancetype)initWithCGImage:(nonnull CGImageRef)cgImage scale:(CGFloat)scale;

/**
 Returns an image object with the scale factor. The representation is created from the image data.
 @note The difference between this and `initWithData:` is that `initWithData:` will always use `backingScaleFactor` as the scale factor.

 @param data The image data
 @param scale The image scale factor
 @return The image object
 */
- (nullable instancetype)initWithData:(nonnull NSData *)data scale:(CGFloat)scale;

@end

@interface NSBitmapImageRep (Additions)

// These methods do the same as the `NSImage` methods above, but for `NSBitmapImageRep`.
- (nonnull instancetype)initWithCGImage:(nonnull CGImageRef)cgImage scale:(CGFloat)scale;
- (nullable instancetype)initWithData:(nonnull NSData *)data scale:(CGFloat)scale;

@end
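A quick illustration of the new AppKit additions (my own sketch, not part of the diff; the file path is hypothetical):

#import "NSImage+Additions.h"

// Decode a 200x200 pixel PNG as a Retina (2x) image.
NSData *data = [NSData dataWithContentsOfFile:@"/tmp/photo@2x.png"];
NSImage *image = [[NSImage alloc] initWithData:data scale:2];
// Point size is pixel size divided by scale: {100, 100} at scale 2.
NSLog(@"size = %@, scale = %.0f", NSStringFromSize(image.size), image.scale);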
@@ -18,28 +18,72 @@
    return cgImage;
}

- (NSArray<NSImage *> *)images {
    return nil;
}

- (CGFloat)scale {
    CGFloat scale = 1;
    CGFloat width = self.size.width;
    if (width > 0) {
        // Use CGImage to get pixel width, NSImageRep.pixelsWide always double on Retina screen
        // Use CGImage to get pixel width, NSImageRep.pixelsWide may be double on Retina screen
        NSUInteger pixelWidth = CGImageGetWidth(self.CGImage);
        scale = pixelWidth / width;
    }
    return scale;
}

- (NSBitmapImageRep *)bitmapImageRep {
    NSRect imageRect = NSMakeRect(0, 0, self.size.width, self.size.height);
    NSImageRep *imageRep = [self bestRepresentationForRect:imageRect context:nil hints:nil];
    if ([imageRep isKindOfClass:[NSBitmapImageRep class]]) {
        return (NSBitmapImageRep *)imageRep;
    }
    return nil;
}

- (instancetype)initWithCGImage:(CGImageRef)cgImage scale:(CGFloat)scale {
    if (scale < 1) {
        scale = 1;
    }
    NSBitmapImageRep *imageRep = [[NSBitmapImageRep alloc] initWithCGImage:cgImage scale:scale];
    NSSize size = NSMakeSize(imageRep.pixelsWide / scale, imageRep.pixelsHigh / scale);
    self = [self initWithSize:size];
    if (self) {
        [self addRepresentation:imageRep];
    }
    return self;
}

- (instancetype)initWithData:(NSData *)data scale:(CGFloat)scale {
    if (scale < 1) {
        scale = 1;
    }
    NSBitmapImageRep *imageRep = [[NSBitmapImageRep alloc] initWithData:data scale:scale];
    if (!imageRep) {
        return nil;
    }
    NSSize size = NSMakeSize(imageRep.pixelsWide / scale, imageRep.pixelsHigh / scale);
    self = [self initWithSize:size];
    if (self) {
        [self addRepresentation:imageRep];
    }
    return self;
}

@end

@implementation NSBitmapImageRep (Additions)

- (instancetype)initWithCGImage:(CGImageRef)cgImage scale:(CGFloat)scale {
    self = [self initWithCGImage:cgImage];
    if (self) {
        if (scale < 1) {
            scale = 1;
        }
        NSSize size = NSMakeSize(self.pixelsWide / scale, self.pixelsHigh / scale);
        self.size = size;
    }
    return self;
}

- (instancetype)initWithData:(NSData *)data scale:(CGFloat)scale {
    self = [self initWithData:data];
    if (self) {
        if (scale < 1) {
            scale = 1;
        }
        NSSize size = NSMakeSize(self.pixelsWide / scale, self.pixelsHigh / scale);
        self.size = size;
    }
    return self;
}

@end
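The `scale` getter above simply divides the backing pixel width by the point width. The same arithmetic spelled out on its own (my sketch, outside the PR):

// A 2x representation: 200 pixels wide, reported as 100 points wide.
CGFloat pointWidth = 100;
NSUInteger pixelWidth = 200;
CGFloat scale = (pointWidth > 0) ? (pixelWidth / pointWidth) : 1;
NSCAssert(scale == 2, @"a Retina representation should report scale 2");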
@@ -310,7 +310,7 @@ static NSArray *SDBundlePreferredScales() {
        return nil;
    }
#if SD_MAC
    self = [super initWithCGImage:image.CGImage size:NSZeroSize];
    self = [super initWithCGImage:image.CGImage scale:scale];
#else
    self = [super initWithCGImage:image.CGImage scale:scale orientation:image.imageOrientation];
#endif

@@ -353,7 +353,10 @@ static NSArray *SDBundlePreferredScales() {
    NSNumber *scale = [aDecoder decodeObjectOfClass:[NSNumber class] forKey:NSStringFromSelector(@selector(scale))];
    NSData *animatedImageData = [aDecoder decodeObjectOfClass:[NSData class] forKey:NSStringFromSelector(@selector(animatedImageData))];
    if (animatedImageData) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wobjc-designated-initializers"
        return [self initWithData:animatedImageData scale:scale.doubleValue];
#pragma clang diagnostic pop
    } else {
        return [super initWithCoder:aDecoder];
    }
@@ -485,12 +485,13 @@ FOUNDATION_STATIC_INLINE NSUInteger SDCacheCostForImage(UIImage *image) {
    if (data) {
        UIImage *image;
        BOOL decodeFirstFrame = options & SDImageCacheDecodeFirstFrameOnly;
        NSNumber *scaleValue = [context valueForKey:SDWebImageContextImageScaleFactor];
        CGFloat scale = scaleValue.doubleValue >= 1 ? scaleValue.doubleValue : SDImageScaleFactorForKey(key);
        if (!decodeFirstFrame) {
            // check whether we should use `SDAnimatedImage`
            if ([context valueForKey:SDWebImageContextAnimatedImageClass]) {
                Class animatedImageClass = [context valueForKey:SDWebImageContextAnimatedImageClass];
                if ([animatedImageClass isSubclassOfClass:[UIImage class]] && [animatedImageClass conformsToProtocol:@protocol(SDAnimatedImage)]) {
                    CGFloat scale = SDImageScaleForKey(key);
                    image = [[animatedImageClass alloc] initWithData:data scale:scale];
                    if (options & SDImageCachePreloadAllFrames && [image respondsToSelector:@selector(preloadAllFrames)]) {
                        [((id<SDAnimatedImage>)image) preloadAllFrames];

@@ -499,8 +500,7 @@ FOUNDATION_STATIC_INLINE NSUInteger SDCacheCostForImage(UIImage *image) {
            }
        }
        if (!image) {
            image = [[SDWebImageCodersManager sharedManager] decodedImageWithData:data options:@{SDWebImageCoderDecodeFirstFrameOnly : @(decodeFirstFrame)}];
            image = [self scaledImageForKey:key image:image];
            image = [[SDWebImageCodersManager sharedManager] decodedImageWithData:data options:@{SDWebImageCoderDecodeFirstFrameOnly : @(decodeFirstFrame), SDWebImageCoderDecodeScaleFactor : @(scale)}];
        }
        BOOL shouldDecode = YES;
        if ([image conformsToProtocol:@protocol(SDAnimatedImage)]) {

@@ -521,10 +521,6 @@ FOUNDATION_STATIC_INLINE NSUInteger SDCacheCostForImage(UIImage *image) {
    }
}

- (nullable UIImage *)scaledImageForKey:(nullable NSString *)key image:(nullable UIImage *)image {
    return SDScaledImageForKey(key, image);
}

- (nullable NSOperation *)queryCacheOperationForKey:(NSString *)key done:(SDCacheQueryCompletedBlock)doneBlock {
    return [self queryCacheOperationForKey:key options:0 done:doneBlock];
}
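The cache decode path above prefers `SDWebImageContextImageScaleFactor` over the key-derived scale. A minimal sketch of supplying it (mine; any API that forwards a context to the cache or decoder will do):

// Force 3x decoding regardless of the cache key suffix.
SDWebImageMutableContext *context = [NSMutableDictionary dictionary];
context[SDWebImageContextImageScaleFactor] = @(3.0);
// Forwarded to the code above, scaleValue.doubleValue >= 1 holds,
// so SDImageScaleFactorForKey(key) is never consulted.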
@@ -82,10 +82,19 @@ const CFStringRef kCGImagePropertyAPNGUnclampedDelayTime = (__bridge CFStringRef
    if (!data) {
        return nil;
    }
    CGFloat scale = 1;
    if ([options valueForKey:SDWebImageCoderDecodeScaleFactor]) {
        scale = [[options valueForKey:SDWebImageCoderDecodeScaleFactor] doubleValue];
        if (scale < 1) {
            scale = 1;
        }
    }

#if SD_MAC
    SDAnimatedImageRep *imageRep = [[SDAnimatedImageRep alloc] initWithData:data];
    NSImage *animatedImage = [[NSImage alloc] initWithSize:imageRep.size];
    NSSize size = NSMakeSize(imageRep.pixelsWide / scale, imageRep.pixelsHigh / scale);
    imageRep.size = size;
    NSImage *animatedImage = [[NSImage alloc] initWithSize:size];
    [animatedImage addRepresentation:imageRep];
    return animatedImage;
#else

@@ -95,11 +104,10 @@ const CFStringRef kCGImagePropertyAPNGUnclampedDelayTime = (__bridge CFStringRef
        return nil;
    }
    size_t count = CGImageSourceGetCount(source);

    UIImage *animatedImage;

    if (count <= 1) {
        animatedImage = [[UIImage alloc] initWithData:data];
        animatedImage = [[UIImage alloc] initWithData:data scale:scale];
    } else {
        NSMutableArray<SDWebImageFrame *> *frames = [NSMutableArray array];

@@ -110,7 +118,7 @@ const CFStringRef kCGImagePropertyAPNGUnclampedDelayTime = (__bridge CFStringRef
            }

            float duration = [self sd_frameDurationAtIndex:i source:source];
            UIImage *image = [[UIImage alloc] initWithCGImage:imageRef];
            UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
            CGImageRelease(imageRef);

            SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:image duration:duration];

@@ -271,10 +279,17 @@ const CFStringRef kCGImagePropertyAPNGUnclampedDelayTime = (__bridge CFStringRef
    CGImageRef partialImageRef = CGImageSourceCreateImageAtIndex(_imageSource, 0, NULL);

    if (partialImageRef) {
        CGFloat scale = 1;
        if ([options valueForKey:SDWebImageCoderDecodeScaleFactor]) {
            scale = [[options valueForKey:SDWebImageCoderDecodeScaleFactor] doubleValue];
            if (scale < 1) {
                scale = 1;
            }
        }
#if SD_UIKIT || SD_WATCH
        image = [[UIImage alloc] initWithCGImage:partialImageRef];
        image = [[UIImage alloc] initWithCGImage:partialImageRef scale:scale orientation:UIImageOrientationUp];
#elif SD_MAC
        image = [[UIImage alloc] initWithCGImage:partialImageRef size:NSZeroSize];
        image = [[UIImage alloc] initWithCGImage:partialImageRef scale:scale];
#endif
        CGImageRelease(partialImageRef);
    }

@@ -369,7 +384,7 @@ const CFStringRef kCGImagePropertyAPNGUnclampedDelayTime = (__bridge CFStringRef
        CGImageRelease(imageRef);
    }
#if SD_MAC
    UIImage *image = [[UIImage alloc] initWithCGImage:newImageRef size:NSZeroSize];
    UIImage *image = [[UIImage alloc] initWithCGImage:newImageRef scale:1];
#else
    UIImage *image = [UIImage imageWithCGImage:newImageRef];
#endif
@@ -17,6 +17,10 @@ typedef NSDictionary<SDWebImageCoderOption, id> SDWebImageCoderOptions;
 A Boolean value indicating whether to decode only the first frame of an animated image during decoding. (NSNumber)
 */
FOUNDATION_EXPORT SDWebImageCoderOption _Nonnull const SDWebImageCoderDecodeFirstFrameOnly;
/**
 A CGFloat value which is greater than or equal to 1.0. This value specifies the image scale factor for decoding. If not provided, 1.0 is used. (NSNumber)
 */
FOUNDATION_EXPORT SDWebImageCoderOption _Nonnull const SDWebImageCoderDecodeScaleFactor;
/**
 A double value between 0.0 and 1.0 indicating the compression quality used to produce the encoded image data. If not provided, 1.0 is used. (NSNumber)
 */
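Decoding directly through the coders manager with the new option (my sketch; the data path is hypothetical):

NSData *imageData = [NSData dataWithContentsOfFile:@"/tmp/photo.png"];
UIImage *decoded = [[SDWebImageCodersManager sharedManager]
    decodedImageWithData:imageData
                 options:@{SDWebImageCoderDecodeFirstFrameOnly : @(NO),
                           SDWebImageCoderDecodeScaleFactor : @(2.0)}];
// decoded.scale == 2 on both UIKit and AppKit (via the NSImage additions).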
@@ -9,4 +9,5 @@
#import "SDWebImageCoder.h"

SDWebImageCoderOption const SDWebImageCoderDecodeFirstFrameOnly = @"decodeFirstFrameOnly";
SDWebImageCoderOption const SDWebImageCoderDecodeScaleFactor = @"decodeScaleFactor";
SDWebImageCoderOption const SDWebImageCoderEncodeCompressionQuality = @"encodeCompressionQuality";
@@ -103,8 +103,14 @@ static const CGFloat kDestSeemOverlap = 2.0f;   // the numbers of pixels to over
        return nil;
    }
    CFRelease(imageDestination);
    CGFloat scale = frames.firstObject.image.scale;
    if (scale < 1) {
        scale = 1;
    }
    SDAnimatedImageRep *imageRep = [[SDAnimatedImageRep alloc] initWithData:imageData];
    animatedImage = [[NSImage alloc] initWithSize:imageRep.size];
    NSSize size = NSMakeSize(imageRep.pixelsWide / scale, imageRep.pixelsHigh / scale);
    imageRep.size = size;
    animatedImage = [[NSImage alloc] initWithSize:size];
    [animatedImage addRepresentation:imageRep];
#endif

@@ -157,27 +163,27 @@ static const CGFloat kDestSeemOverlap = 2.0f;   // the numbers of pixels to over

#else

    NSBitmapImageRep *bitmapRep;
    for (NSImageRep *imageRep in animatedImage.representations) {
        if ([imageRep isKindOfClass:[NSBitmapImageRep class]]) {
            bitmapRep = (NSBitmapImageRep *)imageRep;
            break;
        }
    NSRect imageRect = NSMakeRect(0, 0, animatedImage.size.width, animatedImage.size.height);
    NSImageRep *imageRep = [animatedImage bestRepresentationForRect:imageRect context:nil hints:nil];
    NSBitmapImageRep *bitmapImageRep;
    if ([imageRep isKindOfClass:[NSBitmapImageRep class]]) {
        bitmapImageRep = (NSBitmapImageRep *)imageRep;
    }
    if (bitmapRep) {
        frameCount = [[bitmapRep valueForProperty:NSImageFrameCount] unsignedIntegerValue];
    if (!bitmapImageRep) {
        return nil;
    }

    frameCount = [[bitmapImageRep valueForProperty:NSImageFrameCount] unsignedIntegerValue];
    if (frameCount == 0) {
        return nil;
    }
    CGFloat scale = animatedImage.scale;

    for (size_t i = 0; i < frameCount; i++) {
        @autoreleasepool {
            // NSBitmapImageRep needs to manually change frames. "Good taste" API
            [bitmapRep setProperty:NSImageCurrentFrame withValue:@(i)];
            float frameDuration = [[bitmapRep valueForProperty:NSImageCurrentFrameDuration] floatValue];
            NSImage *frameImage = [[NSImage alloc] initWithCGImage:bitmapRep.CGImage size:NSZeroSize];
            [bitmapImageRep setProperty:NSImageCurrentFrame withValue:@(i)];
            float frameDuration = [[bitmapImageRep valueForProperty:NSImageCurrentFrameDuration] floatValue];
            NSImage *frameImage = [[NSImage alloc] initWithCGImage:bitmapImageRep.CGImage scale:scale];
            SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:frameImage duration:frameDuration];
            [frames addObject:frame];
        }
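The AppKit branch above walks GIF frames by mutating the rep's current-frame property. The same pattern in isolation (my sketch; plain AppKit, with a hypothetical data source):

NSData *gifData = [NSData dataWithContentsOfFile:@"/tmp/animated.gif"];
NSBitmapImageRep *rep = [NSBitmapImageRep imageRepWithData:gifData];
NSUInteger frameCount = [[rep valueForProperty:NSImageFrameCount] unsignedIntegerValue];
for (NSUInteger i = 0; i < frameCount; i++) {
    [rep setProperty:NSImageCurrentFrame withValue:@(i)];  // select frame i
    NSTimeInterval duration = [[rep valueForProperty:NSImageCurrentFrameDuration] doubleValue];
    CGImageRef frameRef = rep.CGImage;                     // CGImage of the selected frame
    NSLog(@"frame %lu: %.3fs, %zu px wide", (unsigned long)i, duration, CGImageGetWidth(frameRef));
}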
@@ -15,6 +15,7 @@
#endif
#import "NSImage+Additions.h"
#import "UIImage+WebCache.h"
#import "SDWebImageDefine.h"

@interface SDWebImageCodersManager ()

@@ -99,7 +100,6 @@
    if (!data) {
        return nil;
    }
    BOOL decodeFirstFrame = [[options valueForKey:SDWebImageCoderDecodeFirstFrameOnly] boolValue];
    UIImage *image;
    for (id<SDWebImageCoder> coder in self.coders) {
        if ([coder canDecodeFromData:data]) {

@@ -107,9 +107,6 @@
            break;
        }
    }
    if (decodeFirstFrame && image.images.count > 0) {
        image = image.images.firstObject;
    }

    return image;
}
@@ -16,16 +16,19 @@ typedef NSMutableDictionary<SDWebImageContextOption, id> SDWebImageMutableContex
#pragma mark - Image scale

/**
 Return the image scale from the specify key, supports file name and url key
 Return the image scale factor for the specified key; supports file name and URL keys.
 This is the built-in way to check the scale factor when we have no context about it, because the scale factor is not stored in the image data (it typically comes from the file name).
 However, you can also provide a custom scale factor; see `SDWebImageContextImageScaleFactor`.

 @param key The image cache key
 @return The scale factor for the image
 */
FOUNDATION_EXPORT CGFloat SDImageScaleForKey(NSString * _Nullable key);
FOUNDATION_EXPORT CGFloat SDImageScaleFactorForKey(NSString * _Nullable key);

/**
 Scale the image with the scale factor from the specify key. If no need to scale, return the original image
 This only works for `UIImage`(UIKit) or `NSImage`(AppKit).
 Scale the image with the scale factor for the specified key. If no scaling is needed, return the original image.
 This works for `UIImage` (UIKit) or `NSImage` (AppKit). This function also preserves the associated values from `UIImage+WebCache`.
 @note This is actually a convenience function, which first calls `SDImageScaleFactorForKey` and then `SDScaledImageForScaleFactor`; kept for backward compatibility.

 @param key The image cache key
 @param image The image

@@ -33,6 +36,16 @@ FOUNDATION_EXPORT CGFloat SDImageScaleForKey(NSString * _Nullable key);
 */
FOUNDATION_EXPORT UIImage * _Nullable SDScaledImageForKey(NSString * _Nullable key, UIImage * _Nullable image);

/**
 Scale the image with the scale factor. If no scaling is needed, return the original image.
 This works for `UIImage` (UIKit) or `NSImage` (AppKit). This function also preserves the associated values from `UIImage+WebCache`.

 @param scale The image scale factor
 @param image The image
 @return The scaled image
 */
FOUNDATION_EXPORT UIImage * _Nullable SDScaledImageForScaleFactor(CGFloat scale, UIImage * _Nullable image);

#pragma mark - WebCache Options

typedef NS_OPTIONS(NSUInteger, SDWebImageOptions) {
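The key-based fallback parses the conventional @2x/@3x suffix (per `SDImageScaleFactors()` returning @[@2, @3] in the implementation below). For illustration (my own expected values, not a test from the PR):

CGFloat s1 = SDImageScaleFactorForKey(@"https://example.com/avatar@2x.png"); // 2
CGFloat s2 = SDImageScaleFactorForKey(@"avatar.png");                        // 1, no suffix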
@@ -176,6 +189,11 @@ FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextCustom
 */
FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextCustomTransformer;

/**
 A CGFloat value which specifies the image scale factor. The number should be greater than or equal to 1.0. If not provided, or if the number is invalid, we will use the cache key to determine the scale factor. (NSNumber)
 */
FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextImageScaleFactor;

/**
 A Class object whose instances are a `UIImage`/`NSImage` subclass conforming to the `SDAnimatedImage` protocol. We will call `initWithData:scale:` to create the instance (or `initWithAnimatedCoder:scale:` when using progressive download). If instance creation fails, fall back to a normal `UIImage`/`NSImage`.
 This can be used to improve animated image rendering performance (especially memory usage for big animated images) with `SDAnimatedImageView`. (Class)
@@ -16,7 +16,7 @@ static inline NSArray<NSNumber *> * _Nonnull SDImageScaleFactors() {
    return @[@2, @3];
}

inline CGFloat SDImageScaleForKey(NSString * _Nullable key) {
inline CGFloat SDImageScaleFactorForKey(NSString * _Nullable key) {
    CGFloat scale = 1;
    if (!key) {
        return scale;

@@ -55,46 +55,63 @@ inline CGFloat SDImageScaleForKey(NSString * _Nullable key) {
    return scale;
}

inline UIImage *SDScaledImageForKey(NSString * _Nullable key, UIImage * _Nullable image) {
    if (!image) {
        return nil;
    }

    CGFloat scale = SDImageScaleForKey(key);
    if (scale > 1) {
        UIImage *scaledImage;
        if (image.sd_isAnimated) {
            UIImage *animatedImage;
#if SD_UIKIT || SD_WATCH
            // `UIAnimatedImage` images share the same size and scale.
            NSMutableArray<UIImage *> *scaledImages = [NSMutableArray array];

            for (UIImage *tempImage in image.images) {
                UIImage *tempScaledImage = [[UIImage alloc] initWithCGImage:tempImage.CGImage scale:scale orientation:tempImage.imageOrientation];
                [scaledImages addObject:tempScaledImage];
            }

            animatedImage = [UIImage animatedImageWithImages:scaledImages duration:image.duration];
            animatedImage.sd_imageLoopCount = image.sd_imageLoopCount;
#else
            // Animated GIF for `NSImage` need to grab `NSBitmapImageRep`
            NSSize size = NSMakeSize(image.size.width / scale, image.size.height / scale);
            animatedImage = [[NSImage alloc] initWithSize:size];
            NSBitmapImageRep *bitmapImageRep = image.bitmapImageRep;
            [animatedImage addRepresentation:bitmapImageRep];
#endif
            scaledImage = animatedImage;
        } else {
#if SD_UIKIT || SD_WATCH
            scaledImage = [[UIImage alloc] initWithCGImage:image.CGImage scale:scale orientation:image.imageOrientation];
#else
            scaledImage = [[NSImage alloc] initWithCGImage:image.CGImage size:NSZeroSize];
#endif
        }
        return scaledImage;
    }
    return image;
}

inline UIImage * _Nullable SDScaledImageForKey(NSString * _Nullable key, UIImage * _Nullable image) {
    if (!image) {
        return nil;
    }
    CGFloat scale = SDImageScaleFactorForKey(key);
    return SDScaledImageForScaleFactor(scale, image);
}

inline UIImage * _Nullable SDScaledImageForScaleFactor(CGFloat scale, UIImage * _Nullable image) {
    if (!image) {
        return nil;
    }
    if (scale <= 1) {
        return image;
    }
    if (scale == image.scale) {
        return image;
    }
    UIImage *scaledImage;
    if (image.sd_isAnimated) {
        UIImage *animatedImage;
#if SD_UIKIT || SD_WATCH
        // `UIAnimatedImage` images share the same size and scale.
        NSMutableArray<UIImage *> *scaledImages = [NSMutableArray array];

        for (UIImage *tempImage in image.images) {
            UIImage *tempScaledImage = [[UIImage alloc] initWithCGImage:tempImage.CGImage scale:scale orientation:tempImage.imageOrientation];
            [scaledImages addObject:tempScaledImage];
        }

        animatedImage = [UIImage animatedImageWithImages:scaledImages duration:image.duration];
        animatedImage.sd_imageLoopCount = image.sd_imageLoopCount;
#else
        // Animated GIF for `NSImage` needs to grab `NSBitmapImageRep`;
        NSRect imageRect = NSMakeRect(0, 0, image.size.width, image.size.height);
        NSImageRep *imageRep = [image bestRepresentationForRect:imageRect context:nil hints:nil];
        NSBitmapImageRep *bitmapImageRep;
        if ([imageRep isKindOfClass:[NSBitmapImageRep class]]) {
            bitmapImageRep = (NSBitmapImageRep *)imageRep;
        }
        if (bitmapImageRep) {
            NSSize size = NSMakeSize(image.size.width / scale, image.size.height / scale);
            animatedImage = [[NSImage alloc] initWithSize:size];
            [animatedImage addRepresentation:bitmapImageRep];
        }
#endif
        scaledImage = animatedImage;
    } else {
#if SD_UIKIT || SD_WATCH
        scaledImage = [[UIImage alloc] initWithCGImage:image.CGImage scale:scale orientation:image.imageOrientation];
#else
        scaledImage = [[NSImage alloc] initWithCGImage:image.CGImage scale:scale];
#endif
    }
    scaledImage.sd_isIncremental = image.sd_isIncremental;

    return scaledImage;
}

#pragma mark - Context option
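Using the new free function directly (my sketch; `pngData` is hypothetical 1x image data):

NSData *pngData = [NSData dataWithContentsOfFile:@"/tmp/photo.png"];
UIImage *decodedImage = [[UIImage alloc] initWithData:pngData];          // scale 1
UIImage *retinaImage = SDScaledImageForScaleFactor(2.0, decodedImage);   // same pixels, half the point size
// Animation frames, sd_imageLoopCount and the sd_isIncremental flag are carried over.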
@@ -103,4 +120,5 @@ SDWebImageContextOption const SDWebImageContextSetImageOperationKey = @"setImage
SDWebImageContextOption const SDWebImageContextSetImageGroup = @"setImageGroup";
SDWebImageContextOption const SDWebImageContextCustomManager = @"customManager";
SDWebImageContextOption const SDWebImageContextCustomTransformer = @"customTransformer";
SDWebImageContextOption const SDWebImageContextImageScaleFactor = @"imageScaleFactor";
SDWebImageContextOption const SDWebImageContextAnimatedImageClass = @"animatedImageClass";
@@ -363,17 +363,20 @@ didReceiveResponse:(NSURLResponse *)response
        dispatch_async(self.coderQueue, ^{
            // check whether we should use `SDAnimatedImage`
            UIImage *image;
            if ([self.context valueForKey:SDWebImageContextAnimatedImageClass]) {
                Class animatedImageClass = [self.context valueForKey:SDWebImageContextAnimatedImageClass];
                if ([animatedImageClass isSubclassOfClass:[UIImage class]] && [animatedImageClass conformsToProtocol:@protocol(SDAnimatedImage)] && [self.progressiveCoder conformsToProtocol:@protocol(SDWebImageAnimatedCoder)]) {
                    CGFloat scale = SDImageScaleForKey(self.cacheKey);
                    image = [[animatedImageClass alloc] initWithAnimatedCoder:(id<SDWebImageAnimatedCoder>)self.progressiveCoder scale:scale];
            BOOL decodeFirstFrame = self.options & SDWebImageDownloaderDecodeFirstFrameOnly;
            NSNumber *scaleValue = [self.context valueForKey:SDWebImageContextImageScaleFactor];
            CGFloat scale = scaleValue.doubleValue >= 1 ? scaleValue.doubleValue : SDImageScaleFactorForKey(self.cacheKey);
            if (!decodeFirstFrame) {
                // check whether we should use `SDAnimatedImage`
                if ([self.context valueForKey:SDWebImageContextAnimatedImageClass]) {
                    Class animatedImageClass = [self.context valueForKey:SDWebImageContextAnimatedImageClass];
                    if ([animatedImageClass isSubclassOfClass:[UIImage class]] && [animatedImageClass conformsToProtocol:@protocol(SDAnimatedImage)] && [self.progressiveCoder conformsToProtocol:@protocol(SDWebImageAnimatedCoder)]) {
                        image = [[animatedImageClass alloc] initWithAnimatedCoder:(id<SDWebImageAnimatedCoder>)self.progressiveCoder scale:scale];
                    }
                }
            }
            if (!image) {
                BOOL decodeFirstFrame = self.options & SDWebImageDownloaderDecodeFirstFrameOnly;
                image = [self.progressiveCoder incrementalDecodedImageWithOptions:@{SDWebImageCoderDecodeFirstFrameOnly : @(decodeFirstFrame)}];
                image = [self scaledImageForKey:self.cacheKey image:image];
                image = [self.progressiveCoder incrementalDecodedImageWithOptions:@{SDWebImageCoderDecodeFirstFrameOnly : @(decodeFirstFrame), SDWebImageCoderDecodeScaleFactor : @(scale)}];
            }
            if (image) {
                BOOL shouldDecode = self.shouldDecompressImages;

@@ -454,13 +457,17 @@ didReceiveResponse:(NSURLResponse *)response
        // decode the image in coder queue
        dispatch_async(self.coderQueue, ^{
            BOOL decodeFirstFrame = self.options & SDWebImageDownloaderDecodeFirstFrameOnly;
            NSNumber *scaleValue = [self.context valueForKey:SDWebImageContextImageScaleFactor];
            CGFloat scale = scaleValue.doubleValue >= 1 ? scaleValue.doubleValue : SDImageScaleFactorForKey(self.cacheKey);
            if (scale < 1) {
                scale = 1;
            }
            UIImage *image;
            if (!decodeFirstFrame) {
                // check whether we should use `SDAnimatedImage`
                if ([self.context valueForKey:SDWebImageContextAnimatedImageClass]) {
                    Class animatedImageClass = [self.context valueForKey:SDWebImageContextAnimatedImageClass];
                    if ([animatedImageClass isSubclassOfClass:[UIImage class]] && [animatedImageClass conformsToProtocol:@protocol(SDAnimatedImage)]) {
                        CGFloat scale = SDImageScaleForKey(self.cacheKey);
                        image = [[animatedImageClass alloc] initWithData:imageData scale:scale];
                        if (self.options & SDWebImageDownloaderPreloadAllFrames && [image respondsToSelector:@selector(preloadAllFrames)]) {
                            [((id<SDAnimatedImage>)image) preloadAllFrames];

@@ -469,8 +476,7 @@ didReceiveResponse:(NSURLResponse *)response
                }
            }
            if (!image) {
                image = [[SDWebImageCodersManager sharedManager] decodedImageWithData:imageData options:@{SDWebImageCoderDecodeFirstFrameOnly : @(decodeFirstFrame)}];
                image = [self scaledImageForKey:self.cacheKey image:image];
                image = [[SDWebImageCodersManager sharedManager] decodedImageWithData:imageData options:@{SDWebImageCoderDecodeFirstFrameOnly : @(decodeFirstFrame), SDWebImageCoderDecodeScaleFactor : @(scale)}];
            }

            BOOL shouldDecode = self.shouldDecompressImages;

@@ -547,10 +553,6 @@ didReceiveResponse:(NSURLResponse *)response
    return _cacheKey;
}

- (nullable UIImage *)scaledImageForKey:(nullable NSString *)key image:(nullable UIImage *)image {
    return SDScaledImageForKey(key, image);
}

- (BOOL)shouldContinueWhenAppEntersBackground {
    return self.options & SDWebImageDownloaderContinueInBackground;
}
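End to end, the scale factor rides along in the request context (a sketch of mine; it assumes the 5.x view category accepts a context dictionary with this exact selector, which may differ from the actual API at this point in development):

// Hypothetical call site: treat the downloaded data as 2x regardless of the URL suffix.
[imageView sd_setImageWithURL:url
             placeholderImage:nil
                      options:0
                      context:@{SDWebImageContextImageScaleFactor : @(2.0)}
                     progress:nil
                    completed:nil];
// The downloader operation above reads the value and forwards it to the coder
// as SDWebImageCoderDecodeScaleFactor.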
@@ -72,10 +72,19 @@
    if (!data) {
        return nil;
    }
    CGFloat scale = 1;
    if ([options valueForKey:SDWebImageCoderDecodeScaleFactor]) {
        scale = [[options valueForKey:SDWebImageCoderDecodeScaleFactor] doubleValue];
        if (scale < 1) {
            scale = 1;
        }
    }

#if SD_MAC
    SDAnimatedImageRep *imageRep = [[SDAnimatedImageRep alloc] initWithData:data];
    NSImage *animatedImage = [[NSImage alloc] initWithSize:imageRep.size];
    NSSize size = NSMakeSize(imageRep.pixelsWide / scale, imageRep.pixelsHigh / scale);
    imageRep.size = size;
    NSImage *animatedImage = [[NSImage alloc] initWithSize:size];
    [animatedImage addRepresentation:imageRep];
    return animatedImage;
#else

@@ -85,7 +94,6 @@
        return nil;
    }
    size_t count = CGImageSourceGetCount(source);

    UIImage *animatedImage;

    BOOL decodeFirstFrame = [options[SDWebImageCoderDecodeFirstFrameOnly] boolValue];

@@ -101,7 +109,7 @@
            }

            float duration = [self sd_frameDurationAtIndex:i source:source];
            UIImage *image = [[UIImage alloc] initWithCGImage:imageRef];
            UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
            CGImageRelease(imageRef);

            SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:image duration:duration];

@@ -218,10 +226,17 @@
    CGImageRef partialImageRef = CGImageSourceCreateImageAtIndex(_imageSource, 0, NULL);

    if (partialImageRef) {
        CGFloat scale = 1;
        if ([options valueForKey:SDWebImageCoderDecodeScaleFactor]) {
            scale = [[options valueForKey:SDWebImageCoderDecodeScaleFactor] doubleValue];
            if (scale < 1) {
                scale = 1;
            }
        }
#if SD_UIKIT || SD_WATCH
        image = [[UIImage alloc] initWithCGImage:partialImageRef];
        image = [[UIImage alloc] initWithCGImage:partialImageRef scale:scale orientation:UIImageOrientationUp];
#elif SD_MAC
        image = [[UIImage alloc] initWithCGImage:partialImageRef size:NSZeroSize];
        image = [[UIImage alloc] initWithCGImage:partialImageRef scale:scale];
#endif
        CGImageRelease(partialImageRef);
    }

@@ -369,7 +384,7 @@
        CGImageRelease(imageRef);
    }
#if SD_MAC
    UIImage *image = [[UIImage alloc] initWithCGImage:newImageRef size:NSZeroSize];
    UIImage *image = [[UIImage alloc] initWithCGImage:newImageRef scale:1];
#else
    UIImage *image = [[UIImage alloc] initWithCGImage:newImageRef];
#endif
@@ -81,9 +81,15 @@
    if (!data) {
        return nil;
    }
    CGFloat scale = 1;
    if ([options valueForKey:SDWebImageCoderDecodeScaleFactor]) {
        scale = [[options valueForKey:SDWebImageCoderDecodeScaleFactor] doubleValue];
        if (scale < 1) {
            scale = 1;
        }
    }

    UIImage *image = [[UIImage alloc] initWithData:data];

    UIImage *image = [[UIImage alloc] initWithData:data scale:scale];
#if SD_MAC
    return image;
#else

@@ -175,10 +181,17 @@
#endif

    if (partialImageRef) {
        CGFloat scale = 1;
        if ([options valueForKey:SDWebImageCoderDecodeScaleFactor]) {
            scale = [[options valueForKey:SDWebImageCoderDecodeScaleFactor] doubleValue];
            if (scale < 1) {
                scale = 1;
            }
        }
#if SD_UIKIT || SD_WATCH
        image = [[UIImage alloc] initWithCGImage:partialImageRef scale:1 orientation:_orientation];
        image = [[UIImage alloc] initWithCGImage:partialImageRef scale:scale orientation:_orientation];
#elif SD_MAC
        image = [[UIImage alloc] initWithCGImage:partialImageRef size:NSZeroSize];
        image = [[UIImage alloc] initWithCGImage:partialImageRef scale:scale];
#endif
        CGImageRelease(partialImageRef);
    }
@@ -261,7 +261,7 @@

            if (options & SDWebImageRefreshCached && cachedImage && !downloadedImage) {
                // Image refresh hit the NSURLCache cache, do not call the completion block
            } else if (downloadedImage && (!downloadedImage.images || (options & SDWebImageTransformAnimatedImage)) && transformer) {
            } else if (downloadedImage && (!downloadedImage.sd_isAnimated || (options & SDWebImageTransformAnimatedImage)) && transformer) {
                dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
                    UIImage *transformedImage = [transformer transformedImageWithImage:downloadedImage forKey:key];
                    if (transformedImage && finished) {
@@ -114,9 +114,25 @@ dispatch_semaphore_signal(self->_lock);
    uint32_t flags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
    BOOL hasAnimation = flags & ANIMATION_FLAG;
    BOOL decodeFirstFrame = [[options valueForKey:SDWebImageCoderDecodeFirstFrameOnly] boolValue];
    CGFloat scale = 1;
    if ([options valueForKey:SDWebImageCoderDecodeScaleFactor]) {
        scale = [[options valueForKey:SDWebImageCoderDecodeScaleFactor] doubleValue];
        if (scale < 1) {
            scale = 1;
        }
    }
    if (!hasAnimation) {
        // for static single webp image
        UIImage *staticImage = [self sd_rawWebpImageWithData:webpData];
        CGImageRef imageRef = [self sd_createWebpImageWithData:webpData];
        if (!imageRef) {
            return nil;
        }
#if SD_UIKIT || SD_WATCH
        UIImage *staticImage = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
#else
        UIImage *staticImage = [[UIImage alloc] initWithCGImage:imageRef scale:scale];
#endif
        CGImageRelease(imageRef);
        WebPDemuxDelete(demuxer);
        return staticImage;
    }

@@ -132,7 +148,13 @@ dispatch_semaphore_signal(self->_lock);

    if (decodeFirstFrame) {
        // first frame for animated webp image
        UIImage *firstFrameImage = [self sd_rawWebpImageWithData:iter.fragment];
        CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment];
#if SD_UIKIT || SD_WATCH
        UIImage *firstFrameImage = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
#else
        UIImage *firstFrameImage = [[UIImage alloc] initWithCGImage:imageRef scale:scale];
#endif
        CGImageRelease(imageRef);
        WebPDemuxReleaseIterator(&iter);
        WebPDemuxDelete(demuxer);
        return firstFrameImage;

@@ -154,10 +176,16 @@ dispatch_semaphore_signal(self->_lock);

    do {
        @autoreleasepool {
            UIImage *image = [self sd_drawnWebpImageWithCanvas:canvas iterator:iter];
            if (!image) {
            CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:canvas iterator:iter];
            if (!imageRef) {
                continue;
            }
#if SD_UIKIT || SD_WATCH
            UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
#else
            UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale];
#endif
            CGImageRelease(imageRef);

            NSTimeInterval duration = [self sd_frameDurationWithIterator:iter];
            SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:image duration:duration];

@@ -249,11 +277,18 @@ dispatch_semaphore_signal(self->_lock);
        CGContextRelease(canvas);
        return nil;
    }
    CGFloat scale = 1;
    if ([options valueForKey:SDWebImageCoderDecodeScaleFactor]) {
        scale = [[options valueForKey:SDWebImageCoderDecodeScaleFactor] doubleValue];
        if (scale < 1) {
            scale = 1;
        }
    }

#if SD_UIKIT || SD_WATCH
    image = [[UIImage alloc] initWithCGImage:newImageRef];
    image = [[UIImage alloc] initWithCGImage:newImageRef scale:scale orientation:UIImageOrientationUp];
#else
    image = [[UIImage alloc] initWithCGImage:newImageRef size:NSZeroSize];
    image = [[UIImage alloc] initWithCGImage:newImageRef scale:scale];
#endif
    CGImageRelease(newImageRef);
    CGContextRelease(canvas);

@@ -271,8 +306,8 @@ dispatch_semaphore_signal(self->_lock);
    if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
        CGContextClearRect(canvas, imageRect);
    } else {
        UIImage *image = [self sd_rawWebpImageWithData:iter.fragment];
        if (!image) {
        CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment];
        if (!imageRef) {
            return;
        }
        BOOL shouldBlend = iter.blend_method == WEBP_MUX_BLEND;

@@ -280,13 +315,14 @@ dispatch_semaphore_signal(self->_lock);
        if (!shouldBlend) {
            CGContextClearRect(canvas, imageRect);
        }
        CGContextDrawImage(canvas, imageRect, image.CGImage);
        CGContextDrawImage(canvas, imageRect, imageRef);
        CGImageRelease(imageRef);
    }
}

- (nullable UIImage *)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter {
    UIImage *image = [self sd_rawWebpImageWithData:iter.fragment];
    if (!image) {
- (nullable CGImageRef)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter CF_RETURNS_RETAINED {
    CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment];
    if (!imageRef) {
        return nil;
    }

@@ -300,25 +336,19 @@ dispatch_semaphore_signal(self->_lock);
    if (!shouldBlend) {
        CGContextClearRect(canvas, imageRect);
    }
    CGContextDrawImage(canvas, imageRect, image.CGImage);
    CGContextDrawImage(canvas, imageRect, imageRef);
    CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);

#if SD_UIKIT || SD_WATCH
    image = [[UIImage alloc] initWithCGImage:newImageRef];
#elif SD_MAC
    image = [[UIImage alloc] initWithCGImage:newImageRef size:NSZeroSize];
#endif

    CGImageRelease(newImageRef);
    CGImageRelease(imageRef);

    if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
        CGContextClearRect(canvas, imageRect);
    }

    return image;
    return newImageRef;
}

- (nullable UIImage *)sd_rawWebpImageWithData:(WebPData)webpData {
- (nullable CGImageRef)sd_createWebpImageWithData:(WebPData)webpData CF_RETURNS_RETAINED {
    WebPDecoderConfig config;
    if (!WebPInitDecoderConfig(&config)) {
        return nil;

@@ -361,14 +391,7 @@ dispatch_semaphore_signal(self->_lock);

    CGDataProviderRelease(provider);

#if SD_UIKIT || SD_WATCH
    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef];
#else
    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef size:NSZeroSize];
#endif
    CGImageRelease(imageRef);

    return image;
    return imageRef;
}

- (NSTimeInterval)sd_frameDurationWithIterator:(WebPIterator)iter {

@@ -639,7 +662,16 @@ static void FreeImageData(void *info, const void *data, size_t size) {
            WebPDemuxReleaseIterator(&iter);
            return nil;
        }
        image = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter];
        CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter];
        if (!imageRef) {
            return nil;
        }
#if SD_UIKIT || SD_WATCH
        image = [[UIImage alloc] initWithCGImage:imageRef];
#else
        image = [[UIImage alloc] initWithCGImage:imageRef scale:1];
#endif
        CGImageRelease(imageRef);
    } else {
        // Else, this can happen when one image is set on different imageViews or one loop ends, so we should clear the shared canvas.
        if (_currentBlendIndex != NSNotFound) {

@@ -660,7 +692,16 @@ static void FreeImageData(void *info, const void *data, size_t size) {
        if ((size_t)iter.frame_num == endIndex) {
            [self sd_blendWebpImageWithCanvas:_canvas iterator:iter];
        } else {
            image = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter];
            CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter];
            if (!imageRef) {
                return nil;
            }
#if SD_UIKIT || SD_WATCH
            image = [[UIImage alloc] initWithCGImage:imageRef];
#else
            image = [[UIImage alloc] initWithCGImage:imageRef scale:1];
#endif
            CGImageRelease(imageRef);
        }
    }
} while ((size_t)iter.frame_num < (endIndex + 1) && WebPDemuxNextFrame(&iter));
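The `sd_createWebpImageWithData` / `sd_drawnWebpImageWithCanvas` methods now hand back a retained `CGImageRef` (hence `CF_RETURNS_RETAINED`), so each call site wraps it in a platform image and releases it once. A minimal sketch of that calling pattern (mine, using only the methods shown above; `webpData` and `scale` come from the surrounding decode method):

CGImageRef imageRef = [self sd_createWebpImageWithData:webpData]; // +1 retained, per CF_RETURNS_RETAINED
if (imageRef) {
#if SD_UIKIT || SD_WATCH
    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
#else
    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale];
#endif
    CGImageRelease(imageRef); // balance the +1 from the create method
    // ... use image ...
}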
@@ -24,6 +24,15 @@ static CGContextRef SDCGContextCreateARGBBitmapContext(CGSize size, BOOL opaque,
    CGImageAlphaInfo alphaInfo = (opaque ? kCGImageAlphaNoneSkipFirst : kCGImageAlphaPremultipliedFirst);
    CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, space, kCGBitmapByteOrderDefault | alphaInfo);
    CGColorSpaceRelease(space);
    if (!context) {
        return NULL;
    }
    if (scale == 0) {
        // Match `UIGraphicsBeginImageContextWithOptions`: reset to the scale factor of the device's main screen if scale is 0.
        scale = [NSScreen mainScreen].backingScaleFactor;
    }
    CGContextScaleCTM(context, scale, scale);

    return context;
}
#endif

@@ -71,7 +80,17 @@ static UIImage * SDGraphicsGetImageFromCurrentImageContext(void) {
    if (!imageRef) {
        return nil;
    }
    NSImage *image = [[NSImage alloc] initWithCGImage:imageRef size:NSZeroSize];
    CGAffineTransform transform = CGContextGetCTM(context);
    CGFloat xs = transform.a;
    CGFloat ys = transform.d;
    CGFloat scale;
    if (xs == ys && xs > 0) {
        scale = xs;
    } else {
        // Protect against the x and y axis scale factors being unequal
        scale = [NSScreen mainScreen].backingScaleFactor;
    }
    NSImage *image = [[NSImage alloc] initWithCGImage:imageRef scale:scale];
    CGImageRelease(imageRef);
    return image;
#endif

@@ -303,7 +322,7 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
#if SD_UIKIT || SD_WATCH
    UIImage *image = [UIImage imageWithCGImage:imageRef scale:self.scale orientation:self.imageOrientation];
#else
    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef size:NSZeroSize];
    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:self.scale];
#endif
    CGImageRelease(imageRef);
    return image;

@@ -381,7 +400,7 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
#if SD_UIKIT || SD_WATCH
    UIImage *img = [UIImage imageWithCGImage:imgRef scale:self.scale orientation:self.imageOrientation];
#else
    UIImage *img = [[UIImage alloc] initWithCGImage:imgRef size:NSZeroSize];
    UIImage *img = [[UIImage alloc] initWithCGImage:imgRef scale:self.scale];
#endif
    CGImageRelease(imgRef);
    CGContextRelease(context);

@@ -417,7 +436,7 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
#if SD_UIKIT || SD_WATCH
    UIImage *img = [UIImage imageWithCGImage:imgRef scale:self.scale orientation:self.imageOrientation];
#else
    UIImage *img = [[UIImage alloc] initWithCGImage:imgRef size:NSZeroSize];
    UIImage *img = [[UIImage alloc] initWithCGImage:imgRef scale:self.scale];
#endif
    CGImageRelease(imgRef);
    return img;

@@ -434,7 +453,7 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
#if SD_UIKIT || SD_WATCH
    return [UIImage imageWithCGImage:self.CGImage scale:self.scale orientation:self.imageOrientation];
#else
    return [[UIImage alloc] initWithCGImage:self.CGImage size:NSZeroSize];
    return [[UIImage alloc] initWithCGImage:self.CGImage scale:self.scale];
#endif
}

@@ -651,7 +670,7 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
#if SD_UIKIT || SD_WATCH
    UIImage *outputImage = [UIImage imageWithCGImage:effectCGImage scale:self.scale orientation:self.imageOrientation];
#else
    UIImage *outputImage = [[UIImage alloc] initWithCGImage:effectCGImage size:NSZeroSize];
    UIImage *outputImage = [[UIImage alloc] initWithCGImage:effectCGImage scale:self.scale];
#endif
    CGImageRelease(effectCGImage);

@@ -676,7 +695,7 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
#if SD_UIKIT
    UIImage *image = [UIImage imageWithCGImage:imageRef scale:self.scale orientation:self.imageOrientation];
#else
    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef size:NSZeroSize];
    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:self.scale];
#endif
    CGImageRelease(imageRef);
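On macOS the current-context helper above recovers the scale from the context's CTM, since `CGBitmapContextCreate` has no scale concept of its own. The same idea in isolation (my sketch, plain CoreGraphics; `context` is any bitmap context that was scaled with `CGContextScaleCTM`):

CGAffineTransform ctm = CGContextGetCTM(context);
CGFloat scale = (ctm.a == ctm.d && ctm.a > 0) ? ctm.a : 1; // fall back to 1 if the axes disagree
size_t pixelWidth = CGBitmapContextGetWidth(context);
CGFloat pointWidth = pixelWidth / scale;                   // point size implied by the CTM scale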
@@ -25,7 +25,7 @@
 * UIKit:
 * Check the `images` array property
 * AppKit:
 * NSImage currently only support animated via GIF imageRep unlike UIImage. It will check all the imageRef
 * NSImage currently only supports animation via its GIF imageRep, unlike UIImage. It will check the imageRep's frame count.
 */
@property (nonatomic, assign, readonly) BOOL sd_isAnimated;

@@ -51,7 +51,12 @@

- (NSUInteger)sd_imageLoopCount {
    NSUInteger imageLoopCount = 0;
    NSBitmapImageRep *bitmapImageRep = self.bitmapImageRep;
    NSRect imageRect = NSMakeRect(0, 0, self.size.width, self.size.height);
    NSImageRep *imageRep = [self bestRepresentationForRect:imageRect context:nil hints:nil];
    NSBitmapImageRep *bitmapImageRep;
    if ([imageRep isKindOfClass:[NSBitmapImageRep class]]) {
        bitmapImageRep = (NSBitmapImageRep *)imageRep;
    }
    if (bitmapImageRep) {
        imageLoopCount = [[bitmapImageRep valueForProperty:NSImageLoopCount] unsignedIntegerValue];
    }

@@ -59,7 +64,12 @@
}

- (void)setSd_imageLoopCount:(NSUInteger)sd_imageLoopCount {
    NSBitmapImageRep *bitmapImageRep = self.bitmapImageRep;
    NSRect imageRect = NSMakeRect(0, 0, self.size.width, self.size.height);
    NSImageRep *imageRep = [self bestRepresentationForRect:imageRect context:nil hints:nil];
    NSBitmapImageRep *bitmapImageRep;
    if ([imageRep isKindOfClass:[NSBitmapImageRep class]]) {
        bitmapImageRep = (NSBitmapImageRep *)imageRep;
    }
    if (bitmapImageRep) {
        [bitmapImageRep setProperty:NSImageLoopCount withValue:@(sd_imageLoopCount)];
    }

@@ -67,7 +77,12 @@

- (BOOL)sd_isAnimated {
    BOOL isGIF = NO;
    NSBitmapImageRep *bitmapImageRep = self.bitmapImageRep;
    NSRect imageRect = NSMakeRect(0, 0, self.size.width, self.size.height);
    NSImageRep *imageRep = [self bestRepresentationForRect:imageRect context:nil hints:nil];
    NSBitmapImageRep *bitmapImageRep;
    if ([imageRep isKindOfClass:[NSBitmapImageRep class]]) {
        bitmapImageRep = (NSBitmapImageRep *)imageRep;
    }
    if (bitmapImageRep) {
        NSUInteger frameCount = [[bitmapImageRep valueForProperty:NSImageFrameCount] unsignedIntegerValue];
        isGIF = frameCount > 1 ? YES : NO;
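For completeness, the AppKit categories above make a GIF-backed NSImage answer the same questions a UIImage can (a minimal usage sketch of mine; the file path is hypothetical):

NSData *gifData = [NSData dataWithContentsOfFile:@"/tmp/animated.gif"];
NSImage *image = [[NSImage alloc] initWithData:gifData scale:2];
if (image.sd_isAnimated) {
    NSLog(@"loops %lu time(s), scale %.0f",
          (unsigned long)image.sd_imageLoopCount, image.scale);
}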
@@ -170,7 +170,9 @@
    UIImage *outputImage = [coder decodedImageWithData:outputImageData options:nil];
    expect(outputImage.size).to.equal(inputImage.size);
    expect(outputImage.scale).to.equal(inputImage.scale);
#if SD_UIKIT
    expect(outputImage.images.count).to.equal(inputImage.images.count);
#endif
}

@end