Merge pull request #3559 from dreampiggy/performance/default_bitmap_and_force_decode

Refactor the force-decode handling logic to avoid Core Animation copying the frame buffer, and introduce SDImageForceDecodePolicy for detailed control
This commit is contained in:
DreamPiggy 2023-07-13 23:59:03 +08:00 committed by GitHub
commit 8f16a63d7f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
20 changed files with 449 additions and 101 deletions

View File

@ -23,7 +23,9 @@
}
[self.imageView sd_setImageWithURL:self.imageURL
placeholderImage:nil
options:SDWebImageProgressiveLoad | SDWebImageScaleDownLargeImages];
options:SDWebImageProgressiveLoad | SDWebImageScaleDownLargeImages
context:@{SDWebImageContextImageForceDecodePolicy: @(SDImageForceDecodePolicyNever)}
];
self.imageView.shouldCustomLoopCount = YES;
self.imageView.animationRepeatCount = 0;
}

View File

@ -37,8 +37,10 @@ typedef NS_OPTIONS(NSUInteger, SDImageCacheOptions) {
/**
* By default, we will decode the image in the background during cache query and download from the network. This can help to improve performance because when rendering an image on the screen, it needs to be decoded first. But this happens on the main queue, driven by Core Animation.
* However, this process may increase the memory usage as well. If you are experiencing an issue due to excessive memory consumption, this flag can prevent decoding the image.
* @note 5.14.0 introduce `SDImageCoderDecodeUseLazyDecoding`, use that for better control from codec, instead of post-processing. Which acts the similar like this option but works for SDAnimatedImage as well (this one does not)
* @deprecated Deprecated in v5.17.0, if you don't want force-decode, pass [.imageForceDecodePolicy] = [SDImageForceDecodePolicy.never] in context option
*/
SDImageCacheAvoidDecodeImage = 1 << 4,
SDImageCacheAvoidDecodeImage API_DEPRECATED("Use SDWebImageContextImageForceDecodePolicy instead", macos(10.10, 10.10), ios(8.0, 8.0), tvos(9.0, 9.0), watchos(2.0, 2.0)) = 1 << 4,
/**
* By default, we decode the animated image. This flag can force decode the first frame only and produce the static image.
*/

View File

@ -883,6 +883,8 @@ static NSString * _defaultDiskCacheDirectory;
}
#pragma mark - Helper
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+ (SDWebImageOptions)imageOptionsFromCacheOptions:(SDImageCacheOptions)cacheOptions {
SDWebImageOptions options = 0;
if (cacheOptions & SDImageCacheScaleDownLargeImages) options |= SDWebImageScaleDownLargeImages;
@ -893,6 +895,7 @@ static NSString * _defaultDiskCacheDirectory;
return options;
}
#pragma clang diagnostic pop
@end
@ -904,6 +907,8 @@ static NSString * _defaultDiskCacheDirectory;
return [self queryImageForKey:key options:options context:context cacheType:SDImageCacheTypeAll completion:completionBlock];
}
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
- (id<SDWebImageOperation>)queryImageForKey:(NSString *)key options:(SDWebImageOptions)options context:(nullable SDWebImageContext *)context cacheType:(SDImageCacheType)cacheType completion:(nullable SDImageCacheQueryCompletionBlock)completionBlock {
SDImageCacheOptions cacheOptions = 0;
if (options & SDWebImageQueryMemoryData) cacheOptions |= SDImageCacheQueryMemoryData;
@ -917,6 +922,7 @@ static NSString * _defaultDiskCacheDirectory;
return [self queryCacheOperationForKey:key options:cacheOptions context:context cacheType:cacheType done:completionBlock];
}
#pragma clang diagnostic pop
- (void)storeImage:(UIImage *)image imageData:(NSData *)imageData forKey:(nullable NSString *)key cacheType:(SDImageCacheType)cacheType completion:(nullable SDWebImageNoParamsBlock)completionBlock {
[self storeImage:image imageData:imageData forKey:key options:0 context:nil cacheType:cacheType completion:completionBlock];

View File

@ -123,15 +123,19 @@ UIImage * _Nullable SDImageCacheDecodeImageData(NSData * _Nonnull imageData, NSS
image = [imageCoder decodedImageWithData:imageData options:coderOptions];
}
if (image) {
BOOL shouldDecode = !SD_OPTIONS_CONTAINS(options, SDWebImageAvoidDecodeImage);
BOOL lazyDecode = [coderOptions[SDImageCoderDecodeUseLazyDecoding] boolValue];
if (lazyDecode) {
// lazyDecode = NO means we should not forceDecode, highest priority
shouldDecode = NO;
SDImageForceDecodePolicy policy = SDImageForceDecodePolicyAutomatic;
NSNumber *polivyValue = context[SDWebImageContextImageForceDecodePolicy];
if (polivyValue != nil) {
policy = polivyValue.unsignedIntegerValue;
}
if (shouldDecode) {
image = [SDImageCoderHelper decodedImageWithImage:image];
// TODO: Deprecated, remove in SD 6.0...
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if (SD_OPTIONS_CONTAINS(options, SDWebImageAvoidDecodeImage)) {
policy = SDImageForceDecodePolicyNever;
}
#pragma clang diagnostic pop
image = [SDImageCoderHelper decodedImageWithImage:image policy:policy];
// assign the decode options, to let manager check whether to re-decode if needed
image.sd_decodeOptions = coderOptions;
}

View File

@ -20,6 +20,36 @@ typedef NS_ENUM(NSUInteger, SDImageCoderDecodeSolution) {
SDImageCoderDecodeSolutionUIKit
};
/// The policy used to decide whether to force-decode the origin CGImage (produced by an image coder plugin).
/// Some CGImages are lazy (decoded on demand); others are eager but still need an extra copy to render on screen.
/// The force-decode step `pre-processes` the bitmap to get the most suitable CGImage to render, which can increase the frame rate.
/// The downside is that force-decode may consume RAM and CPU, and may lose the `lazy` support (a lazy CGImage can be purged on memory warning and re-created when needed), see more: `SDImageCoderDecodeUseLazyDecoding`
typedef NS_ENUM(NSUInteger, SDImageForceDecodePolicy) {
/// Based on the input CGImage's colorspace, byte alignment and bitmapInfo: force-decode only if rendering it would trigger the extra `CA::copy_image` copy, else don't
SDImageForceDecodePolicyAutomatic,
/// Never force-decode the input CGImage
SDImageForceDecodePolicyNever,
/// Always force-decode the input CGImage (only once)
SDImageForceDecodePolicyAlways
};
/// Round `size` up to the nearest multiple of `alignment`.
/// - Parameters:
///   - size: The size in bytes to align
///   - alignment: The alignment, in bytes (must be non-zero)
static inline size_t SDByteAlign(size_t size, size_t alignment) {
    size_t remainder = size % alignment;
    return (remainder == 0) ? size : size + (alignment - remainder);
}
/// The pixel format information (for `CGImageCreate` and friends) best suited to current hardware rendering.
/// Query it via `+preferredPixelFormat:`; coder plugins should create their CGImage with these values to avoid Core Animation's extra copy.
typedef struct SDImagePixelFormat {
/// The preferred bitmap info. Typically pre-multiplied RGBA8888 for alpha images, RGBX8888 for non-alpha images.
CGBitmapInfo bitmapInfo;
/// The preferred bytes-per-row alignment, in bytes. Typically 32 (8 pixels × 4 bytes per pixel).
size_t alignment;
} SDImagePixelFormat;
/**
Provide some common helper methods for building the image decoder/encoder.
*/
@ -45,16 +75,31 @@ typedef NS_ENUM(NSUInteger, SDImageCoderDecodeSolution) {
*/
+ (NSArray<SDImageFrame *> * _Nullable)framesFromAnimatedImage:(UIImage * _Nullable)animatedImage NS_SWIFT_NAME(frames(from:));
#pragma mark - Preferred Rendering Format
/// For coders who use `CGImageCreate`, use the information below to create an efficient CGImage which can be rendered on the GPU without Core Animation's extra copy (`CA::Render::copy_image`), which can be debugged using `Color Copied Image` in Xcode Instruments
/// `CGImageCreate`'s `bytesPerRow`, `space`, `bitmapInfo` params should use the information below.
/**
Return the shared device-dependent RGB color space. This follows The Get Rule.
On iOS, it's created with deviceRGB (if available, use sRGB).
On macOS, it's from the screen colorspace (if failed, use deviceRGB)
Because it's shared, you should not retain or release this object.
Typically is sRGB for iOS, screen color space (like Color LCD) for macOS.
@return The device-dependent RGB color space
*/
+ (CGColorSpaceRef _Nonnull)colorSpaceGetDeviceRGB CF_RETURNS_NOT_RETAINED;
/**
This returns the pixel format **preferred by the current hardware && OS, using runtime detection**
@param containsAlpha Whether the image to render contains alpha channel
*/
+ (SDImagePixelFormat)preferredPixelFormat:(BOOL)containsAlpha;
/**
Check whether CGImage is hardware supported to rendering on screen, without the trigger of `CA::Render::copy_image`
You can debug the copied image by using Xcode's `Color Copied Image`, the copied image will turn Cyan and occupy double RAM for bitmap buffer.
Typically, a CGImage created using the methods above (`colorspace` / `alignment` / `bitmapInfo`) can render without the copy.
*/
+ (BOOL)CGImageIsHardwareSupported:(_Nonnull CGImageRef)cgImage;
/**
Check whether CGImage contains alpha channel.
@ -113,20 +158,41 @@ typedef NS_ENUM(NSUInteger, SDImageCoderDecodeSolution) {
/**
Return the decoded image by the provided image. This one unlike `CGImageCreateDecoded:`, will not decode the image which contains alpha channel or animated image. On iOS 15+, this may use `UIImage.preparingForDisplay()` to use CMPhoto for better performance than the old solution.
@param image The image to be decoded
@note This translate to `decodedImageWithImage:policy:` with automatic policy
@return The decoded image
*/
+ (UIImage * _Nullable)decodedImageWithImage:(UIImage * _Nullable)image;
/**
Return the decoded image by the provided image. This one unlike `CGImageCreateDecoded:`, will not decode the image which contains alpha channel or animated image. On iOS 15+, this may use `UIImage.preparingForDisplay()` to use CMPhoto for better performance than the old solution.
@param image The image to be decoded
@param policy The force decode policy to decode image, will effect the check whether input image need decode
@return The decoded image
*/
+ (UIImage * _Nullable)decodedImageWithImage:(UIImage * _Nullable)image policy:(SDImageForceDecodePolicy)policy;
/**
Return the decoded and probably scaled-down image from the provided image. If the image's pixel byte size is larger than the limit bytes, it will try to scale down. Otherwise it just works like `decodedImageWithImage:`; it never scales up.
@warning You should not pass too small a byte limit; the suggested value should be larger than 1MB. Even though we use tile decoding to avoid OOM, a small limit will consume much more CPU time because we need to iterate more times to draw each tile.
@param image The image to be decoded and scaled down
@param bytes The limit bytes size. Provide 0 to use the build-in limit.
@note This translate to `decodedAndScaledDownImageWithImage:limitBytes:policy:` with automatic policy
@return The decoded and probably scaled down image
*/
+ (UIImage * _Nullable)decodedAndScaledDownImageWithImage:(UIImage * _Nullable)image limitBytes:(NSUInteger)bytes;
/**
Return the decoded and probably scaled-down image from the provided image. If the image's pixel byte size is larger than the limit bytes, it will try to scale down. Otherwise it just works like `decodedImageWithImage:`; it never scales up.
@warning You should not pass too small a byte limit; the suggested value should be larger than 1MB. Even though we use tile decoding to avoid OOM, a small limit will consume much more CPU time because we need to iterate more times to draw each tile.
@param image The image to be decoded and scaled down
@param bytes The limit bytes size. Provide 0 to use the build-in limit.
@param policy The force decode policy to decode image, will effect the check whether input image need decode
@return The decoded and probably scaled down image
*/
+ (UIImage * _Nullable)decodedAndScaledDownImageWithImage:(UIImage * _Nullable)image limitBytes:(NSUInteger)bytes policy:(SDImageForceDecodePolicy)policy;
/**
Control the default force decode solution. Available solutions in `SDImageCoderDecodeSolution`.
@note Defaults to `SDImageCoderDecodeSolutionAutomatic`, which prefers to use UIKit for JPEG/HEIF, and fallback on CoreGraphics. If you want control on your hand, set the other solution.

View File

@ -17,11 +17,10 @@
#import "SDInternalMacros.h"
#import "SDGraphicsImageRenderer.h"
#import "SDInternalMacros.h"
#import "SDDeviceHelper.h"
#import <Accelerate/Accelerate.h>
static inline size_t SDByteAlign(size_t size, size_t alignment) {
return ((size + (alignment - 1)) / alignment) * alignment;
}
#define kCGColorSpaceDeviceRGB CFSTR("kCGColorSpaceDeviceRGB")
#if SD_UIKIT
static inline UIImage *SDImageDecodeUIKit(UIImage *image) {
@ -72,6 +71,42 @@ static inline BOOL SDImageSupportsHardwareHEVCDecoder(void) {
}
#endif
/// Returns a shared 1x1 non-opaque (alpha) sample image, rendered once through SDGraphicsImageRenderer.
/// Used to probe the bitmap layout the current OS/hardware prefers for alpha images.
static UIImage * _Nonnull SDImageGetAlphaDummyImage(void) {
    static UIImage *sampleImage;
    static dispatch_once_t token;
    dispatch_once(&token, ^{
        CGSize pixelSize = CGSizeMake(1, 1);
        SDGraphicsImageRendererFormat *format = [SDGraphicsImageRendererFormat preferredFormat];
        format.opaque = NO;
        format.scale = 1;
        SDGraphicsImageRenderer *renderer = [[SDGraphicsImageRenderer alloc] initWithSize:pixelSize format:format];
        sampleImage = [renderer imageWithActions:^(CGContextRef _Nonnull context) {
            CGContextSetFillColorWithColor(context, UIColor.redColor.CGColor);
            CGContextFillRect(context, CGRectMake(0, 0, pixelSize.width, pixelSize.height));
        }];
        NSCAssert(sampleImage, @"The sample alpha image (1x1 pixels) returns nil, OS bug ?");
    });
    return sampleImage;
}
/// Returns a shared 1x1 opaque (non-alpha) sample image, rendered once through SDGraphicsImageRenderer.
/// Used to probe the bitmap layout the current OS/hardware prefers for non-alpha images.
static UIImage * _Nonnull SDImageGetNonAlphaDummyImage(void) {
    static UIImage *sampleImage;
    static dispatch_once_t token;
    dispatch_once(&token, ^{
        CGSize pixelSize = CGSizeMake(1, 1);
        SDGraphicsImageRendererFormat *format = [SDGraphicsImageRendererFormat preferredFormat];
        format.opaque = YES;
        format.scale = 1;
        SDGraphicsImageRenderer *renderer = [[SDGraphicsImageRenderer alloc] initWithSize:pixelSize format:format];
        sampleImage = [renderer imageWithActions:^(CGContextRef _Nonnull context) {
            CGContextSetFillColorWithColor(context, UIColor.redColor.CGColor);
            CGContextFillRect(context, CGRectMake(0, 0, pixelSize.width, pixelSize.height));
        }];
        NSCAssert(sampleImage, @"The sample non-alpha image (1x1 pixels) returns nil, OS bug ?");
    });
    return sampleImage;
}
static SDImageCoderDecodeSolution kDefaultDecodeSolution = SDImageCoderDecodeSolutionAutomatic;
static const size_t kBytesPerPixel = 4;
@ -258,11 +293,82 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
static CGColorSpaceRef colorSpace;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
#if SD_MAC
NSScreen *mainScreen = nil;
if (@available(macOS 10.12, *)) {
mainScreen = [NSScreen mainScreen];
} else {
mainScreen = [NSScreen screens].firstObject;
}
colorSpace = mainScreen.colorSpace.CGColorSpace;
#else
colorSpace = CGColorSpaceCreateWithName(kCGColorSpaceSRGB);
#endif
});
return colorSpace;
}
/// Probe a 1x1 sample image rendered by the OS to discover the bitmap layout
/// the current hardware prefers, then derive the bytes-per-row alignment from it.
+ (SDImagePixelFormat)preferredPixelFormat:(BOOL)containsAlpha {
    CGImageRef sampleRef = containsAlpha ? SDImageGetAlphaDummyImage().CGImage : SDImageGetNonAlphaDummyImage().CGImage;
    CGBitmapInfo sampleBitmapInfo = CGImageGetBitmapInfo(sampleRef);
    // 8 bits per component normally; 16 when the renderer produced float components
    size_t bitsPerComponent = SD_OPTIONS_CONTAINS(sampleBitmapInfo, kCGBitmapFloatComponents) ? 16 : 8;
    size_t componentCount = 4; // Hardcode now
    // https://github.com/path/FastImageCache#byte-alignment
    // A properly aligned bytes-per-row value must be a multiple of 8 pixels × bytes per pixel.
    SDImagePixelFormat pixelFormat = {
        .bitmapInfo = sampleBitmapInfo,
        .alignment = (bitsPerComponent / 8) * componentCount * 8
    };
    return pixelFormat;
}
/// Whether the CGImage can be rendered on screen directly, without triggering
/// Core Animation's extra `CA::Render::copy_image` copy.
/// Two checks: the bytes-per-row alignment and the color space.
+ (BOOL)CGImageIsHardwareSupported:(CGImageRef)cgImage {
    // 1. Check byte alignment
    BOOL hasAlpha = [self CGImageContainsAlpha:cgImage];
    SDImagePixelFormat pixelFormat = [self preferredPixelFormat:hasAlpha];
    size_t bytesPerRow = CGImageGetBytesPerRow(cgImage);
    if (SDByteAlign(bytesPerRow, pixelFormat.alignment) != bytesPerRow) {
        // not byte-aligned — rendering will trigger the extra copy
        return NO;
    }
    // 2. Check color space
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(cgImage);
    if (colorSpace == [self colorSpaceGetDeviceRGB]) {
        return YES;
    }
    if (@available(iOS 10.0, tvOS 10.0, macOS 10.6, watchOS 3.0, *)) {
        NSString *colorspaceName = (__bridge_transfer NSString *)CGColorSpaceCopyName(colorSpace);
        // Seems sRGB/deviceRGB always supported, P3 not always
        return [colorspaceName isEqualToString:(__bridge NSString *)kCGColorSpaceDeviceRGB]
            || [colorspaceName isEqualToString:(__bridge NSString *)kCGColorSpaceSRGB];
    } else {
        // Fallback on earlier versions
        return YES;
    }
}
+ (BOOL)CGImageContainsAlpha:(CGImageRef)cgImage {
if (!cgImage) {
return NO;
@ -307,16 +413,8 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
BOOL hasAlpha = [self CGImageContainsAlpha:cgImage];
// kCGImageAlphaNone is not supported in CGBitmapContextCreate.
// Check #3330 for more detail about why this bitmap is choosen.
CGBitmapInfo bitmapInfo;
if (hasAlpha) {
// iPhone GPU prefer to use BGRA8888, see: https://forums.raywenderlich.com/t/why-mtlpixelformat-bgra8unorm/53489
// BGRA8888
bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst;
} else {
// BGR888 previously works on iOS 8~iOS 14, however, iOS 15+ will result a black image. FB9958017
// RGB888
bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipLast;
}
// From v5.17.0, use runtime detection of bitmap info instead of hardcode.
CGBitmapInfo bitmapInfo = [SDImageCoderHelper preferredPixelFormat:hasAlpha].bitmapInfo;
CGContextRef context = CGBitmapContextCreate(NULL, newWidth, newHeight, 8, 0, [self colorSpaceGetDeviceRGB], bitmapInfo);
if (!context) {
return NULL;
@ -351,16 +449,8 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
BOOL hasAlpha = [self CGImageContainsAlpha:cgImage];
// kCGImageAlphaNone is not supported in CGBitmapContextCreate.
// Check #3330 for more detail about why this bitmap is choosen.
CGBitmapInfo bitmapInfo;
if (hasAlpha) {
// iPhone GPU prefer to use BGRA8888, see: https://forums.raywenderlich.com/t/why-mtlpixelformat-bgra8unorm/53489
// BGRA8888
bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst;
} else {
// BGR888 previously works on iOS 8~iOS 14, however, iOS 15+ will result a black image. FB9958017
// RGB888
bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipLast;
}
// From v5.17.0, use runtime detection of bitmap info instead of hardcode.
CGBitmapInfo bitmapInfo = [SDImageCoderHelper preferredPixelFormat:hasAlpha].bitmapInfo;
vImage_CGImageFormat format = (vImage_CGImageFormat) {
.bitsPerComponent = 8,
.bitsPerPixel = 32,
@ -442,7 +532,11 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
}
+ (UIImage *)decodedImageWithImage:(UIImage *)image {
if (![self shouldDecodeImage:image]) {
return [self decodedImageWithImage:image policy:SDImageForceDecodePolicyAutomatic];
}
+ (UIImage *)decodedImageWithImage:(UIImage *)image policy:(SDImageForceDecodePolicy)policy {
if (![self shouldDecodeImage:image policy:policy]) {
return image;
}
@ -499,7 +593,11 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
}
+ (UIImage *)decodedAndScaledDownImageWithImage:(UIImage *)image limitBytes:(NSUInteger)bytes {
if (![self shouldDecodeImage:image]) {
return [self decodedAndScaledDownImageWithImage:image limitBytes:bytes policy:SDImageForceDecodePolicyAutomatic];
}
+ (UIImage *)decodedAndScaledDownImageWithImage:(UIImage *)image limitBytes:(NSUInteger)bytes policy:(SDImageForceDecodePolicy)policy {
if (![self shouldDecodeImage:image policy:policy]) {
return image;
}
@ -563,16 +661,8 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
// kCGImageAlphaNone is not supported in CGBitmapContextCreate.
// Check #3330 for more detail about why this bitmap is choosen.
CGBitmapInfo bitmapInfo;
if (hasAlpha) {
// iPhone GPU prefer to use BGRA8888, see: https://forums.raywenderlich.com/t/why-mtlpixelformat-bgra8unorm/53489
// BGRA8888
bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst;
} else {
// BGR888 previously works on iOS 8~iOS 14, however, iOS 15+ will result a black image. FB9958017
// RGB888
bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipLast;
}
// From v5.17.0, use runtime detection of bitmap info instead of hardcode.
CGBitmapInfo bitmapInfo = [SDImageCoderHelper preferredPixelFormat:hasAlpha].bitmapInfo;
CGContextRef destContext = CGBitmapContextCreate(NULL,
destResolution.width,
destResolution.height,
@ -746,11 +836,15 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
#endif
#pragma mark - Helper Function
+ (BOOL)shouldDecodeImage:(nullable UIImage *)image {
+ (BOOL)shouldDecodeImage:(nullable UIImage *)image policy:(SDImageForceDecodePolicy)policy {
// Prevent "CGBitmapContextCreateImage: invalid context 0x0" error
if (image == nil) {
return NO;
}
// Check policy (never)
if (policy == SDImageForceDecodePolicyNever) {
return NO;
}
// Avoid extra decode
if (image.sd_isDecoded) {
return NO;
@ -763,7 +857,25 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
if (image.sd_isVector) {
return NO;
}
// Check policy (always)
if (policy == SDImageForceDecodePolicyAlways) {
return YES;
} else {
// Check policy (automatic)
CGImageRef cgImage = image.CGImage;
if (cgImage) {
CFStringRef uttype = CGImageGetUTType(cgImage);
if (uttype) {
// Only ImageIO can set `com.apple.ImageIO.imageSourceTypeIdentifier`
return YES;
} else {
// Now, let's check if the CGImage is hardware supported (not byte-aligned will cause extra copy)
BOOL isSupported = [SDImageCoderHelper CGImageIsHardwareSupported:cgImage];
return !isSupported;
}
}
}
return YES;
}

View File

@ -32,14 +32,13 @@ static CGContextRef SDCGContextCreateBitmapContext(CGSize size, BOOL opaque, CGF
CGColorSpaceRef space = [SDImageCoderHelper colorSpaceGetDeviceRGB];
// kCGImageAlphaNone is not supported in CGBitmapContextCreate.
// Check #3330 for more detail about why this bitmap is choosen.
// From v5.17.0, use runtime detection of bitmap info instead of hardcode.
// However, macOS's runtime detection will also call this function, cause recursive, so still hardcode here
CGBitmapInfo bitmapInfo;
if (!opaque) {
// iPhone GPU prefer to use BGRA8888, see: https://forums.raywenderlich.com/t/why-mtlpixelformat-bgra8unorm/53489
// BGRA8888
bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst;
// [NSImage imageWithSize:flipped:drawingHandler:] returns float(16-bits) RGBA8888 on alpha image, which we don't need
bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast;
} else {
// BGR888 previously works on iOS 8~iOS 14, however, iOS 15+ will result a black image. FB9958017
// RGB888
bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipLast;
}
CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, space, bitmapInfo);

View File

@ -74,15 +74,19 @@ UIImage * _Nullable SDImageLoaderDecodeImageData(NSData * _Nonnull imageData, NS
image = [imageCoder decodedImageWithData:imageData options:coderOptions];
}
if (image) {
BOOL shouldDecode = !SD_OPTIONS_CONTAINS(options, SDWebImageAvoidDecodeImage);
BOOL lazyDecode = [coderOptions[SDImageCoderDecodeUseLazyDecoding] boolValue];
if (lazyDecode) {
// lazyDecode = NO means we should not forceDecode, highest priority
shouldDecode = NO;
SDImageForceDecodePolicy policy = SDImageForceDecodePolicyAutomatic;
NSNumber *polivyValue = context[SDWebImageContextImageForceDecodePolicy];
if (polivyValue != nil) {
policy = polivyValue.unsignedIntegerValue;
}
if (shouldDecode) {
image = [SDImageCoderHelper decodedImageWithImage:image];
// TODO: Deprecated, remove in SD 6.0...
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if (SD_OPTIONS_CONTAINS(options, SDWebImageAvoidDecodeImage)) {
policy = SDImageForceDecodePolicyNever;
}
#pragma clang diagnostic pop
image = [SDImageCoderHelper decodedImageWithImage:image policy:policy];
// assign the decode options, to let manager check whether to re-decode if needed
image.sd_decodeOptions = coderOptions;
}
@ -151,15 +155,19 @@ UIImage * _Nullable SDImageLoaderDecodeProgressiveImageData(NSData * _Nonnull im
image = [progressiveCoder incrementalDecodedImageWithOptions:coderOptions];
}
if (image) {
BOOL shouldDecode = !SD_OPTIONS_CONTAINS(options, SDWebImageAvoidDecodeImage);
BOOL lazyDecode = [coderOptions[SDImageCoderDecodeUseLazyDecoding] boolValue];
if (lazyDecode) {
// lazyDecode = NO means we should not forceDecode, highest priority
shouldDecode = NO;
SDImageForceDecodePolicy policy = SDImageForceDecodePolicyAutomatic;
NSNumber *polivyValue = context[SDWebImageContextImageForceDecodePolicy];
if (polivyValue != nil) {
policy = polivyValue.unsignedIntegerValue;
}
if (shouldDecode) {
image = [SDImageCoderHelper decodedImageWithImage:image];
// TODO: Deprecated, remove in SD 6.0...
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if (SD_OPTIONS_CONTAINS(options, SDWebImageAvoidDecodeImage)) {
policy = SDImageForceDecodePolicyNever;
}
#pragma clang diagnostic pop
image = [SDImageCoderHelper decodedImageWithImage:image policy:policy];
// assign the decode options, to let manager check whether to re-decode if needed
image.sd_decodeOptions = coderOptions;
// mark the image as progressive (completed one are not mark as progressive)

View File

@ -172,8 +172,9 @@ typedef NS_OPTIONS(NSUInteger, SDWebImageOptions) {
* By default, we will decode the image in the background during cache query and download from the network. This can help to improve performance because when rendering an image on the screen, it needs to be decoded first. But this happens on the main queue, driven by Core Animation.
* However, this process may increase the memory usage as well. If you are experiencing an issue due to excessive memory consumption, this flag can prevent decoding the image.
* @note 5.14.0 introduce `SDImageCoderDecodeUseLazyDecoding`, use that for better control from codec, instead of post-processing. Which acts the similar like this option but works for SDAnimatedImage as well (this one does not)
* @deprecated Deprecated in v5.17.0, if you don't want force-decode, pass [.imageForceDecodePolicy] = [SDImageForceDecodePolicy.never] in context option
*/
SDWebImageAvoidDecodeImage = 1 << 18,
SDWebImageAvoidDecodeImage API_DEPRECATED("Use SDWebImageContextImageForceDecodePolicy instead", macos(10.10, 10.10), ios(8.0, 8.0), tvos(9.0, 9.0), watchos(2.0, 2.0)) = 1 << 18,
/**
* By default, we decode the animated image. This flag can force decode the first frame only and produce the static image.
@ -256,6 +257,15 @@ FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextImageC
*/
FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextImageTransformer;
#pragma mark - Force Decode Options
/**
An NSNumber instance which stores the `SDImageForceDecodePolicy` enum. This is used to control how the current image loading should force-decode the decoded image (a CGImage, typically). See what force-decode means in the `SDImageForceDecodePolicy` comment.
Defaults to `SDImageForceDecodePolicyAutomatic`, which will detect the input CGImage's metadata, and only force-decode if the input CGImage can not be rendered directly on screen (which needs an extra Core Animation copied image and increases RAM usage).
@note If you always want to force-decode for this image request, pass `SDImageForceDecodePolicyAlways` — for example, for some WebP images which were not created by ImageIO.
*/
FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextImageForceDecodePolicy;
#pragma mark - Image Decoder Context Options
/**

View File

@ -151,6 +151,7 @@ SDWebImageContextOption const SDWebImageContextImageCache = @"imageCache";
SDWebImageContextOption const SDWebImageContextImageLoader = @"imageLoader";
SDWebImageContextOption const SDWebImageContextImageCoder = @"imageCoder";
SDWebImageContextOption const SDWebImageContextImageTransformer = @"imageTransformer";
SDWebImageContextOption const SDWebImageContextImageForceDecodePolicy = @"imageForceDecodePolicy";
SDWebImageContextOption const SDWebImageContextImageDecodeOptions = @"imageDecodeOptions";
SDWebImageContextOption const SDWebImageContextImageScaleFactor = @"imageScaleFactor";
SDWebImageContextOption const SDWebImageContextImagePreserveAspectRatio = @"imagePreserveAspectRatio";

View File

@ -74,8 +74,10 @@ typedef NS_OPTIONS(NSUInteger, SDWebImageDownloaderOptions) {
/**
* By default, we will decode the image in the background during cache query and download from the network. This can help to improve performance because when rendering an image on the screen, it needs to be decoded first. But this happens on the main queue, driven by Core Animation.
* However, this process may increase the memory usage as well. If you are experiencing an issue due to excessive memory consumption, this flag can prevent decoding the image.
* @note 5.14.0 introduce `SDImageCoderDecodeUseLazyDecoding`, use that for better control from codec, instead of post-processing. Which acts the similar like this option but works for SDAnimatedImage as well (this one does not)
* @deprecated Deprecated in v5.17.0, if you don't want force-decode, pass [.imageForceDecodePolicy] = [SDImageForceDecodePolicy.never] in context option
*/
SDWebImageDownloaderAvoidDecodeImage = 1 << 9,
SDWebImageDownloaderAvoidDecodeImage API_DEPRECATED("Use SDWebImageContextImageForceDecodePolicy instead", macos(10.10, 10.10), ios(8.0, 8.0), tvos(9.0, 9.0), watchos(2.0, 2.0)) = 1 << 9,
/**
* By default, we decode the animated image. This flag can force decode the first frame only and produce the static image.

View File

@ -288,6 +288,8 @@ void SDWebImageDownloaderOperationSetCompleted(id<SDWebImageDownloaderOperation>
}
#pragma mark Helper methods
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+ (SDWebImageOptions)imageOptionsFromDownloaderOptions:(SDWebImageDownloaderOptions)downloadOptions {
SDWebImageOptions options = 0;
if (downloadOptions & SDWebImageDownloaderScaleDownLargeImages) options |= SDWebImageScaleDownLargeImages;
@ -298,6 +300,7 @@ void SDWebImageDownloaderOperationSetCompleted(id<SDWebImageDownloaderOperation>
return options;
}
#pragma clang diagnostic pop
- (nullable NSOperation<SDWebImageDownloaderOperation> *)createDownloaderOperationWithUrl:(nonnull NSURL *)url
options:(SDWebImageDownloaderOptions)options
@ -625,6 +628,8 @@ didReceiveResponse:(NSURLResponse *)response
return YES;
}
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
- (id<SDWebImageOperation>)requestImageWithURL:(NSURL *)url options:(SDWebImageOptions)options context:(SDWebImageContext *)context progress:(SDImageLoaderProgressBlock)progressBlock completed:(SDImageLoaderCompletedBlock)completedBlock {
UIImage *cachedImage = context[SDWebImageContextLoaderCachedImage];
@ -651,6 +656,7 @@ didReceiveResponse:(NSURLResponse *)response
return [self downloadImageWithURL:url options:downloaderOptions context:context progress:progressBlock completed:completedBlock];
}
#pragma clang diagnostic pop
- (BOOL)shouldBlockFailedURLWithURL:(NSURL *)url error:(NSError *)error {
return [self shouldBlockFailedURLWithURL:url error:error options:0 context:nil];

View File

@ -671,6 +671,8 @@ didReceiveResponse:(NSURLResponse *)response
}
#pragma mark Helper methods
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+ (SDWebImageOptions)imageOptionsFromDownloaderOptions:(SDWebImageDownloaderOptions)downloadOptions {
SDWebImageOptions options = 0;
if (downloadOptions & SDWebImageDownloaderScaleDownLargeImages) options |= SDWebImageScaleDownLargeImages;
@ -681,6 +683,7 @@ didReceiveResponse:(NSURLResponse *)response
return options;
}
#pragma clang diagnostic pop
- (BOOL)shouldContinueWhenAppEntersBackground {
return SD_OPTIONS_CONTAINS(self.options, SDWebImageDownloaderContinueInBackground);

View File

@ -15,6 +15,12 @@
/**
A bool value indicating whether the image has already been decoded. This can help to avoid extra force decode.
Force decode is used for 2 cases:
-- 1. for an ImageIO-created image (via the `CGImageCreateWithImageSource` SPI), decoding is lazy, so we trigger the decode before rendering
-- 2. for a non-ImageIO-created image (via the `CGImageCreate` API), we can ensure its alignment is suitable to render on screen without a copy by Core Animation
@note For coder plugin developer, always use the SDImageCoderHelper's `colorSpaceGetDeviceRGB`/`preferredPixelFormat` to create CGImage.
@note For more information why force decode, see: https://github.com/path/FastImageCache#byte-alignment
@note From v5.17.0, the default value is always NO. Use `SDImageForceDecodePolicy` to control complicated policy.
*/
@property (nonatomic, assign) BOOL sd_isDecoded;

View File

@ -15,24 +15,7 @@
- (BOOL)sd_isDecoded {
NSNumber *value = objc_getAssociatedObject(self, @selector(sd_isDecoded));
if (value != nil) {
return value.boolValue;
} else {
// Assume only CGImage based can use lazy decoding
CGImageRef cgImage = self.CGImage;
if (cgImage) {
CFStringRef uttype = CGImageGetUTType(self.CGImage);
if (uttype) {
// Only ImageIO can set `com.apple.ImageIO.imageSourceTypeIdentifier`
return NO;
} else {
// Thumbnail or CGBitmapContext drawn image
return YES;
}
}
}
// Assume others as non-decoded
return NO;
return [value boolValue];
}
- (void)setSd_isDecoded:(BOOL)sd_isDecoded {

View File

@ -57,7 +57,7 @@ static inline CGRect SDCGRectFitWithScaleMode(CGRect rect, CGSize size, SDImageS
return rect;
}
static inline UIColor * SDGetColorFromGrayscale(Pixel_88 pixel, CGBitmapInfo bitmapInfo) {
static inline UIColor * SDGetColorFromGrayscale(Pixel_88 pixel, CGBitmapInfo bitmapInfo, CGColorSpaceRef cgColorSpace) {
// Get alpha info, byteOrder info
CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;
CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;
@ -135,11 +135,18 @@ static inline UIColor * SDGetColorFromGrayscale(Pixel_88 pixel, CGBitmapInfo bit
default:
break;
}
#if SD_MAC
// Mac supports ColorSync; to ensure consistent behavior, we convert the color to the generic gamma 2.2 grayscale color space
NSColorSpace *colorSpace = [[NSColorSpace alloc] initWithCGColorSpace:cgColorSpace];
CGFloat components[2] = {w, a};
NSColor *color = [NSColor colorWithColorSpace:colorSpace components:components count:2];
return [color colorUsingColorSpace:NSColorSpace.genericGamma22GrayColorSpace];
#else
return [UIColor colorWithWhite:w alpha:a];
#endif
}
static inline UIColor * SDGetColorFromRGBA(Pixel_8888 pixel, CGBitmapInfo bitmapInfo) {
static inline UIColor * SDGetColorFromRGBA(Pixel_8888 pixel, CGBitmapInfo bitmapInfo, CGColorSpaceRef cgColorSpace) {
// Get alpha info, byteOrder info
CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;
CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;
@ -150,8 +157,10 @@ static inline UIColor * SDGetColorFromRGBA(Pixel_8888 pixel, CGBitmapInfo bitmap
case kCGBitmapByteOrderDefault: {
byteOrderNormal = YES;
} break;
case kCGBitmapByteOrder16Little:
case kCGBitmapByteOrder32Little: {
} break;
case kCGBitmapByteOrder16Big:
case kCGBitmapByteOrder32Big: {
byteOrderNormal = YES;
} break;
@ -242,8 +251,15 @@ static inline UIColor * SDGetColorFromRGBA(Pixel_8888 pixel, CGBitmapInfo bitmap
default:
break;
}
#if SD_MAC
// Mac supports ColorSync; to ensure consistent behavior, we convert the color to sRGB
NSColorSpace *colorSpace = [[NSColorSpace alloc] initWithCGColorSpace:cgColorSpace];
CGFloat components[4] = {r, g, b, a};
NSColor *color = [NSColor colorWithColorSpace:colorSpace components:components count:4];
return [color colorUsingColorSpace:NSColorSpace.sRGBColorSpace];
#else
return [UIColor colorWithRed:r green:g blue:b alpha:a];
#endif
}
#if SD_UIKIT || SD_MAC
@ -558,6 +574,9 @@ static inline CGImageRef _Nullable SDCreateCGImageFromCIImage(CIImage * _Nonnull
CGImageRelease(imageRef);
return nil;
}
// Get color space for transform
CGColorSpaceRef colorSpace = CGImageGetColorSpace(imageRef);
// greyscale
if (components == 2) {
Pixel_88 pixel = {0};
@ -565,7 +584,7 @@ static inline CGImageRef _Nullable SDCreateCGImageFromCIImage(CIImage * _Nonnull
CFRelease(data);
CGImageRelease(imageRef);
// Convert to color
return SDGetColorFromGrayscale(pixel, bitmapInfo);
return SDGetColorFromGrayscale(pixel, bitmapInfo, colorSpace);
} else if (components == 3 || components == 4) {
// RGB/RGBA
Pixel_8888 pixel = {0};
@ -573,7 +592,7 @@ static inline CGImageRef _Nullable SDCreateCGImageFromCIImage(CIImage * _Nonnull
CFRelease(data);
CGImageRelease(imageRef);
// Convert to color
return SDGetColorFromRGBA(pixel, bitmapInfo);
return SDGetColorFromRGBA(pixel, bitmapInfo, colorSpace);
} else {
NSLog(@"Unsupported components: %zu", components);
CFRelease(data);
@ -637,6 +656,8 @@ static inline CGImageRef _Nullable SDCreateCGImageFromCIImage(CIImage * _Nonnull
// Convert to color
CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
NSMutableArray<UIColor *> *colors = [NSMutableArray arrayWithCapacity:CGRectGetWidth(rect) * CGRectGetHeight(rect)];
// ColorSpace
CGColorSpaceRef colorSpace = CGImageGetColorSpace(imageRef);
for (size_t index = start; index < end; index += components) {
if (index >= row * bytesPerRow + col * components) {
// Index beyond the end of current row, go next row
@ -648,14 +669,14 @@ static inline CGImageRef _Nullable SDCreateCGImageFromCIImage(CIImage * _Nonnull
UIColor *color;
if (components == 2) {
Pixel_88 pixel = {pixels[index], pixel[index+1]};
color = SDGetColorFromGrayscale(pixel, bitmapInfo);
color = SDGetColorFromGrayscale(pixel, bitmapInfo, colorSpace);
} else {
if (components == 3) {
Pixel_8888 pixel = {pixels[index], pixels[index+1], pixels[index+2], 0};
color = SDGetColorFromRGBA(pixel, bitmapInfo);
color = SDGetColorFromRGBA(pixel, bitmapInfo, colorSpace);
} else if (components == 4) {
Pixel_8888 pixel = {pixels[index], pixels[index+1], pixels[index+2], pixels[index+3]};
color = SDGetColorFromRGBA(pixel, bitmapInfo);
color = SDGetColorFromRGBA(pixel, bitmapInfo, colorSpace);
} else {
NSLog(@"Unsupported components: %zu", components);
}

View File

@ -8,6 +8,7 @@
#import "SDDeviceHelper.h"
#import <mach/mach.h>
#import <sys/sysctl.h>
@implementation SDDeviceHelper

View File

@ -448,6 +448,86 @@
expect(data2).notTo.beNil();
}
/// Verifies that a CGImage whose row stride matches the Core-Animation-preferred
/// byte alignment is reported as hardware-renderable, and that the automatic
/// force-decode policy therefore returns the input image object untouched
/// (no frame-buffer copy).
- (void)test28ThatNotTriggerCACopyImage {
    // Build a 10x8 RGBA8888 bitmap whose rows are padded to the preferred alignment.
    const size_t pixelWidth = 10;
    const size_t pixelHeight = 8;
    const size_t bitsPerComponent = 8;
    const size_t componentCount = 4;
    const size_t bitsPerPixel = bitsPerComponent * componentCount;
    const size_t alignedBytesPerRow = SDByteAlign(bitsPerPixel / 8 * pixelWidth, [SDImageCoderHelper preferredPixelFormat:YES].alignment);
    const size_t bufferLength = alignedBytesPerRow * pixelHeight;
    uint8_t pixelBuffer[bufferLength];
    // Fill every byte with 0xFF (opaque white).
    for (size_t offset = 0; offset < bufferLength; offset++) {
        pixelBuffer[offset] = 255;
    }
    CGColorSpaceRef deviceRGB = [SDImageCoderHelper colorSpaceGetDeviceRGB];
    CGBitmapInfo preferredBitmapInfo = [SDImageCoderHelper preferredPixelFormat:YES].bitmapInfo;
    CFDataRef bitmapData = CFDataCreate(NULL, pixelBuffer, bufferLength);
    CGDataProviderRef dataProvider = CGDataProviderCreateWithCFData(bitmapData);
    CFRelease(bitmapData);
    CGImageRef cgImage = CGImageCreate(pixelWidth, pixelHeight, bitsPerComponent, bitsPerPixel, alignedBytesPerRow, deviceRGB, preferredBitmapInfo, dataProvider, NULL, YES, kCGRenderingIntentDefault);
    CGDataProviderRelease(dataProvider);
    XCTAssert(cgImage);
    // Rows are 32-byte aligned, so CA hardware can render this buffer directly.
    XCTAssertTrue([SDImageCoderHelper CGImageIsHardwareSupported:cgImage]);
    // Now run the automatic force-decode policy and confirm it is a no-op.
#if SD_MAC
    UIImage *image = [[UIImage alloc] initWithCGImage:cgImage scale:1 orientation:kCGImagePropertyOrientationUp];
#else
    UIImage *image = [[UIImage alloc] initWithCGImage:cgImage scale:1 orientation:UIImageOrientationUp];
#endif
    CGImageRelease(cgImage);
    UIImage *decodedImage = [SDImageCoderHelper decodedImageWithImage:image policy:SDImageForceDecodePolicyAutomatic];
    // Pointer comparison on purpose: a hardware-supported image must be returned as-is.
    XCTAssertTrue(image == decodedImage);
    XCTAssertTrue([SDImageCoderHelper CGImageIsHardwareSupported:decodedImage.CGImage]);
}
/// Verifies that a CGImage whose row stride does NOT match the
/// Core-Animation-preferred byte alignment is reported as unsupported by CA
/// hardware, and that the automatic force-decode policy produces a new,
/// hardware-supported image (a copy is required).
- (void)test28ThatDoTriggerCACopyImage {
    // Build a 10x8 RGBA8888 bitmap with tightly-packed rows (no alignment padding).
    const size_t pixelWidth = 10;
    const size_t pixelHeight = 8;
    const size_t bitsPerComponent = 8;
    const size_t componentCount = 4;
    const size_t bitsPerPixel = bitsPerComponent * componentCount;
    const size_t packedBytesPerRow = bitsPerPixel / 8 * pixelWidth;
    const size_t bufferLength = packedBytesPerRow * pixelHeight;
    uint8_t pixelBuffer[bufferLength];
    // Fill every byte with 0xFF (opaque white).
    for (size_t offset = 0; offset < bufferLength; offset++) {
        pixelBuffer[offset] = 255;
    }
    CGColorSpaceRef deviceRGB = [SDImageCoderHelper colorSpaceGetDeviceRGB];
    CGBitmapInfo preferredBitmapInfo = [SDImageCoderHelper preferredPixelFormat:YES].bitmapInfo;
    CFDataRef bitmapData = CFDataCreate(NULL, pixelBuffer, bufferLength);
    CGDataProviderRef dataProvider = CGDataProviderCreateWithCFData(bitmapData);
    CFRelease(bitmapData);
    CGImageRef cgImage = CGImageCreate(pixelWidth, pixelHeight, bitsPerComponent, bitsPerPixel, packedBytesPerRow, deviceRGB, preferredBitmapInfo, dataProvider, NULL, YES, kCGRenderingIntentDefault);
    CGDataProviderRelease(dataProvider);
    XCTAssert(cgImage);
    // 40-byte rows are not 32-byte aligned, so CA hardware cannot use the buffer directly.
    XCTAssertFalse([SDImageCoderHelper CGImageIsHardwareSupported:cgImage]);
    // The automatic force-decode policy should re-draw into a supported buffer.
#if SD_MAC
    UIImage *image = [[UIImage alloc] initWithCGImage:cgImage scale:1 orientation:kCGImagePropertyOrientationUp];
#else
    UIImage *image = [[UIImage alloc] initWithCGImage:cgImage scale:1 orientation:UIImageOrientationUp];
#endif
    CGImageRelease(cgImage);
    UIImage *decodedImage = [SDImageCoderHelper decodedImageWithImage:image policy:SDImageForceDecodePolicyAutomatic];
    // Pointer comparison on purpose: an unsupported image must come back as a new object.
    XCTAssertFalse(image == decodedImage);
    XCTAssertTrue([SDImageCoderHelper CGImageIsHardwareSupported:decodedImage.CGImage]);
}
#pragma mark - Utils
- (void)verifyCoder:(id<SDImageCoder>)coder

View File

@ -124,12 +124,15 @@
CGSize size = CGSizeMake(100, 100);
SDGraphicsImageRenderer *renderer = [[SDGraphicsImageRenderer alloc] initWithSize:size format:format];
UIColor *color = UIColor.redColor;
NSLog(@"Draw Color ColorSpace: %@", color.CGColor);
UIImage *image = [renderer imageWithActions:^(CGContextRef _Nonnull context) {
CGContextSetFillColorWithColor(context, [color CGColor]);
CGContextFillRect(context, CGRectMake(0, 0, size.width, size.height));
}];
expect(image.scale).equal(format.scale);
expect([image sd_colorAtPoint:CGPointMake(50, 50)].sd_hexString).equal(color.sd_hexString);
UIColor *testColor = [image sd_colorAtPoint:CGPointMake(50, 50)];
NSLog(@"Draw Color ColorSpace: %@", testColor.CGColor);
expect(testColor.sd_hexString).equal(color.sd_hexString);
UIColor *grayscaleColor = UIColor.blackColor;
UIImage *grayscaleImage = [renderer imageWithActions:^(CGContextRef _Nonnull context) {

View File

@ -10,6 +10,7 @@
#import "SDWebImageTestTransformer.h"
#import "SDWebImageTestCache.h"
#import "SDWebImageTestLoader.h"
#import <SDWebImageWebPCoder/SDWebImageWebPCoder.h>
// Keep strong references for object
@interface SDObjectContainer<ObjectType> : NSObject
@ -297,8 +298,7 @@
NSUInteger defaultLimitBytes = SDImageCoderHelper.defaultScaleDownLimitBytes;
SDImageCoderHelper.defaultScaleDownLimitBytes = 1000 * 1000 * 4; // Limit 1000x1000 pixel
// From v5.5.0, the `SDWebImageScaleDownLargeImages` translate to `SDWebImageContextImageThumbnailPixelSize`, and works for progressive loading
[SDImageCache.sharedImageCache removeImageFromDiskForKey:originalImageURL.absoluteString];
[SDWebImageManager.sharedManager loadImageWithURL:originalImageURL options:SDWebImageScaleDownLargeImages | SDWebImageProgressiveLoad progress:nil completed:^(UIImage * _Nullable image, NSData * _Nullable data, NSError * _Nullable error, SDImageCacheType cacheType, BOOL finished, NSURL * _Nullable imageURL) {
[SDWebImageManager.sharedManager loadImageWithURL:originalImageURL options:SDWebImageFromLoaderOnly | SDWebImageScaleDownLargeImages | SDWebImageProgressiveLoad progress:nil completed:^(UIImage * _Nullable image, NSData * _Nullable data, NSError * _Nullable error, SDImageCacheType cacheType, BOOL finished, NSURL * _Nullable imageURL) {
expect(image).notTo.beNil();
expect(image.size).equal(CGSizeMake(1000, 1000));
if (finished) {
@ -309,7 +309,7 @@
}
}];
[self waitForExpectationsWithCommonTimeoutUsingHandler:^(NSError * _Nullable error) {
[self waitForExpectationsWithTimeout:100 handler:^(NSError * _Nullable error) {
SDImageCoderHelper.defaultScaleDownLimitBytes = defaultLimitBytes;
}];
}
@ -317,7 +317,7 @@
- (void)test13ThatScaleDownLargeImageEXIFOrientationImage {
XCTestExpectation *expectation = [self expectationWithDescription:@"SDWebImageScaleDownLargeImages works on EXIF orientation image"];
NSURL *originalImageURL = [NSURL URLWithString:@"https://raw.githubusercontent.com/recurser/exif-orientation-examples/master/Landscape_2.jpg"];
[SDWebImageManager.sharedManager loadImageWithURL:originalImageURL options:SDWebImageScaleDownLargeImages | SDWebImageAvoidDecodeImage progress:nil completed:^(UIImage * _Nullable image, NSData * _Nullable data, NSError * _Nullable error, SDImageCacheType cacheType, BOOL finished, NSURL * _Nullable imageURL) {
[SDWebImageManager.sharedManager loadImageWithURL:originalImageURL options:SDWebImageScaleDownLargeImages progress:nil completed:^(UIImage * _Nullable image, NSData * _Nullable data, NSError * _Nullable error, SDImageCacheType cacheType, BOOL finished, NSURL * _Nullable imageURL) {
expect(image).notTo.beNil();
#if SD_UIKIT
UIImageOrientation orientation = [SDImageCoderHelper imageOrientationFromEXIFOrientation:kCGImagePropertyOrientationUpMirrored];
@ -620,6 +620,39 @@
[self waitForExpectationsWithCommonTimeout];
}
/// Automatic policy: an image decoded via the CoreGraphics solution from an
/// ICC-profile-tagged JPEG should be force-decoded into the device RGB color space.
/// The global `defaultDecodeSolution` is temporarily overridden for this test and
/// restored in the wait handler, so the override cannot leak into other tests
/// even if the load fails or the expectation times out (the original restored it
/// inside the completion block, which never runs on timeout).
- (void)test22ThatForceDecodePolicyAutomatic {
    XCTestExpectation *expectation = [self expectationWithDescription:@"Automatic policy with ICC profile colorspace image should force-decode"];
    NSURL *url = [NSURL URLWithString:@"http://photodb.illusdolphin.net/media/15292/browsertest.jpg"];
    SDImageCoderHelper.defaultDecodeSolution = SDImageCoderDecodeSolutionCoreGraphics; // Temp set, reverted in the wait handler below
    [SDWebImageManager.sharedManager loadImageWithURL:url options:SDWebImageFromLoaderOnly context:@{SDWebImageContextImageForceDecodePolicy : @(SDImageForceDecodePolicyAutomatic)} progress:nil completed:^(UIImage * _Nullable image, NSData * _Nullable data, NSError * _Nullable error, SDImageCacheType cacheType, BOOL finished, NSURL * _Nullable imageURL) {
        expect(image).notTo.beNil();
        expect(image.sd_isDecoded).beTruthy();
        CGImageRef cgImage = image.CGImage;
        // Force-decode must have redrawn the image into device RGB.
        CGColorSpaceRef colorspace = CGImageGetColorSpace(cgImage);
        expect(colorspace).equal([SDImageCoderHelper colorSpaceGetDeviceRGB]);
        [expectation fulfill];
    }];
    [self waitForExpectationsWithCommonTimeoutUsingHandler:^(NSError * _Nullable error) {
        // Revert the global decode solution unconditionally — success, failure, or timeout.
        SDImageCoderHelper.defaultDecodeSolution = SDImageCoderDecodeSolutionAutomatic;
    }];
}
/// Always policy: a WebP image decoded by libwebp (a non-ImageIO coder) should be
/// force-decoded, ending up marked as decoded and backed by the device RGB color space.
- (void)test22ThatForceDecodePolicyAlways {
    XCTestExpectation *expectation = [self expectationWithDescription:@"Always policy with WebP image (libwebp) should force-decode"];
    NSURL *webpURL = [NSURL URLWithString:@"https://www.gstatic.com/webp/gallery/4.webp"];
    // Pin the WebP coder and the Always policy through the context options.
    SDWebImageContext *context = @{SDWebImageContextImageCoder : SDImageWebPCoder.sharedCoder, SDWebImageContextImageForceDecodePolicy : @(SDImageForceDecodePolicyAlways)};
    [SDWebImageManager.sharedManager loadImageWithURL:webpURL options:SDWebImageFromLoaderOnly context:context progress:nil completed:^(UIImage * _Nullable image, NSData * _Nullable data, NSError * _Nullable error, SDImageCacheType cacheType, BOOL finished, NSURL * _Nullable imageURL) {
        expect(image).notTo.beNil();
        expect(image.sd_isDecoded).beTruthy();
        // Force-decode must have redrawn the image into device RGB.
        CGColorSpaceRef decodedColorSpace = CGImageGetColorSpace(image.CGImage);
        expect(decodedColorSpace).equal([SDImageCoderHelper colorSpaceGetDeviceRGB]);
        [expectation fulfill];
    }];
    [self waitForExpectationsWithCommonTimeout];
}
- (NSString *)testJPEGPath {
NSBundle *testBundle = [NSBundle bundleForClass:[self class]];
return [testBundle pathForResource:@"TestImage" ofType:@"jpg"];