Merge pull request #3067 from dreampiggy/bugfix_scale_down_too_small_pixels

Add support for passing a small byte limit to `decodedAndScaledDownLargeImage`: the image is now always scaled down (to at least a 1x1 pixel) rather than returned at its original size
DreamPiggy 2020-08-24 14:49:04 +08:00 committed by GitHub
commit 35578f0524
3 changed files with 21 additions and 12 deletions
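
For context, here is a minimal usage sketch of the changed behavior, based on the `sd_decodedAndScaledDownImageWithImage:limitBytes:` UIImage category method exercised in the tests below (the file path is hypothetical):

    // Sketch only: a tiny limit now yields a scaled-down image (clamped to
    // at least 1x1 pixel) instead of falling back to the original size.
    UIImage *image = [UIImage imageWithContentsOfFile:@"/tmp/large.jpg"]; // hypothetical path
    UIImage *tiny = [UIImage sd_decodedAndScaledDownImageWithImage:image limitBytes:1];
    // tiny.size is 1x1: the 1-byte limit is clamped to one 4-byte pixel.
    UIImage *capped = [UIImage sd_decodedAndScaledDownImageWithImage:image limitBytes:0];
    // 0 means "use the built-in default limit" (60MB on iOS/tvOS), which never scales up.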

View File

@@ -91,7 +91,8 @@
+ (UIImage * _Nullable)decodedImageWithImage:(UIImage * _Nullable)image;
/**
Return the decoded and possibly scaled-down image for the provided image. If the image is larger than the limit size, it will be scaled down; otherwise this behaves like `decodedImageWithImage:`.
Return the decoded and possibly scaled-down image for the provided image. If the image's pixel byte size is larger than the limit bytes, it will be scaled down; otherwise this behaves like `decodedImageWithImage:` and never scales up.
@warning You should not pass too small a byte limit; the suggested value is larger than 1MB. Even though tile decoding is used to avoid OOM, a small limit consumes much more CPU time because more iterations are needed to draw each tile.
@param image The image to be decoded and scaled down
@param bytes The limit bytes size. Provide 0 to use the built-in limit.
@@ -101,7 +102,7 @@
/**
Control the default limit bytes used to scale down large images.
This value must be larger than or equal to 1MB. Defaults to 60MB on iOS/tvOS, 90MB on macOS, 30MB on watchOS.
This value must be at least 4 bytes (one 1x1 pixel). Defaults to 60MB on iOS/tvOS, 90MB on macOS, 30MB on watchOS.
*/
@property (class, readwrite) NSUInteger defaultScaleDownLimitBytes;
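
A hedged example of tuning this limit (assuming the declaring class is SDWebImage's `SDImageCoderHelper`, which this excerpt does not name):

    // Lower the global scale-down threshold to 32MB.
    SDImageCoderHelper.defaultScaleDownLimitBytes = 32 * 1024 * 1024;
    // Per the setter changed below, values under 4 bytes (one 1x1 pixel) are silently ignored.
    SDImageCoderHelper.defaultScaleDownLimitBytes = 2; // no effect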

View File

@@ -25,7 +25,6 @@ static const size_t kBytesPerPixel = 4;
static const size_t kBitsPerComponent = 8;
static const CGFloat kBytesPerMB = 1024.0f * 1024.0f;
static const CGFloat kPixelsPerMB = kBytesPerMB / kBytesPerPixel;
/*
* Defines the maximum size in MB of the decoded image when the flag `SDWebImageScaleDownLargeImages` is set
* Suggested value for iPad1 and iPhone 3GS: 60.
@@ -379,8 +378,8 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
// see kDestImageSizeMB, and how it relates to destTotalPixels.
CGFloat imageScale = sqrt(destTotalPixels / sourceTotalPixels);
CGSize destResolution = CGSizeZero;
destResolution.width = (int)(sourceResolution.width * imageScale);
destResolution.height = (int)(sourceResolution.height * imageScale);
destResolution.width = MAX(1, (int)(sourceResolution.width * imageScale));
destResolution.height = MAX(1, (int)(sourceResolution.height * imageScale));
// device color space
CGColorSpaceRef colorspaceRef = [self colorSpaceGetDeviceRGB];
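
To make the new clamp concrete, a rough walkthrough with invented numbers (a 4000x3000 source and a 1-byte limit); the variable names follow the diff, but this fragment is illustrative only:

    CGFloat sourceTotalPixels = 4000.0 * 3000.0;                    // 12,000,000 pixels
    CGFloat destTotalPixels = 1.0;                                  // 1 byte is clamped to one 4-byte pixel
    CGFloat imageScale = sqrt(destTotalPixels / sourceTotalPixels); // ~0.000289
    CGSize destResolution = CGSizeZero;
    destResolution.width = MAX(1, (int)(4000 * imageScale));        // 1.15 truncates to 1
    destResolution.height = MAX(1, (int)(3000 * imageScale));       // 0.87 truncates to 0, clamped to 1
    // Without the MAX(1, ...) floor the height truncates to 0 and the
    // destination bitmap has no pixels to draw into.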
@@ -419,7 +418,7 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
// The source tile height is dynamic. Since we specified the size
// of the source tile in MB, see how many rows of pixels high it
// can be given the input image width.
sourceTile.size.height = (int)(tileTotalPixels / sourceTile.size.width );
sourceTile.size.height = MAX(1, (int)(tileTotalPixels / sourceTile.size.width));
sourceTile.origin.x = 0.0f;
// The output tile is the same proportions as the input tile, but
// scaled to image scale.
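
The same one-pixel floor protects the tile loop; a hypothetical extreme (numbers invented for this sketch):

    // With a tile budget of 5,242,880 pixels and a source 6,000,000 pixels wide,
    // the integer division used for the tile height truncates to 0 rows:
    int tileHeight = MAX(1, (int)(5242880.0 / 6000000.0)); // 0.87 -> 0, clamped to 1
    // A zero-height source tile would make the tiled drawing loop copy nothing;
    // the floor guarantees each pass covers at least one row of source pixels.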
@@ -485,7 +484,7 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
}
+ (void)setDefaultScaleDownLimitBytes:(NSUInteger)defaultScaleDownLimitBytes {
if (defaultScaleDownLimitBytes < kBytesPerMB) {
if (defaultScaleDownLimitBytes < kBytesPerPixel) {
return;
}
kDestImageLimitBytes = defaultScaleDownLimitBytes;
@@ -596,13 +595,10 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
}
CGFloat destTotalPixels;
if (bytes == 0) {
bytes = kDestImageLimitBytes;
bytes = [self defaultScaleDownLimitBytes];
}
bytes = MAX(bytes, kBytesPerPixel);
destTotalPixels = bytes / kBytesPerPixel;
if (destTotalPixels <= kPixelsPerMB) {
// Too small to scale down
return NO;
}
float imageScale = destTotalPixels / sourceTotalPixels;
if (imageScale < 1) {
shouldScaleDown = YES;
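
Putting the numbers together, a worked example of the reworked scale-down check (not part of the diff; names follow the code above):

    CGFloat sourceTotalPixels = 4000.0 * 3000.0;        // 12,000,000 pixels
    NSUInteger bytes = 0;                               // 0 means "use the default limit"
    if (bytes == 0) { bytes = 60 * 1024 * 1024; }       // 60MB default on iOS/tvOS
    bytes = MAX(bytes, 4);                              // never below one 4-byte pixel
    CGFloat destTotalPixels = bytes / 4.0;              // 15,728,640 pixels (~3965x3965)
    BOOL shouldScaleDown = (destTotalPixels / sourceTotalPixels) < 1; // NO: the source fits the budget

With `bytes = 1` the clamp gives `destTotalPixels = 1`, so any real image triggers the scale-down path and ends up at the 1x1 floor shown above, which the removed `destTotalPixels <= kPixelsPerMB` early return used to prevent by refusing to scale at all.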

View File

@@ -74,6 +74,7 @@
}
- (void)test07ThatDecodeAndScaleDownImageDoesNotScaleSmallerImage {
// Check that when the provided limit bytes is larger than the image's pixel bytes, we do not scale up the image (the default 60MB corresponds to about 3965x3965 pixels)
NSString *testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"jpg"];
UIImage *image = [[UIImage alloc] initWithContentsOfFile:testImagePath];
UIImage *decodedImage = [UIImage sd_decodedAndScaledDownImageWithImage:image];
@@ -83,6 +84,17 @@
expect(decodedImage.size.height).to.equal(image.size.height);
}
- (void)test07ThatDecodeAndScaleDownImageScaleSmallerBytes {
// Check that when too small a byte limit is provided, we scale the image down to 1x1 instead of returning the force-decoded original-size image
NSString *testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"jpg"];
UIImage *image = [[UIImage alloc] initWithContentsOfFile:testImagePath];
UIImage *decodedImage = [UIImage sd_decodedAndScaledDownImageWithImage:image limitBytes:1];
expect(decodedImage).toNot.beNil();
expect(decodedImage).toNot.equal(image);
expect(decodedImage.size.width).to.equal(1);
expect(decodedImage.size.height).to.equal(1);
}
- (void)test08ThatEncodeAlphaImageToJPGWithBackgroundColor {
NSString *testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"png"];
UIImage *image = [[UIImage alloc] initWithContentsOfFile:testImagePath];