Merge pull request #2977 from dreampiggy/attempt_fix_image_io_animated_image_thread_safe

SDAnimatedImageView animation rendering should not use CGContext force decoding; use `kCGImageSourceShouldCacheImmediately` instead, which can avoid OOM for a large number of GIFs
DreamPiggy 2020-04-11 17:40:59 +08:00 committed by GitHub
commit 774571f498
8 changed files with 88 additions and 46 deletions
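
For context on the change described above, here is a standalone, illustrative sketch (not part of the diff) contrasting the two decoding strategies; the helper names `ForceDecodeViaCGContext` and `DecodeViaImageIO` are hypothetical.

#import <Foundation/Foundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <ImageIO/ImageIO.h>

// Old approach (simplified): redraw the lazily-decoded CGImage into a fresh bitmap
// context. Every animation frame gets its own full-size bitmap copy, which can pile
// up and cause OOM when many large GIFs animate at the same time.
static CGImageRef ForceDecodeViaCGContext(CGImageRef imageRef) {
    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, colorSpace,
                                                 kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Host);
    CGColorSpaceRelease(colorSpace);
    if (!context) return NULL;
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
    CGImageRef decoded = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    return decoded; // caller must CGImageRelease
}

// New approach (simplified): ask ImageIO to decode the frame immediately when the
// CGImage is created, so no extra bitmap context or second pixel buffer is needed.
static CGImageRef DecodeViaImageIO(CGImageSourceRef source, size_t index) {
    NSDictionary *options = @{
        (__bridge NSString *)kCGImageSourceShouldCacheImmediately : @(YES),
        (__bridge NSString *)kCGImageSourceShouldCache : @(YES) // always cache to reduce CPU usage
    };
    return CGImageSourceCreateImageAtIndex(source, index, (__bridge CFDictionaryRef)options); // caller must CGImageRelease
}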

View File

@ -21,6 +21,7 @@ typedef NSMutableDictionary<SDImageCoderOption, id> SDImageCoderMutableOptions;
@note works for `SDImageCoder`.
*/
FOUNDATION_EXPORT SDImageCoderOption _Nonnull const SDImageCoderDecodeFirstFrameOnly;
/**
A CGFloat value which is greater than or equal to 1.0. This value specifies the image scale factor for decoding. If not provided, 1.0 is used. (NSNumber)
@note works for `SDImageCoder`, `SDProgressiveImageCoder`, `SDAnimatedImageCoder`.

View File

@ -563,14 +563,14 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
#pragma mark - Helper Function
+ (BOOL)shouldDecodeImage:(nullable UIImage *)image {
// Avoid extra decode
if (image.sd_isDecoded) {
return NO;
}
// Prevent "CGBitmapContextCreateImage: invalid context 0x0" error
if (image == nil) {
return NO;
}
// Avoid extra decode
if (image.sd_isDecoded) {
return NO;
}
// do not decode animated images
if (image.sd_isAnimated) {
return NO;

View File

@ -122,8 +122,12 @@ static NSString * kSDCGImageDestinationRequestedFileSize = @"kCGImageDestination
}
+ (NSTimeInterval)frameDurationAtIndex:(NSUInteger)index source:(CGImageSourceRef)source {
NSDictionary *options = @{
(__bridge NSString *)kCGImageSourceShouldCacheImmediately : @(YES),
(__bridge NSString *)kCGImageSourceShouldCache : @(YES) // Always cache to reduce CPU usage
};
NSTimeInterval frameDuration = 0.1;
CFDictionaryRef cfFrameProperties = CGImageSourceCopyPropertiesAtIndex(source, index, nil);
CFDictionaryRef cfFrameProperties = CGImageSourceCopyPropertiesAtIndex(source, index, (__bridge CFDictionaryRef)options);
if (!cfFrameProperties) {
return frameDuration;
}
@ -153,9 +157,10 @@ static NSString * kSDCGImageDestinationRequestedFileSize = @"kCGImageDestination
return frameDuration;
}
+ (UIImage *)createFrameAtIndex:(NSUInteger)index source:(CGImageSourceRef)source scale:(CGFloat)scale preserveAspectRatio:(BOOL)preserveAspectRatio thumbnailSize:(CGSize)thumbnailSize {
+ (UIImage *)createFrameAtIndex:(NSUInteger)index source:(CGImageSourceRef)source scale:(CGFloat)scale preserveAspectRatio:(BOOL)preserveAspectRatio thumbnailSize:(CGSize)thumbnailSize options:(NSDictionary *)options {
// Some options need to be passed to `CGImageSourceCopyPropertiesAtIndex` before `CGImageSourceCreateImageAtIndex`, or ImageIO will ignore them because the properties are parsed only once :)
// Parse the image properties
NSDictionary *properties = (__bridge_transfer NSDictionary *)CGImageSourceCopyPropertiesAtIndex(source, index, NULL);
NSDictionary *properties = (__bridge_transfer NSDictionary *)CGImageSourceCopyPropertiesAtIndex(source, index, (__bridge CFDictionaryRef)options);
NSUInteger pixelWidth = [properties[(__bridge NSString *)kCGImagePropertyPixelWidth] unsignedIntegerValue];
NSUInteger pixelHeight = [properties[(__bridge NSString *)kCGImagePropertyPixelHeight] unsignedIntegerValue];
CGImagePropertyOrientation exifOrientation = (CGImagePropertyOrientation)[properties[(__bridge NSString *)kCGImagePropertyOrientation] unsignedIntegerValue];
@ -169,10 +174,15 @@ static NSString * kSDCGImageDestinationRequestedFileSize = @"kCGImageDestination
if ([NSData sd_imageFormatFromUTType:uttype] == SDImageFormatPDF) {
isVector = YES;
}
NSMutableDictionary *decodingOptions;
if (options) {
decodingOptions = [NSMutableDictionary dictionaryWithDictionary:options];
} else {
decodingOptions = [NSMutableDictionary dictionary];
}
CGImageRef imageRef;
if (thumbnailSize.width == 0 || thumbnailSize.height == 0 || pixelWidth == 0 || pixelHeight == 0 || (pixelWidth <= thumbnailSize.width && pixelHeight <= thumbnailSize.height)) {
NSDictionary *options;
if (isVector) {
if (thumbnailSize.width == 0 || thumbnailSize.height == 0) {
// Provide a default pixel count for vector images; simply use the screen size
@ -187,12 +197,11 @@ static NSString * kSDCGImageDestinationRequestedFileSize = @"kCGImageDestination
CGFloat maxPixelSize = MAX(thumbnailSize.width, thumbnailSize.height);
NSUInteger DPIPerPixel = 2;
NSUInteger rasterizationDPI = maxPixelSize * DPIPerPixel;
options = @{kSDCGImageSourceRasterizationDPI : @(rasterizationDPI)};
decodingOptions[kSDCGImageSourceRasterizationDPI] = @(rasterizationDPI);
}
imageRef = CGImageSourceCreateImageAtIndex(source, index, (__bridge CFDictionaryRef)options);
imageRef = CGImageSourceCreateImageAtIndex(source, index, (__bridge CFDictionaryRef)decodingOptions);
} else {
NSMutableDictionary *thumbnailOptions = [NSMutableDictionary dictionary];
thumbnailOptions[(__bridge NSString *)kCGImageSourceCreateThumbnailWithTransform] = @(preserveAspectRatio);
decodingOptions[(__bridge NSString *)kCGImageSourceCreateThumbnailWithTransform] = @(preserveAspectRatio);
CGFloat maxPixelSize;
if (preserveAspectRatio) {
CGFloat pixelRatio = (CGFloat)pixelWidth / pixelHeight;
@ -205,9 +214,9 @@ static NSString * kSDCGImageDestinationRequestedFileSize = @"kCGImageDestination
} else {
maxPixelSize = MAX(thumbnailSize.width, thumbnailSize.height);
}
thumbnailOptions[(__bridge NSString *)kCGImageSourceThumbnailMaxPixelSize] = @(maxPixelSize);
thumbnailOptions[(__bridge NSString *)kCGImageSourceCreateThumbnailFromImageIfAbsent] = @(YES);
imageRef = CGImageSourceCreateThumbnailAtIndex(source, index, (__bridge CFDictionaryRef)thumbnailOptions);
decodingOptions[(__bridge NSString *)kCGImageSourceThumbnailMaxPixelSize] = @(maxPixelSize);
decodingOptions[(__bridge NSString *)kCGImageSourceCreateThumbnailFromImageIfAbsent] = @(YES);
imageRef = CGImageSourceCreateThumbnailAtIndex(source, index, (__bridge CFDictionaryRef)decodingOptions);
}
if (!imageRef) {
return nil;
@ -288,12 +297,12 @@ static NSString * kSDCGImageDestinationRequestedFileSize = @"kCGImageDestination
BOOL decodeFirstFrame = [options[SDImageCoderDecodeFirstFrameOnly] boolValue];
if (decodeFirstFrame || count <= 1) {
animatedImage = [self.class createFrameAtIndex:0 source:source scale:scale preserveAspectRatio:preserveAspectRatio thumbnailSize:thumbnailSize];
animatedImage = [self.class createFrameAtIndex:0 source:source scale:scale preserveAspectRatio:preserveAspectRatio thumbnailSize:thumbnailSize options:nil];
} else {
NSMutableArray<SDImageFrame *> *frames = [NSMutableArray array];
for (size_t i = 0; i < count; i++) {
UIImage *image = [self.class createFrameAtIndex:i source:source scale:scale preserveAspectRatio:preserveAspectRatio thumbnailSize:thumbnailSize];
UIImage *image = [self.class createFrameAtIndex:i source:source scale:scale preserveAspectRatio:preserveAspectRatio thumbnailSize:thumbnailSize options:nil];
if (!image) {
continue;
}
@ -369,7 +378,11 @@ static NSString * kSDCGImageDestinationRequestedFileSize = @"kCGImageDestination
CGImageSourceUpdateData(_imageSource, (__bridge CFDataRef)data, finished);
if (_width + _height == 0) {
CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(_imageSource, 0, NULL);
NSDictionary *options = @{
(__bridge NSString *)kCGImageSourceShouldCacheImmediately : @(YES),
(__bridge NSString *)kCGImageSourceShouldCache : @(YES) // Always cache to reduce CPU usage
};
CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(_imageSource, 0, (__bridge CFDictionaryRef)options);
if (properties) {
CFTypeRef val = CFDictionaryGetValue(properties, kCGImagePropertyPixelHeight);
if (val) CFNumberGetValue(val, kCFNumberLongType, &_height);
@ -393,7 +406,7 @@ static NSString * kSDCGImageDestinationRequestedFileSize = @"kCGImageDestination
if (scaleFactor != nil) {
scale = MAX([scaleFactor doubleValue], 1);
}
image = [self.class createFrameAtIndex:0 source:_imageSource scale:scale preserveAspectRatio:_preserveAspectRatio thumbnailSize:_thumbnailSize];
image = [self.class createFrameAtIndex:0 source:_imageSource scale:scale preserveAspectRatio:_preserveAspectRatio thumbnailSize:_thumbnailSize options:nil];
if (image) {
image.sd_imageFormat = self.class.imageFormat;
}
@ -597,24 +610,20 @@ static NSString * kSDCGImageDestinationRequestedFileSize = @"kCGImageDestination
}
- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
UIImage *image = [self.class createFrameAtIndex:index source:_imageSource scale:_scale preserveAspectRatio:_preserveAspectRatio thumbnailSize:_thumbnailSize];
if (index >= _frameCount) {
return nil;
}
// Animated images should not use the CGContext solution to force decode. Prefer the Image/IO built-in method, which is safer and more memory friendly, see https://github.com/SDWebImage/SDWebImage/issues/2961
NSDictionary *options = @{
(__bridge NSString *)kCGImageSourceShouldCacheImmediately : @(YES),
(__bridge NSString *)kCGImageSourceShouldCache : @(YES) // Always cache to reduce CPU usage
};
UIImage *image = [self.class createFrameAtIndex:index source:_imageSource scale:_scale preserveAspectRatio:_preserveAspectRatio thumbnailSize:_thumbnailSize options:options];
if (!image) {
return nil;
}
image.sd_imageFormat = self.class.imageFormat;
// The CGImage created by Image/IO is not decoded yet, so force decode it here. Because this is called on a background queue, it avoids blocking the main queue during rendering (especially when more than one image view uses the same image instance)
CGImageRef imageRef = [SDImageCoderHelper CGImageCreateDecoded:image.CGImage];
if (!imageRef) {
return image;
}
#if SD_MAC
image = [[UIImage alloc] initWithCGImage:imageRef scale:_scale orientation:kCGImagePropertyOrientationUp];
#else
image = [[UIImage alloc] initWithCGImage:imageRef scale:_scale orientation:image.imageOrientation];
#endif
CGImageRelease(imageRef);
image.sd_isDecoded = YES;
image.sd_imageFormat = self.class.imageFormat;
image.sd_isDecoded = YES;
return image;
}
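
The comment in `createFrameAtIndex:source:scale:preserveAspectRatio:thumbnailSize:options:` above notes that ImageIO parses the per-index options only once, so the same dictionary must reach `CGImageSourceCopyPropertiesAtIndex` before `CGImageSourceCreateImageAtIndex`. A minimal standalone sketch of that flow follows; the helper name `SDExampleCreateDecodedFrame` is hypothetical and not part of this PR.

#import <Foundation/Foundation.h>
#import <ImageIO/ImageIO.h>

// Hypothetical helper mirroring the updated frame-creation flow.
static CGImageRef SDExampleCreateDecodedFrame(CGImageSourceRef source, size_t index) {
    NSDictionary *options = @{
        (__bridge NSString *)kCGImageSourceShouldCacheImmediately : @(YES),
        (__bridge NSString *)kCGImageSourceShouldCache : @(YES) // always cache to reduce repeated CPU work
    };
    // Pass the options when the per-index properties are parsed...
    NSDictionary *properties = (__bridge_transfer NSDictionary *)
        CGImageSourceCopyPropertiesAtIndex(source, index, (__bridge CFDictionaryRef)options);
    NSUInteger pixelWidth = [properties[(__bridge NSString *)kCGImagePropertyPixelWidth] unsignedIntegerValue];
    NSUInteger pixelHeight = [properties[(__bridge NSString *)kCGImagePropertyPixelHeight] unsignedIntegerValue];
    if (pixelWidth == 0 || pixelHeight == 0) {
        return NULL;
    }
    // ...and again when the frame is created, so the eagerly-decoded bitmap is what gets cached.
    return CGImageSourceCreateImageAtIndex(source, index, (__bridge CFDictionaryRef)options); // caller must CGImageRelease
}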

View File

@ -101,7 +101,7 @@ static NSString * kSDCGImageDestinationRequestedFileSize = @"kCGImageDestination
return nil;
}
UIImage *image = [SDImageIOAnimatedCoder createFrameAtIndex:0 source:source scale:scale preserveAspectRatio:preserveAspectRatio thumbnailSize:thumbnailSize];
UIImage *image = [SDImageIOAnimatedCoder createFrameAtIndex:0 source:source scale:scale preserveAspectRatio:preserveAspectRatio thumbnailSize:thumbnailSize options:nil];
CFRelease(source);
if (!image) {
return nil;
@ -193,7 +193,7 @@ static NSString * kSDCGImageDestinationRequestedFileSize = @"kCGImageDestination
if (scaleFactor != nil) {
scale = MAX([scaleFactor doubleValue], 1);
}
image = [SDImageIOAnimatedCoder createFrameAtIndex:0 source:_imageSource scale:scale preserveAspectRatio:_preserveAspectRatio thumbnailSize:_thumbnailSize];
image = [SDImageIOAnimatedCoder createFrameAtIndex:0 source:_imageSource scale:scale preserveAspectRatio:_preserveAspectRatio thumbnailSize:_thumbnailSize options:nil];
if (image) {
CFStringRef uttype = CGImageSourceGetType(_imageSource);
image.sd_imageFormat = [NSData sd_imageFormatFromUTType:uttype];

View File

@ -13,6 +13,6 @@
+ (NSTimeInterval)frameDurationAtIndex:(NSUInteger)index source:(nonnull CGImageSourceRef)source;
+ (NSUInteger)imageLoopCountWithSource:(nonnull CGImageSourceRef)source;
+ (nullable UIImage *)createFrameAtIndex:(NSUInteger)index source:(nonnull CGImageSourceRef)source scale:(CGFloat)scale preserveAspectRatio:(BOOL)preserveAspectRatio thumbnailSize:(CGSize)thumbnailSize;
+ (nullable UIImage *)createFrameAtIndex:(NSUInteger)index source:(nonnull CGImageSourceRef)source scale:(CGFloat)scale preserveAspectRatio:(BOOL)preserveAspectRatio thumbnailSize:(CGSize)thumbnailSize options:(nullable NSDictionary *)options;
@end

View File

@ -9,6 +9,9 @@
/* Begin PBXBuildFile section */
1E3C51E919B46E370092B5E6 /* SDWebImageDownloaderTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 1E3C51E819B46E370092B5E6 /* SDWebImageDownloaderTests.m */; };
2D7AF0601F329763000083C2 /* SDTestCase.m in Sources */ = {isa = PBXBuildFile; fileRef = 2D7AF05F1F329763000083C2 /* SDTestCase.m */; };
320224F72440C39B00E5B29D /* TestImageLarge.png in Resources */ = {isa = PBXBuildFile; fileRef = 320224F62440C39B00E5B29D /* TestImageLarge.png */; };
320224F82440C39B00E5B29D /* TestImageLarge.png in Resources */ = {isa = PBXBuildFile; fileRef = 320224F62440C39B00E5B29D /* TestImageLarge.png */; };
320224F92440C39B00E5B29D /* TestImageLarge.png in Resources */ = {isa = PBXBuildFile; fileRef = 320224F62440C39B00E5B29D /* TestImageLarge.png */; };
320630412085A37C006E0FA4 /* SDAnimatedImageTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 32A571552037DB2D002EDAAE /* SDAnimatedImageTest.m */; };
3222417F2272F808002429DB /* SDUtilsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 3222417E2272F808002429DB /* SDUtilsTests.m */; };
322241802272F808002429DB /* SDUtilsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 3222417E2272F808002429DB /* SDUtilsTests.m */; };
@ -107,6 +110,7 @@
1E3C51E819B46E370092B5E6 /* SDWebImageDownloaderTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SDWebImageDownloaderTests.m; sourceTree = "<group>"; };
2D7AF05E1F329763000083C2 /* SDTestCase.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SDTestCase.h; sourceTree = "<group>"; };
2D7AF05F1F329763000083C2 /* SDTestCase.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SDTestCase.m; sourceTree = "<group>"; };
320224F62440C39B00E5B29D /* TestImageLarge.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TestImageLarge.png; sourceTree = "<group>"; };
3222417E2272F808002429DB /* SDUtilsTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = SDUtilsTests.m; sourceTree = "<group>"; };
3226ECB920754F7700FAFACF /* SDWebImageTestDownloadOperation.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SDWebImageTestDownloadOperation.h; sourceTree = "<group>"; };
3226ECBA20754F7700FAFACF /* SDWebImageTestDownloadOperation.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = SDWebImageTestDownloadOperation.m; sourceTree = "<group>"; };
@ -239,6 +243,7 @@
326E69462334C0C200B7252C /* TestLoopCount.gif */,
5F7F38AC1AE2A77A00B0E330 /* TestImage.jpg */,
43828A441DA67F9900000E62 /* TestImageLarge.jpg */,
320224F62440C39B00E5B29D /* TestImageLarge.png */,
433BBBB81D7EF8260086B6E9 /* TestImage.png */,
327A418B211D660600495442 /* TestImage.heic */,
32905E63211D786E00460FCF /* TestImage.heif */,
@ -449,6 +454,7 @@
329922872365DC6C00EAFD97 /* TestLoopCount.gif in Resources */,
3299228C2365DC6C00EAFD97 /* TestImage.heif in Resources */,
3234306423E2BAC800C290C8 /* TestImage.pdf in Resources */,
320224F92440C39B00E5B29D /* TestImageLarge.png in Resources */,
329922892365DC6C00EAFD97 /* TestImageLarge.jpg in Resources */,
3299228A2365DC6C00EAFD97 /* TestImage.png in Resources */,
329922842365DC6C00EAFD97 /* MonochromeTestImage.jpg in Resources */,
@ -468,6 +474,7 @@
324047452271956F007C53E1 /* TestEXIF.png in Resources */,
32B99EA4203B31360017FD66 /* TestImage.jpg in Resources */,
3234306323E2BAC800C290C8 /* TestImage.pdf in Resources */,
320224F82440C39B00E5B29D /* TestImageLarge.png in Resources */,
32B99EA6203B31360017FD66 /* TestImage.png in Resources */,
3297A0A023374D1700814590 /* TestImageAnimated.heic in Resources */,
32B99EA2203B31360017FD66 /* MonochromeTestImage.jpg in Resources */,
@ -487,6 +494,7 @@
32905E64211D786E00460FCF /* TestImage.heif in Resources */,
43828A451DA67F9900000E62 /* TestImageLarge.jpg in Resources */,
3234306223E2BAC800C290C8 /* TestImage.pdf in Resources */,
320224F72440C39B00E5B29D /* TestImageLarge.png in Resources */,
433BBBB71D7EF8200086B6E9 /* TestImage.gif in Resources */,
433BBBB91D7EF8260086B6E9 /* TestImage.png in Resources */,
3297A09F23374D1700814590 /* TestImageAnimated.heic in Resources */,

Binary file not shown: TestImageLarge.png (306 KiB)

View File

@ -22,7 +22,7 @@
}
- (void)test02ThatDecodedImageWithImageWorksWithARegularJPGImage {
NSString * testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"jpg"];
NSString *testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"jpg"];
UIImage *image = [[UIImage alloc] initWithContentsOfFile:testImagePath];
UIImage *decodedImage = [UIImage sd_decodedImageWithImage:image];
expect(decodedImage).toNot.beNil();
@ -32,7 +32,7 @@
}
- (void)test03ThatDecodedImageWithImageDoesNotDecodeAnimatedImages {
NSString * testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"gif"];
NSString *testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"gif"];
UIImage *image = [[UIImage alloc] initWithContentsOfFile:testImagePath];
#if SD_MAC
UIImage *animatedImage = image;
@ -45,7 +45,7 @@
}
- (void)test04ThatDecodedImageWithImageWorksWithAlphaImages {
NSString * testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"png"];
NSString *testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"png"];
UIImage *image = [[UIImage alloc] initWithContentsOfFile:testImagePath];
UIImage *decodedImage = [UIImage sd_decodedImageWithImage:image];
expect(decodedImage).toNot.beNil();
@ -53,7 +53,7 @@
}
- (void)test05ThatDecodedImageWithImageWorksEvenWithMonochromeImage {
NSString * testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"MonochromeTestImage" ofType:@"jpg"];
NSString *testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"MonochromeTestImage" ofType:@"jpg"];
UIImage *image = [[UIImage alloc] initWithContentsOfFile:testImagePath];
UIImage *decodedImage = [UIImage sd_decodedImageWithImage:image];
expect(decodedImage).toNot.beNil();
@ -63,7 +63,7 @@
}
- (void)test06ThatDecodeAndScaleDownImageWorks {
NSString * testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImageLarge" ofType:@"jpg"];
NSString *testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImageLarge" ofType:@"jpg"];
UIImage *image = [[UIImage alloc] initWithContentsOfFile:testImagePath];
UIImage *decodedImage = [UIImage sd_decodedAndScaledDownImageWithImage:image limitBytes:(60 * 1024 * 1024)];
expect(decodedImage).toNot.beNil();
@ -74,7 +74,7 @@
}
- (void)test07ThatDecodeAndScaleDownImageDoesNotScaleSmallerImage {
NSString * testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"jpg"];
NSString *testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"jpg"];
UIImage *image = [[UIImage alloc] initWithContentsOfFile:testImagePath];
UIImage *decodedImage = [UIImage sd_decodedAndScaledDownImageWithImage:image];
expect(decodedImage).toNot.beNil();
@ -84,7 +84,7 @@
}
- (void)test08ThatEncodeAlphaImageToJPGWithBackgroundColor {
NSString * testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"png"];
NSString *testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"png"];
UIImage *image = [[UIImage alloc] initWithContentsOfFile:testImagePath];
UIColor *backgroundColor = [UIColor blackColor];
NSData *encodedData = [SDImageCodersManager.sharedManager encodedDataWithImage:image format:SDImageFormatJPEG options:@{SDImageCoderEncodeBackgroundColor : backgroundColor}];
@ -99,7 +99,7 @@
}
- (void)test09ThatJPGImageEncodeWithMaxFileSize {
NSString * testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImageLarge" ofType:@"jpg"];
NSString *testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImageLarge" ofType:@"jpg"];
UIImage *image = [[UIImage alloc] initWithContentsOfFile:testImagePath];
// This large JPEG's encoded size falls between 770KB and 2.23MB
NSUInteger limitFileSize = 1 * 1024 * 1024; // 1MB
@ -118,6 +118,30 @@
expect(limitEncodedData.length).beGreaterThan(minEncodedData.length);
}
- (void)test10ThatAnimatedImageCacheImmediatelyWorks {
NSString *testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImageLarge" ofType:@"png"];
NSData *testImageData = [NSData dataWithContentsOfFile:testImagePath];
// Check that animated image rendering should not use lazy decoding (performance related)
CFAbsoluteTime begin = CFAbsoluteTimeGetCurrent();
SDImageAPNGCoder *coder = [[SDImageAPNGCoder alloc] initWithAnimatedImageData:testImageData options:@{SDImageCoderDecodeFirstFrameOnly : @(NO)}];
UIImage *imageWithoutLazyDecoding = [coder animatedImageFrameAtIndex:0];
CFAbsoluteTime end = CFAbsoluteTimeGetCurrent();
CFAbsoluteTime duration = end - begin;
expect(imageWithoutLazyDecoding.sd_isDecoded).beTruthy();
// Check that static image rendering should use lazy decoding
CFAbsoluteTime begin2 = CFAbsoluteTimeGetCurrent();
SDImageAPNGCoder *coder2 = SDImageAPNGCoder.sharedCoder;
UIImage *imageWithLazyDecoding = [coder2 decodedImageWithData:testImageData options:@{SDImageCoderDecodeFirstFrameOnly : @(YES)}];
CFAbsoluteTime end2 = CFAbsoluteTimeGetCurrent();
CFAbsoluteTime duration2 = end2 - begin2;
expect(imageWithLazyDecoding.sd_isDecoded).beFalsy();
// Lazy decoding should take less time (at least 10x faster)
expect(duration2 * 10.0).beLessThan(duration);
}
- (void)test11ThatAPNGPCoderWorks {
NSURL *APNGURL = [[NSBundle bundleForClass:[self class]] URLForResource:@"TestImageAnimated" withExtension:@"apng"];
[self verifyCoder:[SDImageAPNGCoder sharedCoder]
@ -138,7 +162,7 @@
// When GIF metadata does not contain any loop count information (`kCGImagePropertyGIFLoopCount`'s value is nil)
// The standard says it should just play once. See: http://www6.uniovi.es/gifanim/gifabout.htm
// This behavior is different from other modern animated image formats like APNG/WebP, which play infinitely
NSString * testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestLoopCount" ofType:@"gif"];
NSString *testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestLoopCount" ofType:@"gif"];
NSData *testImageData = [NSData dataWithContentsOfFile:testImagePath];
UIImage *image = [SDImageGIFCoder.sharedCoder decodedImageWithData:testImageData options:nil];
expect(image.sd_imageLoopCount).equal(1);