Merge pull request #31 from SDWebImage/feature_thumbnail_decoding

WebPCoder now supports thumbnail decoding
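
For downstream users, the thumbnail is requested through the coder decode options, or via the SDWebImageContextImageThumbnailPixelSize context option when loading through the view categories (as the demo change below shows). A minimal, illustrative sketch of the direct-decode path; the file path and variable names are placeholders:

[[SDImageCodersManager sharedManager] addCoder:[SDImageWebPCoder sharedCoder]]; // one-time setup so the URL-loading path can use the coder too
NSData *webpData = [NSData dataWithContentsOfFile:@"/path/to/image.webp"]; // placeholder path
UIImage *thumbnail = [[SDImageWebPCoder sharedCoder] decodedImageWithData:webpData options:@{
    SDImageCoderDecodeThumbnailPixelSize : @(CGSizeMake(300, 300)), // limit the decoded pixel size to 300x300
    SDImageCoderDecodePreserveAspectRatio : @(YES)                  // aspect-fit within that limit
}];
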
This commit is contained in:
DreamPiggy 2020-01-18 12:40:54 +08:00 committed by GitHub
commit d77570d623
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 187 additions and 43 deletions

View File

@ -1,2 +1,2 @@
github "SDWebImage/SDWebImage" ~> 5.0
github "SDWebImage/SDWebImage" ~> 5.5
github "SDWebImage/libwebp-Xcode" ~> 1.0

View File

@ -1,2 +1,2 @@
github "SDWebImage/SDWebImage" "5.0.0"
github "SDWebImage/libwebp-Xcode" "1.0.0"
github "SDWebImage/SDWebImage" "5.5.0"
github "SDWebImage/libwebp-Xcode" "1.1.0"

View File

@ -35,7 +35,7 @@
NSURL *staticWebPURL = [NSURL URLWithString:@"https://www.gstatic.com/webp/gallery/2.webp"];
NSURL *animatedWebPURL = [NSURL URLWithString:@"http://littlesvr.ca/apng/images/world-cup-2014-42.webp"];
[self.imageView1 sd_setImageWithURL:staticWebPURL completed:^(UIImage * _Nullable image, NSError * _Nullable error, SDImageCacheType cacheType, NSURL * _Nullable imageURL) {
[self.imageView1 sd_setImageWithURL:staticWebPURL placeholderImage:nil options:0 context:@{SDWebImageContextImageThumbnailPixelSize : @(CGSizeMake(300, 300))} progress:nil completed:^(UIImage * _Nullable image, NSError * _Nullable error, SDImageCacheType cacheType, NSURL * _Nullable imageURL) {
if (image) {
NSLog(@"%@", @"Static WebP load success");
}
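
The same context option applies to the animated WebP URL declared above, which the demo presumably loads into a second image view; a sketch of that call (imageView2 and the 300x300 value are assumptions mirroring the static case):

[self.imageView2 sd_setImageWithURL:animatedWebPURL placeholderImage:nil options:0 context:@{SDWebImageContextImageThumbnailPixelSize : @(CGSizeMake(300, 300))} progress:nil completed:^(UIImage * _Nullable image, NSError * _Nullable error, SDImageCacheType cacheType, NSURL * _Nullable imageURL) {
    if (image) {
        NSLog(@"%@", @"Animated WebP load success"); // frames are decoded at the thumbnail size
    }
}];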

View File

@ -27,7 +27,7 @@ This is a SDWebImage coder plugin to support WebP image.
'GCC_PREPROCESSOR_DEFINITIONS' => '$(inherited) SD_WEBP=1 WEBP_USE_INTRINSICS=1',
'USER_HEADER_SEARCH_PATHS' => '$(inherited) $(SRCROOT)/libwebp/src'
}
s.dependency 'SDWebImage/Core', '~> 5.0'
s.dependency 'SDWebImage/Core', '~> 5.5'
s.dependency 'libwebp', '~> 1.0'
end

View File

@ -24,6 +24,38 @@
#import <Accelerate/Accelerate.h>
/// Calculate the actual thumbnail pixel size
static CGSize SDCalculateThumbnailSize(CGSize fullSize, BOOL preserveAspectRatio, CGSize thumbnailSize) {
CGFloat width = fullSize.width;
CGFloat height = fullSize.height;
CGFloat resultWidth;
CGFloat resultHeight;
if (width == 0 || height == 0 || thumbnailSize.width == 0 || thumbnailSize.height == 0 || (width <= thumbnailSize.width && height <= thumbnailSize.height)) {
// Full Pixel
resultWidth = width;
resultHeight = height;
} else {
// Thumbnail
if (preserveAspectRatio) {
CGFloat pixelRatio = width / height;
CGFloat thumbnailRatio = thumbnailSize.width / thumbnailSize.height;
if (pixelRatio > thumbnailRatio) {
resultWidth = thumbnailSize.width;
resultHeight = ceil(thumbnailSize.width / pixelRatio);
} else {
resultHeight = thumbnailSize.height;
resultWidth = ceil(thumbnailSize.height * pixelRatio);
}
} else {
resultWidth = thumbnailSize.width;
resultHeight = thumbnailSize.height;
}
}
return CGSizeMake(resultWidth, resultHeight);
}
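
A quick worked check of the aspect-fit branch above; the 1000x500 source and 300x300 limit are illustrative values only:

// pixelRatio = 1000 / 500 = 2.0 > thumbnailRatio = 300 / 300 = 1.0, so the width is pinned first:
// resultWidth = 300, resultHeight = ceil(300 / 2.0) = 150
CGSize scaled = SDCalculateThumbnailSize(CGSizeMake(1000, 500), YES, CGSizeMake(300, 300)); // == {300, 150}
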
#ifndef SD_LOCK
#define SD_LOCK(lock) dispatch_semaphore_wait(lock, DISPATCH_TIME_FOREVER);
#endif
@ -68,6 +100,8 @@
CGFloat _canvasHeight;
dispatch_semaphore_t _lock;
NSUInteger _currentBlendIndex;
BOOL _preserveAspectRatio;
CGSize _thumbnailSize;
}
- (void)dealloc {
@ -133,6 +167,22 @@
}
}
CGSize thumbnailSize = CGSizeZero;
NSValue *thumbnailSizeValue = options[SDImageCoderDecodeThumbnailPixelSize];
if (thumbnailSizeValue != nil) {
#if SD_MAC
thumbnailSize = thumbnailSizeValue.sizeValue;
#else
thumbnailSize = thumbnailSizeValue.CGSizeValue;
#endif
}
BOOL preserveAspectRatio = YES;
NSNumber *preserveAspectRatioValue = options[SDImageCoderDecodePreserveAspectRatio];
if (preserveAspectRatioValue != nil) {
preserveAspectRatio = preserveAspectRatioValue.boolValue;
}
// for animated webp image
WebPIterator iter;
// libwebp's frame index starts at 1
@ -141,11 +191,15 @@
WebPDemuxDelete(demuxer);
return nil;
}
CGColorSpaceRef colorSpace = [self sd_colorSpaceWithDemuxer:demuxer];
CGColorSpaceRef colorSpace = [self sd_createColorSpaceWithDemuxer:demuxer];
int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
// Check whether we need to use a thumbnail
CGSize scaledSize = SDCalculateThumbnailSize(CGSizeMake(canvasWidth, canvasHeight), preserveAspectRatio, thumbnailSize);
if (!hasAnimation || decodeFirstFrame) {
// first frame for animated webp image
CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:colorSpace];
CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:colorSpace scaledSize:scaledSize];
CGColorSpaceRelease(colorSpace);
#if SD_UIKIT || SD_WATCH
UIImage *firstFrameImage = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
@ -159,9 +213,6 @@
return firstFrameImage;
}
int loopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
BOOL hasAlpha = flags & ALPHA_FLAG;
CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
@ -172,14 +223,16 @@
return nil;
}
int loopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
NSMutableArray<SDImageFrame *> *frames = [NSMutableArray array];
do {
@autoreleasepool {
CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:canvas iterator:iter colorSpace:colorSpace];
CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:canvas iterator:iter colorSpace:colorSpace scaledSize:scaledSize];
if (!imageRef) {
continue;
}
#if SD_UIKIT || SD_WATCH
UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
#else
@ -221,6 +274,22 @@
}
}
_scale = scale;
CGSize thumbnailSize = CGSizeZero;
NSValue *thumbnailSizeValue = options[SDImageCoderDecodeThumbnailPixelSize];
if (thumbnailSizeValue != nil) {
#if SD_MAC
thumbnailSize = thumbnailSizeValue.sizeValue;
#else
thumbnailSize = thumbnailSizeValue.CGSizeValue;
#endif
}
_thumbnailSize = thumbnailSize;
BOOL preserveAspectRatio = YES;
NSNumber *preserveAspectRatioValue = options[SDImageCoderDecodePreserveAspectRatio];
if (preserveAspectRatioValue != nil) {
preserveAspectRatio = preserveAspectRatioValue.boolValue;
}
_preserveAspectRatio = preserveAspectRatio;
}
return self;
}
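
The initializer above (the incremental/progressive path) now records the same thumbnail options. A rough sketch of feeding it partial data; the data variables are illustrative, and whether the progressive output itself is scaled depends on code outside the hunks shown here:

SDImageWebPCoder *progressiveCoder = [[SDImageWebPCoder alloc] initIncrementalWithOptions:@{
    SDImageCoderDecodeThumbnailPixelSize : @(CGSizeMake(150, 150))
}];
[progressiveCoder updateIncrementalData:partialWebPData finished:NO]; // feed bytes as they arrive
UIImage *partialImage = [progressiveCoder incrementalDecodedImageWithOptions:nil];
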
@ -317,7 +386,7 @@
if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
CGContextClearRect(canvas, imageRect);
} else {
CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:colorSpaceRef];
CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:colorSpaceRef scaledSize:CGSizeZero];
if (!imageRef) {
return;
}
@ -331,16 +400,18 @@
}
}
- (nullable CGImageRef)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef CF_RETURNS_RETAINED {
CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:colorSpaceRef];
- (nullable CGImageRef)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef scaledSize:(CGSize)scaledSize CF_RETURNS_RETAINED {
CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:colorSpaceRef scaledSize:CGSizeZero];
if (!imageRef) {
return nil;
}
size_t canvasWidth = CGBitmapContextGetWidth(canvas);
size_t canvasHeight = CGBitmapContextGetHeight(canvas);
CGFloat tmpX = iter.x_offset;
CGFloat tmpY = canvasHeight - iter.height - iter.y_offset;
CGRect imageRect = CGRectMake(tmpX, tmpY, iter.width, iter.height);
BOOL shouldBlend = iter.blend_method == WEBP_MUX_BLEND;
// If not blending, cover the target image rect (clear first, then draw)
@ -351,15 +422,26 @@
CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
CGImageRelease(imageRef);
if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
CGContextClearRect(canvas, imageRect);
}
// Check whether we need to use a thumbnail
if (!CGSizeEqualToSize(CGSizeMake(canvasWidth, canvasHeight), scaledSize)) {
// Important: For animated WebP thumbnail generation, we can not simply use a scaled-down canvas and draw each thumbnail frame onto it.
// That works in theory, but scaling down loses detail: animated WebP covers the previous canvas state with partial pixels, using the blend mode / dispose method and frame offsets.
// Because of this, even if each frame only shows a small artifact, the final animation has visible glitches, which is not what we want.
// So always create the full-size canvas (even though this consumes more RAM) and, after drawing onto it, re-scale down to the final size.
CGImageRef scaledImageRef = [SDImageCoderHelper CGImageCreateScaled:newImageRef size:scaledSize];
CGImageRelease(newImageRef);
newImageRef = scaledImageRef;
}
return newImageRef;
}
- (nullable CGImageRef)sd_createWebpImageWithData:(WebPData)webpData colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef CF_RETURNS_RETAINED {
- (nullable CGImageRef)sd_createWebpImageWithData:(WebPData)webpData colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef scaledSize:(CGSize)scaledSize CF_RETURNS_RETAINED {
WebPDecoderConfig config;
if (!WebPInitDecoderConfig(&config)) {
return nil;
@ -377,24 +459,26 @@
config.options.use_threads = 1;
config.output.colorspace = MODE_bgrA;
// Use scaling for thumbnail
if (scaledSize.width != 0 && scaledSize.height != 0) {
config.options.use_scaling = 1;
config.options.scaled_width = scaledSize.width;
config.options.scaled_height = scaledSize.height;
}
// Decode the WebP image data into an RGBA value array
if (WebPDecode(webpData.bytes, webpData.size, &config) != VP8_STATUS_OK) {
return nil;
}
int width = config.input.width;
int height = config.input.height;
if (config.options.use_scaling) {
width = config.options.scaled_width;
height = config.options.scaled_height;
}
// Construct a UIImage from the decoded RGBA value array
CGDataProviderRef provider =
CGDataProviderCreateWithData(NULL, config.output.u.RGBA.rgba, config.output.u.RGBA.size, FreeImageData);
size_t bitsPerComponent = 8;
size_t bitsPerPixel = 32;
size_t bytesPerRow = config.output.u.RGBA.stride;
size_t width = config.output.width;
size_t height = config.output.height;
CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
CGImageRef imageRef = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
@ -414,7 +498,7 @@
}
// Create and return the correct colorspace by checking the ICC Profile
- (nonnull CGColorSpaceRef)sd_colorSpaceWithDemuxer:(nonnull WebPDemuxer *)demuxer CF_RETURNS_RETAINED {
- (nonnull CGColorSpaceRef)sd_createColorSpaceWithDemuxer:(nonnull WebPDemuxer *)demuxer CF_RETURNS_RETAINED {
// A WebP that contains an ICC profile should use that colorspace instead of the default device colorspace
// See: https://developers.google.com/speed/webp/docs/riff_container#color_profile
@ -681,6 +765,22 @@ static void FreeImageData(void *info, const void *data, size_t size) {
scale = 1;
}
}
CGSize thumbnailSize = CGSizeZero;
NSValue *thumbnailSizeValue = options[SDImageCoderDecodeThumbnailPixelSize];
if (thumbnailSizeValue != nil) {
#if SD_MAC
thumbnailSize = thumbnailSizeValue.sizeValue;
#else
thumbnailSize = thumbnailSizeValue.CGSizeValue;
#endif
}
_thumbnailSize = thumbnailSize;
BOOL preserveAspectRatio = YES;
NSNumber *preserveAspectRatioValue = options[SDImageCoderDecodePreserveAspectRatio];
if (preserveAspectRatioValue != nil) {
preserveAspectRatio = preserveAspectRatioValue.boolValue;
}
_preserveAspectRatio = preserveAspectRatio;
_scale = scale;
_demux = demuxer;
_imageData = data;
@ -804,7 +904,7 @@ static void FreeImageData(void *info, const void *data, size_t size) {
- (UIImage *)safeStaticImageFrame {
UIImage *image;
if (!_colorSpace) {
_colorSpace = [self sd_colorSpaceWithDemuxer:_demux];
_colorSpace = [self sd_createColorSpaceWithDemuxer:_demux];
}
// Static WebP image
WebPIterator iter;
@ -812,7 +912,9 @@ static void FreeImageData(void *info, const void *data, size_t size) {
WebPDemuxReleaseIterator(&iter);
return nil;
}
CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:_colorSpace];
// Check whether we need to use a thumbnail
CGSize scaledSize = SDCalculateThumbnailSize(CGSizeMake(_canvasWidth, _canvasHeight), _preserveAspectRatio, _thumbnailSize);
CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:_colorSpace scaledSize:scaledSize];
if (!imageRef) {
return nil;
}
@ -837,7 +939,7 @@ static void FreeImageData(void *info, const void *data, size_t size) {
_canvas = canvas;
}
if (!_colorSpace) {
_colorSpace = [self sd_colorSpaceWithDemuxer:_demux];
_colorSpace = [self sd_createColorSpaceWithDemuxer:_demux];
}
SDWebPCoderFrame *frame = _frames[index];
@ -887,7 +989,9 @@ static void FreeImageData(void *info, const void *data, size_t size) {
_currentBlendIndex = index;
// Now the canvas is ready and respects the dispose method behavior. Just do the normal decoding and produce the image.
CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter colorSpace:_colorSpace];
// Check whether we need to use a thumbnail
CGSize scaledSize = SDCalculateThumbnailSize(CGSizeMake(_canvasWidth, _canvasHeight), _preserveAspectRatio, _thumbnailSize);
CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter colorSpace:_colorSpace scaledSize:scaledSize];
if (!imageRef) {
return nil;
}
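
Taken together, the animated decode path above can also be driven directly through the SDAnimatedImageCoder interface; a rough sketch (the animatedWebPData variable and the 200x200 limit are illustrative):

SDImageWebPCoder *animatedCoder = [[SDImageWebPCoder alloc] initWithAnimatedImageData:animatedWebPData options:@{
    SDImageCoderDecodeThumbnailPixelSize : @(CGSizeMake(200, 200)),
    SDImageCoderDecodePreserveAspectRatio : @(YES)
}];
// Each frame is drawn on the full-size canvas first and only then scaled, so the animation stays glitch-free.
NSLog(@"%lu frames, first frame size: %@", (unsigned long)animatedCoder.animatedImageFrameCount, NSStringFromCGSize([animatedCoder animatedImageFrameAtIndex:0].size));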

View File

@ -5,5 +5,6 @@ project '../SDWebImageWebPCoder'
workspace '../SDWebImageWebPCoder'
target 'SDWebImageWebPCoderTests' do
pod 'Expecta'
pod 'SDWebImageWebPCoder', :path => '../'
end

View File

@ -10,6 +10,7 @@
@import XCTest;
#import <SDWebImage/SDWebImage.h>
#import <SDWebImageWebPCoder/SDWebImageWebPCoder.h>
#import <Expecta/Expecta.h>
#import <objc/runtime.h>
const int64_t kAsyncTestTimeout = 5;
@ -193,45 +194,83 @@ const int64_t kAsyncTestTimeout = 5;
withLocalImageURL:(NSURL *)imageUrl
supportsEncoding:(BOOL)supportsEncoding
isAnimatedImage:(BOOL)isAnimated {
SDImageFormat encodingFormat = SDImageFormatWebP;
NSData *inputImageData = [NSData dataWithContentsOfURL:imageUrl];
XCTAssertNotNil(inputImageData, @"Input image data should not be nil");
expect(inputImageData).toNot.beNil();
SDImageFormat inputImageFormat = [NSData sd_imageFormatForImageData:inputImageData];
XCTAssert(inputImageFormat != SDImageFormatUndefined, @"Input image format should not be undefined");
expect(inputImageFormat).toNot.equal(SDImageFormatUndefined);
// 1 - check if we can decode - should be true
XCTAssertTrue([coder canDecodeFromData:inputImageData]);
expect([coder canDecodeFromData:inputImageData]).to.beTruthy();
// 2 - decode from NSData to UIImage and check it
UIImage *inputImage = [coder decodedImageWithData:inputImageData options:nil];
XCTAssertNotNil(inputImage, @"The decoded image from input data should not be nil");
expect(inputImage).toNot.beNil();
if (isAnimated) {
// 2a - check images count > 0 (only for animated images)
XCTAssertTrue(inputImage.sd_isAnimated, @"The decoded image should be animated");
expect(inputImage.sd_isAnimated).to.beTruthy();
// 2b - check image size and scale for each frameImage (only for animated images)
#if SD_UIKIT
CGSize imageSize = inputImage.size;
CGFloat imageScale = inputImage.scale;
[inputImage.images enumerateObjectsUsingBlock:^(UIImage * frameImage, NSUInteger idx, BOOL * stop) {
XCTAssertTrue(CGSizeEqualToSize(imageSize, frameImage.size), @"Each frame size should match the image size");
XCTAssertEqual(imageScale, frameImage.scale, @"Each frame scale should match the image scale");
expect(imageSize).to.equal(frameImage.size);
expect(imageScale).to.equal(frameImage.scale);
}];
#endif
}
// 3 - check thumbnail decoding
CGFloat pixelWidth = inputImage.size.width;
CGFloat pixelHeight = inputImage.size.height;
expect(pixelWidth).to.beGreaterThan(0);
expect(pixelHeight).to.beGreaterThan(0);
// check thumbnail decoding from scratch
CGFloat thumbnailWidth = 50;
CGFloat thumbnailHeight = 50;
UIImage *thumbImage = [coder decodedImageWithData:inputImageData options:@{
SDImageCoderDecodeThumbnailPixelSize : @(CGSizeMake(thumbnailWidth, thumbnailHeight)),
SDImageCoderDecodePreserveAspectRatio : @(NO)
}];
expect(thumbImage).toNot.beNil();
expect(thumbImage.size).to.equal(CGSizeMake(thumbnailWidth, thumbnailHeight));
// check thumbnail decoding with aspect ratio limit
thumbImage = [coder decodedImageWithData:inputImageData options:@{
SDImageCoderDecodeThumbnailPixelSize : @(CGSizeMake(thumbnailWidth, thumbnailHeight)),
SDImageCoderDecodePreserveAspectRatio : @(YES)
}];
expect(thumbImage).toNot.beNil();
CGFloat ratio = pixelWidth / pixelHeight;
CGFloat thumbnailRatio = thumbnailWidth / thumbnailHeight;
CGSize thumbnailPixelSize;
if (ratio > thumbnailRatio) {
thumbnailPixelSize = CGSizeMake(thumbnailWidth, round(thumbnailWidth / ratio));
} else {
thumbnailPixelSize = CGSizeMake(round(thumbnailHeight * ratio), thumbnailHeight);
}
// Image/IO's thumbnail API does not always use round to preserve precision, so we allow a difference of at most 1 pixel
expect(ABS(thumbImage.size.width - thumbnailPixelSize.width)).to.beLessThanOrEqualTo(1);
expect(ABS(thumbImage.size.height - thumbnailPixelSize.height)).to.beLessThanOrEqualTo(1);
if (supportsEncoding) {
// 3 - check if we can encode to the original format
XCTAssertTrue([coder canEncodeToFormat:inputImageFormat], @"Coder should be able to encode");
// 4 - check if we can encode to the original format
if (encodingFormat == SDImageFormatUndefined) {
encodingFormat = inputImageFormat;
}
expect([coder canEncodeToFormat:encodingFormat]).to.beTruthy();
// 4 - encode from UIImage to NSData using the inputImageFormat and check it
NSData *outputImageData = [coder encodedDataWithImage:inputImage format:inputImageFormat options:nil];
XCTAssertNotNil(outputImageData, @"The encoded image data should not be nil");
// 5 - encode from UIImage to NSData using the inputImageFormat and check it
NSData *outputImageData = [coder encodedDataWithImage:inputImage format:encodingFormat options:nil];
expect(outputImageData).toNot.beNil();
UIImage *outputImage = [coder decodedImageWithData:outputImageData options:nil];
XCTAssertTrue(CGSizeEqualToSize(outputImage.size, inputImage.size), @"Output and input image size should match");
XCTAssertEqual(outputImage.scale, inputImage.scale, @"Output and input image scale should match");
expect(outputImage.size).to.equal(inputImage.size);
expect(outputImage.scale).to.equal(inputImage.scale);
#if SD_UIKIT
XCTAssertEqual(outputImage.images.count, inputImage.images.count, @"Output and input image frame count should match");
expect(outputImage.images.count).to.equal(inputImage.images.count);
#endif
}
}