Revert the changes due to a merge conflict.

This commit is contained in:
DreamPiggy 2019-01-26 17:42:17 +08:00
parent 4bb93d293f
commit d90ca19d1b
3 changed files with 0 additions and 867 deletions

View File

@@ -1,215 +0,0 @@
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import "FLAnimatedImageView+WebCache.h"
#if SD_UIKIT
#import "objc/runtime.h"
#import "UIView+WebCacheOperation.h"
#import "UIView+WebCache.h"
#import "NSData+ImageContentType.h"
#import "UIImageView+WebCache.h"
#import "UIImage+MultiFormat.h"
#import "UIImage+MemoryCacheCost.h"
// Private re-declaration of the internal UIView+WebCache entry point so this
// category can call it without exposing it publicly.
// NOTE(review): must stay in sync with the definition in UIView+WebCache.m — verify.
@interface UIView (PrivateWebCache)

- (void)sd_internalSetImageWithURL:(nullable NSURL *)url
                  placeholderImage:(nullable UIImage *)placeholder
                           options:(SDWebImageOptions)options
                      operationKey:(nullable NSString *)operationKey
             internalSetImageBlock:(nullable SDInternalSetImageBlock)setImageBlock
                          progress:(nullable SDWebImageDownloaderProgressBlock)progressBlock
                         completed:(nullable SDExternalCompletionBlock)completedBlock
                           context:(nullable NSDictionary<NSString *, id> *)context;

@end
/// Builds an `FLAnimatedImage` from raw GIF data, honoring the image view's
/// per-view frame-cache-size and predrawing options when the installed
/// FLAnimatedImage version supports them. Returns nil for non-GIF data.
static inline FLAnimatedImage * SDWebImageCreateFLAnimatedImage(FLAnimatedImageView *imageView, NSData *imageData) {
    if ([NSData sd_imageFormatForImageData:imageData] != SDImageFormatGIF) {
        return nil;
    }
    // Compatibility in 4.x for lower version FLAnimatedImage: older releases
    // only expose the plain data initializer, so probe before using the
    // extended one.
    SEL extendedInit = @selector(initWithAnimatedGIFData:optimalFrameCacheSize:predrawingEnabled:);
    if (![FLAnimatedImage instancesRespondToSelector:extendedInit]) {
        return [[FLAnimatedImage alloc] initWithAnimatedGIFData:imageData];
    }
    return [[FLAnimatedImage alloc] initWithAnimatedGIFData:imageData
                                      optimalFrameCacheSize:imageView.sd_optimalFrameCacheSize
                                          predrawingEnabled:imageView.sd_predrawingEnabled];
}
/// Estimated memory cost of keeping both the FLAnimatedImage frame buffer and
/// the poster UIImage alive; used as the memory-cache cost of the combined entry.
static inline NSUInteger SDWebImageMemoryCostFLAnimatedImage(FLAnimatedImage *animatedImage, UIImage *image) {
    // FLAnimatedImage only buffers a sliding "window" of frames
    // (frameCacheSizeCurrent, in [1...frame count]) rather than every frame,
    // and it does not support scale factors.
    NSUInteger bufferedFrames = animatedImage.frameCacheSizeCurrent;
    NSUInteger pixelsPerFrame = animatedImage.size.width * animatedImage.size.height;
    // Standard UIImage cost: pixel count scaled by screen scale, repeated per
    // sub-image for animated UIImages.
    NSUInteger posterCost = image.size.width * image.size.height * image.scale * image.scale;
    if (image.images) {
        posterCost = posterCost * image.images.count;
    }
    return (bufferedFrames * pixelsPerFrame) + posterCost;
}
@implementation UIImage (FLAnimatedImage)

// Associated-object accessor: the FLAnimatedImage built from this UIImage's
// GIF data, attached so the memory cache can hand it back without re-decoding.
- (FLAnimatedImage *)sd_FLAnimatedImage {
    return objc_getAssociatedObject(self, @selector(sd_FLAnimatedImage));
}

- (void)setSd_FLAnimatedImage:(FLAnimatedImage *)sd_FLAnimatedImage {
    objc_setAssociatedObject(self, @selector(sd_FLAnimatedImage), sd_FLAnimatedImage, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}

@end
@implementation FLAnimatedImageView (WebCache)

// These property-based options will move to `SDWebImageContext` in 5.x, to
// allow per-image-request level options instead of per-imageView-level options.

/// Preferred FLAnimatedImage frame cache size for this view (0 = let
/// FLAnimatedImage choose). Backed by an associated NSNumber.
- (NSUInteger)sd_optimalFrameCacheSize {
    NSUInteger optimalFrameCacheSize = 0;
    NSNumber *value = objc_getAssociatedObject(self, @selector(sd_optimalFrameCacheSize));
    if ([value isKindOfClass:[NSNumber class]]) {
        // Fix: read the full NSUInteger. The previous `unsignedShortValue`
        // silently truncated any configured size above 65535.
        optimalFrameCacheSize = value.unsignedIntegerValue;
    }
    return optimalFrameCacheSize;
}

- (void)setSd_optimalFrameCacheSize:(NSUInteger)sd_optimalFrameCacheSize {
    objc_setAssociatedObject(self, @selector(sd_optimalFrameCacheSize), @(sd_optimalFrameCacheSize), OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
/// Whether FLAnimatedImage should pre-draw frames for this view.
/// Defaults to YES when the associated value has never been set.
- (BOOL)sd_predrawingEnabled {
    NSNumber *stored = objc_getAssociatedObject(self, @selector(sd_predrawingEnabled));
    if (![stored isKindOfClass:[NSNumber class]]) {
        return YES;
    }
    return stored.boolValue;
}

- (void)setSd_predrawingEnabled:(BOOL)sd_predrawingEnabled {
    objc_setAssociatedObject(self, @selector(sd_predrawingEnabled), @(sd_predrawingEnabled), OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
/// Whether the decoded FLAnimatedImage should be attached to the cached
/// UIImage (memory cache). Defaults to YES when never set.
- (BOOL)sd_cacheFLAnimatedImage {
    NSNumber *stored = objc_getAssociatedObject(self, @selector(sd_cacheFLAnimatedImage));
    if (![stored isKindOfClass:[NSNumber class]]) {
        return YES;
    }
    return stored.boolValue;
}

- (void)setSd_cacheFLAnimatedImage:(BOOL)sd_cacheFLAnimatedImage {
    objc_setAssociatedObject(self, @selector(sd_cacheFLAnimatedImage), @(sd_cacheFLAnimatedImage), OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
// Convenience overloads — all funnel into the designated
// sd_setImageWithURL:placeholderImage:options:progress:completed: below.

- (void)sd_setImageWithURL:(nullable NSURL *)url {
    [self sd_setImageWithURL:url placeholderImage:nil options:0 progress:nil completed:nil];
}

- (void)sd_setImageWithURL:(nullable NSURL *)url placeholderImage:(nullable UIImage *)placeholder {
    [self sd_setImageWithURL:url placeholderImage:placeholder options:0 progress:nil completed:nil];
}

- (void)sd_setImageWithURL:(nullable NSURL *)url placeholderImage:(nullable UIImage *)placeholder options:(SDWebImageOptions)options {
    [self sd_setImageWithURL:url placeholderImage:placeholder options:options progress:nil completed:nil];
}

- (void)sd_setImageWithURL:(nullable NSURL *)url completed:(nullable SDExternalCompletionBlock)completedBlock {
    [self sd_setImageWithURL:url placeholderImage:nil options:0 progress:nil completed:completedBlock];
}

- (void)sd_setImageWithURL:(nullable NSURL *)url placeholderImage:(nullable UIImage *)placeholder completed:(nullable SDExternalCompletionBlock)completedBlock {
    [self sd_setImageWithURL:url placeholderImage:placeholder options:0 progress:nil completed:completedBlock];
}

- (void)sd_setImageWithURL:(nullable NSURL *)url placeholderImage:(nullable UIImage *)placeholder options:(SDWebImageOptions)options completed:(nullable SDExternalCompletionBlock)completedBlock {
    [self sd_setImageWithURL:url placeholderImage:placeholder options:options progress:nil completed:completedBlock];
}
// Designated setter: loads via SDWebImage's internal machinery, then swaps in
// an FLAnimatedImage when the payload turns out to be a GIF.
//
// Concurrency note: `group` is entered by the internal set-image machinery
// (signalled through SDWebImageInternalSetImageGroupKey in the context) and
// must be left exactly once on every path through the set-image block below;
// the caller's completedBlock is deferred until the group drains.
- (void)sd_setImageWithURL:(nullable NSURL *)url
          placeholderImage:(nullable UIImage *)placeholder
                   options:(SDWebImageOptions)options
                  progress:(nullable SDWebImageDownloaderProgressBlock)progressBlock
                 completed:(nullable SDExternalCompletionBlock)completedBlock {
    dispatch_group_t group = dispatch_group_create();
    __weak typeof(self)weakSelf = self;
    [self sd_internalSetImageWithURL:url
                    placeholderImage:placeholder
                             options:options
                        operationKey:nil
               internalSetImageBlock:^(UIImage * _Nullable image, NSData * _Nullable imageData, SDImageCacheType cacheType, NSURL * _Nullable imageURL) {
                   __strong typeof(weakSelf)strongSelf = weakSelf;
                   if (!strongSelf) {
                       // View deallocated mid-load; still balance the group.
                       dispatch_group_leave(group);
                       return;
                   }
                   // Step 1. Check memory cache (associate object)
                   FLAnimatedImage *associatedAnimatedImage = image.sd_FLAnimatedImage;
                   if (associatedAnimatedImage) {
                       // Asscociated animated image exist
                       // FLAnimatedImage framework contains a bug that cause GIF been rotated if previous rendered image orientation is not Up. We have to call `setImage:` with non-nil image to reset the state. See `https://github.com/SDWebImage/SDWebImage/issues/2402`
                       strongSelf.image = associatedAnimatedImage.posterImage;
                       strongSelf.animatedImage = associatedAnimatedImage;
                       dispatch_group_leave(group);
                       return;
                   }
                   // Step 2. Check if original compressed image data is "GIF"
                   BOOL isGIF = (image.sd_imageFormat == SDImageFormatGIF || [NSData sd_imageFormatForImageData:imageData] == SDImageFormatGIF);
                   // Check if placeholder, which does not trigger a backup disk cache query
                   BOOL isPlaceholder = !imageData && image && cacheType == SDImageCacheTypeNone;
                   if (!isGIF || isPlaceholder) {
                       // Not animated (or just the placeholder): plain set.
                       strongSelf.image = image;
                       strongSelf.animatedImage = nil;
                       dispatch_group_leave(group);
                       return;
                   }
                   __weak typeof(strongSelf) wweakSelf = strongSelf;
                   // Hack, mark we need should use dispatch group notify for completedBlock
                   objc_setAssociatedObject(group, &SDWebImageInternalSetImageGroupKey, @(YES), OBJC_ASSOCIATION_RETAIN_NONATOMIC);
                   // GIF decoding is expensive; do it off the main queue.
                   dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
                       __strong typeof(wweakSelf) sstrongSelf = wweakSelf;
                       // Bail if the view went away or was re-targeted to
                       // another URL while we were queued.
                       // NOTE(review): early return here skips dispatch_group_leave —
                       // presumably intentional because the view/URL changed; confirm
                       // against the internal group-notify handling.
                       if (!sstrongSelf || ![url isEqual:sstrongSelf.sd_imageURL]) { return ; }
                       // Step 3. Check if data exist or query disk cache
                       NSString *key = [[SDWebImageManager sharedManager] cacheKeyForURL:url];
                       __block NSData *gifData = imageData;
                       if (!gifData) {
                           gifData = [[SDImageCache sharedImageCache] diskImageDataForKey:key];
                       }
                       // Step 4. Create FLAnimatedImage
                       FLAnimatedImage *animatedImage = SDWebImageCreateFLAnimatedImage(sstrongSelf, gifData);
                       // Step 5 must touch UIKit, so hop back to main.
                       dispatch_async(dispatch_get_main_queue(), ^{
                           if (![url isEqual:sstrongSelf.sd_imageURL]) { return ; }
                           // Step 5. Set animatedImage or normal image
                           if (animatedImage) {
                               if (sstrongSelf.sd_cacheFLAnimatedImage && SDImageCache.sharedImageCache.config.shouldCacheImagesInMemory) {
                                   // Attach the decoded FLAnimatedImage to the cached
                                   // UIImage and refresh the memory-cache entry with the
                                   // combined cost.
                                   image.sd_FLAnimatedImage = animatedImage;
                                   image.sd_memoryCost = SDWebImageMemoryCostFLAnimatedImage(animatedImage, image);
                                   // Update the memory cache
                                   [SDImageCache.sharedImageCache removeImageForKey:key fromDisk:NO withCompletion:nil];
                                   [SDImageCache.sharedImageCache storeImage:image forKey:key toDisk:NO completion:nil];
                               }
                               sstrongSelf.image = animatedImage.posterImage;
                               sstrongSelf.animatedImage = animatedImage;
                           } else {
                               sstrongSelf.image = image;
                               sstrongSelf.animatedImage = nil;
                           }
                           dispatch_group_leave(group);
                       });
                   });
               }
                            progress:progressBlock
                           completed:completedBlock
                             context:@{SDWebImageInternalSetImageGroupKey: group}];
}

@end

#endif

View File

@@ -1,551 +0,0 @@
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#ifdef SD_WEBP
#import "SDWebImageWebPCoder.h"
#import "SDWebImageCoderHelper.h"
#import "NSImage+WebCache.h"
#import "UIImage+MultiFormat.h"
#import "SDWebImageImageIOCoder.h"
#if __has_include(<webp/decode.h>) && __has_include(<webp/encode.h>) && __has_include(<webp/demux.h>) && __has_include(<webp/mux.h>)
#import <webp/decode.h>
#import <webp/encode.h>
#import <webp/demux.h>
#import <webp/mux.h>
#else
#import "webp/decode.h"
#import "webp/encode.h"
#import "webp/demux.h"
#import "webp/mux.h"
#endif
#import <Accelerate/Accelerate.h>
@implementation SDWebImageWebPCoder {
    // Incremental (progressive) WebP decoder state; created lazily in
    // incrementallyDecodedImageWithData:finished: and torn down when the
    // stream finishes or the coder deallocates.
    WebPIDecoder *_idec;
}

- (void)dealloc {
    // Release libwebp's incremental decoder if a progressive decode was
    // abandoned mid-stream.
    if (_idec) {
        WebPIDelete(_idec);
        _idec = NULL;
    }
}

// Shared singleton, created once via dispatch_once.
+ (instancetype)sharedCoder {
    static SDWebImageWebPCoder *coder;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        coder = [[SDWebImageWebPCoder alloc] init];
    });
    return coder;
}
#pragma mark - Decode

// This coder only handles data whose magic bytes identify it as WebP.
- (BOOL)canDecodeFromData:(nullable NSData *)data {
    return ([NSData sd_imageFormatForImageData:data] == SDImageFormatWebP);
}

// Progressive decoding is supported for the same format.
- (BOOL)canIncrementallyDecodeFromData:(NSData *)data {
    return ([NSData sd_imageFormatForImageData:data] == SDImageFormatWebP);
}
// Decodes complete WebP data into a UIImage: a plain image for static WebP,
// or an animated image assembled frame-by-frame on a shared canvas for
// animated WebP (frames can be partial rects with blend/dispose semantics).
- (UIImage *)decodedImageWithData:(NSData *)data {
    if (!data) {
        return nil;
    }
    // Wrap the NSData bytes without copying; `data` outlives the demuxer use.
    WebPData webpData;
    WebPDataInit(&webpData);
    webpData.bytes = data.bytes;
    webpData.size = data.length;
    WebPDemuxer *demuxer = WebPDemux(&webpData);
    if (!demuxer) {
        return nil;
    }
    uint32_t flags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
    // Retained colorspace (ICC profile or device RGB); released on every exit.
    CGColorSpaceRef colorSpace = [self sd_colorSpaceWithDemuxer:demuxer];
    if (!(flags & ANIMATION_FLAG)) {
        // for static single webp image
        UIImage *staticImage = [self sd_rawWebpImageWithData:webpData colorSpace:colorSpace];
        WebPDemuxDelete(demuxer);
        CGColorSpaceRelease(colorSpace);
        staticImage.sd_imageFormat = SDImageFormatWebP;
        return staticImage;
    }
    int loopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
    int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
    int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
    CGBitmapInfo bitmapInfo;
    // `CGBitmapContextCreate` does not support RGB888 on iOS. Where `CGImageCreate` supports.
    if (!(flags & ALPHA_FLAG)) {
        // RGBX8888
        bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaNoneSkipLast;
    } else {
        // RGBA8888
        bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast;
    }
    // One full-size canvas reused across frames so partial frames composite
    // onto the previous state.
    CGContextRef canvas = CGBitmapContextCreate(NULL, canvasWidth, canvasHeight, 8, 0, SDCGColorSpaceGetDeviceRGB(), bitmapInfo);
    if (!canvas) {
        WebPDemuxDelete(demuxer);
        CGColorSpaceRelease(colorSpace);
        return nil;
    }
    // for animated webp image
    WebPIterator iter;
    if (!WebPDemuxGetFrame(demuxer, 1, &iter)) {
        WebPDemuxReleaseIterator(&iter);
        WebPDemuxDelete(demuxer);
        CGContextRelease(canvas);
        CGColorSpaceRelease(colorSpace);
        return nil;
    }
    NSMutableArray<SDWebImageFrame *> *frames = [NSMutableArray array];
    do {
        // Each frame decode allocates sizable temporaries; drain per frame.
        @autoreleasepool {
            UIImage *image = [self sd_drawnWebpImageWithCanvas:canvas iterator:iter colorSpace:colorSpace];
            if (!image) {
                continue;
            }
            int duration = iter.duration;
            if (duration <= 10) {
                // WebP standard says 0 duration is used for canvas updating but not showing image, but actually Chrome and other implementations set it to 100ms if duration is lower or equal than 10ms
                // Some animated WebP images also created without duration, we should keep compatibility
                duration = 100;
            }
            SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:image duration:duration / 1000.f];
            [frames addObject:frame];
        }
    } while (WebPDemuxNextFrame(&iter));
    WebPDemuxReleaseIterator(&iter);
    WebPDemuxDelete(demuxer);
    CGContextRelease(canvas);
    CGColorSpaceRelease(colorSpace);
    UIImage *animatedImage = [SDWebImageCoderHelper animatedImageWithFrames:frames];
    animatedImage.sd_imageLoopCount = loopCount;
    animatedImage.sd_imageFormat = SDImageFormatWebP;
    return animatedImage;
}
// Progressive decode: feeds the accumulated `data` (must be the full bytes
// received so far, not a delta — WebPIUpdate expects cumulative input) into a
// persistent incremental decoder and returns the image decoded so far.
// Returns nil (ARC-zeroed local) when there is not yet enough data.
- (UIImage *)incrementallyDecodedImageWithData:(NSData *)data finished:(BOOL)finished {
    if (!_idec) {
        // Progressive images need transparent, so always use premultiplied RGBA
        _idec = WebPINewRGB(MODE_rgbA, NULL, 0, 0);
        if (!_idec) {
            return nil;
        }
    }
    UIImage *image;
    VP8StatusCode status = WebPIUpdate(_idec, data.bytes, data.length);
    // SUSPENDED just means "need more data" — keep the decoder alive.
    if (status != VP8_STATUS_OK && status != VP8_STATUS_SUSPENDED) {
        return nil;
    }
    int width = 0;
    int height = 0;
    int last_y = 0;
    int stride = 0;
    // Borrowed pointer into the decoder's internal buffer; valid until the
    // next WebPIUpdate/WebPIDelete.
    uint8_t *rgba = WebPIDecGetRGB(_idec, &last_y, &width, &height, &stride);
    // last_y may be 0, means no enough bitmap data to decode, ignore this
    if (width + height > 0 && last_y > 0 && height >= last_y) {
        // Construct a UIImage from the decoded RGBA value array
        size_t rgbaSize = last_y * stride;
        // NULL release callback: the provider borrows the decoder's buffer,
        // which stays valid for the lifetime of this image construction.
        CGDataProviderRef provider =
        CGDataProviderCreateWithData(NULL, rgba, rgbaSize, NULL);
        CGColorSpaceRef colorSpaceRef = SDCGColorSpaceGetDeviceRGB();
        CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast;
        size_t components = 4;
        CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
        // Why to use last_y for image height is because of libwebp's bug (https://bugs.chromium.org/p/webp/issues/detail?id=362)
        // It will not keep memory barrier safe on x86 architechure (macOS & iPhone simulator) but on ARM architecture (iPhone & iPad & tv & watch) it works great
        // If different threads use WebPIDecGetRGB to grab rgba bitmap, it will contain the previous decoded bitmap data
        // So this will cause our drawed image looks strange(above is the current part but below is the previous part)
        // We only grab the last_y height and draw the last_y heigh instead of total height image
        // Besides fix, this can enhance performance since we do not need to create extra bitmap
        CGImageRef imageRef = CGImageCreate(width, last_y, 8, components * 8, components * width, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
        CGDataProviderRelease(provider);
        if (!imageRef) {
            return nil;
        }
        // Draw the partial rows onto a full-height canvas so the returned
        // image always has the final dimensions, with undecoded rows left
        // transparent.
        CGContextRef canvas = CGBitmapContextCreate(NULL, width, height, 8, 0, SDCGColorSpaceGetDeviceRGB(), bitmapInfo);
        if (!canvas) {
            CGImageRelease(imageRef);
            return nil;
        }
        // Only draw the last_y image height, keep remains transparent, in Core Graphics coordinate system
        CGContextDrawImage(canvas, CGRectMake(0, height - last_y, width, last_y), imageRef);
        CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
        CGImageRelease(imageRef);
        if (!newImageRef) {
            CGContextRelease(canvas);
            return nil;
        }
#if SD_UIKIT || SD_WATCH
        image = [[UIImage alloc] initWithCGImage:newImageRef];
#else
        image = [[UIImage alloc] initWithCGImage:newImageRef size:NSZeroSize];
#endif
        image.sd_imageFormat = SDImageFormatWebP;
        CGImageRelease(newImageRef);
        CGContextRelease(canvas);
    }
    if (finished) {
        // Stream complete: tear down the incremental decoder.
        if (_idec) {
            WebPIDelete(_idec);
            _idec = NULL;
        }
    }
    return image;
}
// Force-decompression hook: delegates to the ImageIO coder since the logic
// is format-independent.
- (UIImage *)decompressedImageWithImage:(UIImage *)image
                                   data:(NSData *__autoreleasing _Nullable *)data
                                options:(nullable NSDictionary<NSString*, NSObject*>*)optionsDict {
    // Decompress can help pre-draw the image and transfer the backing store to render process.
    // Well, it can reduce the `App process memory usage` from Xcode, because the backing store is in `Other process` (render process). But it does not help for total memory usage for device.
    // This logic is actually the same as Image/IO, reuse the code. The refactory has already done in 5.x
    return [[SDWebImageImageIOCoder sharedCoder] decompressedImageWithImage:image data:data options:optionsDict];
}
// Composites one animation frame onto the shared canvas (applying WebP
// blend/dispose semantics) and returns a snapshot of the full canvas.
// The canvas carries state between calls, so frames must be fed in order.
- (nullable UIImage *)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef {
    UIImage *image = [self sd_rawWebpImageWithData:iter.fragment colorSpace:colorSpaceRef];
    if (!image) {
        return nil;
    }
    size_t canvasWidth = CGBitmapContextGetWidth(canvas);
    size_t canvasHeight = CGBitmapContextGetHeight(canvas);
    CGSize size = CGSizeMake(canvasWidth, canvasHeight);
    // Convert the frame's top-left offset into Core Graphics' bottom-left
    // origin coordinate system.
    CGFloat tmpX = iter.x_offset;
    CGFloat tmpY = size.height - iter.height - iter.y_offset;
    CGRect imageRect = CGRectMake(tmpX, tmpY, iter.width, iter.height);
    BOOL shouldBlend = iter.blend_method == WEBP_MUX_BLEND;
    // If not blend, cover the target image rect. (firstly clear then draw)
    if (!shouldBlend) {
        CGContextClearRect(canvas, imageRect);
    }
    CGContextDrawImage(canvas, imageRect, image.CGImage);
    CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
#if SD_UIKIT || SD_WATCH
    image = [[UIImage alloc] initWithCGImage:newImageRef];
#elif SD_MAC
    image = [[UIImage alloc] initWithCGImage:newImageRef size:NSZeroSize];
#endif
    CGImageRelease(newImageRef);
    // DISPOSE_BACKGROUND: clear this frame's rect before the next frame is
    // drawn, per the WebP container spec.
    if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
        CGContextClearRect(canvas, imageRect);
    }
    return image;
}
// Decodes one WebP bitstream (a static image or a single animation fragment)
// into a UIImage using the supplied colorspace. Returns nil on any libwebp
// failure.
- (nullable UIImage *)sd_rawWebpImageWithData:(WebPData)webpData colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef {
    WebPDecoderConfig config;
    if (!WebPInitDecoderConfig(&config)) {
        return nil;
    }
    if (WebPGetFeatures(webpData.bytes, webpData.size, &config.input) != VP8_STATUS_OK) {
        return nil;
    }
    // Premultiplied RGBA when alpha is present, packed RGB otherwise.
    config.output.colorspace = config.input.has_alpha ? MODE_rgbA : MODE_RGB;
    config.options.use_threads = 1;
    // Decode the WebP image data into a RGBA value array
    if (WebPDecode(webpData.bytes, webpData.size, &config) != VP8_STATUS_OK) {
        return nil;
    }
    int width = config.input.width;
    int height = config.input.height;
    if (config.options.use_scaling) {
        width = config.options.scaled_width;
        height = config.options.scaled_height;
    }
    // Construct a UIImage from the decoded RGBA value array
    // Ownership of the WebPDecode output buffer is handed to the provider;
    // FreeImageData frees it when CoreGraphics is done.
    CGDataProviderRef provider =
    CGDataProviderCreateWithData(NULL, config.output.u.RGBA.rgba, config.output.u.RGBA.size, FreeImageData);
    CGBitmapInfo bitmapInfo;
    // `CGBitmapContextCreate` does not support RGB888 on iOS. Where `CGImageCreate` supports.
    if (!config.input.has_alpha) {
        // RGB888
        bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaNone;
    } else {
        // RGBA8888
        bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast;
    }
    size_t components = config.input.has_alpha ? 4 : 3;
    CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
    CGImageRef imageRef = CGImageCreate(width, height, 8, components * 8, components * width, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
    CGDataProviderRelease(provider);
#if SD_UIKIT || SD_WATCH
    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef];
#else
    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef size:NSZeroSize];
#endif
    CGImageRelease(imageRef);
    return image;
}
// Create and return the correct colorspace by checking the ICC Profile
/// Returns a +1 retained colorspace (CF_RETURNS_RETAINED — caller releases):
/// the embedded ICC profile's colorspace when present and RGB-based,
/// otherwise the shared device RGB colorspace.
- (nonnull CGColorSpaceRef)sd_colorSpaceWithDemuxer:(nonnull WebPDemuxer *)demuxer CF_RETURNS_RETAINED {
    // WebP contains ICC Profile should use the desired colorspace, instead of default device colorspace
    // See: https://developers.google.com/speed/webp/docs/riff_container#color_profile
    CGColorSpaceRef colorSpaceRef = NULL;
    uint32_t flags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
    if (flags & ICCP_FLAG) {
        WebPChunkIterator chunk_iter;
        int result = WebPDemuxGetChunk(demuxer, "ICCP", 1, &chunk_iter);
        if (result) {
            // No-copy wrapper: CGColorSpaceCreateWithICCProfile copies what it
            // needs, so the chunk bytes only have to outlive this call.
            NSData *profileData = [NSData dataWithBytesNoCopy:(void *)chunk_iter.chunk.bytes length:chunk_iter.chunk.size freeWhenDone:NO];
            colorSpaceRef = CGColorSpaceCreateWithICCProfile((__bridge CFDataRef)profileData);
            WebPDemuxReleaseChunkIterator(&chunk_iter);
            // Fix: a non-RGB ICC profile (e.g. CMYK) cannot back the RGB
            // bitmap contexts this coder creates — fall back to device RGB
            // instead of handing out an unusable colorspace.
            if (colorSpaceRef && CGColorSpaceGetModel(colorSpaceRef) != kCGColorSpaceModelRGB) {
                CGColorSpaceRelease(colorSpaceRef);
                colorSpaceRef = NULL;
            }
        }
    }
    if (!colorSpaceRef) {
        // Retain to honor the CF_RETURNS_RETAINED contract for the shared space.
        colorSpaceRef = SDCGColorSpaceGetDeviceRGB();
        CGColorSpaceRetain(colorSpaceRef);
    }
    return colorSpaceRef;
}
#pragma mark - Encode

// This coder only produces WebP output.
- (BOOL)canEncodeToFormat:(SDImageFormat)format {
    return (format == SDImageFormatWebP);
}
// Encodes a UIImage to WebP data: a single bitstream for static images, or a
// muxed animated WebP (one ANMF chunk per frame plus loop-count params) when
// the image decomposes into frames.
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format {
    if (!image) {
        return nil;
    }
    NSData *data;
    NSArray<SDWebImageFrame *> *frames = [SDWebImageCoderHelper framesFromAnimatedImage:image];
    if (frames.count == 0) {
        // for static single webp image
        data = [self sd_encodedWebpDataWithImage:image];
    } else {
        // for animated webp image
        WebPMux *mux = WebPMuxNew();
        if (!mux) {
            return nil;
        }
        for (size_t i = 0; i < frames.count; i++) {
            SDWebImageFrame *currentFrame = frames[i];
            // Each frame is first encoded as an independent WebP bitstream.
            NSData *webpData = [self sd_encodedWebpDataWithImage:currentFrame.image];
            // Frame duration: seconds -> milliseconds.
            int duration = currentFrame.duration * 1000;
            WebPMuxFrameInfo frame = { .bitstream.bytes = webpData.bytes,
                                       .bitstream.size = webpData.length,
                                       .duration = duration,
                                       .id = WEBP_CHUNK_ANMF,
                                       .dispose_method = WEBP_MUX_DISPOSE_BACKGROUND, // each frame will clear canvas
                                       .blend_method = WEBP_MUX_NO_BLEND
                                     };
            // copy_data = 0: mux references the bitstream bytes; webpData
            // stays alive until WebPMuxAssemble below.
            if (WebPMuxPushFrame(mux, &frame, 0) != WEBP_MUX_OK) {
                WebPMuxDelete(mux);
                return nil;
            }
        }
        int loopCount = (int)image.sd_imageLoopCount;
        WebPMuxAnimParams params = { .bgcolor = 0,
                                     .loop_count = loopCount
                                   };
        if (WebPMuxSetAnimationParams(mux, &params) != WEBP_MUX_OK) {
            WebPMuxDelete(mux);
            return nil;
        }
        WebPData outputData;
        WebPMuxError error = WebPMuxAssemble(mux, &outputData);
        WebPMuxDelete(mux);
        if (error != WEBP_MUX_OK) {
            return nil;
        }
        // Copy the assembled container out of libwebp's buffer, then free it.
        data = [NSData dataWithBytes:outputData.bytes length:outputData.size];
        WebPDataClear(&outputData);
    }
    return data;
}
/// Encodes a single (non-animated) UIImage to WebP data.
/// Converts the backing bitmap to RGB888/RGBA8888 via vImage when needed,
/// then hands the buffer to WebPEncodeRGB(A). Returns nil on any failure.
- (nullable NSData *)sd_encodedWebpDataWithImage:(nullable UIImage *)image {
    if (!image) {
        return nil;
    }
    NSData *webpData;
    CGImageRef imageRef = image.CGImage;
    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    // libwebp rejects dimensions outside (0, WEBP_MAX_DIMENSION].
    if (width == 0 || width > WEBP_MAX_DIMENSION) {
        return nil;
    }
    if (height == 0 || height > WEBP_MAX_DIMENSION) {
        return nil;
    }
    size_t bytesPerRow = CGImageGetBytesPerRow(imageRef);
    CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
    CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;
    CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;
    BOOL hasAlpha = !(alphaInfo == kCGImageAlphaNone ||
                      alphaInfo == kCGImageAlphaNoneSkipFirst ||
                      alphaInfo == kCGImageAlphaNoneSkipLast);
    // "Normal" byte order here means host/default or explicit 32-bit big-endian.
    BOOL byteOrderNormal = NO;
    switch (byteOrderInfo) {
        case kCGBitmapByteOrderDefault: {
            byteOrderNormal = YES;
        } break;
        case kCGBitmapByteOrder32Little: {
        } break;
        case kCGBitmapByteOrder32Big: {
            byteOrderNormal = YES;
        } break;
        default: break;
    }
    // If we can not get bitmap buffer, early return
    CGDataProviderRef dataProvider = CGImageGetDataProvider(imageRef);
    if (!dataProvider) {
        return nil;
    }
    CFDataRef dataRef = CGDataProviderCopyData(dataProvider);
    if (!dataRef) {
        return nil;
    }
    uint8_t *rgba = NULL;
    // We could not assume that input CGImage's color mode is always RGB888/RGBA8888. Convert all other cases to target color mode using vImage
    if (byteOrderNormal && ((alphaInfo == kCGImageAlphaNone) || (alphaInfo == kCGImageAlphaLast))) {
        // If the input CGImage is already RGB888/RGBA8888
        rgba = (uint8_t *)CFDataGetBytePtr(dataRef);
    } else {
        // Convert all other cases to target color mode using vImage
        vImageConverterRef convertor = NULL;
        vImage_Error error = kvImageNoError;
        vImage_CGImageFormat srcFormat = {
            .bitsPerComponent = (uint32_t)CGImageGetBitsPerComponent(imageRef),
            .bitsPerPixel = (uint32_t)CGImageGetBitsPerPixel(imageRef),
            .colorSpace = CGImageGetColorSpace(imageRef),
            .bitmapInfo = bitmapInfo
        };
        vImage_CGImageFormat destFormat = {
            .bitsPerComponent = 8,
            .bitsPerPixel = hasAlpha ? 32 : 24,
            .colorSpace = SDCGColorSpaceGetDeviceRGB(),
            .bitmapInfo = hasAlpha ? kCGImageAlphaLast | kCGBitmapByteOrderDefault : kCGImageAlphaNone | kCGBitmapByteOrderDefault // RGB888/RGBA8888 (Non-premultiplied to works for libwebp)
        };
        convertor = vImageConverter_CreateWithCGImageFormat(&srcFormat, &destFormat, NULL, kvImageNoFlags, &error);
        if (error != kvImageNoError) {
            CFRelease(dataRef);
            return nil;
        }
        vImage_Buffer src = {
            .data = (uint8_t *)CFDataGetBytePtr(dataRef),
            .width = width,
            .height = height,
            .rowBytes = bytesPerRow
        };
        vImage_Buffer dest;
        error = vImageBuffer_Init(&dest, height, width, destFormat.bitsPerPixel, kvImageNoFlags);
        if (error != kvImageNoError) {
            // Fix: release the converter on this error path (it leaked before).
            vImageConverter_Release(convertor);
            CFRelease(dataRef);
            return nil;
        }
        // Convert input color mode to RGB888/RGBA8888
        error = vImageConvert_AnyToAny(convertor, &src, &dest, NULL, kvImageNoFlags);
        // Fix: the converter is created at +1 and was never released anywhere;
        // release it as soon as the conversion attempt finishes.
        vImageConverter_Release(convertor);
        if (error != kvImageNoError) {
            // Fix: free the buffer vImageBuffer_Init allocated — it leaked on
            // conversion failure before.
            free(dest.data);
            CFRelease(dataRef);
            return nil;
        }
        rgba = dest.data; // Converted buffer
        bytesPerRow = dest.rowBytes; // Converted bytePerRow
        CFRelease(dataRef);
        dataRef = NULL;
    }
    uint8_t *data = NULL; // Output WebP data
    float qualityFactor = 100; // WebP quality is 0-100
    // Encode RGB888/RGBA8888 buffer to WebP data
    size_t size;
    if (hasAlpha) {
        size = WebPEncodeRGBA(rgba, (int)width, (int)height, (int)bytesPerRow, qualityFactor, &data);
    } else {
        size = WebPEncodeRGB(rgba, (int)width, (int)height, (int)bytesPerRow, qualityFactor, &data);
    }
    if (dataRef) {
        CFRelease(dataRef); // free non-converted rgba buffer
        dataRef = NULL;
    } else {
        free(rgba); // free converted rgba buffer
        rgba = NULL;
    }
    if (size) {
        // success
        webpData = [NSData dataWithBytes:data length:size];
    }
    if (data) {
        WebPFree(data);
    }
    return webpData;
}
// CGDataProvider release callback: frees the WebPDecode output buffer once
// CoreGraphics is done with it (see sd_rawWebpImageWithData:colorSpace:).
static void FreeImageData(void *info, const void *data, size_t size) {
    free((void *)data);
}

@end

#endif

View File

@@ -53,110 +53,9 @@
#pragma mark - Helper
<<<<<<< HEAD
- (NSString *)testJPEGPath {
NSBundle *testBundle = [NSBundle bundleForClass:[self class]];
return [testBundle pathForResource:@"TestImage" ofType:@"jpg"];
=======
// Verifies sd_setImageWithURL:forState: loads the remote image and installs
// it for the highlighted control state.
- (void)testUIButtonSetImageWithURLHighlightedState {
    XCTestExpectation *expectation = [self expectationWithDescription:@"UIButton setImageWithURL highlightedState"];
    UIButton *button = [[UIButton alloc] init];
    NSURL *originalImageURL = [NSURL URLWithString:kTestJpegURL];
    [button sd_setImageWithURL:originalImageURL
                      forState:UIControlStateHighlighted
                     completed:^(UIImage * _Nullable image, NSError * _Nullable error, SDImageCacheType cacheType, NSURL * _Nullable imageURL) {
                         expect(image).toNot.beNil();
                         expect(error).to.beNil();
                         expect(originalImageURL).to.equal(imageURL);
                         expect([button imageForState:UIControlStateHighlighted]).to.equal(image);
                         [expectation fulfill];
                     }];
    [self waitForExpectationsWithCommonTimeout];
}
// Verifies sd_setBackgroundImageWithURL:forState: loads the remote image and
// installs it as the normal-state background image.
- (void)testUIButtonSetBackgroundImageWithURLNormalState {
    XCTestExpectation *expectation = [self expectationWithDescription:@"UIButton setBackgroundImageWithURL normalState"];
    UIButton *button = [[UIButton alloc] init];
    NSURL *originalImageURL = [NSURL URLWithString:kTestJpegURL];
    [button sd_setBackgroundImageWithURL:originalImageURL
                                forState:UIControlStateNormal
                               completed:^(UIImage * _Nullable image, NSError * _Nullable error, SDImageCacheType cacheType, NSURL * _Nullable imageURL) {
                                   expect(image).toNot.beNil();
                                   expect(error).to.beNil();
                                   expect(originalImageURL).to.equal(imageURL);
                                   expect([button backgroundImageForState:UIControlStateNormal]).to.equal(image);
                                   [expectation fulfill];
                               }];
    [self waitForExpectationsWithCommonTimeout];
}
// Verifies FLAnimatedImageView's sd_setImageWithURL: yields a non-nil
// animatedImage for a GIF URL (the animated path, not a plain UIImage).
- (void)testFLAnimatedImageViewSetImageWithURL {
    XCTestExpectation *expectation = [self expectationWithDescription:@"FLAnimatedImageView setImageWithURL"];
    FLAnimatedImageView *imageView = [[FLAnimatedImageView alloc] init];
    NSURL *originalImageURL = [NSURL URLWithString:kTestGIFURL];
    [imageView sd_setImageWithURL:originalImageURL
                        completed:^(UIImage * _Nullable image, NSError * _Nullable error, SDImageCacheType cacheType, NSURL * _Nullable imageURL) {
                            expect(image).toNot.beNil();
                            expect(error).to.beNil();
                            expect(originalImageURL).to.equal(imageURL);
                            expect(imageView.animatedImage).toNot.beNil();
                            [expectation fulfill];
                        }];
    [self waitForExpectationsWithCommonTimeout];
}
// Regression test: when the placeholder IS the cached image for the same URL,
// the memory-cache hit must still produce an FLAnimatedImage.
- (void)testFLAnimatedImageViewSetImageWithPlaceholderFromCacheForSameURL {
    XCTestExpectation *expectation = [self expectationWithDescription:@"FLAnimatedImageView set image with a placeholder which is the same as the cached image for same url"];
    /**
     This is a really rare case. Some of user, who query the cache key for one GIF url and get the placeholder
     Then use the placeholder and trigger a query for same url, because it will hit memory cache immediately, so the two `setImageBlock` call will have the same image instance and hard to distinguish. (Because we should not do async disk cache check for placeholder)
     */
    FLAnimatedImageView *imageView = [[FLAnimatedImageView alloc] init];
    NSURL *originalImageURL = [NSURL URLWithString:@"http://assets.sbnation.com/assets/2512203/dogflops.gif"];
    NSString *key = [SDWebImageManager.sharedManager cacheKeyForURL:originalImageURL];
    // Prime the cache, then strip the associated FLAnimatedImage so the
    // second load has to rebuild it.
    [SDWebImageManager.sharedManager loadImageWithURL:originalImageURL options:0 progress:nil completed:^(UIImage * _Nullable image, NSData * _Nullable data, NSError * _Nullable error, SDImageCacheType cacheType, BOOL finished, NSURL * _Nullable imageURL) {
        UIImage *cachedImage = [SDImageCache.sharedImageCache imageFromCacheForKey:key];
        expect(cachedImage).toNot.beNil(); // Should be stored
        cachedImage.sd_FLAnimatedImage = nil; // Cleanup the associated FLAnimatedImage instance
        [imageView sd_setImageWithURL:originalImageURL
                     placeholderImage:cachedImage
                            completed:^(UIImage * _Nullable image, NSError * _Nullable error, SDImageCacheType cacheType, NSURL * _Nullable imageURL) {
                                expect(image).to.equal(cachedImage); // should hit the cache and it's the same as placeholder
                                expect(imageView.animatedImage).toNot.beNil();
                                [expectation fulfill];
                            }];
    }];
    [self waitForExpectationsWithCommonTimeout];
}
// Verifies sd_imageProgress is KVO-observable and reaches fractionCompleted
// == 1.0 after a forced network download; the observer is removed in the
// wait handler to keep add/remove balanced even on timeout.
- (void)testUIViewImageProgressKVOWork {
    XCTestExpectation *expectation = [self expectationWithDescription:@"UIView imageProgressKVO failed"];
    UIView *view = [[UIView alloc] init];
    NSURL *originalImageURL = [NSURL URLWithString:kTestJpegURL];
    [view.sd_imageProgress addObserver:self forKeyPath:NSStringFromSelector(@selector(fractionCompleted)) options:NSKeyValueObservingOptionNew context:SDCategoriesTestsContext];
    // Clear the disk cache to force download from network
    [[SDImageCache sharedImageCache] removeImageForKey:kTestJpegURL withCompletion:^{
        [view sd_internalSetImageWithURL:originalImageURL placeholderImage:nil options:0 operationKey:nil setImageBlock:nil progress:nil completed:^(UIImage * _Nullable image, NSError * _Nullable error, SDImageCacheType cacheType, NSURL * _Nullable imageURL) {
            expect(view.sd_imageProgress.fractionCompleted).equal(1.0);
            // The observer flags progress via userInfo keyed by this test's
            // selector name (_cmd).
            expect([view.sd_imageProgress.userInfo[NSStringFromSelector(_cmd)] boolValue]).equal(YES);
            [expectation fulfill];
        }];
    }];
    [self waitForExpectationsWithTimeout:kAsyncTestTimeout handler:^(NSError * _Nullable error) {
        [view.sd_imageProgress removeObserver:self forKeyPath:NSStringFromSelector(@selector(fractionCompleted)) context:SDCategoriesTestsContext];
    }];
}
- (NSString *)testGIFPath {