Merge pull request #77 from SDWebImage/performance/byte_alignment

Avoid force-decode by applying byte alignment to static WebP images, using runtime detection for the bitmap info
DreamPiggy 2023-07-14 00:15:16 +08:00 committed by GitHub
commit 3b274872cb
7 changed files with 148 additions and 40 deletions
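In short: the decoder now asks SDImageCoderHelper for the platform's preferred pixel format at runtime, allocates the libwebp output buffer itself, and pads each row to the preferred byte alignment, so Core Animation can render the decoded bitmap directly instead of force-decoding a copy. A minimal sketch of the row-padding math, using an illustrative AlignedBytesPerRow helper in place of SDWebImage's SDByteAlign (the 64-byte alignment in the example is an assumption; the real value comes from the preferred pixel format):

// Sketch only: round a row's byte length up to the next multiple of `alignment`,
// mirroring what SDByteAlign does in the patch below.
static size_t AlignedBytesPerRow(size_t width, size_t bytesPerPixel, size_t alignment) {
    size_t bytesPerRow = width * bytesPerPixel;
    return ((bytesPerRow + alignment - 1) / alignment) * alignment;
}
// Example: a 1023-pixel-wide RGBA row is 1023 * 4 = 4092 bytes; with a 64-byte
// alignment it rounds up to 4096, which is what the new unit test asserts.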

Cartfile

@@ -1,2 +1,2 @@
-github "SDWebImage/SDWebImage" ~> 5.16
+github "SDWebImage/SDWebImage" ~> 5.17
 github "SDWebImage/libwebp-Xcode" ~> 1.0

Package.swift

@@ -17,7 +17,7 @@ let package = Package(
     dependencies: [
         // Dependencies declare other packages that this package depends on.
         // .package(url: /* package url */, from: "1.0.0"),
-        .package(url: "https://github.com/SDWebImage/SDWebImage.git", from: "5.16.0"),
+        .package(url: "https://github.com/SDWebImage/SDWebImage.git", from: "5.17.0"),
         .package(url: "https://github.com/SDWebImage/libwebp-Xcode.git", from: "1.1.0")
     ],
     targets: [

SDWebImageWebPCoder.podspec

@@ -28,7 +28,7 @@ This is a SDWebImage coder plugin to support WebP image.
     'USER_HEADER_SEARCH_PATHS' => '$(inherited) $(SRCROOT)/libwebp/src'
   }
   s.framework = 'CoreGraphics'
-  s.dependency 'SDWebImage/Core', '~> 5.16'
+  s.dependency 'SDWebImage/Core', '~> 5.17'
  s.dependency 'libwebp', '~> 1.0'
end

SDImageWebPCoder.m

@@ -71,8 +71,8 @@ else OSSpinLockUnlock(&lock##_deprecated);
 /// Used for animated WebP, which need a canvas for decoding (rendering), possible apply a scale transform for thumbnail decoding (avoiding post-rescale using vImage)
 /// See more in #73
 static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canvasSize, CGSize thumbnailSize, BOOL preserveAspectRatio) {
-    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
-    bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
+    // From SDWebImage v5.17.0, use runtime detection of bitmap info instead of hardcode.
+    CGBitmapInfo bitmapInfo = [SDImageCoderHelper preferredPixelFormat:hasAlpha].bitmapInfo;
     // Check whether we need to use thumbnail
     CGSize scaledSize = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(canvasSize.width, canvasSize.height) scaleSize:thumbnailSize preserveAspectRatio:preserveAspectRatio shouldScaleUp:NO];
     CGContextRef canvas = CGBitmapContextCreate(NULL, scaledSize.width, scaledSize.height, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
@@ -88,18 +88,87 @@ static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canv
     return canvas;
 }
 
-// TODO, share this logic for multiple coders, or do refactory in v6.0 (The coder plugin should provide image information back to Core, like `CGImageSourceCopyPropertiesAtIndex`)
-static inline CGSize SDCalculateScaleDownPixelSize(NSUInteger limitBytes, CGSize originalSize, NSUInteger frameCount, NSUInteger bytesPerPixel) {
-    if (CGSizeEqualToSize(originalSize, CGSizeZero)) return CGSizeMake(1, 1);
-    NSUInteger totalFramePixelSize = limitBytes / bytesPerPixel / (frameCount ?: 1);
-    CGFloat ratio = originalSize.height / originalSize.width;
-    CGFloat width = sqrt(totalFramePixelSize / ratio);
-    CGFloat height = width * ratio;
-    width = MAX(1, floor(width));
-    height = MAX(1, floor(height));
-    CGSize size = CGSizeMake(width, height);
-    
-    return size;
-}
+WEBP_CSP_MODE ConvertCSPMode(CGBitmapInfo bitmapInfo) {
+    // Get alpha info, byteOrder info
+    CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;
+    CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;
+    BOOL byteOrderNormal = NO;
+    switch (byteOrderInfo) {
+        case kCGBitmapByteOrderDefault: {
+            byteOrderNormal = YES;
+        } break;
+        case kCGBitmapByteOrder32Little: {
+        } break;
+        case kCGBitmapByteOrder32Big: {
+            byteOrderNormal = YES;
+        } break;
+        default: break;
+    }
+    switch (alphaInfo) {
+        case kCGImageAlphaPremultipliedFirst: {
+            if (byteOrderNormal) {
+                // ARGB8888, premultiplied
+                return MODE_Argb;
+            } else {
+                // BGRA8888, premultiplied
+                return MODE_bgrA;
+            }
+        }
+            break;
+        case kCGImageAlphaPremultipliedLast: {
+            if (byteOrderNormal) {
+                // RGBA8888, premultiplied
+                return MODE_rgbA;
+            } else {
+                // ABGR8888, premultiplied
+                // Unsupported!
+                return MODE_LAST;
+            }
+        }
+            break;
+        case kCGImageAlphaNone: {
+            if (byteOrderNormal) {
+                // RGB
+                return MODE_RGB;
+            } else {
+                // BGR
+                return MODE_BGR;
+            }
+        }
+            break;
+        case kCGImageAlphaLast:
+        case kCGImageAlphaNoneSkipLast: {
+            if (byteOrderNormal) {
+                // RGBA or RGBX
+                return MODE_RGBA;
+            } else {
+                // ABGR or XBGR
+                // Unsupported!
+                return MODE_LAST;
+            }
+        }
+            break;
+        case kCGImageAlphaFirst:
+        case kCGImageAlphaNoneSkipFirst: {
+            if (byteOrderNormal) {
+                // ARGB or XRGB
+                return MODE_ARGB;
+            } else {
+                // BGRA or BGRX
+                return MODE_BGRA;
+            }
+        }
+            break;
+        case kCGImageAlphaOnly: {
+            // A
+            // Unsupported
+            return MODE_LAST;
+        }
+            break;
+        default:
+            break;
+    }
+    return MODE_LAST;
+}
 
 @interface SDWebPCoderFrame : NSObject
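To make the ConvertCSPMode mapping above concrete: on little-endian Apple hardware, kCGBitmapByteOrder32Host resolves to kCGBitmapByteOrder32Little, so the premultiplied-alpha-first format that iOS prefers for rendering maps to libwebp's premultiplied BGRA mode. A small usage sketch (the CGBitmapInfo values below are illustrative; the actual value is whatever +[SDImageCoderHelper preferredPixelFormat:] reports at runtime):

// Premultiplied BGRA in memory (little-endian 32-bit order, alpha first):
WEBP_CSP_MODE mode = ConvertCSPMode(kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); // -> MODE_bgrA
// Opaque bitmaps skip the alpha channel but keep the 4-byte layout (BGRX):
mode = ConvertCSPMode(kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst); // -> MODE_BGRA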
@@ -245,7 +314,7 @@ static inline CGSize SDCalculateScaleDownPixelSize(NSUInteger limitBytes, CGSize
     if (limitBytes > 0) {
         // Hack 32 BitsPerPixel
         CGSize imageSize = CGSizeMake(canvasWidth, canvasHeight);
-        CGSize framePixelSize = SDCalculateScaleDownPixelSize(limitBytes, imageSize, frameCount, 4);
+        CGSize framePixelSize = [SDImageCoderHelper scaledSizeWithImageSize:imageSize limitBytes:limitBytes bytesPerPixel:4 frameCount:frameCount];
         // Override thumbnail size
         thumbnailSize = framePixelSize;
         preserveAspectRatio = YES;
@@ -317,8 +386,8 @@ static inline CGSize SDCalculateScaleDownPixelSize(NSUInteger limitBytes, CGSize
 - (instancetype)initIncrementalWithOptions:(nullable SDImageCoderOptions *)options {
     self = [super init];
     if (self) {
-        // Progressive images need transparent, so always use premultiplied BGRA
-        _idec = WebPINewRGB(MODE_bgrA, NULL, 0, 0);
+        // Progressive images need transparent, so always use premultiplied RGBA
+        _idec = WebPINewRGB(MODE_rgbA, NULL, 0, 0);
         CGFloat scale = 1;
         NSNumber *scaleFactor = options[SDImageCoderDecodeScaleFactor];
         if (scaleFactor != nil) {
@@ -394,7 +463,7 @@ static inline CGSize SDCalculateScaleDownPixelSize(NSUInteger limitBytes, CGSize
     if (_limitBytes > 0) {
         // Hack 32 BitsPerPixel
         CGSize imageSize = CGSizeMake(_canvasWidth, _canvasHeight);
-        CGSize framePixelSize = SDCalculateScaleDownPixelSize(_limitBytes, imageSize, _frameCount, 4);
+        CGSize framePixelSize = [SDImageCoderHelper scaledSizeWithImageSize:imageSize limitBytes:_limitBytes bytesPerPixel:4 frameCount:_frameCount];
         // Override thumbnail size
         _thumbnailSize = framePixelSize;
         _preserveAspectRatio = YES;
@@ -428,9 +497,10 @@ static inline CGSize SDCalculateScaleDownPixelSize(NSUInteger limitBytes, CGSize
     CGDataProviderRef provider =
     CGDataProviderCreateWithData(NULL, rgba, rgbaSize, NULL);
     CGColorSpaceRef colorSpaceRef = [SDImageCoderHelper colorSpaceGetDeviceRGB];
-    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst;
+    // Because _idec use MODE_rgbA
+    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast;
     size_t components = 4;
+    BOOL shouldInterpolate = YES;
     CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
     // Why to use last_y for image height is because of libwebp's bug (https://bugs.chromium.org/p/webp/issues/detail?id=362)
     // It will not keep memory barrier safe on x86 architechure (macOS & iPhone simulator) but on ARM architecture (iPhone & iPad & tv & watch) it works great
@@ -438,7 +508,7 @@ static inline CGSize SDCalculateScaleDownPixelSize(NSUInteger limitBytes, CGSize
     // So this will cause our drawed image looks strange(above is the current part but below is the previous part)
     // We only grab the last_y height and draw the last_y height instead of total height image
     // Besides fix, this can enhance performance since we do not need to create extra bitmap
-    CGImageRef imageRef = CGImageCreate(width, last_y, 8, components * 8, components * width, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
+    CGImageRef imageRef = CGImageCreate(width, last_y, 8, components * 8, components * width, colorSpaceRef, bitmapInfo, provider, NULL, shouldInterpolate, renderingIntent);
     CGDataProviderRelease(provider);
@@ -546,20 +616,46 @@ static inline CGSize SDCalculateScaleDownPixelSize(NSUInteger limitBytes, CGSize
     }
     BOOL hasAlpha = config.input.has_alpha;
-    // iOS prefer BGRA8888 (premultiplied) or BGRX8888 bitmapInfo for screen rendering, which is same as `UIGraphicsBeginImageContext()` or `- [CALayer drawInContext:]`
-    // use this bitmapInfo, combined with right colorspace, even without decode, can still avoid extra CA::Render::copy_image(which marked `Color Copied Images` from Instruments)
-    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
-    bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
+    // From SDWebImage v5.17.0, use runtime detection of bitmap info instead of hardcode.
+    SDImagePixelFormat pixelFormat = [SDImageCoderHelper preferredPixelFormat:hasAlpha];
+    CGBitmapInfo bitmapInfo = pixelFormat.bitmapInfo;
+    WEBP_CSP_MODE mode = ConvertCSPMode(bitmapInfo);
+    if (mode == MODE_LAST) {
+        NSAssert(NO, @"Unsupported libwebp preferred CGBitmapInfo: %d", bitmapInfo);
+        return nil;
+    }
+    config.output.colorspace = mode;
     config.options.use_threads = 1;
-    config.output.colorspace = MODE_bgrA;
     
     // Use scaling for thumbnail
+    size_t width = config.input.width;
+    size_t height = config.input.height;
     if (scaledSize.width != 0 && scaledSize.height != 0) {
         config.options.use_scaling = 1;
         config.options.scaled_width = scaledSize.width;
         config.options.scaled_height = scaledSize.height;
+        width = scaledSize.width;
+        height = scaledSize.height;
     }
     
+    // We alloc the buffer and do byte alignment by ourself. libwebp defaults does not byte alignment to `bitsPerPixel`, which cause the CoreAnimation unhappy and always trigger the `CA::Render::copy_image`
+    size_t bitsPerComponent = 8;
+    size_t components = (mode == MODE_RGB || mode == MODE_BGR) ? 3 : 4; // Actually always 4
+    size_t bitsPerPixel = bitsPerComponent * components;
+    // Read: https://github.com/path/FastImageCache#byte-alignment
+    // A properly aligned bytes-per-row value must be a multiple of 8 pixels × bytes per pixel
+    // For a typical ARGB image, the aligned bytes-per-row value is a multiple of 64.
+    size_t alignment = pixelFormat.alignment;
+    size_t bytesPerRow = SDByteAlign(width * (bitsPerPixel / 8), alignment);
+    //size_t bytesPerRow = 6688;
+    void *rgba = WebPMalloc(bytesPerRow * height);
+    config.output.is_external_memory = 1;
+    config.output.u.RGBA.rgba = rgba;
+    config.output.u.RGBA.stride = (int)bytesPerRow;
+    config.output.u.RGBA.size = height * bytesPerRow;
+    
     // Decode the WebP image data into a RGBA value array
     if (WebPDecode(webpData.bytes, webpData.size, &config) != VP8_STATUS_OK) {
         return nil;
@@ -568,13 +664,9 @@ static inline CGSize SDCalculateScaleDownPixelSize(NSUInteger limitBytes, CGSize
     // Construct a UIImage from the decoded RGBA value array
     CGDataProviderRef provider =
     CGDataProviderCreateWithData(NULL, config.output.u.RGBA.rgba, config.output.u.RGBA.size, FreeImageData);
-    size_t bitsPerComponent = 8;
-    size_t bitsPerPixel = 32;
-    size_t bytesPerRow = config.output.u.RGBA.stride;
-    size_t width = config.output.width;
-    size_t height = config.output.height;
+    BOOL shouldInterpolate = YES;
     CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
-    CGImageRef imageRef = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
+    CGImageRef imageRef = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, shouldInterpolate, renderingIntent);
     CGDataProviderRelease(provider);
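Putting the two hunks above together, the static-image decode path now roughly follows the sketch below: pad the row length, allocate the buffer, hand it to libwebp as external memory, and create the CGImage with the same bytesPerRow. This is an illustrative condensation, not the shipped implementation: DecodeAlignedWebP is a hypothetical helper, it hardcodes MODE_rgbA with premultiplied-last alpha instead of the runtime-detected format, and it reuses the FreeImageData callback from the diff above.

static CGImageRef _Nullable DecodeAlignedWebP(const uint8_t *bytes, size_t size, size_t alignment) {
    WebPDecoderConfig config;
    if (!WebPInitDecoderConfig(&config)) return NULL;
    if (WebPGetFeatures(bytes, size, &config.input) != VP8_STATUS_OK) return NULL;
    size_t width = config.input.width, height = config.input.height;
    size_t bytesPerRow = ((width * 4 + alignment - 1) / alignment) * alignment; // pad each row
    uint8_t *rgba = WebPMalloc(bytesPerRow * height);
    if (!rgba) return NULL;
    config.output.colorspace = MODE_rgbA;        // premultiplied RGBA, matches the CGBitmapInfo below
    config.output.is_external_memory = 1;        // we own the pixels; libwebp only fills them
    config.output.u.RGBA.rgba = rgba;
    config.output.u.RGBA.stride = (int)bytesPerRow;
    config.output.u.RGBA.size = bytesPerRow * height;
    if (WebPDecode(bytes, size, &config) != VP8_STATUS_OK) { WebPFree(rgba); return NULL; }
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, rgba, bytesPerRow * height, FreeImageData);
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast;
    CGImageRef image = CGImageCreate(width, height, 8, 32, bytesPerRow,
                                     [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo,
                                     provider, NULL, YES, kCGRenderingIntentDefault);
    CGDataProviderRelease(provider);
    // bytesPerRow is already a multiple of `alignment`, so CoreAnimation can use the
    // bitmap as-is and the `CA::Render::copy_image` force-decode is avoided.
    return image;
}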
@@ -756,9 +848,6 @@ static inline CGSize SDCalculateScaleDownPixelSize(NSUInteger limitBytes, CGSize
     }
     
     size_t bytesPerRow = CGImageGetBytesPerRow(imageRef);
-    size_t bitsPerComponent = CGImageGetBitsPerComponent(imageRef);
-    size_t bitsPerPixel = CGImageGetBitsPerPixel(imageRef);
-    size_t components = bitsPerPixel / bitsPerComponent;
     CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
     CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;
     CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;
@@ -891,7 +980,7 @@ static inline CGSize SDCalculateScaleDownPixelSize(NSUInteger limitBytes, CGSize
 }
 
 static void FreeImageData(void *info, const void *data, size_t size) {
-    free((void *)data);
+    WebPFree((void *)data);
 }
 
 static int GetIntValueForKey(NSDictionary * _Nonnull dictionary, NSString * _Nonnull key, int defaultValue) {
@@ -968,7 +1057,7 @@ static float GetFloatValueForKey(NSDictionary * _Nonnull dictionary, NSString *
     if (_limitBytes > 0) {
         // Hack 32 BitsPerPixel
         CGSize imageSize = CGSizeMake(_canvasWidth, _canvasHeight);
-        CGSize framePixelSize = SDCalculateScaleDownPixelSize(_limitBytes, imageSize, _frameCount, 4);
+        CGSize framePixelSize = [SDImageCoderHelper scaledSizeWithImageSize:imageSize limitBytes:_limitBytes bytesPerPixel:4 frameCount:_frameCount];
         // Override thumbnail size
         _thumbnailSize = framePixelSize;
         _preserveAspectRatio = YES;
@@ -1236,6 +1325,7 @@ static float GetFloatValueForKey(NSDictionary * _Nonnull dictionary, NSString *
 #else
     image = [[UIImage alloc] initWithCGImage:imageRef scale:_scale orientation:kCGImagePropertyOrientationUp];
 #endif
+    image.sd_imageFormat = SDImageFormatWebP;
     CGImageRelease(imageRef);
     WebPDemuxReleaseIterator(&iter);

TestColorspaceStatic.webp: new binary test image (93 KiB), not shown.

SDWebImageWebPCoderTests.m

@@ -218,6 +218,18 @@ const int64_t kAsyncTestTimeout = 5;
     XCTAssertLessThanOrEqual(dataWithLimit.length, maxFileSize);
 }
 
+- (void)testWebPDecodeDoesNotTriggerCACopyImage {
+    NSURL *staticWebPURL = [[NSBundle bundleForClass:[self class]] URLForResource:@"TestColorspaceStatic" withExtension:@"webp"];
+    NSData *data = [NSData dataWithContentsOfURL:staticWebPURL];
+    UIImage *image = [SDImageWebPCoder.sharedCoder decodedImageWithData:data options:@{SDImageCoderDecodeThumbnailPixelSize: @(CGSizeMake(1023, 680))}]; // 1023 * 4 need aligned to 4096
+    CGImageRef cgImage = [image CGImage];
+    size_t bytesPerRow = CGImageGetBytesPerRow(cgImage);
+    XCTAssertEqual(bytesPerRow, 4096);
+    CGColorSpaceRef colorspace = CGImageGetColorSpace(cgImage);
+    NSString *colorspaceName = (__bridge_transfer NSString *)CGColorSpaceCopyName(colorspace);
+    XCTAssertEqual(colorspaceName, (__bridge NSString *)kCGColorSpaceSRGB, @"Color space is not sRGB");
+}
+
 - (void)testEncodingSettings {
     WebPConfig config;
     WebPConfigPreset(&config, WEBP_PRESET_DEFAULT, 0.2);

project.pbxproj

@@ -10,6 +10,7 @@
 		0EF5B6264833B7BC20894578 /* Pods_SDWebImageWebPCoderTests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 46F21AD7D1692EBAC4D0FF33 /* Pods_SDWebImageWebPCoderTests.framework */; };
 		3219F3B2228B0453003822A6 /* TestImageBlendAnimated.webp in Resources */ = {isa = PBXBuildFile; fileRef = 3219F3B1228B0453003822A6 /* TestImageBlendAnimated.webp */; };
 		325E268E25C82BE1000B807B /* TestImageGrayscale.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 325E268D25C82BE1000B807B /* TestImageGrayscale.jpg */; };
+		326420312A5D53E300EE3E46 /* TestColorspaceStatic.webp in Resources */ = {isa = PBXBuildFile; fileRef = 326420302A5D53E300EE3E46 /* TestColorspaceStatic.webp */; };
 		808C918E213FD131004B0F7C /* SDWebImageWebPCoderTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 808C918D213FD131004B0F7C /* SDWebImageWebPCoderTests.m */; };
 		808C919C213FD2B2004B0F7C /* TestImageStatic.webp in Resources */ = {isa = PBXBuildFile; fileRef = 808C919A213FD2B2004B0F7C /* TestImageStatic.webp */; };
 		808C919D213FD2B2004B0F7C /* TestImageAnimated.webp in Resources */ = {isa = PBXBuildFile; fileRef = 808C919B213FD2B2004B0F7C /* TestImageAnimated.webp */; };
@@ -19,6 +20,7 @@
 		28D8AA3D3015E075692FD3E3 /* Pods-SDWebImageWebPCoderTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SDWebImageWebPCoderTests.debug.xcconfig"; path = "../Pods/Target Support Files/Pods-SDWebImageWebPCoderTests/Pods-SDWebImageWebPCoderTests.debug.xcconfig"; sourceTree = "<group>"; };
 		3219F3B1228B0453003822A6 /* TestImageBlendAnimated.webp */ = {isa = PBXFileReference; lastKnownFileType = file; path = TestImageBlendAnimated.webp; sourceTree = "<group>"; };
 		325E268D25C82BE1000B807B /* TestImageGrayscale.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = TestImageGrayscale.jpg; sourceTree = "<group>"; };
+		326420302A5D53E300EE3E46 /* TestColorspaceStatic.webp */ = {isa = PBXFileReference; lastKnownFileType = file; path = TestColorspaceStatic.webp; sourceTree = "<group>"; };
 		46F21AD7D1692EBAC4D0FF33 /* Pods_SDWebImageWebPCoderTests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_SDWebImageWebPCoderTests.framework; sourceTree = BUILT_PRODUCTS_DIR; };
 		808C918B213FD130004B0F7C /* SDWebImageWebPCoderTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = SDWebImageWebPCoderTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
 		808C918D213FD131004B0F7C /* SDWebImageWebPCoderTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = SDWebImageWebPCoderTests.m; sourceTree = "<group>"; };
@@ -80,6 +82,7 @@
 		808C9199213FD2B2004B0F7C /* Images */ = {
 			isa = PBXGroup;
 			children = (
+				326420302A5D53E300EE3E46 /* TestColorspaceStatic.webp */,
 				325E268D25C82BE1000B807B /* TestImageGrayscale.jpg */,
 				808C919A213FD2B2004B0F7C /* TestImageStatic.webp */,
 				808C919B213FD2B2004B0F7C /* TestImageAnimated.webp */,
@@ -157,6 +160,7 @@
 				3219F3B2228B0453003822A6 /* TestImageBlendAnimated.webp in Resources */,
 				808C919D213FD2B2004B0F7C /* TestImageAnimated.webp in Resources */,
 				808C919C213FD2B2004B0F7C /* TestImageStatic.webp in Resources */,
+				326420312A5D53E300EE3E46 /* TestColorspaceStatic.webp in Resources */,
 				325E268E25C82BE1000B807B /* TestImageGrayscale.jpg in Resources */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
@@ -377,6 +381,7 @@
 					"$(inherited)",
 				);
 				INFOPLIST_FILE = Info.plist;
+				IPHONEOS_DEPLOYMENT_TARGET = 9.0;
 				PRODUCT_BUNDLE_IDENTIFIER = org.SDWebImage.SDWebImageWebPCoderTests;
 				PRODUCT_NAME = "$(TARGET_NAME)";
 			};
@@ -387,6 +392,7 @@
 			baseConfigurationReference = D92E6791BF088D1A101E670E /* Pods-SDWebImageWebPCoderTests.release.xcconfig */;
 			buildSettings = {
 				INFOPLIST_FILE = Info.plist;
+				IPHONEOS_DEPLOYMENT_TARGET = 9.0;
 				PRODUCT_BUNDLE_IDENTIFIER = org.SDWebImage.SDWebImageWebPCoderTests;
 				PRODUCT_NAME = "$(TARGET_NAME)";
 			};