diff --git a/SDWebImage/Core/NSImage+Compatibility.h b/SDWebImage/Core/NSImage+Compatibility.h
index dccc1ffa..0a562cc4 100644
--- a/SDWebImage/Core/NSImage+Compatibility.h
+++ b/SDWebImage/Core/NSImage+Compatibility.h
@@ -19,6 +19,10 @@
  The underlying Core Graphics image object. This will actually use `CGImageForProposedRect` with the image size.
  */
 @property (nonatomic, readonly, nullable) CGImageRef CGImage;
+/**
+ The underlying Core Image data. This will actually use `bestRepresentationForRect` with the image size to find the `NSCIImageRep`.
+ */
+@property (nonatomic, readonly, nullable) CIImage *CIImage;
 /**
  The scale factor of the image. This wil actually use `bestRepresentationForRect` with image size and pixel size to calculate the scale factor. If failed, use the default value 1.0. Should be greater than or equal to 1.0.
  */
@@ -38,6 +42,16 @@ The underlying Core Graphics image object. This will actually use `CGImageForPro
  */
 - (nonnull instancetype)initWithCGImage:(nonnull CGImageRef)cgImage scale:(CGFloat)scale orientation:(CGImagePropertyOrientation)orientation;
 
+/**
+ Initializes and returns an image object with the specified Core Image object. The representation is `NSCIImageRep`.
+ 
+ @param ciImage A Core Image image object
+ @param scale The image scale factor
+ @param orientation The orientation of the image data
+ @return The image object
+ */
+- (nonnull instancetype)initWithCIImage:(nonnull CIImage *)ciImage scale:(CGFloat)scale orientation:(CGImagePropertyOrientation)orientation;
+
 /**
  Returns an image object with the scale factor. The representation is created from the image data.
  @note The difference between these this and `initWithData:` is that `initWithData:` will always use `backingScaleFactor` as scale factor.
diff --git a/SDWebImage/Core/NSImage+Compatibility.m b/SDWebImage/Core/NSImage+Compatibility.m
index 83b80bc6..7de0c703 100644
--- a/SDWebImage/Core/NSImage+Compatibility.m
+++ b/SDWebImage/Core/NSImage+Compatibility.m
@@ -20,6 +20,15 @@
     return cgImage;
 }
 
+- (nullable CIImage *)CIImage {
+    NSRect imageRect = NSMakeRect(0, 0, self.size.width, self.size.height);
+    NSImageRep *imageRep = [self bestRepresentationForRect:imageRect context:nil hints:nil];
+    if (![imageRep isKindOfClass:NSCIImageRep.class]) {
+        return nil;
+    }
+    return ((NSCIImageRep *)imageRep).CIImage;
+}
+
 - (CGFloat)scale {
     CGFloat scale = 1;
     NSRect imageRect = NSMakeRect(0, 0, self.size.width, self.size.height);
@@ -65,6 +74,28 @@
     return self;
 }
 
+- (instancetype)initWithCIImage:(nonnull CIImage *)ciImage scale:(CGFloat)scale orientation:(CGImagePropertyOrientation)orientation {
+    NSCIImageRep *imageRep;
+    if (orientation != kCGImagePropertyOrientationUp) {
+        CIImage *rotatedCIImage = [ciImage imageByApplyingOrientation:orientation];
+        imageRep = [[NSCIImageRep alloc] initWithCIImage:rotatedCIImage];
+    } else {
+        imageRep = [[NSCIImageRep alloc] initWithCIImage:ciImage];
+    }
+    if (scale < 1) {
+        scale = 1;
+    }
+    CGFloat pixelWidth = imageRep.pixelsWide;
+    CGFloat pixelHeight = imageRep.pixelsHigh;
+    NSSize size = NSMakeSize(pixelWidth / scale, pixelHeight / scale);
+    self = [self initWithSize:size];
+    if (self) {
+        imageRep.size = size;
+        [self addRepresentation:imageRep];
+    }
+    return self;
+}
+
 - (instancetype)initWithData:(nonnull NSData *)data scale:(CGFloat)scale {
     NSBitmapImageRep *imageRep = [[NSBitmapImageRep alloc] initWithData:data];
     if (!imageRep) {
diff --git a/SDWebImage/Core/UIImage+Transform.m b/SDWebImage/Core/UIImage+Transform.m
index 4565c2de..091566bb 100644
--- a/SDWebImage/Core/UIImage+Transform.m
+++ b/SDWebImage/Core/UIImage+Transform.m
@@ -164,6 +164,21 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
     return [UIColor colorWithRed:r green:g blue:b alpha:a];
 }
 
+#if SD_UIKIT || SD_MAC
+// Core Image Support
+static inline CGImageRef _Nullable SDCGImageFromCIImage(CIImage * _Nonnull ciImage) {
+    CGImageRef imageRef = NULL;
+    if (@available(iOS 10, macOS 10.12, tvOS 10, *)) {
+        imageRef = ciImage.CGImage;
+    }
+    if (!imageRef) {
+        CIContext *context = [CIContext context];
+        imageRef = [context createCGImage:ciImage fromRect:ciImage.extent];
+    }
+    return imageRef;
+}
+#endif
+
 @implementation UIImage (Transform)
 
 - (void)sd_drawInRect:(CGRect)rect context:(CGContextRef)context scaleMode:(SDImageScaleMode)scaleMode clipsToBounds:(BOOL)clips {
@@ -194,27 +209,45 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
 }
 
 - (nullable UIImage *)sd_croppedImageWithRect:(CGRect)rect {
-    if (!self.CGImage) return nil;
     rect.origin.x *= self.scale;
     rect.origin.y *= self.scale;
     rect.size.width *= self.scale;
     rect.size.height *= self.scale;
     if (rect.size.width <= 0 || rect.size.height <= 0) return nil;
-    CGImageRef imageRef = CGImageCreateWithImageInRect(self.CGImage, rect);
+    
+#if SD_UIKIT || SD_MAC
+    // CIImage shortcut
+    if (self.CIImage) {
+        CGRect croppingRect = CGRectMake(rect.origin.x, self.size.height - CGRectGetMaxY(rect), rect.size.width, rect.size.height);
+        CIImage *ciImage = [self.CIImage imageByCroppingToRect:croppingRect];
+#if SD_UIKIT
+        UIImage *image = [UIImage imageWithCIImage:ciImage scale:self.scale orientation:self.imageOrientation];
+#else
+        UIImage *image = [[UIImage alloc] initWithCIImage:ciImage scale:self.scale orientation:kCGImagePropertyOrientationUp];
+#endif
+        return image;
+    }
+#endif
+    
+    CGImageRef imageRef = self.CGImage;
     if (!imageRef) {
         return nil;
     }
+    
+    CGImageRef croppedImageRef = CGImageCreateWithImageInRect(imageRef, rect);
+    if (!croppedImageRef) {
+        return nil;
+    }
 #if SD_UIKIT || SD_WATCH
-    UIImage *image = [UIImage imageWithCGImage:imageRef scale:self.scale orientation:self.imageOrientation];
+    UIImage *image = [UIImage imageWithCGImage:croppedImageRef scale:self.scale orientation:self.imageOrientation];
 #else
-    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:self.scale orientation:kCGImagePropertyOrientationUp];
+    UIImage *image = [[UIImage alloc] initWithCGImage:croppedImageRef scale:self.scale orientation:kCGImagePropertyOrientationUp];
 #endif
-    CGImageRelease(imageRef);
+    CGImageRelease(croppedImageRef);
     return image;
 }
 
 - (nullable UIImage *)sd_roundedCornerImageWithRadius:(CGFloat)cornerRadius corners:(SDRectCorner)corners borderWidth:(CGFloat)borderWidth borderColor:(nullable UIColor *)borderColor {
-    if (!self.CGImage) return nil;
     SDGraphicsImageRendererFormat *format = [[SDGraphicsImageRendererFormat alloc] init];
     format.scale = self.scale;
     SDGraphicsImageRenderer *renderer = [[SDGraphicsImageRenderer alloc] initWithSize:self.size format:format];
@@ -256,11 +289,32 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
 }
 
 - (nullable UIImage *)sd_rotatedImageWithAngle:(CGFloat)angle fitSize:(BOOL)fitSize {
-    if (!self.CGImage) return nil;
     size_t width = self.size.width;
     size_t height = self.size.height;
     CGRect newRect = CGRectApplyAffineTransform(CGRectMake(0, 0, width, height), fitSize ? CGAffineTransformMakeRotation(angle) : CGAffineTransformIdentity);
+    
+#if SD_UIKIT || SD_MAC
+    // CIImage shortcut
+    if (self.CIImage) {
+        CIImage *ciImage = self.CIImage;
+        if (fitSize) {
+            CGAffineTransform transform = CGAffineTransformMakeRotation(angle);
+            ciImage = [ciImage imageByApplyingTransform:transform];
+        } else {
+            CIFilter *filter = [CIFilter filterWithName:@"CIStraightenFilter"];
+            [filter setValue:ciImage forKey:kCIInputImageKey];
+            [filter setValue:@(angle) forKey:kCIInputAngleKey];
+            ciImage = filter.outputImage;
+        }
+#if SD_UIKIT || SD_WATCH
+        UIImage *image = [UIImage imageWithCIImage:ciImage scale:self.scale orientation:self.imageOrientation];
+#else
+        UIImage *image = [[UIImage alloc] initWithCIImage:ciImage scale:self.scale orientation:kCGImagePropertyOrientationUp];
+#endif
+        return image;
+    }
+#endif
 
     SDGraphicsImageRendererFormat *format = [[SDGraphicsImageRendererFormat alloc] init];
     format.scale = self.scale;
@@ -283,9 +337,31 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
 }
 
 - (nullable UIImage *)sd_flippedImageWithHorizontal:(BOOL)horizontal vertical:(BOOL)vertical {
-    if (!self.CGImage) return nil;
     size_t width = self.size.width;
     size_t height = self.size.height;
+    
+#if SD_UIKIT || SD_MAC
+    // CIImage shortcut
+    if (self.CIImage) {
+        CGAffineTransform transform = CGAffineTransformIdentity;
+        // Use UIKit coordinate system
+        if (horizontal) {
+            CGAffineTransform flipHorizontal = CGAffineTransformMake(-1, 0, 0, 1, width, 0);
+            transform = CGAffineTransformConcat(transform, flipHorizontal);
+        }
+        if (vertical) {
+            CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, height);
+            transform = CGAffineTransformConcat(transform, flipVertical);
+        }
+        CIImage *ciImage = [self.CIImage imageByApplyingTransform:transform];
+#if SD_UIKIT
+        UIImage *image = [UIImage imageWithCIImage:ciImage scale:self.scale orientation:self.imageOrientation];
+#else
+        UIImage *image = [[UIImage alloc] initWithCIImage:ciImage scale:self.scale orientation:kCGImagePropertyOrientationUp];
+#endif
+        return image;
+    }
+#endif
 
     SDGraphicsImageRendererFormat *format = [[SDGraphicsImageRendererFormat alloc] init];
     format.scale = self.scale;
@@ -308,18 +384,30 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
 #pragma mark - Image Blending
 
 - (nullable UIImage *)sd_tintedImageWithColor:(nonnull UIColor *)tintColor {
-    if (!self.CGImage) return nil;
-    if (!tintColor.CGColor) return nil;
-    
     BOOL hasTint = CGColorGetAlpha(tintColor.CGColor) > __FLT_EPSILON__;
     if (!hasTint) {
-#if SD_UIKIT || SD_WATCH
-        return [UIImage imageWithCGImage:self.CGImage scale:self.scale orientation:self.imageOrientation];
-#else
-        return [[UIImage alloc] initWithCGImage:self.CGImage scale:self.scale orientation:kCGImagePropertyOrientationUp];
-#endif
+        return self;
     }
     
+#if SD_UIKIT || SD_MAC
+    // CIImage shortcut
+    if (self.CIImage) {
+        CIImage *ciImage = self.CIImage;
+        CIImage *colorImage = [CIImage imageWithColor:[[CIColor alloc] initWithColor:tintColor]];
+        colorImage = [colorImage imageByCroppingToRect:ciImage.extent];
+        CIFilter *filter = [CIFilter filterWithName:@"CISourceAtopCompositing"];
+        [filter setValue:colorImage forKey:kCIInputImageKey];
+        [filter setValue:ciImage forKey:kCIInputBackgroundImageKey];
+        ciImage = filter.outputImage;
+#if SD_UIKIT
+        UIImage *image = [UIImage imageWithCIImage:ciImage scale:self.scale orientation:self.imageOrientation];
+#else
+        UIImage *image = [[UIImage alloc] initWithCIImage:ciImage scale:self.scale orientation:kCGImagePropertyOrientationUp];
+#endif
+        return image;
+    }
+#endif
+    
     CGSize size = self.size;
     CGRect rect = { CGPointZero, size };
     CGFloat scale = self.scale;
@@ -340,10 +428,16 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
 }
 
 - (nullable UIColor *)sd_colorAtPoint:(CGPoint)point {
-    if (!self) {
-        return nil;
+    CGImageRef imageRef = NULL;
+    // CIImage compatible
+#if SD_UIKIT || SD_MAC
+    if (self.CIImage) {
+        imageRef = SDCGImageFromCIImage(self.CIImage);
+    }
+#endif
+    if (!imageRef) {
+        imageRef = self.CGImage;
     }
-    CGImageRef imageRef = self.CGImage;
     if (!imageRef) {
         return nil;
     }
@@ -384,10 +478,16 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
 }
 
 - (nullable NSArray *)sd_colorsWithRect:(CGRect)rect {
-    if (!self) {
-        return nil;
+    CGImageRef imageRef = NULL;
+    // CIImage compatible
+#if SD_UIKIT || SD_MAC
+    if (self.CIImage) {
+        imageRef = SDCGImageFromCIImage(self.CIImage);
+    }
+#endif
+    if (!imageRef) {
+        imageRef = self.CGImage;
     }
-    CGImageRef imageRef = self.CGImage;
     if (!imageRef) {
         return nil;
     }
@@ -451,18 +551,41 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
     if (self.size.width < 1 || self.size.height < 1) {
         return nil;
     }
-    if (!self.CGImage) {
-        return nil;
-    }
-    
     BOOL hasBlur = blurRadius > __FLT_EPSILON__;
     if (!hasBlur) {
         return self;
     }
     
+#if SD_UIKIT || SD_MAC
+    if (self.CIImage) {
+        CIFilter *filter = [CIFilter filterWithName:@"CIGaussianBlur"];
+        [filter setValue:self.CIImage forKey:kCIInputImageKey];
+        // Blur Radius use pixel count
+        [filter setValue:@(blurRadius / 2) forKey:kCIInputRadiusKey];
+        CIImage *ciImage = filter.outputImage;
+        ciImage = [ciImage imageByCroppingToRect:CGRectMake(0, 0, self.size.width, self.size.height)];
+#if SD_UIKIT
+        UIImage *image = [UIImage imageWithCIImage:ciImage scale:self.scale orientation:self.imageOrientation];
+#else
+        UIImage *image = [[UIImage alloc] initWithCIImage:ciImage scale:self.scale orientation:kCGImagePropertyOrientationUp];
+#endif
+        return image;
+    }
+#endif
+    
     CGFloat scale = self.scale;
     CGImageRef imageRef = self.CGImage;
+    //convert to BGRA if it isn't
+    if (CGImageGetBitsPerPixel(imageRef) != 32 ||
+        CGImageGetBitsPerComponent(imageRef) != 8 ||
+        !((CGImageGetBitmapInfo(imageRef) & kCGBitmapAlphaInfoMask))) {
+        SDGraphicsBeginImageContextWithOptions(self.size, NO, self.scale);
+        [self drawInRect:CGRectMake(0, 0, self.size.width, self.size.height)];
+        imageRef = SDGraphicsGetImageFromCurrentImageContext().CGImage;
+        SDGraphicsEndImageContext();
+    }
+    
     vImage_Buffer effect = {}, scratch = {};
     vImage_Buffer *input = NULL, *output = NULL;
 
@@ -477,7 +600,7 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
     };
     
     vImage_Error err;
-    err = vImageBuffer_InitWithCGImage(&effect, &format, NULL, imageRef, kvImagePrintDiagnosticsToConsole);
+    err = vImageBuffer_InitWithCGImage(&effect, &format, NULL, imageRef, kvImageNoFlags);
     if (err != kvImageNoError) {
         NSLog(@"UIImage+Transform error: vImageBuffer_InitWithCGImage returned error code %zi for inputImage: %@", err, self);
         return nil;
@@ -542,12 +665,19 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
 
 #if SD_UIKIT || SD_MAC
 - (nullable UIImage *)sd_filteredImageWithFilter:(nonnull CIFilter *)filter {
-    if (!self.CGImage) return nil;
-    
-    CIContext *context = [CIContext context];
-    CIImage *inputImage = [CIImage imageWithCGImage:self.CGImage];
+    CIImage *inputImage;
+    if (self.CIImage) {
+        inputImage = self.CIImage;
+    } else {
+        CGImageRef imageRef = self.CGImage;
+        if (!imageRef) {
+            return nil;
+        }
+        inputImage = [CIImage imageWithCGImage:imageRef];
+    }
     if (!inputImage) return nil;
     
+    CIContext *context = [CIContext context];
     [filter setValue:inputImage forKey:kCIInputImageKey];
     CIImage *outputImage = filter.outputImage;
     if (!outputImage) return nil;
diff --git a/Tests/Tests/SDImageTransformerTests.m b/Tests/Tests/SDImageTransformerTests.m
index 6ddcfc14..9bc28ea5 100644
--- a/Tests/Tests/SDImageTransformerTests.m
+++ b/Tests/Tests/SDImageTransformerTests.m
@@ -13,7 +13,8 @@
 
 @interface SDImageTransformerTests : SDTestCase
 
-@property (nonatomic, strong) UIImage *testImage;
+@property (nonatomic, strong) UIImage *testImageCG;
+@property (nonatomic, strong) UIImage *testImageCI;
 
 @end
 
@@ -22,21 +23,37 @@
 #pragma mark - UIImage+Transform
 
 // UIImage+Transform test is hard to write because it's more about visual effect. Current it's tied to the `TestImage.png`, please keep that image or write new test with new image
-- (void)test01UIImageTransformResize {
+- (void)test01UIImageTransformResizeCG {
+    [self test01UIImageTransformResizeWithImage:self.testImageCG];
+}
+
+- (void)test01UIImageTransformResizeCI {
+    [self test01UIImageTransformResizeWithImage:self.testImageCI];
+}
+
+- (void)test01UIImageTransformResizeWithImage:(UIImage *)testImage {
     CGSize scaleDownSize = CGSizeMake(200, 100);
-    UIImage *scaledDownImage = [self.testImage sd_resizedImageWithSize:scaleDownSize scaleMode:SDImageScaleModeFill];
+    UIImage *scaledDownImage = [testImage sd_resizedImageWithSize:scaleDownSize scaleMode:SDImageScaleModeFill];
     expect(CGSizeEqualToSize(scaledDownImage.size, scaleDownSize)).beTruthy();
     CGSize scaleUpSize = CGSizeMake(2000, 1000);
-    UIImage *scaledUpImage = [self.testImage sd_resizedImageWithSize:scaleUpSize scaleMode:SDImageScaleModeAspectFit];
+    UIImage *scaledUpImage = [testImage sd_resizedImageWithSize:scaleUpSize scaleMode:SDImageScaleModeAspectFit];
     expect(CGSizeEqualToSize(scaledUpImage.size, scaleUpSize)).beTruthy();
     // Check image not inversion
     UIColor *topCenterColor = [scaledUpImage sd_colorAtPoint:CGPointMake(1000, 50)];
     expect([topCenterColor.sd_hexString isEqualToString:[UIColor blackColor].sd_hexString]).beTruthy();
 }
 
-- (void)test02UIImageTransformCrop {
+- (void)test02UIImageTransformCropCG {
+    [self test02UIImageTransformCropWithImage:self.testImageCG];
+}
+
+- (void)test02UIImageTransformCropCI {
+    [self test02UIImageTransformCropWithImage:self.testImageCI];
+}
+
+- (void)test02UIImageTransformCropWithImage:(UIImage *)testImage {
     CGRect rect = CGRectMake(50, 10, 200, 200);
-    UIImage *croppedImage = [self.testImage sd_croppedImageWithRect:rect];
+    UIImage *croppedImage = [testImage sd_croppedImageWithRect:rect];
     expect(CGSizeEqualToSize(croppedImage.size, CGSizeMake(200, 200))).beTruthy();
     UIColor *startColor = [croppedImage sd_colorAtPoint:CGPointZero];
     expect([startColor.sd_hexString isEqualToString:[UIColor clearColor].sd_hexString]).beTruthy();
@@ -45,7 +62,15 @@
     expect([topCenterColor.sd_hexString isEqualToString:[UIColor blackColor].sd_hexString]).beTruthy();
 }
 
-- (void)test03UIImageTransformRoundedCorner {
+- (void)test03UIImageTransformRoundedCornerCG {
+    [self test03UIImageTransformRoundedCornerWithImage:self.testImageCG];
+}
+
+- (void)test03UIImageTransformRoundedCornerCI {
+    [self test03UIImageTransformRoundedCornerWithImage:self.testImageCI];
+}
+
+- (void)test03UIImageTransformRoundedCornerWithImage:(UIImage *)testImage {
     CGFloat radius = 50;
 #if SD_UIKIT
     SDRectCorner corners = UIRectCornerAllCorners;
@@ -54,7 +79,7 @@
 #endif
     CGFloat borderWidth = 1;
     UIColor *borderColor = [UIColor blackColor];
-    UIImage *roundedCornerImage = [self.testImage sd_roundedCornerImageWithRadius:radius corners:corners borderWidth:borderWidth borderColor:borderColor];
+    UIImage *roundedCornerImage = [testImage sd_roundedCornerImageWithRadius:radius corners:corners borderWidth:borderWidth borderColor:borderColor];
     expect(CGSizeEqualToSize(roundedCornerImage.size, CGSizeMake(300, 300))).beTruthy();
     UIColor *startColor = [roundedCornerImage sd_colorAtPoint:CGPointZero];
     expect([startColor.sd_hexString isEqualToString:[UIColor clearColor].sd_hexString]).beTruthy();
@@ -66,25 +91,42 @@
     expect([topCenterColor.sd_hexString isEqualToString:[UIColor blackColor].sd_hexString]).beTruthy();
 }
 
-- (void)test04UIImageTransformRotate {
+- (void)test04UIImageTransformRotateCG {
+    [self test04UIImageTransformRotateWithImage:self.testImageCG];
+}
+
+- (void)test04UIImageTransformRotateCI {
+    [self test04UIImageTransformRotateWithImage:self.testImageCI];
+}
+
+- (void)test04UIImageTransformRotateWithImage:(UIImage *)testImage {
     CGFloat angle = M_PI_4;
-    UIImage *rotatedImage = [self.testImage sd_rotatedImageWithAngle:angle fitSize:NO];
+    UIImage *rotatedImage = [testImage sd_rotatedImageWithAngle:angle fitSize:NO];
     // Not fit size and no change
-    expect(CGSizeEqualToSize(rotatedImage.size, self.testImage.size)).beTruthy();
+    expect(CGSizeEqualToSize(rotatedImage.size, testImage.size)).beTruthy();
     // Fit size, may change size
-    rotatedImage = [self.testImage sd_rotatedImageWithAngle:angle fitSize:YES];
+    rotatedImage = [testImage sd_rotatedImageWithAngle:angle fitSize:YES];
     CGSize rotatedSize = CGSizeMake(ceil(300 * 1.414), ceil(300 * 1.414)); // 45º, square length * sqrt(2)
-    expect(CGSizeEqualToSize(rotatedImage.size, rotatedSize)).beTruthy();
+    expect(rotatedImage.size.width - rotatedSize.width <= 1).beTruthy();
+    expect(rotatedImage.size.height - rotatedSize.height <= 1).beTruthy();
     // Check image not inversion
     UIColor *leftCenterColor = [rotatedImage sd_colorAtPoint:CGPointMake(60, 175)];
     expect([leftCenterColor.sd_hexString isEqualToString:[UIColor blackColor].sd_hexString]).beTruthy();
 }
 
-- (void)test05UIImageTransformFlip {
+- (void)test05UIImageTransformFlipCG {
+    [self test05UIImageTransformFlipWithImage:self.testImageCG];
+}
+
+- (void)test05UIImageTransformFlipCI {
+    [self test05UIImageTransformFlipWithImage:self.testImageCI];
+}
+
+- (void)test05UIImageTransformFlipWithImage:(UIImage *)testImage {
     BOOL horizontal = YES;
     BOOL vertical = YES;
-    UIImage *flippedImage = [self.testImage sd_flippedImageWithHorizontal:horizontal vertical:vertical];
-    expect(CGSizeEqualToSize(flippedImage.size, self.testImage.size)).beTruthy();
+    UIImage *flippedImage = [testImage sd_flippedImageWithHorizontal:horizontal vertical:vertical];
+    expect(CGSizeEqualToSize(flippedImage.size, testImage.size)).beTruthy();
     // Test pixel colors method here
     UIColor *checkColor = [flippedImage sd_colorAtPoint:CGPointMake(75, 75)];
     expect(checkColor);
@@ -98,10 +140,18 @@
     expect([bottomCenterColor.sd_hexString isEqualToString:[UIColor blackColor].sd_hexString]).beTruthy();
 }
 
-- (void)test06UIImageTransformTint {
+- (void)test06UIImageTransformTintCG {
+    [self test06UIImageTransformTintWithImage:self.testImageCG];
+}
+
+- (void)test06UIImageTransformTintCI {
+    [self test06UIImageTransformTintWithImage:self.testImageCI];
+}
+
+- (void)test06UIImageTransformTintWithImage:(UIImage *)testImage {
     UIColor *tintColor = [UIColor blackColor];
-    UIImage *tintedImage = [self.testImage sd_tintedImageWithColor:tintColor];
-    expect(CGSizeEqualToSize(tintedImage.size, self.testImage.size)).beTruthy();
+    UIImage *tintedImage = [testImage sd_tintedImageWithColor:tintColor];
+    expect(CGSizeEqualToSize(tintedImage.size, testImage.size)).beTruthy();
     // Check center color, should keep clear
     UIColor *centerColor = [tintedImage sd_colorAtPoint:CGPointMake(150, 150)];
     expect([centerColor.sd_hexString isEqualToString:[UIColor clearColor].sd_hexString]);
@@ -113,10 +163,18 @@
     expect([topCenterColor.sd_hexString isEqualToString:[UIColor blackColor].sd_hexString]).beTruthy();
 }
 
-- (void)test07UIImageTransformBlur {
+- (void)test07UIImageTransformBlurCG {
+    [self test07UIImageTransformBlurWithImage:self.testImageCG];
+}
+
+- (void)test07UIImageTransformBlurCI {
+    [self test07UIImageTransformBlurWithImage:self.testImageCI];
+}
+
+- (void)test07UIImageTransformBlurWithImage:(UIImage *)testImage {
     CGFloat radius = 50;
-    UIImage *blurredImage = [self.testImage sd_blurredImageWithRadius:radius];
-    expect(CGSizeEqualToSize(blurredImage.size, self.testImage.size)).beTruthy();
+    UIImage *blurredImage = [testImage sd_blurredImageWithRadius:radius];
+    expect(CGSizeEqualToSize(blurredImage.size, testImage.size)).beTruthy();
     // Check left color, should be blurred
     UIColor *leftColor = [blurredImage sd_colorAtPoint:CGPointMake(80, 150)];
     // Hard-code from the output
@@ -124,14 +182,23 @@
     expect([leftColor.sd_hexString isEqualToString:expectedColor.sd_hexString]);
     // Check rounded corner operation not inversion the image
     UIColor *topCenterColor = [blurredImage sd_colorAtPoint:CGPointMake(150, 20)];
-    expect([topCenterColor.sd_hexString isEqualToString:@"#9a430d06"]).beTruthy();
+    UIColor *bottomCenterColor = [blurredImage sd_colorAtPoint:CGPointMake(150, 280)];
+    expect([topCenterColor.sd_hexString isEqualToString:bottomCenterColor.sd_hexString]).beFalsy();
 }
 
-- (void)test08UIImageTransformFilter {
+- (void)test08UIImageTransformFilterCG {
+    [self test08UIImageTransformFilterWithImage:self.testImageCG];
+}
+
+- (void)test08UIImageTransformFilterCI {
+    [self test08UIImageTransformFilterWithImage:self.testImageCI];
+}
+
+- (void)test08UIImageTransformFilterWithImage:(UIImage *)testImage {
     // Invert color filter
     CIFilter *filter = [CIFilter filterWithName:@"CIColorInvert"];
-    UIImage *filteredImage = [self.testImage sd_filteredImageWithFilter:filter];
-    expect(CGSizeEqualToSize(filteredImage.size, self.testImage.size)).beTruthy();
+    UIImage *filteredImage = [testImage sd_filteredImageWithFilter:filter];
+    expect(CGSizeEqualToSize(filteredImage.size, testImage.size)).beTruthy();
     // Check left color, should be inverted
     UIColor *leftColor = [filteredImage sd_colorAtPoint:CGPointMake(80, 150)];
     // Hard-code from the output
@@ -198,7 +265,7 @@
     NSString *transformerKey = [transformerKeys componentsJoinedByString:@"-"]; // SDImageTransformerKeySeparator
     expect([pipelineTransformer.transformerKey isEqualToString:transformerKey]).beTruthy();
     
-    UIImage *transformedImage = [pipelineTransformer transformedImageWithImage:self.testImage forKey:@"Test"];
+    UIImage *transformedImage = [pipelineTransformer transformedImageWithImage:self.testImageCG forKey:@"Test"];
     expect(transformedImage).notTo.beNil();
     expect(CGSizeEqualToSize(transformedImage.size, cropRect.size)).beTruthy();
 }
@@ -239,6 +306,8 @@
     expect(SDTransformedKeyForKey(key, transformerKey)).equal(@"ftp://root:password@foo.com/image-SDImageFlippingTransformer(1,0).png");
 }
 
+#pragma mark - Coder Helper
+
 - (void)test20CGImageCreateDecodedWithOrientation {
     // Test EXIF orientation tag, you can open this image with `Preview.app`, open inspector (Command+I) and rotate (Command+L/R) to check
     UIImage *image = [[UIImage alloc] initWithContentsOfFile:[self testPNGPathForName:@"TestEXIF"]];
@@ -331,11 +400,23 @@
 
 #pragma mark - Helper
 
-- (UIImage *)testImage {
-    if (!_testImage) {
-        _testImage = [[UIImage alloc] initWithContentsOfFile:[self testPNGPathForName:@"TestImage"]];
+- (UIImage *)testImageCG {
+    if (!_testImageCG) {
+        _testImageCG = [[UIImage alloc] initWithContentsOfFile:[self testPNGPathForName:@"TestImage"]];
     }
-    return _testImage;
+    return _testImageCG;
+}
+
+- (UIImage *)testImageCI {
+    if (!_testImageCI) {
+        CIImage *ciImage = [[CIImage alloc] initWithContentsOfURL:[NSURL fileURLWithPath:[self testPNGPathForName:@"TestImage"]]];
+#if SD_UIKIT
+        _testImageCI = [[UIImage alloc] initWithCIImage:ciImage scale:1 orientation:UIImageOrientationUp];
+#else
+        _testImageCI = [[UIImage alloc] initWithCIImage:ciImage scale:1 orientation:kCGImagePropertyOrientationUp];
+#endif
+    }
+    return _testImageCI;
 }
 
 - (NSString *)testPNGPathForName:(NSString *)name {
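
For context, a minimal usage sketch that is not part of the patch (UIKit side only; SDExampleTintedThumbnail is a hypothetical helper name): it builds a CIImage-backed UIImage the same way the new testImageCI test helper does, then runs it through the transform category, which after this change takes the self.CIImage shortcuts instead of returning nil for a missing CGImage.

#import <SDWebImage/SDWebImage.h>
#import <CoreImage/CoreImage.h>

// Hypothetical helper (assumption, not SDWebImage API): crop and then tint a
// CIImage-backed UIImage. Both sd_ calls hit the `if (self.CIImage)` branches
// added in UIImage+Transform.m, so the work stays inside Core Image until the
// result is rendered or a pixel-level API (e.g. sd_colorAtPoint:) needs a CGImage.
static UIImage * _Nullable SDExampleTintedThumbnail(NSURL * _Nonnull fileURL) {
    CIImage *ciImage = [CIImage imageWithContentsOfURL:fileURL];
    if (!ciImage) {
        return nil;
    }
    UIImage *image = [UIImage imageWithCIImage:ciImage scale:1 orientation:UIImageOrientationUp];
    UIImage *cropped = [image sd_croppedImageWithRect:CGRectMake(0, 0, 200, 200)];
    return [cropped sd_tintedImageWithColor:[UIColor blackColor]];
}

On macOS the same flow would go through the new -[NSImage initWithCIImage:scale:orientation:] added in NSImage+Compatibility.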