Merge pull request #2140 from dreampiggy/refactor_coder_animation

Introduce SDAnimatedImageView, SDAnimatedImage and do refactoring
This commit is contained in:
DreamPiggy 2018-03-29 14:47:30 +08:00 committed by GitHub
commit beb958bd08
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
55 changed files with 3448 additions and 700 deletions

View File

@@ -8,11 +8,11 @@
#import "DetailViewController.h"
#import <SDWebImage/UIView+WebCache.h>
#import <SDWebImage/FLAnimatedImageView+WebCache.h>
#import <SDWebImage/SDAnimatedImageView+WebCache.h>
@interface DetailViewController ()
@property (strong, nonatomic) IBOutlet FLAnimatedImageView *imageView;
@property (strong, nonatomic) IBOutlet SDAnimatedImageView *imageView;
@end

View File

@@ -8,13 +8,13 @@
#import "MasterViewController.h"
#import "DetailViewController.h"
#import <SDWebImage/FLAnimatedImageView+WebCache.h>
#import <SDWebImage/SDAnimatedImageView+WebCache.h>
#import <SDWebImage/UIView+WebCache.h>
@interface MyCustomTableViewCell : UITableViewCell
@property (nonatomic, strong) UILabel *customTextLabel;
@property (nonatomic, strong) FLAnimatedImageView *customImageView;
@property (nonatomic, strong) SDAnimatedImageView *customImageView;
@end
@@ -22,7 +22,7 @@
- (instancetype)initWithStyle:(UITableViewCellStyle)style reuseIdentifier:(NSString *)reuseIdentifier {
if (self = [super initWithStyle:style reuseIdentifier:reuseIdentifier]) {
_customImageView = [[FLAnimatedImageView alloc] initWithFrame:CGRectMake(20.0, 2.0, 60.0, 40.0)];
_customImageView = [[SDAnimatedImageView alloc] initWithFrame:CGRectMake(20.0, 2.0, 60.0, 40.0)];
[self.contentView addSubview:_customImageView];
_customTextLabel = [[UILabel alloc] initWithFrame:CGRectMake(100.0, 12.0, 200, 20.0)];
[self.contentView addSubview:_customTextLabel];

View File

@@ -1,8 +1,12 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="10117" systemVersion="15E65" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none">
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="13771" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" colorMatched="YES">
<device id="retina4_7" orientation="portrait">
<adaptation id="fullscreen"/>
</device>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="10085"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13772"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="DetailViewController">
@@ -16,12 +20,12 @@
<rect key="frame" x="0.0" y="0.0" width="320" height="460"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<imageView userInteractionEnabled="NO" contentMode="scaleAspectFit" id="7" customClass="FLAnimatedImageView">
<imageView userInteractionEnabled="NO" contentMode="scaleAspectFit" id="7" customClass="SDAnimatedImageView">
<rect key="frame" x="0.0" y="0.0" width="320" height="460"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
</imageView>
</subviews>
<color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<freeformSimulatedSizeMetrics key="simulatedDestinationMetrics"/>
</view>
</objects>

View File

@@ -687,12 +687,12 @@
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMinY="YES"/>
<imageCell key="cell" refusesFirstResponder="YES" alignment="left" imageScaling="proportionallyDown" id="f0P-c9-GMe"/>
</imageView>
<imageView horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="JIp-Or-vBM">
<imageView horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="JIp-Or-vBM" customClass="SDAnimatedImageView">
<rect key="frame" x="20" y="116" width="204" height="128"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMinY="YES"/>
<imageCell key="cell" refusesFirstResponder="YES" alignment="left" imageScaling="proportionallyDown" id="NJq-m3-LlB"/>
</imageView>
<imageView horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="khI-tY-l0M">
<imageView horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="khI-tY-l0M" customClass="SDAnimatedImageView">
<rect key="frame" x="256" y="116" width="204" height="128"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMinY="YES"/>
<imageCell key="cell" refusesFirstResponder="YES" alignment="left" imageScaling="proportionallyDown" id="WbV-Do-9qy"/>

View File

@@ -7,6 +7,8 @@
*/
#import "ViewController.h"
#import <SDWebImage/UIImageView+WebCache.h>
#import <SDWebImage/SDAnimatedImageView+WebCache.h>
@import SDWebImage;
@@ -14,8 +16,8 @@
@property (weak) IBOutlet NSImageView *imageView1;
@property (weak) IBOutlet NSImageView *imageView2;
@property (weak) IBOutlet NSImageView *imageView3;
@property (weak) IBOutlet NSImageView *imageView4;
@property (weak) IBOutlet SDAnimatedImageView *imageView3;
@property (weak) IBOutlet SDAnimatedImageView *imageView4;
@property (weak) IBOutlet NSButton *clearCacheButton;
@end
@@ -25,22 +27,16 @@
- (void)viewDidLoad {
[super viewDidLoad];
//Add GIF coder for better animated image rendering
[[SDWebImageCodersManager sharedManager] addCoder:[SDWebImageGIFCoder sharedCoder]];
// NOTE: https links or authentication ones do not work (there is a crash)
// Do any additional setup after loading the view.
// For animated GIF rendering, set `animates` to YES or will only show the first frame
self.imageView1.animates = YES;
self.imageView3.animates = YES;
self.imageView4.animates = YES;
self.imageView1.sd_imageIndicator = SDWebImageProgressIndicator.defaultIndicator;
[self.imageView1 sd_setImageWithURL:[NSURL URLWithString:@"http://assets.sbnation.com/assets/2512203/dogflops.gif"]];
[self.imageView1 sd_setImageWithURL:[NSURL URLWithString:@"http://s3.amazonaws.com/fast-image-cache/demo-images/FICDDemoImage001.jpg"]];
[self.imageView2 sd_setImageWithURL:[NSURL URLWithString:@"http://www.ioncannon.net/wp-content/uploads/2011/06/test2.webp"]];
[self.imageView3 sd_setImageWithURL:[NSURL URLWithString:@"http://littlesvr.ca/apng/images/SteamEngine.webp"]];
[self.imageView3 sd_setImageWithURL:[NSURL URLWithString:@"https://raw.githubusercontent.com/liyong03/YLGIFImage/master/YLGIFImageDemo/YLGIFImageDemo/joy.gif"]];
self.imageView4.wantsLayer = YES;
self.imageView4.sd_imageTransition = SDWebImageTransition.fadeTransition;
[self.imageView4 sd_setImageWithURL:[NSURL URLWithString:@"http://s3.amazonaws.com/fast-image-cache/demo-images/FICDDemoImage001.jpg"] placeholderImage:nil options:SDWebImageForceTransition];
[self.imageView4 sd_setImageWithURL:[NSURL URLWithString:@"http://littlesvr.ca/apng/images/SteamEngine.webp"] placeholderImage:nil options:SDWebImageForceTransition];
self.clearCacheButton.target = self;
self.clearCacheButton.action = @selector(clearCacheButtonClicked:);

View File

@@ -1,7 +1,12 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder.AppleTV.Storyboard" version="3.0" toolsVersion="10117" systemVersion="15F34" targetRuntime="AppleTV" propertyAccessControl="none" useAutolayout="YES" initialViewController="BYZ-38-t0r">
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder.AppleTV.Storyboard" version="3.0" toolsVersion="13771" targetRuntime="AppleTV" propertyAccessControl="none" useAutolayout="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<device id="appleTV" orientation="landscape">
<adaptation id="light"/>
</device>
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="10085"/>
<deployment identifier="tvOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13772"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
@@ -16,20 +21,24 @@
<rect key="frame" x="0.0" y="0.0" width="1920" height="1080"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="xeq-iS-C6S" customClass="FLAnimatedImageView">
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="xeq-iS-C6S">
<rect key="frame" x="20" y="20" width="300" height="200"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
</imageView>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="c5h-Lg-aZx" customClass="FLAnimatedImageView">
<rect key="frame" x="636" y="20" width="300" height="200"/>
</imageView>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="Udx-nH-mbX" customClass="FLAnimatedImageView">
<rect key="frame" x="944" y="20" width="300" height="200"/>
</imageView>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="xvm-ne-7D9" customClass="FLAnimatedImageView">
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="xvm-ne-7D9">
<rect key="frame" x="328" y="20" width="300" height="200"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
</imageView>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="c5h-Lg-aZx" customClass="SDAnimatedImageView">
<rect key="frame" x="636" y="20" width="300" height="200"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
</imageView>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="Udx-nH-mbX" customClass="SDAnimatedImageView">
<rect key="frame" x="944" y="20" width="300" height="200"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
</imageView>
</subviews>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="calibratedWhite"/>
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.0" colorSpace="custom" customColorSpace="sRGB"/>
</view>
<connections>
<outlet property="imageView1" destination="xeq-iS-C6S" id="4gp-UN-VjW"/>

View File

@@ -7,14 +7,15 @@
*/
#import "ViewController.h"
#import <SDWebImage/FLAnimatedImageView+WebCache.h>
#import <SDWebImage/UIImageView+WebCache.h>
#import <SDWebImage/SDAnimatedImageView+WebCache.h>
@interface ViewController ()
@property (weak, nonatomic) IBOutlet FLAnimatedImageView *imageView1;
@property (weak, nonatomic) IBOutlet FLAnimatedImageView *imageView2;
@property (weak, nonatomic) IBOutlet FLAnimatedImageView *imageView3;
@property (weak, nonatomic) IBOutlet FLAnimatedImageView *imageView4;
@property (weak, nonatomic) IBOutlet UIImageView *imageView1;
@property (weak, nonatomic) IBOutlet UIImageView *imageView2;
@property (weak, nonatomic) IBOutlet SDAnimatedImageView *imageView3;
@property (weak, nonatomic) IBOutlet SDAnimatedImageView *imageView4;
@end
@@ -25,10 +26,10 @@
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
[self.imageView1 sd_setImageWithURL:[NSURL URLWithString:@"http://assets.sbnation.com/assets/2512203/dogflops.gif"]];
[self.imageView1 sd_setImageWithURL:[NSURL URLWithString:@"http://s3.amazonaws.com/fast-image-cache/demo-images/FICDDemoImage001.jpg"]];
[self.imageView2 sd_setImageWithURL:[NSURL URLWithString:@"http://www.ioncannon.net/wp-content/uploads/2011/06/test2.webp"]];
[self.imageView3 sd_setImageWithURL:[NSURL URLWithString:@"http://s3.amazonaws.com/fast-image-cache/demo-images/FICDDemoImage000.jpg"]];
[self.imageView4 sd_setImageWithURL:[NSURL URLWithString:@"http://s3.amazonaws.com/fast-image-cache/demo-images/FICDDemoImage001.jpg"]];
[self.imageView3 sd_setImageWithURL:[NSURL URLWithString:@"https://raw.githubusercontent.com/liyong03/YLGIFImage/master/YLGIFImageDemo/YLGIFImageDemo/joy.gif"]];
[self.imageView4 sd_setImageWithURL:[NSURL URLWithString:@"http://littlesvr.ca/apng/images/SteamEngine.webp"]];
}
- (void)didReceiveMemoryWarning {

View File

@@ -316,6 +316,42 @@
323F8C1D1F38EF770092B609 /* muxread.c in Sources */ = {isa = PBXBuildFile; fileRef = 323F8B3D1F38EF770092B609 /* muxread.c */; };
323F8C1E1F38EF770092B609 /* muxread.c in Sources */ = {isa = PBXBuildFile; fileRef = 323F8B3D1F38EF770092B609 /* muxread.c */; };
323F8C1F1F38EF770092B609 /* muxread.c in Sources */ = {isa = PBXBuildFile; fileRef = 323F8B3D1F38EF770092B609 /* muxread.c */; };
3248475D201775F600AF9E5A /* SDAnimatedImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 32484757201775F600AF9E5A /* SDAnimatedImageView.m */; };
3248475E201775F600AF9E5A /* SDAnimatedImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 32484757201775F600AF9E5A /* SDAnimatedImageView.m */; };
3248475F201775F600AF9E5A /* SDAnimatedImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 32484757201775F600AF9E5A /* SDAnimatedImageView.m */; };
32484760201775F600AF9E5A /* SDAnimatedImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 32484757201775F600AF9E5A /* SDAnimatedImageView.m */; };
32484761201775F600AF9E5A /* SDAnimatedImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 32484757201775F600AF9E5A /* SDAnimatedImageView.m */; };
32484762201775F600AF9E5A /* SDAnimatedImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 32484757201775F600AF9E5A /* SDAnimatedImageView.m */; };
32484763201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484764201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484765201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484766201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484767201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484768201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484769201775F600AF9E5A /* SDAnimatedImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484759201775F600AF9E5A /* SDAnimatedImageView.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248476A201775F600AF9E5A /* SDAnimatedImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484759201775F600AF9E5A /* SDAnimatedImageView.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248476B201775F600AF9E5A /* SDAnimatedImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484759201775F600AF9E5A /* SDAnimatedImageView.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248476C201775F600AF9E5A /* SDAnimatedImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484759201775F600AF9E5A /* SDAnimatedImageView.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248476D201775F600AF9E5A /* SDAnimatedImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484759201775F600AF9E5A /* SDAnimatedImageView.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248476E201775F600AF9E5A /* SDAnimatedImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484759201775F600AF9E5A /* SDAnimatedImageView.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248476F201775F600AF9E5A /* SDAnimatedImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475A201775F600AF9E5A /* SDAnimatedImage.m */; };
32484770201775F600AF9E5A /* SDAnimatedImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475A201775F600AF9E5A /* SDAnimatedImage.m */; };
32484771201775F600AF9E5A /* SDAnimatedImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475A201775F600AF9E5A /* SDAnimatedImage.m */; };
32484772201775F600AF9E5A /* SDAnimatedImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475A201775F600AF9E5A /* SDAnimatedImage.m */; };
32484773201775F600AF9E5A /* SDAnimatedImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475A201775F600AF9E5A /* SDAnimatedImage.m */; };
32484774201775F600AF9E5A /* SDAnimatedImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475A201775F600AF9E5A /* SDAnimatedImage.m */; };
32484775201775F600AF9E5A /* SDAnimatedImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 3248475B201775F600AF9E5A /* SDAnimatedImage.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484776201775F600AF9E5A /* SDAnimatedImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 3248475B201775F600AF9E5A /* SDAnimatedImage.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484777201775F600AF9E5A /* SDAnimatedImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 3248475B201775F600AF9E5A /* SDAnimatedImage.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484778201775F600AF9E5A /* SDAnimatedImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 3248475B201775F600AF9E5A /* SDAnimatedImage.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484779201775F600AF9E5A /* SDAnimatedImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 3248475B201775F600AF9E5A /* SDAnimatedImage.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248477A201775F600AF9E5A /* SDAnimatedImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 3248475B201775F600AF9E5A /* SDAnimatedImage.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248477B201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */; };
3248477C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */; };
3248477D201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */; };
3248477E201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */; };
3248477F201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */; };
32484780201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */; };
324DF4B4200A14DC008A84CC /* SDWebImageDefine.h in Headers */ = {isa = PBXBuildFile; fileRef = 324DF4B2200A14DC008A84CC /* SDWebImageDefine.h */; settings = {ATTRIBUTES = (Public, ); }; };
324DF4B5200A14DC008A84CC /* SDWebImageDefine.h in Headers */ = {isa = PBXBuildFile; fileRef = 324DF4B2200A14DC008A84CC /* SDWebImageDefine.h */; settings = {ATTRIBUTES = (Public, ); }; };
324DF4B6200A14DC008A84CC /* SDWebImageDefine.h in Headers */ = {isa = PBXBuildFile; fileRef = 324DF4B2200A14DC008A84CC /* SDWebImageDefine.h */; settings = {ATTRIBUTES = (Public, ); }; };
@@ -1403,6 +1439,12 @@
323F8B3B1F38EF770092B609 /* muxi.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = muxi.h; sourceTree = "<group>"; };
323F8B3C1F38EF770092B609 /* muxinternal.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = muxinternal.c; sourceTree = "<group>"; };
323F8B3D1F38EF770092B609 /* muxread.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = muxread.c; sourceTree = "<group>"; };
32484757201775F600AF9E5A /* SDAnimatedImageView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SDAnimatedImageView.m; sourceTree = "<group>"; };
32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "SDAnimatedImageView+WebCache.h"; sourceTree = "<group>"; };
32484759201775F600AF9E5A /* SDAnimatedImageView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SDAnimatedImageView.h; sourceTree = "<group>"; };
3248475A201775F600AF9E5A /* SDAnimatedImage.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SDAnimatedImage.m; sourceTree = "<group>"; };
3248475B201775F600AF9E5A /* SDAnimatedImage.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SDAnimatedImage.h; sourceTree = "<group>"; };
3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "SDAnimatedImageView+WebCache.m"; sourceTree = "<group>"; };
324DF4B2200A14DC008A84CC /* SDWebImageDefine.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SDWebImageDefine.h; sourceTree = "<group>"; };
324DF4B3200A14DC008A84CC /* SDWebImageDefine.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = SDWebImageDefine.m; sourceTree = "<group>"; };
325312C6200F09910046BF1E /* SDWebImageTransition.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SDWebImageTransition.h; sourceTree = "<group>"; };
@@ -1720,6 +1762,19 @@
path = mux;
sourceTree = "<group>";
};
32484756201775CE00AF9E5A /* ImageView */ = {
isa = PBXGroup;
children = (
3248475B201775F600AF9E5A /* SDAnimatedImage.h */,
3248475A201775F600AF9E5A /* SDAnimatedImage.m */,
32484759201775F600AF9E5A /* SDAnimatedImageView.h */,
32484757201775F600AF9E5A /* SDAnimatedImageView.m */,
32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */,
3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */,
);
name = ImageView;
sourceTree = "<group>";
};
4369C2851D9811BB007E863A /* WebCache Categories */ = {
isa = PBXGroup;
children = (
@@ -1842,6 +1897,7 @@
53922DAB148C56810056699D /* Downloader */,
53922DAA148C56470056699D /* Cache */,
321E60831F38E88F00405457 /* Decoder */,
32484756201775CE00AF9E5A /* ImageView */,
53922DAC148C56DD0056699D /* Utils */,
53922DA9148C562D0056699D /* Categories */,
4369C2851D9811BB007E863A /* WebCache Categories */,
@@ -2138,12 +2194,14 @@
00733A721BC4880E00A5A117 /* UIView+WebCacheOperation.h in Headers */,
80377C481F2F666300F89830 /* bit_reader_utils.h in Headers */,
80377C511F2F666300F89830 /* huffman_encode_utils.h in Headers */,
32484778201775F600AF9E5A /* SDAnimatedImage.h in Headers */,
00733A6B1BC4880E00A5A117 /* NSData+ImageContentType.h in Headers */,
325312CB200F09910046BF1E /* SDWebImageTransition.h in Headers */,
323F8C111F38EF770092B609 /* muxi.h in Headers */,
80377EC41F2F66D500F89830 /* vp8li_dec.h in Headers */,
00733A6A1BC4880E00A5A117 /* SDWebImagePrefetcher.h in Headers */,
00733A641BC4880E00A5A117 /* SDWebImageOperation.h in Headers */,
32484766201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */,
321E60A51F38E8F600405457 /* SDWebImageGIFCoder.h in Headers */,
32CF1C0A1FA496B000004BD1 /* SDWebImageCoderHelper.h in Headers */,
80377C4D1F2F666300F89830 /* endian_inl_utils.h in Headers */,
@@ -2154,6 +2212,7 @@
80377EC21F2F66D500F89830 /* vp8i_dec.h in Headers */,
80377EBA1F2F66D500F89830 /* common_dec.h in Headers */,
43CE757E1CFE9427006C64D0 /* FLAnimatedImageView.h in Headers */,
3248476C201775F600AF9E5A /* SDAnimatedImageView.h in Headers */,
80377C5F1F2F666300F89830 /* utils.h in Headers */,
80377C5B1F2F666300F89830 /* rescaler_utils.h in Headers */,
323F8BF91F38EF770092B609 /* animi.h in Headers */,
@@ -2205,9 +2264,11 @@
4314D1701D0E0E3B004B36C9 /* mux.h in Headers */,
321E60871F38E8C800405457 /* SDWebImageCoder.h in Headers */,
80377EA21F2F66D400F89830 /* vp8i_dec.h in Headers */,
3248476A201775F600AF9E5A /* SDAnimatedImageView.h in Headers */,
321E60951F38E8ED00405457 /* SDWebImageImageIOCoder.h in Headers */,
80377C211F2F666300F89830 /* quant_levels_dec_utils.h in Headers */,
4314D1721D0E0E3B004B36C9 /* SDWebImageCompat.h in Headers */,
32484776201775F600AF9E5A /* SDAnimatedImage.h in Headers */,
80377C251F2F666300F89830 /* random_utils.h in Headers */,
80377D4F1F2F66A700F89830 /* lossless.h in Headers */,
80377D511F2F66A700F89830 /* msa_macro.h in Headers */,
@@ -2238,6 +2299,7 @@
323F8B871F38EF770092B609 /* histogram_enc.h in Headers */,
80377C1F1F2F666300F89830 /* huffman_utils.h in Headers */,
4314D17F1D0E0E3B004B36C9 /* UIButton+WebCache.h in Headers */,
32484764201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */,
4314D1811D0E0E3B004B36C9 /* UIImageView+WebCache.h in Headers */,
4314D1841D0E0E3B004B36C9 /* SDWebImageOperation.h in Headers */,
4314D1851D0E0E3B004B36C9 /* SDWebImageDownloaderOperation.h in Headers */,
@@ -2266,6 +2328,7 @@
80377EC81F2F66D500F89830 /* alphai_dec.h in Headers */,
43A62A1B1D0E0A800089D7DD /* decode.h in Headers */,
321E608A1F38E8C800405457 /* SDWebImageCoder.h in Headers */,
32484767201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */,
80377C601F2F666400F89830 /* bit_reader_inl_utils.h in Headers */,
329A185D1FFF5DFD008C9A2F /* UIImage+WebCache.h in Headers */,
431BB6DC1D06D2C1006A3455 /* UIButton+WebCache.h in Headers */,
@@ -2328,9 +2391,11 @@
321E60B41F38E90100405457 /* SDWebImageWebPCoder.h in Headers */,
32F7C0732030114C00873181 /* SDWebImageTransformer.h in Headers */,
431BB6FA1D06D2C1006A3455 /* SDWebImageDownloader.h in Headers */,
3248476D201775F600AF9E5A /* SDAnimatedImageView.h in Headers */,
80377DF51F2F66A800F89830 /* common_sse2.h in Headers */,
323F8BDC1F38EF770092B609 /* vp8i_enc.h in Headers */,
80377ED21F2F66D500F89830 /* vp8i_dec.h in Headers */,
32484779201775F600AF9E5A /* SDAnimatedImage.h in Headers */,
43A918681D8308FE00B3925F /* SDImageCacheConfig.h in Headers */,
);
runOnlyForDeploymentPostprocessing = 0;
@@ -2358,6 +2423,7 @@
321E60991F38E8ED00405457 /* SDWebImageImageIOCoder.h in Headers */,
323F8B8B1F38EF770092B609 /* histogram_enc.h in Headers */,
4397D2C41D0DDD8C00BB2784 /* SDImageCache.h in Headers */,
3248476E201775F600AF9E5A /* SDAnimatedImageView.h in Headers */,
4397D2C51D0DDD8C00BB2784 /* UIImageView+WebCache.h in Headers */,
3290FA091FA478AF0047D20C /* SDWebImageFrame.h in Headers */,
4369C27C1D9807EC007E863A /* UIView+WebCache.h in Headers */,
@@ -2371,6 +2437,7 @@
4397D2D11D0DDD8C00BB2784 /* decode.h in Headers */,
80377E481F2F66A800F89830 /* dsp.h in Headers */,
323F8BE91F38EF770092B609 /* vp8li_enc.h in Headers */,
3248477A201775F600AF9E5A /* SDAnimatedImage.h in Headers */,
329A185E1FFF5DFD008C9A2F /* UIImage+WebCache.h in Headers */,
320224BB203979BA00E9F285 /* SDAnimatedImageRep.h in Headers */,
80377E761F2F66A800F89830 /* yuv.h in Headers */,
@@ -2406,6 +2473,7 @@
321E608B1F38E8C800405457 /* SDWebImageCoder.h in Headers */,
323F8B731F38EF770092B609 /* delta_palettization_enc.h in Headers */,
321E60C31F38E91700405457 /* UIImage+ForceDecode.h in Headers */,
32484768201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */,
80377E561F2F66A800F89830 /* lossless_common.h in Headers */,
4397D2E91D0DDD8C00BB2784 /* UIImage+WebP.h in Headers */,
325312CD200F09910046BF1E /* SDWebImageTransition.h in Headers */,
@@ -2473,12 +2541,14 @@
4A2CAE371AB4BB7500B6BC39 /* UIView+WebCacheOperation.h in Headers */,
80377C2E1F2F666300F89830 /* bit_reader_utils.h in Headers */,
80377C371F2F666300F89830 /* huffman_encode_utils.h in Headers */,
32484777201775F600AF9E5A /* SDAnimatedImage.h in Headers */,
4A2CAE2F1AB4BB7500B6BC39 /* UIImage+MultiFormat.h in Headers */,
325312CA200F09910046BF1E /* SDWebImageTransition.h in Headers */,
323F8C101F38EF770092B609 /* muxi.h in Headers */,
80377EB41F2F66D400F89830 /* vp8li_dec.h in Headers */,
4A2CAE1A1AB4BB6400B6BC39 /* SDWebImageOperation.h in Headers */,
80377C331F2F666300F89830 /* endian_inl_utils.h in Headers */,
32484765201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */,
321E60A41F38E8F600405457 /* SDWebImageGIFCoder.h in Headers */,
32CF1C091FA496B000004BD1 /* SDWebImageCoderHelper.h in Headers */,
4A2CAE1B1AB4BB6800B6BC39 /* SDWebImageDownloader.h in Headers */,
@@ -2489,6 +2559,7 @@
80377EAA1F2F66D400F89830 /* common_dec.h in Headers */,
80377C451F2F666300F89830 /* utils.h in Headers */,
80377C411F2F666300F89830 /* rescaler_utils.h in Headers */,
3248476B201775F600AF9E5A /* SDAnimatedImageView.h in Headers */,
4A2CAE311AB4BB7500B6BC39 /* UIImage+WebP.h in Headers */,
323F8BF81F38EF770092B609 /* animi.h in Headers */,
80377C351F2F666300F89830 /* filters_utils.h in Headers */,
@@ -2536,6 +2607,7 @@
807A12281F89636300EC2A9B /* SDWebImageCodersManager.h in Headers */,
80377C051F2F665300F89830 /* huffman_utils.h in Headers */,
80377E881F2F66D000F89830 /* alphai_dec.h in Headers */,
32484775201775F600AF9E5A /* SDAnimatedImage.h in Headers */,
321E60941F38E8ED00405457 /* SDWebImageImageIOCoder.h in Headers */,
431738BD1CDFC2660008FEB9 /* decode.h in Headers */,
80377D0B1F2F66A100F89830 /* mips_macro.h in Headers */,
@@ -2558,6 +2630,7 @@
5376131F155AD0D5005750A4 /* UIButton+WebCache.h in Headers */,
53761320155AD0D5005750A4 /* UIImageView+WebCache.h in Headers */,
530E49E816464C25002868E7 /* SDWebImageOperation.h in Headers */,
32484769201775F600AF9E5A /* SDAnimatedImageView.h in Headers */,
80377E961F2F66D000F89830 /* webpi_dec.h in Headers */,
80377BF81F2F665300F89830 /* bit_reader_inl_utils.h in Headers */,
530E49EA16464C7C002868E7 /* SDWebImageDownloaderOperation.h in Headers */,
@@ -2575,6 +2648,7 @@
321E60861F38E8C800405457 /* SDWebImageCoder.h in Headers */,
321E60B01F38E90100405457 /* SDWebImageWebPCoder.h in Headers */,
80377C0D1F2F665300F89830 /* rescaler_utils.h in Headers */,
32484763201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */,
80377E911F2F66D000F89830 /* vp8_dec.h in Headers */,
323F8B6E1F38EF770092B609 /* delta_palettization_enc.h in Headers */,
438096721CDFC08200DC626B /* MKAnnotationView+WebCache.h in Headers */,
@@ -2885,6 +2959,7 @@
80377EBF1F2F66D500F89830 /* tree_dec.c in Sources */,
80377DD21F2F66A700F89830 /* lossless_enc_sse41.c in Sources */,
80377DB31F2F66A700F89830 /* cost_sse2.c in Sources */,
32484760201775F600AF9E5A /* SDAnimatedImageView.m in Sources */,
80377DDE1F2F66A700F89830 /* rescaler_mips32.c in Sources */,
80377DCA1F2F66A700F89830 /* filters_sse2.c in Sources */,
80377EBE1F2F66D500F89830 /* quant_dec.c in Sources */,
@ -2894,6 +2969,7 @@
80377DC11F2F66A700F89830 /* enc_mips32.c in Sources */,
80377DBC1F2F66A700F89830 /* dec_sse41.c in Sources */,
80377DCE1F2F66A700F89830 /* lossless_enc_mips32.c in Sources */,
3248477E201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */,
80377DCB1F2F66A700F89830 /* filters.c in Sources */,
80377DAA1F2F66A700F89830 /* alpha_processing_sse2.c in Sources */,
43A9186E1D8308FE00B3925F /* SDImageCacheConfig.m in Sources */,
@ -2919,6 +2995,7 @@
80377C561F2F666300F89830 /* quant_levels_utils.c in Sources */,
323F8BCF1F38EF770092B609 /* token_enc.c in Sources */,
80377DD11F2F66A700F89830 /* lossless_enc_sse2.c in Sources */,
32484772201775F600AF9E5A /* SDAnimatedImage.m in Sources */,
323F8C1D1F38EF770092B609 /* muxread.c in Sources */,
807A12311F89636300EC2A9B /* SDWebImageCodersManager.m in Sources */,
80377C491F2F666300F89830 /* bit_writer_utils.c in Sources */,
@ -3052,8 +3129,11 @@
323F8B7B1F38EF770092B609 /* frame_enc.c in Sources */,
80377D211F2F66A700F89830 /* alpha_processing_sse41.c in Sources */,
323F8B8D1F38EF770092B609 /* iterator_enc.c in Sources */,
3248475E201775F600AF9E5A /* SDAnimatedImageView.m in Sources */,
80377D481F2F66A700F89830 /* lossless_enc_sse41.c in Sources */,
32484770201775F600AF9E5A /* SDAnimatedImage.m in Sources */,
323F8BA91F38EF770092B609 /* picture_psnr_enc.c in Sources */,
3248477C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */,
323F8C091F38EF770092B609 /* muxedit.c in Sources */,
80377D1F1F2F66A700F89830 /* alpha_processing_neon.c in Sources */,
32C0FDE82013426C001B8F2D /* SDWebImageIndicator.m in Sources */,
@ -3202,8 +3282,11 @@
80377ECC1F2F66D500F89830 /* idec_dec.c in Sources */,
323F8B7E1F38EF770092B609 /* frame_enc.c in Sources */,
80377E171F2F66A800F89830 /* lossless_enc_sse41.c in Sources */,
32484761201775F600AF9E5A /* SDAnimatedImageView.m in Sources */,
323F8B901F38EF770092B609 /* iterator_enc.c in Sources */,
32484773201775F600AF9E5A /* SDAnimatedImage.m in Sources */,
80377C611F2F666400F89830 /* bit_reader_utils.c in Sources */,
3248477F201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */,
323F8BAC1F38EF770092B609 /* picture_psnr_enc.c in Sources */,
323F8C0C1F38EF770092B609 /* muxedit.c in Sources */,
32C0FDEB2013426C001B8F2D /* SDWebImageIndicator.m in Sources */,
@ -3308,6 +3391,7 @@
80377E4C1F2F66A800F89830 /* enc_msa.c in Sources */,
80377E4E1F2F66A800F89830 /* enc_sse2.c in Sources */,
80377E6C1F2F66A800F89830 /* rescaler.c in Sources */,
32484762201775F600AF9E5A /* SDAnimatedImageView.m in Sources */,
80377EE31F2F66D500F89830 /* vp8l_dec.c in Sources */,
80377ED71F2F66D500F89830 /* alpha_dec.c in Sources */,
323F8B7F1F38EF770092B609 /* frame_enc.c in Sources */,
@ -3385,6 +3469,7 @@
323F8B5B1F38EF770092B609 /* config_enc.c in Sources */,
80377E361F2F66A800F89830 /* alpha_processing.c in Sources */,
80377E351F2F66A800F89830 /* alpha_processing_sse41.c in Sources */,
32484780201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */,
323F8B791F38EF770092B609 /* filter_enc.c in Sources */,
80377EDD1F2F66D500F89830 /* io_dec.c in Sources */,
43A918701D8308FE00B3925F /* SDImageCacheConfig.m in Sources */,
@ -3402,6 +3487,7 @@
321E60BB1F38E90100405457 /* SDWebImageWebPCoder.m in Sources */,
80377E3C1F2F66A800F89830 /* cost_mips32.c in Sources */,
80377E421F2F66A800F89830 /* dec_mips32.c in Sources */,
32484774201775F600AF9E5A /* SDAnimatedImage.m in Sources */,
4397D2AE1D0DDD8C00BB2784 /* UIImageView+HighlightedWebCache.m in Sources */,
323F8B851F38EF770092B609 /* histogram_enc.c in Sources */,
80377EE51F2F66D500F89830 /* webp_dec.c in Sources */,
@ -3492,6 +3578,7 @@
80377EAF1F2F66D400F89830 /* tree_dec.c in Sources */,
4A2CAE281AB4BB7500B6BC39 /* MKAnnotationView+WebCache.m in Sources */,
4A2CAE261AB4BB7000B6BC39 /* SDWebImagePrefetcher.m in Sources */,
3248475F201775F600AF9E5A /* SDAnimatedImageView.m in Sources */,
80377C441F2F666300F89830 /* utils.c in Sources */,
80377D8D1F2F66A700F89830 /* lossless_enc_sse41.c in Sources */,
80377EAE1F2F66D400F89830 /* quant_dec.c in Sources */,
@ -3501,6 +3588,7 @@
80377D851F2F66A700F89830 /* filters_sse2.c in Sources */,
80377D711F2F66A700F89830 /* dec_clip_tables.c in Sources */,
43A9186D1D8308FE00B3925F /* SDImageCacheConfig.m in Sources */,
3248477D201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */,
80377D7C1F2F66A700F89830 /* enc_mips32.c in Sources */,
80377D771F2F66A700F89830 /* dec_sse41.c in Sources */,
80377D891F2F66A700F89830 /* lossless_enc_mips32.c in Sources */,
@ -3526,6 +3614,7 @@
4A2CAE191AB4BB6400B6BC39 /* SDWebImageCompat.m in Sources */,
80377DA11F2F66A700F89830 /* upsampling_sse2.c in Sources */,
323F8BCE1F38EF770092B609 /* token_enc.c in Sources */,
32484771201775F600AF9E5A /* SDAnimatedImage.m in Sources */,
80377C3C1F2F666300F89830 /* quant_levels_utils.c in Sources */,
323F8C1C1F38EF770092B609 /* muxread.c in Sources */,
807A12301F89636300EC2A9B /* SDWebImageCodersManager.m in Sources */,
@ -3646,6 +3735,7 @@
5376130C155AD0D5005750A4 /* SDWebImageManager.m in Sources */,
5376130D155AD0D5005750A4 /* SDWebImagePrefetcher.m in Sources */,
80377C101F2F665300F89830 /* utils.c in Sources */,
3248475D201775F600AF9E5A /* SDAnimatedImageView.m in Sources */,
80377D031F2F66A100F89830 /* lossless_enc_sse41.c in Sources */,
80377E8E1F2F66D000F89830 /* quant_dec.c in Sources */,
80377CE41F2F66A100F89830 /* cost_sse2.c in Sources */,
@ -3655,6 +3745,7 @@
80377CE71F2F66A100F89830 /* dec_clip_tables.c in Sources */,
43A9186B1D8308FE00B3925F /* SDImageCacheConfig.m in Sources */,
80377CF21F2F66A100F89830 /* enc_mips32.c in Sources */,
3248477B201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */,
80377CED1F2F66A100F89830 /* dec_sse41.c in Sources */,
80377CFF1F2F66A100F89830 /* lossless_enc_mips32.c in Sources */,
80377CFC1F2F66A100F89830 /* filters.c in Sources */,
@ -3680,6 +3771,7 @@
80377D171F2F66A100F89830 /* upsampling_sse2.c in Sources */,
323F8BCC1F38EF770092B609 /* token_enc.c in Sources */,
80377C081F2F665300F89830 /* quant_levels_utils.c in Sources */,
3248476F201775F600AF9E5A /* SDAnimatedImage.m in Sources */,
323F8C1A1F38EF770092B609 /* muxread.c in Sources */,
807A122E1F89636300EC2A9B /* SDWebImageCodersManager.m in Sources */,
80377BFB1F2F665300F89830 /* bit_writer_utils.c in Sources */,

View File

@ -17,8 +17,7 @@
@property (nonatomic, readonly, nullable) CGImageRef CGImage;
@property (nonatomic, readonly, nullable) NSArray<NSImage *> *images;
@property (nonatomic, readonly) CGFloat scale;
- (nonnull instancetype)initWithCGImage:(nonnull CGImageRef)cgImage scale:(CGFloat)scale;
@property (nonatomic, readonly, nullable) NSBitmapImageRep *bitmapImageRep;
@end

View File

@ -14,7 +14,7 @@
- (CGImageRef)CGImage {
NSRect imageRect = NSMakeRect(0, 0, self.size.width, self.size.height);
CGImageRef cgImage = [self CGImageForProposedRect:&imageRect context:NULL hints:nil];
CGImageRef cgImage = [self CGImageForProposedRect:&imageRect context:nil hints:nil];
return cgImage;
}
@ -24,28 +24,22 @@
- (CGFloat)scale {
CGFloat scale = 1;
NSRect imageRect = NSMakeRect(0, 0, self.size.width, self.size.height);
NSImageRep *rep = [self bestRepresentationForRect:imageRect context:NULL hints:nil];
NSInteger pixelsWide = rep.pixelsWide;
CGFloat width = rep.size.width;
CGFloat width = self.size.width;
if (width > 0) {
scale = pixelsWide / width;
// Use CGImage to get pixel width, NSImageRep.pixelsWide always double on Retina screen
NSUInteger pixelWidth = CGImageGetWidth(self.CGImage);
scale = pixelWidth / width;
}
return scale;
}
- (instancetype)initWithCGImage:(CGImageRef)cgImage scale:(CGFloat)scale {
NSSize size;
if (cgImage && scale > 0) {
NSInteger pixelsWide = CGImageGetWidth(cgImage);
NSInteger pixelsHigh = CGImageGetHeight(cgImage);
CGFloat width = pixelsWide / scale;
CGFloat height = pixelsHigh / scale;
size = NSMakeSize(width, height);
} else {
size = NSZeroSize;
- (NSBitmapImageRep *)bitmapImageRep {
NSRect imageRect = NSMakeRect(0, 0, self.size.width, self.size.height);
NSImageRep *imageRep = [self bestRepresentationForRect:imageRect context:nil hints:nil];
if ([imageRep isKindOfClass:[NSBitmapImageRep class]]) {
return (NSBitmapImageRep *)imageRep;
}
return [self initWithCGImage:cgImage size:size];
return nil;
}
@end

View File

@ -0,0 +1,90 @@
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import "SDWebImageCompat.h"
#import "SDWebImageCoder.h"
/**
 This protocol is for the `SDAnimatedImage` class only, not for `SDWebImageAnimatedCoder`.
 If you want to provide a custom animated image class with the full set of advanced features,
 conform to this protocol instead of the base `SDAnimatedImageProvider` protocol.
 */
@protocol SDAnimatedImage <SDAnimatedImageProvider>
@required
/**
 Initializes the image with an animated coder. The coder can then be used to decode
 individual image frames on demand.
 @note Normally `initWithData:scale:` is used to create a custom animated image instance.
 However, for progressive image decoding this initializer is used instead, because the
 coder already holds the (partial) image data.
 @param animatedCoder An animated coder which conforms to the `SDWebImageAnimatedCoder` protocol
 @param scale The scale factor to assume when interpreting the image data. Applying a scale factor of 1.0 results in an image whose size matches the pixel-based dimensions of the image. Applying a different scale factor changes the size of the image as reported by the `size` property.
 @return An initialized object
 */
- (nullable instancetype)initWithAnimatedCoder:(nonnull id<SDWebImageAnimatedCoder>)animatedCoder scale:(CGFloat)scale;
@optional
/**
 Pre-load all animated image frames into memory. Subsequent frame requests for any index
 can then return the cached frame directly without decoding.
 This method may be called on a background thread.
 @note If one image instance is shared by many imageViews, CPU usage for a large animated
 image can spike because frame indexes are requested out of order and the decoder must do
 extra work to stay re-entrant. Preloading trades extra memory for that CPU cost.
 */
- (void)preloadAllFrames;
/**
 Unload all pre-loaded animated image frames from memory. Subsequent frame requests will
 decode again on demand. Use this to free up memory when needed.
 */
- (void)unloadAllFrames;
/**
 Returns a Boolean value indicating whether all animated image frames are already pre-loaded into memory.
 */
- (BOOL)isAllFramesLoaded;
@end
@interface SDAnimatedImage : UIImage <SDAnimatedImage>
// This class overrides these creation methods from UIImage (NSImage on macOS), and it supports NSSecureCoding.
// Use these methods to create a new animated image; any other creation method just calls super.
+ (nullable instancetype)imageNamed:(nonnull NSString *)name; // Cached in memory; no Asset Catalog support
#if __has_include(<UIKit/UITraitCollection.h>)
+ (nullable instancetype)imageNamed:(nonnull NSString *)name inBundle:(nullable NSBundle *)bundle compatibleWithTraitCollection:(nullable UITraitCollection *)traitCollection; // Cached in memory; no Asset Catalog support
#endif
+ (nullable instancetype)imageWithContentsOfFile:(nonnull NSString *)path;
+ (nullable instancetype)imageWithData:(nonnull NSData *)data;
+ (nullable instancetype)imageWithData:(nonnull NSData *)data scale:(CGFloat)scale;
// Derives the scale from an @2x/@3x-style suffix in the file name.
- (nullable instancetype)initWithContentsOfFile:(nonnull NSString *)path;
- (nullable instancetype)initWithData:(nonnull NSData *)data;
- (nullable instancetype)initWithData:(nonnull NSData *)data scale:(CGFloat)scale;
// Designated initializer; see the `SDAnimatedImage` protocol documentation above.
- (nullable instancetype)initWithAnimatedCoder:(nonnull id<SDWebImageAnimatedCoder>)animatedCoder scale:(CGFloat)scale;
/**
 Current animated image format.
 */
@property (nonatomic, assign, readonly) SDImageFormat animatedImageFormat;
/**
 Current animated image data. You can use this instead of CGImage to create another instance.
 */
@property (nonatomic, copy, readonly, nullable) NSData *animatedImageData;
/**
 The scale factor of the image.
 @note For UIKit, this just calls super.
 @note For AppKit, `NSImage` can contain multiple image representations with different scales. However, this class does not do that by design. We process the scale like UIKit does and store it as extra information for correct rendering in `SDAnimatedImageView`.
 */
@property (nonatomic, readonly) CGFloat scale;
// By default, animated image frames are decoded just in time and are not kept in memory. You can choose to preload them into memory instead; see the description in the `SDAnimatedImage` protocol.
// After preloading, performance is close to UIImage's `animatedImageWithImages:duration:`. But UIImage's animation has issues such as blanking and pausing during segues when used in `UIImageView`. It's recommended to preload only if needed.
- (void)preloadAllFrames;
- (void)unloadAllFrames;
@property (nonatomic, assign, readonly, getter=isAllFramesLoaded) BOOL allFramesLoaded;
@end

View File

@ -0,0 +1,411 @@
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import "SDAnimatedImage.h"
#import "NSImage+Additions.h"
#import "UIImage+WebCache.h"
#import "SDWebImageCoder.h"
#import "SDWebImageCodersManager.h"
#import "SDWebImageFrame.h"
// Serializes access to state guarded by the enclosing instance's `_lock` semaphore.
// Usage: LOCK({ ...critical section... });
#define LOCK(...) dispatch_semaphore_wait(self->_lock, DISPATCH_TIME_FOREVER); \
__VA_ARGS__; \
dispatch_semaphore_signal(self->_lock);
/// Parses an "@2x"/"@3x"-style scale suffix out of a file path's base name.
/// Returns 1 when the path is empty, points at a directory, or carries no scale suffix.
static CGFloat SDImageScaleFromPath(NSString *path) {
    if (path.length == 0 || [path hasSuffix:@"/"]) {
        return 1;
    }
    NSString *baseName = path.stringByDeletingPathExtension;
    __block CGFloat parsedScale = 1;
    NSRegularExpression *scaleSuffix = [NSRegularExpression regularExpressionWithPattern:@"@[0-9]+\\.?[0-9]*x$" options:NSRegularExpressionAnchorsMatchLines error:nil];
    [scaleSuffix enumerateMatchesInString:baseName options:kNilOptions range:NSMakeRange(0, baseName.length) usingBlock:^(NSTextCheckingResult *match, NSMatchingFlags flags, BOOL *stop) {
        // Require a few characters of real name before the "@…x" suffix.
        if (match.range.location >= 3) {
            // Strip the leading "@" and trailing "x"; keep only the numeric part.
            NSRange digits = NSMakeRange(match.range.location + 1, match.range.length - 2);
            parsedScale = [path substringWithRange:digits].doubleValue;
        }
    }];
    return parsedScale;
}
/// Returns the scale lookup order for bundle resource resolution, computed once and
/// ordered so that the current screen's scale is tried first.
static NSArray *SDBundlePreferredScales() {
    static NSArray *preferredScales;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
#if SD_WATCH
        CGFloat screenScale = [WKInterfaceDevice currentDevice].screenScale;
#elif SD_UIKIT
        CGFloat screenScale = [UIScreen mainScreen].scale;
#elif SD_MAC
        CGFloat screenScale = [NSScreen mainScreen].backingScaleFactor;
#endif
        // Prefer the screen's own scale bucket, then fall back to the others.
        BOOL isStandard = screenScale <= 1;
        BOOL isRetina2x = screenScale <= 2;
        preferredScales = isStandard ? @[@1, @2, @3] : (isRetina2x ? @[@2, @3, @1] : @[@3, @2, @1]);
    });
    return preferredScales;
}
#pragma mark - UIImage cache for bundle
// Apple parses the Asset Catalog compiled file (`Assets.car`) via CoreUI.framework; however, that framework is private and there is no other way to read the data directly. So we just process plain image files shipped in the bundle. :)
@interface SDImageAssetManager : NSObject {
    // Guards `imageTable` against concurrent access (used via the LOCK macro).
    dispatch_semaphore_t _lock;
}
// Name → image in-memory cache. Keys are copied; value memory semantics are platform-dependent (see -init).
@property (nonatomic, strong) NSMapTable<NSString *, UIImage *> *imageTable;
+ (instancetype)sharedAssetManager;
// Resolves a bundle resource path for `name`, honoring @2x/@3x-style scale suffixes.
// `scale` is in-out: pass the preferred scale (0 = any); on return it holds the scale that was matched.
- (nullable NSString *)getPathForName:(nonnull NSString *)name bundle:(nonnull NSBundle *)bundle preferredScale:(CGFloat *)scale;
- (nullable UIImage *)imageForName:(nonnull NSString *)name;
- (void)storeImage:(nonnull UIImage *)image forName:(nonnull NSString *)name;
@end
@implementation SDImageAssetManager

/// Process-wide singleton, created exactly once via dispatch_once.
+ (instancetype)sharedAssetManager {
    static dispatch_once_t onceToken;
    static SDImageAssetManager *assetManager;
    dispatch_once(&onceToken, ^{
        assetManager = [[SDImageAssetManager alloc] init];
    });
    return assetManager;
}

- (instancetype)init {
    self = [super init];
    if (self) {
        NSPointerFunctionsOptions valueOptions;
#if SD_MAC
        // Apple says that NSImage uses a weak reference to the value
        valueOptions = NSPointerFunctionsWeakMemory;
#else
        // Apple says that UIImage uses a strong reference to the value
        valueOptions = NSPointerFunctionsStrongMemory;
#endif
        _imageTable = [NSMapTable mapTableWithKeyOptions:NSPointerFunctionsCopyIn valueOptions:valueOptions];
        _lock = dispatch_semaphore_create(1);
#if SD_UIKIT
        // Flush the in-memory cache when the app comes under memory pressure.
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveMemoryWarning:) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
#endif
    }
    return self;
}

- (void)dealloc {
#if SD_UIKIT
    [[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
#endif
}

/// Drops every cached image under memory pressure; entries are re-created on demand.
- (void)didReceiveMemoryWarning:(NSNotification *)notification {
    LOCK({
        [self.imageTable removeAllObjects];
    });
}

/// Resolves the on-disk path for `name` inside `bundle`.
/// Lookup order: explicit @Nx suffix already in `name`, then the caller-provided scale,
/// then the screen-preferred scales, then a suffix-free lookup.
/// `scale` is updated in place whenever a scale-suffixed file is matched.
- (NSString *)getPathForName:(NSString *)name bundle:(NSBundle *)bundle preferredScale:(CGFloat *)scale {
    NSParameterAssert(name);
    NSParameterAssert(bundle);
    NSString *path;
    if (name.length == 0) {
        return path;
    }
    if ([name hasSuffix:@"/"]) {
        // Directory-like names cannot be image resources.
        return path;
    }
    NSString *extension = name.pathExtension;
    if (extension.length == 0) {
        // If no extension, follow Apple's doc: assume PNG format
        extension = @"png";
    }
    name = [name stringByDeletingPathExtension];
    CGFloat providedScale = *scale;
    NSArray *scales = SDBundlePreferredScales();
    // Check if the file name itself already contains a scale suffix
    for (size_t i = 0; i < scales.count; i++) {
        NSNumber *scaleValue = scales[i];
        if ([name hasSuffix:[NSString stringWithFormat:@"@%@x", scaleValue]]) {
            path = [bundle pathForResource:name ofType:extension];
            if (path) {
                *scale = scaleValue.doubleValue; // override
                return path;
            }
        }
    }
    // Search with the provided scale first
    if (providedScale != 0) {
        NSString *scaledName = [name stringByAppendingFormat:@"@%@x", @(providedScale)];
        path = [bundle pathForResource:scaledName ofType:extension];
        if (path) {
            return path;
        }
    }
    // Search with the preferred scales
    for (size_t i = 0; i < scales.count; i++) {
        NSNumber *scaleValue = scales[i];
        if (scaleValue.doubleValue == providedScale) {
            // Already tried above; ignore provided scale
            continue;
        }
        NSString *scaledName = [name stringByAppendingFormat:@"@%@x", scaleValue];
        path = [bundle pathForResource:scaledName ofType:extension];
        if (path) {
            *scale = scaleValue.doubleValue; // override
            return path;
        }
    }
    // Search without any scale suffix
    path = [bundle pathForResource:name ofType:extension];
    return path;
}

/// Thread-safe cache lookup; returns nil on a miss.
- (UIImage *)imageForName:(NSString *)name {
    NSParameterAssert(name);
    UIImage *image;
    LOCK({
        image = [self.imageTable objectForKey:name];
    });
    return image;
}

/// Thread-safe cache insert; the key is copied by the map table.
- (void)storeImage:(UIImage *)image forName:(NSString *)name {
    NSParameterAssert(image);
    NSParameterAssert(name);
    LOCK({
        [self.imageTable setObject:image forKey:name];
    });
}
@end
@interface SDAnimatedImage ()
// The animated coder that owns the image data and decodes individual frames on demand.
@property (nonatomic, strong) id<SDWebImageAnimatedCoder> coder;
// Redeclared readwrite internally; detected from the coder's data during init.
@property (nonatomic, assign, readwrite) SDImageFormat animatedImageFormat;
@property (atomic, copy) NSArray<SDWebImageFrame *> *loadedAnimatedImageFrames; // Marked atomic to keep it thread-safe
// Redeclared readwrite internally; toggled by preloadAllFrames/unloadAllFrames.
@property (nonatomic, assign, getter=isAllFramesLoaded) BOOL allFramesLoaded;
@end
@implementation SDAnimatedImage
#if SD_UIKIT || SD_WATCH
@dynamic scale; // On UIKit/watchOS, UIImage already carries the scale; just call super
#endif

#pragma mark - UIImage override method

/// Looks the image up via SDImageAssetManager's in-memory cache and bundle file search.
/// No Asset Catalog (`Assets.car`) support.
+ (instancetype)imageNamed:(NSString *)name {
#if __has_include(<UIKit/UITraitCollection.h>)
    return [self imageNamed:name inBundle:nil compatibleWithTraitCollection:nil];
#else
    return [self imageNamed:name inBundle:nil scale:0];
#endif
}
#if __has_include(<UIKit/UITraitCollection.h>)
/// Resolves the display scale from the trait collection (falling back to the main screen's traits).
+ (instancetype)imageNamed:(NSString *)name inBundle:(NSBundle *)bundle compatibleWithTraitCollection:(UITraitCollection *)traitCollection {
    if (!traitCollection) {
        traitCollection = UIScreen.mainScreen.traitCollection;
    }
    CGFloat scale = traitCollection.displayScale;
    return [self imageNamed:name inBundle:bundle scale:scale];
}
#endif
// A scale of 0 means: detect the scale automatically (file-name suffix, then screen-preferred scales)
+ (instancetype)imageNamed:(NSString *)name inBundle:(NSBundle *)bundle scale:(CGFloat)scale {
    if (!name) {
        return nil;
    }
    if (!bundle) {
        bundle = [NSBundle mainBundle];
    }
    SDImageAssetManager *assetManager = [SDImageAssetManager sharedAssetManager];
    SDAnimatedImage *image = (SDAnimatedImage *)[assetManager imageForName:name];
    if ([image isKindOfClass:[SDAnimatedImage class]]) {
        // Cache hit with the right class; return it directly.
        return image;
    }
    NSString *path = [assetManager getPathForName:name bundle:bundle preferredScale:&scale];
    if (!path) {
        return image;
    }
    NSData *data = [NSData dataWithContentsOfFile:path];
    if (!data) {
        return image;
    }
    image = [[self alloc] initWithData:data scale:scale];
    if (image) {
        [assetManager storeImage:image forName:name];
    }
    return image;
}

+ (instancetype)imageWithContentsOfFile:(NSString *)path {
    return [[self alloc] initWithContentsOfFile:path];
}

+ (instancetype)imageWithData:(NSData *)data {
    return [[self alloc] initWithData:data];
}

+ (instancetype)imageWithData:(NSData *)data scale:(CGFloat)scale {
    return [[self alloc] initWithData:data scale:scale];
}

- (instancetype)initWithContentsOfFile:(NSString *)path {
    NSData *data = [NSData dataWithContentsOfFile:path];
    // Derive the scale from an @2x/@3x-style suffix in the file name.
    return [self initWithData:data scale:SDImageScaleFromPath(path)];
}

- (instancetype)initWithData:(NSData *)data {
    return [self initWithData:data scale:1];
}

/// Finds the first registered coder that is animated-capable and can decode this data,
/// then hands off to `initWithAnimatedCoder:scale:`. Returns nil when no coder matches.
- (instancetype)initWithData:(NSData *)data scale:(CGFloat)scale {
    if (!data || data.length == 0) {
        return nil;
    }
    data = [data copy]; // avoid mutable data
    id<SDWebImageAnimatedCoder> animatedCoder = nil;
    for (id<SDWebImageCoder>coder in [SDWebImageCodersManager sharedManager].coders) {
        if ([coder conformsToProtocol:@protocol(SDWebImageAnimatedCoder)]) {
            if ([coder canDecodeFromData:data]) {
                animatedCoder = [[[coder class] alloc] initWithAnimatedImageData:data];
                break;
            }
        }
    }
    if (!animatedCoder) {
        return nil;
    }
    return [self initWithAnimatedCoder:animatedCoder scale:scale];
}

/// Designated initializer: the first decoded frame becomes the poster image backing
/// this UIImage/NSImage; the coder is retained for on-demand frame decoding.
- (instancetype)initWithAnimatedCoder:(id<SDWebImageAnimatedCoder>)animatedCoder scale:(CGFloat)scale {
    if (!animatedCoder) {
        return nil;
    }
    if (scale <= 0) {
        scale = 1;
    }
    UIImage *image = [animatedCoder animatedImageFrameAtIndex:0];
    if (!image) {
        return nil;
    }
#if SD_MAC
    self = [super initWithCGImage:image.CGImage size:NSZeroSize];
#else
    self = [super initWithCGImage:image.CGImage scale:scale orientation:image.imageOrientation];
#endif
    if (self) {
        _coder = animatedCoder;
#if SD_MAC
        // NSImage has no scale concept; store it ourselves (see the `scale` property note in the header).
        _scale = scale;
#endif
        NSData *data = [animatedCoder animatedImageData];
        SDImageFormat format = [NSData sd_imageFormatForImageData:data];
        _animatedImageFormat = format;
    }
    return self;
}

#pragma mark - Preload

- (void)preloadAllFrames {
    if (!self.isAllFramesLoaded) {
        NSMutableArray<SDWebImageFrame *> *frames = [NSMutableArray arrayWithCapacity:self.animatedImageFrameCount];
        for (size_t i = 0; i < self.animatedImageFrameCount; i++) {
            UIImage *image = [self animatedImageFrameAtIndex:i];
            NSTimeInterval duration = [self animatedImageDurationAtIndex:i];
            SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:image duration:duration]; // though the image should be nonnull, `animatedImageFrameAtIndex:` is declared nullable
            [frames addObject:frame];
        }
        self.loadedAnimatedImageFrames = frames;
        self.allFramesLoaded = YES;
    }
}

- (void)unloadAllFrames {
    if (self.isAllFramesLoaded) {
        self.loadedAnimatedImageFrames = nil;
        self.allFramesLoaded = NO;
    }
}

#pragma mark - NSSecureCoding

- (instancetype)initWithCoder:(NSCoder *)aDecoder {
    NSNumber *scale = [aDecoder decodeObjectOfClass:[NSNumber class] forKey:NSStringFromSelector(@selector(scale))];
    NSData *animatedImageData = [aDecoder decodeObjectOfClass:[NSData class] forKey:NSStringFromSelector(@selector(animatedImageData))];
    if (animatedImageData) {
        // Re-decode from the archived animated data; this also restores the coder.
        return [self initWithData:animatedImageData scale:scale.doubleValue];
    } else {
        // Not an animated archive; fall back to plain UIImage/NSImage decoding.
        return [super initWithCoder:aDecoder];
    }
}

- (void)encodeWithCoder:(NSCoder *)aCoder {
    if (self.animatedImageData) {
        // Archive the compressed animated data plus the scale, not the decoded bitmap.
        [aCoder encodeObject:self.animatedImageData forKey:NSStringFromSelector(@selector(animatedImageData))];
        [aCoder encodeObject:@(self.scale) forKey:NSStringFromSelector(@selector(scale))];
    } else {
        [super encodeWithCoder:aCoder];
    }
}

+ (BOOL)supportsSecureCoding {
    return YES;
}

#pragma mark - SDAnimatedImage

- (NSData *)animatedImageData {
    return [self.coder animatedImageData];
}

- (NSUInteger)animatedImageLoopCount {
    return [self.coder animatedImageLoopCount];
}

- (NSUInteger)animatedImageFrameCount {
    return [self.coder animatedImageFrameCount];
}

- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
    if (index >= self.animatedImageFrameCount) {
        return nil;
    }
    if (self.isAllFramesLoaded) {
        // Serve from the preloaded frame cache; no decoding needed.
        SDWebImageFrame *frame = [self.loadedAnimatedImageFrames objectAtIndex:index];
        return frame.image;
    }
    return [self.coder animatedImageFrameAtIndex:index];
}

- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
    if (index >= self.animatedImageFrameCount) {
        return 0;
    }
    if (self.isAllFramesLoaded) {
        SDWebImageFrame *frame = [self.loadedAnimatedImageFrames objectAtIndex:index];
        return frame.duration;
    }
    return [self.coder animatedImageDurationAtIndex:index];
}
@end

View File

@ -0,0 +1,126 @@
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import "SDAnimatedImageView.h"
#if SD_UIKIT || SD_MAC
#import "SDWebImageManager.h"
// Integrates SDAnimatedImageView with the SDWebImage loading pipeline.
@interface SDAnimatedImageView (WebCache)
/**
 * Set the imageView `image` with an `url`.
 *
 * The download is asynchronous and cached.
 *
 * @param url The url for the image.
 */
- (void)sd_setImageWithURL:(nullable NSURL *)url NS_REFINED_FOR_SWIFT;
/**
 * Set the imageView `image` with an `url` and a placeholder.
 *
 * The download is asynchronous and cached.
 *
 * @param url         The url for the image.
 * @param placeholder The image to be set initially, until the image request finishes.
 * @see sd_setImageWithURL:placeholderImage:options:
 */
- (void)sd_setImageWithURL:(nullable NSURL *)url
          placeholderImage:(nullable UIImage *)placeholder NS_REFINED_FOR_SWIFT;
/**
 * Set the imageView `image` with an `url`, placeholder and custom options.
 *
 * The download is asynchronous and cached.
 *
 * @param url         The url for the image.
 * @param placeholder The image to be set initially, until the image request finishes.
 * @param options     The options to use when downloading the image. @see SDWebImageOptions for the possible values.
 */
- (void)sd_setImageWithURL:(nullable NSURL *)url
          placeholderImage:(nullable UIImage *)placeholder
                   options:(SDWebImageOptions)options NS_REFINED_FOR_SWIFT;
/**
 * Set the imageView `image` with an `url` and a completion block.
 *
 * The download is asynchronous and cached.
 *
 * @param url            The url for the image.
 * @param completedBlock A block called when operation has been completed. This block has no return value
 *                       and takes the requested UIImage as first parameter. In case of error the image parameter
 *                       is nil and the second parameter may contain an NSError. The third parameter is a Boolean
 *                       indicating if the image was retrieved from the local cache or from the network.
 *                       The fourth parameter is the original image url.
 */
- (void)sd_setImageWithURL:(nullable NSURL *)url
                 completed:(nullable SDExternalCompletionBlock)completedBlock;
/**
 * Set the imageView `image` with an `url`, a placeholder and a completion block.
 *
 * The download is asynchronous and cached.
 *
 * @param url            The url for the image.
 * @param placeholder    The image to be set initially, until the image request finishes.
 * @param completedBlock A block called when operation has been completed. This block has no return value
 *                       and takes the requested UIImage as first parameter. In case of error the image parameter
 *                       is nil and the second parameter may contain an NSError. The third parameter is a Boolean
 *                       indicating if the image was retrieved from the local cache or from the network.
 *                       The fourth parameter is the original image url.
 */
- (void)sd_setImageWithURL:(nullable NSURL *)url
          placeholderImage:(nullable UIImage *)placeholder
                 completed:(nullable SDExternalCompletionBlock)completedBlock NS_REFINED_FOR_SWIFT;
/**
 * Set the imageView `image` with an `url`, placeholder, custom options and a completion block.
 *
 * The download is asynchronous and cached.
 *
 * @param url            The url for the image.
 * @param placeholder    The image to be set initially, until the image request finishes.
 * @param options        The options to use when downloading the image. @see SDWebImageOptions for the possible values.
 * @param completedBlock A block called when operation has been completed. This block has no return value
 *                       and takes the requested UIImage as first parameter. In case of error the image parameter
 *                       is nil and the second parameter may contain an NSError. The third parameter is a Boolean
 *                       indicating if the image was retrieved from the local cache or from the network.
 *                       The fourth parameter is the original image url.
 */
- (void)sd_setImageWithURL:(nullable NSURL *)url
          placeholderImage:(nullable UIImage *)placeholder
                   options:(SDWebImageOptions)options
                 completed:(nullable SDExternalCompletionBlock)completedBlock;
/**
 * Set the imageView `image` with an `url`, placeholder, custom options, a progress block and a completion block.
 *
 * The download is asynchronous and cached.
 *
 * @param url            The url for the image.
 * @param placeholder    The image to be set initially, until the image request finishes.
 * @param options        The options to use when downloading the image. @see SDWebImageOptions for the possible values.
 * @param progressBlock  A block called while image is downloading
 *                       @note the progress block is executed on a background queue
 * @param completedBlock A block called when operation has been completed. This block has no return value
 *                       and takes the requested UIImage as first parameter. In case of error the image parameter
 *                       is nil and the second parameter may contain an NSError. The third parameter is a Boolean
 *                       indicating if the image was retrieved from the local cache or from the network.
 *                       The fourth parameter is the original image url.
 */
- (void)sd_setImageWithURL:(nullable NSURL *)url
          placeholderImage:(nullable UIImage *)placeholder
                   options:(SDWebImageOptions)options
                  progress:(nullable SDWebImageDownloaderProgressBlock)progressBlock
                 completed:(nullable SDExternalCompletionBlock)completedBlock;
@end
#endif

View File

@ -0,0 +1,61 @@
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import "SDAnimatedImageView+WebCache.h"
#if SD_UIKIT || SD_MAC
#import "UIView+WebCache.h"
#import "SDAnimatedImage.h"
@implementation SDAnimatedImageView (WebCache)

/// Convenience: no placeholder, default options, no callbacks.
- (void)sd_setImageWithURL:(nullable NSURL *)url {
    [self sd_setImageWithURL:url placeholderImage:nil];
}

/// Convenience: placeholder only.
- (void)sd_setImageWithURL:(nullable NSURL *)url placeholderImage:(nullable UIImage *)placeholder {
    [self sd_setImageWithURL:url placeholderImage:placeholder options:0];
}

/// Convenience: placeholder + options, no callbacks.
- (void)sd_setImageWithURL:(nullable NSURL *)url placeholderImage:(nullable UIImage *)placeholder options:(SDWebImageOptions)options {
    [self sd_setImageWithURL:url placeholderImage:placeholder options:options progress:nil completed:nil];
}

/// Convenience: completion block only.
- (void)sd_setImageWithURL:(nullable NSURL *)url completed:(nullable SDExternalCompletionBlock)completedBlock {
    [self sd_setImageWithURL:url placeholderImage:nil completed:completedBlock];
}

/// Convenience: placeholder + completion block.
- (void)sd_setImageWithURL:(nullable NSURL *)url placeholderImage:(nullable UIImage *)placeholder completed:(nullable SDExternalCompletionBlock)completedBlock {
    [self sd_setImageWithURL:url placeholderImage:placeholder options:0 completed:completedBlock];
}

/// Convenience: placeholder + options + completion block.
- (void)sd_setImageWithURL:(nullable NSURL *)url placeholderImage:(nullable UIImage *)placeholder options:(SDWebImageOptions)options completed:(nullable SDExternalCompletionBlock)completedBlock {
    [self sd_setImageWithURL:url placeholderImage:placeholder options:options progress:nil completed:completedBlock];
}

/// Full entry point: loads through the shared SDWebImage pipeline, passing
/// SDWebImageContextAnimatedImageClass so the result is decoded as SDAnimatedImage
/// and this view's animated rendering path is used.
- (void)sd_setImageWithURL:(nullable NSURL *)url
          placeholderImage:(nullable UIImage *)placeholder
                   options:(SDWebImageOptions)options
                  progress:(nullable SDWebImageDownloaderProgressBlock)progressBlock
                 completed:(nullable SDExternalCompletionBlock)completedBlock {
    SDWebImageContext *context = @{SDWebImageContextAnimatedImageClass : [SDAnimatedImage class]};
    [self sd_internalSetImageWithURL:url
                    placeholderImage:placeholder
                             options:options
                        operationKey:nil
                       setImageBlock:nil
                            progress:progressBlock
                           completed:completedBlock
                             context:context];
}
@end
#endif

View File

@ -0,0 +1,66 @@
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import "SDWebImageCompat.h"
#if SD_UIKIT || SD_MAC
#import "SDAnimatedImage.h"
/**
 A drop-in replacement for UIImageView/NSImageView that supports animated image rendering.
 Calling `setImage:` with a `UIImage(NSImage)` that conforms to the `SDAnimatedImage` protocol starts animated image rendering. Calling it with a normal UIImage(NSImage) falls back to normal UIImageView(NSImageView) rendering.
 For UIKit: use `-startAnimating`, `-stopAnimating` to control animating.
 For AppKit: use `-setAnimates:` to control animating. This view is layer-backed.
 */
@interface SDAnimatedImageView : UIImageView
/**
 The frame image currently being displayed. Nil when no animated image is set.
 */
@property (nonatomic, strong, readonly, nullable) UIImage *currentFrame;
/**
 Index of the frame currently being displayed, zero-based.
 */
@property (nonatomic, assign, readonly) NSUInteger currentFrameIndex;
/**
 Number of completed loops since animation most recently started.
 */
@property (nonatomic, assign, readonly) NSUInteger currentLoopCount;
/**
 YES to use the `animationRepeatCount` property instead of the image's own loop count for the animation loop count. Default is NO.
 */
@property (nonatomic, assign) BOOL shouldCustomLoopCount;
/**
 Total loop count for animated image rendering. Default is the animated image's loop count.
 If you need a custom loop count, set `shouldCustomLoopCount` to YES and change this value.
 This class overrides UIImageView's `animationRepeatCount` property on iOS; use this property as well.
 */
@property (nonatomic, assign) NSInteger animationRepeatCount;
/**
 Returns a Boolean value indicating whether the animation is running.
 This class overrides UIImageView's `animating` property on iOS; use this property as well.
 */
@property (nonatomic, readonly, getter=isAnimating) BOOL animating;
/**
 Maximum frame-buffer size in bytes. Used to adjust the frame buffer count; useful when decoding is expensive (such as Animated WebP software decoding). Default is 0.
 `0` means adjust automatically based on current memory usage.
 `1` means no buffer cache: each frame is decoded and then freed after rendering. (Lowest memory, highest CPU)
 `NSUIntegerMax` means cache every frame. (Lowest CPU, highest memory)
 */
@property (nonatomic, assign) NSUInteger maxBufferSize;
/**
 The run loop mode used for rendering.
 Default is NSRunLoopCommonModes on multi-core iOS devices, NSDefaultRunLoopMode on single-core iOS devices.
 This value has no effect on macOS.
 */
@property (nonatomic, copy, nonnull) NSString *runLoopMode;
@end
#endif

View File

@ -0,0 +1,799 @@
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import "SDAnimatedImageView.h"
#if SD_UIKIT || SD_MAC
#import "UIImage+WebCache.h"
#import "NSImage+Additions.h"
#import <mach/mach.h>
#import <objc/runtime.h>
#if SD_MAC
#import <CoreVideo/CoreVideo.h>
static CVReturn renderCallback(CVDisplayLinkRef displayLink, const CVTimeStamp *inNow, const CVTimeStamp *inOutputTime, CVOptionFlags flagsIn, CVOptionFlags *flagsOut, void *displayLinkContext);
#endif
/// Total physical RAM on the current device, in bytes.
static NSUInteger SDDeviceTotalMemory() {
    NSProcessInfo *processInfo = [NSProcessInfo processInfo];
    return (NSUInteger)processInfo.physicalMemory;
}
/// Free (unused) physical memory in bytes, as reported by the mach host,
/// or 0 if either mach query fails.
static NSUInteger SDDeviceFreeMemory() {
    mach_port_t hostPort = mach_host_self();
    mach_msg_type_number_t infoCount = sizeof(vm_statistics_data_t) / sizeof(integer_t);
    vm_size_t pageSize;
    vm_statistics_data_t vmStats;
    if (host_page_size(hostPort, &pageSize) != KERN_SUCCESS) {
        return 0;
    }
    if (host_statistics(hostPort, HOST_VM_INFO, (host_info_t)&vmStats, &infoCount) != KERN_SUCCESS) {
        return 0;
    }
    // free_count is in pages; convert to bytes.
    return vmStats.free_count * pageSize;
}
// Serialize access to shared state (the frame buffer) using the instance's
// `_lock` semaphore. The statements expand inline; the semaphore is held for
// the whole of `__VA_ARGS__`, so keep the critical section short.
#define LOCK(...) dispatch_semaphore_wait(self->_lock, DISPATCH_TIME_FOREVER); \
__VA_ARGS__; \
dispatch_semaphore_signal(self->_lock);
// A proxy that weakly holds its target and forwards every message to it.
// Used as the CADisplayLink target so the display link does not retain the
// image view (CADisplayLink retains its target until invalidated).
@interface SDWeakProxy : NSProxy

// The proxied object; nil after the target deallocates.
@property (nonatomic, weak, readonly) id target;

- (instancetype)initWithTarget:(id)target;
+ (instancetype)proxyWithTarget:(id)target;

@end

@implementation SDWeakProxy

- (instancetype)initWithTarget:(id)target {
    // NSProxy has no -init, so there is deliberately no [super init] here.
    _target = target;
    return self;
}

+ (instancetype)proxyWithTarget:(id)target {
    return [[SDWeakProxy alloc] initWithTarget:target];
}

// Fast path: hand the message straight to the target (nil once it's gone).
- (id)forwardingTargetForSelector:(SEL)selector {
    return _target;
}

// Slow path, reached only when the target is nil: return zero/nil so callers
// see the usual message-to-nil semantics instead of crashing.
- (void)forwardInvocation:(NSInvocation *)invocation {
    void *null = NULL;
    [invocation setReturnValue:&null];
}

// Any valid signature works here; -init's signature keeps forwarding alive
// even after the target has deallocated.
- (NSMethodSignature *)methodSignatureForSelector:(SEL)selector {
    return [NSObject instanceMethodSignatureForSelector:@selector(init)];
}

// The introspection methods below delegate to the target so the proxy is
// indistinguishable from it to observers.
- (BOOL)respondsToSelector:(SEL)aSelector {
    return [_target respondsToSelector:aSelector];
}

- (BOOL)isEqual:(id)object {
    return [_target isEqual:object];
}

- (NSUInteger)hash {
    return [_target hash];
}

- (Class)superclass {
    return [_target superclass];
}

- (Class)class {
    return [_target class];
}

- (BOOL)isKindOfClass:(Class)aClass {
    return [_target isKindOfClass:aClass];
}

- (BOOL)isMemberOfClass:(Class)aClass {
    return [_target isMemberOfClass:aClass];
}

- (BOOL)conformsToProtocol:(Protocol *)aProtocol {
    return [_target conformsToProtocol:aProtocol];
}

- (BOOL)isProxy {
    return YES;
}

- (NSString *)description {
    return [_target description];
}

- (NSString *)debugDescription {
    return [_target debugDescription];
}

@end
// Private state for the animation engine.
@interface SDAnimatedImageView () <CALayerDelegate>

@property (nonatomic, strong, readwrite) UIImage *currentFrame;
@property (nonatomic, assign, readwrite) NSUInteger currentFrameIndex;
@property (nonatomic, assign, readwrite) NSUInteger currentLoopCount;
// Frame count of the current animated image (0 when none).
@property (nonatomic, assign) NSUInteger totalFrameCount;
// Loop count declared by the current animated image (0 = infinite).
@property (nonatomic, assign) NSUInteger totalLoopCount;
// The image being animated; nil when rendering a plain image.
@property (nonatomic, strong) UIImage<SDAnimatedImage> *animatedImage;
// Decoded-frame cache keyed by frame index; guarded by `lock`.
@property (nonatomic, strong) NSMutableDictionary<NSNumber *, UIImage *> *frameBuffer;
// Accumulated display time within the current frame.
@property (nonatomic, assign) NSTimeInterval currentTime;
// YES when the frame we needed was not yet in `frameBuffer`.
@property (nonatomic, assign) BOOL bufferMiss;
// Cached visibility/animatability flag; see -updateShouldAnimate.
@property (nonatomic, assign) BOOL shouldAnimate;
// YES while rendering a progressively-downloaded (incremental) image.
@property (nonatomic, assign) BOOL isProgressive;
// Maximum number of frames to keep in `frameBuffer`.
@property (nonatomic, assign) NSUInteger maxBufferCount;
// Serial queue (max concurrency 1) used to decode frames off the main thread.
@property (nonatomic, strong) NSOperationQueue *fetchQueue;
// Semaphore backing the LOCK() macro; protects `frameBuffer`.
@property (nonatomic, strong) dispatch_semaphore_t lock;
// Scale factor of the current animated image, applied to the layer.
@property (nonatomic, assign) CGFloat animatedImageScale;
#if SD_MAC
@property (nonatomic, assign) CVDisplayLinkRef displayLink;
#else
@property (nonatomic, strong) CADisplayLink *displayLink;
#endif
// Layer-backed NSImageView use a subview of `NSImageViewContainerView` to do actual layer rendering. We use this layer instead of `self.layer` during animated image rendering.
#if SD_MAC
@property (nonatomic, strong, readonly) CALayer *imageViewLayer;
#endif

@end
@implementation SDAnimatedImageView
#if SD_UIKIT
// UIImageView already declares `animationRepeatCount`; we override its accessor below.
@dynamic animationRepeatCount;
#else
// On macOS the getter is implemented by hand (see -imageViewLayer).
@dynamic imageViewLayer;
#endif
#pragma mark - Initializers
#if SD_MAC
// Convenience factory matching NSImageView's API; sizes the view to the image.
+ (instancetype)imageViewWithImage:(NSImage *)image
{
    NSRect frame = NSMakeRect(0, 0, image.size.width, image.size.height);
    SDAnimatedImageView *imageView = [[SDAnimatedImageView alloc] initWithFrame:frame];
    [imageView setImage:image];
    return imageView;
}
#else
// -initWithImage: isn't documented as a designated initializer of UIImageView, but it actually seems to be.
// Using -initWithImage: doesn't call any of the other designated initializers.
- (instancetype)initWithImage:(UIImage *)image
{
    self = [super initWithImage:image];
    if (self) {
        [self commonInit];
    }
    return self;
}
// -initWithImage:highlightedImage: also isn't documented as a designated initializer of UIImageView, but it doesn't call any other designated initializers.
- (instancetype)initWithImage:(UIImage *)image highlightedImage:(UIImage *)highlightedImage
{
    self = [super initWithImage:image highlightedImage:highlightedImage];
    if (self) {
        [self commonInit];
    }
    return self;
}
#endif
// Designated initializer path for programmatic creation.
- (instancetype)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        [self commonInit];
    }
    return self;
}
// Designated initializer path for nib/storyboard creation.
- (instancetype)initWithCoder:(NSCoder *)aDecoder
{
    self = [super initWithCoder:aDecoder];
    if (self) {
        [self commonInit];
    }
    return self;
}
// Shared setup called from every initializer: configures layer-backing
// (macOS), the default run loop mode, the frame-buffer lock, and (iOS)
// memory-warning observation.
- (void)commonInit
{
#if SD_MAC
    self.wantsLayer = YES;
    // Default value from `NSImageView`
    self.layerContentsRedrawPolicy = NSViewLayerContentsRedrawOnSetNeedsDisplay;
    self.imageScaling = NSImageScaleProportionallyDown;
    self.imageAlignment = NSImageAlignCenter;
#endif
    self.runLoopMode = [[self class] defaultRunLoopMode];
    self.lock = dispatch_semaphore_create(1);
#if SD_UIKIT
    // Balanced by removeObserver: in -dealloc.
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveMemoryWarning:) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
#endif
}
// Fully reset all animation state, cancel in-flight frame decoding, and drop
// the frame buffer. Called when a non-progressive image is set.
- (void)resetAnimatedImage
{
    self.animatedImage = nil;
    self.totalFrameCount = 0;
    self.totalLoopCount = 0;
    self.currentFrame = nil;
    self.currentFrameIndex = 0;
    self.currentLoopCount = 0;
    self.currentTime = 0;
    self.bufferMiss = NO;
    self.shouldAnimate = NO;
    self.isProgressive = NO;
    self.maxBufferCount = 0;
    self.animatedImageScale = 1;
    [_fetchQueue cancelAllOperations];
    _fetchQueue = nil;
    LOCK({
        [_frameBuffer removeAllObjects];
        _frameBuffer = nil;
    });
}
// Partial reset used while an image is still downloading progressively:
// image-level counters are cleared but the playback position (frame index,
// loop count, time) and the decoded-frame buffer are preserved so rendering
// can resume where it left off.
- (void)resetProgressiveImage
{
    self.animatedImage = nil;
    self.totalFrameCount = 0;
    self.totalLoopCount = 0;
    // preserve current state
    self.shouldAnimate = NO;
    self.isProgressive = YES;
    self.maxBufferCount = 0;
    self.animatedImageScale = 1;
    // preserve buffer cache
}
#pragma mark - Accessors
#pragma mark Public
// Central entry point: decides between plain rendering, progressive
// (incremental) rendering, and full animated rendering, then primes the
// animation state accordingly.
- (void)setImage:(UIImage *)image
{
    if (self.image == image) {
        return;
    }
    
    // Detect progressive (incremental) decoding: the incoming image conforms
    // to SDAnimatedImage, is flagged incremental, and either there is no
    // previous data or the new data extends the same download.
    self.isProgressive = NO;
    if ([image conformsToProtocol:@protocol(SDAnimatedImage)] && image.sd_isIncremental) {
        NSData *currentData = [((UIImage<SDAnimatedImage> *)image) animatedImageData];
        if (currentData) {
            NSData *previousData;
            if ([self.image conformsToProtocol:@protocol(SDAnimatedImage)]) {
                previousData = [((UIImage<SDAnimatedImage> *)self.image) animatedImageData];
            }
            // Check whether to use progressive coding
            if (!previousData) {
                // If previous data is nil
                self.isProgressive = YES;
            } else if ([currentData isEqualToData:previousData]) {
                // If current data is equal to previous data
                self.isProgressive = YES;
            }
        }
    }
    
    if (self.isProgressive) {
        // Reset all value, but keep current state
        [self resetProgressiveImage];
    } else {
        // Stop animating
        [self stopAnimating];
        // Reset all value
        [self resetAnimatedImage];
    }
    
    // We need to call the super method to keep its behavior. This implicitly calls `setNeedsDisplay`, which we cannot avoid when using an animated image, so we call `setNeedsDisplay` again at the end.
    super.image = image;
    if ([image conformsToProtocol:@protocol(SDAnimatedImage)]) {
        NSUInteger animatedImageFrameCount = ((UIImage<SDAnimatedImage> *)image).animatedImageFrameCount;
        // Check the frame count
        if (animatedImageFrameCount <= 1) {
            return;
        }
        self.animatedImage = (UIImage<SDAnimatedImage> *)image;
        self.totalFrameCount = animatedImageFrameCount;
        // Get the current frame and loop count.
        self.totalLoopCount = self.animatedImage.animatedImageLoopCount;
        // Get the scale
        self.animatedImageScale = image.scale;
        if (!self.isProgressive) {
            // Seed the buffer with the poster frame so the first tick can render.
            self.currentFrame = image;
            LOCK({
                self.frameBuffer[@(self.currentFrameIndex)] = self.currentFrame;
            });
        }
        
        // Ensure disabled highlighting; it's not supported (see `-setHighlighted:`).
        super.highlighted = NO;
        // UIImageView seems to bypass some accessors when calculating its intrinsic content size, so this ensures its intrinsic content size comes from the animated image.
        [self invalidateIntrinsicContentSize];
        
        // Calculate max buffer size
        [self calculateMaxBufferCount];
        // Update should animate
        [self updateShouldAnimate];
        if (self.shouldAnimate) {
            [self startAnimating];
        }

        [self.layer setNeedsDisplay];
    }
}
// On iOS this forwards to UIImageView's property; on macOS we store it in
// our own ivar because NSImageView has no such property.
- (void)setAnimationRepeatCount:(NSInteger)animationRepeatCount
{
#if SD_MAC
    _animationRepeatCount = animationRepeatCount;
#else
    [super setAnimationRepeatCount:animationRepeatCount];
#endif
}
// Only NSDefaultRunLoopMode and NSRunLoopCommonModes are accepted; anything
// else asserts in debug builds and falls back to the platform default.
- (void)setRunLoopMode:(NSString *)runLoopMode
{
    if (![@[NSDefaultRunLoopMode, NSRunLoopCommonModes] containsObject:runLoopMode]) {
        NSAssert(NO, @"Invalid run loop mode: %@", runLoopMode);
        _runLoopMode = [[self class] defaultRunLoopMode];
    } else {
        _runLoopMode = runLoopMode;
    }
}
#pragma mark - Private
// Lazily-created serial queue used to decode frames off the main thread.
- (NSOperationQueue *)fetchQueue
{
    if (!_fetchQueue) {
        _fetchQueue = [[NSOperationQueue alloc] init];
        // Serial: at most one frame is being decoded at a time.
        _fetchQueue.maxConcurrentOperationCount = 1;
    }
    return _fetchQueue;
}
// Lazily-created decoded-frame cache. Callers must hold the LOCK() semaphore.
- (NSMutableDictionary<NSNumber *,UIImage *> *)frameBuffer
{
    if (!_frameBuffer) {
        _frameBuffer = [NSMutableDictionary dictionary];
    }
    return _frameBuffer;
}
#if SD_MAC
// Lazily-created CVDisplayLink for the main display; ticks via renderCallback.
// Returns NULL if the link cannot be created.
- (CVDisplayLinkRef)displayLink
{
    if (!_displayLink) {
        CGDirectDisplayID displayID = CGMainDisplayID();
        CVReturn error = CVDisplayLinkCreateWithCGDisplay(displayID, &_displayLink);
        if (error) {
            return NULL;
        }
        CVDisplayLinkSetOutputCallback(_displayLink, renderCallback, (__bridge void *)self);
    }
    return _displayLink;
}
#else
- (CADisplayLink *)displayLink
{
    if (!_displayLink) {
        // It is important to note the use of a weak proxy here to avoid a retain cycle. `-displayLinkWithTarget:selector:`
        // will retain its target until it is invalidated. We use a weak proxy so that the image view will get deallocated
        // independent of the display link's lifetime. Upon image view deallocation, we invalidate the display
        // link which will lead to the deallocation of both the display link and the weak proxy.
        SDWeakProxy *weakProxy = [SDWeakProxy proxyWithTarget:self];
        _displayLink = [CADisplayLink displayLinkWithTarget:weakProxy selector:@selector(displayDidRefresh:)];
        [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:self.runLoopMode];
    }
    return _displayLink;
}
#endif
#if SD_MAC
// NOTE(review): relies on NSImageView's private `_imageView` associated
// object to reach the layer that actually draws content — fragile across
// AppKit releases; confirm on each macOS SDK update.
- (CALayer *)imageViewLayer {
    NSView *imageView = objc_getAssociatedObject(self, NSSelectorFromString(@"_imageView"));
    return imageView.layer;
}
#endif
#pragma mark - Life Cycle
- (void)dealloc
{
    // Removes the display link from all run loop modes.
#if SD_MAC
    if (_displayLink) {
        CVDisplayLinkRelease(_displayLink);
        _displayLink = NULL;
    }
#else
    [_displayLink invalidate];
    _displayLink = nil;
    // Balances the observer added in -commonInit.
    [[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
#endif
}
// On memory pressure, drop every buffered frame except the one needed for the
// next render. The trim runs on the fetch queue so it serializes with decoding.
// NOTE(review): the block captures `self` strongly, extending the view's
// lifetime until the operation runs; `self.currentFrameIndex` is also read off
// the main thread here — presumably benign, but worth confirming.
- (void)didReceiveMemoryWarning:(NSNotification *)notification {
    [_fetchQueue cancelAllOperations];
    [_fetchQueue addOperationWithBlock:^{
        NSNumber *currentFrameIndex = @(self.currentFrameIndex);
        LOCK({
            NSArray *keys = self.frameBuffer.allKeys;
            // only keep the next frame for later rendering
            for (NSNumber * key in keys) {
                if (![key isEqualToNumber:currentFrameIndex]) {
                    [self.frameBuffer removeObjectForKey:key];
                }
            }
        });
    }];
}
#pragma mark - UIView Method Overrides
#pragma mark Observing View-Related Changes
// Each visibility-affecting override below recomputes the cached
// `shouldAnimate` flag and starts/stops the display link accordingly.
#if SD_MAC
- (void)viewDidMoveToSuperview
#else
- (void)didMoveToSuperview
#endif
{
#if SD_MAC
    [super viewDidMoveToSuperview];
#else
    [super didMoveToSuperview];
#endif
    [self updateShouldAnimate];
    if (self.shouldAnimate) {
        [self startAnimating];
    } else {
        [self stopAnimating];
    }
}
#if SD_MAC
- (void)viewDidMoveToWindow
#else
- (void)didMoveToWindow
#endif
{
#if SD_MAC
    [super viewDidMoveToWindow];
#else
    [super didMoveToWindow];
#endif
    [self updateShouldAnimate];
    if (self.shouldAnimate) {
        [self startAnimating];
    } else {
        [self stopAnimating];
    }
}
#if SD_MAC
- (void)setAlphaValue:(CGFloat)alphaValue
#else
- (void)setAlpha:(CGFloat)alpha
#endif
{
#if SD_MAC
    [super setAlphaValue:alphaValue];
#else
    [super setAlpha:alpha];
#endif
    [self updateShouldAnimate];
    if (self.shouldAnimate) {
        [self startAnimating];
    } else {
        [self stopAnimating];
    }
}
- (void)setHidden:(BOOL)hidden
{
    [super setHidden:hidden];
    [self updateShouldAnimate];
    if (self.shouldAnimate) {
        [self startAnimating];
    } else {
        [self stopAnimating];
    }
}
#pragma mark Auto Layout
- (CGSize)intrinsicContentSize
{
    // Default to let UIImageView handle the sizing of its image, and anything else it might consider.
    CGSize intrinsicContentSize = [super intrinsicContentSize];
    
    // If we have have an animated image, use its image size.
    // UIImageView's intrinsic content size seems to be the size of its image. The obvious approach, simply calling `-invalidateIntrinsicContentSize` when setting an animated image, results in UIImageView steadfastly returning `{UIViewNoIntrinsicMetric, UIViewNoIntrinsicMetric}` for its intrinsicContentSize.
    // (Perhaps UIImageView bypasses its `-image` getter in its implementation of `-intrinsicContentSize`, as `-image` is not called after calling `-invalidateIntrinsicContentSize`.)
    if (self.animatedImage) {
        intrinsicContentSize = self.image.size;
    }
    
    return intrinsicContentSize;
}
#pragma mark - UIImageView Method Overrides
#pragma mark Image Data
// When an animated image is set, animation is driven by our display link;
// otherwise fall back to UIImageView's built-in animationImages behavior.
- (void)startAnimating
{
    if (self.animatedImage) {
#if SD_MAC
        CVDisplayLinkStart(self.displayLink);
#else
        self.displayLink.paused = NO;
#endif
    } else {
#if SD_UIKIT
        [super startAnimating];
#endif
    }
}
- (void)stopAnimating
{
    if (self.animatedImage) {
        // Uses the ivar directly so stopping never lazily creates a display link.
#if SD_MAC
        CVDisplayLinkStop(_displayLink);
#else
        _displayLink.paused = YES;
#endif
    } else {
#if SD_UIKIT
        [super stopAnimating];
#endif
    }
}
// Reports the state of whichever animation mechanism is active.
- (BOOL)isAnimating
{
    BOOL isAnimating = NO;
    if (self.animatedImage) {
#if SD_MAC
        isAnimating = CVDisplayLinkIsRunning(self.displayLink);
#else
        isAnimating = !self.displayLink.isPaused;
#endif
    } else {
#if SD_UIKIT
        isAnimating = [super isAnimating];
#endif
    }
    return isAnimating;
}
#if SD_MAC
// macOS equivalent of start/stopAnimating: NSImageView's `animates` flag.
- (void)setAnimates:(BOOL)animates
{
    [super setAnimates:animates];
    if (animates) {
        [self startAnimating];
    } else {
        [self stopAnimating];
    }
}
#endif
#pragma mark Highlighted Image Unsupport
- (void)setHighlighted:(BOOL)highlighted
{
    // Highlighted image is unsupported for animated images, but implementing it breaks the image view when embedded in a UICollectionViewCell.
    if (!self.animatedImage) {
        [super setHighlighted:highlighted];
    }
}
#pragma mark - Private Methods
#pragma mark Animation
// Don't repeatedly check our window & superview in `-displayDidRefresh:` for performance reasons.
// Just update our cached value whenever the animated image or visibility (window, superview, hidden, alpha) is changed.
- (void)updateShouldAnimate
{
#if SD_MAC
    BOOL isVisible = self.window && self.superview && ![self isHidden] && self.alphaValue > 0.0 && self.animates;
#else
    BOOL isVisible = self.window && self.superview && ![self isHidden] && self.alpha > 0.0;
#endif
    self.shouldAnimate = self.animatedImage && self.totalFrameCount > 1 && isVisible;
}
// Display-link tick: advances playback time, swaps in the next decoded frame
// when its timestamp is reached, tracks loop count, and schedules background
// decoding of the next (or missed) frame. Runs on the main thread.
#if SD_MAC
- (void)displayDidRefresh:(CVDisplayLinkRef)displayLink duration:(NSTimeInterval)duration
#else
- (void)displayDidRefresh:(CADisplayLink *)displayLink
#endif
{
    // If for some reason a wild call makes it through when we shouldn't be animating, bail.
    // Early return!
    if (!self.shouldAnimate) {
        return;
    }
#if SD_UIKIT
    NSTimeInterval duration = displayLink.duration * displayLink.frameInterval;
#endif
    NSUInteger totalFrameCount = self.totalFrameCount;
    NSUInteger currentFrameIndex = self.currentFrameIndex;
    NSUInteger nextFrameIndex = (currentFrameIndex + 1) % totalFrameCount;
    
    // Check if we have the frame buffer firstly to improve performance
    if (!self.bufferMiss) {
        // Then check if timestamp is reached
        self.currentTime += duration;
        NSTimeInterval currentDuration = [self.animatedImage animatedImageDurationAtIndex:currentFrameIndex];
        if (self.currentTime < currentDuration) {
            // Current frame timestamp not reached, return
            return;
        }
        self.currentTime -= currentDuration;
        NSTimeInterval nextDuration = [self.animatedImage animatedImageDurationAtIndex:nextFrameIndex];
        if (self.currentTime > nextDuration) {
            // Do not skip frame
            self.currentTime = nextDuration;
        }
    }
    
    // Update the current frame
    UIImage *currentFrame;
    LOCK({
        currentFrame = self.frameBuffer[@(currentFrameIndex)];
    });
    BOOL bufferFull = NO;
    if (currentFrame) {
        LOCK({
            // Remove the frame buffer if need
            if (self.frameBuffer.count > self.maxBufferCount) {
                self.frameBuffer[@(currentFrameIndex)] = nil;
            }
            // Check whether we can stop fetch
            if (self.frameBuffer.count == totalFrameCount) {
                bufferFull = YES;
            }
        });
        self.currentFrame = currentFrame;
        self.currentFrameIndex = nextFrameIndex;
        self.bufferMiss = NO;
        [self.layer setNeedsDisplay];
    } else {
        self.bufferMiss = YES;
    }
    
    // Update the loop count when last frame rendered
    if (nextFrameIndex == 0 && !self.bufferMiss) {
        // Progressive image reach the current last frame index. Keep the state and stop animating. Wait for later restart
        if (self.isProgressive) {
            // Recovery the current frame index and removed frame buffer (See above)
            self.currentFrameIndex = currentFrameIndex;
            LOCK({
                self.frameBuffer[@(currentFrameIndex)] = self.currentFrame;
            });
            [self stopAnimating];
            return;
        }
        // Update the loop count
        self.currentLoopCount++;
        // if reached the max loop count, stop animating, 0 means loop indefinitely
        NSUInteger maxLoopCount = self.shouldCustomLoopCount ? self.animationRepeatCount : self.totalLoopCount;
        if (maxLoopCount != 0 && (self.currentLoopCount >= maxLoopCount)) {
            [self stopAnimating];
            return;
        }
    }
    
    // Check if we should prefetch next frame or current frame
    NSUInteger fetchFrameIndex;
    if (self.bufferMiss) {
        // When buffer miss, means the decode speed is slower than render speed, we fetch current miss frame
        fetchFrameIndex = currentFrameIndex;
    } else {
        // Or, most cases, the decode speed is faster than render speed, we fetch next frame
        fetchFrameIndex = nextFrameIndex;
    }
    
    if (!bufferFull && self.fetchQueue.operationCount == 0) {
        // Prefetch next frame in background queue
        UIImage<SDAnimatedImage> *animatedImage = self.animatedImage;
        NSOperation *operation = [NSBlockOperation blockOperationWithBlock:^{
            UIImage *fetchFrame = [animatedImage animatedImageFrameAtIndex:fetchFrameIndex];
            LOCK({
                self.frameBuffer[@(fetchFrameIndex)] = fetchFrame;
            });
        }];
        [self.fetchQueue addOperation:operation];
    }
}
+ (NSString *)defaultRunLoopMode
{
    // Key off `activeProcessorCount` (as opposed to `processorCount`) since the system could shut down cores in certain situations.
    return [NSProcessInfo processInfo].activeProcessorCount > 1 ? NSRunLoopCommonModes : NSDefaultRunLoopMode;
}
#pragma mark - CALayerDelegate (Informal)
#pragma mark Providing the Layer's Content
// Pushes the current decoded frame straight into the layer's contents,
// bypassing UIImageView's normal drawing.
- (void)displayLayer:(CALayer *)layer
{
    if (_currentFrame) {
        layer.contentsScale = self.animatedImageScale;
        layer.contents = (__bridge id)_currentFrame.CGImage;
    }
}
#if SD_MAC
// macOS layer update: render our frame if we have one, otherwise let
// NSImageView draw as usual.
- (void)updateLayer
{
    if (_currentFrame) {
        [self displayLayer:self.imageViewLayer];
    } else {
        [super updateLayer];
    }
}
#endif
#pragma mark - Util
// Derive how many decoded frames to keep buffered: explicit `maxBufferSize`
// if set, otherwise a heuristic slice of total/free memory, divided by the
// per-frame byte cost. Always at least 1.
- (void)calculateMaxBufferCount {
    NSUInteger bytes = CGImageGetBytesPerRow(self.currentFrame.CGImage) * CGImageGetHeight(self.currentFrame.CGImage);
    // Fallback cost when no frame is available yet.
    if (bytes == 0) bytes = 1024;
    
    NSUInteger max = 0;
    if (self.maxBufferSize > 0) {
        max = self.maxBufferSize;
    } else {
        // Calculate based on current memory, these factors are by experience
        NSUInteger total = SDDeviceTotalMemory();
        NSUInteger free = SDDeviceFreeMemory();
        max = MIN(total * 0.2, free * 0.6);
    }
    
    NSUInteger maxBufferCount = (double)max / (double)bytes;
    if (!maxBufferCount) {
        // At least 1 frame
        maxBufferCount = 1;
    }
    
    self.maxBufferCount = maxBufferCount;
}
@end
#if SD_MAC
// CVDisplayLink output callback (macOS). Computes the refresh duration from
// the output timestamp and forwards the tick to the image view on the main
// queue, since CVDisplayLink fires on a private high-priority thread.
static CVReturn renderCallback(CVDisplayLinkRef displayLink, const CVTimeStamp *inNow, const CVTimeStamp *inOutputTime, CVOptionFlags flagsIn, CVOptionFlags *flagsOut, void *displayLinkContext) {
    // Calculate refresh duration
    NSTimeInterval duration = (double)inOutputTime->videoRefreshPeriod / ((double)inOutputTime->videoTimeScale * inOutputTime->rateScalar);
    // CVDisplayLink callback is not on main queue
    dispatch_async(dispatch_get_main_queue(), ^{
        [(__bridge SDAnimatedImageView *)displayLinkContext displayDidRefresh:displayLink duration:duration];
    });
    return kCVReturnSuccess;
}
#endif
#endif

View File

@ -40,6 +40,14 @@ typedef NS_OPTIONS(NSUInteger, SDImageCacheOptions) {
* Use this flag to transform them anyway.
*/
SDImageCacheTransformAnimatedImage = 1 << 2,
/**
     * By default, we decode the animated image. This flag can force decoding of the first frame only, producing a static image.
*/
SDImageCacheDecodeFirstFrameOnly = 1 << 3,
/**
     * By default, for `SDAnimatedImage`, we decode the animated image frames during rendering to reduce memory usage. This flag actually triggers `preloadAllAnimatedImageFrames = YES` after the image is loaded from the disk cache.
*/
SDImageCachePreloadAllFrames = 1 << 4
};
typedef void(^SDCacheQueryCompletedBlock)(UIImage * _Nullable image, NSData * _Nullable data, SDImageCacheType cacheType);

View File

@ -9,8 +9,11 @@
#import "SDImageCache.h"
#import <CommonCrypto/CommonDigest.h>
#import "NSImage+Additions.h"
#import "UIImage+WebCache.h"
#import "SDWebImageCodersManager.h"
#import "SDWebImageTransformer.h"
#import "SDWebImageCoderHelper.h"
#import "SDAnimatedImage.h"
#define LOCK(lock) dispatch_semaphore_wait(lock, DISPATCH_TIME_FOREVER);
#define UNLOCK(lock) dispatch_semaphore_signal(lock);
@ -294,12 +297,12 @@ FOUNDATION_STATIC_INLINE NSUInteger SDCacheCostForImage(UIImage *image) {
if (!data && image) {
// If we do not have any data to detect image format, check whether it contains alpha channel to use PNG or JPEG format
SDImageFormat format;
if (SDCGImageRefContainsAlpha(image.CGImage)) {
if ([SDWebImageCoderHelper imageRefContainsAlpha:image.CGImage]) {
format = SDImageFormatPNG;
} else {
format = SDImageFormatJPEG;
}
data = [[SDWebImageCodersManager sharedManager] encodedDataWithImage:image format:format];
data = [[SDWebImageCodersManager sharedManager] encodedDataWithImage:image format:format options:nil];
}
[self _storeImageDataToDisk:data forKey:key error:&writeError];
}
@ -475,11 +478,42 @@ FOUNDATION_STATIC_INLINE NSUInteger SDCacheCostForImage(UIImage *image) {
}
- (nullable UIImage *)diskImageForKey:(nullable NSString *)key data:(nullable NSData *)data {
return [self diskImageForKey:key data:data options:0 context:nil];
}
- (nullable UIImage *)diskImageForKey:(nullable NSString *)key data:(nullable NSData *)data options:(SDImageCacheOptions)options context:(SDWebImageContext *)context {
if (data) {
UIImage *image = [[SDWebImageCodersManager sharedManager] decodedImageWithData:data];
image = [self scaledImageForKey:key image:image];
if (self.config.shouldDecompressImages) {
image = [[SDWebImageCodersManager sharedManager] decompressedImageWithImage:image data:&data options:@{SDWebImageCoderScaleDownLargeImagesKey: @(NO)}];
UIImage *image;
BOOL decodeFirstFrame = options & SDImageCacheDecodeFirstFrameOnly;
if (!decodeFirstFrame) {
// check whether we should use `SDAnimatedImage`
if ([context valueForKey:SDWebImageContextAnimatedImageClass]) {
Class animatedImageClass = [context valueForKey:SDWebImageContextAnimatedImageClass];
if ([animatedImageClass isSubclassOfClass:[UIImage class]] && [animatedImageClass conformsToProtocol:@protocol(SDAnimatedImage)]) {
CGFloat scale = SDImageScaleForKey(key);
image = [[animatedImageClass alloc] initWithData:data scale:scale];
if (options & SDImageCachePreloadAllFrames && [image respondsToSelector:@selector(preloadAllFrames)]) {
[((id<SDAnimatedImage>)image) preloadAllFrames];
}
}
}
}
if (!image) {
image = [[SDWebImageCodersManager sharedManager] decodedImageWithData:data options:@{SDWebImageCoderDecodeFirstFrameOnly : @(decodeFirstFrame)}];
image = [self scaledImageForKey:key image:image];
}
BOOL shouldDecode = YES;
if ([image conformsToProtocol:@protocol(SDAnimatedImage)]) {
// `SDAnimatedImage` do not decode
shouldDecode = NO;
} else if (image.sd_isAnimated) {
// animated image do not decode
shouldDecode = NO;
}
if (shouldDecode) {
if (self.config.shouldDecompressImages) {
image = [SDWebImageCoderHelper decodedImageWithImage:image];
}
}
return image;
} else {
@ -541,7 +575,7 @@ FOUNDATION_STATIC_INLINE NSUInteger SDCacheCostForImage(UIImage *image) {
cacheKey = SDTransformedKeyForKey(key, transformerKey);
}
// decode image data only if in-memory cache missed
diskImage = [self diskImageForKey:cacheKey data:diskData];
diskImage = [self diskImageForKey:cacheKey data:diskData options:options context:context];
if (diskImage && self.config.shouldCacheImagesInMemory) {
NSUInteger cost = SDCacheCostForImage(diskImage);
[self.memCache setObject:diskImage forKey:cacheKey cost:cost];

View File

@ -10,30 +10,22 @@
#import "SDWebImageCompat.h"
#import "NSData+ImageContentType.h"
/**
A Boolean value indicating whether to scale down large images during decompressing. (NSNumber)
*/
FOUNDATION_EXPORT NSString * _Nonnull const SDWebImageCoderScaleDownLargeImagesKey;
typedef NSString * SDWebImageCoderOption NS_STRING_ENUM;
typedef NSDictionary<SDWebImageCoderOption, id> SDWebImageCoderOptions;
/**
Return the shared device-dependent RGB color space created with CGColorSpaceCreateDeviceRGB.
@return The device-dependent RGB color space
A Boolean value indicating whether to decode the first frame only for animated image during decoding. (NSNumber)
*/
CG_EXTERN CGColorSpaceRef _Nonnull SDCGColorSpaceGetDeviceRGB(void);
FOUNDATION_EXPORT SDWebImageCoderOption _Nonnull const SDWebImageCoderDecodeFirstFrameOnly;
/**
Check whether CGImageRef contains alpha channel.
@param imageRef The CGImageRef
@return Return YES if CGImageRef contains alpha channel, otherwise return NO
A double value between 0.0-1.0 indicating the encode compression quality to produce the image data. If not provide, use 1.0. (NSNumber)
*/
CG_EXTERN BOOL SDCGImageRefContainsAlpha(_Nullable CGImageRef imageRef);
FOUNDATION_EXPORT SDWebImageCoderOption _Nonnull const SDWebImageCoderEncodeCompressionQuality;
/**
This is the image coder protocol to provide custom image decoding/encoding.
These methods are all required to implement.
You do not need to specify image scale during decoding because we may scale image later.
@note Pay attention that these methods are not called from main queue.
*/
@protocol SDWebImageCoder <NSObject>
@ -52,21 +44,11 @@ CG_EXTERN BOOL SDCGImageRefContainsAlpha(_Nullable CGImageRef imageRef);
Decode the image data to image.
@param data The image data to be decoded
@param options A dictionary containing any decoding options. Pass @{SDWebImageCoderDecodeFirstFrameOnly: @(YES)} to decode the first frame only.
@return The decoded image from data
*/
- (nullable UIImage *)decodedImageWithData:(nullable NSData *)data;
/**
Decompress the image with original image and image data.
@param image The original image to be decompressed
@param data The pointer to original image data. The pointer itself is nonnull but image data can be null. This data will set to cache if needed. If you do not need to modify data at the sametime, ignore this param.
@param optionsDict A dictionary containing any decompressing options. Pass {SDWebImageCoderScaleDownLargeImagesKey: @(YES)} to scale down large images
@return The decompressed image
*/
- (nullable UIImage *)decompressedImageWithImage:(nullable UIImage *)image
data:(NSData * _Nullable * _Nonnull)data
options:(nullable NSDictionary<NSString*, NSObject*>*)optionsDict;
- (nullable UIImage *)decodedImageWithData:(nullable NSData *)data
options:(nullable SDWebImageCoderOptions *)options;
#pragma mark - Encoding
@ -83,9 +65,12 @@ CG_EXTERN BOOL SDCGImageRefContainsAlpha(_Nullable CGImageRef imageRef);
@param image The image to be encoded
@param format The image format to encode, you should note `SDImageFormatUndefined` format is also possible
@param options A dictionary containing any encoding options. Pass @{SDWebImageCoderEncodeCompressionQuality: @(1)} to specify compression quality.
@return The encoded image data
*/
- (nullable NSData *)encodedDataWithImage:(nullable UIImage *)image format:(SDImageFormat)format;
- (nullable NSData *)encodedDataWithImage:(nullable UIImage *)image
format:(SDImageFormat)format
options:(nullable SDWebImageCoderOptions *)options;
@end
@ -104,16 +89,94 @@ CG_EXTERN BOOL SDCGImageRefContainsAlpha(_Nullable CGImageRef imageRef);
@param data The image data so we can look at it
@return YES if this coder can decode the data, NO otherwise
*/
- (BOOL)canIncrementallyDecodeFromData:(nullable NSData *)data;
- (BOOL)canIncrementalDecodeFromData:(nullable NSData *)data;
/**
Incremental decode the image data to image.
Because incremental decoding need to keep the decoded context, we will alloc a new instance with the same class for each download operation to avoid conflicts
This init method should not return nil
@return A new instance to do incremental decoding for the specify image format
*/
- (nonnull instancetype)initIncremental;
/**
Update the incremental decoding when new image data available
@param data The image data has been downloaded so far
@param finished Whether the download has finished
@warning because incremental decoding need to keep the decoded context, we will alloc a new instance with the same class for each download operation to avoid conflicts
@return The decoded image from data
*/
- (nullable UIImage *)incrementallyDecodedImageWithData:(nullable NSData *)data finished:(BOOL)finished;
- (void)updateIncrementalData:(nullable NSData *)data finished:(BOOL)finished;
/**
Incremental decode the current image data to image.
@param options A dictionary containing any decoding options.
@return The decoded image from current data
*/
- (nullable UIImage *)incrementalDecodedImageWithOptions:(nullable SDWebImageCoderOptions *)options;
@end
/**
This is the animated image protocol to provide the basic function for animated image rendering. It's adopted by `SDAnimatedImage` and `SDWebImageAnimatedCoder`
*/
@protocol SDAnimatedImageProvider <NSObject>
@required
/**
The original animated image data for current image. If current image is not an animated format, return nil.
We may use this method to grab back the original image data if need, such as NSCoding or compare.
@return The animated image data
*/
- (nullable NSData *)animatedImageData;
/**
Total animated frame count.
If the frame count is less than 1, the methods below will be ignored.
@return Total animated frame count.
*/
- (NSUInteger)animatedImageFrameCount;
/**
Animation loop count, 0 means infinite looping.
@return Animation loop count
*/
- (NSUInteger)animatedImageLoopCount;
/**
Returns the frame image from a specified index.
@note The index may be random if one image was set to different imageViews; keep it re-entrant. (It's not recommended to store the images in an array because it's memory-consuming)
@param index Frame index (zero based).
@return Frame's image
*/
- (nullable UIImage *)animatedImageFrameAtIndex:(NSUInteger)index;
/**
Returns the frame's duration at a specified index.
@note The index may be random if one image was set to different imageViews; keep it re-entrant. (It's recommended to store the durations in an array because it's not memory-consuming)
@param index Frame index (zero based).
@return Frame's duration
*/
- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index;
@end
/**
This is the animated image coder protocol for custom animated image class like `SDAnimatedImage`. Though it inherits from `SDWebImageCoder`, we currently only use the method `canDecodeFromData:` to detect the proper coder for the specified animated image format.
*/
@protocol SDWebImageAnimatedCoder <SDWebImageCoder, SDAnimatedImageProvider>
@required
/**
Because animated image coder should keep the original data, we will alloc a new instance with the same class for the specify animated image data
The init method should return nil if it can't decode the specify animated image data to produce any frame.
After the instance created, we may call methods in `SDAnimatedImage` to produce animated image frame.
@param data The animated image data to be decode
@return A new instance to do animated decoding for specify image data
*/
- (nullable instancetype)initWithAnimatedImageData:(nullable NSData *)data;
@end

View File

@ -8,24 +8,5 @@
#import "SDWebImageCoder.h"
NSString * const SDWebImageCoderScaleDownLargeImagesKey = @"scaleDownLargeImages";
CGColorSpaceRef SDCGColorSpaceGetDeviceRGB(void) {
static CGColorSpaceRef colorSpace;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
colorSpace = CGColorSpaceCreateDeviceRGB();
});
return colorSpace;
}
BOOL SDCGImageRefContainsAlpha(CGImageRef imageRef) {
if (!imageRef) {
return NO;
}
CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef);
BOOL hasAlpha = !(alphaInfo == kCGImageAlphaNone ||
alphaInfo == kCGImageAlphaNoneSkipFirst ||
alphaInfo == kCGImageAlphaNoneSkipLast);
return hasAlpha;
}
SDWebImageCoderOption const SDWebImageCoderDecodeFirstFrameOnly = @"decodeFirstFrameOnly";
SDWebImageCoderOption const SDWebImageCoderEncodeCompressionQuality = @"encodeCompressionQuality";

View File

@ -30,7 +30,58 @@
@param animatedImage A animated image. If it's not animated, return nil
@return The frames array
*/
+ (NSArray<SDWebImageFrame *> * _Nullable)framesFromAnimatedImage:(UIImage * _Nullable)animatedImage;
+ (NSArray<SDWebImageFrame *> * _Nullable)framesFromAnimatedImage:(UIImage * _Nullable)animatedImage NS_SWIFT_NAME(frames(from:));
/**
Return the shared device-dependent RGB color space.
On iOS, it's created with deviceRGB (if available, use sRGB).
On macOS, it's from the screen colorspace (if failed, use deviceRGB)
Because it's shared, you should not retain or release this object.
@return The device-dependent RGB color space
*/
+ (CGColorSpaceRef _Nonnull)colorSpaceGetDeviceRGB CF_RETURNS_NOT_RETAINED;
/**
Return the color space of the CGImage
@param imageRef The CGImage
@return The color space of CGImage, or if not supported, return the device-dependent RGB color space
*/
+ (CGColorSpaceRef _Nonnull)imageRefGetColorSpace:(_Nonnull CGImageRef)imageRef CF_RETURNS_NOT_RETAINED;
/**
Check whether CGImage contains alpha channel.
@param imageRef The CGImage
@return Return YES if CGImage contains alpha channel, otherwise return NO
*/
+ (BOOL)imageRefContainsAlpha:(_Nonnull CGImageRef)imageRef;
/**
Create a decoded image by the provided image. This follows the Create Rule and you are responsible for calling release after usage.
It will detect whether image contains alpha channel, then create a new bitmap context with the same size of image, and draw it. This can ensure that the image do not need extra decoding after been set to the imageView.
@param imageRef The CGImage
@return A new created decoded image
*/
+ (CGImageRef _Nullable)imageRefCreateDecoded:(_Nonnull CGImageRef)imageRef CF_RETURNS_RETAINED;
/**
Return the decoded image by the provided image. This one unlike `imageRefCreateDecoded:`, will not decode the image which contains alpha channel or animated image
@param image The image to be decoded
@return The decoded image
*/
+ (UIImage * _Nullable)decodedImageWithImage:(UIImage * _Nullable)image;
/**
Return the decoded and probably scaled down image by the provided image. If the image is larger than the limit size, will try to scale down. Or just works as `decodedImageWithImage:`
@param image The image to be decoded and scaled down
@param bytes The limit bytes size. Provide 0 to use the build-in limit.
@return The decoded and probably scaled down image
*/
+ (UIImage * _Nullable)decodedAndScaledDownImageWithImage:(UIImage * _Nullable)image limitBytes:(NSUInteger)bytes;
#if SD_UIKIT || SD_WATCH
/**
@ -39,7 +90,8 @@
@param exifOrientation EXIF orientation
@return iOS orientation
*/
+ (UIImageOrientation)imageOrientationFromEXIFOrientation:(NSInteger)exifOrientation;
+ (UIImageOrientation)imageOrientationFromEXIFOrientation:(NSInteger)exifOrientation NS_SWIFT_NAME(imageOrientation(from:));
/**
Convert an iOS orientation to an EXIF image orientation.

View File

@ -13,6 +13,34 @@
#import <ImageIO/ImageIO.h>
#import "SDAnimatedImageRep.h"
#if SD_UIKIT || SD_WATCH
static const size_t kBytesPerPixel = 4;
static const size_t kBitsPerComponent = 8;
/*
* Defines the maximum size in MB of the decoded image when the flag `SDWebImageScaleDownLargeImages` is set
* Suggested value for iPad1 and iPhone 3GS: 60.
* Suggested value for iPad2 and iPhone 4: 120.
* Suggested value for iPhone 3G and iPod 2 and earlier devices: 30.
*/
static const CGFloat kDestImageSizeMB = 60.f;
/*
* Defines the maximum size in MB of a tile used to decode image when the flag `SDWebImageScaleDownLargeImages` is set
* Suggested value for iPad1 and iPhone 3GS: 20.
* Suggested value for iPad2 and iPhone 4: 40.
* Suggested value for iPhone 3G and iPod 2 and earlier devices: 10.
*/
static const CGFloat kSourceImageTileSizeMB = 20.f;
static const CGFloat kBytesPerMB = 1024.0f * 1024.0f;
static const CGFloat kPixelsPerMB = kBytesPerMB / kBytesPerPixel;
static const CGFloat kDestTotalPixels = kDestImageSizeMB * kPixelsPerMB;
static const CGFloat kTileTotalPixels = kSourceImageTileSizeMB * kPixelsPerMB;
static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to overlap the seems where tiles meet.
#endif
@implementation SDWebImageCoderHelper
+ (UIImage *)animatedImageWithFrames:(NSArray<SDWebImageFrame *> *)frames {
@ -149,7 +177,7 @@
// NSBitmapImageRep need to manually change frame. "Good taste" API
[bitmapRep setProperty:NSImageCurrentFrame withValue:@(i)];
float frameDuration = [[bitmapRep valueForProperty:NSImageCurrentFrameDuration] floatValue];
NSImage *frameImage = [[NSImage alloc] initWithCGImage:bitmapRep.CGImage size:CGSizeZero];
NSImage *frameImage = [[NSImage alloc] initWithCGImage:bitmapRep.CGImage size:NSZeroSize];
SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:frameImage duration:frameDuration];
[frames addObject:frame];
}
@ -159,6 +187,232 @@
return frames;
}
/**
 Return the shared device-dependent RGB color space.
 On macOS, prefer the main screen's color space when available; otherwise fall
 back to a process-wide cached color space: sRGB on iOS 9+, deviceRGB elsewhere.
 The cached color space is created once and never released, so callers must not
 release the returned object (CF_RETURNS_NOT_RETAINED in the header).
 */
+ (CGColorSpaceRef)colorSpaceGetDeviceRGB {
#if SD_MAC
    // Prefer the screen's color space so rendered bitmaps match the display.
    CGColorSpaceRef screenColorSpace = NSScreen.mainScreen.colorSpace.CGColorSpace;
    if (screenColorSpace) {
        return screenColorSpace;
    }
#endif
    static CGColorSpaceRef colorSpace;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunguarded-availability"
        BOOL shouldUseSRGB = NO;
#if SD_UIKIT
        // kCGColorSpaceSRGB is gated on the OS major version at runtime (iOS 9+).
        NSProcessInfo *processInfo = [NSProcessInfo processInfo];
        shouldUseSRGB = processInfo.operatingSystemVersion.majorVersion >= 9;
#endif
        if (shouldUseSRGB) {
            // This is the colorspace iOS/tvOS devices use; combined with the right bitmapInfo, even without decode, it can still avoid an extra CA::Render::copy_image (marked as `Color Copied Images` in Instruments)
            colorSpace = CGColorSpaceCreateWithName(kCGColorSpaceSRGB);
        } else {
            colorSpace = CGColorSpaceCreateDeviceRGB();
        }
#pragma clang diagnostic pop
    });
    return colorSpace;
}
/**
 Return the color space of the given CGImage. When the image's color space model
 is one this helper treats as unsupported (unknown, monochrome, CMYK or indexed),
 substitute the shared device-dependent RGB color space instead.
 */
+ (CGColorSpaceRef)imageRefGetColorSpace:(CGImageRef)imageRef {
    // current color space model of the source image
    CGColorSpaceModel imageColorSpaceModel = CGColorSpaceGetModel(CGImageGetColorSpace(imageRef));
    CGColorSpaceRef colorspaceRef = CGImageGetColorSpace(imageRef);
    BOOL unsupportedColorSpace = (imageColorSpaceModel == kCGColorSpaceModelUnknown ||
                                  imageColorSpaceModel == kCGColorSpaceModelMonochrome ||
                                  imageColorSpaceModel == kCGColorSpaceModelCMYK ||
                                  imageColorSpaceModel == kCGColorSpaceModelIndexed);
    if (unsupportedColorSpace) {
        colorspaceRef = [self colorSpaceGetDeviceRGB];
    }
    return colorspaceRef;
}
/**
 Return YES if the CGImage carries an alpha channel, i.e. its alpha info is not
 one of the "none" variants. A NULL image is treated as having no alpha.
 */
+ (BOOL)imageRefContainsAlpha:(CGImageRef)imageRef {
    if (!imageRef) {
        return NO;
    }
    CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef);
    BOOL hasAlpha = !(alphaInfo == kCGImageAlphaNone ||
                      alphaInfo == kCGImageAlphaNoneSkipFirst ||
                      alphaInfo == kCGImageAlphaNoneSkipLast);
    return hasAlpha;
}
/**
 Create a decoded (force-rendered) copy of the CGImage by drawing it into a new
 bitmap context of the same size. Follows the Create Rule: the caller owns the
 returned CGImage and must release it. Returns NULL for a NULL image, a
 zero-sized image, or when the bitmap context cannot be created.
 */
+ (CGImageRef)imageRefCreateDecoded:(CGImageRef)imageRef {
    if (!imageRef) {
        return NULL;
    }
    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    if (width == 0 || height == 0) return NULL;
    CGRect rect = CGRectMake(0, 0, width, height);
    BOOL hasAlpha = [self imageRefContainsAlpha:imageRef];
    // iOS prefers BGRA8888 (premultiplied) or BGRX8888 bitmapInfo for screen rendering, which is the same as `UIGraphicsBeginImageContext()` or `- [CALayer drawInContext:]`
    // Though you can use any supported bitmapInfo (see: https://developer.apple.com/library/content/documentation/GraphicsImaging/Conceptual/drawingwithquartz2d/dq_context/dq_context.html#//apple_ref/doc/uid/TP30001066-CH203-BCIBHHBB ) and let Core Graphics reorder it when you call `CGContextDrawImage`
    // But since our built-in coders use this bitmapInfo, this can have a little performance benefit
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
    bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
    CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, [self colorSpaceGetDeviceRGB], bitmapInfo);
    if (!context) {
        return NULL;
    }
    // Drawing into the context forces full decode of the source image data.
    CGContextDrawImage(context, rect, imageRef);
    CGImageRef newImageRef = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    return newImageRef;
}
/**
 Return a decoded copy of the image so no lazy decompression is needed when it
 is first rendered. On macOS this is a no-op (returns the image unchanged).
 Returns the original image when decoding is skipped (nil image, animated image,
 or image with alpha — see +shouldDecodeImage:) or when decoding fails.
 */
+ (UIImage *)decodedImageWithImage:(UIImage *)image {
#if SD_MAC
    return image;
#else
    if (![self shouldDecodeImage:image]) {
        return image;
    }
    CGImageRef imageRef = [self imageRefCreateDecoded:image.CGImage];
    if (!imageRef) {
        return image;
    }
    // Preserve the original scale and orientation on the decoded copy.
    UIImage *decodedImage = [[UIImage alloc] initWithCGImage:imageRef scale:image.scale orientation:image.imageOrientation];
    CGImageRelease(imageRef);
    return decodedImage;
#endif
}
/**
 Return a decoded image, scaled down tile-by-tile when the source exceeds the
 pixel budget implied by `bytes` (0 means use the built-in kDestTotalPixels
 limit). On macOS this is a no-op. Falls back to the original image whenever
 the scale-down precondition fails, context creation fails, or the final image
 cannot be produced. Non-scaling cases delegate to +decodedImageWithImage:.
 */
+ (UIImage *)decodedAndScaledDownImageWithImage:(UIImage *)image limitBytes:(NSUInteger)bytes {
#if SD_MAC
    return image;
#else
    if (![self shouldDecodeImage:image]) {
        return image;
    }
    if (![self shouldScaleDownImage:image limitBytes:bytes]) {
        return [self decodedImageWithImage:image];
    }
    CGFloat destTotalPixels;
    CGFloat tileTotalPixels;
    if (bytes > 0) {
        // Derive the pixel budgets from the caller-provided byte limit.
        destTotalPixels = bytes / kBytesPerPixel;
        tileTotalPixels = destTotalPixels / 3;
    } else {
        destTotalPixels = kDestTotalPixels;
        tileTotalPixels = kTileTotalPixels;
    }
    CGContextRef destContext;
    // autorelease the bitmap context and all vars to help system to free memory when there are memory warning.
    // on iOS7, do not forget to call [[SDImageCache sharedImageCache] clearMemory];
    @autoreleasepool {
        CGImageRef sourceImageRef = image.CGImage;
        CGSize sourceResolution = CGSizeZero;
        sourceResolution.width = CGImageGetWidth(sourceImageRef);
        sourceResolution.height = CGImageGetHeight(sourceImageRef);
        float sourceTotalPixels = sourceResolution.width * sourceResolution.height;
        // Determine the scale ratio to apply to the input image
        // that results in an output image of the defined size.
        // see kDestImageSizeMB, and how it relates to destTotalPixels.
        float imageScale = destTotalPixels / sourceTotalPixels;
        CGSize destResolution = CGSizeZero;
        destResolution.width = (int)(sourceResolution.width*imageScale);
        destResolution.height = (int)(sourceResolution.height*imageScale);
        // current color space
        CGColorSpaceRef colorspaceRef = [self imageRefGetColorSpace:sourceImageRef];
        // kCGImageAlphaNone is not supported in CGBitmapContextCreate.
        // Since the original image here has no alpha info, use kCGImageAlphaNoneSkipFirst
        // to create bitmap graphics contexts without alpha info.
        destContext = CGBitmapContextCreate(NULL,
                                            destResolution.width,
                                            destResolution.height,
                                            kBitsPerComponent,
                                            0,
                                            colorspaceRef,
                                            kCGBitmapByteOrder32Host|kCGImageAlphaNoneSkipFirst);
        if (destContext == NULL) {
            return image;
        }
        CGContextSetInterpolationQuality(destContext, kCGInterpolationHigh);
        // Now define the size of the rectangle to be used for the
        // incremental blits from the input image to the output image.
        // we use a source tile width equal to the width of the source
        // image due to the way that iOS retrieves image data from disk.
        // iOS must decode an image from disk in full width 'bands', even
        // if current graphics context is clipped to a subrect within that
        // band. Therefore we fully utilize all of the pixel data that results
        // from a decoding operation by anchoring our tile size to the full
        // width of the input image.
        CGRect sourceTile = CGRectZero;
        sourceTile.size.width = sourceResolution.width;
        // The source tile height is dynamic. Since we specified the size
        // of the source tile in MB, see how many rows of pixels high it
        // can be given the input image width.
        sourceTile.size.height = (int)(tileTotalPixels / sourceTile.size.width );
        sourceTile.origin.x = 0.0f;
        // The output tile is the same proportions as the input tile, but
        // scaled to image scale.
        CGRect destTile;
        destTile.size.width = destResolution.width;
        destTile.size.height = sourceTile.size.height * imageScale;
        destTile.origin.x = 0.0f;
        // The source seem overlap is proportionate to the destination seem overlap.
        // this is the amount of pixels to overlap each tile as we assemble the output image.
        float sourceSeemOverlap = (int)((kDestSeemOverlap/destResolution.height)*sourceResolution.height);
        CGImageRef sourceTileImageRef;
        // calculate the number of read/write operations required to assemble the
        // output image.
        int iterations = (int)( sourceResolution.height / sourceTile.size.height );
        // If tile height doesn't divide the image height evenly, add another iteration
        // to account for the remaining pixels.
        int remainder = (int)sourceResolution.height % (int)sourceTile.size.height;
        if(remainder) {
            iterations++;
        }
        // Add seem overlaps to the tiles, but save the original tile height for y coordinate calculations.
        float sourceTileHeightMinusOverlap = sourceTile.size.height;
        sourceTile.size.height += sourceSeemOverlap;
        destTile.size.height += kDestSeemOverlap;
        for( int y = 0; y < iterations; ++y ) {
            // Per-tile pool: each blit creates temporaries we want freed promptly.
            @autoreleasepool {
                sourceTile.origin.y = y * sourceTileHeightMinusOverlap + sourceSeemOverlap;
                destTile.origin.y = destResolution.height - (( y + 1 ) * sourceTileHeightMinusOverlap * imageScale + kDestSeemOverlap);
                sourceTileImageRef = CGImageCreateWithImageInRect( sourceImageRef, sourceTile );
                if( y == iterations - 1 && remainder ) {
                    // Last (partial) tile: shrink the destination tile to match the
                    // actual cropped source height and shift it down accordingly.
                    float dify = destTile.size.height;
                    destTile.size.height = CGImageGetHeight( sourceTileImageRef ) * imageScale;
                    dify -= destTile.size.height;
                    destTile.origin.y += dify;
                }
                CGContextDrawImage( destContext, destTile, sourceTileImageRef );
                CGImageRelease( sourceTileImageRef );
            }
        }
        CGImageRef destImageRef = CGBitmapContextCreateImage(destContext);
        CGContextRelease(destContext);
        if (destImageRef == NULL) {
            return image;
        }
        UIImage *destImage = [[UIImage alloc] initWithCGImage:destImageRef scale:image.scale orientation:image.imageOrientation];
        CGImageRelease(destImageRef);
        if (destImage == nil) {
            return image;
        }
        return destImage;
    }
#endif
}
#if SD_UIKIT || SD_WATCH
// Convert an EXIF image orientation to an iOS one.
+ (UIImageOrientation)imageOrientationFromEXIFOrientation:(NSInteger)exifOrientation {
@ -233,6 +487,56 @@
#pragma mark - Helper Fuction
#if SD_UIKIT || SD_WATCH
/**
 Whether the image is worth force-decoding.
 Returns NO for nil images, animated images (non-nil `images` array), and
 images that contain an alpha channel; YES otherwise.
 */
+ (BOOL)shouldDecodeImage:(nullable UIImage *)image {
    // Prevent "CGBitmapContextCreateImage: invalid context 0x0" error
    if (image == nil) {
        return NO;
    }
    // do not decode animated images
    if (image.images != nil) {
        return NO;
    }
    CGImageRef imageRef = image.CGImage;
    BOOL hasAlpha = [self imageRefContainsAlpha:imageRef];
    // do not decode images with alpha
    if (hasAlpha) {
        return NO;
    }
    return YES;
}
/**
 Whether the image is large enough to need scaling down.
 @param bytes Destination byte limit; 0 means use the built-in kDestTotalPixels budget.
 Returns NO when the source has no pixels, when the destination budget itself is
 too small to bother (<= kPixelsPerMB), or when the source already fits within
 the destination pixel budget (scale ratio >= 1).
 */
+ (BOOL)shouldScaleDownImage:(nonnull UIImage *)image limitBytes:(NSUInteger)bytes {
    BOOL shouldScaleDown = YES;
    CGImageRef sourceImageRef = image.CGImage;
    CGSize sourceResolution = CGSizeZero;
    sourceResolution.width = CGImageGetWidth(sourceImageRef);
    sourceResolution.height = CGImageGetHeight(sourceImageRef);
    float sourceTotalPixels = sourceResolution.width * sourceResolution.height;
    if (sourceTotalPixels <= 0) {
        return NO;
    }
    CGFloat destTotalPixels;
    if (bytes > 0) {
        destTotalPixels = bytes / kBytesPerPixel;
    } else {
        destTotalPixels = kDestTotalPixels;
    }
    if (destTotalPixels <= kPixelsPerMB) {
        // Too small to scale down
        return NO;
    }
    // A ratio below 1 means the source exceeds the destination budget.
    float imageScale = destTotalPixels / sourceTotalPixels;
    if (imageScale < 1) {
        shouldScaleDown = YES;
    } else {
        shouldScaleDown = NO;
    }
    return shouldScaleDown;
}
static NSUInteger gcd(NSUInteger a, NSUInteger b) {
NSUInteger c;
while (a != 0) {

View File

@ -12,6 +12,8 @@
#ifdef SD_WEBP
#import "SDWebImageWebPCoder.h"
#endif
#import "NSImage+Additions.h"
#import "UIImage+WebCache.h"
@interface SDWebImageCodersManager ()
@ -34,7 +36,7 @@
- (instancetype)init {
if (self = [super init]) {
// initialize with default coders
_mutableCoders = [@[[SDWebImageImageIOCoder sharedCoder]] mutableCopy];
_mutableCoders = [@[[SDWebImageImageIOCoder sharedCoder], [SDWebImageGIFCoder sharedCoder]] mutableCopy];
#ifdef SD_WEBP
[_mutableCoders addObject:[SDWebImageWebPCoder sharedCoder]];
#endif
@ -92,39 +94,32 @@
return NO;
}
- (UIImage *)decodedImageWithData:(NSData *)data {
- (UIImage *)decodedImageWithData:(NSData *)data options:(nullable SDWebImageCoderOptions *)options {
if (!data) {
return nil;
}
BOOL decodeFirstFrame = [[options valueForKey:SDWebImageCoderDecodeFirstFrameOnly] boolValue];
UIImage *image;
for (id<SDWebImageCoder> coder in self.coders) {
if ([coder canDecodeFromData:data]) {
return [coder decodedImageWithData:data];
image = [coder decodedImageWithData:data options:options];
break;
}
}
return nil;
if (decodeFirstFrame && image.images.count > 0) {
image = image.images.firstObject;
}
return image;
}
- (UIImage *)decompressedImageWithImage:(UIImage *)image
data:(NSData *__autoreleasing _Nullable *)data
options:(nullable NSDictionary<NSString*, NSObject*>*)optionsDict {
if (!image) {
return nil;
}
for (id<SDWebImageCoder> coder in self.coders) {
if ([coder canDecodeFromData:*data]) {
return [coder decompressedImageWithImage:image data:data options:optionsDict];
}
}
return nil;
}
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format {
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format options:(nullable SDWebImageCoderOptions *)options {
if (!image) {
return nil;
}
for (id<SDWebImageCoder> coder in self.coders) {
if ([coder canEncodeToFormat:format]) {
return [coder encodedDataWithImage:image format:format];
return [coder encodedDataWithImage:image format:format options:nil];
}
}
return nil;

View File

@ -83,8 +83,6 @@
#define NS_OPTIONS(_type, _name) enum _name : _type _name; enum _name : _type
#endif
FOUNDATION_EXPORT UIImage * _Nullable SDScaledImageForKey(NSString * _Nullable key, UIImage * _Nullable image);
FOUNDATION_EXPORT NSString *const _Nonnull SDWebImageErrorDomain;
#ifndef dispatch_queue_async_safe

View File

@ -7,8 +7,6 @@
*/
#import "SDWebImageCompat.h"
#import "UIImage+WebCache.h"
#import "NSImage+Additions.h"
#if !__has_feature(objc_arc)
#error SDWebImage is ARC only. Either turn on ARC for the project or use -fobjc-arc flag
@ -18,58 +16,4 @@
#error SDWebImage need ARC for dispatch object
#endif
inline UIImage *SDScaledImageForKey(NSString * _Nullable key, UIImage * _Nullable image) {
if (!image) {
return nil;
}
#if SD_UIKIT || SD_WATCH
if ((image.images).count > 0) {
NSMutableArray<UIImage *> *scaledImages = [NSMutableArray array];
for (UIImage *tempImage in image.images) {
[scaledImages addObject:SDScaledImageForKey(key, tempImage)];
}
UIImage *animatedImage = [UIImage animatedImageWithImages:scaledImages duration:image.duration];
if (animatedImage) {
animatedImage.sd_imageLoopCount = image.sd_imageLoopCount;
}
return animatedImage;
} else {
#endif
#if SD_WATCH
if ([[WKInterfaceDevice currentDevice] respondsToSelector:@selector(screenScale)]) {
#elif SD_UIKIT
if ([[UIScreen mainScreen] respondsToSelector:@selector(scale)]) {
#elif SD_MAC
if ([[NSScreen mainScreen] respondsToSelector:@selector(backingScaleFactor)]) {
#endif
CGFloat scale = 1;
if (key.length >= 8) {
NSRange range = [key rangeOfString:@"@2x."];
if (range.location != NSNotFound) {
scale = 2.0;
}
range = [key rangeOfString:@"@3x."];
if (range.location != NSNotFound) {
scale = 3.0;
}
}
if (scale > 1) {
#if SD_UIKIT || SD_WATCH
UIImage *scaledImage = [[UIImage alloc] initWithCGImage:image.CGImage scale:scale orientation:image.imageOrientation];
#else
UIImage *scaledImage = [[UIImage alloc] initWithCGImage:image.CGImage scale:scale];
#endif
image = scaledImage;
}
}
return image;
#if SD_UIKIT || SD_WATCH
}
#endif
}
NSString *const SDWebImageErrorDomain = @"SDWebImageErrorDomain";

View File

@ -6,16 +6,39 @@
* file that was distributed with this source code.
*/
#import <Foundation/Foundation.h>
#import "SDWebImageCompat.h"
typedef void(^SDWebImageNoParamsBlock)(void);
typedef NSString * SDWebImageContextOption NS_STRING_ENUM;
typedef NSDictionary<SDWebImageContextOption, id> SDWebImageContext;
#pragma mark - Image scale
/**
Return the image scale from the specify key, supports file name and url key
@param key The image cache key
@return The scale factor for image
*/
FOUNDATION_EXPORT CGFloat SDImageScaleForKey(NSString * _Nullable key);
/**
Scale the image with the scale factor from the specify key. If no need to scale, return the original image
This only works for `UIImage`(UIKit) or `NSImage`(AppKit).
@param key The image cache key
@param image The image
@return The scaled image
*/
FOUNDATION_EXPORT UIImage * _Nullable SDScaledImageForKey(NSString * _Nullable key, UIImage * _Nullable image);
#pragma mark - Context option
/**
A Dispatch group to maintain setImageBlock and completionBlock. This is used for custom setImageBlock advanced usage, such like perform background task but need to guarantee the completion block is called after setImageBlock. (dispatch_group_t)
*/
FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextSetImageGroup;
/**
A SDWebImageManager instance to control the image download and cache process using in UIImageView+WebCache category and likes. If not provided, use the shared manager (SDWebImageManager)
*/
@ -25,3 +48,9 @@ FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextCustom
A id<SDWebImageTransformer> instance which conforms SDWebImageTransformer protocol. It's used for image transform after the image load finished and store the transformed image to cache. If you provide one, it will ignore the `transformer` in manager and use provided one instead. (id<SDWebImageTransformer>)
*/
FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextCustomTransformer;
/**
A Class object which the instance is a `UIImage/NSImage` subclass and adopt `SDAnimatedImage` protocol. We will call `initWithData:scale:` to create the instance (or `initWithAnimatedCoder:scale` when using progressive download) . If the instance create failed, fallback to normal `UIImage/NSImage`.
This can be used to improve animated images rendering performance (especially memory usage on big animated images) with `SDAnimatedImageView` (Class).
*/
FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextAnimatedImageClass;

View File

@ -7,7 +7,99 @@
*/
#import "SDWebImageDefine.h"
#import "UIImage+WebCache.h"
#import "NSImage+Additions.h"
#pragma mark - Image scale
/// Scale factors probed by SDImageScaleForKey (matches the @2x/@3x suffix convention).
static inline NSArray<NSNumber *> * _Nonnull SDImageScaleFactors() {
    return @[@2, @3];
}
/**
 Return the image scale factor encoded in a cache key (file name or URL).
 Looks for "@2x."/"@3x." suffixes and, for http(s) URL keys, also their
 percent-encoded form "%402x."/"%403x.". Returns 1 when the key is nil, too
 short (< 8 characters), carries no scale suffix, or the platform reports no
 screen-scale support.
 */
inline CGFloat SDImageScaleForKey(NSString * _Nullable key) {
    CGFloat scale = 1;
    if (!key) {
        return scale;
    }
    // Check if target OS support scale
#if SD_WATCH
    if ([[WKInterfaceDevice currentDevice] respondsToSelector:@selector(screenScale)])
#elif SD_UIKIT
    if ([[UIScreen mainScreen] respondsToSelector:@selector(scale)])
#elif SD_MAC
    if ([[NSScreen mainScreen] respondsToSelector:@selector(backingScaleFactor)])
#endif
    {
        // Shortest match like "a@2x.png" is 8 characters; anything shorter can't carry a suffix.
        if (key.length >= 8) {
            // Fast check
            BOOL isURL = [key hasPrefix:@"http://"] || [key hasPrefix:@"https://"];
            for (NSNumber *scaleFactor in SDImageScaleFactors()) {
                // @2x. for file name and normal url
                NSString *fileScale = [NSString stringWithFormat:@"@%@x.", scaleFactor];
                if ([key containsString:fileScale]) {
                    scale = scaleFactor.doubleValue;
                    return scale;
                }
                if (isURL) {
                    // %402x. for url encode
                    NSString *urlScale = [NSString stringWithFormat:@"%%40%@x.", scaleFactor];
                    if ([key containsString:urlScale]) {
                        scale = scaleFactor.doubleValue;
                        return scale;
                    }
                }
            }
        }
    }
    return scale;
}
/**
 Return `image` rescaled according to the scale factor parsed from `key`
 (see SDImageScaleForKey). Returns nil for a nil image, and the original image
 unchanged when no rescale is needed (parsed scale <= 1). Animated and static
 images take different paths per platform.
 */
inline UIImage *SDScaledImageForKey(NSString * _Nullable key, UIImage * _Nullable image) {
    if (!image) {
        return nil;
    }
    CGFloat scale = SDImageScaleForKey(key);
    if (scale > 1) {
        UIImage *scaledImage;
        if (image.sd_isAnimated) {
            UIImage *animatedImage;
#if SD_UIKIT || SD_WATCH
            // `UIAnimatedImage` images share the same size and scale.
            // Rebuild each frame at the parsed scale, preserving duration and loop count.
            NSMutableArray<UIImage *> *scaledImages = [NSMutableArray array];
            for (UIImage *tempImage in image.images) {
                UIImage *tempScaledImage = [[UIImage alloc] initWithCGImage:tempImage.CGImage scale:scale orientation:tempImage.imageOrientation];
                [scaledImages addObject:tempScaledImage];
            }
            animatedImage = [UIImage animatedImageWithImages:scaledImages duration:image.duration];
            animatedImage.sd_imageLoopCount = image.sd_imageLoopCount;
#else
            // Animated GIF for `NSImage` need to grab `NSBitmapImageRep`
            // (NSImage has no scale; points-size shrink plays the role of scaling)
            NSSize size = NSMakeSize(image.size.width / scale, image.size.height / scale);
            animatedImage = [[NSImage alloc] initWithSize:size];
            NSBitmapImageRep *bitmapImageRep = image.bitmapImageRep;
            [animatedImage addRepresentation:bitmapImageRep];
#endif
            scaledImage = animatedImage;
        } else {
#if SD_UIKIT || SD_WATCH
            scaledImage = [[UIImage alloc] initWithCGImage:image.CGImage scale:scale orientation:image.imageOrientation];
#else
            scaledImage = [[NSImage alloc] initWithCGImage:image.CGImage size:NSZeroSize];
#endif
        }
        return scaledImage;
    }
    return image;
}
#pragma mark - Context option
SDWebImageContextOption const SDWebImageContextSetImageGroup = @"setImageGroup";
SDWebImageContextOption const SDWebImageContextCustomManager = @"customManager";
SDWebImageContextOption const SDWebImageContextCustomTransformer = @"customTransformer";
SDWebImageContextOption const SDWebImageContextAnimatedImageClass = @"animatedImageClass";

View File

@ -61,6 +61,16 @@ typedef NS_OPTIONS(NSUInteger, SDWebImageDownloaderOptions) {
* Scale down the image
*/
SDWebImageDownloaderScaleDownLargeImages = 1 << 8,
/**
* By default, we decode the animated image. This flag can force decoding of the first frame only, producing a static image.
*/
SDWebImageDownloaderDecodeFirstFrameOnly = 1 << 9,
/**
* By default, for `SDAnimatedImage`, we decode the animated image frame during rendering to reduce memory usage. This flag actually trigger `preloadAllAnimatedImageFrames = YES` after image load from network
*/
SDWebImageDownloaderPreloadAllFrames = 1 << 10
};
typedef NS_ENUM(NSInteger, SDWebImageDownloaderExecutionOrder) {

View File

@ -9,7 +9,10 @@
#import "SDWebImageDownloaderOperation.h"
#import "SDWebImageManager.h"
#import "NSImage+Additions.h"
#import "UIImage+WebCache.h"
#import "SDWebImageCodersManager.h"
#import "SDWebImageCoderHelper.h"
#import "SDAnimatedImage.h"
#define LOCK(lock) dispatch_semaphore_wait(lock, DISPATCH_TIME_FOREVER);
#define UNLOCK(lock) dispatch_semaphore_signal(lock);
@ -42,6 +45,7 @@ typedef NSMutableDictionary<NSString *, id> SDCallbacksDictionary;
@property (assign, nonatomic, getter = isFinished) BOOL finished;
@property (strong, nonatomic, nullable) NSMutableData *imageData;
@property (copy, nonatomic, nullable) NSData *cachedData; // for `SDWebImageDownloaderIgnoreCachedResponse`
@property (copy, nonatomic, nullable) NSString *cacheKey;
@property (assign, nonatomic, readwrite) NSInteger expectedSize;
@property (strong, nonatomic, nullable, readwrite) NSURLResponse *response;
@ -347,22 +351,44 @@ didReceiveResponse:(NSURLResponse *)response
// We need to create a new instance for progressive decoding to avoid conflicts
for (id<SDWebImageCoder>coder in [SDWebImageCodersManager sharedManager].coders) {
if ([coder conformsToProtocol:@protocol(SDWebImageProgressiveCoder)] &&
[((id<SDWebImageProgressiveCoder>)coder) canIncrementallyDecodeFromData:imageData]) {
self.progressiveCoder = [[[coder class] alloc] init];
[((id<SDWebImageProgressiveCoder>)coder) canIncrementalDecodeFromData:imageData]) {
self.progressiveCoder = [[[coder class] alloc] initIncremental];
break;
}
}
}
[self.progressiveCoder updateIncrementalData:imageData finished:finished];
// progressive decode the image in coder queue
dispatch_async(self.coderQueue, ^{
UIImage *image = [self.progressiveCoder incrementallyDecodedImageWithData:imageData finished:finished];
if (image) {
NSString *key = [[SDWebImageManager sharedManager] cacheKeyForURL:self.request.URL];
image = [self scaledImageForKey:key image:image];
if (self.shouldDecompressImages) {
image = [[SDWebImageCodersManager sharedManager] decompressedImageWithImage:image data:&imageData options:@{SDWebImageCoderScaleDownLargeImagesKey: @(NO)}];
// check whether we should use `SDAnimatedImage`
UIImage *image;
if ([self.context valueForKey:SDWebImageContextAnimatedImageClass]) {
Class animatedImageClass = [self.context valueForKey:SDWebImageContextAnimatedImageClass];
if ([animatedImageClass isSubclassOfClass:[UIImage class]] && [animatedImageClass conformsToProtocol:@protocol(SDAnimatedImage)] && [self.progressiveCoder conformsToProtocol:@protocol(SDWebImageAnimatedCoder)]) {
CGFloat scale = SDImageScaleForKey(self.cacheKey);
image = [[animatedImageClass alloc] initWithAnimatedCoder:(id<SDWebImageAnimatedCoder>)self.progressiveCoder scale:scale];
}
}
if (!image) {
BOOL decodeFirstFrame = self.options & SDWebImageDownloaderDecodeFirstFrameOnly;
image = [self.progressiveCoder incrementalDecodedImageWithOptions:@{SDWebImageCoderDecodeFirstFrameOnly : @(decodeFirstFrame)}];
image = [self scaledImageForKey:self.cacheKey image:image];
}
if (image) {
BOOL shouldDecode = self.shouldDecompressImages;
if ([image conformsToProtocol:@protocol(SDAnimatedImage)]) {
// `SDAnimatedImage` do not decode
shouldDecode = NO;
} else if (image.sd_isAnimated) {
// animated image do not decode
shouldDecode = NO;
}
if (shouldDecode) {
image = [SDWebImageCoderHelper decodedImageWithImage:image];
}
// mark the image as progressive (completionBlock one are not mark as progressive)
image.sd_isIncremental = YES;
// We do not keep the progressive decoding image even when `finished`=YES. Because they are for view rendering but not take full function from downloader options. And some coders implementation may not keep consistent between progressive decoding and normal decoding.
@ -427,27 +453,41 @@ didReceiveResponse:(NSURLResponse *)response
} else {
// decode the image in coder queue
dispatch_async(self.coderQueue, ^{
UIImage *image = [[SDWebImageCodersManager sharedManager] decodedImageWithData:imageData];
NSString *key = [[SDWebImageManager sharedManager] cacheKeyForURL:self.request.URL];
image = [self scaledImageForKey:key image:image];
BOOL shouldDecode = YES;
// Do not force decoding animated GIFs and WebPs
if (image.images) {
shouldDecode = NO;
} else {
#ifdef SD_WEBP
SDImageFormat imageFormat = [NSData sd_imageFormatForImageData:imageData];
if (imageFormat == SDImageFormatWebP) {
shouldDecode = NO;
BOOL decodeFirstFrame = self.options & SDWebImageDownloaderDecodeFirstFrameOnly;
UIImage *image;
if (!decodeFirstFrame) {
// check whether we should use `SDAnimatedImage`
if ([self.context valueForKey:SDWebImageContextAnimatedImageClass]) {
Class animatedImageClass = [self.context valueForKey:SDWebImageContextAnimatedImageClass];
if ([animatedImageClass isSubclassOfClass:[UIImage class]] && [animatedImageClass conformsToProtocol:@protocol(SDAnimatedImage)]) {
CGFloat scale = SDImageScaleForKey(self.cacheKey);
image = [[animatedImageClass alloc] initWithData:imageData scale:scale];
if (self.options & SDWebImageDownloaderPreloadAllFrames && [image respondsToSelector:@selector(preloadAllFrames)]) {
[((id<SDAnimatedImage>)image) preloadAllFrames];
}
}
}
#endif
}
if (!image) {
image = [[SDWebImageCodersManager sharedManager] decodedImageWithData:imageData options:@{SDWebImageCoderDecodeFirstFrameOnly : @(decodeFirstFrame)}];
image = [self scaledImageForKey:self.cacheKey image:image];
}
BOOL shouldDecode = self.shouldDecompressImages;
if ([image conformsToProtocol:@protocol(SDAnimatedImage)]) {
// `SDAnimatedImage` do not decode
shouldDecode = NO;
} else if (image.sd_isAnimated) {
// animated image do not decode
shouldDecode = NO;
}
if (shouldDecode) {
if (self.shouldDecompressImages) {
BOOL shouldScaleDown = self.options & SDWebImageDownloaderScaleDownLargeImages;
image = [[SDWebImageCodersManager sharedManager] decompressedImageWithImage:image data:&imageData options:@{SDWebImageCoderScaleDownLargeImagesKey: @(shouldScaleDown)}];
BOOL shouldScaleDown = self.options & SDWebImageDownloaderScaleDownLargeImages;
if (shouldScaleDown) {
image = [SDWebImageCoderHelper decodedAndScaledDownImageWithImage:image limitBytes:0];
} else {
image = [SDWebImageCoderHelper decodedImageWithImage:image];
}
}
CGSize imageSize = image.size;
@ -500,6 +540,13 @@ didReceiveResponse:(NSURLResponse *)response
}
#pragma mark Helper methods
// Lazily computed cache key for this download, derived from the request URL
// via the shared manager. Memoized so repeated decode callbacks (progressive
// and final) do not re-query the manager's cache-key filter each time.
- (NSString *)cacheKey {
    if (!_cacheKey) {
        _cacheKey = [[SDWebImageManager sharedManager] cacheKeyForURL:self.request.URL];
    }
    return _cacheKey;
}
// Thin wrapper over the global SDScaledImageForKey helper.
// NOTE(review): presumably adjusts the image's scale based on the key
// (e.g. an @2x/@3x suffix) — confirm against SDScaledImageForKey's definition.
// Nil-safe on both arguments.
- (nullable UIImage *)scaledImageForKey:(nullable NSString *)key image:(nullable UIImage *)image {
    return SDScaledImageForKey(key, image);
}

View File

@ -12,11 +12,10 @@
/**
Built in coder using ImageIO that supports GIF encoding/decoding
@note `SDWebImageIOCoder` supports GIF but only as static (will use the 1st frame).
@note Use `SDWebImageGIFCoder` for fully animated GIFs - less performant than `FLAnimatedImage`
@note If you decide to make all `UIImageView`(including `FLAnimatedImageView`) instance support GIF. You should add this coder to `SDWebImageCodersManager` and make sure that it has a higher priority than `SDWebImageIOCoder`
@note The recommended approach for animated GIFs is using `FLAnimatedImage`. It's more performant than `UIImageView` for GIF displaying
@note Use `SDWebImageGIFCoder` for fully animated GIFs. For `UIImageView`, it will produce animated `UIImage`(`NSImage` on macOS) for rendering. For `SDAnimatedImageView`, it will use `SDAnimatedImage` for rendering.
@note The recommended approach for animated GIFs is using `SDAnimatedImage` with `SDAnimatedImageView`. It's more performant than `UIImageView` for GIF displaying(especially on memory usage)
*/
@interface SDWebImageGIFCoder : NSObject <SDWebImageCoder>
@interface SDWebImageGIFCoder : NSObject <SDWebImageProgressiveCoder, SDWebImageAnimatedCoder>
@property (nonatomic, class, readonly, nonnull) SDWebImageGIFCoder *sharedCoder;

View File

@ -14,7 +14,45 @@
#import "SDWebImageCoderHelper.h"
#import "SDAnimatedImageRep.h"
@implementation SDWebImageGIFCoder
// Lightweight value object describing one GIF frame discovered while
// scanning the image source; cached in `_frames` for fast duration lookups.
@interface SDGIFCoderFrame : NSObject

@property (nonatomic, assign) NSUInteger index; // Frame index (zero based)
@property (nonatomic, assign) NSTimeInterval duration; // Frame duration in seconds

@end

@implementation SDGIFCoderFrame
@end
@implementation SDWebImageGIFCoder {
size_t _width, _height;
CGImageSourceRef _imageSource;
NSData *_imageData;
NSUInteger _loopCount;
NSUInteger _frameCount;
NSArray<SDGIFCoderFrame *> *_frames;
BOOL _finished;
}
// Releases the Image/IO source (a CF object, not managed by ARC) and, on
// UIKit platforms, unregisters the memory-warning observer added by the
// initializers so no notification is delivered to a deallocated object.
- (void)dealloc
{
    if (_imageSource) {
        CFRelease(_imageSource);
        _imageSource = NULL;
    }
#if SD_UIKIT
    [[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
#endif
}
// On memory pressure, drop Image/IO's cached decode for every known frame.
// Frames are simply re-decoded on demand the next time they are requested.
- (void)didReceiveMemoryWarning:(NSNotification *)notification
{
    if (!_imageSource) {
        return;
    }
    for (size_t frameIndex = 0; frameIndex < _frameCount; frameIndex++) {
        CGImageSourceRemoveCacheAtIndex(_imageSource, frameIndex);
    }
}
+ (instancetype)sharedCoder {
static SDWebImageGIFCoder *coder;
@ -30,7 +68,7 @@
return ([NSData sd_imageFormatForImageData:data] == SDImageFormatGIF);
}
- (UIImage *)decodedImageWithData:(NSData *)data {
- (UIImage *)decodedImageWithData:(NSData *)data options:(nullable SDWebImageCoderOptions *)options {
if (!data) {
return nil;
}
@ -50,7 +88,8 @@
UIImage *animatedImage;
if (count <= 1) {
BOOL decodeFirstFrame = [options[SDWebImageCoderDecodeFirstFrameOnly] boolValue];
if (decodeFirstFrame || count <= 1) {
animatedImage = [[UIImage alloc] initWithData:data];
} else {
NSMutableArray<SDWebImageFrame *> *frames = [NSMutableArray array];
@ -69,15 +108,7 @@
[frames addObject:frame];
}
NSUInteger loopCount = 1;
NSDictionary *imageProperties = (__bridge_transfer NSDictionary *)CGImageSourceCopyProperties(source, nil);
NSDictionary *gifProperties = [imageProperties valueForKey:(__bridge_transfer NSString *)kCGImagePropertyGIFDictionary];
if (gifProperties) {
NSNumber *gifLoopCount = [gifProperties valueForKey:(__bridge_transfer NSString *)kCGImagePropertyGIFLoopCount];
if (gifLoopCount != nil) {
loopCount = gifLoopCount.unsignedIntegerValue;
}
}
NSUInteger loopCount = [self sd_imageLoopCountWithSource:source];
animatedImage = [SDWebImageCoderHelper animatedImageWithFrames:frames];
animatedImage.sd_imageLoopCount = loopCount;
@ -89,6 +120,19 @@
#endif
}
// Returns the GIF loop count from the image source's container-level
// properties. Defaults to 1 when the GIF dictionary or loop-count entry is
// absent. (A stored loop count of 0 conventionally means "loop forever".)
- (NSUInteger)sd_imageLoopCountWithSource:(CGImageSourceRef)source {
    NSUInteger loopCount = 1;
    // CGImageSourceCopyProperties follows the Copy rule, so hand ownership to ARC.
    NSDictionary *imageProperties = (__bridge_transfer NSDictionary *)CGImageSourceCopyProperties(source, nil);
    // The property keys are constant CFStrings we do not own: use a plain
    // __bridge cast. (__bridge_transfer here would make ARC release an object
    // it never owned — an over-release, harmless only because the constants
    // are immortal.)
    NSDictionary *gifProperties = imageProperties[(__bridge NSString *)kCGImagePropertyGIFDictionary];
    NSNumber *gifLoopCount = gifProperties[(__bridge NSString *)kCGImagePropertyGIFLoopCount];
    if (gifLoopCount != nil) {
        loopCount = gifLoopCount.unsignedIntegerValue;
    }
    return loopCount;
}
- (float)sd_frameDurationAtIndex:(NSUInteger)index source:(CGImageSourceRef)source {
float frameDuration = 0.1f;
CFDictionaryRef cfFrameProperties = CGImageSourceCopyPropertiesAtIndex(source, index, nil);
@ -121,10 +165,68 @@
return frameDuration;
}
- (UIImage *)decompressedImageWithImage:(UIImage *)image
data:(NSData *__autoreleasing _Nullable *)data
options:(nullable NSDictionary<NSString*, NSObject*>*)optionsDict {
// GIF do not decompress
#pragma mark - Progressive Decode
// Progressive (incremental) decoding is offered for GIF payloads only.
- (BOOL)canIncrementalDecodeFromData:(NSData *)data {
    SDImageFormat detectedFormat = [NSData sd_imageFormatForImageData:data];
    return detectedFormat == SDImageFormatGIF;
}
// Initializer for progressive decoding: creates an empty incremental
// Image/IO source that `updateIncrementalData:finished:` will feed, and
// registers for memory warnings on UIKit platforms.
- (instancetype)initIncremental {
    self = [super init];
    if (self) {
        // Let Image/IO cache decoded frames; it balances CPU vs. memory itself.
        // kCGImageSourceShouldCache is a constant CFString we do not own, so a
        // plain __bridge cast is correct (__bridge_transfer would over-release).
        NSDictionary *creationOptions = @{(__bridge NSString *)kCGImageSourceShouldCache : @(YES)};
        _imageSource = CGImageSourceCreateIncremental((__bridge CFDictionaryRef)creationOptions);
#if SD_UIKIT
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveMemoryWarning:) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
#endif
    }
    return self;
}
// Feeds the bytes received so far into the incremental Image/IO source.
// Per CGImageSourceUpdateData's contract, `data` must contain ALL bytes
// downloaded so far, not just the newly arrived chunk.
- (void)updateIncrementalData:(NSData *)data finished:(BOOL)finished {
    if (_finished) {
        // Once marked finished, ignore any further (redundant) updates.
        return;
    }
    _imageData = data;
    _finished = finished;
    // The following code is from http://www.cocoaintheshell.com/2011/05/progressive-images-download-imageio/
    // Thanks to the author @Nyx0uf
    // Update the data source, we must pass ALL the data, not just the new bytes
    CGImageSourceUpdateData(_imageSource, (__bridge CFDataRef)data, finished);
    if (_width + _height == 0) {
        // Pixel size unknown so far — try to read it from frame 0's properties
        // (they become available once enough header bytes have arrived).
        CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(_imageSource, 0, NULL);
        if (properties) {
            CFTypeRef val = CFDictionaryGetValue(properties, kCGImagePropertyPixelHeight);
            if (val) CFNumberGetValue(val, kCFNumberLongType, &_height);
            val = CFDictionaryGetValue(properties, kCGImagePropertyPixelWidth);
            if (val) CFNumberGetValue(val, kCFNumberLongType, &_width);
            CFRelease(properties);
        }
    }
    // Re-scan on every update: for an animated image arriving progressively,
    // the frame count and per-frame durations may change as bytes arrive.
    [self scanAndCheckFramesValidWithImageSource:_imageSource];
}
// Produces a best-effort still image (frame 0) from the bytes received so
// far. Returns nil until the pixel dimensions are known, or if Image/IO
// cannot yet create the partial frame.
// NOTE(review): `options` is unused in this implementation — confirm whether
// SDWebImageCoderDecodeFirstFrameOnly should be honored here.
- (UIImage *)incrementalDecodedImageWithOptions:(SDWebImageCoderOptions *)options {
    UIImage *image;
    if (_width + _height > 0) {
        // Create the image
        CGImageRef partialImageRef = CGImageSourceCreateImageAtIndex(_imageSource, 0, NULL);
        if (partialImageRef) {
#if SD_UIKIT || SD_WATCH
            image = [[UIImage alloc] initWithCGImage:partialImageRef];
#elif SD_MAC
            image = [[UIImage alloc] initWithCGImage:partialImageRef size:NSZeroSize];
#endif
            CGImageRelease(partialImageRef);
        }
    }
    return image;
}
@ -133,7 +235,7 @@
return (format == SDImageFormatGIF);
}
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format {
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format options:(nullable SDWebImageCoderOptions *)options {
if (!image) {
return nil;
}
@ -152,14 +254,21 @@
// Handle failure.
return nil;
}
NSMutableDictionary *properties = [NSMutableDictionary dictionary];
double compressionQuality = 1;
if ([options valueForKey:SDWebImageCoderEncodeCompressionQuality]) {
compressionQuality = [[options valueForKey:SDWebImageCoderEncodeCompressionQuality] doubleValue];
}
[properties setValue:@(compressionQuality) forKey:(__bridge_transfer NSString *)kCGImageDestinationLossyCompressionQuality];
if (frames.count == 0) {
// for static single GIF images
CGImageDestinationAddImage(imageDestination, image.CGImage, nil);
CGImageDestinationAddImage(imageDestination, image.CGImage, (__bridge CFDictionaryRef)properties);
} else {
// for animated GIF images
NSUInteger loopCount = image.sd_imageLoopCount;
NSDictionary *gifProperties = @{(__bridge_transfer NSString *)kCGImagePropertyGIFDictionary: @{(__bridge_transfer NSString *)kCGImagePropertyGIFLoopCount : @(loopCount)}};
CGImageDestinationSetProperties(imageDestination, (__bridge CFDictionaryRef)gifProperties);
NSDictionary *gifProperties = @{(__bridge_transfer NSString *)kCGImagePropertyGIFLoopCount : @(loopCount)};
[properties setValue:gifProperties forKey:(__bridge_transfer NSString *)kCGImagePropertyGIFDictionary];
CGImageDestinationSetProperties(imageDestination, (__bridge CFDictionaryRef)properties);
for (size_t i = 0; i < frames.count; i++) {
SDWebImageFrame *frame = frames[i];
@ -180,4 +289,92 @@
return [imageData copy];
}
#pragma mark - SDWebImageAnimatedCoder
// SDWebImageAnimatedCoder initializer for full animated decoding.
// Returns nil when `data` is nil, when Image/IO cannot parse it, or when the
// frame scan fails; otherwise keeps the source and raw data for later
// per-frame decoding.
- (nullable instancetype)initWithAnimatedImageData:(nullable NSData *)data {
    if (!data) {
        return nil;
    }
    self = [super init];
    if (self) {
        // Use the Image/IO cache because it already keeps a balance between CPU & memory.
        // kCGImageSourceShouldCache is a constant CFString we do not own, so a
        // plain __bridge cast is correct (__bridge_transfer would over-release).
        NSDictionary *creationOptions = @{(__bridge NSString *)kCGImageSourceShouldCache : @(YES)};
        CGImageSourceRef imageSource = CGImageSourceCreateWithData((__bridge CFDataRef)data, (__bridge CFDictionaryRef)creationOptions);
        if (!imageSource) {
            return nil;
        }
        BOOL framesValid = [self scanAndCheckFramesValidWithImageSource:imageSource];
        if (!framesValid) {
            // Scan failed: release the Copy-rule source before bailing out.
            CFRelease(imageSource);
            return nil;
        }
        _imageSource = imageSource;
        _imageData = data;
#if SD_UIKIT
        // Drop Image/IO's frame caches when the system signals memory pressure.
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveMemoryWarning:) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
#endif
    }
    return self;
}
// Walks the image source once and caches the frame count, loop count and
// each frame's duration into ivars. Returns NO only when no source is given.
- (BOOL)scanAndCheckFramesValidWithImageSource:(CGImageSourceRef)imageSource {
    if (imageSource == NULL) {
        return NO;
    }
    NSUInteger frameCount = CGImageSourceGetCount(imageSource);
    NSMutableArray<SDGIFCoderFrame *> *frameInfos = [NSMutableArray arrayWithCapacity:frameCount];
    for (NSUInteger frameIndex = 0; frameIndex < frameCount; frameIndex++) {
        SDGIFCoderFrame *frameInfo = [[SDGIFCoderFrame alloc] init];
        frameInfo.index = frameIndex;
        frameInfo.duration = [self sd_frameDurationAtIndex:frameIndex source:imageSource];
        [frameInfos addObject:frameInfo];
    }
    _frameCount = frameCount;
    _loopCount = [self sd_imageLoopCountWithSource:imageSource];
    _frames = [frameInfos copy];
    return YES;
}
// The full original GIF bitstream this coder was created from.
- (NSData *)animatedImageData {
    return _imageData;
}
// Loop count cached by scanAndCheckFramesValidWithImageSource:.
- (NSUInteger)animatedImageLoopCount {
    return _loopCount;
}
// Frame count cached by scanAndCheckFramesValidWithImageSource:.
- (NSUInteger)animatedImageFrameCount {
    return _frameCount;
}
// Duration (seconds) of the frame at `index`; 0 for out-of-range indexes.
- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
    return (index < _frameCount) ? _frames[index].duration : 0;
}
// Decodes and returns the frame at `index` as a platform image.
// Returns nil when Image/IO cannot create the frame (e.g. index out of range).
- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
    CGImageRef imageRef = CGImageSourceCreateImageAtIndex(_imageSource, index, NULL);
    if (!imageRef) {
        return nil;
    }
    // Image/IO creates the CGImage lazily (no decode). Force-decode here — this
    // method runs on a background queue — so main-queue rendering does not
    // stall, especially when several image views share the same image instance.
    CGImageRef newImageRef = [SDWebImageCoderHelper imageRefCreateDecoded:imageRef];
    if (!newImageRef) {
        // Decoding failed: fall back to the original (lazy) image ref.
        newImageRef = imageRef;
    } else {
        CGImageRelease(imageRef);
    }
#if SD_MAC
    UIImage *image = [[UIImage alloc] initWithCGImage:newImageRef size:NSZeroSize];
#else
    UIImage *image = [[UIImage alloc] initWithCGImage:newImageRef];
#endif
    CGImageRelease(newImageRef);
    return image;
}
@end

View File

@ -12,40 +12,14 @@
#import <ImageIO/ImageIO.h>
#import "NSData+ImageContentType.h"
#if SD_UIKIT || SD_WATCH
static const size_t kBytesPerPixel = 4;
static const size_t kBitsPerComponent = 8;
/*
* Defines the maximum size in MB of the decoded image when the flag `SDWebImageScaleDownLargeImages` is set
* Suggested value for iPad1 and iPhone 3GS: 60.
* Suggested value for iPad2 and iPhone 4: 120.
* Suggested value for iPhone 3G and iPod 2 and earlier devices: 30.
*/
static const CGFloat kDestImageSizeMB = 60.0f;
/*
* Defines the maximum size in MB of a tile used to decode image when the flag `SDWebImageScaleDownLargeImages` is set
* Suggested value for iPad1 and iPhone 3GS: 20.
* Suggested value for iPad2 and iPhone 4: 40.
* Suggested value for iPhone 3G and iPod 2 and earlier devices: 10.
*/
static const CGFloat kSourceImageTileSizeMB = 20.0f;
static const CGFloat kBytesPerMB = 1024.0f * 1024.0f;
static const CGFloat kPixelsPerMB = kBytesPerMB / kBytesPerPixel;
static const CGFloat kDestTotalPixels = kDestImageSizeMB * kPixelsPerMB;
static const CGFloat kTileTotalPixels = kSourceImageTileSizeMB * kPixelsPerMB;
static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to overlap the seems where tiles meet.
#endif
@implementation SDWebImageImageIOCoder {
size_t _width, _height;
size_t _width, _height;
#if SD_UIKIT || SD_WATCH
UIImageOrientation _orientation;
UIImageOrientation _orientation;
#endif
CGImageSourceRef _imageSource;
CGImageSourceRef _imageSource;
NSUInteger _frameCount;
BOOL _finished;
}
- (void)dealloc {
@ -53,6 +27,18 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
CFRelease(_imageSource);
_imageSource = NULL;
}
#if SD_UIKIT
[[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
#endif
}
// On memory pressure, evict Image/IO's cached decode for every known frame;
// frames will be re-decoded lazily the next time they are needed.
- (void)didReceiveMemoryWarning:(NSNotification *)notification
{
    if (!_imageSource) {
        return;
    }
    for (size_t frameIndex = 0; frameIndex < _frameCount; frameIndex++) {
        CGImageSourceRemoveCacheAtIndex(_imageSource, frameIndex);
    }
}
+ (instancetype)sharedCoder {
@ -78,7 +64,7 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
}
}
- (BOOL)canIncrementallyDecodeFromData:(NSData *)data {
- (BOOL)canIncrementalDecodeFromData:(NSData *)data {
switch ([NSData sd_imageFormatForImageData:data]) {
case SDImageFormatWebP:
// Do not support WebP progressive decoding
@ -91,7 +77,7 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
}
}
- (UIImage *)decodedImageWithData:(NSData *)data {
- (UIImage *)decodedImageWithData:(NSData *)data options:(nullable SDWebImageCoderOptions *)options {
if (!data) {
return nil;
}
@ -114,17 +100,30 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
#endif
}
- (UIImage *)incrementallyDecodedImageWithData:(NSData *)data finished:(BOOL)finished {
if (!_imageSource) {
#pragma mark - Progressive Decode
- (instancetype)initIncremental {
self = [super init];
if (self) {
_imageSource = CGImageSourceCreateIncremental(NULL);
#if SD_UIKIT
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveMemoryWarning:) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
#endif
}
UIImage *image;
return self;
}
- (void)updateIncrementalData:(NSData *)data finished:(BOOL)finished {
if (_finished) {
return;
}
_finished = finished;
// The following code is from http://www.cocoaintheshell.com/2011/05/progressive-images-download-imageio/
// Thanks to the author @Nyx0uf
// Update the data source, we must pass ALL the data, not just the new bytes
CGImageSourceUpdateData(_imageSource, (__bridge CFDataRef)data, finished);
_frameCount = CGImageSourceGetCount(_imageSource);
if (_width + _height == 0) {
CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(_imageSource, 0, NULL);
@ -147,6 +146,10 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
#endif
}
}
}
- (UIImage *)incrementalDecodedImageWithOptions:(SDWebImageCoderOptions *)options {
UIImage *image;
if (_width + _height > 0) {
// Create the image
@ -156,8 +159,8 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
// Workaround for iOS anamorphic image
if (partialImageRef) {
const size_t partialHeight = CGImageGetHeight(partialImageRef);
CGColorSpaceRef colorSpace = SDCGColorSpaceGetDeviceRGB();
CGContextRef bmContext = CGBitmapContextCreate(NULL, _width, _height, 8, 0, colorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedFirst);
CGColorSpaceRef colorSpace = [SDWebImageCoderHelper colorSpaceGetDeviceRGB];
CGContextRef bmContext = CGBitmapContextCreate(NULL, _width, _height, 8, 0, colorSpace, kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
if (bmContext) {
CGContextDrawImage(bmContext, (CGRect){.origin.x = 0.0f, .origin.y = 0.0f, .size.width = _width, .size.height = partialHeight}, partialImageRef);
CGImageRelease(partialImageRef);
@ -181,208 +184,9 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
}
}
if (finished) {
if (_imageSource) {
CFRelease(_imageSource);
_imageSource = NULL;
}
}
return image;
}
- (UIImage *)decompressedImageWithImage:(UIImage *)image
data:(NSData *__autoreleasing _Nullable *)data
options:(nullable NSDictionary<NSString*, NSObject*>*)optionsDict {
#if SD_MAC
return image;
#endif
#if SD_UIKIT || SD_WATCH
BOOL shouldScaleDown = NO;
if (optionsDict != nil) {
NSNumber *scaleDownLargeImagesOption = nil;
if ([optionsDict[SDWebImageCoderScaleDownLargeImagesKey] isKindOfClass:[NSNumber class]]) {
scaleDownLargeImagesOption = (NSNumber *)optionsDict[SDWebImageCoderScaleDownLargeImagesKey];
}
if (scaleDownLargeImagesOption != nil) {
shouldScaleDown = [scaleDownLargeImagesOption boolValue];
}
}
if (!shouldScaleDown) {
return [self sd_decompressedImageWithImage:image];
} else {
UIImage *scaledDownImage = [self sd_decompressedAndScaledDownImageWithImage:image];
if (scaledDownImage && !CGSizeEqualToSize(scaledDownImage.size, image.size)) {
// if the image is scaled down, need to modify the data pointer as well
SDImageFormat format = [NSData sd_imageFormatForImageData:*data];
NSData *imageData = [self encodedDataWithImage:scaledDownImage format:format];
if (imageData) {
*data = imageData;
}
}
return scaledDownImage;
}
#endif
}
#if SD_UIKIT || SD_WATCH
// Force-decodes `image` into a fresh, alpha-stripped bitmap so UIKit does
// not have to decode it lazily on the main thread at draw time. Returns the
// input unchanged when decoding is unnecessary (nil, animated, or has alpha
// — see +shouldDecodeImage:) or when the bitmap context cannot be created.
- (nullable UIImage *)sd_decompressedImageWithImage:(nullable UIImage *)image {
    if (![[self class] shouldDecodeImage:image]) {
        return image;
    }
    // autorelease the bitmap context and all vars to help system to free memory when there are memory warning.
    // on iOS7, do not forget to call [[SDImageCache sharedImageCache] clearMemory];
    @autoreleasepool{
        CGImageRef imageRef = image.CGImage;
        // Normalizes unsupported color spaces (CMYK, indexed, ...) to device RGB.
        CGColorSpaceRef colorspaceRef = [[self class] colorSpaceForImageRef:imageRef];
        size_t width = CGImageGetWidth(imageRef);
        size_t height = CGImageGetHeight(imageRef);
        // kCGImageAlphaNone is not supported in CGBitmapContextCreate.
        // Since the original image here has no alpha info, use kCGImageAlphaNoneSkipLast
        // to create bitmap graphics contexts without alpha info.
        CGContextRef context = CGBitmapContextCreate(NULL,
                                                     width,
                                                     height,
                                                     kBitsPerComponent,
                                                     0,
                                                     colorspaceRef,
                                                     kCGBitmapByteOrderDefault|kCGImageAlphaNoneSkipLast);
        if (context == NULL) {
            return image;
        }
        // Draw the image into the context and retrieve the new bitmap image without alpha
        CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
        CGImageRef imageRefWithoutAlpha = CGBitmapContextCreateImage(context);
        // Preserve the original scale and orientation on the decoded copy.
        UIImage *imageWithoutAlpha = [[UIImage alloc] initWithCGImage:imageRefWithoutAlpha scale:image.scale orientation:image.imageOrientation];
        CGContextRelease(context);
        CGImageRelease(imageRefWithoutAlpha);
        return imageWithoutAlpha;
    }
}
- (nullable UIImage *)sd_decompressedAndScaledDownImageWithImage:(nullable UIImage *)image {
if (![[self class] shouldDecodeImage:image]) {
return image;
}
if (![[self class] shouldScaleDownImage:image]) {
return [self sd_decompressedImageWithImage:image];
}
CGContextRef destContext;
// autorelease the bitmap context and all vars to help system to free memory when there are memory warning.
// on iOS7, do not forget to call [[SDImageCache sharedImageCache] clearMemory];
@autoreleasepool {
CGImageRef sourceImageRef = image.CGImage;
CGSize sourceResolution = CGSizeZero;
sourceResolution.width = CGImageGetWidth(sourceImageRef);
sourceResolution.height = CGImageGetHeight(sourceImageRef);
float sourceTotalPixels = sourceResolution.width * sourceResolution.height;
// Determine the scale ratio to apply to the input image
// that results in an output image of the defined size.
// see kDestImageSizeMB, and how it relates to destTotalPixels.
float imageScale = kDestTotalPixels / sourceTotalPixels;
CGSize destResolution = CGSizeZero;
destResolution.width = (int)(sourceResolution.width*imageScale);
destResolution.height = (int)(sourceResolution.height*imageScale);
// current color space
CGColorSpaceRef colorspaceRef = [[self class] colorSpaceForImageRef:sourceImageRef];
// kCGImageAlphaNone is not supported in CGBitmapContextCreate.
// Since the original image here has no alpha info, use kCGImageAlphaNoneSkipLast
// to create bitmap graphics contexts without alpha info.
destContext = CGBitmapContextCreate(NULL,
destResolution.width,
destResolution.height,
kBitsPerComponent,
0,
colorspaceRef,
kCGBitmapByteOrderDefault|kCGImageAlphaNoneSkipLast);
if (destContext == NULL) {
return image;
}
CGContextSetInterpolationQuality(destContext, kCGInterpolationHigh);
// Now define the size of the rectangle to be used for the
// incremental blits from the input image to the output image.
// we use a source tile width equal to the width of the source
// image due to the way that iOS retrieves image data from disk.
// iOS must decode an image from disk in full width 'bands', even
// if current graphics context is clipped to a subrect within that
// band. Therefore we fully utilize all of the pixel data that results
// from a decoding operation by anchoring our tile size to the full
// width of the input image.
CGRect sourceTile = CGRectZero;
sourceTile.size.width = sourceResolution.width;
// The source tile height is dynamic. Since we specified the size
// of the source tile in MB, see how many rows of pixels high it
// can be given the input image width.
sourceTile.size.height = (int)(kTileTotalPixels / sourceTile.size.width );
sourceTile.origin.x = 0.0f;
// The output tile is the same proportions as the input tile, but
// scaled to image scale.
CGRect destTile;
destTile.size.width = destResolution.width;
destTile.size.height = sourceTile.size.height * imageScale;
destTile.origin.x = 0.0f;
// The source seem overlap is proportionate to the destination seem overlap.
// this is the amount of pixels to overlap each tile as we assemble the output image.
float sourceSeemOverlap = (int)((kDestSeemOverlap/destResolution.height)*sourceResolution.height);
CGImageRef sourceTileImageRef;
// calculate the number of read/write operations required to assemble the
// output image.
int iterations = (int)( sourceResolution.height / sourceTile.size.height );
// If tile height doesn't divide the image height evenly, add another iteration
// to account for the remaining pixels.
int remainder = (int)sourceResolution.height % (int)sourceTile.size.height;
if(remainder) {
iterations++;
}
// Add seem overlaps to the tiles, but save the original tile height for y coordinate calculations.
float sourceTileHeightMinusOverlap = sourceTile.size.height;
sourceTile.size.height += sourceSeemOverlap;
destTile.size.height += kDestSeemOverlap;
for( int y = 0; y < iterations; ++y ) {
@autoreleasepool {
sourceTile.origin.y = y * sourceTileHeightMinusOverlap + sourceSeemOverlap;
destTile.origin.y = destResolution.height - (( y + 1 ) * sourceTileHeightMinusOverlap * imageScale + kDestSeemOverlap);
sourceTileImageRef = CGImageCreateWithImageInRect( sourceImageRef, sourceTile );
if( y == iterations - 1 && remainder ) {
float dify = destTile.size.height;
destTile.size.height = CGImageGetHeight( sourceTileImageRef ) * imageScale;
dify -= destTile.size.height;
destTile.origin.y += dify;
}
CGContextDrawImage( destContext, destTile, sourceTileImageRef );
CGImageRelease( sourceTileImageRef );
}
}
CGImageRef destImageRef = CGBitmapContextCreateImage(destContext);
CGContextRelease(destContext);
if (destImageRef == NULL) {
return image;
}
UIImage *destImage = [[UIImage alloc] initWithCGImage:destImageRef scale:image.scale orientation:image.imageOrientation];
CGImageRelease(destImageRef);
if (destImage == nil) {
return image;
}
return destImage;
}
}
#endif
#pragma mark - Encode
- (BOOL)canEncodeToFormat:(SDImageFormat)format {
switch (format) {
@ -397,13 +201,13 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
}
}
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format {
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format options:(nullable SDWebImageCoderOptions *)options {
if (!image) {
return nil;
}
if (format == SDImageFormatUndefined) {
BOOL hasAlpha = SDCGImageRefContainsAlpha(image.CGImage);
BOOL hasAlpha = [SDWebImageCoderHelper imageRefContainsAlpha:image.CGImage];
if (hasAlpha) {
format = SDImageFormatPNG;
} else {
@ -426,6 +230,11 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
NSInteger exifOrientation = [SDWebImageCoderHelper exifOrientationFromImageOrientation:image.imageOrientation];
[properties setValue:@(exifOrientation) forKey:(__bridge_transfer NSString *)kCGImagePropertyOrientation];
#endif
double compressionQuality = 1;
if ([options valueForKey:SDWebImageCoderEncodeCompressionQuality]) {
compressionQuality = [[options valueForKey:SDWebImageCoderEncodeCompressionQuality] doubleValue];
}
[properties setValue:@(compressionQuality) forKey:(__bridge_transfer NSString *)kCGImageDestinationLossyCompressionQuality];
// Add your image to the destination.
CGImageDestinationAddImage(imageDestination, image.CGImage, (__bridge CFDictionaryRef)properties);
@ -441,29 +250,6 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
return [imageData copy];
}
#pragma mark - Helper
// Force-decoding is worthwhile only for a non-nil, non-animated, opaque image.
+ (BOOL)shouldDecodeImage:(nullable UIImage *)image {
    if (image == nil) {
        // Avoids "CGBitmapContextCreateImage: invalid context 0x0" downstream.
        return NO;
    }
    if (image.images != nil) {
        // Animated images are left untouched.
        return NO;
    }
    if (SDCGImageRefContainsAlpha(image.CGImage)) {
        // Images with alpha are not decoded (the opaque context would drop it).
        return NO;
    }
    return YES;
}
+ (BOOL)canDecodeFromHEICFormat {
static BOOL canDecode = NO;
static dispatch_once_t onceToken;
@ -538,39 +324,4 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
}
#endif
#if SD_UIKIT || SD_WATCH
// Scale down only when the image's pixel count exceeds the destination
// budget (kDestTotalPixels), i.e. when the computed scale factor is below 1.
+ (BOOL)shouldScaleDownImage:(nonnull UIImage *)image {
    CGImageRef sourceImageRef = image.CGImage;
    float sourceTotalPixels = (float)CGImageGetWidth(sourceImageRef) * (float)CGImageGetHeight(sourceImageRef);
    float imageScale = kDestTotalPixels / sourceTotalPixels;
    return imageScale < 1;
}
// Returns the image's own color space when it is usable for bitmap-context
// rendering, otherwise falls back to the shared device RGB color space.
+ (CGColorSpaceRef)colorSpaceForImageRef:(CGImageRef)imageRef {
    CGColorSpaceRef colorspaceRef = CGImageGetColorSpace(imageRef);
    CGColorSpaceModel model = CGColorSpaceGetModel(colorspaceRef);
    switch (model) {
        case kCGColorSpaceModelUnknown:
        case kCGColorSpaceModelMonochrome:
        case kCGColorSpaceModelCMYK:
        case kCGColorSpaceModelIndexed:
            // Unsupported for our bitmap contexts — substitute device RGB.
            return SDCGColorSpaceGetDeviceRGB();
        default:
            return colorspaceRef;
    }
}
#endif
@end

View File

@ -112,10 +112,22 @@ typedef NS_OPTIONS(NSUInteger, SDWebImageOptions) {
* By default, when the cache missed, the image is download from the network. This flag can prevent network to load from cache only.
*/
SDWebImageFromCacheOnly = 1 << 15,
/**
* By default, when you use `SDWebImageTransition` to do some view transition after the image load finished, this transition is only applied for image download from the network. This mask can force to apply view transition for memory and disk cache as well.
*/
SDWebImageForceTransition = 1 << 16,
/**
* By default, we decode the animated image. This flag can force decode the first frame only and produece the static image.
*/
SDWebImageDecodeFirstFrameOnly = 1 << 17,
/**
* By default, for `SDAnimatedImage`, we decode the animated image frame during rendering to reduce memory usage. However, you can specify to preload all frames into memory to reduce CPU usage when the animated image is shared by lots of imageViews.
* This will actually trigger `preloadAllAnimatedImageFrames` in the background queue(Disk Cache & Download only).
*/
SDWebImagePreloadAllFrames = 1 << 18
};
typedef void(^SDExternalCompletionBlock)(UIImage * _Nullable image, NSError * _Nullable error, SDImageCacheType cacheType, NSURL * _Nullable imageURL);

View File

@ -8,6 +8,8 @@
#import "SDWebImageManager.h"
#import "NSImage+Additions.h"
#import "UIImage+WebCache.h"
#import "SDAnimatedImage.h"
@interface SDWebImageCombinedOperation : NSObject <SDWebImageOperation>
@ -153,6 +155,8 @@
if (options & SDWebImageQueryDataWhenInMemory) cacheOptions |= SDImageCacheQueryDataWhenInMemory;
if (options & SDWebImageQueryDiskSync) cacheOptions |= SDImageCacheQueryDiskSync;
if (options & SDWebImageTransformAnimatedImage) cacheOptions |= SDImageCacheTransformAnimatedImage;
if (options & SDWebImageDecodeFirstFrameOnly) cacheOptions |= SDImageCacheDecodeFirstFrameOnly;
if (options & SDWebImagePreloadAllFrames) cacheOptions |= SDImageCachePreloadAllFrames;
// Image transformer
id<SDWebImageTransformer> transformer;
@ -195,6 +199,8 @@
if (options & SDWebImageAllowInvalidSSLCertificates) downloaderOptions |= SDWebImageDownloaderAllowInvalidSSLCertificates;
if (options & SDWebImageHighPriority) downloaderOptions |= SDWebImageDownloaderHighPriority;
if (options & SDWebImageScaleDownLargeImages) downloaderOptions |= SDWebImageDownloaderScaleDownLargeImages;
if (options & SDWebImageDecodeFirstFrameOnly) downloaderOptions |= SDWebImageDownloaderDecodeFirstFrameOnly;
if (options & SDWebImagePreloadAllFrames) downloaderOptions |= SDWebImageDownloaderPreloadAllFrames;
if (cachedImage && options & SDWebImageRefreshCached) {
// force progressive off if image already cached but forced refreshing
@ -244,7 +250,7 @@
BOOL cacheOnDisk = !(options & SDWebImageCacheMemoryOnly);
// We've done the scale process in SDWebImageDownloader with the shared manager, this is used for custom manager and avoid extra scale.
if (self != [SDWebImageManager sharedManager] && self.cacheKeyFilter && downloadedImage) {
if (self != [SDWebImageManager sharedManager] && self.cacheKeyFilter && downloadedImage && ![downloadedImage conformsToProtocol:@protocol(SDAnimatedImage)]) {
downloadedImage = [self scaledImageForKey:key image:downloadedImage];
}

View File

@ -14,7 +14,7 @@
/**
Built in coder that supports WebP and animated WebP
*/
@interface SDWebImageWebPCoder : NSObject <SDWebImageProgressiveCoder>
@interface SDWebImageWebPCoder : NSObject <SDWebImageProgressiveCoder, SDWebImageAnimatedCoder>
@property (nonatomic, class, readonly, nonnull) SDWebImageWebPCoder *sharedCoder;

View File

@ -24,8 +24,44 @@
#import "webp/mux.h"
#endif
#define LOCK(...) dispatch_semaphore_wait(self->_lock, DISPATCH_TIME_FOREVER); \
__VA_ARGS__; \
dispatch_semaphore_signal(self->_lock);
@interface SDWebPCoderFrame : NSObject
@property (nonatomic, assign) NSUInteger index; // Frame index (zero based)
@property (nonatomic, assign) NSTimeInterval duration; // Frame duration in seconds
@property (nonatomic, assign) NSUInteger width; // Frame width
@property (nonatomic, assign) NSUInteger height; // Frame height
@property (nonatomic, assign) NSUInteger offsetX; // Frame origin.x in canvas (left-bottom based)
@property (nonatomic, assign) NSUInteger offsetY; // Frame origin.y in canvas (left-bottom based)
@property (nonatomic, assign) BOOL hasAlpha; // Whether frame contains alpha
@property (nonatomic, assign) BOOL isFullSize; // Whether frame size is equal to canvas size
@property (nonatomic, assign) WebPMuxAnimBlend blend; // Frame dispose method
@property (nonatomic, assign) WebPMuxAnimDispose dispose; // Frame blend operation
@property (nonatomic, assign) NSUInteger blendFromIndex; // The nearest previous frame index which blend mode is WEBP_MUX_BLEND
@end
@implementation SDWebPCoderFrame
@end
@implementation SDWebImageWebPCoder {
WebPIDecoder *_idec;
WebPDemuxer *_demux;
NSData *_imageData;
NSUInteger _loopCount;
NSUInteger _frameCount;
NSArray<SDWebPCoderFrame *> *_frames;
CGContextRef _canvas;
BOOL _hasAnimation;
BOOL _hasAlpha;
BOOL _finished;
CGFloat _canvasWidth;
CGFloat _canvasHeight;
dispatch_semaphore_t _lock;
NSUInteger _currentBlendIndex;
}
- (void)dealloc {
@ -33,6 +69,14 @@
WebPIDelete(_idec);
_idec = NULL;
}
if (_demux) {
WebPDemuxDelete(_demux);
_demux = NULL;
}
if (_canvas) {
CGContextRelease(_canvas);
_canvas = NULL;
}
}
+ (instancetype)sharedCoder {
@ -49,11 +93,11 @@
return ([NSData sd_imageFormatForImageData:data] == SDImageFormatWebP);
}
- (BOOL)canIncrementallyDecodeFromData:(NSData *)data {
- (BOOL)canIncrementalDecodeFromData:(NSData *)data {
return ([NSData sd_imageFormatForImageData:data] == SDImageFormatWebP);
}
- (UIImage *)decodedImageWithData:(NSData *)data {
- (UIImage *)decodedImageWithData:(NSData *)data options:(nullable SDWebImageCoderOptions *)options {
if (!data) {
return nil;
}
@ -68,49 +112,41 @@
}
uint32_t flags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
int loopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
CGBitmapInfo bitmapInfo;
if (!(flags & ALPHA_FLAG)) {
bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaNoneSkipLast;
} else {
bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast;
}
CGContextRef canvas = CGBitmapContextCreate(NULL, canvasWidth, canvasHeight, 8, 0, SDCGColorSpaceGetDeviceRGB(), bitmapInfo);
if (!canvas) {
WebPDemuxDelete(demuxer);
return nil;
}
if (!(flags & ANIMATION_FLAG)) {
BOOL hasAnimation = flags & ANIMATION_FLAG;
BOOL decodeFirstFrame = [[options valueForKey:SDWebImageCoderDecodeFirstFrameOnly] boolValue];
if (!hasAnimation) {
// for static single webp image
UIImage *staticImage = [self sd_rawWebpImageWithData:webpData];
if (staticImage) {
// draw on CGBitmapContext can reduce memory usage
CGImageRef imageRef = staticImage.CGImage;
size_t width = CGImageGetWidth(imageRef);
size_t height = CGImageGetHeight(imageRef);
CGContextDrawImage(canvas, CGRectMake(0, 0, width, height), imageRef);
CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
#if SD_UIKIT || SD_WATCH
staticImage = [[UIImage alloc] initWithCGImage:newImageRef];
#else
staticImage = [[UIImage alloc] initWithCGImage:newImageRef size:NSZeroSize];
#endif
CGImageRelease(newImageRef);
}
WebPDemuxDelete(demuxer);
CGContextRelease(canvas);
return staticImage;
}
// for animated webp image
WebPIterator iter;
// libwebp's index start with 1
if (!WebPDemuxGetFrame(demuxer, 1, &iter)) {
WebPDemuxReleaseIterator(&iter);
WebPDemuxDelete(demuxer);
CGContextRelease(canvas);
return nil;
}
if (decodeFirstFrame) {
// first frame for animated webp image
UIImage *firstFrameImage = [self sd_rawWebpImageWithData:iter.fragment];
WebPDemuxReleaseIterator(&iter);
WebPDemuxDelete(demuxer);
return firstFrameImage;
}
int loopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
BOOL hasAlpha = flags & ALPHA_FLAG;
CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
CGContextRef canvas = CGBitmapContextCreate(NULL, canvasWidth, canvasHeight, 8, 0, [SDWebImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
if (!canvas) {
WebPDemuxDelete(demuxer);
return nil;
}
@ -123,13 +159,8 @@
continue;
}
int duration = iter.duration;
if (duration <= 10) {
// WebP standard says 0 duration is used for canvas updating but not showing image, but actually Chrome and other implementations set it to 100ms if duration is lower or equal than 10ms
// Some animated WebP images also created without duration, we should keep compatibility
duration = 100;
}
SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:image duration:duration / 1000.f];
NSTimeInterval duration = [self sd_frameDurationWithIterator:iter];
SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:image duration:duration];
[frames addObject:frame];
}
@ -145,21 +176,34 @@
return animatedImage;
}
- (UIImage *)incrementallyDecodedImageWithData:(NSData *)data finished:(BOOL)finished {
if (!_idec) {
#pragma mark - Progressive Decode
- (instancetype)initIncremental {
self = [super init];
if (self) {
// Progressive images need transparent, so always use premultiplied RGBA
_idec = WebPINewRGB(MODE_rgbA, NULL, 0, 0);
if (!_idec) {
return nil;
}
_idec = WebPINewRGB(MODE_bgrA, NULL, 0, 0);
}
UIImage *image;
return self;
}
- (void)updateIncrementalData:(NSData *)data finished:(BOOL)finished {
if (_finished) {
return;
}
_imageData = data;
_finished = finished;
VP8StatusCode status = WebPIUpdate(_idec, data.bytes, data.length);
if (status != VP8_STATUS_OK && status != VP8_STATUS_SUSPENDED) {
return nil;
return;
}
}
- (BOOL)incrementalFinished {
return _finished;
}
- (UIImage *)incrementalDecodedImageWithOptions:(SDWebImageCoderOptions *)options {
UIImage *image;
int width = 0;
int height = 0;
@ -172,9 +216,9 @@
size_t rgbaSize = last_y * stride;
CGDataProviderRef provider =
CGDataProviderCreateWithData(NULL, rgba, rgbaSize, NULL);
CGColorSpaceRef colorSpaceRef = SDCGColorSpaceGetDeviceRGB();
CGColorSpaceRef colorSpaceRef = [SDWebImageCoderHelper colorSpaceGetDeviceRGB];
CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast;
CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst;
size_t components = 4;
CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
// Why to use last_y for image height is because of libwebp's bug (https://bugs.chromium.org/p/webp/issues/detail?id=362)
@ -191,7 +235,7 @@
return nil;
}
CGContextRef canvas = CGBitmapContextCreate(NULL, width, height, 8, 0, SDCGColorSpaceGetDeviceRGB(), bitmapInfo);
CGContextRef canvas = CGBitmapContextCreate(NULL, width, height, 8, 0, [SDWebImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
if (!canvas) {
CGImageRelease(imageRef);
return nil;
@ -215,21 +259,29 @@
CGContextRelease(canvas);
}
if (finished) {
if (_idec) {
WebPIDelete(_idec);
_idec = NULL;
}
}
return image;
}
- (UIImage *)decompressedImageWithImage:(UIImage *)image
data:(NSData *__autoreleasing _Nullable *)data
options:(nullable NSDictionary<NSString*, NSObject*>*)optionsDict {
// WebP do not decompress
return image;
- (void)sd_blendWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter {
size_t canvasHeight = CGBitmapContextGetHeight(canvas);
CGFloat tmpX = iter.x_offset;
CGFloat tmpY = canvasHeight - iter.height - iter.y_offset;
CGRect imageRect = CGRectMake(tmpX, tmpY, iter.width, iter.height);
if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
CGContextClearRect(canvas, imageRect);
} else {
UIImage *image = [self sd_rawWebpImageWithData:iter.fragment];
if (!image) {
return;
}
BOOL shouldBlend = iter.blend_method == WEBP_MUX_BLEND;
// If not blend, cover the target image rect. (firstly clear then draw)
if (!shouldBlend) {
CGContextClearRect(canvas, imageRect);
}
CGContextDrawImage(canvas, imageRect, image.CGImage);
}
}
- (nullable UIImage *)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter {
@ -238,11 +290,9 @@
return nil;
}
size_t canvasWidth = CGBitmapContextGetWidth(canvas);
size_t canvasHeight = CGBitmapContextGetHeight(canvas);
CGSize size = CGSizeMake(canvasWidth, canvasHeight);
CGFloat tmpX = iter.x_offset;
CGFloat tmpY = size.height - iter.height - iter.y_offset;
CGFloat tmpY = canvasHeight - iter.height - iter.y_offset;
CGRect imageRect = CGRectMake(tmpX, tmpY, iter.width, iter.height);
BOOL shouldBlend = iter.blend_method == WEBP_MUX_BLEND;
@ -278,8 +328,14 @@
return nil;
}
config.output.colorspace = config.input.has_alpha ? MODE_rgbA : MODE_RGB;
BOOL hasAlpha = config.input.has_alpha;
// iOS prefer BGRA8888 (premultiplied) or BGRX8888 bitmapInfo for screen rendering, which is same as `UIGraphicsBeginImageContext()` or `- [CALayer drawInContext:]`
// use this bitmapInfo, combined with right colorspace, even without decode, can still avoid extra CA::Render::copy_image(which marked `Color Copied Images` from Instruments)
WEBP_CSP_MODE colorspace = MODE_bgrA;
CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
config.options.use_threads = 1;
config.output.colorspace = colorspace;
// Decode the WebP image data into a RGBA value array
if (WebPDecode(webpData.bytes, webpData.size, &config) != VP8_STATUS_OK) {
@ -296,11 +352,12 @@
// Construct a UIImage from the decoded RGBA value array
CGDataProviderRef provider =
CGDataProviderCreateWithData(NULL, config.output.u.RGBA.rgba, config.output.u.RGBA.size, FreeImageData);
CGColorSpaceRef colorSpaceRef = SDCGColorSpaceGetDeviceRGB();
CGBitmapInfo bitmapInfo = config.input.has_alpha ? kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast : kCGBitmapByteOrder32Big | kCGImageAlphaNoneSkipLast;
size_t components = config.input.has_alpha ? 4 : 3;
size_t bitsPerComponent = 8;
size_t bitsPerPixel = 32;
size_t bytesPerRow = config.output.u.RGBA.stride;
CGColorSpaceRef colorSpaceRef = [SDWebImageCoderHelper colorSpaceGetDeviceRGB];
CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
CGImageRef imageRef = CGImageCreate(width, height, 8, components * 8, components * width, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
CGImageRef imageRef = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
CGDataProviderRelease(provider);
@ -314,22 +371,36 @@
return image;
}
- (NSTimeInterval)sd_frameDurationWithIterator:(WebPIterator)iter {
int duration = iter.duration;
if (duration <= 10) {
// WebP standard says 0 duration is used for canvas updating but not showing image, but actually Chrome and other implementations set it to 100ms if duration is lower or equal than 10ms
// Some animated WebP images also created without duration, we should keep compatibility
duration = 100;
}
return duration / 1000.0;
}
#pragma mark - Encode
- (BOOL)canEncodeToFormat:(SDImageFormat)format {
return (format == SDImageFormatWebP);
}
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format {
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format options:(nullable SDWebImageCoderOptions *)options {
if (!image) {
return nil;
}
NSData *data;
double compressionQuality = 1;
if ([options valueForKey:SDWebImageCoderEncodeCompressionQuality]) {
compressionQuality = [[options valueForKey:SDWebImageCoderEncodeCompressionQuality] doubleValue];
}
NSArray<SDWebImageFrame *> *frames = [SDWebImageCoderHelper framesFromAnimatedImage:image];
if (frames.count == 0) {
// for static single webp image
data = [self sd_encodedWebpDataWithImage:image];
data = [self sd_encodedWebpDataWithImage:image quality:compressionQuality];
} else {
// for animated webp image
WebPMux *mux = WebPMuxNew();
@ -338,7 +409,7 @@
}
for (size_t i = 0; i < frames.count; i++) {
SDWebImageFrame *currentFrame = frames[i];
NSData *webpData = [self sd_encodedWebpDataWithImage:currentFrame.image];
NSData *webpData = [self sd_encodedWebpDataWithImage:currentFrame.image quality:compressionQuality];
int duration = currentFrame.duration * 1000;
WebPMuxFrameInfo frame = { .bitstream.bytes = webpData.bytes,
.bitstream.size = webpData.length,
@ -375,7 +446,7 @@
return data;
}
- (nullable NSData *)sd_encodedWebpDataWithImage:(nullable UIImage *)image {
- (nullable NSData *)sd_encodedWebpDataWithImage:(nullable UIImage *)image quality:(double)quality {
if (!image) {
return nil;
}
@ -401,8 +472,8 @@
uint8_t *rgba = (uint8_t *)CFDataGetBytePtr(dataRef);
uint8_t *data = NULL;
float quality = 100.0;
size_t size = WebPEncodeRGBA(rgba, (int)width, (int)height, (int)bytesPerRow, quality, &data);
float qualityFactor = quality * 100; // WebP quality is 0-100
size_t size = WebPEncodeRGBA(rgba, (int)width, (int)height, (int)bytesPerRow, qualityFactor, &data);
CFRelease(dataRef);
rgba = NULL;
@ -421,6 +492,185 @@ static void FreeImageData(void *info, const void *data, size_t size) {
free((void *)data);
}
#pragma mark - SDWebImageAnimatedCoder
- (instancetype)initWithAnimatedImageData:(NSData *)data {
if (!data) {
return nil;
}
if (self) {
WebPData webpData;
WebPDataInit(&webpData);
webpData.bytes = data.bytes;
webpData.size = data.length;
WebPDemuxer *demuxer = WebPDemux(&webpData);
if (!demuxer) {
return nil;
}
BOOL framesValid = [self scanAndCheckFramesValidWithDemuxer:demuxer];
if (!framesValid) {
WebPDemuxDelete(demuxer);
return nil;
}
_demux = demuxer;
_imageData = data;
_currentBlendIndex = NSNotFound;
_lock = dispatch_semaphore_create(1);
}
return self;
}
- (BOOL)scanAndCheckFramesValidWithDemuxer:(WebPDemuxer *)demuxer {
if (!demuxer) {
return NO;
}
WebPIterator iter;
if (!WebPDemuxGetFrame(demuxer, 1, &iter)) {
WebPDemuxReleaseIterator(&iter);
return NO;
}
uint32_t iterIndex = 0;
uint32_t lastBlendIndex = 0;
uint32_t flags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
BOOL hasAnimation = flags & ANIMATION_FLAG;
BOOL hasAlpha = flags & ALPHA_FLAG;
int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
uint32_t frameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
uint32_t loopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
NSMutableArray<SDWebPCoderFrame *> *frames = [NSMutableArray array];
// We should loop all the frames and scan each frames' blendFromIndex for later decoding, this can also ensure all frames is valid
do {
SDWebPCoderFrame *frame = [[SDWebPCoderFrame alloc] init];
frame.index = iterIndex;
frame.duration = [self sd_frameDurationWithIterator:iter];
frame.width = iter.width;
frame.height = iter.height;
frame.hasAlpha = iter.has_alpha;
frame.dispose = iter.dispose_method;
frame.blend = iter.blend_method;
frame.offsetX = iter.x_offset;
frame.offsetY = canvasHeight - iter.y_offset - iter.height;
BOOL sizeEqualsToCanvas = (iter.width == canvasWidth && iter.height == canvasHeight);
BOOL offsetIsZero = (iter.x_offset == 0 && iter.y_offset == 0);
frame.isFullSize = (sizeEqualsToCanvas && offsetIsZero);
if ((!frame.blend || !frame.hasAlpha) && frame.isFullSize) {
lastBlendIndex = iterIndex;
frame.blendFromIndex = iterIndex;
} else {
if (frame.dispose && frame.isFullSize) {
frame.blendFromIndex = lastBlendIndex;
lastBlendIndex = iterIndex + 1;
} else {
frame.blendFromIndex = lastBlendIndex;
}
}
iterIndex++;
[frames addObject:frame];
} while (WebPDemuxNextFrame(&iter));
WebPDemuxReleaseIterator(&iter);
if (frames.count != frameCount) {
return NO;
}
_frames = [frames copy];
_hasAnimation = hasAnimation;
_hasAlpha = hasAlpha;
_canvasWidth = canvasWidth;
_canvasHeight = canvasHeight;
_frameCount = frameCount;
_loopCount = loopCount;
return YES;
}
- (NSData *)animatedImageData {
return _imageData;
}
- (NSUInteger)animatedImageLoopCount {
return _loopCount;
}
- (NSUInteger)animatedImageFrameCount {
return _frameCount;
}
- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
if (index >= _frameCount) {
return 0;
}
return _frames[index].duration;
}
- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
UIImage *image;
if (index >= _frameCount) {
return nil;
}
LOCK({
image = [self safeAnimatedImageFrameAtIndex:index];
});
return image;
}
- (UIImage *)safeAnimatedImageFrameAtIndex:(NSUInteger)index {
if (!_canvas) {
CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
bitmapInfo |= _hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
CGContextRef canvas = CGBitmapContextCreate(NULL, _canvasWidth, _canvasHeight, 8, 0, [SDWebImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
if (!canvas) {
return nil;
}
_canvas = canvas;
}
SDWebPCoderFrame *frame = _frames[index];
UIImage *image;
WebPIterator iter;
if (_currentBlendIndex + 1 == index) {
// If current blend index is equal to request index, normal serial process
_currentBlendIndex = index;
// libwebp's index start with 1
if (!WebPDemuxGetFrame(_demux, (int)(index + 1), &iter)) {
WebPDemuxReleaseIterator(&iter);
return nil;
}
image = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter];
} else {
// Else, this can happen when one image set to different imageViews or one loop end. So we should clear the shared cavans.
if (_currentBlendIndex != NSNotFound) {
CGContextClearRect(_canvas, CGRectMake(0, 0, _canvasWidth, _canvasHeight));
}
_currentBlendIndex = index;
// Then, loop from the blend from index, draw each of previous frames on the canvas.
// We use do while loop to call `WebPDemuxNextFrame`(fast), only (startIndex == endIndex) need to create image instance
size_t startIndex = frame.blendFromIndex;
size_t endIndex = frame.index;
if (!WebPDemuxGetFrame(_demux, (int)(startIndex + 1), &iter)) {
WebPDemuxReleaseIterator(&iter);
return nil;
}
do {
@autoreleasepool {
if ((size_t)iter.frame_num == endIndex) {
[self sd_blendWebpImageWithCanvas:_canvas iterator:iter];
} else {
image = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter];
}
}
} while ((size_t)iter.frame_num < (endIndex + 1) && WebPDemuxNextFrame(&iter));
}
WebPDemuxReleaseIterator(&iter);
return image;
}
@end
#endif

View File

@ -7,7 +7,7 @@
*/
#import "UIImage+ForceDecode.h"
#import "SDWebImageCodersManager.h"
#import "SDWebImageCoderHelper.h"
@implementation UIImage (ForceDecode)
@ -15,16 +15,14 @@
if (!image) {
return nil;
}
NSData *tempData;
return [[SDWebImageCodersManager sharedManager] decompressedImageWithImage:image data:&tempData options:@{SDWebImageCoderScaleDownLargeImagesKey: @(NO)}];
return [SDWebImageCoderHelper decodedImageWithImage:image];
}
+ (UIImage *)sd_decodedAndScaledDownImageWithImage:(UIImage *)image {
if (!image) {
return nil;
}
NSData *tempData;
return [[SDWebImageCodersManager sharedManager] decompressedImageWithImage:image data:&tempData options:@{SDWebImageCoderScaleDownLargeImagesKey: @(YES)}];
return [SDWebImageCoderHelper decodedAndScaledDownImageWithImage:image limitBytes:0];
}
@end

View File

@ -12,9 +12,21 @@
@interface UIImage (GIF)
/**
* Creates an animated UIImage from an NSData.
* For static GIF, will create an UIImage with `images` array set to nil. For animated GIF, will create an UIImage with valid `images` array.
Creates an animated UIImage from an NSData.
For Static GIF, will create an UIImage with `images` array set to nil. For Animated GIF, will create an UIImage with valid `images` array.
@param data The GIF data
@return The created image
*/
+ (nullable UIImage *)sd_animatedGIFWithData:(nullable NSData *)data;
/**
Creates an animated UIImage from an NSData.
@param data The GIF data
@param firstFrameOnly Even if the image data is Animated GIF format, decode the first frame only
@return The created image
*/
+ (nullable UIImage *)sd_animatedGIFWithData:(nullable NSData *)data firstFrameOnly:(BOOL)firstFrameOnly;
@end

View File

@ -12,11 +12,16 @@
@implementation UIImage (GIF)
+ (UIImage *)sd_animatedGIFWithData:(NSData *)data {
+ (nullable UIImage *)sd_animatedGIFWithData:(nullable NSData *)data {
return [self sd_animatedGIFWithData:data firstFrameOnly:NO];
}
+ (nullable UIImage *)sd_animatedGIFWithData:(nullable NSData *)data firstFrameOnly:(BOOL)firstFrameOnly {
if (!data) {
return nil;
}
return [[SDWebImageGIFCoder sharedCoder] decodedImageWithData:data];
SDWebImageCoderOptions *options = @{SDWebImageCoderDecodeFirstFrameOnly : @(firstFrameOnly)};
return [[SDWebImageGIFCoder sharedCoder] decodedImageWithData:data options:options];
}
@end

View File

@ -10,15 +10,26 @@
#import "NSData+ImageContentType.h"
@interface UIImage (MultiFormat)
#pragma mark - Decode
/**
Create and decode a image with the specify image data
If the image data is animated image format, create an animated image if possible
@param data The image data
@return The created image
*/
+ (nullable UIImage *)sd_imageWithData:(nullable NSData *)data;
/**
Create and decode a image with the specify image data
@param data The image data
@param firstFrameOnly Even if the image data is animated image format, decode the first frame only
@return The created image
*/
+ (nullable UIImage *)sd_imageWithData:(nullable NSData *)data firstFrameOnly:(BOOL)firstFrameOnly;
#pragma mark - Encode
/**
Encode the current image to the data, the image format is unspecified
@ -34,4 +45,13 @@
*/
- (nullable NSData *)sd_imageDataAsFormat:(SDImageFormat)imageFormat;
/**
Encode the current image to data with the specify image format
@param imageFormat The specify image format
@param compressionQuality The quality of the resulting image data. Value between 0.0-1.0. Some coders may not support compression quality.
@return The encoded data. If can't encode, return nil
*/
- (nullable NSData *)sd_imageDataAsFormat:(SDImageFormat)imageFormat compressionQuality:(double)compressionQuality;
@end

View File

@ -12,7 +12,15 @@
@implementation UIImage (MultiFormat)
+ (nullable UIImage *)sd_imageWithData:(nullable NSData *)data {
return [[SDWebImageCodersManager sharedManager] decodedImageWithData:data];
return [self sd_imageWithData:data firstFrameOnly:NO];
}
+ (nullable UIImage *)sd_imageWithData:(nullable NSData *)data firstFrameOnly:(BOOL)firstFrameOnly {
if (!data) {
return nil;
}
SDWebImageCoderOptions *options = @{SDWebImageCoderDecodeFirstFrameOnly : @(firstFrameOnly)};
return [[SDWebImageCodersManager sharedManager] decodedImageWithData:data options:options];
}
- (nullable NSData *)sd_imageData {
@ -20,12 +28,12 @@
}
- (nullable NSData *)sd_imageDataAsFormat:(SDImageFormat)imageFormat {
NSData *imageData = nil;
if (self) {
imageData = [[SDWebImageCodersManager sharedManager] encodedDataWithImage:self format:imageFormat];
}
return imageData;
return [self sd_imageDataAsFormat:imageFormat compressionQuality:1];
}
- (nullable NSData *)sd_imageDataAsFormat:(SDImageFormat)imageFormat compressionQuality:(double)compressionQuality {
SDWebImageCoderOptions *options = @{SDWebImageCoderEncodeCompressionQuality : @(compressionQuality)};
return [[SDWebImageCodersManager sharedManager] encodedDataWithImage:self format:imageFormat options:options];
}
@end

View File

@ -303,7 +303,7 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
#if SD_UIKIT || SD_WATCH
UIImage *image = [UIImage imageWithCGImage:imageRef scale:self.scale orientation:self.imageOrientation];
#else
UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:self.scale];
UIImage *image = [[UIImage alloc] initWithCGImage:imageRef size:NSZeroSize];
#endif
CGImageRelease(imageRef);
return image;
@ -381,7 +381,7 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
#if SD_UIKIT || SD_WATCH
UIImage *img = [UIImage imageWithCGImage:imgRef scale:self.scale orientation:self.imageOrientation];
#else
UIImage *img = [[UIImage alloc] initWithCGImage:imgRef scale:self.scale];
UIImage *img = [[UIImage alloc] initWithCGImage:imgRef size:NSZeroSize];
#endif
CGImageRelease(imgRef);
CGContextRelease(context);
@ -417,7 +417,7 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
#if SD_UIKIT || SD_WATCH
UIImage *img = [UIImage imageWithCGImage:imgRef scale:self.scale orientation:self.imageOrientation];
#else
UIImage *img = [[UIImage alloc] initWithCGImage:imgRef scale:self.scale];
UIImage *img = [[UIImage alloc] initWithCGImage:imgRef size:NSZeroSize];
#endif
CGImageRelease(imgRef);
return img;
@ -434,7 +434,7 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
#if SD_UIKIT || SD_WATCH
return [UIImage imageWithCGImage:self.CGImage scale:self.scale orientation:self.imageOrientation];
#else
return [[UIImage alloc] initWithCGImage:self.CGImage scale:self.scale];
return [[UIImage alloc] initWithCGImage:self.CGImage size:NSZeroSize];
#endif
}
@ -651,7 +651,7 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
#if SD_UIKIT || SD_WATCH
UIImage *outputImage = [UIImage imageWithCGImage:effectCGImage scale:self.scale orientation:self.imageOrientation];
#else
UIImage *outputImage = [[UIImage alloc] initWithCGImage:effectCGImage scale:self.scale];
UIImage *outputImage = [[UIImage alloc] initWithCGImage:effectCGImage size:NSZeroSize];
#endif
CGImageRelease(effectCGImage);
@ -676,7 +676,7 @@ static inline UIColor * SDGetColorFromPixel(Pixel_8888 pixel, CGBitmapInfo bitma
#if SD_UIKIT
UIImage *image = [UIImage imageWithCGImage:imageRef scale:self.scale orientation:self.imageOrientation];
#else
UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:self.scale];
UIImage *image = [[UIImage alloc] initWithCGImage:imageRef size:NSZeroSize];
#endif
CGImageRelease(imageRef);

View File

@ -29,4 +29,9 @@
*/
@property (nonatomic, assign, readonly) BOOL sd_isAnimated;
/**
Indicating whether the image is during incremental decoding and may not contains full pixels.
*/
@property (nonatomic, assign) BOOL sd_isIncremental;
@end

View File

@ -7,11 +7,11 @@
*/
#import "UIImage+WebCache.h"
#import "NSImage+Additions.h"
#import "objc/runtime.h"
#if SD_UIKIT
#import "objc/runtime.h"
@implementation UIImage (WebCache)
- (NSUInteger)sd_imageLoopCount {
@ -32,6 +32,15 @@
return (self.images != nil);
}
- (void)setSd_isIncremental:(BOOL)sd_isIncremental {
objc_setAssociatedObject(self, @selector(sd_isIncremental), @(sd_isIncremental), OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
- (BOOL)sd_isIncremental {
NSNumber *value = objc_getAssociatedObject(self, @selector(sd_isIncremental));
return value.boolValue;
}
@end
#endif
@ -42,39 +51,39 @@
- (NSUInteger)sd_imageLoopCount {
NSUInteger imageLoopCount = 0;
for (NSImageRep *rep in self.representations) {
if ([rep isKindOfClass:[NSBitmapImageRep class]]) {
NSBitmapImageRep *bitmapRep = (NSBitmapImageRep *)rep;
imageLoopCount = [[bitmapRep valueForProperty:NSImageLoopCount] unsignedIntegerValue];
break;
}
NSBitmapImageRep *bitmapImageRep = self.bitmapImageRep;
if (bitmapImageRep) {
imageLoopCount = [[bitmapImageRep valueForProperty:NSImageLoopCount] unsignedIntegerValue];
}
return imageLoopCount;
}
- (void)setSd_imageLoopCount:(NSUInteger)sd_imageLoopCount {
for (NSImageRep *rep in self.representations) {
if ([rep isKindOfClass:[NSBitmapImageRep class]]) {
NSBitmapImageRep *bitmapRep = (NSBitmapImageRep *)rep;
[bitmapRep setProperty:NSImageLoopCount withValue:@(sd_imageLoopCount)];
break;
}
NSBitmapImageRep *bitmapImageRep = self.bitmapImageRep;
if (bitmapImageRep) {
[bitmapImageRep setProperty:NSImageLoopCount withValue:@(sd_imageLoopCount)];
}
}
- (BOOL)sd_isAnimated {
BOOL isGIF = NO;
for (NSImageRep *rep in self.representations) {
if ([rep isKindOfClass:[NSBitmapImageRep class]]) {
NSBitmapImageRep *bitmapRep = (NSBitmapImageRep *)rep;
NSUInteger frameCount = [[bitmapRep valueForProperty:NSImageFrameCount] unsignedIntegerValue];
isGIF = frameCount > 1 ? YES : NO;
break;
}
NSBitmapImageRep *bitmapImageRep = self.bitmapImageRep;
if (bitmapImageRep) {
NSUInteger frameCount = [[bitmapImageRep valueForProperty:NSImageFrameCount] unsignedIntegerValue];
isGIF = frameCount > 1 ? YES : NO;
}
return isGIF;
}
- (void)setSd_isIncremental:(BOOL)sd_isIncremental {
objc_setAssociatedObject(self, @selector(sd_isIncremental), @(sd_isIncremental), OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
- (BOOL)sd_isIncremental {
NSNumber *value = objc_getAssociatedObject(self, @selector(sd_isIncremental));
return value.boolValue;
}
@end
#endif

View File

@ -12,8 +12,24 @@
@interface UIImage (WebP)
/**
Create a image from the WebP data.
This may create animated image if the data is Animated WebP.
@param data The WebP data
@return The created image
*/
+ (nullable UIImage *)sd_imageWithWebPData:(nullable NSData *)data;
/**
Create a image from the WebP data.
@param data The WebP data
@param firstFrameOnly Even if the image data is Animated WebP format, decode the first frame only
@return The created image
*/
+ (nullable UIImage *)sd_imageWithWebPData:(nullable NSData *)data firstFrameOnly:(BOOL)firstFrameOnly;
@end
#endif

View File

@ -14,10 +14,15 @@
@implementation UIImage (WebP)
+ (nullable UIImage *)sd_imageWithWebPData:(nullable NSData *)data {
return [self sd_imageWithWebPData:data firstFrameOnly:NO];
}
+ (nullable UIImage *)sd_imageWithWebPData:(nullable NSData *)data firstFrameOnly:(BOOL)firstFrameOnly {
if (!data) {
return nil;
}
return [[SDWebImageWebPCoder sharedCoder] decodedImageWithData:data];
SDWebImageCoderOptions *options = @{SDWebImageCoderDecodeFirstFrameOnly : @(firstFrameOnly)};
return [[SDWebImageWebPCoder sharedCoder] decodedImageWithData:data options:options];
}
@end

View File

@ -34,6 +34,7 @@
32B99EAC203B36650017FD66 /* SDWebImageDownloaderTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 1E3C51E819B46E370092B5E6 /* SDWebImageDownloaderTests.m */; };
32B99EAD203B36690017FD66 /* SDWebImagePrefetcherTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 4369C1D01D97F80F007E863A /* SDWebImagePrefetcherTests.m */; };
32B99EAE203B366C0017FD66 /* SDWebCacheCategoriesTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 4369C2731D9804B1007E863A /* SDWebCacheCategoriesTests.m */; };
32A571562037DB2D002EDAAE /* SDAnimatedImageTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 32A571552037DB2D002EDAAE /* SDAnimatedImageTest.m */; };
32E6F0321F3A1B4700A945E6 /* SDWebImageTestDecoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 32E6F0311F3A1B4700A945E6 /* SDWebImageTestDecoder.m */; };
37D122881EC48B5E00D98CEB /* SDMockFileManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 37D122871EC48B5E00D98CEB /* SDMockFileManager.m */; };
433BBBB51D7EF5C00086B6E9 /* SDWebImageDecoderTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 433BBBB41D7EF5C00086B6E9 /* SDWebImageDecoderTests.m */; };
@ -66,6 +67,7 @@
32B99E8A203AF8690017FD66 /* SDCategoriesTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = SDCategoriesTests.m; sourceTree = "<group>"; };
32B99E92203B2DF90017FD66 /* Tests Mac.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "Tests Mac.xctest"; sourceTree = BUILT_PRODUCTS_DIR; };
32B99E96203B2DF90017FD66 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
32A571552037DB2D002EDAAE /* SDAnimatedImageTest.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = SDAnimatedImageTest.m; sourceTree = "<group>"; };
32E6F0301F3A1B4700A945E6 /* SDWebImageTestDecoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SDWebImageTestDecoder.h; sourceTree = "<group>"; };
32E6F0311F3A1B4700A945E6 /* SDWebImageTestDecoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SDWebImageTestDecoder.m; sourceTree = "<group>"; };
37D122861EC48B5E00D98CEB /* SDMockFileManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SDMockFileManager.h; sourceTree = "<group>"; };
@ -195,6 +197,7 @@
3254C31F20641077008D1022 /* SDWebImageTransformerTests.m */,
4369C2731D9804B1007E863A /* SDWebCacheCategoriesTests.m */,
32B99E8A203AF8690017FD66 /* SDCategoriesTests.m */,
32A571552037DB2D002EDAAE /* SDAnimatedImageTest.m */,
37D122861EC48B5E00D98CEB /* SDMockFileManager.h */,
37D122871EC48B5E00D98CEB /* SDMockFileManager.m */,
2D7AF05E1F329763000083C2 /* SDTestCase.h */,
@ -469,6 +472,7 @@
files = (
32E6F0321F3A1B4700A945E6 /* SDWebImageTestDecoder.m in Sources */,
3254C32020641077008D1022 /* SDWebImageTransformerTests.m in Sources */,
32A571562037DB2D002EDAAE /* SDAnimatedImageTest.m in Sources */,
1E3C51E919B46E370092B5E6 /* SDWebImageDownloaderTests.m in Sources */,
37D122881EC48B5E00D98CEB /* SDMockFileManager.m in Sources */,
4369C2741D9804B1007E863A /* SDWebCacheCategoriesTests.m in Sources */,

View File

@ -0,0 +1,238 @@
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
* (c) Matt Galloway
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import "SDTestCase.h"
#import <SDWebImage/SDAnimatedImage.h>
#import <SDWebImage/SDAnimatedImageView.h>
#import <SDWebImage/SDWebImageGIFCoder.h>
#import <SDWebImage/UIImage+WebCache.h>
#import <SDWebImage/SDAnimatedImageView+WebCache.h>
#import <KVOController/KVOController.h>
static const NSUInteger kTestGIFFrameCount = 5; // local TestImage.gif loop count
@interface SDAnimatedImageTest : SDTestCase
@property (nonatomic, strong) UIWindow *window;
@end
@implementation SDAnimatedImageTest
- (void)tearDown {
[[SDImageCache sharedImageCache] removeImageForKey:kTestGIFURL fromDisk:YES withCompletion:nil];
[[SDImageCache sharedImageCache] removeImageForKey:kTestWebPURL fromDisk:YES withCompletion:nil];
}
- (void)test01AnimatedImageInitWithData {
NSData *invalidData = [@"invalid data" dataUsingEncoding:NSUTF8StringEncoding];
SDAnimatedImage *image = [[SDAnimatedImage alloc] initWithData:invalidData];
expect(image).beNil();
NSData *validData = [self testGIFData];
image = [[SDAnimatedImage alloc] initWithData:validData scale:2];
expect(image).notTo.beNil(); // image
expect(image.scale).equal(2); // scale
expect(image.animatedImageData).equal(validData); // data
expect(image.animatedImageFormat).equal(SDImageFormatGIF); // format
expect(image.animatedImageLoopCount).equal(0); // loop count
expect(image.animatedImageFrameCount).equal(kTestGIFFrameCount); // frame count
expect([image animatedImageFrameAtIndex:1]).notTo.beNil(); // 1 frame
}
- (void)test02AnimatedImageInitWithContentsOfFile {
SDAnimatedImage *image = [[SDAnimatedImage alloc] initWithContentsOfFile:[self testGIFPath]];
expect(image).notTo.beNil();
expect(image.scale).equal(1); // scale
// enough, other can be test with InitWithData
}
- (void)test03AnimatedImageInitWithAnimatedCoder {
NSData *validData = [self testGIFData];
SDWebImageGIFCoder *coder = [[SDWebImageGIFCoder alloc] initWithAnimatedImageData:validData];
SDAnimatedImage *image = [[SDAnimatedImage alloc] initWithAnimatedCoder:coder scale:1];
expect(image).notTo.beNil();
// enough, other can be test with InitWithData
}
- (void)test04AnimatedImageImageNamed {
SDAnimatedImage *image = [SDAnimatedImage imageNamed:@"TestImage.gif" inBundle:[NSBundle bundleForClass:[self class]] compatibleWithTraitCollection:nil];
expect(image).notTo.beNil();
expect([image.animatedImageData isEqualToData:[self testGIFData]]).beTruthy();
}
- (void)test05AnimatedImagePreloadFrames {
NSData *validData = [self testGIFData];
SDAnimatedImage *image = [SDAnimatedImage imageWithData:validData];
// Preload all frames
[image preloadAllFrames];
NSArray *loadedAnimatedImageFrames = [image valueForKey:@"loadedAnimatedImageFrames"]; // Access the internal property, only for test and may be changed in the future
expect(loadedAnimatedImageFrames.count).equal(kTestGIFFrameCount);
// Test one frame
UIImage *frame = [image animatedImageFrameAtIndex:0];
expect(frame).notTo.beNil();
}
- (void)test06AnimatedImageViewSetImage {
SDAnimatedImageView *imageView = [SDAnimatedImageView new];
UIImage *image = [UIImage imageWithData:[self testJPEGData]];
imageView.image = image;
expect(imageView.image).notTo.beNil();
expect(imageView.currentFrame).beNil(); // current frame
}
- (void)test07AnimatedImageViewSetAnimatedImage {
SDAnimatedImageView *imageView = [SDAnimatedImageView new];
SDAnimatedImage *image = [SDAnimatedImage imageWithData:[self testAnimatedWebPData]];
imageView.image = image;
expect(imageView.image).notTo.beNil();
expect(imageView.currentFrame).notTo.beNil(); // current frame
}
- (void)test08AnimatedImageViewRendering {
XCTestExpectation *expectation = [self expectationWithDescription:@"test SDAnimatedImageView rendering"];
SDAnimatedImageView *imageView = [[SDAnimatedImageView alloc] init];
[self.window addSubview:imageView];
NSMutableDictionary *frames = [NSMutableDictionary dictionaryWithCapacity:kTestGIFFrameCount];
[self.KVOController observe:imageView keyPaths:@[NSStringFromSelector(@selector(currentFrameIndex)), NSStringFromSelector(@selector(currentLoopCount))] options:NSKeyValueObservingOptionNew block:^(id _Nullable observer, id _Nonnull object, NSDictionary<NSString *,id> * _Nonnull change) {
NSUInteger frameIndex = imageView.currentFrameIndex;
NSUInteger loopCount = imageView.currentLoopCount;
[frames setObject:@(YES) forKey:@(frameIndex)];
BOOL framesRendered = NO;
if (frames.count >= kTestGIFFrameCount) {
// All frames rendered
framesRendered = YES;
}
BOOL loopFinished = NO;
if (loopCount >= 1) {
// One loop finished
loopFinished = YES;
}
if (framesRendered && loopFinished) {
[imageView stopAnimating];
[expectation fulfill];
}
}];
SDAnimatedImage *image = [SDAnimatedImage imageWithData:[self testGIFData]];
imageView.image = image;
[self waitForExpectationsWithCommonTimeout];
}
- (void)test09AnimatedImageViewSetProgressiveAnimatedImage {
NSData *gifData = [self testGIFData];
SDWebImageGIFCoder *progressiveCoder = [[SDWebImageGIFCoder alloc] initIncremental];
// simulate progressive decode, pass partial data
NSData *partialData = [gifData subdataWithRange:NSMakeRange(0, gifData.length - 1)];
[progressiveCoder updateIncrementalData:partialData finished:NO];
SDAnimatedImage *partialImage = [[SDAnimatedImage alloc] initWithAnimatedCoder:progressiveCoder scale:1];
partialImage.sd_isIncremental = YES;
SDAnimatedImageView *imageView = [[SDAnimatedImageView alloc] init];
imageView.image = partialImage;
BOOL isProgressive = [[imageView valueForKey:@"isProgressive"] boolValue];
expect(isProgressive).equal(YES);
// pass full data
[progressiveCoder updateIncrementalData:gifData finished:YES];
SDAnimatedImage *fullImage = [[SDAnimatedImage alloc] initWithAnimatedCoder:progressiveCoder scale:1];
imageView.image = fullImage;
isProgressive = [[imageView valueForKey:@"isProgressive"] boolValue];
expect(isProgressive).equal(NO);
}
- (void)test10AnimatedImageViewCategory {
XCTestExpectation *expectation = [self expectationWithDescription:@"test SDAnimatedImageView view category"];
SDAnimatedImageView *imageView = [SDAnimatedImageView new];
NSURL *testURL = [NSURL URLWithString:kTestWebPURL];
[imageView sd_setImageWithURL:testURL completed:^(UIImage * _Nullable image, NSError * _Nullable error, SDImageCacheType cacheType, NSURL * _Nullable imageURL) {
expect(error).to.beNil();
expect(image).notTo.beNil();
expect([image isKindOfClass:[SDAnimatedImage class]]).beTruthy();
[expectation fulfill];
}];
[self waitForExpectationsWithCommonTimeout];
}
- (void)test11AnimatedImageViewCategoryProgressive {
XCTestExpectation *expectation = [self expectationWithDescription:@"test SDAnimatedImageView view category"];
SDAnimatedImageView *imageView = [SDAnimatedImageView new];
NSURL *testURL = [NSURL URLWithString:kTestGIFURL];
[imageView sd_setImageWithURL:testURL placeholderImage:nil options:SDWebImageProgressiveDownload progress:^(NSInteger receivedSize, NSInteger expectedSize, NSURL * _Nullable targetURL) {
dispatch_async(dispatch_get_main_queue(), ^{
UIImage *image = imageView.image;
// Progressive image may be nil when download data is not enough
if (image) {
expect(image.sd_isIncremental).beTruthy();
BOOL isProgressive = [[imageView valueForKey:@"isProgressive"] boolValue];
expect(isProgressive).equal(YES);
}
});
} completed:^(UIImage * _Nullable image, NSError * _Nullable error, SDImageCacheType cacheType, NSURL * _Nullable imageURL) {
expect(error).to.beNil();
expect(image).notTo.beNil();
expect([image isKindOfClass:[SDAnimatedImage class]]).beTruthy();
[expectation fulfill];
}];
[self waitForExpectationsWithCommonTimeout];
}
#pragma mark - Helper
- (UIWindow *)window {
if (!_window) {
_window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
}
return _window;
}
- (NSString *)testGIFPath {
NSBundle *testBundle = [NSBundle bundleForClass:[self class]];
NSString *testPath = [testBundle pathForResource:@"TestImage" ofType:@"gif"];
return testPath;
}
- (NSData *)testGIFData {
NSData *testData = [NSData dataWithContentsOfFile:[self testGIFPath]];
return testData;
}
- (NSString *)testAnimatedWebPPath {
NSBundle *testBundle = [NSBundle bundleForClass:[self class]];
NSString *testPath = [testBundle pathForResource:@"TestImageAnimated" ofType:@"webp"];
return testPath;
}
- (NSData *)testAnimatedWebPData {
return [NSData dataWithContentsOfFile:[self testAnimatedWebPPath]];
}
- (NSString *)testJPEGPath {
NSBundle *testBundle = [NSBundle bundleForClass:[self class]];
NSString *testPath = [testBundle pathForResource:@"TestImage" ofType:@"jpg"];
return testPath;
}
- (NSData *)testJPEGData {
NSData *testData = [NSData dataWithContentsOfFile:[self testJPEGPath]];
return testData;
}
@end

View File

@ -17,6 +17,8 @@ FOUNDATION_EXPORT const int64_t kAsyncTestTimeout;
FOUNDATION_EXPORT const int64_t kMinDelayNanosecond;
FOUNDATION_EXPORT NSString * _Nonnull const kTestJpegURL;
FOUNDATION_EXPORT NSString * _Nonnull const kTestPNGURL;
FOUNDATION_EXPORT NSString * _Nonnull const kTestGIFURL;
FOUNDATION_EXPORT NSString * _Nonnull const kTestWebPURL;
@interface SDTestCase : XCTestCase

View File

@ -13,6 +13,8 @@ const int64_t kAsyncTestTimeout = 5;
const int64_t kMinDelayNanosecond = NSEC_PER_MSEC * 100; // 0.1s
NSString *const kTestJpegURL = @"http://via.placeholder.com/50x50.jpg";
NSString *const kTestPNGURL = @"http://via.placeholder.com/50x50.png";
NSString *const kTestGIFURL = @"https://media.giphy.com/media/UEsrLdv7ugRTq/giphy.gif";
NSString *const kTestWebPURL = @"http://littlesvr.ca/apng/images/SteamEngine.webp";
@implementation SDTestCase

View File

@ -135,7 +135,7 @@
expect([coder canDecodeFromData:inputImageData]).to.beTruthy();
// 2 - decode from NSData to UIImage and check it
UIImage *inputImage = [coder decodedImageWithData:inputImageData];
UIImage *inputImage = [coder decodedImageWithData:inputImageData options:nil];
expect(inputImage).toNot.beNil();
if (isAnimated) {
@ -157,9 +157,9 @@
expect([coder canEncodeToFormat:inputImageFormat]).to.beTruthy();
// 4 - encode from UIImage to NSData using the inputImageFormat and check it
NSData *outputImageData = [coder encodedDataWithImage:inputImage format:inputImageFormat];
NSData *outputImageData = [coder encodedDataWithImage:inputImage format:inputImageFormat options:nil];
expect(outputImageData).toNot.beNil();
UIImage *outputImage = [coder decodedImageWithData:outputImageData];
UIImage *outputImage = [coder decodedImageWithData:outputImageData options:nil];
expect(outputImage.size).to.equal(inputImage.size);
expect(outputImage.scale).to.equal(inputImage.scale);
expect(outputImage.images.count).to.equal(inputImage.images.count);

View File

@ -379,11 +379,6 @@
if (![data1 isEqualToData:data2]) {
XCTFail(@"The image data is not equal to cutom decoder, check -[SDWebImageTestDecoder decodedImageWithData:]");
}
NSString *str1 = @"TestDecompress";
NSString *str2 = [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
if (![str1 isEqualToString:str2]) {
XCTFail(@"The image data is not modified by the custom decoder, check -[SDWebImageTestDecoder decompressedImageWithImage:data:options:]");
}
[[SDWebImageCodersManager sharedManager] removeCoder:testDecoder];
[expectation fulfill];
}];

View File

@ -10,6 +10,6 @@
#import <Foundation/Foundation.h>
#import <SDWebImage/SDWebImageCoder.h>
@interface SDWebImageTestDecoder : NSObject <SDWebImageCoder>
@interface SDWebImageTestDecoder : NSObject <SDWebImageProgressiveCoder>
@end

View File

@ -19,29 +19,35 @@
return YES;
}
- (UIImage *)decodedImageWithData:(NSData *)data {
- (UIImage *)decodedImageWithData:(NSData *)data options:(nullable SDWebImageCoderOptions *)options {
NSString * testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"jpg"];
UIImage *image = [[UIImage alloc] initWithContentsOfFile:testImagePath];
return image;
}
- (UIImage *)incrementallyDecodedImageWithData:(NSData *)data finished:(BOOL)finished {
- (instancetype)initIncremental
{
self = [super init];
if (self) {
}
return self;
}
- (void)updateIncrementalData:(NSData *)data finished:(BOOL)finished {
return;
}
- (UIImage *)incrementalDecodedImageWithOptions:(SDWebImageCoderOptions *)options {
NSString * testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"gif"];
UIImage *image = [[UIImage alloc] initWithContentsOfFile:testImagePath];
return image;
}
- (UIImage *)decompressedImageWithImage:(UIImage *)image
data:(NSData *__autoreleasing _Nullable *)data
options:(nullable NSDictionary<NSString*, NSObject*>*)optionsDict {
NSString *testString = @"TestDecompress";
NSData *testData = [testString dataUsingEncoding:NSUTF8StringEncoding];
*data = testData;
return image;
- (BOOL)canIncrementalDecodeFromData:(NSData *)data {
return YES;
}
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format {
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format options:(nullable SDWebImageCoderOptions *)options {
NSString *testString = @"TestEncode";
NSData *data = [testString dataUsingEncoding:NSUTF8StringEncoding];
return data;

View File

@ -44,6 +44,9 @@ FOUNDATION_EXPORT const unsigned char WebImageVersionString[];
#import <SDWebImage/MKAnnotationView+WebCache.h>
#endif
#import <SDWebImage/SDAnimatedImage.h>
#import <SDWebImage/SDAnimatedImageView.h>
#import <SDWebImage/SDAnimatedImageView+WebCache.h>
#import <SDWebImage/SDWebImageCodersManager.h>
#import <SDWebImage/SDWebImageCoder.h>
#import <SDWebImage/SDWebImageWebPCoder.h>