Introduce SDAnimatedImage and SDAnimatedImageView for high performance animated image rendering on iOS & macOS

This commit is contained in:
DreamPiggy 2017-12-21 11:43:46 +08:00
parent 34cca584c2
commit 87bbcdc46f
35 changed files with 2332 additions and 537 deletions

View File

@ -8,11 +8,11 @@
#import "DetailViewController.h"
#import <SDWebImage/UIView+WebCache.h>
#import <SDWebImage/FLAnimatedImageView+WebCache.h>
#import <SDWebImage/SDAnimatedImageView+WebCache.h>
@interface DetailViewController ()
@property (strong, nonatomic) IBOutlet FLAnimatedImageView *imageView;
@property (strong, nonatomic) IBOutlet SDAnimatedImageView *imageView;
@end

View File

@ -8,13 +8,13 @@
#import "MasterViewController.h"
#import "DetailViewController.h"
#import <SDWebImage/FLAnimatedImageView+WebCache.h>
#import <SDWebImage/SDAnimatedImageView+WebCache.h>
#import <SDWebImage/UIView+WebCache.h>
@interface MyCustomTableViewCell : UITableViewCell
@property (nonatomic, strong) UILabel *customTextLabel;
@property (nonatomic, strong) FLAnimatedImageView *customImageView;
@property (nonatomic, strong) SDAnimatedImageView *customImageView;
@end
@ -22,7 +22,7 @@
- (instancetype)initWithStyle:(UITableViewCellStyle)style reuseIdentifier:(NSString *)reuseIdentifier {
if (self = [super initWithStyle:style reuseIdentifier:reuseIdentifier]) {
_customImageView = [[FLAnimatedImageView alloc] initWithFrame:CGRectMake(20.0, 2.0, 60.0, 40.0)];
_customImageView = [[SDAnimatedImageView alloc] initWithFrame:CGRectMake(20.0, 2.0, 60.0, 40.0)];
[self.contentView addSubview:_customImageView];
_customTextLabel = [[UILabel alloc] initWithFrame:CGRectMake(100.0, 12.0, 200, 20.0)];
[self.contentView addSubview:_customTextLabel];

View File

@ -1,8 +1,12 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="10117" systemVersion="15E65" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none">
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="13771" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" colorMatched="YES">
<device id="retina4_7" orientation="portrait">
<adaptation id="fullscreen"/>
</device>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="10085"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13772"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="DetailViewController">
@ -16,12 +20,12 @@
<rect key="frame" x="0.0" y="0.0" width="320" height="460"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<imageView userInteractionEnabled="NO" contentMode="scaleAspectFit" id="7" customClass="FLAnimatedImageView">
<imageView userInteractionEnabled="NO" contentMode="scaleAspectFit" id="7" customClass="SDAnimatedImageView">
<rect key="frame" x="0.0" y="0.0" width="320" height="460"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
</imageView>
</subviews>
<color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<freeformSimulatedSizeMetrics key="simulatedDestinationMetrics"/>
</view>
</objects>

View File

@ -687,12 +687,12 @@
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMinY="YES"/>
<imageCell key="cell" refusesFirstResponder="YES" alignment="left" imageScaling="proportionallyDown" id="f0P-c9-GMe"/>
</imageView>
<imageView horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="JIp-Or-vBM">
<imageView horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="JIp-Or-vBM" customClass="SDAnimatedImageView">
<rect key="frame" x="20" y="116" width="204" height="128"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMinY="YES"/>
<imageCell key="cell" refusesFirstResponder="YES" alignment="left" imageScaling="proportionallyDown" id="NJq-m3-LlB"/>
</imageView>
<imageView horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="khI-tY-l0M">
<imageView horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="khI-tY-l0M" customClass="SDAnimatedImageView">
<rect key="frame" x="256" y="116" width="204" height="128"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMinY="YES"/>
<imageCell key="cell" refusesFirstResponder="YES" alignment="left" imageScaling="proportionallyDown" id="WbV-Do-9qy"/>

View File

@ -7,6 +7,8 @@
*/
#import "ViewController.h"
#import <SDWebImage/UIImageView+WebCache.h>
#import <SDWebImage/SDAnimatedImageView+WebCache.h>
@import SDWebImage;
@ -14,8 +16,8 @@
@property (weak) IBOutlet NSImageView *imageView1;
@property (weak) IBOutlet NSImageView *imageView2;
@property (weak) IBOutlet NSImageView *imageView3;
@property (weak) IBOutlet NSImageView *imageView4;
@property (weak) IBOutlet SDAnimatedImageView *imageView3;
@property (weak) IBOutlet SDAnimatedImageView *imageView4;
@property (weak) IBOutlet NSButton *clearCacheButton;
@end
@ -25,22 +27,16 @@
- (void)viewDidLoad {
[super viewDidLoad];
//Add GIF coder for better animated image rendering
[[SDWebImageCodersManager sharedManager] addCoder:[SDWebImageGIFCoder sharedCoder]];
// NOTE: https links or authentication ones do not work (there is a crash)
// Do any additional setup after loading the view.
// For animated GIF rendering, set `animates` to YES or only the first frame will be shown
self.imageView1.animates = YES;
self.imageView3.animates = YES;
self.imageView4.animates = YES;
self.imageView1.sd_imageIndicator = SDWebImageProgressIndicator.defaultIndicator;
[self.imageView1 sd_setImageWithURL:[NSURL URLWithString:@"http://assets.sbnation.com/assets/2512203/dogflops.gif"]];
[self.imageView1 sd_setImageWithURL:[NSURL URLWithString:@"http://s3.amazonaws.com/fast-image-cache/demo-images/FICDDemoImage001.jpg"]];
[self.imageView2 sd_setImageWithURL:[NSURL URLWithString:@"http://www.ioncannon.net/wp-content/uploads/2011/06/test2.webp"]];
[self.imageView3 sd_setImageWithURL:[NSURL URLWithString:@"http://littlesvr.ca/apng/images/SteamEngine.webp"]];
[self.imageView3 sd_setImageWithURL:[NSURL URLWithString:@"https://raw.githubusercontent.com/liyong03/YLGIFImage/master/YLGIFImageDemo/YLGIFImageDemo/joy.gif"]];
self.imageView4.wantsLayer = YES;
self.imageView4.sd_imageTransition = SDWebImageTransition.fadeTransition;
[self.imageView4 sd_setImageWithURL:[NSURL URLWithString:@"http://s3.amazonaws.com/fast-image-cache/demo-images/FICDDemoImage001.jpg"] placeholderImage:nil options:SDWebImageForceTransition];
[self.imageView4 sd_setImageWithURL:[NSURL URLWithString:@"http://littlesvr.ca/apng/images/SteamEngine.webp"] placeholderImage:nil options:SDWebImageForceTransition];
self.clearCacheButton.target = self;
self.clearCacheButton.action = @selector(clearCacheButtonClicked:);

View File

@ -1,7 +1,12 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder.AppleTV.Storyboard" version="3.0" toolsVersion="10117" systemVersion="15F34" targetRuntime="AppleTV" propertyAccessControl="none" useAutolayout="YES" initialViewController="BYZ-38-t0r">
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder.AppleTV.Storyboard" version="3.0" toolsVersion="13771" targetRuntime="AppleTV" propertyAccessControl="none" useAutolayout="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<device id="appleTV" orientation="landscape">
<adaptation id="light"/>
</device>
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="10085"/>
<deployment identifier="tvOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13772"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
@ -16,20 +21,24 @@
<rect key="frame" x="0.0" y="0.0" width="1920" height="1080"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="xeq-iS-C6S" customClass="FLAnimatedImageView">
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="xeq-iS-C6S">
<rect key="frame" x="20" y="20" width="300" height="200"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
</imageView>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="c5h-Lg-aZx" customClass="FLAnimatedImageView">
<rect key="frame" x="636" y="20" width="300" height="200"/>
</imageView>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="Udx-nH-mbX" customClass="FLAnimatedImageView">
<rect key="frame" x="944" y="20" width="300" height="200"/>
</imageView>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="xvm-ne-7D9" customClass="FLAnimatedImageView">
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="xvm-ne-7D9">
<rect key="frame" x="328" y="20" width="300" height="200"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
</imageView>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="c5h-Lg-aZx" customClass="SDAnimatedImageView">
<rect key="frame" x="636" y="20" width="300" height="200"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
</imageView>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="Udx-nH-mbX" customClass="SDAnimatedImageView">
<rect key="frame" x="944" y="20" width="300" height="200"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
</imageView>
</subviews>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="calibratedWhite"/>
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.0" colorSpace="custom" customColorSpace="sRGB"/>
</view>
<connections>
<outlet property="imageView1" destination="xeq-iS-C6S" id="4gp-UN-VjW"/>

View File

@ -7,14 +7,15 @@
*/
#import "ViewController.h"
#import <SDWebImage/FLAnimatedImageView+WebCache.h>
#import <SDWebImage/UIImageView+WebCache.h>
#import <SDWebImage/SDAnimatedImageView+WebCache.h>
@interface ViewController ()
@property (weak, nonatomic) IBOutlet FLAnimatedImageView *imageView1;
@property (weak, nonatomic) IBOutlet FLAnimatedImageView *imageView2;
@property (weak, nonatomic) IBOutlet FLAnimatedImageView *imageView3;
@property (weak, nonatomic) IBOutlet FLAnimatedImageView *imageView4;
@property (weak, nonatomic) IBOutlet UIImageView *imageView1;
@property (weak, nonatomic) IBOutlet UIImageView *imageView2;
@property (weak, nonatomic) IBOutlet SDAnimatedImageView *imageView3;
@property (weak, nonatomic) IBOutlet SDAnimatedImageView *imageView4;
@end
@ -25,10 +26,10 @@
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
[self.imageView1 sd_setImageWithURL:[NSURL URLWithString:@"http://assets.sbnation.com/assets/2512203/dogflops.gif"]];
[self.imageView1 sd_setImageWithURL:[NSURL URLWithString:@"http://s3.amazonaws.com/fast-image-cache/demo-images/FICDDemoImage001.jpg"]];
[self.imageView2 sd_setImageWithURL:[NSURL URLWithString:@"http://www.ioncannon.net/wp-content/uploads/2011/06/test2.webp"]];
[self.imageView3 sd_setImageWithURL:[NSURL URLWithString:@"http://s3.amazonaws.com/fast-image-cache/demo-images/FICDDemoImage000.jpg"]];
[self.imageView4 sd_setImageWithURL:[NSURL URLWithString:@"http://s3.amazonaws.com/fast-image-cache/demo-images/FICDDemoImage001.jpg"]];
[self.imageView3 sd_setImageWithURL:[NSURL URLWithString:@"https://raw.githubusercontent.com/liyong03/YLGIFImage/master/YLGIFImageDemo/YLGIFImageDemo/joy.gif"]];
[self.imageView4 sd_setImageWithURL:[NSURL URLWithString:@"http://littlesvr.ca/apng/images/SteamEngine.webp"]];
}
- (void)didReceiveMemoryWarning {

View File

@ -316,6 +316,42 @@
323F8C1D1F38EF770092B609 /* muxread.c in Sources */ = {isa = PBXBuildFile; fileRef = 323F8B3D1F38EF770092B609 /* muxread.c */; };
323F8C1E1F38EF770092B609 /* muxread.c in Sources */ = {isa = PBXBuildFile; fileRef = 323F8B3D1F38EF770092B609 /* muxread.c */; };
323F8C1F1F38EF770092B609 /* muxread.c in Sources */ = {isa = PBXBuildFile; fileRef = 323F8B3D1F38EF770092B609 /* muxread.c */; };
3248475D201775F600AF9E5A /* SDAnimatedImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 32484757201775F600AF9E5A /* SDAnimatedImageView.m */; };
3248475E201775F600AF9E5A /* SDAnimatedImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 32484757201775F600AF9E5A /* SDAnimatedImageView.m */; };
3248475F201775F600AF9E5A /* SDAnimatedImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 32484757201775F600AF9E5A /* SDAnimatedImageView.m */; };
32484760201775F600AF9E5A /* SDAnimatedImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 32484757201775F600AF9E5A /* SDAnimatedImageView.m */; };
32484761201775F600AF9E5A /* SDAnimatedImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 32484757201775F600AF9E5A /* SDAnimatedImageView.m */; };
32484762201775F600AF9E5A /* SDAnimatedImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 32484757201775F600AF9E5A /* SDAnimatedImageView.m */; };
32484763201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484764201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484765201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484766201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484767201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484768201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484769201775F600AF9E5A /* SDAnimatedImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484759201775F600AF9E5A /* SDAnimatedImageView.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248476A201775F600AF9E5A /* SDAnimatedImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484759201775F600AF9E5A /* SDAnimatedImageView.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248476B201775F600AF9E5A /* SDAnimatedImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484759201775F600AF9E5A /* SDAnimatedImageView.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248476C201775F600AF9E5A /* SDAnimatedImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484759201775F600AF9E5A /* SDAnimatedImageView.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248476D201775F600AF9E5A /* SDAnimatedImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484759201775F600AF9E5A /* SDAnimatedImageView.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248476E201775F600AF9E5A /* SDAnimatedImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = 32484759201775F600AF9E5A /* SDAnimatedImageView.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248476F201775F600AF9E5A /* SDAnimatedImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475A201775F600AF9E5A /* SDAnimatedImage.m */; };
32484770201775F600AF9E5A /* SDAnimatedImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475A201775F600AF9E5A /* SDAnimatedImage.m */; };
32484771201775F600AF9E5A /* SDAnimatedImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475A201775F600AF9E5A /* SDAnimatedImage.m */; };
32484772201775F600AF9E5A /* SDAnimatedImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475A201775F600AF9E5A /* SDAnimatedImage.m */; };
32484773201775F600AF9E5A /* SDAnimatedImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475A201775F600AF9E5A /* SDAnimatedImage.m */; };
32484774201775F600AF9E5A /* SDAnimatedImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475A201775F600AF9E5A /* SDAnimatedImage.m */; };
32484775201775F600AF9E5A /* SDAnimatedImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 3248475B201775F600AF9E5A /* SDAnimatedImage.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484776201775F600AF9E5A /* SDAnimatedImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 3248475B201775F600AF9E5A /* SDAnimatedImage.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484777201775F600AF9E5A /* SDAnimatedImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 3248475B201775F600AF9E5A /* SDAnimatedImage.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484778201775F600AF9E5A /* SDAnimatedImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 3248475B201775F600AF9E5A /* SDAnimatedImage.h */; settings = {ATTRIBUTES = (Public, ); }; };
32484779201775F600AF9E5A /* SDAnimatedImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 3248475B201775F600AF9E5A /* SDAnimatedImage.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248477A201775F600AF9E5A /* SDAnimatedImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 3248475B201775F600AF9E5A /* SDAnimatedImage.h */; settings = {ATTRIBUTES = (Public, ); }; };
3248477B201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */; };
3248477C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */; };
3248477D201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */; };
3248477E201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */; };
3248477F201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */; };
32484780201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */ = {isa = PBXBuildFile; fileRef = 3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */; };
324DF4B4200A14DC008A84CC /* SDWebImageDefine.h in Headers */ = {isa = PBXBuildFile; fileRef = 324DF4B2200A14DC008A84CC /* SDWebImageDefine.h */; settings = {ATTRIBUTES = (Public, ); }; };
324DF4B5200A14DC008A84CC /* SDWebImageDefine.h in Headers */ = {isa = PBXBuildFile; fileRef = 324DF4B2200A14DC008A84CC /* SDWebImageDefine.h */; settings = {ATTRIBUTES = (Public, ); }; };
324DF4B6200A14DC008A84CC /* SDWebImageDefine.h in Headers */ = {isa = PBXBuildFile; fileRef = 324DF4B2200A14DC008A84CC /* SDWebImageDefine.h */; settings = {ATTRIBUTES = (Public, ); }; };
@ -1403,6 +1439,12 @@
323F8B3B1F38EF770092B609 /* muxi.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = muxi.h; sourceTree = "<group>"; };
323F8B3C1F38EF770092B609 /* muxinternal.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = muxinternal.c; sourceTree = "<group>"; };
323F8B3D1F38EF770092B609 /* muxread.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = muxread.c; sourceTree = "<group>"; };
32484757201775F600AF9E5A /* SDAnimatedImageView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SDAnimatedImageView.m; sourceTree = "<group>"; };
32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "SDAnimatedImageView+WebCache.h"; sourceTree = "<group>"; };
32484759201775F600AF9E5A /* SDAnimatedImageView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SDAnimatedImageView.h; sourceTree = "<group>"; };
3248475A201775F600AF9E5A /* SDAnimatedImage.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SDAnimatedImage.m; sourceTree = "<group>"; };
3248475B201775F600AF9E5A /* SDAnimatedImage.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SDAnimatedImage.h; sourceTree = "<group>"; };
3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "SDAnimatedImageView+WebCache.m"; sourceTree = "<group>"; };
324DF4B2200A14DC008A84CC /* SDWebImageDefine.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SDWebImageDefine.h; sourceTree = "<group>"; };
324DF4B3200A14DC008A84CC /* SDWebImageDefine.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = SDWebImageDefine.m; sourceTree = "<group>"; };
325312C6200F09910046BF1E /* SDWebImageTransition.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SDWebImageTransition.h; sourceTree = "<group>"; };
@ -1720,6 +1762,19 @@
path = mux;
sourceTree = "<group>";
};
32484756201775CE00AF9E5A /* ImageView */ = {
isa = PBXGroup;
children = (
3248475B201775F600AF9E5A /* SDAnimatedImage.h */,
3248475A201775F600AF9E5A /* SDAnimatedImage.m */,
32484759201775F600AF9E5A /* SDAnimatedImageView.h */,
32484757201775F600AF9E5A /* SDAnimatedImageView.m */,
32484758201775F600AF9E5A /* SDAnimatedImageView+WebCache.h */,
3248475C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m */,
);
name = ImageView;
sourceTree = "<group>";
};
4369C2851D9811BB007E863A /* WebCache Categories */ = {
isa = PBXGroup;
children = (
@ -1842,6 +1897,7 @@
53922DAB148C56810056699D /* Downloader */,
53922DAA148C56470056699D /* Cache */,
321E60831F38E88F00405457 /* Decoder */,
32484756201775CE00AF9E5A /* ImageView */,
53922DAC148C56DD0056699D /* Utils */,
53922DA9148C562D0056699D /* Categories */,
4369C2851D9811BB007E863A /* WebCache Categories */,
@ -2138,12 +2194,14 @@
00733A721BC4880E00A5A117 /* UIView+WebCacheOperation.h in Headers */,
80377C481F2F666300F89830 /* bit_reader_utils.h in Headers */,
80377C511F2F666300F89830 /* huffman_encode_utils.h in Headers */,
32484778201775F600AF9E5A /* SDAnimatedImage.h in Headers */,
00733A6B1BC4880E00A5A117 /* NSData+ImageContentType.h in Headers */,
325312CB200F09910046BF1E /* SDWebImageTransition.h in Headers */,
323F8C111F38EF770092B609 /* muxi.h in Headers */,
80377EC41F2F66D500F89830 /* vp8li_dec.h in Headers */,
00733A6A1BC4880E00A5A117 /* SDWebImagePrefetcher.h in Headers */,
00733A641BC4880E00A5A117 /* SDWebImageOperation.h in Headers */,
32484766201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */,
321E60A51F38E8F600405457 /* SDWebImageGIFCoder.h in Headers */,
32CF1C0A1FA496B000004BD1 /* SDWebImageCoderHelper.h in Headers */,
80377C4D1F2F666300F89830 /* endian_inl_utils.h in Headers */,
@ -2154,6 +2212,7 @@
80377EC21F2F66D500F89830 /* vp8i_dec.h in Headers */,
80377EBA1F2F66D500F89830 /* common_dec.h in Headers */,
43CE757E1CFE9427006C64D0 /* FLAnimatedImageView.h in Headers */,
3248476C201775F600AF9E5A /* SDAnimatedImageView.h in Headers */,
80377C5F1F2F666300F89830 /* utils.h in Headers */,
80377C5B1F2F666300F89830 /* rescaler_utils.h in Headers */,
323F8BF91F38EF770092B609 /* animi.h in Headers */,
@ -2205,9 +2264,11 @@
4314D1701D0E0E3B004B36C9 /* mux.h in Headers */,
321E60871F38E8C800405457 /* SDWebImageCoder.h in Headers */,
80377EA21F2F66D400F89830 /* vp8i_dec.h in Headers */,
3248476A201775F600AF9E5A /* SDAnimatedImageView.h in Headers */,
321E60951F38E8ED00405457 /* SDWebImageImageIOCoder.h in Headers */,
80377C211F2F666300F89830 /* quant_levels_dec_utils.h in Headers */,
4314D1721D0E0E3B004B36C9 /* SDWebImageCompat.h in Headers */,
32484776201775F600AF9E5A /* SDAnimatedImage.h in Headers */,
80377C251F2F666300F89830 /* random_utils.h in Headers */,
80377D4F1F2F66A700F89830 /* lossless.h in Headers */,
80377D511F2F66A700F89830 /* msa_macro.h in Headers */,
@ -2238,6 +2299,7 @@
323F8B871F38EF770092B609 /* histogram_enc.h in Headers */,
80377C1F1F2F666300F89830 /* huffman_utils.h in Headers */,
4314D17F1D0E0E3B004B36C9 /* UIButton+WebCache.h in Headers */,
32484764201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */,
4314D1811D0E0E3B004B36C9 /* UIImageView+WebCache.h in Headers */,
4314D1841D0E0E3B004B36C9 /* SDWebImageOperation.h in Headers */,
4314D1851D0E0E3B004B36C9 /* SDWebImageDownloaderOperation.h in Headers */,
@ -2266,6 +2328,7 @@
80377EC81F2F66D500F89830 /* alphai_dec.h in Headers */,
43A62A1B1D0E0A800089D7DD /* decode.h in Headers */,
321E608A1F38E8C800405457 /* SDWebImageCoder.h in Headers */,
32484767201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */,
80377C601F2F666400F89830 /* bit_reader_inl_utils.h in Headers */,
329A185D1FFF5DFD008C9A2F /* UIImage+WebCache.h in Headers */,
431BB6DC1D06D2C1006A3455 /* UIButton+WebCache.h in Headers */,
@ -2328,9 +2391,11 @@
321E60B41F38E90100405457 /* SDWebImageWebPCoder.h in Headers */,
32F7C0732030114C00873181 /* SDWebImageTransformer.h in Headers */,
431BB6FA1D06D2C1006A3455 /* SDWebImageDownloader.h in Headers */,
3248476D201775F600AF9E5A /* SDAnimatedImageView.h in Headers */,
80377DF51F2F66A800F89830 /* common_sse2.h in Headers */,
323F8BDC1F38EF770092B609 /* vp8i_enc.h in Headers */,
80377ED21F2F66D500F89830 /* vp8i_dec.h in Headers */,
32484779201775F600AF9E5A /* SDAnimatedImage.h in Headers */,
43A918681D8308FE00B3925F /* SDImageCacheConfig.h in Headers */,
);
runOnlyForDeploymentPostprocessing = 0;
@ -2358,6 +2423,7 @@
321E60991F38E8ED00405457 /* SDWebImageImageIOCoder.h in Headers */,
323F8B8B1F38EF770092B609 /* histogram_enc.h in Headers */,
4397D2C41D0DDD8C00BB2784 /* SDImageCache.h in Headers */,
3248476E201775F600AF9E5A /* SDAnimatedImageView.h in Headers */,
4397D2C51D0DDD8C00BB2784 /* UIImageView+WebCache.h in Headers */,
3290FA091FA478AF0047D20C /* SDWebImageFrame.h in Headers */,
4369C27C1D9807EC007E863A /* UIView+WebCache.h in Headers */,
@ -2371,6 +2437,7 @@
4397D2D11D0DDD8C00BB2784 /* decode.h in Headers */,
80377E481F2F66A800F89830 /* dsp.h in Headers */,
323F8BE91F38EF770092B609 /* vp8li_enc.h in Headers */,
3248477A201775F600AF9E5A /* SDAnimatedImage.h in Headers */,
329A185E1FFF5DFD008C9A2F /* UIImage+WebCache.h in Headers */,
320224BB203979BA00E9F285 /* SDAnimatedImageRep.h in Headers */,
80377E761F2F66A800F89830 /* yuv.h in Headers */,
@ -2406,6 +2473,7 @@
321E608B1F38E8C800405457 /* SDWebImageCoder.h in Headers */,
323F8B731F38EF770092B609 /* delta_palettization_enc.h in Headers */,
321E60C31F38E91700405457 /* UIImage+ForceDecode.h in Headers */,
32484768201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */,
80377E561F2F66A800F89830 /* lossless_common.h in Headers */,
4397D2E91D0DDD8C00BB2784 /* UIImage+WebP.h in Headers */,
325312CD200F09910046BF1E /* SDWebImageTransition.h in Headers */,
@ -2473,12 +2541,14 @@
4A2CAE371AB4BB7500B6BC39 /* UIView+WebCacheOperation.h in Headers */,
80377C2E1F2F666300F89830 /* bit_reader_utils.h in Headers */,
80377C371F2F666300F89830 /* huffman_encode_utils.h in Headers */,
32484777201775F600AF9E5A /* SDAnimatedImage.h in Headers */,
4A2CAE2F1AB4BB7500B6BC39 /* UIImage+MultiFormat.h in Headers */,
325312CA200F09910046BF1E /* SDWebImageTransition.h in Headers */,
323F8C101F38EF770092B609 /* muxi.h in Headers */,
80377EB41F2F66D400F89830 /* vp8li_dec.h in Headers */,
4A2CAE1A1AB4BB6400B6BC39 /* SDWebImageOperation.h in Headers */,
80377C331F2F666300F89830 /* endian_inl_utils.h in Headers */,
32484765201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */,
321E60A41F38E8F600405457 /* SDWebImageGIFCoder.h in Headers */,
32CF1C091FA496B000004BD1 /* SDWebImageCoderHelper.h in Headers */,
4A2CAE1B1AB4BB6800B6BC39 /* SDWebImageDownloader.h in Headers */,
@ -2489,6 +2559,7 @@
80377EAA1F2F66D400F89830 /* common_dec.h in Headers */,
80377C451F2F666300F89830 /* utils.h in Headers */,
80377C411F2F666300F89830 /* rescaler_utils.h in Headers */,
3248476B201775F600AF9E5A /* SDAnimatedImageView.h in Headers */,
4A2CAE311AB4BB7500B6BC39 /* UIImage+WebP.h in Headers */,
323F8BF81F38EF770092B609 /* animi.h in Headers */,
80377C351F2F666300F89830 /* filters_utils.h in Headers */,
@ -2536,6 +2607,7 @@
807A12281F89636300EC2A9B /* SDWebImageCodersManager.h in Headers */,
80377C051F2F665300F89830 /* huffman_utils.h in Headers */,
80377E881F2F66D000F89830 /* alphai_dec.h in Headers */,
32484775201775F600AF9E5A /* SDAnimatedImage.h in Headers */,
321E60941F38E8ED00405457 /* SDWebImageImageIOCoder.h in Headers */,
431738BD1CDFC2660008FEB9 /* decode.h in Headers */,
80377D0B1F2F66A100F89830 /* mips_macro.h in Headers */,
@ -2558,6 +2630,7 @@
5376131F155AD0D5005750A4 /* UIButton+WebCache.h in Headers */,
53761320155AD0D5005750A4 /* UIImageView+WebCache.h in Headers */,
530E49E816464C25002868E7 /* SDWebImageOperation.h in Headers */,
32484769201775F600AF9E5A /* SDAnimatedImageView.h in Headers */,
80377E961F2F66D000F89830 /* webpi_dec.h in Headers */,
80377BF81F2F665300F89830 /* bit_reader_inl_utils.h in Headers */,
530E49EA16464C7C002868E7 /* SDWebImageDownloaderOperation.h in Headers */,
@ -2575,6 +2648,7 @@
321E60861F38E8C800405457 /* SDWebImageCoder.h in Headers */,
321E60B01F38E90100405457 /* SDWebImageWebPCoder.h in Headers */,
80377C0D1F2F665300F89830 /* rescaler_utils.h in Headers */,
32484763201775F600AF9E5A /* SDAnimatedImageView+WebCache.h in Headers */,
80377E911F2F66D000F89830 /* vp8_dec.h in Headers */,
323F8B6E1F38EF770092B609 /* delta_palettization_enc.h in Headers */,
438096721CDFC08200DC626B /* MKAnnotationView+WebCache.h in Headers */,
@ -2885,6 +2959,7 @@
80377EBF1F2F66D500F89830 /* tree_dec.c in Sources */,
80377DD21F2F66A700F89830 /* lossless_enc_sse41.c in Sources */,
80377DB31F2F66A700F89830 /* cost_sse2.c in Sources */,
32484760201775F600AF9E5A /* SDAnimatedImageView.m in Sources */,
80377DDE1F2F66A700F89830 /* rescaler_mips32.c in Sources */,
80377DCA1F2F66A700F89830 /* filters_sse2.c in Sources */,
80377EBE1F2F66D500F89830 /* quant_dec.c in Sources */,
@ -2894,6 +2969,7 @@
80377DC11F2F66A700F89830 /* enc_mips32.c in Sources */,
80377DBC1F2F66A700F89830 /* dec_sse41.c in Sources */,
80377DCE1F2F66A700F89830 /* lossless_enc_mips32.c in Sources */,
3248477E201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */,
80377DCB1F2F66A700F89830 /* filters.c in Sources */,
80377DAA1F2F66A700F89830 /* alpha_processing_sse2.c in Sources */,
43A9186E1D8308FE00B3925F /* SDImageCacheConfig.m in Sources */,
@ -2919,6 +2995,7 @@
80377C561F2F666300F89830 /* quant_levels_utils.c in Sources */,
323F8BCF1F38EF770092B609 /* token_enc.c in Sources */,
80377DD11F2F66A700F89830 /* lossless_enc_sse2.c in Sources */,
32484772201775F600AF9E5A /* SDAnimatedImage.m in Sources */,
323F8C1D1F38EF770092B609 /* muxread.c in Sources */,
807A12311F89636300EC2A9B /* SDWebImageCodersManager.m in Sources */,
80377C491F2F666300F89830 /* bit_writer_utils.c in Sources */,
@ -3052,8 +3129,11 @@
323F8B7B1F38EF770092B609 /* frame_enc.c in Sources */,
80377D211F2F66A700F89830 /* alpha_processing_sse41.c in Sources */,
323F8B8D1F38EF770092B609 /* iterator_enc.c in Sources */,
3248475E201775F600AF9E5A /* SDAnimatedImageView.m in Sources */,
80377D481F2F66A700F89830 /* lossless_enc_sse41.c in Sources */,
32484770201775F600AF9E5A /* SDAnimatedImage.m in Sources */,
323F8BA91F38EF770092B609 /* picture_psnr_enc.c in Sources */,
3248477C201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */,
323F8C091F38EF770092B609 /* muxedit.c in Sources */,
80377D1F1F2F66A700F89830 /* alpha_processing_neon.c in Sources */,
32C0FDE82013426C001B8F2D /* SDWebImageIndicator.m in Sources */,
@ -3202,8 +3282,11 @@
80377ECC1F2F66D500F89830 /* idec_dec.c in Sources */,
323F8B7E1F38EF770092B609 /* frame_enc.c in Sources */,
80377E171F2F66A800F89830 /* lossless_enc_sse41.c in Sources */,
32484761201775F600AF9E5A /* SDAnimatedImageView.m in Sources */,
323F8B901F38EF770092B609 /* iterator_enc.c in Sources */,
32484773201775F600AF9E5A /* SDAnimatedImage.m in Sources */,
80377C611F2F666400F89830 /* bit_reader_utils.c in Sources */,
3248477F201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */,
323F8BAC1F38EF770092B609 /* picture_psnr_enc.c in Sources */,
323F8C0C1F38EF770092B609 /* muxedit.c in Sources */,
32C0FDEB2013426C001B8F2D /* SDWebImageIndicator.m in Sources */,
@ -3308,6 +3391,7 @@
80377E4C1F2F66A800F89830 /* enc_msa.c in Sources */,
80377E4E1F2F66A800F89830 /* enc_sse2.c in Sources */,
80377E6C1F2F66A800F89830 /* rescaler.c in Sources */,
32484762201775F600AF9E5A /* SDAnimatedImageView.m in Sources */,
80377EE31F2F66D500F89830 /* vp8l_dec.c in Sources */,
80377ED71F2F66D500F89830 /* alpha_dec.c in Sources */,
323F8B7F1F38EF770092B609 /* frame_enc.c in Sources */,
@ -3385,6 +3469,7 @@
323F8B5B1F38EF770092B609 /* config_enc.c in Sources */,
80377E361F2F66A800F89830 /* alpha_processing.c in Sources */,
80377E351F2F66A800F89830 /* alpha_processing_sse41.c in Sources */,
32484780201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */,
323F8B791F38EF770092B609 /* filter_enc.c in Sources */,
80377EDD1F2F66D500F89830 /* io_dec.c in Sources */,
43A918701D8308FE00B3925F /* SDImageCacheConfig.m in Sources */,
@ -3402,6 +3487,7 @@
321E60BB1F38E90100405457 /* SDWebImageWebPCoder.m in Sources */,
80377E3C1F2F66A800F89830 /* cost_mips32.c in Sources */,
80377E421F2F66A800F89830 /* dec_mips32.c in Sources */,
32484774201775F600AF9E5A /* SDAnimatedImage.m in Sources */,
4397D2AE1D0DDD8C00BB2784 /* UIImageView+HighlightedWebCache.m in Sources */,
323F8B851F38EF770092B609 /* histogram_enc.c in Sources */,
80377EE51F2F66D500F89830 /* webp_dec.c in Sources */,
@ -3492,6 +3578,7 @@
80377EAF1F2F66D400F89830 /* tree_dec.c in Sources */,
4A2CAE281AB4BB7500B6BC39 /* MKAnnotationView+WebCache.m in Sources */,
4A2CAE261AB4BB7000B6BC39 /* SDWebImagePrefetcher.m in Sources */,
3248475F201775F600AF9E5A /* SDAnimatedImageView.m in Sources */,
80377C441F2F666300F89830 /* utils.c in Sources */,
80377D8D1F2F66A700F89830 /* lossless_enc_sse41.c in Sources */,
80377EAE1F2F66D400F89830 /* quant_dec.c in Sources */,
@ -3501,6 +3588,7 @@
80377D851F2F66A700F89830 /* filters_sse2.c in Sources */,
80377D711F2F66A700F89830 /* dec_clip_tables.c in Sources */,
43A9186D1D8308FE00B3925F /* SDImageCacheConfig.m in Sources */,
3248477D201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */,
80377D7C1F2F66A700F89830 /* enc_mips32.c in Sources */,
80377D771F2F66A700F89830 /* dec_sse41.c in Sources */,
80377D891F2F66A700F89830 /* lossless_enc_mips32.c in Sources */,
@ -3526,6 +3614,7 @@
4A2CAE191AB4BB6400B6BC39 /* SDWebImageCompat.m in Sources */,
80377DA11F2F66A700F89830 /* upsampling_sse2.c in Sources */,
323F8BCE1F38EF770092B609 /* token_enc.c in Sources */,
32484771201775F600AF9E5A /* SDAnimatedImage.m in Sources */,
80377C3C1F2F666300F89830 /* quant_levels_utils.c in Sources */,
323F8C1C1F38EF770092B609 /* muxread.c in Sources */,
807A12301F89636300EC2A9B /* SDWebImageCodersManager.m in Sources */,
@ -3646,6 +3735,7 @@
5376130C155AD0D5005750A4 /* SDWebImageManager.m in Sources */,
5376130D155AD0D5005750A4 /* SDWebImagePrefetcher.m in Sources */,
80377C101F2F665300F89830 /* utils.c in Sources */,
3248475D201775F600AF9E5A /* SDAnimatedImageView.m in Sources */,
80377D031F2F66A100F89830 /* lossless_enc_sse41.c in Sources */,
80377E8E1F2F66D000F89830 /* quant_dec.c in Sources */,
80377CE41F2F66A100F89830 /* cost_sse2.c in Sources */,
@ -3655,6 +3745,7 @@
80377CE71F2F66A100F89830 /* dec_clip_tables.c in Sources */,
43A9186B1D8308FE00B3925F /* SDImageCacheConfig.m in Sources */,
80377CF21F2F66A100F89830 /* enc_mips32.c in Sources */,
3248477B201775F600AF9E5A /* SDAnimatedImageView+WebCache.m in Sources */,
80377CED1F2F66A100F89830 /* dec_sse41.c in Sources */,
80377CFF1F2F66A100F89830 /* lossless_enc_mips32.c in Sources */,
80377CFC1F2F66A100F89830 /* filters.c in Sources */,
@ -3680,6 +3771,7 @@
80377D171F2F66A100F89830 /* upsampling_sse2.c in Sources */,
323F8BCC1F38EF770092B609 /* token_enc.c in Sources */,
80377C081F2F665300F89830 /* quant_levels_utils.c in Sources */,
3248476F201775F600AF9E5A /* SDAnimatedImage.m in Sources */,
323F8C1A1F38EF770092B609 /* muxread.c in Sources */,
807A122E1F89636300EC2A9B /* SDWebImageCodersManager.m in Sources */,
80377BFB1F2F665300F89830 /* bit_writer_utils.c in Sources */,

View File

@ -0,0 +1,66 @@
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import "SDWebImageCompat.h"
#import "NSData+ImageContentType.h"
@protocol SDAnimatedImage <NSObject>
@required
/**
Total animated frame count.
If the frame count is less than 1, the methods below will be ignored.
@return Total animated frame count.
*/
- (NSUInteger)animatedImageFrameCount;
/**
Animation loop count, 0 means infinite looping.
@return Animation loop count
*/
- (NSUInteger)animatedImageLoopCount;
/**
Returns the frame image at the specified index.
This method may be called on a background thread, and the index may be requested out of order when one image is shared by multiple image views, so keep the implementation re-entrant.
@param index Frame index (zero based).
@return Frame's image
*/
- (nullable UIImage *)animatedImageFrameAtIndex:(NSUInteger)index;
/**
Returns the frame's duration at the specified index.
@param index Frame index (zero based).
@return Frame's duration
*/
- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index;
@end
@interface SDAnimatedImage : UIImage <SDAnimatedImage>
// This class overrides these methods from UIImage (NSImage on macOS) and supports NSSecureCoding.
// Use these methods to create a new animated image; other initializers just call super instead.
+ (nullable instancetype)imageWithContentsOfFile:(nonnull NSString *)path;
+ (nullable instancetype)imageWithData:(nonnull NSData *)data;
+ (nullable instancetype)imageWithData:(nonnull NSData *)data scale:(CGFloat)scale;
- (nullable instancetype)initWithContentsOfFile:(nonnull NSString *)path;
- (nullable instancetype)initWithData:(nonnull NSData *)data;
- (nullable instancetype)initWithData:(nonnull NSData *)data scale:(CGFloat)scale;
/**
Current animated image format
*/
@property (nonatomic, assign, readonly) SDImageFormat animatedImageFormat;
/**
Current animated image data; you can use this instead of the CGImage to create another instance.
*/
@property (nonatomic, copy, readonly, nullable) NSData *animatedImageData;
@end
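
A minimal usage sketch for the header above (not part of this commit). It assumes a hypothetical bundle resource "loading.gif" and a view controller context; `SDAnimatedImageView` is introduced later in this diff:

#import <SDWebImage/SDAnimatedImage.h>
#import <SDWebImage/SDAnimatedImageView.h>

- (void)showLocalAnimatedImage {
    // "loading.gif" is an assumed resource, used only for illustration.
    NSString *path = [[NSBundle mainBundle] pathForResource:@"loading" ofType:@"gif"];
    if (!path) {
        return;
    }
    // Returns nil if no registered coder can decode the data as an animated image.
    SDAnimatedImage *animatedImage = [SDAnimatedImage imageWithContentsOfFile:path];
    if (!animatedImage) {
        return;
    }
    NSLog(@"frames: %lu, loops: %lu",
          (unsigned long)animatedImage.animatedImageFrameCount,
          (unsigned long)animatedImage.animatedImageLoopCount);

    SDAnimatedImageView *imageView = [[SDAnimatedImageView alloc] initWithFrame:CGRectMake(0, 0, 200, 200)];
    imageView.image = animatedImage; // setting an SDAnimatedImage starts animated rendering
    [self.view addSubview:imageView];
}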

View File

@ -0,0 +1,167 @@
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import "SDAnimatedImage.h"
#import "NSImage+Additions.h"
#import "UIImage+WebCache.h"
#import "SDWebImageCoder.h"
#import "SDWebImageCodersManager.h"
static CGFloat SDImageScaleFromPath(NSString *string) {
if (string.length == 0 || [string hasSuffix:@"/"]) return 1;
NSString *name = string.stringByDeletingPathExtension;
__block CGFloat scale = 1;
NSRegularExpression *pattern = [NSRegularExpression regularExpressionWithPattern:@"@[0-9]+\\.?[0-9]*x$" options:NSRegularExpressionAnchorsMatchLines error:nil];
[pattern enumerateMatchesInString:name options:kNilOptions range:NSMakeRange(0, name.length) usingBlock:^(NSTextCheckingResult *result, NSMatchingFlags flags, BOOL *stop) {
if (result.range.location >= 3) {
scale = [string substringWithRange:NSMakeRange(result.range.location + 1, result.range.length - 2)].doubleValue;
}
}];
return scale;
}
@interface SDAnimatedImage ()
@property (nonatomic, strong) id<SDWebImageAnimatedCoder> coder;
@property (nonatomic, assign, readwrite) NSUInteger animatedImageLoopCount;
@property (nonatomic, assign, readwrite) NSUInteger animatedImageFrameCount;
@property (nonatomic, assign, readwrite) SDImageFormat animatedImageFormat;
@property (nonatomic, assign) BOOL animatedImageLoopCountCheck;
@property (nonatomic, assign) BOOL animatedImageFrameCountChecked;
#if SD_MAC
@property (nonatomic, assign) CGFloat scale;
#endif
@end
@implementation SDAnimatedImage
#pragma mark - UIImage override method
+ (instancetype)imageWithContentsOfFile:(NSString *)path {
return [[self alloc] initWithContentsOfFile:path];
}
+ (instancetype)imageWithData:(NSData *)data {
return [[self alloc] initWithData:data];
}
+ (instancetype)imageWithData:(NSData *)data scale:(CGFloat)scale {
return [[self alloc] initWithData:data scale:scale];
}
- (instancetype)initWithContentsOfFile:(NSString *)path {
NSData *data = [NSData dataWithContentsOfFile:path];
return [self initWithData:data scale:SDImageScaleFromPath(path)];
}
- (instancetype)initWithData:(NSData *)data {
return [self initWithData:data scale:1];
}
- (instancetype)initWithData:(NSData *)data scale:(CGFloat)scale {
if (!data || data.length == 0) {
return nil;
}
if (scale <= 0) {
#if SD_WATCH
scale = [WKInterfaceDevice currentDevice].screenScale;
#elif SD_UIKIT
scale = [UIScreen mainScreen].scale;
#endif
}
for (id<SDWebImageCoder>coder in [SDWebImageCodersManager sharedInstance].coders) {
if ([coder conformsToProtocol:@protocol(SDWebImageAnimatedCoder)]) {
if ([coder canDecodeFromData:data]) {
id<SDWebImageAnimatedCoder> animatedCoder = [[[coder class] alloc] initWithAnimatedImageData:data];
if (!animatedCoder) {
// check next coder
continue;
} else {
self.coder = animatedCoder;
break;
}
}
}
}
if (!self.coder) {
return nil;
}
UIImage *image = [self.coder animatedImageFrameAtIndex:0];
if (!image) {
return nil;
}
#if SD_MAC
self = [super initWithCGImage:image.CGImage size:NSZeroSize];
#else
self = [super initWithCGImage:image.CGImage scale:scale orientation:image.imageOrientation];
#endif
if (!self) {
return nil;
}
SDImageFormat format = [NSData sd_imageFormatForImageData:data];
self.animatedImageFormat = format;
return self;
}
#pragma mark - NSSecureCoding
- (instancetype)initWithCoder:(NSCoder *)aDecoder {
NSNumber *scale = [aDecoder decodeObjectOfClass:[NSNumber class] forKey:NSStringFromSelector(@selector(scale))];
NSData *animatedImageData = [aDecoder decodeObjectOfClass:[NSData class] forKey:NSStringFromSelector(@selector(animatedImageData))];
if (animatedImageData) {
return [self initWithData:animatedImageData scale:scale.doubleValue];
} else {
return [super initWithCoder:aDecoder];
}
}
- (void)encodeWithCoder:(NSCoder *)aCoder {
if (self.animatedImageData) {
[aCoder encodeObject:self.animatedImageData forKey:NSStringFromSelector(@selector(animatedImageData))];
[aCoder encodeObject:@(self.scale) forKey:NSStringFromSelector(@selector(scale))];
} else {
[super encodeWithCoder:aCoder];
}
}
+ (BOOL)supportsSecureCoding {
return YES;
}
#pragma mark - SDAnimatedImage
- (NSUInteger)animatedImageLoopCount {
if (!self.animatedImageLoopCountCheck) {
self.animatedImageLoopCountCheck = YES;
_animatedImageLoopCount = [self.coder animatedImageLoopCount];
}
return _animatedImageLoopCount;
}
- (NSUInteger)animatedImageFrameCount {
if (!self.animatedImageFrameCountChecked) {
self.animatedImageFrameCountChecked = YES;
_animatedImageFrameCount = [self.coder animatedImageFrameCount];
}
return _animatedImageFrameCount;
}
- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
return [self.coder animatedImageFrameAtIndex:index];
}
- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
return [self.coder animatedImageDurationAtIndex:index];
}
- (NSData *)animatedImageData {
return self.coder.animatedImageData;
}
@end
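
A short sketch (not part of this commit) of the NSSecureCoding round trip implemented above; `gifData` stands for any animated image data the registered coders can decode:

#import <SDWebImage/SDAnimatedImage.h>

SDAnimatedImage *animatedImage = [SDAnimatedImage imageWithData:gifData];
NSData *archive = [NSKeyedArchiver archivedDataWithRootObject:animatedImage];
SDAnimatedImage *restored = [NSKeyedUnarchiver unarchiveObjectWithData:archive];
// -encodeWithCoder: stores animatedImageData and scale, and -initWithCoder: rebuilds the image
// through -initWithData:scale:, so frame count, loop count and format survive the round trip.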

View File

@ -0,0 +1,126 @@
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import "SDAnimatedImageView.h"
#if SD_UIKIT || SD_MAC
#import "SDWebImageManager.h"
@interface SDAnimatedImageView (WebCache)
/**
* Set the imageView `image` with an `url`.
*
* The download is asynchronous and cached.
*
* @param url The url for the image.
*/
- (void)sd_setImageWithURL:(nullable NSURL *)url NS_REFINED_FOR_SWIFT;
/**
* Set the imageView `image` with an `url` and a placeholder.
*
* The download is asynchronous and cached.
*
* @param url The url for the image.
* @param placeholder The image to be set initially, until the image request finishes.
* @see sd_setImageWithURL:placeholderImage:options:
*/
- (void)sd_setImageWithURL:(nullable NSURL *)url
placeholderImage:(nullable UIImage *)placeholder NS_REFINED_FOR_SWIFT;
/**
* Set the imageView `image` with an `url`, placeholder and custom options.
*
* The download is asynchronous and cached.
*
* @param url The url for the image.
* @param placeholder The image to be set initially, until the image request finishes.
* @param options The options to use when downloading the image. @see SDWebImageOptions for the possible values.
*/
- (void)sd_setImageWithURL:(nullable NSURL *)url
placeholderImage:(nullable UIImage *)placeholder
options:(SDWebImageOptions)options NS_REFINED_FOR_SWIFT;
/**
* Set the imageView `image` with an `url`.
*
* The download is asynchronous and cached.
*
* @param url The url for the image.
* @param completedBlock A block called when operation has been completed. This block has no return value
* and takes the requested UIImage as first parameter. In case of error the image parameter
* is nil and the second parameter may contain an NSError. The third parameter is a Boolean
* indicating if the image was retrieved from the local cache or from the network.
* The fourth parameter is the original image url.
*/
- (void)sd_setImageWithURL:(nullable NSURL *)url
completed:(nullable SDExternalCompletionBlock)completedBlock;
/**
* Set the imageView `image` with an `url`, placeholder.
*
* The download is asynchronous and cached.
*
* @param url The url for the image.
* @param placeholder The image to be set initially, until the image request finishes.
* @param completedBlock A block called when operation has been completed. This block has no return value
* and takes the requested UIImage as first parameter. In case of error the image parameter
* is nil and the second parameter may contain an NSError. The third parameter is a Boolean
* indicating if the image was retrieved from the local cache or from the network.
* The fourth parameter is the original image url.
*/
- (void)sd_setImageWithURL:(nullable NSURL *)url
placeholderImage:(nullable UIImage *)placeholder
completed:(nullable SDExternalCompletionBlock)completedBlock NS_REFINED_FOR_SWIFT;
/**
* Set the imageView `image` with an `url`, placeholder and custom options.
*
* The download is asynchronous and cached.
*
* @param url The url for the image.
* @param placeholder The image to be set initially, until the image request finishes.
* @param options The options to use when downloading the image. @see SDWebImageOptions for the possible values.
* @param completedBlock A block called when operation has been completed. This block has no return value
* and takes the requested UIImage as first parameter. In case of error the image parameter
* is nil and the second parameter may contain an NSError. The third parameter is a Boolean
* indicating if the image was retrieved from the local cache or from the network.
* The fourth parameter is the original image url.
*/
- (void)sd_setImageWithURL:(nullable NSURL *)url
placeholderImage:(nullable UIImage *)placeholder
options:(SDWebImageOptions)options
completed:(nullable SDExternalCompletionBlock)completedBlock;
/**
* Set the imageView `image` with an `url`, placeholder and custom options.
*
* The download is asynchronous and cached.
*
* @param url The url for the image.
* @param placeholder The image to be set initially, until the image request finishes.
* @param options The options to use when downloading the image. @see SDWebImageOptions for the possible values.
* @param progressBlock A block called while image is downloading
* @note the progress block is executed on a background queue
* @param completedBlock A block called when operation has been completed. This block has no return value
* and takes the requested UIImage as first parameter. In case of error the image parameter
* is nil and the second parameter may contain an NSError. The third parameter is a Boolean
* indicating if the image was retrieved from the local cache or from the network.
* The fourth parameter is the original image url.
*/
- (void)sd_setImageWithURL:(nullable NSURL *)url
placeholderImage:(nullable UIImage *)placeholder
options:(SDWebImageOptions)options
progress:(nullable SDWebImageDownloaderProgressBlock)progressBlock
completed:(nullable SDExternalCompletionBlock)completedBlock;
@end
#endif
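
A usage sketch for the category above (not part of this commit), run from e.g. a view controller's -viewDidLoad; the URL is only a placeholder example:

#import <SDWebImage/SDAnimatedImageView+WebCache.h>

SDAnimatedImageView *imageView = [[SDAnimatedImageView alloc] initWithFrame:CGRectMake(0, 0, 300, 200)];
[imageView sd_setImageWithURL:[NSURL URLWithString:@"https://example.com/animation.gif"]
             placeholderImage:nil
                    completed:^(UIImage *image, NSError *error, SDImageCacheType cacheType, NSURL *imageURL) {
                        if (error) {
                            NSLog(@"Image load failed: %@", error);
                        }
                    }];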

View File

@ -0,0 +1,57 @@
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import "SDAnimatedImageView+WebCache.h"
#if SD_UIKIT || SD_MAC
#import "UIView+WebCache.h"
@implementation SDAnimatedImageView (WebCache)
- (void)sd_setImageWithURL:(nullable NSURL *)url {
[self sd_setImageWithURL:url placeholderImage:nil options:0 progress:nil completed:nil];
}
- (void)sd_setImageWithURL:(nullable NSURL *)url placeholderImage:(nullable UIImage *)placeholder {
[self sd_setImageWithURL:url placeholderImage:placeholder options:0 progress:nil completed:nil];
}
- (void)sd_setImageWithURL:(nullable NSURL *)url placeholderImage:(nullable UIImage *)placeholder options:(SDWebImageOptions)options {
[self sd_setImageWithURL:url placeholderImage:placeholder options:options progress:nil completed:nil];
}
- (void)sd_setImageWithURL:(nullable NSURL *)url completed:(nullable SDExternalCompletionBlock)completedBlock {
[self sd_setImageWithURL:url placeholderImage:nil options:0 progress:nil completed:completedBlock];
}
- (void)sd_setImageWithURL:(nullable NSURL *)url placeholderImage:(nullable UIImage *)placeholder completed:(nullable SDExternalCompletionBlock)completedBlock {
[self sd_setImageWithURL:url placeholderImage:placeholder options:0 progress:nil completed:completedBlock];
}
- (void)sd_setImageWithURL:(nullable NSURL *)url placeholderImage:(nullable UIImage *)placeholder options:(SDWebImageOptions)options completed:(nullable SDExternalCompletionBlock)completedBlock {
[self sd_setImageWithURL:url placeholderImage:placeholder options:options progress:nil completed:completedBlock];
}
- (void)sd_setImageWithURL:(nullable NSURL *)url
placeholderImage:(nullable UIImage *)placeholder
options:(SDWebImageOptions)options
progress:(nullable SDWebImageDownloaderProgressBlock)progressBlock
completed:(nullable SDExternalCompletionBlock)completedBlock {
[self sd_internalSetImageWithURL:url
placeholderImage:placeholder
options:options
operationKey:nil
setImageBlock:nil
progress:progressBlock
completed:completedBlock];
}
@end
#endif


View File

@ -0,0 +1,59 @@
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import "SDWebImageCompat.h"
#if SD_UIKIT || SD_MAC
/**
A drop-in replacement for UIImageView/NSImageView that you can use for animated image rendering.
Calling `setImage:` with a `UIImage<SDAnimatedImage>` starts animated image rendering; calling it with a plain UIImage (NSImage) falls back to normal UIImageView (NSImageView) rendering.
For UIKit: use `-startAnimating` / `-stopAnimating` to control the animation.
For AppKit: use `-setAnimates:` to control the animation. This view is layer-backed.
*/
@interface SDAnimatedImageView : UIImageView
/**
Current display frame image
*/
@property (nonatomic, strong, readonly, nullable) UIImage *currentFrame;
/**
Current frame index, zero based
*/
@property (nonatomic, assign, readonly) NSUInteger currentFrameIndex;
/**
Current loop count since the animation last started.
*/
@property (nonatomic, assign, readonly) NSUInteger currentLoopCount;
/**
Set to YES to use the `animationRepeatCount` property instead of the image's own loop count as the animation loop count. Defaults to NO.
*/
@property (nonatomic, assign) BOOL shouldCustomLoopCount;
/**
Total loop count for animated image rendering. Defaults to the animated image's own loop count.
If you need a custom loop count, set `shouldCustomLoopCount` to YES and change this value.
This class overrides UIImageView's `animationRepeatCount` property on iOS, so you can use that property as well.
*/
@property (nonatomic, assign) NSInteger animationRepeatCount;
/**
The maximum frame buffer size in bytes. This is used to adjust the frame buffer count and can be useful when decoding is expensive (such as animated WebP software decoding). Defaults to 0.
`0` means the buffer is sized automatically based on current memory usage.
`1` means no buffer cache; each frame is decoded and then freed after rendering. (Lowest memory, highest CPU.)
`NSUIntegerMax` means all frames are cached. (Lowest CPU, highest memory.)
*/
@property (nonatomic, assign) NSUInteger maxBufferSize;
/**
You can specify the run loop mode used for rendering.
Defaults to NSRunLoopCommonModes on multi-core iOS devices and NSDefaultRunLoopMode on single-core devices.
This value has no effect on macOS.
*/
@property (nonatomic, copy, nonnull) NSString *runLoopMode;
@end
#endif
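
A configuration sketch for the properties above (not part of this commit):

#import <SDWebImage/SDAnimatedImageView.h>

SDAnimatedImageView *imageView = [[SDAnimatedImageView alloc] initWithFrame:CGRectMake(0, 0, 300, 200)];
imageView.shouldCustomLoopCount = YES;        // use animationRepeatCount instead of the image's own loop count
imageView.animationRepeatCount = 3;           // play the animation three times, then stop
imageView.maxBufferSize = 10 * 1024 * 1024;   // cap the decoded frame buffer at roughly 10 MB
imageView.runLoopMode = NSRunLoopCommonModes; // keep animating during scrolling (iOS only)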

View File

@ -0,0 +1,757 @@
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import "SDAnimatedImageView.h"
#import "UIImage+WebCache.h"
#import "NSImage+Additions.h"
#if SD_UIKIT || SD_MAC
#import "SDAnimatedImage.h"
#import <mach/mach.h>
#if SD_MAC
#import <CoreVideo/CoreVideo.h>
static CVReturn renderCallback(CVDisplayLinkRef displayLink, const CVTimeStamp *inNow, const CVTimeStamp *inOutputTime, CVOptionFlags flagsIn, CVOptionFlags *flagsOut, void *displayLinkContext);
#endif
static NSUInteger SDDeviceTotalMemory() {
return [[NSProcessInfo processInfo] physicalMemory];
}
static NSUInteger SDDeviceFreeMemory() {
mach_port_t host_port = mach_host_self();
mach_msg_type_number_t host_size = sizeof(vm_statistics_data_t) / sizeof(integer_t);
vm_size_t page_size;
vm_statistics_data_t vm_stat;
kern_return_t kern;
kern = host_page_size(host_port, &page_size);
if (kern != KERN_SUCCESS) return 0;
kern = host_statistics(host_port, HOST_VM_INFO, (host_info_t)&vm_stat, &host_size);
if (kern != KERN_SUCCESS) return 0;
return vm_stat.free_count * page_size;
}
#define LOCK(...) dispatch_semaphore_wait(self->_lock, DISPATCH_TIME_FOREVER); \
__VA_ARGS__; \
dispatch_semaphore_signal(self->_lock);
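// Commentary: the two helpers above report total and free physical memory; per the `maxBufferSize`
// documentation in SDAnimatedImageView.h, a value of 0 lets the view size its frame buffer from current
// memory usage, which these helpers support. The LOCK macro brackets its statements with
// dispatch_semaphore_wait / dispatch_semaphore_signal on self->_lock, i.e. a lightweight critical section.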
@interface SDWeakProxy : NSProxy
@property (nonatomic, weak, readonly) id target;
- (instancetype)initWithTarget:(id)target;
+ (instancetype)proxyWithTarget:(id)target;
@end
@implementation SDWeakProxy
- (instancetype)initWithTarget:(id)target {
_target = target;
return self;
}
+ (instancetype)proxyWithTarget:(id)target {
return [[SDWeakProxy alloc] initWithTarget:target];
}
- (id)forwardingTargetForSelector:(SEL)selector {
return _target;
}
- (void)forwardInvocation:(NSInvocation *)invocation {
void *null = NULL;
[invocation setReturnValue:&null];
}
- (NSMethodSignature *)methodSignatureForSelector:(SEL)selector {
return [NSObject instanceMethodSignatureForSelector:@selector(init)];
}
- (BOOL)respondsToSelector:(SEL)aSelector {
return [_target respondsToSelector:aSelector];
}
- (BOOL)isEqual:(id)object {
return [_target isEqual:object];
}
- (NSUInteger)hash {
return [_target hash];
}
- (Class)superclass {
return [_target superclass];
}
- (Class)class {
return [_target class];
}
- (BOOL)isKindOfClass:(Class)aClass {
return [_target isKindOfClass:aClass];
}
- (BOOL)isMemberOfClass:(Class)aClass {
return [_target isMemberOfClass:aClass];
}
- (BOOL)conformsToProtocol:(Protocol *)aProtocol {
return [_target conformsToProtocol:aProtocol];
}
- (BOOL)isProxy {
return YES;
}
- (NSString *)description {
return [_target description];
}
- (NSString *)debugDescription {
return [_target debugDescription];
}
@end
@interface SDAnimatedImageView () <CALayerDelegate>
@property (nonatomic, strong, readwrite) UIImage *currentFrame;
@property (nonatomic, assign, readwrite) NSUInteger currentFrameIndex;
@property (nonatomic, assign, readwrite) NSUInteger currentLoopCount;
@property (nonatomic, assign) NSUInteger totalFrameCount;
@property (nonatomic, assign) NSUInteger totalLoopCount;
@property (nonatomic, strong) UIImage<SDAnimatedImage> *animatedImage;
@property (nonatomic, strong) NSMutableDictionary<NSNumber *, UIImage *> *frameBuffer;
@property (nonatomic, assign) NSTimeInterval currentTime;
@property (nonatomic, assign) BOOL bufferMiss;
@property (nonatomic, assign) BOOL shouldAnimate;
@property (nonatomic, assign) NSUInteger maxBufferCount;
@property (nonatomic, strong) NSOperationQueue *fetchQueue;
@property (nonatomic, strong) dispatch_semaphore_t lock;
#if SD_MAC
@property (nonatomic, assign) CVDisplayLinkRef displayLink;
#else
@property (nonatomic, strong) CADisplayLink *displayLink;
#endif
@end
@implementation SDAnimatedImageView
#if SD_UIKIT
@dynamic animationRepeatCount;
#endif
#pragma mark - Initializers
#if SD_MAC
+ (instancetype)imageViewWithImage:(NSImage *)image
{
NSRect frame = NSMakeRect(0, 0, image.size.width, image.size.height);
SDAnimatedImageView *imageView = [[SDAnimatedImageView alloc] initWithFrame:frame];
[imageView setImage:image];
return imageView;
}
#else
// -initWithImage: isn't documented as a designated initializer of UIImageView, but it actually seems to be.
// Using -initWithImage: doesn't call any of the other designated initializers.
- (instancetype)initWithImage:(UIImage *)image
{
self = [super initWithImage:image];
if (self) {
[self commonInit];
}
return self;
}
// -initWithImage:highlightedImage: also isn't documented as a designated initializer of UIImageView, but it doesn't call any other designated initializers.
- (instancetype)initWithImage:(UIImage *)image highlightedImage:(UIImage *)highlightedImage
{
self = [super initWithImage:image highlightedImage:highlightedImage];
if (self) {
[self commonInit];
}
return self;
}
#endif
- (instancetype)initWithFrame:(CGRect)frame
{
self = [super initWithFrame:frame];
if (self) {
[self commonInit];
}
return self;
}
- (instancetype)initWithCoder:(NSCoder *)aDecoder
{
self = [super initWithCoder:aDecoder];
if (self) {
[self commonInit];
}
return self;
}
- (void)commonInit
{
#if SD_MAC
self.wantsLayer = YES;
self.imageScaling = NSImageScaleProportionallyDown;
#endif
self.maxBufferCount = 0;
self.runLoopMode = [[self class] defaultRunLoopMode];
self.lock = dispatch_semaphore_create(1);
#if SD_UIKIT
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveMemoryWarning:) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
#endif
}
- (void)resetAnimated
{
LOCK({
self.animatedImage = nil;
self.totalFrameCount = 0;
self.totalLoopCount = 0;
self.currentFrame = nil;
self.currentFrameIndex = 0;
self.currentLoopCount = 0;
self.currentTime = 0;
self.bufferMiss = NO;
self.shouldAnimate = NO;
self.maxBufferCount = 0;
[_frameBuffer removeAllObjects];
_frameBuffer = nil;
[_fetchQueue cancelAllOperations];
_fetchQueue = nil;
});
}
#pragma mark - Accessors
#pragma mark Public
- (void)setImage:(UIImage *)image
{
if (self.image == image) {
return;
}
[self stopAnimating];
// Reset all value
[self resetAnimated];
super.image = image;
if ([image conformsToProtocol:@protocol(SDAnimatedImage)]) {
NSUInteger animatedImageFrameCount = ((UIImage<SDAnimatedImage> *)image).animatedImageFrameCount;
if (animatedImageFrameCount <= 1) {
return;
}
self.animatedImage = (UIImage<SDAnimatedImage> *)image;
self.totalFrameCount = animatedImageFrameCount;
self.totalLoopCount = self.animatedImage.animatedImageLoopCount;
// Ensure disabled highlighting; it's not supported (see `-setHighlighted:`).
super.highlighted = NO;
// UIImageView seems to bypass some accessors when calculating its intrinsic content size, so this ensures its intrinsic content size comes from the animated image.
[self invalidateIntrinsicContentSize];
// Get the first frame
self.currentFrame = [self.animatedImage animatedImageFrameAtIndex:0];
LOCK({
if (self.currentFrame) {
self.frameBuffer[@(0)] = self.currentFrame;
self.bufferMiss = NO;
} else {
self.bufferMiss = YES;
}
});
// Calculate max buffer size
[self calculateMaxBufferCount];
// Update should animate
[self updateShouldAnimate];
if (self.shouldAnimate) {
[self startAnimating];
}
[self.layer setNeedsDisplay];
}
}
- (void)setAnimationRepeatCount:(NSInteger)animationRepeatCount
{
#if SD_MAC
_animationRepeatCount = animationRepeatCount;
#else
[super setAnimationRepeatCount:animationRepeatCount];
#endif
}
- (void)setRunLoopMode:(NSString *)runLoopMode
{
if (![@[NSDefaultRunLoopMode, NSRunLoopCommonModes] containsObject:runLoopMode]) {
NSAssert(NO, @"Invalid run loop mode: %@", runLoopMode);
_runLoopMode = [[self class] defaultRunLoopMode];
} else {
_runLoopMode = runLoopMode;
}
}
#pragma mark - Private
- (NSOperationQueue *)fetchQueue
{
if (!_fetchQueue) {
_fetchQueue = [[NSOperationQueue alloc] init];
_fetchQueue.maxConcurrentOperationCount = 1;
}
return _fetchQueue;
}
- (NSMutableDictionary<NSNumber *,UIImage *> *)frameBuffer
{
if (!_frameBuffer) {
_frameBuffer = [NSMutableDictionary dictionary];
}
return _frameBuffer;
}
#if SD_MAC
- (CVDisplayLinkRef)displayLink
{
if (!_displayLink) {
CGDirectDisplayID displayID = CGMainDisplayID();
CVReturn error = CVDisplayLinkCreateWithCGDisplay(displayID, &_displayLink);
if (error) {
return NULL;
}
CVDisplayLinkSetOutputCallback(_displayLink, renderCallback, (__bridge void *)self);
}
return _displayLink;
}
#else
- (CADisplayLink *)displayLink
{
if (!_displayLink) {
// It is important to note the use of a weak proxy here to avoid a retain cycle. `-displayLinkWithTarget:selector:`
// will retain its target until it is invalidated. We use a weak proxy so that the image view will get deallocated
// independent of the display link's lifetime. Upon image view deallocation, we invalidate the display
// link which will lead to the deallocation of both the display link and the weak proxy.
SDWeakProxy *weakProxy = [SDWeakProxy proxyWithTarget:self];
_displayLink = [CADisplayLink displayLinkWithTarget:weakProxy selector:@selector(displayDidRefresh:)];
[_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:self.runLoopMode];
}
return _displayLink;
}
#endif
#pragma mark - Life Cycle
- (void)dealloc
{
// Removes the display link from all run loop modes.
#if SD_MAC
if (_displayLink) {
CVDisplayLinkRelease(_displayLink);
_displayLink = NULL;
}
#else
[_displayLink invalidate];
[[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
#endif
}
- (void)didReceiveMemoryWarning:(NSNotification *)notification {
[_fetchQueue cancelAllOperations];
[_fetchQueue addOperationWithBlock:^{
NSNumber *currentFrameIndex = @(self.currentFrameIndex);
LOCK({
NSArray *keys = self.frameBuffer.allKeys;
// only keep the next frame for later rendering
for (NSNumber * key in keys) {
if (![key isEqualToNumber:currentFrameIndex]) {
[self.frameBuffer removeObjectForKey:key];
}
}
});
}];
}
#pragma mark - UIView Method Overrides
#pragma mark Observing View-Related Changes
#if SD_MAC
- (void)viewDidMoveToSuperview
#else
- (void)didMoveToSuperview
#endif
{
#if SD_MAC
[super viewDidMoveToSuperview];
#else
[super didMoveToSuperview];
#endif
[self updateShouldAnimate];
if (self.shouldAnimate) {
[self startAnimating];
} else {
[self stopAnimating];
}
}
#if SD_MAC
- (void)viewDidMoveToWindow
#else
- (void)didMoveToWindow
#endif
{
#if SD_MAC
[super viewDidMoveToWindow];
#else
[super didMoveToWindow];
#endif
[self updateShouldAnimate];
if (self.shouldAnimate) {
[self startAnimating];
} else {
[self stopAnimating];
}
}
#if SD_MAC
- (void)setAlphaValue:(CGFloat)alphaValue
#else
- (void)setAlpha:(CGFloat)alpha
#endif
{
#if SD_MAC
[super setAlphaValue:alphaValue];
#else
[super setAlpha:alpha];
#endif
[self updateShouldAnimate];
if (self.shouldAnimate) {
[self startAnimating];
} else {
[self stopAnimating];
}
}
- (void)setHidden:(BOOL)hidden
{
[super setHidden:hidden];
[self updateShouldAnimate];
if (self.shouldAnimate) {
[self startAnimating];
} else {
[self stopAnimating];
}
}
#pragma mark Auto Layout
- (CGSize)intrinsicContentSize
{
// Default to let UIImageView handle the sizing of its image, and anything else it might consider.
CGSize intrinsicContentSize = [super intrinsicContentSize];
// If we have an animated image, use its image size.
// UIImageView's intrinsic content size seems to be the size of its image. The obvious approach, simply calling `-invalidateIntrinsicContentSize` when setting an animated image, results in UIImageView steadfastly returning `{UIViewNoIntrinsicMetric, UIViewNoIntrinsicMetric}` for its intrinsicContentSize.
// (Perhaps UIImageView bypasses its `-image` getter in its implementation of `-intrinsicContentSize`, as `-image` is not called after calling `-invalidateIntrinsicContentSize`.)
if (self.animatedImage) {
intrinsicContentSize = self.image.size;
}
return intrinsicContentSize;
}
#if SD_MAC
#pragma mark - NSImageView Method Overrides
- (void)setImageScaling:(NSImageScaling)imageScaling {
[super setImageScaling:imageScaling];
[self updateLayerContentsPlacement];
}
- (void)setImageAlignment:(NSImageAlignment)imageAlignment {
[super setImageAlignment:imageAlignment];
[self updateLayerContentsPlacement];
}
- (void)updateLayerContentsPlacement {
NSImageScaling scale = self.imageScaling;
NSViewLayerContentsPlacement contentsPlacement = NSViewLayerContentsPlacementScaleAxesIndependently;
switch (scale) {
case NSImageScaleProportionallyDown:
contentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit; break;
case NSImageScaleAxesIndependently:
contentsPlacement = NSViewLayerContentsPlacementScaleAxesIndependently; break;
case NSImageScaleProportionallyUpOrDown:
contentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFill; break;
case NSImageScaleNone: {
NSImageAlignment alignment = self.imageAlignment;
switch (alignment) {
case NSImageAlignCenter:
contentsPlacement = NSViewLayerContentsPlacementCenter; break;
case NSImageAlignTop:
contentsPlacement = NSViewLayerContentsPlacementTop; break;
case NSImageAlignTopLeft:
contentsPlacement = NSViewLayerContentsPlacementTopLeft; break;
case NSImageAlignTopRight:
contentsPlacement = NSViewLayerContentsPlacementTopRight; break;
case NSImageAlignLeft:
contentsPlacement = NSViewLayerContentsPlacementLeft; break;
case NSImageAlignBottom:
contentsPlacement = NSViewLayerContentsPlacementBottom; break;
case NSImageAlignBottomLeft:
contentsPlacement = NSViewLayerContentsPlacementBottomLeft; break;
case NSImageAlignBottomRight:
contentsPlacement = NSViewLayerContentsPlacementBottomRight; break;
case NSImageAlignRight:
contentsPlacement = NSViewLayerContentsPlacementRight; break;
}
break;
}
}
self.layerContentsPlacement = contentsPlacement;
}
#endif
#pragma mark - UIImageView Method Overrides
#pragma mark Image Data
- (void)startAnimating
{
if (self.animatedImage) {
#if SD_MAC
CVDisplayLinkStart(self.displayLink);
#else
self.displayLink.paused = NO;
#endif
} else {
#if SD_UIKIT
[super startAnimating];
#endif
}
}
- (void)stopAnimating
{
if (self.animatedImage) {
#if SD_MAC
CVDisplayLinkStop(self.displayLink);
#else
self.displayLink.paused = YES;
#endif
} else {
#if SD_UIKIT
[super stopAnimating];
#endif
}
}
- (BOOL)isAnimating
{
BOOL isAnimating = NO;
if (self.animatedImage) {
#if SD_MAC
isAnimating = CVDisplayLinkIsRunning(self.displayLink);
#else
isAnimating = !self.displayLink.isPaused;
#endif
} else {
#if SD_UIKIT
isAnimating = [super isAnimating];
#endif
}
return isAnimating;
}
#if SD_MAC
- (void)setAnimates:(BOOL)animates
{
[super setAnimates:animates];
if (animates) {
[self startAnimating];
} else {
[self stopAnimating];
}
}
#endif
#pragma mark Highlighted Image Unsupport
- (void)setHighlighted:(BOOL)highlighted
{
// Highlighted image is unsupported for animated images, but implementing it breaks the image view when embedded in a UICollectionViewCell.
if (!self.animatedImage) {
[super setHighlighted:highlighted];
}
}
#pragma mark - Private Methods
#pragma mark Animation
// Don't repeatedly check our window & superview in `-displayDidRefresh:` for performance reasons.
// Just update our cached value whenever the animated image or visibility (window, superview, hidden, alpha) is changed.
- (void)updateShouldAnimate
{
#if SD_MAC
BOOL isVisible = self.window && self.superview && ![self isHidden] && self.alphaValue > 0.0 && self.animates;
#else
BOOL isVisible = self.window && self.superview && ![self isHidden] && self.alpha > 0.0;
#endif
self.shouldAnimate = self.animatedImage && self.totalFrameCount > 1 && isVisible;
}
#if SD_MAC
- (void)displayDidRefresh:(CVDisplayLinkRef)displayLink duration:(NSTimeInterval)duration
#else
- (void)displayDidRefresh:(CADisplayLink *)displayLink
#endif
{
// If for some reason a wild call makes it through when we shouldn't be animating, bail.
// Early return!
if (!self.shouldAnimate) {
return;
}
#if SD_UIKIT
NSTimeInterval duration = displayLink.duration * displayLink.frameInterval;
#endif
NSUInteger currentFrameIndex = self.currentFrameIndex;
NSUInteger nextFrameIndex = (currentFrameIndex + 1) % self.totalFrameCount;
// Check the frame buffer first to improve performance
if (!self.bufferMiss) {
// Then check if timestamp is reached
self.currentTime += duration;
NSTimeInterval currentDuration = [self.animatedImage animatedImageDurationAtIndex:currentFrameIndex];
if (self.currentTime < currentDuration) {
// Current frame timestamp not reached, return
return;
}
self.currentTime -= currentDuration;
NSTimeInterval nextDuration = [self.animatedImage animatedImageDurationAtIndex:nextFrameIndex];
if (self.currentTime > nextDuration) {
// Do not skip frame
self.currentTime = nextDuration;
}
}
// Update the current frame
UIImage *currentFrame;
LOCK({
currentFrame = self.frameBuffer[@(currentFrameIndex)];
});
if (currentFrame) {
LOCK({
// Remove the frame from the buffer if needed
if (self.frameBuffer.count > self.maxBufferCount) {
self.frameBuffer[@(currentFrameIndex)] = nil;
}
});
self.currentFrame = currentFrame;
self.currentFrameIndex = nextFrameIndex;
self.bufferMiss = NO;
[self.layer setNeedsDisplay];
} else {
self.bufferMiss = YES;
}
// Update the loop count
if (nextFrameIndex == 0) {
self.currentLoopCount++;
// If the max loop count is reached, stop animating; 0 means loop indefinitely
NSUInteger maxLoopCount = self.shouldCustomLoopCount ? self.animationRepeatCount : self.totalLoopCount;
if (maxLoopCount != 0 && (self.currentLoopCount >= maxLoopCount)) {
[self stopAnimating];
return;
}
}
// Check if we should prefetch next frame
if (self.fetchQueue.operationCount == 0 && self.frameBuffer.count < self.totalFrameCount) {
// Prefetch next frame in background queue
NSOperation *operation = [NSBlockOperation blockOperationWithBlock:^{
UIImage *nextFrame = [self.animatedImage animatedImageFrameAtIndex:nextFrameIndex];
LOCK({
self.frameBuffer[@(nextFrameIndex)] = nextFrame;
});
}];
[self.fetchQueue addOperation:operation];
}
}
+ (NSString *)defaultRunLoopMode
{
// Key off `activeProcessorCount` (as opposed to `processorCount`) since the system could shut down cores in certain situations.
return [NSProcessInfo processInfo].activeProcessorCount > 1 ? NSRunLoopCommonModes : NSDefaultRunLoopMode;
}
#pragma mark - CALayerDelegate (Informal)
#pragma mark Providing the Layer's Content
- (void)displayLayer:(CALayer *)layer
{
if (_currentFrame) {
layer.contents = (__bridge id)_currentFrame.CGImage;
}
}
#if SD_MAC
- (BOOL)wantsUpdateLayer
{
return YES;
}
- (void)updateLayer
{
if (_currentFrame) {
self.layer.contents = (__bridge id)_currentFrame.CGImage;
}
}
#endif
#pragma mark - Util
- (void)calculateMaxBufferCount {
NSUInteger bytes = CGImageGetBytesPerRow(self.currentFrame.CGImage) * CGImageGetHeight(self.currentFrame.CGImage);
if (bytes == 0) bytes = 1024;
NSUInteger max = 0;
if (self.maxBufferSize > 0) {
max = self.maxBufferSize;
} else {
// Calculate based on current memory; these factors are empirical
NSUInteger total = SDDeviceTotalMemory();
NSUInteger free = SDDeviceFreeMemory();
max = MIN(total * 0.2, free * 0.6);
}
NSUInteger maxBufferCount = (double)max / (double)bytes;
self.maxBufferCount = maxBufferCount;
}
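// Worked example (illustrative numbers, not from the source): a 500x500 BGRA frame takes
// roughly 500 * 4 * 500 = 1,000,000 bytes per frame. With `maxBufferSize` set to 10 MB
// (10,485,760 bytes), maxBufferCount comes out to about 10 buffered frames. With `maxBufferSize`
// left at 0, the budget is MIN(20% of total memory, 60% of free memory) instead.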
@end
#if SD_MAC
static CVReturn renderCallback(CVDisplayLinkRef displayLink, const CVTimeStamp *inNow, const CVTimeStamp *inOutputTime, CVOptionFlags flagsIn, CVOptionFlags *flagsOut, void *displayLinkContext) {
// Calculate refresh duration
NSTimeInterval duration = (double)inOutputTime->videoRefreshPeriod / ((double)inOutputTime->videoTimeScale * inOutputTime->rateScalar);
// CVDisplayLink callback is not on main queue
dispatch_async(dispatch_get_main_queue(), ^{
[(__bridge SDAnimatedImageView *)displayLinkContext displayDidRefresh:displayLink duration:duration];
});
return kCVReturnSuccess;
}
#endif
#endif

View File

@ -11,6 +11,7 @@
#import "NSImage+Additions.h"
#import "SDWebImageCodersManager.h"
#import "SDWebImageTransformer.h"
#import "SDWebImageCoderHelper.h"
#define LOCK(lock) dispatch_semaphore_wait(lock, DISPATCH_TIME_FOREVER);
#define UNLOCK(lock) dispatch_semaphore_signal(lock);
@ -294,12 +295,12 @@ FOUNDATION_STATIC_INLINE NSUInteger SDCacheCostForImage(UIImage *image) {
if (!data && image) {
// If we do not have any data to detect image format, check whether it contains alpha channel to use PNG or JPEG format
SDImageFormat format;
if (SDCGImageRefContainsAlpha(image.CGImage)) {
if ([SDWebImageCoderHelper imageRefContainsAlpha:image.CGImage]) {
format = SDImageFormatPNG;
} else {
format = SDImageFormatJPEG;
}
data = [[SDWebImageCodersManager sharedManager] encodedDataWithImage:image format:format];
data = [[SDWebImageCodersManager sharedManager] encodedDataWithImage:image format:format options:nil];
}
[self _storeImageDataToDisk:data forKey:key error:&writeError];
}
@ -476,10 +477,10 @@ FOUNDATION_STATIC_INLINE NSUInteger SDCacheCostForImage(UIImage *image) {
- (nullable UIImage *)diskImageForKey:(nullable NSString *)key data:(nullable NSData *)data {
if (data) {
UIImage *image = [[SDWebImageCodersManager sharedManager] decodedImageWithData:data];
UIImage *image = [[SDWebImageCodersManager sharedManager] decodedImageWithData:data options:nil];
image = [self scaledImageForKey:key image:image];
if (self.config.shouldDecompressImages) {
image = [[SDWebImageCodersManager sharedManager] decompressedImageWithImage:image data:&data options:@{SDWebImageCoderScaleDownLargeImagesKey: @(NO)}];
image = [SDWebImageCoderHelper decodedImageWithImage:image];
}
return image;
} else {

View File

@ -9,27 +9,19 @@
#import <Foundation/Foundation.h>
#import "SDWebImageCompat.h"
#import "NSData+ImageContentType.h"
#import "SDAnimatedImage.h"
typedef NSString * SDWebImageCoderOption NS_STRING_ENUM;
typedef NSDictionary<SDWebImageCoderOption, id> SDWebImageCoderOptions;
/**
A Boolean value indicating whether to scale down large images during decompressing. (NSNumber)
A Boolean value indicating whether to decode the first frame only for animated image during decoding. (NSNumber)
*/
FOUNDATION_EXPORT NSString * _Nonnull const SDWebImageCoderScaleDownLargeImagesKey;
FOUNDATION_EXPORT SDWebImageCoderOption _Nonnull const SDWebImageCoderDecodeFirstFrameOnly;
/**
Return the shared device-dependent RGB color space created with CGColorSpaceCreateDeviceRGB.
@return The device-dependent RGB color space
A double value between 0.0 and 1.0 indicating the encode quality used to produce the image data. If not provided, 1.0 is used. (NSNumber)
*/
CG_EXTERN CGColorSpaceRef _Nonnull SDCGColorSpaceGetDeviceRGB(void);
/**
Check whether CGImageRef contains alpha channel.
@param imageRef The CGImageRef
@return Return YES if CGImageRef contains alpha channel, otherwise return NO
*/
CG_EXTERN BOOL SDCGImageRefContainsAlpha(_Nullable CGImageRef imageRef);
FOUNDATION_EXPORT SDWebImageCoderOption _Nonnull const SDWebImageCoderEncodeQuality;
/**
This is the image coder protocol to provide custom image decoding/encoding.
@ -52,21 +44,11 @@ CG_EXTERN BOOL SDCGImageRefContainsAlpha(_Nullable CGImageRef imageRef);
Decode the image data to image.
@param data The image data to be decoded
@param options A dictionary containing any decoding options. Pass @{SDWebImageCoderDecodeFirstFrameOnly: @(YES)} to decode the first frame only.
@return The decoded image from data
*/
- (nullable UIImage *)decodedImageWithData:(nullable NSData *)data;
/**
Decompress the image with original image and image data.
@param image The original image to be decompressed
@param data The pointer to original image data. The pointer itself is nonnull but image data can be null. This data will set to cache if needed. If you do not need to modify data at the sametime, ignore this param.
@param optionsDict A dictionary containing any decompressing options. Pass {SDWebImageCoderScaleDownLargeImagesKey: @(YES)} to scale down large images
@return The decompressed image
*/
- (nullable UIImage *)decompressedImageWithImage:(nullable UIImage *)image
data:(NSData * _Nullable * _Nonnull)data
options:(nullable NSDictionary<NSString*, NSObject*>*)optionsDict;
- (nullable UIImage *)decodedImageWithData:(nullable NSData *)data
options:(nullable SDWebImageCoderOptions *)options;
#pragma mark - Encoding
@ -85,7 +67,9 @@ CG_EXTERN BOOL SDCGImageRefContainsAlpha(_Nullable CGImageRef imageRef);
@param format The image format to encode; note that the `SDImageFormatUndefined` format is also possible
@return The encoded image data
*/
- (nullable NSData *)encodedDataWithImage:(nullable UIImage *)image format:(SDImageFormat)format;
- (nullable NSData *)encodedDataWithImage:(nullable UIImage *)image
format:(SDImageFormat)format
options:(nullable SDWebImageCoderOptions *)options;
@end
@ -106,14 +90,43 @@ CG_EXTERN BOOL SDCGImageRefContainsAlpha(_Nullable CGImageRef imageRef);
*/
- (BOOL)canIncrementallyDecodeFromData:(nullable NSData *)data;
/**
Because incremental decoding needs to keep the decoded context, we will allocate a new instance of the same class for each download operation to avoid conflicts.
This init method should not return nil.
@return A new instance to do incremental decoding for the specified image format
*/
- (nonnull instancetype)initIncrementally;
/**
Incrementally decode the image data to an image.
@param data The image data that has been downloaded so far
@param finished Whether the download has finished
@warning Because incremental decoding needs to keep the decoded context, we will allocate a new instance of the same class for each download operation to avoid conflicts.
@return The decoded image from data
*/
- (nullable UIImage *)incrementallyDecodedImageWithData:(nullable NSData *)data finished:(BOOL)finished;
@end
@protocol SDWebImageAnimatedCoder <SDWebImageCoder, SDAnimatedImage>
@required
/**
Because an animated image coder should keep the original data, we will allocate a new instance of the same class for the specified animated image data.
The init method should return nil if it can't decode the specified animated image data.
@param data The animated image data to be decoded
@return A new instance to do animated decoding for the specified image data
*/
- (nullable instancetype)initWithAnimatedImageData:(nullable NSData *)data;
/**
Return the current animated image data. This is used for image instance archiving or image information retrieval.
You can return the desired data (it may not be the same instance provided to the init method, but should contain equal data).
@return The animated image data
*/
- (nullable NSData *)animatedImageData;
@end
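
A hedged usage sketch for the animated coder protocol above, driven through the `SDWebImageGIFCoder` implementation added later in this commit (the file path is a placeholder):
NSData *gifData = [NSData dataWithContentsOfFile:@"/path/to/animation.gif"]; // placeholder path
SDWebImageGIFCoder *coder = [[SDWebImageGIFCoder alloc] initWithAnimatedImageData:gifData];
if (coder) {
    NSUInteger frameCount = [coder animatedImageFrameCount];         // frames scanned from the data
    NSUInteger loopCount = [coder animatedImageLoopCount];           // 0 means loop indefinitely
    NSTimeInterval firstDuration = [coder animatedImageDurationAtIndex:0];
    UIImage *firstFrame = [coder animatedImageFrameAtIndex:0];       // decoded and ready for rendering
    NSData *originalData = [coder animatedImageData];                // the original bytes, e.g. for archiving
}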

View File

@ -8,24 +8,5 @@
#import "SDWebImageCoder.h"
NSString * const SDWebImageCoderScaleDownLargeImagesKey = @"scaleDownLargeImages";
CGColorSpaceRef SDCGColorSpaceGetDeviceRGB(void) {
static CGColorSpaceRef colorSpace;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
colorSpace = CGColorSpaceCreateDeviceRGB();
});
return colorSpace;
}
BOOL SDCGImageRefContainsAlpha(CGImageRef imageRef) {
if (!imageRef) {
return NO;
}
CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef);
BOOL hasAlpha = !(alphaInfo == kCGImageAlphaNone ||
alphaInfo == kCGImageAlphaNoneSkipFirst ||
alphaInfo == kCGImageAlphaNoneSkipLast);
return hasAlpha;
}
SDWebImageCoderOption const SDWebImageCoderDecodeFirstFrameOnly = @"decodeFirstFrameOnly";
SDWebImageCoderOption const SDWebImageCoderEncodeQuality = @"encodeQuality";

View File

@ -30,7 +30,58 @@
@param animatedImage An animated image. If it's not animated, nil is returned
@return The frames array
*/
+ (NSArray<SDWebImageFrame *> * _Nullable)framesFromAnimatedImage:(UIImage * _Nullable)animatedImage;
+ (NSArray<SDWebImageFrame *> * _Nullable)framesFromAnimatedImage:(UIImage * _Nullable)animatedImage NS_SWIFT_NAME(frames(from:));
/**
Return the shared device-dependent RGB color space.
On iOS, it's created as sRGB when available (otherwise device RGB).
On macOS, it's taken from the main screen's color space (falling back to device RGB on failure).
Because it's shared, you should not retain or release this object.
@return The device-dependent RGB color space
*/
+ (CGColorSpaceRef _Nonnull)colorSpaceGetDeviceRGB CF_RETURNS_NOT_RETAINED;
/**
Return the color space of the CGImage
@param imageRef The CGImage
@return The color space of CGImage, or if not supported, return the device-dependent RGB color space
*/
+ (CGColorSpaceRef _Nonnull)imageRefGetColorSpace:(_Nonnull CGImageRef)imageRef CF_RETURNS_NOT_RETAINED;
/**
Check whether CGImage contains alpha channel.
@param imageRef The CGImage
@return Return YES if CGImage contains alpha channel, otherwise return NO
*/
+ (BOOL)imageRefContainsAlpha:(_Nonnull CGImageRef)imageRef;
/**
Create a decoded image from the provided image. This follows the Create Rule and you are responsible for releasing the result after use.
It detects whether the image contains an alpha channel, then creates a new bitmap context of the same size and draws the image into it. This ensures the image does not need extra decoding after being set on an image view.
@param imageRef The CGImage
@return A new created decoded image
*/
+ (CGImageRef _Nullable)imageRefCreateDecoded:(_Nonnull CGImageRef)imageRef CF_RETURNS_RETAINED;
/**
Return the decoded image from the provided image. Unlike `imageRefCreateDecoded:`, this will not decode images that contain an alpha channel or animated images
@param image The image to be decoded
@return The decoded image
*/
+ (UIImage * _Nullable)decodedImageWithImage:(UIImage * _Nullable)image;
/**
Return the decoded and possibly scaled-down image from the provided image. If the image is larger than the limit size, it will be scaled down; otherwise this works like `decodedImageWithImage:`
@param image The image to be decoded and scaled down
@param bytes The byte-size limit. Pass 0 to use the built-in limit.
@return The decoded and probably scaled down image
*/
+ (UIImage * _Nullable)decodedAndScaledDownImageWithImage:(UIImage * _Nullable)image limitBytes:(NSUInteger)bytes;
#if SD_UIKIT || SD_WATCH
/**
@ -39,7 +90,8 @@
@param exifOrientation EXIF orientation
@return iOS orientation
*/
+ (UIImageOrientation)imageOrientationFromEXIFOrientation:(NSInteger)exifOrientation;
+ (UIImageOrientation)imageOrientationFromEXIFOrientation:(NSInteger)exifOrientation NS_SWIFT_NAME(imageOrientation(from:));
/**
Convert an iOS orientation to an EXIF image orientation.
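
A hedged sketch of the helper flow declared above: check for an alpha channel, force-decode the bitmap, or decode and scale down in one pass for very large images (`downloadedImage` is a placeholder):
UIImage *downloadedImage = [UIImage imageNamed:@"photo"]; // placeholder image
BOOL hasAlpha = [SDWebImageCoderHelper imageRefContainsAlpha:downloadedImage.CGImage];
// Force-decode so the bitmap needs no extra decoding once set on an image view.
UIImage *decoded = [SDWebImageCoderHelper decodedImageWithImage:downloadedImage];
// For very large images, decode and scale down in one pass; 0 uses the built-in byte limit.
UIImage *scaledDown = [SDWebImageCoderHelper decodedAndScaledDownImageWithImage:downloadedImage limitBytes:0];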

View File

@ -13,6 +13,34 @@
#import <ImageIO/ImageIO.h>
#import "SDAnimatedImageRep.h"
#if SD_UIKIT || SD_WATCH
static const size_t kBytesPerPixel = 4;
static const size_t kBitsPerComponent = 8;
/*
* Defines the maximum size in MB of the decoded image when the flag `SDWebImageScaleDownLargeImages` is set
* Suggested value for iPad1 and iPhone 3GS: 60.
* Suggested value for iPad2 and iPhone 4: 120.
* Suggested value for iPhone 3G and iPod 2 and earlier devices: 30.
*/
static const CGFloat kDestImageSizeMB = 120.f;
/*
* Defines the maximum size in MB of a tile used to decode image when the flag `SDWebImageScaleDownLargeImages` is set
* Suggested value for iPad1 and iPhone 3GS: 20.
* Suggested value for iPad2 and iPhone 4: 40.
* Suggested value for iPhone 3G and iPod 2 and earlier devices: 10.
*/
static const CGFloat kSourceImageTileSizeMB = 40.f;
static const CGFloat kBytesPerMB = 1024.0f * 1024.0f;
static const CGFloat kPixelsPerMB = kBytesPerMB / kBytesPerPixel;
static const CGFloat kDestTotalPixels = kDestImageSizeMB * kPixelsPerMB;
static const CGFloat kTileTotalPixels = kSourceImageTileSizeMB * kPixelsPerMB;
static const CGFloat kDestSeemOverlap = 2.0f; // the number of pixels to overlap the seams where tiles meet.
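// Worked example: with kBytesPerPixel = 4, kPixelsPerMB = 1,048,576 / 4 = 262,144 pixels, so
// kDestTotalPixels = 120 * 262,144 ≈ 31.5 million pixels (roughly a 5600x5600 image) and
// kTileTotalPixels = 40 * 262,144 ≈ 10.5 million pixels per decode tile.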
#endif
@implementation SDWebImageCoderHelper
+ (UIImage *)animatedImageWithFrames:(NSArray<SDWebImageFrame *> *)frames {
@ -149,7 +177,7 @@
// NSBitmapImageRep needs its current frame changed manually. "Good taste" API
[bitmapRep setProperty:NSImageCurrentFrame withValue:@(i)];
float frameDuration = [[bitmapRep valueForProperty:NSImageCurrentFrameDuration] floatValue];
NSImage *frameImage = [[NSImage alloc] initWithCGImage:bitmapRep.CGImage size:CGSizeZero];
NSImage *frameImage = [[NSImage alloc] initWithCGImage:bitmapRep.CGImage size:NSZeroSize];
SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:frameImage duration:frameDuration];
[frames addObject:frame];
}
@ -159,6 +187,229 @@
return frames;
}
+ (CGColorSpaceRef)colorSpaceGetDeviceRGB {
#if SD_MAC
CGColorSpaceRef screenColorSpace = NSScreen.mainScreen.colorSpace.CGColorSpace;
if (screenColorSpace) {
return screenColorSpace;
}
#endif
static CGColorSpaceRef colorSpace;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
#if SD_MAC
BOOL shouldUseSRGB = NO;
#else
BOOL shouldUseSRGB = NSFoundationVersionNumber > NSFoundationVersionNumber_iOS_8_x_Max;
#endif
if (shouldUseSRGB) {
// This is the color space iOS devices use for the screen. Combined with the right bitmapInfo, it can avoid the extra CA::Render::copy_image (flagged as `Color Copied Images` in Instruments) even without decoding
colorSpace = CGColorSpaceCreateWithName(kCGColorSpaceSRGB);
} else {
colorSpace = CGColorSpaceCreateDeviceRGB();
}
});
return colorSpace;
}
+ (CGColorSpaceRef)imageRefGetColorSpace:(CGImageRef)imageRef {
// current
CGColorSpaceModel imageColorSpaceModel = CGColorSpaceGetModel(CGImageGetColorSpace(imageRef));
CGColorSpaceRef colorspaceRef = CGImageGetColorSpace(imageRef);
BOOL unsupportedColorSpace = (imageColorSpaceModel == kCGColorSpaceModelUnknown ||
imageColorSpaceModel == kCGColorSpaceModelMonochrome ||
imageColorSpaceModel == kCGColorSpaceModelCMYK ||
imageColorSpaceModel == kCGColorSpaceModelIndexed);
if (unsupportedColorSpace) {
colorspaceRef = [self colorSpaceGetDeviceRGB];
}
return colorspaceRef;
}
+ (BOOL)imageRefContainsAlpha:(CGImageRef)imageRef {
if (!imageRef) {
return NO;
}
CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef);
BOOL hasAlpha = !(alphaInfo == kCGImageAlphaNone ||
alphaInfo == kCGImageAlphaNoneSkipFirst ||
alphaInfo == kCGImageAlphaNoneSkipLast);
return hasAlpha;
}
+ (CGImageRef)imageRefCreateDecoded:(CGImageRef)imageRef {
if (!imageRef) {
return NULL;
}
size_t width = CGImageGetWidth(imageRef);
size_t height = CGImageGetHeight(imageRef);
if (width == 0 || height == 0) return NULL;
CGRect rect = CGRectMake(0, 0, width, height);
BOOL hasAlpha = [self imageRefContainsAlpha:imageRef];
// iOS prefers BGRA8888 (premultiplied) or BGRX8888 bitmapInfo for screen rendering, which is the same as `UIGraphicsBeginImageContext()` or `-[CALayer drawInContext:]`
// Though you can use any supported bitmapInfo (see: https://developer.apple.com/library/content/documentation/GraphicsImaging/Conceptual/drawingwithquartz2d/dq_context/dq_context.html#//apple_ref/doc/uid/TP30001066-CH203-BCIBHHBB ) and let Core Graphics reorder it when you call `CGContextDrawImage`
// But since our built-in coders use this bitmapInfo, this can give a small performance benefit
CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, [self colorSpaceGetDeviceRGB], bitmapInfo);
if (!context) {
return NULL;
}
CGContextDrawImage(context, rect, imageRef);
CGImageRef newImageRef = CGBitmapContextCreateImage(context);
CGContextRelease(context);
return newImageRef;
}
+ (UIImage *)decodedImageWithImage:(UIImage *)image {
#if SD_MAC
return image;
#else
if (![self shouldDecodeImage:image]) {
return image;
}
CGImageRef imageRef = [self imageRefCreateDecoded:image.CGImage];
if (!imageRef) {
return image;
}
UIImage *decodedImage = [[UIImage alloc] initWithCGImage:imageRef scale:image.scale orientation:image.imageOrientation];
CGImageRelease(imageRef);
return decodedImage;
#endif
}
+ (UIImage *)decodedAndScaledDownImageWithImage:(UIImage *)image limitBytes:(NSUInteger)bytes {
#if SD_MAC
return image;
#else
if (![self shouldDecodeImage:image]) {
return image;
}
if (![self shouldScaleDownImage:image limitBytes:bytes]) {
return [self decodedImageWithImage:image];
}
CGFloat destTotalPixels;
CGFloat tileTotalPixels;
if (bytes > 0) {
destTotalPixels = bytes / kBytesPerPixel;
tileTotalPixels = destTotalPixels / 3;
} else {
destTotalPixels = kDestTotalPixels;
tileTotalPixels = kTileTotalPixels;
}
CGContextRef destContext;
// Autorelease the bitmap context and all vars to help the system free memory when there is a memory warning.
// On iOS 7, do not forget to call [[SDImageCache sharedImageCache] clearMemory];
@autoreleasepool {
CGImageRef sourceImageRef = image.CGImage;
CGSize sourceResolution = CGSizeZero;
sourceResolution.width = CGImageGetWidth(sourceImageRef);
sourceResolution.height = CGImageGetHeight(sourceImageRef);
float sourceTotalPixels = sourceResolution.width * sourceResolution.height;
// Determine the scale ratio to apply to the input image
// that results in an output image of the defined size.
// see kDestImageSizeMB, and how it relates to destTotalPixels.
float imageScale = destTotalPixels / sourceTotalPixels;
CGSize destResolution = CGSizeZero;
destResolution.width = (int)(sourceResolution.width*imageScale);
destResolution.height = (int)(sourceResolution.height*imageScale);
// current color space
CGColorSpaceRef colorspaceRef = [self imageRefGetColorSpace:sourceImageRef];
// kCGImageAlphaNone is not supported in CGBitmapContextCreate.
// Since the original image here has no alpha info, use kCGImageAlphaNoneSkipFirst
// to create bitmap graphics contexts without alpha info.
destContext = CGBitmapContextCreate(NULL,
destResolution.width,
destResolution.height,
kBitsPerComponent,
0,
colorspaceRef,
kCGBitmapByteOrder32Host|kCGImageAlphaNoneSkipFirst);
if (destContext == NULL) {
return image;
}
CGContextSetInterpolationQuality(destContext, kCGInterpolationHigh);
// Now define the size of the rectangle to be used for the
// incremental blits from the input image to the output image.
// we use a source tile width equal to the width of the source
// image due to the way that iOS retrieves image data from disk.
// iOS must decode an image from disk in full width 'bands', even
// if current graphics context is clipped to a subrect within that
// band. Therefore we fully utilize all of the pixel data that results
// from a decoding operation by anchoring our tile size to the full
// width of the input image.
CGRect sourceTile = CGRectZero;
sourceTile.size.width = sourceResolution.width;
// The source tile height is dynamic. Since we specified the size
// of the source tile in MB, see how many rows of pixels high it
// can be given the input image width.
sourceTile.size.height = (int)(tileTotalPixels / sourceTile.size.width );
sourceTile.origin.x = 0.0f;
// The output tile is the same proportions as the input tile, but
// scaled to image scale.
CGRect destTile;
destTile.size.width = destResolution.width;
destTile.size.height = sourceTile.size.height * imageScale;
destTile.origin.x = 0.0f;
// The source seam overlap is proportionate to the destination seam overlap.
// This is the amount of pixels to overlap each tile as we assemble the output image.
float sourceSeemOverlap = (int)((kDestSeemOverlap/destResolution.height)*sourceResolution.height);
CGImageRef sourceTileImageRef;
// calculate the number of read/write operations required to assemble the
// output image.
int iterations = (int)( sourceResolution.height / sourceTile.size.height );
// If tile height doesn't divide the image height evenly, add another iteration
// to account for the remaining pixels.
int remainder = (int)sourceResolution.height % (int)sourceTile.size.height;
if(remainder) {
iterations++;
}
// Add seam overlaps to the tiles, but save the original tile height for y coordinate calculations.
float sourceTileHeightMinusOverlap = sourceTile.size.height;
sourceTile.size.height += sourceSeemOverlap;
destTile.size.height += kDestSeemOverlap;
for( int y = 0; y < iterations; ++y ) {
@autoreleasepool {
sourceTile.origin.y = y * sourceTileHeightMinusOverlap + sourceSeemOverlap;
destTile.origin.y = destResolution.height - (( y + 1 ) * sourceTileHeightMinusOverlap * imageScale + kDestSeemOverlap);
sourceTileImageRef = CGImageCreateWithImageInRect( sourceImageRef, sourceTile );
if( y == iterations - 1 && remainder ) {
float dify = destTile.size.height;
destTile.size.height = CGImageGetHeight( sourceTileImageRef ) * imageScale;
dify -= destTile.size.height;
destTile.origin.y += dify;
}
CGContextDrawImage( destContext, destTile, sourceTileImageRef );
CGImageRelease( sourceTileImageRef );
}
}
CGImageRef destImageRef = CGBitmapContextCreateImage(destContext);
CGContextRelease(destContext);
if (destImageRef == NULL) {
return image;
}
UIImage *destImage = [[UIImage alloc] initWithCGImage:destImageRef scale:image.scale orientation:image.imageOrientation];
CGImageRelease(destImageRef);
if (destImage == nil) {
return image;
}
return destImage;
}
#endif
}
#if SD_UIKIT || SD_WATCH
// Convert an EXIF image orientation to an iOS one.
+ (UIImageOrientation)imageOrientationFromEXIFOrientation:(NSInteger)exifOrientation {
@ -233,6 +484,56 @@
#pragma mark - Helper Function
#if SD_UIKIT || SD_WATCH
+ (BOOL)shouldDecodeImage:(nullable UIImage *)image {
// Prevent "CGBitmapContextCreateImage: invalid context 0x0" error
if (image == nil) {
return NO;
}
// do not decode animated images
if (image.images != nil) {
return NO;
}
CGImageRef imageRef = image.CGImage;
BOOL hasAlpha = [self imageRefContainsAlpha:imageRef];
// do not decode images with alpha
if (hasAlpha) {
return NO;
}
return YES;
}
+ (BOOL)shouldScaleDownImage:(nonnull UIImage *)image limitBytes:(NSUInteger)bytes {
BOOL shouldScaleDown = YES;
CGImageRef sourceImageRef = image.CGImage;
CGSize sourceResolution = CGSizeZero;
sourceResolution.width = CGImageGetWidth(sourceImageRef);
sourceResolution.height = CGImageGetHeight(sourceImageRef);
float sourceTotalPixels = sourceResolution.width * sourceResolution.height;
if (sourceTotalPixels <= 0) {
return NO;
}
CGFloat destTotalPixels;
if (bytes > 0) {
destTotalPixels = bytes / kBytesPerPixel;
} else {
destTotalPixels = kDestTotalPixels;
}
if (destTotalPixels <= kPixelsPerMB) {
// Too small to scale down
return NO;
}
float imageScale = destTotalPixels / sourceTotalPixels;
if (imageScale < 1) {
shouldScaleDown = YES;
} else {
shouldScaleDown = NO;
}
return shouldScaleDown;
}
static NSUInteger gcd(NSUInteger a, NSUInteger b) {
NSUInteger c;
while (a != 0) {

View File

@ -12,6 +12,8 @@
#ifdef SD_WEBP
#import "SDWebImageWebPCoder.h"
#endif
#import "NSImage+Additions.h"
#import "UIImage+WebCache.h"
@interface SDWebImageCodersManager ()
@ -34,7 +36,7 @@
- (instancetype)init {
if (self = [super init]) {
// initialize with default coders
_mutableCoders = [@[[SDWebImageImageIOCoder sharedCoder]] mutableCopy];
_mutableCoders = [@[[SDWebImageImageIOCoder sharedCoder], [SDWebImageGIFCoder sharedCoder]] mutableCopy];
#ifdef SD_WEBP
[_mutableCoders addObject:[SDWebImageWebPCoder sharedCoder]];
#endif
@ -92,39 +94,32 @@
return NO;
}
- (UIImage *)decodedImageWithData:(NSData *)data {
- (UIImage *)decodedImageWithData:(NSData *)data options:(nullable SDWebImageCoderOptions *)options {
if (!data) {
return nil;
}
BOOL decodeFirstFrame = [[options valueForKey:SDWebImageCoderDecodeFirstFrameOnly] boolValue];
UIImage *image;
for (id<SDWebImageCoder> coder in self.coders) {
if ([coder canDecodeFromData:data]) {
return [coder decodedImageWithData:data];
image = [coder decodedImageWithData:data options:options];
break;
}
}
return nil;
if (decodeFirstFrame && image.images.count > 0) {
image = image.images.firstObject;
}
return image;
}
- (UIImage *)decompressedImageWithImage:(UIImage *)image
data:(NSData *__autoreleasing _Nullable *)data
options:(nullable NSDictionary<NSString*, NSObject*>*)optionsDict {
if (!image) {
return nil;
}
for (id<SDWebImageCoder> coder in self.coders) {
if ([coder canDecodeFromData:*data]) {
return [coder decompressedImageWithImage:image data:data options:optionsDict];
}
}
return nil;
}
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format {
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format options:(nullable SDWebImageCoderOptions *)options {
if (!image) {
return nil;
}
for (id<SDWebImageCoder> coder in self.coders) {
if ([coder canEncodeToFormat:format]) {
return [coder encodedDataWithImage:image format:format];
return [coder encodedDataWithImage:image format:format options:options];
}
}
return nil;
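
A hedged sketch of calling the refactored coders-manager API from this hunk; `imageData` and the option values are placeholders, and whether a given coder honors the encode-quality option depends on its implementation:
NSData *imageData = [NSData dataWithContentsOfFile:@"/path/to/image.gif"]; // placeholder path
// Decode, asking for the first frame only.
UIImage *poster = [[SDWebImageCodersManager sharedManager] decodedImageWithData:imageData options:@{SDWebImageCoderDecodeFirstFrameOnly : @(YES)}];
// Encode back to data with an encode quality between 0.0 and 1.0.
NSData *encoded = [[SDWebImageCodersManager sharedManager] encodedDataWithImage:poster format:SDImageFormatGIF options:@{SDWebImageCoderEncodeQuality : @(0.8)}];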

View File

@ -10,6 +10,7 @@
#import "SDWebImageManager.h"
#import "NSImage+Additions.h"
#import "SDWebImageCodersManager.h"
#import "SDWebImageCoderHelper.h"
#define LOCK(lock) dispatch_semaphore_wait(lock, DISPATCH_TIME_FOREVER);
#define UNLOCK(lock) dispatch_semaphore_signal(lock);
@ -361,7 +362,7 @@ didReceiveResponse:(NSURLResponse *)response
NSString *key = [[SDWebImageManager sharedManager] cacheKeyForURL:self.request.URL];
image = [self scaledImageForKey:key image:image];
if (self.shouldDecompressImages) {
image = [[SDWebImageCodersManager sharedManager] decompressedImageWithImage:image data:&imageData options:@{SDWebImageCoderScaleDownLargeImagesKey: @(NO)}];
image = [SDWebImageCoderHelper decodedImageWithImage:image];
}
// We do not keep the progressive decoding image even when `finished`=YES. Because they are for view rendering but not take full function from downloader options. And some coders implementation may not keep consistent between progressive decoding and normal decoding.
@ -427,7 +428,7 @@ didReceiveResponse:(NSURLResponse *)response
} else {
// decode the image in coder queue
dispatch_async(self.coderQueue, ^{
UIImage *image = [[SDWebImageCodersManager sharedManager] decodedImageWithData:imageData];
UIImage *image = [[SDWebImageCodersManager sharedManager] decodedImageWithData:imageData options:nil];
NSString *key = [[SDWebImageManager sharedManager] cacheKeyForURL:self.request.URL];
image = [self scaledImageForKey:key image:image];
@ -435,19 +436,15 @@ didReceiveResponse:(NSURLResponse *)response
// Do not force decoding animated GIFs and WebPs
if (image.images) {
shouldDecode = NO;
} else {
#ifdef SD_WEBP
SDImageFormat imageFormat = [NSData sd_imageFormatForImageData:imageData];
if (imageFormat == SDImageFormatWebP) {
shouldDecode = NO;
}
#endif
}
if (shouldDecode) {
if (self.shouldDecompressImages) {
BOOL shouldScaleDown = self.options & SDWebImageDownloaderScaleDownLargeImages;
image = [[SDWebImageCodersManager sharedManager] decompressedImageWithImage:image data:&imageData options:@{SDWebImageCoderScaleDownLargeImagesKey: @(shouldScaleDown)}];
if (shouldScaleDown) {
image = [SDWebImageCoderHelper decodedAndScaledDownImageWithImage:image limitBytes:0];
} else {
image = [SDWebImageCoderHelper decodedImageWithImage:image];
}
}
}
CGSize imageSize = image.size;

View File

@ -12,11 +12,10 @@
/**
Built-in coder using ImageIO that supports GIF encoding/decoding
@note `SDWebImageImageIOCoder` supports GIF but only as static (will use the 1st frame).
@note Use `SDWebImageGIFCoder` for fully animated GIFs - less performant than `FLAnimatedImage`
@note If you decide to make all `UIImageView`(including `FLAnimatedImageView`) instance support GIF. You should add this coder to `SDWebImageCodersManager` and make sure that it has a higher priority than `SDWebImageIOCoder`
@note The recommended approach for animated GIFs is using `FLAnimatedImage`. It's more performant than `UIImageView` for GIF displaying
@note Use `SDWebImageGIFCoder` for fully animated GIFs. For `UIImageView`, it will produce animated `UIImage`(`NSImage` on macOS) for rendering. For `SDAnimatedImageView`, it will use `SDAnimatedImage` for rendering.
@note The recommended approach for animated GIFs is using `SDAnimatedImage` with `SDAnimatedImageView`. It's more performant than `UIImageView` for GIF display (especially in memory usage)
*/
@interface SDWebImageGIFCoder : NSObject <SDWebImageCoder>
@interface SDWebImageGIFCoder : NSObject <SDWebImageCoder, SDWebImageAnimatedCoder>
@property (nonatomic, class, readonly, nonnull) SDWebImageGIFCoder *sharedCoder;

View File

@ -14,7 +14,47 @@
#import "SDWebImageCoderHelper.h"
#import "SDAnimatedImageRep.h"
@implementation SDWebImageGIFCoder
@interface SDGIFCoderFrame : NSObject
@property (nonatomic, assign) NSUInteger index; // Frame index (zero based)
@property (nonatomic, assign) NSTimeInterval duration; // Frame duration in seconds
@end
@implementation SDGIFCoderFrame
@end
@implementation SDWebImageGIFCoder {
size_t _width, _height;
#if SD_UIKIT || SD_WATCH
UIImageOrientation _orientation;
#endif
CGImageSourceRef _imageSource;
NSData *_imageData;
NSUInteger _loopCount;
NSUInteger _frameCount;
NSArray<SDGIFCoderFrame *> *_frames;
}
- (void)dealloc
{
if (_imageSource) {
CFRelease(_imageSource);
_imageSource = NULL;
}
#if SD_UIKIT
[[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
#endif
}
- (void)didReceiveMemoryWarning:(NSNotification *)notification
{
if (_imageSource) {
for (size_t i = 0; i < _frameCount; i++) {
CGImageSourceRemoveCacheAtIndex(_imageSource, i);
}
}
}
+ (instancetype)sharedCoder {
static SDWebImageGIFCoder *coder;
@ -30,7 +70,7 @@
return ([NSData sd_imageFormatForImageData:data] == SDImageFormatGIF);
}
- (UIImage *)decodedImageWithData:(NSData *)data {
- (UIImage *)decodedImageWithData:(NSData *)data options:(nullable NSDictionary<SDWebImageCoderOption,id> *)optionsDict {
if (!data) {
return nil;
}
@ -50,7 +90,8 @@
UIImage *animatedImage;
if (count <= 1) {
BOOL decodeFirstFrame = [optionsDict[SDWebImageCoderDecodeFirstFrameOnly] boolValue];
if (decodeFirstFrame || count <= 1) {
animatedImage = [[UIImage alloc] initWithData:data];
} else {
NSMutableArray<SDWebImageFrame *> *frames = [NSMutableArray array];
@ -69,15 +110,7 @@
[frames addObject:frame];
}
NSUInteger loopCount = 1;
NSDictionary *imageProperties = (__bridge_transfer NSDictionary *)CGImageSourceCopyProperties(source, nil);
NSDictionary *gifProperties = [imageProperties valueForKey:(__bridge_transfer NSString *)kCGImagePropertyGIFDictionary];
if (gifProperties) {
NSNumber *gifLoopCount = [gifProperties valueForKey:(__bridge_transfer NSString *)kCGImagePropertyGIFLoopCount];
if (gifLoopCount != nil) {
loopCount = gifLoopCount.unsignedIntegerValue;
}
}
NSUInteger loopCount = [self sd_imageLoopCountWithSource:source];
animatedImage = [SDWebImageCoderHelper animatedImageWithFrames:frames];
animatedImage.sd_imageLoopCount = loopCount;
@ -89,6 +122,19 @@
#endif
}
- (NSUInteger)sd_imageLoopCountWithSource:(CGImageSourceRef)source {
NSUInteger loopCount = 1;
NSDictionary *imageProperties = (__bridge_transfer NSDictionary *)CGImageSourceCopyProperties(source, nil);
NSDictionary *gifProperties = [imageProperties valueForKey:(__bridge_transfer NSString *)kCGImagePropertyGIFDictionary];
if (gifProperties) {
NSNumber *gifLoopCount = [gifProperties valueForKey:(__bridge_transfer NSString *)kCGImagePropertyGIFLoopCount];
if (gifLoopCount != nil) {
loopCount = gifLoopCount.unsignedIntegerValue;
}
}
return loopCount;
}
- (float)sd_frameDurationAtIndex:(NSUInteger)index source:(CGImageSourceRef)source {
float frameDuration = 0.1f;
CFDictionaryRef cfFrameProperties = CGImageSourceCopyPropertiesAtIndex(source, index, nil);
@ -121,19 +167,12 @@
return frameDuration;
}
- (UIImage *)decompressedImageWithImage:(UIImage *)image
data:(NSData *__autoreleasing _Nullable *)data
options:(nullable NSDictionary<NSString*, NSObject*>*)optionsDict {
// GIF do not decompress
return image;
}
#pragma mark - Encode
- (BOOL)canEncodeToFormat:(SDImageFormat)format {
return (format == SDImageFormatGIF);
}
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format {
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format options:(nullable SDWebImageCoderOptions *)options {
if (!image) {
return nil;
}
@ -180,4 +219,98 @@
return [imageData copy];
}
#pragma mark - SDWebImageAnimatedCoder
- (nullable instancetype)initWithAnimatedImageData:(nullable NSData *)data {
if (!data) {
return nil;
}
self = [super init];
if (self) {
// Use Image/IO's cache because it already keeps a balance between CPU & memory
CGImageSourceRef imageSource = CGImageSourceCreateWithData((__bridge CFDataRef)data, (__bridge CFDictionaryRef)@{(__bridge_transfer NSString *)kCGImageSourceShouldCache : @(YES)});
if (!imageSource) {
return nil;
}
BOOL framesValid = [self scanAndCheckFramesValidWithImageSource:imageSource];
if (!framesValid) {
CFRelease(imageSource);
return nil;
}
_imageSource = imageSource;
_imageData = data;
#if SD_UIKIT
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveMemoryWarning:) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
#endif
}
return self;
}
- (BOOL)scanAndCheckFramesValidWithImageSource:(CGImageSourceRef)imageSource
{
if (!imageSource) {
return NO;
}
NSUInteger frameCount = CGImageSourceGetCount(imageSource);
NSUInteger loopCount = [self sd_imageLoopCountWithSource:imageSource];
NSMutableArray<SDGIFCoderFrame *> *frames = [NSMutableArray array];
for (size_t i = 0; i < frameCount; i++) {
SDGIFCoderFrame *frame = [[SDGIFCoderFrame alloc] init];
frame.index = i;
frame.duration = [self sd_frameDurationAtIndex:i source:imageSource];
[frames addObject:frame];
}
_frameCount = frameCount;
_loopCount = loopCount;
_frames = [frames copy];
return YES;
}
- (NSData *)animatedImageData
{
return _imageData;
}
- (NSUInteger)animatedImageLoopCount
{
return _loopCount;
}
- (NSUInteger)animatedImageFrameCount
{
return _frameCount;
}
- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index
{
if (index >= _frameCount) {
return 0;
}
return _frames[index].duration;
}
- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index
{
CGImageRef imageRef = CGImageSourceCreateImageAtIndex(_imageSource, index, NULL);
if (!imageRef) {
return nil;
}
// The CGImage that Image/IO creates is not decoded, so we decode it here. Since this is called on a background queue, it avoids blocking the main queue during rendering (especially when more than one image view uses the same image instance)
CGImageRef newImageRef = [SDWebImageCoderHelper imageRefCreateDecoded:imageRef];
if (!newImageRef) {
newImageRef = imageRef;
} else {
CGImageRelease(imageRef);
}
#if SD_MAC
UIImage *image = [[UIImage alloc] initWithCGImage:newImageRef size:NSZeroSize];
#else
UIImage *image = [UIImage imageWithCGImage:newImageRef];
#endif
CGImageRelease(newImageRef);
return image;
}
@end

View File

@ -12,40 +12,13 @@
#import <ImageIO/ImageIO.h>
#import "NSData+ImageContentType.h"
#if SD_UIKIT || SD_WATCH
static const size_t kBytesPerPixel = 4;
static const size_t kBitsPerComponent = 8;
/*
* Defines the maximum size in MB of the decoded image when the flag `SDWebImageScaleDownLargeImages` is set
* Suggested value for iPad1 and iPhone 3GS: 60.
* Suggested value for iPad2 and iPhone 4: 120.
* Suggested value for iPhone 3G and iPod 2 and earlier devices: 30.
*/
static const CGFloat kDestImageSizeMB = 60.0f;
/*
* Defines the maximum size in MB of a tile used to decode image when the flag `SDWebImageScaleDownLargeImages` is set
* Suggested value for iPad1 and iPhone 3GS: 20.
* Suggested value for iPad2 and iPhone 4: 40.
* Suggested value for iPhone 3G and iPod 2 and earlier devices: 10.
*/
static const CGFloat kSourceImageTileSizeMB = 20.0f;
static const CGFloat kBytesPerMB = 1024.0f * 1024.0f;
static const CGFloat kPixelsPerMB = kBytesPerMB / kBytesPerPixel;
static const CGFloat kDestTotalPixels = kDestImageSizeMB * kPixelsPerMB;
static const CGFloat kTileTotalPixels = kSourceImageTileSizeMB * kPixelsPerMB;
static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to overlap the seems where tiles meet.
#endif
@implementation SDWebImageImageIOCoder {
size_t _width, _height;
#if SD_UIKIT || SD_WATCH
UIImageOrientation _orientation;
#endif
CGImageSourceRef _imageSource;
NSUInteger _frameCount;
}
- (void)dealloc {
@ -53,6 +26,18 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
CFRelease(_imageSource);
_imageSource = NULL;
}
#if SD_UIKIT
[[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
#endif
}
- (void)didReceiveMemoryWarning:(NSNotification *)notification
{
if (_imageSource) {
for (size_t i = 0; i < _frameCount; i++) {
CGImageSourceRemoveCacheAtIndex(_imageSource, i);
}
}
}
+ (instancetype)sharedCoder {
@ -91,7 +76,7 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
}
}
- (UIImage *)decodedImageWithData:(NSData *)data {
- (UIImage *)decodedImageWithData:(NSData *)data options:(nullable SDWebImageCoderOptions *)options {
if (!data) {
return nil;
}
@ -114,10 +99,19 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
#endif
}
- (UIImage *)incrementallyDecodedImageWithData:(NSData *)data finished:(BOOL)finished {
if (!_imageSource) {
#pragma mark - Progressive Decode
- (instancetype)initIncrementally {
self = [super init];
if (self) {
_imageSource = CGImageSourceCreateIncremental(NULL);
#if SD_UIKIT
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveMemoryWarning:) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
#endif
}
return self;
}
- (UIImage *)incrementallyDecodedImageWithData:(NSData *)data finished:(BOOL)finished {
UIImage *image;
// The following code is from http://www.cocoaintheshell.com/2011/05/progressive-images-download-imageio/
@ -125,6 +119,7 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
// Update the data source, we must pass ALL the data, not just the new bytes
CGImageSourceUpdateData(_imageSource, (__bridge CFDataRef)data, finished);
_frameCount = CGImageSourceGetCount(_imageSource);
if (_width + _height == 0) {
CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(_imageSource, 0, NULL);
@ -156,8 +151,8 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
// Workaround for iOS anamorphic image
if (partialImageRef) {
const size_t partialHeight = CGImageGetHeight(partialImageRef);
CGColorSpaceRef colorSpace = SDCGColorSpaceGetDeviceRGB();
CGContextRef bmContext = CGBitmapContextCreate(NULL, _width, _height, 8, 0, colorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedFirst);
CGColorSpaceRef colorSpace = [SDWebImageCoderHelper colorSpaceGetDeviceRGB];
CGContextRef bmContext = CGBitmapContextCreate(NULL, _width, _height, 8, 0, colorSpace, kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
if (bmContext) {
CGContextDrawImage(bmContext, (CGRect){.origin.x = 0.0f, .origin.y = 0.0f, .size.width = _width, .size.height = partialHeight}, partialImageRef);
CGImageRelease(partialImageRef);
@ -191,198 +186,6 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
return image;
}
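With this change the progressive path becomes instance-based: a caller creates a dedicated coder with -initIncrementally and then passes the full accumulated data (not just the new bytes) on every update. A minimal sketch of that flow, where the chunk and expected-length variables are illustrative and not part of this commit:
SDWebImageImageIOCoder *progressiveCoder = [[SDWebImageImageIOCoder alloc] initIncrementally];
NSMutableData *receivedData = [NSMutableData data];
// Called as each chunk of the download arrives.
[receivedData appendData:newChunk];
BOOL downloadFinished = (receivedData.length >= expectedContentLength);
// Always pass ALL the data received so far.
UIImage *partialImage = [progressiveCoder incrementallyDecodedImageWithData:receivedData finished:downloadFinished];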
- (UIImage *)decompressedImageWithImage:(UIImage *)image
data:(NSData *__autoreleasing _Nullable *)data
options:(nullable NSDictionary<NSString*, NSObject*>*)optionsDict {
#if SD_MAC
return image;
#endif
#if SD_UIKIT || SD_WATCH
BOOL shouldScaleDown = NO;
if (optionsDict != nil) {
NSNumber *scaleDownLargeImagesOption = nil;
if ([optionsDict[SDWebImageCoderScaleDownLargeImagesKey] isKindOfClass:[NSNumber class]]) {
scaleDownLargeImagesOption = (NSNumber *)optionsDict[SDWebImageCoderScaleDownLargeImagesKey];
}
if (scaleDownLargeImagesOption != nil) {
shouldScaleDown = [scaleDownLargeImagesOption boolValue];
}
}
if (!shouldScaleDown) {
return [self sd_decompressedImageWithImage:image];
} else {
UIImage *scaledDownImage = [self sd_decompressedAndScaledDownImageWithImage:image];
if (scaledDownImage && !CGSizeEqualToSize(scaledDownImage.size, image.size)) {
// if the image is scaled down, need to modify the data pointer as well
SDImageFormat format = [NSData sd_imageFormatForImageData:*data];
NSData *imageData = [self encodedDataWithImage:scaledDownImage format:format];
if (imageData) {
*data = imageData;
}
}
return scaledDownImage;
}
#endif
}
#if SD_UIKIT || SD_WATCH
- (nullable UIImage *)sd_decompressedImageWithImage:(nullable UIImage *)image {
if (![[self class] shouldDecodeImage:image]) {
return image;
}
// autorelease the bitmap context and all vars to help the system free memory when there is a memory warning.
// on iOS7, do not forget to call [[SDImageCache sharedImageCache] clearMemory];
@autoreleasepool{
CGImageRef imageRef = image.CGImage;
CGColorSpaceRef colorspaceRef = [[self class] colorSpaceForImageRef:imageRef];
size_t width = CGImageGetWidth(imageRef);
size_t height = CGImageGetHeight(imageRef);
// kCGImageAlphaNone is not supported in CGBitmapContextCreate.
// Since the original image here has no alpha info, use kCGImageAlphaNoneSkipLast
// to create bitmap graphics contexts without alpha info.
CGContextRef context = CGBitmapContextCreate(NULL,
width,
height,
kBitsPerComponent,
0,
colorspaceRef,
kCGBitmapByteOrderDefault|kCGImageAlphaNoneSkipLast);
if (context == NULL) {
return image;
}
// Draw the image into the context and retrieve the new bitmap image without alpha
CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
CGImageRef imageRefWithoutAlpha = CGBitmapContextCreateImage(context);
UIImage *imageWithoutAlpha = [[UIImage alloc] initWithCGImage:imageRefWithoutAlpha scale:image.scale orientation:image.imageOrientation];
CGContextRelease(context);
CGImageRelease(imageRefWithoutAlpha);
return imageWithoutAlpha;
}
}
- (nullable UIImage *)sd_decompressedAndScaledDownImageWithImage:(nullable UIImage *)image {
if (![[self class] shouldDecodeImage:image]) {
return image;
}
if (![[self class] shouldScaleDownImage:image]) {
return [self sd_decompressedImageWithImage:image];
}
CGContextRef destContext;
// autorelease the bitmap context and all vars to help the system free memory when there is a memory warning.
// on iOS7, do not forget to call [[SDImageCache sharedImageCache] clearMemory];
@autoreleasepool {
CGImageRef sourceImageRef = image.CGImage;
CGSize sourceResolution = CGSizeZero;
sourceResolution.width = CGImageGetWidth(sourceImageRef);
sourceResolution.height = CGImageGetHeight(sourceImageRef);
float sourceTotalPixels = sourceResolution.width * sourceResolution.height;
// Determine the scale ratio to apply to the input image
// that results in an output image of the defined size.
// See kDestImageSizeMB and how it relates to kDestTotalPixels.
float imageScale = kDestTotalPixels / sourceTotalPixels;
CGSize destResolution = CGSizeZero;
destResolution.width = (int)(sourceResolution.width*imageScale);
destResolution.height = (int)(sourceResolution.height*imageScale);
// current color space
CGColorSpaceRef colorspaceRef = [[self class] colorSpaceForImageRef:sourceImageRef];
// kCGImageAlphaNone is not supported in CGBitmapContextCreate.
// Since the original image here has no alpha info, use kCGImageAlphaNoneSkipLast
// to create bitmap graphics contexts without alpha info.
destContext = CGBitmapContextCreate(NULL,
destResolution.width,
destResolution.height,
kBitsPerComponent,
0,
colorspaceRef,
kCGBitmapByteOrderDefault|kCGImageAlphaNoneSkipLast);
if (destContext == NULL) {
return image;
}
CGContextSetInterpolationQuality(destContext, kCGInterpolationHigh);
// Now define the size of the rectangle to be used for the
// incremental blits from the input image to the output image.
// We use a source tile width equal to the width of the source
// image due to the way that iOS retrieves image data from disk.
// iOS must decode an image from disk in full-width 'bands', even
// if the current graphics context is clipped to a subrect within that
// band. Therefore we fully utilize all of the pixel data that results
// from a decoding operation by anchoring our tile size to the full
// width of the input image.
CGRect sourceTile = CGRectZero;
sourceTile.size.width = sourceResolution.width;
// The source tile height is dynamic. Since we specified the size
// of the source tile in MB, see how many rows of pixels high it
// can be given the input image width.
sourceTile.size.height = (int)(kTileTotalPixels / sourceTile.size.width );
sourceTile.origin.x = 0.0f;
// The output tile has the same proportions as the input tile, but
// is scaled to the image scale.
CGRect destTile;
destTile.size.width = destResolution.width;
destTile.size.height = sourceTile.size.height * imageScale;
destTile.origin.x = 0.0f;
// The source seam overlap is proportionate to the destination seam overlap.
// This is the number of pixels to overlap each tile as we assemble the output image.
float sourceSeemOverlap = (int)((kDestSeemOverlap/destResolution.height)*sourceResolution.height);
CGImageRef sourceTileImageRef;
// calculate the number of read/write operations required to assemble the
// output image.
int iterations = (int)( sourceResolution.height / sourceTile.size.height );
// If tile height doesn't divide the image height evenly, add another iteration
// to account for the remaining pixels.
int remainder = (int)sourceResolution.height % (int)sourceTile.size.height;
if(remainder) {
iterations++;
}
// Add seam overlaps to the tiles, but save the original tile height for y coordinate calculations.
float sourceTileHeightMinusOverlap = sourceTile.size.height;
sourceTile.size.height += sourceSeemOverlap;
destTile.size.height += kDestSeemOverlap;
for( int y = 0; y < iterations; ++y ) {
@autoreleasepool {
sourceTile.origin.y = y * sourceTileHeightMinusOverlap + sourceSeemOverlap;
destTile.origin.y = destResolution.height - (( y + 1 ) * sourceTileHeightMinusOverlap * imageScale + kDestSeemOverlap);
sourceTileImageRef = CGImageCreateWithImageInRect( sourceImageRef, sourceTile );
if( y == iterations - 1 && remainder ) {
float dify = destTile.size.height;
destTile.size.height = CGImageGetHeight( sourceTileImageRef ) * imageScale;
dify -= destTile.size.height;
destTile.origin.y += dify;
}
CGContextDrawImage( destContext, destTile, sourceTileImageRef );
CGImageRelease( sourceTileImageRef );
}
}
CGImageRef destImageRef = CGBitmapContextCreateImage(destContext);
CGContextRelease(destContext);
if (destImageRef == NULL) {
return image;
}
UIImage *destImage = [[UIImage alloc] initWithCGImage:destImageRef scale:image.scale orientation:image.imageOrientation];
CGImageRelease(destImageRef);
if (destImage == nil) {
return image;
}
return destImage;
}
}
#endif
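As a rough worked example of how these constants interact (assuming, purely for illustration, that kBytesPerPixel is 4 and kDestImageSizeMB is 60; the actual values are defined above):
// kPixelsPerMB     = 1,048,576 / 4   = 262,144 pixels
// kDestTotalPixels = 60 * 262,144    ≈ 15.7 M pixels
// kTileTotalPixels = 20 * 262,144    ≈ 5.2 M pixels
// For a hypothetical 4000 x 5000 source (20 M pixels):
//   imageScale             = 15.7 M / 20 M ≈ 0.79
//   destResolution         ≈ 3145 x 3932
//   sourceTile.size.height = 5.2 M / 4000  ≈ 1310 rows per band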
#pragma mark - Encode
- (BOOL)canEncodeToFormat:(SDImageFormat)format {
switch (format) {
@ -397,13 +200,13 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
}
}
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format {
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format options:(nullable SDWebImageCoderOptions *)options {
if (!image) {
return nil;
}
if (format == SDImageFormatUndefined) {
BOOL hasAlpha = SDCGImageRefContainsAlpha(image.CGImage);
BOOL hasAlpha = [SDWebImageCoderHelper imageRefContainsAlpha:image.CGImage];
if (hasAlpha) {
format = SDImageFormatPNG;
} else {
@ -441,29 +244,6 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
return [imageData copy];
}
#pragma mark - Helper
+ (BOOL)shouldDecodeImage:(nullable UIImage *)image {
// Prevent "CGBitmapContextCreateImage: invalid context 0x0" error
if (image == nil) {
return NO;
}
// do not decode animated images
if (image.images != nil) {
return NO;
}
CGImageRef imageRef = image.CGImage;
BOOL hasAlpha = SDCGImageRefContainsAlpha(imageRef);
// do not decode images with alpha
if (hasAlpha) {
return NO;
}
return YES;
}
+ (BOOL)canDecodeFromHEICFormat {
static BOOL canDecode = NO;
static dispatch_once_t onceToken;
@ -538,39 +318,4 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
}
#endif
#if SD_UIKIT || SD_WATCH
+ (BOOL)shouldScaleDownImage:(nonnull UIImage *)image {
BOOL shouldScaleDown = YES;
CGImageRef sourceImageRef = image.CGImage;
CGSize sourceResolution = CGSizeZero;
sourceResolution.width = CGImageGetWidth(sourceImageRef);
sourceResolution.height = CGImageGetHeight(sourceImageRef);
float sourceTotalPixels = sourceResolution.width * sourceResolution.height;
float imageScale = kDestTotalPixels / sourceTotalPixels;
if (imageScale < 1) {
shouldScaleDown = YES;
} else {
shouldScaleDown = NO;
}
return shouldScaleDown;
}
+ (CGColorSpaceRef)colorSpaceForImageRef:(CGImageRef)imageRef {
// current color space
CGColorSpaceModel imageColorSpaceModel = CGColorSpaceGetModel(CGImageGetColorSpace(imageRef));
CGColorSpaceRef colorspaceRef = CGImageGetColorSpace(imageRef);
BOOL unsupportedColorSpace = (imageColorSpaceModel == kCGColorSpaceModelUnknown ||
imageColorSpaceModel == kCGColorSpaceModelMonochrome ||
imageColorSpaceModel == kCGColorSpaceModelCMYK ||
imageColorSpaceModel == kCGColorSpaceModelIndexed);
if (unsupportedColorSpace) {
colorspaceRef = SDCGColorSpaceGetDeviceRGB();
}
return colorspaceRef;
}
#endif
@end

View File

@ -14,7 +14,7 @@
/**
Built in coder that supports WebP and animated WebP
*/
@interface SDWebImageWebPCoder : NSObject <SDWebImageProgressiveCoder>
@interface SDWebImageWebPCoder : NSObject <SDWebImageProgressiveCoder, SDWebImageAnimatedCoder>
@property (nonatomic, class, readonly, nonnull) SDWebImageWebPCoder *sharedCoder;

View File

@ -24,8 +24,43 @@
#import "webp/mux.h"
#endif
#define LOCK(...) dispatch_semaphore_wait(self->_lock, DISPATCH_TIME_FOREVER); \
__VA_ARGS__; \
dispatch_semaphore_signal(self->_lock);
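The LOCK macro simply brackets its argument between a semaphore wait and signal; for example, the call LOCK({ image = [self safeAnimatedImageFrameAtIndex:index]; }); used further down expands to roughly:
dispatch_semaphore_wait(self->_lock, DISPATCH_TIME_FOREVER);
{ image = [self safeAnimatedImageFrameAtIndex:index]; };
dispatch_semaphore_signal(self->_lock);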
@interface SDWebPCoderFrame : NSObject
@property (nonatomic, assign) NSUInteger index; // Frame index (zero based)
@property (nonatomic, assign) NSTimeInterval duration; // Frame duration in seconds
@property (nonatomic, assign) NSUInteger width; // Frame width
@property (nonatomic, assign) NSUInteger height; // Frame height
@property (nonatomic, assign) NSUInteger offsetX; // Frame origin.x in canvas (left-bottom based)
@property (nonatomic, assign) NSUInteger offsetY; // Frame origin.y in canvas (left-bottom based)
@property (nonatomic, assign) BOOL hasAlpha; // Whether frame contains alpha
@property (nonatomic, assign) BOOL isFullSize; // Whether frame size is equal to canvas size
@property (nonatomic, assign) WebPMuxAnimBlend blend; // Frame blend operation
@property (nonatomic, assign) WebPMuxAnimDispose dispose; // Frame dispose method
@property (nonatomic, assign) NSUInteger blendFromIndex; // The nearest previous frame index whose blend mode is WEBP_MUX_BLEND
@end
@implementation SDWebPCoderFrame
@end
@implementation SDWebImageWebPCoder {
WebPIDecoder *_idec;
WebPDemuxer *_demux;
NSData *_imageData;
NSUInteger _loopCount;
NSUInteger _frameCount;
NSArray<SDWebPCoderFrame *> *_frames;
CGContextRef _canvas;
BOOL _hasAnimation;
BOOL _hasAlpha;
CGFloat _canvasWidth;
CGFloat _canvasHeight;
dispatch_semaphore_t _lock;
NSUInteger _currentBlendIndex;
}
- (void)dealloc {
@ -33,6 +68,14 @@
WebPIDelete(_idec);
_idec = NULL;
}
if (_demux) {
WebPDemuxDelete(_demux);
_demux = NULL;
}
if (_canvas) {
CGContextRelease(_canvas);
_canvas = NULL;
}
}
+ (instancetype)sharedCoder {
@ -53,7 +96,7 @@
return ([NSData sd_imageFormatForImageData:data] == SDImageFormatWebP);
}
- (UIImage *)decodedImageWithData:(NSData *)data {
- (UIImage *)decodedImageWithData:(NSData *)data options:(nullable NSDictionary<NSString *,id> *)optionsDict {
if (!data) {
return nil;
}
@ -68,49 +111,41 @@
}
uint32_t flags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
int loopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
CGBitmapInfo bitmapInfo;
if (!(flags & ALPHA_FLAG)) {
bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaNoneSkipLast;
} else {
bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast;
}
CGContextRef canvas = CGBitmapContextCreate(NULL, canvasWidth, canvasHeight, 8, 0, SDCGColorSpaceGetDeviceRGB(), bitmapInfo);
if (!canvas) {
WebPDemuxDelete(demuxer);
return nil;
}
if (!(flags & ANIMATION_FLAG)) {
BOOL hasAnimation = flags & ANIMATION_FLAG;
BOOL decodeFirstFrame = [[optionsDict valueForKey:SDWebImageCoderDecodeFirstFrameOnly] boolValue];
if (!hasAnimation) {
// for static single webp image
UIImage *staticImage = [self sd_rawWebpImageWithData:webpData];
if (staticImage) {
// drawing into a CGBitmapContext can reduce memory usage
CGImageRef imageRef = staticImage.CGImage;
size_t width = CGImageGetWidth(imageRef);
size_t height = CGImageGetHeight(imageRef);
CGContextDrawImage(canvas, CGRectMake(0, 0, width, height), imageRef);
CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
#if SD_UIKIT || SD_WATCH
staticImage = [[UIImage alloc] initWithCGImage:newImageRef];
#else
staticImage = [[UIImage alloc] initWithCGImage:newImageRef size:NSZeroSize];
#endif
CGImageRelease(newImageRef);
}
WebPDemuxDelete(demuxer);
CGContextRelease(canvas);
return staticImage;
}
// for animated webp image
WebPIterator iter;
// libwebp's frame index starts at 1
if (!WebPDemuxGetFrame(demuxer, 1, &iter)) {
WebPDemuxReleaseIterator(&iter);
WebPDemuxDelete(demuxer);
CGContextRelease(canvas);
return nil;
}
if (decodeFirstFrame) {
// first frame for animated webp image
UIImage *firstFrameImage = [self sd_rawWebpImageWithData:iter.fragment];
WebPDemuxReleaseIterator(&iter);
WebPDemuxDelete(demuxer);
return firstFrameImage;
}
int loopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
BOOL hasAlpha = flags & ALPHA_FLAG;
CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
CGContextRef canvas = CGBitmapContextCreate(NULL, canvasWidth, canvasHeight, 8, 0, [SDWebImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
if (!canvas) {
WebPDemuxDelete(demuxer);
return nil;
}
@ -123,13 +158,8 @@
continue;
}
int duration = iter.duration;
if (duration <= 10) {
// The WebP standard says a 0 duration is used for canvas updating without showing the image, but Chrome and other implementations actually use 100ms when the duration is less than or equal to 10ms
// Some animated WebP images are also created without a duration, so we keep compatibility
duration = 100;
}
SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:image duration:duration / 1000.f];
NSTimeInterval duration = [self sd_frameDurationWithIterator:iter];
SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:image duration:duration];
[frames addObject:frame];
}
@ -145,15 +175,20 @@
return animatedImage;
}
- (UIImage *)incrementallyDecodedImageWithData:(NSData *)data finished:(BOOL)finished {
if (!_idec) {
#pragma mark - Progressive Decode
- (instancetype)initIncrementally {
self = [super init];
if (self) {
// Progressive images need transparency, so always use premultiplied RGBA
_idec = WebPINewRGB(MODE_rgbA, NULL, 0, 0);
_idec = WebPINewRGB(MODE_bgrA, NULL, 0, 0);
}
return self;
}
- (UIImage *)incrementallyDecodedImageWithData:(NSData *)data finished:(BOOL)finished {
if (!_idec) {
return nil;
}
}
UIImage *image;
VP8StatusCode status = WebPIUpdate(_idec, data.bytes, data.length);
@ -172,9 +207,9 @@
size_t rgbaSize = last_y * stride;
CGDataProviderRef provider =
CGDataProviderCreateWithData(NULL, rgba, rgbaSize, NULL);
CGColorSpaceRef colorSpaceRef = SDCGColorSpaceGetDeviceRGB();
CGColorSpaceRef colorSpaceRef = [SDWebImageCoderHelper colorSpaceGetDeviceRGB];
CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast;
CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst;
size_t components = 4;
CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
// We use last_y for the image height because of a libwebp bug (https://bugs.chromium.org/p/webp/issues/detail?id=362)
@ -191,7 +226,7 @@
return nil;
}
CGContextRef canvas = CGBitmapContextCreate(NULL, width, height, 8, 0, SDCGColorSpaceGetDeviceRGB(), bitmapInfo);
CGContextRef canvas = CGBitmapContextCreate(NULL, width, height, 8, 0, [SDWebImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
if (!canvas) {
CGImageRelease(imageRef);
return nil;
@ -225,11 +260,26 @@
return image;
}
- (UIImage *)decompressedImageWithImage:(UIImage *)image
data:(NSData *__autoreleasing _Nullable *)data
options:(nullable NSDictionary<NSString*, NSObject*>*)optionsDict {
// WebP do not decompress
return image;
- (void)sd_blendWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter {
size_t canvasHeight = CGBitmapContextGetHeight(canvas);
CGFloat tmpX = iter.x_offset;
CGFloat tmpY = canvasHeight - iter.height - iter.y_offset;
CGRect imageRect = CGRectMake(tmpX, tmpY, iter.width, iter.height);
if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
CGContextClearRect(canvas, imageRect);
} else {
UIImage *image = [self sd_rawWebpImageWithData:iter.fragment];
if (!image) {
return;
}
BOOL shouldBlend = iter.blend_method == WEBP_MUX_BLEND;
// If not blending, cover the target image rect (first clear, then draw)
if (!shouldBlend) {
CGContextClearRect(canvas, imageRect);
}
CGContextDrawImage(canvas, imageRect, image.CGImage);
}
}
- (nullable UIImage *)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter {
@ -238,11 +288,9 @@
return nil;
}
size_t canvasWidth = CGBitmapContextGetWidth(canvas);
size_t canvasHeight = CGBitmapContextGetHeight(canvas);
CGSize size = CGSizeMake(canvasWidth, canvasHeight);
CGFloat tmpX = iter.x_offset;
CGFloat tmpY = size.height - iter.height - iter.y_offset;
CGFloat tmpY = canvasHeight - iter.height - iter.y_offset;
CGRect imageRect = CGRectMake(tmpX, tmpY, iter.width, iter.height);
BOOL shouldBlend = iter.blend_method == WEBP_MUX_BLEND;
@ -278,8 +326,14 @@
return nil;
}
config.output.colorspace = config.input.has_alpha ? MODE_rgbA : MODE_RGB;
BOOL hasAlpha = config.input.has_alpha;
// iOS prefers a BGRA8888 (premultiplied) or BGRX8888 bitmapInfo for screen rendering, which is the same as `UIGraphicsBeginImageContext()` or `-[CALayer drawInContext:]`
// Using this bitmapInfo, combined with the right colorspace, avoids an extra CA::Render::copy_image even without force-decoding (marked as `Color Copied Images` in Instruments)
WEBP_CSP_MODE colorspace = MODE_bgrA;
CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
config.options.use_threads = 1;
config.output.colorspace = colorspace;
// Decode the WebP image data into a RGBA value array
if (WebPDecode(webpData.bytes, webpData.size, &config) != VP8_STATUS_OK) {
@ -296,11 +350,12 @@
// Construct a UIImage from the decoded RGBA value array
CGDataProviderRef provider =
CGDataProviderCreateWithData(NULL, config.output.u.RGBA.rgba, config.output.u.RGBA.size, FreeImageData);
CGColorSpaceRef colorSpaceRef = SDCGColorSpaceGetDeviceRGB();
CGBitmapInfo bitmapInfo = config.input.has_alpha ? kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast : kCGBitmapByteOrder32Big | kCGImageAlphaNoneSkipLast;
size_t components = config.input.has_alpha ? 4 : 3;
size_t bitsPerComponent = 8;
size_t bitsPerPixel = 32;
size_t bytesPerRow = config.output.u.RGBA.stride;
CGColorSpaceRef colorSpaceRef = [SDWebImageCoderHelper colorSpaceGetDeviceRGB];
CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
CGImageRef imageRef = CGImageCreate(width, height, 8, components * 8, components * width, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
CGImageRef imageRef = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
CGDataProviderRelease(provider);
@ -314,12 +369,22 @@
return image;
}
- (NSTimeInterval)sd_frameDurationWithIterator:(WebPIterator)iter {
int duration = iter.duration;
if (duration <= 10) {
// The WebP standard says a 0 duration is used for canvas updating without showing the image, but Chrome and other implementations actually use 100ms when the duration is less than or equal to 10ms
// Some animated WebP images are also created without a duration, so we keep compatibility
duration = 100;
}
return duration / 1000.0;
}
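For example, an encoded frame duration of 0 ms or 10 ms is reported as 0.1 s by this helper, while 40 ms is reported as 0.04 s.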
#pragma mark - Encode
- (BOOL)canEncodeToFormat:(SDImageFormat)format {
return (format == SDImageFormatWebP);
}
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format {
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format options:(nullable SDWebImageCoderOptions *)options {
if (!image) {
return nil;
}
@ -421,6 +486,187 @@ static void FreeImageData(void *info, const void *data, size_t size) {
free((void *)data);
}
#pragma mark - SDWebImageAnimatedCoder
- (instancetype)initWithAnimatedImageData:(NSData *)data {
if (!data) {
return nil;
}
self = [super init];
if (self) {
WebPData webpData;
WebPDataInit(&webpData);
webpData.bytes = data.bytes;
webpData.size = data.length;
WebPDemuxer *demuxer = WebPDemux(&webpData);
if (!demuxer) {
return nil;
}
BOOL framesValid = [self scanAndCheckFramesValidWithDemuxer:demuxer];
if (!framesValid) {
WebPDemuxDelete(demuxer);
return nil;
}
_demux = demuxer;
_imageData = data;
_currentBlendIndex = NSNotFound;
_lock = dispatch_semaphore_create(1);
}
return self;
}
- (BOOL)scanAndCheckFramesValidWithDemuxer:(WebPDemuxer *)demuxer
{
if (!demuxer) {
return NO;
}
WebPIterator iter;
if (!WebPDemuxGetFrame(demuxer, 1, &iter)) {
WebPDemuxReleaseIterator(&iter);
return NO;
}
uint32_t iterIndex = 0;
uint32_t lastBlendIndex = 0;
uint32_t flags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
BOOL hasAnimation = flags & ANIMATION_FLAG;
BOOL hasAlpha = flags & ALPHA_FLAG;
int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
uint32_t frameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
uint32_t loopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
NSMutableArray<SDWebPCoderFrame *> *frames = [NSMutableArray array];
// We should loop over all the frames and scan each frame's blendFromIndex for later decoding; this also ensures all frames are valid
do {
SDWebPCoderFrame *frame = [[SDWebPCoderFrame alloc] init];
frame.index = iterIndex;
frame.duration = [self sd_frameDurationWithIterator:iter];
frame.width = iter.width;
frame.height = iter.height;
frame.hasAlpha = iter.has_alpha;
frame.dispose = iter.dispose_method;
frame.blend = iter.blend_method;
frame.offsetX = iter.x_offset;
frame.offsetY = canvasHeight - iter.y_offset - iter.height;
BOOL sizeEqualsToCanvas = (iter.width == canvasWidth && iter.height == canvasHeight);
BOOL offsetIsZero = (iter.x_offset == 0 && iter.y_offset == 0);
frame.isFullSize = (sizeEqualsToCanvas && offsetIsZero);
if ((!frame.blend || !frame.hasAlpha) && frame.isFullSize) {
lastBlendIndex = iterIndex;
frame.blendFromIndex = iterIndex;
} else {
if (frame.dispose && frame.isFullSize) {
frame.blendFromIndex = lastBlendIndex;
lastBlendIndex = iterIndex + 1;
} else {
frame.blendFromIndex = lastBlendIndex;
}
}
iterIndex++;
[frames addObject:frame];
} while (WebPDemuxNextFrame(&iter));
WebPDemuxReleaseIterator(&iter);
if (frames.count != frameCount) {
return NO;
}
_frames = [frames copy];
_hasAnimation = hasAnimation;
_hasAlpha = hasAlpha;
_canvasWidth = canvasWidth;
_canvasHeight = canvasHeight;
_frameCount = frameCount;
_loopCount = loopCount;
return YES;
}
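To make the blendFromIndex bookkeeping concrete, here is a hypothetical 4-frame animation traced through the scan above (the frame properties are invented for illustration):
// frame 0: full size, no alpha                           -> blendFromIndex = 0, lastBlendIndex = 0
// frame 1: sub-rect, alpha, blend, dispose none           -> blendFromIndex = 0 (re-blend from frame 0)
// frame 2: full size, alpha, no-blend, dispose background -> blendFromIndex = 0, lastBlendIndex becomes 3
// frame 3: sub-rect, alpha, blend, dispose none           -> blendFromIndex = 3 (canvas was disposed, start from itself)
So when a frame is later requested out of order, the decoder only has to start blending from that frame's blendFromIndex rather than from frame 0.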
- (NSData *)animatedImageData
{
return _imageData;
}
- (NSUInteger)animatedImageLoopCount {
return _loopCount;
}
- (NSUInteger)animatedImageFrameCount {
return _frameCount;
}
- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
if (index >= _frameCount) {
return 0;
}
return _frames[index].duration;
}
- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
UIImage *image;
if (index >= _frameCount) {
return nil;
}
LOCK({
image = [self safeAnimatedImageFrameAtIndex:index];
});
return image;
}
- (UIImage *)safeAnimatedImageFrameAtIndex:(NSUInteger)index {
if (!_canvas) {
CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
bitmapInfo |= _hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
CGContextRef canvas = CGBitmapContextCreate(NULL, _canvasWidth, _canvasHeight, 8, 0, [SDWebImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
if (!canvas) {
return nil;
}
_canvas = canvas;
}
SDWebPCoderFrame *frame = _frames[index];
UIImage *image;
WebPIterator iter;
if (_currentBlendIndex + 1 == index) {
// If the requested index directly follows the current blend index, continue the normal serial decode
_currentBlendIndex = index;
// libwebp's frame index starts at 1
if (!WebPDemuxGetFrame(_demux, (int)(index + 1), &iter)) {
WebPDemuxReleaseIterator(&iter);
return nil;
}
image = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter];
} else {
// Else, this can happen when one image is set on different imageViews or one loop ends, so we should clear the shared canvas.
if (_currentBlendIndex != NSNotFound) {
CGContextClearRect(_canvas, CGRectMake(0, 0, _canvasWidth, _canvasHeight));
}
_currentBlendIndex = index;
// Then, loop from the blendFromIndex and draw each of the previous frames on the canvas.
// We use a do-while loop to call `WebPDemuxNextFrame` (fast); only the (startIndex == endIndex) case needs to create an image instance
size_t startIndex = frame.blendFromIndex;
size_t endIndex = frame.index;
if (!WebPDemuxGetFrame(_demux, (int)(startIndex + 1), &iter)) {
WebPDemuxReleaseIterator(&iter);
return nil;
}
do {
@autoreleasepool {
if ((size_t)iter.frame_num == endIndex) {
[self sd_blendWebpImageWithCanvas:_canvas iterator:iter];
} else {
image = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter];
}
}
} while ((size_t)iter.frame_num < (endIndex + 1) && WebPDemuxNextFrame(&iter));
}
WebPDemuxReleaseIterator(&iter);
return image;
}
@end
#endif
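Taken together, the new SDWebImageAnimatedCoder surface of this coder can be exercised roughly as follows; webpData stands for any animated WebP NSData and is only illustrative (in the commit itself, SDAnimatedImage is presumably the caller of this protocol):
SDWebImageWebPCoder *animatedCoder = [[SDWebImageWebPCoder alloc] initWithAnimatedImageData:webpData];
if (animatedCoder) {
    NSUInteger frameCount = [animatedCoder animatedImageFrameCount];
    NSUInteger loopCount = [animatedCoder animatedImageLoopCount]; // 0 means loop forever for WebP
    for (NSUInteger i = 0; i < frameCount; i++) {
        NSTimeInterval duration = [animatedCoder animatedImageDurationAtIndex:i];
        UIImage *frame = [animatedCoder animatedImageFrameAtIndex:i];
        // Hand frame + duration (and loopCount) to whatever drives the animation rendering.
    }
}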

View File

@ -7,7 +7,7 @@
*/
#import "UIImage+ForceDecode.h"
#import "SDWebImageCodersManager.h"
#import "SDWebImageCoderHelper.h"
@implementation UIImage (ForceDecode)
@ -15,16 +15,14 @@
if (!image) {
return nil;
}
NSData *tempData;
return [[SDWebImageCodersManager sharedManager] decompressedImageWithImage:image data:&tempData options:@{SDWebImageCoderScaleDownLargeImagesKey: @(NO)}];
return [SDWebImageCoderHelper decodedImageWithImage:image];
}
+ (UIImage *)sd_decodedAndScaledDownImageWithImage:(UIImage *)image {
if (!image) {
return nil;
}
NSData *tempData;
return [[SDWebImageCodersManager sharedManager] decompressedImageWithImage:image data:&tempData options:@{SDWebImageCoderScaleDownLargeImagesKey: @(YES)}];
return [SDWebImageCoderHelper decodedAndScaledDownImageWithImage:image limitBytes:0];
}
@end

View File

@ -16,7 +16,7 @@
if (!data) {
return nil;
}
return [[SDWebImageGIFCoder sharedCoder] decodedImageWithData:data];
return [[SDWebImageGIFCoder sharedCoder] decodedImageWithData:data options:nil];
}
@end

View File

@ -12,7 +12,7 @@
@implementation UIImage (MultiFormat)
+ (nullable UIImage *)sd_imageWithData:(nullable NSData *)data {
return [[SDWebImageCodersManager sharedManager] decodedImageWithData:data];
return [[SDWebImageCodersManager sharedManager] decodedImageWithData:data options:nil];
}
- (nullable NSData *)sd_imageData {
@ -22,7 +22,7 @@
- (nullable NSData *)sd_imageDataAsFormat:(SDImageFormat)imageFormat {
NSData *imageData = nil;
if (self) {
imageData = [[SDWebImageCodersManager sharedManager] encodedDataWithImage:self format:imageFormat];
imageData = [[SDWebImageCodersManager sharedManager] encodedDataWithImage:self format:imageFormat options:nil];
}
return imageData;
}

View File

@ -17,7 +17,7 @@
if (!data) {
return nil;
}
return [[SDWebImageWebPCoder sharedCoder] decodedImageWithData:data];
return [[SDWebImageWebPCoder sharedCoder] decodedImageWithData:data options:nil];
}
@end

View File

@ -135,7 +135,7 @@
expect([coder canDecodeFromData:inputImageData]).to.beTruthy();
// 2 - decode from NSData to UIImage and check it
UIImage *inputImage = [coder decodedImageWithData:inputImageData];
UIImage *inputImage = [coder decodedImageWithData:inputImageData options:nil];
expect(inputImage).toNot.beNil();
if (isAnimated) {
@ -159,7 +159,7 @@
// 4 - encode from UIImage to NSData using the inputImageFormat and check it
NSData *outputImageData = [coder encodedDataWithImage:inputImage format:inputImageFormat];
expect(outputImageData).toNot.beNil();
UIImage *outputImage = [coder decodedImageWithData:outputImageData];
UIImage *outputImage = [coder decodedImageWithData:outputImageData options:nil];
expect(outputImage.size).to.equal(inputImage.size);
expect(outputImage.scale).to.equal(inputImage.scale);
expect(outputImage.images.count).to.equal(inputImage.images.count);

View File

@ -379,11 +379,6 @@
if (![data1 isEqualToData:data2]) {
XCTFail(@"The image data is not equal to cutom decoder, check -[SDWebImageTestDecoder decodedImageWithData:]");
}
NSString *str1 = @"TestDecompress";
NSString *str2 = [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
if (![str1 isEqualToString:str2]) {
XCTFail(@"The image data is not modified by the custom decoder, check -[SDWebImageTestDecoder decompressedImageWithImage:data:options:]");
}
[[SDWebImageCodersManager sharedManager] removeCoder:testDecoder];
[expectation fulfill];
}];

View File

@ -10,6 +10,6 @@
#import <Foundation/Foundation.h>
#import <SDWebImage/SDWebImageCoder.h>
@interface SDWebImageTestDecoder : NSObject <SDWebImageCoder>
@interface SDWebImageTestDecoder : NSObject <SDWebImageProgressiveCoder>
@end

View File

@ -19,29 +19,31 @@
return YES;
}
- (UIImage *)decodedImageWithData:(NSData *)data {
- (UIImage *)decodedImageWithData:(NSData *)data options:(nullable SDWebImageCoderOptions *)options {
NSString * testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"jpg"];
UIImage *image = [[UIImage alloc] initWithContentsOfFile:testImagePath];
return image;
}
- (instancetype)initIncrementally
{
self = [super init];
if (self) {
}
return self;
}
- (UIImage *)incrementallyDecodedImageWithData:(NSData *)data finished:(BOOL)finished {
NSString * testImagePath = [[NSBundle bundleForClass:[self class]] pathForResource:@"TestImage" ofType:@"gif"];
UIImage *image = [[UIImage alloc] initWithContentsOfFile:testImagePath];
return image;
}
- (UIImage *)decompressedImageWithImage:(UIImage *)image
data:(NSData *__autoreleasing _Nullable *)data
options:(nullable NSDictionary<NSString*, NSObject*>*)optionsDict {
NSString *testString = @"TestDecompress";
NSData *testData = [testString dataUsingEncoding:NSUTF8StringEncoding];
*data = testData;
return image;
- (BOOL)canIncrementallyDecodeFromData:(nullable NSData *)data {
return YES;
}
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format {
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format options:(nullable SDWebImageCoderOptions *)options {
NSString *testString = @"TestEncode";
NSData *data = [testString dataUsingEncoding:NSUTF8StringEncoding];
return data;

View File

@ -44,6 +44,9 @@ FOUNDATION_EXPORT const unsigned char WebImageVersionString[];
#import <SDWebImage/MKAnnotationView+WebCache.h>
#endif
#import <SDWebImage/SDAnimatedImage.h>
#import <SDWebImage/SDAnimatedImageView.h>
#import <SDWebImage/SDAnimatedImageView+WebCache.h>
#import <SDWebImage/SDWebImageCodersManager.h>
#import <SDWebImage/SDWebImageCoder.h>
#import <SDWebImage/SDWebImageWebPCoder.h>