Optimize the macOS legacy animated image encode with weak-assigned frames, to avoid re-decoding the frames again

It's still recommended to use the new API (`encodedDataWithFrames:loopCount:format:options:`)
DreamPiggy 2023-01-17 16:16:09 +08:00
parent 8ec3bc83d7
commit 985c84be75
3 changed files with 47 additions and 0 deletions
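
For reference, a minimal sketch of the two encode paths this commit touches, based on the test added below (`frames` is assumed to be an `NSArray<SDImageFrame *>` built by the caller):

// Legacy path: build one animated image, then encode it. With this commit, the
// frames are weak-assigned onto the SDAnimatedImageRep on macOS, so the coder
// can read them back instead of re-decoding the bitmap representation.
UIImage *animatedImage = [SDImageCoderHelper animatedImageWithFrames:frames];
NSData *legacyData = [SDImageGIFCoder.sharedCoder encodedDataWithImage:animatedImage format:SDImageFormatGIF options:nil];

// Recommended path: encode the frames directly with the newer frame-based API.
NSData *newData = [SDImageGIFCoder.sharedCoder encodedDataWithFrames:frames loopCount:0 format:SDImageFormatGIF options:nil];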

View File

@@ -16,6 +16,11 @@
#import "SDImageHEICCoder.h"
#import "SDImageAWebPCoder.h"
@interface SDAnimatedImageRep ()
/// This wraps the animated image frames for the legacy animated image coder API (`encodedDataWithImage:`).
@property (nonatomic, readwrite, weak) NSArray<SDImageFrame *> *frames;
@end
@implementation SDAnimatedImageRep {
CGImageSourceRef _imageSource;
}

View File

@@ -94,6 +94,13 @@ static CGFloat kDestImageLimitBytes = 30.f * kBytesPerMB;
static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to overlap the seems where tiles meet.
#if SD_MAC
@interface SDAnimatedImageRep (Private)
/// This wraps the animated image frames for the legacy animated image coder API (`encodedDataWithImage:`).
@property (nonatomic, readwrite, weak) NSArray<SDImageFrame *> *frames;
@end
#endif
@implementation SDImageCoderHelper
+ (UIImage *)animatedImageWithFrames:(NSArray<SDImageFrame *> *)frames {
@@ -159,6 +166,7 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
SDAnimatedImageRep *imageRep = [[SDAnimatedImageRep alloc] initWithData:imageData];
NSSize size = NSMakeSize(imageRep.pixelsWide / scale, imageRep.pixelsHigh / scale);
imageRep.size = size;
imageRep.frames = frames; // Weak-assign so we don't affect the lazy decoding semantics of NSBitmapImageRep
animatedImage = [[NSImage alloc] initWithSize:size];
[animatedImage addRepresentation:imageRep];
#endif
@@ -211,6 +219,14 @@ static const CGFloat kDestSeemOverlap = 2.0f; // the numbers of pixels to over
NSRect imageRect = NSMakeRect(0, 0, animatedImage.size.width, animatedImage.size.height);
NSImageRep *imageRep = [animatedImage bestRepresentationForRect:imageRect context:nil hints:nil];
// Check the weakly assigned frames first
if ([imageRep isKindOfClass:[SDAnimatedImageRep class]]) {
SDAnimatedImageRep *animatedImageRep = (SDAnimatedImageRep *)imageRep;
if (animatedImageRep.frames) {
return animatedImageRep.frames;
}
}
NSBitmapImageRep *bitmapImageRep;
if ([imageRep isKindOfClass:[NSBitmapImageRep class]]) {
bitmapImageRep = (NSBitmapImageRep *)imageRep;

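As a usage note, a minimal sketch of the decode-back path this hunk optimizes, assuming the hunk sits inside `+[SDImageCoderHelper framesFromAnimatedImage:]` (the frame-extraction helper):

// On macOS, animatedImageWithFrames: now weak-assigns the frames onto the
// SDAnimatedImageRep it creates.
UIImage *animatedImage = [SDImageCoderHelper animatedImageWithFrames:frames];

// While that weak frames array is still alive, extracting the frames returns it
// directly instead of re-decoding every frame from the underlying image source.
NSArray<SDImageFrame *> *roundTrip = [SDImageCoderHelper framesFromAnimatedImage:animatedImage];
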
View File

@@ -422,6 +422,32 @@
#endif
}
- (void)test27ThatEncodeWithFramesWorks {
// Mock
NSMutableArray<SDImageFrame *> *frames = [NSMutableArray array];
NSUInteger frameCount = 5;
for (size_t i = 0; i < frameCount; i++) {
CGSize size = CGSizeMake(100, 100);
SDGraphicsImageRenderer *renderer = [[SDGraphicsImageRenderer alloc] initWithSize:size];
UIImage *image = [renderer imageWithActions:^(CGContextRef _Nonnull context) {
// Vary the red channel per frame; use (i + 1) to avoid a division by zero on the first frame
CGContextSetRGBFillColor(context, 1.0 / (i + 1), 0.0, 0.0, 1.0);
CGContextSetRGBStrokeColor(context, 1.0 / (i + 1), 0.0, 0.0, 1.0);
CGContextFillRect(context, CGRectMake(0, 0, size.width, size.height));
}];
SDImageFrame *frame = [SDImageFrame frameWithImage:image duration:0.1];
[frames addObject:frame];
}
// Test old API
UIImage *animatedImage = [SDImageCoderHelper animatedImageWithFrames:frames];
NSData *data = [SDImageGIFCoder.sharedCoder encodedDataWithImage:animatedImage format:SDImageFormatGIF options:nil];
expect(data).notTo.beNil();
// Test new API
NSData *data2 = [SDImageGIFCoder.sharedCoder encodedDataWithFrames:frames loopCount:0 format:SDImageFormatGIF options:nil];
expect(data2).notTo.beNil();
}
#pragma mark - Utils
- (void)verifyCoder:(id<SDImageCoder>)coder