Refactor thumbnail decoding: use a transformed CGContext instead of a full-size context that gets re-scaled after decoding finishes

This helps avoid allocating a full-size canvas (and the extra post-decode rescale) when only a thumbnail is needed.
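
As a standalone illustration of the idea behind the new CreateWebPCanvas helper (the name CreateScaledCanvas and the fullSize/thumbSize parameters below are hypothetical, used only for this sketch): allocate the bitmap at the thumbnail size, then scale the CTM so frames are still drawn in full-canvas coordinates.

#import <CoreGraphics/CoreGraphics.h>

// Hypothetical helper, for illustration only: back the bitmap with a
// thumbnail-sized buffer, then scale the CTM so callers keep drawing in
// full-canvas coordinates and the result lands on the small bitmap.
static CGContextRef CreateScaledCanvas(CGSize fullSize, CGSize thumbSize) {
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst;
    // Only a thumbnail-sized backing store is allocated here, instead of a
    // full-canvas bitmap that would have to be rescaled afterwards.
    CGContextRef ctx = CGBitmapContextCreate(NULL, thumbSize.width, thumbSize.height,
                                             8, 0, colorSpace, bitmapInfo);
    CGColorSpaceRelease(colorSpace);
    if (!ctx) return NULL;
    // Map full-canvas coordinates onto the thumbnail-sized backing store.
    CGContextScaleCTM(ctx, thumbSize.width / fullSize.width,
                           thumbSize.height / fullSize.height);
    return ctx;
}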

For encoding, avoid calling vImageConvert_AnyToAny directly and use the vImageBuffer_InitWithCGImage convenience method instead.
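
A rough sketch of that convenience path (the helper name CopyRGBA8888Pixels and its signature are illustrative, not part of this change): vImageBuffer_InitWithCGImage infers the source format from the CGImage and converts into the requested RGBA8888 buffer in one call, so the explicit converter, source buffer, and vImageConvert_AnyToAny call all go away.

#import <Accelerate/Accelerate.h>

// Illustrative helper (assumed, not the coder's actual code): convert any
// CGImage into a non-premultiplied RGBA8888 buffer suitable for libwebp.
static uint8_t * _Nullable CopyRGBA8888Pixels(CGImageRef imageRef, size_t *bytesPerRow) {
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    vImage_CGImageFormat destFormat = {
        .bitsPerComponent = 8,
        .bitsPerPixel = 32,
        .colorSpace = colorSpace,
        .bitmapInfo = kCGImageAlphaLast | kCGBitmapByteOrderDefault // RGBA8888, non-premultiplied
    };
    vImage_Buffer dest;
    // Allocates dest.data and performs the whole any-to-RGBA8888 conversion
    // internally, replacing the manual converter + vImageConvert_AnyToAny path.
    vImage_Error error = vImageBuffer_InitWithCGImage(&dest, &destFormat, NULL, imageRef, kvImageNoFlags);
    CGColorSpaceRelease(colorSpace);
    if (error != kvImageNoError) return NULL;
    *bytesPerRow = dest.rowBytes;
    return dest.data; // caller owns the buffer and must free() it
}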
DreamPiggy 2023-03-09 13:42:14 +08:00
parent c779312836
commit 9dae8d36b9
1 changed file with 50 additions and 114 deletions

@@ -68,12 +68,24 @@ else OSSpinLockUnlock(&lock##_deprecated);
 #endif
 #endif
 
-static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable CGContextRef canvas, CGSize scaledSize) CF_RETURNS_RETAINED {
-    if (!canvas) return NULL;
-    CGContextSaveGState(canvas);
-    CGContextScaleCTM(canvas, scaledSize.width, scaledSize.height);
-    CGContextRestoreGState(canvas);
-    return CGBitmapContextCreateImage(canvas);
+/// Used for animated WebP, which need a canvas for decoding (rendering), possible apply a scale transform for thumbnail decoding (avoiding post-rescale using vImage)
+/// See more in #73
+static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canvasSize, CGSize thumbnailSize, BOOL preserveAspectRatio) {
+    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
+    bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
+    // Check whether we need to use thumbnail
+    CGSize scaledSize = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(canvasSize.width, canvasSize.height) scaleSize:thumbnailSize preserveAspectRatio:preserveAspectRatio shouldScaleUp:NO];
+    CGContextRef canvas = CGBitmapContextCreate(NULL, scaledSize.width, scaledSize.height, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
+    if (!canvas) {
+        return nil;
+    }
+    // Check whether we need to use thumbnail
+    if (!CGSizeEqualToSize(canvasSize, scaledSize)) {
+        CGFloat sx = scaledSize.width / canvasSize.width;
+        CGFloat sy = scaledSize.height / canvasSize.height;
+        CGContextScaleCTM(canvas, sx, sy);
+    }
+    return canvas;
 }
 
 @interface SDWebPCoderFrame : NSObject
@@ -226,9 +238,7 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     }
    
     BOOL hasAlpha = flags & ALPHA_FLAG;
-    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
-    bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
-    CGContextRef canvas = CGBitmapContextCreate(NULL, canvasWidth, canvasHeight, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
+    CGContextRef canvas = CreateWebPCanvas(hasAlpha, CGSizeMake(canvasWidth, canvasHeight), thumbnailSize, preserveAspectRatio);
     if (!canvas) {
         WebPDemuxDelete(demuxer);
         CGColorSpaceRelease(colorSpace);
@@ -240,7 +250,7 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     do {
         @autoreleasepool {
-            CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:canvas iterator:iter colorSpace:colorSpace scaledSize:scaledSize];
+            CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:canvas demuxer:demuxer iterator:iter colorSpace:colorSpace];
             if (!imageRef) {
                 continue;
             }
@@ -389,7 +399,7 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
         return nil;
     }
-    CGContextRef canvas = CGBitmapContextCreate(NULL, width, height, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
+    CGContextRef canvas = CreateWebPCanvas(YES, CGSizeMake(width, height), _thumbnailSize, _preserveAspectRatio);
     if (!canvas) {
         CGImageRelease(imageRef);
         return nil;
@@ -397,14 +407,7 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     // Only draw the last_y image height, keep remains transparent, in Core Graphics coordinate system
     CGContextDrawImage(canvas, CGRectMake(0, height - last_y, width, last_y), imageRef);
-    // Check whether we need to use thumbnail
-    CGImageRef newImageRef;
-    CGSize scaledSize = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(width, height) scaleSize:_thumbnailSize preserveAspectRatio:_preserveAspectRatio shouldScaleUp:NO];
-    if (!CGSizeEqualToSize(CGSizeMake(width, height), scaledSize)) {
-        newImageRef = CGBitmapContextCreateScaledImage(canvas, scaledSize);
-    } else {
-        newImageRef = CGBitmapContextCreateImage(canvas);
-    }
+    CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
     CGImageRelease(imageRef);
     if (!newImageRef) {
         CGContextRelease(canvas);
@@ -433,8 +436,8 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     return image;
 }
 
-- (void)sd_blendWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef {
-    size_t canvasHeight = CGBitmapContextGetHeight(canvas);
+- (void)sd_blendWebpImageWithCanvas:(CGContextRef)canvas demuxer:(nonnull WebPDemuxer *)demuxer iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef {
+    int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
     CGFloat tmpX = iter.x_offset;
     CGFloat tmpY = canvasHeight - iter.height - iter.y_offset;
     CGRect imageRect = CGRectMake(tmpX, tmpY, iter.width, iter.height);
@@ -456,14 +459,13 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     }
 }
 
-- (nullable CGImageRef)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef scaledSize:(CGSize)scaledSize CF_RETURNS_RETAINED {
+- (nullable CGImageRef)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas demuxer:(nonnull WebPDemuxer *)demuxer iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef CF_RETURNS_RETAINED {
     CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:colorSpaceRef scaledSize:CGSizeZero];
     if (!imageRef) {
         return nil;
     }
-    size_t canvasWidth = CGBitmapContextGetWidth(canvas);
-    size_t canvasHeight = CGBitmapContextGetHeight(canvas);
+    int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
     CGFloat tmpX = iter.x_offset;
     CGFloat tmpY = canvasHeight - iter.height - iter.y_offset;
     CGRect imageRect = CGRectMake(tmpX, tmpY, iter.width, iter.height);
@@ -474,17 +476,9 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     if (!shouldBlend) {
         CGContextClearRect(canvas, imageRect);
     }
     CGContextDrawImage(canvas, imageRect, imageRef);
-    CGImageRef newImageRef;
-    // Check whether we need to use thumbnail
-    if (!CGSizeEqualToSize(CGSizeMake(canvasWidth, canvasHeight), scaledSize)) {
-        // Use CoreGraphics canvas to scale down, no need extra allocation
-        newImageRef = CGBitmapContextCreateScaledImage(canvas, scaledSize);
-    } else {
-        newImageRef = CGBitmapContextCreateImage(canvas);
-    }
+    CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
     CGImageRelease(imageRef);
     if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
@@ -741,74 +735,22 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     if (!dataProvider) {
         return nil;
     }
-    // Check colorSpace is RGB/RGBA
-    CGColorSpaceRef colorSpace = CGImageGetColorSpace(imageRef);
-    BOOL isRGB = CGColorSpaceGetModel(colorSpace) == kCGColorSpaceModelRGB;
-    CFDataRef dataRef;
     uint8_t *rgba = NULL; // RGBA Buffer managed by CFData, don't call `free` on it, instead call `CFRelease` on `dataRef`
     // We could not assume that input CGImage's color mode is always RGB888/RGBA8888. Convert all other cases to target color mode using vImage
-    BOOL isRGB888 = isRGB && byteOrderNormal && alphaInfo == kCGImageAlphaNone && components == 3;
-    BOOL isRGBA8888 = isRGB && byteOrderNormal && alphaInfo == kCGImageAlphaLast && components == 4;
-    if (isRGB888 || isRGBA8888) {
-        // If the input CGImage is already RGB888/RGBA8888
-        dataRef = CGDataProviderCopyData(dataProvider);
-        if (!dataRef) {
-            return nil;
-        }
-        rgba = (uint8_t *)CFDataGetBytePtr(dataRef);
-    } else {
-        // Convert all other cases to target color mode using vImage
-        vImageConverterRef convertor = NULL;
-        vImage_Error error = kvImageNoError;
-        vImage_CGImageFormat srcFormat = {
-            .bitsPerComponent = (uint32_t)bitsPerComponent,
-            .bitsPerPixel = (uint32_t)bitsPerPixel,
-            .colorSpace = colorSpace,
-            .bitmapInfo = bitmapInfo,
-            .renderingIntent = CGImageGetRenderingIntent(imageRef)
-        };
-        vImage_CGImageFormat destFormat = {
-            .bitsPerComponent = 8,
-            .bitsPerPixel = hasAlpha ? 32 : 24,
-            .colorSpace = [SDImageCoderHelper colorSpaceGetDeviceRGB],
-            .bitmapInfo = hasAlpha ? kCGImageAlphaLast | kCGBitmapByteOrderDefault : kCGImageAlphaNone | kCGBitmapByteOrderDefault // RGB888/RGBA8888 (Non-premultiplied to works for libwebp)
-        };
-        convertor = vImageConverter_CreateWithCGImageFormat(&srcFormat, &destFormat, NULL, kvImageNoFlags, &error);
-        if (error != kvImageNoError) {
-            return nil;
-        }
-        vImage_Buffer src;
-        error = vImageBuffer_InitWithCGImage(&src, &srcFormat, nil, imageRef, kvImageNoAllocate);
-        if (error != kvImageNoError) {
-            vImageConverter_Release(convertor);
-            return nil;
-        }
-        vImage_Buffer dest;
-        error = vImageBuffer_Init(&dest, height, width, destFormat.bitsPerPixel, kvImageNoFlags);
-        if (error != kvImageNoError) {
-            vImageConverter_Release(convertor);
-            return nil;
-        }
-        // Convert input color mode to RGB888/RGBA8888
-        error = vImageConvert_AnyToAny(convertor, &src, &dest, NULL, kvImageNoFlags);
-        // Free the buffer
-        vImageConverter_Release(convertor);
-        if (error != kvImageNoError) {
-            free(dest.data);
-            return nil;
-        }
-        rgba = dest.data; // Converted buffer
-        bytesPerRow = dest.rowBytes; // Converted bytePerRow
-        dataRef = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, rgba, bytesPerRow * height, kCFAllocatorDefault);
+    vImage_CGImageFormat destFormat = {
+        .bitsPerComponent = 8,
+        .bitsPerPixel = hasAlpha ? 32 : 24,
+        .colorSpace = [SDImageCoderHelper colorSpaceGetDeviceRGB],
+        .bitmapInfo = hasAlpha ? kCGImageAlphaLast | kCGBitmapByteOrderDefault : kCGImageAlphaNone | kCGBitmapByteOrderDefault // RGB888/RGBA8888 (Non-premultiplied to works for libwebp)
+    };
+    vImage_Buffer dest;
+    vImage_Error error = vImageBuffer_InitWithCGImage(&dest, &destFormat, NULL, imageRef, kvImageNoFlags);
+    if (error != kvImageNoError) {
+        return nil;
     }
+    rgba = dest.data;
+    bytesPerRow = dest.rowBytes;
 
     float qualityFactor = quality * 100; // WebP quality is 0-100
     // Encode RGB888/RGBA8888 buffer to WebP data
@@ -820,7 +762,8 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     if (!WebPConfigPreset(&config, WEBP_PRESET_DEFAULT, qualityFactor) ||
         !WebPPictureInit(&picture)) {
         // shouldn't happen, except if system installation is broken
-        CFRelease(dataRef);
+        free(dest.data);
+        // CFRelease(dataRef);
         return nil;
     }
@@ -840,7 +783,7 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     }
     if (!result) {
         WebPMemoryWriterClear(&writer);
-        CFRelease(dataRef);
+        free(dest.data);
         return nil;
     }
@@ -851,14 +794,14 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
         if (!result) {
             WebPMemoryWriterClear(&writer);
             WebPPictureFree(&picture);
-            CFRelease(dataRef);
+            free(dest.data);
             return nil;
         }
     }
     result = WebPEncode(&config, &picture);
     WebPPictureFree(&picture);
-    CFRelease(dataRef); // Free bitmap buffer
+    free(dest.data);
     if (result) {
         // success
@@ -1140,16 +1083,13 @@ static float GetFloatValueForKey(NSDictionary * _Nonnull dictionary, NSString *
     if (_hasAnimation) {
         // If have animation, we still need to allocate a CGContext, because the poster frame may be smaller than canvas
         if (!_canvas) {
-            CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
-            bitmapInfo |= _hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
-            CGContextRef canvas = CGBitmapContextCreate(NULL, _canvasWidth, _canvasHeight, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
+            CGContextRef canvas = CreateWebPCanvas(_hasAlpha, CGSizeMake(_canvasWidth, _canvasHeight), _thumbnailSize, _preserveAspectRatio);
             if (!canvas) {
                 return nil;
             }
             _canvas = canvas;
         }
-        CGSize scaledSize = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(_canvasWidth, _canvasHeight) scaleSize:_thumbnailSize preserveAspectRatio:_preserveAspectRatio shouldScaleUp:NO];
-        imageRef = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter colorSpace:_colorSpace scaledSize:scaledSize];
+        imageRef = [self sd_drawnWebpImageWithCanvas:_canvas demuxer:_demux iterator:iter colorSpace:_colorSpace];
     } else {
         CGSize scaledSize = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(iter.width, iter.height) scaleSize:_thumbnailSize preserveAspectRatio:_preserveAspectRatio shouldScaleUp:NO];
         imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:_colorSpace scaledSize:scaledSize];
@@ -1169,9 +1109,7 @@ static float GetFloatValueForKey(NSDictionary * _Nonnull dictionary, NSString *
 - (UIImage *)safeAnimatedImageFrameAtIndex:(NSUInteger)index {
     if (!_canvas) {
-        CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
-        bitmapInfo |= _hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
-        CGContextRef canvas = CGBitmapContextCreate(NULL, _canvasWidth, _canvasHeight, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
+        CGContextRef canvas = CreateWebPCanvas(_hasAlpha, CGSizeMake(_canvasWidth, _canvasHeight), _thumbnailSize, _preserveAspectRatio);
         if (!canvas) {
             return nil;
         }
@@ -1215,7 +1153,7 @@ static float GetFloatValueForKey(NSDictionary * _Nonnull dictionary, NSString *
     if (endIndex > startIndex) {
         do {
             @autoreleasepool {
-                [self sd_blendWebpImageWithCanvas:_canvas iterator:iter colorSpace:_colorSpace];
+                [self sd_blendWebpImageWithCanvas:_canvas demuxer:_demux iterator:iter colorSpace:_colorSpace];
             }
         } while ((size_t)iter.frame_num < endIndex && WebPDemuxNextFrame(&iter));
     }
@@ -1228,9 +1166,7 @@ static float GetFloatValueForKey(NSDictionary * _Nonnull dictionary, NSString *
     _currentBlendIndex = index;
     // Now the canvas is ready, which respects of dispose method behavior. Just do normal decoding and produce image.
-    // Check whether we need to use thumbnail
-    CGSize scaledSize = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(_canvasWidth, _canvasHeight) scaleSize:_thumbnailSize preserveAspectRatio:_preserveAspectRatio shouldScaleUp:NO];
-    CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter colorSpace:_colorSpace scaledSize:scaledSize];
+    CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:_canvas demuxer:_demux iterator:iter colorSpace:_colorSpace];
     if (!imageRef) {
         return nil;
     }