Refactor thumbnail decoding: use a transformed (scaled) CGContext instead of a full-size context that is re-scaled after decoding finishes.
This helps avoid extra memory allocation. For encoding, avoid vImageConvert_AnyToAny and just use its convenience method (vImageBuffer_InitWithCGImage) instead.
This commit is contained in:
parent c779312836
commit 9dae8d36b9
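The core decoding idea, in isolation: instead of rendering into a full-size bitmap and rescaling the result, create the bitmap at the thumbnail size and scale the context's CTM once, so every draw issued in full-canvas coordinates lands directly in the small bitmap. A minimal, self-contained Core Graphics sketch (not code from this repository; the helper name and pixel format are illustrative assumptions):

    #import <CoreGraphics/CoreGraphics.h>

    // Hypothetical helper: decode-time downscale via a transformed bitmap context.
    static CGImageRef CreateThumbnailByDrawing(CGImageRef fullImage, CGSize fullSize, CGSize scaledSize) {
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef ctx = CGBitmapContextCreate(NULL, scaledSize.width, scaledSize.height, 8, 0,
                                                 colorSpace, kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Host);
        CGColorSpaceRelease(colorSpace);
        if (!ctx) return NULL;
        // Scale the CTM once; all later drawing in full-canvas coordinates is downscaled on the fly.
        CGContextScaleCTM(ctx, scaledSize.width / fullSize.width, scaledSize.height / fullSize.height);
        CGContextDrawImage(ctx, CGRectMake(0, 0, fullSize.width, fullSize.height), fullImage);
        CGImageRef thumbnail = CGBitmapContextCreateImage(ctx); // already thumbnail-sized, no post-rescale
        CGContextRelease(ctx);
        return thumbnail;
    }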
@@ -68,12 +68,24 @@ else OSSpinLockUnlock(&lock##_deprecated);
 #endif
 #endif
 
-static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable CGContextRef canvas, CGSize scaledSize) CF_RETURNS_RETAINED {
-    if (!canvas) return NULL;
-    CGContextSaveGState(canvas);
-    CGContextScaleCTM(canvas, scaledSize.width, scaledSize.height);
-    CGContextRestoreGState(canvas);
-    return CGBitmapContextCreateImage(canvas);
-}
+/// Used for animated WebP, which need a canvas for decoding (rendering), possible apply a scale transform for thumbnail decoding (avoiding post-rescale using vImage)
+/// See more in #73
+static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canvasSize, CGSize thumbnailSize, BOOL preserveAspectRatio) {
+    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
+    bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
+    // Check whether we need to use thumbnail
+    CGSize scaledSize = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(canvasSize.width, canvasSize.height) scaleSize:thumbnailSize preserveAspectRatio:preserveAspectRatio shouldScaleUp:NO];
+    CGContextRef canvas = CGBitmapContextCreate(NULL, scaledSize.width, scaledSize.height, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
+    if (!canvas) {
+        return nil;
+    }
+    // Check whether we need to use thumbnail
+    if (!CGSizeEqualToSize(canvasSize, scaledSize)) {
+        CGFloat sx = scaledSize.width / canvasSize.width;
+        CGFloat sy = scaledSize.height / canvasSize.height;
+        CGContextScaleCTM(canvas, sx, sy);
+    }
+    return canvas;
+}
 
 @interface SDWebPCoderFrame : NSObject
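A usage illustration of the new CreateWebPCanvas helper, with made-up sizes (and assuming SDImageCoderHelper's scaledSize behaves as aspect-fit): a 1024x768 animated WebP decoded with a 512x512 thumbnail limit and preserved aspect ratio yields a 512x384 bitmap whose CTM is scaled by (0.5, 0.5).

    // Example values only; CreateWebPCanvas is the helper added above.
    CGContextRef canvas = CreateWebPCanvas(YES, CGSizeMake(1024, 768), CGSizeMake(512, 512), YES);
    if (canvas) {
        // A rect expressed in 1024x768 canvas coordinates...
        CGRect frameRect = CGRectMake(100, 200, 300, 400);
        // ...maps through the (0.5, 0.5) CTM onto (50, 100, 150, 200) of the 512x384 bitmap.
        CGContextClearRect(canvas, frameRect);
        CGImageRef thumbnailFrame = CGBitmapContextCreateImage(canvas); // no post-rescale step
        CGImageRelease(thumbnailFrame);
        CGContextRelease(canvas);
    }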
@@ -226,9 +238,7 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     }
 
     BOOL hasAlpha = flags & ALPHA_FLAG;
-    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
-    bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
-    CGContextRef canvas = CGBitmapContextCreate(NULL, canvasWidth, canvasHeight, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
+    CGContextRef canvas = CreateWebPCanvas(hasAlpha, CGSizeMake(canvasWidth, canvasHeight), thumbnailSize, preserveAspectRatio);
     if (!canvas) {
         WebPDemuxDelete(demuxer);
         CGColorSpaceRelease(colorSpace);
@@ -240,7 +250,7 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
 
     do {
         @autoreleasepool {
-            CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:canvas iterator:iter colorSpace:colorSpace scaledSize:scaledSize];
+            CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:canvas demuxer:demuxer iterator:iter colorSpace:colorSpace];
             if (!imageRef) {
                 continue;
             }
@@ -389,7 +399,7 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
         return nil;
     }
 
-    CGContextRef canvas = CGBitmapContextCreate(NULL, width, height, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
+    CGContextRef canvas = CreateWebPCanvas(YES, CGSizeMake(width, height), _thumbnailSize, _preserveAspectRatio);
     if (!canvas) {
         CGImageRelease(imageRef);
         return nil;
@@ -397,14 +407,7 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
 
     // Only draw the last_y image height, keep remains transparent, in Core Graphics coordinate system
     CGContextDrawImage(canvas, CGRectMake(0, height - last_y, width, last_y), imageRef);
-    // Check whether we need to use thumbnail
-    CGImageRef newImageRef;
-    CGSize scaledSize = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(width, height) scaleSize:_thumbnailSize preserveAspectRatio:_preserveAspectRatio shouldScaleUp:NO];
-    if (!CGSizeEqualToSize(CGSizeMake(width, height), scaledSize)) {
-        newImageRef = CGBitmapContextCreateScaledImage(canvas, scaledSize);
-    } else {
-        newImageRef = CGBitmapContextCreateImage(canvas);
-    }
+    CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
     CGImageRelease(imageRef);
     if (!newImageRef) {
         CGContextRelease(canvas);
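To make the incremental-decode geometry above concrete (example numbers, reusing the canvas, imageRef, width and height names from the hunk): libwebp reports last_y as the number of rows decoded so far, counted from the top of the image, while Core Graphics uses a bottom-left origin, so only the decoded band is drawn and the rest of the canvas stays transparent.

    // Suppose width x height = 600 x 400 and 150 rows have been decoded so far.
    int last_y = 150;
    // The decoded band sits at the top of the canvas, i.e. y = height - last_y = 250 in CG coordinates.
    CGRect decodedBand = CGRectMake(0, 400 - last_y, 600, last_y); // (0, 250, 600, 150)
    CGContextDrawImage(canvas, decodedBand, imageRef);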
@@ -433,8 +436,8 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     return image;
 }
 
-- (void)sd_blendWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef {
-    size_t canvasHeight = CGBitmapContextGetHeight(canvas);
+- (void)sd_blendWebpImageWithCanvas:(CGContextRef)canvas demuxer:(nonnull WebPDemuxer *)demuxer iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef {
+    int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
     CGFloat tmpX = iter.x_offset;
     CGFloat tmpY = canvasHeight - iter.height - iter.y_offset;
     CGRect imageRect = CGRectMake(tmpX, tmpY, iter.width, iter.height);
@@ -456,14 +459,13 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     }
 }
 
-- (nullable CGImageRef)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef scaledSize:(CGSize)scaledSize CF_RETURNS_RETAINED {
+- (nullable CGImageRef)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas demuxer:(nonnull WebPDemuxer *)demuxer iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef CF_RETURNS_RETAINED {
     CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:colorSpaceRef scaledSize:CGSizeZero];
     if (!imageRef) {
         return nil;
     }
 
-    size_t canvasWidth = CGBitmapContextGetWidth(canvas);
-    size_t canvasHeight = CGBitmapContextGetHeight(canvas);
+    int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
     CGFloat tmpX = iter.x_offset;
     CGFloat tmpY = canvasHeight - iter.height - iter.y_offset;
     CGRect imageRect = CGRectMake(tmpX, tmpY, iter.width, iter.height);
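The switch from CGBitmapContextGetHeight(canvas) to WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT) matters because the bitmap context can now be smaller than the WebP canvas: frame offsets from the iterator are expressed in full canvas coordinates, and the scale CTM, not the bitmap size, maps them down. A worked example with made-up numbers:

    // Full WebP canvas: 1024x768; bitmap context: 512x384 with a (0.5, 0.5) CTM.
    int canvasHeight = 768;                  // WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT)
    int frameWidth = 300, frameHeight = 200;
    int xOffset = 50, yOffset = 100;         // iter.x_offset / iter.y_offset (top-left origin)
    CGFloat tmpY = canvasHeight - frameHeight - yOffset; // 468: flip to Core Graphics' bottom-left origin
    CGRect imageRect = CGRectMake(xOffset, tmpY, frameWidth, frameHeight);
    // Drawn through the CTM, this rect covers pixels (25, 234, 150, 100) of the 512x384 bitmap.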
@@ -474,17 +476,9 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     if (!shouldBlend) {
         CGContextClearRect(canvas, imageRect);
     }
 
     CGContextDrawImage(canvas, imageRect, imageRef);
 
-    CGImageRef newImageRef;
-    // Check whether we need to use thumbnail
-    if (!CGSizeEqualToSize(CGSizeMake(canvasWidth, canvasHeight), scaledSize)) {
-        // Use CoreGraphics canvas to scale down, no need extra allocation
-        newImageRef = CGBitmapContextCreateScaledImage(canvas, scaledSize);
-    } else {
-        newImageRef = CGBitmapContextCreateImage(canvas);
-    }
-
+    CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
     CGImageRelease(imageRef);
 
     if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
@@ -741,74 +735,22 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     if (!dataProvider) {
         return nil;
     }
-    // Check colorSpace is RGB/RGBA
-    CGColorSpaceRef colorSpace = CGImageGetColorSpace(imageRef);
-    BOOL isRGB = CGColorSpaceGetModel(colorSpace) == kCGColorSpaceModelRGB;
-
-    CFDataRef dataRef;
     uint8_t *rgba = NULL; // RGBA Buffer managed by CFData, don't call `free` on it, instead call `CFRelease` on `dataRef`
     // We could not assume that input CGImage's color mode is always RGB888/RGBA8888. Convert all other cases to target color mode using vImage
-    BOOL isRGB888 = isRGB && byteOrderNormal && alphaInfo == kCGImageAlphaNone && components == 3;
-    BOOL isRGBA8888 = isRGB && byteOrderNormal && alphaInfo == kCGImageAlphaLast && components == 4;
-    if (isRGB888 || isRGBA8888) {
-        // If the input CGImage is already RGB888/RGBA8888
-        dataRef = CGDataProviderCopyData(dataProvider);
-        if (!dataRef) {
-            return nil;
-        }
-        rgba = (uint8_t *)CFDataGetBytePtr(dataRef);
-    } else {
-        // Convert all other cases to target color mode using vImage
-        vImageConverterRef convertor = NULL;
-        vImage_Error error = kvImageNoError;
-
-        vImage_CGImageFormat srcFormat = {
-            .bitsPerComponent = (uint32_t)bitsPerComponent,
-            .bitsPerPixel = (uint32_t)bitsPerPixel,
-            .colorSpace = colorSpace,
-            .bitmapInfo = bitmapInfo,
-            .renderingIntent = CGImageGetRenderingIntent(imageRef)
-        };
     vImage_CGImageFormat destFormat = {
         .bitsPerComponent = 8,
         .bitsPerPixel = hasAlpha ? 32 : 24,
         .colorSpace = [SDImageCoderHelper colorSpaceGetDeviceRGB],
         .bitmapInfo = hasAlpha ? kCGImageAlphaLast | kCGBitmapByteOrderDefault : kCGImageAlphaNone | kCGBitmapByteOrderDefault // RGB888/RGBA8888 (Non-premultiplied to works for libwebp)
     };
-
-        convertor = vImageConverter_CreateWithCGImageFormat(&srcFormat, &destFormat, NULL, kvImageNoFlags, &error);
-        if (error != kvImageNoError) {
-            return nil;
-        }
-
-        vImage_Buffer src;
-        error = vImageBuffer_InitWithCGImage(&src, &srcFormat, nil, imageRef, kvImageNoAllocate);
-        if (error != kvImageNoError) {
-            vImageConverter_Release(convertor);
-            return nil;
-        }
-
     vImage_Buffer dest;
-        error = vImageBuffer_Init(&dest, height, width, destFormat.bitsPerPixel, kvImageNoFlags);
+    vImage_Error error = vImageBuffer_InitWithCGImage(&dest, &destFormat, NULL, imageRef, kvImageNoFlags);
     if (error != kvImageNoError) {
-        vImageConverter_Release(convertor);
         return nil;
     }
-
-        // Convert input color mode to RGB888/RGBA8888
-        error = vImageConvert_AnyToAny(convertor, &src, &dest, NULL, kvImageNoFlags);
-
-        // Free the buffer
-        vImageConverter_Release(convertor);
-        if (error != kvImageNoError) {
-            free(dest.data);
-            return nil;
-        }
-
-        rgba = dest.data; // Converted buffer
-        bytesPerRow = dest.rowBytes; // Converted bytePerRow
-        dataRef = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, rgba, bytesPerRow * height, kCFAllocatorDefault);
-    }
+    rgba = dest.data;
+    bytesPerRow = dest.rowBytes;
 
     float qualityFactor = quality * 100; // WebP quality is 0-100
     // Encode RGB888/RGBA8888 buffer to WebP data
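For the encoder side, this is roughly the convenience path the commit switches to, shown in isolation (a sketch, not lines from this diff; the helper name is made up): vImageBuffer_InitWithCGImage allocates the destination buffer and performs any pixel-format and color-space conversion internally, which is what the removed vImageConverter / vImageConvert_AnyToAny code did by hand.

    #import <Accelerate/Accelerate.h>

    // Hypothetical helper: copy any CGImage into a non-premultiplied RGBA8888 buffer.
    // On success the caller owns dest->data and must release it with free().
    static vImage_Error CopyImageAsRGBA8888(CGImageRef image, vImage_Buffer *dest) {
        CGColorSpaceRef rgb = CGColorSpaceCreateDeviceRGB();
        vImage_CGImageFormat format = {
            .bitsPerComponent = 8,
            .bitsPerPixel = 32,
            .colorSpace = rgb,
            .bitmapInfo = kCGImageAlphaLast | kCGBitmapByteOrderDefault // RGBA8888, non-premultiplied (what libwebp expects)
        };
        // One call: allocates dest->data and converts the CGImage into the requested format.
        vImage_Error error = vImageBuffer_InitWithCGImage(dest, &format, NULL, image, kvImageNoFlags);
        CGColorSpaceRelease(rgb);
        return error;
    }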
@@ -820,7 +762,8 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     if (!WebPConfigPreset(&config, WEBP_PRESET_DEFAULT, qualityFactor) ||
         !WebPPictureInit(&picture)) {
         // shouldn't happen, except if system installation is broken
-        CFRelease(dataRef);
+        free(dest.data);
+        // CFRelease(dataRef);
         return nil;
     }
 
@@ -840,7 +783,7 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
     }
     if (!result) {
         WebPMemoryWriterClear(&writer);
-        CFRelease(dataRef);
+        free(dest.data);
         return nil;
     }
 
@@ -851,14 +794,14 @@ static inline CGImageRef __nullable CGBitmapContextCreateScaledImage(cg_nullable
         if (!result) {
             WebPMemoryWriterClear(&writer);
             WebPPictureFree(&picture);
-            CFRelease(dataRef);
+            free(dest.data);
             return nil;
         }
     }
 
     result = WebPEncode(&config, &picture);
     WebPPictureFree(&picture);
-    CFRelease(dataRef); // Free bitmap buffer
+    free(dest.data);
 
     if (result) {
         // success
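The repeated CFRelease(dataRef) to free(dest.data) edits in the error paths above follow from the ownership change: the pixel buffer used to be a CFDataRef copied from the image's data provider, whereas vImageBuffer_InitWithCGImage mallocs dest.data on the caller's behalf. A compressed sketch of the rule (reusing names from the surrounding hunks):

    // Old path (removed): buffer owned by a CFDataRef, released with CFRelease().
    CFDataRef dataRef = CGDataProviderCopyData(dataProvider);
    const uint8_t *pixels = CFDataGetBytePtr(dataRef);
    /* ... encode from pixels ... */
    CFRelease(dataRef);

    // New path (this commit): buffer malloc'd by vImage, released with free().
    vImage_Buffer dest;
    if (vImageBuffer_InitWithCGImage(&dest, &destFormat, NULL, imageRef, kvImageNoFlags) == kvImageNoError) {
        /* ... encode from dest.data / dest.rowBytes ... */
        free(dest.data);
    }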
@@ -1140,16 +1083,13 @@ static float GetFloatValueForKey(NSDictionary * _Nonnull dictionary, NSString *
     if (_hasAnimation) {
         // If have animation, we still need to allocate a CGContext, because the poster frame may be smaller than canvas
         if (!_canvas) {
-            CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
-            bitmapInfo |= _hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
-            CGContextRef canvas = CGBitmapContextCreate(NULL, _canvasWidth, _canvasHeight, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
+            CGContextRef canvas = CreateWebPCanvas(_hasAlpha, CGSizeMake(_canvasWidth, _canvasHeight), _thumbnailSize, _preserveAspectRatio);
             if (!canvas) {
                 return nil;
             }
             _canvas = canvas;
         }
-        CGSize scaledSize = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(_canvasWidth, _canvasHeight) scaleSize:_thumbnailSize preserveAspectRatio:_preserveAspectRatio shouldScaleUp:NO];
-        imageRef = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter colorSpace:_colorSpace scaledSize:scaledSize];
+        imageRef = [self sd_drawnWebpImageWithCanvas:_canvas demuxer:_demux iterator:iter colorSpace:_colorSpace];
     } else {
         CGSize scaledSize = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(iter.width, iter.height) scaleSize:_thumbnailSize preserveAspectRatio:_preserveAspectRatio shouldScaleUp:NO];
         imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:_colorSpace scaledSize:scaledSize];
@@ -1169,9 +1109,7 @@ static float GetFloatValueForKey(NSDictionary * _Nonnull dictionary, NSString *
 
 - (UIImage *)safeAnimatedImageFrameAtIndex:(NSUInteger)index {
     if (!_canvas) {
-        CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
-        bitmapInfo |= _hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
-        CGContextRef canvas = CGBitmapContextCreate(NULL, _canvasWidth, _canvasHeight, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
+        CGContextRef canvas = CreateWebPCanvas(_hasAlpha, CGSizeMake(_canvasWidth, _canvasHeight), _thumbnailSize, _preserveAspectRatio);
         if (!canvas) {
             return nil;
         }
@@ -1215,7 +1153,7 @@ static float GetFloatValueForKey(NSDictionary * _Nonnull dictionary, NSString *
     if (endIndex > startIndex) {
         do {
             @autoreleasepool {
-                [self sd_blendWebpImageWithCanvas:_canvas iterator:iter colorSpace:_colorSpace];
+                [self sd_blendWebpImageWithCanvas:_canvas demuxer:_demux iterator:iter colorSpace:_colorSpace];
             }
         } while ((size_t)iter.frame_num < endIndex && WebPDemuxNextFrame(&iter));
     }
@@ -1228,9 +1166,7 @@ static float GetFloatValueForKey(NSDictionary * _Nonnull dictionary, NSString *
     _currentBlendIndex = index;
 
     // Now the canvas is ready, which respects of dispose method behavior. Just do normal decoding and produce image.
-    // Check whether we need to use thumbnail
-    CGSize scaledSize = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(_canvasWidth, _canvasHeight) scaleSize:_thumbnailSize preserveAspectRatio:_preserveAspectRatio shouldScaleUp:NO];
-    CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter colorSpace:_colorSpace scaledSize:scaledSize];
+    CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:_canvas demuxer:_demux iterator:iter colorSpace:_colorSpace];
     if (!imageRef) {
         return nil;
     }
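End to end, the thumbnail path touched by these hunks is driven by the standard SDWebImage coder options. A hypothetical call site (the option keys are existing SDImageCoder keys; the data source and sizes are example values):

    NSData *webpData = [NSData dataWithContentsOfFile:@"/path/to/animation.webp"]; // example input
    SDImageWebPCoder *coder = [SDImageWebPCoder sharedCoder];
    UIImage *thumbnail = [coder decodedImageWithData:webpData options:@{
        SDImageCoderDecodeThumbnailPixelSize : [NSValue valueWithCGSize:CGSizeMake(512, 512)],
        SDImageCoderDecodePreserveAspectRatio : @(YES)
    }];
    // Each frame is rendered straight into the 512-pixel-bounded canvas,
    // instead of being decoded at full size and rescaled afterwards.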