summaryrefslogtreecommitdiffstats
path: root/chromium/third_party/WebKit/Source/platform/image-decoders/webp/WEBPImageDecoder.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'chromium/third_party/WebKit/Source/platform/image-decoders/webp/WEBPImageDecoder.cpp')
-rw-r--r--chromium/third_party/WebKit/Source/platform/image-decoders/webp/WEBPImageDecoder.cpp214
1 files changed, 140 insertions, 74 deletions
diff --git a/chromium/third_party/WebKit/Source/platform/image-decoders/webp/WEBPImageDecoder.cpp b/chromium/third_party/WebKit/Source/platform/image-decoders/webp/WEBPImageDecoder.cpp
index b4ac82d411c..0959244acb1 100644
--- a/chromium/third_party/WebKit/Source/platform/image-decoders/webp/WEBPImageDecoder.cpp
+++ b/chromium/third_party/WebKit/Source/platform/image-decoders/webp/WEBPImageDecoder.cpp
@@ -29,8 +29,8 @@
#include "config.h"
#include "platform/image-decoders/webp/WEBPImageDecoder.h"
-#include "RuntimeEnabledFeatures.h"
#include "platform/PlatformInstrumentation.h"
+#include "platform/RuntimeEnabledFeatures.h"
#if USE(QCMSLIB)
#include "qcms.h"
@@ -46,6 +46,83 @@ inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_rgbA : M
inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_bgrA : MODE_BGRA; }
#endif
+inline uint8_t blendChannel(uint8_t src, uint8_t srcA, uint8_t dst, uint8_t dstA, unsigned scale)
+{
+ unsigned blendUnscaled = src * srcA + dst * dstA;
+ ASSERT(blendUnscaled < (1ULL << 32) / scale);
+ return (blendUnscaled * scale) >> 24;
+}
+
+inline uint32_t blendSrcOverDstNonPremultiplied(uint32_t src, uint32_t dst)
+{
+ uint8_t srcA = SkGetPackedA32(src);
+ if (srcA == 0)
+ return dst;
+
+ uint8_t dstA = SkGetPackedA32(dst);
+ uint8_t dstFactorA = (dstA * SkAlpha255To256(255 - srcA)) >> 8;
+ ASSERT(srcA + dstFactorA < (1U << 8));
+ uint8_t blendA = srcA + dstFactorA;
+ unsigned scale = (1UL << 24) / blendA;
+
+ uint8_t blendR = blendChannel(SkGetPackedR32(src), srcA, SkGetPackedR32(dst), dstFactorA, scale);
+ uint8_t blendG = blendChannel(SkGetPackedG32(src), srcA, SkGetPackedG32(dst), dstFactorA, scale);
+ uint8_t blendB = blendChannel(SkGetPackedB32(src), srcA, SkGetPackedB32(dst), dstFactorA, scale);
+
+ return SkPackARGB32NoCheck(blendA, blendR, blendG, blendB);
+}
+
+// Returns two point ranges (<left, width> pairs) at row 'canvasY' that belong to 'src' but not 'dst'.
+// A point range is empty if the corresponding width is 0.
+inline void findBlendRangeAtRow(const WebCore::IntRect& src, const WebCore::IntRect& dst, int canvasY, int& left1, int& width1, int& left2, int& width2)
+{
+ ASSERT_WITH_SECURITY_IMPLICATION(canvasY >= src.y() && canvasY < src.maxY());
+ left1 = -1;
+ width1 = 0;
+ left2 = -1;
+ width2 = 0;
+
+ if (canvasY < dst.y() || canvasY >= dst.maxY() || src.x() >= dst.maxX() || src.maxX() <= dst.x()) {
+ left1 = src.x();
+ width1 = src.width();
+ return;
+ }
+
+ if (src.x() < dst.x()) {
+ left1 = src.x();
+ width1 = dst.x() - src.x();
+ }
+
+ if (src.maxX() > dst.maxX()) {
+ left2 = dst.maxX();
+ width2 = src.maxX() - dst.maxX();
+ }
+}
+
+void alphaBlendPremultiplied(WebCore::ImageFrame& src, WebCore::ImageFrame& dst, int canvasY, int left, int width)
+{
+ for (int x = 0; x < width; ++x) {
+ int canvasX = left + x;
+ WebCore::ImageFrame::PixelData& pixel = *src.getAddr(canvasX, canvasY);
+ if (SkGetPackedA32(pixel) != 0xff) {
+ WebCore::ImageFrame::PixelData prevPixel = *dst.getAddr(canvasX, canvasY);
+ pixel = SkPMSrcOver(pixel, prevPixel);
+ }
+ }
+}
+
+void alphaBlendNonPremultiplied(WebCore::ImageFrame& src, WebCore::ImageFrame& dst, int canvasY, int left, int width)
+{
+ for (int x = 0; x < width; ++x) {
+ int canvasX = left + x;
+ WebCore::ImageFrame::PixelData& pixel = *src.getAddr(canvasX, canvasY);
+ if (SkGetPackedA32(pixel) != 0xff) {
+ WebCore::ImageFrame::PixelData prevPixel = *dst.getAddr(canvasX, canvasY);
+ pixel = blendSrcOverDstNonPremultiplied(pixel, prevPixel);
+ }
+ }
+}
+
namespace WebCore {
WEBPImageDecoder::WEBPImageDecoder(ImageSource::AlphaOption alphaOption,
@@ -55,6 +132,7 @@ WEBPImageDecoder::WEBPImageDecoder(ImageSource::AlphaOption alphaOption,
, m_decoder(0)
, m_formatFlags(0)
, m_frameBackgroundHasAlpha(false)
+ , m_hasColorProfile(false)
#if USE(QCMSLIB)
, m_haveReadProfile(false)
, m_transform(0)
@@ -66,6 +144,7 @@ WEBPImageDecoder::WEBPImageDecoder(ImageSource::AlphaOption alphaOption,
, m_repetitionCount(cAnimationLoopOnce)
, m_decodedHeight(0)
{
+ m_blendFunction = (alphaOption == ImageSource::AlphaPremultiplied) ? alphaBlendPremultiplied : alphaBlendNonPremultiplied;
}
WEBPImageDecoder::~WEBPImageDecoder()
@@ -118,49 +197,41 @@ ImageFrame* WEBPImageDecoder::frameBufferAtIndex(size_t index)
if (frame.status() == ImageFrame::FrameComplete)
return &frame;
- if (RuntimeEnabledFeatures::animatedWebPEnabled()) {
- Vector<size_t> framesToDecode;
- size_t frameToDecode = index;
- do {
- framesToDecode.append(frameToDecode);
- frameToDecode = m_frameBufferCache[frameToDecode].requiredPreviousFrameIndex();
- } while (frameToDecode != kNotFound && m_frameBufferCache[frameToDecode].status() != ImageFrame::FrameComplete);
-
- ASSERT(m_demux);
- for (size_t i = framesToDecode.size(); i > 0; --i) {
- size_t frameIndex = framesToDecode[i - 1];
- if ((m_formatFlags & ANIMATION_FLAG) && !initFrameBuffer(frameIndex))
- return 0;
- WebPIterator webpFrame;
- if (!WebPDemuxGetFrame(m_demux, frameIndex + 1, &webpFrame))
- return 0;
- PlatformInstrumentation::willDecodeImage("WEBP");
- decode(webpFrame.fragment.bytes, webpFrame.fragment.size, false, frameIndex);
- PlatformInstrumentation::didDecodeImage();
- WebPDemuxReleaseIterator(&webpFrame);
-
- if (failed())
- return 0;
-
- // We need more data to continue decoding.
- if (m_frameBufferCache[frameIndex].status() != ImageFrame::FrameComplete)
- break;
- }
-
- // It is also a fatal error if all data is received and we have decoded all
- // frames available but the file is truncated.
- if (index >= m_frameBufferCache.size() - 1 && isAllDataReceived() && m_demux && m_demuxState != WEBP_DEMUX_DONE)
- setFailed();
-
- frame.notifyBitmapIfPixelsChanged();
- return &frame;
+ Vector<size_t> framesToDecode;
+ size_t frameToDecode = index;
+ do {
+ framesToDecode.append(frameToDecode);
+ frameToDecode = m_frameBufferCache[frameToDecode].requiredPreviousFrameIndex();
+ } while (frameToDecode != kNotFound && m_frameBufferCache[frameToDecode].status() != ImageFrame::FrameComplete);
+
+ ASSERT(m_demux);
+ for (size_t i = framesToDecode.size(); i > 0; --i) {
+ size_t frameIndex = framesToDecode[i - 1];
+ if ((m_formatFlags & ANIMATION_FLAG) && !initFrameBuffer(frameIndex))
+ return 0;
+ WebPIterator webpFrame;
+ if (!WebPDemuxGetFrame(m_demux, frameIndex + 1, &webpFrame))
+ return 0;
+ PlatformInstrumentation::willDecodeImage("WEBP");
+ decode(webpFrame.fragment.bytes, webpFrame.fragment.size, false, frameIndex);
+ PlatformInstrumentation::didDecodeImage();
+ WebPDemuxReleaseIterator(&webpFrame);
+
+ if (failed())
+ return 0;
+
+ // We need more data to continue decoding.
+ if (m_frameBufferCache[frameIndex].status() != ImageFrame::FrameComplete)
+ break;
}
- ASSERT(!index);
- PlatformInstrumentation::willDecodeImage("WEBP");
- decode(reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size(), false, index);
- PlatformInstrumentation::didDecodeImage();
- return failed() ? 0 : &frame;
+ // It is also a fatal error if all data is received and we have decoded all
+ // frames available but the file is truncated.
+ if (index >= m_frameBufferCache.size() - 1 && isAllDataReceived() && m_demux && m_demuxState != WEBP_DEMUX_DONE)
+ setFailed();
+
+ frame.notifyBitmapIfPixelsChanged();
+ return &frame;
}
void WEBPImageDecoder::setData(SharedBuffer* data, bool allDataReceived)
@@ -178,8 +249,6 @@ int WEBPImageDecoder::repetitionCount() const
bool WEBPImageDecoder::frameIsCompleteAtIndex(size_t index) const
{
- if (!RuntimeEnabledFeatures::animatedWebPEnabled())
- return ImageDecoder::frameIsCompleteAtIndex(index);
if (!m_demux || m_demuxState <= WEBP_DEMUX_PARSING_HEADER)
return false;
if (!(m_formatFlags & ANIMATION_FLAG))
@@ -210,7 +279,7 @@ bool WEBPImageDecoder::updateDemuxer()
WebPDemuxDelete(m_demux);
WebPData inputData = { reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size() };
m_demux = WebPDemuxPartial(&inputData, &m_demuxState);
- if (!m_demux)
+ if (!m_demux || (isAllDataReceived() && m_demuxState != WEBP_DEMUX_DONE))
return setFailed();
if (m_demuxState <= WEBP_DEMUX_PARSING_HEADER)
@@ -220,8 +289,14 @@ bool WEBPImageDecoder::updateDemuxer()
if (!ImageDecoder::isSizeAvailable()) {
m_formatFlags = WebPDemuxGetI(m_demux, WEBP_FF_FORMAT_FLAGS);
hasAnimation = (m_formatFlags & ANIMATION_FLAG);
- if (hasAnimation && !RuntimeEnabledFeatures::animatedWebPEnabled())
- return setFailed();
+ if (!hasAnimation)
+ m_repetitionCount = cAnimationNone;
+ else
+ m_formatFlags &= ~ICCP_FLAG; // FIXME: Implement ICC profile support for animated images.
+#if USE(QCMSLIB)
+ if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
+ m_hasColorProfile = true;
+#endif
if (!setSize(WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_WIDTH), WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_HEIGHT)))
return setFailed();
}
@@ -234,8 +309,12 @@ bool WEBPImageDecoder::updateDemuxer()
// This is because ANIM chunk always precedes ANMF chunks.
m_repetitionCount = WebPDemuxGetI(m_demux, WEBP_FF_LOOP_COUNT);
ASSERT(m_repetitionCount == (m_repetitionCount & 0xffff)); // Loop count is always <= 16 bits.
- if (!m_repetitionCount)
- m_repetitionCount = cAnimationLoopInfinite;
+ // |m_repetitionCount| is the total number of animation cycles to show,
+ // with 0 meaning "infinite". But ImageSource::repetitionCount()
+ // returns -1 for "infinite", and 0 and up for "show the animation one
+ // cycle more than this value". By subtracting one here, we convert
+ // both finite and infinite cases correctly.
+ --m_repetitionCount;
m_haveReadAnimationParameters = true;
}
@@ -428,36 +507,23 @@ void WEBPImageDecoder::applyPostProcessing(size_t frameIndex)
ImageFrame& prevBuffer = m_frameBufferCache[frameIndex - 1];
ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);
ImageFrame::DisposalMethod prevDisposalMethod = prevBuffer.disposalMethod();
- if (prevDisposalMethod == ImageFrame::DisposeKeep) { // Restore transparent pixels to pixels in previous canvas.
+ if (prevDisposalMethod == ImageFrame::DisposeKeep) { // Blend transparent pixels with pixels in previous canvas.
for (int y = m_decodedHeight; y < decodedHeight; ++y) {
- const int canvasY = top + y;
- for (int x = 0; x < width; ++x) {
- const int canvasX = left + x;
- ImageFrame::PixelData& pixel = *buffer.getAddr(canvasX, canvasY);
- // FIXME: Use alpha-blending when alpha is between 0 and 255.
- // Alpha-blending is being implemented in: https://bugs.webkit.org/show_bug.cgi?id=17022
- if (!((pixel >> SK_A32_SHIFT) & 0xff)) {
- ImageFrame::PixelData prevPixel = *prevBuffer.getAddr(canvasX, canvasY);
- pixel = prevPixel;
- }
- }
+ m_blendFunction(buffer, prevBuffer, top + y, left, width);
}
} else if (prevDisposalMethod == ImageFrame::DisposeOverwriteBgcolor) {
const IntRect& prevRect = prevBuffer.originalFrameRect();
- // We need to restore transparent pixels to as they were just after initFrame() call. That is:
- // * Transparent if it belongs to prevRect <-- This is a no-op.
- // * Pixel in the previous canvas otherwise <-- Need to restore.
+ // We need to blend a transparent pixel with its value just after initFrame() call. That is:
+ // * Blend with fully transparent pixel if it belongs to prevRect <-- This is a no-op.
+ // * Blend with the pixel in the previous canvas otherwise <-- Needs alpha-blending.
for (int y = m_decodedHeight; y < decodedHeight; ++y) {
- const int canvasY = top + y;
- for (int x = 0; x < width; ++x) {
- const int canvasX = left + x;
- ImageFrame::PixelData& pixel = *buffer.getAddr(canvasX, canvasY);
- // FIXME: Use alpha-blending when alpha is between 0 and 255.
- if (!((pixel >> SK_A32_SHIFT) & 0xff) && !prevRect.contains(IntPoint(canvasX, canvasY))) {
- ImageFrame::PixelData prevPixel = *prevBuffer.getAddr(canvasX, canvasY);
- pixel = prevPixel;
- }
- }
+ int canvasY = top + y;
+ int left1, width1, left2, width2;
+ findBlendRangeAtRow(frameRect, prevRect, canvasY, left1, width1, left2, width2);
+ if (width1 > 0)
+ m_blendFunction(buffer, prevBuffer, canvasY, left1, width1);
+ if (width2 > 0)
+ m_blendFunction(buffer, prevBuffer, canvasY, left2, width2);
}
}
}