1 /*
2 * Copyright (C) 2010 Google Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 *
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
14 * its contributors may be used to endorse or promote products derived
15 * from this software without specific prior written permission.
16 *
17 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 */
28
29 #include "third_party/blink/renderer/platform/image-decoders/webp/webp_image_decoder.h"
30
31 #include <string.h>
32
33 #include "base/feature_list.h"
34 #include "build/build_config.h"
35 #include "third_party/blink/renderer/platform/instrumentation/histogram.h"
36 #include "third_party/blink/renderer/platform/runtime_enabled_features.h"
37 #include "third_party/skia/include/core/SkData.h"
38 #include "third_party/skia/include/core/SkYUVAIndex.h"
39
40 #if defined(ARCH_CPU_BIG_ENDIAN)
41 #error Blink assumes a little-endian target.
42 #endif
43
44 namespace {
45
46 // Returns two point ranges (<left, width> pairs) at row |canvasY| which belong
47 // to |src| but not |dst|. A range is empty if its width is 0.
findBlendRangeAtRow(const blink::IntRect & src,const blink::IntRect & dst,int canvasY,int & left1,int & width1,int & left2,int & width2)48 inline void findBlendRangeAtRow(const blink::IntRect& src,
49 const blink::IntRect& dst,
50 int canvasY,
51 int& left1,
52 int& width1,
53 int& left2,
54 int& width2) {
55 SECURITY_DCHECK(canvasY >= src.Y() && canvasY < src.MaxY());
56 left1 = -1;
57 width1 = 0;
58 left2 = -1;
59 width2 = 0;
60
61 if (canvasY < dst.Y() || canvasY >= dst.MaxY() || src.X() >= dst.MaxX() ||
62 src.MaxX() <= dst.X()) {
63 left1 = src.X();
64 width1 = src.Width();
65 return;
66 }
67
68 if (src.X() < dst.X()) {
69 left1 = src.X();
70 width1 = dst.X() - src.X();
71 }
72
73 if (src.MaxX() > dst.MaxX()) {
74 left2 = dst.MaxX();
75 width2 = src.MaxX() - dst.MaxX();
76 }
77 }
78
// alphaBlendPremultiplied and alphaBlendNonPremultiplied are separate methods,
// even though they only differ by one line. This is done so that the compiler
// can inline BlendSrcOverDstPremultiplied() and BlendSrcOverDstRaw() calls.
// For GIF images, this optimization reduces decoding time by 15% for 3MB
// images.
//
// Blends |width| pixels of |src| at row |canvasY|, starting at column |left|,
// atop the corresponding pixels of |dst| using premultiplied-alpha blending.
void alphaBlendPremultiplied(blink::ImageFrame& src,
                             blink::ImageFrame& dst,
                             int canvasY,
                             int left,
                             int width) {
  for (int x = 0; x < width; ++x) {
    int canvasX = left + x;
    blink::ImageFrame::PixelData* pixel = src.GetAddr(canvasX, canvasY);
    // Fully opaque source pixels need no blending; skip them.
    if (SkGetPackedA32(*pixel) != 0xff) {
      blink::ImageFrame::PixelData prevPixel = *dst.GetAddr(canvasX, canvasY);
      blink::ImageFrame::BlendSrcOverDstPremultiplied(pixel, prevPixel);
    }
  }
}
98
alphaBlendNonPremultiplied(blink::ImageFrame & src,blink::ImageFrame & dst,int canvasY,int left,int width)99 void alphaBlendNonPremultiplied(blink::ImageFrame& src,
100 blink::ImageFrame& dst,
101 int canvasY,
102 int left,
103 int width) {
104 for (int x = 0; x < width; ++x) {
105 int canvasX = left + x;
106 blink::ImageFrame::PixelData* pixel = src.GetAddr(canvasX, canvasY);
107 if (SkGetPackedA32(*pixel) != 0xff) {
108 blink::ImageFrame::PixelData prevPixel = *dst.GetAddr(canvasX, canvasY);
109 blink::ImageFrame::BlendSrcOverDstRaw(pixel, prevPixel);
110 }
111 }
112 }
113
// Do not rename entries nor reuse numeric values. See the following link for
// descriptions: https://developers.google.com/speed/webp/docs/riff_container.
// Values are logged to the "Blink.DecodedImage.WebPFileFormat" UMA histogram
// and therefore must remain stable.
enum WebPFileFormat {
  kSimpleLossyFileFormat = 0,
  kSimpleLosslessFileFormat = 1,
  kExtendedAlphaFileFormat = 2,
  kExtendedAnimationFileFormat = 3,
  kExtendedAnimationWithAlphaFileFormat = 4,
  kUnknownFileFormat = 5,
  // Keep last: used as the histogram's bucket count.
  kCountWebPFileFormats
};
125
126 // Validates that |blob| is a simple lossy WebP image. Note that this explicitly
127 // checks "WEBPVP8 " to exclude extended lossy WebPs that don't actually use any
128 // extended features.
129 //
130 // TODO(crbug.com/1009237): consider combining this with the logic to detect
131 // WebPs that can be decoded to YUV.
IsSimpleLossyWebPImage(const sk_sp<SkData> & blob)132 bool IsSimpleLossyWebPImage(const sk_sp<SkData>& blob) {
133 if (blob->size() < 20UL)
134 return false;
135 DCHECK(blob->bytes());
136 return !memcmp(blob->bytes(), "RIFF", 4) &&
137 !memcmp(blob->bytes() + 8UL, "WEBPVP8 ", 8);
138 }
139
140 // This method parses |blob|'s header and emits a UMA with the file format, as
141 // defined by WebP, see WebPFileFormat.
UpdateWebPFileFormatUMA(const sk_sp<SkData> & blob)142 void UpdateWebPFileFormatUMA(const sk_sp<SkData>& blob) {
143 if (!IsMainThread())
144 return;
145
146 WebPBitstreamFeatures features{};
147 if (WebPGetFeatures(blob->bytes(), blob->size(), &features) != VP8_STATUS_OK)
148 return;
149
150 // These constants are defined verbatim in
151 // webp_dec.c::ParseHeadersInternal().
152 constexpr int kLossyFormat = 1;
153 constexpr int kLosslessFormat = 2;
154
155 WebPFileFormat file_format = kUnknownFileFormat;
156 if (features.has_alpha && features.has_animation)
157 file_format = kExtendedAnimationWithAlphaFileFormat;
158 else if (features.has_animation)
159 file_format = kExtendedAnimationFileFormat;
160 else if (features.has_alpha)
161 file_format = kExtendedAlphaFileFormat;
162 else if (features.format == kLossyFormat)
163 file_format = kSimpleLossyFileFormat;
164 else if (features.format == kLosslessFormat)
165 file_format = kSimpleLosslessFileFormat;
166
167 DEFINE_THREAD_SAFE_STATIC_LOCAL(
168 blink::EnumerationHistogram, file_format_histogram,
169 ("Blink.DecodedImage.WebPFileFormat", kCountWebPFileFormats));
170 file_format_histogram.Count(file_format);
171 }
172
173 } // namespace
174
175 namespace blink {
176
// Constructs a WebP decoder. |alpha_option| both configures the base class
// and selects which blend function is used to composite animation frames
// atop their predecessors (premultiplied vs. raw alpha).
WEBPImageDecoder::WEBPImageDecoder(AlphaOption alpha_option,
                                   const ColorBehavior& color_behavior,
                                   size_t max_decoded_bytes)
    : ImageDecoder(alpha_option,
                   ImageDecoder::kDefaultBitDepth,
                   color_behavior,
                   max_decoded_bytes),
      decoder_(nullptr),
      format_flags_(0),
      frame_background_has_alpha_(false),
      demux_(nullptr),
      demux_state_(WEBP_DEMUX_PARSING_HEADER),
      have_already_parsed_this_data_(false),
      repetition_count_(kAnimationLoopOnce),
      decoded_height_(0) {
  blend_function_ = (alpha_option == kAlphaPremultiplied)
                        ? alphaBlendPremultiplied
                        : alphaBlendNonPremultiplied;
}
196
WEBPImageDecoder::~WEBPImageDecoder() {
  // Releases the demuxer, the incremental decoder, and the consolidated data.
  Clear();
}
200
// Releases all libwebp state (demuxer plus incremental decoder) as well as
// the consolidated copy of the encoded data.
void WEBPImageDecoder::Clear() {
  WebPDemuxDelete(demux_);
  demux_ = nullptr;
  consolidated_data_.reset();
  ClearDecoder();
}
207
// Destroys the incremental decoder and resets the per-frame decoding
// progress, so the next decode starts from scratch. Keeps the demuxer and
// consolidated data intact.
void WEBPImageDecoder::ClearDecoder() {
  WebPIDelete(decoder_);
  decoder_ = nullptr;
  decoded_height_ = 0;
  frame_background_has_alpha_ = false;
}
214
// Chooses the libwebp output colorspace for RGB(A) decoding, accounting for
// whether a color transform will run afterwards and whether alpha should be
// premultiplied. Must not be used on the YUV decoding path.
WEBP_CSP_MODE WEBPImageDecoder::RGBOutputMode() {
  DCHECK(!IsDoingYuvDecode());
  if (ColorTransform()) {
    // Swizzling between RGBA and BGRA is zero cost in a color transform.
    // So when we have a color transform, we should decode to whatever is
    // easiest for libwebp, and then let the color transform swizzle if
    // necessary.
    // Lossy webp is encoded as YUV (so RGBA and BGRA are the same cost).
    // Lossless webp is encoded as BGRA. This means decoding to BGRA is
    // either faster or the same cost as RGBA.
    return MODE_BGRA;
  }
  // Premultiplication only matters when the image actually carries alpha.
  bool premultiply = (format_flags_ & ALPHA_FLAG) && premultiply_alpha_;
#if SK_B32_SHIFT  // Output little-endian RGBA pixels (Android)
  return premultiply ? MODE_rgbA : MODE_RGBA;
#else  // Output little-endian BGRA pixels.
  return premultiply ? MODE_bgrA : MODE_BGRA;
#endif
}
234
CanAllowYUVDecodingForWebP()235 bool WEBPImageDecoder::CanAllowYUVDecodingForWebP() {
236 if (!consolidated_data_)
237 return false;
238 // Should have been updated with a recent call to UpdateDemuxer().
239 WebPBitstreamFeatures features;
240 if (RuntimeEnabledFeatures::DecodeLossyWebPImagesToYUVEnabled() &&
241 (demux_state_ == WEBP_DEMUX_PARSED_HEADER ||
242 demux_state_ == WEBP_DEMUX_DONE) &&
243 WebPGetFeatures(consolidated_data_->bytes(), consolidated_data_->size(),
244 &features) == VP8_STATUS_OK) {
245 bool is_animated = !!(format_flags_ & ANIMATION_FLAG);
246 constexpr int kLossyFormat = ImageDecoder::CompressionFormat::kLossyFormat;
247 // TODO(crbug/910276): Change after alpha support.
248 if (features.format != kLossyFormat || features.has_alpha || is_animated)
249 return false;
250
251 // TODO(crbug/911246): Stop vetoing images with ICCP after Skia supports
252 // transforming colorspace within YUV, which would allow colorspace
253 // conversion during decode. Alternatively, look into passing along
254 // transform for raster-time.
255 bool has_iccp = !!(format_flags_ & ICCP_FLAG);
256 return !has_iccp;
257 }
258 return false;
259 }
260
// Called when new encoded data is attached. Once all data has arrived,
// parses the header via UpdateDemuxer() and decides whether this image is
// eligible for direct-to-YUV decoding.
void WEBPImageDecoder::OnSetData(SegmentReader* data) {
  have_already_parsed_this_data_ = false;
  // TODO(crbug.com/943519): Modify this approach for incremental YUV (when
  // we don't require IsAllDataReceived() to be true before decoding).
  if (IsAllDataReceived()) {
    UpdateDemuxer();
    allow_decode_to_yuv_ =
        RuntimeEnabledFeatures::DecodeLossyWebPImagesToYUVEnabled() &&
        CanAllowYUVDecodingForWebP();
  }
}
272
RepetitionCount() const273 int WEBPImageDecoder::RepetitionCount() const {
274 return Failed() ? kAnimationLoopOnce : repetition_count_;
275 }
276
FrameIsReceivedAtIndex(size_t index) const277 bool WEBPImageDecoder::FrameIsReceivedAtIndex(size_t index) const {
278 if (!demux_ || demux_state_ <= WEBP_DEMUX_PARSING_HEADER)
279 return false;
280 if (!(format_flags_ & ANIMATION_FLAG))
281 return ImageDecoder::FrameIsReceivedAtIndex(index);
282 bool frame_is_received_at_index = index < frame_buffer_cache_.size();
283 return frame_is_received_at_index;
284 }
285
FrameDurationAtIndex(size_t index) const286 base::TimeDelta WEBPImageDecoder::FrameDurationAtIndex(size_t index) const {
287 return index < frame_buffer_cache_.size()
288 ? frame_buffer_cache_[index].Duration()
289 : base::TimeDelta();
290 }
291
// Feeds the data received so far into the WebP demuxer and latches global
// image information (canvas size, format flags, repetition count, ICC
// profile) the first time the size becomes available. Returns true when at
// least one frame's metadata is available, false when more data is needed,
// and SetFailed() on malformed input. Re-parsing is skipped when no new data
// has arrived since the last call.
bool WEBPImageDecoder::UpdateDemuxer() {
  if (Failed())
    return false;

  // Minimum number of bytes needed before header parsing can succeed.
  const unsigned kWebpHeaderSize = 30;
  if (data_->size() < kWebpHeaderSize)
    return IsAllDataReceived() ? SetFailed() : false;

  if (have_already_parsed_this_data_)
    return true;

  have_already_parsed_this_data_ = true;

  if (consolidated_data_ && consolidated_data_->size() >= data_->size()) {
    // Less data provided than last time. |consolidated_data_| is guaranteed
    // to be its own copy of the data, so it is safe to keep it.
    return true;
  }

  // The demuxer needs one contiguous buffer. If everything has arrived, use
  // the data directly; otherwise accumulate the segments into |buffer_| and
  // wrap that storage without copying.
  if (IsAllDataReceived() && !consolidated_data_) {
    consolidated_data_ = data_->GetAsSkData();
  } else {
    buffer_.ReserveCapacity(data_->size());
    while (buffer_.size() < data_->size()) {
      const char* segment;
      const size_t bytes = data_->GetSomeData(segment, buffer_.size());
      DCHECK(bytes);
      buffer_.Append(segment, bytes);
    }
    DCHECK_EQ(buffer_.size(), data_->size());
    consolidated_data_ =
        SkData::MakeWithoutCopy(buffer_.data(), buffer_.size());
  }

  // Rebuild the demuxer over the (possibly longer) consolidated data.
  WebPDemuxDelete(demux_);
  WebPData input_data = {
      reinterpret_cast<const uint8_t*>(consolidated_data_->data()),
      consolidated_data_->size()};
  demux_ = WebPDemuxPartial(&input_data, &demux_state_);
  if (!demux_ || (IsAllDataReceived() && demux_state_ != WEBP_DEMUX_DONE)) {
    if (!demux_)
      consolidated_data_.reset();
    return SetFailed();
  }

  DCHECK_GT(demux_state_, WEBP_DEMUX_PARSING_HEADER);
  if (!WebPDemuxGetI(demux_, WEBP_FF_FRAME_COUNT))
    return false;  // Wait until the encoded image frame data arrives.

  // First time the canvas size is known: record size, format flags,
  // repetition count and (for still images) the color profile.
  if (!IsDecodedSizeAvailable()) {
    uint32_t width = WebPDemuxGetI(demux_, WEBP_FF_CANVAS_WIDTH);
    uint32_t height = WebPDemuxGetI(demux_, WEBP_FF_CANVAS_HEIGHT);
    if (!SetSize(base::strict_cast<unsigned>(width),
                 base::strict_cast<unsigned>(height)))
      return SetFailed();

    UpdateWebPFileFormatUMA(consolidated_data_);

    format_flags_ = WebPDemuxGetI(demux_, WEBP_FF_FORMAT_FLAGS);
    if (!(format_flags_ & ANIMATION_FLAG)) {
      repetition_count_ = kAnimationNone;
    } else {
      // Since we have parsed at least one frame, even if partially,
      // the global animation (ANIM) properties have been read since
      // an ANIM chunk must precede the ANMF frame chunks.
      repetition_count_ = WebPDemuxGetI(demux_, WEBP_FF_LOOP_COUNT);
      // Repetition count is always <= 16 bits.
      DCHECK_EQ(repetition_count_, repetition_count_ & 0xffff);
      // Repetition count is treated as n + 1 cycles for GIF. WebP defines loop
      // count as the number of cycles, with 0 meaning infinite.
      repetition_count_ = repetition_count_ == 0 ? kAnimationLoopInfinite
                                                 : repetition_count_ - 1;
      // FIXME: Implement ICC profile support for animated images.
      format_flags_ &= ~ICCP_FLAG;
    }

    if ((format_flags_ & ICCP_FLAG) && !IgnoresColorSpace())
      ReadColorProfile();
  }

  DCHECK(IsDecodedSizeAvailable());

  size_t frame_count = WebPDemuxGetI(demux_, WEBP_FF_FRAME_COUNT);
  UpdateAggressivePurging(frame_count);

  return true;
}
379
// Called after the base class initializes the buffer for |frame_index|.
// Computes whether the background this frame is composited onto may contain
// transparency, which later feeds into the frame's final alpha state (see
// DecodeSingleFrame()).
void WEBPImageDecoder::OnInitFrameBuffer(size_t frame_index) {
  // ImageDecoder::InitFrameBuffer does a DCHECK if |frame_index| exists.
  ImageFrame& buffer = frame_buffer_cache_[frame_index];

  const size_t required_previous_frame_index =
      buffer.RequiredPreviousFrameIndex();
  if (required_previous_frame_index == kNotFound) {
    // No previous frame: the background is transparent wherever this frame
    // does not cover the full canvas.
    frame_background_has_alpha_ =
        !buffer.OriginalFrameRect().Contains(IntRect(IntPoint(), Size()));
  } else {
    // Background transparency is inherited from the previous frame, or
    // introduced by its dispose-to-background-color step.
    const ImageFrame& prev_buffer =
        frame_buffer_cache_[required_previous_frame_index];
    frame_background_has_alpha_ =
        prev_buffer.HasAlpha() || (prev_buffer.GetDisposalMethod() ==
                                   ImageFrame::kDisposeOverwriteBgcolor);
  }

  // The buffer is transparent outside the decoded area while the image is
  // loading. The correct alpha value for the frame will be set when it is fully
  // decoded.
  buffer.SetHasAlpha(true);
}
402
// Entry point for direct YUV decoding of a non-animated WebP. Fetches the
// single frame's bitstream from the demuxer and forwards it to
// DecodeSingleFrameToYUV(); fails the decode if the frame cannot be fetched.
void WEBPImageDecoder::DecodeToYUV() {
  DCHECK(IsDoingYuvDecode());

  if (Failed())
    return;

  DCHECK(demux_);
  DCHECK(!(format_flags_ & ANIMATION_FLAG));

  WebPIterator webp_iter;
  // libwebp is 1-indexed.
  if (!WebPDemuxGetFrame(demux_, 1 /* frame */, &webp_iter)) {
    SetFailed();
  } else {
    // Ensure the iterator is released on every path out of this scope.
    std::unique_ptr<WebPIterator, void (*)(WebPIterator*)> webp_frame(
        &webp_iter, WebPDemuxReleaseIterator);
    DecodeSingleFrameToYUV(webp_frame->fragment.bytes,
                           webp_frame->fragment.size);
  }
}
423
DecodedYUVSize(int component) const424 IntSize WEBPImageDecoder::DecodedYUVSize(int component) const {
425 DCHECK_GE(component, 0);
426 // TODO(crbug.com/910276): Change after alpha support.
427 DCHECK_LE(component, 2);
428 DCHECK(IsDecodedSizeAvailable());
429 switch (component) {
430 case SkYUVAIndex::kY_Index:
431 return Size();
432 case SkYUVAIndex::kU_Index:
433 FALLTHROUGH;
434 case SkYUVAIndex::kV_Index:
435 return IntSize((Size().Width() + 1) / 2, (Size().Height() + 1) / 2);
436 }
437 NOTREACHED();
438 return IntSize(0, 0);
439 }
440
DecodedYUVWidthBytes(int component) const441 size_t WEBPImageDecoder::DecodedYUVWidthBytes(int component) const {
442 DCHECK_GE(component, 0);
443 DCHECK_LE(component, 2);
444 switch (component) {
445 case SkYUVAIndex::kY_Index:
446 return base::checked_cast<size_t>(Size().Width());
447 case SkYUVAIndex::kU_Index:
448 FALLTHROUGH;
449 case SkYUVAIndex::kV_Index:
450 return base::checked_cast<size_t>((Size().Width() + 1) / 2);
451 }
452 NOTREACHED();
453 return 0;
454 }
455
// Reports the color space of the decoded YUV planes. Rec. 601 is returned
// unconditionally — presumably matching libwebp's YUV conversion convention;
// confirm against the libwebp documentation before changing.
SkYUVColorSpace WEBPImageDecoder::GetYUVColorSpace() const {
  return SkYUVColorSpace::kRec601_SkYUVColorSpace;
}
459
GetYUVSubsampling() const460 cc::YUVSubsampling WEBPImageDecoder::GetYUVSubsampling() const {
461 DCHECK(consolidated_data_);
462 if (IsSimpleLossyWebPImage(consolidated_data_))
463 return cc::YUVSubsampling::k420;
464 // It is possible for a non-simple lossy WebP to also be YUV 4:2:0. However,
465 // we're being conservative here because this is currently only used for
466 // hardware decode acceleration, and WebPs other than simple lossy are not
467 // supported in that path anyway.
468 return cc::YUVSubsampling::kUnknown;
469 }
470
CanReusePreviousFrameBuffer(size_t frame_index) const471 bool WEBPImageDecoder::CanReusePreviousFrameBuffer(size_t frame_index) const {
472 DCHECK(frame_index < frame_buffer_cache_.size());
473 return frame_buffer_cache_[frame_index].GetAlphaBlendSource() !=
474 ImageFrame::kBlendAtopPreviousFrame;
475 }
476
// Clears the pixel buffer for |frame_index|. If that frame was only
// partially decoded, the incremental decoder is also reset so the frame can
// be decoded again from scratch when next requested.
void WEBPImageDecoder::ClearFrameBuffer(size_t frame_index) {
  if (demux_ && demux_state_ >= WEBP_DEMUX_PARSED_HEADER &&
      frame_buffer_cache_[frame_index].GetStatus() ==
          ImageFrame::kFramePartial) {
    // Clear the decoder state so that this partial frame can be decoded again
    // when requested.
    ClearDecoder();
  }
  ImageDecoder::ClearFrameBuffer(frame_index);
}
487
// Extracts the first ICCP chunk from the demuxer and installs it as the
// embedded color profile when it parses successfully and describes an RGB
// color space. The chunk iterator is released on every path.
void WEBPImageDecoder::ReadColorProfile() {
  WebPChunkIterator chunk_iterator;
  if (!WebPDemuxGetChunk(demux_, "ICCP", 1, &chunk_iterator)) {
    WebPDemuxReleaseChunkIterator(&chunk_iterator);
    return;
  }

  const char* profile_data =
      reinterpret_cast<const char*>(chunk_iterator.chunk.bytes);
  size_t profile_size = chunk_iterator.chunk.size;

  if (auto profile = ColorProfile::Create(profile_data, profile_size)) {
    // Non-RGB (e.g. CMYK) profiles are ignored rather than treated as errors.
    if (profile->GetProfile()->data_color_space == skcms_Signature_RGB) {
      SetEmbeddedColorProfile(std::move(profile));
    }
  } else {
    DLOG(ERROR) << "Failed to parse image ICC profile";
  }

  WebPDemuxReleaseChunkIterator(&chunk_iterator);
}
509
// Post-processes the rows of frame |frame_index| that were newly decoded
// since the previous call (rows |decoded_height_| up to the decoder's
// current height): applies the embedded color-profile transform, then
// alpha-blends animation frames against the previous canvas where the blend
// mode requires it. Finally records the new decoded height and marks the
// frame's pixels as changed.
void WEBPImageDecoder::ApplyPostProcessing(size_t frame_index) {
  ImageFrame& buffer = frame_buffer_cache_[frame_index];
  int width;
  int decoded_height;
  // TODO(crbug.com/911246): Do post-processing once skcms_Transform
  // supports multiplanar formats.
  DCHECK(!IsDoingYuvDecode());

  if (!WebPIDecGetRGB(decoder_, &decoded_height, &width, nullptr, nullptr))
    return;  // See also https://bugs.webkit.org/show_bug.cgi?id=74062
  if (decoded_height <= 0)
    return;

  const IntRect& frame_rect = buffer.OriginalFrameRect();
  SECURITY_DCHECK(width == frame_rect.Width());
  SECURITY_DCHECK(decoded_height <= frame_rect.Height());
  const int left = frame_rect.X();
  const int top = frame_rect.Y();

  // TODO (msarett):
  // Here we apply the color space transformation to the dst space.
  // It does not really make sense to transform to a gamma-encoded
  // space and then immediately after, perform a linear premultiply
  // and linear blending. Can we find a way to perform the
  // premultiplication and blending in a linear space?
  ColorProfileTransform* xform = ColorTransform();
  if (xform) {
    skcms_PixelFormat kSrcFormat = skcms_PixelFormat_BGRA_8888;
    skcms_PixelFormat kDstFormat = skcms_PixelFormat_RGBA_8888;
    skcms_AlphaFormat alpha_format = skcms_AlphaFormat_Unpremul;
    // Transform each newly decoded row in place, then write the result back
    // through SetRGBA() (which also handles premultiplication).
    for (int y = decoded_height_; y < decoded_height; ++y) {
      const int canvas_y = top + y;
      uint8_t* row = reinterpret_cast<uint8_t*>(buffer.GetAddr(left, canvas_y));
      bool color_conversion_successful = skcms_Transform(
          row, kSrcFormat, alpha_format, xform->SrcProfile(), row, kDstFormat,
          alpha_format, xform->DstProfile(), width);
      DCHECK(color_conversion_successful);
      uint8_t* pixel = row;
      for (int x = 0; x < width; ++x, pixel += 4) {
        const int canvas_x = left + x;
        buffer.SetRGBA(canvas_x, canvas_y, pixel[0], pixel[1], pixel[2],
                       pixel[3]);
      }
    }
  }

  // During the decoding of the current frame, we may have set some pixels to be
  // transparent (i.e. alpha < 255). If the alpha blend source was
  // 'BlendAtopPreviousFrame', the values of these pixels should be
  // determined by blending them against the pixels of the corresponding
  // previous frame. Compute the correct opaque values now.
  // FIXME: This could be avoided if libwebp decoder had an API that used the
  // previous required frame to do the alpha-blending by itself.
  if ((format_flags_ & ANIMATION_FLAG) && frame_index &&
      buffer.GetAlphaBlendSource() == ImageFrame::kBlendAtopPreviousFrame &&
      buffer.RequiredPreviousFrameIndex() != kNotFound) {
    ImageFrame& prev_buffer = frame_buffer_cache_[frame_index - 1];
    DCHECK_EQ(prev_buffer.GetStatus(), ImageFrame::kFrameComplete);
    ImageFrame::DisposalMethod prev_disposal_method =
        prev_buffer.GetDisposalMethod();
    if (prev_disposal_method == ImageFrame::kDisposeKeep) {
      // Blend transparent pixels with pixels in previous canvas.
      for (int y = decoded_height_; y < decoded_height; ++y) {
        blend_function_(buffer, prev_buffer, top + y, left, width);
      }
    } else if (prev_disposal_method == ImageFrame::kDisposeOverwriteBgcolor) {
      const IntRect& prev_rect = prev_buffer.OriginalFrameRect();
      // We need to blend a transparent pixel with the starting value (from just
      // after the InitFrame() call). If the pixel belongs to prev_rect, the
      // starting value was fully transparent, so this is a no-op. Otherwise, we
      // need to blend against the pixel from the previous canvas.
      for (int y = decoded_height_; y < decoded_height; ++y) {
        int canvas_y = top + y;
        int left1, width1, left2, width2;
        findBlendRangeAtRow(frame_rect, prev_rect, canvas_y, left1, width1,
                            left2, width2);
        if (width1 > 0)
          blend_function_(buffer, prev_buffer, canvas_y, left1, width1);
        if (width2 > 0)
          blend_function_(buffer, prev_buffer, canvas_y, left2, width2);
      }
    }
  }

  decoded_height_ = decoded_height;
  buffer.SetPixelsChanged(true);
}
597
DecodeFrameCount()598 size_t WEBPImageDecoder::DecodeFrameCount() {
599 // If UpdateDemuxer() fails, return the existing number of frames. This way if
600 // we get halfway through the image before decoding fails, we won't suddenly
601 // start reporting that the image has zero frames.
602 return UpdateDemuxer() ? WebPDemuxGetI(demux_, WEBP_FF_FRAME_COUNT)
603 : frame_buffer_cache_.size();
604 }
605
// Fills in per-frame metadata (frame rect, duration, disposal method, blend
// mode, required previous frame) for frame |index| of an animated WebP,
// using the demuxer's frame iterator. Still images need no per-frame
// metadata and only ever have frame 0.
void WEBPImageDecoder::InitializeNewFrame(size_t index) {
  if (!(format_flags_ & ANIMATION_FLAG)) {
    DCHECK(!index);
    return;
  }
  WebPIterator animated_frame;
  // libwebp frames are 1-indexed.
  WebPDemuxGetFrame(demux_, index + 1, &animated_frame);
  DCHECK_EQ(animated_frame.complete, 1);
  ImageFrame* buffer = &frame_buffer_cache_[index];
  IntRect frame_rect(animated_frame.x_offset, animated_frame.y_offset,
                     animated_frame.width, animated_frame.height);
  // Clip the frame rect to the canvas bounds.
  buffer->SetOriginalFrameRect(
      Intersection(frame_rect, IntRect(IntPoint(), Size())));
  buffer->SetDuration(
      base::TimeDelta::FromMilliseconds(animated_frame.duration));
  buffer->SetDisposalMethod(animated_frame.dispose_method ==
                                    WEBP_MUX_DISPOSE_BACKGROUND
                                ? ImageFrame::kDisposeOverwriteBgcolor
                                : ImageFrame::kDisposeKeep);
  buffer->SetAlphaBlendSource(animated_frame.blend_method == WEBP_MUX_BLEND
                                  ? ImageFrame::kBlendAtopPreviousFrame
                                  : ImageFrame::kBlendAtopBgcolor);
  buffer->SetRequiredPreviousFrameIndex(
      FindRequiredPreviousFrame(index, !animated_frame.has_alpha));
  WebPDemuxReleaseIterator(&animated_frame);
}
632
// Decodes frame |index| on the RGB(A) path, first decoding (in order) any
// earlier frames it depends on. Fails the whole decode if a frame cannot be
// initialized or fetched, or if all data has arrived but the file is
// truncated.
void WEBPImageDecoder::Decode(size_t index) {
  DCHECK(!IsDoingYuvDecode());

  if (Failed())
    return;

  Vector<size_t> frames_to_decode = FindFramesToDecode(index);

  DCHECK(demux_);
  // FindFramesToDecode() returns frames in reverse order; iterate backwards
  // to decode dependencies before dependents.
  for (auto i = frames_to_decode.rbegin(); i != frames_to_decode.rend(); ++i) {
    if ((format_flags_ & ANIMATION_FLAG) && !InitFrameBuffer(*i)) {
      SetFailed();
      return;
    }

    WebPIterator webp_iter;
    if (!WebPDemuxGetFrame(demux_, *i + 1, &webp_iter)) {
      SetFailed();
    } else {
      // Ensure the iterator is released on every path out of this scope.
      std::unique_ptr<WebPIterator, void (*)(WebPIterator*)> webp_frame(
          &webp_iter, WebPDemuxReleaseIterator);
      DecodeSingleFrame(webp_frame->fragment.bytes, webp_frame->fragment.size,
                        *i);
    }

    if (Failed())
      return;

    // If this returns false, we need more data to continue decoding.
    if (!PostDecodeProcessing(*i))
      break;
  }

  // It is also a fatal error if all data is received and we have decoded all
  // frames available but the file is truncated.
  if (index >= frame_buffer_cache_.size() - 1 && IsAllDataReceived() &&
      demux_ && demux_state_ != WEBP_DEMUX_DONE)
    SetFailed();
}
672
// Decodes one frame's bitstream (|data_bytes|, |data_size|) directly into
// the caller-provided Y/U/V planes held by |image_planes_|. Creates the
// incremental decoder on first use with external YUV memory; plane pointers
// are refreshed on every call. Returns true on success; on libwebp error,
// clears all state and fails the decode.
bool WEBPImageDecoder::DecodeSingleFrameToYUV(const uint8_t* data_bytes,
                                              size_t data_size) {
  DCHECK(IsDoingYuvDecode());
  DCHECK(!Failed());

  bool size_available_after_init = IsSizeAvailable();
  DCHECK(size_available_after_init);

  // Set up decoder_buffer_ with output mode
  if (!decoder_) {
    WebPInitDecBuffer(&decoder_buffer_);
    decoder_buffer_.colorspace = MODE_YUV;  // TODO(crbug.com/910276): Change
                                            // after alpha YUV support is added.
  }

  ImagePlanes* image_planes = image_planes_.get();
  DCHECK(image_planes);
  // Even if |decoder_| already exists, we must get most up-to-date pointers
  // because memory location might change e.g. upon tab resume.
  decoder_buffer_.u.YUVA.y =
      static_cast<uint8_t*>(image_planes->Plane(SkYUVAIndex::kY_Index));
  decoder_buffer_.u.YUVA.u =
      static_cast<uint8_t*>(image_planes->Plane(SkYUVAIndex::kU_Index));
  decoder_buffer_.u.YUVA.v =
      static_cast<uint8_t*>(image_planes->Plane(SkYUVAIndex::kV_Index));

  if (!decoder_) {
    // libwebp only supports YUV 420 subsampling
    // Configure plane strides/sizes once; they do not change between calls.
    decoder_buffer_.u.YUVA.y_stride =
        image_planes->RowBytes(SkYUVAIndex::kY_Index);
    decoder_buffer_.u.YUVA.y_size =
        decoder_buffer_.u.YUVA.y_stride *
        DecodedYUVSize(SkYUVAIndex::kY_Index).Height();
    decoder_buffer_.u.YUVA.u_stride =
        image_planes->RowBytes(SkYUVAIndex::kU_Index);
    decoder_buffer_.u.YUVA.u_size =
        decoder_buffer_.u.YUVA.u_stride *
        DecodedYUVSize(SkYUVAIndex::kU_Index).Height();
    decoder_buffer_.u.YUVA.v_stride =
        image_planes->RowBytes(SkYUVAIndex::kV_Index);
    decoder_buffer_.u.YUVA.v_size =
        decoder_buffer_.u.YUVA.v_stride *
        DecodedYUVSize(SkYUVAIndex::kV_Index).Height();

    // The planes above are owned by |image_planes_|, not libwebp.
    decoder_buffer_.is_external_memory = 1;
    decoder_ = WebPINewDecoder(&decoder_buffer_);
    if (!decoder_)
      return SetFailed();
  }

  if (WebPIUpdate(decoder_, data_bytes, data_size) != VP8_STATUS_OK) {
    Clear();
    return SetFailed();
  }

  // TODO(crbug.com/911246): Do post-processing once skcms_Transform
  // supports multiplanar formats.
  ClearDecoder();
  return true;
}
733
// Incrementally decodes one frame's bitstream (|data_bytes|, |data_size|)
// into frame_buffer_cache_[frame_index] on the RGB(A) path. Allocates and
// zero-fills the frame's pixels on first use and creates the incremental
// libwebp decoder on demand. Returns true once the frame is complete, false
// when more data is required or on failure (check Failed() to distinguish).
bool WEBPImageDecoder::DecodeSingleFrame(const uint8_t* data_bytes,
                                         size_t data_size,
                                         size_t frame_index) {
  DCHECK(!IsDoingYuvDecode());
  if (Failed())
    return false;
  DCHECK(IsDecodedSizeAvailable());

  DCHECK_GT(frame_buffer_cache_.size(), frame_index);
  ImageFrame& buffer = frame_buffer_cache_[frame_index];
  DCHECK_NE(buffer.GetStatus(), ImageFrame::kFrameComplete);

  if (buffer.GetStatus() == ImageFrame::kFrameEmpty) {
    if (!buffer.AllocatePixelData(Size().Width(), Size().Height(),
                                  ColorSpaceForSkImages())) {
      return SetFailed();
    }
    buffer.ZeroFillPixelData();
    buffer.SetStatus(ImageFrame::kFramePartial);
    // The buffer is transparent outside the decoded area while the image
    // is loading. The correct alpha value for the frame will be set when
    // it is fully decoded.
    buffer.SetHasAlpha(true);
    buffer.SetOriginalFrameRect(IntRect(IntPoint(), Size()));
  }

  const IntRect& frame_rect = buffer.OriginalFrameRect();
  if (!decoder_) {
    // Set up decoder_buffer_ with output mode
    WebPInitDecBuffer(&decoder_buffer_);
    decoder_buffer_.colorspace = RGBOutputMode();
    // Stride spans the full canvas so the frame decodes into place.
    decoder_buffer_.u.RGBA.stride =
        Size().Width() * sizeof(ImageFrame::PixelData);
    decoder_buffer_.u.RGBA.size =
        decoder_buffer_.u.RGBA.stride * frame_rect.Height();
    decoder_buffer_.is_external_memory = 1;
    decoder_ = WebPINewDecoder(&decoder_buffer_);
    if (!decoder_)
      return SetFailed();
  }
  // Refresh the output pointer each call; buffer memory may have moved.
  decoder_buffer_.u.RGBA.rgba = reinterpret_cast<uint8_t*>(
      buffer.GetAddr(frame_rect.X(), frame_rect.Y()));

  switch (WebPIUpdate(decoder_, data_bytes, data_size)) {
    case VP8_STATUS_OK:
      // Frame fully decoded: post-process, finalize alpha, and release the
      // incremental decoder.
      ApplyPostProcessing(frame_index);
      buffer.SetHasAlpha((format_flags_ & ALPHA_FLAG) ||
                         frame_background_has_alpha_);
      buffer.SetStatus(ImageFrame::kFrameComplete);
      ClearDecoder();
      return true;
    case VP8_STATUS_SUSPENDED:
      // Decoder is waiting for more input; only acceptable while the frame's
      // data is genuinely still arriving. Otherwise fall through to failure.
      if (!IsAllDataReceived() && !FrameIsReceivedAtIndex(frame_index)) {
        ApplyPostProcessing(frame_index);
        return false;
      }
      FALLTHROUGH;
    default:
      Clear();
      return SetFailed();
  }
}
796
MakeMetadataForDecodeAcceleration() const797 cc::ImageHeaderMetadata WEBPImageDecoder::MakeMetadataForDecodeAcceleration()
798 const {
799 cc::ImageHeaderMetadata image_metadata =
800 ImageDecoder::MakeMetadataForDecodeAcceleration();
801
802 DCHECK(consolidated_data_);
803 image_metadata.webp_is_non_extended_lossy =
804 IsSimpleLossyWebPImage(consolidated_data_);
805 return image_metadata;
806 }
807
808 } // namespace blink
809