// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_adapter.h"

#include <algorithm>
#include <functional>
#include <utility>

#include "base/callback_helpers.h"
#include "base/feature_list.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_functions.h"
#include "base/metrics/histogram_macros.h"
#include "base/sequenced_task_runner.h"
#include "base/stl_util.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/thread_restrictions.h"
#include "base/time/time.h"
#include "build/build_config.h"
#include "build/chromeos_buildflags.h"
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
#include "media/base/media_util.h"
#include "media/base/overlay_info.h"
#include "media/base/video_types.h"
#include "media/video/gpu_video_accelerator_factories.h"
#include "media/video/video_decode_accelerator.h"
#include "third_party/blink/public/platform/platform.h"
#include "third_party/blink/renderer/platform/scheduler/public/post_cross_thread_task.h"
#include "third_party/blink/renderer/platform/webrtc/webrtc_video_frame_adapter.h"
#include "third_party/blink/renderer/platform/webrtc/webrtc_video_utils.h"
#include "third_party/blink/renderer/platform/wtf/cross_thread_functional.h"
#include "third_party/webrtc/api/video/video_frame.h"
#include "third_party/webrtc/media/base/vp9_profile.h"
#include "third_party/webrtc/modules/video_coding/codecs/h264/include/h264.h"
#include "third_party/webrtc/rtc_base/bind.h"
#include "third_party/webrtc/rtc_base/ref_count.h"
#include "third_party/webrtc/rtc_base/ref_counted_object.h"
#include "ui/gfx/color_space.h"

namespace WTF {

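// Allow media::VideoDecoderConfig to be passed unchanged across threads by
// the cross-thread binding helpers used below.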
template <>
struct CrossThreadCopier<media::VideoDecoderConfig>
    : public CrossThreadCopierPassThrough<media::VideoDecoderConfig> {
  STATIC_ONLY(CrossThreadCopier);
};

}  // namespace WTF

namespace blink {

namespace {

// Any reasonable size, will be overridden by the decoder anyway.
const gfx::Size kDefaultSize(640, 480);

// Maximum number of buffers that we will queue in |pending_buffers_|.
const int32_t kMaxPendingBuffers = 8;

// Maximum number of timestamps that will be maintained in |decode_timestamps_|.
// Really only needs to be a bit larger than the maximum reorder distance (which
// is presumably 0 for WebRTC), but being larger doesn't hurt much.
const int32_t kMaxDecodeHistory = 32;

// Maximum number of consecutive frames that can fail to decode before
// requesting fallback to software decode.
const int32_t kMaxConsecutiveErrors = 5;

// Map webrtc::VideoCodecType to media::VideoCodec.
media::VideoCodec ToVideoCodec(webrtc::VideoCodecType video_codec_type) {
  switch (video_codec_type) {
    case webrtc::kVideoCodecVP8:
      return media::kCodecVP8;
    case webrtc::kVideoCodecVP9:
      return media::kCodecVP9;
    case webrtc::kVideoCodecH264:
      return media::kCodecH264;
    default:
      return media::kUnknownVideoCodec;
  }
}

// Map webrtc::SdpVideoFormat to a guess for media::VideoCodecProfile.
media::VideoCodecProfile GuessVideoCodecProfile(
    const webrtc::SdpVideoFormat& format) {
  const webrtc::VideoCodecType video_codec_type =
      webrtc::PayloadStringToCodecType(format.name);
  switch (video_codec_type) {
    case webrtc::kVideoCodecVP8:
      return media::VP8PROFILE_ANY;
    case webrtc::kVideoCodecVP9: {
      const webrtc::VP9Profile vp9_profile =
          webrtc::ParseSdpForVP9Profile(format.parameters)
              .value_or(webrtc::VP9Profile::kProfile0);
      switch (vp9_profile) {
        case webrtc::VP9Profile::kProfile2:
          return media::VP9PROFILE_PROFILE2;
        case webrtc::VP9Profile::kProfile1:
          return media::VP9PROFILE_PROFILE1;
        case webrtc::VP9Profile::kProfile0:
        default:
          return media::VP9PROFILE_PROFILE0;
      }
      return media::VP9PROFILE_PROFILE0;
    }
    case webrtc::kVideoCodecH264:
      return media::H264PROFILE_BASELINE;
    default:
      return media::VIDEO_CODEC_PROFILE_UNKNOWN;
  }
}

void FinishWait(base::WaitableEvent* waiter, bool* result_out, bool result) {
  DVLOG(3) << __func__ << "(" << result << ")";
  *result_out = result;
  waiter->Signal();
}

void OnRequestOverlayInfo(bool decoder_requires_restart_for_overlay,
                          media::ProvideOverlayInfoCB overlay_info_cb) {
  // Android overlays are not supported.
  if (overlay_info_cb)
    std::move(overlay_info_cb).Run(media::OverlayInfo());
}

void RecordInitializationLatency(base::TimeDelta latency) {
  base::UmaHistogramTimes("Media.RTCVideoDecoderInitializationLatencyMs",
                          latency);
}

void RecordReinitializationLatency(base::TimeDelta latency) {
  base::UmaHistogramTimes("Media.RTCVideoDecoderReinitializationLatencyMs",
                          latency);
}

// These values are persisted to logs. Entries should not be renumbered and
// numeric values should never be reused.
enum class FallbackReason {
  kSpatialLayers = 0,
  kConsecutivePendingBufferOverflow = 1,
  kReinitializationFailed = 2,
  kPreviousErrorOnDecode = 3,
  kPreviousErrorOnRegisterCallback = 4,
  kMaxValue = kPreviousErrorOnRegisterCallback,
};

void RecordFallbackReason(media::VideoCodec codec,
                          FallbackReason fallback_reason) {
  switch (codec) {
    case media::VideoCodec::kCodecH264:
      base::UmaHistogramEnumeration("Media.RTCVideoDecoderFallbackReason.H264",
                                    fallback_reason);
      break;
    case media::VideoCodec::kCodecVP8:
      base::UmaHistogramEnumeration("Media.RTCVideoDecoderFallbackReason.Vp8",
                                    fallback_reason);
      break;
    case media::VideoCodec::kCodecVP9:
      base::UmaHistogramEnumeration("Media.RTCVideoDecoderFallbackReason.Vp9",
                                    fallback_reason);
      break;
    default:
      base::UmaHistogramEnumeration("Media.RTCVideoDecoderFallbackReason.Other",
                                    fallback_reason);
  }
}

}  // namespace

// static
std::vector<media::VideoDecoderImplementation>
RTCVideoDecoderAdapter::SupportedImplementations() {
#if defined(OS_WIN)
  if (base::FeatureList::IsEnabled(media::kD3D11VideoDecoder)) {
    // Push alternate ahead of default to prefer D3D11 decoders over DXVA.
    return {media::VideoDecoderImplementation::kAlternate,
            media::VideoDecoderImplementation::kDefault};
  }
#endif
  return {media::VideoDecoderImplementation::kDefault};
}

// static
std::unique_ptr<RTCVideoDecoderAdapter> RTCVideoDecoderAdapter::Create(
    media::GpuVideoAcceleratorFactories* gpu_factories,
    const webrtc::SdpVideoFormat& format) {
  DVLOG(1) << __func__ << "(" << format.name << ")";

  const webrtc::VideoCodecType video_codec_type =
      webrtc::PayloadStringToCodecType(format.name);

  if (!Platform::Current()->IsWebRtcHWH264DecodingEnabled(video_codec_type))
    return nullptr;

  // Bail early for unknown codecs.
  if (ToVideoCodec(video_codec_type) == media::kUnknownVideoCodec)
    return nullptr;

  // Avoid the thread hop if the decoder is known not to support the config.
  // TODO(sandersd): Predict size from level.
  media::VideoDecoderConfig config(
      ToVideoCodec(webrtc::PayloadStringToCodecType(format.name)),
      GuessVideoCodecProfile(format),
      media::VideoDecoderConfig::AlphaMode::kIsOpaque, media::VideoColorSpace(),
      media::kNoTransformation, kDefaultSize, gfx::Rect(kDefaultSize),
      kDefaultSize, media::EmptyExtraData(),
      media::EncryptionScheme::kUnencrypted);

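  // Try each supported implementation in preference order and return the
  // first adapter that initializes successfully; adapters that fail to
  // initialize are deleted on the media task runner.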
  for (auto impl : SupportedImplementations()) {
    std::unique_ptr<RTCVideoDecoderAdapter> rtc_video_decoder_adapter;
    if (gpu_factories->IsDecoderConfigSupported(impl, config) !=
        media::GpuVideoAcceleratorFactories::Supported::kFalse) {
      // Synchronously verify that the decoder can be initialized.
      rtc_video_decoder_adapter = base::WrapUnique(
          new RTCVideoDecoderAdapter(gpu_factories, config, format, impl));
      if (rtc_video_decoder_adapter->InitializeSync(config)) {
        return rtc_video_decoder_adapter;
      }
      // Initialization failed - post delete task and try next supported
      // implementation, if any.
      gpu_factories->GetTaskRunner()->DeleteSoon(
          FROM_HERE, std::move(rtc_video_decoder_adapter));
    }
  }

  return nullptr;
}

RTCVideoDecoderAdapter::RTCVideoDecoderAdapter(
    media::GpuVideoAcceleratorFactories* gpu_factories,
    const media::VideoDecoderConfig& config,
    const webrtc::SdpVideoFormat& format,
    media::VideoDecoderImplementation implementation)
    : media_task_runner_(gpu_factories->GetTaskRunner()),
      gpu_factories_(gpu_factories),
      format_(format),
      implementation_(implementation),
      config_(config) {
  DVLOG(1) << __func__;
  DETACH_FROM_SEQUENCE(decoding_sequence_checker_);
  DETACH_FROM_SEQUENCE(media_sequence_checker_);
  weak_this_ = weak_this_factory_.GetWeakPtr();
}

RTCVideoDecoderAdapter::~RTCVideoDecoderAdapter() {
  DVLOG(1) << __func__;
  DCHECK_CALLED_ON_VALID_SEQUENCE(media_sequence_checker_);
}

bool RTCVideoDecoderAdapter::InitializeSync(
    const media::VideoDecoderConfig& config) {
  DVLOG(3) << __func__;
  // Can be called on |worker_thread_| or |decoding_thread_|.
  DCHECK(!media_task_runner_->RunsTasksInCurrentSequence());
  base::TimeTicks start_time = base::TimeTicks::Now();

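  // Post initialization to the media thread and block this thread until the
  // result is signaled (or the wait times out).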
  base::ScopedAllowBaseSyncPrimitivesOutsideBlockingScope allow_wait;
  bool result = false;
  base::WaitableEvent waiter(base::WaitableEvent::ResetPolicy::MANUAL,
                             base::WaitableEvent::InitialState::NOT_SIGNALED);
  auto init_cb =
      CrossThreadBindOnce(&FinishWait, CrossThreadUnretained(&waiter),
                          CrossThreadUnretained(&result));
  if (PostCrossThreadTask(
          *media_task_runner_.get(), FROM_HERE,
          CrossThreadBindOnce(&RTCVideoDecoderAdapter::InitializeOnMediaThread,
                              CrossThreadUnretained(this), config,
                              std::move(init_cb)))) {
    // TODO(crbug.com/1076817) Remove if a root cause is found.
    if (!waiter.TimedWait(base::TimeDelta::FromSeconds(10))) {
      RecordInitializationLatency(base::TimeTicks::Now() - start_time);
      return false;
    }

    RecordInitializationLatency(base::TimeTicks::Now() - start_time);
  }
  return result;
}

int32_t RTCVideoDecoderAdapter::InitDecode(
    const webrtc::VideoCodec* codec_settings,
    int32_t number_of_cores) {
  DVLOG(1) << __func__;
  DCHECK_CALLED_ON_VALID_SEQUENCE(decoding_sequence_checker_);

  video_codec_type_ = codec_settings->codecType;
  DCHECK_EQ(webrtc::PayloadStringToCodecType(format_.name), video_codec_type_);

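  // If a previous decode error has already put the adapter into its permanent
  // error state, report that we are uninitialized.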
  base::AutoLock auto_lock(lock_);
  UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoDecoderInitDecodeSuccess", !has_error_);
  if (!has_error_) {
    UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoDecoderProfile",
                              GuessVideoCodecProfile(format_),
                              media::VIDEO_CODEC_PROFILE_MAX + 1);
  }
  return has_error_ ? WEBRTC_VIDEO_CODEC_UNINITIALIZED : WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoderAdapter::Decode(const webrtc::EncodedImage& input_image,
                                       bool missing_frames,
                                       int64_t render_time_ms) {
  DVLOG(2) << __func__;
  DCHECK_CALLED_ON_VALID_SEQUENCE(decoding_sequence_checker_);

  // Hardware VP9 decoders generally don't handle more than one spatial layer,
  // so fall back to software decoding unless hardware kSVC decoding is
  // enabled below. See https://crbug.com/webrtc/9304.
  if (video_codec_type_ == webrtc::kVideoCodecVP9 &&
      input_image.SpatialIndex().value_or(0) > 0) {
#if defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_ASH)
    if (!base::FeatureList::IsEnabled(media::kVp9kSVCHWDecoding)) {
      RecordFallbackReason(config_.codec(), FallbackReason::kSpatialLayers);
      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
    }
#else
    RecordFallbackReason(config_.codec(), FallbackReason::kSpatialLayers);
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
#endif  // defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_ASH)
  }

  if (missing_frames) {
    DVLOG(2) << "Missing frames";
    // We probably can't handle broken frames. Request a key frame.
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (key_frame_required_) {
    // We discarded a previous frame because there were too many pending
    // buffers (see the overflow logic below). Now we need to wait for a key
    // frame and discard everything else until it arrives.
    if (input_image._frameType != webrtc::VideoFrameType::kVideoFrameKey) {
      DVLOG(2) << "Discard non-key frame";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    DVLOG(2) << "Key frame received, resume decoding";
    // OK, we got a key frame and can continue decoding.
    key_frame_required_ = false;
  }

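  // Collect the encoded size of each spatial layer present in this input so
  // that, for multi-layer frames, they can be attached to the decoder buffer
  // as side data below.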
  std::vector<uint32_t> spatial_layer_frame_size;
  size_t max_sl_index = input_image.SpatialIndex().value_or(0);
  for (size_t i = 0; i <= max_sl_index; i++) {
    auto frame_size = input_image.SpatialLayerFrameSize(i);
    if (!frame_size)
      continue;
    spatial_layer_frame_size.push_back(*frame_size);
  }

  // Convert to media::DecoderBuffer.
  // TODO(sandersd): What is |render_time_ms|?
  scoped_refptr<media::DecoderBuffer> buffer;
  if (spatial_layer_frame_size.size() > 1) {
    const uint8_t* side_data =
        reinterpret_cast<const uint8_t*>(spatial_layer_frame_size.data());
    size_t side_data_size =
        spatial_layer_frame_size.size() * sizeof(uint32_t) / sizeof(uint8_t);
    buffer = media::DecoderBuffer::CopyFrom(
        input_image.data(), input_image.size(), side_data, side_data_size);
  } else {
    buffer =
        media::DecoderBuffer::CopyFrom(input_image.data(), input_image.size());
  }
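  // Store the RTP timestamp (interpreted as microseconds) on the buffer so
  // that decoded frames can be matched back to it in OnOutput().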
  buffer->set_timestamp(
      base::TimeDelta::FromMicroseconds(input_image.Timestamp()));

  if (ShouldReinitializeForSettingHDRColorSpace(input_image)) {
    config_.set_color_space_info(
        blink::WebRtcToMediaVideoColorSpace(*input_image.ColorSpace()));
    if (!ReinitializeSync(config_)) {
      RecordFallbackReason(config_.codec(),
                           FallbackReason::kReinitializationFailed);
      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
    }
    if (input_image._frameType != webrtc::VideoFrameType::kVideoFrameKey)
      return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Queue for decoding.
  {
    base::AutoLock auto_lock(lock_);
    if (has_error_) {
      RecordFallbackReason(config_.codec(),
                           FallbackReason::kPreviousErrorOnDecode);
      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
    }

    if (pending_buffers_.size() >= kMaxPendingBuffers) {
      // We are severely behind. Drop pending buffers and request a keyframe to
      // catch up as quickly as possible.
      DVLOG(2) << "Pending buffers overflow";
      pending_buffers_.clear();
      // We just discarded a frame, so we must now wait for a key frame and
      // drop any other non-key frames until it arrives.
      key_frame_required_ = true;
      if (++consecutive_error_count_ > kMaxConsecutiveErrors) {
        decode_timestamps_.clear();
        RecordFallbackReason(config_.codec(),
                             FallbackReason::kConsecutivePendingBufferOverflow);
        return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
      }
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    pending_buffers_.push_back(std::move(buffer));
  }
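  // Wake the media thread to drain |pending_buffers_|.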
  PostCrossThreadTask(
      *media_task_runner_.get(), FROM_HERE,
      CrossThreadBindOnce(&RTCVideoDecoderAdapter::DecodeOnMediaThread,
                          weak_this_));

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoderAdapter::RegisterDecodeCompleteCallback(
    webrtc::DecodedImageCallback* callback) {
  DVLOG(2) << __func__;
  DCHECK_CALLED_ON_VALID_SEQUENCE(decoding_sequence_checker_);
  DCHECK(callback);

  base::AutoLock auto_lock(lock_);
  decode_complete_callback_ = callback;
  if (has_error_) {
    RecordFallbackReason(config_.codec(),
                         FallbackReason::kPreviousErrorOnRegisterCallback);
  }
  return has_error_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE
                    : WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoderAdapter::Release() {
  DVLOG(1) << __func__;

  base::AutoLock auto_lock(lock_);
  pending_buffers_.clear();
  decode_timestamps_.clear();
  return has_error_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE
                    : WEBRTC_VIDEO_CODEC_OK;
}

const char* RTCVideoDecoderAdapter::ImplementationName() const {
  return "ExternalDecoder";
}

void RTCVideoDecoderAdapter::InitializeOnMediaThread(
    const media::VideoDecoderConfig& config,
    InitCB init_cb) {
  DVLOG(3) << __func__;
  DCHECK_CALLED_ON_VALID_SEQUENCE(media_sequence_checker_);

  // On ReinitializeSync() calls, |video_decoder_| may already be set.
  if (!video_decoder_) {
    // TODO(sandersd): Plumb a real log sink here so that we can contribute to
    // the media-internals UI. The current log just discards all messages.
    media_log_ = std::make_unique<media::NullMediaLog>();

    video_decoder_ = gpu_factories_->CreateVideoDecoder(
        media_log_.get(), implementation_,
        WTF::BindRepeating(&OnRequestOverlayInfo));

    if (!video_decoder_) {
      PostCrossThreadTask(*media_task_runner_.get(), FROM_HERE,
                          CrossThreadBindOnce(std::move(init_cb), false));
      return;
    }
  }

  // In practice this is ignored by hardware decoders.
  bool low_delay = true;

  // Encryption is not supported.
  media::CdmContext* cdm_context = nullptr;

  media::VideoDecoder::OutputCB output_cb = ConvertToBaseRepeatingCallback(
      CrossThreadBindRepeating(&RTCVideoDecoderAdapter::OnOutput, weak_this_));
  video_decoder_->Initialize(
      config, low_delay, cdm_context,
      base::BindOnce(&RTCVideoDecoderAdapter::OnInitializeDone,
                     ConvertToBaseOnceCallback(std::move(init_cb))),
      output_cb, base::DoNothing());
}

// static
void RTCVideoDecoderAdapter::OnInitializeDone(base::OnceCallback<void(bool)> cb,
                                              media::Status status) {
  std::move(cb).Run(status.is_ok());
}

void RTCVideoDecoderAdapter::DecodeOnMediaThread() {
  DVLOG(4) << __func__;
  DCHECK_CALLED_ON_VALID_SEQUENCE(media_sequence_checker_);

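  // Keep the decoder saturated: submit pending buffers until the decoder's
  // limit on outstanding decode requests is reached or the queue is empty.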
  int max_decode_requests = video_decoder_->GetMaxDecodeRequests();
  while (outstanding_decode_requests_ < max_decode_requests) {
    scoped_refptr<media::DecoderBuffer> buffer;
    {
      base::AutoLock auto_lock(lock_);

      // Take the first pending buffer.
      if (pending_buffers_.empty())
        return;
      buffer = pending_buffers_.front();
      pending_buffers_.pop_front();

      // Record the timestamp.
      while (decode_timestamps_.size() >= kMaxDecodeHistory)
        decode_timestamps_.pop_front();
      decode_timestamps_.push_back(buffer->timestamp());
    }

    // Submit for decoding.
    outstanding_decode_requests_++;
    video_decoder_->Decode(
        std::move(buffer),
        WTF::BindRepeating(&RTCVideoDecoderAdapter::OnDecodeDone, weak_this_));
  }
}

void RTCVideoDecoderAdapter::OnDecodeDone(media::Status status) {
  DVLOG(3) << __func__ << "(" << status.code() << ")";
  DCHECK_CALLED_ON_VALID_SEQUENCE(media_sequence_checker_);

  outstanding_decode_requests_--;

  if (!status.is_ok() && status.code() != media::StatusCode::kAborted) {
    DVLOG(2) << "Entering permanent error state";
    UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoDecoderError",
                              media::VideoDecodeAccelerator::PLATFORM_FAILURE,
                              media::VideoDecodeAccelerator::ERROR_MAX + 1);

    base::AutoLock auto_lock(lock_);
    has_error_ = true;
    pending_buffers_.clear();
    decode_timestamps_.clear();
    return;
  }

  DecodeOnMediaThread();
}

void RTCVideoDecoderAdapter::OnOutput(scoped_refptr<media::VideoFrame> frame) {
  DVLOG(3) << __func__;
  DCHECK_CALLED_ON_VALID_SEQUENCE(media_sequence_checker_);

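  // The buffer timestamp set in Decode() carries the original RTP timestamp
  // (in microseconds); recover it here to rebuild the webrtc::VideoFrame.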
  const base::TimeDelta timestamp = frame->timestamp();
  webrtc::VideoFrame rtc_frame =
      webrtc::VideoFrame::Builder()
          .set_video_frame_buffer(
              new rtc::RefCountedObject<blink::WebRtcVideoFrameAdapter>(
                  std::move(frame)))
          .set_timestamp_rtp(static_cast<uint32_t>(timestamp.InMicroseconds()))
          .set_timestamp_us(0)
          .set_rotation(webrtc::kVideoRotation_0)
          .build();

  base::AutoLock auto_lock(lock_);

  if (!base::Contains(decode_timestamps_, timestamp)) {
    DVLOG(2) << "Discarding frame with timestamp " << timestamp;
    return;
  }

  // Assumes that Decoded() can be safely called with the lock held, which
  // apparently it can be because RTCVideoDecoder does the same.
  DCHECK(decode_complete_callback_);
  decode_complete_callback_->Decoded(rtc_frame);
  consecutive_error_count_ = 0;
}

bool RTCVideoDecoderAdapter::ShouldReinitializeForSettingHDRColorSpace(
    const webrtc::EncodedImage& input_image) const {
  DCHECK_CALLED_ON_VALID_SEQUENCE(decoding_sequence_checker_);

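  // Only VP9 profile 2 streams whose signaled color space differs from the
  // current config require reinitializing the decoder.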
  if (config_.profile() == media::VP9PROFILE_PROFILE2 &&
      input_image.ColorSpace()) {
    const media::VideoColorSpace& new_color_space =
        blink::WebRtcToMediaVideoColorSpace(*input_image.ColorSpace());
    if (!config_.color_space_info().IsSpecified() ||
        new_color_space != config_.color_space_info()) {
      return true;
    }
  }
  return false;
}

bool RTCVideoDecoderAdapter::ReinitializeSync(
    const media::VideoDecoderConfig& config) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(decoding_sequence_checker_);

  base::TimeTicks start_time = base::TimeTicks::Now();
  base::ScopedAllowBaseSyncPrimitivesOutsideBlockingScope allow_wait;
  bool result = false;
  base::WaitableEvent waiter(base::WaitableEvent::ResetPolicy::MANUAL,
                             base::WaitableEvent::InitialState::NOT_SIGNALED);
  auto init_cb =
      CrossThreadBindOnce(&FinishWait, CrossThreadUnretained(&waiter),
                          CrossThreadUnretained(&result));
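  // Flush the decoder first; on success, reinitialize with the new config,
  // otherwise signal the waiter directly with a failure result.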
  FlushDoneCB flush_success_cb =
      CrossThreadBindOnce(&RTCVideoDecoderAdapter::InitializeOnMediaThread,
                          weak_this_, config, std::move(init_cb));
  FlushDoneCB flush_fail_cb =
      CrossThreadBindOnce(&FinishWait, CrossThreadUnretained(&waiter),
                          CrossThreadUnretained(&result), false);
  if (PostCrossThreadTask(
          *media_task_runner_.get(), FROM_HERE,
          CrossThreadBindOnce(&RTCVideoDecoderAdapter::FlushOnMediaThread,
                              weak_this_, std::move(flush_success_cb),
                              std::move(flush_fail_cb)))) {
    waiter.Wait();
    RecordReinitializationLatency(base::TimeTicks::Now() - start_time);
  }
  return result;
}

void RTCVideoDecoderAdapter::FlushOnMediaThread(FlushDoneCB flush_success_cb,
                                                FlushDoneCB flush_fail_cb) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(media_sequence_checker_);

  // Remove any pending tasks.
  {
    base::AutoLock auto_lock(lock_);
    pending_buffers_.clear();
  }

  // Send EOS frame for flush.
  video_decoder_->Decode(
      media::DecoderBuffer::CreateEOSBuffer(),
      WTF::BindRepeating(
          [](FlushDoneCB flush_success, FlushDoneCB flush_fail,
             media::Status status) {
            if (status.is_ok())
              std::move(flush_success).Run();
            else
              std::move(flush_fail).Run();
          },
          base::Passed(&flush_success_cb), base::Passed(&flush_fail_cb)));
}

}  // namespace blink