// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "third_party/blink/renderer/modules/peerconnection/media_stream_remote_video_source.h"

#include <stdint.h>
#include <utility>

#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/single_thread_task_runner.h"
#include "base/trace_event/trace_event.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/timestamp_constants.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "third_party/blink/public/mojom/mediastream/media_stream.mojom-blink.h"
#include "third_party/blink/renderer/platform/scheduler/public/post_cross_thread_task.h"
#include "third_party/blink/renderer/platform/webrtc/track_observer.h"
#include "third_party/blink/renderer/platform/webrtc/webrtc_video_frame_adapter.h"
#include "third_party/blink/renderer/platform/webrtc/webrtc_video_utils.h"
#include "third_party/blink/renderer/platform/wtf/cross_thread_functional.h"
#include "third_party/blink/renderer/platform/wtf/functional.h"
#include "third_party/blink/renderer/platform/wtf/thread_safe_ref_counted.h"
#include "third_party/webrtc/api/video/i420_buffer.h"
#include "third_party/webrtc/api/video/recordable_encoded_frame.h"
#include "third_party/webrtc/rtc_base/time_utils.h"
#include "third_party/webrtc/system_wrappers/include/clock.h"

namespace WTF {

// Template specialization of [1], needed to be able to pass a
// scoped_refptr<webrtc::VideoFrameBuffer> in WTF callbacks across threads.
//
// [1] third_party/blink/renderer/platform/wtf/cross_thread_copier.h.
template <>
struct CrossThreadCopier<scoped_refptr<webrtc::VideoFrameBuffer>>
    : public CrossThreadCopierPassThrough<
          scoped_refptr<webrtc::VideoFrameBuffer>> {
  STATIC_ONLY(CrossThreadCopier);
};

}  // namespace WTF

namespace blink {

namespace {

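// Adapter that exposes a webrtc::RecordableEncodedFrame through Blink's
// EncodedVideoFrame interface. The encoded payload buffer is ref-counted and
// shared; the codec, key-frame flag, resolution and (optional) color space
// are captured at construction time.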
class WebRtcEncodedVideoFrame : public EncodedVideoFrame {
 public:
  explicit WebRtcEncodedVideoFrame(const webrtc::RecordableEncodedFrame& frame)
      : buffer_(frame.encoded_buffer()),
        codec_(FromWebRtcVideoCodec(frame.codec())),
        is_key_frame_(frame.is_key_frame()),
        resolution_(frame.resolution().width, frame.resolution().height) {
    if (frame.color_space()) {
      color_space_ = WebRtcToMediaVideoColorSpace(*frame.color_space());
    }
  }

  base::span<const uint8_t> Data() const override {
    return base::make_span(buffer_->data(), buffer_->size());
  }

  media::VideoCodec Codec() const override { return codec_; }

  bool IsKeyFrame() const override { return is_key_frame_; }

  base::Optional<media::VideoColorSpace> ColorSpace() const override {
    return color_space_;
  }

  gfx::Size Resolution() const override { return resolution_; }

 private:
  static media::VideoCodec FromWebRtcVideoCodec(webrtc::VideoCodecType codec) {
    switch (codec) {
      case webrtc::kVideoCodecVP8:
        return media::kCodecVP8;
      case webrtc::kVideoCodecVP9:
        return media::kCodecVP9;
      case webrtc::kVideoCodecH264:
        return media::kCodecH264;
      default:
        return media::kUnknownVideoCodec;
    }
  }

  rtc::scoped_refptr<const webrtc::EncodedImageBufferInterface> buffer_;
  media::VideoCodec codec_;
  bool is_key_frame_;
  base::Optional<media::VideoColorSpace> color_space_;
  gfx::Size resolution_;
};

}  // namespace

// Internal class used for receiving frames from the webrtc track on a
// libjingle thread and forwarding them to the IO thread.
class MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate
    : public WTF::ThreadSafeRefCounted<RemoteVideoSourceDelegate>,
      public rtc::VideoSinkInterface<webrtc::VideoFrame>,
      public rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame> {
 public:
  RemoteVideoSourceDelegate(
      scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
      VideoCaptureDeliverFrameCB new_frame_callback,
      EncodedVideoFrameCB encoded_frame_callback);

 protected:
  friend class WTF::ThreadSafeRefCounted<RemoteVideoSourceDelegate>;
  ~RemoteVideoSourceDelegate() override;

  // Implements rtc::VideoSinkInterface used for receiving video frames
  // from the PeerConnection video track. May be called on a libjingle internal
  // thread.
  void OnFrame(const webrtc::VideoFrame& frame) override;

  // VideoSinkInterface<webrtc::RecordableEncodedFrame>
  void OnFrame(const webrtc::RecordableEncodedFrame& frame) override;

  void DoRenderFrameOnIOThread(scoped_refptr<media::VideoFrame> video_frame,
                               base::TimeTicks estimated_capture_time);

 private:
  void OnEncodedVideoFrameOnIO(scoped_refptr<EncodedVideoFrame> frame,
                               base::TimeTicks estimated_capture_time);

  scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;

  // |frame_callback_| is accessed on the IO thread.
  VideoCaptureDeliverFrameCB frame_callback_;

  // |encoded_frame_callback_| is accessed on the IO thread.
  EncodedVideoFrameCB encoded_frame_callback_;

  // Timestamp of the first received frame.
  base::TimeDelta start_timestamp_;

  // Offset between the WebRTC clock (rtc::TimeMicros()) and Chromium's
  // base::TimeTicks clock, sampled at construction.
  const base::TimeDelta time_diff_;

  // Timestamp of the first received encoded frame.
  base::TimeDelta start_timestamp_encoded_;

  // Same WebRTC/Chromium clock offset, sampled for the encoded-frame path.
  const base::TimeDelta time_diff_encoded_;

  // WebRTC real time clock, needed to determine NTP offset.
  webrtc::Clock* clock_;

  // Offset between NTP clock and WebRTC clock.
  const int64_t ntp_offset_;
};

MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::
    RemoteVideoSourceDelegate(
        scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
        VideoCaptureDeliverFrameCB new_frame_callback,
        EncodedVideoFrameCB encoded_frame_callback)
    : io_task_runner_(io_task_runner),
      frame_callback_(std::move(new_frame_callback)),
      encoded_frame_callback_(std::move(encoded_frame_callback)),
      start_timestamp_(media::kNoTimestamp),
      // TODO(qiangchen): There can be two differences between clocks: 1)
      // the offset, 2) the rate (i.e., one clock runs faster than the other).
      // See http://crbug/516700
      time_diff_(base::TimeTicks::Now() - base::TimeTicks() -
                 base::TimeDelta::FromMicroseconds(rtc::TimeMicros())),
      start_timestamp_encoded_(media::kNoTimestamp),
      time_diff_encoded_(base::TimeTicks::Now() - base::TimeTicks() -
                         base::TimeDelta::FromMicroseconds(rtc::TimeMicros())),
      clock_(webrtc::Clock::GetRealTimeClock()),
      ntp_offset_(clock_->TimeInMilliseconds() -
                  clock_->CurrentNtpInMilliseconds()) {}

MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::
    ~RemoteVideoSourceDelegate() = default;

void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::OnFrame(
    const webrtc::VideoFrame& incoming_frame) {
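  // A timestamp_us() of zero means "render immediately": use the current
  // time. Otherwise re-base the WebRTC timestamp, taken on the
  // rtc::TimeMicros() clock, onto Chromium's base::TimeTicks timeline by
  // adding the time_diff_ offset sampled at construction.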
  const bool render_immediately = incoming_frame.timestamp_us() == 0;
  const base::TimeTicks current_time = base::TimeTicks::Now();
  const base::TimeDelta incoming_timestamp =
      render_immediately
          ? current_time - base::TimeTicks()
          : base::TimeDelta::FromMicroseconds(incoming_frame.timestamp_us());
  const base::TimeTicks render_time =
      render_immediately ? base::TimeTicks() + incoming_timestamp
                         : base::TimeTicks() + incoming_timestamp + time_diff_;
  if (start_timestamp_ == media::kNoTimestamp)
    start_timestamp_ = incoming_timestamp;
  const base::TimeDelta elapsed_timestamp =
      incoming_timestamp - start_timestamp_;
  TRACE_EVENT2("webrtc", "RemoteVideoSourceDelegate::RenderFrame",
               "Ideal Render Instant", render_time.ToInternalValue(),
               "Timestamp", elapsed_timestamp.InMicroseconds());

  scoped_refptr<media::VideoFrame> video_frame;
  scoped_refptr<webrtc::VideoFrameBuffer> buffer(
      incoming_frame.video_frame_buffer());
  const gfx::Size size(buffer->width(), buffer->height());

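  // Map the WebRTC buffer type to a matching media::VideoFrame. Native
  // buffers already wrap a media::VideoFrame; the other types are wrapped
  // without copying pixel data, so |buffer| must outlive |video_frame| (see
  // the destruction observer below).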
  switch (buffer->type()) {
    case webrtc::VideoFrameBuffer::Type::kNative: {
      video_frame = static_cast<WebRtcVideoFrameAdapter*>(buffer.get())
                        ->getMediaVideoFrame();
      video_frame->set_timestamp(elapsed_timestamp);
      break;
    }
    case webrtc::VideoFrameBuffer::Type::kI420A: {
      const webrtc::I420ABufferInterface* yuva_buffer = buffer->GetI420A();
      video_frame = media::VideoFrame::WrapExternalYuvaData(
          media::PIXEL_FORMAT_I420A, size, gfx::Rect(size), size,
          yuva_buffer->StrideY(), yuva_buffer->StrideU(),
          yuva_buffer->StrideV(), yuva_buffer->StrideA(),
          const_cast<uint8_t*>(yuva_buffer->DataY()),
          const_cast<uint8_t*>(yuva_buffer->DataU()),
          const_cast<uint8_t*>(yuva_buffer->DataV()),
          const_cast<uint8_t*>(yuva_buffer->DataA()), elapsed_timestamp);
      break;
    }
    case webrtc::VideoFrameBuffer::Type::kI420: {
      const webrtc::I420BufferInterface* yuv_buffer = buffer->GetI420();
      video_frame = media::VideoFrame::WrapExternalYuvData(
          media::PIXEL_FORMAT_I420, size, gfx::Rect(size), size,
          yuv_buffer->StrideY(), yuv_buffer->StrideU(), yuv_buffer->StrideV(),
          const_cast<uint8_t*>(yuv_buffer->DataY()),
          const_cast<uint8_t*>(yuv_buffer->DataU()),
          const_cast<uint8_t*>(yuv_buffer->DataV()), elapsed_timestamp);
      break;
    }
    case webrtc::VideoFrameBuffer::Type::kI444: {
      const webrtc::I444BufferInterface* yuv_buffer = buffer->GetI444();
      video_frame = media::VideoFrame::WrapExternalYuvData(
          media::PIXEL_FORMAT_I444, size, gfx::Rect(size), size,
          yuv_buffer->StrideY(), yuv_buffer->StrideU(), yuv_buffer->StrideV(),
          const_cast<uint8_t*>(yuv_buffer->DataY()),
          const_cast<uint8_t*>(yuv_buffer->DataU()),
          const_cast<uint8_t*>(yuv_buffer->DataV()), elapsed_timestamp);
      break;
    }
    case webrtc::VideoFrameBuffer::Type::kI010: {
      const webrtc::I010BufferInterface* yuv_buffer = buffer->GetI010();
      // WebRTC defines I010 data as uint16 whereas Chromium uses uint8 for
      // all video formats, so conversion and cast is needed.
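      // The strides WebRTC reports are in uint16 units, hence they are
      // doubled here to get byte strides, and the uint16 plane pointers are
      // reinterpreted as byte pointers.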
      video_frame = media::VideoFrame::WrapExternalYuvData(
          media::PIXEL_FORMAT_YUV420P10, size, gfx::Rect(size), size,
          yuv_buffer->StrideY() * 2, yuv_buffer->StrideU() * 2,
          yuv_buffer->StrideV() * 2,
          const_cast<uint8_t*>(
              reinterpret_cast<const uint8_t*>(yuv_buffer->DataY())),
          const_cast<uint8_t*>(
              reinterpret_cast<const uint8_t*>(yuv_buffer->DataU())),
          const_cast<uint8_t*>(
              reinterpret_cast<const uint8_t*>(yuv_buffer->DataV())),
          elapsed_timestamp);
      break;
    }
    case webrtc::VideoFrameBuffer::Type::kNV12: {
      const webrtc::NV12BufferInterface* nv12_buffer = buffer->GetNV12();
      video_frame = media::VideoFrame::WrapExternalYuvData(
          media::PIXEL_FORMAT_NV12, size, gfx::Rect(size), size,
          nv12_buffer->StrideY(), nv12_buffer->StrideUV(),
          const_cast<uint8_t*>(nv12_buffer->DataY()),
          const_cast<uint8_t*>(nv12_buffer->DataUV()), elapsed_timestamp);
      break;
    }
    default:
      NOTREACHED();
  }

  if (!video_frame)
    return;

  // The bind ensures that we keep a reference to the underlying buffer.
  if (buffer->type() != webrtc::VideoFrameBuffer::Type::kNative) {
    video_frame->AddDestructionObserver(ConvertToBaseOnceCallback(
        CrossThreadBindOnce(base::DoNothing::Once<
                                const scoped_refptr<rtc::RefCountInterface>&>(),
                            buffer)));
  }

  // Rotation is only propagated when explicitly set to something other than
  // the default kVideoRotation_0.
  if (incoming_frame.rotation() != webrtc::kVideoRotation_0) {
    video_frame->metadata()->rotation =
        WebRtcToMediaVideoRotation(incoming_frame.rotation());
  }

  if (incoming_frame.color_space()) {
    video_frame->set_color_space(
        WebRtcToMediaVideoColorSpace(*incoming_frame.color_space())
            .ToGfxColorSpace());
  }

  // Run render smoothness algorithm only when we don't have to render
  // immediately.
  if (!render_immediately)
    video_frame->metadata()->reference_time = render_time;

  if (incoming_frame.max_composition_delay_in_frames()) {
    video_frame->metadata()->maximum_composition_delay_in_frames =
        *incoming_frame.max_composition_delay_in_frames();
  }

  video_frame->metadata()->decode_end_time = current_time;

  // RTP_TIMESTAMP, PROCESSING_TIME, and CAPTURE_BEGIN_TIME are all exposed
  // through the JavaScript callback mechanism
  // video.requestVideoFrameCallback().
  video_frame->metadata()->rtp_timestamp =
      static_cast<double>(incoming_frame.timestamp());

  if (incoming_frame.processing_time()) {
    video_frame->metadata()->processing_time =
        base::TimeDelta::FromMicroseconds(
            incoming_frame.processing_time()->Elapsed().us());
  }

  // Set capture time to the NTP time, which is the estimated capture time
  // converted to the local clock.
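  // ntp_time_ms() + ntp_offset_ moves the sender-reported NTP time onto the
  // local WebRTC clock; adding time_diff_ then moves it onto the
  // base::TimeTicks timeline, mirroring the render_time computation above.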
  if (incoming_frame.ntp_time_ms() > 0) {
    const base::TimeTicks capture_time =
        base::TimeTicks() +
        base::TimeDelta::FromMilliseconds(incoming_frame.ntp_time_ms() +
                                          ntp_offset_) +
        time_diff_;
    video_frame->metadata()->capture_begin_time = capture_time;
  }

  // Set receive time to arrival of last packet.
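  // packet_infos() is not guaranteed to be ordered by arrival time (e.g.
  // with retransmissions), so scan for the largest receive_time_ms().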
  if (!incoming_frame.packet_infos().empty()) {
    int64_t last_packet_arrival_ms =
        std::max_element(
            incoming_frame.packet_infos().cbegin(),
            incoming_frame.packet_infos().cend(),
            [](const webrtc::RtpPacketInfo& a, const webrtc::RtpPacketInfo& b) {
              return a.receive_time_ms() < b.receive_time_ms();
            })
            ->receive_time_ms();
    const base::TimeTicks receive_time =
        base::TimeTicks() +
        base::TimeDelta::FromMilliseconds(last_packet_arrival_ms) + time_diff_;
    video_frame->metadata()->receive_time = receive_time;
  }

  // Use our computed render time as estimated capture time. If timestamp_us()
  // (which is actually the suggested render time) is set by WebRTC, it's based
  // on the RTP timestamps in the frame's packets, so congruent with the
  // received frame capture timestamps. If set by us, it's as congruent as we
  // can get with the timestamp sequence of frames we received.
  PostCrossThreadTask(
      *io_task_runner_, FROM_HERE,
      CrossThreadBindOnce(&RemoteVideoSourceDelegate::DoRenderFrameOnIOThread,
                          WrapRefCounted(this), video_frame, render_time));
}

void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::
    DoRenderFrameOnIOThread(scoped_refptr<media::VideoFrame> video_frame,
                            base::TimeTicks estimated_capture_time) {
  DCHECK(io_task_runner_->BelongsToCurrentThread());
  TRACE_EVENT0("webrtc", "RemoteVideoSourceDelegate::DoRenderFrameOnIOThread");
  frame_callback_.Run(std::move(video_frame), estimated_capture_time);
}

void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::OnFrame(
    const webrtc::RecordableEncodedFrame& frame) {
  const bool render_immediately = frame.render_time().us() == 0;
  const base::TimeTicks current_time = base::TimeTicks::Now();
  const base::TimeDelta incoming_timestamp =
      render_immediately
          ? current_time - base::TimeTicks()
          : base::TimeDelta::FromMicroseconds(frame.render_time().us());
  const base::TimeTicks render_time =
      render_immediately
          ? base::TimeTicks() + incoming_timestamp
          : base::TimeTicks() + incoming_timestamp + time_diff_encoded_;
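  // As in the decoded path above, a zero render time means "render
  // immediately"; otherwise the WebRTC render timestamp is re-based onto the
  // base::TimeTicks timeline via time_diff_encoded_.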

  // Use our computed render time as estimated capture time. If render_time()
  // is set by WebRTC, it's based on the RTP timestamps in the frame's packets,
  // so congruent with the received frame capture timestamps. If set by us, it's
  // as congruent as we can get with the timestamp sequence of frames we
  // received.
  PostCrossThreadTask(
      *io_task_runner_, FROM_HERE,
      CrossThreadBindOnce(&RemoteVideoSourceDelegate::OnEncodedVideoFrameOnIO,
                          WrapRefCounted(this),
                          base::MakeRefCounted<WebRtcEncodedVideoFrame>(frame),
                          render_time));
}

void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::
    OnEncodedVideoFrameOnIO(scoped_refptr<EncodedVideoFrame> frame,
                            base::TimeTicks estimated_capture_time) {
  DCHECK(io_task_runner_->BelongsToCurrentThread());
  encoded_frame_callback_.Run(std::move(frame), estimated_capture_time);
}

MediaStreamRemoteVideoSource::MediaStreamRemoteVideoSource(
    std::unique_ptr<TrackObserver> observer)
    : observer_(std::move(observer)) {
  // The callback will be automatically cleared when 'observer_' goes out of
  // scope and no further callbacks will occur.
  observer_->SetCallback(WTF::BindRepeating(
      &MediaStreamRemoteVideoSource::OnChanged, WTF::Unretained(this)));
}

MediaStreamRemoteVideoSource::~MediaStreamRemoteVideoSource() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  DCHECK(!observer_);
}

void MediaStreamRemoteVideoSource::OnSourceTerminated() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  StopSourceImpl();
}

void MediaStreamRemoteVideoSource::StartSourceImpl(
    VideoCaptureDeliverFrameCB frame_callback,
    EncodedVideoFrameCB encoded_frame_callback) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  DCHECK(!delegate_.get());
  delegate_ = base::MakeRefCounted<RemoteVideoSourceDelegate>(
      io_task_runner(), std::move(frame_callback),
      std::move(encoded_frame_callback));
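  // observer_->track() is known to be a video track for this source, so the
  // static_cast from MediaStreamTrackInterface is safe; the same pattern is
  // used throughout this file.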
  scoped_refptr<webrtc::VideoTrackInterface> video_track(
      static_cast<webrtc::VideoTrackInterface*>(observer_->track().get()));
  video_track->AddOrUpdateSink(delegate_.get(), rtc::VideoSinkWants());
  OnStartDone(mojom::MediaStreamRequestResult::OK);
}

void MediaStreamRemoteVideoSource::StopSourceImpl() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  // StopSourceImpl is called either when MediaStreamTrack.stop is called from
  // JS, when Blink garbage-collects the MediaStreamSource object, or when
  // OnSourceTerminated() is called. Garbage collection happens after the
  // PeerConnection no longer receives the video track.
  if (!observer_)
    return;
  DCHECK(state() != MediaStreamVideoSource::ENDED);
  scoped_refptr<webrtc::VideoTrackInterface> video_track(
      static_cast<webrtc::VideoTrackInterface*>(observer_->track().get()));
  video_track->RemoveSink(delegate_.get());
  // This removes the references to the webrtc video track.
  observer_.reset();
}

rtc::VideoSinkInterface<webrtc::VideoFrame>*
MediaStreamRemoteVideoSource::SinkInterfaceForTesting() {
  return delegate_.get();
}

rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame>*
MediaStreamRemoteVideoSource::EncodedSinkInterfaceForTesting() {
  return delegate_.get();
}

void MediaStreamRemoteVideoSource::OnChanged(
    webrtc::MediaStreamTrackInterface::TrackState state) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  switch (state) {
    case webrtc::MediaStreamTrackInterface::kLive:
      SetReadyState(WebMediaStreamSource::kReadyStateLive);
      break;
    case webrtc::MediaStreamTrackInterface::kEnded:
      SetReadyState(WebMediaStreamSource::kReadyStateEnded);
      break;
    default:
      NOTREACHED();
      break;
  }
}

bool MediaStreamRemoteVideoSource::SupportsEncodedOutput() const {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  if (!observer_ || !observer_->track()) {
    return false;
  }
  scoped_refptr<webrtc::VideoTrackInterface> video_track(
      static_cast<webrtc::VideoTrackInterface*>(observer_->track().get()));
  return video_track->GetSource()->SupportsEncodedOutput();
}

void MediaStreamRemoteVideoSource::RequestRefreshFrame() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  if (!observer_ || !observer_->track()) {
    return;
  }
  scoped_refptr<webrtc::VideoTrackInterface> video_track(
      static_cast<webrtc::VideoTrackInterface*>(observer_->track().get()));
  if (video_track->GetSource()) {
    video_track->GetSource()->GenerateKeyFrame();
  }
}

void MediaStreamRemoteVideoSource::OnEncodedSinkEnabled() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  if (!observer_ || !observer_->track()) {
    return;
  }
  scoped_refptr<webrtc::VideoTrackInterface> video_track(
      static_cast<webrtc::VideoTrackInterface*>(observer_->track().get()));
  video_track->GetSource()->AddEncodedSink(delegate_.get());
}

void MediaStreamRemoteVideoSource::OnEncodedSinkDisabled() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  if (!observer_ || !observer_->track()) {
    return;
  }
  scoped_refptr<webrtc::VideoTrackInterface> video_track(
      static_cast<webrtc::VideoTrackInterface*>(observer_->track().get()));
  video_track->GetSource()->RemoveEncodedSink(delegate_.get());
}

}  // namespace blink