1 /* This Source Code Form is subject to the terms of the Mozilla Public
2 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
3 * You can obtain one at http://mozilla.org/MPL/2.0/. */
4
5 #include "CSFLog.h"
6 #include "nspr.h"
7 #include "plstr.h"
8
9 #include "AudioConduit.h"
10 #include "RtpRtcpConfig.h"
11 #include "VideoConduit.h"
12 #include "VideoStreamFactory.h"
13 #include "YuvStamper.h"
14 #include "mozilla/TemplateLib.h"
15 #include "mozilla/media/MediaUtils.h"
16 #include "mozilla/StaticPrefs_media.h"
17 #include "mozilla/UniquePtr.h"
18 #include "nsComponentManagerUtils.h"
19 #include "nsIPrefBranch.h"
20 #include "nsIGfxInfo.h"
21 #include "nsIPrefService.h"
22 #include "nsServiceManagerUtils.h"
23
24 #include "nsThreadUtils.h"
25
26 #include "pk11pub.h"
27
28 #include "api/video_codecs/sdp_video_format.h"
29 #include "media/engine/vp8_encoder_simulcast_proxy.h"
30 #include "webrtc/common_types.h"
31 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
32 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
33 #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
34 #include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
35 #include "webrtc/common_video/include/video_frame_buffer.h"
36
37 #include "mozilla/Unused.h"
38
39 #if defined(MOZ_WIDGET_ANDROID)
40 # include "VideoEngine.h"
41 #endif
42
43 #include "GmpVideoCodec.h"
44
45 #ifdef MOZ_WEBRTC_MEDIACODEC
46 # include "MediaCodecVideoCodec.h"
47 #endif
48 #include "WebrtcGmpVideoCodec.h"
49
50 #include "MediaDataCodec.h"
51
52 // for ntohs
53 #ifdef _MSC_VER
54 # include "Winsock2.h"
55 #else
56 # include <netinet/in.h>
57 #endif
58
59 #include <algorithm>
60 #include <math.h>
61 #include <cinttypes>
62
63 #define DEFAULT_VIDEO_MAX_FRAMERATE 30
64 #define INVALID_RTP_PAYLOAD 255 // valid payload types are 0 to 127
65
66 namespace mozilla {
67
68 static const char* vcLogTag = "WebrtcVideoSessionConduit";
69 #ifdef LOGTAG
70 # undef LOGTAG
71 #endif
72 #define LOGTAG vcLogTag
73
74 using LocalDirection = MediaSessionConduitLocalDirection;
75
// Sentinel payload type meaning "not negotiated / unset".
static const int kNullPayloadType = -1;
// SDP codec names for the FEC-related payloads we may negotiate.
static const char* kUlpFecPayloadName = "ulpfec";
static const char* kRedPayloadName = "red";

// The number of frame buffers WebrtcVideoConduit may create before returning
// errors.
// Sometimes these are released synchronously but they can be forwarded all the
// way to the encoder for asynchronous encoding. With a pool size of 5,
// we allow 1 buffer for the current conversion, and 4 buffers to be queued at
// the encoder.
#define SCALER_BUFFER_POOL_SIZE 5

// The pixel alignment to use for the highest resolution layer when simulcast
// is active and one or more layers are being scaled.
#define SIMULCAST_RESOLUTION_ALIGNMENT 16

// 32 bytes is what WebRTC CodecInst expects
const unsigned int WebrtcVideoConduit::CODEC_PLNAME_SIZE = 32;
94
// Returns the smaller of |a| and |b|, except that a zero value loses to a
// non-zero one. If both are zero, zero is returned.
template <typename T>
T MinIgnoreZero(const T& a, const T& b) {
  const T lhs = a ? a : b;
  const T rhs = b ? b : a;
  return std::min(lhs, rhs);
}
99
// Shrinks |*width| x |*height| by the smallest common integer divisor that
// brings the frame size (in pixels) at or under |max_fs|, preserving the
// aspect ratio exactly. If no divisor strictly below min(width, height)
// works, both dimensions are zeroed out.
template <class t>
static void ConstrainPreservingAspectRatioExact(uint32_t max_fs, t* width,
                                                t* height) {
  // We could try to pick a better starting divisor, but it won't make any real
  // performance difference.
  const size_t limit = std::min(*width, *height);
  for (size_t divisor = 1; divisor < limit; ++divisor) {
    const bool divides_both =
        (*width % divisor == 0) && (*height % divisor == 0);
    if (!divides_both) {
      continue;
    }
    if (((*width) * (*height)) / (divisor * divisor) <= max_fs) {
      *width /= divisor;
      *height /= divisor;
      return;
    }
  }

  *width = 0;
  *height = 0;
}
120
// Scales |*width| x |*height| down (integer math) so that both dimensions fit
// within max_width x max_height, keeping the aspect ratio approximately
// constant. Dimensions already within bounds are left untouched.
template <class t>
static void ConstrainPreservingAspectRatio(uint16_t max_width,
                                           uint16_t max_height, t* width,
                                           t* height) {
  const bool already_fits = (*width) <= max_width && (*height) <= max_height;
  if (already_fits) {
    return;
  }

  // Whichever dimension overshoots its bound by the larger ratio becomes the
  // binding constraint; the other is derived from it.
  const bool width_is_binding = (*width) * max_height > max_width * (*height);
  if (width_is_binding) {
    (*height) = max_width * (*height) / (*width);
    (*width) = max_width;
  } else {
    (*width) = max_height * (*width) / (*height);
    (*height) = max_height;
  }
}
137
138 /**
139 * Function to select and change the encoding frame rate based on incoming frame
140 * rate and max-mbps setting.
141 * @param current framerate
142 * @result new framerate
143 */
SelectSendFrameRate(const VideoCodecConfig * codecConfig,unsigned int old_framerate,unsigned short sending_width,unsigned short sending_height)144 static unsigned int SelectSendFrameRate(const VideoCodecConfig* codecConfig,
145 unsigned int old_framerate,
146 unsigned short sending_width,
147 unsigned short sending_height) {
148 unsigned int new_framerate = old_framerate;
149
150 // Limit frame rate based on max-mbps
151 if (codecConfig && codecConfig->mEncodingConstraints.maxMbps) {
152 unsigned int cur_fs, mb_width, mb_height;
153
154 mb_width = (sending_width + 15) >> 4;
155 mb_height = (sending_height + 15) >> 4;
156
157 cur_fs = mb_width * mb_height;
158 if (cur_fs > 0) { // in case no frames have been sent
159 new_framerate = codecConfig->mEncodingConstraints.maxMbps / cur_fs;
160
161 new_framerate = MinIgnoreZero(new_framerate,
162 codecConfig->mEncodingConstraints.maxFps);
163 }
164 }
165 return new_framerate;
166 }
167
168 /**
169 * Perform validation on the codecConfig to be applied
170 */
ValidateCodecConfig(const VideoCodecConfig * codecInfo)171 static MediaConduitErrorCode ValidateCodecConfig(
172 const VideoCodecConfig* codecInfo) {
173 if (!codecInfo) {
174 CSFLogError(LOGTAG, "%s Null CodecConfig ", __FUNCTION__);
175 return kMediaConduitMalformedArgument;
176 }
177
178 if ((codecInfo->mName.empty()) ||
179 (codecInfo->mName.length() >= WebrtcVideoConduit::CODEC_PLNAME_SIZE)) {
180 CSFLogError(LOGTAG, "%s Invalid Payload Name Length ", __FUNCTION__);
181 return kMediaConduitMalformedArgument;
182 }
183
184 return kMediaConduitNoError;
185 }
186
// Caches the most recent round-trip time from a webrtc::Call stats poll,
// converted from milliseconds to seconds. Must run on the stats thread.
void WebrtcVideoConduit::CallStatistics::Update(
    const webrtc::Call::Stats& aStats) {
  ASSERT_ON_THREAD(mStatsThread);

  const auto rtt = aStats.rtt_ms;
  if (rtt > static_cast<decltype(aStats.rtt_ms)>(INT32_MAX)) {
    // Bogus RTT (larger than an RTCP RTT can represent): clear the cached
    // value. NOTE(review): the original comment here said "keep using the
    // previous RTT", but the code below resets it to Nothing() — flagging
    // the mismatch rather than changing behavior.
#ifdef DEBUG
    CSFLogError(LOGTAG,
                "%s for VideoConduit:%p RTT is larger than the"
                " maximum size of an RTCP RTT.",
                __FUNCTION__, this);
#endif
    mRttSec = Nothing();
  } else {
    // A negative rtt signals an error; log only on the transition from a
    // previously successful measurement.
    if (mRttSec && rtt < 0) {
      CSFLogError(LOGTAG,
                  "%s for VideoConduit:%p RTT returned an error after "
                  " previously succeeding.",
                  __FUNCTION__, this);
      mRttSec = Nothing();
    }
    if (rtt >= 0) {
      mRttSec = Some(static_cast<DOMHighResTimeStamp>(rtt) / 1000.0);
    }
  }
}
214
// Most recent round-trip time in seconds, if one was successfully recorded
// by Update(). Stats-thread only.
Maybe<DOMHighResTimeStamp> WebrtcVideoConduit::CallStatistics::RttSec() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mRttSec;
}
220
// Folds one stats sample into the running frame-rate / bit-rate series and
// snapshots the RTCP packet-type counters. Stats-thread only.
void WebrtcVideoConduit::StreamStatistics::Update(
    const double aFrameRate, const double aBitrate,
    const webrtc::RtcpPacketTypeCounter& aPacketCounts) {
  ASSERT_ON_THREAD(mStatsThread);

  mFrameRate.Push(aFrameRate);
  mBitRate.Push(aBitrate);
  mPacketCounts = aPacketCounts;
}
230
GetVideoStreamStats(double & aOutFrMean,double & aOutFrStdDev,double & aOutBrMean,double & aOutBrStdDev) const231 bool WebrtcVideoConduit::StreamStatistics::GetVideoStreamStats(
232 double& aOutFrMean, double& aOutFrStdDev, double& aOutBrMean,
233 double& aOutBrStdDev) const {
234 ASSERT_ON_THREAD(mStatsThread);
235
236 if (mFrameRate.NumDataValues() && mBitRate.NumDataValues()) {
237 aOutFrMean = mFrameRate.Mean();
238 aOutFrStdDev = mFrameRate.StandardDeviation();
239 aOutBrMean = mBitRate.Mean();
240 aOutBrStdDev = mBitRate.StandardDeviation();
241 return true;
242 }
243 return false;
244 }
245
// Accumulates per-call bitrate/framerate telemetry for this stream into the
// encoder or decoder histograms (picked via IsSend()). No-op when the stream
// was never marked active. Stats-thread only.
void WebrtcVideoConduit::StreamStatistics::RecordTelemetry() const {
  ASSERT_ON_THREAD(mStatsThread);

  if (!mActive) {
    return;
  }
  using namespace Telemetry;
  Accumulate(IsSend() ? WEBRTC_VIDEO_ENCODER_BITRATE_AVG_PER_CALL_KBPS
                      : WEBRTC_VIDEO_DECODER_BITRATE_AVG_PER_CALL_KBPS,
             mBitRate.Mean() / 1000);
  Accumulate(IsSend() ? WEBRTC_VIDEO_ENCODER_BITRATE_STD_DEV_PER_CALL_KBPS
                      : WEBRTC_VIDEO_DECODER_BITRATE_STD_DEV_PER_CALL_KBPS,
             mBitRate.StandardDeviation() / 1000);
  Accumulate(IsSend() ? WEBRTC_VIDEO_ENCODER_FRAMERATE_AVG_PER_CALL
                      : WEBRTC_VIDEO_DECODER_FRAMERATE_AVG_PER_CALL,
             mFrameRate.Mean());
  // Std-dev histogram stores tenths of a frame for extra resolution.
  Accumulate(IsSend() ? WEBRTC_VIDEO_ENCODER_FRAMERATE_10X_STD_DEV_PER_CALL
                      : WEBRTC_VIDEO_DECODER_FRAMERATE_10X_STD_DEV_PER_CALL,
             mFrameRate.StandardDeviation() * 10);
}
266
// RTCP packet-type counters snapshotted by the last Update(). Stats-thread
// only.
const webrtc::RtcpPacketTypeCounter&
WebrtcVideoConduit::StreamStatistics::PacketCounts() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mPacketCounts;
}
273
// Whether the stream has been marked active (gates telemetry recording).
// Stats-thread only.
bool WebrtcVideoConduit::StreamStatistics::Active() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mActive;
}
279
// Marks the stream active/inactive. Stats-thread only.
void WebrtcVideoConduit::StreamStatistics::SetActive(bool aActive) {
  ASSERT_ON_THREAD(mStatsThread);

  mActive = aActive;
}
285
// Frames delivered to the encoder but not encoded, per the last Update().
// Stats-thread only.
uint32_t WebrtcVideoConduit::SendStreamStatistics::DroppedFrames() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mDroppedFrames;
}
291
// Total frames (key + delta) encoded so far, per the last Update().
// Stats-thread only.
uint32_t WebrtcVideoConduit::SendStreamStatistics::FramesEncoded() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mFramesEncoded;
}
297
// Records that one more frame was handed to the encoder; used with
// FramesEncoded() to derive DroppedFrames(). Stats-thread only.
void WebrtcVideoConduit::SendStreamStatistics::FrameDeliveredToEncoder() {
  ASSERT_ON_THREAD(mStatsThread);

  ++mFramesDeliveredToEncoder;
}
303
// True when the last Update() found the configured SSRC among the send
// stream's substreams. Stats-thread only.
bool WebrtcVideoConduit::SendStreamStatistics::SsrcFound() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mSsrcFound;
}
309
// Jitter in milliseconds (converted from RTP clock units in Update()).
// Stats-thread only.
uint32_t WebrtcVideoConduit::SendStreamStatistics::JitterMs() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mJitterMs;
}
315
// Cumulative packets lost from the substream's RTCP stats, per the last
// Update(). Stats-thread only.
uint32_t WebrtcVideoConduit::SendStreamStatistics::PacketsLost() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mPacketsLost;
}
321
// Media payload bytes from the substream's RTP stats (see Update()).
// Stats-thread only.
uint64_t WebrtcVideoConduit::SendStreamStatistics::BytesReceived() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mBytesReceived;
}
327
// Transmitted packet count from the substream's RTP stats (see Update()).
// Stats-thread only.
uint32_t WebrtcVideoConduit::SendStreamStatistics::PacketsReceived() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mPacketsReceived;
}
333
// Sum of encoded-frame QP values when the stats reported one, Nothing()
// otherwise (see Update()). Stats-thread only.
Maybe<uint64_t> WebrtcVideoConduit::SendStreamStatistics::QpSum() const {
  ASSERT_ON_THREAD(mStatsThread);
  return mQpSum;
}
338
// Refreshes the cached send-side statistics from a VideoSendStream stats
// snapshot, scoped to the substream matching |aConfiguredSsrc|. Leaves
// mSsrcFound false (and the rest untouched) when the stats contain no entry
// for that SSRC. Stats-thread only.
void WebrtcVideoConduit::SendStreamStatistics::Update(
    const webrtc::VideoSendStream::Stats& aStats, uint32_t aConfiguredSsrc) {
  ASSERT_ON_THREAD(mStatsThread);

  mSsrcFound = false;

  if (aStats.substreams.empty()) {
    CSFLogVerbose(LOGTAG, "%s stats.substreams is empty", __FUNCTION__);
    return;
  }

  auto ind = aStats.substreams.find(aConfiguredSsrc);
  if (ind == aStats.substreams.end()) {
    CSFLogError(LOGTAG,
                "%s for VideoConduit:%p ssrc not found in SendStream stats.",
                __FUNCTION__, this);
    return;
  }

  mSsrcFound = true;

  StreamStatistics::Update(aStats.encode_frame_rate, aStats.media_bitrate_bps,
                           ind->second.rtcp_packet_type_counts);
  if (aStats.qp_sum) {
    mQpSum = Some(aStats.qp_sum.value());
  } else {
    mQpSum = Nothing();
  }

  const webrtc::FrameCounts& fc = ind->second.frame_counts;
  mFramesEncoded = fc.key_frames + fc.delta_frames;
  CSFLogVerbose(
      LOGTAG, "%s: framerate: %u, bitrate: %u, dropped frames delta: %u",
      __FUNCTION__, aStats.encode_frame_rate, aStats.media_bitrate_bps,
      mFramesDeliveredToEncoder - mFramesEncoded - mDroppedFrames);
  // Dropped == delivered to the encoder but never encoded.
  mDroppedFrames = mFramesDeliveredToEncoder - mFramesEncoded;
  // Convert jitter from RTP clock units (kVideoPayloadTypeFrequency) to ms.
  mJitterMs = ind->second.rtcp_stats.jitter /
              (webrtc::kVideoPayloadTypeFrequency / 1000);
  mPacketsLost = ind->second.rtcp_stats.packets_lost;
  mBytesReceived = ind->second.rtp_stats.MediaPayloadBytes();
  mPacketsReceived = ind->second.rtp_stats.transmitted.packets;
}
381
// Octet count the remote sender reported via RTCP (see Update()).
// Stats-thread only.
uint32_t WebrtcVideoConduit::ReceiveStreamStatistics::BytesSent() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mBytesSent;
}
387
// Packets discarded by the receive pipeline, per the last Update().
// Stats-thread only.
uint32_t WebrtcVideoConduit::ReceiveStreamStatistics::DiscardedPackets() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mDiscardedPackets;
}
393
// Total frames (key + delta) decoded so far, per the last Update().
// Stats-thread only.
uint32_t WebrtcVideoConduit::ReceiveStreamStatistics::FramesDecoded() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mFramesDecoded;
}
399
// Jitter in milliseconds (converted from RTP clock units in Update()).
// Stats-thread only.
uint32_t WebrtcVideoConduit::ReceiveStreamStatistics::JitterMs() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mJitterMs;
}
405
// Cumulative packets lost from the stream's RTCP stats, per the last
// Update(). Stats-thread only.
uint32_t WebrtcVideoConduit::ReceiveStreamStatistics::PacketsLost() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mPacketsLost;
}
411
// Packet count the remote sender reported via RTCP (see Update()).
// Stats-thread only.
uint32_t WebrtcVideoConduit::ReceiveStreamStatistics::PacketsSent() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mPacketsSent;
}
417
// SSRC of the stream these stats describe, per the last Update().
// Stats-thread only.
uint32_t WebrtcVideoConduit::ReceiveStreamStatistics::Ssrc() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mSsrc;
}
423
// Refreshes the cached receive-side statistics from a VideoReceiveStream
// stats snapshot. Stats-thread only.
void WebrtcVideoConduit::ReceiveStreamStatistics::Update(
    const webrtc::VideoReceiveStream::Stats& aStats) {
  ASSERT_ON_THREAD(mStatsThread);

  CSFLogVerbose(LOGTAG, "%s ", __FUNCTION__);
  StreamStatistics::Update(aStats.decode_frame_rate, aStats.total_bitrate_bps,
                           aStats.rtcp_packet_type_counts);
  // rtcp_sender_* counters are what the remote sender reported about itself.
  mBytesSent = aStats.rtcp_sender_octets_sent;
  mDiscardedPackets = aStats.discarded_packets;
  mFramesDecoded =
      aStats.frame_counts.key_frames + aStats.frame_counts.delta_frames;
  // Convert jitter from RTP clock units (kVideoPayloadTypeFrequency) to ms.
  mJitterMs =
      aStats.rtcp_stats.jitter / (webrtc::kVideoPayloadTypeFrequency / 1000);
  mPacketsLost = aStats.rtcp_stats.packets_lost;
  mPacketsSent = aStats.rtcp_sender_packets_sent;
  mSsrc = aStats.ssrc;
}
441
442 /**
443 * Factory Method for VideoConduit
444 */
Create(RefPtr<WebRtcCallWrapper> aCall,nsCOMPtr<nsISerialEventTarget> aStsThread)445 RefPtr<VideoSessionConduit> VideoSessionConduit::Create(
446 RefPtr<WebRtcCallWrapper> aCall,
447 nsCOMPtr<nsISerialEventTarget> aStsThread) {
448 MOZ_ASSERT(NS_IsMainThread());
449 MOZ_ASSERT(aCall, "missing required parameter: aCall");
450 CSFLogVerbose(LOGTAG, "%s", __FUNCTION__);
451
452 if (!aCall) {
453 return nullptr;
454 }
455
456 auto obj = MakeRefPtr<WebrtcVideoConduit>(aCall, aStsThread);
457 if (obj->Init() != kMediaConduitNoError) {
458 CSFLogError(LOGTAG, "%s VideoConduit Init Failed ", __FUNCTION__);
459 return nullptr;
460 }
461 CSFLogVerbose(LOGTAG, "%s Successfully created VideoConduit ", __FUNCTION__);
462 return obj.forget();
463 }
464
// Constructs the conduit: wires up stats objects on the STS thread, registers
// with the webrtc::Call wrapper, and points the receive config's renderer and
// RTCP event observer at this object.
WebrtcVideoConduit::WebrtcVideoConduit(
    RefPtr<WebRtcCallWrapper> aCall, nsCOMPtr<nsISerialEventTarget> aStsThread)
    : mTransportMonitor("WebrtcVideoConduit"),
      mStsThread(aStsThread),
      mMutex("WebrtcVideoConduit::mMutex"),
      mVideoAdapter(MakeUnique<cricket::VideoAdapter>()),
      mBufferPool(false, SCALER_BUFFER_POOL_SIZE),
      mEngineTransmitting(false),
      mEngineReceiving(false),
      mSendStreamStats(aStsThread),
      mRecvStreamStats(aStsThread),
      mCallStats(aStsThread),
      mSendingFramerate(DEFAULT_VIDEO_MAX_FRAMERATE),
      mActiveCodecMode(webrtc::kRealtimeVideo),
      mCodecMode(webrtc::kRealtimeVideo),
      mCall(aCall),
      mSendStreamConfig(
          this)  // 'this' is stored but not dereferenced in the constructor.
      ,
      mRecvStreamConfig(
          this)  // 'this' is stored but not dereferenced in the constructor.
      ,
      mRecvSSRC(0),
      mVideoStatsTimer(NS_NewTimer()),
      mRtpSourceObserver(new RtpSourceObserver(mCall->GetTimestampMaker())) {
  mCall->RegisterConduit(this);
  mRecvStreamConfig.renderer = this;
  mRecvStreamConfig.rtcp_event_observer = this;
}
494
// Destructor: unregisters from the call wrapper. Streams must already have
// been torn down (see the assertion). Main-thread only.
WebrtcVideoConduit::~WebrtcVideoConduit() {
  MOZ_ASSERT(NS_IsMainThread());

  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
  mCall->UnregisterConduit(this);

  // NOTE(review): the comment below mentions AudioConduit but this is the
  // video conduit — it appears to be copied from AudioConduit.cpp; verify.
  // Release AudioConduit first by dropping reference on MainThread, where it
  // expects to be
  MOZ_ASSERT(!mSendStream && !mRecvStream,
             "Call DeleteStreams prior to ~WebrtcVideoConduit.");
}
506
SetLocalRTPExtensions(LocalDirection aDirection,const RtpExtList & aExtensions)507 MediaConduitErrorCode WebrtcVideoConduit::SetLocalRTPExtensions(
508 LocalDirection aDirection, const RtpExtList& aExtensions) {
509 MOZ_ASSERT(NS_IsMainThread());
510
511 auto& extList = aDirection == LocalDirection::kSend
512 ? mSendStreamConfig.rtp.extensions
513 : mRecvStreamConfig.rtp.extensions;
514 extList = aExtensions;
515 return kMediaConduitNoError;
516 }
517
// Replaces the local send (and RTX) SSRCs. When they actually change, the
// send side is stopped, the SendStream deleted (so the next start rebuilds
// it with the new SSRCs), and transmission restarted if it was running.
// Returns false if stopping or restarting fails. Main-thread only.
bool WebrtcVideoConduit::SetLocalSSRCs(
    const std::vector<unsigned int>& aSSRCs,
    const std::vector<unsigned int>& aRtxSSRCs) {
  MOZ_ASSERT(NS_IsMainThread());

  // Special case: the local SSRCs are the same - do nothing.
  if (mSendStreamConfig.rtp.ssrcs == aSSRCs &&
      mSendStreamConfig.rtp.rtx.ssrcs == aRtxSSRCs) {
    return true;
  }

  {
    MutexAutoLock lock(mMutex);
    // Update the value of the ssrcs in the config structure.
    mSendStreamConfig.rtp.ssrcs = aSSRCs;
    mSendStreamConfig.rtp.rtx.ssrcs = aRtxSSRCs;

    bool wasTransmitting = mEngineTransmitting;
    if (StopTransmittingLocked() != kMediaConduitNoError) {
      return false;
    }

    // On the next StartTransmitting() or ConfigureSendMediaCodec, force
    // building a new SendStream to switch SSRCs.
    DeleteSendStream();

    if (wasTransmitting) {
      if (StartTransmittingLocked() != kMediaConduitNoError) {
        return false;
      }
    }
  }

  return true;
}
553
// Returns a copy of the configured local send SSRCs, taken under mMutex.
std::vector<unsigned int> WebrtcVideoConduit::GetLocalSSRCs() {
  MutexAutoLock lock(mMutex);

  return mSendStreamConfig.rtp.ssrcs;
}
559
// Stores |cname| as the send stream's RTCP CNAME. Always returns true.
// Main-thread only; takes mMutex.
bool WebrtcVideoConduit::SetLocalCNAME(const char* cname) {
  MOZ_ASSERT(NS_IsMainThread());
  MutexAutoLock lock(mMutex);

  mSendStreamConfig.rtp.c_name = cname;
  return true;
}
567
// Stores the negotiated MID on the send stream config. Always returns true.
// Main-thread only; takes mMutex.
bool WebrtcVideoConduit::SetLocalMID(const std::string& mid) {
  MOZ_ASSERT(NS_IsMainThread());
  MutexAutoLock lock(mMutex);

  mSendStreamConfig.rtp.mid = mid;
  return true;
}
575
// Sets the sync group on the receive stream config.
// NOTE(review): unlike the other config setters above, this takes no mMutex
// and asserts no thread — confirm callers guarantee safe access to
// mRecvStreamConfig here.
void WebrtcVideoConduit::SetSyncGroup(const std::string& group) {
  mRecvStreamConfig.sync_group = group;
}
579
ConfigureCodecMode(webrtc::VideoCodecMode mode)580 MediaConduitErrorCode WebrtcVideoConduit::ConfigureCodecMode(
581 webrtc::VideoCodecMode mode) {
582 MOZ_ASSERT(NS_IsMainThread());
583
584 CSFLogVerbose(LOGTAG, "%s ", __FUNCTION__);
585 if (mode == webrtc::VideoCodecMode::kRealtimeVideo ||
586 mode == webrtc::VideoCodecMode::kScreensharing) {
587 mCodecMode = mode;
588 if (mVideoStreamFactory) {
589 mVideoStreamFactory->SetCodecMode(mCodecMode);
590 }
591 return kMediaConduitNoError;
592 }
593
594 return kMediaConduitMalformedArgument;
595 }
596
// Destroys the current VideoSendStream (if any) and drops our encoder
// reference; the stream holds a raw pointer to the encoder, so the stream
// must go first. Main-thread only; mMutex must be held.
void WebrtcVideoConduit::DeleteSendStream() {
  MOZ_ASSERT(NS_IsMainThread());
  mMutex.AssertCurrentThreadOwns();

  if (mSendStream) {
    mCall->Call()->DestroyVideoSendStream(mSendStream);
    mSendStream = nullptr;
    mEncoder = nullptr;
  }
}
607
SupportedCodecType(webrtc::VideoCodecType aType)608 webrtc::VideoCodecType SupportedCodecType(webrtc::VideoCodecType aType) {
609 switch (aType) {
610 case webrtc::VideoCodecType::kVideoCodecVP8:
611 case webrtc::VideoCodecType::kVideoCodecVP9:
612 case webrtc::VideoCodecType::kVideoCodecH264:
613 return aType;
614 default:
615 return webrtc::VideoCodecType::kVideoCodecUnknown;
616 }
617 // NOTREACHED
618 }
619
// Builds a webrtc::VideoSendStream from the current send and encoder configs:
// records codec-usage telemetry, validates the codec type, creates the
// encoder, creates the stream, and wires this conduit in as the stream's
// frame source. Main-thread only; mMutex must be held.
MediaConduitErrorCode WebrtcVideoConduit::CreateSendStream() {
  MOZ_ASSERT(NS_IsMainThread());
  mMutex.AssertCurrentThreadOwns();

  nsAutoString codecName;
  codecName.AssignASCII(
      mSendStreamConfig.encoder_settings.payload_name.c_str());
  Telemetry::ScalarAdd(Telemetry::ScalarID::WEBRTC_VIDEO_SEND_CODEC_USED,
                       codecName, 1);

  webrtc::VideoCodecType encoder_type =
      SupportedCodecType(webrtc::PayloadStringToCodecType(
          mSendStreamConfig.encoder_settings.payload_name));
  if (encoder_type == webrtc::VideoCodecType::kVideoCodecUnknown) {
    return kMediaConduitInvalidSendCodec;
  }

  std::unique_ptr<webrtc::VideoEncoder> encoder(CreateEncoder(encoder_type));
  if (!encoder) {
    return kMediaConduitInvalidSendCodec;
  }

  // The config stores only a raw pointer; |encoder| keeps ownership until the
  // stream is successfully created, then it is moved into mEncoder below.
  mSendStreamConfig.encoder_settings.encoder = encoder.get();

  MOZ_ASSERT(
      mSendStreamConfig.rtp.ssrcs.size() == mEncoderConfig.number_of_streams,
      "Each video substream must have a corresponding ssrc.");

  mSendStream = mCall->Call()->CreateVideoSendStream(mSendStreamConfig.Copy(),
                                                     mEncoderConfig.Copy());

  if (!mSendStream) {
    return kMediaConduitVideoSendStreamError;
  }
  mSendStream->SetSource(
      this, webrtc::VideoSendStream::DegradationPreference::kBalanced);

  // Keep the encoder alive for as long as the stream references it.
  mEncoder = std::move(encoder);

  mActiveCodecMode = mCodecMode;

  return kMediaConduitNoError;
}
663
// Destroys the current VideoReceiveStream (if any) after detaching ourselves
// as its secondary packet sink, then releases the decoders the stream was
// pointing at. Main-thread only; mMutex must be held.
void WebrtcVideoConduit::DeleteRecvStream() {
  MOZ_ASSERT(NS_IsMainThread());
  mMutex.AssertCurrentThreadOwns();

  if (mRecvStream) {
    mRecvStream->RemoveSecondarySink(this);
    mCall->Call()->DestroyVideoReceiveStream(mRecvStream);
    mRecvStream = nullptr;
    mDecoders.clear();
  }
}
675
// Builds a webrtc::VideoReceiveStream for the negotiated receive codecs:
// creates one decoder per supported codec (skipping, with a log, any codec
// that is unknown or whose decoder cannot be created), then creates the
// stream and attaches this conduit as a secondary RTP packet sink.
// Main-thread only; mMutex must be held.
MediaConduitErrorCode WebrtcVideoConduit::CreateRecvStream() {
  MOZ_ASSERT(NS_IsMainThread());
  mMutex.AssertCurrentThreadOwns();

  webrtc::VideoReceiveStream::Decoder decoder_desc;
  std::unique_ptr<webrtc::VideoDecoder> decoder;
  webrtc::VideoCodecType decoder_type;

  mRecvStreamConfig.decoders.clear();
  for (auto& config : mRecvCodecList) {
    nsAutoString codecName;
    codecName.AssignASCII(config->mName.c_str());
    Telemetry::ScalarAdd(Telemetry::ScalarID::WEBRTC_VIDEO_RECV_CODEC_USED,
                         codecName, 1);

    decoder_type =
        SupportedCodecType(webrtc::PayloadStringToCodecType(config->mName));
    if (decoder_type == webrtc::VideoCodecType::kVideoCodecUnknown) {
      CSFLogError(LOGTAG, "%s Unknown decoder type: %s", __FUNCTION__,
                  config->mName.c_str());
      continue;
    }

    decoder = CreateDecoder(decoder_type);

    if (!decoder) {
      // This really should never happen unless something went wrong
      // in the negotiation code
      NS_ASSERTION(decoder, "Failed to create video decoder");
      CSFLogError(LOGTAG, "Failed to create decoder of type %s (%d)",
                  config->mName.c_str(), decoder_type);
      // don't stop
      continue;
    }

    // The stream config holds a raw pointer; mDecoders keeps ownership.
    decoder_desc.decoder = decoder.get();
    mDecoders.push_back(std::move(decoder));
    decoder_desc.payload_name = config->mName;
    decoder_desc.payload_type = config->mType;
    // XXX Ok, add:
    // Set decoder_desc.codec_params (fmtp)
    mRecvStreamConfig.decoders.push_back(decoder_desc);
  }

  mRecvStream =
      mCall->Call()->CreateVideoReceiveStream(mRecvStreamConfig.Copy());
  if (!mRecvStream) {
    mDecoders.clear();
    return kMediaConduitUnknownError;
  }

  // Add RTPPacketSinkInterface for synchronization source tracking
  mRecvStream->AddSecondarySink(this);

  CSFLogDebug(LOGTAG, "Created VideoReceiveStream %p for SSRC %u (0x%x)",
              mRecvStream, mRecvStreamConfig.rtp.remote_ssrc,
              mRecvStreamConfig.rtp.remote_ssrc);

  return kMediaConduitNoError;
}
736
// Builds the codec-specific encoder settings (H264 / VP8 / VP9) for
// |aConfig|, taking screensharing mode, simulcast, and the conduit's
// denoising / spatial-layer preferences into account. Returns nullptr for
// codec names with no specific settings. Main-thread only.
static rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings>
ConfigureVideoEncoderSettings(const VideoCodecConfig* aConfig,
                              const WebrtcVideoConduit* aConduit) {
  MOZ_ASSERT(NS_IsMainThread());

  bool is_screencast =
      aConduit->CodecMode() == webrtc::VideoCodecMode::kScreensharing;
  // No automatic resizing when using simulcast or screencast.
  bool automatic_resize = !is_screencast && aConfig->mEncodings.size() <= 1;
  bool frame_dropping = !is_screencast;
  bool denoising;
  bool codec_default_denoising = false;
  if (is_screencast) {
    denoising = false;
  } else {
    // Use codec default if video_noise_reduction is unset.
    denoising = aConduit->Denoising();
    codec_default_denoising = !denoising;
  }

  if (aConfig->mName == "H264") {
    webrtc::VideoCodecH264 h264_settings =
        webrtc::VideoEncoder::GetDefaultH264Settings();
    h264_settings.frameDroppingOn = frame_dropping;
    h264_settings.packetizationMode = aConfig->mPacketizationMode;
    return new rtc::RefCountedObject<
        webrtc::VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings);
  }
  if (aConfig->mName == "VP8") {
    webrtc::VideoCodecVP8 vp8_settings =
        webrtc::VideoEncoder::GetDefaultVp8Settings();
    vp8_settings.automaticResizeOn = automatic_resize;
    // VP8 denoising is enabled by default.
    vp8_settings.denoisingOn = codec_default_denoising ? true : denoising;
    vp8_settings.frameDroppingOn = frame_dropping;
    return new rtc::RefCountedObject<
        webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
  }
  if (aConfig->mName == "VP9") {
    webrtc::VideoCodecVP9 vp9_settings =
        webrtc::VideoEncoder::GetDefaultVp9Settings();
    if (is_screencast) {
      // TODO(asapersson): Set to 2 for now since there is a DCHECK in
      // VideoSendStream::ReconfigureVideoEncoder.
      vp9_settings.numberOfSpatialLayers = 2;
    } else {
      vp9_settings.numberOfSpatialLayers = aConduit->SpatialLayers();
    }
    // VP9 denoising is disabled by default.
    vp9_settings.denoisingOn = codec_default_denoising ? false : denoising;
    vp9_settings.frameDroppingOn = frame_dropping;
    return new rtc::RefCountedObject<
        webrtc::VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
  }
  return nullptr;
}
793
794 // Compare lists of codecs
CodecsDifferent(const nsTArray<UniquePtr<VideoCodecConfig>> & a,const nsTArray<UniquePtr<VideoCodecConfig>> & b)795 static bool CodecsDifferent(const nsTArray<UniquePtr<VideoCodecConfig>>& a,
796 const nsTArray<UniquePtr<VideoCodecConfig>>& b) {
797 // return a != b;
798 // would work if UniquePtr<> operator== compared contents!
799 auto len = a.Length();
800 if (len != b.Length()) {
801 return true;
802 }
803
804 // XXX std::equal would work, if we could use it on this - fails for the
805 // same reason as above. c++14 would let us pass a comparator function.
806 for (uint32_t i = 0; i < len; ++i) {
807 if (!(*a[i] == *b[i])) {
808 return true;
809 }
810 }
811
812 return false;
813 }
814
815 /**
816 * Note: Setting the send-codec on the Video Engine will restart the encoder,
817 * sets up new SSRC and reset RTP_RTCP module with the new codec setting.
818 *
819 * Note: this is called from MainThread, and the codec settings are read on
820 * videoframe delivery threads (i.e in SendVideoFrame(). With
821 * renegotiation/reconfiguration, this now needs a lock! Alternatively
822 * changes could be queued until the next frame is delivered using an
823 * Atomic pointer and swaps.
824 */
ConfigureSendMediaCodec(const VideoCodecConfig * codecConfig,const RtpRtcpConfig & aRtpRtcpConfig)825 MediaConduitErrorCode WebrtcVideoConduit::ConfigureSendMediaCodec(
826 const VideoCodecConfig* codecConfig, const RtpRtcpConfig& aRtpRtcpConfig) {
827 MOZ_ASSERT(NS_IsMainThread());
828 MutexAutoLock lock(mMutex);
829 mUpdateResolution = true;
830
831 CSFLogDebug(LOGTAG, "%s for %s", __FUNCTION__,
832 codecConfig ? codecConfig->mName.c_str() : "<null>");
833
834 MediaConduitErrorCode condError = kMediaConduitNoError;
835
836 // validate basic params
837 if ((condError = ValidateCodecConfig(codecConfig)) != kMediaConduitNoError) {
838 return condError;
839 }
840
841 size_t streamCount = std::min(codecConfig->mEncodings.size(),
842 (size_t)webrtc::kMaxSimulcastStreams);
843
844 MOZ_RELEASE_ASSERT(streamCount >= 1, "streamCount should be at least one");
845
846 CSFLogDebug(LOGTAG, "%s for VideoConduit:%p stream count:%zu", __FUNCTION__,
847 this, streamCount);
848
849 mSendingFramerate = 0;
850 mSendStreamConfig.rtp.rids.clear();
851
852 int max_framerate;
853 if (codecConfig->mEncodingConstraints.maxFps > 0) {
854 max_framerate = codecConfig->mEncodingConstraints.maxFps;
855 } else {
856 max_framerate = DEFAULT_VIDEO_MAX_FRAMERATE;
857 }
858 // apply restrictions from maxMbps/etc
859 mSendingFramerate =
860 SelectSendFrameRate(codecConfig, max_framerate, mLastWidth, mLastHeight);
861
862 // So we can comply with b=TIAS/b=AS/maxbr=X when input resolution changes
863 mNegotiatedMaxBitrate = codecConfig->mTias;
864
865 if (mLastWidth == 0 && mMinBitrateEstimate != 0) {
866 // Only do this at the start; use "have we send a frame" as a reasonable
867 // stand-in. min <= start <= max (which can be -1, note!)
868 webrtc::Call::Config::BitrateConfig config;
869 config.min_bitrate_bps = mMinBitrateEstimate;
870 if (config.start_bitrate_bps < mMinBitrateEstimate) {
871 config.start_bitrate_bps = mMinBitrateEstimate;
872 }
873 if (config.max_bitrate_bps > 0 &&
874 config.max_bitrate_bps < mMinBitrateEstimate) {
875 config.max_bitrate_bps = mMinBitrateEstimate;
876 }
877 mCall->Call()->SetBitrateConfig(config);
878 }
879
880 mVideoStreamFactory = new rtc::RefCountedObject<VideoStreamFactory>(
881 *codecConfig, mCodecMode, mMinBitrate, mStartBitrate, mPrefMaxBitrate,
882 mNegotiatedMaxBitrate, mSendingFramerate);
883 mEncoderConfig.video_stream_factory = mVideoStreamFactory.get();
884
885 // Reset the VideoAdapter. SelectResolution will ensure limits are set.
886 mVideoAdapter = MakeUnique<cricket::VideoAdapter>(
887 streamCount > 1 ? SIMULCAST_RESOLUTION_ALIGNMENT : 1);
888 mVideoAdapter->OnScaleResolutionBy(
889 codecConfig->mEncodings[0].constraints.scaleDownBy > 1.0
890 ? rtc::Optional<float>(
891 codecConfig->mEncodings[0].constraints.scaleDownBy)
892 : rtc::Optional<float>());
893
894 // XXX parse the encoded SPS/PPS data and set spsData/spsLen/ppsData/ppsLen
895 mEncoderConfig.encoder_specific_settings =
896 ConfigureVideoEncoderSettings(codecConfig, this);
897
898 mEncoderConfig.content_type =
899 mCodecMode == webrtc::kRealtimeVideo
900 ? webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo
901 : webrtc::VideoEncoderConfig::ContentType::kScreen;
902 // for the GMP H.264 encoder/decoder!!
903 mEncoderConfig.min_transmit_bitrate_bps = 0;
904 // Expected max number of encodings
905 mEncoderConfig.number_of_streams = streamCount;
906
907 // If only encoder stream attibutes have been changed, there is no need to
908 // stop, create a new webrtc::VideoSendStream, and restart. Recreating on
909 // PayloadType change may be overkill, but is safe.
910 if (mSendStream) {
911 if (!RequiresNewSendStream(*codecConfig) &&
912 mActiveCodecMode == mCodecMode) {
913 mCurSendCodecConfig->mEncodingConstraints =
914 codecConfig->mEncodingConstraints;
915 mCurSendCodecConfig->mEncodings = codecConfig->mEncodings;
916 mSendStream->ReconfigureVideoEncoder(mEncoderConfig.Copy());
917 return kMediaConduitNoError;
918 }
919
920 condError = StopTransmittingLocked();
921 if (condError != kMediaConduitNoError) {
922 return condError;
923 }
924
925 // This will cause a new encoder to be created by StartTransmitting()
926 DeleteSendStream();
927 }
928
929 mSendStreamConfig.encoder_settings.payload_name = codecConfig->mName;
930 mSendStreamConfig.encoder_settings.payload_type = codecConfig->mType;
931 mSendStreamConfig.rtp.rtcp_mode = aRtpRtcpConfig.GetRtcpMode();
932 mSendStreamConfig.rtp.max_packet_size = kVideoMtu;
933 if (codecConfig->RtxPayloadTypeIsSet()) {
934 mSendStreamConfig.rtp.rtx.payload_type = codecConfig->mRTXPayloadType;
935 } else {
936 mSendStreamConfig.rtp.rtx.payload_type = -1;
937 mSendStreamConfig.rtp.rtx.ssrcs.clear();
938 }
939
940 // See Bug 1297058, enabling FEC when basic NACK is to be enabled in H.264 is
941 // problematic
942 if (codecConfig->RtcpFbFECIsSet() &&
943 !(codecConfig->mName == "H264" && codecConfig->RtcpFbNackIsSet(""))) {
944 mSendStreamConfig.rtp.ulpfec.ulpfec_payload_type =
945 codecConfig->mULPFECPayloadType;
946 mSendStreamConfig.rtp.ulpfec.red_payload_type =
947 codecConfig->mREDPayloadType;
948 mSendStreamConfig.rtp.ulpfec.red_rtx_payload_type =
949 codecConfig->mREDRTXPayloadType;
950 } else {
951 // Reset to defaults
952 mSendStreamConfig.rtp.ulpfec.ulpfec_payload_type = -1;
953 mSendStreamConfig.rtp.ulpfec.red_payload_type = -1;
954 mSendStreamConfig.rtp.ulpfec.red_rtx_payload_type = -1;
955 }
956
957 mSendStreamConfig.rtp.nack.rtp_history_ms =
958 codecConfig->RtcpFbNackIsSet("") ? 1000 : 0;
959
960 // Copy the applied config for future reference.
961 mCurSendCodecConfig = MakeUnique<VideoCodecConfig>(*codecConfig);
962
963 mSendStreamConfig.rtp.rids.clear();
964 bool has_rid = false;
965 for (size_t idx = 0; idx < streamCount; idx++) {
966 auto& encoding = mCurSendCodecConfig->mEncodings[idx];
967 if (encoding.rid[0]) {
968 has_rid = true;
969 break;
970 }
971 }
972 if (has_rid) {
973 for (size_t idx = streamCount; idx > 0; idx--) {
974 auto& encoding = mCurSendCodecConfig->mEncodings[idx - 1];
975 mSendStreamConfig.rtp.rids.push_back(encoding.rid);
976 }
977 }
978
979 return condError;
980 }
981
GenerateRandomSSRC()982 static uint32_t GenerateRandomSSRC() {
983 uint32_t ssrc;
984 do {
985 SECStatus rv = PK11_GenerateRandom(reinterpret_cast<unsigned char*>(&ssrc),
986 sizeof(ssrc));
987 if (rv != SECSuccess) {
988 CSFLogError(LOGTAG, "%s: PK11_GenerateRandom failed with error %d",
989 __FUNCTION__, rv);
990 return 0;
991 }
992 } while (ssrc == 0); // webrtc.org code has fits if you select an SSRC of 0
993
994 return ssrc;
995 }
996
SetRemoteSSRC(uint32_t ssrc,uint32_t rtxSsrc)997 bool WebrtcVideoConduit::SetRemoteSSRC(uint32_t ssrc, uint32_t rtxSsrc) {
998 MOZ_ASSERT(NS_IsMainThread());
999 MutexAutoLock lock(mMutex);
1000
1001 return SetRemoteSSRCLocked(ssrc, rtxSsrc);
1002 }
1003
// Switches the receive side over to a new remote SSRC (and RTX SSRC).
// Main thread only, with mMutex held. Returns false on failure.
//
// No-op if the requested SSRCs already match the current config. Otherwise:
// stop reception, unset the SSRC on any other conduit that claims it,
// update the config, delete the recv stream (so the next start rebuilds it
// with the new SSRCs), and restart reception if it was running.
bool WebrtcVideoConduit::SetRemoteSSRCLocked(uint32_t ssrc, uint32_t rtxSsrc) {
  MOZ_ASSERT(NS_IsMainThread());
  mMutex.AssertCurrentThreadOwns();

  uint32_t current_ssrc;
  if (!GetRemoteSSRCLocked(&current_ssrc)) {
    return false;
  }

  if (current_ssrc == ssrc && mRecvStreamConfig.rtp.rtx_ssrc == rtxSsrc) {
    // Already configured with these SSRCs; nothing to do.
    return true;
  }

  bool wasReceiving = mEngineReceiving;
  if (StopReceivingLocked() != kMediaConduitNoError) {
    return false;
  }

  {
    CSFLogDebug(LOGTAG, "%s: SSRC %u (0x%x)", __FUNCTION__, ssrc, ssrc);
    // Drop our mutex while calling into other conduits to avoid holding two
    // conduit locks at once.
    MutexAutoUnlock unlock(mMutex);
    if (!mCall->UnsetRemoteSSRC(ssrc)) {
      CSFLogError(LOGTAG,
                  "%s: Failed to unset SSRC %u (0x%x) on other conduits,"
                  " bailing",
                  __FUNCTION__, ssrc, ssrc);
      return false;
    }
  }

  mRecvStreamConfig.rtp.remote_ssrc = ssrc;
  mRecvStreamConfig.rtp.rtx_ssrc = rtxSsrc;
  // An SSRC is now known; tell the STS side it can stop waiting for one.
  mStsThread->Dispatch(NS_NewRunnableFunction(
      "WebrtcVideoConduit::WaitingForInitialSsrcNoMore",
      [this, self = RefPtr<WebrtcVideoConduit>(this)]() mutable {
        mWaitingForInitialSsrc = false;
        // Release the self-reference back on main to keep destruction there.
        NS_ReleaseOnMainThread(
            "WebrtcVideoConduit::WaitingForInitialSsrcNoMore", self.forget());
      }));
  // On the next StartReceiving() or ConfigureRecvMediaCodec, force
  // building a new RecvStream to switch SSRCs.
  DeleteRecvStream();

  if (wasReceiving) {
    if (StartReceivingLocked() != kMediaConduitNoError) {
      return false;
    }
  }

  return true;
}
1055
// Called when another conduit is claiming `ssrc`. If our remote SSRC (or
// our RTX SSRC) collides with it, pick a fresh random remote SSRC so the
// two conduits no longer overlap. Main thread only.
bool WebrtcVideoConduit::UnsetRemoteSSRC(uint32_t ssrc) {
  MOZ_ASSERT(NS_IsMainThread());
  MutexAutoLock lock(mMutex);

  unsigned int our_ssrc;
  if (!GetRemoteSSRCLocked(&our_ssrc)) {
    // This only fails when we aren't sending, which isn't really an error here
    return true;
  }

  if (our_ssrc != ssrc && mRecvStreamConfig.rtp.rtx_ssrc != ssrc) {
    // Neither of our receive SSRCs collides; nothing to do.
    return true;
  }

  mRecvStreamConfig.rtp.rtx_ssrc = 0;

  // Draw random SSRCs until we get one that is non-zero (0 signals RNG
  // failure) and different from the SSRC being claimed.
  do {
    our_ssrc = GenerateRandomSSRC();
    if (our_ssrc == 0) {
      return false;
    }
  } while (our_ssrc == ssrc);

  // There is a (tiny) chance that this new random ssrc will collide with some
  // other conduit's remote ssrc, in which case that conduit will choose a new
  // one.
  SetRemoteSSRCLocked(our_ssrc, 0);
  return true;
}
1085
GetRemoteSSRC(unsigned int * ssrc)1086 bool WebrtcVideoConduit::GetRemoteSSRC(unsigned int* ssrc) {
1087 MutexAutoLock lock(mMutex);
1088
1089 return GetRemoteSSRCLocked(ssrc);
1090 }
1091
GetRemoteSSRCLocked(unsigned int * ssrc)1092 bool WebrtcVideoConduit::GetRemoteSSRCLocked(unsigned int* ssrc) {
1093 mMutex.AssertCurrentThreadOwns();
1094
1095 if (NS_IsMainThread()) {
1096 if (!mRecvStream) {
1097 return false;
1098 }
1099 *ssrc = mRecvStream->GetStats().ssrc;
1100 } else {
1101 ASSERT_ON_THREAD(mStsThread);
1102 *ssrc = mRecvStreamStats.Ssrc();
1103 }
1104 return true;
1105 }
1106
GetSendPacketTypeStats(webrtc::RtcpPacketTypeCounter * aPacketCounts)1107 bool WebrtcVideoConduit::GetSendPacketTypeStats(
1108 webrtc::RtcpPacketTypeCounter* aPacketCounts) {
1109 ASSERT_ON_THREAD(mStsThread);
1110
1111 MutexAutoLock lock(mMutex);
1112 if (!mSendStreamStats.Active()) {
1113 return false;
1114 }
1115 *aPacketCounts = mSendStreamStats.PacketCounts();
1116 return true;
1117 }
1118
GetRecvPacketTypeStats(webrtc::RtcpPacketTypeCounter * aPacketCounts)1119 bool WebrtcVideoConduit::GetRecvPacketTypeStats(
1120 webrtc::RtcpPacketTypeCounter* aPacketCounts) {
1121 ASSERT_ON_THREAD(mStsThread);
1122
1123 if (!mRecvStreamStats.Active()) {
1124 return false;
1125 }
1126 *aPacketCounts = mRecvStreamStats.PacketCounts();
1127 return true;
1128 }
1129
// Snapshots send/receive/call statistics on the main thread, then ships the
// snapshots to the STS thread where the stats caches live. The per-stream
// updates are wrapped in runnables and executed on STS *after* the call
// stats update, so all caches refresh together in one dispatch.
void WebrtcVideoConduit::PollStats() {
  MOZ_ASSERT(NS_IsMainThread());

  nsTArray<RefPtr<Runnable>> runnables(2);
  if (mEngineTransmitting) {
    MOZ_RELEASE_ASSERT(mSendStream);
    if (!mSendStreamConfig.rtp.ssrcs.empty()) {
      uint32_t ssrc = mSendStreamConfig.rtp.ssrcs.front();
      webrtc::VideoSendStream::Stats stats = mSendStream->GetStats();
      runnables.AppendElement(NS_NewRunnableFunction(
          "WebrtcVideoConduit::SendStreamStatistics::Update",
          [this, self = RefPtr<WebrtcVideoConduit>(this),
           stats = std::move(stats),
           ssrc]() { mSendStreamStats.Update(stats, ssrc); }));
    }
  }
  if (mEngineReceiving) {
    MOZ_RELEASE_ASSERT(mRecvStream);
    webrtc::VideoReceiveStream::Stats stats = mRecvStream->GetStats();
    runnables.AppendElement(NS_NewRunnableFunction(
        "WebrtcVideoConduit::RecvStreamStatistics::Update",
        [this, self = RefPtr<WebrtcVideoConduit>(this),
         stats = std::move(stats)]() { mRecvStreamStats.Update(stats); }));
  }
  webrtc::Call::Stats stats = mCall->Call()->GetStats();
  // One dispatch updates call stats and runs the queued stream updates.
  mStsThread->Dispatch(NS_NewRunnableFunction(
      "WebrtcVideoConduit::UpdateStreamStatistics",
      [this, self = RefPtr<WebrtcVideoConduit>(this), stats = std::move(stats),
       runnables = std::move(runnables)]() mutable {
        mCallStats.Update(stats);
        for (const auto& runnable : runnables) {
          runnable->Run();
        }
        // Release the self-reference on main so destruction stays there.
        NS_ReleaseOnMainThread("WebrtcVideoConduit::UpdateStreamStatistics",
                               self.forget());
      }));
}
1167
// Mirrors the transmit/receive engine state into the STS-side stats
// collectors, and starts or cancels the 1-second stats polling timer so it
// runs exactly when at least one direction is active. Main thread only.
void WebrtcVideoConduit::UpdateVideoStatsTimer() {
  MOZ_ASSERT(NS_IsMainThread());

  bool transmitting = mEngineTransmitting;
  bool receiving = mEngineReceiving;
  mStsThread->Dispatch(NS_NewRunnableFunction(
      "WebrtcVideoConduit::SetSendStreamStatsActive",
      [this, self = RefPtr<WebrtcVideoConduit>(this), transmitting,
       receiving]() mutable {
        mSendStreamStats.SetActive(transmitting);
        mRecvStreamStats.SetActive(receiving);
        // Release the self-reference on main so destruction stays there.
        NS_ReleaseOnMainThread("WebrtcVideoConduit::SetSendStreamStatsActive",
                               self.forget());
      }));

  bool shouldBeActive = transmitting || receiving;
  if (mVideoStatsTimerActive == shouldBeActive) {
    // Timer already in the desired state; avoid re-arming/re-cancelling.
    return;
  }
  mVideoStatsTimerActive = shouldBeActive;
  if (shouldBeActive) {
    nsTimerCallbackFunc callback = [](nsITimer*, void* aClosure) {
      CSFLogDebug(LOGTAG, "StreamStats polling scheduled for VideoConduit: %p",
                  aClosure);
      static_cast<WebrtcVideoConduit*>(aClosure)->PollStats();
    };
    // Poll once a second; CAN_SKIP tolerates missed ticks under load.
    mVideoStatsTimer->InitWithNamedFuncCallback(
        callback, this, 1000, nsITimer::TYPE_REPEATING_PRECISE_CAN_SKIP,
        "WebrtcVideoConduit::SendStreamStatsUpdater");
  } else {
    mVideoStatsTimer->Cancel();
  }
}
1201
GetVideoEncoderStats(double * framerateMean,double * framerateStdDev,double * bitrateMean,double * bitrateStdDev,uint32_t * droppedFrames,uint32_t * framesEncoded,Maybe<uint64_t> * qpSum)1202 bool WebrtcVideoConduit::GetVideoEncoderStats(
1203 double* framerateMean, double* framerateStdDev, double* bitrateMean,
1204 double* bitrateStdDev, uint32_t* droppedFrames, uint32_t* framesEncoded,
1205 Maybe<uint64_t>* qpSum) {
1206 ASSERT_ON_THREAD(mStsThread);
1207
1208 MutexAutoLock lock(mMutex);
1209 if (!mEngineTransmitting || !mSendStream) {
1210 return false;
1211 }
1212 mSendStreamStats.GetVideoStreamStats(*framerateMean, *framerateStdDev,
1213 *bitrateMean, *bitrateStdDev);
1214 *droppedFrames = mSendStreamStats.DroppedFrames();
1215 *framesEncoded = mSendStreamStats.FramesEncoded();
1216 *qpSum = mSendStreamStats.QpSum();
1217 return true;
1218 }
1219
GetVideoDecoderStats(double * framerateMean,double * framerateStdDev,double * bitrateMean,double * bitrateStdDev,uint32_t * discardedPackets,uint32_t * framesDecoded)1220 bool WebrtcVideoConduit::GetVideoDecoderStats(double* framerateMean,
1221 double* framerateStdDev,
1222 double* bitrateMean,
1223 double* bitrateStdDev,
1224 uint32_t* discardedPackets,
1225 uint32_t* framesDecoded) {
1226 ASSERT_ON_THREAD(mStsThread);
1227
1228 MutexAutoLock lock(mMutex);
1229 if (!mEngineReceiving || !mRecvStream) {
1230 return false;
1231 }
1232 mRecvStreamStats.GetVideoStreamStats(*framerateMean, *framerateStdDev,
1233 *bitrateMean, *bitrateStdDev);
1234 *discardedPackets = mRecvStreamStats.DiscardedPackets();
1235 *framesDecoded = mRecvStreamStats.FramesDecoded();
1236 return true;
1237 }
1238
GetRTPReceiverStats(uint32_t * jitterMs,uint32_t * packetsLost)1239 bool WebrtcVideoConduit::GetRTPReceiverStats(uint32_t* jitterMs,
1240 uint32_t* packetsLost) {
1241 ASSERT_ON_THREAD(mStsThread);
1242
1243 CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
1244 MutexAutoLock lock(mMutex);
1245 if (!mRecvStream) {
1246 return false;
1247 }
1248
1249 *jitterMs = mRecvStreamStats.JitterMs();
1250 *packetsLost = mRecvStreamStats.PacketsLost();
1251 return true;
1252 }
1253
GetRTCPReceiverReport(uint32_t * jitterMs,uint32_t * packetsReceived,uint64_t * bytesReceived,uint32_t * cumulativeLost,Maybe<double> * aOutRttSec)1254 bool WebrtcVideoConduit::GetRTCPReceiverReport(uint32_t* jitterMs,
1255 uint32_t* packetsReceived,
1256 uint64_t* bytesReceived,
1257 uint32_t* cumulativeLost,
1258 Maybe<double>* aOutRttSec) {
1259 ASSERT_ON_THREAD(mStsThread);
1260
1261 CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
1262 aOutRttSec->reset();
1263 if (!mSendStreamStats.Active()) {
1264 return false;
1265 }
1266 if (!mSendStreamStats.SsrcFound()) {
1267 return false;
1268 }
1269 *jitterMs = mSendStreamStats.JitterMs();
1270 *packetsReceived = mSendStreamStats.PacketsReceived();
1271 *bytesReceived = mSendStreamStats.BytesReceived();
1272 *cumulativeLost = mSendStreamStats.PacketsLost();
1273 *aOutRttSec = mCallStats.RttSec();
1274 return true;
1275 }
1276
GetRTCPSenderReport(unsigned int * packetsSent,uint64_t * bytesSent)1277 bool WebrtcVideoConduit::GetRTCPSenderReport(unsigned int* packetsSent,
1278 uint64_t* bytesSent) {
1279 ASSERT_ON_THREAD(mStsThread);
1280
1281 CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
1282
1283 if (!mRecvStreamStats.Active()) {
1284 return false;
1285 }
1286
1287 *packetsSent = mRecvStreamStats.PacketsSent();
1288 *bytesSent = mRecvStreamStats.BytesSent();
1289 return true;
1290 }
1291
GetRtpSources(nsTArray<dom::RTCRtpSourceEntry> & outSources)1292 void WebrtcVideoConduit::GetRtpSources(
1293 nsTArray<dom::RTCRtpSourceEntry>& outSources) {
1294 MOZ_ASSERT(NS_IsMainThread());
1295 return mRtpSourceObserver->GetRtpSources(outSources);
1296 }
1297
InitMain()1298 MediaConduitErrorCode WebrtcVideoConduit::InitMain() {
1299 MOZ_ASSERT(NS_IsMainThread());
1300
1301 nsresult rv;
1302 nsCOMPtr<nsIPrefService> prefs =
1303 do_GetService("@mozilla.org/preferences-service;1", &rv);
1304 if (!NS_WARN_IF(NS_FAILED(rv))) {
1305 nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
1306
1307 if (branch) {
1308 int32_t temp;
1309 Unused << NS_WARN_IF(NS_FAILED(branch->GetBoolPref(
1310 "media.video.test_latency", &mVideoLatencyTestEnable)));
1311 Unused << NS_WARN_IF(NS_FAILED(branch->GetBoolPref(
1312 "media.video.test_latency", &mVideoLatencyTestEnable)));
1313 if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
1314 "media.peerconnection.video.min_bitrate", &temp)))) {
1315 if (temp >= 0) {
1316 mMinBitrate = KBPS(temp);
1317 }
1318 }
1319 if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
1320 "media.peerconnection.video.start_bitrate", &temp)))) {
1321 if (temp >= 0) {
1322 mStartBitrate = KBPS(temp);
1323 }
1324 }
1325 if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
1326 "media.peerconnection.video.max_bitrate", &temp)))) {
1327 if (temp >= 0) {
1328 mPrefMaxBitrate = KBPS(temp);
1329 }
1330 }
1331 if (mMinBitrate != 0 && mMinBitrate < kViEMinCodecBitrate_bps) {
1332 mMinBitrate = kViEMinCodecBitrate_bps;
1333 }
1334 if (mStartBitrate < mMinBitrate) {
1335 mStartBitrate = mMinBitrate;
1336 }
1337 if (mPrefMaxBitrate && mStartBitrate > mPrefMaxBitrate) {
1338 mStartBitrate = mPrefMaxBitrate;
1339 }
1340 // XXX We'd love if this was a live param for testing adaptation/etc
1341 // in automation
1342 if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
1343 "media.peerconnection.video.min_bitrate_estimate", &temp)))) {
1344 if (temp >= 0) {
1345 mMinBitrateEstimate = temp; // bps!
1346 }
1347 }
1348 if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
1349 "media.peerconnection.video.svc.spatial", &temp)))) {
1350 if (temp >= 0) {
1351 mSpatialLayers = temp;
1352 }
1353 }
1354 if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
1355 "media.peerconnection.video.svc.temporal", &temp)))) {
1356 if (temp >= 0) {
1357 mTemporalLayers = temp;
1358 }
1359 }
1360 Unused << NS_WARN_IF(NS_FAILED(branch->GetBoolPref(
1361 "media.peerconnection.video.denoising", &mDenoising)));
1362 Unused << NS_WARN_IF(NS_FAILED(branch->GetBoolPref(
1363 "media.peerconnection.video.lock_scaling", &mLockScaling)));
1364 }
1365 }
1366 #ifdef MOZ_WIDGET_ANDROID
1367 if (mozilla::camera::VideoEngine::SetAndroidObjects() != 0) {
1368 CSFLogError(LOGTAG, "%s: could not set Android objects", __FUNCTION__);
1369 return kMediaConduitSessionNotInited;
1370 }
1371 #endif // MOZ_WIDGET_ANDROID
1372 return kMediaConduitNoError;
1373 }
1374
1375 /**
1376 * Performs initialization of the MANDATORY components of the Video Engine
1377 */
Init()1378 MediaConduitErrorCode WebrtcVideoConduit::Init() {
1379 MOZ_ASSERT(NS_IsMainThread());
1380
1381 CSFLogDebug(LOGTAG, "%s this=%p", __FUNCTION__, this);
1382 MediaConduitErrorCode result;
1383 result = InitMain();
1384 if (result != kMediaConduitNoError) {
1385 return result;
1386 }
1387
1388 CSFLogDebug(LOGTAG, "%s Initialization Done", __FUNCTION__);
1389 return kMediaConduitNoError;
1390 }
1391
DeleteStreams()1392 void WebrtcVideoConduit::DeleteStreams() {
1393 MOZ_ASSERT(NS_IsMainThread());
1394
1395 // We can't delete the VideoEngine until all these are released!
1396 // And we can't use a Scoped ptr, since the order is arbitrary
1397
1398 MutexAutoLock lock(mMutex);
1399 DeleteSendStream();
1400 DeleteRecvStream();
1401 }
1402
AttachRenderer(RefPtr<mozilla::VideoRenderer> aVideoRenderer)1403 MediaConduitErrorCode WebrtcVideoConduit::AttachRenderer(
1404 RefPtr<mozilla::VideoRenderer> aVideoRenderer) {
1405 MOZ_ASSERT(NS_IsMainThread());
1406
1407 CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
1408
1409 // null renderer
1410 if (!aVideoRenderer) {
1411 CSFLogError(LOGTAG, "%s NULL Renderer", __FUNCTION__);
1412 MOZ_ASSERT(false);
1413 return kMediaConduitInvalidRenderer;
1414 }
1415
1416 // This function is called only from main, so we only need to protect against
1417 // modifying mRenderer while any webrtc.org code is trying to use it.
1418 {
1419 ReentrantMonitorAutoEnter enter(mTransportMonitor);
1420 mRenderer = aVideoRenderer;
1421 // Make sure the renderer knows the resolution
1422 mRenderer->FrameSizeChange(mReceivingWidth, mReceivingHeight);
1423 }
1424
1425 return kMediaConduitNoError;
1426 }
1427
DetachRenderer()1428 void WebrtcVideoConduit::DetachRenderer() {
1429 MOZ_ASSERT(NS_IsMainThread());
1430
1431 ReentrantMonitorAutoEnter enter(mTransportMonitor);
1432 if (mRenderer) {
1433 mRenderer = nullptr;
1434 }
1435 }
1436
SetTransmitterTransport(RefPtr<TransportInterface> aTransport)1437 MediaConduitErrorCode WebrtcVideoConduit::SetTransmitterTransport(
1438 RefPtr<TransportInterface> aTransport) {
1439 MOZ_ASSERT(NS_IsMainThread());
1440
1441 CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
1442
1443 ReentrantMonitorAutoEnter enter(mTransportMonitor);
1444 // set the transport
1445 mTransmitterTransport = aTransport;
1446 return kMediaConduitNoError;
1447 }
1448
SetReceiverTransport(RefPtr<TransportInterface> aTransport)1449 MediaConduitErrorCode WebrtcVideoConduit::SetReceiverTransport(
1450 RefPtr<TransportInterface> aTransport) {
1451 MOZ_ASSERT(NS_IsMainThread());
1452
1453 CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
1454
1455 ReentrantMonitorAutoEnter enter(mTransportMonitor);
1456 // set the transport
1457 mReceiverTransport = aTransport;
1458 return kMediaConduitNoError;
1459 }
1460
// Applies a negotiated list of receive codecs. Main thread only.
//
// Validates each codec config, accumulates the union of the RTCP feedback
// capabilities (NACK, TMMBR, REMB, FEC, transport-cc, keyframe-request
// method), then decides whether the existing receive stream can be kept or
// must be torn down and rebuilt with the new settings.
MediaConduitErrorCode WebrtcVideoConduit::ConfigureRecvMediaCodecs(
    const std::vector<UniquePtr<VideoCodecConfig>>& codecConfigList,
    const RtpRtcpConfig& aRtpRtcpConfig) {
  MOZ_ASSERT(NS_IsMainThread());

  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
  MediaConduitErrorCode condError = kMediaConduitNoError;
  std::string payloadName;

  if (codecConfigList.empty()) {
    CSFLogError(LOGTAG, "%s Zero number of codecs to configure", __FUNCTION__);
    return kMediaConduitMalformedArgument;
  }

  webrtc::KeyFrameRequestMethod kf_request_method = webrtc::kKeyFrameReqPliRtcp;
  bool kf_request_enabled = false;
  bool use_nack_basic = false;
  bool use_tmmbr = false;
  bool use_remb = false;
  bool use_fec = false;
  bool use_transport_cc = false;
  int ulpfec_payload_type = kNullPayloadType;
  int red_payload_type = kNullPayloadType;
  bool configuredH264 = false;
  nsTArray<UniquePtr<VideoCodecConfig>> recv_codecs;

  // Try Applying the codecs in the list
  // we treat as success if at least one codec was applied and reception was
  // started successfully.
  std::set<unsigned int> codec_types_seen;
  for (const auto& codec_config : codecConfigList) {
    if ((condError = ValidateCodecConfig(codec_config.get())) !=
        kMediaConduitNoError) {
      // Skip invalid entries rather than failing the whole configure.
      CSFLogError(LOGTAG, "%s Invalid config for %s decoder: %i", __FUNCTION__,
                  codec_config ? codec_config->mName.c_str() : "<null>",
                  condError);
      continue;
    }
    if (codec_config->mName == "H264") {
      // TODO(bug 1200768): We can only handle configuring one recv H264 codec
      if (configuredH264) {
        continue;
      }
      configuredH264 = true;
    }

    // ULPFEC/RED are not real decoders; just record their payload types.
    if (codec_config->mName == kUlpFecPayloadName) {
      ulpfec_payload_type = codec_config->mType;
      continue;
    }

    if (codec_config->mName == kRedPayloadName) {
      red_payload_type = codec_config->mType;
      continue;
    }

    // Check for the keyframe request type: PLI is preferred
    // over FIR, and FIR is preferred over none.
    // XXX (See upstream issue
    // https://bugs.chromium.org/p/webrtc/issues/detail?id=7002): There is no
    // 'none' option in webrtc.org
    if (codec_config->RtcpFbNackIsSet("pli")) {
      kf_request_enabled = true;
      kf_request_method = webrtc::kKeyFrameReqPliRtcp;
    } else if (!kf_request_enabled && codec_config->RtcpFbCcmIsSet("fir")) {
      kf_request_enabled = true;
      kf_request_method = webrtc::kKeyFrameReqFirRtcp;
    }

    // What if codec A has Nack and REMB, and codec B has TMMBR, and codec C has
    // none? In practice, that's not a useful configuration, and
    // VideoReceiveStream::Config can't represent that, so simply union the
    // (boolean) settings
    use_nack_basic |= codec_config->RtcpFbNackIsSet("");
    use_tmmbr |= codec_config->RtcpFbCcmIsSet("tmmbr");
    use_remb |= codec_config->RtcpFbRembIsSet();
    use_fec |= codec_config->RtcpFbFECIsSet();
    use_transport_cc |= codec_config->RtcpFbTransportCCIsSet();

    recv_codecs.AppendElement(new VideoCodecConfig(*codec_config));
  }

  if (!recv_codecs.Length()) {
    CSFLogError(LOGTAG, "%s Found no valid receive codecs", __FUNCTION__);
    return kMediaConduitMalformedArgument;
  }

  // Now decide if we need to recreate the receive stream, or can keep it
  if (!mRecvStream || CodecsDifferent(recv_codecs, mRecvCodecList) ||
      mRecvStreamConfig.rtp.nack.rtp_history_ms !=
          (use_nack_basic ? 1000 : 0) ||
      mRecvStreamConfig.rtp.remb != use_remb ||
      mRecvStreamConfig.rtp.transport_cc != use_transport_cc ||
      mRecvStreamConfig.rtp.tmmbr != use_tmmbr ||
      mRecvStreamConfig.rtp.keyframe_method != kf_request_method ||
      (use_fec &&
       (mRecvStreamConfig.rtp.ulpfec_payload_type != ulpfec_payload_type ||
        mRecvStreamConfig.rtp.red_payload_type != red_payload_type))) {
    MutexAutoLock lock(mMutex);

    condError = StopReceivingLocked();
    if (condError != kMediaConduitNoError) {
      return condError;
    }

    // If we fail after here things get ugly
    mRecvStreamConfig.rtp.rtcp_mode = aRtpRtcpConfig.GetRtcpMode();
    mRecvStreamConfig.rtp.nack.rtp_history_ms = use_nack_basic ? 1000 : 0;
    mRecvStreamConfig.rtp.remb = use_remb;
    mRecvStreamConfig.rtp.transport_cc = use_transport_cc;
    mRecvStreamConfig.rtp.tmmbr = use_tmmbr;
    mRecvStreamConfig.rtp.keyframe_method = kf_request_method;

    if (use_fec) {
      mRecvStreamConfig.rtp.ulpfec_payload_type = ulpfec_payload_type;
      mRecvStreamConfig.rtp.red_payload_type = red_payload_type;
    } else {
      // Reset to defaults
      mRecvStreamConfig.rtp.ulpfec_payload_type = -1;
      mRecvStreamConfig.rtp.red_payload_type = -1;
    }

    // Rebuild the RTX payload-type mapping from the accepted codecs.
    mRecvStreamConfig.rtp.rtx_associated_payload_types.clear();
    for (auto& codec : recv_codecs) {
      if (codec->RtxPayloadTypeIsSet()) {
        mRecvStreamConfig.rtp.AddRtxBinding(codec->mRTXPayloadType,
                                            codec->mType);
      }
    }
    // SetRemoteSSRC should have populated this already
    mRecvSSRC = mRecvStreamConfig.rtp.remote_ssrc;

    // XXX ugh! same SSRC==0 problem that webrtc.org has
    if (mRecvSSRC == 0) {
      // Handle un-signalled SSRCs by creating a random one and then when it
      // actually gets set, we'll destroy and recreate. Simpler than trying to
      // unwind all the logic that assumes the receive stream is created and
      // started when we ConfigureRecvMediaCodecs()
      uint32_t ssrc = GenerateRandomSSRC();
      if (ssrc == 0) {
        // webrtc.org code has fits if you select an SSRC of 0, so that's how
        // we signal an error.
        return kMediaConduitUnknownError;
      }

      mRecvStreamConfig.rtp.remote_ssrc = ssrc;
      mRecvSSRC = ssrc;
    }

    // 0 isn't allowed.  Would be best to ask for a random SSRC from the
    // RTP code.  Would need to call rtp_sender.cc -- GenerateNewSSRC(),
    // which isn't exposed.  It's called on collision, or when we decide to
    // send.  it should be called on receiver creation.  Here, we're
    // generating the SSRC value - but this causes ssrc_forced in set in
    // rtp_sender, which locks us into the SSRC - even a collision won't
    // change it!!!
    MOZ_ASSERT(!mSendStreamConfig.rtp.ssrcs.empty());
    auto ssrc = mSendStreamConfig.rtp.ssrcs.front();
    Unused << NS_WARN_IF(ssrc == mRecvStreamConfig.rtp.remote_ssrc);

    // Local SSRC must differ from the remote SSRC; re-roll until it does.
    while (ssrc == mRecvStreamConfig.rtp.remote_ssrc) {
      ssrc = GenerateRandomSSRC();
      if (ssrc == 0) {
        return kMediaConduitUnknownError;
      }
    }

    mRecvStreamConfig.rtp.local_ssrc = ssrc;
    CSFLogDebug(LOGTAG,
                "%s (%p): Local SSRC 0x%08x (of %u), remote SSRC 0x%08x",
                __FUNCTION__, (void*)this, ssrc,
                (uint32_t)mSendStreamConfig.rtp.ssrcs.size(),
                mRecvStreamConfig.rtp.remote_ssrc);

    // XXX Copy over those that are the same and don't rebuild them
    mRecvCodecList.SwapElements(recv_codecs);
    recv_codecs.Clear();

    // Force a fresh stream with the updated config, then restart reception.
    DeleteRecvStream();
    return StartReceivingLocked();
  }
  return kMediaConduitNoError;
}
1644
// Creates a decoder for the given codec type, preferring (in order) the
// platform MediaDataDecoder, then a codec-specific external/hardware
// decoder, then the software fallback. Records the GMP plugin ID (if any)
// in mRecvCodecPluginID. Returns null for unsupported codec types.
std::unique_ptr<webrtc::VideoDecoder> WebrtcVideoConduit::CreateDecoder(
    webrtc::VideoCodecType aType) {
  MOZ_ASSERT(NS_IsMainThread());

  std::unique_ptr<webrtc::VideoDecoder> decoder = nullptr;
  mRecvCodecPluginID = 0;

#ifdef MOZ_WEBRTC_MEDIACODEC
  bool enabled = false;
#endif

  // Attempt to create a decoder using MediaDataDecoder.
  decoder.reset(MediaDataCodec::CreateDecoder(aType));
  if (decoder) {
    return decoder;
  }

  switch (aType) {
    case webrtc::VideoCodecType::kVideoCodecH264:
      // get an external decoder
      decoder.reset(GmpVideoCodec::CreateDecoder());
      if (decoder) {
        // Remember the GMP plugin backing this decoder.
        mRecvCodecPluginID =
            static_cast<WebrtcVideoDecoder*>(decoder.get())->PluginID();
      }
      break;

    case webrtc::VideoCodecType::kVideoCodecVP8:
#ifdef MOZ_WEBRTC_MEDIACODEC
      // attempt to get a decoder
      enabled = mozilla::Preferences::GetBool(
          "media.navigator.hardware.vp8_decode.acceleration_enabled", false);
      if (enabled) {
        nsCOMPtr<nsIGfxInfo> gfxInfo = do_GetService("@mozilla.org/gfx/info;1");
        if (gfxInfo) {
          int32_t status;
          nsCString discardFailureId;

          // Only use the hardware path when gfx has whitelisted it.
          if (NS_SUCCEEDED(gfxInfo->GetFeatureStatus(
                  nsIGfxInfo::FEATURE_WEBRTC_HW_ACCELERATION_DECODE,
                  discardFailureId, &status))) {
            if (status != nsIGfxInfo::FEATURE_STATUS_OK) {
              NS_WARNING(
                  "VP8 decoder hardware is not whitelisted: disabling.\n");
            } else {
              decoder = MediaCodecVideoCodec::CreateDecoder(
                  MediaCodecVideoCodec::CodecType::CODEC_VP8);
            }
          }
        }
      }
#endif
      // Use a software VP8 decoder as a fallback.
      if (!decoder) {
        decoder = webrtc::VP8Decoder::Create();
      }
      break;

    case webrtc::VideoCodecType::kVideoCodecVP9:
      MOZ_ASSERT(webrtc::VP9Decoder::IsSupported());
      decoder = webrtc::VP9Decoder::Create();
      break;

    default:
      break;
  }

  return decoder;
}
1714
// Creates an encoder for the given codec type. If the platform-encoder pref
// is set, the MediaDataCodec encoder is tried first; otherwise falls back
// to the GMP external encoder (H264), the VP8 simulcast proxy, or the
// software VP9 encoder. Records the GMP plugin ID (if any) in
// mSendCodecPluginID. Returns null for unsupported codec types.
std::unique_ptr<webrtc::VideoEncoder> WebrtcVideoConduit::CreateEncoder(
    webrtc::VideoCodecType aType) {
  MOZ_ASSERT(NS_IsMainThread());

  std::unique_ptr<webrtc::VideoEncoder> encoder = nullptr;
  mSendCodecPluginID = 0;

#ifdef MOZ_WEBRTC_MEDIACODEC
  bool enabled = false;
#endif

  if (StaticPrefs::media_webrtc_platformencoder()) {
    encoder.reset(MediaDataCodec::CreateEncoder(aType));
    if (encoder) {
      return encoder;
    }
  }

  switch (aType) {
    case webrtc::VideoCodecType::kVideoCodecH264:
      // get an external encoder
      encoder.reset(GmpVideoCodec::CreateEncoder());
      if (encoder) {
        // Remember the GMP plugin backing this encoder.
        mSendCodecPluginID =
            static_cast<WebrtcVideoEncoder*>(encoder.get())->PluginID();
      }
      break;

    case webrtc::VideoCodecType::kVideoCodecVP8:
      // The proxy delegates back to CreateVideoEncoder() per simulcast layer.
      encoder.reset(new webrtc::VP8EncoderSimulcastProxy(this));
      break;

    case webrtc::VideoCodecType::kVideoCodecVP9:
      encoder = webrtc::VP9Encoder::Create();
      break;

    default:
      break;
  }
  return encoder;
}
1756
GetSupportedFormats() const1757 std::vector<webrtc::SdpVideoFormat> WebrtcVideoConduit::GetSupportedFormats()
1758 const {
1759 MOZ_ASSERT_UNREACHABLE("Unexpected call");
1760 CSFLogError(LOGTAG, "Unexpected call to GetSupportedFormats()");
1761 return {webrtc::SdpVideoFormat("VP8")};
1762 }
1763
QueryVideoEncoder(const webrtc::SdpVideoFormat & format) const1764 WebrtcVideoConduit::CodecInfo WebrtcVideoConduit::QueryVideoEncoder(
1765 const webrtc::SdpVideoFormat& format) const {
1766 MOZ_ASSERT_UNREACHABLE("Unexpected call");
1767 CSFLogError(LOGTAG, "Unexpected call to QueryVideoEncoder()");
1768 CodecInfo info;
1769 info.is_hardware_accelerated = false;
1770 info.has_internal_source = false;
1771 return info;
1772 }
1773
CreateVideoEncoder(const webrtc::SdpVideoFormat & format)1774 std::unique_ptr<webrtc::VideoEncoder> WebrtcVideoConduit::CreateVideoEncoder(
1775 const webrtc::SdpVideoFormat& format) {
1776 MOZ_ASSERT(format.name == "VP8");
1777 std::unique_ptr<webrtc::VideoEncoder> encoder = nullptr;
1778 #ifdef MOZ_WEBRTC_MEDIACODEC
1779 // attempt to get a encoder
1780 enabled = mozilla::Preferences::GetBool(
1781 "media.navigator.hardware.vp8_encode.acceleration_enabled", false);
1782 if (enabled) {
1783 nsCOMPtr<nsIGfxInfo> gfxInfo = do_GetService("@mozilla.org/gfx/info;1");
1784 if (gfxInfo) {
1785 int32_t status;
1786 nsCString discardFailureId;
1787
1788 if (NS_SUCCEEDED(gfxInfo->GetFeatureStatus(
1789 nsIGfxInfo::FEATURE_WEBRTC_HW_ACCELERATION_ENCODE,
1790 discardFailureId, &status))) {
1791 if (status != nsIGfxInfo::FEATURE_STATUS_OK) {
1792 NS_WARNING("VP8 encoder hardware is not whitelisted: disabling.\n");
1793 } else {
1794 encoder = MediaCodecVideoCodec::CreateEncoder(
1795 MediaCodecVideoCodec::CodecType::CODEC_VP8);
1796 }
1797 }
1798 }
1799 }
1800 #endif
1801 // Use a software VP8 encoder as a fallback.
1802 encoder = webrtc::VP8Encoder::Create();
1803 return encoder;
1804 }
1805
1806 // XXX we need to figure out how to feed back changes in preferred capture
1807 // resolution to the getUserMedia source.
// Applies the negotiated resolution/framerate constraints for the given
// capture size. Caller holds mMutex.
//
// Width/height are clamped (aspect-preserving) to the negotiated
// maxWidth/maxHeight, the VideoAdapter's pixel budget is set from the sink
// wants and max-fs, and the sending framerate is re-derived and pushed to
// the stream factory when it changes.
void WebrtcVideoConduit::SelectSendResolution(unsigned short width,
                                              unsigned short height) {
  mMutex.AssertCurrentThreadOwns();
  // XXX This will do bandwidth-resolution adaptation as well - bug 877954

  // Enforce constraints
  if (mCurSendCodecConfig) {
    uint16_t max_width = mCurSendCodecConfig->mEncodingConstraints.maxWidth;
    uint16_t max_height = mCurSendCodecConfig->mEncodingConstraints.maxHeight;
    if (max_width || max_height) {
      // A zero constraint means "unbounded" on that axis.
      max_width = max_width ? max_width : UINT16_MAX;
      max_height = max_height ? max_height : UINT16_MAX;
      ConstrainPreservingAspectRatio(max_width, max_height, &width, &height);
    }

    int max_fs = mSinkWantsPixelCount;
    // Limit resolution to max-fs
    if (mCurSendCodecConfig->mEncodingConstraints.maxFs) {
      // max-fs is in macroblocks, convert to pixels
      max_fs = std::min(
          max_fs,
          static_cast<int>(mCurSendCodecConfig->mEncodingConstraints.maxFs *
                           (16 * 16)));
    }
    mVideoAdapter->OnResolutionFramerateRequest(
        rtc::Optional<int>(), max_fs, std::numeric_limits<int>::max());
  }

  unsigned int framerate = SelectSendFrameRate(
      mCurSendCodecConfig.get(), mSendingFramerate, width, height);
  if (mSendingFramerate != framerate) {
    CSFLogDebug(LOGTAG, "%s: framerate changing to %u (from %u)", __FUNCTION__,
                framerate, mSendingFramerate);
    mSendingFramerate = framerate;
    // Keep the stream factory in sync so new encodings use the new rate.
    mVideoStreamFactory->SetSendingFramerate(mSendingFramerate);
  }
}
1845
AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame> * sink,const rtc::VideoSinkWants & wants)1846 void WebrtcVideoConduit::AddOrUpdateSink(
1847 rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
1848 const rtc::VideoSinkWants& wants) {
1849 if (!NS_IsMainThread()) {
1850 // This may be called off main thread, but only to update an already added
1851 // sink. If we add it after the dispatch we're at risk of a UAF.
1852 NS_DispatchToMainThread(
1853 NS_NewRunnableFunction("WebrtcVideoConduit::UpdateSink",
1854 [this, self = RefPtr<WebrtcVideoConduit>(this),
1855 sink, wants = std::move(wants)]() {
1856 if (mRegisteredSinks.Contains(sink)) {
1857 AddOrUpdateSinkNotLocked(sink, wants);
1858 }
1859 }));
1860 return;
1861 }
1862
1863 mMutex.AssertCurrentThreadOwns();
1864 if (!mRegisteredSinks.Contains(sink)) {
1865 mRegisteredSinks.AppendElement(sink);
1866 }
1867 mVideoBroadcaster.AddOrUpdateSink(sink, wants);
1868 OnSinkWantsChanged(mVideoBroadcaster.wants());
1869 }
1870
// Locking wrapper around AddOrUpdateSink() for callers that do not already
// hold mMutex (e.g. the main-thread dispatch inside AddOrUpdateSink).
void WebrtcVideoConduit::AddOrUpdateSinkNotLocked(
    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
    const rtc::VideoSinkWants& wants) {
  MutexAutoLock lock(mMutex);
  AddOrUpdateSink(sink, wants);
}
1877
// Unregisters |sink| and stops broadcasting frames to it. Main thread only;
// the caller must already hold mMutex (see RemoveSinkNotLocked).
void WebrtcVideoConduit::RemoveSink(
    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
  MOZ_ASSERT(NS_IsMainThread());
  mMutex.AssertCurrentThreadOwns();

  mRegisteredSinks.RemoveElement(sink);
  mVideoBroadcaster.RemoveSink(sink);
  // Re-aggregate the remaining sinks' wants into our pixel budget.
  OnSinkWantsChanged(mVideoBroadcaster.wants());
}
1887
// Locking wrapper around RemoveSink() for callers not already holding mMutex.
void WebrtcVideoConduit::RemoveSinkNotLocked(
    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
  MutexAutoLock lock(mMutex);
  RemoveSink(sink);
}
1893
OnSinkWantsChanged(const rtc::VideoSinkWants & wants)1894 void WebrtcVideoConduit::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) {
1895 MOZ_ASSERT(NS_IsMainThread());
1896 mMutex.AssertCurrentThreadOwns();
1897
1898 if (mLockScaling) {
1899 return;
1900 }
1901
1902 CSFLogDebug(LOGTAG, "%s (send SSRC %u (0x%x)) - wants pixels = %d",
1903 __FUNCTION__, mSendStreamConfig.rtp.ssrcs.front(),
1904 mSendStreamConfig.rtp.ssrcs.front(), wants.max_pixel_count);
1905
1906 if (!mCurSendCodecConfig) {
1907 return;
1908 }
1909
1910 mSinkWantsPixelCount = wants.max_pixel_count;
1911 mUpdateResolution = true;
1912 }
1913
// Adapts a captured frame to the sinks' wants (crop/scale) and broadcasts it
// toward the encoder. Drops the frame (returning success) when no sink wants
// frames, the adapter rejects it, the crop is degenerate, or the scaling
// buffer pool is exhausted.
MediaConduitErrorCode WebrtcVideoConduit::SendVideoFrame(
    const webrtc::VideoFrame& frame) {
  // XXX Google uses a "timestamp_aligner" to translate timestamps from the
  // camera via TranslateTimestamp(); we should look at doing the same. This
  // avoids sampling error when capturing frames, but google had to deal with
  // some broken cameras, include Logitech c920's IIRC.

  // Crop/scale geometry produced by the VideoAdapter under the lock below.
  int cropWidth;
  int cropHeight;
  int adaptedWidth;
  int adaptedHeight;
  {
    MutexAutoLock lock(mMutex);
    CSFLogVerbose(LOGTAG, "WebrtcVideoConduit %p %s (send SSRC %u (0x%x))",
                  this, __FUNCTION__, mSendStreamConfig.rtp.ssrcs.front(),
                  mSendStreamConfig.rtp.ssrcs.front());

    // Re-run resolution selection when the input size changed or a sink
    // updated its pixel budget (mUpdateResolution set by OnSinkWantsChanged).
    if (mUpdateResolution || frame.width() != mLastWidth ||
        frame.height() != mLastHeight) {
      // See if we need to recalculate what we're sending.
      CSFLogVerbose(LOGTAG, "%s: call SelectSendResolution with %ux%u",
                    __FUNCTION__, frame.width(), frame.height());
      MOZ_ASSERT(frame.width() != 0 && frame.height() != 0);
      // Note coverity will flag this since it thinks they can be 0
      MOZ_ASSERT(mCurSendCodecConfig);

      mLastWidth = frame.width();
      mLastHeight = frame.height();
      mUpdateResolution = false;
      SelectSendResolution(frame.width(), frame.height());
    }

    // adapt input video to wants of sink
    if (!mVideoBroadcaster.frame_wanted()) {
      // No sink currently wants frames; silently drop.
      return kMediaConduitNoError;
    }

    if (!mVideoAdapter->AdaptFrameResolution(
            frame.width(), frame.height(),
            frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec, &cropWidth,
            &cropHeight, &adaptedWidth, &adaptedHeight)) {
      // VideoAdapter dropped the frame.
      return kMediaConduitNoError;
    }
  }

  // If we have zero width or height, drop the frame here. Attempting to send
  // it will cause all sorts of problems in the webrtc.org code.
  if (cropWidth == 0 || cropHeight == 0) {
    return kMediaConduitNoError;
  }

  // Center the crop rectangle within the source frame.
  int cropX = (frame.width() - cropWidth) / 2;
  int cropY = (frame.height() - cropHeight) / 2;

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer;
  if (adaptedWidth == frame.width() && adaptedHeight == frame.height()) {
    // No adaption - optimized path: reuse the source buffer directly.
    buffer = frame.video_frame_buffer();
  } else {
    // Adapted I420 frame: crop and scale into a pooled buffer.
    rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer =
        mBufferPool.CreateBuffer(adaptedWidth, adaptedHeight);
    if (!i420Buffer) {
      CSFLogWarn(LOGTAG, "Creating a buffer for scaling failed, pool is empty");
      return kMediaConduitNoError;
    }
    i420Buffer->CropAndScaleFrom(*frame.video_frame_buffer()->GetI420().get(),
                                 cropX, cropY, cropWidth, cropHeight);
    buffer = i420Buffer;
  }

  mVideoBroadcaster.OnFrame(webrtc::VideoFrame(
      buffer, frame.timestamp(), frame.render_time_ms(), frame.rotation()));

  // Bump the delivered-frame stat on the STS thread; the strong self ref is
  // released back on main thread so destruction stays there.
  mStsThread->Dispatch(NS_NewRunnableFunction(
      "SendStreamStatistics::FrameDeliveredToEncoder",
      [self = RefPtr<WebrtcVideoConduit>(this), this]() mutable {
        mSendStreamStats.FrameDeliveredToEncoder();
        NS_ReleaseOnMainThread("SendStreamStatistics::FrameDeliveredToEncoder",
                               self.forget());
      }));
  return kMediaConduitNoError;
}
1998
1999 // Transport Layer Callbacks
2000
DeliverPacket(const void * data,int len)2001 MediaConduitErrorCode WebrtcVideoConduit::DeliverPacket(const void* data,
2002 int len) {
2003 ASSERT_ON_THREAD(mStsThread);
2004
2005 // Bug 1499796 - we need to get passed the time the packet was received
2006 webrtc::PacketReceiver::DeliveryStatus status =
2007 mCall->Call()->Receiver()->DeliverPacket(
2008 webrtc::MediaType::VIDEO, static_cast<const uint8_t*>(data), len,
2009 webrtc::PacketTime());
2010
2011 if (status != webrtc::PacketReceiver::DELIVERY_OK) {
2012 CSFLogError(LOGTAG, "%s DeliverPacket Failed, %d", __FUNCTION__, status);
2013 return kMediaConduitRTPProcessingFailed;
2014 }
2015
2016 return kMediaConduitNoError;
2017 }
2018
// Entry point for incoming RTP. Handles receive-SSRC changes (unsignaled or
// changed SSRC) by buffering packets while the receive stream is rebuilt on
// main thread, then delivers packets to the webrtc.org Call. STS thread only.
MediaConduitErrorCode WebrtcVideoConduit::ReceivedRTPPacket(
    const void* data, int len, webrtc::RTPHeader& header) {
  ASSERT_ON_THREAD(mStsThread);

  if (mAllowSsrcChange || mWaitingForInitialSsrc) {
    // Handle the unknown ssrc (and ssrc-not-signaled case).
    // We can't just do this here; it has to happen on MainThread :-(
    // We also don't want to drop the packet, nor stall this thread, so we hold
    // the packet (and any following) for inserting once the SSRC is set.
    if (mRtpPacketQueue.IsQueueActive()) {
      // A switch is already in flight; keep buffering until it completes.
      mRtpPacketQueue.Enqueue(data, len);
      return kMediaConduitNoError;
    }

    bool switchRequired = mRecvSSRC != header.ssrc;
    if (switchRequired) {
      // We need to check that the newly received ssrc is not already
      // associated with ulpfec or rtx. This is how webrtc.org handles
      // things, see https://codereview.webrtc.org/1226093002.
      MutexAutoLock lock(mMutex);
      const webrtc::VideoReceiveStream::Config::Rtp& rtp =
          mRecvStreamConfig.rtp;
      switchRequired =
          rtp.rtx_associated_payload_types.find(header.payloadType) ==
              rtp.rtx_associated_payload_types.end() &&
          rtp.ulpfec_payload_type != header.payloadType;
    }

    if (switchRequired) {
      // a new switch needs to be done
      // any queued packets are from a previous switch that hasn't completed
      // yet; drop them and only process the latest SSRC
      mRtpPacketQueue.Clear();
      mRtpPacketQueue.Enqueue(data, len);

      CSFLogDebug(LOGTAG, "%s: switching from SSRC %u to %u", __FUNCTION__,
                  static_cast<uint32_t>(mRecvSSRC), header.ssrc);
      // we "switch" here immediately, but buffer until the queue is released
      mRecvSSRC = header.ssrc;

      // Ensure the lambda captures a strong ref (|self|) so |this| outlives
      // the dispatch.
      NS_DispatchToMainThread(NS_NewRunnableFunction(
          "WebrtcVideoConduit::WebrtcGmpPCHandleSetter",
          [this, self = RefPtr<WebrtcVideoConduit>(this),
           ssrc = header.ssrc]() mutable {
            // Normally this is done in CreateOrUpdateMediaPipeline() for
            // initial creation and renegotiation, but here we're rebuilding the
            // Receive channel at a lower level. This is needed whenever we're
            // creating a GMPVideoCodec (in particular, H264) so it can
            // communicate errors to the PC.
            WebrtcGmpPCHandleSetter setter(mPCHandle);
            // TODO: This is problematic with rtx enabled, we don't know if
            // new ssrc is for rtx or not. This is fixed in a later patch in
            // this series.
            SetRemoteSSRC(
                ssrc, 0);  // this will likely re-create the VideoReceiveStream
            // We want to unblock the queued packets on the original thread
            mStsThread->Dispatch(NS_NewRunnableFunction(
                "WebrtcVideoConduit::QueuedPacketsHandler",
                [this, self = RefPtr<WebrtcVideoConduit>(this),
                 ssrc]() mutable {
                  if (ssrc != mRecvSSRC) {
                    // this is an intermediate switch; another is in-flight
                    return;
                  }
                  mRtpPacketQueue.DequeueAll(this);
                  NS_ReleaseOnMainThread(
                      "WebrtcVideoConduit::QueuedPacketsHandler",
                      self.forget());
                }));
          }));
      return kMediaConduitNoError;
    }
  }

  // Log seq# (bytes 2-3) and SSRC (bytes 8-11) read straight from the raw
  // RTP header. NOTE(review): these casts assume |data| is suitably aligned
  // for uint16_t/uint32_t loads -- confirm on all supported platforms.
  CSFLogVerbose(LOGTAG, "%s: seq# %u, Len %d, SSRC %u (0x%x) ", __FUNCTION__,
                (uint16_t)ntohs(((uint16_t*)data)[1]), len,
                (uint32_t)ntohl(((uint32_t*)data)[2]),
                (uint32_t)ntohl(((uint32_t*)data)[2]));

  if (DeliverPacket(data, len) != kMediaConduitNoError) {
    CSFLogError(LOGTAG, "%s RTP Processing Failed", __FUNCTION__);
    return kMediaConduitRTPProcessingFailed;
  }
  return kMediaConduitNoError;
}
2105
ReceivedRTCPPacket(const void * data,int len)2106 MediaConduitErrorCode WebrtcVideoConduit::ReceivedRTCPPacket(const void* data,
2107 int len) {
2108 ASSERT_ON_THREAD(mStsThread);
2109
2110 CSFLogVerbose(LOGTAG, " %s Len %d ", __FUNCTION__, len);
2111
2112 if (DeliverPacket(data, len) != kMediaConduitNoError) {
2113 CSFLogError(LOGTAG, "%s RTCP Processing Failed", __FUNCTION__);
2114 return kMediaConduitRTPProcessingFailed;
2115 }
2116
2117 // TODO(bug 1496533): We will need to keep separate timestamps for each SSRC,
2118 // and for each SSRC we will need to keep a timestamp for SR and RR.
2119 mLastRtcpReceived = Some(GetNow());
2120 return kMediaConduitNoError;
2121 }
2122
2123 // TODO(bug 1496533): We will need to add a type (ie; SR or RR) param here, or
2124 // perhaps break this function into two functions, one for each type.
// Returns the timestamp of the most recently received RTCP packet, if any
// (set in ReceivedRTCPPacket). STS thread only.
Maybe<DOMHighResTimeStamp> WebrtcVideoConduit::LastRtcpReceived() const {
  ASSERT_ON_THREAD(mStsThread);
  return mLastRtcpReceived;
}
2129
// Public entry point: stops the send side. Main thread only; takes mMutex
// and delegates to the Locked variant.
MediaConduitErrorCode WebrtcVideoConduit::StopTransmitting() {
  MOZ_ASSERT(NS_IsMainThread());
  MutexAutoLock lock(mMutex);

  return StopTransmittingLocked();
}
2136
// Public entry point: starts the send side. Main thread only; takes mMutex
// and delegates to the Locked variant.
MediaConduitErrorCode WebrtcVideoConduit::StartTransmitting() {
  MOZ_ASSERT(NS_IsMainThread());
  MutexAutoLock lock(mMutex);

  return StartTransmittingLocked();
}
2143
// Public entry point: stops the receive side. Main thread only; takes mMutex
// and delegates to the Locked variant.
MediaConduitErrorCode WebrtcVideoConduit::StopReceiving() {
  MOZ_ASSERT(NS_IsMainThread());
  MutexAutoLock lock(mMutex);

  return StopReceivingLocked();
}
2150
// Public entry point: starts the receive side. Main thread only; takes mMutex
// and delegates to the Locked variant.
MediaConduitErrorCode WebrtcVideoConduit::StartReceiving() {
  MOZ_ASSERT(NS_IsMainThread());
  MutexAutoLock lock(mMutex);

  return StartReceivingLocked();
}
2157
StopTransmittingLocked()2158 MediaConduitErrorCode WebrtcVideoConduit::StopTransmittingLocked() {
2159 MOZ_ASSERT(NS_IsMainThread());
2160 mMutex.AssertCurrentThreadOwns();
2161
2162 if (mEngineTransmitting) {
2163 if (mSendStream) {
2164 CSFLogDebug(LOGTAG, "%s Engine Already Sending. Attemping to Stop ",
2165 __FUNCTION__);
2166 mSendStream->Stop();
2167 }
2168
2169 mEngineTransmitting = false;
2170 UpdateVideoStatsTimer();
2171 }
2172 return kMediaConduitNoError;
2173 }
2174
StartTransmittingLocked()2175 MediaConduitErrorCode WebrtcVideoConduit::StartTransmittingLocked() {
2176 MOZ_ASSERT(NS_IsMainThread());
2177 mMutex.AssertCurrentThreadOwns();
2178
2179 if (mEngineTransmitting) {
2180 return kMediaConduitNoError;
2181 }
2182
2183 CSFLogDebug(LOGTAG, "%s Attemping to start... ", __FUNCTION__);
2184 // Start Transmitting on the video engine
2185 if (!mSendStream) {
2186 MediaConduitErrorCode rval = CreateSendStream();
2187 if (rval != kMediaConduitNoError) {
2188 CSFLogError(LOGTAG, "%s Start Send Error %d ", __FUNCTION__, rval);
2189 return rval;
2190 }
2191 }
2192
2193 mSendStream->Start();
2194 // XXX File a bug to consider hooking this up to the state of mtransport
2195 mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO,
2196 webrtc::kNetworkUp);
2197 mEngineTransmitting = true;
2198 UpdateVideoStatsTimer();
2199
2200 return kMediaConduitNoError;
2201 }
2202
StopReceivingLocked()2203 MediaConduitErrorCode WebrtcVideoConduit::StopReceivingLocked() {
2204 MOZ_ASSERT(NS_IsMainThread());
2205 mMutex.AssertCurrentThreadOwns();
2206
2207 // Are we receiving already? If so, stop receiving and playout
2208 // since we can't apply new recv codec when the engine is playing.
2209 if (mEngineReceiving && mRecvStream) {
2210 CSFLogDebug(LOGTAG, "%s Engine Already Receiving . Attemping to Stop ",
2211 __FUNCTION__);
2212 mRecvStream->Stop();
2213 }
2214
2215 mEngineReceiving = false;
2216 UpdateVideoStatsTimer();
2217 return kMediaConduitNoError;
2218 }
2219
StartReceivingLocked()2220 MediaConduitErrorCode WebrtcVideoConduit::StartReceivingLocked() {
2221 MOZ_ASSERT(NS_IsMainThread());
2222 mMutex.AssertCurrentThreadOwns();
2223
2224 if (mEngineReceiving) {
2225 return kMediaConduitNoError;
2226 }
2227
2228 CSFLogDebug(LOGTAG, "%s Attemping to start... (SSRC %u (0x%x))", __FUNCTION__,
2229 static_cast<uint32_t>(mRecvSSRC),
2230 static_cast<uint32_t>(mRecvSSRC));
2231 // Start Receiving on the video engine
2232 if (!mRecvStream) {
2233 MediaConduitErrorCode rval = CreateRecvStream();
2234 if (rval != kMediaConduitNoError) {
2235 CSFLogError(LOGTAG, "%s Start Receive Error %d ", __FUNCTION__, rval);
2236 return rval;
2237 }
2238 }
2239
2240 mRecvStream->Start();
2241 // XXX File a bug to consider hooking this up to the state of mtransport
2242 mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO,
2243 webrtc::kNetworkUp);
2244 mEngineReceiving = true;
2245 UpdateVideoStatsTimer();
2246
2247 return kMediaConduitNoError;
2248 }
2249
2250 // WebRTC::RTP Callback Implementation
2251 // Called on MTG thread
bool WebrtcVideoConduit::SendRtp(const uint8_t* packet, size_t length,
                                 const webrtc::PacketOptions& options) {
  // Log seq# (bytes 2-3) and SSRC (bytes 8-11) read straight from the raw
  // RTP header. NOTE(review): these casts assume |packet| is suitably
  // aligned for uint16_t/uint32_t loads -- confirm.
  CSFLogVerbose(LOGTAG, "%s Sent RTP Packet seq %d, len %lu, SSRC %u (0x%x)",
                __FUNCTION__, (uint16_t)ntohs(*((uint16_t*)&packet[2])),
                (unsigned long)length,
                (uint32_t)ntohl(*((uint32_t*)&packet[8])),
                (uint32_t)ntohl(*((uint32_t*)&packet[8])));

  ReentrantMonitorAutoEnter enter(mTransportMonitor);
  if (!mTransmitterTransport ||
      NS_FAILED(mTransmitterTransport->SendRtpPacket(packet, length))) {
    CSFLogError(LOGTAG, "%s RTP Packet Send Failed ", __FUNCTION__);
    return false;
  }
  // Report the actual send time back to the webrtc.org Call for packets that
  // carry a transport-wide id (presumably feeding bandwidth estimation --
  // confirm against webrtc.org Call::OnSentPacket docs).
  if (options.packet_id >= 0) {
    int64_t now_ms = PR_Now() / 1000;
    mCall->Call()->OnSentPacket({options.packet_id, now_ms});
  }
  return true;
}
2272
2273 // Called from multiple threads including webrtc Process thread
bool WebrtcVideoConduit::SendRtcp(const uint8_t* packet, size_t length) {
  CSFLogVerbose(LOGTAG, "%s : len %lu ", __FUNCTION__, (unsigned long)length);
  // We come here if we have only one pipeline/conduit setup,
  // such as for unidirectional streams.
  // We also end up here if we are receiving
  ReentrantMonitorAutoEnter enter(mTransportMonitor);
  // Try the receiver transport first; fall back to the transmitter transport
  // if it is absent or fails.
  if (mReceiverTransport &&
      NS_SUCCEEDED(mReceiverTransport->SendRtcpPacket(packet, length))) {
    // Might be a sender report, might be a receiver report, we don't know.
    CSFLogDebug(LOGTAG, "%s Sent RTCP Packet ", __FUNCTION__);
    return true;
  }
  if (mTransmitterTransport &&
      NS_SUCCEEDED(mTransmitterTransport->SendRtcpPacket(packet, length))) {
    return true;
  }

  CSFLogError(LOGTAG, "%s RTCP Packet Send Failed ", __FUNCTION__);
  return false;
}
2294
// Receive-side frame callback: forwards each decoded frame to mRenderer,
// notifying it of size changes, and optionally decodes a latency-test
// timestamp stamped into the pixels.
void WebrtcVideoConduit::OnFrame(const webrtc::VideoFrame& video_frame) {
  CSFLogVerbose(LOGTAG, "%s: recv SSRC %u (0x%x), size %ux%u", __FUNCTION__,
                static_cast<uint32_t>(mRecvSSRC),
                static_cast<uint32_t>(mRecvSSRC), video_frame.width(),
                video_frame.height());
  ReentrantMonitorAutoEnter enter(mTransportMonitor);

  if (!mRenderer) {
    CSFLogError(LOGTAG, "%s Renderer is NULL ", __FUNCTION__);
    return;
  }

  // Tell the renderer about a size change before delivering the frame.
  if (mReceivingWidth != video_frame.width() ||
      mReceivingHeight != video_frame.height()) {
    mReceivingWidth = video_frame.width();
    mReceivingHeight = video_frame.height();
    mRenderer->FrameSizeChange(mReceivingWidth, mReceivingHeight);
  }

  // Attempt to retrieve an timestamp encoded in the image pixels if enabled.
  if (mVideoLatencyTestEnable && mReceivingWidth && mReceivingHeight) {
    uint64_t now = PR_Now();
    uint64_t timestamp = 0;
    // const_cast because YuvStamper::Decode takes a non-const pointer.
    // NOTE(review): assumed to only read the Y plane -- confirm Decode does
    // not write through |data|.
    uint8_t* data = const_cast<uint8_t*>(
        video_frame.video_frame_buffer()->GetI420()->DataY());
    bool ok = YuvStamper::Decode(
        mReceivingWidth, mReceivingHeight, mReceivingWidth, data,
        reinterpret_cast<unsigned char*>(&timestamp), sizeof(timestamp), 0, 0);
    if (ok) {
      // Latency sample = local receive time minus the stamped send time.
      VideoLatencyUpdate(now - timestamp);
    }
  }

  mRenderer->RenderVideoFrame(*video_frame.video_frame_buffer(),
                              video_frame.timestamp(),
                              video_frame.render_time_ms());
}
2332
DumpCodecDB() const2333 void WebrtcVideoConduit::DumpCodecDB() const {
2334 MOZ_ASSERT(NS_IsMainThread());
2335
2336 for (auto& entry : mRecvCodecList) {
2337 CSFLogDebug(LOGTAG, "Payload Name: %s", entry->mName.c_str());
2338 CSFLogDebug(LOGTAG, "Payload Type: %d", entry->mType);
2339 CSFLogDebug(LOGTAG, "Payload Max Frame Size: %d",
2340 entry->mEncodingConstraints.maxFs);
2341 CSFLogDebug(LOGTAG, "Payload Max Frame Rate: %d",
2342 entry->mEncodingConstraints.maxFps);
2343 }
2344 }
2345
VideoLatencyUpdate(uint64_t newSample)2346 void WebrtcVideoConduit::VideoLatencyUpdate(uint64_t newSample) {
2347 mTransportMonitor.AssertCurrentThreadIn();
2348
2349 mVideoLatencyAvg =
2350 (sRoundingPadding * newSample + sAlphaNum * mVideoLatencyAvg) / sAlphaDen;
2351 }
2352
// Returns the running latency average, unscaled (the stored value is kept
// multiplied by sRoundingPadding; see VideoLatencyUpdate).
uint64_t WebrtcVideoConduit::MozVideoLatencyAvg() {
  mTransportMonitor.AssertCurrentThreadIn();

  return mVideoLatencyAvg / sRoundingPadding;
}
2358
// Per-packet callback used for RTP source (CSRC/SSRC) tracking.
// STS thread only.
void WebrtcVideoConduit::OnRtpPacket(const webrtc::RtpPacketReceived& aPacket) {
  ASSERT_ON_THREAD(mStsThread);
  webrtc::RTPHeader header;
  aPacket.GetHeader(&header);
  // Audio-level extensions are not expected on video packets; if present,
  // skip source tracking for this packet (see log message).
  if (header.extension.hasAudioLevel ||
      header.extension.csrcAudioLevels.numAudioLevels) {
    CSFLogDebug(LOGTAG,
                "Video packet has audio level extension."
                "RTP source tracking ignored for this packet.");
    return;
  }
  mRtpSourceObserver->OnRtpPacket(header, mRecvStreamStats.JitterMs());
}
2372
OnRtcpBye()2373 void WebrtcVideoConduit::OnRtcpBye() {
2374 RefPtr<WebrtcVideoConduit> self = this;
2375 NS_DispatchToMainThread(media::NewRunnableFrom([self]() mutable {
2376 MOZ_ASSERT(NS_IsMainThread());
2377 if (self->mRtcpEventObserver) {
2378 self->mRtcpEventObserver->OnRtcpBye();
2379 }
2380 return NS_OK;
2381 }));
2382 }
2383
OnRtcpTimeout()2384 void WebrtcVideoConduit::OnRtcpTimeout() {
2385 RefPtr<WebrtcVideoConduit> self = this;
2386 NS_DispatchToMainThread(media::NewRunnableFrom([self]() mutable {
2387 MOZ_ASSERT(NS_IsMainThread());
2388 if (self->mRtcpEventObserver) {
2389 self->mRtcpEventObserver->OnRtcpTimeout();
2390 }
2391 return NS_OK;
2392 }));
2393 }
2394
// Registers the observer notified of RTCP BYE / timeout events (the On*
// handlers dispatch to it on main thread). Main thread only.
void WebrtcVideoConduit::SetRtcpEventObserver(
    mozilla::RtcpEventObserver* observer) {
  MOZ_ASSERT(NS_IsMainThread());
  mRtcpEventObserver = observer;
}
2400
CodecPluginID()2401 uint64_t WebrtcVideoConduit::CodecPluginID() {
2402 MOZ_ASSERT(NS_IsMainThread());
2403
2404 if (mSendCodecPluginID) {
2405 return mSendCodecPluginID;
2406 }
2407 if (mRecvCodecPluginID) {
2408 return mRecvCodecPluginID;
2409 }
2410
2411 return 0;
2412 }
2413
// Returns true when |newConfig| differs from the current send codec in a way
// that requires re-creating the send stream: no current config, or a change
// in codec name, payload type, or NACK/FEC RTCP feedback settings.
bool WebrtcVideoConduit::RequiresNewSendStream(
    const VideoCodecConfig& newConfig) const {
  MOZ_ASSERT(NS_IsMainThread());

  return !mCurSendCodecConfig ||
         mCurSendCodecConfig->mName != newConfig.mName ||
         mCurSendCodecConfig->mType != newConfig.mType ||
         mCurSendCodecConfig->RtcpFbNackIsSet("") !=
             newConfig.RtcpFbNackIsSet("") ||
         mCurSendCodecConfig->RtcpFbFECIsSet() != newConfig.RtcpFbFECIsSet()
#if 0
         // XXX Do we still want/need to do this?
         || (newConfig.mName == "H264" &&
             !CompatibleH264Config(mEncoderSpecificH264, newConfig))
#endif
      ;
}
2431
HasH264Hardware()2432 bool WebrtcVideoConduit::HasH264Hardware() {
2433 nsCOMPtr<nsIGfxInfo> gfxInfo = do_GetService("@mozilla.org/gfx/info;1");
2434 if (!gfxInfo) {
2435 return false;
2436 }
2437 int32_t status;
2438 nsCString discardFailureId;
2439 return NS_SUCCEEDED(gfxInfo->GetFeatureStatus(
2440 nsIGfxInfo::FEATURE_WEBRTC_HW_ACCELERATION_H264, discardFailureId,
2441 &status)) &&
2442 status == nsIGfxInfo::FEATURE_STATUS_OK;
2443 }
2444
2445 } // namespace mozilla
2446