1 /* This Source Code Form is subject to the terms of the Mozilla Public
2 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
3 * You can obtain one at http://mozilla.org/MPL/2.0/. */
4
5 #include "common/browser_logging/CSFLog.h"
6 #include "nspr.h"
7 #include "plstr.h"
8
9 #include "AudioConduit.h"
10 #include "RtpRtcpConfig.h"
11 #include "VideoConduit.h"
12 #include "VideoStreamFactory.h"
13 #include "common/YuvStamper.h"
14 #include "modules/rtp_rtcp/source/rtp_packet_received.h"
15 #include "mozilla/TemplateLib.h"
16 #include "mozilla/media/MediaUtils.h"
17 #include "mozilla/StaticPrefs_media.h"
18 #include "mozilla/UniquePtr.h"
19 #include "nsComponentManagerUtils.h"
20 #include "nsIPrefBranch.h"
21 #include "nsIGfxInfo.h"
22 #include "nsIPrefService.h"
23 #include "nsServiceManagerUtils.h"
24
25 #include "nsThreadUtils.h"
26
27 #include "pk11pub.h"
28
29 #include "api/video_codecs/sdp_video_format.h"
30 #include "media/engine/encoder_simulcast_proxy.h"
31 #include "webrtc/common_types.h"
32 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
33 #include "webrtc/media/base/mediaconstants.h"
34 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
35 #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
36 #include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
37 #include "webrtc/common_video/include/video_frame_buffer.h"
38
39 #include "mozilla/Unused.h"
40
41 #if defined(MOZ_WIDGET_ANDROID)
42 # include "VideoEngine.h"
43 #endif
44
45 #include "GmpVideoCodec.h"
46
47 #ifdef MOZ_WEBRTC_MEDIACODEC
48 # include "MediaCodecVideoCodec.h"
49 #endif
50 #include "WebrtcGmpVideoCodec.h"
51
52 #include "MediaDataCodec.h"
53
54 // for ntohs
55 #ifdef _MSC_VER
56 # include "Winsock2.h"
57 #else
58 # include <netinet/in.h>
59 #endif
60
61 #include <algorithm>
62 #include <math.h>
63 #include <cinttypes>
64
#define DEFAULT_VIDEO_MAX_FRAMERATE 30
#define INVALID_RTP_PAYLOAD 255  // valid payload types are 0 to 127

namespace mozilla {

// Log tag consumed by the CSFLog* macros in this file (see LOGTAG below).
static const char* vcLogTag = "WebrtcVideoSessionConduit";
#ifdef LOGTAG
# undef LOGTAG
#endif
#define LOGTAG vcLogTag

using LocalDirection = MediaSessionConduitLocalDirection;

// Sentinel meaning "no payload type negotiated".
static const int kNullPayloadType = -1;
// SDP payload names for the FEC codecs we recognize.
static const char* kUlpFecPayloadName = "ulpfec";
static const char* kRedPayloadName = "red";

// The number of frame buffers WebrtcVideoConduit may create before returning
// errors.
// Sometimes these are released synchronously but they can be forwarded all the
// way to the encoder for asynchronous encoding. With a pool size of 5,
// we allow 1 buffer for the current conversion, and 4 buffers to be queued at
// the encoder.
#define SCALER_BUFFER_POOL_SIZE 5

// The pixel alignment to use for the highest resolution layer when simulcast
// is active and one or more layers are being scaled.
#define SIMULCAST_RESOLUTION_ALIGNMENT 16

// 32 bytes is what WebRTC CodecInst expects
const unsigned int WebrtcVideoConduit::CODEC_PLNAME_SIZE = 32;
96
// Returns the smaller of the two values, treating zero as "unset": a zero
// operand is replaced by the other operand before the comparison, so the
// result is zero only when both inputs are zero.
template <typename T>
T MinIgnoreZero(const T& a, const T& b) {
  const T left = a ? a : b;
  const T right = b ? b : a;
  return std::min(left, right);
}
101
// Shrinks *width x *height to at most max_fs pixels while preserving the
// aspect ratio EXACTLY, by dividing both dimensions by a common divisor.
// If no common divisor brings the area under max_fs, both dimensions are
// zeroed to signal failure.
//
// Fix: the loop bound was `d < min(w, h)`, which never tried the divisor
// equal to the smaller dimension itself — e.g. 4x4 with max_fs=1 produced
// 0x0 instead of the valid 1x1. The bound is now inclusive.
template <class t>
static void ConstrainPreservingAspectRatioExact(uint32_t max_fs, t* width,
                                                t* height) {
  // We could try to pick a better starting divisor, but it won't make any real
  // performance difference.
  for (size_t d = 1; d <= std::min(*width, *height); ++d) {
    if ((*width % d) || (*height % d)) {
      continue;  // Not divisible
    }

    if (((*width) * (*height)) / (d * d) <= max_fs) {
      *width /= d;
      *height /= d;
      return;
    }
  }

  // No exact divisor satisfies max_fs.
  *width = 0;
  *height = 0;
}
122
// Clamps *width x *height into the box max_width x max_height, scaling both
// dimensions by the same ratio so the aspect ratio is (approximately)
// preserved. Dimensions already inside the box are left untouched.
template <class t>
static void ConstrainPreservingAspectRatio(uint16_t max_width,
                                           uint16_t max_height, t* width,
                                           t* height) {
  const t w = *width;
  const t h = *height;
  if (w <= max_width && h <= max_height) {
    return;  // Already within bounds.
  }

  // Scale down along whichever dimension is proportionally further over
  // its limit; derive the other from the original aspect ratio.
  if (w * max_height > max_width * h) {
    *height = max_width * h / w;
    *width = max_width;
  } else {
    *width = max_height * w / h;
    *height = max_height;
  }
}
139
140 /**
141 * Function to select and change the encoding frame rate based on incoming frame
142 * rate and max-mbps setting.
143 * @param current framerate
144 * @result new framerate
145 */
SelectSendFrameRate(const VideoCodecConfig * codecConfig,unsigned int old_framerate,unsigned short sending_width,unsigned short sending_height)146 static unsigned int SelectSendFrameRate(const VideoCodecConfig* codecConfig,
147 unsigned int old_framerate,
148 unsigned short sending_width,
149 unsigned short sending_height) {
150 unsigned int new_framerate = old_framerate;
151
152 // Limit frame rate based on max-mbps
153 if (codecConfig && codecConfig->mEncodingConstraints.maxMbps) {
154 unsigned int cur_fs, mb_width, mb_height;
155
156 mb_width = (sending_width + 15) >> 4;
157 mb_height = (sending_height + 15) >> 4;
158
159 cur_fs = mb_width * mb_height;
160 if (cur_fs > 0) { // in case no frames have been sent
161 new_framerate = codecConfig->mEncodingConstraints.maxMbps / cur_fs;
162
163 new_framerate = MinIgnoreZero(new_framerate,
164 codecConfig->mEncodingConstraints.maxFps);
165 }
166 }
167 return new_framerate;
168 }
169
170 /**
171 * Perform validation on the codecConfig to be applied
172 */
ValidateCodecConfig(const VideoCodecConfig * codecInfo)173 static MediaConduitErrorCode ValidateCodecConfig(
174 const VideoCodecConfig* codecInfo) {
175 if (!codecInfo) {
176 CSFLogError(LOGTAG, "%s Null CodecConfig ", __FUNCTION__);
177 return kMediaConduitMalformedArgument;
178 }
179
180 if ((codecInfo->mName.empty()) ||
181 (codecInfo->mName.length() >= WebrtcVideoConduit::CODEC_PLNAME_SIZE)) {
182 CSFLogError(LOGTAG, "%s Invalid Payload Name Length ", __FUNCTION__);
183 return kMediaConduitMalformedArgument;
184 }
185
186 return kMediaConduitNoError;
187 }
188
// Caches the latest call-level stats snapshot and derives a sanitized RTT in
// seconds for DOM consumption. Out-of-range RTTs clear the cached value;
// negative RTTs (an error indication from the stack) clear it only after a
// previous success, so transient startup errors are quiet.
void WebrtcVideoConduit::CallStatistics::Update(
    const webrtc::Call::Stats& aStats) {
  ASSERT_ON_THREAD(mStatsThread);

  mStats = Some(aStats);
  const auto rtt = aStats.rtt_ms;
  if (rtt > static_cast<decltype(aStats.rtt_ms)>(INT32_MAX)) {
    // If we get a bogus RTT we will keep using the previous RTT
#ifdef DEBUG
    CSFLogError(LOGTAG,
                "%s for VideoConduit:%p RTT is larger than the"
                " maximum size of an RTCP RTT.",
                __FUNCTION__, this);
#endif
    mRttSec = Nothing();
  } else {
    if (mRttSec && rtt < 0) {
      // Negative RTT signals the stack could not compute one; only log the
      // regression if we had previously seen a valid value.
      CSFLogError(LOGTAG,
                  "%s for VideoConduit:%p RTT returned an error after "
                  " previously succeeding.",
                  __FUNCTION__, this);
      mRttSec = Nothing();
    }
    if (rtt >= 0) {
      // Milliseconds -> seconds for the DOM-facing value.
      mRttSec = Some(static_cast<DOMHighResTimeStamp>(rtt) / 1000.0);
    }
  }
}
217
// Most recent valid round-trip time in seconds, or Nothing() if none is
// known (stats thread only).
Maybe<DOMHighResTimeStamp> WebrtcVideoConduit::CallStatistics::RttSec() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mRttSec;
}
223
// Converts the cached bandwidth-estimation stats into the WebIDL dictionary
// form, or Nothing() if Update() has not run yet (stats thread only).
Maybe<mozilla::dom::RTCBandwidthEstimationInternal>
WebrtcVideoConduit::CallStatistics::Stats() const {
  ASSERT_ON_THREAD(mStatsThread);
  if (mStats.isNothing()) {
    return Nothing();
  }
  const auto& stats = mStats.value();
  dom::RTCBandwidthEstimationInternal bw;
  // NOTE(review): the /8 converts bits/s to bytes/s even though the fields
  // end in "Bps" — presumably bytes-per-second; confirm against the
  // RTCBandwidthEstimationInternal WebIDL definition.
  bw.mSendBandwidthBps.Construct(stats.send_bandwidth_bps / 8);
  bw.mMaxPaddingBps.Construct(stats.max_padding_bitrate_bps / 8);
  bw.mReceiveBandwidthBps.Construct(stats.recv_bandwidth_bps / 8);
  bw.mPacerDelayMs.Construct(stats.pacer_delay_ms);
  if (stats.rtt_ms >= 0) {
    bw.mRttMs.Construct(stats.rtt_ms);
  }
  return Some(std::move(bw));
}
241
// Feeds one sample of frame rate and bitrate into the running statistics and
// replaces the cached RTCP packet counters (stats thread only).
void WebrtcVideoConduit::StreamStatistics::Update(
    const double aFrameRate, const double aBitrate,
    const webrtc::RtcpPacketTypeCounter& aPacketCounts) {
  ASSERT_ON_THREAD(mStatsThread);

  mFrameRate.Push(aFrameRate);
  mBitRate.Push(aBitrate);
  mPacketCounts = aPacketCounts;
}
251
GetVideoStreamStats(double & aOutFrMean,double & aOutFrStdDev,double & aOutBrMean,double & aOutBrStdDev) const252 bool WebrtcVideoConduit::StreamStatistics::GetVideoStreamStats(
253 double& aOutFrMean, double& aOutFrStdDev, double& aOutBrMean,
254 double& aOutBrStdDev) const {
255 ASSERT_ON_THREAD(mStatsThread);
256
257 if (mFrameRate.NumDataValues() && mBitRate.NumDataValues()) {
258 aOutFrMean = mFrameRate.Mean();
259 aOutFrStdDev = mFrameRate.StandardDeviation();
260 aOutBrMean = mBitRate.Mean();
261 aOutBrStdDev = mBitRate.StandardDeviation();
262 return true;
263 }
264 return false;
265 }
266
// Accumulates per-call bitrate/framerate telemetry, using the encoder or
// decoder histogram depending on IsSend(). No-op while the stream is
// inactive (stats thread only).
void WebrtcVideoConduit::StreamStatistics::RecordTelemetry() const {
  ASSERT_ON_THREAD(mStatsThread);

  if (!mActive) {
    return;
  }
  using namespace Telemetry;
  // Bitrates are recorded in kbps; std-dev framerate is scaled by 10 to keep
  // one decimal digit of precision in an integer histogram.
  Accumulate(IsSend() ? WEBRTC_VIDEO_ENCODER_BITRATE_AVG_PER_CALL_KBPS
                      : WEBRTC_VIDEO_DECODER_BITRATE_AVG_PER_CALL_KBPS,
             mBitRate.Mean() / 1000);
  Accumulate(IsSend() ? WEBRTC_VIDEO_ENCODER_BITRATE_STD_DEV_PER_CALL_KBPS
                      : WEBRTC_VIDEO_DECODER_BITRATE_STD_DEV_PER_CALL_KBPS,
             mBitRate.StandardDeviation() / 1000);
  Accumulate(IsSend() ? WEBRTC_VIDEO_ENCODER_FRAMERATE_AVG_PER_CALL
                      : WEBRTC_VIDEO_DECODER_FRAMERATE_AVG_PER_CALL,
             mFrameRate.Mean());
  Accumulate(IsSend() ? WEBRTC_VIDEO_ENCODER_FRAMERATE_10X_STD_DEV_PER_CALL
                      : WEBRTC_VIDEO_DECODER_FRAMERATE_10X_STD_DEV_PER_CALL,
             mFrameRate.StandardDeviation() * 10);
}
287
// RTCP packet counters cached by the most recent Update() (stats thread only).
const webrtc::RtcpPacketTypeCounter&
WebrtcVideoConduit::StreamStatistics::PacketCounts() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mPacketCounts;
}
294
// Whether the stream is currently considered active for telemetry purposes
// (stats thread only).
bool WebrtcVideoConduit::StreamStatistics::Active() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mActive;
}
300
// Marks the stream active/inactive; gates RecordTelemetry() (stats thread
// only).
void WebrtcVideoConduit::StreamStatistics::SetActive(bool aActive) {
  ASSERT_ON_THREAD(mStatsThread);

  mActive = aActive;
}
306
// Frames delivered to the encoder but never encoded, per the last Update()
// (stats thread only).
uint32_t WebrtcVideoConduit::SendStreamStatistics::DroppedFrames() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mDroppedFrames;
}
312
// Total frames encoded (key + delta), per the last Update() (stats thread
// only).
uint32_t WebrtcVideoConduit::SendStreamStatistics::FramesEncoded() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mFramesEncoded;
}
318
// Counts one more frame handed to the encoder; the dropped-frame count in
// Update() is derived from this (stats thread only).
void WebrtcVideoConduit::SendStreamStatistics::FrameDeliveredToEncoder() {
  ASSERT_ON_THREAD(mStatsThread);

  ++mFramesDeliveredToEncoder;
}
324
// True if the last Update() found the configured SSRC among the send
// stream's substreams (stats thread only).
bool WebrtcVideoConduit::SendStreamStatistics::SsrcFound() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mSsrcFound;
}
330
// Remote-reported jitter in milliseconds, per the last Update() (stats
// thread only).
uint32_t WebrtcVideoConduit::SendStreamStatistics::JitterMs() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mJitterMs;
}
336
// Remote-reported cumulative packet loss, per the last Update() (stats
// thread only).
uint32_t WebrtcVideoConduit::SendStreamStatistics::PacketsLost() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mPacketsLost;
}
342
// Media payload bytes received on this send stream's substream, per the last
// Update() (stats thread only).
uint64_t WebrtcVideoConduit::SendStreamStatistics::BytesReceived() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mBytesReceived;
}
348
// Transmitted packet count from the substream's RTP stats, per the last
// Update() (stats thread only).
uint32_t WebrtcVideoConduit::SendStreamStatistics::PacketsReceived() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mPacketsReceived;
}
354
// Sum of encoded-frame QP values if the encoder reported one, per the last
// Update() (stats thread only).
Maybe<uint64_t> WebrtcVideoConduit::SendStreamStatistics::QpSum() const {
  ASSERT_ON_THREAD(mStatsThread);
  return mQpSum;
}
359
// Refreshes send-side statistics from a webrtc::VideoSendStream::Stats
// snapshot. Only the substream matching aConfiguredSsrc is consulted; if it
// is absent (or there are no substreams) the previously derived values are
// left untouched and SsrcFound() becomes false.
void WebrtcVideoConduit::SendStreamStatistics::Update(
    const webrtc::VideoSendStream::Stats& aStats, uint32_t aConfiguredSsrc) {
  ASSERT_ON_THREAD(mStatsThread);

  mSsrcFound = false;

  if (aStats.substreams.empty()) {
    CSFLogVerbose(LOGTAG, "%s stats.substreams is empty", __FUNCTION__);
    return;
  }

  auto ind = aStats.substreams.find(aConfiguredSsrc);
  if (ind == aStats.substreams.end()) {
    CSFLogError(LOGTAG,
                "%s for VideoConduit:%p ssrc not found in SendStream stats.",
                __FUNCTION__, this);
    return;
  }

  mSsrcFound = true;

  StreamStatistics::Update(aStats.encode_frame_rate, aStats.media_bitrate_bps,
                           ind->second.rtcp_packet_type_counts);
  if (aStats.qp_sum) {
    mQpSum = Some(aStats.qp_sum.value());
  } else {
    mQpSum = Nothing();
  }

  const webrtc::FrameCounts& fc = ind->second.frame_counts;
  mFramesEncoded = fc.key_frames + fc.delta_frames;
  // Log the delta of dropped frames BEFORE mDroppedFrames is refreshed below;
  // the ordering here is intentional.
  CSFLogVerbose(
      LOGTAG, "%s: framerate: %u, bitrate: %u, dropped frames delta: %u",
      __FUNCTION__, aStats.encode_frame_rate, aStats.media_bitrate_bps,
      mFramesDeliveredToEncoder - mFramesEncoded - mDroppedFrames);
  mDroppedFrames = mFramesDeliveredToEncoder - mFramesEncoded;
  // Convert jitter from RTP clock units to milliseconds.
  mJitterMs = ind->second.rtcp_stats.jitter /
              (webrtc::kVideoPayloadTypeFrequency / 1000);
  mPacketsLost = ind->second.rtcp_stats.packets_lost;
  mBytesReceived = ind->second.rtp_stats.MediaPayloadBytes();
  mPacketsReceived = ind->second.rtp_stats.transmitted.packets;
}
402
// Octets sent per the remote's RTCP sender report, per the last Update()
// (stats thread only).
uint32_t WebrtcVideoConduit::ReceiveStreamStatistics::BytesSent() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mBytesSent;
}
408
// Packets discarded by the receive pipeline, per the last Update() (stats
// thread only).
uint32_t WebrtcVideoConduit::ReceiveStreamStatistics::DiscardedPackets() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mDiscardedPackets;
}
414
// Total frames decoded (key + delta), per the last Update() (stats thread
// only).
uint32_t WebrtcVideoConduit::ReceiveStreamStatistics::FramesDecoded() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mFramesDecoded;
}
420
// Receive-side jitter in milliseconds, per the last Update() (stats thread
// only).
uint32_t WebrtcVideoConduit::ReceiveStreamStatistics::JitterMs() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mJitterMs;
}
426
// Cumulative packets lost on the receive stream, per the last Update()
// (stats thread only).
uint32_t WebrtcVideoConduit::ReceiveStreamStatistics::PacketsLost() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mPacketsLost;
}
432
// Packets sent per the remote's RTCP sender report, per the last Update()
// (stats thread only).
uint32_t WebrtcVideoConduit::ReceiveStreamStatistics::PacketsSent() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mPacketsSent;
}
438
// SSRC of the stream the stats were collected for, per the last Update()
// (stats thread only).
uint32_t WebrtcVideoConduit::ReceiveStreamStatistics::Ssrc() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mSsrc;
}
444
// NTP timestamp (in ms) from the remote's RTCP sender report, per the last
// Update() (stats thread only).
DOMHighResTimeStamp
WebrtcVideoConduit::ReceiveStreamStatistics::RemoteTimestamp() const {
  ASSERT_ON_THREAD(mStatsThread);

  return mRemoteTimestamp;
}
451
// Refreshes receive-side statistics from a webrtc::VideoReceiveStream::Stats
// snapshot (stats thread only).
void WebrtcVideoConduit::ReceiveStreamStatistics::Update(
    const webrtc::VideoReceiveStream::Stats& aStats) {
  ASSERT_ON_THREAD(mStatsThread);

  CSFLogVerbose(LOGTAG, "%s ", __FUNCTION__);
  StreamStatistics::Update(aStats.decode_frame_rate, aStats.total_bitrate_bps,
                           aStats.rtcp_packet_type_counts);
  mBytesSent = aStats.rtcp_sender_octets_sent;
  mDiscardedPackets = aStats.discarded_packets;
  mFramesDecoded =
      aStats.frame_counts.key_frames + aStats.frame_counts.delta_frames;
  // Convert jitter from RTP clock units to milliseconds.
  mJitterMs =
      aStats.rtcp_stats.jitter / (webrtc::kVideoPayloadTypeFrequency / 1000);
  mPacketsLost = aStats.rtcp_stats.packets_lost;
  mPacketsSent = aStats.rtcp_sender_packets_sent;
  mRemoteTimestamp = aStats.rtcp_sender_ntp_timestamp.ToMs();
  mSsrc = aStats.ssrc;
}
470
471 /**
472 * Factory Method for VideoConduit
473 */
Create(RefPtr<WebRtcCallWrapper> aCall,nsCOMPtr<nsISerialEventTarget> aStsThread)474 RefPtr<VideoSessionConduit> VideoSessionConduit::Create(
475 RefPtr<WebRtcCallWrapper> aCall,
476 nsCOMPtr<nsISerialEventTarget> aStsThread) {
477 MOZ_ASSERT(NS_IsMainThread());
478 MOZ_ASSERT(aCall, "missing required parameter: aCall");
479 CSFLogVerbose(LOGTAG, "%s", __FUNCTION__);
480
481 if (!aCall) {
482 return nullptr;
483 }
484
485 auto obj = MakeRefPtr<WebrtcVideoConduit>(aCall, aStsThread);
486 if (obj->Init() != kMediaConduitNoError) {
487 CSFLogError(LOGTAG, "%s VideoConduit Init Failed ", __FUNCTION__);
488 return nullptr;
489 }
490 CSFLogVerbose(LOGTAG, "%s Successfully created VideoConduit ", __FUNCTION__);
491 return obj.forget();
492 }
493
// Constructor: wires this conduit into the shared webrtc::Call wrapper and
// registers itself as renderer and RTCP event observer on the receive-stream
// config. Heavyweight setup happens later in Init().
WebrtcVideoConduit::WebrtcVideoConduit(
    RefPtr<WebRtcCallWrapper> aCall, nsCOMPtr<nsISerialEventTarget> aStsThread)
    : mTransportMonitor("WebrtcVideoConduit"),
      mStsThread(aStsThread),
      mMutex("WebrtcVideoConduit::mMutex"),
      mVideoAdapter(MakeUnique<cricket::VideoAdapter>()),
      mBufferPool(false, SCALER_BUFFER_POOL_SIZE),
      mEngineTransmitting(false),
      mEngineReceiving(false),
      mSendStreamStats(aStsThread),
      mRecvStreamStats(aStsThread),
      mCallStats(aStsThread),
      mSendingFramerate(DEFAULT_VIDEO_MAX_FRAMERATE),
      mActiveCodecMode(webrtc::kRealtimeVideo),
      mCodecMode(webrtc::kRealtimeVideo),
      mCall(aCall),
      mSendStreamConfig(
          this)  // 'this' is stored but not dereferenced in the constructor.
      ,
      mRecvStreamConfig(
          this)  // 'this' is stored but not dereferenced in the constructor.
      ,
      mRecvSSRC(0),
      mRemoteSSRC(0),
      mVideoStatsTimer(NS_NewTimer()),
      mRtpSourceObserver(new RtpSourceObserver(mCall->GetTimestampMaker())) {
  mCall->RegisterConduit(this);
  mRecvStreamConfig.renderer = this;
  mRecvStreamConfig.rtcp_event_observer = this;
}
524
// Destructor: unregisters from the call wrapper. Send/receive streams must
// already have been torn down (see assert). Main thread only.
WebrtcVideoConduit::~WebrtcVideoConduit() {
  MOZ_ASSERT(NS_IsMainThread());

  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
  mCall->UnregisterConduit(this);

  // NOTE(review): the original comment here mentioned "AudioConduit" — it
  // looks like a copy-paste leftover from AudioConduit.cpp; the assert is
  // what matters:
  MOZ_ASSERT(!mSendStream && !mRecvStream,
             "Call DeleteStreams prior to ~WebrtcVideoConduit.");
}
536
SetLocalRTPExtensions(LocalDirection aDirection,const RtpExtList & aExtensions)537 MediaConduitErrorCode WebrtcVideoConduit::SetLocalRTPExtensions(
538 LocalDirection aDirection, const RtpExtList& aExtensions) {
539 MOZ_ASSERT(NS_IsMainThread());
540
541 auto& extList = aDirection == LocalDirection::kSend
542 ? mSendStreamConfig.rtp.extensions
543 : mRecvStreamConfig.rtp.extensions;
544 extList = aExtensions;
545 return kMediaConduitNoError;
546 }
547
// Installs new local (and RTX) SSRCs for the send side. If the SSRCs differ
// from the current ones, the send stream is stopped, deleted, and — if it
// was transmitting — restarted so the new SSRCs take effect. Returns false
// if stop/start fails. Main thread only.
bool WebrtcVideoConduit::SetLocalSSRCs(
    const std::vector<unsigned int>& aSSRCs,
    const std::vector<unsigned int>& aRtxSSRCs) {
  MOZ_ASSERT(NS_IsMainThread());

  // Special case: the local SSRCs are the same - do nothing.
  if (mSendStreamConfig.rtp.ssrcs == aSSRCs &&
      mSendStreamConfig.rtp.rtx.ssrcs == aRtxSSRCs) {
    return true;
  }

  {
    MutexAutoLock lock(mMutex);
    // Update the value of the ssrcs in the config structure.
    mSendStreamConfig.rtp.ssrcs = aSSRCs;
    mSendStreamConfig.rtp.rtx.ssrcs = aRtxSSRCs;

    bool wasTransmitting = mEngineTransmitting;
    if (StopTransmittingLocked() != kMediaConduitNoError) {
      return false;
    }

    // On the next StartTransmitting() or ConfigureSendMediaCodec, force
    // building a new SendStream to switch SSRCs.
    DeleteSendStream();

    if (wasTransmitting) {
      if (StartTransmittingLocked() != kMediaConduitNoError) {
        return false;
      }
    }
  }

  return true;
}
583
// Returns a copy of the currently configured local send SSRCs (any thread;
// guarded by mMutex).
std::vector<unsigned int> WebrtcVideoConduit::GetLocalSSRCs() {
  MutexAutoLock lock(mMutex);

  return mSendStreamConfig.rtp.ssrcs;
}
589
// Sets the RTCP CNAME advertised on the send stream; takes effect when the
// stream is (re)created. Main thread only.
bool WebrtcVideoConduit::SetLocalCNAME(const char* cname) {
  MOZ_ASSERT(NS_IsMainThread());
  MutexAutoLock lock(mMutex);

  mSendStreamConfig.rtp.c_name = cname;
  return true;
}
597
// Sets the RTP MID header-extension value for the send stream; takes effect
// when the stream is (re)created. Main thread only.
bool WebrtcVideoConduit::SetLocalMID(const std::string& mid) {
  MOZ_ASSERT(NS_IsMainThread());
  MutexAutoLock lock(mMutex);

  mSendStreamConfig.rtp.mid = mid;
  return true;
}
605
// Sets the A/V sync group on the receive-stream config; takes effect when
// the receive stream is (re)created.
// NOTE(review): unlike the sibling setters this takes no lock and asserts no
// thread — confirm it is only ever called from the main thread.
void WebrtcVideoConduit::SetSyncGroup(const std::string& group) {
  mRecvStreamConfig.sync_group = group;
}
609
ConfigureCodecMode(webrtc::VideoCodecMode mode)610 MediaConduitErrorCode WebrtcVideoConduit::ConfigureCodecMode(
611 webrtc::VideoCodecMode mode) {
612 MOZ_ASSERT(NS_IsMainThread());
613
614 CSFLogVerbose(LOGTAG, "%s ", __FUNCTION__);
615 if (mode == webrtc::VideoCodecMode::kRealtimeVideo ||
616 mode == webrtc::VideoCodecMode::kScreensharing) {
617 mCodecMode = mode;
618 if (mVideoStreamFactory) {
619 mVideoStreamFactory->SetCodecMode(mCodecMode);
620 }
621 return kMediaConduitNoError;
622 }
623
624 return kMediaConduitMalformedArgument;
625 }
626
// Destroys the webrtc send stream (if any) and drops the encoder it was
// using. Safe to call when no stream exists. Requires mMutex held; main
// thread only.
void WebrtcVideoConduit::DeleteSendStream() {
  MOZ_ASSERT(NS_IsMainThread());
  mMutex.AssertCurrentThreadOwns();

  if (mSendStream) {
    mCall->Call()->DestroyVideoSendStream(mSendStream);
    mSendStream = nullptr;
    // The stream held a raw pointer to this encoder; release it now.
    mEncoder = nullptr;
  }
}
637
SupportedCodecType(webrtc::VideoCodecType aType)638 webrtc::VideoCodecType SupportedCodecType(webrtc::VideoCodecType aType) {
639 switch (aType) {
640 case webrtc::VideoCodecType::kVideoCodecVP8:
641 case webrtc::VideoCodecType::kVideoCodecVP9:
642 case webrtc::VideoCodecType::kVideoCodecH264:
643 return aType;
644 default:
645 return webrtc::VideoCodecType::kVideoCodecUnknown;
646 }
647 // NOTREACHED
648 }
649
// Creates the webrtc::VideoSendStream from the current send config and
// encoder config: records codec-usage telemetry, instantiates the encoder,
// builds the stream, and attaches this conduit as its frame source.
// Requires mMutex held; main thread only.
MediaConduitErrorCode WebrtcVideoConduit::CreateSendStream() {
  MOZ_ASSERT(NS_IsMainThread());
  mMutex.AssertCurrentThreadOwns();

  // Telemetry: count which send codec was negotiated.
  nsAutoString codecName;
  codecName.AssignASCII(
      mSendStreamConfig.encoder_settings.payload_name.c_str());
  Telemetry::ScalarAdd(Telemetry::ScalarID::WEBRTC_VIDEO_SEND_CODEC_USED,
                       codecName, 1);

  webrtc::VideoCodecType encoder_type =
      SupportedCodecType(webrtc::PayloadStringToCodecType(
          mSendStreamConfig.encoder_settings.payload_name));
  if (encoder_type == webrtc::VideoCodecType::kVideoCodecUnknown) {
    return kMediaConduitInvalidSendCodec;
  }

  std::unique_ptr<webrtc::VideoEncoder> encoder(CreateEncoder(encoder_type));
  if (!encoder) {
    return kMediaConduitInvalidSendCodec;
  }

  // The stream config holds a raw pointer; ownership stays with mEncoder
  // (assigned below).
  mSendStreamConfig.encoder_settings.encoder = encoder.get();

  MOZ_ASSERT(
      mSendStreamConfig.rtp.ssrcs.size() == mEncoderConfig.number_of_streams,
      "Each video substream must have a corresponding ssrc.");

  mSendStream = mCall->Call()->CreateVideoSendStream(mSendStreamConfig.Copy(),
                                                     mEncoderConfig.Copy());

  if (!mSendStream) {
    return kMediaConduitVideoSendStreamError;
  }
  mSendStream->SetSource(
      this, webrtc::VideoSendStream::DegradationPreference::kBalanced);

  mEncoder = std::move(encoder);

  // Remember which mode the stream was built with, so reconfiguration can
  // detect a mode change that requires a rebuild.
  mActiveCodecMode = mCodecMode;

  return kMediaConduitNoError;
}
693
// Destroys the webrtc receive stream (if any), detaching this conduit as
// secondary sink first, and drops the decoders the stream referenced. Safe
// to call when no stream exists. Requires mMutex held; main thread only.
void WebrtcVideoConduit::DeleteRecvStream() {
  MOZ_ASSERT(NS_IsMainThread());
  mMutex.AssertCurrentThreadOwns();

  if (mRecvStream) {
    mRecvStream->RemoveSecondarySink(this);
    mCall->Call()->DestroyVideoReceiveStream(mRecvStream);
    mRecvStream = nullptr;
    // The stream held raw pointers to these decoders; release them now.
    mDecoders.clear();
  }
}
705
// Creates the webrtc::VideoReceiveStream: builds a decoder for every
// negotiated receive codec we support (skipping, with a log, any that are
// unknown or fail to instantiate), then constructs the stream and attaches
// this conduit as a secondary RTP packet sink for source tracking.
// Requires mMutex held; main thread only.
MediaConduitErrorCode WebrtcVideoConduit::CreateRecvStream() {
  MOZ_ASSERT(NS_IsMainThread());
  mMutex.AssertCurrentThreadOwns();

  // decoder_desc is reused across loop iterations; each push_back copies it.
  webrtc::VideoReceiveStream::Decoder decoder_desc;
  std::unique_ptr<webrtc::VideoDecoder> decoder;
  webrtc::VideoCodecType decoder_type;

  mRecvStreamConfig.decoders.clear();
  for (auto& config : mRecvCodecList) {
    // Telemetry: count which receive codecs were negotiated.
    nsAutoString codecName;
    codecName.AssignASCII(config->mName.c_str());
    Telemetry::ScalarAdd(Telemetry::ScalarID::WEBRTC_VIDEO_RECV_CODEC_USED,
                         codecName, 1);

    decoder_type =
        SupportedCodecType(webrtc::PayloadStringToCodecType(config->mName));
    if (decoder_type == webrtc::VideoCodecType::kVideoCodecUnknown) {
      CSFLogError(LOGTAG, "%s Unknown decoder type: %s", __FUNCTION__,
                  config->mName.c_str());
      continue;
    }

    decoder = CreateDecoder(decoder_type);

    if (!decoder) {
      // This really should never happen unless something went wrong
      // in the negotiation code
      NS_ASSERTION(decoder, "Failed to create video decoder");
      CSFLogError(LOGTAG, "Failed to create decoder of type %s (%d)",
                  config->mName.c_str(), decoder_type);
      // don't stop
      continue;
    }

    // The config holds a raw pointer; ownership moves into mDecoders.
    decoder_desc.decoder = decoder.get();
    mDecoders.push_back(std::move(decoder));
    decoder_desc.payload_name = config->mName;
    decoder_desc.payload_type = config->mType;
    // XXX Ok, add:
    // Set decoder_desc.codec_params (fmtp)
    mRecvStreamConfig.decoders.push_back(decoder_desc);
  }

  mRecvStream =
      mCall->Call()->CreateVideoReceiveStream(mRecvStreamConfig.Copy());
  if (!mRecvStream) {
    mDecoders.clear();
    return kMediaConduitUnknownError;
  }

  // Add RTPPacketSinkInterface for synchronization source tracking
  mRecvStream->AddSecondarySink(this);

  CSFLogDebug(LOGTAG, "Created VideoReceiveStream %p for SSRC %u (0x%x)",
              mRecvStream, mRecvStreamConfig.rtp.remote_ssrc,
              mRecvStreamConfig.rtp.remote_ssrc);

  return kMediaConduitNoError;
}
766
// Builds codec-specific encoder settings (H264/VP8/VP9) from the negotiated
// config and the conduit's mode. Screencast disables denoising, automatic
// resize, and frame dropping; simulcast also disables automatic resize.
// Returns null for codecs with no specific settings. Main thread only.
static rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings>
ConfigureVideoEncoderSettings(const VideoCodecConfig* aConfig,
                              const WebrtcVideoConduit* aConduit) {
  MOZ_ASSERT(NS_IsMainThread());

  bool is_screencast =
      aConduit->CodecMode() == webrtc::VideoCodecMode::kScreensharing;
  // No automatic resizing when using simulcast or screencast.
  bool automatic_resize = !is_screencast && aConfig->mEncodings.size() <= 1;
  bool frame_dropping = !is_screencast;
  bool denoising;
  bool codec_default_denoising = false;
  if (is_screencast) {
    denoising = false;
  } else {
    // Use codec default if video_noise_reduction is unset.
    denoising = aConduit->Denoising();
    codec_default_denoising = !denoising;
  }

  if (aConfig->mName == "H264") {
    webrtc::VideoCodecH264 h264_settings =
        webrtc::VideoEncoder::GetDefaultH264Settings();
    h264_settings.frameDroppingOn = frame_dropping;
    h264_settings.packetizationMode = aConfig->mPacketizationMode;
    return new rtc::RefCountedObject<
        webrtc::VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings);
  }
  if (aConfig->mName == "VP8") {
    webrtc::VideoCodecVP8 vp8_settings =
        webrtc::VideoEncoder::GetDefaultVp8Settings();
    vp8_settings.automaticResizeOn = automatic_resize;
    // VP8 denoising is enabled by default.
    vp8_settings.denoisingOn = codec_default_denoising ? true : denoising;
    vp8_settings.frameDroppingOn = frame_dropping;
    return new rtc::RefCountedObject<
        webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
  }
  if (aConfig->mName == "VP9") {
    webrtc::VideoCodecVP9 vp9_settings =
        webrtc::VideoEncoder::GetDefaultVp9Settings();
    if (is_screencast) {
      // TODO(asapersson): Set to 2 for now since there is a DCHECK in
      // VideoSendStream::ReconfigureVideoEncoder.
      vp9_settings.numberOfSpatialLayers = 2;
    } else {
      vp9_settings.numberOfSpatialLayers = aConduit->SpatialLayers();
    }
    // VP9 denoising is disabled by default.
    vp9_settings.denoisingOn = codec_default_denoising ? false : denoising;
    vp9_settings.frameDroppingOn = true;  // This must be true for VP9
    return new rtc::RefCountedObject<
        webrtc::VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
  }
  return nullptr;
}
823
824 // Compare lists of codecs
CodecsDifferent(const nsTArray<UniquePtr<VideoCodecConfig>> & a,const nsTArray<UniquePtr<VideoCodecConfig>> & b)825 static bool CodecsDifferent(const nsTArray<UniquePtr<VideoCodecConfig>>& a,
826 const nsTArray<UniquePtr<VideoCodecConfig>>& b) {
827 // return a != b;
828 // would work if UniquePtr<> operator== compared contents!
829 auto len = a.Length();
830 if (len != b.Length()) {
831 return true;
832 }
833
834 // XXX std::equal would work, if we could use it on this - fails for the
835 // same reason as above. c++14 would let us pass a comparator function.
836 for (uint32_t i = 0; i < len; ++i) {
837 if (!(*a[i] == *b[i])) {
838 return true;
839 }
840 }
841
842 return false;
843 }
844
845 /**
846 * Note: Setting the send-codec on the Video Engine will restart the encoder,
847 * sets up new SSRC and reset RTP_RTCP module with the new codec setting.
848 *
849 * Note: this is called from MainThread, and the codec settings are read on
850 * videoframe delivery threads (i.e in SendVideoFrame(). With
851 * renegotiation/reconfiguration, this now needs a lock! Alternatively
852 * changes could be queued until the next frame is delivered using an
853 * Atomic pointer and swaps.
854 */
ConfigureSendMediaCodec(const VideoCodecConfig * codecConfig,const RtpRtcpConfig & aRtpRtcpConfig)855 MediaConduitErrorCode WebrtcVideoConduit::ConfigureSendMediaCodec(
856 const VideoCodecConfig* codecConfig, const RtpRtcpConfig& aRtpRtcpConfig) {
857 MOZ_ASSERT(NS_IsMainThread());
858 MutexAutoLock lock(mMutex);
859 mUpdateResolution = true;
860
861 CSFLogDebug(LOGTAG, "%s for %s", __FUNCTION__,
862 codecConfig ? codecConfig->mName.c_str() : "<null>");
863
864 MediaConduitErrorCode condError = kMediaConduitNoError;
865
866 // validate basic params
867 if ((condError = ValidateCodecConfig(codecConfig)) != kMediaConduitNoError) {
868 return condError;
869 }
870
871 size_t streamCount = std::min(codecConfig->mEncodings.size(),
872 (size_t)webrtc::kMaxSimulcastStreams);
873 size_t highestResolutionIndex = 0;
874 for (size_t i = 1; i < streamCount; ++i) {
875 if (codecConfig->mEncodings[i].constraints.scaleDownBy <
876 codecConfig->mEncodings[highestResolutionIndex]
877 .constraints.scaleDownBy) {
878 highestResolutionIndex = i;
879 }
880 }
881
882 MOZ_RELEASE_ASSERT(streamCount >= 1, "streamCount should be at least one");
883
884 CSFLogDebug(LOGTAG, "%s for VideoConduit:%p stream count:%zu", __FUNCTION__,
885 this, streamCount);
886
887 mSendingFramerate = 0;
888 mSendStreamConfig.rtp.rids.clear();
889
890 int max_framerate;
891 if (codecConfig->mEncodingConstraints.maxFps > 0) {
892 max_framerate = codecConfig->mEncodingConstraints.maxFps;
893 } else {
894 max_framerate = DEFAULT_VIDEO_MAX_FRAMERATE;
895 }
896 // apply restrictions from maxMbps/etc
897 mSendingFramerate =
898 SelectSendFrameRate(codecConfig, max_framerate, mLastWidth, mLastHeight);
899
900 // So we can comply with b=TIAS/b=AS/maxbr=X when input resolution changes
901 mNegotiatedMaxBitrate = codecConfig->mTias;
902
903 if (mLastWidth == 0 && mMinBitrateEstimate != 0) {
904 // Only do this at the start; use "have we send a frame" as a reasonable
905 // stand-in. min <= start <= max (which can be -1, note!)
906 webrtc::Call::Config::BitrateConfig config;
907 config.min_bitrate_bps = mMinBitrateEstimate;
908 if (config.start_bitrate_bps < mMinBitrateEstimate) {
909 config.start_bitrate_bps = mMinBitrateEstimate;
910 }
911 if (config.max_bitrate_bps > 0 &&
912 config.max_bitrate_bps < mMinBitrateEstimate) {
913 config.max_bitrate_bps = mMinBitrateEstimate;
914 }
915 mCall->Call()->SetBitrateConfig(config);
916 }
917
918 mVideoStreamFactory = new rtc::RefCountedObject<VideoStreamFactory>(
919 *codecConfig, mCodecMode, mMinBitrate, mStartBitrate, mPrefMaxBitrate,
920 mNegotiatedMaxBitrate, mSendingFramerate);
921 mEncoderConfig.video_stream_factory = mVideoStreamFactory.get();
922
923 // Reset the VideoAdapter. SelectResolution will ensure limits are set.
924 mVideoAdapter = MakeUnique<cricket::VideoAdapter>(
925 streamCount > 1 ? SIMULCAST_RESOLUTION_ALIGNMENT : 1);
926 mVideoAdapter->OnScaleResolutionBy(
927 codecConfig->mEncodings[highestResolutionIndex].constraints.scaleDownBy >
928 1.0
929 ? rtc::Optional<float>(codecConfig->mEncodings[highestResolutionIndex]
930 .constraints.scaleDownBy)
931 : rtc::Optional<float>());
932
933 // XXX parse the encoded SPS/PPS data and set spsData/spsLen/ppsData/ppsLen
934 mEncoderConfig.encoder_specific_settings =
935 ConfigureVideoEncoderSettings(codecConfig, this);
936
937 mEncoderConfig.content_type =
938 mCodecMode == webrtc::kRealtimeVideo
939 ? webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo
940 : webrtc::VideoEncoderConfig::ContentType::kScreen;
941 // for the GMP H.264 encoder/decoder!!
942 mEncoderConfig.min_transmit_bitrate_bps = 0;
943 // Expected max number of encodings
944 mEncoderConfig.number_of_streams = streamCount;
945
946 // If only encoder stream attibutes have been changed, there is no need to
947 // stop, create a new webrtc::VideoSendStream, and restart. Recreating on
948 // PayloadType change may be overkill, but is safe.
949 if (mSendStream) {
950 if (!RequiresNewSendStream(*codecConfig) &&
951 mActiveCodecMode == mCodecMode) {
952 mCurSendCodecConfig->mEncodingConstraints =
953 codecConfig->mEncodingConstraints;
954 mCurSendCodecConfig->mEncodings = codecConfig->mEncodings;
955 mSendStream->ReconfigureVideoEncoder(mEncoderConfig.Copy());
956 return kMediaConduitNoError;
957 }
958
959 condError = StopTransmittingLocked();
960 if (condError != kMediaConduitNoError) {
961 return condError;
962 }
963
964 // This will cause a new encoder to be created by StartTransmitting()
965 DeleteSendStream();
966 }
967
968 mSendStreamConfig.encoder_settings.payload_name = codecConfig->mName;
969 mSendStreamConfig.encoder_settings.payload_type = codecConfig->mType;
970 mSendStreamConfig.rtp.rtcp_mode = aRtpRtcpConfig.GetRtcpMode();
971 mSendStreamConfig.rtp.max_packet_size = kVideoMtu;
972 if (codecConfig->RtxPayloadTypeIsSet()) {
973 mSendStreamConfig.rtp.rtx.payload_type = codecConfig->mRTXPayloadType;
974 } else {
975 mSendStreamConfig.rtp.rtx.payload_type = -1;
976 mSendStreamConfig.rtp.rtx.ssrcs.clear();
977 }
978
979 // See Bug 1297058, enabling FEC when basic NACK is to be enabled in H.264 is
980 // problematic
981 if (codecConfig->RtcpFbFECIsSet() &&
982 !(codecConfig->mName == "H264" && codecConfig->RtcpFbNackIsSet(""))) {
983 mSendStreamConfig.rtp.ulpfec.ulpfec_payload_type =
984 codecConfig->mULPFECPayloadType;
985 mSendStreamConfig.rtp.ulpfec.red_payload_type =
986 codecConfig->mREDPayloadType;
987 mSendStreamConfig.rtp.ulpfec.red_rtx_payload_type =
988 codecConfig->mREDRTXPayloadType;
989 } else {
990 // Reset to defaults
991 mSendStreamConfig.rtp.ulpfec.ulpfec_payload_type = -1;
992 mSendStreamConfig.rtp.ulpfec.red_payload_type = -1;
993 mSendStreamConfig.rtp.ulpfec.red_rtx_payload_type = -1;
994 }
995
996 mSendStreamConfig.rtp.nack.rtp_history_ms =
997 codecConfig->RtcpFbNackIsSet("") ? 1000 : 0;
998
999 // Copy the applied config for future reference.
1000 mCurSendCodecConfig = MakeUnique<VideoCodecConfig>(*codecConfig);
1001
1002 mSendStreamConfig.rtp.rids.clear();
1003 bool has_rid = false;
1004 for (size_t idx = 0; idx < streamCount; idx++) {
1005 auto& encoding = mCurSendCodecConfig->mEncodings[idx];
1006 if (encoding.rid[0]) {
1007 has_rid = true;
1008 break;
1009 }
1010 }
1011 if (has_rid) {
1012 for (size_t idx = streamCount; idx > 0; idx--) {
1013 auto& encoding = mCurSendCodecConfig->mEncodings[idx - 1];
1014 mSendStreamConfig.rtp.rids.push_back(encoding.rid);
1015 }
1016 }
1017
1018 return condError;
1019 }
1020
GenerateRandomSSRC()1021 static uint32_t GenerateRandomSSRC() {
1022 uint32_t ssrc;
1023 do {
1024 SECStatus rv = PK11_GenerateRandom(reinterpret_cast<unsigned char*>(&ssrc),
1025 sizeof(ssrc));
1026 if (rv != SECSuccess) {
1027 CSFLogError(LOGTAG, "%s: PK11_GenerateRandom failed with error %d",
1028 __FUNCTION__, rv);
1029 return 0;
1030 }
1031 } while (ssrc == 0); // webrtc.org code has fits if you select an SSRC of 0
1032
1033 return ssrc;
1034 }
1035
SetRemoteSSRC(uint32_t ssrc,uint32_t rtxSsrc)1036 bool WebrtcVideoConduit::SetRemoteSSRC(uint32_t ssrc, uint32_t rtxSsrc) {
1037 MOZ_ASSERT(NS_IsMainThread());
1038 MutexAutoLock lock(mMutex);
1039
1040 return SetRemoteSSRCLocked(ssrc, rtxSsrc);
1041 }
1042
// Switches the receive side to a new remote (and RTX) SSRC. Requires the
// conduit mutex held; temporarily drops it to coordinate with other conduits.
// Stops reception, tears down the receive stream, and restarts reception if
// it was previously running. Returns false on any failure along the way.
bool WebrtcVideoConduit::SetRemoteSSRCLocked(uint32_t ssrc, uint32_t rtxSsrc) {
  MOZ_ASSERT(NS_IsMainThread());
  mMutex.AssertCurrentThreadOwns();

  // No-op if both SSRCs are already what we were asked for.
  if (mRecvStreamConfig.rtp.remote_ssrc == ssrc &&
      mRecvStreamConfig.rtp.rtx_ssrc == rtxSsrc) {
    return true;
  }

  // Remember whether we were receiving so we can restart afterwards.
  bool wasReceiving = mEngineReceiving;
  if (NS_WARN_IF(StopReceivingLocked() != kMediaConduitNoError)) {
    return false;
  }

  {
    CSFLogDebug(LOGTAG, "%s: SSRC %u (0x%x)", __FUNCTION__, ssrc, ssrc);
    // Drop our mutex while asking other conduits to release this SSRC —
    // UnsetRemoteSSRC on them may re-enter conduit code and must not
    // deadlock against us.
    MutexAutoUnlock unlock(mMutex);
    if (!mCall->UnsetRemoteSSRC(ssrc)) {
      CSFLogError(LOGTAG,
                  "%s: Failed to unset SSRC %u (0x%x) on other conduits,"
                  " bailing",
                  __FUNCTION__, ssrc, ssrc);
      return false;
    }
  }

  mRemoteSSRC = ssrc;
  mRecvStreamConfig.rtp.remote_ssrc = ssrc;
  mRecvStreamConfig.rtp.rtx_ssrc = rtxSsrc;
  // Flip mWaitingForInitialSsrc on the STS thread (where it is read);
  // the self RefPtr keeps us alive and is released back on main.
  mStsThread->Dispatch(NS_NewRunnableFunction(
      "WebrtcVideoConduit::WaitingForInitialSsrcNoMore",
      [this, self = RefPtr<WebrtcVideoConduit>(this)]() mutable {
        mWaitingForInitialSsrc = false;
        NS_ReleaseOnMainThread(
            "WebrtcVideoConduit::WaitingForInitialSsrcNoMore", self.forget());
      }));
  // On the next StartReceiving() or ConfigureRecvMediaCodec, force
  // building a new RecvStream to switch SSRCs.
  DeleteRecvStream();

  if (wasReceiving) {
    if (StartReceivingLocked() != kMediaConduitNoError) {
      return false;
    }
  }

  return true;
}
1091
UnsetRemoteSSRC(uint32_t ssrc)1092 bool WebrtcVideoConduit::UnsetRemoteSSRC(uint32_t ssrc) {
1093 MOZ_ASSERT(NS_IsMainThread());
1094 MutexAutoLock lock(mMutex);
1095
1096 if (mRecvStreamConfig.rtp.remote_ssrc != ssrc &&
1097 mRecvStreamConfig.rtp.rtx_ssrc != ssrc) {
1098 return true;
1099 }
1100
1101 mRecvStreamConfig.rtp.rtx_ssrc = 0;
1102
1103 uint32_t our_ssrc = 0;
1104 do {
1105 our_ssrc = GenerateRandomSSRC();
1106 if (our_ssrc == 0) {
1107 return false;
1108 }
1109 } while (our_ssrc == ssrc);
1110
1111 // There is a (tiny) chance that this new random ssrc will collide with some
1112 // other conduit's remote ssrc, in which case that conduit will choose a new
1113 // one.
1114 SetRemoteSSRCLocked(our_ssrc, 0);
1115 return true;
1116 }
1117
GetRemoteSSRC(uint32_t * ssrc)1118 bool WebrtcVideoConduit::GetRemoteSSRC(uint32_t* ssrc) {
1119 if (NS_IsMainThread()) {
1120 if (!mRecvStream) {
1121 return false;
1122 }
1123 }
1124 // libwebrtc uses 0 to mean a lack of SSRC. That is not to spec.
1125 *ssrc = mRemoteSSRC;
1126 return true;
1127 }
1128
GetSendPacketTypeStats(webrtc::RtcpPacketTypeCounter * aPacketCounts)1129 bool WebrtcVideoConduit::GetSendPacketTypeStats(
1130 webrtc::RtcpPacketTypeCounter* aPacketCounts) {
1131 ASSERT_ON_THREAD(mStsThread);
1132
1133 MutexAutoLock lock(mMutex);
1134 if (!mSendStreamStats.Active()) {
1135 return false;
1136 }
1137 *aPacketCounts = mSendStreamStats.PacketCounts();
1138 return true;
1139 }
1140
GetRecvPacketTypeStats(webrtc::RtcpPacketTypeCounter * aPacketCounts)1141 bool WebrtcVideoConduit::GetRecvPacketTypeStats(
1142 webrtc::RtcpPacketTypeCounter* aPacketCounts) {
1143 ASSERT_ON_THREAD(mStsThread);
1144
1145 if (!mRecvStreamStats.Active()) {
1146 return false;
1147 }
1148 *aPacketCounts = mRecvStreamStats.PacketCounts();
1149 return true;
1150 }
1151
// Periodic stats poll (driven by mVideoStatsTimer on main). Snapshots the
// send/receive/call stats on main, then applies all updates on the STS
// thread where the stats caches are read.
void WebrtcVideoConduit::PollStats() {
  MOZ_ASSERT(NS_IsMainThread());

  // Updates to run on STS; at most one for send and one for receive.
  nsTArray<RefPtr<Runnable>> runnables(2);
  if (mEngineTransmitting) {
    MOZ_RELEASE_ASSERT(mSendStream);
    if (!mSendStreamConfig.rtp.ssrcs.empty()) {
      // Stats are keyed to the first (primary) send SSRC.
      uint32_t ssrc = mSendStreamConfig.rtp.ssrcs.front();
      webrtc::VideoSendStream::Stats stats = mSendStream->GetStats();
      runnables.AppendElement(NS_NewRunnableFunction(
          "WebrtcVideoConduit::SendStreamStatistics::Update",
          [this, self = RefPtr<WebrtcVideoConduit>(this),
           stats = std::move(stats),
           ssrc]() { mSendStreamStats.Update(stats, ssrc); }));
    }
  }
  if (mEngineReceiving) {
    MOZ_RELEASE_ASSERT(mRecvStream);
    webrtc::VideoReceiveStream::Stats stats = mRecvStream->GetStats();
    runnables.AppendElement(NS_NewRunnableFunction(
        "WebrtcVideoConduit::RecvStreamStatistics::Update",
        [this, self = RefPtr<WebrtcVideoConduit>(this),
         stats = std::move(stats)]() { mRecvStreamStats.Update(stats); }));
  }
  // Call-level stats (e.g. RTT) are updated on STS too, and the queued
  // send/recv runnables execute there in the same hop so all three caches
  // advance together. The self RefPtr is released back on main.
  webrtc::Call::Stats stats = mCall->Call()->GetStats();
  mStsThread->Dispatch(NS_NewRunnableFunction(
      "WebrtcVideoConduit::UpdateStreamStatistics",
      [this, self = RefPtr<WebrtcVideoConduit>(this), stats = std::move(stats),
       runnables = std::move(runnables)]() mutable {
        mCallStats.Update(stats);
        for (const auto& runnable : runnables) {
          runnable->Run();
        }
        NS_ReleaseOnMainThread("WebrtcVideoConduit::UpdateStreamStatistics",
                               self.forget());
      }));
}
1189
// Keeps the stats-polling machinery in sync with the engine state: pushes
// the current transmit/receive activity flags to the STS-side stats caches,
// and starts or cancels the 1s polling timer as needed.
void WebrtcVideoConduit::UpdateVideoStatsTimer() {
  MOZ_ASSERT(NS_IsMainThread());

  // Snapshot the flags on main; apply them on STS where the stats live.
  bool transmitting = mEngineTransmitting;
  bool receiving = mEngineReceiving;
  mStsThread->Dispatch(NS_NewRunnableFunction(
      "WebrtcVideoConduit::SetSendStreamStatsActive",
      [this, self = RefPtr<WebrtcVideoConduit>(this), transmitting,
       receiving]() mutable {
        mSendStreamStats.SetActive(transmitting);
        mRecvStreamStats.SetActive(receiving);
        // Release the keep-alive reference back on the main thread.
        NS_ReleaseOnMainThread("WebrtcVideoConduit::SetSendStreamStatsActive",
                               self.forget());
      }));

  // The timer should run iff either direction is active; avoid re-arming
  // or double-cancelling when nothing changed.
  bool shouldBeActive = transmitting || receiving;
  if (mVideoStatsTimerActive == shouldBeActive) {
    return;
  }
  mVideoStatsTimerActive = shouldBeActive;
  if (shouldBeActive) {
    nsTimerCallbackFunc callback = [](nsITimer*, void* aClosure) {
      CSFLogDebug(LOGTAG, "StreamStats polling scheduled for VideoConduit: %p",
                  aClosure);
      static_cast<WebrtcVideoConduit*>(aClosure)->PollStats();
    };
    // Poll once per second; CAN_SKIP lets ticks drop under load.
    mVideoStatsTimer->InitWithNamedFuncCallback(
        callback, this, 1000, nsITimer::TYPE_REPEATING_PRECISE_CAN_SKIP,
        "WebrtcVideoConduit::SendStreamStatsUpdater");
  } else {
    mVideoStatsTimer->Cancel();
  }
}
1223
GetVideoEncoderStats(double * framerateMean,double * framerateStdDev,double * bitrateMean,double * bitrateStdDev,uint32_t * droppedFrames,uint32_t * framesEncoded,Maybe<uint64_t> * qpSum)1224 bool WebrtcVideoConduit::GetVideoEncoderStats(
1225 double* framerateMean, double* framerateStdDev, double* bitrateMean,
1226 double* bitrateStdDev, uint32_t* droppedFrames, uint32_t* framesEncoded,
1227 Maybe<uint64_t>* qpSum) {
1228 ASSERT_ON_THREAD(mStsThread);
1229
1230 MutexAutoLock lock(mMutex);
1231 if (!mEngineTransmitting || !mSendStream) {
1232 return false;
1233 }
1234 mSendStreamStats.GetVideoStreamStats(*framerateMean, *framerateStdDev,
1235 *bitrateMean, *bitrateStdDev);
1236 *droppedFrames = mSendStreamStats.DroppedFrames();
1237 *framesEncoded = mSendStreamStats.FramesEncoded();
1238 *qpSum = mSendStreamStats.QpSum();
1239 return true;
1240 }
1241
GetVideoDecoderStats(double * framerateMean,double * framerateStdDev,double * bitrateMean,double * bitrateStdDev,uint32_t * discardedPackets,uint32_t * framesDecoded)1242 bool WebrtcVideoConduit::GetVideoDecoderStats(double* framerateMean,
1243 double* framerateStdDev,
1244 double* bitrateMean,
1245 double* bitrateStdDev,
1246 uint32_t* discardedPackets,
1247 uint32_t* framesDecoded) {
1248 ASSERT_ON_THREAD(mStsThread);
1249
1250 MutexAutoLock lock(mMutex);
1251 if (!mEngineReceiving || !mRecvStream) {
1252 return false;
1253 }
1254 mRecvStreamStats.GetVideoStreamStats(*framerateMean, *framerateStdDev,
1255 *bitrateMean, *bitrateStdDev);
1256 *discardedPackets = mRecvStreamStats.DiscardedPackets();
1257 *framesDecoded = mRecvStreamStats.FramesDecoded();
1258 return true;
1259 }
1260
GetRTPReceiverStats(uint32_t * jitterMs,uint32_t * packetsLost)1261 bool WebrtcVideoConduit::GetRTPReceiverStats(uint32_t* jitterMs,
1262 uint32_t* packetsLost) {
1263 ASSERT_ON_THREAD(mStsThread);
1264
1265 CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
1266 MutexAutoLock lock(mMutex);
1267 if (!mRecvStream) {
1268 return false;
1269 }
1270
1271 *jitterMs = mRecvStreamStats.JitterMs();
1272 *packetsLost = mRecvStreamStats.PacketsLost();
1273 return true;
1274 }
1275
GetRTCPReceiverReport(uint32_t * jitterMs,uint32_t * packetsReceived,uint64_t * bytesReceived,uint32_t * cumulativeLost,Maybe<double> * aOutRttSec)1276 bool WebrtcVideoConduit::GetRTCPReceiverReport(uint32_t* jitterMs,
1277 uint32_t* packetsReceived,
1278 uint64_t* bytesReceived,
1279 uint32_t* cumulativeLost,
1280 Maybe<double>* aOutRttSec) {
1281 ASSERT_ON_THREAD(mStsThread);
1282
1283 CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
1284 aOutRttSec->reset();
1285 if (!mSendStreamStats.Active()) {
1286 return false;
1287 }
1288 if (!mSendStreamStats.SsrcFound()) {
1289 return false;
1290 }
1291 *jitterMs = mSendStreamStats.JitterMs();
1292 *packetsReceived = mSendStreamStats.PacketsReceived();
1293 *bytesReceived = mSendStreamStats.BytesReceived();
1294 *cumulativeLost = mSendStreamStats.PacketsLost();
1295 *aOutRttSec = mCallStats.RttSec();
1296 return true;
1297 }
1298
GetRTCPSenderReport(unsigned int * packetsSent,uint64_t * bytesSent,DOMHighResTimeStamp * aRemoteTimestamp)1299 bool WebrtcVideoConduit::GetRTCPSenderReport(
1300 unsigned int* packetsSent, uint64_t* bytesSent,
1301 DOMHighResTimeStamp* aRemoteTimestamp) {
1302 ASSERT_ON_THREAD(mStsThread);
1303
1304 CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
1305
1306 if (!mRecvStreamStats.Active()) {
1307 return false;
1308 }
1309
1310 *packetsSent = mRecvStreamStats.PacketsSent();
1311 *bytesSent = mRecvStreamStats.BytesSent();
1312 *aRemoteTimestamp = mRecvStreamStats.RemoteTimestamp();
1313 return true;
1314 }
1315
1316 Maybe<mozilla::dom::RTCBandwidthEstimationInternal>
GetBandwidthEstimation()1317 WebrtcVideoConduit::GetBandwidthEstimation() {
1318 ASSERT_ON_THREAD(mStsThread);
1319 return mCallStats.Stats();
1320 }
1321
GetRtpSources(nsTArray<dom::RTCRtpSourceEntry> & outSources)1322 void WebrtcVideoConduit::GetRtpSources(
1323 nsTArray<dom::RTCRtpSourceEntry>& outSources) {
1324 MOZ_ASSERT(NS_IsMainThread());
1325 return mRtpSourceObserver->GetRtpSources(outSources);
1326 }
1327
InitMain()1328 MediaConduitErrorCode WebrtcVideoConduit::InitMain() {
1329 MOZ_ASSERT(NS_IsMainThread());
1330
1331 nsresult rv;
1332 nsCOMPtr<nsIPrefService> prefs =
1333 do_GetService("@mozilla.org/preferences-service;1", &rv);
1334 if (!NS_WARN_IF(NS_FAILED(rv))) {
1335 nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
1336
1337 if (branch) {
1338 int32_t temp;
1339 Unused << NS_WARN_IF(NS_FAILED(branch->GetBoolPref(
1340 "media.video.test_latency", &mVideoLatencyTestEnable)));
1341 Unused << NS_WARN_IF(NS_FAILED(branch->GetBoolPref(
1342 "media.video.test_latency", &mVideoLatencyTestEnable)));
1343 if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
1344 "media.peerconnection.video.min_bitrate", &temp)))) {
1345 if (temp >= 0) {
1346 mMinBitrate = KBPS(temp);
1347 }
1348 }
1349 if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
1350 "media.peerconnection.video.start_bitrate", &temp)))) {
1351 if (temp >= 0) {
1352 mStartBitrate = KBPS(temp);
1353 }
1354 }
1355 if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
1356 "media.peerconnection.video.max_bitrate", &temp)))) {
1357 if (temp >= 0) {
1358 mPrefMaxBitrate = KBPS(temp);
1359 }
1360 }
1361 if (mMinBitrate != 0 && mMinBitrate < kViEMinCodecBitrate_bps) {
1362 mMinBitrate = kViEMinCodecBitrate_bps;
1363 }
1364 if (mStartBitrate < mMinBitrate) {
1365 mStartBitrate = mMinBitrate;
1366 }
1367 if (mPrefMaxBitrate && mStartBitrate > mPrefMaxBitrate) {
1368 mStartBitrate = mPrefMaxBitrate;
1369 }
1370 // XXX We'd love if this was a live param for testing adaptation/etc
1371 // in automation
1372 if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
1373 "media.peerconnection.video.min_bitrate_estimate", &temp)))) {
1374 if (temp >= 0) {
1375 mMinBitrateEstimate = temp; // bps!
1376 }
1377 }
1378 if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
1379 "media.peerconnection.video.svc.spatial", &temp)))) {
1380 if (temp >= 0) {
1381 mSpatialLayers = temp;
1382 }
1383 }
1384 if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
1385 "media.peerconnection.video.svc.temporal", &temp)))) {
1386 if (temp >= 0) {
1387 mTemporalLayers = temp;
1388 }
1389 }
1390 Unused << NS_WARN_IF(NS_FAILED(branch->GetBoolPref(
1391 "media.peerconnection.video.denoising", &mDenoising)));
1392 Unused << NS_WARN_IF(NS_FAILED(branch->GetBoolPref(
1393 "media.peerconnection.video.lock_scaling", &mLockScaling)));
1394 }
1395 }
1396 #ifdef MOZ_WIDGET_ANDROID
1397 if (mozilla::camera::VideoEngine::SetAndroidObjects() != 0) {
1398 CSFLogError(LOGTAG, "%s: could not set Android objects", __FUNCTION__);
1399 return kMediaConduitSessionNotInited;
1400 }
1401 #endif // MOZ_WIDGET_ANDROID
1402 return kMediaConduitNoError;
1403 }
1404
1405 /**
1406 * Performs initialization of the MANDATORY components of the Video Engine
1407 */
Init()1408 MediaConduitErrorCode WebrtcVideoConduit::Init() {
1409 MOZ_ASSERT(NS_IsMainThread());
1410
1411 CSFLogDebug(LOGTAG, "%s this=%p", __FUNCTION__, this);
1412 MediaConduitErrorCode result;
1413 result = InitMain();
1414 if (result != kMediaConduitNoError) {
1415 return result;
1416 }
1417
1418 CSFLogDebug(LOGTAG, "%s Initialization Done", __FUNCTION__);
1419 return kMediaConduitNoError;
1420 }
1421
DeleteStreams()1422 void WebrtcVideoConduit::DeleteStreams() {
1423 MOZ_ASSERT(NS_IsMainThread());
1424
1425 // We can't delete the VideoEngine until all these are released!
1426 // And we can't use a Scoped ptr, since the order is arbitrary
1427
1428 MutexAutoLock lock(mMutex);
1429 DeleteSendStream();
1430 DeleteRecvStream();
1431 }
1432
AttachRenderer(RefPtr<mozilla::VideoRenderer> aVideoRenderer)1433 MediaConduitErrorCode WebrtcVideoConduit::AttachRenderer(
1434 RefPtr<mozilla::VideoRenderer> aVideoRenderer) {
1435 MOZ_ASSERT(NS_IsMainThread());
1436
1437 CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
1438
1439 // null renderer
1440 if (!aVideoRenderer) {
1441 CSFLogError(LOGTAG, "%s NULL Renderer", __FUNCTION__);
1442 MOZ_ASSERT(false);
1443 return kMediaConduitInvalidRenderer;
1444 }
1445
1446 // This function is called only from main, so we only need to protect against
1447 // modifying mRenderer while any webrtc.org code is trying to use it.
1448 {
1449 ReentrantMonitorAutoEnter enter(mTransportMonitor);
1450 mRenderer = aVideoRenderer;
1451 // Make sure the renderer knows the resolution
1452 mRenderer->FrameSizeChange(mReceivingWidth, mReceivingHeight);
1453 }
1454
1455 return kMediaConduitNoError;
1456 }
1457
DetachRenderer()1458 void WebrtcVideoConduit::DetachRenderer() {
1459 MOZ_ASSERT(NS_IsMainThread());
1460
1461 ReentrantMonitorAutoEnter enter(mTransportMonitor);
1462 if (mRenderer) {
1463 mRenderer = nullptr;
1464 }
1465 }
1466
SetTransmitterTransport(RefPtr<TransportInterface> aTransport)1467 MediaConduitErrorCode WebrtcVideoConduit::SetTransmitterTransport(
1468 RefPtr<TransportInterface> aTransport) {
1469 MOZ_ASSERT(NS_IsMainThread());
1470
1471 CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
1472
1473 ReentrantMonitorAutoEnter enter(mTransportMonitor);
1474 // set the transport
1475 mTransmitterTransport = aTransport;
1476 return kMediaConduitNoError;
1477 }
1478
SetReceiverTransport(RefPtr<TransportInterface> aTransport)1479 MediaConduitErrorCode WebrtcVideoConduit::SetReceiverTransport(
1480 RefPtr<TransportInterface> aTransport) {
1481 MOZ_ASSERT(NS_IsMainThread());
1482
1483 CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
1484
1485 ReentrantMonitorAutoEnter enter(mTransportMonitor);
1486 // set the transport
1487 mReceiverTransport = aTransport;
1488 return kMediaConduitNoError;
1489 }
1490
// Applies a negotiated list of receive codecs. Validates each config,
// unions the RTCP feedback options across codecs, and — only when the
// effective receive configuration actually changed — tears down and
// recreates the receive stream with fresh SSRCs as needed.
MediaConduitErrorCode WebrtcVideoConduit::ConfigureRecvMediaCodecs(
    const std::vector<UniquePtr<VideoCodecConfig>>& codecConfigList,
    const RtpRtcpConfig& aRtpRtcpConfig) {
  MOZ_ASSERT(NS_IsMainThread());

  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
  MediaConduitErrorCode condError = kMediaConduitNoError;
  std::string payloadName;

  if (codecConfigList.empty()) {
    CSFLogError(LOGTAG, "%s Zero number of codecs to configure", __FUNCTION__);
    return kMediaConduitMalformedArgument;
  }

  // Feedback options accumulated (unioned) across all valid codecs.
  webrtc::KeyFrameRequestMethod kf_request_method = webrtc::kKeyFrameReqPliRtcp;
  bool kf_request_enabled = false;
  bool use_nack_basic = false;
  bool use_tmmbr = false;
  bool use_remb = false;
  bool use_fec = false;
  bool use_transport_cc = false;
  int ulpfec_payload_type = kNullPayloadType;
  int red_payload_type = kNullPayloadType;
  bool configuredH264 = false;
  nsTArray<UniquePtr<VideoCodecConfig>> recv_codecs;

  // Try Applying the codecs in the list
  // we treat as success if at least one codec was applied and reception was
  // started successfully.
  std::set<unsigned int> codec_types_seen;
  for (const auto& codec_config : codecConfigList) {
    // Invalid entries are skipped, not fatal.
    if ((condError = ValidateCodecConfig(codec_config.get())) !=
        kMediaConduitNoError) {
      CSFLogError(LOGTAG, "%s Invalid config for %s decoder: %i", __FUNCTION__,
                  codec_config ? codec_config->mName.c_str() : "<null>",
                  condError);
      continue;
    }
    if (codec_config->mName == "H264") {
      // TODO(bug 1200768): We can only handle configuring one recv H264 codec
      if (configuredH264) {
        continue;
      }
      configuredH264 = true;
    }

    // ULPFEC/RED are not real codecs; capture their payload types and skip.
    if (codec_config->mName == kUlpFecPayloadName) {
      ulpfec_payload_type = codec_config->mType;
      continue;
    }

    if (codec_config->mName == kRedPayloadName) {
      red_payload_type = codec_config->mType;
      continue;
    }

    // Check for the keyframe request type: PLI is preferred
    // over FIR, and FIR is preferred over none.
    // XXX (See upstream issue
    // https://bugs.chromium.org/p/webrtc/issues/detail?id=7002): There is no
    // 'none' option in webrtc.org
    if (codec_config->RtcpFbNackIsSet("pli")) {
      kf_request_enabled = true;
      kf_request_method = webrtc::kKeyFrameReqPliRtcp;
    } else if (!kf_request_enabled && codec_config->RtcpFbCcmIsSet("fir")) {
      kf_request_enabled = true;
      kf_request_method = webrtc::kKeyFrameReqFirRtcp;
    }

    // What if codec A has Nack and REMB, and codec B has TMMBR, and codec C has
    // none? In practice, that's not a useful configuration, and
    // VideoReceiveStream::Config can't represent that, so simply union the
    // (boolean) settings
    use_nack_basic |= codec_config->RtcpFbNackIsSet("");
    use_tmmbr |= codec_config->RtcpFbCcmIsSet("tmmbr");
    use_remb |= codec_config->RtcpFbRembIsSet();
    use_fec |= codec_config->RtcpFbFECIsSet();
    use_transport_cc |= codec_config->RtcpFbTransportCCIsSet();

    recv_codecs.AppendElement(new VideoCodecConfig(*codec_config));
  }

  if (!recv_codecs.Length()) {
    CSFLogError(LOGTAG, "%s Found no valid receive codecs", __FUNCTION__);
    return kMediaConduitMalformedArgument;
  }

  // Now decide if we need to recreate the receive stream, or can keep it
  if (!mRecvStream || CodecsDifferent(recv_codecs, mRecvCodecList) ||
      mRecvStreamConfig.rtp.nack.rtp_history_ms !=
          (use_nack_basic ? 1000 : 0) ||
      mRecvStreamConfig.rtp.remb != use_remb ||
      mRecvStreamConfig.rtp.transport_cc != use_transport_cc ||
      mRecvStreamConfig.rtp.tmmbr != use_tmmbr ||
      mRecvStreamConfig.rtp.keyframe_method != kf_request_method ||
      (use_fec &&
       (mRecvStreamConfig.rtp.ulpfec_payload_type != ulpfec_payload_type ||
        mRecvStreamConfig.rtp.red_payload_type != red_payload_type))) {
    MutexAutoLock lock(mMutex);

    condError = StopReceivingLocked();
    if (condError != kMediaConduitNoError) {
      return condError;
    }

    // If we fail after here things get ugly
    mRecvStreamConfig.rtp.rtcp_mode = aRtpRtcpConfig.GetRtcpMode();
    mRecvStreamConfig.rtp.nack.rtp_history_ms = use_nack_basic ? 1000 : 0;
    mRecvStreamConfig.rtp.remb = use_remb;
    mRecvStreamConfig.rtp.transport_cc = use_transport_cc;
    mRecvStreamConfig.rtp.tmmbr = use_tmmbr;
    mRecvStreamConfig.rtp.keyframe_method = kf_request_method;

    if (use_fec) {
      mRecvStreamConfig.rtp.ulpfec_payload_type = ulpfec_payload_type;
      mRecvStreamConfig.rtp.red_payload_type = red_payload_type;
    } else {
      // Reset to defaults
      mRecvStreamConfig.rtp.ulpfec_payload_type = -1;
      mRecvStreamConfig.rtp.red_payload_type = -1;
    }

    // Rebuild the RTX payload-type -> media payload-type map.
    mRecvStreamConfig.rtp.rtx_associated_payload_types.clear();
    for (auto& codec : recv_codecs) {
      if (codec->RtxPayloadTypeIsSet()) {
        mRecvStreamConfig.rtp.AddRtxBinding(codec->mRTXPayloadType,
                                            codec->mType);
      }
    }
    // SetRemoteSSRC should have populated this already
    mRecvSSRC = mRecvStreamConfig.rtp.remote_ssrc;

    // XXX ugh! same SSRC==0 problem that webrtc.org has
    if (mRecvSSRC == 0) {
      // Handle un-signalled SSRCs by creating a random one and then when it
      // actually gets set, we'll destroy and recreate. Simpler than trying to
      // unwind all the logic that assumes the receive stream is created and
      // started when we ConfigureRecvMediaCodecs()
      uint32_t ssrc = GenerateRandomSSRC();
      if (ssrc == 0) {
        // webrtc.org code has fits if you select an SSRC of 0, so that's how
        // we signal an error.
        return kMediaConduitUnknownError;
      }

      mRecvStreamConfig.rtp.remote_ssrc = ssrc;
      mRecvSSRC = ssrc;
    }

    // 0 isn't allowed.  Would be best to ask for a random SSRC from the
    // RTP code.  Would need to call rtp_sender.cc -- GenerateNewSSRC(),
    // which isn't exposed.  It's called on collision, or when we decide to
    // send.  it should be called on receiver creation.  Here, we're
    // generating the SSRC value - but this causes ssrc_forced in set in
    // rtp_sender, which locks us into the SSRC - even a collision won't
    // change it!!!
    MOZ_ASSERT(!mSendStreamConfig.rtp.ssrcs.empty());
    auto ssrc = mSendStreamConfig.rtp.ssrcs.front();
    Unused << NS_WARN_IF(ssrc == mRecvStreamConfig.rtp.remote_ssrc);

    // The local SSRC must differ from the remote one; re-roll on collision.
    while (ssrc == mRecvStreamConfig.rtp.remote_ssrc) {
      ssrc = GenerateRandomSSRC();
      if (ssrc == 0) {
        return kMediaConduitUnknownError;
      }
    }

    mRecvStreamConfig.rtp.local_ssrc = ssrc;
    CSFLogDebug(LOGTAG,
                "%s (%p): Local SSRC 0x%08x (of %u), remote SSRC 0x%08x",
                __FUNCTION__, (void*)this, ssrc,
                (uint32_t)mSendStreamConfig.rtp.ssrcs.size(),
                mRecvStreamConfig.rtp.remote_ssrc);

    // XXX Copy over those that are the same and don't rebuild them
    mRecvCodecList = std::move(recv_codecs);

    // Force the next StartReceivingLocked() to build a fresh stream.
    DeleteRecvStream();
    return StartReceivingLocked();
  }
  return kMediaConduitNoError;
}
1673
// Creates a decoder for the given codec type, preferring (in order):
// platform MediaDataDecoder, then a codec-specific external/hardware
// decoder, then a software fallback. Records the GMP plugin ID when an
// external decoder is used. Returns nullptr for unsupported types.
std::unique_ptr<webrtc::VideoDecoder> WebrtcVideoConduit::CreateDecoder(
    webrtc::VideoCodecType aType) {
  MOZ_ASSERT(NS_IsMainThread());

  std::unique_ptr<webrtc::VideoDecoder> decoder = nullptr;
  mRecvCodecPluginID = 0;

#ifdef MOZ_WEBRTC_MEDIACODEC
  bool enabled = false;
#endif

  // Attempt to create a decoder using MediaDataDecoder.
  decoder.reset(MediaDataCodec::CreateDecoder(aType));
  if (decoder) {
    return decoder;
  }

  switch (aType) {
    case webrtc::VideoCodecType::kVideoCodecH264:
      // get an external decoder
      decoder.reset(GmpVideoCodec::CreateDecoder());
      if (decoder) {
        // Remember the GMP plugin so it can be managed/blocked later.
        mRecvCodecPluginID =
            static_cast<WebrtcVideoDecoder*>(decoder.get())->PluginID();
      }
      break;

    case webrtc::VideoCodecType::kVideoCodecVP8:
#ifdef MOZ_WEBRTC_MEDIACODEC
      // attempt to get a decoder
      enabled = mozilla::Preferences::GetBool(
          "media.navigator.hardware.vp8_decode.acceleration_enabled", false);
      if (enabled) {
        nsCOMPtr<nsIGfxInfo> gfxInfo = do_GetService("@mozilla.org/gfx/info;1");
        if (gfxInfo) {
          int32_t status;
          nsCString discardFailureId;

          // Only use hardware decode when gfx blocklisting allows it.
          if (NS_SUCCEEDED(gfxInfo->GetFeatureStatus(
                  nsIGfxInfo::FEATURE_WEBRTC_HW_ACCELERATION_DECODE,
                  discardFailureId, &status))) {
            if (status != nsIGfxInfo::FEATURE_STATUS_OK) {
              NS_WARNING(
                  "VP8 decoder hardware is not whitelisted: disabling.\n");
            } else {
              decoder = MediaCodecVideoCodec::CreateDecoder(
                  MediaCodecVideoCodec::CodecType::CODEC_VP8);
            }
          }
        }
      }
#endif
      // Use a software VP8 decoder as a fallback.
      if (!decoder) {
        decoder = webrtc::VP8Decoder::Create();
      }
      break;

    case webrtc::VideoCodecType::kVideoCodecVP9:
      MOZ_ASSERT(webrtc::VP9Decoder::IsSupported());
      decoder = webrtc::VP9Decoder::Create();
      break;

    default:
      break;
  }

  return decoder;
}
1743
CreateEncoder(webrtc::VideoCodecType aType)1744 std::unique_ptr<webrtc::VideoEncoder> WebrtcVideoConduit::CreateEncoder(
1745 webrtc::VideoCodecType aType) {
1746 MOZ_ASSERT(NS_IsMainThread());
1747
1748 std::unique_ptr<webrtc::VideoEncoder> encoder = nullptr;
1749 mSendCodecPluginID = 0;
1750
1751 #ifdef MOZ_WEBRTC_MEDIACODEC
1752 bool enabled = false;
1753 #endif
1754
1755 if (StaticPrefs::media_webrtc_platformencoder()) {
1756 encoder.reset(MediaDataCodec::CreateEncoder(aType));
1757 if (encoder) {
1758 return encoder;
1759 }
1760 }
1761
1762 switch (aType) {
1763 case webrtc::VideoCodecType::kVideoCodecH264:
1764 // get an external encoder
1765 encoder.reset(GmpVideoCodec::CreateEncoder());
1766 if (encoder) {
1767 mSendCodecPluginID =
1768 static_cast<WebrtcVideoEncoder*>(encoder.get())->PluginID();
1769 }
1770 break;
1771
1772 case webrtc::VideoCodecType::kVideoCodecVP8:
1773 encoder.reset(new webrtc::EncoderSimulcastProxy(
1774 this, webrtc::SdpVideoFormat(cricket::kVp8CodecName)));
1775 break;
1776
1777 case webrtc::VideoCodecType::kVideoCodecVP9:
1778 encoder = webrtc::VP9Encoder::Create();
1779 break;
1780
1781 default:
1782 break;
1783 }
1784 return encoder;
1785 }
1786
GetSupportedFormats() const1787 std::vector<webrtc::SdpVideoFormat> WebrtcVideoConduit::GetSupportedFormats()
1788 const {
1789 MOZ_ASSERT_UNREACHABLE("Unexpected call");
1790 CSFLogError(LOGTAG, "Unexpected call to GetSupportedFormats()");
1791 return {webrtc::SdpVideoFormat("VP8")};
1792 }
1793
QueryVideoEncoder(const webrtc::SdpVideoFormat & format) const1794 WebrtcVideoConduit::CodecInfo WebrtcVideoConduit::QueryVideoEncoder(
1795 const webrtc::SdpVideoFormat& format) const {
1796 MOZ_ASSERT_UNREACHABLE("Unexpected call");
1797 CSFLogError(LOGTAG, "Unexpected call to QueryVideoEncoder()");
1798 CodecInfo info;
1799 info.is_hardware_accelerated = false;
1800 info.has_internal_source = false;
1801 return info;
1802 }
1803
CreateVideoEncoder(const webrtc::SdpVideoFormat & format)1804 std::unique_ptr<webrtc::VideoEncoder> WebrtcVideoConduit::CreateVideoEncoder(
1805 const webrtc::SdpVideoFormat& format) {
1806 MOZ_ASSERT(format.name == "VP8");
1807 std::unique_ptr<webrtc::VideoEncoder> encoder = nullptr;
1808 #ifdef MOZ_WEBRTC_MEDIACODEC
1809 // attempt to get a encoder
1810 enabled = mozilla::Preferences::GetBool(
1811 "media.navigator.hardware.vp8_encode.acceleration_enabled", false);
1812 if (enabled) {
1813 nsCOMPtr<nsIGfxInfo> gfxInfo = do_GetService("@mozilla.org/gfx/info;1");
1814 if (gfxInfo) {
1815 int32_t status;
1816 nsCString discardFailureId;
1817
1818 if (NS_SUCCEEDED(gfxInfo->GetFeatureStatus(
1819 nsIGfxInfo::FEATURE_WEBRTC_HW_ACCELERATION_ENCODE,
1820 discardFailureId, &status))) {
1821 if (status != nsIGfxInfo::FEATURE_STATUS_OK) {
1822 NS_WARNING("VP8 encoder hardware is not whitelisted: disabling.\n");
1823 } else {
1824 encoder = MediaCodecVideoCodec::CreateEncoder(
1825 MediaCodecVideoCodec::CodecType::CODEC_VP8);
1826 }
1827 }
1828 }
1829 }
1830 #endif
1831 // Use a software VP8 encoder as a fallback.
1832 encoder = webrtc::VP8Encoder::Create();
1833 return encoder;
1834 }
1835
1836 // XXX we need to figure out how to feed back changes in preferred capture
1837 // resolution to the getUserMedia source.
SelectSendResolution(unsigned short width,unsigned short height)1838 void WebrtcVideoConduit::SelectSendResolution(unsigned short width,
1839 unsigned short height) {
1840 mMutex.AssertCurrentThreadOwns();
1841 // XXX This will do bandwidth-resolution adaptation as well - bug 877954
1842
1843 // Enforce constraints
1844 if (mCurSendCodecConfig) {
1845 uint16_t max_width = mCurSendCodecConfig->mEncodingConstraints.maxWidth;
1846 uint16_t max_height = mCurSendCodecConfig->mEncodingConstraints.maxHeight;
1847 if (max_width || max_height) {
1848 max_width = max_width ? max_width : UINT16_MAX;
1849 max_height = max_height ? max_height : UINT16_MAX;
1850 ConstrainPreservingAspectRatio(max_width, max_height, &width, &height);
1851 }
1852
1853 int max_fs = mSinkWantsPixelCount;
1854 // Limit resolution to max-fs
1855 if (mCurSendCodecConfig->mEncodingConstraints.maxFs) {
1856 // max-fs is in macroblocks, convert to pixels
1857 max_fs = std::min(
1858 max_fs,
1859 static_cast<int>(mCurSendCodecConfig->mEncodingConstraints.maxFs *
1860 (16 * 16)));
1861 }
1862 mVideoAdapter->OnResolutionFramerateRequest(
1863 rtc::Optional<int>(), max_fs, std::numeric_limits<int>::max());
1864 }
1865
1866 unsigned int framerate = SelectSendFrameRate(
1867 mCurSendCodecConfig.get(), mSendingFramerate, width, height);
1868 if (mSendingFramerate != framerate) {
1869 CSFLogDebug(LOGTAG, "%s: framerate changing to %u (from %u)", __FUNCTION__,
1870 framerate, mSendingFramerate);
1871 mSendingFramerate = framerate;
1872 mVideoStreamFactory->SetSendingFramerate(mSendingFramerate);
1873 }
1874 }
1875
AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame> * sink,const rtc::VideoSinkWants & wants)1876 void WebrtcVideoConduit::AddOrUpdateSink(
1877 rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
1878 const rtc::VideoSinkWants& wants) {
1879 if (!NS_IsMainThread()) {
1880 // This may be called off main thread, but only to update an already added
1881 // sink. If we add it after the dispatch we're at risk of a UAF.
1882 NS_DispatchToMainThread(
1883 NS_NewRunnableFunction("WebrtcVideoConduit::UpdateSink",
1884 [this, self = RefPtr<WebrtcVideoConduit>(this),
1885 sink, wants = std::move(wants)]() {
1886 if (mRegisteredSinks.Contains(sink)) {
1887 AddOrUpdateSinkNotLocked(sink, wants);
1888 }
1889 }));
1890 return;
1891 }
1892
1893 mMutex.AssertCurrentThreadOwns();
1894 if (!mRegisteredSinks.Contains(sink)) {
1895 mRegisteredSinks.AppendElement(sink);
1896 }
1897 mVideoBroadcaster.AddOrUpdateSink(sink, wants);
1898 OnSinkWantsChanged(mVideoBroadcaster.wants());
1899 }
1900
// Locking wrapper around AddOrUpdateSink() for callers that do not already
// hold mMutex.
void WebrtcVideoConduit::AddOrUpdateSinkNotLocked(
    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
    const rtc::VideoSinkWants& wants) {
  MutexAutoLock lock(mMutex);
  AddOrUpdateSink(sink, wants);
}
1907
// Unregister |sink| and re-derive the aggregate sink wants from the sinks
// that remain. Main thread only; caller must already hold mMutex.
void WebrtcVideoConduit::RemoveSink(
    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
  MOZ_ASSERT(NS_IsMainThread());
  mMutex.AssertCurrentThreadOwns();

  mRegisteredSinks.RemoveElement(sink);
  mVideoBroadcaster.RemoveSink(sink);
  OnSinkWantsChanged(mVideoBroadcaster.wants());
}
1917
// Locking wrapper around RemoveSink() for callers that do not already hold
// mMutex.
void WebrtcVideoConduit::RemoveSinkNotLocked(
    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
  MutexAutoLock lock(mMutex);
  RemoveSink(sink);
}
1923
// Record the sinks' aggregated max-pixel-count constraint and flag that the
// send resolution must be recomputed on the next SendVideoFrame().
void WebrtcVideoConduit::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) {
  MOZ_ASSERT(NS_IsMainThread());
  mMutex.AssertCurrentThreadOwns();

  // Scaling is locked; ignore sink constraints entirely.
  if (mLockScaling) {
    return;
  }

  CSFLogDebug(LOGTAG, "%s (send SSRC %u (0x%x)) - wants pixels = %d",
              __FUNCTION__, mSendStreamConfig.rtp.ssrcs.front(),
              mSendStreamConfig.rtp.ssrcs.front(), wants.max_pixel_count);

  // Without a send codec there is nothing to adapt yet.
  if (!mCurSendCodecConfig) {
    return;
  }

  mSinkWantsPixelCount = wants.max_pixel_count;
  mUpdateResolution = true;
}
1943
// Deliver one captured frame toward the encoder: recompute the send
// resolution if needed, let the VideoAdapter decide crop/scale targets, do
// the center-crop-and-scale, then broadcast the (possibly adapted) frame to
// all registered sinks. Returns kMediaConduitNoError even when the frame is
// dropped (dropping is a normal adaptation outcome, not an error).
MediaConduitErrorCode WebrtcVideoConduit::SendVideoFrame(
    const webrtc::VideoFrame& frame) {
  // XXX Google uses a "timestamp_aligner" to translate timestamps from the
  // camera via TranslateTimestamp(); we should look at doing the same. This
  // avoids sampling error when capturing frames, but google had to deal with
  // some broken cameras, include Logitech c920's IIRC.

  int cropWidth;
  int cropHeight;
  int adaptedWidth;
  int adaptedHeight;
  {
    MutexAutoLock lock(mMutex);
    CSFLogVerbose(LOGTAG, "WebrtcVideoConduit %p %s (send SSRC %u (0x%x))",
                  this, __FUNCTION__, mSendStreamConfig.rtp.ssrcs.front(),
                  mSendStreamConfig.rtp.ssrcs.front());

    // Recompute when the input size changed or a sink updated its wants
    // (mUpdateResolution is set in OnSinkWantsChanged()).
    if (mUpdateResolution || frame.width() != mLastWidth ||
        frame.height() != mLastHeight) {
      // See if we need to recalculate what we're sending.
      CSFLogVerbose(LOGTAG, "%s: call SelectSendResolution with %ux%u",
                    __FUNCTION__, frame.width(), frame.height());
      MOZ_ASSERT(frame.width() != 0 && frame.height() != 0);
      // Note coverity will flag this since it thinks they can be 0
      MOZ_ASSERT(mCurSendCodecConfig);

      mLastWidth = frame.width();
      mLastHeight = frame.height();
      mUpdateResolution = false;
      SelectSendResolution(frame.width(), frame.height());
    }

    // adapt input video to wants of sink
    if (!mVideoBroadcaster.frame_wanted()) {
      return kMediaConduitNoError;
    }

    if (!mVideoAdapter->AdaptFrameResolution(
            frame.width(), frame.height(),
            frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec, &cropWidth,
            &cropHeight, &adaptedWidth, &adaptedHeight)) {
      // VideoAdapter dropped the frame.
      return kMediaConduitNoError;
    }
  }

  // If we have zero width or height, drop the frame here. Attempting to send
  // it will cause all sorts of problems in the webrtc.org code.
  if (cropWidth == 0 || cropHeight == 0) {
    return kMediaConduitNoError;
  }

  // Center the crop region within the source frame.
  int cropX = (frame.width() - cropWidth) / 2;
  int cropY = (frame.height() - cropHeight) / 2;

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer;
  if (adaptedWidth == frame.width() && adaptedHeight == frame.height()) {
    // No adaption - optimized path.
    buffer = frame.video_frame_buffer();
  } else {
    // Adapted I420 frame.
    rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer =
        mBufferPool.CreateBuffer(adaptedWidth, adaptedHeight);
    if (!i420Buffer) {
      CSFLogWarn(LOGTAG, "Creating a buffer for scaling failed, pool is empty");
      return kMediaConduitNoError;
    }
    i420Buffer->CropAndScaleFrom(*frame.video_frame_buffer()->GetI420().get(),
                                 cropX, cropY, cropWidth, cropHeight);
    buffer = i420Buffer;
  }

  mVideoBroadcaster.OnFrame(webrtc::VideoFrame(
      buffer, frame.timestamp(), frame.render_time_ms(), frame.rotation()));

  // Record delivery on the STS thread; the strong self reference is released
  // back on main thread.
  mStsThread->Dispatch(NS_NewRunnableFunction(
      "SendStreamStatistics::FrameDeliveredToEncoder",
      [self = RefPtr<WebrtcVideoConduit>(this), this]() mutable {
        mSendStreamStats.FrameDeliveredToEncoder();
        NS_ReleaseOnMainThread("SendStreamStatistics::FrameDeliveredToEncoder",
                               self.forget());
      }));
  return kMediaConduitNoError;
}
2028
2029 // Transport Layer Callbacks
2030
DeliverPacket(const void * data,int len)2031 MediaConduitErrorCode WebrtcVideoConduit::DeliverPacket(const void* data,
2032 int len) {
2033 ASSERT_ON_THREAD(mStsThread);
2034
2035 // Bug 1499796 - we need to get passed the time the packet was received
2036 webrtc::PacketReceiver::DeliveryStatus status =
2037 mCall->Call()->Receiver()->DeliverPacket(
2038 webrtc::MediaType::VIDEO, static_cast<const uint8_t*>(data), len,
2039 webrtc::PacketTime());
2040
2041 if (status != webrtc::PacketReceiver::DELIVERY_OK) {
2042 CSFLogError(LOGTAG, "%s DeliverPacket Failed, %d", __FUNCTION__, status);
2043 return kMediaConduitRTPProcessingFailed;
2044 }
2045
2046 return kMediaConduitNoError;
2047 }
2048
// STS-thread entry point for incoming RTP. Handles unknown/changed SSRCs by
// queueing packets while the receive side is rebuilt on main thread for the
// new SSRC, then delivers packets to the webrtc.org call.
MediaConduitErrorCode WebrtcVideoConduit::ReceivedRTPPacket(
    const void* data, int len, webrtc::RTPHeader& header) {
  ASSERT_ON_THREAD(mStsThread);

  if (mAllowSsrcChange || mWaitingForInitialSsrc) {
    // Handle the unknown ssrc (and ssrc-not-signaled case).
    // We can't just do this here; it has to happen on MainThread :-(
    // We also don't want to drop the packet, nor stall this thread, so we hold
    // the packet (and any following) for inserting once the SSRC is set.
    if (mRtpPacketQueue.IsQueueActive()) {
      mRtpPacketQueue.Enqueue(data, len);
      return kMediaConduitNoError;
    }

    bool switchRequired = mRecvSSRC != header.ssrc;
    if (switchRequired) {
      // We need to check that the newly received ssrc is not already
      // associated with ulpfec or rtx. This is how webrtc.org handles
      // things, see https://codereview.webrtc.org/1226093002.
      MutexAutoLock lock(mMutex);
      const webrtc::VideoReceiveStream::Config::Rtp& rtp =
          mRecvStreamConfig.rtp;
      switchRequired =
          rtp.rtx_associated_payload_types.find(header.payloadType) ==
              rtp.rtx_associated_payload_types.end() &&
          rtp.ulpfec_payload_type != header.payloadType;
    }

    if (switchRequired) {
      // a new switch needs to be done
      // any queued packets are from a previous switch that hasn't completed
      // yet; drop them and only process the latest SSRC
      mRtpPacketQueue.Clear();
      mRtpPacketQueue.Enqueue(data, len);

      CSFLogDebug(LOGTAG, "%s: switching from SSRC %u to %u", __FUNCTION__,
                  static_cast<uint32_t>(mRecvSSRC), header.ssrc);
      // we "switch" here immediately, but buffer until the queue is released
      mRecvSSRC = header.ssrc;

      // NOTE: the lambda captures |this| plus a strong self reference to keep
      // the conduit alive across the dispatch.
      NS_DispatchToMainThread(NS_NewRunnableFunction(
          "WebrtcVideoConduit::WebrtcGmpPCHandleSetter",
          [this, self = RefPtr<WebrtcVideoConduit>(this),
           ssrc = header.ssrc]() mutable {
            // Normally this is done in CreateOrUpdateMediaPipeline() for
            // initial creation and renegotiation, but here we're rebuilding the
            // Receive channel at a lower level. This is needed whenever we're
            // creating a GMPVideoCodec (in particular, H264) so it can
            // communicate errors to the PC.
            WebrtcGmpPCHandleSetter setter(mPCHandle);
            // TODO: This is problematic with rtx enabled, we don't know if
            // new ssrc is for rtx or not. This is fixed in a later patch in
            // this series.
            SetRemoteSSRC(
                ssrc, 0);  // this will likely re-create the VideoReceiveStream
            // We want to unblock the queued packets on the original thread
            mStsThread->Dispatch(NS_NewRunnableFunction(
                "WebrtcVideoConduit::QueuedPacketsHandler",
                [this, self = RefPtr<WebrtcVideoConduit>(this),
                 ssrc]() mutable {
                  if (ssrc != mRecvSSRC) {
                    // this is an intermediate switch; another is in-flight
                    return;
                  }
                  mRtpPacketQueue.DequeueAll(this);
                  NS_ReleaseOnMainThread(
                      "WebrtcVideoConduit::QueuedPacketsHandler",
                      self.forget());
                }));
          }));
      return kMediaConduitNoError;
    }
  }

  // Log sequence number (bytes 2-3) and SSRC (bytes 8-11) straight out of the
  // raw RTP header for debugging.
  CSFLogVerbose(LOGTAG, "%s: seq# %u, Len %d, SSRC %u (0x%x) ", __FUNCTION__,
                (uint16_t)ntohs(((uint16_t*)data)[1]), len,
                (uint32_t)ntohl(((uint32_t*)data)[2]),
                (uint32_t)ntohl(((uint32_t*)data)[2]));

  if (DeliverPacket(data, len) != kMediaConduitNoError) {
    CSFLogError(LOGTAG, "%s RTP Processing Failed", __FUNCTION__);
    return kMediaConduitRTPProcessingFailed;
  }
  return kMediaConduitNoError;
}
2135
ReceivedRTCPPacket(const void * data,int len)2136 MediaConduitErrorCode WebrtcVideoConduit::ReceivedRTCPPacket(const void* data,
2137 int len) {
2138 ASSERT_ON_THREAD(mStsThread);
2139
2140 CSFLogVerbose(LOGTAG, " %s Len %d ", __FUNCTION__, len);
2141
2142 if (DeliverPacket(data, len) != kMediaConduitNoError) {
2143 CSFLogError(LOGTAG, "%s RTCP Processing Failed", __FUNCTION__);
2144 return kMediaConduitRTPProcessingFailed;
2145 }
2146
2147 // TODO(bug 1496533): We will need to keep separate timestamps for each SSRC,
2148 // and for each SSRC we will need to keep a timestamp for SR and RR.
2149 mLastRtcpReceived = Some(GetNow());
2150 return kMediaConduitNoError;
2151 }
2152
2153 // TODO(bug 1496533): We will need to add a type (ie; SR or RR) param here, or
2154 // perhaps break this function into two functions, one for each type.
// STS-thread-only accessor for the timestamp recorded by
// ReceivedRTCPPacket(); Nothing() until the first RTCP packet arrives.
Maybe<DOMHighResTimeStamp> WebrtcVideoConduit::LastRtcpReceived() const {
  ASSERT_ON_THREAD(mStsThread);
  return mLastRtcpReceived;
}
2159
// Main-thread locking wrapper around StopTransmittingLocked().
MediaConduitErrorCode WebrtcVideoConduit::StopTransmitting() {
  MOZ_ASSERT(NS_IsMainThread());
  MutexAutoLock lock(mMutex);

  return StopTransmittingLocked();
}
2166
// Main-thread locking wrapper around StartTransmittingLocked().
MediaConduitErrorCode WebrtcVideoConduit::StartTransmitting() {
  MOZ_ASSERT(NS_IsMainThread());
  MutexAutoLock lock(mMutex);

  return StartTransmittingLocked();
}
2173
// Main-thread locking wrapper around StopReceivingLocked().
MediaConduitErrorCode WebrtcVideoConduit::StopReceiving() {
  MOZ_ASSERT(NS_IsMainThread());
  MutexAutoLock lock(mMutex);

  return StopReceivingLocked();
}
2180
// Main-thread locking wrapper around StartReceivingLocked().
MediaConduitErrorCode WebrtcVideoConduit::StartReceiving() {
  MOZ_ASSERT(NS_IsMainThread());
  MutexAutoLock lock(mMutex);

  return StartReceivingLocked();
}
2187
StopTransmittingLocked()2188 MediaConduitErrorCode WebrtcVideoConduit::StopTransmittingLocked() {
2189 MOZ_ASSERT(NS_IsMainThread());
2190 mMutex.AssertCurrentThreadOwns();
2191
2192 if (mEngineTransmitting) {
2193 if (mSendStream) {
2194 CSFLogDebug(LOGTAG, "%s Engine Already Sending. Attemping to Stop ",
2195 __FUNCTION__);
2196 mSendStream->Stop();
2197 }
2198
2199 mEngineTransmitting = false;
2200 UpdateVideoStatsTimer();
2201 }
2202 return kMediaConduitNoError;
2203 }
2204
StartTransmittingLocked()2205 MediaConduitErrorCode WebrtcVideoConduit::StartTransmittingLocked() {
2206 MOZ_ASSERT(NS_IsMainThread());
2207 mMutex.AssertCurrentThreadOwns();
2208
2209 if (mEngineTransmitting) {
2210 return kMediaConduitNoError;
2211 }
2212
2213 CSFLogDebug(LOGTAG, "%s Attemping to start... ", __FUNCTION__);
2214 // Start Transmitting on the video engine
2215 if (!mSendStream) {
2216 MediaConduitErrorCode rval = CreateSendStream();
2217 if (rval != kMediaConduitNoError) {
2218 CSFLogError(LOGTAG, "%s Start Send Error %d ", __FUNCTION__, rval);
2219 return rval;
2220 }
2221 }
2222
2223 mSendStream->Start();
2224 // XXX File a bug to consider hooking this up to the state of mtransport
2225 mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO,
2226 webrtc::kNetworkUp);
2227 mEngineTransmitting = true;
2228 UpdateVideoStatsTimer();
2229
2230 return kMediaConduitNoError;
2231 }
2232
StopReceivingLocked()2233 MediaConduitErrorCode WebrtcVideoConduit::StopReceivingLocked() {
2234 MOZ_ASSERT(NS_IsMainThread());
2235 mMutex.AssertCurrentThreadOwns();
2236
2237 // Are we receiving already? If so, stop receiving and playout
2238 // since we can't apply new recv codec when the engine is playing.
2239 if (mEngineReceiving && mRecvStream) {
2240 CSFLogDebug(LOGTAG, "%s Engine Already Receiving . Attemping to Stop ",
2241 __FUNCTION__);
2242 mRecvStream->Stop();
2243 }
2244
2245 mEngineReceiving = false;
2246 UpdateVideoStatsTimer();
2247 return kMediaConduitNoError;
2248 }
2249
StartReceivingLocked()2250 MediaConduitErrorCode WebrtcVideoConduit::StartReceivingLocked() {
2251 MOZ_ASSERT(NS_IsMainThread());
2252 mMutex.AssertCurrentThreadOwns();
2253
2254 if (mEngineReceiving) {
2255 return kMediaConduitNoError;
2256 }
2257
2258 CSFLogDebug(LOGTAG, "%s Attemping to start... (SSRC %u (0x%x))", __FUNCTION__,
2259 static_cast<uint32_t>(mRecvSSRC),
2260 static_cast<uint32_t>(mRecvSSRC));
2261 // Start Receiving on the video engine
2262 if (!mRecvStream) {
2263 MediaConduitErrorCode rval = CreateRecvStream();
2264 if (rval != kMediaConduitNoError) {
2265 CSFLogError(LOGTAG, "%s Start Receive Error %d ", __FUNCTION__, rval);
2266 return rval;
2267 }
2268 }
2269
2270 mRecvStream->Start();
2271 // XXX File a bug to consider hooking this up to the state of mtransport
2272 mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO,
2273 webrtc::kNetworkUp);
2274 mEngineReceiving = true;
2275 UpdateVideoStatsTimer();
2276
2277 return kMediaConduitNoError;
2278 }
2279
2280 // WebRTC::RTP Callback Implementation
2281 // Called on MTG thread
SendRtp(const uint8_t * packet,size_t length,const webrtc::PacketOptions & options)2282 bool WebrtcVideoConduit::SendRtp(const uint8_t* packet, size_t length,
2283 const webrtc::PacketOptions& options) {
2284 CSFLogVerbose(LOGTAG, "%s Sent RTP Packet seq %d, len %lu, SSRC %u (0x%x)",
2285 __FUNCTION__, (uint16_t)ntohs(*((uint16_t*)&packet[2])),
2286 (unsigned long)length,
2287 (uint32_t)ntohl(*((uint32_t*)&packet[8])),
2288 (uint32_t)ntohl(*((uint32_t*)&packet[8])));
2289
2290 ReentrantMonitorAutoEnter enter(mTransportMonitor);
2291 if (!mTransmitterTransport ||
2292 NS_FAILED(mTransmitterTransport->SendRtpPacket(packet, length))) {
2293 CSFLogError(LOGTAG, "%s RTP Packet Send Failed ", __FUNCTION__);
2294 return false;
2295 }
2296 if (options.packet_id >= 0) {
2297 int64_t now_ms = PR_Now() / 1000;
2298 mCall->Call()->OnSentPacket({options.packet_id, now_ms});
2299 }
2300 return true;
2301 }
2302
2303 // Called from multiple threads including webrtc Process thread
SendRtcp(const uint8_t * packet,size_t length)2304 bool WebrtcVideoConduit::SendRtcp(const uint8_t* packet, size_t length) {
2305 CSFLogVerbose(LOGTAG, "%s : len %lu ", __FUNCTION__, (unsigned long)length);
2306 // We come here if we have only one pipeline/conduit setup,
2307 // such as for unidirectional streams.
2308 // We also end up here if we are receiving
2309 ReentrantMonitorAutoEnter enter(mTransportMonitor);
2310 if (mReceiverTransport &&
2311 NS_SUCCEEDED(mReceiverTransport->SendRtcpPacket(packet, length))) {
2312 // Might be a sender report, might be a receiver report, we don't know.
2313 CSFLogDebug(LOGTAG, "%s Sent RTCP Packet ", __FUNCTION__);
2314 return true;
2315 }
2316 if (mTransmitterTransport &&
2317 NS_SUCCEEDED(mTransmitterTransport->SendRtcpPacket(packet, length))) {
2318 return true;
2319 }
2320
2321 CSFLogError(LOGTAG, "%s RTCP Packet Send Failed ", __FUNCTION__);
2322 return false;
2323 }
2324
// Sink callback for each decoded remote frame: notifies the renderer of size
// changes, maintains the per-SSRC/rotation frame history used by stats, and
// optionally decodes a latency timestamp stamped into the pixels, before
// handing the frame to the renderer.
void WebrtcVideoConduit::OnFrame(const webrtc::VideoFrame& video_frame) {
  CSFLogVerbose(LOGTAG, "%s: recv SSRC %u (0x%x), size %ux%u", __FUNCTION__,
                static_cast<uint32_t>(mRecvSSRC),
                static_cast<uint32_t>(mRecvSSRC), video_frame.width(),
                video_frame.height());
  ReentrantMonitorAutoEnter enter(mTransportMonitor);

  if (!mRenderer) {
    CSFLogError(LOGTAG, "%s Renderer is NULL ", __FUNCTION__);
    return;
  }

  // First frame ever needs a history entry.
  bool needsNewHistoryElement = !mReceivedFrameHistory.mEntries.Length();

  if (mReceivingWidth != video_frame.width() ||
      mReceivingHeight != video_frame.height()) {
    mReceivingWidth = video_frame.width();
    mReceivingHeight = video_frame.height();
    mRenderer->FrameSizeChange(mReceivingWidth, mReceivingHeight);
    needsNewHistoryElement = true;
  }

  uint32_t remoteSsrc;
  if (!GetRemoteSSRC(&remoteSsrc) && needsNewHistoryElement) {
    // Frame was decoded after the connection ended
    return;
  }

  // A rotation or SSRC change also starts a new history entry.
  if (!needsNewHistoryElement) {
    auto& currentEntry = mReceivedFrameHistory.mEntries.LastElement();
    needsNewHistoryElement =
        currentEntry.mRotationAngle !=
            static_cast<unsigned long>(video_frame.rotation()) ||
        currentEntry.mLocalSsrc != mRecvSSRC ||
        currentEntry.mRemoteSsrc != remoteSsrc;
  }

  // Record frame history
  const auto historyNow = mCall->GetNow();
  if (needsNewHistoryElement) {
    dom::RTCVideoFrameHistoryEntryInternal frameHistoryElement;
    frameHistoryElement.mConsecutiveFrames = 0;
    frameHistoryElement.mWidth = video_frame.width();
    frameHistoryElement.mHeight = video_frame.height();
    frameHistoryElement.mRotationAngle =
        static_cast<unsigned long>(video_frame.rotation());
    frameHistoryElement.mFirstFrameTimestamp = historyNow;
    frameHistoryElement.mLocalSsrc = mRecvSSRC;
    frameHistoryElement.mRemoteSsrc = remoteSsrc;
    if (!mReceivedFrameHistory.mEntries.AppendElement(frameHistoryElement,
                                                      fallible)) {
      mozalloc_handle_oom(0);
    }
  }
  auto& currentEntry = mReceivedFrameHistory.mEntries.LastElement();

  currentEntry.mConsecutiveFrames++;
  currentEntry.mLastFrameTimestamp = historyNow;
  // Attempt to retrieve an timestamp encoded in the image pixels if enabled.
  if (mVideoLatencyTestEnable && mReceivingWidth && mReceivingHeight) {
    uint64_t now = PR_Now();
    uint64_t timestamp = 0;
    uint8_t* data = const_cast<uint8_t*>(
        video_frame.video_frame_buffer()->GetI420()->DataY());
    bool ok = YuvStamper::Decode(
        mReceivingWidth, mReceivingHeight, mReceivingWidth, data,
        reinterpret_cast<unsigned char*>(&timestamp), sizeof(timestamp), 0, 0);
    if (ok) {
      VideoLatencyUpdate(now - timestamp);
    }
  }

  mRenderer->RenderVideoFrame(*video_frame.video_frame_buffer(),
                              video_frame.timestamp(),
                              video_frame.render_time_ms());
}
2401
AddFrameHistory(dom::Sequence<dom::RTCVideoFrameHistoryInternal> * outHistories) const2402 bool WebrtcVideoConduit::AddFrameHistory(
2403 dom::Sequence<dom::RTCVideoFrameHistoryInternal>* outHistories) const {
2404 ReentrantMonitorAutoEnter enter(mTransportMonitor);
2405 if (!outHistories->AppendElement(mReceivedFrameHistory, fallible)) {
2406 mozalloc_handle_oom(0);
2407 return false;
2408 }
2409 return true;
2410 }
2411
DumpCodecDB() const2412 void WebrtcVideoConduit::DumpCodecDB() const {
2413 MOZ_ASSERT(NS_IsMainThread());
2414
2415 for (auto& entry : mRecvCodecList) {
2416 CSFLogDebug(LOGTAG, "Payload Name: %s", entry->mName.c_str());
2417 CSFLogDebug(LOGTAG, "Payload Type: %d", entry->mType);
2418 CSFLogDebug(LOGTAG, "Payload Max Frame Size: %d",
2419 entry->mEncodingConstraints.maxFs);
2420 CSFLogDebug(LOGTAG, "Payload Max Frame Rate: %d",
2421 entry->mEncodingConstraints.maxFps);
2422 }
2423 }
2424
// Fold a new latency sample (microseconds, from the YuvStamper path in
// OnFrame()) into the running average. The average is kept scaled up by
// sRoundingPadding to retain precision in integer math; MozVideoLatencyAvg()
// divides the scaling back out.
void WebrtcVideoConduit::VideoLatencyUpdate(uint64_t newSample) {
  mTransportMonitor.AssertCurrentThreadIn();

  mVideoLatencyAvg =
      (sRoundingPadding * newSample + sAlphaNum * mVideoLatencyAvg) / sAlphaDen;
}
2431
// Return the running latency average with the sRoundingPadding scaling
// (applied in VideoLatencyUpdate()) removed.
uint64_t WebrtcVideoConduit::MozVideoLatencyAvg() {
  mTransportMonitor.AssertCurrentThreadIn();

  return mVideoLatencyAvg / sRoundingPadding;
}
2437
// STS-thread callback for each parsed incoming RTP packet; forwards the
// header (with current jitter) to the RTP source observer for source
// tracking. Packets carrying audio-level extensions are skipped.
void WebrtcVideoConduit::OnRtpPacket(const webrtc::RtpPacketReceived& aPacket) {
  ASSERT_ON_THREAD(mStsThread);
  webrtc::RTPHeader header;
  aPacket.GetHeader(&header);
  if (header.extension.hasAudioLevel ||
      header.extension.csrcAudioLevels.numAudioLevels) {
    CSFLogDebug(LOGTAG,
                "Video packet has audio level extension."
                "RTP source tracking ignored for this packet.");
    return;
  }
  mRtpSourceObserver->OnRtpPacket(header, mRecvStreamStats.JitterMs());
}
2451
OnRtcpBye()2452 void WebrtcVideoConduit::OnRtcpBye() {
2453 RefPtr<WebrtcVideoConduit> self = this;
2454 NS_DispatchToMainThread(media::NewRunnableFrom([self]() mutable {
2455 MOZ_ASSERT(NS_IsMainThread());
2456 if (self->mRtcpEventObserver) {
2457 self->mRtcpEventObserver->OnRtcpBye();
2458 }
2459 return NS_OK;
2460 }));
2461 }
2462
OnRtcpTimeout()2463 void WebrtcVideoConduit::OnRtcpTimeout() {
2464 RefPtr<WebrtcVideoConduit> self = this;
2465 NS_DispatchToMainThread(media::NewRunnableFrom([self]() mutable {
2466 MOZ_ASSERT(NS_IsMainThread());
2467 if (self->mRtcpEventObserver) {
2468 self->mRtcpEventObserver->OnRtcpTimeout();
2469 }
2470 return NS_OK;
2471 }));
2472 }
2473
// Main-thread setter for the observer notified of RTCP BYE/timeout events
// (see OnRtcpBye() and OnRtcpTimeout()).
void WebrtcVideoConduit::SetRtcpEventObserver(
    mozilla::RtcpEventObserver* observer) {
  MOZ_ASSERT(NS_IsMainThread());
  mRtcpEventObserver = observer;
}
2479
CodecPluginID()2480 uint64_t WebrtcVideoConduit::CodecPluginID() {
2481 MOZ_ASSERT(NS_IsMainThread());
2482
2483 if (mSendCodecPluginID) {
2484 return mSendCodecPluginID;
2485 }
2486 if (mRecvCodecPluginID) {
2487 return mRecvCodecPluginID;
2488 }
2489
2490 return 0;
2491 }
2492
// True when |newConfig| differs from the current send codec config in a way
// that cannot be applied in place (codec name/type, NACK setting, or FEC
// setting), or when there is no current config at all. Main thread only.
bool WebrtcVideoConduit::RequiresNewSendStream(
    const VideoCodecConfig& newConfig) const {
  MOZ_ASSERT(NS_IsMainThread());

  return !mCurSendCodecConfig ||
         mCurSendCodecConfig->mName != newConfig.mName ||
         mCurSendCodecConfig->mType != newConfig.mType ||
         mCurSendCodecConfig->RtcpFbNackIsSet("") !=
             newConfig.RtcpFbNackIsSet("") ||
         mCurSendCodecConfig->RtcpFbFECIsSet() != newConfig.RtcpFbFECIsSet()
#if 0
      // XXX Do we still want/need to do this?
      || (newConfig.mName == "H264" &&
          !CompatibleH264Config(mEncoderSpecificH264, newConfig))
#endif
      ;
}
2510
HasH264Hardware()2511 bool WebrtcVideoConduit::HasH264Hardware() {
2512 nsCOMPtr<nsIGfxInfo> gfxInfo = do_GetService("@mozilla.org/gfx/info;1");
2513 if (!gfxInfo) {
2514 return false;
2515 }
2516 int32_t status;
2517 nsCString discardFailureId;
2518 return NS_SUCCEEDED(gfxInfo->GetFeatureStatus(
2519 nsIGfxInfo::FEATURE_WEBRTC_HW_ACCELERATION_H264, discardFailureId,
2520 &status)) &&
2521 status == nsIGfxInfo::FEATURE_STATUS_OK;
2522 }
2523
2524 } // namespace mozilla
2525