1 /* This Source Code Form is subject to the terms of the Mozilla Public
2  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
3  * You can obtain one at http://mozilla.org/MPL/2.0/. */
4 
5 #include "CSFLog.h"
6 #include "nspr.h"
7 #include "plstr.h"
8 
9 #include "VideoConduit.h"
10 #include "AudioConduit.h"
11 #include "nsThreadUtils.h"
12 #include "LoadManager.h"
13 #include "YuvStamper.h"
14 #include "nsServiceManagerUtils.h"
15 #include "nsIPrefService.h"
16 #include "nsIPrefBranch.h"
17 #include "mozilla/media/MediaUtils.h"
18 #include "mozilla/TemplateLib.h"
19 
20 #include "webrtc/common_types.h"
21 #include "webrtc/common_video/interface/native_handle.h"
22 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
23 #include "webrtc/video_engine/include/vie_errors.h"
24 #include "webrtc/video_engine/vie_defines.h"
25 
26 #include "mozilla/Unused.h"
27 
28 #ifdef MOZ_WIDGET_ANDROID
29 #include "AndroidJNIWrapper.h"
30 #endif
31 
32 // for ntohs
33 #ifdef _MSC_VER
34 #include "Winsock2.h"
35 #else
36 #include <netinet/in.h>
37 #endif
38 
39 #include <algorithm>
40 #include <math.h>
41 
42 #define DEFAULT_VIDEO_MAX_FRAMERATE 30
43 #define INVALID_RTP_PAYLOAD 255  //valid payload types are 0 to 127
44 
45 namespace mozilla {
46 
47 static const char* logTag = "WebrtcVideoSessionConduit";
48 
49 // 32 bytes is what WebRTC CodecInst expects
50 const unsigned int WebrtcVideoConduit::CODEC_PLNAME_SIZE = 32;
51 
52 /**
53  * Factory Method for VideoConduit
54  */
55 RefPtr<VideoSessionConduit>
56 VideoSessionConduit::Create()
57 {
58   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
59   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
60 
61   WebrtcVideoConduit* obj = new WebrtcVideoConduit();
62   if(obj->Init() != kMediaConduitNoError)
63   {
64     CSFLogError(logTag,  "%s VideoConduit Init Failed ", __FUNCTION__);
65     delete obj;
66     return nullptr;
67   }
68   CSFLogDebug(logTag,  "%s Successfully created VideoConduit ", __FUNCTION__);
69   return obj;
70 }
71 
72 WebrtcVideoConduit::WebrtcVideoConduit():
73   mVideoEngine(nullptr),
74   mTransportMonitor("WebrtcVideoConduit"),
75   mTransmitterTransport(nullptr),
76   mReceiverTransport(nullptr),
77   mRenderer(nullptr),
78   mPtrExtCapture(nullptr),
79   mEngineTransmitting(false),
80   mEngineReceiving(false),
81   mChannel(-1),
82   mCapId(-1),
83   mCodecMutex("VideoConduit codec db"),
84   mInReconfig(false),
85   mLastWidth(0), // forces a check for reconfig at start
86   mLastHeight(0),
87   mSendingWidth(0),
88   mSendingHeight(0),
89   mReceivingWidth(0),
90   mReceivingHeight(0),
91   mSendingFramerate(DEFAULT_VIDEO_MAX_FRAMERATE),
92   mLastFramerateTenths(DEFAULT_VIDEO_MAX_FRAMERATE*10),
93   mNumReceivingStreams(1),
94   mVideoLatencyTestEnable(false),
95   mVideoLatencyAvg(0),
96   mMinBitrate(0),
97   mStartBitrate(0),
98   mMaxBitrate(0),
99   mMinBitrateEstimate(0),
100   mRtpStreamIdEnabled(false),
101   mRtpStreamIdExtId(0),
102   mCodecMode(webrtc::kRealtimeVideo)
103 {}
104 
105 WebrtcVideoConduit::~WebrtcVideoConduit()
106 {
107   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
108   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
109 
110   // Release AudioConduit first by dropping reference on MainThread, where it expects to be
111   SyncTo(nullptr);
112   Destroy();
113 }
114 
115 bool WebrtcVideoConduit::SetLocalSSRC(unsigned int ssrc)
116 {
117   unsigned int oldSsrc;
118   if (!GetLocalSSRC(&oldSsrc)) {
119     MOZ_ASSERT(false, "GetLocalSSRC failed");
120     return false;
121   }
122 
123   if (oldSsrc == ssrc) {
124     return true;
125   }
126 
127   bool wasTransmitting = mEngineTransmitting;
128   if (StopTransmitting() != kMediaConduitNoError) {
129     return false;
130   }
131 
132   if (mPtrRTP->SetLocalSSRC(mChannel, ssrc)) {
133     return false;
134   }
135 
136   if (wasTransmitting) {
137     if (StartTransmitting() != kMediaConduitNoError) {
138       return false;
139     }
140   }
141   return true;
142 }
143 
144 bool WebrtcVideoConduit::GetLocalSSRC(unsigned int* ssrc)
145 {
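  // ViE/RTP calls below return 0 on success, so the result is inverted to get
  // a bool "succeeded" (the same idiom is used throughout this file).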
146   return !mPtrRTP->GetLocalSSRC(mChannel, *ssrc);
147 }
148 
149 bool WebrtcVideoConduit::GetRemoteSSRC(unsigned int* ssrc)
150 {
151   return !mPtrRTP->GetRemoteSSRC(mChannel, *ssrc);
152 }
153 
154 bool WebrtcVideoConduit::SetLocalCNAME(const char* cname)
155 {
156   char temp[256];
157   strncpy(temp, cname, sizeof(temp) - 1);
158   temp[sizeof(temp) - 1] = 0;
159   return !mPtrRTP->SetRTCPCName(mChannel, temp);
160 }
161 
162 bool WebrtcVideoConduit::GetVideoEncoderStats(double* framerateMean,
163                                               double* framerateStdDev,
164                                               double* bitrateMean,
165                                               double* bitrateStdDev,
166                                               uint32_t* droppedFrames)
167 {
168   if (!mEngineTransmitting) {
169     return false;
170   }
171   MOZ_ASSERT(mVideoCodecStat);
172   mVideoCodecStat->GetEncoderStats(framerateMean, framerateStdDev,
173                                    bitrateMean, bitrateStdDev,
174                                    droppedFrames);
175 
176   // See if we need to adjust bandwidth.
177   // Avoid changing bandwidth constantly; use hysteresis.
178 
179   // Note: mLastFramerate is a relaxed Atomic because we're setting it here, and
180   // reading it on whatever thread calls DeliverFrame/SendVideoFrame.  Alternately
181   // we could use a lock.  Note that we don't change it often, and read it once per frame.
182   // We scale by *10 because mozilla::Atomic<> doesn't do 'double' or 'float'.
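  // Illustration: a measured mean of 29.97 fps is stored (times 10, truncated)
  // as the integer 299 and read back here as 29.9 fps.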
183   double framerate = mLastFramerateTenths/10.0; // fetch once
184   if (std::abs(*framerateMean - framerate)/framerate > 0.1 &&
185       *framerateMean >= 0.5) {
186     // unchanged resolution, but adjust bandwidth limits to match camera fps
187     CSFLogDebug(logTag, "Encoder frame rate changed from %f to %f",
188                 (mLastFramerateTenths/10.0), *framerateMean);
189     MutexAutoLock lock(mCodecMutex);
190     mLastFramerateTenths = *framerateMean * 10;
191     SelectSendResolution(mSendingWidth, mSendingHeight, nullptr);
192   }
193   return true;
194 }
195 
196 bool WebrtcVideoConduit::GetVideoDecoderStats(double* framerateMean,
197                                               double* framerateStdDev,
198                                               double* bitrateMean,
199                                               double* bitrateStdDev,
200                                               uint32_t* discardedPackets)
201 {
202   if (!mEngineReceiving) {
203     return false;
204   }
205   MOZ_ASSERT(mVideoCodecStat);
206   mVideoCodecStat->GetDecoderStats(framerateMean, framerateStdDev,
207                                    bitrateMean, bitrateStdDev,
208                                    discardedPackets);
209   return true;
210 }
211 
212 bool WebrtcVideoConduit::GetAVStats(int32_t* jitterBufferDelayMs,
213                                     int32_t* playoutBufferDelayMs,
214                                     int32_t* avSyncOffsetMs) {
215   return false;
216 }
217 
218 bool WebrtcVideoConduit::GetRTPStats(unsigned int* jitterMs,
219                                      unsigned int* cumulativeLost) {
220   unsigned short fractionLost;
221   unsigned extendedMax;
222   int64_t rttMs;
223   // GetReceivedRTCPStatistics is a poorly named GetRTPStatistics variant
224   return !mPtrRTP->GetReceivedRTCPStatistics(mChannel, fractionLost,
225                                              *cumulativeLost,
226                                              extendedMax,
227                                              *jitterMs,
228                                              rttMs);
229 }
230 
231 bool WebrtcVideoConduit::GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp,
232                                                uint32_t* jitterMs,
233                                                uint32_t* packetsReceived,
234                                                uint64_t* bytesReceived,
235                                                uint32_t* cumulativeLost,
236                                                int32_t* rttMs) {
237   uint32_t ntpHigh, ntpLow;
238   uint16_t fractionLost;
239   bool result = !mPtrRTP->GetRemoteRTCPReceiverInfo(mChannel, ntpHigh, ntpLow,
240                                                     *packetsReceived,
241                                                     *bytesReceived,
242                                                     jitterMs,
243                                                     &fractionLost,
244                                                     cumulativeLost,
245                                                     rttMs);
246   if (result) {
247     *timestamp = NTPtoDOMHighResTimeStamp(ntpHigh, ntpLow);
248   }
249   return result;
250 }
251 
252 bool WebrtcVideoConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp,
253                                              unsigned int* packetsSent,
254                                              uint64_t* bytesSent) {
255   struct webrtc::SenderInfo senderInfo;
256   bool result = !mPtrRTP->GetRemoteRTCPSenderInfo(mChannel, &senderInfo);
257   if (result) {
258     *timestamp = NTPtoDOMHighResTimeStamp(senderInfo.NTP_timestamp_high,
259                                           senderInfo.NTP_timestamp_low);
260     *packetsSent = senderInfo.sender_packet_count;
261     *bytesSent = senderInfo.sender_octet_count;
262   }
263   return result;
264 }
265 
266 MediaConduitErrorCode
267 WebrtcVideoConduit::InitMain()
268 {
269 #if defined(MOZILLA_INTERNAL_API)
270   // already know we must be on MainThread barring unit test weirdness
271   MOZ_ASSERT(NS_IsMainThread());
272 
273   nsresult rv;
274   nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv);
275   if (!NS_WARN_IF(NS_FAILED(rv)))
276   {
277     nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
278 
279     if (branch)
280     {
281       int32_t temp;
282       Unused << NS_WARN_IF(NS_FAILED(branch->GetBoolPref("media.video.test_latency", &mVideoLatencyTestEnable)));
283       if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref("media.peerconnection.video.min_bitrate", &temp))))
284       {
285          if (temp >= 0) {
286             mMinBitrate = temp;
287          }
288       }
289       if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref("media.peerconnection.video.start_bitrate", &temp))))
290       {
291          if (temp >= 0) {
292             mStartBitrate = temp;
293          }
294       }
295       if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref("media.peerconnection.video.max_bitrate", &temp))))
296       {
297         if (temp >= 0) {
298           mMaxBitrate = temp;
299         }
300       }
301       if (mMinBitrate != 0 && mMinBitrate < webrtc::kViEMinCodecBitrate) {
302         mMinBitrate = webrtc::kViEMinCodecBitrate;
303       }
304       if (mStartBitrate < mMinBitrate) {
305         mStartBitrate = mMinBitrate;
306       }
307       if (mStartBitrate > mMaxBitrate) {
308         mStartBitrate = mMaxBitrate;
309       }
310       if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref("media.peerconnection.video.min_bitrate_estimate", &temp))))
311       {
312         if (temp >= 0) {
313           mMinBitrateEstimate = temp;
314         }
315       }
316       bool use_loadmanager = false;
317       if (!NS_WARN_IF(NS_FAILED(branch->GetBoolPref("media.navigator.load_adapt", &use_loadmanager))))
318       {
319         if (use_loadmanager) {
320           mLoadManager = LoadManagerBuild();
321         }
322       }
323     }
324   }
325 
326 #ifdef MOZ_WIDGET_ANDROID
327   // get the JVM
328   JavaVM *jvm = jsjni_GetVM();
329 
330   if (webrtc::VideoEngine::SetAndroidObjects(jvm) != 0) {
331     CSFLogError(logTag,  "%s: could not set Android objects", __FUNCTION__);
332     return kMediaConduitSessionNotInited;
333   }
334 #endif
335 #endif
336   return kMediaConduitNoError;
337 }
338 
339 /**
340  * Performs initialization of the MANDATORY components of the Video Engine
341  */
342 MediaConduitErrorCode
343 WebrtcVideoConduit::Init()
344 {
345   CSFLogDebug(logTag,  "%s this=%p", __FUNCTION__, this);
346   MediaConduitErrorCode result;
347   // Run code that must run on MainThread first
348   MOZ_ASSERT(NS_IsMainThread());
349   result = InitMain();
350   if (result != kMediaConduitNoError) {
351     return result;
352   }
353 
354   // Per the WebRTC API, the function calls below return nullptr on failure.
355   mVideoEngine = webrtc::VideoEngine::Create();
356   if(!mVideoEngine)
357   {
358     CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__);
359     return kMediaConduitSessionNotInited;
360   }
361 
362   if( !(mPtrViEBase = ViEBase::GetInterface(mVideoEngine)))
363   {
364     CSFLogError(logTag, "%s Unable to get video base interface ", __FUNCTION__);
365     return kMediaConduitSessionNotInited;
366   }
367 
368   if( !(mPtrViECapture = ViECapture::GetInterface(mVideoEngine)))
369   {
370     CSFLogError(logTag, "%s Unable to get video capture interface", __FUNCTION__);
371     return kMediaConduitSessionNotInited;
372   }
373 
374   if( !(mPtrViECodec = ViECodec::GetInterface(mVideoEngine)))
375   {
376     CSFLogError(logTag, "%s Unable to get video codec interface ", __FUNCTION__);
377     return kMediaConduitSessionNotInited;
378   }
379 
380   if( !(mPtrViENetwork = ViENetwork::GetInterface(mVideoEngine)))
381   {
382     CSFLogError(logTag, "%s Unable to get video network interface ", __FUNCTION__);
383     return kMediaConduitSessionNotInited;
384   }
385 
386   if( !(mPtrViERender = ViERender::GetInterface(mVideoEngine)))
387   {
388     CSFLogError(logTag, "%s Unable to get video render interface ", __FUNCTION__);
389     return kMediaConduitSessionNotInited;
390   }
391 
392   mPtrExtCodec = webrtc::ViEExternalCodec::GetInterface(mVideoEngine);
393   if (!mPtrExtCodec) {
394     CSFLogError(logTag, "%s Unable to get external codec interface: %d ",
395                 __FUNCTION__,mPtrViEBase->LastError());
396     return kMediaConduitSessionNotInited;
397   }
398 
399   if( !(mPtrRTP = webrtc::ViERTP_RTCP::GetInterface(mVideoEngine)))
400   {
401     CSFLogError(logTag, "%s Unable to get video RTCP interface ", __FUNCTION__);
402     return kMediaConduitSessionNotInited;
403   }
404 
405   if ( !(mPtrExtCodec = webrtc::ViEExternalCodec::GetInterface(mVideoEngine)))
406   {
407     CSFLogError(logTag, "%s Unable to get external codec interface %d ",
408                 __FUNCTION__, mPtrViEBase->LastError());
409     return kMediaConduitSessionNotInited;
410   }
411 
412   CSFLogDebug(logTag, "%s Engine Created: Init'ng the interfaces ",__FUNCTION__);
413 
414   if(mPtrViEBase->Init() == -1)
415   {
416     CSFLogError(logTag, " %s Video Engine Init Failed %d ",__FUNCTION__,
417                 mPtrViEBase->LastError());
418     return kMediaConduitSessionNotInited;
419   }
420 
421   if(mPtrViEBase->CreateChannel(mChannel) == -1)
422   {
423     CSFLogError(logTag, " %s Channel creation Failed %d ",__FUNCTION__,
424                 mPtrViEBase->LastError());
425     return kMediaConduitChannelError;
426   }
427 
428   if(mPtrViENetwork->RegisterSendTransport(mChannel, *this) == -1)
429   {
430     CSFLogError(logTag,  "%s ViENetwork Failed %d ", __FUNCTION__,
431                 mPtrViEBase->LastError());
432     return kMediaConduitTransportRegistrationFail;
433   }
434 
435   if(mPtrViECapture->AllocateExternalCaptureDevice(mCapId,
436                                                    mPtrExtCapture) == -1)
437   {
438     CSFLogError(logTag, "%s Unable to Allocate capture module: %d ",
439                 __FUNCTION__, mPtrViEBase->LastError());
440     return kMediaConduitCaptureError;
441   }
442 
443   if(mPtrViECapture->ConnectCaptureDevice(mCapId,mChannel) == -1)
444   {
445     CSFLogError(logTag, "%s Unable to Connect capture module: %d ",
446                 __FUNCTION__,mPtrViEBase->LastError());
447     return kMediaConduitCaptureError;
448   }
449   // Set up some parameters, per juberti. Set MTU.
450   if(mPtrViENetwork->SetMTU(mChannel, 1200) != 0)
451   {
452     CSFLogError(logTag,  "%s MTU Failed %d ", __FUNCTION__,
453                 mPtrViEBase->LastError());
454     return kMediaConduitMTUError;
455   }
456   // Turn on RTCP and loss feedback reporting.
457   if(mPtrRTP->SetRTCPStatus(mChannel, webrtc::kRtcpCompound_RFC4585) != 0)
458   {
459     CSFLogError(logTag,  "%s RTCPStatus Failed %d ", __FUNCTION__,
460                 mPtrViEBase->LastError());
461     return kMediaConduitRTCPStatusError;
462   }
463 
464   if (mPtrViERender->AddRenderer(mChannel,
465                                 webrtc::kVideoI420,
466                                 (webrtc::ExternalRenderer*) this) == -1) {
467       CSFLogError(logTag, "%s Failed to added external renderer ", __FUNCTION__);
468       return kMediaConduitInvalidRenderer;
469   }
470 
471   if (mLoadManager) {
472     mPtrViEBase->RegisterCpuOveruseObserver(mChannel, mLoadManager);
473     mPtrViEBase->SetLoadManager(mLoadManager);
474   }
475 
476   CSFLogError(logTag, "%s Initialization Done", __FUNCTION__);
477   return kMediaConduitNoError;
478 }
479 
480 void
481 WebrtcVideoConduit::Destroy()
482 {
483   // The first one of a pair to be deleted shuts down media for both
484   //Deal with External Capturer
485   if(mPtrViECapture)
486   {
487     mPtrViECapture->DisconnectCaptureDevice(mCapId);
488     mPtrViECapture->ReleaseCaptureDevice(mCapId);
489     mPtrExtCapture = nullptr;
490   }
491 
492   if (mPtrExtCodec) {
493     mPtrExtCodec->Release();
494     mPtrExtCodec = nullptr;
495   }
496 
497   //Deal with External Renderer
498   if(mPtrViERender)
499   {
500     if(mRenderer) {
501       mPtrViERender->StopRender(mChannel);
502     }
503     mPtrViERender->RemoveRenderer(mChannel);
504   }
505 
506   //Deal with the transport
507   if(mPtrViENetwork)
508   {
509     mPtrViENetwork->DeregisterSendTransport(mChannel);
510   }
511 
512   if(mPtrViEBase)
513   {
514     mPtrViEBase->StopSend(mChannel);
515     mPtrViEBase->StopReceive(mChannel);
516     mPtrViEBase->DeleteChannel(mChannel);
517   }
518 
519   // mVideoCodecStat has a back-ptr to mPtrViECodec that must be released first
520   if (mVideoCodecStat) {
521     mVideoCodecStat->EndOfCallStats();
522   }
523   mVideoCodecStat = nullptr;
524   // We can't delete the VideoEngine until all these are released!
525   // And we can't use a Scoped ptr, since the order is arbitrary
526   mPtrViEBase = nullptr;
527   mPtrViECapture = nullptr;
528   mPtrViECodec = nullptr;
529   mPtrViENetwork = nullptr;
530   mPtrViERender = nullptr;
531   mPtrRTP = nullptr;
532   mPtrExtCodec = nullptr;
533 
534   // only one opener can call Delete.  Have it be the last to close.
535   if(mVideoEngine)
536   {
537     webrtc::VideoEngine::Delete(mVideoEngine);
538   }
539 }
540 
541 void
542 WebrtcVideoConduit::SyncTo(WebrtcAudioConduit *aConduit)
543 {
544   CSFLogDebug(logTag, "%s Synced to %p", __FUNCTION__, aConduit);
545 
546   // SyncTo(value) syncs to the AudioConduit, and if already synced replaces
547   // the current sync target.  SyncTo(nullptr) cancels any existing sync and
548   // releases the strong ref to AudioConduit.
549   if (aConduit) {
550     mPtrViEBase->SetVoiceEngine(aConduit->GetVoiceEngine());
551     mPtrViEBase->ConnectAudioChannel(mChannel, aConduit->GetChannel());
552     // NOTE: this means the VideoConduit will keep the AudioConduit alive!
553   } else {
554     mPtrViEBase->DisconnectAudioChannel(mChannel);
555     mPtrViEBase->SetVoiceEngine(nullptr);
556   }
557 
558   mSyncedTo = aConduit;
559 }
560 
561 MediaConduitErrorCode
562 WebrtcVideoConduit::AttachRenderer(RefPtr<VideoRenderer> aVideoRenderer)
563 {
564   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
565 
566   //null renderer
567   if(!aVideoRenderer)
568   {
569     CSFLogError(logTag, "%s NULL Renderer", __FUNCTION__);
570     MOZ_ASSERT(false);
571     return kMediaConduitInvalidRenderer;
572   }
573 
574   // This function is called only from main, so we only need to protect against
575   // modifying mRenderer while any webrtc.org code is trying to use it.
576   bool wasRendering;
577   {
578     ReentrantMonitorAutoEnter enter(mTransportMonitor);
579     wasRendering = !!mRenderer;
580     mRenderer = aVideoRenderer;
581     // Make sure the renderer knows the resolution
582     mRenderer->FrameSizeChange(mReceivingWidth,
583                                mReceivingHeight,
584                                mNumReceivingStreams);
585   }
586 
587   if (!wasRendering) {
588     if(mPtrViERender->StartRender(mChannel) == -1)
589     {
590       CSFLogError(logTag, "%s Starting the Renderer Failed %d ", __FUNCTION__,
591                                                       mPtrViEBase->LastError());
592       ReentrantMonitorAutoEnter enter(mTransportMonitor);
593       mRenderer = nullptr;
594       return kMediaConduitRendererFail;
595     }
596   }
597 
598   return kMediaConduitNoError;
599 }
600 
601 void
602 WebrtcVideoConduit::DetachRenderer()
603 {
604   {
605     ReentrantMonitorAutoEnter enter(mTransportMonitor);
606     if(mRenderer)
607     {
608       mRenderer = nullptr;
609     }
610   }
611 
612   mPtrViERender->StopRender(mChannel);
613 }
614 
615 MediaConduitErrorCode
616 WebrtcVideoConduit::SetTransmitterTransport(RefPtr<TransportInterface> aTransport)
617 {
618   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
619 
620   ReentrantMonitorAutoEnter enter(mTransportMonitor);
621   // set the transport
622   mTransmitterTransport = aTransport;
623   return kMediaConduitNoError;
624 }
625 
626 MediaConduitErrorCode
627 WebrtcVideoConduit::SetReceiverTransport(RefPtr<TransportInterface> aTransport)
628 {
629   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
630 
631   ReentrantMonitorAutoEnter enter(mTransportMonitor);
632   // set the transport
633   mReceiverTransport = aTransport;
634   return kMediaConduitNoError;
635 }
636 MediaConduitErrorCode
637 WebrtcVideoConduit::ConfigureCodecMode(webrtc::VideoCodecMode mode)
638 {
639   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
640   mCodecMode = mode;
641   return kMediaConduitNoError;
642 }
643 /**
644  * Note: Setting the send-codec on the Video Engine will restart the encoder,
645  * sets up new SSRC and reset RTP_RTCP module with the new codec setting.
646  *
647  * Note: this is called from MainThread, and the codec settings are read on
648  * videoframe delivery threads (i.e in SendVideoFrame().  With
649  * renegotiation/reconfiguration, this now needs a lock!  Alternatively
650  * changes could be queued until the next frame is delivered using an
651  * Atomic pointer and swaps.
652  */
653 MediaConduitErrorCode
654 WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig)
655 {
656   CSFLogDebug(logTag,  "%s for %s", __FUNCTION__, codecConfig ? codecConfig->mName.c_str() : "<null>");
657   bool codecFound = false;
658   MediaConduitErrorCode condError = kMediaConduitNoError;
659   int error = 0; //webrtc engine errors
660   webrtc::VideoCodec  video_codec;
661   std::string payloadName;
662 
663   memset(&video_codec, 0, sizeof(video_codec));
664 
665   {
666     //validate basic params
667     if((condError = ValidateCodecConfig(codecConfig,true)) != kMediaConduitNoError)
668     {
669       return condError;
670     }
671   }
672 
673   condError = StopTransmitting();
674   if (condError != kMediaConduitNoError) {
675     return condError;
676   }
677 
678   if (mRtpStreamIdEnabled) {
679     video_codec.ridId = mRtpStreamIdExtId;
680   }
681   if (mExternalSendCodec &&
682       codecConfig->mType == mExternalSendCodec->mType) {
683     CSFLogError(logTag, "%s Configuring External H264 Send Codec", __FUNCTION__);
684 
685     // width/height will be overridden on the first frame
686     video_codec.width = 320;
687     video_codec.height = 240;
688 #ifdef MOZ_WEBRTC_OMX
689     if (codecConfig->mType == webrtc::kVideoCodecH264) {
690       video_codec.resolution_divisor = 16;
691     } else {
692       video_codec.resolution_divisor = 1; // We could try using it to handle odd resolutions
693     }
694 #else
695     video_codec.resolution_divisor = 1; // We could try using it to handle odd resolutions
696 #endif
697     video_codec.qpMax = 56;
698     video_codec.numberOfSimulcastStreams = 1;
699     video_codec.simulcastStream[0].jsScaleDownBy =
700         codecConfig->mEncodingConstraints.scaleDownBy;
701     video_codec.mode = mCodecMode;
702 
703     codecFound = true;
704   } else {
705     // we should be good here to set the new codec.
706     for(int idx=0; idx < mPtrViECodec->NumberOfCodecs(); idx++)
707     {
708       if(0 == mPtrViECodec->GetCodec(idx, video_codec))
709       {
710         payloadName = video_codec.plName;
711         if(codecConfig->mName.compare(payloadName) == 0)
712         {
713           // Note: side-effect of this is that video_codec is filled in
714           // by GetCodec()
715           codecFound = true;
716           break;
717         }
718       }
719     }//for
720   }
721 
722   if(codecFound == false)
723   {
724     CSFLogError(logTag, "%s Codec Mismatch ", __FUNCTION__);
725     return kMediaConduitInvalidSendCodec;
726   }
727   // Note: only for overriding parameters from GetCodec()!
728   CodecConfigToWebRTCCodec(codecConfig, video_codec);
729   if (mSendingWidth != 0) {
730     // We're already in a call and are reconfiguring (perhaps due to
731     // ReplaceTrack).  Set to match the last frame we sent.
732 
733     // We could also set mLastWidth to 0, to force immediate reconfig -
734     // more expensive, but perhaps less risk of missing something.  Really
735     // on ReplaceTrack we should just call ConfigureCodecMode(), and if the
736     // mode changed, we re-configure.
737     // Do this after CodecConfigToWebRTCCodec() to avoid messing up simulcast
738     video_codec.width = mSendingWidth;
739     video_codec.height = mSendingHeight;
740     video_codec.maxFramerate = mSendingFramerate;
741   } else {
742     mSendingWidth = 0;
743     mSendingHeight = 0;
744     mSendingFramerate = video_codec.maxFramerate;
745   }
746 
747   video_codec.mode = mCodecMode;
748 
749   if(mPtrViECodec->SetSendCodec(mChannel, video_codec) == -1)
750   {
751     error = mPtrViEBase->LastError();
752     if(error == kViECodecInvalidCodec)
753     {
754       CSFLogError(logTag, "%s Invalid Send Codec", __FUNCTION__);
755       return kMediaConduitInvalidSendCodec;
756     }
757     CSFLogError(logTag, "%s SetSendCodec Failed %d ", __FUNCTION__,
758                 mPtrViEBase->LastError());
759     return kMediaConduitUnknownError;
760   }
761 
762   if (mMinBitrateEstimate != 0) {
763     mPtrViENetwork->SetBitrateConfig(mChannel,
764                                      mMinBitrateEstimate,
765                                      std::max(video_codec.startBitrate,
766                                               mMinBitrateEstimate),
767                                      std::max(video_codec.maxBitrate,
768                                               mMinBitrateEstimate));
769   }
770 
771   if (!mVideoCodecStat) {
772     mVideoCodecStat = new VideoCodecStatistics(mChannel, mPtrViECodec);
773   }
774   mVideoCodecStat->Register(true);
775 
776   // See Bug 1297058, enabling FEC when NACK is set on H.264 is problematic
777   bool use_fec = codecConfig->RtcpFbFECIsSet();
778   if ((mExternalSendCodec && codecConfig->mType == mExternalSendCodec->mType)
779       || codecConfig->mType == webrtc::kVideoCodecH264) {
780     if(codecConfig->RtcpFbNackIsSet("")) {
781       use_fec = false;
782     }
783   }
784 
785   if (use_fec)
786   {
787     uint8_t payload_type_red = INVALID_RTP_PAYLOAD;
788     uint8_t payload_type_ulpfec = INVALID_RTP_PAYLOAD;
789     if (!DetermineREDAndULPFECPayloadTypes(payload_type_red, payload_type_ulpfec)) {
790       CSFLogError(logTag, "%s Unable to set FEC status: could not determine"
791                   "payload type: red %u ulpfec %u",
792                   __FUNCTION__, payload_type_red, payload_type_ulpfec);
793         return kMediaConduitFECStatusError;
794     }
795 
796     if(codecConfig->RtcpFbNackIsSet("")) {
797       CSFLogDebug(logTag, "Enabling NACK/FEC (send) for video stream\n");
798       if (mPtrRTP->SetHybridNACKFECStatus(mChannel, true,
799                                           payload_type_red,
800                                           payload_type_ulpfec) != 0) {
801         CSFLogError(logTag,  "%s SetHybridNACKFECStatus Failed %d ",
802                     __FUNCTION__, mPtrViEBase->LastError());
803         return kMediaConduitHybridNACKFECStatusError;
804       }
805     } else {
806       CSFLogDebug(logTag, "Enabling FEC (send) for video stream\n");
807       if (mPtrRTP->SetFECStatus(mChannel, true,
808                                 payload_type_red, payload_type_ulpfec) != 0)
809       {
810         CSFLogError(logTag,  "%s SetFECStatus Failed %d ", __FUNCTION__,
811                     mPtrViEBase->LastError());
812         return kMediaConduitFECStatusError;
813       }
814     }
815   } else if(codecConfig->RtcpFbNackIsSet("")) {
816     CSFLogDebug(logTag, "Enabling NACK (send) for video stream\n");
817     if (mPtrRTP->SetNACKStatus(mChannel, true) != 0)
818     {
819       CSFLogError(logTag,  "%s NACKStatus Failed %d ", __FUNCTION__,
820                   mPtrViEBase->LastError());
821       return kMediaConduitNACKStatusError;
822     }
823   }
824 
825   {
826     MutexAutoLock lock(mCodecMutex);
827 
828     //Copy the applied config for future reference.
829     mCurSendCodecConfig = new VideoCodecConfig(*codecConfig);
830   }
831 
832   bool remb_requested = codecConfig->RtcpFbRembIsSet();
833   mPtrRTP->SetRembStatus(mChannel, true, remb_requested);
834 
835   return kMediaConduitNoError;
836 }
837 
838 MediaConduitErrorCode
839 WebrtcVideoConduit::ConfigureRecvMediaCodecs(
840     const std::vector<VideoCodecConfig* >& codecConfigList)
841 {
842   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
843   MediaConduitErrorCode condError = kMediaConduitNoError;
844   bool success = false;
845   std::string  payloadName;
846 
847   condError = StopReceiving();
848   if (condError != kMediaConduitNoError) {
849     return condError;
850   }
851 
852   if(codecConfigList.empty())
853   {
854     CSFLogError(logTag, "%s Zero number of codecs to configure", __FUNCTION__);
855     return kMediaConduitMalformedArgument;
856   }
857 
858   webrtc::ViEKeyFrameRequestMethod kf_request = webrtc::kViEKeyFrameRequestNone;
859   bool use_nack_basic = false;
860   bool use_tmmbr = false;
861   bool use_remb = false;
862   bool use_fec = false;
863 
864   // Try applying the codecs in the list.
865   // We treat it as success if at least one codec was applied and reception was
866   // started successfully.
867   for(std::vector<VideoCodecConfig*>::size_type i=0;i < codecConfigList.size();i++)
868   {
869     // if the codec param is invalid or duplicate, return error
870     if((condError = ValidateCodecConfig(codecConfigList[i],false)) != kMediaConduitNoError)
871     {
872       return condError;
873     }
874 
875     // Check for the keyframe request type: PLI is preferred
876     // over FIR, and FIR is preferred over none.
877     if (codecConfigList[i]->RtcpFbNackIsSet("pli"))
878     {
879       kf_request = webrtc::kViEKeyFrameRequestPliRtcp;
880     } else if(kf_request == webrtc::kViEKeyFrameRequestNone &&
881               codecConfigList[i]->RtcpFbCcmIsSet("fir"))
882     {
883       kf_request = webrtc::kViEKeyFrameRequestFirRtcp;
884     }
885 
886     // Check whether NACK is requested
887     if(codecConfigList[i]->RtcpFbNackIsSet(""))
888     {
889       use_nack_basic = true;
890     }
891 
892     // Check whether TMMBR is requested
893     if (codecConfigList[i]->RtcpFbCcmIsSet("tmmbr")) {
894       use_tmmbr = true;
895     }
896 
897     // Check whether REMB is requested
898     if (codecConfigList[i]->RtcpFbRembIsSet()) {
899       use_remb = true;
900     }
901 
902     // Check whether FEC is requested
903     if (codecConfigList[i]->RtcpFbFECIsSet()) {
904       use_fec = true;
905     }
906 
907     webrtc::VideoCodec  video_codec;
908 
909     memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
910 
911     if (mExternalRecvCodec &&
912         codecConfigList[i]->mType == mExternalRecvCodec->mType) {
913       CSFLogError(logTag, "%s Configuring External H264 Receive Codec", __FUNCTION__);
914 
915       // XXX Do we need a separate setting for receive maxbitrate?  Is it
916       // different for hardware codecs?  For now assume symmetry.
917       CodecConfigToWebRTCCodec(codecConfigList[i], video_codec);
918 
919       // values SetReceiveCodec() cares about are name, type, maxbitrate
920       if(mPtrViECodec->SetReceiveCodec(mChannel,video_codec) == -1)
921       {
922         CSFLogError(logTag, "%s Invalid Receive Codec %d ", __FUNCTION__,
923                     mPtrViEBase->LastError());
924       } else {
925         CSFLogError(logTag, "%s Successfully Set the codec %s", __FUNCTION__,
926                     codecConfigList[i]->mName.c_str());
927         success = true;
928       }
929     } else {
930       //Retrieve pre-populated codec structure for our codec.
931       for(int idx=0; idx < mPtrViECodec->NumberOfCodecs(); idx++)
932       {
933         if(mPtrViECodec->GetCodec(idx, video_codec) == 0)
934         {
935           payloadName = video_codec.plName;
936           if(codecConfigList[i]->mName.compare(payloadName) == 0)
937           {
938             CodecConfigToWebRTCCodec(codecConfigList[i], video_codec);
939             if(mPtrViECodec->SetReceiveCodec(mChannel,video_codec) == -1)
940             {
941               CSFLogError(logTag, "%s Invalid Receive Codec %d ", __FUNCTION__,
942                           mPtrViEBase->LastError());
943             } else {
944               CSFLogError(logTag, "%s Successfully Set the codec %s", __FUNCTION__,
945                           codecConfigList[i]->mName.c_str());
946               success = true;
947             }
948             break; //we found a match
949           }
950         }
951       }//end for codeclist
952     }
953   }//end for
954 
955   if(!success)
956   {
957     CSFLogError(logTag, "%s Setting Receive Codec Failed ", __FUNCTION__);
958     return kMediaConduitInvalidReceiveCodec;
959   }
960 
961   if (!mVideoCodecStat) {
962     mVideoCodecStat = new VideoCodecStatistics(mChannel, mPtrViECodec);
963   }
964   mVideoCodecStat->Register(false);
965 
966   // XXX Currently, we gather up all of the feedback types that the remote
967   // party indicated it supports for all video codecs and configure the entire
968   // conduit based on those capabilities. This is technically out of spec,
969   // as these values should be configured on a per-codec basis. However,
970   // the video engine only provides this API on a per-conduit basis, so that's
971   // how we have to do it. The approach of considering the remote capabilities
972   // for the entire conduit to be a union of all remote codec capabilities
973   // (rather than the more conservative approach of using an intersection)
974   // is made to provide as many feedback mechanisms as are likely to be
975   // processed by the remote party (and should be relatively safe, since the
976   // remote party is required to ignore feedback types that it does not
977   // understand).
978   //
979   // Note that our configuration uses this union of remote capabilites as
980   // input to the configuration. It is not isomorphic to the configuration.
981   // For example, it only makes sense to have one frame request mechanism
982   // active at a time; so, if the remote party indicates more than one
983   // supported mechanism, we're only configuring the one we most prefer.
984   //
985   // See http://code.google.com/p/webrtc/issues/detail?id=2331
986 
987   if (kf_request != webrtc::kViEKeyFrameRequestNone)
988   {
989     CSFLogDebug(logTag, "Enabling %s frame requests for video stream\n",
990                 (kf_request == webrtc::kViEKeyFrameRequestPliRtcp ?
991                  "PLI" : "FIR"));
992     if(mPtrRTP->SetKeyFrameRequestMethod(mChannel, kf_request) != 0)
993     {
994       CSFLogError(logTag,  "%s KeyFrameRequest Failed %d ", __FUNCTION__,
995                   mPtrViEBase->LastError());
996       return kMediaConduitKeyFrameRequestError;
997     }
998   }
999 
1000   switch (kf_request) {
1001     case webrtc::kViEKeyFrameRequestNone:
1002       mFrameRequestMethod = FrameRequestNone;
1003       break;
1004     case webrtc::kViEKeyFrameRequestPliRtcp:
1005       mFrameRequestMethod = FrameRequestPli;
1006       break;
1007     case webrtc::kViEKeyFrameRequestFirRtcp:
1008       mFrameRequestMethod = FrameRequestFir;
1009       break;
1010     default:
1011       MOZ_ASSERT(false);
1012       mFrameRequestMethod = FrameRequestUnknown;
1013   }
1014 
1015   if (use_fec)
1016   {
1017     uint8_t payload_type_red = INVALID_RTP_PAYLOAD;
1018     uint8_t payload_type_ulpfec = INVALID_RTP_PAYLOAD;
1019     if (!DetermineREDAndULPFECPayloadTypes(payload_type_red, payload_type_ulpfec)) {
1020       CSFLogError(logTag, "%s Unable to set FEC status: could not determine"
1021                   "payload type: red %u ulpfec %u",
1022                   __FUNCTION__, payload_type_red, payload_type_ulpfec);
1023         return kMediaConduitFECStatusError;
1024     }
1025 
1026     // We also need to call SetReceiveCodec for RED and ULPFEC codecs
1027     for(int idx=0; idx < mPtrViECodec->NumberOfCodecs(); idx++) {
1028       webrtc::VideoCodec video_codec;
1029       if(mPtrViECodec->GetCodec(idx, video_codec) == 0) {
1030         payloadName = video_codec.plName;
1031         if(video_codec.codecType == webrtc::VideoCodecType::kVideoCodecRED ||
1032            video_codec.codecType == webrtc::VideoCodecType::kVideoCodecULPFEC) {
1033           if(mPtrViECodec->SetReceiveCodec(mChannel,video_codec) == -1) {
1034             CSFLogError(logTag, "%s Invalid Receive Codec %d ", __FUNCTION__,
1035                         mPtrViEBase->LastError());
1036           } else {
1037             CSFLogDebug(logTag, "%s Successfully Set the codec %s", __FUNCTION__,
1038                         video_codec.plName);
1039           }
1040         }
1041       }
1042     }
1043 
1044     if (use_nack_basic) {
1045       CSFLogDebug(logTag, "Enabling NACK/FEC (recv) for video stream\n");
1046       if (mPtrRTP->SetHybridNACKFECStatus(mChannel, true,
1047                                           payload_type_red,
1048                                           payload_type_ulpfec) != 0) {
1049         CSFLogError(logTag,  "%s SetHybridNACKFECStatus Failed %d ",
1050                     __FUNCTION__, mPtrViEBase->LastError());
1051         return kMediaConduitNACKStatusError;
1052       }
1053     } else {
1054       CSFLogDebug(logTag, "Enabling FEC (recv) for video stream\n");
1055       if (mPtrRTP->SetFECStatus(mChannel, true,
1056                                 payload_type_red, payload_type_ulpfec) != 0)
1057       {
1058         CSFLogError(logTag,  "%s SetFECStatus Failed %d ", __FUNCTION__,
1059                     mPtrViEBase->LastError());
1060         return kMediaConduitNACKStatusError;
1061       }
1062     }
1063   } else if(use_nack_basic) {
1064     CSFLogDebug(logTag, "Enabling NACK (recv) for video stream\n");
1065     if (mPtrRTP->SetNACKStatus(mChannel, true) != 0)
1066     {
1067       CSFLogError(logTag,  "%s NACKStatus Failed %d ", __FUNCTION__,
1068                   mPtrViEBase->LastError());
1069       return kMediaConduitNACKStatusError;
1070     }
1071   }
1072   mUsingNackBasic = use_nack_basic;
1073   mUsingFEC = use_fec;
1074 
1075   if (use_tmmbr) {
1076     CSFLogDebug(logTag, "Enabling TMMBR for video stream");
1077     if (mPtrRTP->SetTMMBRStatus(mChannel, true) != 0) {
1078       CSFLogError(logTag, "%s SetTMMBRStatus Failed %d ", __FUNCTION__,
1079         mPtrViEBase->LastError());
1080       return kMediaConduitTMMBRStatusError;
1081     }
1082   }
1083   mUsingTmmbr = use_tmmbr;
1084 
1085   condError = StartReceiving();
1086   if (condError != kMediaConduitNoError) {
1087     return condError;
1088   }
1089 
1090   // By now we should have successfully started reception.
1091   CSFLogDebug(logTag, "REMB enabled for video stream %s",
1092               (use_remb ? "yes" : "no"));
1093   mPtrRTP->SetRembStatus(mChannel, use_remb, true);
1094   return kMediaConduitNoError;
1095 }
1096 
1097 template<typename T>
1098 T MinIgnoreZero(const T& a, const T& b)
1099 {
1100   return std::min(a? a:b, b? b:a);
1101 }
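// Illustration: MinIgnoreZero(0u, 300u) == 300 and MinIgnoreZero(200u, 300u) == 200;
// a zero argument means "no limit/unset" and never wins the min.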
1102 
1103 struct ResolutionAndBitrateLimits {
1104   uint32_t resolution_in_mb;
1105   uint16_t min_bitrate;
1106   uint16_t start_bitrate;
1107   uint16_t max_bitrate;
1108 };
1109 
1110 #define MB_OF(w,h) ((unsigned int)((((w+15)>>4))*((unsigned int)((h+15)>>4))))
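// Illustration: MB_OF(1280, 720) == 80 * 45 == 3600 macroblocks (each dimension
// rounded up to whole 16x16 macroblocks).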
1111 
1112 // For now, try to set the max rates well above the knee in the curve.
1113 // Chosen somewhat arbitrarily; it's hard to find good data oriented for
1114 // realtime interactive/talking-head recording.  These rates assume
1115 // 30fps.
1116 
1117 // XXX Populate this based on a pref (which we should consider sorting because
1118 // people won't assume they need to).
1119 static ResolutionAndBitrateLimits kResolutionAndBitrateLimits[] = {
1120   {MB_OF(1920, 1200), 1500, 2000, 10000}, // >HD (3K, 4K, etc)
1121   {MB_OF(1280, 720), 1200, 1500, 5000}, // HD ~1080-1200
1122   {MB_OF(800, 480), 600, 800, 2500}, // HD ~720
1123   {tl::Max<MB_OF(400, 240), MB_OF(352, 288)>::value, 200, 300, 1300}, // VGA, WVGA
1124   {MB_OF(176, 144), 100, 150, 500}, // WQVGA, CIF
1125   {0 , 40, 80, 250} // QCIF and below
1126 };
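// Illustration: a 640x480 (VGA) frame is MB_OF(640, 480) == 1200 MB; the first
// row it exceeds is the 396-MB (VGA/WVGA) entry, so SelectBitrates() starts
// from 200/300/1300 kbps before the framerate scaling and pref clamps below.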
1127 
1128 void
1129 WebrtcVideoConduit::SelectBitrates(unsigned short width,
1130                                    unsigned short height,
1131                                    unsigned int cap,
1132                                    mozilla::Atomic<int32_t, mozilla::Relaxed>& aLastFramerateTenths,
1133                                    unsigned int& out_min,
1134                                    unsigned int& out_start,
1135                                    unsigned int& out_max)
1136 {
1137   // max bandwidth should be proportional (not linearly!) to resolution, and
1138   // proportional (perhaps linearly, or close) to current frame rate.
1139   unsigned int fs = MB_OF(width, height);
1140 
1141   for (ResolutionAndBitrateLimits resAndLimits : kResolutionAndBitrateLimits) {
1142     if (fs > resAndLimits.resolution_in_mb &&
1143         // pick the highest range where at least start rate is within cap
1144         // (or if we're at the end of the array).
1145         (!cap || resAndLimits.start_bitrate <= cap ||
1146          resAndLimits.resolution_in_mb == 0)) {
1147       out_min = MinIgnoreZero((unsigned int)resAndLimits.min_bitrate, cap);
1148       out_start = MinIgnoreZero((unsigned int)resAndLimits.start_bitrate, cap);
1149       out_max = MinIgnoreZero((unsigned int)resAndLimits.max_bitrate, cap);
1150       break;
1151     }
1152   }
1153 
1154   // mLastFramerateTenths is an atomic, and scaled by *10
1155   double framerate = std::min((aLastFramerateTenths/10.),60.0);
1156   MOZ_ASSERT(framerate > 0);
1157   // Now linear reduction/increase based on fps (max 60fps i.e. doubling)
1158   if (framerate >= 10) {
1159     out_min = out_min * (framerate/30);
1160     out_start = out_start * (framerate/30);
1161     out_max = std::max((unsigned int)(out_max * (framerate/30)), cap);
1162   } else {
1163     // At low framerates, don't reduce bandwidth as much - cut slope to 1/2.
1164     // Mostly this would be ultra-low-light situations/mobile or screensharing.
1165     out_min = out_min * ((10-(framerate/2))/30);
1166     out_start = out_start * ((10-(framerate/2))/30);
1167     out_max = std::max((unsigned int)(out_max * ((10-(framerate/2))/30)), cap);
1168   }
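  // Illustration: at 30 fps the scale factor is 1.0 (table values pass through),
  // at 60 fps min/start double, and at 5 fps the halved slope gives
  // (10 - 2.5)/30 = 0.25, i.e. roughly a quarter of the table values.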
1169 
1170   if (mMinBitrate && mMinBitrate > out_min) {
1171     out_min = mMinBitrate;
1172   }
1173   // If we try to set a minimum bitrate that is too low, ViE will reject it.
1174   out_min = std::max((unsigned int) webrtc::kViEMinCodecBitrate,
1175                                   out_min);
1176   if (mStartBitrate && mStartBitrate > out_start) {
1177     out_start = mStartBitrate;
1178   }
1179   out_start = std::max(out_start, out_min);
1180 
1181   // Note: mMaxBitrate is the max transport bitrate - it applies to a
1182   // single codec encoding, but should also apply to the sum of all
1183   // simulcast layers in this encoding!
1184   // So sum(layers.maxBitrate) <= mMaxBitrate
1185   if (mMaxBitrate && mMaxBitrate > out_max) {
1186     out_max = mMaxBitrate;
1187   }
1188 }
1189 
1190 static void ConstrainPreservingAspectRatioExact(uint32_t max_fs,
1191                                                 unsigned short* width,
1192                                                 unsigned short* height)
1193 {
1194   // We could try to pick a better starting divisor, but it won't make any real
1195   // performance difference.
1196   for (size_t d = 1; d < std::min(*width, *height); ++d) {
1197     if ((*width % d) || (*height % d)) {
1198       continue; // Not divisible
1199     }
1200 
1201     if (((*width) * (*height))/(d*d) <= max_fs) {
1202       *width /= d;
1203       *height /= d;
1204       return;
1205     }
1206   }
1207 
1208   *width = 0;
1209   *height = 0;
1210 }
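// Illustration: constraining 320x240 to max_fs = 106*80 pixels tries d = 1 and
// d = 2 (still too big), skips d = 3 (320 is not divisible by 3), and settles on
// d = 4, giving exactly 80x60 -- the behaviour the simulcast path in
// ReconfigureSendCodec() relies on.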
1211 
1212 static void ConstrainPreservingAspectRatio(uint16_t max_width,
1213                                            uint16_t max_height,
1214                                            unsigned short* width,
1215                                            unsigned short* height)
1216 {
1217   if (((*width) <= max_width) && ((*height) <= max_height)) {
1218     return;
1219   }
1220 
1221   if ((*width) * max_height > max_width * (*height))
1222   {
1223     (*height) = max_width * (*height) / (*width);
1224     (*width) = max_width;
1225   }
1226   else
1227   {
1228     (*width) = max_height * (*width) / (*height);
1229     (*height) = max_height;
1230   }
1231 }
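// Illustration: constraining 1280x720 to max 640x640 gives 640x360 -- width is
// the binding dimension, and height is rescaled (with integer truncation) to
// preserve the 16:9 aspect ratio.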
1232 
1233 // XXX we need to figure out how to feed back changes in preferred capture
1234 // resolution to the getUserMedia source.
1235 // Returns boolean if we've submitted an async change (and took ownership
1236 // of *frame's data)
1237 bool
1238 WebrtcVideoConduit::SelectSendResolution(unsigned short width,
1239                                          unsigned short height,
1240                                          webrtc::I420VideoFrame *frame) // may be null
1241 {
1242   mCodecMutex.AssertCurrentThreadOwns();
1243   // XXX This will do bandwidth-resolution adaptation as well - bug 877954
1244 
1245   mLastWidth = width;
1246   mLastHeight = height;
1247   // Enforce constraints
1248   if (mCurSendCodecConfig) {
1249     uint16_t max_width = mCurSendCodecConfig->mEncodingConstraints.maxWidth;
1250     uint16_t max_height = mCurSendCodecConfig->mEncodingConstraints.maxHeight;
1251     if (max_width || max_height) {
1252       max_width = max_width ? max_width : UINT16_MAX;
1253       max_height = max_height ? max_height : UINT16_MAX;
1254       ConstrainPreservingAspectRatio(max_width, max_height, &width, &height);
1255     }
1256 
1257     // Limit resolution to max-fs while keeping same aspect ratio as the
1258     // incoming image.
1259     if (mCurSendCodecConfig->mEncodingConstraints.maxFs)
1260     {
1261       uint32_t max_fs = mCurSendCodecConfig->mEncodingConstraints.maxFs;
1262       unsigned int cur_fs, mb_width, mb_height, mb_max;
1263 
1264       // Could we make this simpler by picking the larger of width and height,
1265       // calculating a max for just that value based on the scale parameter,
1266       // and then let ConstrainPreservingAspectRatio do the rest?
1267       mb_width = (width + 15) >> 4;
1268       mb_height = (height + 15) >> 4;
1269 
1270       cur_fs = mb_width * mb_height;
1271 
1272       // Limit resolution to max_fs, but don't scale up.
1273       if (cur_fs > max_fs)
1274       {
1275         double scale_ratio;
1276 
1277         scale_ratio = sqrt((double) max_fs / (double) cur_fs);
1278 
1279         mb_width = mb_width * scale_ratio;
1280         mb_height = mb_height * scale_ratio;
1281 
1282         // Adjust mb_width and mb_height if they were truncated to zero.
1283         if (mb_width == 0) {
1284           mb_width = 1;
1285           mb_height = std::min(mb_height, max_fs);
1286         }
1287         if (mb_height == 0) {
1288           mb_height = 1;
1289           mb_width = std::min(mb_width, max_fs);
1290         }
1291       }
1292 
1293       // Limit width/height separately to limit the effect of extreme aspect ratios.
1294       mb_max = (unsigned) sqrt(8 * (double) max_fs);
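      // Illustration: with max_fs = 3600 macroblocks, mb_max = sqrt(28800) ~= 169,
      // so neither dimension may exceed 16 * 169 = 2704 pixels.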
1295 
1296       max_width = 16 * std::min(mb_width, mb_max);
1297       max_height = 16 * std::min(mb_height, mb_max);
1298       ConstrainPreservingAspectRatio(max_width, max_height, &width, &height);
1299     }
1300   }
1301 
1302 
1303   // Adapt to getUserMedia resolution changes
1304   // check if we need to reconfigure the sending resolution.
1305   bool changed = false;
1306   if (mSendingWidth != width || mSendingHeight != height)
1307   {
1308     CSFLogDebug(logTag, "%s: resolution changing to %ux%u (from %ux%u)",
1309                 __FUNCTION__, width, height, mSendingWidth, mSendingHeight);
1310     // This will avoid us continually retrying this operation if it fails.
1311     // If the resolution changes, we'll try again.  In the meantime, we'll
1312     // keep using the old size in the encoder.
1313     mSendingWidth = width;
1314     mSendingHeight = height;
1315     changed = true;
1316   }
1317 
1318   // uses mSendingWidth/Height
1319   unsigned int framerate = SelectSendFrameRate(mSendingFramerate);
1320   if (mSendingFramerate != framerate) {
1321     CSFLogDebug(logTag, "%s: framerate changing to %u (from %u)",
1322                 __FUNCTION__, framerate, mSendingFramerate);
1323     mSendingFramerate = framerate;
1324     changed = true;
1325   }
1326 
1327   if (changed) {
1328     // On a resolution change, bounce this to the correct thread to
1329     // re-configure (same as used for Init().  Do *not* block the calling
1330     // thread since that may be the MSG thread.
1331 
1332     // MUST run on the same thread as Init()/etc
1333     if (!NS_IsMainThread()) {
1334       // Note: on *initial* config (first frame), best would be to drop
1335       // frames until the config is done, then encode the most recent frame
1336       // provided and continue from there.  We don't do this, but we do drop
1337       // all frames while in the process of a reconfig and then encode the
1338       // frame that started the reconfig, which is close.  There may be
1339       // barely perceptible glitch in the video due to the dropped frame(s).
1340       mInReconfig = true;
1341 
1342       // We can't pass a UniquePtr<> or unique_ptr<> to a lambda directly
1343       webrtc::I420VideoFrame *new_frame = nullptr;
1344       if (frame) {
1345         new_frame = new webrtc::I420VideoFrame();
1346         // the internal buffer pointer is refcounted, so we don't have 2 copies here
1347         new_frame->ShallowCopy(*frame);
1348       }
1349       RefPtr<WebrtcVideoConduit> self(this);
1350       RefPtr<Runnable> webrtc_runnable =
1351         media::NewRunnableFrom([self, width, height, new_frame]() -> nsresult {
1352             UniquePtr<webrtc::I420VideoFrame> local_frame(new_frame); // Simplify cleanup
1353 
1354             MutexAutoLock lock(self->mCodecMutex);
1355             return self->ReconfigureSendCodec(width, height, new_frame);
1356           });
1357       // new_frame now owned by lambda
1358       CSFLogDebug(logTag, "%s: proxying lambda to WebRTC thread for reconfig (width %u/%u, height %u/%u",
1359                   __FUNCTION__, width, mLastWidth, height, mLastHeight);
1360       NS_DispatchToMainThread(webrtc_runnable.forget());
1361       if (new_frame) {
1362         return true; // queued it
1363       }
1364     } else {
1365       // already on the right thread
1366       ReconfigureSendCodec(width, height, frame);
1367     }
1368   }
1369   return false;
1370 }
1371 
1372 nsresult
1373 WebrtcVideoConduit::ReconfigureSendCodec(unsigned short width,
1374                                          unsigned short height,
1375                                          webrtc::I420VideoFrame *frame)
1376 {
1377   mCodecMutex.AssertCurrentThreadOwns();
1378 
1379   // Get current vie codec.
1380   webrtc::VideoCodec vie_codec;
1381   int32_t err;
1382 
1383   mInReconfig = false;
1384   if ((err = mPtrViECodec->GetSendCodec(mChannel, vie_codec)) != 0)
1385   {
1386     CSFLogError(logTag, "%s: GetSendCodec failed, err %d", __FUNCTION__, err);
1387     return NS_ERROR_FAILURE;
1388   }
1389 
1390   CSFLogDebug(logTag,
1391               "%s: Requesting resolution change to %ux%u (from %ux%u)",
1392               __FUNCTION__, width, height, vie_codec.width, vie_codec.height);
1393 
1394   if (mRtpStreamIdEnabled) {
1395     vie_codec.ridId = mRtpStreamIdExtId;
1396   }
1397 
1398   vie_codec.width = width;
1399   vie_codec.height = height;
1400   vie_codec.maxFramerate = mSendingFramerate;
1401   SelectBitrates(vie_codec.width, vie_codec.height, 0,
1402                  mLastFramerateTenths,
1403                  vie_codec.minBitrate,
1404                  vie_codec.startBitrate,
1405                  vie_codec.maxBitrate);
1406 
1407   // These are based on lowest-fidelity, because if there is insufficient
1408   // bandwidth for all streams, only the lowest fidelity one will be sent.
1409   uint32_t minMinBitrate = 0;
1410   uint32_t minStartBitrate = 0;
1411   // Total for all simulcast streams.
1412   uint32_t totalMaxBitrate = 0;
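  // Illustration (hypothetical layer values): if two layers select
  // 100/150/500 and 600/800/2500 kbps (min/start/max), the loop below yields
  // minMinBitrate = 100, minStartBitrate = 150 and totalMaxBitrate = 3000,
  // which are then clamped against the codec-level values after the loop.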
1413 
1414   for (size_t i = vie_codec.numberOfSimulcastStreams; i > 0; --i) {
1415     webrtc::SimulcastStream& stream(vie_codec.simulcastStream[i - 1]);
1416     stream.width = width;
1417     stream.height = height;
1418     MOZ_ASSERT(stream.jsScaleDownBy >= 1.0);
1419     uint32_t new_width = uint32_t(width / stream.jsScaleDownBy);
1420     uint32_t new_height = uint32_t(height / stream.jsScaleDownBy);
1421     // TODO: If two layers are similar, only alloc bits to one (Bug 1249859)
1422     if (new_width != width || new_height != height) {
1423       if (vie_codec.numberOfSimulcastStreams == 1) {
1424         // Use less strict scaling in unicast. That way 320x240 / 3 = 106x79.
1425         ConstrainPreservingAspectRatio(new_width, new_height,
1426                                        &stream.width, &stream.height);
1427       } else {
1428         // webrtc.org supposedly won't tolerate simulcast unless every stream
1429         // is exactly the same aspect ratio. 320x240 / 3 = 80x60.
1430         ConstrainPreservingAspectRatioExact(new_width*new_height,
1431                                             &stream.width, &stream.height);
1432       }
1433     }
1434     // Give each layer default appropriate bandwidth limits based on the
1435     // resolution/framerate of that layer
1436     SelectBitrates(stream.width, stream.height,
1437                    MinIgnoreZero(stream.jsMaxBitrate, vie_codec.maxBitrate),
1438                    mLastFramerateTenths,
1439                    stream.minBitrate,
1440                    stream.targetBitrate,
1441                    stream.maxBitrate);
1442 
1443     // webrtc.org expects the last, highest fidelity, simulcast stream to
1444   // always have the same resolution as vie_codec.
1445     // Also set the least user-constrained of the stream bitrates on vie_codec.
1446     if (i == vie_codec.numberOfSimulcastStreams) {
1447       vie_codec.width = stream.width;
1448       vie_codec.height = stream.height;
1449     }
1450     minMinBitrate = MinIgnoreZero(stream.minBitrate, minMinBitrate);
1451     minStartBitrate = MinIgnoreZero(stream.targetBitrate, minStartBitrate);
1452     totalMaxBitrate += stream.maxBitrate;
1453   }
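  // Fold the per-layer limits back into the overall codec: raise the overall
  // minimum to at least the smallest (non-zero) layer minimum, cap the overall
  // maximum at the sum of the layer maximums, and clamp the start bitrate into
  // that range.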
1454   if (vie_codec.numberOfSimulcastStreams != 0) {
1455     vie_codec.minBitrate = std::max(minMinBitrate, vie_codec.minBitrate);
1456     vie_codec.maxBitrate = std::min(totalMaxBitrate, vie_codec.maxBitrate);
1457     vie_codec.startBitrate = std::max(vie_codec.minBitrate,
1458                                       std::min(minStartBitrate,
1459                                                vie_codec.maxBitrate));
1460   }
1461   vie_codec.mode = mCodecMode;
1462   if ((err = mPtrViECodec->SetSendCodec(mChannel, vie_codec)) != 0)
1463   {
1464     CSFLogError(logTag, "%s: SetSendCodec(%ux%u) failed, err %d",
1465                 __FUNCTION__, width, height, err);
1466     return NS_ERROR_FAILURE;
1467   }
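  // If a floor for the bandwidth estimate is configured, pass it along and
  // keep the start and max values handed to the estimator from falling below it.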
1468   if (mMinBitrateEstimate != 0) {
1469     mPtrViENetwork->SetBitrateConfig(mChannel,
1470                                      mMinBitrateEstimate,
1471                                      std::max(vie_codec.startBitrate,
1472                                               mMinBitrateEstimate),
1473                                      std::max(vie_codec.maxBitrate,
1474                                               mMinBitrateEstimate));
1475   }
1476 
1477   CSFLogDebug(logTag, "%s: Encoder resolution changed to %ux%u @ %ufps, bitrate %u:%u",
1478               __FUNCTION__, width, height, mSendingFramerate,
1479               vie_codec.minBitrate, vie_codec.maxBitrate);
1480   if (frame) {
1481     // XXX I really don't like doing this from MainThread...
1482     mPtrExtCapture->IncomingFrame(*frame);
1483     mVideoCodecStat->SentFrame();
1484     CSFLogDebug(logTag, "%s Inserted a frame from reconfig lambda", __FUNCTION__);
1485   }
1486   return NS_OK;
1487 }
1488 
1489 // Invoked under lock of mCodecMutex!
1490 unsigned int
1491 WebrtcVideoConduit::SelectSendFrameRate(unsigned int framerate) const
1492 {
1493   mCodecMutex.AssertCurrentThreadOwns();
1494   unsigned int new_framerate = framerate;
1495 
1496   // Limit frame rate based on max-mbps
1497   if (mCurSendCodecConfig && mCurSendCodecConfig->mEncodingConstraints.maxMbps)
1498   {
1499     unsigned int cur_fs, mb_width, mb_height, max_fps;
1500 
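    // Convert the sending resolution to macroblocks (16x16 pixels), rounding up.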
1501     mb_width = (mSendingWidth + 15) >> 4;
1502     mb_height = (mSendingHeight + 15) >> 4;
1503 
1504     cur_fs = mb_width * mb_height;
1505     if (cur_fs > 0) { // in case no frames have been sent
1506       max_fps = mCurSendCodecConfig->mEncodingConstraints.maxMbps/cur_fs;
1507       if (max_fps < mSendingFramerate) {
1508         new_framerate = max_fps;
1509       }
1510 
1511       if (mCurSendCodecConfig->mEncodingConstraints.maxFps != 0 &&
1512           mCurSendCodecConfig->mEncodingConstraints.maxFps < mSendingFramerate) {
1513         new_framerate = mCurSendCodecConfig->mEncodingConstraints.maxFps;
1514       }
1515     }
1516   }
1517   return new_framerate;
1518 }
1519 
1520 MediaConduitErrorCode
1521 WebrtcVideoConduit::SetExternalSendCodec(VideoCodecConfig* config,
1522                                          VideoEncoder* encoder) {
1523   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
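  // RegisterExternalSendCodec() returns 0 on success (the webrtc.org
  // convention), hence the negated check below.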
1524   if (!mPtrExtCodec->RegisterExternalSendCodec(mChannel,
1525                                               config->mType,
1526                                               static_cast<WebrtcVideoEncoder*>(encoder),
1527                                               false)) {
1528     mExternalSendCodecHandle = encoder;
1529     mExternalSendCodec = new VideoCodecConfig(*config);
1530     return kMediaConduitNoError;
1531   }
1532   return kMediaConduitInvalidSendCodec;
1533 }
1534 
1535 MediaConduitErrorCode
1536 WebrtcVideoConduit::SetExternalRecvCodec(VideoCodecConfig* config,
1537                                          VideoDecoder* decoder) {
1538   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
1539   if (!mPtrExtCodec->RegisterExternalReceiveCodec(mChannel,
1540                                                   config->mType,
1541                                                   static_cast<WebrtcVideoDecoder*>(decoder))) {
1542     mExternalRecvCodecHandle = decoder;
1543     mExternalRecvCodec = new VideoCodecConfig(*config);
1544     return kMediaConduitNoError;
1545   }
1546   return kMediaConduitInvalidReceiveCodec;
1547 }
1548 
1549 MediaConduitErrorCode
1550 WebrtcVideoConduit::EnableRTPStreamIdExtension(bool enabled, uint8_t id) {
1551   mRtpStreamIdEnabled = enabled;
1552   mRtpStreamIdExtId = id;
1553   return kMediaConduitNoError;
1554 }
1555 
1556 MediaConduitErrorCode
1557 WebrtcVideoConduit::SendVideoFrame(unsigned char* video_frame,
1558                                    unsigned int video_frame_length,
1559                                    unsigned short width,
1560                                    unsigned short height,
1561                                    VideoType video_type,
1562                                    uint64_t capture_time)
1563 {
1564 
1565   // Sanity-check the parameters.
1566   if(!video_frame || video_frame_length == 0 ||
1567      width == 0 || height == 0)
1568   {
1569     CSFLogError(logTag,  "%s Invalid Parameters ",__FUNCTION__);
1570     MOZ_ASSERT(false);
1571     return kMediaConduitMalformedArgument;
1572   }
1573   MOZ_ASSERT(video_type == VideoType::kVideoI420);
1574   MOZ_ASSERT(mPtrExtCapture);
1575 
1576   // Transmission should be enabled before we insert any frames.
1577   if(!mEngineTransmitting)
1578   {
1579     CSFLogError(logTag, "%s Engine not transmitting ", __FUNCTION__);
1580     return kMediaConduitSessionNotInited;
1581   }
1582 
1583   // Insert the frame into the video engine; only I420 format is accepted here.
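  // capture_time is reused for both the RTP timestamp and the render time below.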
1584   webrtc::I420VideoFrame i420_frame;
1585   i420_frame.CreateFrame(video_frame, width, height, webrtc::kVideoRotation_0);
1586   i420_frame.set_timestamp(capture_time);
1587   i420_frame.set_render_time_ms(capture_time);
1588 
1589   return SendVideoFrame(i420_frame);
1590 }
1591 
1592 MediaConduitErrorCode
1593 WebrtcVideoConduit::SendVideoFrame(webrtc::I420VideoFrame& frame)
1594 {
1595   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
1596   // See if we need to recalculate what we're sending.
1597   // Don't compare mSendingWidth/Height, since those may not be the same as the input.
1598   {
1599     MutexAutoLock lock(mCodecMutex);
1600     if (mInReconfig) {
1601       // Waiting for it to finish
1602       return kMediaConduitNoError;
1603     }
1604     if (frame.width() != mLastWidth || frame.height() != mLastHeight) {
1605       CSFLogDebug(logTag, "%s: call SelectSendResolution with %ux%u",
1606                   __FUNCTION__, frame.width(), frame.height());
1607       if (SelectSendResolution(frame.width(), frame.height(), &frame)) {
1608         // SelectSendResolution took ownership of the frame's data.
1609         // Submit the frame after reconfig is done
1610         return kMediaConduitNoError;
1611       }
1612     }
1613   }
1614   mPtrExtCapture->IncomingFrame(frame);
1615 
1616   mVideoCodecStat->SentFrame();
1617   CSFLogDebug(logTag, "%s Inserted a frame", __FUNCTION__);
1618   return kMediaConduitNoError;
1619 }
1620 
1621 // Transport Layer Callbacks
1622 MediaConduitErrorCode
1623 WebrtcVideoConduit::ReceivedRTPPacket(const void *data, int len)
1624 {
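  // The RTP sequence number sits in bytes 2-3 of the header, in network byte
  // order, hence the ntohs() on the second 16-bit word below.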
1625   CSFLogDebug(logTag, "%s: seq# %u, Channel %d, Len %d ", __FUNCTION__,
1626               (uint16_t) ntohs(((uint16_t*) data)[1]), mChannel, len);
1627 
1628   // Media Engine should be receiving already.
1629   if(mEngineReceiving)
1630   {
1631     // let the engine know of a RTP packet to decode
1632     // XXX we need to get passed the time the packet was received
1633     if(mPtrViENetwork->ReceivedRTPPacket(mChannel, data, len, webrtc::PacketTime()) == -1)
1634     {
1635       int error = mPtrViEBase->LastError();
1636       CSFLogError(logTag, "%s RTP Processing Failed %d ", __FUNCTION__, error);
1637       if(error >= kViERtpRtcpInvalidChannelId && error <= kViERtpRtcpRtcpDisabled)
1638       {
1639         return kMediaConduitRTPProcessingFailed;
1640       }
1641       return kMediaConduitRTPRTCPModuleError;
1642     }
1643   } else {
1644     CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__);
1645     return kMediaConduitSessionNotInited;
1646   }
1647 
1648   return kMediaConduitNoError;
1649 }
1650 
1651 MediaConduitErrorCode
1652 WebrtcVideoConduit::ReceivedRTCPPacket(const void *data, int len)
1653 {
1654   CSFLogDebug(logTag, " %s Channel %d, Len %d ", __FUNCTION__, mChannel, len);
1655 
1656   //Media Engine should be receiving already
1657   if(mPtrViENetwork->ReceivedRTCPPacket(mChannel,data,len) == -1)
1658   {
1659     int error = mPtrViEBase->LastError();
1660     CSFLogError(logTag, "%s RTCP Processing Failed %d", __FUNCTION__, error);
1661     if(error >= kViERtpRtcpInvalidChannelId && error <= kViERtpRtcpRtcpDisabled)
1662     {
1663       return kMediaConduitRTPProcessingFailed;
1664     }
1665     return kMediaConduitRTPRTCPModuleError;
1666   }
1667   return kMediaConduitNoError;
1668 }
1669 
1670 MediaConduitErrorCode
1671 WebrtcVideoConduit::StopTransmitting()
1672 {
1673   if(mEngineTransmitting)
1674   {
1675     CSFLogDebug(logTag, "%s Engine Already Sending. Attempting to Stop ", __FUNCTION__);
1676     if(mPtrViEBase->StopSend(mChannel) == -1)
1677     {
1678       CSFLogError(logTag, "%s StopSend() Failed %d ",__FUNCTION__,
1679                   mPtrViEBase->LastError());
1680       return kMediaConduitUnknownError;
1681     }
1682 
1683     mEngineTransmitting = false;
1684   }
1685 
1686   return kMediaConduitNoError;
1687 }
1688 
1689 MediaConduitErrorCode
1690 WebrtcVideoConduit::StartTransmitting()
1691 {
1692   if (!mEngineTransmitting) {
1693     if(mPtrViEBase->StartSend(mChannel) == -1)
1694     {
1695       CSFLogError(logTag, "%s Start Send Error %d ", __FUNCTION__,
1696                   mPtrViEBase->LastError());
1697       return kMediaConduitUnknownError;
1698     }
1699 
1700     mEngineTransmitting = true;
1701   }
1702 
1703   return kMediaConduitNoError;
1704 }
1705 
1706 MediaConduitErrorCode
1707 WebrtcVideoConduit::StopReceiving()
1708 {
1709   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
1710   // Are we receiving already? If so, stop receiving and playout
1711   // since we can't apply a new recv codec while the engine is playing.
1712   if(mEngineReceiving)
1713   {
1714     CSFLogDebug(logTag, "%s Engine Already Receiving. Attempting to Stop ", __FUNCTION__);
1715     if(mPtrViEBase->StopReceive(mChannel) == -1)
1716     {
1717       int error = mPtrViEBase->LastError();
1718       if(error == kViEBaseUnknownError)
1719       {
1720         CSFLogDebug(logTag, "%s StopReceive() Success ", __FUNCTION__);
1721       } else {
1722         CSFLogError(logTag, "%s StopReceive() Failed %d ", __FUNCTION__,
1723                     mPtrViEBase->LastError());
1724         return kMediaConduitUnknownError;
1725       }
1726     }
1727     mEngineReceiving = false;
1728   }
1729 
1730   return kMediaConduitNoError;
1731 }
1732 
1733 MediaConduitErrorCode
1734 WebrtcVideoConduit::StartReceiving()
1735 {
1736   if (!mEngineReceiving) {
1737     CSFLogDebug(logTag, "%s Attempting to start... ", __FUNCTION__);
1738     //Start Receive on the video engine
1739     if(mPtrViEBase->StartReceive(mChannel) == -1)
1740     {
1741       int error = mPtrViEBase->LastError();
1742       CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, error);
1743 
1744       return kMediaConduitUnknownError;
1745     }
1746 
1747     mEngineReceiving = true;
1748   }
1749 
1750   return kMediaConduitNoError;
1751 }
1752 
1753 //WebRTC::RTP Callback Implementation
1754 // Called on MSG thread
1755 int WebrtcVideoConduit::SendPacket(int channel, const void* data, size_t len)
1756 {
1757   CSFLogDebug(logTag,  "%s : channel %d len %lu", __FUNCTION__, channel, (unsigned long) len);
1758 
1759   ReentrantMonitorAutoEnter enter(mTransportMonitor);
1760   if(mTransmitterTransport &&
1761      (mTransmitterTransport->SendRtpPacket(data, len) == NS_OK))
1762   {
1763     CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__);
1764     return len;
1765   } else {
1766     CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
1767     return -1;
1768   }
1769 }
1770 
1771 // Called from multiple threads including webrtc Process thread
1772 int WebrtcVideoConduit::SendRTCPPacket(int channel, const void* data, size_t len)
1773 {
1774   CSFLogDebug(logTag,  "%s : channel %d , len %lu ", __FUNCTION__, channel, (unsigned long) len);
1775 
1776   // We come here if we have only one pipeline/conduit setup,
1777   // such as for unidirectional streams.
1778   // We also end up here if we are receiving.
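  // Prefer the receiver-side transport; fall back to the transmitter-side one
  // so reports still go out on send-only conduits.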
1779   ReentrantMonitorAutoEnter enter(mTransportMonitor);
1780   if(mReceiverTransport &&
1781      mReceiverTransport->SendRtcpPacket(data, len) == NS_OK)
1782   {
1783     // Might be a sender report, might be a receiver report, we don't know.
1784     CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__);
1785     return len;
1786   } else if(mTransmitterTransport &&
1787             (mTransmitterTransport->SendRtcpPacket(data, len) == NS_OK)) {
1788       CSFLogDebug(logTag, "%s Sent RTCP Packet (sender report) ", __FUNCTION__);
1789       return len;
1790   } else {
1791     CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__);
1792     return -1;
1793   }
1794 }
1795 
1796 // WebRTC::ExternalMedia Implementation
1797 int
1798 WebrtcVideoConduit::FrameSizeChange(unsigned int width,
1799                                     unsigned int height,
1800                                     unsigned int numStreams)
1801 {
1802   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
1803 
1804 
1805   ReentrantMonitorAutoEnter enter(mTransportMonitor);
1806   mReceivingWidth = width;
1807   mReceivingHeight = height;
1808   mNumReceivingStreams = numStreams;
1809 
1810   if(mRenderer)
1811   {
1812     mRenderer->FrameSizeChange(width, height, numStreams);
1813     return 0;
1814   }
1815 
1816   CSFLogError(logTag,  "%s Renderer is NULL ", __FUNCTION__);
1817   return -1;
1818 }
1819 
1820 int
1821 WebrtcVideoConduit::DeliverFrame(unsigned char* buffer,
1822                                  size_t buffer_size,
1823                                  uint32_t time_stamp,
1824                                  int64_t ntp_time_ms,
1825                                  int64_t render_time,
1826                                  void *handle)
1827 {
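  // No strides supplied: assume tightly packed I420, i.e. a Y stride equal to
  // the frame width and a chroma stride of half that, rounded up.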
1828   return DeliverFrame(buffer, buffer_size, mReceivingWidth, (mReceivingWidth+1)>>1,
1829                       time_stamp, ntp_time_ms, render_time, handle);
1830 }
1831 
1832 int
1833 WebrtcVideoConduit::DeliverFrame(unsigned char* buffer,
1834                                  size_t buffer_size,
1835                                  uint32_t y_stride,
1836                                  uint32_t cbcr_stride,
1837                                  uint32_t time_stamp,
1838                                  int64_t ntp_time_ms,
1839                                  int64_t render_time,
1840                                  void *handle)
1841 {
1842   CSFLogDebug(logTag,  "%s Buffer Size %lu", __FUNCTION__, (unsigned long) buffer_size);
1843 
1844   ReentrantMonitorAutoEnter enter(mTransportMonitor);
1845   if(mRenderer)
1846   {
1847     layers::Image* img = nullptr;
1848     // |handle| should be a webrtc::NativeHandle if available.
1849     if (handle) {
1850       webrtc::NativeHandle* native_h = static_cast<webrtc::NativeHandle*>(handle);
1851       // In the handle, there should be a layers::Image.
1852       img = static_cast<layers::Image*>(native_h->GetHandle());
1853     }
1854 
1855     if (mVideoLatencyTestEnable && mReceivingWidth && mReceivingHeight) {
1856       uint64_t now = PR_Now();
1857       uint64_t timestamp = 0;
1858       bool ok = YuvStamper::Decode(mReceivingWidth, mReceivingHeight, mReceivingWidth,
1859                                    buffer,
1860                                    reinterpret_cast<unsigned char*>(&timestamp),
1861                                    sizeof(timestamp), 0, 0);
1862       if (ok) {
1863         VideoLatencyUpdate(now - timestamp);
1864       }
1865     }
1866 
1867     const ImageHandle img_h(img);
1868     mRenderer->RenderVideoFrame(buffer, buffer_size, y_stride, cbcr_stride,
1869                                 time_stamp, render_time, img_h);
1870     return 0;
1871   }
1872 
1873   CSFLogError(logTag,  "%s Renderer is NULL  ", __FUNCTION__);
1874   return -1;
1875 }
1876 
1877 int
1878 WebrtcVideoConduit::DeliverI420Frame(const webrtc::I420VideoFrame& webrtc_frame)
1879 {
1880   if (!webrtc_frame.native_handle()) {
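    // Software (non-texture) frame: forward the raw I420 buffer to the
    // stride-aware DeliverFrame() overload.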
1881     uint32_t y_stride = webrtc_frame.stride(static_cast<webrtc::PlaneType>(0));
1882     return DeliverFrame(const_cast<uint8_t*>(webrtc_frame.buffer(webrtc::kYPlane)),
1883                         CalcBufferSize(webrtc::kI420, y_stride, webrtc_frame.height()),
1884                         y_stride,
1885                         webrtc_frame.stride(static_cast<webrtc::PlaneType>(1)),
1886                         webrtc_frame.timestamp(),
1887                         webrtc_frame.ntp_time_ms(),
1888                         webrtc_frame.render_time_ms(), nullptr);
1889   }
1890   size_t buffer_size = CalcBufferSize(webrtc::kI420, webrtc_frame.width(), webrtc_frame.height());
1891   CSFLogDebug(logTag,  "%s Buffer Size %lu", __FUNCTION__, (unsigned long) buffer_size);
1892 
1893   ReentrantMonitorAutoEnter enter(mTransportMonitor);
1894   if(mRenderer)
1895   {
1896     layers::Image* img = nullptr;
1897     // native_handle() should return a webrtc::NativeHandle if available.
1898     webrtc::NativeHandle* native_h = static_cast<webrtc::NativeHandle*>(webrtc_frame.native_handle());
1899     if (native_h) {
1900       // In the handle, there should be a layers::Image.
1901       img = static_cast<layers::Image*>(native_h->GetHandle());
1902     }
1903 
1904 #if 0
1905     //#ifndef MOZ_WEBRTC_OMX
1906     // XXX - this may not be possible on GONK with textures!
1907     if (mVideoLatencyTestEnable && mReceivingWidth && mReceivingHeight) {
1908       uint64_t now = PR_Now();
1909       uint64_t timestamp = 0;
1910       bool ok = YuvStamper::Decode(mReceivingWidth, mReceivingHeight, mReceivingWidth,
1911                                    buffer,
1912                                    reinterpret_cast<unsigned char*>(&timestamp),
1913                                    sizeof(timestamp), 0, 0);
1914       if (ok) {
1915         VideoLatencyUpdate(now - timestamp);
1916       }
1917     }
1918 #endif
1919 
1920     const ImageHandle img_h(img);
1921     mRenderer->RenderVideoFrame(nullptr, buffer_size, webrtc_frame.timestamp(),
1922                                 webrtc_frame.render_time_ms(), img_h);
1923     return 0;
1924   }
1925 
1926   CSFLogError(logTag,  "%s Renderer is NULL  ", __FUNCTION__);
1927   return -1;
1928 }
1929 
1930 /**
1931  * Copy the codec passed into Conduit's database
1932  */
1933 
1934 void
1935 WebrtcVideoConduit::CodecConfigToWebRTCCodec(const VideoCodecConfig* codecInfo,
1936                                               webrtc::VideoCodec& cinst)
1937 {
1938   // Note: this assumes cinst is initialized to a base state either by
1939   // hand or from a config fetched with GetConfig(); this modifies the config
1940   // to match parameters from VideoCodecConfig
1941   cinst.plType  = codecInfo->mType;
1942   if (codecInfo->mName == "H264") {
1943     cinst.codecType = webrtc::kVideoCodecH264;
1944     PL_strncpyz(cinst.plName, "H264", sizeof(cinst.plName));
1945   } else if (codecInfo->mName == "VP8") {
1946     cinst.codecType = webrtc::kVideoCodecVP8;
1947     PL_strncpyz(cinst.plName, "VP8", sizeof(cinst.plName));
1948   } else if (codecInfo->mName == "VP9") {
1949     cinst.codecType = webrtc::kVideoCodecVP9;
1950     PL_strncpyz(cinst.plName, "VP9", sizeof(cinst.plName));
1951   } else if (codecInfo->mName == "I420") {
1952     cinst.codecType = webrtc::kVideoCodecI420;
1953     PL_strncpyz(cinst.plName, "I420", sizeof(cinst.plName));
1954   } else {
1955     cinst.codecType = webrtc::kVideoCodecUnknown;
1956     PL_strncpyz(cinst.plName, "Unknown", sizeof(cinst.plName));
1957   }
1958 
1959   // width/height will be overridden on the first frame; they must be 'sane' for
1960   // SetSendCodec()
1961   if (codecInfo->mEncodingConstraints.maxFps > 0) {
1962     cinst.maxFramerate = codecInfo->mEncodingConstraints.maxFps;
1963   } else {
1964     cinst.maxFramerate = DEFAULT_VIDEO_MAX_FRAMERATE;
1965   }
1966 
1967   // Defaults if rates aren't forced by pref.  Typically defaults are
1968   // overridden on the first video frame.
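  // Note: these VideoCodec bitrate fields are expressed in kbps.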
1969   cinst.minBitrate = mMinBitrate ? mMinBitrate : 200;
1970   cinst.startBitrate = mStartBitrate ? mStartBitrate : 300;
1971   cinst.targetBitrate = cinst.startBitrate;
1972   cinst.maxBitrate = mMaxBitrate ? mMaxBitrate : 2000;
1973 
1974   if (cinst.codecType == webrtc::kVideoCodecH264)
1975   {
1976 #ifdef MOZ_WEBRTC_OMX
1977     cinst.resolution_divisor = 16;
1978 #endif
1979     // cinst.codecSpecific.H264.profile = ?
1980     cinst.codecSpecific.H264.profile_byte = codecInfo->mProfile;
1981     cinst.codecSpecific.H264.constraints = codecInfo->mConstraints;
1982     cinst.codecSpecific.H264.level = codecInfo->mLevel;
1983     cinst.codecSpecific.H264.packetizationMode = codecInfo->mPacketizationMode;
1984     if (codecInfo->mEncodingConstraints.maxBr > 0) {
1985       // webrtc.org uses kbps, we use bps
1986       cinst.maxBitrate =
1987         MinIgnoreZero(cinst.maxBitrate,
1988                       codecInfo->mEncodingConstraints.maxBr)/1000;
1989     }
1990     if (codecInfo->mEncodingConstraints.maxMbps > 0) {
1991       // Not supported yet!
1992       CSFLogError(logTag,  "%s H.264 max_mbps not supported yet  ", __FUNCTION__);
1993     }
1994     // XXX parse the encoded SPS/PPS data
1995     // paranoia
1996     cinst.codecSpecific.H264.spsData = nullptr;
1997     cinst.codecSpecific.H264.spsLen = 0;
1998     cinst.codecSpecific.H264.ppsData = nullptr;
1999     cinst.codecSpecific.H264.ppsLen = 0;
2000   }
2001   // Init mSimulcastEncodings always since they hold info from setParameters.
2002   // TODO(bug 1210175): H264 doesn't support simulcast yet.
2003   size_t numberOfSimulcastEncodings = std::min(codecInfo->mSimulcastEncodings.size(), (size_t)webrtc::kMaxSimulcastStreams);
2004   for (size_t i = 0; i < numberOfSimulcastEncodings; ++i) {
2005     const VideoCodecConfig::SimulcastEncoding& encoding =
2006       codecInfo->mSimulcastEncodings[i];
2007     // Make sure the constraints on the whole stream are reflected.
2008     webrtc::SimulcastStream stream;
2009     memset(&stream, 0, sizeof(stream));
2010     stream.width = cinst.width;
2011     stream.height = cinst.height;
2012     stream.numberOfTemporalLayers = 1;
2013     stream.maxBitrate = cinst.maxBitrate;
2014     stream.targetBitrate = cinst.targetBitrate;
2015     stream.minBitrate = cinst.minBitrate;
2016     stream.qpMax = cinst.qpMax;
2017     strncpy(stream.rid, encoding.rid.c_str(), sizeof(stream.rid)-1);
2018     stream.rid[sizeof(stream.rid) - 1] = 0;
2019 
2020     // Apply encoding-specific constraints.
2021     stream.width = MinIgnoreZero(
2022         stream.width,
2023         (unsigned short)encoding.constraints.maxWidth);
2024     stream.height = MinIgnoreZero(
2025         stream.height,
2026         (unsigned short)encoding.constraints.maxHeight);
2027 
2028     // webrtc.org uses kbps, we use bps
2029     stream.jsMaxBitrate = encoding.constraints.maxBr/1000;
2030     stream.jsScaleDownBy = encoding.constraints.scaleDownBy;
2031 
2032     MOZ_ASSERT(stream.jsScaleDownBy >= 1.0);
2033     uint32_t width = stream.width? stream.width : 640;
2034     uint32_t height = stream.height? stream.height : 480;
2035     uint32_t new_width = uint32_t(width / stream.jsScaleDownBy);
2036     uint32_t new_height = uint32_t(height / stream.jsScaleDownBy);
2037 
2038     if (new_width != width || new_height != height) {
2039       // Estimate. Overridden on first frame.
2040       SelectBitrates(new_width, new_height, stream.jsMaxBitrate,
2041                      mLastFramerateTenths,
2042                      stream.minBitrate,
2043                      stream.targetBitrate,
2044                      stream.maxBitrate);
2045     }
2046     // webrtc.org expects simulcast streams to be ordered by increasing
2047     // fidelity, our jsep code does the opposite.
2048     cinst.simulcastStream[numberOfSimulcastEncodings-i-1] = stream;
2049   }
2050 
2051   cinst.numberOfSimulcastStreams = numberOfSimulcastEncodings;
2052 }
2053 
2054 /**
2055  * Perform validation on the codecConfig to be applied.
2056  * Verifies if the codec is already applied.
2057  */
2058 MediaConduitErrorCode
2059 WebrtcVideoConduit::ValidateCodecConfig(const VideoCodecConfig* codecInfo,
2060                                         bool send)
2061 {
2062   if(!codecInfo)
2063   {
2064     CSFLogError(logTag, "%s Null CodecConfig ", __FUNCTION__);
2065     return kMediaConduitMalformedArgument;
2066   }
2067 
2068   if((codecInfo->mName.empty()) ||
2069      (codecInfo->mName.length() >= CODEC_PLNAME_SIZE))
2070   {
2071     CSFLogError(logTag, "%s Invalid Payload Name Length ", __FUNCTION__);
2072     return kMediaConduitMalformedArgument;
2073   }
2074 
2075   return kMediaConduitNoError;
2076 }
2077 
2078 void
2079 WebrtcVideoConduit::VideoLatencyUpdate(uint64_t newSample)
2080 {
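  // Smoothed running average of latency samples; the stored value is kept
  // scaled (MozVideoLatencyAvg divides the padding back out) so the integer
  // math retains some fractional precision.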
2081   mVideoLatencyAvg = (sRoundingPadding * newSample + sAlphaNum * mVideoLatencyAvg) / sAlphaDen;
2082 }
2083 
2084 uint64_t
2085 WebrtcVideoConduit::MozVideoLatencyAvg()
2086 {
2087   return mVideoLatencyAvg / sRoundingPadding;
2088 }
2089 
2090 uint64_t
2091 WebrtcVideoConduit::CodecPluginID()
2092 {
2093   if (mExternalSendCodecHandle) {
2094     return mExternalSendCodecHandle->PluginID();
2095   } else if (mExternalRecvCodecHandle) {
2096     return mExternalRecvCodecHandle->PluginID();
2097   }
2098   return 0;
2099 }
2100 
2101 bool
2102 WebrtcVideoConduit::DetermineREDAndULPFECPayloadTypes(uint8_t &payload_type_red, uint8_t &payload_type_ulpfec)
2103 {
2104     webrtc::VideoCodec video_codec;
2105     payload_type_red = INVALID_RTP_PAYLOAD;
2106     payload_type_ulpfec = INVALID_RTP_PAYLOAD;
2107 
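    // Scan the engine's built-in codec list to discover which payload types
    // are assigned to RED and ULPFEC.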
2108     for(int idx=0; idx < mPtrViECodec->NumberOfCodecs(); idx++)
2109     {
2110       if(mPtrViECodec->GetCodec(idx, video_codec) == 0)
2111       {
2112         switch(video_codec.codecType) {
2113           case webrtc::VideoCodecType::kVideoCodecRED:
2114             payload_type_red = video_codec.plType;
2115             break;
2116           case webrtc::VideoCodecType::kVideoCodecULPFEC:
2117             payload_type_ulpfec = video_codec.plType;
2118             break;
2119           default:
2120             break;
2121         }
2122       }
2123     }
2124 
2125     return payload_type_red != INVALID_RTP_PAYLOAD
2126            && payload_type_ulpfec != INVALID_RTP_PAYLOAD;
2127 }
2128 
2129 } // end namespace
2130