/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "MediaEngineRemoteVideoSource.h"

#include "AllocationHandle.h"
#include "CamerasChild.h"
#include "MediaManager.h"
#include "MediaTrackConstraints.h"
#include "mozilla/RefPtr.h"
#include "nsIPrefService.h"
#include "VideoFrameUtils.h"
#include "VideoUtils.h"
#include "webrtc/common_video/include/video_frame_buffer.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"

mozilla::LogModule* GetMediaManagerLog();
#define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
#define LOGFRAME(msg) \
  MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Verbose, msg)

namespace mozilla {

using dom::ConstrainLongRange;
using dom::MediaSourceEnum;
using dom::MediaTrackConstraintSet;
using dom::MediaTrackConstraints;
using dom::MediaTrackSettings;
using dom::VideoFacingModeEnum;

MediaEngineRemoteVideoSource::MediaEngineRemoteVideoSource(
    int aIndex, camera::CaptureEngine aCapEngine, MediaSourceEnum aMediaSource,
    bool aScary)
    : mCaptureIndex(aIndex),
      mMediaSource(aMediaSource),
      mCapEngine(aCapEngine),
      mScary(aScary),
      mMutex("MediaEngineRemoteVideoSource::mMutex"),
      mRescalingBufferPool(/* zero_initialize */ false,
                           /* max_number_of_buffers */ 1),
      mSettingsUpdatedByFrame(MakeAndAddRef<media::Refcountable<AtomicBool>>()),
      mSettings(MakeAndAddRef<media::Refcountable<MediaTrackSettings>>()) {
  MOZ_ASSERT(aMediaSource != MediaSourceEnum::Other);
  mSettings->mWidth.Construct(0);
  mSettings->mHeight.Construct(0);
  mSettings->mFrameRate.Construct(0);
  Init();
}

void MediaEngineRemoteVideoSource::Init() {
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  char deviceName[kMaxDeviceNameLength];
  char uniqueId[kMaxUniqueIdLength];
  if (camera::GetChildAndCall(&camera::CamerasChild::GetCaptureDevice,
                              mCapEngine, mCaptureIndex, deviceName,
                              kMaxDeviceNameLength, uniqueId,
                              kMaxUniqueIdLength, nullptr)) {
    LOG(("Error initializing RemoteVideoSource (GetCaptureDevice)"));
    return;
  }

  SetName(NS_ConvertUTF8toUTF16(deviceName));
  SetUUID(uniqueId);

  mInitDone = true;
}

void MediaEngineRemoteVideoSource::Shutdown() {
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  if (!mInitDone) {
    // Already shut down
    return;
  }

  // Allocate always returns a null AllocationHandle.
  // We can safely pass nullptr here.
  if (mState == kStarted) {
    Stop(nullptr);
  }
  if (mState == kAllocated || mState == kStopped) {
    Deallocate(nullptr);
  }
  MOZ_ASSERT(mState == kReleased);

  mInitDone = false;
}

void MediaEngineRemoteVideoSource::SetName(nsString aName) {
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  mDeviceName = Move(aName);
  bool hasFacingMode = false;
  VideoFacingModeEnum facingMode = VideoFacingModeEnum::User;

  // Set facing mode based on device name.
#if defined(ANDROID)
  // Names are generated. Example: "Camera 0, Facing back, Orientation 90"
  //
  // See media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/
  // webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java

  if (mDeviceName.Find(NS_LITERAL_STRING("Facing back")) != kNotFound) {
    hasFacingMode = true;
    facingMode = VideoFacingModeEnum::Environment;
  } else if (mDeviceName.Find(NS_LITERAL_STRING("Facing front")) != kNotFound) {
    hasFacingMode = true;
    facingMode = VideoFacingModeEnum::User;
  }
#endif  // ANDROID
#ifdef XP_MACOSX
  // Kludge to test user-facing cameras on OSX.
  if (mDeviceName.Find(NS_LITERAL_STRING("Face")) != -1) {
    hasFacingMode = true;
    facingMode = VideoFacingModeEnum::User;
  }
#endif
#ifdef XP_WIN
  // The cameras on the Surface Book are named "Microsoft Camera Front" and
  // "Microsoft Camera Rear" respectively.

  if (mDeviceName.Find(NS_LITERAL_STRING("Front")) != kNotFound) {
    hasFacingMode = true;
    facingMode = VideoFacingModeEnum::User;
  } else if (mDeviceName.Find(NS_LITERAL_STRING("Rear")) != kNotFound) {
    hasFacingMode = true;
    facingMode = VideoFacingModeEnum::Environment;
  }
#endif  // WINDOWS
  if (hasFacingMode) {
    mFacingMode.Assign(NS_ConvertUTF8toUTF16(
        dom::VideoFacingModeEnumValues::strings[uint32_t(facingMode)].value));
  } else {
    mFacingMode.Truncate();
  }
}

nsString MediaEngineRemoteVideoSource::GetName() const {
  AssertIsOnOwningThread();

  return mDeviceName;
}

void MediaEngineRemoteVideoSource::SetUUID(const char* aUUID) {
  AssertIsOnOwningThread();

  mUniqueId.Assign(aUUID);
}

nsCString MediaEngineRemoteVideoSource::GetUUID() const {
  AssertIsOnOwningThread();

  return mUniqueId;
}

nsresult MediaEngineRemoteVideoSource::Allocate(
    const MediaTrackConstraints& aConstraints, const MediaEnginePrefs& aPrefs,
    const nsString& aDeviceId,
    const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
    AllocationHandle** aOutHandle, const char** aOutBadConstraint) {
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  MOZ_ASSERT(mInitDone);
  MOZ_ASSERT(mState == kReleased);

  NormalizedConstraints constraints(aConstraints);
  webrtc::CaptureCapability newCapability;
  LOG(("ChooseCapability(kFitness) for mCapability (Allocate) ++"));
  if (!ChooseCapability(constraints, aPrefs, aDeviceId, newCapability,
                        kFitness)) {
    *aOutBadConstraint =
        MediaConstraintsHelper::FindBadConstraint(constraints, this, aDeviceId);
    return NS_ERROR_FAILURE;
  }
  LOG(("ChooseCapability(kFitness) for mCapability (Allocate) --"));

  if (camera::GetChildAndCall(&camera::CamerasChild::AllocateCaptureDevice,
                              mCapEngine, mUniqueId.get(), kMaxUniqueIdLength,
                              mCaptureIndex, aPrincipalInfo)) {
    return NS_ERROR_FAILURE;
  }

  *aOutHandle = nullptr;

  {
    MutexAutoLock lock(mMutex);
    mState = kAllocated;
    mCapability = newCapability;
  }

  LOG(("Video device %d allocated", mCaptureIndex));
  return NS_OK;
}

nsresult MediaEngineRemoteVideoSource::Deallocate(
    const RefPtr<const AllocationHandle>& aHandle) {
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  MOZ_ASSERT(mState == kStopped || mState == kAllocated);

  if (mStream && IsTrackIDExplicit(mTrackID)) {
    mStream->EndTrack(mTrackID);
  }

  {
    MutexAutoLock lock(mMutex);

    mStream = nullptr;
    mTrackID = TRACK_NONE;
    mPrincipal = PRINCIPAL_HANDLE_NONE;
    mState = kReleased;
  }

  // Stop() has stopped capture synchronously on the media thread before we get
  // here, so there are no longer any callbacks on an IPC thread accessing
  // mImageContainer or mRescalingBufferPool.
  mImageContainer = nullptr;
  mRescalingBufferPool.Release();

  LOG(("Video device %d deallocated", mCaptureIndex));

  if (camera::GetChildAndCall(&camera::CamerasChild::ReleaseCaptureDevice,
                              mCapEngine, mCaptureIndex)) {
    MOZ_ASSERT_UNREACHABLE("Couldn't release allocated device");
  }
  return NS_OK;
}

nsresult MediaEngineRemoteVideoSource::SetTrack(
    const RefPtr<const AllocationHandle>& aHandle,
    const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
    const PrincipalHandle& aPrincipal) {
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  MOZ_ASSERT(mState == kAllocated);
  MOZ_ASSERT(!mStream);
  MOZ_ASSERT(mTrackID == TRACK_NONE);
  MOZ_ASSERT(aStream);
  MOZ_ASSERT(IsTrackIDExplicit(aTrackID));

  if (!mImageContainer) {
    mImageContainer = layers::LayerManager::CreateImageContainer(
        layers::ImageContainer::ASYNCHRONOUS);
  }

  {
    MutexAutoLock lock(mMutex);
    mStream = aStream;
    mTrackID = aTrackID;
    mPrincipal = aPrincipal;
  }
  aStream->AddTrack(aTrackID, 0, new VideoSegment(),
                    SourceMediaStream::ADDTRACK_QUEUED);
  return NS_OK;
}

nsresult MediaEngineRemoteVideoSource::Start(
    const RefPtr<const AllocationHandle>& aHandle) {
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  MOZ_ASSERT(mInitDone);
  MOZ_ASSERT(mState == kAllocated || mState == kStopped);
  MOZ_ASSERT(mStream);
  MOZ_ASSERT(IsTrackIDExplicit(mTrackID));

  {
    MutexAutoLock lock(mMutex);
    mState = kStarted;
  }

  mSettingsUpdatedByFrame->mValue = false;

  if (camera::GetChildAndCall(&camera::CamerasChild::StartCapture, mCapEngine,
                              mCaptureIndex, mCapability, this)) {
    LOG(("StartCapture failed"));
    MutexAutoLock lock(mMutex);
    mState = kStopped;
    return NS_ERROR_FAILURE;
  }

  NS_DispatchToMainThread(NS_NewRunnableFunction(
      "MediaEngineRemoteVideoSource::SetLastCapability", [
        settings = mSettings, updated = mSettingsUpdatedByFrame,
        source = mMediaSource, cap = mCapability
      ]() mutable {
        switch (source) {
          case dom::MediaSourceEnum::Screen:
          case dom::MediaSourceEnum::Window:
          case dom::MediaSourceEnum::Application:
            // Undo the hack where ideal and max constraints are crammed
            // together in mCapability for consumption by low-level code. We
            // don't actually know the real resolution yet, so report min(ideal,
            // max) for now.
            // TODO: This can be removed in bug 1453269.
            cap.width = std::min(cap.width >> 16, cap.width & 0xffff);
            cap.height = std::min(cap.height >> 16, cap.height & 0xffff);
            break;
          default:
            break;
        }

        if (!updated->mValue) {
          settings->mWidth.Value() = cap.width;
          settings->mHeight.Value() = cap.height;
        }
        settings->mFrameRate.Value() = cap.maxFPS;
      }));

  return NS_OK;
}

nsresult MediaEngineRemoteVideoSource::Stop(
    const RefPtr<const AllocationHandle>& aHandle) {
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  if (mState == kStopped || mState == kAllocated) {
    return NS_OK;
  }

  MOZ_ASSERT(mState == kStarted);

  if (camera::GetChildAndCall(&camera::CamerasChild::StopCapture, mCapEngine,
                              mCaptureIndex)) {
    MOZ_DIAGNOSTIC_ASSERT(false, "Stopping a started capture failed");
  }

  {
    MutexAutoLock lock(mMutex);
    mState = kStopped;

    // Drop any cached image so we don't start with a stale image on next
    // usage.  Also, gfx gets very upset if these are held until this object
    // is gc'd in final-cc during shutdown (bug 1374164)
    mImage = nullptr;
  }

  return NS_OK;
}

nsresult MediaEngineRemoteVideoSource::Reconfigure(
    const RefPtr<AllocationHandle>& aHandle,
    const MediaTrackConstraints& aConstraints, const MediaEnginePrefs& aPrefs,
    const nsString& aDeviceId, const char** aOutBadConstraint) {
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  MOZ_ASSERT(mInitDone);

  NormalizedConstraints constraints(aConstraints);
  webrtc::CaptureCapability newCapability;
  LOG(("ChooseCapability(kFitness) for mTargetCapability (Reconfigure) ++"));
  if (!ChooseCapability(constraints, aPrefs, aDeviceId, newCapability,
                        kFitness)) {
    *aOutBadConstraint =
        MediaConstraintsHelper::FindBadConstraint(constraints, this, aDeviceId);
    return NS_ERROR_FAILURE;
  }
  LOG(("ChooseCapability(kFitness) for mTargetCapability (Reconfigure) --"));

  if (mCapability == newCapability) {
    return NS_OK;
  }

  {
    MutexAutoLock lock(mMutex);
    // Start() applies mCapability on the device.
    mCapability = newCapability;
  }

  if (mState == kStarted) {
    // Allocate always returns a null AllocationHandle.
    // We can safely pass nullptr below.
    nsresult rv = Stop(nullptr);
    if (NS_WARN_IF(NS_FAILED(rv))) {
      return rv;
    }

    rv = Start(nullptr);
    if (NS_WARN_IF(NS_FAILED(rv))) {
      return rv;
    }
  }

  return NS_OK;
}

size_t MediaEngineRemoteVideoSource::NumCapabilities() const {
  AssertIsOnOwningThread();

  mHardcodedCapabilities.Clear();
  int num = camera::GetChildAndCall(&camera::CamerasChild::NumberOfCapabilities,
                                    mCapEngine, mUniqueId.get());

  if (num >= 1) {
    return num;
  }

  // The default for devices that don't return discrete capabilities: treat
  // them as supporting all capabilities orthogonally. E.g. screensharing.
  // CaptureCapability defaults key values to 0, which means accept any value.
  mHardcodedCapabilities.AppendElement(webrtc::CaptureCapability());
  return mHardcodedCapabilities.Length();  // 1
}

webrtc::CaptureCapability MediaEngineRemoteVideoSource::GetCapability(
    size_t aIndex) const {
  AssertIsOnOwningThread();
  webrtc::CaptureCapability result;
  if (!mHardcodedCapabilities.IsEmpty()) {
    MOZ_ASSERT(aIndex < mHardcodedCapabilities.Length());
    result = mHardcodedCapabilities.SafeElementAt(aIndex,
                                                  webrtc::CaptureCapability());
  }
  camera::GetChildAndCall(&camera::CamerasChild::GetCaptureCapability,
                          mCapEngine, mUniqueId.get(), aIndex, result);
  return result;
}

void MediaEngineRemoteVideoSource::Pull(
    const RefPtr<const AllocationHandle>& aHandle,
    const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
    StreamTime aDesiredTime, const PrincipalHandle& aPrincipalHandle) {
  MutexAutoLock lock(mMutex);
  if (mState == kReleased) {
    // We end the track before deallocating, so this is safe.
    return;
  }

  MOZ_ASSERT(mState == kStarted || mState == kStopped);

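  // Compute how much track time we still need to append to reach
  // aDesiredTime. A delta of zero or less means the track is already caught
  // up and there is nothing to do on this iteration.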
  StreamTime delta = aDesiredTime - aStream->GetEndOfAppendedData(aTrackID);
  if (delta <= 0) {
    return;
  }

  VideoSegment segment;
  RefPtr<layers::Image> image = mImage;
  if (mState == kStarted) {
    MOZ_ASSERT(!image || mImageSize == image->GetSize());
    segment.AppendFrame(image.forget(), delta, mImageSize, aPrincipalHandle);
  } else {
    // A nullptr image is allowed, but in that case we force the frame to
    // black and retain the size.
    segment.AppendFrame(image.forget(), delta, mImageSize, aPrincipalHandle,
                        true);
  }

  // This is safe from any thread, and is safe if the track is Finished
  // or Destroyed.
  // This can fail if either a) we haven't added the track yet, or b)
  // we've removed or finished the track.
  aStream->AppendToTrack(aTrackID, &segment);
}

int MediaEngineRemoteVideoSource::DeliverFrame(
    uint8_t* aBuffer, const camera::VideoFrameProperties& aProps) {
  // Cameras IPC thread - take great care with accessing members!

  int32_t req_max_width;
  int32_t req_max_height;
  int32_t req_ideal_width;
  int32_t req_ideal_height;
  {
    MutexAutoLock lock(mMutex);
    MOZ_ASSERT(mState == kStarted);
    // TODO: These can be removed in bug 1453269.
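    // For screen sharing, ChooseCapability() packs the ideal size into the
    // upper 16 bits of mCapability.width/height and the max size into the
    // lower 16 bits; the extractions below undo that packing. For cameras,
    // both ideal values come out as 0.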
    req_max_width = mCapability.width & 0xffff;
    req_max_height = mCapability.height & 0xffff;
    req_ideal_width = (mCapability.width >> 16) & 0xffff;
    req_ideal_height = (mCapability.height >> 16) & 0xffff;
  }

  // This is only used in the case of screen sharing, see bug 1453269.
  const int32_t target_width = aProps.width();
  const int32_t target_height = aProps.height();

  if (aProps.rotation() == 90 || aProps.rotation() == 270) {
    // This frame is rotated, so what was negotiated as width is now height,
    // and vice versa.
    std::swap(req_max_width, req_max_height);
    std::swap(req_ideal_width, req_ideal_height);
  }

  int32_t dst_max_width = std::min(req_max_width, aProps.width());
  int32_t dst_max_height = std::min(req_max_height, aProps.height());
  // This logic works for both the camera and screen sharing cases. In the
  // camera case, req_ideal_width and req_ideal_height are 0, so the lines
  // below set dst_width to dst_max_width and dst_height to dst_max_height.
  int32_t dst_width = std::min(
      req_ideal_width > 0 ? req_ideal_width : aProps.width(), dst_max_width);
  int32_t dst_height =
      std::min(req_ideal_height > 0 ? req_ideal_height : aProps.height(),
               dst_max_height);

  // Apply scaling for screen sharing, see bug 1453269.
  switch (mMediaSource) {
    case MediaSourceEnum::Screen:
    case MediaSourceEnum::Window:
    case MediaSourceEnum::Application: {
      // scale to average of portrait and landscape
      float scale_width = (float)dst_width / (float)aProps.width();
      float scale_height = (float)dst_height / (float)aProps.height();
      float scale = (scale_width + scale_height) / 2;
      dst_width = (int)(scale * target_width);
      dst_height = (int)(scale * target_height);

      // if scaled rectangle exceeds max rectangle, scale to minimum of portrait
      // and landscape
      if (dst_width > dst_max_width || dst_height > dst_max_height) {
        scale_width = (float)dst_max_width / (float)dst_width;
        scale_height = (float)dst_max_height / (float)dst_height;
        scale = std::min(scale_width, scale_height);
        dst_width = (int32_t)(scale * dst_width);
        dst_height = (int32_t)(scale * dst_height);
      }
      break;
    }
    default: { break; }
  }

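  // Wrap the raw I420 frame we received over IPC in a webrtc::VideoFrameBuffer
  // without copying it. The U and V plane pointers are computed from the
  // per-plane allocated sizes carried in aProps.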
  rtc::Callback0<void> callback_unused;
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
          aProps.width(), aProps.height(), aBuffer, aProps.yStride(),
          aBuffer + aProps.yAllocatedSize(), aProps.uStride(),
          aBuffer + aProps.yAllocatedSize() + aProps.uAllocatedSize(),
          aProps.vStride(), callback_unused);

  if ((dst_width != aProps.width() || dst_height != aProps.height()) &&
      dst_width <= aProps.width() && dst_height <= aProps.height()) {
    // Destination resolution is smaller than source buffer. We'll rescale.
    rtc::scoped_refptr<webrtc::I420Buffer> scaledBuffer =
        mRescalingBufferPool.CreateBuffer(dst_width, dst_height);
    if (!scaledBuffer) {
      MOZ_ASSERT_UNREACHABLE(
          "We might fail to allocate a buffer, but with this "
          "being a recycling pool that shouldn't happen");
      return 0;
    }
    scaledBuffer->CropAndScaleFrom(*buffer);
    buffer = scaledBuffer;
  }

  layers::PlanarYCbCrData data;
  data.mYChannel = const_cast<uint8_t*>(buffer->DataY());
  data.mYSize = IntSize(buffer->width(), buffer->height());
  data.mYStride = buffer->StrideY();
  MOZ_ASSERT(buffer->StrideU() == buffer->StrideV());
  data.mCbCrStride = buffer->StrideU();
  data.mCbChannel = const_cast<uint8_t*>(buffer->DataU());
  data.mCrChannel = const_cast<uint8_t*>(buffer->DataV());
  data.mCbCrSize =
      IntSize((buffer->width() + 1) / 2, (buffer->height() + 1) / 2);
  data.mPicX = 0;
  data.mPicY = 0;
  data.mPicSize = IntSize(buffer->width(), buffer->height());

  RefPtr<layers::PlanarYCbCrImage> image =
      mImageContainer->CreatePlanarYCbCrImage();
  if (!image->CopyData(data)) {
    MOZ_ASSERT_UNREACHABLE(
        "We might fail to allocate a buffer, but with this "
        "being a recycling container that shouldn't happen");
    return 0;
  }

#ifdef DEBUG
  static uint32_t frame_num = 0;
  LOGFRAME(
      ("frame %d (%dx%d)->(%dx%d); rotation %d, timeStamp %u, "
       "ntpTimeMs %" PRIu64 ", renderTimeMs %" PRIu64,
       frame_num++, aProps.width(), aProps.height(), dst_width, dst_height,
       aProps.rotation(), aProps.timeStamp(), aProps.ntpTimeMs(),
       aProps.renderTimeMs()));
#endif

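  // If the delivered frame size changed, update the cached settings on the
  // main thread so that GetSettings() reflects the size actually being
  // delivered.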
  if (mImageSize.width != dst_width || mImageSize.height != dst_height) {
    NS_DispatchToMainThread(NS_NewRunnableFunction(
        "MediaEngineRemoteVideoSource::FrameSizeChange", [
          settings = mSettings, updated = mSettingsUpdatedByFrame, dst_width,
          dst_height
        ]() mutable {
          settings->mWidth.Value() = dst_width;
          settings->mHeight.Value() = dst_height;
          updated->mValue = true;
        }));
  }

  {
    MutexAutoLock lock(mMutex);
    // implicitly releases last image
    mImage = image.forget();
    mImageSize = mImage->GetSize();
  }

  // We'll push the frame into the MSG on the next Pull. This will avoid
  // swamping the MSG with frames should it be taking longer than normal to run
  // an iteration.

  return 0;
}

uint32_t MediaEngineRemoteVideoSource::GetDistance(
    const webrtc::CaptureCapability& aCandidate,
    const NormalizedConstraintSet& aConstraints, const nsString& aDeviceId,
    const DistanceCalculation aCalculate) const {
  if (aCalculate == kFeasibility) {
    return GetFeasibilityDistance(aCandidate, aConstraints, aDeviceId);
  }
  return GetFitnessDistance(aCandidate, aConstraints, aDeviceId);
}

uint32_t MediaEngineRemoteVideoSource::GetFitnessDistance(
    const webrtc::CaptureCapability& aCandidate,
    const NormalizedConstraintSet& aConstraints,
    const nsString& aDeviceId) const {
  AssertIsOnOwningThread();

  // Treat width|height|frameRate == 0 on capability as "can do any".
  // This allows for orthogonal capabilities that are not in discrete steps.

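  // The per-constraint distances are summed in 64 bits so the addition cannot
  // overflow, then clamped to UINT32_MAX below.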
  typedef MediaConstraintsHelper H;
  uint64_t distance =
      uint64_t(H::FitnessDistance(aDeviceId, aConstraints.mDeviceId)) +
      uint64_t(H::FitnessDistance(mFacingMode, aConstraints.mFacingMode)) +
      uint64_t(aCandidate.width ? H::FitnessDistance(int32_t(aCandidate.width),
                                                     aConstraints.mWidth)
                                : 0) +
      uint64_t(aCandidate.height
                   ? H::FitnessDistance(int32_t(aCandidate.height),
                                        aConstraints.mHeight)
                   : 0) +
      uint64_t(aCandidate.maxFPS ? H::FitnessDistance(double(aCandidate.maxFPS),
                                                      aConstraints.mFrameRate)
                                 : 0);
  return uint32_t(std::min(distance, uint64_t(UINT32_MAX)));
}

uint32_t MediaEngineRemoteVideoSource::GetFeasibilityDistance(
    const webrtc::CaptureCapability& aCandidate,
    const NormalizedConstraintSet& aConstraints,
    const nsString& aDeviceId) const {
  AssertIsOnOwningThread();

  // Treat width|height|frameRate == 0 on capability as "can do any".
  // This allows for orthogonal capabilities that are not in discrete steps.

  typedef MediaConstraintsHelper H;
  uint64_t distance =
      uint64_t(H::FitnessDistance(aDeviceId, aConstraints.mDeviceId)) +
      uint64_t(H::FitnessDistance(mFacingMode, aConstraints.mFacingMode)) +
      uint64_t(aCandidate.width
                   ? H::FeasibilityDistance(int32_t(aCandidate.width),
                                            aConstraints.mWidth)
                   : 0) +
      uint64_t(aCandidate.height
                   ? H::FeasibilityDistance(int32_t(aCandidate.height),
                                            aConstraints.mHeight)
                   : 0) +
      uint64_t(aCandidate.maxFPS
                   ? H::FeasibilityDistance(double(aCandidate.maxFPS),
                                            aConstraints.mFrameRate)
                   : 0);
  return uint32_t(std::min(distance, uint64_t(UINT32_MAX)));
}

// Find best capability by removing inferiors. May leave >1 of equal distance

/* static */ void MediaEngineRemoteVideoSource::TrimLessFitCandidates(
    nsTArray<CapabilityCandidate>& aSet) {
  uint32_t best = UINT32_MAX;
  for (auto& candidate : aSet) {
    if (best > candidate.mDistance) {
      best = candidate.mDistance;
    }
  }
  for (size_t i = 0; i < aSet.Length();) {
    if (aSet[i].mDistance > best) {
      aSet.RemoveElementAt(i);
    } else {
      ++i;
    }
  }
  MOZ_ASSERT(aSet.Length());
}

uint32_t MediaEngineRemoteVideoSource::GetBestFitnessDistance(
    const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
    const nsString& aDeviceId) const {
  AssertIsOnOwningThread();

  size_t num = NumCapabilities();
  nsTArray<CapabilityCandidate> candidateSet;
  for (size_t i = 0; i < num; i++) {
    candidateSet.AppendElement(CapabilityCandidate(GetCapability(i)));
  }

  bool first = true;
  for (const NormalizedConstraintSet* ns : aConstraintSets) {
    for (size_t i = 0; i < candidateSet.Length();) {
      auto& candidate = candidateSet[i];
      uint32_t distance =
          GetFitnessDistance(candidate.mCapability, *ns, aDeviceId);
      if (distance == UINT32_MAX) {
        candidateSet.RemoveElementAt(i);
      } else {
        ++i;
        if (first) {
          candidate.mDistance = distance;
        }
      }
    }
    first = false;
  }
  if (!candidateSet.Length()) {
    return UINT32_MAX;
  }
  TrimLessFitCandidates(candidateSet);
  return candidateSet[0].mDistance;
}

static void LogConstraints(const NormalizedConstraintSet& aConstraints) {
  auto& c = aConstraints;
  if (c.mWidth.mIdeal.isSome()) {
    LOG(("Constraints: width: { min: %d, max: %d, ideal: %d }", c.mWidth.mMin,
         c.mWidth.mMax, c.mWidth.mIdeal.valueOr(0)));
  } else {
    LOG(("Constraints: width: { min: %d, max: %d }", c.mWidth.mMin,
         c.mWidth.mMax));
  }
  if (c.mHeight.mIdeal.isSome()) {
    LOG(("             height: { min: %d, max: %d, ideal: %d }", c.mHeight.mMin,
         c.mHeight.mMax, c.mHeight.mIdeal.valueOr(0)));
  } else {
    LOG(("             height: { min: %d, max: %d }", c.mHeight.mMin,
         c.mHeight.mMax));
  }
  if (c.mFrameRate.mIdeal.isSome()) {
    LOG(("             frameRate: { min: %f, max: %f, ideal: %f }",
         c.mFrameRate.mMin, c.mFrameRate.mMax, c.mFrameRate.mIdeal.valueOr(0)));
  } else {
    LOG(("             frameRate: { min: %f, max: %f }", c.mFrameRate.mMin,
         c.mFrameRate.mMax));
  }
}

static void LogCapability(const char* aHeader,
                          const webrtc::CaptureCapability& aCapability,
                          uint32_t aDistance) {
  // RawVideoType and VideoCodecType media/webrtc/trunk/webrtc/common_types.h
  static const char* const types[] = {
      "I420",  "YV12",  "YUY2",   "UYVY",     "IYUV",
      "ARGB",  "RGB24", "RGB565", "ARGB4444", "ARGB1555",
      "MJPEG", "NV12",  "NV21",   "BGRA",     "Unknown type"};

  static const char* const codec[] = {"VP8",           "VP9",          "H264",
                                      "I420",          "RED",          "ULPFEC",
                                      "Generic codec", "Unknown codec"};

  LOG(("%s: %4u x %4u x %2u maxFps, %s, %s. Distance = %" PRIu32, aHeader,
       aCapability.width, aCapability.height, aCapability.maxFPS,
       types[std::min(std::max(uint32_t(0), uint32_t(aCapability.rawType)),
                      uint32_t(sizeof(types) / sizeof(*types) - 1))],
       codec[std::min(std::max(uint32_t(0), uint32_t(aCapability.codecType)),
                      uint32_t(sizeof(codec) / sizeof(*codec) - 1))],
       aDistance));
}

bool MediaEngineRemoteVideoSource::ChooseCapability(
    const NormalizedConstraints& aConstraints, const MediaEnginePrefs& aPrefs,
    const nsString& aDeviceId, webrtc::CaptureCapability& aCapability,
    const DistanceCalculation aCalculate) {
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  if (MOZ_LOG_TEST(GetMediaManagerLog(), LogLevel::Debug)) {
    LOG(("ChooseCapability: prefs: %dx%d @%dfps", aPrefs.GetWidth(),
         aPrefs.GetHeight(), aPrefs.mFPS));
    LogConstraints(aConstraints);
    if (!aConstraints.mAdvanced.empty()) {
      LOG(("Advanced array[%zu]:", aConstraints.mAdvanced.size()));
      for (auto& advanced : aConstraints.mAdvanced) {
        LogConstraints(advanced);
      }
    }
  }

  switch (mMediaSource) {
    case MediaSourceEnum::Screen:
    case MediaSourceEnum::Window:
    case MediaSourceEnum::Application: {
      FlattenedConstraints c(aConstraints);
      // The actual resolution to constrain around is not easy to find ahead of
      // time (and may in fact change over time), so as a hack, we push ideal
      // and max constraints down to desktop_capture_impl.cc and finish the
      // algorithm there.
      // TODO: This can be removed in bug 1453269.
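      // The packing below puts the ideal dimension in the upper 16 bits and
      // the max dimension in the lower 16 bits of each field. For example, an
      // ideal width of 1280 with a max width of 1920 is encoded as
      // (1280 << 16) | 1920 == 0x05000780. DeliverFrame() and Start() unpack
      // these values again.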
      aCapability.width = (c.mWidth.mIdeal.valueOr(0) & 0xffff) << 16 |
                          (c.mWidth.mMax & 0xffff);
      aCapability.height = (c.mHeight.mIdeal.valueOr(0) & 0xffff) << 16 |
                           (c.mHeight.mMax & 0xffff);
      aCapability.maxFPS =
          c.mFrameRate.Clamp(c.mFrameRate.mIdeal.valueOr(aPrefs.mFPS));
      return true;
    }
    default:
      break;
  }

  nsTArray<CapabilityCandidate> candidateSet;
  size_t num = NumCapabilities();
  for (size_t i = 0; i < num; i++) {
    candidateSet.AppendElement(CapabilityCandidate(GetCapability(i)));
  }

  if (!mHardcodedCapabilities.IsEmpty() &&
      mMediaSource == MediaSourceEnum::Camera) {
    // We have a hardcoded capability, which means this camera didn't report
    // discrete capabilities. It might still allow a ranged capability, so we
    // add a couple of default candidates based on prefs and constraints.
    // The chosen candidate will be propagated to StartCapture() which will fail
    // for an invalid candidate.
    MOZ_DIAGNOSTIC_ASSERT(mHardcodedCapabilities.Length() == 1);
    MOZ_DIAGNOSTIC_ASSERT(candidateSet.Length() == 1);
    candidateSet.Clear();

    FlattenedConstraints c(aConstraints);
    // Reuse the code across both the low-definition (`false`) pref and
    // the high-definition (`true`) pref.
    // If there are constraints, we try to satisfy them, but we default to
    // prefs. Note that since constraints come from content and can literally
    // be anything, we put (rather generous) caps on them.
    for (bool isHd : {false, true}) {
      webrtc::CaptureCapability cap;
      int32_t prefWidth = aPrefs.GetWidth(isHd);
      int32_t prefHeight = aPrefs.GetHeight(isHd);

      cap.width = c.mWidth.Get(prefWidth);
      cap.width = std::max(0, std::min(cap.width, 7680));

      cap.height = c.mHeight.Get(prefHeight);
      cap.height = std::max(0, std::min(cap.height, 4320));

      cap.maxFPS = c.mFrameRate.Get(aPrefs.mFPS);
      cap.maxFPS = std::max(0, std::min(cap.maxFPS, 480));

      if (cap.width != prefWidth) {
        // Width was affected by constraints.
        // We'll adjust the height too so the aspect ratio is retained.
        cap.height = cap.width * prefHeight / prefWidth;
      } else if (cap.height != prefHeight) {
        // Height was affected by constraints but not width.
        // We'll adjust the width too so the aspect ratio is retained.
        cap.width = cap.height * prefWidth / prefHeight;
      }

      if (candidateSet.Contains(cap, CapabilityComparator())) {
        continue;
      }
      LogCapability("Hardcoded capability", cap, 0);
      candidateSet.AppendElement(CapabilityCandidate(Move(cap)));
    }
  }

  // First, filter capabilities by required constraints (min, max, exact).

  for (size_t i = 0; i < candidateSet.Length();) {
    auto& candidate = candidateSet[i];
    candidate.mDistance =
        GetDistance(candidate.mCapability, aConstraints, aDeviceId, aCalculate);
    LogCapability("Capability", candidate.mCapability, candidate.mDistance);
    if (candidate.mDistance == UINT32_MAX) {
      candidateSet.RemoveElementAt(i);
    } else {
      ++i;
    }
  }

  if (candidateSet.IsEmpty()) {
    LOG(("failed to find capability match from %zu choices",
         candidateSet.Length()));
    return false;
  }

  // Filter further with all advanced constraints (that don't overconstrain).

  for (const auto& cs : aConstraints.mAdvanced) {
    nsTArray<CapabilityCandidate> rejects;
    for (size_t i = 0; i < candidateSet.Length();) {
      if (GetDistance(candidateSet[i].mCapability, cs, aDeviceId, aCalculate) ==
          UINT32_MAX) {
        rejects.AppendElement(candidateSet[i]);
        candidateSet.RemoveElementAt(i);
      } else {
        ++i;
      }
    }
    if (!candidateSet.Length()) {
      candidateSet.AppendElements(Move(rejects));
    }
  }
  MOZ_ASSERT(
      candidateSet.Length(),
      "advanced constraints filtering step can't reduce candidates to zero");

  // Remaining algorithm is up to the UA.

  TrimLessFitCandidates(candidateSet);

  // Any remaining multiples all have the same distance. A common case of this
  // occurs when no ideal is specified. Lean toward defaults.
  uint32_t sameDistance = candidateSet[0].mDistance;
  {
    MediaTrackConstraintSet prefs;
    prefs.mWidth.SetAsLong() = aPrefs.GetWidth();
    prefs.mHeight.SetAsLong() = aPrefs.GetHeight();
    prefs.mFrameRate.SetAsDouble() = aPrefs.mFPS;
    NormalizedConstraintSet normPrefs(prefs, false);

    for (auto& candidate : candidateSet) {
      candidate.mDistance =
          GetDistance(candidate.mCapability, normPrefs, aDeviceId, aCalculate);
    }
    TrimLessFitCandidates(candidateSet);
  }

  // Any remaining multiples all have the same distance, but may vary on
  // format. Some formats are more desirable for certain use like WebRTC.
  // E.g. I420 over RGB24 can remove a needless format conversion.

  bool found = false;
  for (auto& candidate : candidateSet) {
    const webrtc::CaptureCapability& cap = candidate.mCapability;
    if (cap.rawType == webrtc::RawVideoType::kVideoI420 ||
        cap.rawType == webrtc::RawVideoType::kVideoYUY2 ||
        cap.rawType == webrtc::RawVideoType::kVideoYV12) {
      aCapability = cap;
      found = true;
      break;
    }
  }
  if (!found) {
    aCapability = candidateSet[0].mCapability;
  }

  LogCapability("Chosen capability", aCapability, sameDistance);
  return true;
}

void MediaEngineRemoteVideoSource::GetSettings(
    MediaTrackSettings& aOutSettings) const {
  aOutSettings = *mSettings;
}

void MediaEngineRemoteVideoSource::Refresh(int aIndex) {
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  // NOTE: mCaptureIndex might have changed when allocated!
  // Use aIndex to update information, but don't change mCaptureIndex!!
  // Caller looked up this source by uniqueId, so it shouldn't change
  char deviceName[kMaxDeviceNameLength];
  char uniqueId[kMaxUniqueIdLength];

  if (camera::GetChildAndCall(&camera::CamerasChild::GetCaptureDevice,
                              mCapEngine, aIndex, deviceName,
                              sizeof(deviceName), uniqueId, sizeof(uniqueId),
                              nullptr)) {
    return;
  }

  SetName(NS_ConvertUTF8toUTF16(deviceName));
  MOZ_ASSERT(mUniqueId.Equals(uniqueId));
}

}  // namespace mozilla