1 /* This Source Code Form is subject to the terms of the Mozilla Public
2  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
3  * You can obtain one at http://mozilla.org/MPL/2.0/. */
4 
5 #ifndef MEDIAENGINEWEBRTC_H_
6 #define MEDIAENGINEWEBRTC_H_
7 
8 #include "AudioPacketizer.h"
9 #include "AudioSegment.h"
10 #include "CamerasChild.h"
11 #include "cubeb/cubeb.h"
12 #include "CubebUtils.h"
13 #include "DOMMediaStream.h"
14 #include "ipc/IPCMessageUtils.h"
15 #include "MediaEngine.h"
16 #include "MediaEnginePrefs.h"
17 #include "MediaEngineSource.h"
18 #include "MediaEngineWrapper.h"
19 #include "MediaStreamGraph.h"
20 #include "mozilla/dom/File.h"
21 #include "mozilla/dom/MediaStreamTrackBinding.h"
22 #include "mozilla/Mutex.h"
23 #include "mozilla/Mutex.h"
24 #include "mozilla/Sprintf.h"
25 #include "mozilla/StaticMutex.h"
26 #include "mozilla/UniquePtr.h"
27 #include "nsAutoPtr.h"
28 #include "nsComponentManagerUtils.h"
29 #include "nsCOMPtr.h"
30 #include "nsDirectoryServiceDefs.h"
31 #include "nsIThread.h"
32 #include "nsIRunnable.h"
33 #include "nsRefPtrHashtable.h"
34 #include "nsThreadUtils.h"
35 #include "NullTransport.h"
36 #include "prcvar.h"
37 #include "prthread.h"
38 #include "StreamTracks.h"
39 #include "VideoSegment.h"
40 #include "VideoUtils.h"
41 
42 // WebRTC library includes follow
43 // Audio Engine
44 #include "webrtc/voice_engine/include/voe_base.h"
45 #include "webrtc/voice_engine/include/voe_codec.h"
46 #include "webrtc/voice_engine/include/voe_network.h"
47 #include "webrtc/voice_engine/include/voe_audio_processing.h"
48 #include "webrtc/voice_engine/include/voe_volume_control.h"
49 #include "webrtc/voice_engine/include/voe_external_media.h"
50 #include "webrtc/voice_engine/include/voe_audio_processing.h"
51 #include "webrtc/modules/audio_device/include/audio_device.h"
52 #include "webrtc/modules/audio_processing/include/audio_processing.h"
53 // Video Engine
54 // conflicts with #include of scoped_ptr.h
55 #undef FF
56 #include "webrtc/modules/video_capture/video_capture_defines.h"
57 
58 namespace mozilla {
59 
60 class MediaEngineWebRTCMicrophoneSource;
61 
// Audio-capture ("tab audio" / AudioCapture) source. Allocation, tracks and
// lifetime are driven entirely by MediaManager.cpp, so most of the
// MediaEngineSource entry points below are intentionally no-ops.
class MediaEngineWebRTCAudioCaptureSource : public MediaEngineSource {
 public:
  // aUuid is accepted for interface symmetry but unused here; the UUID is
  // produced by GetUUID() in the implementation file.
  explicit MediaEngineWebRTCAudioCaptureSource(const char* aUuid) {}
  nsString GetName() const override;
  nsCString GetUUID() const override;
  nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
                    const MediaEnginePrefs& aPrefs, const nsString& aDeviceId,
                    const ipc::PrincipalInfo& aPrincipalInfo,
                    AllocationHandle** aOutHandle,
                    const char** aOutBadConstraint) override {
    // Nothing to do here, everything is managed in MediaManager.cpp
    *aOutHandle = nullptr;
    return NS_OK;
  }
  nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override {
    // Nothing to do here, everything is managed in MediaManager.cpp
    MOZ_ASSERT(!aHandle);
    return NS_OK;
  }
  nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
                    const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
                    const PrincipalHandle& aPrincipal) override;
  nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
  nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
  nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
                       const dom::MediaTrackConstraints& aConstraints,
                       const MediaEnginePrefs& aPrefs,
                       const nsString& aDeviceId,
                       const char** aOutBadConstraint) override;

  // No-op: data is delivered by the graph, not pulled from this source.
  void Pull(const RefPtr<const AllocationHandle>& aHandle,
            const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
            StreamTime aDesiredTime,
            const PrincipalHandle& aPrincipalHandle) override {}

  dom::MediaSourceEnum GetMediaSource() const override {
    return dom::MediaSourceEnum::AudioCapture;
  }

  // Photo capture is a camera feature; not applicable to audio capture.
  nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override {
    return NS_ERROR_NOT_IMPLEMENTED;
  }

  uint32_t GetBestFitnessDistance(
      const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
      const nsString& aDeviceId) const override;

 protected:
  // Protected: lifetime is managed by refcounting in the base class.
  virtual ~MediaEngineWebRTCAudioCaptureSource() = default;
};
112 
113 // Small subset of VoEHardware
// Small subset of VoEHardware: abstract interface over the platform's audio
// input (recording) devices — enumeration, selection, channel configuration,
// and starting/stopping capture into a SourceMediaStream.
class AudioInput {
 public:
  AudioInput() = default;
  // Threadsafe because it's referenced from a MicrophoneSource, which can
  // have references to it on other threads.
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(AudioInput)

  // All int-returning methods follow the VoE convention: 0 on success,
  // non-zero on failure.
  virtual int GetNumOfRecordingDevices(int& aDevices) = 0;
  virtual int GetRecordingDeviceName(int aIndex, char (&aStrNameUTF8)[128],
                                     char aStrGuidUTF8[128]) = 0;
  virtual int GetRecordingDeviceStatus(bool& aIsAvailable) = 0;
  virtual void GetChannelCount(uint32_t& aChannels) = 0;
  virtual int GetMaxAvailableChannels(uint32_t& aChannels) = 0;
  virtual void StartRecording(SourceMediaStream* aStream,
                              AudioDataListener* aListener) = 0;
  virtual void StopRecording(SourceMediaStream* aStream) = 0;
  virtual int SetRecordingDevice(int aIndex) = 0;
  virtual void SetUserChannelCount(uint32_t aChannels) = 0;

 protected:
  // Protected destructor, to discourage deletion outside of Release():
  virtual ~AudioInput() = default;
};
137 
138 class AudioInputCubeb final : public AudioInput {
139  public:
140   explicit AudioInputCubeb(int aIndex = 0)
AudioInput()141       : AudioInput(), mSelectedDevice(aIndex), mInUseCount(0) {
142     if (!mDeviceIndexes) {
143       mDeviceIndexes = new nsTArray<int>;
144       mDeviceNames = new nsTArray<nsCString>;
145       mDefaultDevice = -1;
146     }
147   }
148 
CleanupGlobalData()149   static void CleanupGlobalData() {
150     cubeb_device_collection_destroy(CubebUtils::GetCubebContext(), &mDevices);
151     delete mDeviceIndexes;
152     mDeviceIndexes = nullptr;
153     delete mDeviceNames;
154     mDeviceNames = nullptr;
155   }
156 
GetNumOfRecordingDevices(int & aDevices)157   int GetNumOfRecordingDevices(int& aDevices) {
158 #ifdef MOZ_WIDGET_ANDROID
159     // OpenSL ES does not support enumerate device.
160     aDevices = 1;
161 #else
162     UpdateDeviceList();
163     aDevices = mDeviceIndexes->Length();
164 #endif
165     return 0;
166   }
167 
DeviceIndex(int aIndex)168   static int32_t DeviceIndex(int aIndex) {
169     // -1 = system default if any
170     if (aIndex == -1) {
171       if (mDefaultDevice == -1) {
172         aIndex = 0;
173       } else {
174         aIndex = mDefaultDevice;
175       }
176     }
177     MOZ_ASSERT(mDeviceIndexes);
178     if (aIndex < 0 || aIndex >= (int)mDeviceIndexes->Length()) {
179       return -1;
180     }
181     // Note: if the device is gone, this will be -1
182     return (*mDeviceIndexes)[aIndex];  // translate to mDevices index
183   }
184 
Mutex()185   static StaticMutex& Mutex() { return sMutex; }
186 
GetDeviceID(int aDeviceIndex,CubebUtils::AudioDeviceID & aID)187   static bool GetDeviceID(int aDeviceIndex, CubebUtils::AudioDeviceID& aID) {
188     // Assert sMutex is held
189     sMutex.AssertCurrentThreadOwns();
190 #ifdef MOZ_WIDGET_ANDROID
191     aID = nullptr;
192     return true;
193 #else
194     int dev_index = DeviceIndex(aDeviceIndex);
195     if (dev_index != -1) {
196       aID = mDevices.device[dev_index].devid;
197       return true;
198     }
199     return false;
200 #endif
201   }
202 
GetRecordingDeviceName(int aIndex,char (& aStrNameUTF8)[128],char aStrGuidUTF8[128])203   int GetRecordingDeviceName(int aIndex, char (&aStrNameUTF8)[128],
204                              char aStrGuidUTF8[128]) {
205 #ifdef MOZ_WIDGET_ANDROID
206     aStrNameUTF8[0] = '\0';
207     aStrGuidUTF8[0] = '\0';
208 #else
209     int32_t devindex = DeviceIndex(aIndex);
210     if (mDevices.count == 0 || devindex < 0) {
211       return 1;
212     }
213     SprintfLiteral(aStrNameUTF8, "%s%s", aIndex == -1 ? "default: " : "",
214                    mDevices.device[devindex].friendly_name);
215     aStrGuidUTF8[0] = '\0';
216 #endif
217     return 0;
218   }
219 
GetRecordingDeviceStatus(bool & aIsAvailable)220   int GetRecordingDeviceStatus(bool& aIsAvailable) {
221     // With cubeb, we only expose devices of type CUBEB_DEVICE_TYPE_INPUT,
222     // so unless it was removed, say it's available
223     aIsAvailable = true;
224     return 0;
225   }
226 
GetChannelCount(uint32_t & aChannels)227   void GetChannelCount(uint32_t& aChannels) {
228     GetUserChannelCount(mSelectedDevice, aChannels);
229   }
230 
GetUserChannelCount(int aDeviceIndex,uint32_t & aChannels)231   static void GetUserChannelCount(int aDeviceIndex, uint32_t& aChannels) {
232     aChannels = sUserChannelCount;
233   }
234 
GetMaxAvailableChannels(uint32_t & aChannels)235   int GetMaxAvailableChannels(uint32_t& aChannels) {
236     return GetDeviceMaxChannels(mSelectedDevice, aChannels);
237   }
238 
GetDeviceMaxChannels(int aDeviceIndex,uint32_t & aChannels)239   static int GetDeviceMaxChannels(int aDeviceIndex, uint32_t& aChannels) {
240 #ifdef MOZ_WIDGET_ANDROID
241     aChannels = 1;
242 #else
243     int32_t devindex = DeviceIndex(aDeviceIndex);
244     if (mDevices.count == 0 || devindex < 0) {
245       return 1;
246     }
247     aChannels = mDevices.device[devindex].max_channels;
248 #endif
249     return 0;
250   }
251 
SetUserChannelCount(uint32_t aChannels)252   void SetUserChannelCount(uint32_t aChannels) {
253     if (GetDeviceMaxChannels(mSelectedDevice, sUserChannelCount)) {
254       sUserChannelCount = 1;  // error capture mono
255       return;
256     }
257 
258     if (aChannels && aChannels < sUserChannelCount) {
259       sUserChannelCount = aChannels;
260     }
261   }
262 
StartRecording(SourceMediaStream * aStream,AudioDataListener * aListener)263   void StartRecording(SourceMediaStream* aStream,
264                       AudioDataListener* aListener) {
265 #ifdef MOZ_WIDGET_ANDROID
266     // OpenSL ES does not support enumerating devices.
267     MOZ_ASSERT(mDevices.count == 0);
268 #else
269     MOZ_ASSERT(mDevices.count > 0);
270 #endif
271 
272     mAnyInUse = true;
273     mInUseCount++;
274     // Always tell the stream we're using it for input
275     aStream->OpenAudioInput(mSelectedDevice, aListener);
276   }
277 
StopRecording(SourceMediaStream * aStream)278   void StopRecording(SourceMediaStream* aStream) {
279     aStream->CloseAudioInput();
280     if (--mInUseCount == 0) {
281       mAnyInUse = false;
282     }
283   }
284 
SetRecordingDevice(int aIndex)285   int SetRecordingDevice(int aIndex) {
286     mSelectedDevice = aIndex;
287     return 0;
288   }
289 
290  protected:
~AudioInputCubeb()291   ~AudioInputCubeb() { MOZ_RELEASE_ASSERT(mInUseCount == 0); }
292 
293  private:
294   // It would be better to watch for device-change notifications
295   void UpdateDeviceList();
296 
297   // We have an array, which consists of indexes to the current mDevices
298   // list.  This is updated on mDevices updates.  Many devices in mDevices
299   // won't be included in the array (wrong type, etc), or if a device is
300   // removed it will map to -1 (and opens of this device will need to check
301   // for this - and be careful of threading access.  The mappings need to
302   // updated on each re-enumeration.
303   int mSelectedDevice;
304   uint32_t mInUseCount;
305 
306   // pointers to avoid static constructors
307   static nsTArray<int>* mDeviceIndexes;
308   static int mDefaultDevice;  // -1 == not set
309   static nsTArray<nsCString>* mDeviceNames;
310   static cubeb_device_collection mDevices;
311   static bool mAnyInUse;
312   static StaticMutex sMutex;
313   static uint32_t sUserChannelCount;
314 };
315 
// Forwards audio-data callbacks from the MediaStreamGraph to a
// MediaEngineWebRTCMicrophoneSource. mMutex guards access across the graph
// thread and the thread calling Shutdown().
class WebRTCAudioDataListener : public AudioDataListener {
 protected:
  // Protected destructor, to discourage deletion outside of Release():
  virtual ~WebRTCAudioDataListener() {}

 public:
  explicit WebRTCAudioDataListener(
      MediaEngineWebRTCMicrophoneSource* aAudioSource)
      : mMutex("WebRTCAudioDataListener::mMutex"), mAudioSource(aAudioSource) {}

  // AudioDataListenerInterface methods
  void NotifyOutputData(MediaStreamGraph* aGraph, AudioDataValue* aBuffer,
                        size_t aFrames, TrackRate aRate,
                        uint32_t aChannels) override;

  void NotifyInputData(MediaStreamGraph* aGraph, const AudioDataValue* aBuffer,
                       size_t aFrames, TrackRate aRate,
                       uint32_t aChannels) override;

  void DeviceChanged() override;

  // Severs the link to the source; NOTE(review): presumably clears
  // mAudioSource under mMutex — confirm against the implementation file.
  void Shutdown();

 private:
  Mutex mMutex;
  RefPtr<MediaEngineWebRTCMicrophoneSource> mAudioSource;
};
343 
// Microphone source backed by an AudioInput device, with optional webrtc.org
// audio processing (AEC/AGC/NS). One instance can serve multiple concurrent
// allocations (RequiresSharing() returns true); per-allocation state lives in
// the nested Allocation struct.
class MediaEngineWebRTCMicrophoneSource : public MediaEngineSource,
                                          public AudioDataListenerInterface {
 public:
  MediaEngineWebRTCMicrophoneSource(mozilla::AudioInput* aAudioInput,
                                    int aIndex, const char* name,
                                    const char* uuid, bool aDelayAgnostic,
                                    bool aExtendedFilter);

  // A single physical microphone is shared across all allocations.
  bool RequiresSharing() const override { return true; }

  nsString GetName() const override;
  nsCString GetUUID() const override;

  nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
                    const MediaEnginePrefs& aPrefs, const nsString& aDeviceId,
                    const ipc::PrincipalInfo& aPrincipalInfo,
                    AllocationHandle** aOutHandle,
                    const char** aOutBadConstraint) override;
  nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
  nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
                    const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
                    const PrincipalHandle& aPrincipal) override;
  nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
  nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
  nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
                       const dom::MediaTrackConstraints& aConstraints,
                       const MediaEnginePrefs& aPrefs,
                       const nsString& aDeviceId,
                       const char** aOutBadConstraint) override;

  /**
   * Assigns the current settings of the capture to aOutSettings.
   * Main thread only.
   */
  void GetSettings(dom::MediaTrackSettings& aOutSettings) const override;

  void Pull(const RefPtr<const AllocationHandle>& aHandle,
            const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
            StreamTime aDesiredTime,
            const PrincipalHandle& aPrincipalHandle) override;

  // AudioDataListenerInterface methods
  void NotifyOutputData(MediaStreamGraph* aGraph, AudioDataValue* aBuffer,
                        size_t aFrames, TrackRate aRate,
                        uint32_t aChannels) override;
  void NotifyInputData(MediaStreamGraph* aGraph, const AudioDataValue* aBuffer,
                       size_t aFrames, TrackRate aRate,
                       uint32_t aChannels) override;

  void DeviceChanged() override;

  dom::MediaSourceEnum GetMediaSource() const override {
    return dom::MediaSourceEnum::Microphone;
  }

  // Photo capture is a camera feature; not applicable to a microphone.
  nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override {
    return NS_ERROR_NOT_IMPLEMENTED;
  }

  uint32_t GetBestFitnessDistance(
      const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
      const nsString& aDeviceId) const override;

  void Shutdown() override;

 protected:
  ~MediaEngineWebRTCMicrophoneSource() {}

 private:
  /**
   * Representation of data tied to an AllocationHandle rather than to the
   * source.
   */
  struct Allocation {
    Allocation() = delete;
    explicit Allocation(const RefPtr<AllocationHandle>& aHandle);
    ~Allocation();

#ifdef DEBUG
    // The MSGImpl::IterationEnd() of the last time we appended data from an
    // audio callback.
    // Guarded by MediaEngineWebRTCMicrophoneSource::mMutex.
    GraphTime mLastCallbackAppendTime = 0;
#endif
    // Set to false by Start(). Becomes true after the first time we append real
    // audio frames from the audio callback.
    // Guarded by MediaEngineWebRTCMicrophoneSource::mMutex.
    bool mLiveFramesAppended = false;

    // Set to false by Start(). Becomes true after the first time we append
    // silence *after* the first audio callback has appended real frames.
    // Guarded by MediaEngineWebRTCMicrophoneSource::mMutex.
    bool mLiveSilenceAppended = false;

    const RefPtr<AllocationHandle> mHandle;
    RefPtr<SourceMediaStream> mStream;
    TrackID mTrackID = TRACK_NONE;
    PrincipalHandle mPrincipal = PRINCIPAL_HANDLE_NONE;
    bool mEnabled = false;
  };

  /**
   * Used with nsTArray<Allocation>::IndexOf to locate an Allocation by a
   * handle.
   */
  class AllocationHandleComparator {
   public:
    bool Equals(const Allocation& aAllocation,
                const RefPtr<const AllocationHandle>& aHandle) const {
      return aHandle == aAllocation.mHandle;
    }
  };

  /**
   * Reevaluates the aggregated constraints of all allocations and restarts the
   * underlying device if necessary.
   *
   * If the given AllocationHandle was already registered, its constraints will
   * be updated before reevaluation. If not, they will be added before
   * reevaluation.
   */
  nsresult ReevaluateAllocation(const RefPtr<AllocationHandle>& aHandle,
                                const NormalizedConstraints* aConstraintsUpdate,
                                const MediaEnginePrefs& aPrefs,
                                const nsString& aDeviceId,
                                const char** aOutBadConstraint);

  /**
   * Updates the underlying (single) device with the aggregated constraints
   * aNetConstraints. If the chosen settings for the device changes based on
   * these new constraints, and capture is active, the device will be restarted.
   */
  nsresult UpdateSingleSource(const RefPtr<const AllocationHandle>& aHandle,
                              const NormalizedConstraints& aNetConstraints,
                              const MediaEnginePrefs& aPrefs,
                              const nsString& aDeviceId,
                              const char** aOutBadConstraint);

  // Apply the given echo-cancellation / auto-gain / noise-suppression
  // configuration to the audio-processing module, if it differs from the
  // currently applied one.
  void UpdateAECSettingsIfNeeded(bool aEnable, webrtc::EcModes aMode);
  void UpdateAGCSettingsIfNeeded(bool aEnable, webrtc::AgcModes aMode);
  void UpdateNSSettingsIfNeeded(bool aEnable, webrtc::NsModes aMode);

  void ApplySettings(const MediaEnginePrefs& aPrefs,
                     RefPtr<MediaStreamGraphImpl> aGraph);

  // True when at least one allocation has an enabled track.
  bool HasEnabledTrack() const;

  // Appends aFrames frames of audio from aBuffer to every enabled track.
  template <typename T>
  void InsertInGraph(const T* aBuffer, size_t aFrames, uint32_t aChannels);

  // Runs input audio through the packetizer and the webrtc.org processing
  // passes before insertion into the graph.
  void PacketizeAndProcess(MediaStreamGraph* aGraph,
                           const AudioDataValue* aBuffer, size_t aFrames,
                           TrackRate aRate, uint32_t aChannels);

  // This is true when all processing is disabled, we can skip
  // packetization, resampling and other processing passes.
  // Graph thread only.
  bool PassThrough() const;

  // Graph thread only.
  void SetPassThrough(bool aPassThrough);

  // Owning thread only.
  RefPtr<WebRTCAudioDataListener> mListener;

  // Note: shared across all microphone sources. Owning thread only.
  static int sChannelsOpen;

  const RefPtr<mozilla::AudioInput> mAudioInput;
  const UniquePtr<webrtc::AudioProcessing> mAudioProcessing;

  // accessed from the GraphDriver thread except for deletion.
  nsAutoPtr<AudioPacketizer<AudioDataValue, float>> mPacketizerInput;
  nsAutoPtr<AudioPacketizer<AudioDataValue, float>> mPacketizerOutput;

  // mMutex protects some of our members off the owning thread.
  Mutex mMutex;

  // We append an allocation in Allocate() and remove it in Deallocate().
  // Both the array and the Allocation members are modified under mMutex on
  // the owning thread. Accessed under one of the two.
  nsTArray<Allocation> mAllocations;

  // Current state of the shared resource for this source.
  // Set under mMutex on the owning thread. Accessed under one of the two
  MediaEngineSourceState mState = kReleased;

  // Device index of this microphone, as handed to the constructor.
  int mCapIndex;
  // Audio-processing construction flags, as handed to the constructor.
  bool mDelayAgnostic;
  bool mExtendedFilter;
  bool mStarted;

  const nsString mDeviceName;
  const nsCString mDeviceUUID;

  // The current settings for the underlying device.
  // Member access is main thread only after construction.
  const nsMainThreadPtrHandle<media::Refcountable<dom::MediaTrackSettings>>
      mSettings;

  // Frame counters, presumably for rate-limited logging — see the
  // implementation file for how mLastLogFrames is advanced.
  uint64_t mTotalFrames;
  uint64_t mLastLogFrames;

  // mSkipProcessing is true if none of the processing passes are enabled,
  // because of prefs or constraints. This allows simply copying the audio into
  // the MSG, skipping resampling and the whole webrtc.org code.
  // This is read and written to only on the MSG thread.
  bool mSkipProcessing;

  // To only update microphone when needed, we keep track of the prefs
  // representing the currently applied settings for this source. This is the
  // net result of the prefs across all allocations.
  // Owning thread only.
  MediaEnginePrefs mNetPrefs;

  // Stores the mixed audio output for the reverse-stream of the AEC.
  AlignedFloatBuffer mOutputBuffer;

  // Scratch buffers for converting/deinterleaving/downmixing input audio.
  AlignedFloatBuffer mInputBuffer;
  AlignedFloatBuffer mDeinterleavedBuffer;
  AlignedFloatBuffer mInputDownmixBuffer;
};
566 
567 class MediaEngineWebRTC : public MediaEngine {
568   typedef MediaEngine Super;
569 
570  public:
571   explicit MediaEngineWebRTC(MediaEnginePrefs& aPrefs);
572 
573   virtual void SetFakeDeviceChangeEvents() override;
574 
575   // Clients should ensure to clean-up sources video/audio sources
576   // before invoking Shutdown on this class.
577   void Shutdown() override;
578 
579   // Returns whether the host supports duplex audio stream.
580   bool SupportsDuplex();
581 
582   void EnumerateDevices(uint64_t aWindowId, dom::MediaSourceEnum,
583                         nsTArray<RefPtr<MediaEngineSource>>*) override;
584   void ReleaseResourcesForWindow(uint64_t aWindowId) override;
585 
586  private:
587   ~MediaEngineWebRTC() = default;
588 
589   nsCOMPtr<nsIThread> mThread;
590 
591   // gUM runnables can e.g. Enumerate from multiple threads
592   Mutex mMutex;
593   RefPtr<mozilla::AudioInput> mAudioInput;
594   bool mFullDuplex;
595   bool mDelayAgnostic;
596   bool mExtendedFilter;
597   bool mHasTabVideoSource;
598 
599   // Maps WindowID to a map of device uuid to their MediaEngineSource,
600   // separately for audio and video.
601   nsClassHashtable<nsUint64HashKey,
602                    nsRefPtrHashtable<nsStringHashKey, MediaEngineSource>>
603       mVideoSources;
604   nsClassHashtable<nsUint64HashKey,
605                    nsRefPtrHashtable<nsStringHashKey, MediaEngineSource>>
606       mAudioSources;
607 };
608 
609 }  // namespace mozilla
610 
#endif /* MEDIAENGINEWEBRTC_H_ */
612