// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "third_party/blink/renderer/modules/mediastream/user_media_processor.h"

#include <stddef.h>

#include <algorithm>
#include <utility>
#include <vector>

#include "base/location.h"
#include "base/logging.h"
#include "base/single_thread_task_runner.h"
#include "base/stl_util.h"
#include "base/strings/stringprintf.h"
#include "media/base/audio_parameters.h"
#include "media/capture/video_capture_types.h"
#include "third_party/blink/public/common/browser_interface_broker_proxy.h"
#include "third_party/blink/public/common/mediastream/media_stream_controls.h"
#include "third_party/blink/public/common/mediastream/media_stream_request.h"
#include "third_party/blink/public/common/widget/screen_info.h"
#include "third_party/blink/public/platform/modules/mediastream/web_media_stream_source.h"
#include "third_party/blink/public/platform/modules/mediastream/web_media_stream_track.h"
#include "third_party/blink/public/platform/modules/webrtc/webrtc_logging.h"
#include "third_party/blink/public/platform/web_string.h"
#include "third_party/blink/public/platform/web_vector.h"
#include "third_party/blink/public/web/modules/mediastream/web_media_stream_device_observer.h"
#include "third_party/blink/public/web/web_local_frame.h"
#include "third_party/blink/public/web/web_local_frame_client.h"
#include "third_party/blink/renderer/core/frame/local_dom_window.h"
#include "third_party/blink/renderer/core/frame/local_frame.h"
#include "third_party/blink/renderer/core/page/chrome_client.h"
#include "third_party/blink/renderer/modules/mediastream/local_media_stream_audio_source.h"
#include "third_party/blink/renderer/modules/mediastream/media_stream_audio_processor.h"
#include "third_party/blink/renderer/modules/mediastream/media_stream_constraints_util.h"
#include "third_party/blink/renderer/modules/mediastream/media_stream_constraints_util_audio.h"
#include "third_party/blink/renderer/modules/mediastream/media_stream_constraints_util_video_content.h"
#include "third_party/blink/renderer/modules/mediastream/media_stream_constraints_util_video_device.h"
#include "third_party/blink/renderer/modules/mediastream/media_stream_video_capturer_source.h"
#include "third_party/blink/renderer/modules/mediastream/media_stream_video_track.h"
#include "third_party/blink/renderer/modules/mediastream/processed_local_audio_source.h"
#include "third_party/blink/renderer/modules/mediastream/user_media_client.h"
#include "third_party/blink/renderer/platform/mediastream/media_constraints.h"
#include "third_party/blink/renderer/platform/mediastream/media_stream_audio_source.h"
#include "third_party/blink/renderer/platform/mediastream/media_stream_component.h"
#include "third_party/blink/renderer/platform/mediastream/media_stream_descriptor.h"
#include "third_party/blink/renderer/platform/mediastream/webrtc_uma_histograms.h"
#include "third_party/blink/renderer/platform/runtime_enabled_features.h"
#include "third_party/blink/renderer/platform/scheduler/public/post_cross_thread_task.h"
#include "third_party/blink/renderer/platform/video_capture/local_video_capturer_source.h"
#include "third_party/blink/renderer/platform/wtf/cross_thread_functional.h"
#include "third_party/blink/renderer/platform/wtf/text/string_builder.h"
#include "third_party/blink/renderer/platform/wtf/wtf_size_t.h"
#include "ui/gfx/geometry/size.h"

namespace blink {

using blink::mojom::MediaStreamRequestResult;
using blink::mojom::MediaStreamType;
using blink::mojom::StreamSelectionStrategy;
using EchoCancellationType =
    blink::AudioProcessingProperties::EchoCancellationType;

namespace {

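// Returns a human-readable name for |value|, used in WebRTC log messages.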
const char* MediaStreamRequestResultToString(MediaStreamRequestResult value) {
  switch (value) {
    case MediaStreamRequestResult::OK:
      return "OK";
    case MediaStreamRequestResult::PERMISSION_DENIED:
      return "PERMISSION_DENIED";
    case MediaStreamRequestResult::PERMISSION_DISMISSED:
      return "PERMISSION_DISMISSED";
    case MediaStreamRequestResult::INVALID_STATE:
      return "INVALID_STATE";
    case MediaStreamRequestResult::NO_HARDWARE:
      return "NO_HARDWARE";
    case MediaStreamRequestResult::INVALID_SECURITY_ORIGIN:
      return "INVALID_SECURITY_ORIGIN";
    case MediaStreamRequestResult::TAB_CAPTURE_FAILURE:
      return "TAB_CAPTURE_FAILURE";
    case MediaStreamRequestResult::SCREEN_CAPTURE_FAILURE:
      return "SCREEN_CAPTURE_FAILURE";
    case MediaStreamRequestResult::CAPTURE_FAILURE:
      return "CAPTURE_FAILURE";
    case MediaStreamRequestResult::CONSTRAINT_NOT_SATISFIED:
      return "CONSTRAINT_NOT_SATISFIED";
    case MediaStreamRequestResult::TRACK_START_FAILURE_AUDIO:
      return "TRACK_START_FAILURE_AUDIO";
    case MediaStreamRequestResult::TRACK_START_FAILURE_VIDEO:
      return "TRACK_START_FAILURE_VIDEO";
    case MediaStreamRequestResult::NOT_SUPPORTED:
      return "NOT_SUPPORTED";
    case MediaStreamRequestResult::FAILED_DUE_TO_SHUTDOWN:
      return "FAILED_DUE_TO_SHUTDOWN";
    case MediaStreamRequestResult::KILL_SWITCH_ON:
      return "KILL_SWITCH_ON";
    case MediaStreamRequestResult::SYSTEM_PERMISSION_DENIED:
      return "SYSTEM_PERMISSION_DENIED";
    case MediaStreamRequestResult::NUM_MEDIA_REQUEST_RESULTS:
      return "NUM_MEDIA_REQUEST_RESULTS";
    default:
      NOTREACHED();
  }
  return "INVALID";
}

void SendLogMessage(const std::string& message) {
  blink::WebRtcLogMessage("UMP::" + message);
}

std::string GetTrackLogString(MediaStreamComponent* component,
                              bool is_pending) {
  String str = String::Format(
      "StartAudioTrack({track=[id: %s, enabled: %d, muted: %d]}, "
      "{is_pending=%d})",
      component->Id().Utf8().c_str(), component->Enabled(), component->Muted(),
      is_pending);
  return str.Utf8();
}

std::string GetTrackSourceLogString(blink::MediaStreamAudioSource* source) {
  const MediaStreamDevice& device = source->device();
  StringBuilder builder;
  builder.AppendFormat("StartAudioTrack(source: {session_id=%s}, ",
                       device.session_id().ToString().c_str());
  builder.AppendFormat("{is_local_source=%d}, ", source->is_local_source());
  builder.AppendFormat("{device=[id: %s", device.id.c_str());
  if (device.group_id.has_value()) {
    builder.AppendFormat(", group_id: %s", device.group_id.value().c_str());
  }
  builder.AppendFormat(", name: %s", device.name.c_str());
  builder.Append(String("]})"));
  return builder.ToString().Utf8();
}

std::string GetOnTrackStartedLogString(
    blink::WebPlatformMediaStreamSource* source,
    MediaStreamRequestResult result) {
  const MediaStreamDevice& device = source->device();
  String str = String::Format("OnTrackStarted({session_id=%s}, {result=%s})",
                              device.session_id().ToString().c_str(),
                              MediaStreamRequestResultToString(result));
  return str.Utf8();
}

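// Fills in the audio |track_controls| for |user_media_request|. Display-media
// requests always map to DISPLAY_AUDIO_CAPTURE; getUserMedia requests map the
// mediaStreamSource constraint ("tab", "desktop"/"system") to the matching
// tab/desktop capture type, and default to DEVICE_AUDIO_CAPTURE otherwise.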
void InitializeAudioTrackControls(UserMediaRequest* user_media_request,
                                  TrackControls* track_controls) {
  if (user_media_request->MediaRequestType() ==
          UserMediaRequest::MediaType::kDisplayMedia ||
      user_media_request->MediaRequestType() ==
          UserMediaRequest::MediaType::kGetCurrentBrowsingContextMedia) {
    track_controls->requested = true;
    track_controls->stream_type = MediaStreamType::DISPLAY_AUDIO_CAPTURE;
    return;
  }

  DCHECK_EQ(UserMediaRequest::MediaType::kUserMedia,
            user_media_request->MediaRequestType());
  const MediaConstraints& constraints = user_media_request->AudioConstraints();
  DCHECK(!constraints.IsNull());
  track_controls->requested = true;

  MediaStreamType* stream_type = &track_controls->stream_type;
  *stream_type = MediaStreamType::NO_SERVICE;

  String source_constraint =
      constraints.Basic().media_stream_source.Exact().IsEmpty()
          ? String()
          : String(constraints.Basic().media_stream_source.Exact()[0]);
  if (!source_constraint.IsEmpty()) {
    if (source_constraint == blink::kMediaStreamSourceTab) {
      *stream_type = MediaStreamType::GUM_TAB_AUDIO_CAPTURE;
    } else if (source_constraint == blink::kMediaStreamSourceDesktop ||
               source_constraint == blink::kMediaStreamSourceSystem) {
      *stream_type = MediaStreamType::GUM_DESKTOP_AUDIO_CAPTURE;
    }
  } else {
    *stream_type = MediaStreamType::DEVICE_AUDIO_CAPTURE;
  }
}

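// Fills in the video |track_controls| for |user_media_request|, mirroring
// InitializeAudioTrackControls(): display-media requests map to the display
// capture types, and getUserMedia requests map the mediaStreamSource
// constraint to tab/desktop capture or default to DEVICE_VIDEO_CAPTURE.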
void InitializeVideoTrackControls(UserMediaRequest* user_media_request,
                                  TrackControls* track_controls) {
  if (user_media_request->MediaRequestType() ==
      UserMediaRequest::MediaType::kDisplayMedia) {
    track_controls->requested = true;
    track_controls->stream_type = MediaStreamType::DISPLAY_VIDEO_CAPTURE;
    return;
  } else if (user_media_request->MediaRequestType() ==
             UserMediaRequest::MediaType::kGetCurrentBrowsingContextMedia) {
    track_controls->requested = true;
    track_controls->stream_type =
        MediaStreamType::DISPLAY_VIDEO_CAPTURE_THIS_TAB;
    return;
  }

  DCHECK_EQ(UserMediaRequest::MediaType::kUserMedia,
            user_media_request->MediaRequestType());
  const MediaConstraints& constraints = user_media_request->VideoConstraints();
  DCHECK(!constraints.IsNull());
  track_controls->requested = true;

  MediaStreamType* stream_type = &track_controls->stream_type;
  *stream_type = MediaStreamType::NO_SERVICE;

  String source_constraint =
      constraints.Basic().media_stream_source.Exact().IsEmpty()
          ? String()
          : String(constraints.Basic().media_stream_source.Exact()[0]);
  if (!source_constraint.IsEmpty()) {
    if (source_constraint == blink::kMediaStreamSourceTab) {
      *stream_type = MediaStreamType::GUM_TAB_VIDEO_CAPTURE;
    } else if (source_constraint == blink::kMediaStreamSourceDesktop ||
               source_constraint == blink::kMediaStreamSourceScreen) {
      *stream_type = MediaStreamType::GUM_DESKTOP_VIDEO_CAPTURE;
    }
  } else {
    *stream_type = MediaStreamType::DEVICE_VIDEO_CAPTURE;
  }
}

bool IsSameDevice(const MediaStreamDevice& device,
                  const MediaStreamDevice& other_device) {
  return device.id == other_device.id && device.type == other_device.type &&
         device.session_id() == other_device.session_id();
}

bool IsSameSource(MediaStreamSource* source, MediaStreamSource* other_source) {
  WebPlatformMediaStreamSource* const source_extra_data =
      source->GetPlatformSource();
  const MediaStreamDevice& device = source_extra_data->device();

  WebPlatformMediaStreamSource* const other_source_extra_data =
      other_source->GetPlatformSource();
  const MediaStreamDevice& other_device = other_source_extra_data->device();

  return IsSameDevice(device, other_device);
}

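// Copies the effective audio-processing settings (echo cancellation mode,
// auto gain control and noise suppression) from the platform audio source
// onto |source|, so that they are visible on the MediaStreamSource.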
void SurfaceAudioProcessingSettings(MediaStreamSource* source) {
  auto* source_impl =
      static_cast<blink::MediaStreamAudioSource*>(source->GetPlatformSource());

  // If the source is a processed source, get the properties from it.
  if (auto* processed_source =
          blink::ProcessedLocalAudioSource::From(source_impl)) {
    blink::AudioProcessingProperties properties =
        processed_source->audio_processing_properties();
    MediaStreamSource::EchoCancellationMode echo_cancellation_mode;

    switch (properties.echo_cancellation_type) {
      case EchoCancellationType::kEchoCancellationDisabled:
        echo_cancellation_mode =
            MediaStreamSource::EchoCancellationMode::kDisabled;
        break;
      case EchoCancellationType::kEchoCancellationAec3:
        echo_cancellation_mode =
            MediaStreamSource::EchoCancellationMode::kBrowser;
        break;
      case EchoCancellationType::kEchoCancellationSystem:
        echo_cancellation_mode =
            MediaStreamSource::EchoCancellationMode::kSystem;
        break;
    }

    source->SetAudioProcessingProperties(echo_cancellation_mode,
                                         properties.goog_auto_gain_control,
                                         properties.goog_noise_suppression);
  } else {
    // If the source is not a processed source, it could still support system
    // echo cancellation. Surface that if it does.
    media::AudioParameters params = source_impl->GetAudioParameters();
    const MediaStreamSource::EchoCancellationMode echo_cancellation_mode =
        params.IsValid() &&
                (params.effects() & media::AudioParameters::ECHO_CANCELLER)
            ? MediaStreamSource::EchoCancellationMode::kSystem
            : MediaStreamSource::EchoCancellationMode::kDisabled;

    source->SetAudioProcessingProperties(echo_cancellation_mode, false, false);
  }
}

// TODO(crbug.com/704136): Check all places where this helper is used.
// Change their types from using std::vector to WTF::Vector, so this
// extra conversion round is not needed.
template <typename T>
std::vector<T> ToStdVector(const Vector<T>& format_vector) {
  std::vector<T> formats;
  std::copy(format_vector.begin(), format_vector.end(),
            std::back_inserter(formats));
  return formats;
}

Vector<blink::VideoInputDeviceCapabilities> ToVideoInputDeviceCapabilities(
    const Vector<blink::mojom::blink::VideoInputDeviceCapabilitiesPtr>&
        input_capabilities) {
  Vector<blink::VideoInputDeviceCapabilities> capabilities;
  for (const auto& capability : input_capabilities) {
    capabilities.emplace_back(capability->device_id, capability->group_id,
                              capability->control_support, capability->formats,
                              capability->facing_mode);
  }

  return capabilities;
}

}  // namespace

// Class for storing the state of processing getUserMedia requests.
class UserMediaProcessor::RequestInfo final
    : public GarbageCollected<UserMediaProcessor::RequestInfo> {
 public:
  using ResourcesReady =
      base::OnceCallback<void(RequestInfo* request_info,
                              MediaStreamRequestResult result,
                              const String& result_name)>;
  enum class State {
    NOT_SENT_FOR_GENERATION,
    SENT_FOR_GENERATION,
    GENERATED,
  };

  explicit RequestInfo(UserMediaRequest* request);

  void StartAudioTrack(MediaStreamComponent* component, bool is_pending);
  MediaStreamComponent* CreateAndStartVideoTrack(MediaStreamSource* source);

  // Triggers |callback| once every source used in this request has either
  // successfully started or failed to start.
  void CallbackOnTracksStarted(ResourcesReady callback);

  // Called when a local audio source has finished (or failed) initializing.
  void OnAudioSourceStarted(blink::WebPlatformMediaStreamSource* source,
                            MediaStreamRequestResult result,
                            const String& result_name);

  UserMediaRequest* request() { return request_; }
  int request_id() const { return request_->request_id(); }

  State state() const { return state_; }
  void set_state(State state) { state_ = state; }

  const blink::AudioCaptureSettings& audio_capture_settings() const {
    return audio_capture_settings_;
  }
  void SetAudioCaptureSettings(const blink::AudioCaptureSettings& settings,
                               bool is_content_capture) {
    DCHECK(settings.HasValue());
    is_audio_content_capture_ = is_content_capture;
    audio_capture_settings_ = settings;
  }
  const blink::VideoCaptureSettings& video_capture_settings() const {
    return video_capture_settings_;
  }
  bool is_video_content_capture() const {
    return video_capture_settings_.HasValue() && is_video_content_capture_;
  }
  bool is_video_device_capture() const {
    return video_capture_settings_.HasValue() && !is_video_content_capture_;
  }
  void SetVideoCaptureSettings(const blink::VideoCaptureSettings& settings,
                               bool is_content_capture) {
    DCHECK(settings.HasValue());
    is_video_content_capture_ = is_content_capture;
    video_capture_settings_ = settings;
  }

  void SetDevices(Vector<MediaStreamDevice> audio_devices,
                  Vector<MediaStreamDevice> video_devices) {
    audio_devices_ = std::move(audio_devices);
    video_devices_ = std::move(video_devices);
  }

  void AddNativeVideoFormats(const String& device_id,
                             Vector<media::VideoCaptureFormat> formats) {
    video_formats_map_.insert(device_id, std::move(formats));
  }

  // Do not store or delete the returned pointer.
  Vector<media::VideoCaptureFormat>* GetNativeVideoFormats(
      const String& device_id) {
    auto it = video_formats_map_.find(device_id);
    CHECK(it != video_formats_map_.end());
    return &it->value;
  }

  void InitializeWebStream(const String& label,
                           const MediaStreamComponentVector& audios,
                           const MediaStreamComponentVector& videos) {
    descriptor_ =
        MakeGarbageCollected<MediaStreamDescriptor>(label, audios, videos);
  }

  const Vector<MediaStreamDevice>& audio_devices() const {
    return audio_devices_;
  }
  const Vector<MediaStreamDevice>& video_devices() const {
    return video_devices_;
  }

  bool CanStartTracks() const {
    return video_formats_map_.size() == video_devices_.size();
  }

  MediaStreamDescriptor* descriptor() {
    DCHECK(descriptor_);
    return descriptor_;
  }

  StreamControls* stream_controls() { return &stream_controls_; }

  bool is_processing_user_gesture() const {
    return request_->has_transient_user_activation();
  }

  bool pan_tilt_zoom_allowed() const { return pan_tilt_zoom_allowed_; }
  void set_pan_tilt_zoom_allowed(bool pan_tilt_zoom_allowed) {
    pan_tilt_zoom_allowed_ = pan_tilt_zoom_allowed;
  }

  void Trace(Visitor* visitor) const {
    visitor->Trace(request_);
    visitor->Trace(descriptor_);
    visitor->Trace(sources_);
  }

 private:
  void OnTrackStarted(blink::WebPlatformMediaStreamSource* source,
                      MediaStreamRequestResult result,
                      const blink::WebString& result_name);
  // Checks if the sources for all tracks have been started and, if so,
  // invokes |ready_callback_|. Note that the caller should expect that
  // |this| might be deleted when the function returns.
  void CheckAllTracksStarted();

  Member<UserMediaRequest> request_;
  State state_ = State::NOT_SENT_FOR_GENERATION;
  blink::AudioCaptureSettings audio_capture_settings_;
  bool is_audio_content_capture_ = false;
  blink::VideoCaptureSettings video_capture_settings_;
  bool is_video_content_capture_ = false;
  Member<MediaStreamDescriptor> descriptor_;
  StreamControls stream_controls_;
  ResourcesReady ready_callback_;
  MediaStreamRequestResult request_result_ = MediaStreamRequestResult::OK;
  String request_result_name_;
  // Sources used in this request.
  HeapVector<Member<MediaStreamSource>> sources_;
  Vector<blink::WebPlatformMediaStreamSource*> sources_waiting_for_callback_;
  HashMap<String, Vector<media::VideoCaptureFormat>> video_formats_map_;
  Vector<MediaStreamDevice> audio_devices_;
  Vector<MediaStreamDevice> video_devices_;
  bool pan_tilt_zoom_allowed_ = false;
};

// TODO(guidou): Initialize request_result_name_ as a null WTF::String.
// https://crbug.com/764293
UserMediaProcessor::RequestInfo::RequestInfo(UserMediaRequest* request)
    : request_(request), request_result_name_("") {}

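// Connects the native audio source to |component| and registers the source as
// waiting for a start callback. If the source is not still pending, the
// outcome of ConnectToTrack() is reported immediately via OnTrackStarted().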
void UserMediaProcessor::RequestInfo::StartAudioTrack(
    MediaStreamComponent* component,
    bool is_pending) {
  DCHECK(component->Source()->GetType() == MediaStreamSource::kTypeAudio);
  DCHECK(request()->Audio());
#if DCHECK_IS_ON()
  DCHECK(audio_capture_settings_.HasValue());
#endif
  SendLogMessage(GetTrackLogString(component, is_pending));
  auto* native_source = MediaStreamAudioSource::From(component->Source());
  SendLogMessage(GetTrackSourceLogString(native_source));
  // Add the source as pending since OnTrackStarted will expect it to be there.
  sources_waiting_for_callback_.push_back(native_source);

  sources_.push_back(component->Source());
  bool connected = native_source->ConnectToTrack(component);
  if (!is_pending) {
    OnTrackStarted(native_source,
                   connected
                       ? MediaStreamRequestResult::OK
                       : MediaStreamRequestResult::TRACK_START_FAILURE_AUDIO,
                   "");
  }
}

MediaStreamComponent* UserMediaProcessor::RequestInfo::CreateAndStartVideoTrack(
    MediaStreamSource* source) {
  DCHECK(source->GetType() == MediaStreamSource::kTypeVideo);
  DCHECK(request()->Video());
  DCHECK(video_capture_settings_.HasValue());
  SendLogMessage(base::StringPrintf(
      "UMP::RI::CreateAndStartVideoTrack({request_id=%d})", request_id()));

  MediaStreamVideoSource* native_source =
      MediaStreamVideoSource::GetVideoSource(source);
  DCHECK(native_source);
  sources_.push_back(source);
  sources_waiting_for_callback_.push_back(native_source);
  return MediaStreamVideoTrack::CreateVideoTrack(
      native_source, video_capture_settings_.track_adapter_settings(),
      video_capture_settings_.noise_reduction(), is_video_content_capture_,
      video_capture_settings_.min_frame_rate(), video_capture_settings_.pan(),
      video_capture_settings_.tilt(), video_capture_settings_.zoom(),
      pan_tilt_zoom_allowed(),
      WTF::Bind(&UserMediaProcessor::RequestInfo::OnTrackStarted,
                WrapWeakPersistent(this)),
      true);
}

void UserMediaProcessor::RequestInfo::CallbackOnTracksStarted(
    ResourcesReady callback) {
  DCHECK(ready_callback_.is_null());
  ready_callback_ = std::move(callback);
  CheckAllTracksStarted();
}

void UserMediaProcessor::RequestInfo::OnTrackStarted(
    blink::WebPlatformMediaStreamSource* source,
    MediaStreamRequestResult result,
    const blink::WebString& result_name) {
  SendLogMessage(GetOnTrackStartedLogString(source, result));
  auto** it = std::find(sources_waiting_for_callback_.begin(),
                        sources_waiting_for_callback_.end(), source);
  DCHECK(it != sources_waiting_for_callback_.end());
  sources_waiting_for_callback_.erase(it);
  // All tracks must be started successfully. Otherwise the request is a
  // failure.
  if (result != MediaStreamRequestResult::OK) {
    request_result_ = result;
    request_result_name_ = result_name;
  }

  CheckAllTracksStarted();
}

void UserMediaProcessor::RequestInfo::CheckAllTracksStarted() {
  if (ready_callback_ && sources_waiting_for_callback_.IsEmpty()) {
    std::move(ready_callback_).Run(this, request_result_, request_result_name_);
    // NOTE: |this| might now be deleted.
  }
}

void UserMediaProcessor::RequestInfo::OnAudioSourceStarted(
    blink::WebPlatformMediaStreamSource* source,
    MediaStreamRequestResult result,
    const String& result_name) {
  // Check if we're waiting to be notified of this source.  If not, then we'll
  // ignore the notification.
  if (base::Contains(sources_waiting_for_callback_, source))
    OnTrackStarted(source, result, result_name);
}

UserMediaProcessor::UserMediaProcessor(
    LocalFrame* frame,
    MediaDevicesDispatcherCallback media_devices_dispatcher_cb,
    scoped_refptr<base::SingleThreadTaskRunner> task_runner)
    : dispatcher_host_(frame->DomWindow()),
      media_devices_dispatcher_cb_(std::move(media_devices_dispatcher_cb)),
      frame_(frame),
      task_runner_(std::move(task_runner)) {}

UserMediaProcessor::~UserMediaProcessor() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);

  // Ensure StopAllProcessing() has been called by UserMediaClient.
  DCHECK(!current_request_info_ && !request_completed_cb_ &&
         !local_sources_.size());
}

UserMediaRequest* UserMediaProcessor::CurrentRequest() {
  return current_request_info_ ? current_request_info_->request() : nullptr;
}

void UserMediaProcessor::ProcessRequest(UserMediaRequest* request,
                                        base::OnceClosure callback) {
  DCHECK(!request_completed_cb_);
  DCHECK(!current_request_info_);
  request_completed_cb_ = std::move(callback);
  current_request_info_ = MakeGarbageCollected<RequestInfo>(request);
  SendLogMessage(
      base::StringPrintf("ProcessRequest({request_id=%d}, {audio=%d}, "
                         "{video=%d})",
                         current_request_info_->request_id(),
                         current_request_info_->request()->Audio(),
                         current_request_info_->request()->Video()));
  // TODO(guidou): Set up audio and video in parallel.
  if (current_request_info_->request()->Audio()) {
    SetupAudioInput();
    return;
  }
  SetupVideoInput();
}

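// Audio setup: initializes the audio track controls and selects the audio
// capture settings. For device capture this first queries the audio input
// capabilities from the browser; for display and content capture a default
// capability is used instead.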
void UserMediaProcessor::SetupAudioInput() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  DCHECK(current_request_info_);
  DCHECK(current_request_info_->request()->Audio());
  SendLogMessage(
      base::StringPrintf("SetupAudioInput({request_id=%d}, {constraints=%s})",
                         current_request_info_->request_id(),
                         current_request_info_->request()
                             ->AudioConstraints()
                             .ToString()
                             .Utf8()
                             .c_str()));

  auto& audio_controls = current_request_info_->stream_controls()->audio;
  InitializeAudioTrackControls(current_request_info_->request(),
                               &audio_controls);

  if (audio_controls.stream_type == MediaStreamType::DISPLAY_AUDIO_CAPTURE) {
    SelectAudioSettings(current_request_info_->request(),
                        {blink::AudioDeviceCaptureCapability()});
    return;
  }

  if (blink::IsDeviceMediaType(audio_controls.stream_type)) {
    SendLogMessage(
        base::StringPrintf("SetupAudioInput({request_id=%d}) => "
                           "(Requesting device capabilities)",
                           current_request_info_->request_id()));
    GetMediaDevicesDispatcher()->GetAudioInputCapabilities(
        WTF::Bind(&UserMediaProcessor::SelectAudioDeviceSettings,
                  WrapWeakPersistent(this),
                  WrapPersistent(current_request_info_->request())));
  } else {
    if (!blink::IsAudioInputMediaType(audio_controls.stream_type)) {
      String failed_constraint_name =
          String(current_request_info_->request()
                     ->AudioConstraints()
                     .Basic()
                     .media_stream_source.GetName());
      MediaStreamRequestResult result =
          MediaStreamRequestResult::CONSTRAINT_NOT_SATISFIED;
      GetUserMediaRequestFailed(result, failed_constraint_name);
      return;
    }
    SelectAudioSettings(current_request_info_->request(),
                        {blink::AudioDeviceCaptureCapability()});
  }
}

void UserMediaProcessor::SelectAudioDeviceSettings(
    UserMediaRequest* user_media_request,
    Vector<blink::mojom::blink::AudioInputDeviceCapabilitiesPtr>
        audio_input_capabilities) {
  blink::AudioDeviceCaptureCapabilities capabilities;
  for (const auto& device : audio_input_capabilities) {
    // Find the first occurrence of blink::ProcessedLocalAudioSource that
    // matches the same device ID as |device|. If more than one exists, any
    // such source will contain the same non-reconfigurable settings that limit
    // the associated capabilities.
    blink::MediaStreamAudioSource* audio_source = nullptr;
    auto* it = std::find_if(local_sources_.begin(), local_sources_.end(),
                            [&device](MediaStreamSource* source) {
                              DCHECK(source);
                              MediaStreamAudioSource* platform_source =
                                  MediaStreamAudioSource::From(source);
                              ProcessedLocalAudioSource* processed_source =
                                  ProcessedLocalAudioSource::From(
                                      platform_source);
                              return processed_source &&
                                     source->Id() == device->device_id;
                            });
    if (it != local_sources_.end()) {
      WebPlatformMediaStreamSource* const source = (*it)->GetPlatformSource();
      if (source->device().type == MediaStreamType::DEVICE_AUDIO_CAPTURE)
        audio_source = static_cast<MediaStreamAudioSource*>(source);
    }
    if (audio_source) {
      capabilities.emplace_back(audio_source);
    } else {
      capabilities.emplace_back(device->device_id, device->group_id,
                                device->parameters);
    }
  }

  SelectAudioSettings(user_media_request, capabilities);
}

void UserMediaProcessor::SelectAudioSettings(
    UserMediaRequest* user_media_request,
    const blink::AudioDeviceCaptureCapabilities& capabilities) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  // The frame might reload or |user_media_request| might be cancelled while
  // capabilities are queried. Do nothing if a different request is being
  // processed at this point.
  if (!IsCurrentRequestInfo(user_media_request))
    return;

  DCHECK(current_request_info_->stream_controls()->audio.requested);
  SendLogMessage(base::StringPrintf("SelectAudioSettings({request_id=%d})",
                                    current_request_info_->request_id()));
  auto settings = SelectSettingsAudioCapture(
      capabilities, user_media_request->AudioConstraints(),
      user_media_request->ShouldDisableHardwareNoiseSuppression(),
      true /* is_reconfiguration_allowed */);
  if (!settings.HasValue()) {
    String failed_constraint_name = String(settings.failed_constraint_name());
    MediaStreamRequestResult result =
        failed_constraint_name.IsEmpty()
            ? MediaStreamRequestResult::NO_HARDWARE
            : MediaStreamRequestResult::CONSTRAINT_NOT_SATISFIED;
    GetUserMediaRequestFailed(result, failed_constraint_name);
    return;
  }
  if (current_request_info_->stream_controls()->audio.stream_type !=
      MediaStreamType::DISPLAY_AUDIO_CAPTURE) {
    current_request_info_->stream_controls()->audio.device_id =
        settings.device_id();
    current_request_info_->stream_controls()->disable_local_echo =
        settings.disable_local_echo();
  }
  current_request_info_->SetAudioCaptureSettings(
      settings,
      !blink::IsDeviceMediaType(
          current_request_info_->stream_controls()->audio.stream_type));

  // No further audio setup required. Continue with video.
  SetupVideoInput();
}

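// Returns the session ID of an already-open local audio source that captures
// from the selected device and has the same reconfigurable audio-processing
// settings as the current request, or base::nullopt if no such source exists.
// Callers pass the ID to GenerateStreamForCurrentRequestInfo() together with
// StreamSelectionStrategy::SEARCH_BY_SESSION_ID.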
base::Optional<base::UnguessableToken>
UserMediaProcessor::DetermineExistingAudioSessionId() {
  DCHECK(current_request_info_->request()->Audio());

  auto settings = current_request_info_->audio_capture_settings();
  auto device_id = settings.device_id();

  // Collect the local MediaStreamSource objects that capture audio from the
  // device selected for this request, matched by device ID.
  HeapVector<Member<MediaStreamSource>> matching_sources;
  for (const auto& source : local_sources_) {
    MediaStreamSource* source_copy = source;
    if (source_copy->GetType() == MediaStreamSource::kTypeAudio &&
        source_copy->Id().Utf8() == device_id) {
      matching_sources.push_back(source_copy);
    }
  }

  // Return the session ID of a matching source whose reconfigurable settings
  // are the same as the ones previously selected, if such a source exists.
  if (!matching_sources.IsEmpty()) {
    for (auto& matching_source : matching_sources) {
      auto* audio_source = static_cast<MediaStreamAudioSource*>(
          matching_source->GetPlatformSource());
      if (audio_source->HasSameReconfigurableSettings(
              settings.audio_processing_properties())) {
        return audio_source->device().session_id();
      }
    }
  }

  return base::nullopt;
}

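// Video setup: if the request has no video, the stream is generated right
// away (reusing an existing audio session when one matches). Otherwise the
// video track controls are initialized and either device capabilities are
// queried or content-capture settings are selected directly.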
void UserMediaProcessor::SetupVideoInput() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  DCHECK(current_request_info_);

  if (!current_request_info_->request()->Video()) {
    base::Optional<base::UnguessableToken> audio_session_id =
        DetermineExistingAudioSessionId();
    GenerateStreamForCurrentRequestInfo(
        audio_session_id, audio_session_id.has_value()
                              ? StreamSelectionStrategy::SEARCH_BY_SESSION_ID
                              : StreamSelectionStrategy::FORCE_NEW_STREAM);
    return;
  }
  SendLogMessage(
      base::StringPrintf("SetupVideoInput. request_id=%d, video constraints=%s",
                         current_request_info_->request_id(),
                         current_request_info_->request()
                             ->VideoConstraints()
                             .ToString()
                             .Utf8()
                             .c_str()));

  auto& video_controls = current_request_info_->stream_controls()->video;
  InitializeVideoTrackControls(current_request_info_->request(),
                               &video_controls);

  current_request_info_->stream_controls()->request_pan_tilt_zoom_permission =
      IsPanTiltZoomPermissionRequested(
          current_request_info_->request()->VideoConstraints());

  if (blink::IsDeviceMediaType(video_controls.stream_type)) {
    GetMediaDevicesDispatcher()->GetVideoInputCapabilities(
        WTF::Bind(&UserMediaProcessor::SelectVideoDeviceSettings,
                  WrapWeakPersistent(this),
                  WrapPersistent(current_request_info_->request())));
  } else {
    if (!blink::IsVideoInputMediaType(video_controls.stream_type)) {
      String failed_constraint_name =
          String(current_request_info_->request()
                     ->VideoConstraints()
                     .Basic()
                     .media_stream_source.GetName());
      MediaStreamRequestResult result =
          MediaStreamRequestResult::CONSTRAINT_NOT_SATISFIED;
      GetUserMediaRequestFailed(result, failed_constraint_name);
      return;
    }
    SelectVideoContentSettings();
  }
}

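// Returns true if any basic or advanced constraint set mentions pan, tilt or
// zoom (and the MediaCapturePanTilt feature is enabled), in which case the
// stream controls ask the browser to request pan-tilt-zoom permission.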
// static
bool UserMediaProcessor::IsPanTiltZoomPermissionRequested(
    const MediaConstraints& constraints) {
  if (!RuntimeEnabledFeatures::MediaCapturePanTiltEnabled())
    return false;

  if (constraints.Basic().pan.IsPresent() ||
      constraints.Basic().tilt.IsPresent() ||
      constraints.Basic().zoom.IsPresent()) {
    return true;
  }

  for (const auto& advanced_set : constraints.Advanced()) {
    if (advanced_set.pan.IsPresent() || advanced_set.tilt.IsPresent() ||
        advanced_set.zoom.IsPresent()) {
      return true;
    }
  }

  return false;
}

void UserMediaProcessor::SelectVideoDeviceSettings(
    UserMediaRequest* user_media_request,
    Vector<blink::mojom::blink::VideoInputDeviceCapabilitiesPtr>
        video_input_capabilities) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  // The frame might reload or |user_media_request| might be cancelled while
  // capabilities are queried. Do nothing if a different request is being
  // processed at this point.
  if (!IsCurrentRequestInfo(user_media_request))
    return;

  DCHECK(current_request_info_->stream_controls()->video.requested);
  DCHECK(blink::IsDeviceMediaType(
      current_request_info_->stream_controls()->video.stream_type));
  SendLogMessage(base::StringPrintf("SelectVideoDeviceSettings. request_id=%d.",
                                    current_request_info_->request_id()));

  blink::VideoDeviceCaptureCapabilities capabilities;
  capabilities.device_capabilities =
      ToVideoInputDeviceCapabilities(video_input_capabilities);
  capabilities.noise_reduction_capabilities = {base::Optional<bool>(),
                                               base::Optional<bool>(true),
                                               base::Optional<bool>(false)};
  blink::VideoCaptureSettings settings = SelectSettingsVideoDeviceCapture(
      std::move(capabilities), user_media_request->VideoConstraints(),
      blink::MediaStreamVideoSource::kDefaultWidth,
      blink::MediaStreamVideoSource::kDefaultHeight,
      blink::MediaStreamVideoSource::kDefaultFrameRate);
  if (!settings.HasValue()) {
    String failed_constraint_name = String(settings.failed_constraint_name());
    MediaStreamRequestResult result =
        failed_constraint_name.IsEmpty()
            ? MediaStreamRequestResult::NO_HARDWARE
            : MediaStreamRequestResult::CONSTRAINT_NOT_SATISFIED;
    GetUserMediaRequestFailed(result, failed_constraint_name);
    return;
  }
  current_request_info_->stream_controls()->video.device_id =
      settings.device_id();
  current_request_info_->SetVideoCaptureSettings(
      settings, false /* is_content_capture */);

  if (current_request_info_->request()->Audio()) {
    base::Optional<base::UnguessableToken> audio_session_id =
        DetermineExistingAudioSessionId();
    GenerateStreamForCurrentRequestInfo(
        audio_session_id, audio_session_id.has_value()
                              ? StreamSelectionStrategy::SEARCH_BY_SESSION_ID
                              : StreamSelectionStrategy::FORCE_NEW_STREAM);
  } else {
    GenerateStreamForCurrentRequestInfo();
  }
}

void UserMediaProcessor::SelectVideoContentSettings() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  DCHECK(current_request_info_);
  SendLogMessage(
      base::StringPrintf("SelectVideoContentSettings. request_id=%d.",
                         current_request_info_->request_id()));
  gfx::Size screen_size = GetScreenSize();
  blink::VideoCaptureSettings settings =
      blink::SelectSettingsVideoContentCapture(
          current_request_info_->request()->VideoConstraints(),
          current_request_info_->stream_controls()->video.stream_type,
          screen_size.width(), screen_size.height());
  if (!settings.HasValue()) {
    String failed_constraint_name = String(settings.failed_constraint_name());
    DCHECK(!failed_constraint_name.IsEmpty());
    GetUserMediaRequestFailed(
        MediaStreamRequestResult::CONSTRAINT_NOT_SATISFIED,
        failed_constraint_name);
    return;
  }

  const MediaStreamType stream_type =
      current_request_info_->stream_controls()->video.stream_type;
  if (stream_type != MediaStreamType::DISPLAY_VIDEO_CAPTURE &&
      stream_type != MediaStreamType::DISPLAY_VIDEO_CAPTURE_THIS_TAB) {
    current_request_info_->stream_controls()->video.device_id =
        settings.device_id();
  }

  current_request_info_->SetVideoCaptureSettings(settings,
                                                 true /* is_content_capture */);
  GenerateStreamForCurrentRequestInfo();
}

void UserMediaProcessor::GenerateStreamForCurrentRequestInfo(
    base::Optional<base::UnguessableToken> requested_audio_capture_session_id,
    blink::mojom::StreamSelectionStrategy strategy) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  DCHECK(current_request_info_);
  SendLogMessage(base::StringPrintf(
      "GenerateStreamForCurrentRequestInfo({request_id=%d}, "
      "{audio.device_id=%s}, {video.device_id=%s})",
      current_request_info_->request_id(),
      current_request_info_->stream_controls()->audio.device_id.c_str(),
      current_request_info_->stream_controls()->video.device_id.c_str()));
  current_request_info_->set_state(RequestInfo::State::SENT_FOR_GENERATION);

  // The browser replies to this request by invoking OnStreamGenerated().
  GetMediaStreamDispatcherHost()->GenerateStream(
      current_request_info_->request_id(),
      *current_request_info_->stream_controls(),
      current_request_info_->is_processing_user_gesture(),
      blink::mojom::blink::StreamSelectionInfo::New(
          strategy, requested_audio_capture_session_id),
      WTF::Bind(&UserMediaProcessor::OnStreamGenerated,
                WrapWeakPersistent(this), current_request_info_->request_id()));
}

WebMediaStreamDeviceObserver*
UserMediaProcessor::GetMediaStreamDeviceObserver() {
  auto* media_stream_device_observer =
      media_stream_device_observer_for_testing_;
  if (frame_) {  // Can be null for tests.
    auto* web_frame = static_cast<WebLocalFrame*>(WebFrame::FromFrame(frame_));
    if (!web_frame || !web_frame->Client())
      return nullptr;

    // TODO(704136): Move ownership of |WebMediaStreamDeviceObserver| out of
    // RenderFrameImpl, back to UserMediaClient.
    media_stream_device_observer =
        web_frame->Client()->MediaStreamDeviceObserver();
    DCHECK(media_stream_device_observer);
  }

  return media_stream_device_observer;
}

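// Browser reply to GenerateStream(). On success the generated devices are
// recorded and tracks are started: immediately for audio-only and content
// capture, or after the supported formats of each video capture device have
// been fetched.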
void UserMediaProcessor::OnStreamGenerated(
    int request_id,
    MediaStreamRequestResult result,
    const String& label,
    const Vector<MediaStreamDevice>& audio_devices,
    const Vector<MediaStreamDevice>& video_devices,
    bool pan_tilt_zoom_allowed) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);

  if (result != MediaStreamRequestResult::OK) {
    OnStreamGenerationFailed(request_id, result);
    return;
  }

  if (!IsCurrentRequestInfo(request_id)) {
    // This can happen if the request is canceled or the frame reloads while
    // MediaStreamDispatcherHost is processing the request.
    SendLogMessage(base::StringPrintf(
        "OnStreamGenerated([request_id=%d]) => (ERROR: invalid request ID)",
        request_id));
    OnStreamGeneratedForCancelledRequest(audio_devices, video_devices);
    return;
  }

  current_request_info_->set_state(RequestInfo::State::GENERATED);
  current_request_info_->set_pan_tilt_zoom_allowed(pan_tilt_zoom_allowed);

  for (const auto* devices : {&audio_devices, &video_devices}) {
    for (const auto& device : *devices) {
      SendLogMessage(base::StringPrintf(
          "OnStreamGenerated({request_id=%d}, {label=%s}, {device=[id: %s, "
          "name: "
          "%s]})",
          request_id, label.Utf8().c_str(), device.id.c_str(),
          device.name.c_str()));
    }
  }

  current_request_info_->SetDevices(audio_devices, video_devices);

  if (video_devices.IsEmpty()) {
    StartTracks(label);
    return;
  }

  if (current_request_info_->is_video_content_capture()) {
    media::VideoCaptureFormat format =
        current_request_info_->video_capture_settings().Format();
    for (const auto& video_device : video_devices) {
      String video_device_id(video_device.id.data());
      current_request_info_->AddNativeVideoFormats(
          video_device_id,
          {media::VideoCaptureFormat(GetScreenSize(), format.frame_rate,
                                     format.pixel_format)});
    }
    StartTracks(label);
    return;
  }

  for (const auto& video_device : video_devices) {
    SendLogMessage(base::StringPrintf(
        "OnStreamGenerated({request_id=%d}, {label=%s}, {device=[id: %s, "
        "name: %s]}) => (Requesting video device formats)",
        request_id, label.Utf8().c_str(), video_device.id.c_str(),
        video_device.name.c_str()));
    String video_device_id(video_device.id.data());
    GetMediaDevicesDispatcher()->GetAllVideoInputDeviceFormats(
        video_device_id,
        WTF::Bind(&UserMediaProcessor::GotAllVideoInputFormatsForDevice,
                  WrapWeakPersistent(this),
                  WrapPersistent(current_request_info_->request()), label,
                  video_device_id));
  }
}

void UserMediaProcessor::GotAllVideoInputFormatsForDevice(
    UserMediaRequest* user_media_request,
    const String& label,
    const String& device_id,
    const Vector<media::VideoCaptureFormat>& formats) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  // The frame might reload or |user_media_request| might be cancelled while
  // video formats are queried. Do nothing if a different request is being
  // processed at this point.
  if (!IsCurrentRequestInfo(user_media_request))
    return;

  SendLogMessage(
      base::StringPrintf("GotAllVideoInputFormatsForDevice({request_id=%d}, "
                         "{label=%s}, {device=[id: %s]})",
                         current_request_info_->request_id(),
                         label.Utf8().c_str(), device_id.Utf8().c_str()));
  current_request_info_->AddNativeVideoFormats(device_id, formats);
  if (current_request_info_->CanStartTracks())
    StartTracks(label);
}

gfx::Size UserMediaProcessor::GetScreenSize() {
  gfx::Size screen_size(blink::kDefaultScreenCastWidth,
                        blink::kDefaultScreenCastHeight);
  if (frame_) {  // Can be null in tests.
    blink::ScreenInfo info = frame_->GetChromeClient().GetScreenInfo(*frame_);
    screen_size = info.rect.size();
  }
  return screen_size;
}

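// Called when a stream was generated for a request that no longer exists
// (e.g. cancelled or the frame reloaded). Stops every generated device that
// is not already backing a local source.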
void UserMediaProcessor::OnStreamGeneratedForCancelledRequest(
    const Vector<MediaStreamDevice>& audio_devices,
    const Vector<MediaStreamDevice>& video_devices) {
  SendLogMessage("OnStreamGeneratedForCancelledRequest()");
  // Only stop the device if the device is not used in another MediaStream.
  for (auto* it = audio_devices.begin(); it != audio_devices.end(); ++it) {
    if (!FindLocalSource(*it)) {
      String id(it->id.data());
      GetMediaStreamDispatcherHost()->StopStreamDevice(
          id, it->serializable_session_id());
    }
  }

  for (auto* it = video_devices.begin(); it != video_devices.end(); ++it) {
    if (!FindLocalSource(*it)) {
      String id(it->id.data());
      GetMediaStreamDispatcherHost()->StopStreamDevice(
          id, it->serializable_session_id());
    }
  }
}

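// Invoked on the audio thread when an audio source has started (or failed to
// start); trampolines the notification to the main thread, where
// OnAudioSourceStarted() runs.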
// static
void UserMediaProcessor::OnAudioSourceStartedOnAudioThread(
    scoped_refptr<base::SingleThreadTaskRunner> task_runner,
    UserMediaProcessor* weak_ptr,
    blink::WebPlatformMediaStreamSource* source,
    MediaStreamRequestResult result,
    const blink::WebString& result_name) {
  PostCrossThreadTask(
      *task_runner.get(), FROM_HERE,
      CrossThreadBindOnce(&UserMediaProcessor::OnAudioSourceStarted,
                          WrapCrossThreadWeakPersistent(weak_ptr),
                          CrossThreadUnretained(source), result,
                          String(result_name)));
}

void UserMediaProcessor::OnAudioSourceStarted(
    blink::WebPlatformMediaStreamSource* source,
    MediaStreamRequestResult result,
    const String& result_name) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);

  for (auto* it = pending_local_sources_.begin();
       it != pending_local_sources_.end(); ++it) {
    blink::WebPlatformMediaStreamSource* const source_extra_data =
        (*it)->GetPlatformSource();
    if (source_extra_data != source)
      continue;
    if (result == MediaStreamRequestResult::OK)
      local_sources_.push_back((*it));
    pending_local_sources_.erase(it);

    NotifyCurrentRequestInfoOfAudioSourceStarted(source, result, result_name);
    return;
  }
}

void UserMediaProcessor::NotifyCurrentRequestInfoOfAudioSourceStarted(
    blink::WebPlatformMediaStreamSource* source,
    MediaStreamRequestResult result,
    const String& result_name) {
  // The only request possibly being processed is |current_request_info_|.
  if (current_request_info_)
    current_request_info_->OnAudioSourceStarted(source, result, result_name);
}

void UserMediaProcessor::OnStreamGenerationFailed(
    int request_id,
    MediaStreamRequestResult result) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  if (!IsCurrentRequestInfo(request_id)) {
    // This can happen if the request is canceled or the frame reloads while
    // MediaStreamDispatcherHost is processing the request.
    return;
  }
  SendLogMessage(base::StringPrintf("OnStreamGenerationFailed({request_id=%d})",
                                    current_request_info_->request_id()));

  GetUserMediaRequestFailed(result);
  DeleteUserMediaRequest(current_request_info_->request());
}

void UserMediaProcessor::OnDeviceStopped(const MediaStreamDevice& device) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  SendLogMessage(base::StringPrintf(
      "OnDeviceStopped({session_id=%s}, {device_id=%s})",
      device.session_id().ToString().c_str(), device.id.c_str()));

  MediaStreamSource* source = FindLocalSource(device);
  if (!source) {
    // This happens if the same device is used in several guM requests or
    // if a user happens to stop a track from JS at the same time
    // as the underlying media device is unplugged from the system.
    return;
  }

  StopLocalSource(source, false);
  RemoveLocalSource(source);
}

void UserMediaProcessor::OnDeviceChanged(const MediaStreamDevice& old_device,
                                         const MediaStreamDevice& new_device) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  // TODO(https://crbug.com/1017219): possibly useful in native logs as well.
  DVLOG(1) << "UserMediaProcessor::OnDeviceChange("
           << "{old_device_id = " << old_device.id
           << ", session id = " << old_device.session_id()
           << ", type = " << old_device.type << "}"
           << "{new_device_id = " << new_device.id
           << ", session id = " << new_device.session_id()
           << ", type = " << new_device.type << "})";

  MediaStreamSource* source = FindLocalSource(old_device);
  if (!source) {
    // This happens if the same device is used in several guM requests or
    // if a user happens to stop a track from JS at the same time
    // as the underlying media device is unplugged from the system.
    DVLOG(1) << "failed to find existing source with device " << old_device.id;
    return;
  }

  if (old_device.type != MediaStreamType::NO_SERVICE &&
      new_device.type == MediaStreamType::NO_SERVICE) {
1196     // At present, this will only happen to the case that a new desktop capture
1197     // source without audio share is selected, then the previous audio capture
1198     // device should be stopped if existing.
1199     DCHECK(blink::IsAudioInputMediaType(old_device.type));
1200     OnDeviceStopped(old_device);
1201     return;
1202   }
1203 
1204   WebPlatformMediaStreamSource* const source_impl = source->GetPlatformSource();
1205   source_impl->ChangeSource(new_device);
1206 }
1207 
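// Registered with the WebMediaStreamDeviceObserver in StartTracks(); marks
// the local source as muted while paused and stops or restarts the underlying
// video source when the browser signals a PAUSE or PLAY state change.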
void UserMediaProcessor::OnDeviceRequestStateChange(
    const MediaStreamDevice& device,
    const mojom::blink::MediaStreamStateChange new_state) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  SendLogMessage(base::StringPrintf(
      "OnDeviceRequestStateChange({session_id=%s}, {device_id=%s}, "
      "{new_state=%s})",
      device.session_id().ToString().c_str(), device.id.c_str(),
      (new_state == mojom::blink::MediaStreamStateChange::PAUSE ? "PAUSE"
                                                                : "PLAY")));

  MediaStreamSource* source = FindLocalSource(device);
  if (!source) {
    // This happens if the same device is used in several guM requests or
    // if a user happens to stop a track from JS at the same time
    // as the underlying media device is unplugged from the system.
    return;
  }

  WebPlatformMediaStreamSource* const source_impl = source->GetPlatformSource();
  source_impl->SetSourceMuted(new_state ==
                              mojom::blink::MediaStreamStateChange::PAUSE);
  MediaStreamVideoSource* video_source =
      static_cast<blink::MediaStreamVideoSource*>(source_impl);
  if (!video_source) {
    return;
  }
  if (new_state == mojom::blink::MediaStreamStateChange::PAUSE) {
    if (video_source->IsRunning()) {
      video_source->StopForRestart(base::DoNothing(),
                                   /*send_black_frame=*/true);
    }
  } else if (new_state == mojom::blink::MediaStreamStateChange::PLAY) {
    if (video_source->IsStoppedForRestart()) {
      video_source->Restart(*video_source->GetCurrentFormat(),
                            base::DoNothing());
    }
  } else {
    NOTREACHED();
  }
}

void UserMediaProcessor::Trace(Visitor* visitor) const {
  visitor->Trace(dispatcher_host_);
  visitor->Trace(frame_);
  visitor->Trace(current_request_info_);
  visitor->Trace(local_sources_);
  visitor->Trace(pending_local_sources_);
}

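// Returns the MediaStreamSource for |device|, creating a new video capturer
// source (with capabilities computed from the device's known native formats)
// and registering it in |local_sources_| if no platform source exists yet.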
MediaStreamSource* UserMediaProcessor::InitializeVideoSourceObject(
    const MediaStreamDevice& device) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  DCHECK(current_request_info_);
  SendLogMessage(base::StringPrintf(
      "UMP::InitializeVideoSourceObject({request_id=%d}, {device=[id: %s, "
      "name: %s]})",
      current_request_info_->request_id(), device.id.c_str(),
      device.name.c_str()));
  MediaStreamSource* source = FindOrInitializeSourceObject(device);
  if (!source->GetPlatformSource()) {
    auto video_source = CreateVideoSource(
        device, WTF::Bind(&UserMediaProcessor::OnLocalSourceStopped,
                          WrapWeakPersistent(this)));
    source->SetPlatformSource(std::move(video_source));

    String device_id(device.id.data());
    source->SetCapabilities(ComputeCapabilitiesForVideoSource(
        // TODO(crbug.com/704136): Change ComputeCapabilitiesForVideoSource to
        // operate over WTF::Vector.
        String::FromUTF8(device.id),
        ToStdVector(*current_request_info_->GetNativeVideoFormats(device_id)),
        device.video_facing, current_request_info_->is_video_device_capture(),
        device.group_id));
    local_sources_.push_back(source);
  }
  return source;
}

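// Returns the MediaStreamSource for the audio |device|. A source that is
// still starting is returned from |pending_local_sources_| with *is_pending
// set to true; an already-initialized source is returned with *is_pending set
// to false. Otherwise a new audio source is created, its capabilities are
// populated, and it is parked in |pending_local_sources_| until
// OnAudioSourceStarted() is called.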
MediaStreamSource* UserMediaProcessor::InitializeAudioSourceObject(
    const MediaStreamDevice& device,
    bool* is_pending) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  DCHECK(current_request_info_);
  SendLogMessage(
      base::StringPrintf("InitializeAudioSourceObject({session_id=%s})",
                         device.session_id().ToString().c_str()));

  *is_pending = true;

  // See if the source is already being initialized.
  auto* pending = FindPendingLocalSource(device);
  if (pending)
    return pending;

  MediaStreamSource* source = FindOrInitializeSourceObject(device);
  if (source->GetPlatformSource()) {
    // The only return point for non-pending sources.
    *is_pending = false;
    return source;
  }

  // While sources are being initialized, keep them in a separate array. Once
  // they've finished initializing, they'll be moved over to local_sources_.
  // See OnAudioSourceStarted for more details.
  pending_local_sources_.push_back(source);

  blink::WebPlatformMediaStreamSource::ConstraintsRepeatingCallback
      source_ready = ConvertToBaseRepeatingCallback(CrossThreadBindRepeating(
          &UserMediaProcessor::OnAudioSourceStartedOnAudioThread, task_runner_,
          WrapCrossThreadWeakPersistent(this)));

  std::unique_ptr<blink::MediaStreamAudioSource> audio_source =
      CreateAudioSource(device, std::move(source_ready));
  audio_source->SetStopCallback(WTF::Bind(
      &UserMediaProcessor::OnLocalSourceStopped, WrapWeakPersistent(this)));

#if DCHECK_IS_ON()
  for (auto local_source : local_sources_) {
    auto* platform_source = static_cast<WebPlatformMediaStreamSource*>(
        local_source->GetPlatformSource());
    DCHECK(platform_source);
    if (platform_source->device().id == audio_source->device().id) {
      auto* audio_platform_source =
          static_cast<MediaStreamAudioSource*>(platform_source);
      auto* processed_existing_source =
          ProcessedLocalAudioSource::From(audio_platform_source);
      auto* processed_new_source =
          ProcessedLocalAudioSource::From(audio_source.get());
      if (processed_new_source && processed_existing_source) {
        DCHECK(audio_source->HasSameNonReconfigurableSettings(
            audio_platform_source));
      }
    }
  }
#endif  // DCHECK_IS_ON()

  MediaStreamSource::Capabilities capabilities;
  capabilities.echo_cancellation = {true, false};
  capabilities.echo_cancellation_type.ReserveCapacity(3);
  capabilities.echo_cancellation_type.emplace_back(
      String::FromUTF8(kEchoCancellationTypeBrowser));
  capabilities.echo_cancellation_type.emplace_back(
      String::FromUTF8(kEchoCancellationTypeAec3));
  if (device.input.effects() &
      (media::AudioParameters::ECHO_CANCELLER |
       media::AudioParameters::EXPERIMENTAL_ECHO_CANCELLER)) {
    capabilities.echo_cancellation_type.emplace_back(
        String::FromUTF8(kEchoCancellationTypeSystem));
  }
  capabilities.auto_gain_control = {true, false};
  capabilities.noise_suppression = {true, false};
  capabilities.sample_size = {
      media::SampleFormatToBitsPerChannel(media::kSampleFormatS16),  // min
      media::SampleFormatToBitsPerChannel(media::kSampleFormatS16)   // max
  };
  auto device_parameters = audio_source->device().input;
  if (device_parameters.IsValid()) {
    capabilities.channel_count = {1, device_parameters.channels()};
    capabilities.sample_rate = {std::min(blink::kAudioProcessingSampleRate,
                                         device_parameters.sample_rate()),
                                std::max(blink::kAudioProcessingSampleRate,
                                         device_parameters.sample_rate())};
    double fallback_latency =
        static_cast<double>(blink::kFallbackAudioLatencyMs) / 1000;
    double min_latency, max_latency;
    std::tie(min_latency, max_latency) =
        blink::GetMinMaxLatenciesForAudioParameters(device_parameters);
    capabilities.latency = {std::min(fallback_latency, min_latency),
                            std::max(fallback_latency, max_latency)};
  }

  capabilities.device_id = blink::WebString::FromUTF8(device.id);
  if (device.group_id)
    capabilities.group_id = blink::WebString::FromUTF8(*device.group_id);

  source->SetPlatformSource(std::move(audio_source));
  source->SetCapabilities(capabilities);
  return source;
}

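// Creates the platform audio source for |device|: a direct-path
// LocalMediaStreamAudioSource when the device is used for screen capture or
// no audio processing would modify the audio, and a ProcessedLocalAudioSource
// otherwise.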
std::unique_ptr<blink::MediaStreamAudioSource>
UserMediaProcessor::CreateAudioSource(
    const MediaStreamDevice& device,
    blink::WebPlatformMediaStreamSource::ConstraintsRepeatingCallback
        source_ready) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  DCHECK(current_request_info_);

  StreamControls* stream_controls = current_request_info_->stream_controls();
  // If the audio device is a loopback device (for screen capture), or if the
  // constraints/effects parameters indicate no audio processing is needed,
  // create an efficient, direct-path MediaStreamAudioSource instance.
  blink::AudioProcessingProperties audio_processing_properties =
      current_request_info_->audio_capture_settings()
          .audio_processing_properties();
  if (blink::IsScreenCaptureMediaType(device.type) ||
      !blink::MediaStreamAudioProcessor::WouldModifyAudio(
          audio_processing_properties)) {
    return std::make_unique<blink::LocalMediaStreamAudioSource>(
        frame_, device,
        base::OptionalOrNullptr(current_request_info_->audio_capture_settings()
                                    .requested_buffer_size()),
        stream_controls->disable_local_echo, std::move(source_ready),
        task_runner_);
  }

  // The audio device is not associated with screen capture and also requires
  // processing.
  return std::make_unique<blink::ProcessedLocalAudioSource>(
      frame_, device, stream_controls->disable_local_echo,
      audio_processing_properties, std::move(source_ready), task_runner_);
}

std::unique_ptr<blink::MediaStreamVideoSource>
UserMediaProcessor::CreateVideoSource(
    const MediaStreamDevice& device,
    blink::WebPlatformMediaStreamSource::SourceStoppedCallback stop_callback) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  DCHECK(current_request_info_);
  DCHECK(current_request_info_->video_capture_settings().HasValue());

  return std::make_unique<blink::MediaStreamVideoCapturerSource>(
      frame_, std::move(stop_callback), device,
      current_request_info_->video_capture_settings().capture_params(),
      WTF::BindRepeating(
          &blink::LocalVideoCapturerSource::Create,
          frame_->GetTaskRunner(blink::TaskType::kInternalMedia)));
}

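// Registers the generated stream and its device callbacks with the
// WebMediaStreamDeviceObserver, creates the audio and video tracks for the
// current request, and waits for them to start before completing the request
// in OnCreateNativeTracksCompleted().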
void UserMediaProcessor::StartTracks(const String& label) {
  DCHECK(current_request_info_->request());
  SendLogMessage(base::StringPrintf("StartTracks({request_id=%d}, {label=%s})",
                                    current_request_info_->request_id(),
                                    label.Utf8().c_str()));
  if (auto* media_stream_device_observer = GetMediaStreamDeviceObserver()) {
    media_stream_device_observer->AddStream(
        blink::WebString(label),
        ToStdVector(current_request_info_->audio_devices()),
        ToStdVector(current_request_info_->video_devices()),
        WTF::BindRepeating(&UserMediaProcessor::OnDeviceStopped,
                           WrapWeakPersistent(this)),
        WTF::BindRepeating(&UserMediaProcessor::OnDeviceChanged,
                           WrapWeakPersistent(this)),
        WTF::BindRepeating(&UserMediaProcessor::OnDeviceRequestStateChange,
                           WrapWeakPersistent(this)));
  }

  HeapVector<Member<MediaStreamComponent>> audio_tracks(
      current_request_info_->audio_devices().size());
  CreateAudioTracks(current_request_info_->audio_devices(), &audio_tracks);

  HeapVector<Member<MediaStreamComponent>> video_tracks(
      current_request_info_->video_devices().size());
  CreateVideoTracks(current_request_info_->video_devices(), &video_tracks);

  String blink_id = label;
  current_request_info_->InitializeWebStream(blink_id, audio_tracks,
                                             video_tracks);

  // Wait for the tracks to be started successfully or to fail.
  current_request_info_->CallbackOnTracksStarted(
      WTF::Bind(&UserMediaProcessor::OnCreateNativeTracksCompleted,
                WrapWeakPersistent(this), label));
}

void UserMediaProcessor::CreateVideoTracks(
    const Vector<MediaStreamDevice>& devices,
    HeapVector<Member<MediaStreamComponent>>* components) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  DCHECK(current_request_info_);
  DCHECK_EQ(devices.size(), components->size());
  SendLogMessage(base::StringPrintf("UMP::CreateVideoTracks({request_id=%d})",
                                    current_request_info_->request_id()));

  for (WTF::wtf_size_t i = 0; i < devices.size(); ++i) {
    MediaStreamSource* source = InitializeVideoSourceObject(devices[i]);
    (*components)[i] = current_request_info_->CreateAndStartVideoTrack(source);
  }
}

void UserMediaProcessor::CreateAudioTracks(
    const Vector<MediaStreamDevice>& devices,
    HeapVector<Member<MediaStreamComponent>>* components) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  DCHECK(current_request_info_);
  DCHECK_EQ(devices.size(), components->size());

  Vector<MediaStreamDevice> overridden_audio_devices = devices;
  bool render_to_associated_sink =
      current_request_info_->audio_capture_settings().HasValue() &&
      current_request_info_->audio_capture_settings()
          .render_to_associated_sink();
  SendLogMessage(
      base::StringPrintf("CreateAudioTracks({render_to_associated_sink=%d})",
                         render_to_associated_sink));
  if (!render_to_associated_sink) {
    // If the GetUserMedia request did not explicitly set the constraint
    // kMediaStreamRenderToAssociatedSink, the output device id must
    // be removed.
    for (auto& device : overridden_audio_devices)
      device.matched_output_device_id.reset();
  }

  for (WTF::wtf_size_t i = 0; i < overridden_audio_devices.size(); ++i) {
    bool is_pending = false;
    MediaStreamSource* source =
        InitializeAudioSourceObject(overridden_audio_devices[i], &is_pending);
    (*components)[i] = MakeGarbageCollected<MediaStreamComponent>(source);
    current_request_info_->StartAudioTrack((*components)[i], is_pending);
    // At this point the source has started, and its audio parameters have been
    // set. Thus, all audio processing properties are known and can be surfaced
    // to |source|.
    SurfaceAudioProcessingSettings(source);
  }
}

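// Invoked once all native tracks for |label| have either started or failed.
// On success the request is resolved and the browser is told the stream has
// started; on failure the request is rejected and any tracks that were
// created are stopped.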
void UserMediaProcessor::OnCreateNativeTracksCompleted(
    const String& label,
    RequestInfo* request_info,
    MediaStreamRequestResult result,
    const String& constraint_name) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  SendLogMessage(base::StringPrintf(
      "UMP::OnCreateNativeTracksCompleted({request_id = %d}, {label=%s})",
      request_info->request_id(), label.Utf8().c_str()));
  if (result == MediaStreamRequestResult::OK) {
    GetUserMediaRequestSucceeded(request_info->descriptor(),
                                 request_info->request());
    GetMediaStreamDispatcherHost()->OnStreamStarted(label);
  } else {
    GetUserMediaRequestFailed(result, constraint_name);

    for (auto web_track : request_info->descriptor()->AudioComponents()) {
      MediaStreamTrackPlatform* track =
          MediaStreamTrackPlatform::GetTrack(WebMediaStreamTrack(web_track));
      if (track)
        track->Stop();
    }

    for (auto web_track : request_info->descriptor()->VideoComponents()) {
      MediaStreamTrackPlatform* track =
          MediaStreamTrackPlatform::GetTrack(WebMediaStreamTrack(web_track));
      if (track)
        track->Stop();
    }
  }

  DeleteUserMediaRequest(request_info->request());
}

void UserMediaProcessor::GetUserMediaRequestSucceeded(
    MediaStreamDescriptor* descriptor,
    UserMediaRequest* user_media_request) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  DCHECK(IsCurrentRequestInfo(user_media_request));
  SendLogMessage(
      base::StringPrintf("GetUserMediaRequestSucceeded({request_id=%d})",
                         current_request_info_->request_id()));

  // Completing the getUserMedia request can cause the RenderFrame and the
  // UserMediaClient/UserMediaProcessor to be destroyed if the JavaScript code
  // requests the frame to be destroyed within the scope of the callback.
  // Therefore, post a task to complete the request with a clean stack.
  task_runner_->PostTask(
      FROM_HERE,
      WTF::Bind(&UserMediaProcessor::DelayedGetUserMediaRequestSucceeded,
                WrapWeakPersistent(this), current_request_info_->request_id(),
                WrapPersistent(descriptor),
                WrapPersistent(user_media_request)));
}

void UserMediaProcessor::DelayedGetUserMediaRequestSucceeded(
    int request_id,
    MediaStreamDescriptor* component,
    UserMediaRequest* user_media_request) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  SendLogMessage(base::StringPrintf(
      "DelayedGetUserMediaRequestSucceeded({request_id=%d}, {result=%s})",
      request_id,
      MediaStreamRequestResultToString(MediaStreamRequestResult::OK)));
  blink::LogUserMediaRequestResult(MediaStreamRequestResult::OK);
  DeleteUserMediaRequest(user_media_request);
  user_media_request->Succeed(component);
}

void UserMediaProcessor::GetUserMediaRequestFailed(
    MediaStreamRequestResult result,
    const String& constraint_name) {
  DCHECK(current_request_info_);
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  SendLogMessage(
      base::StringPrintf("GetUserMediaRequestFailed({request_id=%d})",
                         current_request_info_->request_id()));

  // Completing the getUserMedia request can cause the RenderFrame and the
  // UserMediaClient/UserMediaProcessor to be destroyed if the JavaScript code
  // requests the frame to be destroyed within the scope of the callback.
  // Therefore, post a task to complete the request with a clean stack.
  task_runner_->PostTask(
      FROM_HERE,
      WTF::Bind(&UserMediaProcessor::DelayedGetUserMediaRequestFailed,
                WrapWeakPersistent(this), current_request_info_->request_id(),
                WrapPersistent(current_request_info_->request()), result,
                constraint_name));
}

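// Completes a failed request on a clean stack by mapping the
// MediaStreamRequestResult to the corresponding UserMediaRequest::Error and
// rejecting the request with a descriptive message.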
void UserMediaProcessor::DelayedGetUserMediaRequestFailed(
    int request_id,
    UserMediaRequest* user_media_request,
    MediaStreamRequestResult result,
    const String& constraint_name) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  blink::LogUserMediaRequestResult(result);
  SendLogMessage(base::StringPrintf(
      "DelayedGetUserMediaRequestFailed({request_id=%d}, {result=%s})",
      request_id, MediaStreamRequestResultToString(result)));
  DeleteUserMediaRequest(user_media_request);
  switch (result) {
    case MediaStreamRequestResult::OK:
    case MediaStreamRequestResult::NUM_MEDIA_REQUEST_RESULTS:
      NOTREACHED();
      return;
    case MediaStreamRequestResult::PERMISSION_DENIED:
      user_media_request->Fail(UserMediaRequest::Error::kPermissionDenied,
                               "Permission denied");
      return;
    case MediaStreamRequestResult::PERMISSION_DISMISSED:
      user_media_request->Fail(UserMediaRequest::Error::kPermissionDismissed,
                               "Permission dismissed");
      return;
    case MediaStreamRequestResult::INVALID_STATE:
      user_media_request->Fail(UserMediaRequest::Error::kInvalidState,
                               "Invalid state");
      return;
    case MediaStreamRequestResult::NO_HARDWARE:
      user_media_request->Fail(UserMediaRequest::Error::kDevicesNotFound,
                               "Requested device not found");
      return;
    case MediaStreamRequestResult::INVALID_SECURITY_ORIGIN:
      user_media_request->Fail(UserMediaRequest::Error::kSecurityError,
                               "Invalid security origin");
      return;
    case MediaStreamRequestResult::TAB_CAPTURE_FAILURE:
      user_media_request->Fail(UserMediaRequest::Error::kTabCapture,
                               "Error starting tab capture");
      return;
    case MediaStreamRequestResult::SCREEN_CAPTURE_FAILURE:
      user_media_request->Fail(UserMediaRequest::Error::kScreenCapture,
                               "Error starting screen capture");
      return;
    case MediaStreamRequestResult::CAPTURE_FAILURE:
      user_media_request->Fail(UserMediaRequest::Error::kCapture,
                               "Error starting capture");
      return;
    case MediaStreamRequestResult::CONSTRAINT_NOT_SATISFIED:
      user_media_request->FailConstraint(constraint_name, "");
      return;
    case MediaStreamRequestResult::TRACK_START_FAILURE_AUDIO:
      user_media_request->Fail(UserMediaRequest::Error::kTrackStart,
                               "Could not start audio source");
      return;
    case MediaStreamRequestResult::TRACK_START_FAILURE_VIDEO:
      user_media_request->Fail(UserMediaRequest::Error::kTrackStart,
                               "Could not start video source");
      return;
    case MediaStreamRequestResult::NOT_SUPPORTED:
      user_media_request->Fail(UserMediaRequest::Error::kNotSupported,
                               "Not supported");
      return;
    case MediaStreamRequestResult::FAILED_DUE_TO_SHUTDOWN:
      user_media_request->Fail(UserMediaRequest::Error::kFailedDueToShutdown,
                               "Failed due to shutdown");
      return;
    case MediaStreamRequestResult::KILL_SWITCH_ON:
      user_media_request->Fail(UserMediaRequest::Error::kKillSwitchOn, "");
      return;
    case MediaStreamRequestResult::SYSTEM_PERMISSION_DENIED:
      user_media_request->Fail(UserMediaRequest::Error::kSystemPermissionDenied,
                               "Permission denied by system");
      return;
  }
  NOTREACHED();
  user_media_request->Fail(UserMediaRequest::Error::kPermissionDenied, "");
}

MediaStreamSource* UserMediaProcessor::FindLocalSource(
    const LocalStreamSources& sources,
    const MediaStreamDevice& device) const {
  for (auto local_source : sources) {
    WebPlatformMediaStreamSource* const source =
        local_source->GetPlatformSource();
    const MediaStreamDevice& active_device = source->device();
    if (IsSameDevice(active_device, device))
      return local_source;
  }
  return nullptr;
}

MediaStreamSource* UserMediaProcessor::FindOrInitializeSourceObject(
    const MediaStreamDevice& device) {
  MediaStreamSource* existing_source = FindLocalSource(device);
  if (existing_source) {
    DVLOG(1) << "Source already exists. Reusing source with id "
             << existing_source->Id().Utf8();
    return existing_source;
  }

  MediaStreamSource::StreamType type = IsAudioInputMediaType(device.type)
                                           ? MediaStreamSource::kTypeAudio
                                           : MediaStreamSource::kTypeVideo;

  auto* source = MakeGarbageCollected<MediaStreamSource>(
      String::FromUTF8(device.id), type, String::FromUTF8(device.name),
      false /* remote */);
  if (device.group_id)
    source->SetGroupId(String::FromUTF8(*device.group_id));
  return source;
}

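// Removes |source| from |local_sources_| or, if it is still pending, from
// |pending_local_sources_|; a pending source is additionally reported to the
// current request as a track start failure. Returns true if a source was
// removed.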
bool UserMediaProcessor::RemoveLocalSource(MediaStreamSource* source) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  SendLogMessage(base::StringPrintf(
      "RemoveLocalSource({id=%s}, {name=%s}, {group_id=%s})",
      source->Id().Utf8().c_str(), source->GetName().Utf8().c_str(),
      source->GroupId().Utf8().c_str()));

  for (auto* device_it = local_sources_.begin();
       device_it != local_sources_.end(); ++device_it) {
    if (IsSameSource(*device_it, source)) {
      local_sources_.erase(device_it);
      return true;
    }
  }

  // Check if the source was pending.
  for (auto* device_it = pending_local_sources_.begin();
       device_it != pending_local_sources_.end(); ++device_it) {
    if (IsSameSource(*device_it, source)) {
      WebPlatformMediaStreamSource* const source_extra_data =
          source->GetPlatformSource();
      const bool is_audio_source =
          source->GetType() == MediaStreamSource::kTypeAudio;
      NotifyCurrentRequestInfoOfAudioSourceStarted(
          source_extra_data,
          is_audio_source ? MediaStreamRequestResult::TRACK_START_FAILURE_AUDIO
                          : MediaStreamRequestResult::TRACK_START_FAILURE_VIDEO,
          String::FromUTF8(is_audio_source
                               ? "Failed to access audio capture device"
                               : "Failed to access video capture device"));
      pending_local_sources_.erase(device_it);
      return true;
    }
  }

  return false;
}

bool UserMediaProcessor::IsCurrentRequestInfo(int request_id) const {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  return current_request_info_ &&
         current_request_info_->request_id() == request_id;
}

bool UserMediaProcessor::IsCurrentRequestInfo(
    UserMediaRequest* user_media_request) const {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  return current_request_info_ &&
         current_request_info_->request() == user_media_request;
}

bool UserMediaProcessor::DeleteUserMediaRequest(
    UserMediaRequest* user_media_request) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  if (current_request_info_ &&
      current_request_info_->request() == user_media_request) {
    current_request_info_ = nullptr;
    std::move(request_completed_cb_).Run();
    return true;
  }
  return false;
}

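// Aborts any in-flight request (canceling it in the browser process if it has
// already been sent for generation), logs the outcome via the WebRTC UMA
// histograms, and stops and removes every local source.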
void UserMediaProcessor::StopAllProcessing() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  if (current_request_info_) {
    switch (current_request_info_->state()) {
      case RequestInfo::State::SENT_FOR_GENERATION:
        // Let the browser process know that the previously sent request must
        // be canceled.
        GetMediaStreamDispatcherHost()->CancelRequest(
            current_request_info_->request_id());
        FALLTHROUGH;

      case RequestInfo::State::NOT_SENT_FOR_GENERATION:
        LogUserMediaRequestWithNoResult(
            blink::MEDIA_STREAM_REQUEST_NOT_GENERATED);
        break;

      case RequestInfo::State::GENERATED:
        LogUserMediaRequestWithNoResult(
            blink::MEDIA_STREAM_REQUEST_PENDING_MEDIA_TRACKS);
        break;
    }
    current_request_info_ = nullptr;
  }
  request_completed_cb_.Reset();

  // Loop through all current local sources and stop the sources.
  auto* it = local_sources_.begin();
  while (it != local_sources_.end()) {
    StopLocalSource(*it, true);
    it = local_sources_.erase(it);
  }
}

void UserMediaProcessor::OnLocalSourceStopped(
    const blink::WebMediaStreamSource& source) {
  // The client can be null if the frame is already detached.
  // If it's already detached, dispatcher_host_ shouldn't be bound again.
  // (ref: crbug.com/1105842)
  if (!frame_->Client())
    return;

  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  blink::WebPlatformMediaStreamSource* source_impl = source.GetPlatformSource();
  SendLogMessage(base::StringPrintf(
      "OnLocalSourceStopped({session_id=%s})",
      source_impl->device().session_id().ToString().c_str()));

  const bool some_source_removed = RemoveLocalSource(source);
  CHECK(some_source_removed);

  if (auto* media_stream_device_observer = GetMediaStreamDeviceObserver())
    media_stream_device_observer->RemoveStreamDevice(source_impl->device());

  String device_id(source_impl->device().id.data());
  GetMediaStreamDispatcherHost()->StopStreamDevice(
      device_id, source_impl->device().serializable_session_id());
}

void UserMediaProcessor::StopLocalSource(MediaStreamSource* source,
                                         bool notify_dispatcher) {
  WebPlatformMediaStreamSource* source_impl = source->GetPlatformSource();
  SendLogMessage(base::StringPrintf(
      "StopLocalSource({session_id=%s})",
      source_impl->device().session_id().ToString().c_str()));

  if (notify_dispatcher) {
    if (auto* media_stream_device_observer = GetMediaStreamDeviceObserver())
      media_stream_device_observer->RemoveStreamDevice(source_impl->device());

    String device_id(source_impl->device().id.data());
    GetMediaStreamDispatcherHost()->StopStreamDevice(
        device_id, source_impl->device().serializable_session_id());
  }

  source_impl->ResetSourceStoppedCallback();
  source_impl->StopSource();
}

bool UserMediaProcessor::HasActiveSources() const {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  return !local_sources_.IsEmpty();
}

blink::mojom::blink::MediaStreamDispatcherHost*
UserMediaProcessor::GetMediaStreamDispatcherHost() {
  if (!dispatcher_host_.is_bound()) {
    frame_->GetBrowserInterfaceBroker().GetInterface(
        dispatcher_host_.BindNewPipeAndPassReceiver(task_runner_));
  }
  return dispatcher_host_.get();
}

blink::mojom::blink::MediaDevicesDispatcherHost*
UserMediaProcessor::GetMediaDevicesDispatcher() {
  return media_devices_dispatcher_cb_.Run();
}

const blink::AudioCaptureSettings&
UserMediaProcessor::AudioCaptureSettingsForTesting() const {
  DCHECK(current_request_info_);
  return current_request_info_->audio_capture_settings();
}

const blink::VideoCaptureSettings&
UserMediaProcessor::VideoCaptureSettingsForTesting() const {
  DCHECK(current_request_info_);
  return current_request_info_->video_capture_settings();
}

void UserMediaProcessor::SetMediaStreamDeviceObserverForTesting(
    WebMediaStreamDeviceObserver* media_stream_device_observer) {
  DCHECK(!GetMediaStreamDeviceObserver());
  DCHECK(media_stream_device_observer);
  media_stream_device_observer_for_testing_ = media_stream_device_observer;
}

}  // namespace blink