1 #include "v2/InstanceV2Impl.h"
2
3 #include "LogSinkImpl.h"
4 #include "VideoCaptureInterfaceImpl.h"
5 #include "VideoCapturerInterface.h"
6 #include "v2/NativeNetworkingImpl.h"
7 #include "v2/Signaling.h"
8
9 #include "CodecSelectHelper.h"
10 #include "platform/PlatformInterface.h"
11
12 #include "api/audio_codecs/audio_decoder_factory_template.h"
13 #include "api/audio_codecs/audio_encoder_factory_template.h"
14 #include "api/audio_codecs/opus/audio_decoder_opus.h"
15 #include "api/audio_codecs/opus/audio_decoder_multi_channel_opus.h"
16 #include "api/audio_codecs/opus/audio_encoder_opus.h"
17 #include "api/audio_codecs/L16/audio_decoder_L16.h"
18 #include "api/audio_codecs/L16/audio_encoder_L16.h"
19 #include "api/task_queue/default_task_queue_factory.h"
20 #include "media/engine/webrtc_media_engine.h"
21 #include "system_wrappers/include/field_trial.h"
22 #include "api/video/builtin_video_bitrate_allocator_factory.h"
23 #include "call/call.h"
24 #include "modules/rtp_rtcp/source/rtp_utility.h"
25 #include "api/call/audio_sink.h"
26 #include "modules/audio_processing/audio_buffer.h"
27 #include "absl/strings/match.h"
28 #include "modules/audio_processing/agc2/vad_with_level.h"
29 #include "pc/channel_manager.h"
30 #include "media/base/rtp_data_engine.h"
31 #include "audio/audio_state.h"
32 #include "modules/audio_coding/neteq/default_neteq_factory.h"
33 #include "modules/audio_coding/include/audio_coding_module.h"
34 #include "api/candidate.h"
35 #include "api/jsep_ice_candidate.h"
36 #include "media/base/h264_profile_level_id.h"
37 #include "pc/used_ids.h"
38
39 #include "AudioFrame.h"
40 #include "ThreadLocalObject.h"
41 #include "Manager.h"
42 #include "NetworkManager.h"
43 #include "VideoCaptureInterfaceImpl.h"
44 #include "platform/PlatformInterface.h"
45 #include "LogSinkImpl.h"
46 #include "CodecSelectHelper.h"
47 #include "AudioDeviceHelper.h"
48 #include "SignalingEncryption.h"
49
50 #include <random>
51 #include <sstream>
52
53 namespace tgcalls {
54 namespace {
55
// Parses a base-10 integer out of |string|; returns 0 when no leading
// integer can be extracted (stream extraction leaves the value untouched).
static int stringToInt(std::string const &string) {
    int parsedValue = 0;
    std::stringstream parser(string);
    parser >> parsedValue;
    return parsedValue;
}
62
// Formats |value| as its decimal string representation.
static std::string intToString(int value) {
    std::ostringstream formatter;
    formatter << value;
    return formatter.str();
}
68
// Downcasts the public VideoCaptureInterface to the implementation's inner
// object. Per the helper's name, the caller must already be on the thread
// that owns the capture object (getSyncAssumingSameThread is used).
// Returns nullptr when no capturer was provided.
static VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(VideoCaptureInterface *videoCapture) {
    return videoCapture
        ? static_cast<VideoCaptureInterfaceImpl*>(videoCapture)->object()->getSyncAssumingSameThread()
        : nullptr;
}
74
// A send-side video codec paired with its optional RTX (retransmission)
// codec, as produced by generateAvailableVideoFormats.
struct OutgoingVideoFormat {
    cricket::VideoCodec videoCodec;
    absl::optional<cricket::VideoCodec> rtxCodec;
};
79
addDefaultFeedbackParams(cricket::VideoCodec * codec)80 static void addDefaultFeedbackParams(cricket::VideoCodec *codec) {
81 // Don't add any feedback params for RED and ULPFEC.
82 if (codec->name == cricket::kRedCodecName || codec->name == cricket::kUlpfecCodecName) {
83 return;
84 }
85 codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamRemb, cricket::kParamValueEmpty));
86 codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty));
87 // Don't add any more feedback params for FLEXFEC.
88 if (codec->name == cricket::kFlexfecCodecName) {
89 return;
90 }
91 codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir));
92 codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kParamValueEmpty));
93 codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kRtcpFbNackParamPli));
94 }
95
// True when |codec| is an RTX (retransmission) codec, matched by
// case-insensitive name comparison.
template <class C>
static bool IsRtxCodec(const C& codec) {
  return absl::EqualsIgnoreCase(codec.name, cricket::kRtxCodecName);
}
100
// Looks up the codec with id |codec1_id| in |codecs1| and |codec2_id| in
// |codecs2| and reports whether both exist and match each other. Used to
// verify that two RTX codecs reference equivalent primary codecs.
template <class C>
static bool ReferencedCodecsMatch(const std::vector<C>& codecs1,
                                  const int codec1_id,
                                  const std::vector<C>& codecs2,
                                  const int codec2_id) {
  const C* codec1 = FindCodecById(codecs1, codec1_id);
  const C* codec2 = FindCodecById(codecs2, codec2_id);
  return codec1 != nullptr && codec2 != nullptr && codec1->Matches(*codec2);
}
110
// Finds a codec in |codecs2| that matches |codec_to_match|, which is
// a member of |codecs1|. If |codec_to_match| is an RTX codec, both
// the codecs themselves and their associated codecs must match.
// On success, copies the match into |found_codec| (if non-null) and
// returns true; returns false when no acceptable match exists.
template <class C>
static bool FindMatchingCodec(const std::vector<C>& codecs1,
                              const std::vector<C>& codecs2,
                              const C& codec_to_match,
                              C* found_codec) {
  // |codec_to_match| should be a member of |codecs1|, in order to look up RTX
  // codecs' associated codecs correctly. If not, that's a programming error.
  RTC_DCHECK(absl::c_any_of(codecs1, [&codec_to_match](const C& codec) {
    return &codec == &codec_to_match;
  }));
  for (const C& potential_match : codecs2) {
    if (potential_match.Matches(codec_to_match)) {
      if (IsRtxCodec(codec_to_match)) {
        int apt_value_1 = 0;
        int apt_value_2 = 0;
        if (!codec_to_match.GetParam(cricket::kCodecParamAssociatedPayloadType,
                                     &apt_value_1) ||
            !potential_match.GetParam(cricket::kCodecParamAssociatedPayloadType,
                                      &apt_value_2)) {
          RTC_LOG(LS_WARNING) << "RTX missing associated payload type.";
          continue;
        }
        // Both RTX codecs must reference primary codecs that match each
        // other; otherwise keep scanning for another candidate.
        if (!ReferencedCodecsMatch(codecs1, apt_value_1, codecs2,
                                   apt_value_2)) {
          continue;
        }
      }
      if (found_codec) {
        *found_codec = potential_match;
      }
      return true;
    }
  }
  return false;
}
149
// Primary template: packetization negotiation only applies to video codecs,
// so for every other codec type this is intentionally a no-op.
template <class C>
static void NegotiatePacketization(const C& local_codec,
                                   const C& remote_codec,
                                   C* negotiated_codec) {}

// Video specialization: the negotiated packetization is the intersection of
// what both sides advertise.
template <>
void NegotiatePacketization(const cricket::VideoCodec& local_codec,
                            const cricket::VideoCodec& remote_codec,
                            cricket::VideoCodec* negotiated_codec) {
  negotiated_codec->packetization =
      cricket::VideoCodec::IntersectPacketization(local_codec, remote_codec);
}
162
// Fills |negotiated_codecs| with every local codec that has a match in
// |offered_codecs|. Each negotiated entry takes the remote id/name, the
// intersection of feedback params, the remote RTX associated payload type
// and (for H264) an answered profile-level-id. When |keep_offer_order| is
// set, the result is re-sorted to follow the offer's codec order.
template <class C>
static void NegotiateCodecs(const std::vector<C>& local_codecs,
                            const std::vector<C>& offered_codecs,
                            std::vector<C>* negotiated_codecs,
                            bool keep_offer_order) {
  for (const C& ours : local_codecs) {
    C theirs;
    // Note that we intentionally only find one matching codec for each of our
    // local codecs, in case the remote offer contains duplicate codecs.
    if (FindMatchingCodec(local_codecs, offered_codecs, ours, &theirs)) {
      C negotiated = ours;
      NegotiatePacketization(ours, theirs, &negotiated);
      negotiated.IntersectFeedbackParams(theirs);
      if (IsRtxCodec(negotiated)) {
        const auto apt_it =
            theirs.params.find(cricket::kCodecParamAssociatedPayloadType);
        // FindMatchingCodec shouldn't return something with no apt value.
        RTC_DCHECK(apt_it != theirs.params.end());
        negotiated.SetParam(cricket::kCodecParamAssociatedPayloadType, apt_it->second);
      }
      if (absl::EqualsIgnoreCase(ours.name, cricket::kH264CodecName)) {
        webrtc::H264::GenerateProfileLevelIdForAnswer(
            ours.params, theirs.params, &negotiated.params);
      }
      // Adopt the remote payload type id (and its name casing) so both
      // sides agree on the wire format.
      negotiated.id = theirs.id;
      negotiated.name = theirs.name;
      negotiated_codecs->push_back(std::move(negotiated));
    }
  }
  if (keep_offer_order) {
    // RFC3264: Although the answerer MAY list the formats in their desired
    // order of preference, it is RECOMMENDED that unless there is a
    // specific reason, the answerer list formats in the same relative order
    // they were present in the offer.
    // This can be skipped when the transceiver has any codec preferences.
    std::unordered_map<int, int> payload_type_preferences;
    int preference = static_cast<int>(offered_codecs.size() + 1);
    for (const C& codec : offered_codecs) {
      payload_type_preferences[codec.id] = preference--;
    }
    absl::c_sort(*negotiated_codecs, [&payload_type_preferences](const C& a,
                                                                 const C& b) {
      return payload_type_preferences[a.id] > payload_type_preferences[b.id];
    });
  }
}
209
// Find the codec in |codec_list| that |rtx_codec| is associated with.
// Returns nullptr (after logging a warning) when the RTX codec has no
// "apt" parameter, the parameter is not an integer, or no codec with
// that payload type exists in |codec_list|.
template <class C>
static const C* GetAssociatedCodec(const std::vector<C>& codec_list,
                                   const C& rtx_codec) {
  std::string associated_pt_str;
  if (!rtx_codec.GetParam(cricket::kCodecParamAssociatedPayloadType,
                          &associated_pt_str)) {
    RTC_LOG(LS_WARNING) << "RTX codec " << rtx_codec.name
                        << " is missing an associated payload type.";
    return nullptr;
  }

  int associated_pt;
  if (!rtc::FromString(associated_pt_str, &associated_pt)) {
    RTC_LOG(LS_WARNING) << "Couldn't convert payload type " << associated_pt_str
                        << " of RTX codec " << rtx_codec.name
                        << " to an integer.";
    return nullptr;
  }

  // Find the associated reference codec for the reference RTX codec.
  const C* associated_codec = FindCodecById(codec_list, associated_pt);
  if (!associated_codec) {
    RTC_LOG(LS_WARNING) << "Couldn't find associated codec with payload type "
                        << associated_pt << " for RTX codec " << rtx_codec.name
                        << ".";
  }
  return associated_codec;
}
239
// Adds all codecs from |reference_codecs| to |offered_codecs| that don't
// already exist in |offered_codecs| and ensure the payload types don't
// collide. Non-RTX codecs are merged first so that RTX codecs can be
// re-pointed at the (possibly renumbered) primary codec afterwards.
template <class C>
static void MergeCodecs(const std::vector<C>& reference_codecs,
                        std::vector<C>* offered_codecs,
                        cricket::UsedPayloadTypes* used_pltypes) {
  // Add all new codecs that are not RTX codecs.
  for (const C& reference_codec : reference_codecs) {
    if (!IsRtxCodec(reference_codec) &&
        !FindMatchingCodec<C>(reference_codecs, *offered_codecs,
                              reference_codec, nullptr)) {
      C codec = reference_codec;
      // Renumber the codec if its payload type is already taken.
      used_pltypes->FindAndSetIdUsed(&codec);
      offered_codecs->push_back(codec);
    }
  }

  // Add all new RTX codecs.
  for (const C& reference_codec : reference_codecs) {
    if (IsRtxCodec(reference_codec) &&
        !FindMatchingCodec<C>(reference_codecs, *offered_codecs,
                              reference_codec, nullptr)) {
      C rtx_codec = reference_codec;
      const C* associated_codec =
          GetAssociatedCodec(reference_codecs, rtx_codec);
      if (!associated_codec) {
        continue;
      }
      // Find a codec in the offered list that matches the reference codec.
      // Its payload type may be different than the reference codec.
      C matching_codec;
      if (!FindMatchingCodec<C>(reference_codecs, *offered_codecs,
                                *associated_codec, &matching_codec)) {
        RTC_LOG(LS_WARNING)
            << "Couldn't find matching " << associated_codec->name << " codec.";
        continue;
      }

      // Point the RTX codec's "apt" parameter at the merged primary codec.
      rtx_codec.params[cricket::kCodecParamAssociatedPayloadType] =
          rtc::ToString(matching_codec.id);
      used_pltypes->FindAndSetIdUsed(&rtx_codec);
      offered_codecs->push_back(rtx_codec);
    }
  }
}
286
// Builds the outgoing video format list from the platform-supplied SDP
// formats. Only the first VP8 format is selected (other codec names are
// skipped), paired with an RTX codec. Payload type ids are assigned from
// the dynamic range [120, 127].
static std::vector<OutgoingVideoFormat> generateAvailableVideoFormats(std::vector<webrtc::SdpVideoFormat> const &formats) {
    if (formats.empty()) {
        return {};
    }

    constexpr int kFirstDynamicPayloadType = 120;
    constexpr int kLastDynamicPayloadType = 127;

    int payload_type = kFirstDynamicPayloadType;

    std::vector<OutgoingVideoFormat> result;

    // Only a single codec is currently selected; the flag stops the scan
    // after the first accepted format.
    bool codecSelected = false;

    for (const auto &format : formats) {
        if (codecSelected) {
            break;
        }

        OutgoingVideoFormat resultFormat;

        cricket::VideoCodec codec(format);
        codec.id = payload_type;
        addDefaultFeedbackParams(&codec);

        // Restrict the outgoing codec to VP8 only.
        if (!absl::EqualsIgnoreCase(codec.name, cricket::kVp8CodecName)) {
            continue;
        }

        resultFormat.videoCodec = codec;
        codecSelected = true;

        // Increment payload type.
        ++payload_type;
        if (payload_type > kLastDynamicPayloadType) {
            // NOTE(review): breaking here (and in the RTX branch below) drops
            // the already-selected format, since resultFormat is never pushed.
            // Unreachable with the current [120, 127] range and a single
            // selected codec, but worth confirming if the range ever changes.
            RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest.";
            break;
        }

        // Add associated RTX codec for non-FEC codecs.
        if (!absl::EqualsIgnoreCase(codec.name, cricket::kUlpfecCodecName) &&
            !absl::EqualsIgnoreCase(codec.name, cricket::kFlexfecCodecName)) {
            resultFormat.rtxCodec = cricket::VideoCodec::CreateRtxCodec(payload_type, codec.id);

            // Increment payload type.
            ++payload_type;
            if (payload_type > kLastDynamicPayloadType) {
                RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest.";
                break;
            }
        }

        result.push_back(std::move(resultFormat));
    }
    return result;
}
343
getCodecsFromMediaContent(signaling::MediaContent const & content,std::vector<cricket::VideoCodec> & codecs)344 static void getCodecsFromMediaContent(signaling::MediaContent const &content, std::vector<cricket::VideoCodec> &codecs) {
345 for (const auto &payloadType : content.payloadTypes) {
346 cricket::VideoCodec codec(payloadType.id, payloadType.name);
347 for (const auto &feedbackType : payloadType.feedbackTypes) {
348 codec.AddFeedbackParam(cricket::FeedbackParam(feedbackType.type, feedbackType.subtype));
349 }
350 for (const auto ¶meter : payloadType.parameters) {
351 codec.SetParam(parameter.first, parameter.second);
352 }
353 codecs.push_back(std::move(codec));
354 }
355 }
356
getPayloadTypesFromVideoCodecs(std::vector<cricket::VideoCodec> const & codecs)357 static std::vector<signaling::PayloadType> getPayloadTypesFromVideoCodecs(std::vector<cricket::VideoCodec> const &codecs) {
358 std::vector<signaling::PayloadType> payloadTypes;
359
360 for (const auto &codec : codecs) {
361 signaling::PayloadType payloadType;
362
363 payloadType.id = codec.id;
364 payloadType.name = codec.name;
365 payloadType.clockrate = 90000;
366 payloadType.channels = 0;
367
368 for (const auto &feedbackParam : codec.feedback_params.params()) {
369 signaling::FeedbackType feedbackType;
370 feedbackType.type = feedbackParam.id();
371 feedbackType.subtype = feedbackParam.param();
372 payloadType.feedbackTypes.push_back(std::move(feedbackType));
373 }
374
375 for (const auto ¶m : codec.params) {
376 payloadType.parameters.push_back(std::make_pair(param.first, param.second));
377 }
378
379 payloadTypes.push_back(std::move(payloadType));
380 }
381
382 return payloadTypes;
383 }
384
getCodecsFromMediaContent(signaling::MediaContent const & content,std::vector<cricket::AudioCodec> & codecs)385 static void getCodecsFromMediaContent(signaling::MediaContent const &content, std::vector<cricket::AudioCodec> &codecs) {
386 for (const auto &payloadType : content.payloadTypes) {
387 cricket::AudioCodec codec(payloadType.id, payloadType.name, payloadType.clockrate, 0, payloadType.channels);
388 for (const auto &feedbackType : payloadType.feedbackTypes) {
389 codec.AddFeedbackParam(cricket::FeedbackParam(feedbackType.type, feedbackType.subtype));
390 }
391 for (const auto ¶meter : payloadType.parameters) {
392 codec.SetParam(parameter.first, parameter.second);
393 }
394 codecs.push_back(std::move(codec));
395 }
396 }
397
getPayloadTypesFromAudioCodecs(std::vector<cricket::AudioCodec> const & codecs)398 static std::vector<signaling::PayloadType> getPayloadTypesFromAudioCodecs(std::vector<cricket::AudioCodec> const &codecs) {
399 std::vector<signaling::PayloadType> payloadTypes;
400
401 for (const auto &codec : codecs) {
402 signaling::PayloadType payloadType;
403
404 payloadType.id = codec.id;
405 payloadType.name = codec.name;
406 payloadType.clockrate = codec.clockrate;
407 payloadType.channels = (uint32_t)codec.channels;
408
409 for (const auto &feedbackParam : codec.feedback_params.params()) {
410 signaling::FeedbackType feedbackType;
411 feedbackType.type = feedbackParam.id();
412 feedbackType.subtype = feedbackParam.param();
413 payloadType.feedbackTypes.push_back(std::move(feedbackType));
414 }
415
416 for (const auto ¶m : codec.params) {
417 payloadType.parameters.push_back(std::make_pair(param.first, param.second));
418 }
419
420 payloadTypes.push_back(std::move(payloadType));
421 }
422
423 return payloadTypes;
424 }
425
// Result of negotiating a local and a remote media description: the stream
// ssrc, its ssrc groups (e.g. FID for RTX), the agreed RTP header
// extensions and the agreed codec list. C is cricket::AudioCodec or
// cricket::VideoCodec.
template <class C>
struct NegotiatedMediaContent {
    uint32_t ssrc = 0;
    std::vector<signaling::SsrcGroup> ssrcGroups;
    std::vector<webrtc::RtpExtension> rtpExtensions;
    std::vector<C> codecs;
};
433
FindByUri(const cricket::RtpHeaderExtensions & extensions,const webrtc::RtpExtension & ext_to_match,webrtc::RtpExtension * found_extension)434 static bool FindByUri(const cricket::RtpHeaderExtensions& extensions,
435 const webrtc::RtpExtension& ext_to_match,
436 webrtc::RtpExtension* found_extension) {
437 // We assume that all URIs are given in a canonical format.
438 const webrtc::RtpExtension* found =
439 webrtc::RtpExtension::FindHeaderExtensionByUri(extensions,
440 ext_to_match.uri);
441 if (!found) {
442 return false;
443 }
444 if (found_extension) {
445 *found_extension = *found;
446 }
447 return true;
448 }
449
450 template <class C>
negotiateMediaContent(signaling::MediaContent const & baseMediaContent,signaling::MediaContent const & localContent,signaling::MediaContent const & remoteContent,bool isAnswer)451 static NegotiatedMediaContent<C> negotiateMediaContent(signaling::MediaContent const &baseMediaContent, signaling::MediaContent const &localContent, signaling::MediaContent const &remoteContent, bool isAnswer) {
452 std::vector<C> localCodecs;
453 getCodecsFromMediaContent(localContent, localCodecs);
454
455 std::vector<C> remoteCodecs;
456 getCodecsFromMediaContent(remoteContent, remoteCodecs);
457
458 std::vector<C> negotiatedCodecs;
459
460 cricket::UsedPayloadTypes usedPayloadTypes;
461 NegotiateCodecs<C>(localCodecs, remoteCodecs, &negotiatedCodecs, true);
462
463 NegotiatedMediaContent<C> result;
464
465 result.ssrc = baseMediaContent.ssrc;
466 result.ssrcGroups = baseMediaContent.ssrcGroups;
467 result.codecs = std::move(negotiatedCodecs);
468
469 cricket::UsedRtpHeaderExtensionIds extensionIds(cricket::UsedRtpHeaderExtensionIds::IdDomain::kOneByteOnly);
470
471 for (const auto &extension : remoteContent.rtpExtensions) {
472 if (isAnswer) {
473 webrtc::RtpExtension found;
474 if (!FindByUri(localContent.rtpExtensions, extension, &found)) {
475 continue;
476 }
477 }
478
479 webrtc::RtpExtension mutableExtension = extension;
480 extensionIds.FindAndSetIdUsed(&mutableExtension);
481 result.rtpExtensions.push_back(std::move(mutableExtension));
482 }
483
484 if (!isAnswer) {
485 for (const auto &extension : localContent.rtpExtensions) {
486 webrtc::RtpExtension found;
487 if (!FindByUri(result.rtpExtensions, extension, &found)) {
488 webrtc::RtpExtension mutableExtension = extension;
489 extensionIds.FindAndSetIdUsed(&mutableExtension);
490 result.rtpExtensions.push_back(std::move(mutableExtension));
491 }
492 }
493 }
494
495 return result;
496 }
497
498 class OutgoingAudioChannel : public sigslot::has_slots<> {
499 public:
createOutgoingContentDescription()500 static absl::optional<signaling::MediaContent> createOutgoingContentDescription() {
501 signaling::MediaContent mediaContent;
502
503 auto generator = std::mt19937(std::random_device()());
504 auto distribution = std::uniform_int_distribution<uint32_t>();
505 do {
506 mediaContent.ssrc = distribution(generator) & 0x7fffffffU;
507 } while (!mediaContent.ssrc);
508
509 mediaContent.rtpExtensions.emplace_back(webrtc::RtpExtension::kAudioLevelUri, 1);
510 mediaContent.rtpExtensions.emplace_back(webrtc::RtpExtension::kAbsSendTimeUri, 2);
511 mediaContent.rtpExtensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, 3);
512
513 cricket::AudioCodec opusCodec(109, "opus", 48000, 0, 2);
514 opusCodec.AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc));
515 opusCodec.SetParam(cricket::kCodecParamUseInbandFec, 1);
516 opusCodec.SetParam(cricket::kCodecParamMinPTime, 60);
517
518 mediaContent.payloadTypes = getPayloadTypesFromAudioCodecs({ opusCodec });
519
520 return mediaContent;
521 }
522
523 public:
OutgoingAudioChannel(webrtc::Call * call,cricket::ChannelManager * channelManager,rtc::UniqueRandomIdGenerator * uniqueRandomIdGenerator,webrtc::LocalAudioSinkAdapter * audioSource,webrtc::RtpTransport * rtpTransport,NegotiatedMediaContent<cricket::AudioCodec> const & mediaContent,std::shared_ptr<Threads> threads)524 OutgoingAudioChannel(
525 webrtc::Call *call,
526 cricket::ChannelManager *channelManager,
527 rtc::UniqueRandomIdGenerator *uniqueRandomIdGenerator,
528 webrtc::LocalAudioSinkAdapter *audioSource,
529 webrtc::RtpTransport *rtpTransport,
530 NegotiatedMediaContent<cricket::AudioCodec> const &mediaContent,
531 std::shared_ptr<Threads> threads
532 ) :
533 _ssrc(mediaContent.ssrc),
534 _call(call),
535 _channelManager(channelManager),
536 _audioSource(audioSource) {
537 cricket::AudioOptions audioOptions;
538 bool _disableOutgoingAudioProcessing = false;
539
540 if (_disableOutgoingAudioProcessing) {
541 audioOptions.echo_cancellation = false;
542 audioOptions.noise_suppression = false;
543 audioOptions.auto_gain_control = false;
544 audioOptions.highpass_filter = false;
545 audioOptions.typing_detection = false;
546 audioOptions.experimental_agc = false;
547 audioOptions.experimental_ns = false;
548 audioOptions.residual_echo_detector = false;
549 } else {
550 audioOptions.echo_cancellation = true;
551 audioOptions.noise_suppression = true;
552 }
553
554 std::vector<std::string> streamIds;
555 streamIds.push_back("1");
556
557 _outgoingAudioChannel = _channelManager->CreateVoiceChannel(call, cricket::MediaConfig(), rtpTransport, threads->getMediaThread(), "audio0", false, NativeNetworkingImpl::getDefaulCryptoOptions(), uniqueRandomIdGenerator, audioOptions);
558
559 std::vector<cricket::AudioCodec> codecs;
560 for (const auto &codec : mediaContent.codecs) {
561 if (codec.name == "opus") {
562 auto mutableCodec = codec;
563
564 const uint8_t opusMinBitrateKbps = 16;
565 const uint8_t opusMaxBitrateKbps = 32;
566 const uint8_t opusStartBitrateKbps = 32;
567 const uint8_t opusPTimeMs = 60;
568
569 mutableCodec.SetParam(cricket::kCodecParamMinBitrate, opusMinBitrateKbps);
570 mutableCodec.SetParam(cricket::kCodecParamStartBitrate, opusStartBitrateKbps);
571 mutableCodec.SetParam(cricket::kCodecParamMaxBitrate, opusMaxBitrateKbps);
572 mutableCodec.SetParam(cricket::kCodecParamUseInbandFec, 1);
573 mutableCodec.SetParam(cricket::kCodecParamPTime, opusPTimeMs);
574
575 codecs.push_back(std::move(mutableCodec));
576 }
577 }
578
579 auto outgoingAudioDescription = std::make_unique<cricket::AudioContentDescription>();
580 for (const auto &rtpExtension : mediaContent.rtpExtensions) {
581 outgoingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(rtpExtension.uri, rtpExtension.id));
582 }
583 outgoingAudioDescription->set_rtcp_mux(true);
584 outgoingAudioDescription->set_rtcp_reduced_size(true);
585 outgoingAudioDescription->set_direction(webrtc::RtpTransceiverDirection::kSendOnly);
586 outgoingAudioDescription->set_codecs(codecs);
587 outgoingAudioDescription->set_bandwidth(1032000);
588 outgoingAudioDescription->AddStream(cricket::StreamParams::CreateLegacy(_ssrc));
589
590 auto incomingAudioDescription = std::make_unique<cricket::AudioContentDescription>();
591 for (const auto &rtpExtension : mediaContent.rtpExtensions) {
592 incomingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(rtpExtension.uri, rtpExtension.id));
593 }
594 incomingAudioDescription->set_rtcp_mux(true);
595 incomingAudioDescription->set_rtcp_reduced_size(true);
596 incomingAudioDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly);
597 incomingAudioDescription->set_codecs(codecs);
598 incomingAudioDescription->set_bandwidth(1032000);
599
600 _outgoingAudioChannel->SetPayloadTypeDemuxingEnabled(false);
601 _outgoingAudioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr);
602 _outgoingAudioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr);
603
604 _outgoingAudioChannel->SignalSentPacket().connect(this, &OutgoingAudioChannel::OnSentPacket_w);
605 //_outgoingAudioChannel->UpdateRtpTransport(nullptr);
606
607 setIsMuted(false);
608 }
609
~OutgoingAudioChannel()610 ~OutgoingAudioChannel() {
611 _outgoingAudioChannel->SignalSentPacket().disconnect(this);
612 _outgoingAudioChannel->media_channel()->SetAudioSend(_ssrc, false, nullptr, _audioSource);
613 _outgoingAudioChannel->Enable(false);
614 _channelManager->DestroyVoiceChannel(_outgoingAudioChannel);
615 _outgoingAudioChannel = nullptr;
616 }
617
setIsMuted(bool isMuted)618 void setIsMuted(bool isMuted) {
619 if (_isMuted != isMuted) {
620 _isMuted = false;
621
622 _outgoingAudioChannel->Enable(!_isMuted);
623 _outgoingAudioChannel->media_channel()->SetAudioSend(_ssrc, !_isMuted, nullptr, _audioSource);
624 }
625 }
626
627 private:
OnSentPacket_w(const rtc::SentPacket & sent_packet)628 void OnSentPacket_w(const rtc::SentPacket& sent_packet) {
629 _call->OnSentPacket(sent_packet);
630 }
631
632 private:
633 uint32_t _ssrc = 0;
634 webrtc::Call *_call = nullptr;
635 cricket::ChannelManager *_channelManager = nullptr;
636 webrtc::LocalAudioSinkAdapter *_audioSource = nullptr;
637 cricket::VoiceChannel *_outgoingAudioChannel = nullptr;
638
639 bool _isMuted = true;
640 };
641
// Owns the cricket voice channel used for receiving a remote audio stream.
// Mirrors OutgoingAudioChannel: local recv-only offer, remote send-only
// answer carrying the remote stream params.
class IncomingV2AudioChannel : public sigslot::has_slots<> {
public:
    // Creates the voice channel for |mediaContent.ssrc|, applies the
    // negotiated codecs/extensions and enables reception immediately.
    IncomingV2AudioChannel(
        cricket::ChannelManager *channelManager,
        webrtc::Call *call,
        webrtc::RtpTransport *rtpTransport,
        rtc::UniqueRandomIdGenerator *randomIdGenerator,
        NegotiatedMediaContent<cricket::AudioCodec> const &mediaContent,
        std::shared_ptr<Threads> threads) :
    _ssrc(mediaContent.ssrc),
    _channelManager(channelManager),
    _call(call) {
        _creationTimestamp = rtc::TimeMillis();

        // Jitter-buffer tuning for incoming audio.
        cricket::AudioOptions audioOptions;
        audioOptions.audio_jitter_buffer_fast_accelerate = true;
        audioOptions.audio_jitter_buffer_min_delay_ms = 50;

        std::string streamId = std::string("stream1");

        _audioChannel = _channelManager->CreateVoiceChannel(call, cricket::MediaConfig(), rtpTransport, threads->getMediaThread(), "0", false, NativeNetworkingImpl::getDefaulCryptoOptions(), randomIdGenerator, audioOptions);

        auto audioCodecs = mediaContent.codecs;

        // Local (recv-only) description used as the offer.
        auto outgoingAudioDescription = std::make_unique<cricket::AudioContentDescription>();
        for (const auto &rtpExtension : mediaContent.rtpExtensions) {
            outgoingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(rtpExtension.uri, rtpExtension.id));
        }
        outgoingAudioDescription->set_rtcp_mux(true);
        outgoingAudioDescription->set_rtcp_reduced_size(true);
        outgoingAudioDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly);
        outgoingAudioDescription->set_codecs(audioCodecs);
        outgoingAudioDescription->set_bandwidth(1032000);

        // Remote (send-only) description carrying the remote stream params.
        auto incomingAudioDescription = std::make_unique<cricket::AudioContentDescription>();
        for (const auto &rtpExtension : mediaContent.rtpExtensions) {
            incomingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(rtpExtension.uri, rtpExtension.id));
        }
        incomingAudioDescription->set_rtcp_mux(true);
        incomingAudioDescription->set_rtcp_reduced_size(true);
        incomingAudioDescription->set_direction(webrtc::RtpTransceiverDirection::kSendOnly);
        incomingAudioDescription->set_codecs(audioCodecs);
        incomingAudioDescription->set_bandwidth(1032000);
        cricket::StreamParams streamParams = cricket::StreamParams::CreateLegacy(mediaContent.ssrc);
        streamParams.set_stream_ids({ streamId });
        incomingAudioDescription->AddStream(streamParams);

        _audioChannel->SetPayloadTypeDemuxingEnabled(false);
        _audioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr);
        _audioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr);

        outgoingAudioDescription.reset();
        incomingAudioDescription.reset();

        //std::unique_ptr<AudioSinkImpl> audioLevelSink(new AudioSinkImpl(onAudioLevelUpdated, _ssrc, std::move(onAudioFrame)));
        //_audioChannel->media_channel()->SetRawAudioSink(ssrc.networkSsrc, std::move(audioLevelSink));

        _audioChannel->SignalSentPacket().connect(this, &IncomingV2AudioChannel::OnSentPacket_w);
        //_audioChannel->UpdateRtpTransport(nullptr);

        _audioChannel->Enable(true);
    }

    ~IncomingV2AudioChannel() {
        _audioChannel->SignalSentPacket().disconnect(this);
        _audioChannel->Enable(false);
        _channelManager->DestroyVoiceChannel(_audioChannel);
        _audioChannel = nullptr;
    }

    // Sets the playout volume for this remote stream (1.0 = unity gain).
    void setVolume(double value) {
        _audioChannel->media_channel()->SetOutputVolume(_ssrc, value);
    }

    // Records the current time as the last-activity timestamp.
    void updateActivity() {
        _activityTimestamp = rtc::TimeMillis();
    }

    // Returns the last-activity timestamp in milliseconds.
    int64_t getActivity() {
        return _activityTimestamp;
    }

private:
    // Forwards sent-packet notifications (RTCP) to the Call.
    void OnSentPacket_w(const rtc::SentPacket& sent_packet) {
        _call->OnSentPacket(sent_packet);
    }

private:
    uint32_t _ssrc = 0;
    // Memory is managed by _channelManager
    cricket::VoiceChannel *_audioChannel = nullptr;
    // Memory is managed externally
    cricket::ChannelManager *_channelManager = nullptr;
    webrtc::Call *_call = nullptr;
    int64_t _creationTimestamp = 0;
    int64_t _activityTimestamp = 0;
};
739
740 class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_shared_from_this<OutgoingVideoChannel> {
741 public:
    // Builds the local video media description: a random non-zero 31-bit
    // ssrc plus its RTX ssrc (grouped as FID), the abs-send-time/
    // transport-cc/video-rotation header extensions, and one payload type
    // per available format (with goog-remb, transport-cc, ccm-fir and
    // nack/nack-pli feedback), each followed by its RTX payload when the
    // format defines one.
    static absl::optional<signaling::MediaContent> createOutgoingContentDescription(std::vector<webrtc::SdpVideoFormat> const &availableVideoFormats) {
        signaling::MediaContent mediaContent;

        // Retry until a non-zero ssrc is drawn (0 is reserved).
        auto generator = std::mt19937(std::random_device()());
        auto distribution = std::uniform_int_distribution<uint32_t>();
        do {
            mediaContent.ssrc = distribution(generator) & 0x7fffffffU;
        } while (!mediaContent.ssrc);

        mediaContent.rtpExtensions.emplace_back(webrtc::RtpExtension::kAbsSendTimeUri, 2);
        mediaContent.rtpExtensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, 3);
        mediaContent.rtpExtensions.emplace_back(webrtc::RtpExtension::kVideoRotationUri, 13);

        // FID group ties the primary ssrc to its RTX ssrc (ssrc + 1).
        signaling::SsrcGroup fidGroup;
        fidGroup.semantics = "FID";
        fidGroup.ssrcs.push_back(mediaContent.ssrc);
        fidGroup.ssrcs.push_back(mediaContent.ssrc + 1);
        mediaContent.ssrcGroups.push_back(std::move(fidGroup));

        const auto videoFormats = generateAvailableVideoFormats(availableVideoFormats);

        for (const auto &format : videoFormats) {
            signaling::PayloadType videoPayload;
            videoPayload.id = format.videoCodec.id;
            videoPayload.name = format.videoCodec.name;
            videoPayload.clockrate = format.videoCodec.clockrate;
            videoPayload.channels = 0;

            // Advertise the standard video feedback set for this payload.
            std::vector<signaling::FeedbackType> videoFeedbackTypes;

            signaling::FeedbackType fbGoogRemb;
            fbGoogRemb.type = "goog-remb";
            videoFeedbackTypes.push_back(fbGoogRemb);

            signaling::FeedbackType fbTransportCc;
            fbTransportCc.type = "transport-cc";
            videoFeedbackTypes.push_back(fbTransportCc);

            signaling::FeedbackType fbCcmFir;
            fbCcmFir.type = "ccm";
            fbCcmFir.subtype = "fir";
            videoFeedbackTypes.push_back(fbCcmFir);

            signaling::FeedbackType fbNack;
            fbNack.type = "nack";
            videoFeedbackTypes.push_back(fbNack);

            signaling::FeedbackType fbNackPli;
            fbNackPli.type = "nack";
            fbNackPli.subtype = "pli";
            videoFeedbackTypes.push_back(fbNackPli);

            videoPayload.feedbackTypes = videoFeedbackTypes;
            videoPayload.parameters = {};

            mediaContent.payloadTypes.push_back(std::move(videoPayload));

            // RTX payload references the primary payload via "apt".
            if (format.rtxCodec) {
                signaling::PayloadType rtxPayload;
                rtxPayload.id = format.rtxCodec->id;
                rtxPayload.name = format.rtxCodec->name;
                rtxPayload.clockrate = format.rtxCodec->clockrate;
                rtxPayload.parameters.push_back(std::make_pair("apt", intToString(videoPayload.id)));
                mediaContent.payloadTypes.push_back(std::move(rtxPayload));
            }
        }

        return mediaContent;
    }
811
812 public:
OutgoingVideoChannel(std::shared_ptr<Threads> threads,cricket::ChannelManager * channelManager,webrtc::Call * call,webrtc::RtpTransport * rtpTransport,rtc::UniqueRandomIdGenerator * randomIdGenerator,webrtc::VideoBitrateAllocatorFactory * videoBitrateAllocatorFactory,std::function<void ()> rotationUpdated,NegotiatedMediaContent<cricket::VideoCodec> const & mediaContent)813 OutgoingVideoChannel(
814 std::shared_ptr<Threads> threads,
815 cricket::ChannelManager *channelManager,
816 webrtc::Call *call,
817 webrtc::RtpTransport *rtpTransport,
818 rtc::UniqueRandomIdGenerator *randomIdGenerator,
819 webrtc::VideoBitrateAllocatorFactory *videoBitrateAllocatorFactory,
820 std::function<void()> rotationUpdated,
821 NegotiatedMediaContent<cricket::VideoCodec> const &mediaContent
822 ) :
823 _threads(threads),
824 _mainSsrc(mediaContent.ssrc),
825 _call(call),
826 _channelManager(channelManager),
827 _rotationUpdated(rotationUpdated) {
828 _outgoingVideoChannel = _channelManager->CreateVideoChannel(call, cricket::MediaConfig(), rtpTransport, threads->getMediaThread(), "out1", false, NativeNetworkingImpl::getDefaulCryptoOptions(), randomIdGenerator, cricket::VideoOptions(), videoBitrateAllocatorFactory);
829
830 auto videoCodecs = mediaContent.codecs;
831
832 auto outgoingVideoDescription = std::make_unique<cricket::VideoContentDescription>();
833 for (const auto &rtpExtension : mediaContent.rtpExtensions) {
834 outgoingVideoDescription->AddRtpHeaderExtension(rtpExtension);
835 }
836
837 outgoingVideoDescription->set_rtcp_mux(true);
838 outgoingVideoDescription->set_rtcp_reduced_size(true);
839 outgoingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kSendOnly);
840 outgoingVideoDescription->set_codecs(videoCodecs);
841 outgoingVideoDescription->set_bandwidth(1032000);
842
843 cricket::StreamParams videoSendStreamParams;
844
845 for (const auto &ssrcGroup : mediaContent.ssrcGroups) {
846 for (auto ssrc : ssrcGroup.ssrcs) {
847 videoSendStreamParams.ssrcs.push_back(ssrc);
848 }
849
850 cricket::SsrcGroup mappedGroup(ssrcGroup.semantics, ssrcGroup.ssrcs);
851 videoSendStreamParams.ssrc_groups.push_back(std::move(mappedGroup));
852 }
853
854 videoSendStreamParams.cname = "cname";
855
856 outgoingVideoDescription->AddStream(videoSendStreamParams);
857
858 auto incomingVideoDescription = std::make_unique<cricket::VideoContentDescription>();
859 for (const auto &rtpExtension : mediaContent.rtpExtensions) {
860 incomingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(rtpExtension.uri, rtpExtension.id));
861 }
862 incomingVideoDescription->set_rtcp_mux(true);
863 incomingVideoDescription->set_rtcp_reduced_size(true);
864 incomingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly);
865 incomingVideoDescription->set_codecs(videoCodecs);
866 incomingVideoDescription->set_bandwidth(1032000);
867
868 _outgoingVideoChannel->SetPayloadTypeDemuxingEnabled(false);
869 _outgoingVideoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr);
870 _outgoingVideoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr);
871
872 webrtc::RtpParameters rtpParameters = _outgoingVideoChannel->media_channel()->GetRtpSendParameters(mediaContent.ssrc);
873
874 _outgoingVideoChannel->media_channel()->SetRtpSendParameters(mediaContent.ssrc, rtpParameters);
875
876 _outgoingVideoChannel->SignalSentPacket().connect(this, &OutgoingVideoChannel::OnSentPacket_w);
877 //_outgoingVideoChannel->UpdateRtpTransport(nullptr);
878
879 _outgoingVideoChannel->Enable(false);
880 _outgoingVideoChannel->media_channel()->SetVideoSend(mediaContent.ssrc, NULL, nullptr);
881 }
882
~OutgoingVideoChannel()883 ~OutgoingVideoChannel() {
884 _outgoingVideoChannel->SignalSentPacket().disconnect(this);
885 _outgoingVideoChannel->media_channel()->SetVideoSend(_mainSsrc, nullptr, nullptr);
886 _outgoingVideoChannel->Enable(false);
887 _channelManager->DestroyVideoChannel(_outgoingVideoChannel);
888 _outgoingVideoChannel = nullptr;
889 }
890
setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoCapture)891 void setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoCapture) {
892 _videoCapture = videoCapture;
893
894 if (_videoCapture) {
895 _outgoingVideoChannel->Enable(true);
896 auto videoCaptureImpl = GetVideoCaptureAssumingSameThread(_videoCapture.get());
897 _outgoingVideoChannel->media_channel()->SetVideoSend(_mainSsrc, NULL, videoCaptureImpl->source());
898
899 const auto weak = std::weak_ptr<OutgoingVideoChannel>(shared_from_this());
900 videoCaptureImpl->setRotationUpdated([threads = _threads, weak](int angle) {
901 threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] {
902 const auto strong = weak.lock();
903 if (!strong) {
904 return;
905 }
906 signaling::MediaStateMessage::VideoRotation videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0;
907 switch (angle) {
908 case 0: {
909 videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0;
910 break;
911 }
912 case 90: {
913 videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation90;
914 break;
915 }
916 case 180: {
917 videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation180;
918 break;
919 }
920 case 270: {
921 videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation270;
922 break;
923 }
924 default: {
925 videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0;
926 break;
927 }
928 }
929 if (strong->_videoRotation != videoRotation) {
930 strong->_videoRotation = videoRotation;
931 strong->_rotationUpdated();
932 }
933 });
934 });
935
936 switch (videoCaptureImpl->getRotation()) {
937 case 0: {
938 _videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0;
939 break;
940 }
941 case 90: {
942 _videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation90;
943 break;
944 }
945 case 180: {
946 _videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation180;
947 break;
948 }
949 case 270: {
950 _videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation270;
951 break;
952 }
953 default: {
954 _videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0;
955 break;
956 }
957 }
958 } else {
959 _videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0;
960 _outgoingVideoChannel->Enable(false);
961 _outgoingVideoChannel->media_channel()->SetVideoSend(_mainSsrc, NULL, nullptr);
962 }
963 }
964
965 public:
videoCapture()966 std::shared_ptr<VideoCaptureInterface> videoCapture() {
967 return _videoCapture;
968 }
969
getRotation()970 signaling::MediaStateMessage::VideoRotation getRotation() {
971 return _videoRotation;
972 }
973
974 private:
OnSentPacket_w(const rtc::SentPacket & sent_packet)975 void OnSentPacket_w(const rtc::SentPacket& sent_packet) {
976 _call->OnSentPacket(sent_packet);
977 }
978
979 private:
980 std::shared_ptr<Threads> _threads;
981
982 uint32_t _mainSsrc = 0;
983 webrtc::Call *_call = nullptr;
984 cricket::ChannelManager *_channelManager = nullptr;
985 cricket::VideoChannel *_outgoingVideoChannel = nullptr;
986
987 std::function<void()> _rotationUpdated;
988
989 std::shared_ptr<VideoCaptureInterface> _videoCapture;
990 signaling::MediaStateMessage::VideoRotation _videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0;
991 };
992
993 class VideoSinkImpl : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
994 public:
VideoSinkImpl()995 VideoSinkImpl() {
996 }
997
~VideoSinkImpl()998 virtual ~VideoSinkImpl() {
999 }
1000
OnFrame(const webrtc::VideoFrame & frame)1001 virtual void OnFrame(const webrtc::VideoFrame& frame) override {
1002 //_lastFrame = frame;
1003 for (int i = (int)(_sinks.size()) - 1; i >= 0; i--) {
1004 auto strong = _sinks[i].lock();
1005 if (!strong) {
1006 _sinks.erase(_sinks.begin() + i);
1007 } else {
1008 strong->OnFrame(frame);
1009 }
1010 }
1011 }
1012
OnDiscardedFrame()1013 virtual void OnDiscardedFrame() override {
1014 for (int i = (int)(_sinks.size()) - 1; i >= 0; i--) {
1015 auto strong = _sinks[i].lock();
1016 if (!strong) {
1017 _sinks.erase(_sinks.begin() + i);
1018 } else {
1019 strong->OnDiscardedFrame();
1020 }
1021 }
1022 }
1023
addSink(std::weak_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> impl)1024 void addSink(std::weak_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> impl) {
1025 _sinks.push_back(impl);
1026 if (_lastFrame) {
1027 auto strong = impl.lock();
1028 if (strong) {
1029 strong->OnFrame(_lastFrame.value());
1030 }
1031 }
1032 }
1033
1034 private:
1035 std::vector<std::weak_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>> _sinks;
1036 absl::optional<webrtc::VideoFrame> _lastFrame;
1037 };
1038
// Receives the remote peer's video over the shared RTP transport and fans
// decoded frames out to externally registered sinks via VideoSinkImpl.
class IncomingV2VideoChannel : public sigslot::has_slots<> {
public:
    // Creates a receive-only video channel and applies the negotiated
    // descriptions. NOTE: "outgoing"/"incoming" below refer to local/remote
    // descriptions; the local one is kRecvOnly and the remote one kSendOnly.
    IncomingV2VideoChannel(
        cricket::ChannelManager *channelManager,
        webrtc::Call *call,
        webrtc::RtpTransport *rtpTransport,
        rtc::UniqueRandomIdGenerator *randomIdGenerator,
        NegotiatedMediaContent<cricket::VideoCodec> const &mediaContent,
        std::shared_ptr<Threads> threads) :
    _channelManager(channelManager),
    _call(call) {
        _videoSink.reset(new VideoSinkImpl());

        std::string streamId = "1";

        _videoBitrateAllocatorFactory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory();

        _videoChannel = _channelManager->CreateVideoChannel(call, cricket::MediaConfig(), rtpTransport, threads->getMediaThread(), "1", false, NativeNetworkingImpl::getDefaulCryptoOptions(), randomIdGenerator, cricket::VideoOptions(), _videoBitrateAllocatorFactory.get());

        std::vector<cricket::VideoCodec> videoCodecs = mediaContent.codecs;

        // Local (receive-only) description.
        auto outgoingVideoDescription = std::make_unique<cricket::VideoContentDescription>();
        for (const auto &rtpExtension : mediaContent.rtpExtensions) {
            outgoingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(rtpExtension.uri, rtpExtension.id));
        }
        outgoingVideoDescription->set_rtcp_mux(true);
        outgoingVideoDescription->set_rtcp_reduced_size(true);
        outgoingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly);
        outgoingVideoDescription->set_codecs(videoCodecs);
        outgoingVideoDescription->set_bandwidth(1032000);

        cricket::StreamParams videoRecvStreamParams;

        _mainVideoSsrc = mediaContent.ssrc;

        // Collect every distinct SSRC from the negotiated groups (e.g. the
        // FID media+RTX pair) for the receive stream.
        std::vector<uint32_t> allSsrcs;
        for (const auto &group : mediaContent.ssrcGroups) {
            for (auto ssrc : group.ssrcs) {
                if (std::find(allSsrcs.begin(), allSsrcs.end(), ssrc) == allSsrcs.end()) {
                    allSsrcs.push_back(ssrc);
                }
            }

            cricket::SsrcGroup parsedGroup(group.semantics, group.ssrcs);
            videoRecvStreamParams.ssrc_groups.push_back(parsedGroup);
        }
        videoRecvStreamParams.ssrcs = allSsrcs;

        videoRecvStreamParams.cname = "cname";
        videoRecvStreamParams.set_stream_ids({ streamId });

        // Remote (send-only) description carrying the remote stream params.
        auto incomingVideoDescription = std::make_unique<cricket::VideoContentDescription>();
        for (const auto &rtpExtension : mediaContent.rtpExtensions) {
            incomingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(rtpExtension.uri, rtpExtension.id));
        }
        incomingVideoDescription->set_rtcp_mux(true);
        incomingVideoDescription->set_rtcp_reduced_size(true);
        incomingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kSendOnly);
        incomingVideoDescription->set_codecs(videoCodecs);
        incomingVideoDescription->set_bandwidth(1032000);

        incomingVideoDescription->AddStream(videoRecvStreamParams);

        _videoChannel->SetPayloadTypeDemuxingEnabled(false);
        _videoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr);
        _videoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr);

        // Install the fan-out sink before enabling the channel so no frames
        // are dropped.
        _videoChannel->media_channel()->SetSink(_mainVideoSsrc, _videoSink.get());

        _videoChannel->SignalSentPacket().connect(this, &IncomingV2VideoChannel::OnSentPacket_w);
        //_videoChannel->UpdateRtpTransport(nullptr);

        _videoChannel->Enable(true);
    }

    ~IncomingV2VideoChannel() {
        _videoChannel->Enable(false);
        _channelManager->DestroyVideoChannel(_videoChannel);
        _videoChannel = nullptr;
    }

    // Registers an external frame sink (weakly held; pruned when expired).
    void addSink(std::weak_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> impl) {
        _videoSink->addSink(impl);
    }

private:
    // Forwards sent-packet notifications (RTCP feedback) to the call.
    void OnSentPacket_w(const rtc::SentPacket& sent_packet) {
        _call->OnSentPacket(sent_packet);
    }

private:
    uint32_t _mainVideoSsrc = 0;
    std::unique_ptr<VideoSinkImpl> _videoSink;
    std::unique_ptr<webrtc::VideoBitrateAllocatorFactory> _videoBitrateAllocatorFactory;
    // Memory is managed by _channelManager
    cricket::VideoChannel *_videoChannel;
    // Memory is managed externally
    cricket::ChannelManager *_channelManager = nullptr;
    webrtc::Call *_call = nullptr;
};
1139
1140 } // namespace
1141
1142 class InstanceV2ImplInternal : public std::enable_shared_from_this<InstanceV2ImplInternal> {
1143 public:
    // Stores the caller-supplied configuration and callbacks from the
    // Descriptor. Heavy setup is deferred to start(), which requires
    // shared_from_this() and therefore cannot run in the constructor.
    InstanceV2ImplInternal(Descriptor &&descriptor, std::shared_ptr<Threads> threads) :
    _threads(threads),
    _rtcServers(descriptor.rtcServers),
    _encryptionKey(std::move(descriptor.encryptionKey)),
    _stateUpdated(descriptor.stateUpdated),
    _signalBarsUpdated(descriptor.signalBarsUpdated),
    _audioLevelUpdated(descriptor.audioLevelUpdated),
    _remoteBatteryLevelIsLowUpdated(descriptor.remoteBatteryLevelIsLowUpdated),
    _remoteMediaStateUpdated(descriptor.remoteMediaStateUpdated),
    _remotePrefferedAspectRatioUpdated(descriptor.remotePrefferedAspectRatioUpdated),
    _signalingDataEmitted(descriptor.signalingDataEmitted),
    _createAudioDeviceModule(descriptor.createAudioDeviceModule),
    _eventLog(std::make_unique<webrtc::RtcEventLogNull>()),
    _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()),
    _videoCapture(descriptor.videoCapture) {
    }
1160
    ~InstanceV2ImplInternal() {
        // Stop networking on its own thread...
        _networking->perform(RTC_FROM_HERE, [](NativeNetworkingImpl *networking) {
            networking->stop();
        });
        // ...then block on an empty Invoke so the stop task has fully
        // drained on the network thread before destruction continues.
        _threads->getNetworkThread()->Invoke<void>(RTC_FROM_HERE, []() {
        });
    }
1168
    // Performs the deferred initialization: networking (owned by the network
    // thread), media engine + channel manager, webrtc::Call, RTP transport,
    // and finally signaling. Must be called after construction so that
    // shared_from_this() is available.
    void start() {
        const auto weak = std::weak_ptr<InstanceV2ImplInternal>(shared_from_this());

        // Every networking callback hops back to the media thread and bails
        // out if this instance has been destroyed in the meantime.
        _networking.reset(new ThreadLocalObject<NativeNetworkingImpl>(_threads->getNetworkThread(), [weak, threads = _threads, isOutgoing = _encryptionKey.isOutgoing, rtcServers = _rtcServers]() {
            return new NativeNetworkingImpl((NativeNetworkingImpl::Configuration){
                .isOutgoing = isOutgoing,
                .enableStunMarking = false,
                .enableTCP = false,
                .enableP2P = true,
                .rtcServers = rtcServers,
                .stateUpdated = [threads, weak](const NativeNetworkingImpl::State &state) {
                    threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] {
                        const auto strong = weak.lock();
                        if (!strong) {
                            return;
                        }
                        strong->onNetworkStateUpdated(state);
                    });
                },
                .candidateGathered = [threads, weak](const cricket::Candidate &candidate) {
                    threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] {
                        const auto strong = weak.lock();
                        if (!strong) {
                            return;
                        }

                        strong->sendCandidate(candidate);
                    });
                },
                .transportMessageReceived = [threads, weak](rtc::CopyOnWriteBuffer const &packet, bool isMissing) {
                    // NOTE(review): currently a no-op beyond the liveness
                    // check; packet and isMissing are unused here.
                    threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] {
                        const auto strong = weak.lock();
                        if (!strong) {
                            return;
                        }
                    });
                },
                .rtcpPacketReceived = [threads, weak](rtc::CopyOnWriteBuffer const &packet, int64_t timestamp) {
                    // RTCP is delivered straight to the call's receiver.
                    threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] {
                        const auto strong = weak.lock();
                        if (!strong) {
                            return;
                        }
                        strong->_call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, packet, timestamp);
                    });
                },
                .dataChannelStateUpdated = [threads, weak](bool isDataChannelOpen) {
                    threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] {
                        const auto strong = weak.lock();
                        if (!strong) {
                            return;
                        }
                        strong->onDataChannelStateUpdated(isDataChannelOpen);
                    });
                },
                .dataChannelMessageReceived = [threads, weak](std::string const &message) {
                    threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] {
                        const auto strong = weak.lock();
                        if (!strong) {
                            return;
                        }
                        strong->onDataChannelMessage(message);
                    });
                },
                .threads = threads
            });
        }));

        PlatformInterface::SharedInstance()->configurePlatformAudio();

        // Assemble the media engine: Opus/L16 audio codecs plus the
        // platform-provided video codec factories.
        cricket::MediaEngineDependencies mediaDeps;
        mediaDeps.task_queue_factory = _taskQueueFactory.get();
        mediaDeps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory<webrtc::AudioEncoderOpus, webrtc::AudioEncoderL16>();
        mediaDeps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory<webrtc::AudioDecoderOpus, webrtc::AudioDecoderL16>();

        mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory();
        mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory();

        // Abort initialization entirely if no audio device can be created.
        _audioDeviceModule = createAudioDeviceModule();
        if (!_audioDeviceModule) {
            return;
        }
        mediaDeps.adm = _audioDeviceModule;

        // Snapshot the encoder formats before mediaDeps is consumed below;
        // used later when building the outgoing video content description.
        _availableVideoFormats = mediaDeps.video_encoder_factory->GetSupportedFormats();

        std::unique_ptr<cricket::MediaEngineInterface> mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps));

        _channelManager = cricket::ChannelManager::Create(
            std::move(mediaEngine),
            std::make_unique<cricket::RtpDataEngine>(),
            true,
            _threads->getMediaThread(),
            _threads->getNetworkThread()
        );

        //setAudioInputDevice(_initialInputDeviceId);
        //setAudioOutputDevice(_initialOutputDeviceId);

        webrtc::Call::Config callConfig(_eventLog.get());
        callConfig.task_queue_factory = _taskQueueFactory.get();
        callConfig.trials = &_fieldTrials;
        callConfig.audio_state = _channelManager->media_engine()->voice().GetAudioState();
        _call.reset(webrtc::Call::Create(callConfig));

        _uniqueRandomIdGenerator.reset(new rtc::UniqueRandomIdGenerator());

        // Fetch the RTP transport synchronously from the network thread.
        _threads->getNetworkThread()->Invoke<void>(RTC_FROM_HERE, [this]() {
            _rtpTransport = _networking->getSyncAssumingSameThread()->getRtpTransport();
        });

        _videoBitrateAllocatorFactory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory();

        _networking->perform(RTC_FROM_HERE, [](NativeNetworkingImpl *networking) {
            networking->start();
        });

        // Attach any capture source supplied via the descriptor.
        if (_videoCapture) {
            setVideoCapture(_videoCapture);
        }

        beginSignaling();

        adjustBitratePreferences(true);
    }
1294
sendSignalingMessage(signaling::Message const & message)1295 void sendSignalingMessage(signaling::Message const &message) {
1296 auto data = message.serialize();
1297
1298 RTC_LOG(LS_INFO) << "sendSignalingMessage: " << std::string(data.begin(), data.end());
1299
1300 if (_signalingEncryption) {
1301 if (const auto encryptedData = _signalingEncryption->encryptOutgoing(data)) {
1302 _signalingDataEmitted(std::vector<uint8_t>(encryptedData->data(), encryptedData->data() + encryptedData->size()));
1303 } else {
1304 RTC_LOG(LS_ERROR) << "sendSignalingMessage: failed to encrypt payload";
1305 }
1306 } else {
1307 _signalingDataEmitted(data);
1308 }
1309 }
1310
beginSignaling()1311 void beginSignaling() {
1312 _signalingEncryption.reset(new SignalingEncryption(_encryptionKey));
1313
1314 if (_encryptionKey.isOutgoing) {
1315 _outgoingAudioContent = OutgoingAudioChannel::createOutgoingContentDescription();
1316 _outgoingVideoContent = OutgoingVideoChannel::createOutgoingContentDescription(_availableVideoFormats);
1317
1318 sendInitialSetup();
1319 }
1320 }
1321
    // Instantiates the outgoing audio/video channels from the negotiated
    // contents (when present) and refreshes the bitrate configuration.
    // Called after codec negotiation completes in processSignalingData().
    void createNegotiatedChannels() {
        if (_negotiatedOutgoingVideoContent) {
            const auto weak = std::weak_ptr<InstanceV2ImplInternal>(shared_from_this());

            _outgoingVideoChannel.reset(new OutgoingVideoChannel(
                _threads,
                _channelManager.get(),
                _call.get(),
                _rtpTransport,
                _uniqueRandomIdGenerator.get(),
                _videoBitrateAllocatorFactory.get(),
                // Rotation-changed callback: notify the remote peer from the
                // media thread (skipped if this instance is already gone).
                [threads = _threads, weak]() {
                    threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] {
                        const auto strong = weak.lock();
                        if (!strong) {
                            return;
                        }
                        strong->sendMediaState();
                    });
                },
                _negotiatedOutgoingVideoContent.value()
            ));

            // Re-attach the capture source to the freshly created channel.
            if (_videoCapture) {
                _outgoingVideoChannel->setVideoCapture(_videoCapture);
            }
        }

        if (_negotiatedOutgoingAudioContent) {
            _outgoingAudioChannel.reset(new OutgoingAudioChannel(
                _call.get(),
                _channelManager.get(),
                _uniqueRandomIdGenerator.get(),
                &_audioSource,
                _rtpTransport,
                _negotiatedOutgoingAudioContent.value(),
                _threads
            ));
        }

        adjustBitratePreferences(true);
    }
1364
    // Collects local ICE/DTLS parameters on the network thread, then builds
    // and sends the InitialSetupMessage (including any prepared audio/video
    // offers) from the media thread.
    void sendInitialSetup() {
        const auto weak = std::weak_ptr<InstanceV2ImplInternal>(shared_from_this());

        _networking->perform(RTC_FROM_HERE, [weak, threads = _threads, isOutgoing = _encryptionKey.isOutgoing](NativeNetworkingImpl *networking) {
            auto localFingerprint = networking->getLocalFingerprint();
            std::string hash = localFingerprint->algorithm;
            std::string fingerprint = localFingerprint->GetRfc4572Fingerprint();
            // DTLS role: the caller offers "actpass", the callee "passive".
            std::string setup;
            if (isOutgoing) {
                setup = "actpass";
            } else {
                setup = "passive";
            }

            auto localIceParams = networking->getLocalIceParameters();
            std::string ufrag = localIceParams.ufrag;
            std::string pwd = localIceParams.pwd;

            // Hop back to the media thread to assemble and send the message.
            threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, ufrag, pwd, hash, fingerprint, setup, localIceParams]() {
                const auto strong = weak.lock();
                if (!strong) {
                    return;
                }

                signaling::InitialSetupMessage data;

                if (strong->_outgoingAudioContent) {
                    data.audio = strong->_outgoingAudioContent.value();
                }
                if (strong->_outgoingVideoContent) {
                    data.video = strong->_outgoingVideoContent.value();
                }

                data.ufrag = ufrag;
                data.pwd = pwd;

                signaling::DtlsFingerprint dtlsFingerprint;
                dtlsFingerprint.hash = hash;
                dtlsFingerprint.fingerprint = fingerprint;
                dtlsFingerprint.setup = setup;
                data.fingerprints.push_back(std::move(dtlsFingerprint));

                signaling::Message message;
                message.data = std::move(data);
                strong->sendSignalingMessage(message);
            });
        });
    }
1413
receiveSignalingData(const std::vector<uint8_t> & data)1414 void receiveSignalingData(const std::vector<uint8_t> &data) {
1415 std::vector<uint8_t> decryptedData;
1416
1417 if (_signalingEncryption) {
1418 const auto rawDecryptedData = _signalingEncryption->decryptIncoming(data);
1419 if (!rawDecryptedData) {
1420 RTC_LOG(LS_ERROR) << "receiveSignalingData: could not decrypt payload";
1421
1422 return;
1423 }
1424
1425 decryptedData = std::vector<uint8_t>(rawDecryptedData->data(), rawDecryptedData->data() + rawDecryptedData->size());
1426 } else {
1427 decryptedData = data;
1428 }
1429
1430 processSignalingData(decryptedData);
1431 }
1432
    // Parses a decrypted signaling payload and dispatches on its type:
    //  - InitialSetupMessage: applies remote ICE/DTLS parameters, negotiates
    //    audio/video contents, creates incoming channels and (on the
    //    answering side) replies with our own initial setup.
    //  - CandidatesMessage: queues remote ICE candidates, committing them
    //    once the handshake has completed.
    //  - MediaStateMessage: forwards remote mute/video/battery state to the
    //    registered callbacks.
    void processSignalingData(const std::vector<uint8_t> &data) {
        RTC_LOG(LS_INFO) << "processSignalingData: " << std::string(data.begin(), data.end());

        const auto message = signaling::Message::parse(data);
        if (!message) {
            return;
        }
        const auto messageData = &message->data;
        if (const auto initialSetup = absl::get_if<signaling::InitialSetupMessage>(messageData)) {
            PeerIceParameters remoteIceParameters;
            remoteIceParameters.ufrag = initialSetup->ufrag;
            remoteIceParameters.pwd = initialSetup->pwd;

            // Only the first offered DTLS fingerprint is used.
            std::unique_ptr<rtc::SSLFingerprint> fingerprint;
            std::string sslSetup;
            if (initialSetup->fingerprints.size() != 0) {
                fingerprint = rtc::SSLFingerprint::CreateUniqueFromRfc4572(initialSetup->fingerprints[0].hash, initialSetup->fingerprints[0].fingerprint);
                sslSetup = initialSetup->fingerprints[0].setup;
            }

            _networking->perform(RTC_FROM_HERE, [threads = _threads, remoteIceParameters = std::move(remoteIceParameters), fingerprint = std::move(fingerprint), sslSetup = std::move(sslSetup)](NativeNetworkingImpl *networking) {
                networking->setRemoteParams(remoteIceParameters, fingerprint.get(), sslSetup);
            });

            if (const auto audio = initialSetup->audio) {
                if (_encryptionKey.isOutgoing) {
                    // Offerer: negotiate against the content we already sent
                    // and rewrite our stored offer with the negotiated result.
                    if (_outgoingAudioContent) {
                        _negotiatedOutgoingAudioContent = negotiateMediaContent<cricket::AudioCodec>(_outgoingAudioContent.value(), _outgoingAudioContent.value(), audio.value(), false);
                        const auto incomingAudioContent = negotiateMediaContent<cricket::AudioCodec>(audio.value(), _outgoingAudioContent.value(), audio.value(), false);

                        signaling::MediaContent outgoingAudioContent;

                        outgoingAudioContent.ssrc = _outgoingAudioContent->ssrc;
                        outgoingAudioContent.ssrcGroups = _outgoingAudioContent->ssrcGroups;
                        outgoingAudioContent.rtpExtensions = _negotiatedOutgoingAudioContent->rtpExtensions;
                        outgoingAudioContent.payloadTypes = getPayloadTypesFromAudioCodecs(_negotiatedOutgoingAudioContent->codecs);

                        _outgoingAudioContent = std::move(outgoingAudioContent);

                        _incomingAudioChannel.reset(new IncomingV2AudioChannel(
                            _channelManager.get(),
                            _call.get(),
                            _rtpTransport,
                            _uniqueRandomIdGenerator.get(),
                            incomingAudioContent,
                            _threads
                        ));
                    }
                } else {
                    // Answerer: generate our own content now and negotiate it
                    // against the remote offer.
                    const auto generatedOutgoingContent = OutgoingAudioChannel::createOutgoingContentDescription();

                    if (generatedOutgoingContent) {
                        _negotiatedOutgoingAudioContent = negotiateMediaContent<cricket::AudioCodec>(generatedOutgoingContent.value(), generatedOutgoingContent.value(), audio.value(), true);
                        const auto incomingAudioContent = negotiateMediaContent<cricket::AudioCodec>(audio.value(), generatedOutgoingContent.value(), audio.value(), true);

                        if (_negotiatedOutgoingAudioContent) {
                            signaling::MediaContent outgoingAudioContent;

                            outgoingAudioContent.ssrc = generatedOutgoingContent->ssrc;
                            outgoingAudioContent.ssrcGroups = generatedOutgoingContent->ssrcGroups;
                            outgoingAudioContent.rtpExtensions = _negotiatedOutgoingAudioContent->rtpExtensions;
                            outgoingAudioContent.payloadTypes = getPayloadTypesFromAudioCodecs(_negotiatedOutgoingAudioContent->codecs);

                            _outgoingAudioContent = std::move(outgoingAudioContent);

                            _incomingAudioChannel.reset(new IncomingV2AudioChannel(
                                _channelManager.get(),
                                _call.get(),
                                _rtpTransport,
                                _uniqueRandomIdGenerator.get(),
                                incomingAudioContent,
                                _threads
                            ));
                        }
                    }
                }
            }

            // Video follows the same offerer/answerer pattern as audio above.
            if (const auto video = initialSetup->video) {
                if (_encryptionKey.isOutgoing) {
                    if (_outgoingVideoContent) {
                        _negotiatedOutgoingVideoContent = negotiateMediaContent<cricket::VideoCodec>(_outgoingVideoContent.value(), _outgoingVideoContent.value(), video.value(), false);
                        const auto incomingVideoContent = negotiateMediaContent<cricket::VideoCodec>(video.value(), _outgoingVideoContent.value(), video.value(), false);

                        signaling::MediaContent outgoingVideoContent;

                        outgoingVideoContent.ssrc = _outgoingVideoContent->ssrc;
                        outgoingVideoContent.ssrcGroups = _outgoingVideoContent->ssrcGroups;
                        outgoingVideoContent.rtpExtensions = _negotiatedOutgoingVideoContent->rtpExtensions;
                        outgoingVideoContent.payloadTypes = getPayloadTypesFromVideoCodecs(_negotiatedOutgoingVideoContent->codecs);

                        _outgoingVideoContent = std::move(outgoingVideoContent);

                        _incomingVideoChannel.reset(new IncomingV2VideoChannel(
                            _channelManager.get(),
                            _call.get(),
                            _rtpTransport,
                            _uniqueRandomIdGenerator.get(),
                            incomingVideoContent,
                            _threads
                        ));
                    }
                } else {
                    const auto generatedOutgoingContent = OutgoingVideoChannel::createOutgoingContentDescription(_availableVideoFormats);

                    if (generatedOutgoingContent) {
                        _negotiatedOutgoingVideoContent = negotiateMediaContent<cricket::VideoCodec>(generatedOutgoingContent.value(), generatedOutgoingContent.value(), video.value(), true);
                        const auto incomingVideoContent = negotiateMediaContent<cricket::VideoCodec>(video.value(), generatedOutgoingContent.value(), video.value(), true);

                        if (_negotiatedOutgoingVideoContent) {
                            signaling::MediaContent outgoingVideoContent;

                            outgoingVideoContent.ssrc = generatedOutgoingContent->ssrc;
                            outgoingVideoContent.ssrcGroups = generatedOutgoingContent->ssrcGroups;
                            outgoingVideoContent.rtpExtensions = _negotiatedOutgoingVideoContent->rtpExtensions;
                            outgoingVideoContent.payloadTypes = getPayloadTypesFromVideoCodecs(_negotiatedOutgoingVideoContent->codecs);

                            _outgoingVideoContent = std::move(outgoingVideoContent);

                            _incomingVideoChannel.reset(new IncomingV2VideoChannel(
                                _channelManager.get(),
                                _call.get(),
                                _rtpTransport,
                                _uniqueRandomIdGenerator.get(),
                                incomingVideoContent,
                                _threads
                            ));
                        }
                    }
                }
            }

            createNegotiatedChannels();

            // The answering side replies with its own initial setup.
            if (!_encryptionKey.isOutgoing) {
                sendInitialSetup();
            }

            _handshakeCompleted = true;
            commitPendingIceCandidates();
        } else if (const auto candidatesList = absl::get_if<signaling::CandidatesMessage>(messageData)) {
            for (const auto &candidate : candidatesList->iceCandidates) {
                // Parse each SDP candidate string; malformed ones are logged
                // and skipped.
                webrtc::JsepIceCandidate parseCandidate{ std::string(), 0 };
                if (!parseCandidate.Initialize(candidate.sdpString, nullptr)) {
                    RTC_LOG(LS_ERROR) << "Could not parse candidate: " << candidate.sdpString;
                    continue;
                }
                _pendingIceCandidates.push_back(parseCandidate.candidate());
            }

            if (_handshakeCompleted) {
                commitPendingIceCandidates();
            }
        } else if (const auto mediaState = absl::get_if<signaling::MediaStateMessage>(messageData)) {
            AudioState mappedAudioState;
            if (mediaState->isMuted) {
                mappedAudioState = AudioState::Muted;
            } else {
                mappedAudioState = AudioState::Active;
            }

            VideoState mappedVideoState;
            switch (mediaState->videoState) {
                case signaling::MediaStateMessage::VideoState::Inactive: {
                    mappedVideoState = VideoState::Inactive;
                    break;
                }
                case signaling::MediaStateMessage::VideoState::Suspended: {
                    mappedVideoState = VideoState::Paused;
                    break;
                }
                case signaling::MediaStateMessage::VideoState::Active: {
                    mappedVideoState = VideoState::Active;
                    break;
                }
                default: {
                    RTC_FATAL() << "Unknown videoState";
                    break;
                }
            }

            if (_remoteMediaStateUpdated) {
                _remoteMediaStateUpdated(mappedAudioState, mappedVideoState);
            }

            if (_remoteBatteryLevelIsLowUpdated) {
                _remoteBatteryLevelIsLowUpdated(mediaState->isBatteryLow);
            }
        }
    }
1623
commitPendingIceCandidates()1624 void commitPendingIceCandidates() {
1625 if (_pendingIceCandidates.size() == 0) {
1626 return;
1627 }
1628 _networking->perform(RTC_FROM_HERE, [threads = _threads, parsedCandidates = _pendingIceCandidates](NativeNetworkingImpl *networking) {
1629 networking->addCandidates(parsedCandidates);
1630 });
1631 _pendingIceCandidates.clear();
1632 }
1633
onNetworkStateUpdated(NativeNetworkingImpl::State const & state)1634 void onNetworkStateUpdated(NativeNetworkingImpl::State const &state) {
1635 State mappedState;
1636 if (state.isReadyToSendData) {
1637 mappedState = State::Established;
1638 } else {
1639 mappedState = State::Reconnecting;
1640 }
1641 _stateUpdated(mappedState);
1642 }
1643
onDataChannelStateUpdated(bool isDataChannelOpen)1644 void onDataChannelStateUpdated(bool isDataChannelOpen) {
1645 if (_isDataChannelOpen != isDataChannelOpen) {
1646 _isDataChannelOpen = isDataChannelOpen;
1647
1648 if (_isDataChannelOpen) {
1649 sendMediaState();
1650 }
1651 }
1652 }
1653
sendDataChannelMessage(signaling::Message const & message)1654 void sendDataChannelMessage(signaling::Message const &message) {
1655 if (!_isDataChannelOpen) {
1656 RTC_LOG(LS_ERROR) << "sendDataChannelMessage called, but data channel is not open";
1657 return;
1658 }
1659 auto data = message.serialize();
1660 std::string stringData(data.begin(), data.end());
1661 RTC_LOG(LS_INFO) << "sendDataChannelMessage: " << stringData;
1662 _networking->perform(RTC_FROM_HERE, [stringData = std::move(stringData)](NativeNetworkingImpl *networking) {
1663 networking->sendDataChannelMessage(stringData);
1664 });
1665 }
1666
onDataChannelMessage(std::string const & message)1667 void onDataChannelMessage(std::string const &message) {
1668 RTC_LOG(LS_INFO) << "dataChannelMessage received: " << message;
1669 std::vector<uint8_t> data(message.begin(), message.end());
1670 processSignalingData(data);
1671 }
1672
sendMediaState()1673 void sendMediaState() {
1674 if (!_isDataChannelOpen) {
1675 return;
1676 }
1677 signaling::Message message;
1678 signaling::MediaStateMessage data;
1679 data.isMuted = _isMicrophoneMuted;
1680 data.isBatteryLow = _isBatteryLow;
1681 if (_outgoingVideoChannel) {
1682 if (_outgoingVideoChannel->videoCapture()) {
1683 data.videoState = signaling::MediaStateMessage::VideoState::Active;
1684 } else{
1685 data.videoState = signaling::MediaStateMessage::VideoState::Inactive;
1686 }
1687 data.videoRotation = _outgoingVideoChannel->getRotation();
1688 } else {
1689 data.videoState = signaling::MediaStateMessage::VideoState::Inactive;
1690 data.videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0;
1691 }
1692 message.data = std::move(data);
1693 sendDataChannelMessage(message);
1694 }
1695
sendCandidate(const cricket::Candidate & candidate)1696 void sendCandidate(const cricket::Candidate &candidate) {
1697 cricket::Candidate patchedCandidate = candidate;
1698 patchedCandidate.set_component(1);
1699
1700 signaling::CandidatesMessage data;
1701
1702 signaling::IceCandidate serializedCandidate;
1703
1704 webrtc::JsepIceCandidate iceCandidate{ std::string(), 0 };
1705 iceCandidate.SetCandidate(patchedCandidate);
1706 std::string serialized;
1707 const auto success = iceCandidate.ToString(&serialized);
1708 assert(success);
1709
1710 serializedCandidate.sdpString = serialized;
1711
1712 data.iceCandidates.push_back(std::move(serializedCandidate));
1713
1714 signaling::Message message;
1715 message.data = std::move(data);
1716 sendSignalingMessage(message);
1717 }
1718
setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoCapture)1719 void setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoCapture) {
1720 _videoCapture = videoCapture;
1721
1722 if (_outgoingVideoChannel) {
1723 _outgoingVideoChannel->setVideoCapture(videoCapture);
1724
1725 sendMediaState();
1726
1727 adjustBitratePreferences(true);
1728 }
1729 }
1730
    // Intentionally a no-op: the v2 instance does not act on a requested
    // remote aspect ratio.
    void setRequestedVideoAspect(float aspect) {

    }
1734
    // Intentionally a no-op: network-type hints are not used by this
    // implementation.
    void setNetworkType(NetworkType networkType) {

    }
1738
setMuteMicrophone(bool muteMicrophone)1739 void setMuteMicrophone(bool muteMicrophone) {
1740 if (_isMicrophoneMuted != muteMicrophone) {
1741 _isMicrophoneMuted = muteMicrophone;
1742
1743 if (_outgoingAudioChannel) {
1744 _outgoingAudioChannel->setIsMuted(muteMicrophone);
1745 }
1746
1747 sendMediaState();
1748 }
1749 }
1750
    // Attaches a renderer sink to the incoming video channel.
    // NOTE(review): if the incoming channel has not been created yet, the sink
    // is silently dropped rather than stored for later attachment — confirm
    // callers only invoke this after the channel exists.
    void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
        if (_incomingVideoChannel) {
            _incomingVideoChannel->addSink(sink);
        }
    }
1756
    // Intentionally a no-op: audio input device selection is not implemented
    // in the v2 instance.
    void setAudioInputDevice(std::string id) {

    }
1760
    // Intentionally a no-op: audio output device selection is not implemented
    // in the v2 instance.
    void setAudioOutputDevice(std::string id) {

    }
1764
setIsLowBatteryLevel(bool isLowBatteryLevel)1765 void setIsLowBatteryLevel(bool isLowBatteryLevel) {
1766 if (_isBatteryLow != isLowBatteryLevel) {
1767 _isBatteryLow = isLowBatteryLevel;
1768 sendMediaState();
1769 }
1770 }
1771
    // Completes immediately with a default FinalState; no teardown work is
    // done here (the owning InstanceV2Impl attaches the debug log afterwards).
    void stop(std::function<void(FinalState)> completion) {
        completion({});
    }
1775
    // Reconfigures the call-wide bitrate envelope depending on whether video
    // is being captured. All values are in bits per second; the summed kbps
    // terms are per-stream budget components kept explicit for readability.
    void adjustBitratePreferences(bool resetStartBitrate) {
        webrtc::BitrateConstraints preferences;
        if (_videoCapture) {
            // Audio + video: 64 kbps floor, ~1 Mbps start, ~1.2 Mbps ceiling.
            preferences.min_bitrate_bps = 64000;
            if (resetStartBitrate) {
                preferences.start_bitrate_bps = (100 + 800 + 32 + 100) * 1000;
            }
            preferences.max_bitrate_bps = (100 + 200 + 800 + 32 + 100) * 1000;
        } else {
            // Audio-only: pin the whole envelope to 32 kbps.
            preferences.min_bitrate_bps = 32000;
            if (resetStartBitrate) {
                preferences.start_bitrate_bps = 32000;
            }
            preferences.max_bitrate_bps = 32000;
        }

        _call->GetTransportControllerSend()->SetSdpBitrateParameters(preferences);
    }
1794
1795 private:
    // Creates and initializes the audio device module (ADM), preferring the
    // embedder-supplied factory and falling back to the platform default
    // layer. Returns nullptr if no module passes Init().
    rtc::scoped_refptr<webrtc::AudioDeviceModule> createAudioDeviceModule() {
        // Builds an ADM for the given audio layer on our task queue factory.
        const auto create = [&](webrtc::AudioDeviceModule::AudioLayer layer) {
            return webrtc::AudioDeviceModule::Create(
                layer,
                _taskQueueFactory.get());
        };
        // Accepts a module only if it initializes successfully.
        const auto check = [&](const rtc::scoped_refptr<webrtc::AudioDeviceModule> &result) {
            return (result && result->Init() == 0) ? result : nullptr;
        };
        if (_createAudioDeviceModule) {
            if (const auto result = check(_createAudioDeviceModule(_taskQueueFactory.get()))) {
                return result;
            }
        }
        return check(create(webrtc::AudioDeviceModule::kPlatformDefaultAudio));
    }
1812
private:
    // Configuration captured from the Descriptor at construction time.
    std::shared_ptr<Threads> _threads;
    std::vector<RtcServer> _rtcServers;
    EncryptionKey _encryptionKey;
    // Embedder-supplied callbacks.
    std::function<void(State)> _stateUpdated;
    std::function<void(int)> _signalBarsUpdated;
    std::function<void(float)> _audioLevelUpdated;
    std::function<void(bool)> _remoteBatteryLevelIsLowUpdated;
    std::function<void(AudioState, VideoState)> _remoteMediaStateUpdated;
    std::function<void(float)> _remotePrefferedAspectRatioUpdated; // (sic: "Preffered" — name kept for compatibility)
    std::function<void(const std::vector<uint8_t> &)> _signalingDataEmitted;
    std::function<rtc::scoped_refptr<webrtc::AudioDeviceModule>(webrtc::TaskQueueFactory*)> _createAudioDeviceModule;

    // Optional encryption layer for signaling messages.
    std::unique_ptr<SignalingEncryption> _signalingEncryption;

    // Handshake / data-channel progress flags.
    bool _handshakeCompleted = false;
    std::vector<cricket::Candidate> _pendingIceCandidates; // held until handshake completes
    bool _isDataChannelOpen = false;

    // Core WebRTC machinery.
    std::unique_ptr<webrtc::RtcEventLogNull> _eventLog;
    std::unique_ptr<webrtc::TaskQueueFactory> _taskQueueFactory;
    std::unique_ptr<cricket::MediaEngineInterface> _mediaEngine;
    std::unique_ptr<webrtc::Call> _call;
    webrtc::FieldTrialBasedConfig _fieldTrials;
    webrtc::LocalAudioSinkAdapter _audioSource;
    rtc::scoped_refptr<webrtc::AudioDeviceModule> _audioDeviceModule;

    // RTP transport / channel plumbing.
    std::unique_ptr<rtc::UniqueRandomIdGenerator> _uniqueRandomIdGenerator;
    webrtc::RtpTransport *_rtpTransport = nullptr; // non-owning
    std::unique_ptr<cricket::ChannelManager> _channelManager;
    std::unique_ptr<webrtc::VideoBitrateAllocatorFactory> _videoBitrateAllocatorFactory;

    // Networking instance living on its own thread.
    std::shared_ptr<ThreadLocalObject<NativeNetworkingImpl>> _networking;

    // Negotiated outgoing audio description and channel.
    absl::optional<signaling::MediaContent> _outgoingAudioContent;
    absl::optional<NegotiatedMediaContent<cricket::AudioCodec>> _negotiatedOutgoingAudioContent;

    std::unique_ptr<OutgoingAudioChannel> _outgoingAudioChannel;
    bool _isMicrophoneMuted = false;

    // Negotiated outgoing video description and channel.
    std::vector<webrtc::SdpVideoFormat> _availableVideoFormats;

    absl::optional<signaling::MediaContent> _outgoingVideoContent;
    absl::optional<NegotiatedMediaContent<cricket::VideoCodec>> _negotiatedOutgoingVideoContent;

    std::shared_ptr<OutgoingVideoChannel> _outgoingVideoChannel;

    bool _isBatteryLow = false;

    // Incoming media channels, created once negotiation produces content.
    std::unique_ptr<IncomingV2AudioChannel> _incomingAudioChannel;
    std::unique_ptr<IncomingV2VideoChannel> _incomingVideoChannel;

    std::shared_ptr<VideoCaptureInterface> _videoCapture;
};
1867
InstanceV2Impl::InstanceV2Impl(Descriptor &&descriptor) {
    // Route WebRTC logging into a file sink when a log path was supplied.
    if (descriptor.config.logPath.data.size() != 0) {
        _logSink = std::make_unique<LogSinkImpl>(descriptor.config.logPath);
    }
    rtc::LogMessage::LogToDebug(rtc::LS_INFO);
    rtc::LogMessage::SetLogToStderr(false);
    if (_logSink) {
        rtc::LogMessage::AddLogToStream(_logSink.get(), rtc::LS_INFO);
    }

    _threads = StaticThreads::getThreads();
    // The internal implementation lives on the media thread; every public
    // method below hops onto that thread via _internal->perform().
    _internal.reset(new ThreadLocalObject<InstanceV2ImplInternal>(_threads->getMediaThread(), [descriptor = std::move(descriptor), threads = _threads]() mutable {
        return new InstanceV2ImplInternal(std::move(descriptor), threads);
    }));
    _internal->perform(RTC_FROM_HERE, [](InstanceV2ImplInternal *internal) {
        internal->start();
    });
}
1886
InstanceV2Impl::~InstanceV2Impl() {
    // Detach the file log sink registered in the constructor (safe with null).
    rtc::LogMessage::RemoveLogToStream(_logSink.get());
}
1890
receiveSignalingData(const std::vector<uint8_t> & data)1891 void InstanceV2Impl::receiveSignalingData(const std::vector<uint8_t> &data) {
1892 _internal->perform(RTC_FROM_HERE, [data](InstanceV2ImplInternal *internal) {
1893 internal->receiveSignalingData(data);
1894 });
1895 }
1896
setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoCapture)1897 void InstanceV2Impl::setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoCapture) {
1898 _internal->perform(RTC_FROM_HERE, [videoCapture](InstanceV2ImplInternal *internal) {
1899 internal->setVideoCapture(videoCapture);
1900 });
1901 }
1902
setRequestedVideoAspect(float aspect)1903 void InstanceV2Impl::setRequestedVideoAspect(float aspect) {
1904 _internal->perform(RTC_FROM_HERE, [aspect](InstanceV2ImplInternal *internal) {
1905 internal->setRequestedVideoAspect(aspect);
1906 });
1907 }
1908
setNetworkType(NetworkType networkType)1909 void InstanceV2Impl::setNetworkType(NetworkType networkType) {
1910 _internal->perform(RTC_FROM_HERE, [networkType](InstanceV2ImplInternal *internal) {
1911 internal->setNetworkType(networkType);
1912 });
1913 }
1914
setMuteMicrophone(bool muteMicrophone)1915 void InstanceV2Impl::setMuteMicrophone(bool muteMicrophone) {
1916 _internal->perform(RTC_FROM_HERE, [muteMicrophone](InstanceV2ImplInternal *internal) {
1917 internal->setMuteMicrophone(muteMicrophone);
1918 });
1919 }
1920
setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink)1921 void InstanceV2Impl::setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
1922 _internal->perform(RTC_FROM_HERE, [sink](InstanceV2ImplInternal *internal) {
1923 internal->setIncomingVideoOutput(sink);
1924 });
1925 }
1926
setAudioInputDevice(std::string id)1927 void InstanceV2Impl::setAudioInputDevice(std::string id) {
1928 _internal->perform(RTC_FROM_HERE, [id](InstanceV2ImplInternal *internal) {
1929 internal->setAudioInputDevice(id);
1930 });
1931 }
1932
setAudioOutputDevice(std::string id)1933 void InstanceV2Impl::setAudioOutputDevice(std::string id) {
1934 _internal->perform(RTC_FROM_HERE, [id](InstanceV2ImplInternal *internal) {
1935 internal->setAudioOutputDevice(id);
1936 });
1937 }
1938
setIsLowBatteryLevel(bool isLowBatteryLevel)1939 void InstanceV2Impl::setIsLowBatteryLevel(bool isLowBatteryLevel) {
1940 _internal->perform(RTC_FROM_HERE, [isLowBatteryLevel](InstanceV2ImplInternal *internal) {
1941 internal->setIsLowBatteryLevel(isLowBatteryLevel);
1942 });
1943 }
1944
// Intentionally a no-op: input volume control is not supported in v2.
void InstanceV2Impl::setInputVolume(float level) {
}
1947
// Intentionally a no-op: output volume control is not supported in v2.
void InstanceV2Impl::setOutputVolume(float level) {
}
1950
// Intentionally a no-op: audio ducking is not supported in v2.
void InstanceV2Impl::setAudioOutputDuckingEnabled(bool enabled) {
}
1953
// Intentionally a no-op: output gain control is not supported in v2.
void InstanceV2Impl::setAudioOutputGainControlEnabled(bool enabled) {
}
1956
// Intentionally a no-op: echo cancellation strength is not adjustable in v2.
void InstanceV2Impl::setEchoCancellationStrength(int strength) {
}
1959
GetVersions()1960 std::vector<std::string> InstanceV2Impl::GetVersions() {
1961 std::vector<std::string> result;
1962 result.push_back("4.0.0");
1963 return result;
1964 }
1965
// Returns the fixed maximum connection layer constant (92) for this build.
int InstanceV2Impl::GetConnectionMaxLayer() {
    return 92;
}
1969
// Error reporting is not implemented; always returns an empty string.
std::string InstanceV2Impl::getLastError() {
    return "";
}
1973
// Debug info is not implemented; always returns an empty string.
std::string InstanceV2Impl::getDebugInfo() {
    return "";
}
1977
// Relay preference is not implemented; always returns 0.
int64_t InstanceV2Impl::getPreferredRelayId() {
    return 0;
}
1981
// Traffic statistics are not collected; returns a default-constructed value.
TrafficStats InstanceV2Impl::getTrafficStats() {
    return {};
}
1985
// Persistent state is not used; returns a default-constructed value.
PersistentState InstanceV2Impl::getPersistentState() {
    return {};
}
1989
// Stops the call: snapshots the debug log on the caller's thread (the sink is
// owned here), then asks the internal implementation to stop on the media
// thread and invokes |completion| with the final state, log attached.
void InstanceV2Impl::stop(std::function<void(FinalState)> completion) {
    std::string debugLog;
    if (_logSink) {
        debugLog = _logSink->result();
    }
    // debugLog is moved through both closures; the inner lambda attaches it
    // to the FinalState produced by the internal stop.
    _internal->perform(RTC_FROM_HERE, [completion, debugLog = std::move(debugLog)](InstanceV2ImplInternal *internal) mutable {
        internal->stop([completion, debugLog = std::move(debugLog)](FinalState finalState) mutable {
            finalState.debugLog = debugLog;
            completion(finalState);
        });
    });
}
2002
template <>
bool Register<InstanceV2Impl>() {
    // Registers InstanceV2Impl with the instance factory metadata table.
    return Meta::RegisterOne<InstanceV2Impl>();
}
2007
2008 } // namespace tgcalls
2009