1 /*
2 This file is part of Telegram Desktop,
3 the official desktop application for the Telegram messaging service.
4
5 For license and copyright information please follow this link:
6 https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
7 */
8 #include "calls/group/calls_group_call.h"
9
10 #include "calls/group/calls_group_common.h"
11 #include "main/main_session.h"
12 #include "api/api_send_progress.h"
13 #include "api/api_updates.h"
14 #include "apiwrap.h"
15 #include "lang/lang_keys.h"
16 #include "lang/lang_hardcoded.h"
17 #include "boxes/peers/edit_participants_box.h" // SubscribeToMigration.
18 #include "ui/toasts/common_toasts.h"
19 #include "base/unixtime.h"
20 #include "core/application.h"
21 #include "core/core_settings.h"
22 #include "data/data_changes.h"
23 #include "data/data_user.h"
24 #include "data/data_chat.h"
25 #include "data/data_channel.h"
26 #include "data/data_group_call.h"
27 #include "data/data_peer_values.h"
28 #include "data/data_session.h"
29 #include "base/global_shortcuts.h"
30 #include "base/random.h"
31 #include "webrtc/webrtc_video_track.h"
32 #include "webrtc/webrtc_media_devices.h"
33 #include "webrtc/webrtc_create_adm.h"
34
35 #include <tgcalls/group/GroupInstanceCustomImpl.h>
36 #include <tgcalls/VideoCaptureInterface.h>
37 #include <tgcalls/StaticThreads.h>
38 #include <QtCore/QJsonDocument>
39 #include <QtCore/QJsonObject>
40 #include <QtCore/QJsonArray>
41
42 namespace Calls {
43 namespace {
44
// Invite users to a group call in batches of this size.
constexpr auto kMaxInvitePerSlice = 10;
// How often the "recently spoke" list is rechecked.
constexpr auto kCheckLastSpokeInterval = crl::time(1000);
// Give up waiting for the joined confirmation after this timeout.
constexpr auto kCheckJoinedTimeout = 4 * crl::time(1000);
// Throttle "speaking" send-action updates to the server.
constexpr auto kUpdateSendActionEach = crl::time(500);
// Connecting sound length plus a pause between repetitions.
constexpr auto kPlayConnectingEach = crl::time(1056) + 2 * crl::time(1000);
// Keep a manually chosen large video shown at least this long.
constexpr auto kFixManualLargeVideoDuration = 5 * crl::time(1000);
// Keep an active speaker's large video shown at least this long.
constexpr auto kFixSpeakingLargeVideoDuration = 3 * crl::time(1000);
constexpr auto kFullAsMediumsCount = 4; // 1 Full is like 4 Mediums.
constexpr auto kMaxMediumQualities = 16; // 4 Fulls or 16 Mediums.
54
CreateMediaDevices()55 [[nodiscard]] std::unique_ptr<Webrtc::MediaDevices> CreateMediaDevices() {
56 const auto &settings = Core::App().settings();
57 return Webrtc::CreateMediaDevices(
58 settings.callInputDeviceId(),
59 settings.callOutputDeviceId(),
60 settings.callVideoInputDeviceId());
61 }
62
LookupParticipant(not_null<PeerData * > peer,CallId id,not_null<PeerData * > participantPeer)63 [[nodiscard]] const Data::GroupCallParticipant *LookupParticipant(
64 not_null<PeerData*> peer,
65 CallId id,
66 not_null<PeerData*> participantPeer) {
67 const auto call = peer->groupCall();
68 return (id && call && call->id() == id)
69 ? call->participantByPeer(participantPeer)
70 : nullptr;
71 }
72
TimestampFromMsgId(mtpMsgId msgId)73 [[nodiscard]] double TimestampFromMsgId(mtpMsgId msgId) {
74 return msgId / double(1ULL << 32);
75 }
76
// Extracts the server time in milliseconds from a message id.
// The exact value would be (msgId * 1000) >> 32, but that multiply
// overflows 64 bits, so 10 low bits are dropped first and the final
// division uses 2^22 instead of 2^32 to compensate.
[[nodiscard]] int64 TimestampInMsFromMsgId(mtpMsgId msgId) {
	// return (msgId * 1000) / (1ULL << 32); // Almost... But this overflows.
	return ((msgId / (1ULL << 10)) * 1000) / (1ULL << 22);
}
81
FindLocalRaisedHandRating(const std::vector<Data::GroupCallParticipant> & list)82 [[nodiscard]] uint64 FindLocalRaisedHandRating(
83 const std::vector<Data::GroupCallParticipant> &list) {
84 const auto i = ranges::max_element(
85 list,
86 ranges::less(),
87 &Data::GroupCallParticipant::raisedHandRating);
88 return (i == end(list)) ? 1 : (i->raisedHandRating + 1);
89 }
90
// Parsed from the join response: the video endpoint id assigned to
// the local participant in a regular group call.
struct JoinVideoEndpoint {
	std::string id;
};

// Parsed from the join response: we joined a broadcast stream
// instead of a regular group call.
struct JoinBroadcastStream {
};

// Result of parsing the join response JSON; v::null_t on failure.
using JoinClientFields = std::variant<
	v::null_t,
	JoinVideoEndpoint,
	JoinBroadcastStream>;
102
// tgcalls task asking for the current stream time. The done callback
// may be invoked from another thread; cancel() guarantees (via the
// mutex) that the callback is not invoked afterwards.
class RequestCurrentTimeTask final : public tgcalls::BroadcastPartTask {
public:
	RequestCurrentTimeTask(
		base::weak_ptr<GroupCall> call,
		Fn<void(int64)> done);

	// Reports the result to the requester, at most once.
	void done(int64 value);
	void cancel() override;

private:
	const base::weak_ptr<GroupCall> _call;
	Fn<void(int64)> _done;
	QMutex _mutex; // Protects _done between done() and cancel().

};
118
// Stores the weak call pointer and the result callback.
RequestCurrentTimeTask::RequestCurrentTimeTask(
	base::weak_ptr<GroupCall> call,
	Fn<void(int64)> done)
: _call(call)
, _done(std::move(done)) {
}
125
done(int64 value)126 void RequestCurrentTimeTask::done(int64 value) {
127 QMutexLocker lock(&_mutex);
128 if (_done) {
129 base::take(_done)(value);
130 }
131 }
132
// Drops the callback so that a late done() becomes a no-op.
void RequestCurrentTimeTask::cancel() {
	QMutexLocker lock(&_mutex);
	_done = nullptr;
}
137
// Parses the JSON params from the join response to find out whether
// we joined a broadcast stream or a regular call (and which video
// endpoint was assigned to us). Returns v::null_t (empty variant)
// on malformed input.
[[nodiscard]] JoinClientFields ParseJoinResponse(const QByteArray &json) {
	auto error = QJsonParseError{ 0, QJsonParseError::NoError };
	const auto document = QJsonDocument::fromJson(json, &error);
	if (error.error != QJsonParseError::NoError) {
		LOG(("API Error: "
			"Failed to parse join response params, error: %1."
			).arg(error.errorString()));
		return {};
	} else if (!document.isObject()) {
		LOG(("API Error: "
			"Not an object received in join response params."));
		return {};
	}
	// "stream": true marks a broadcast (livestream) join.
	if (document.object().value("stream").toBool()) {
		return JoinBroadcastStream{};
	}
	const auto video = document.object().value("video").toObject();
	return JoinVideoEndpoint{
		video.value("endpoint").toString().toStdString(),
	};
}
159
// Shared immutable empty string, used to return const std::string&
// where no value is available.
[[nodiscard]] const std::string &EmptyString() {
	static const std::string result;
	return result;
}
164
165 } // namespace
166
// tgcalls task that loads one broadcast (livestream) part. The done
// callback may fire on any thread; cancel() (guarded by the mutex)
// guarantees it won't fire afterwards and notifies the call on the
// main thread.
class GroupCall::LoadPartTask final : public tgcalls::BroadcastPartTask {
public:
	using Quality = tgcalls::VideoChannelDescription::Quality;
	// Audio part request.
	LoadPartTask(
		base::weak_ptr<GroupCall> call,
		int64 time,
		int64 period,
		Fn<void(tgcalls::BroadcastPart&&)> done);
	// Video part request for a specific channel / quality.
	LoadPartTask(
		base::weak_ptr<GroupCall> call,
		int64 time,
		int64 period,
		int32 videoChannel,
		Quality videoQuality,
		Fn<void(tgcalls::BroadcastPart&&)> done);

	// Part timestamp in ms (client unixtime if 0 was passed in).
	[[nodiscard]] int64 time() const {
		return _time;
	}
	// Scale derived from the period: 0 for 1000ms .. 3 for 125ms.
	[[nodiscard]] int32 scale() const {
		return _scale;
	}
	[[nodiscard]] int32 videoChannel() const {
		return _videoChannel;
	}
	[[nodiscard]] Quality videoQuality() const {
		return _videoQuality;
	}

	void done(tgcalls::BroadcastPart &&part);
	void cancel() override;

private:
	const base::weak_ptr<GroupCall> _call;
	const int64 _time = 0;
	const int32 _scale = 0;
	const int32 _videoChannel = 0;
	const Quality _videoQuality = {};
	Fn<void(tgcalls::BroadcastPart &&)> _done;
	QMutex _mutex; // Protects _done between done() and cancel().

};
209
// tgcalls task resolving media channel descriptions for a set of
// ssrcs. Descriptions are added on the main thread as participants
// become known; cancel() may come from any thread.
class GroupCall::MediaChannelDescriptionsTask final
	: public tgcalls::RequestMediaChannelDescriptionTask {
public:
	MediaChannelDescriptionsTask(
		base::weak_ptr<GroupCall> call,
		const std::vector<std::uint32_t> &ssrcs,
		Fn<void(std::vector<tgcalls::MediaChannelDescription>&&)> done);

	// The ssrcs still waiting to be resolved.
	[[nodiscard]] base::flat_set<uint32> ssrcs() const;

	// Adds one resolved description; returns true when all of the
	// requested ssrcs are resolved and the callback was invoked.
	[[nodiscard]] bool finishWithAdding(
		uint32 ssrc,
		std::optional<tgcalls::MediaChannelDescription> description,
		bool screen = false);

	void cancel() override;

private:
	const base::weak_ptr<GroupCall> _call;
	base::flat_set<uint32> _ssrcs; // Still unresolved.
	base::flat_set<uint32> _cameraAdded; // Dedup by audio ssrc.
	base::flat_set<uint32> _screenAdded; // Dedup by audio ssrc.
	std::vector<tgcalls::MediaChannelDescription> _result;
	Fn<void(std::vector<tgcalls::MediaChannelDescription>&&)> _done;
	QMutex _mutex; // Protects _done between completion and cancel().

};
237
// Weak handle to the frame sink of one of the video tracks.
struct GroupCall::SinkPointer {
	std::weak_ptr<Webrtc::SinkInterface> data;
};
241
// State of one incoming video endpoint.
struct GroupCall::VideoTrack {
	VideoTrack(bool paused, bool requireARGB32, not_null<PeerData*> peer);

	Webrtc::VideoTrack track;
	rpl::variable<QSize> trackSize; // Last known frame size.
	not_null<PeerData*> peer; // Who sends this video.
	rpl::lifetime lifetime;
	Group::VideoQuality quality = Group::VideoQuality();
	bool shown = false; // Whether frames arrived and the track is shown.
};
252
// Creates the underlying Webrtc track in Paused or Active state;
// requireARGB32 is set when the panel can't render other formats.
GroupCall::VideoTrack::VideoTrack(
	bool paused,
	bool requireARGB32,
	not_null<PeerData*> peer)
: track((paused
	? Webrtc::VideoState::Paused
	: Webrtc::VideoState::Active),
	requireARGB32)
, peer(peer) {
}
263
// Checks whether participantPeer can manage the group call in peer.
// A non-user participant counts as admin only if it is peer itself.
[[nodiscard]] bool IsGroupCallAdmin(
		not_null<PeerData*> peer,
		not_null<PeerData*> participantPeer) {
	const auto user = participantPeer->asUser();
	if (!user) {
		return (peer == participantPeer);
	}
	if (const auto chat = peer->asChat()) {
		// Legacy chats: creator or any admin.
		return chat->admins.contains(user)
			|| (chat->creator == peerToUser(user->id));
	} else if (const auto group = peer->asChannel()) {
		if (const auto mgInfo = group->mgInfo.get()) {
			if (mgInfo->creator == user) {
				return true;
			}
			const auto i = mgInfo->lastAdmins.find(user);
			if (i == mgInfo->lastAdmins.end()) {
				return false;
			}
			// Admins need the explicit "manage calls" right.
			return (i->second.rights.flags & ChatAdminRight::ManageCall);
		}
	}
	return false;
}
288
// One parsed video description (camera or screencast) of a
// group call participant.
struct VideoParams {
	std::string endpointId;
	std::vector<tgcalls::MediaSsrcGroup> ssrcGroups;
	uint32 additionalSsrc = 0; // Screencast audio ssrc, if any.
	bool paused = false;

	// True when nothing can be received from this description.
	[[nodiscard]] bool empty() const {
		return !additionalSsrc && (endpointId.empty() || ssrcGroups.empty());
	}
	[[nodiscard]] explicit operator bool() const {
		return !empty();
	}
};
302
// Camera and screencast descriptions of a single participant.
struct ParticipantVideoParams {
	VideoParams camera;
	VideoParams screen;
};
307
// Compares already-parsed video params with their raw MTP form
// without re-parsing, so unchanged params can keep the same shared
// pointer (callers detect changes by pointer comparison).
[[nodiscard]] bool VideoParamsAreEqual(
		const VideoParams &was,
		const tl::conditional<MTPGroupCallParticipantVideo> &now) {
	if (!now) {
		return !was;
	}
	return now->match([&](const MTPDgroupCallParticipantVideo &data) {
		if (data.is_paused() != was.paused
			|| data.vaudio_source().value_or_empty() != was.additionalSsrc) {
			return false;
		}
		if (gsl::make_span(data.vendpoint().v)
			!= gsl::make_span(was.endpointId)) {
			return false;
		}
		const auto &list = data.vsource_groups().v;
		if (list.size() != was.ssrcGroups.size()) {
			return false;
		}
		// Compare ssrc groups elementwise: semantics + ssrc lists,
		// order-sensitive on both levels.
		auto index = 0;
		for (const auto &group : list) {
			const auto equal = group.match([&](
				const MTPDgroupCallParticipantVideoSourceGroup &data) {
				const auto &group = was.ssrcGroups[index++];
				if (gsl::make_span(data.vsemantics().v)
					!= gsl::make_span(group.semantics)) {
					return false;
				}
				const auto list = data.vsources().v;
				if (list.size() != group.ssrcs.size()) {
					return false;
				}
				auto i = 0;
				for (const auto &ssrc : list) {
					if (ssrc.v != group.ssrcs[i++]) {
						return false;
					}
				}
				return true;
			});
			if (!equal) {
				return false;
			}
		}
		return true;
	});
}
355
// Converts the MTP description of one video (camera or screencast)
// into the tgcalls-friendly VideoParams structure.
[[nodiscard]] VideoParams ParseVideoParams(
		const tl::conditional<MTPGroupCallParticipantVideo> &params) {
	if (!params) {
		return VideoParams();
	}
	auto result = VideoParams();
	params->match([&](const MTPDgroupCallParticipantVideo &data) {
		result.paused = data.is_paused();
		result.endpointId = data.vendpoint().v.toStdString();
		result.additionalSsrc = data.vaudio_source().value_or_empty();
		const auto &list = data.vsource_groups().v;
		result.ssrcGroups.reserve(list.size());
		for (const auto &group : list) {
			group.match([&](
					const MTPDgroupCallParticipantVideoSourceGroup &data) {
				const auto &list = data.vsources().v;
				auto ssrcs = std::vector<uint32_t>();
				ssrcs.reserve(list.size());
				for (const auto &ssrc : list) {
					ssrcs.push_back(ssrc.v);
				}
				result.ssrcGroups.push_back({
					.semantics = data.vsemantics().v.toStdString(),
					.ssrcs = std::move(ssrcs),
				});
			});
		}
	});
	return result;
}
386
// Camera endpoint id, or an empty string for null params.
const std::string &GetCameraEndpoint(
		const std::shared_ptr<ParticipantVideoParams> &params) {
	return params ? params->camera.endpointId : EmptyString();
}

// Screencast endpoint id, or an empty string for null params.
const std::string &GetScreenEndpoint(
		const std::shared_ptr<ParticipantVideoParams> &params) {
	return params ? params->screen.endpointId : EmptyString();
}

// Whether the participant's camera video is paused.
bool IsCameraPaused(const std::shared_ptr<ParticipantVideoParams> &params) {
	return params && params->camera.paused;
}

// Whether the participant's screencast is paused.
bool IsScreenPaused(const std::shared_ptr<ParticipantVideoParams> &params) {
	return params && params->screen.paused;
}

// Audio ssrc of the screencast sound track (0 if none).
uint32 GetAdditionalAudioSsrc(
		const std::shared_ptr<ParticipantVideoParams> &params) {
	return params ? params->screen.additionalSsrc : 0;
}
409
// Parses both camera and screencast params, returning the existing
// shared pointer when nothing changed so callers can detect changes
// cheaply by comparing pointers.
std::shared_ptr<ParticipantVideoParams> ParseVideoParams(
		const tl::conditional<MTPGroupCallParticipantVideo> &camera,
		const tl::conditional<MTPGroupCallParticipantVideo> &screen,
		const std::shared_ptr<ParticipantVideoParams> &existing) {
	using namespace tgcalls;

	if (!camera && !screen) {
		return nullptr;
	}
	if (existing
		&& VideoParamsAreEqual(existing->camera, camera)
		&& VideoParamsAreEqual(existing->screen, screen)) {
		return existing;
	}
	// We don't reuse existing pointer, that way we can compare pointers
	// to see if anything was changed in video params.
	const auto data = /*existing
		? existing
		: */std::make_shared<ParticipantVideoParams>();
	data->camera = ParseVideoParams(camera);
	data->screen = ParseVideoParams(screen);
	return data;
}
433
// Audio part request: delegates to the full constructor with a zero
// video channel and default video quality.
GroupCall::LoadPartTask::LoadPartTask(
	base::weak_ptr<GroupCall> call,
	int64 time,
	int64 period,
	Fn<void(tgcalls::BroadcastPart&&)> done)
: LoadPartTask(std::move(call), time, period, 0, {}, std::move(done)) {
}
441
// Stores the request data. A zero time means "now" (client unixtime
// in ms); the part period in ms is converted to the protocol scale.
GroupCall::LoadPartTask::LoadPartTask(
	base::weak_ptr<GroupCall> call,
	int64 time,
	int64 period,
	int32 videoChannel,
	tgcalls::VideoChannelDescription::Quality videoQuality,
	Fn<void(tgcalls::BroadcastPart&&)> done)
: _call(std::move(call))
, _time(time ? time : (base::unixtime::now() * int64(1000)))
, _scale([&] {
	// Scale n means a part covers 1000ms / 2^n.
	switch (period) {
	case 1000: return 0;
	case 500: return 1;
	case 250: return 2;
	case 125: return 3;
	}
	Unexpected("Period in LoadPartTask.");
}())
, _videoChannel(videoChannel)
, _videoQuality(videoQuality)
, _done(std::move(done)) {
}
464
done(tgcalls::BroadcastPart && part)465 void GroupCall::LoadPartTask::done(tgcalls::BroadcastPart &&part) {
466 QMutexLocker lock(&_mutex);
467 if (_done) {
468 base::take(_done)(std::move(part));
469 }
470 }
471
// Drops the callback so a late done() becomes a no-op, then notifies
// the call on the main thread that this part load was cancelled.
void GroupCall::LoadPartTask::cancel() {
	QMutexLocker lock(&_mutex);
	if (!_done) {
		return;
	}
	_done = nullptr;
	// Unlock before posting to the main thread.
	lock.unlock();

	if (_call) {
		const auto that = this;
		crl::on_main(_call, [weak = _call, that] {
			if (const auto strong = weak.get()) {
				strong->broadcastPartCancel(that);
			}
		});
	}
}
489
// Remembers the set of ssrcs that need media channel descriptions.
GroupCall::MediaChannelDescriptionsTask::MediaChannelDescriptionsTask(
	base::weak_ptr<GroupCall> call,
	const std::vector<std::uint32_t> &ssrcs,
	Fn<void(std::vector<tgcalls::MediaChannelDescription>&&)> done)
: _call(std::move(call))
, _ssrcs(ssrcs.begin(), ssrcs.end())
, _done(std::move(done)) {
}
498
ssrcs() const499 auto GroupCall::MediaChannelDescriptionsTask::ssrcs() const
500 -> base::flat_set<uint32> {
501 return _ssrcs;
502 }
503
// Adds one resolved description and removes the ssrc from the
// waiting set. Video descriptions are deduplicated by their audio
// ssrc, separately for camera and screencast. Returns true when the
// last ssrc got resolved and the callback was invoked.
bool GroupCall::MediaChannelDescriptionsTask::finishWithAdding(
		uint32 ssrc,
		std::optional<tgcalls::MediaChannelDescription> description,
		bool screen) {
	Expects(_ssrcs.contains(ssrc));

	using Type = tgcalls::MediaChannelDescription::Type;
	_ssrcs.remove(ssrc);
	if (!description) {
		// Unresolved ssrc: just drop it from the waiting set.
	} else if (description->type == Type::Audio
		|| (!screen && _cameraAdded.emplace(description->audioSsrc).second)
		|| (screen && _screenAdded.emplace(description->audioSsrc).second)) {
		_result.push_back(std::move(*description));
	}

	if (!_ssrcs.empty()) {
		return false;
	}
	QMutexLocker lock(&_mutex);
	if (_done) {
		base::take(_done)(std::move(_result));
	}
	return true;
}
528
// Drops the callback so a late completion becomes a no-op, then
// notifies the call on the main thread about the cancellation.
void GroupCall::MediaChannelDescriptionsTask::cancel() {
	QMutexLocker lock(&_mutex);
	if (!_done) {
		return;
	}
	_done = nullptr;
	// Unlock before posting to the main thread.
	lock.unlock();

	if (_call) {
		const auto that = this;
		crl::on_main(_call, [weak = _call, that] {
			if (const auto strong = weak.get()) {
				strong->mediaChannelDescriptionsCancel(that);
			}
		});
	}
}
546
// The peer who is sending the given video track.
not_null<PeerData*> GroupCall::TrackPeer(
		const std::unique_ptr<VideoTrack> &track) {
	return track->peer;
}

// The underlying Webrtc video track.
not_null<Webrtc::VideoTrack*> GroupCall::TrackPointer(
		const std::unique_ptr<VideoTrack> &track) {
	return &track->track;
}

// A producer with the last known frame size of the track.
rpl::producer<QSize> GroupCall::TrackSizeValue(
		const std::unique_ptr<VideoTrack> &track) {
	return track->trackSize.value();
}
561
// Builds a group call instance: binds the call to its peer/history,
// wires mute-state and connection-state reactions, subscribes to
// the server-side call data and then either joins the existing call
// or creates (starts) a new one.
GroupCall::GroupCall(
	not_null<Delegate*> delegate,
	Group::JoinInfo info,
	const MTPInputGroupCall &inputCall)
: _delegate(delegate)
, _peer(info.peer)
, _history(_peer->owner().history(_peer))
, _api(&_peer->session().mtp())
, _joinAs(info.joinAs)
, _possibleJoinAs(std::move(info.possibleJoinAs))
, _joinHash(info.joinHash)
, _canManage(Data::CanManageGroupCallValue(_peer))
, _id(inputCall.c_inputGroupCall().vid().v)
, _scheduleDate(info.scheduleDate)
, _lastSpokeCheckTimer([=] { checkLastSpoke(); })
, _checkJoinedTimer([=] { checkJoined(); })
, _pushToTalkCancelTimer([=] { pushToTalkCancel(); })
, _connectingSoundTimer([=] { playConnectingSoundOnce(); })
, _mediaDevices(CreateMediaDevices()) {
	_muted.value(
	) | rpl::combine_previous(
	) | rpl::start_with_next([=](MuteState previous, MuteState state) {
		// Keep the tgcalls instance mute state in sync.
		if (_instance) {
			updateInstanceMuteState();
		}
		// Send the server update only after we joined (have an ssrc)
		// and don't repeat it before the first real unmute.
		if (_joinState.ssrc
			&& (!_initialMuteStateSent || state == MuteState::Active)) {
			_initialMuteStateSent = true;
			maybeSendMutedUpdate(previous);
		}
	}, _lifetime);

	// Loop the connecting sound while disconnected after a join.
	_instanceState.value(
	) | rpl::filter([=] {
		return _hadJoinedState;
	}) | rpl::start_with_next([=](InstanceState state) {
		if (state == InstanceState::Disconnected) {
			playConnectingSound();
		} else {
			stopConnectingSound();
		}
	}, _lifetime);

	checkGlobalShortcutAvailability();

	if (const auto real = lookupReal()) {
		subscribeToReal(real);
		if (!canManage() && real->joinMuted()) {
			_muted = MuteState::ForceMuted;
		}
	} else {
		// Wait for the server-side call data to arrive, then
		// subscribe to it (once).
		_peer->session().changes().peerFlagsValue(
			_peer,
			Data::PeerUpdate::Flag::GroupCall
		) | rpl::map([=] {
			return lookupReal();
		}) | rpl::filter([](Data::GroupCall *real) {
			return real != nullptr;
		}) | rpl::map([](Data::GroupCall *real) {
			return not_null{ real };
		}) | rpl::take(
			1
		) | rpl::start_with_next([=](not_null<Data::GroupCall*> real) {
			subscribeToReal(real);
			_realChanges.fire_copy(real);
		}, _lifetime);
	}

	setupMediaDevices();
	setupOutgoingVideo();

	// A non-zero id means the call already exists on the server.
	if (_id) {
		join(inputCall);
	} else {
		start(info.scheduleDate);
	}
	if (_scheduleDate) {
		saveDefaultJoinAs(joinAs());
	}
}
642
// Tears down the screencast and main tgcalls controllers before the
// remaining members are destroyed.
GroupCall::~GroupCall() {
	destroyScreencast();
	destroyController();
}
647
// Whether the local participant is currently sharing the screen.
bool GroupCall::isSharingScreen() const {
	return _isSharingScreen.current();
}

rpl::producer<bool> GroupCall::isSharingScreenValue() const {
	return _isSharingScreen.value();
}

// Whether the outgoing screencast is in the paused state.
bool GroupCall::isScreenPaused() const {
	return (_screenState.current() == Webrtc::VideoState::Paused);
}

// The local screencast endpoint id; empty while not sharing.
const std::string &GroupCall::screenSharingEndpoint() const {
	return isSharingScreen() ? _screenEndpoint : EmptyString();
}
663
// Whether the local participant is currently sharing camera video.
bool GroupCall::isSharingCamera() const {
	return _isSharingCamera.current();
}

rpl::producer<bool> GroupCall::isSharingCameraValue() const {
	return _isSharingCamera.value();
}

// Whether the outgoing camera video is in the paused state.
bool GroupCall::isCameraPaused() const {
	return (_cameraState.current() == Webrtc::VideoState::Paused);
}

// The local camera endpoint id; empty while not sharing.
const std::string &GroupCall::cameraSharingEndpoint() const {
	return isSharingCamera() ? _cameraEndpoint : EmptyString();
}
679
// The captured screen / window unique id; empty while not sharing.
QString GroupCall::screenSharingDeviceId() const {
	return isSharingScreen() ? _screenDeviceId : QString();
}

// Whether the current screencast also captures system audio.
bool GroupCall::screenSharingWithAudio() const {
	return isSharingScreen() && _screenWithAudio;
}
687
// Whether an admin muted us (with or without a raised hand).
bool GroupCall::mutedByAdmin() const {
	const auto mute = muted();
	return mute == MuteState::ForceMuted || mute == MuteState::RaisedHand;
}

// Whether the local user can manage this group call.
bool GroupCall::canManage() const {
	return _canManage.current();
}

rpl::producer<bool> GroupCall::canManageValue() const {
	return _canManage.value();
}
700
toggleVideo(bool active)701 void GroupCall::toggleVideo(bool active) {
702 if (!_instance || !_id) {
703 return;
704 }
705 _cameraState = active
706 ? Webrtc::VideoState::Active
707 : Webrtc::VideoState::Inactive;
708 }
709
// Starts, stops or switches screen sharing. std::nullopt stops the
// screencast; otherwise uniqueId names the screen / window source
// and withAudio requests capturing system audio as well.
void GroupCall::toggleScreenSharing(
		std::optional<QString> uniqueId,
		bool withAudio) {
	if (!_instance || !_id) {
		return;
	} else if (!uniqueId) {
		_screenState = Webrtc::VideoState::Inactive;
		return;
	}
	const auto changed = (_screenDeviceId != *uniqueId);
	const auto wasSharing = isSharingScreen();
	_screenDeviceId = *uniqueId;
	_screenWithAudio = withAudio;
	_screenState = Webrtc::VideoState::Active;
	// If we were already sharing, switch the capture source in place.
	if (changed && wasSharing && isSharingScreen()) {
		_screenCapture->switchToDevice(uniqueId->toStdString(), true);
	}
	if (_screenInstance) {
		// Screencast audio is delivered via a muted/unmuted track.
		_screenInstance->setIsMuted(!withAudio);
	}
}
731
// Whether at least one incoming video track has visible frames.
bool GroupCall::hasVideoWithFrames() const {
	return !_shownVideoTracks.empty();
}

// Producer recomputing hasVideoWithFrames() on every shown-tracks
// change, emitting only distinct values.
rpl::producer<bool> GroupCall::hasVideoWithFramesValue() const {
	return _videoStreamShownUpdates.events_starting_with(
		VideoStateToggle()
	) | rpl::map([=] {
		return hasVideoWithFrames();
	}) | rpl::distinct_until_changed();
}
743
setScheduledDate(TimeId date)744 void GroupCall::setScheduledDate(TimeId date) {
745 const auto was = _scheduleDate;
746 _scheduleDate = date;
747 if (was && !date) {
748 join(inputCall());
749 }
750 }
751
subscribeToReal(not_null<Data::GroupCall * > real)752 void GroupCall::subscribeToReal(not_null<Data::GroupCall*> real) {
753 real->scheduleDateValue(
754 ) | rpl::start_with_next([=](TimeId date) {
755 setScheduledDate(date);
756 }, _lifetime);
757
758 // Postpone creating video tracks, so that we know if Panel
759 // supports OpenGL and we don't need ARGB32 frames at all.
760 Ui::PostponeCall(this, [=] {
761 if (const auto real = lookupReal()) {
762 real->participantsReloaded(
763 ) | rpl::start_with_next([=] {
764 fillActiveVideoEndpoints();
765 }, _lifetime);
766 fillActiveVideoEndpoints();
767 }
768 });
769
770 using Update = Data::GroupCall::ParticipantUpdate;
771 real->participantUpdated(
772 ) | rpl::start_with_next([=](const Update &data) {
773 const auto regularEndpoint = [&](const std::string &endpoint)
774 -> const std::string & {
775 return (endpoint.empty()
776 || endpoint == _cameraEndpoint
777 || endpoint == _screenEndpoint)
778 ? EmptyString()
779 : endpoint;
780 };
781
782 const auto peer = data.was ? data.was->peer : data.now->peer;
783 if (peer == joinAs()) {
784 const auto working = data.now && data.now->videoJoined;
785 if (videoIsWorking() != working) {
786 fillActiveVideoEndpoints();
787 }
788 return;
789 }
790 const auto &wasCameraEndpoint = data.was
791 ? regularEndpoint(GetCameraEndpoint(data.was->videoParams))
792 : EmptyString();
793 const auto &nowCameraEndpoint = data.now
794 ? regularEndpoint(GetCameraEndpoint(data.now->videoParams))
795 : EmptyString();
796 const auto wasCameraPaused = !wasCameraEndpoint.empty()
797 && IsCameraPaused(data.was->videoParams);
798 const auto nowCameraPaused = !nowCameraEndpoint.empty()
799 && IsCameraPaused(data.now->videoParams);
800 if (wasCameraEndpoint != nowCameraEndpoint) {
801 markEndpointActive({
802 VideoEndpointType::Camera,
803 peer,
804 nowCameraEndpoint,
805 }, true, nowCameraPaused);
806 markEndpointActive({
807 VideoEndpointType::Camera,
808 peer,
809 wasCameraEndpoint,
810 }, false, false);
811 } else if (wasCameraPaused != nowCameraPaused) {
812 markTrackPaused({
813 VideoEndpointType::Camera,
814 peer,
815 nowCameraEndpoint,
816 }, nowCameraPaused);
817 }
818 const auto &wasScreenEndpoint = data.was
819 ? regularEndpoint(data.was->screenEndpoint())
820 : EmptyString();
821 const auto &nowScreenEndpoint = data.now
822 ? regularEndpoint(data.now->screenEndpoint())
823 : EmptyString();
824 const auto wasScreenPaused = !wasScreenEndpoint.empty()
825 && IsScreenPaused(data.was->videoParams);
826 const auto nowScreenPaused = !nowScreenEndpoint.empty()
827 && IsScreenPaused(data.now->videoParams);
828 if (wasScreenEndpoint != nowScreenEndpoint) {
829 markEndpointActive({
830 VideoEndpointType::Screen,
831 peer,
832 nowScreenEndpoint,
833 }, true, nowScreenPaused);
834 markEndpointActive({
835 VideoEndpointType::Screen,
836 peer,
837 wasScreenEndpoint,
838 }, false, false);
839 } else if (wasScreenPaused != nowScreenPaused) {
840 markTrackPaused({
841 VideoEndpointType::Screen,
842 peer,
843 wasScreenEndpoint,
844 }, nowScreenPaused);
845 }
846 }, _lifetime);
847
848 real->participantsResolved(
849 ) | rpl::start_with_next([=](
850 not_null<const base::flat_map<
851 uint32,
852 Data::LastSpokeTimes>*> ssrcs) {
853 checkMediaChannelDescriptions([&](uint32 ssrc) {
854 return ssrcs->contains(ssrc);
855 });
856 }, _lifetime);
857
858 real->participantSpeaking(
859 ) | rpl::filter([=] {
860 return _videoEndpointLarge.current();
861 }) | rpl::start_with_next([=](not_null<Data::GroupCallParticipant*> p) {
862 const auto now = crl::now();
863 if (_videoEndpointLarge.current().peer == p->peer) {
864 _videoLargeTillTime = std::max(
865 _videoLargeTillTime,
866 now + kFixSpeakingLargeVideoDuration);
867 return;
868 } else if (videoEndpointPinned() || _videoLargeTillTime > now) {
869 return;
870 }
871 using Type = VideoEndpointType;
872 const auto ¶ms = p->videoParams;
873 if (GetCameraEndpoint(params).empty()
874 && GetScreenEndpoint(params).empty()) {
875 return;
876 }
877 const auto tryEndpoint = [&](Type type, const std::string &id) {
878 if (id.empty()) {
879 return false;
880 }
881 const auto endpoint = VideoEndpoint{ type, p->peer, id };
882 if (!shownVideoTracks().contains(endpoint)) {
883 return false;
884 }
885 setVideoEndpointLarge(endpoint);
886 return true;
887 };
888 if (tryEndpoint(Type::Screen, GetScreenEndpoint(params))
889 || tryEndpoint(Type::Camera, GetCameraEndpoint(params))) {
890 _videoLargeTillTime = now + kFixSpeakingLargeVideoDuration;
891 }
892 }, _lifetime);
893 }
894
checkGlobalShortcutAvailability()895 void GroupCall::checkGlobalShortcutAvailability() {
896 auto &settings = Core::App().settings();
897 if (!settings.groupCallPushToTalk()) {
898 return;
899 } else if (!base::GlobalShortcutsAllowed()) {
900 settings.setGroupCallPushToTalk(false);
901 Core::App().saveSettingsDelayed();
902 }
903 }
904
// Changes the call state enforcing the allowed transitions: Failed
// is terminal; Ended / FailedHangingUp only move to Failed;
// HangingUp only to Ended / Failed. Plays sounds and notifies the
// delegate on terminal transitions.
void GroupCall::setState(State state) {
	const auto current = _state.current();
	if (current == State::Failed) {
		return;
	} else if (current == State::Ended && state != State::Failed) {
		return;
	} else if (current == State::FailedHangingUp && state != State::Failed) {
		return;
	} else if (current == State::HangingUp
		&& state != State::Ended
		&& state != State::Failed) {
		return;
	}
	if (current == state) {
		return;
	}
	_state = state;

	if (state == State::Joined) {
		stopConnectingSound();
		// Mark the shared call data as "we are in this call now".
		if (const auto call = _peer->groupCall(); call && call->id() == _id) {
			call->setInCall();
		}
	}

	if (false
		|| state == State::Ended
		|| state == State::Failed) {
		// Destroy controller before destroying Call Panel,
		// so that the panel hide animation is smooth.
		destroyScreencast();
		destroyController();
	}
	switch (state) {
	case State::HangingUp:
	case State::FailedHangingUp:
		stopConnectingSound();
		_delegate->groupCallPlaySound(Delegate::GroupCallSound::Ended);
		break;
	case State::Ended:
		stopConnectingSound();
		_delegate->groupCallFinished(this);
		break;
	case State::Failed:
		stopConnectingSound();
		_delegate->groupCallFailed(this);
		break;
	case State::Connecting:
		// Start waiting for the "joined" confirmation.
		if (!_checkJoinedTimer.isActive()) {
			_checkJoinedTimer.callOnce(kCheckJoinedTimeout);
		}
		break;
	}
}
959
playConnectingSound()960 void GroupCall::playConnectingSound() {
961 const auto state = _state.current();
962 if (_connectingSoundTimer.isActive()
963 || state == State::HangingUp
964 || state == State::FailedHangingUp
965 || state == State::Ended
966 || state == State::Failed) {
967 return;
968 }
969 playConnectingSoundOnce();
970 _connectingSoundTimer.callEach(kPlayConnectingEach);
971 }
972
// Stops repeating the connecting sound.
void GroupCall::stopConnectingSound() {
	_connectingSoundTimer.cancel();
}

// Plays a single iteration of the connecting sound.
void GroupCall::playConnectingSoundOnce() {
	_delegate->groupCallPlaySound(Delegate::GroupCallSound::Connecting);
}
980
showChooseJoinAs() const981 bool GroupCall::showChooseJoinAs() const {
982 return (_possibleJoinAs.size() > 1)
983 || (_possibleJoinAs.size() == 1
984 && !_possibleJoinAs.front()->isSelf());
985 }
986
scheduleStartSubscribed() const987 bool GroupCall::scheduleStartSubscribed() const {
988 if (const auto real = lookupReal()) {
989 return real->scheduleStartSubscribed();
990 }
991 return false;
992 }
993
lookupReal() const994 Data::GroupCall *GroupCall::lookupReal() const {
995 const auto real = _peer->groupCall();
996 return (real && real->id() == _id) ? real : nullptr;
997 }
998
// Produces the data-layer call: immediately if it is already known,
// otherwise once it arrives through _realChanges.
rpl::producer<not_null<Data::GroupCall*>> GroupCall::real() const {
	if (const auto real = lookupReal()) {
		return rpl::single(not_null{ real });
	}
	return _realChanges.events();
}
1005
// Creates a new group call on the server, optionally scheduled for
// 'scheduleDate'. The result comes back through the updates pipeline
// with _acceptFields temporarily enabled so the new call's id and
// access hash are accepted by handlePossibleCreateOrJoinResponse().
void GroupCall::start(TimeId scheduleDate) {
	using Flag = MTPphone_CreateGroupCall::Flag;
	_createRequestId = _api.request(MTPphone_CreateGroupCall(
		MTP_flags(scheduleDate ? Flag::f_schedule_date : Flag(0)),
		_peer->input,
		MTP_int(base::RandomValue<int32>()), // random_id of the request.
		MTPstring(), // title
		MTP_int(scheduleDate)
	)).done([=](const MTPUpdates &result) {
		// Accept call fields only while applying this response.
		_acceptFields = true;
		_peer->session().api().applyUpdates(result);
		_acceptFields = false;
	}).fail([=](const MTP::Error &error) {
		LOG(("Call Error: Could not create, error: %1"
			).arg(error.type()));
		hangup();
	}).send();
}
1024
// Remembers the call id / access hash, then either waits (scheduled
// call) or joins right away and wires up participant-update and
// chat-migration subscriptions.
void GroupCall::join(const MTPInputGroupCall &inputCall) {
	inputCall.match([&](const MTPDinputGroupCall &data) {
		_id = data.vid().v;
		_accessHash = data.vaccess_hash().v;
	});
	setState(_scheduleDate ? State::Waiting : State::Joining);

	if (_scheduleDate) {
		// A scheduled call is joined later, when it actually starts.
		return;
	}
	rejoin();

	using Update = Data::GroupCall::ParticipantUpdate;
	const auto real = lookupReal();
	Assert(real != nullptr);
	real->participantUpdated(
	) | rpl::filter([=](const Update &update) {
		return (_instance != nullptr);
	}) | rpl::start_with_next([=](const Update &update) {
		if (!update.now) {
			// Participant left - remove his ssrcs from the instance.
			_instance->removeSsrcs({
				update.was->ssrc,
				GetAdditionalAudioSsrc(update.was->videoParams),
			});
		} else {
			updateInstanceVolume(update.was, *update.now);
		}
	}, _lifetime);

	_peer->session().updates().addActiveChat(
		_peerStream.events_starting_with_copy(_peer));
	// Keep following the call if the chat migrates to a supergroup.
	SubscribeToMigration(_peer, _lifetime, [=](not_null<ChannelData*> group) {
		_peer = group;
		_canManage = Data::CanManageGroupCallValue(_peer);
		_peerStream.fire_copy(group);
	});
}
1062
setScreenEndpoint(std::string endpoint)1063 void GroupCall::setScreenEndpoint(std::string endpoint) {
1064 if (_screenEndpoint == endpoint) {
1065 return;
1066 }
1067 if (!_screenEndpoint.empty()) {
1068 markEndpointActive({
1069 VideoEndpointType::Screen,
1070 joinAs(),
1071 _screenEndpoint
1072 }, false, false);
1073 }
1074 _screenEndpoint = std::move(endpoint);
1075 if (_screenEndpoint.empty()) {
1076 return;
1077 }
1078 if (isSharingScreen()) {
1079 markEndpointActive({
1080 VideoEndpointType::Screen,
1081 joinAs(),
1082 _screenEndpoint
1083 }, true, isScreenPaused());
1084 }
1085 }
1086
setCameraEndpoint(std::string endpoint)1087 void GroupCall::setCameraEndpoint(std::string endpoint) {
1088 if (_cameraEndpoint == endpoint) {
1089 return;
1090 }
1091 if (!_cameraEndpoint.empty()) {
1092 markEndpointActive({
1093 VideoEndpointType::Camera,
1094 joinAs(),
1095 _cameraEndpoint
1096 }, false, false);
1097 }
1098 _cameraEndpoint = std::move(endpoint);
1099 if (_cameraEndpoint.empty()) {
1100 return;
1101 }
1102 if (isSharingCamera()) {
1103 markEndpointActive({
1104 VideoEndpointType::Camera,
1105 joinAs(),
1106 _cameraEndpoint
1107 }, true, isCameraPaused());
1108 }
1109 }
1110
addVideoOutput(const std::string & endpoint,SinkPointer sink)1111 void GroupCall::addVideoOutput(
1112 const std::string &endpoint,
1113 SinkPointer sink) {
1114 if (_cameraEndpoint == endpoint) {
1115 if (auto strong = sink.data.lock()) {
1116 _cameraCapture->setOutput(std::move(strong));
1117 }
1118 } else if (_screenEndpoint == endpoint) {
1119 if (auto strong = sink.data.lock()) {
1120 _screenCapture->setOutput(std::move(strong));
1121 }
1122 } else if (_instance) {
1123 _instance->addIncomingVideoOutput(endpoint, std::move(sink.data));
1124 } else {
1125 _pendingVideoOutputs.emplace(endpoint, std::move(sink));
1126 }
1127 }
1128
// Adds or removes an endpoint in _activeVideoTracks, wiring incoming
// frames into a VideoTrack and firing activity / shown notifications.
void GroupCall::markEndpointActive(
		VideoEndpoint endpoint,
		bool active,
		bool paused) {
	if (!endpoint) {
		return;
	}
	const auto i = _activeVideoTracks.find(endpoint);
	const auto changed = active
		? (i == end(_activeVideoTracks))
		: (i != end(_activeVideoTracks));
	if (!changed) {
		if (active) {
			// Already active - only the paused flag may have changed.
			markTrackPaused(endpoint, paused);
		}
		return;
	}
	auto shown = false;
	if (active) {
		// NOTE: this inner 'i' intentionally shadows the failed lookup.
		const auto i = _activeVideoTracks.emplace(
			endpoint,
			std::make_unique<VideoTrack>(
				paused,
				_requireARGB32,
				endpoint.peer)).first;
		const auto track = &i->second->track;

		// Mark the track shown as soon as the first frame with a
		// non-empty size is rendered.
		track->renderNextFrame(
		) | rpl::start_with_next([=] {
			const auto activeTrack = _activeVideoTracks[endpoint].get();
			const auto size = track->frameSize();
			if (size.isEmpty()) {
				track->markFrameShown();
			} else if (!activeTrack->shown) {
				activeTrack->shown = true;
				markTrackShown(endpoint, true);
			}
			activeTrack->trackSize = size;
		}, i->second->lifetime);

		const auto size = track->frameSize();
		i->second->trackSize = size;
		if (!size.isEmpty() || paused) {
			i->second->shown = true;
			shown = true;
		} else {
			// No frame yet - also consider the track shown once it
			// becomes paused without ever rendering a frame.
			track->stateValue(
			) | rpl::filter([=](Webrtc::VideoState state) {
				return (state == Webrtc::VideoState::Paused)
					&& !_activeVideoTracks[endpoint]->shown;
			}) | rpl::start_with_next([=] {
				_activeVideoTracks[endpoint]->shown = true;
				markTrackShown(endpoint, true);
			}, i->second->lifetime);
		}
		addVideoOutput(i->first.id, { track->sink() });
	} else {
		// Deactivation: unpin if this was the large video, then clear
		// shown/paused state and drop the track.
		if (_videoEndpointLarge.current() == endpoint) {
			setVideoEndpointLarge({});
		}
		markTrackShown(endpoint, false);
		markTrackPaused(endpoint, false);
		_activeVideoTracks.erase(i);
	}
	updateRequestedVideoChannelsDelayed();
	_videoStreamActiveUpdates.fire({ endpoint, active });
	if (active) {
		markTrackShown(endpoint, shown);
		markTrackPaused(endpoint, paused);
	}
}
1200
markTrackShown(const VideoEndpoint & endpoint,bool shown)1201 void GroupCall::markTrackShown(const VideoEndpoint &endpoint, bool shown) {
1202 const auto changed = shown
1203 ? _shownVideoTracks.emplace(endpoint).second
1204 : _shownVideoTracks.remove(endpoint);
1205 if (!changed) {
1206 return;
1207 }
1208 _videoStreamShownUpdates.fire_copy({ endpoint, shown });
1209 if (shown && endpoint.type == VideoEndpointType::Screen) {
1210 crl::on_main(this, [=] {
1211 if (_shownVideoTracks.contains(endpoint)) {
1212 pinVideoEndpoint(endpoint);
1213 }
1214 });
1215 }
1216 }
1217
markTrackPaused(const VideoEndpoint & endpoint,bool paused)1218 void GroupCall::markTrackPaused(const VideoEndpoint &endpoint, bool paused) {
1219 if (!endpoint) {
1220 return;
1221 }
1222
1223 const auto i = _activeVideoTracks.find(endpoint);
1224 Assert(i != end(_activeVideoTracks));
1225
1226 i->second->track.setState(paused
1227 ? Webrtc::VideoState::Paused
1228 : Webrtc::VideoState::Active);
1229 }
1230
// Rejoins the call keeping the current "join as" identity.
void GroupCall::rejoin() {
	rejoin(joinAs());
}
1234
// Retries joining with an invite hash. Applies only while the user is
// muted by admin - presumably the hash can grant speaking rights;
// NOTE(review): confirm this precondition against callers.
void GroupCall::rejoinWithHash(const QString &hash) {
	if (!hash.isEmpty() && mutedByAdmin()) {
		_joinHash = hash;
		rejoin();
	}
}
1241
setJoinAs(not_null<PeerData * > as)1242 void GroupCall::setJoinAs(not_null<PeerData*> as) {
1243 _joinAs = as;
1244 if (const auto chat = _peer->asChat()) {
1245 chat->setGroupCallDefaultJoinAs(joinAs()->id);
1246 } else if (const auto channel = _peer->asChannel()) {
1247 channel->setGroupCallDefaultJoinAs(joinAs()->id);
1248 }
1249 }
1250
// Applies the identity locally and saves it as the server-side
// default "join as" peer for this chat.
void GroupCall::saveDefaultJoinAs(not_null<PeerData*> as) {
	setJoinAs(as);
	_api.request(MTPphone_SaveDefaultGroupCallJoinAs(
		_peer->input,
		joinAs()->input
	)).send();
}
1258
// (Re)joins the call under the given identity: asks the tgcalls
// instance for a join payload, then sends phone.joinGroupCall with it.
// All the continuation runs guarded by _instanceGuard, since the
// instance may be destroyed while the payload is prepared.
void GroupCall::rejoin(not_null<PeerData*> as) {
	if (state() != State::Joining
		&& state() != State::Joined
		&& state() != State::Connecting) {
		return;
	} else if (_joinState.action != JoinAction::None) {
		// Another join/leave is already in progress.
		return;
	}

	if (joinAs() != as) {
		// Switching identity - stop own video and screencast first.
		toggleVideo(false);
		toggleScreenSharing(std::nullopt);
	}

	_joinState.action = JoinAction::Joining;
	_joinState.ssrc = 0;
	_initialMuteStateSent = false;
	setState(State::Joining);
	if (!tryCreateController()) {
		setInstanceMode(InstanceMode::None);
	}
	applyMeInCallLocally();
	LOG(("Call Info: Requesting join payload."));

	setJoinAs(as);

	const auto weak = base::make_weak(&_instanceGuard);
	_instance->emitJoinPayload([=](tgcalls::GroupJoinPayload payload) {
		crl::on_main(weak, [=, payload = std::move(payload)] {
			if (state() != State::Joining) {
				// State changed while the payload was prepared -
				// drop this attempt and process the next action.
				_joinState.finish();
				checkNextJoinAction();
				return;
			}
			const auto ssrc = payload.audioSsrc;
			LOG(("Call Info: Join payload received, joining with ssrc: %1."
				).arg(ssrc));

			const auto json = QByteArray::fromStdString(payload.json);
			const auto wasMuteState = muted();
			const auto wasVideoStopped = !isSharingCamera();
			using Flag = MTPphone_JoinGroupCall::Flag;
			const auto flags = (wasMuteState != MuteState::Active
				? Flag::f_muted
				: Flag(0))
				| (_joinHash.isEmpty()
					? Flag(0)
					: Flag::f_invite_hash)
				| (wasVideoStopped
					? Flag::f_video_stopped
					: Flag(0));
			_api.request(MTPphone_JoinGroupCall(
				MTP_flags(flags),
				inputCall(),
				joinAs()->input,
				MTP_string(_joinHash),
				MTP_dataJSON(MTP_bytes(json))
			)).done([=](
					const MTPUpdates &updates,
					const MTP::Response &response) {
				// Remember server time for schedule-related math.
				_serverTimeMs = TimestampInMsFromMsgId(response.outerMsgId);
				_serverTimeMsGotAt = crl::now();

				_joinState.finish(ssrc);
				_mySsrcs.emplace(ssrc);

				setState((_instanceState.current()
					== InstanceState::Disconnected)
					? State::Connecting
					: State::Joined);
				applyMeInCallLocally();
				maybeSendMutedUpdate(wasMuteState);
				_peer->session().api().applyUpdates(updates);
				applyQueuedSelfUpdates();
				checkFirstTimeJoined();
				_screenJoinState.nextActionPending = true;
				checkNextJoinAction();
				// Camera state flipped while joining - report it.
				if (wasVideoStopped == isSharingCamera()) {
					sendSelfUpdate(SendUpdateType::CameraStopped);
				}
				if (isCameraPaused()) {
					sendSelfUpdate(SendUpdateType::CameraPaused);
				}
				sendPendingSelfUpdates();
			}).fail([=](const MTP::Error &error) {
				_joinState.finish();

				const auto type = error.type();
				LOG(("Call Error: Could not join, error: %1").arg(type));

				if (type == u"GROUPCALL_SSRC_DUPLICATE_MUCH") {
					// Ssrc collision - generate a new payload, retry.
					rejoin();
					return;
				}

				hangup();
				Ui::ShowMultilineToast({
					.text = { type == u"GROUPCALL_FORBIDDEN"_q
						? tr::lng_group_not_accessible(tr::now)
						: Lang::Hard::ServerError() },
				});
			}).send();
		});
	});
}
1364
// Dispatches the next queued join/leave action for the main call and
// the screencast, in priority order: pending main action first, then
// a missing main join, then the pending screencast action.
void GroupCall::checkNextJoinAction() {
	if (_joinState.action != JoinAction::None) {
		// A main-call action is still in flight.
		return;
	} else if (_joinState.nextActionPending) {
		_joinState.nextActionPending = false;
		const auto state = _state.current();
		if (state != State::HangingUp && state != State::FailedHangingUp) {
			rejoin();
		} else {
			leave();
		}
	} else if (!_joinState.ssrc) {
		// Not joined at all - join first.
		rejoin();
	} else if (_screenJoinState.action != JoinAction::None
		|| !_screenJoinState.nextActionPending) {
		return;
	} else {
		_screenJoinState.nextActionPending = false;
		if (isSharingScreen()) {
			rejoinPresentation();
		} else {
			leavePresentation();
		}
	}
}
1390
// Joins the screencast ("presentation") stream: requests a payload
// from the screencast instance and sends joinGroupCallPresentation.
// Requires the main call to be joined (_joinState.ssrc != 0).
void GroupCall::rejoinPresentation() {
	if (!_joinState.ssrc
		|| _screenJoinState.action == JoinAction::Joining
		|| !isSharingScreen()) {
		return;
	} else if (_screenJoinState.action != JoinAction::None) {
		// Leaving right now - rejoin once it finishes.
		_screenJoinState.nextActionPending = true;
		return;
	}

	_screenJoinState.action = JoinAction::Joining;
	_screenJoinState.ssrc = 0;
	if (!tryCreateScreencast()) {
		setScreenInstanceMode(InstanceMode::None);
	}
	LOG(("Call Info: Requesting join screen payload."));

	const auto weak = base::make_weak(&_screenInstanceGuard);
	_screenInstance->emitJoinPayload([=](tgcalls::GroupJoinPayload payload) {
		crl::on_main(weak, [=, payload = std::move(payload)]{
			if (!isSharingScreen() || !_joinState.ssrc) {
				// Sharing stopped or main call left meanwhile.
				_screenJoinState.finish();
				checkNextJoinAction();
				return;
			}
			// Remember the main ssrc to detect a rejoin in between.
			const auto withMainSsrc = _joinState.ssrc;
			const auto ssrc = payload.audioSsrc;
			LOG(("Call Info: Join screen payload received, ssrc: %1."
				).arg(ssrc));

			const auto json = QByteArray::fromStdString(payload.json);
			_api.request(
				MTPphone_JoinGroupCallPresentation(
					inputCall(),
					MTP_dataJSON(MTP_bytes(json)))
			).done([=](const MTPUpdates &updates) {
				_screenJoinState.finish(ssrc);
				_mySsrcs.emplace(ssrc);

				_peer->session().api().applyUpdates(updates);
				checkNextJoinAction();
				if (isScreenPaused()) {
					sendSelfUpdate(SendUpdateType::ScreenPaused);
				}
				sendPendingSelfUpdates();
			}).fail([=](const MTP::Error &error) {
				_screenJoinState.finish();

				const auto type = error.type();
				if (type == u"GROUPCALL_SSRC_DUPLICATE_MUCH") {
					// Ssrc collision - retry with a new payload.
					_screenJoinState.nextActionPending = true;
					checkNextJoinAction();
				} else if (type == u"GROUPCALL_JOIN_MISSING"_q
					|| type == u"GROUPCALL_FORBIDDEN"_q) {
					if (_joinState.ssrc != withMainSsrc) {
						// We've rejoined, rejoin presentation again.
						_screenJoinState.nextActionPending = true;
						checkNextJoinAction();
					}
				} else {
					LOG(("Call Error: "
						"Could not screen join, error: %1").arg(type));
					_screenState = Webrtc::VideoState::Inactive;
					_errors.fire_copy(mutedByAdmin()
						? Error::MutedNoScreen
						: Error::ScreenFailed);
				}
			}).send();
		});
	});
}
1462
leavePresentation()1463 void GroupCall::leavePresentation() {
1464 destroyScreencast();
1465 if (!_screenJoinState.ssrc) {
1466 setScreenEndpoint(std::string());
1467 return;
1468 } else if (_screenJoinState.action == JoinAction::Leaving) {
1469 return;
1470 } else if (_screenJoinState.action != JoinAction::None) {
1471 _screenJoinState.nextActionPending = true;
1472 return;
1473 }
1474 _api.request(
1475 MTPphone_LeaveGroupCallPresentation(inputCall())
1476 ).done([=](const MTPUpdates &updates) {
1477 _screenJoinState.finish();
1478
1479 _peer->session().api().applyUpdates(updates);
1480 setScreenEndpoint(std::string());
1481 checkNextJoinAction();
1482 }).fail([=](const MTP::Error &error) {
1483 _screenJoinState.finish();
1484
1485 const auto type = error.type();
1486 LOG(("Call Error: "
1487 "Could not screen leave, error: %1").arg(type));
1488 setScreenEndpoint(std::string());
1489 checkNextJoinAction();
1490 }).send();
1491 }
1492
// Injects a synthetic updateGroupCallParticipants about ourselves into
// the data layer, so the UI reflects our state before (or without)
// the server echo.
void GroupCall::applyMeInCallLocally() {
	const auto real = lookupReal();
	if (!real) {
		return;
	}
	using Flag = MTPDgroupCallParticipant::Flag;
	const auto participant = real->participantByPeer(joinAs());
	// Reuse known fields from the existing participant entry, if any.
	const auto date = participant
		? participant->date
		: base::unixtime::now();
	const auto lastActive = participant
		? participant->lastActive
		: TimeId(0);
	const auto volume = participant
		? participant->volume
		: Group::kDefaultVolume;
	const auto canSelfUnmute = !mutedByAdmin();
	const auto raisedHandRating = (muted() != MuteState::RaisedHand)
		? uint64(0)
		: participant
		? participant->raisedHandRating
		: FindLocalRaisedHandRating(real->participants());
	const auto flags = (canSelfUnmute ? Flag::f_can_self_unmute : Flag(0))
		| (lastActive ? Flag::f_active_date : Flag(0))
		| (_joinState.ssrc ? Flag(0) : Flag::f_left)
		| (_videoIsWorking.current() ? Flag::f_video_joined : Flag(0))
		| Flag::f_self
		| Flag::f_volume // Without flag the volume is reset to 100%.
		| Flag::f_volume_by_admin // Self volume can only be set by admin.
		| ((muted() != MuteState::Active) ? Flag::f_muted : Flag(0))
		| (raisedHandRating > 0 ? Flag::f_raise_hand_rating : Flag(0));
	real->applyLocalUpdate(
		MTP_updateGroupCallParticipants(
			inputCall(),
			MTP_vector<MTPGroupCallParticipant>(
				1,
				MTP_groupCallParticipant(
					MTP_flags(flags),
					peerToMTP(joinAs()->id),
					MTP_int(date),
					MTP_int(lastActive),
					MTP_int(_joinState.ssrc),
					MTP_int(volume),
					MTPstring(), // Don't update about text in local updates.
					MTP_long(raisedHandRating),
					MTPGroupCallParticipantVideo(),
					MTPGroupCallParticipantVideo())),
			MTP_int(0)).c_updateGroupCallParticipants());
}
1542
// Injects a synthetic participant update (mute / volume change) into
// the data layer, so the change shows instantly, before the server
// confirms it. Admin mutes and "muted by you" are distinguished here.
void GroupCall::applyParticipantLocally(
		not_null<PeerData*> participantPeer,
		bool mute,
		std::optional<int> volume) {
	const auto participant = LookupParticipant(_peer, _id, participantPeer);
	if (!participant || !participant->ssrc) {
		return;
	}
	const auto canManageCall = canManage();
	// Admins mute for everyone; others only mute locally for themselves.
	const auto isMuted = participant->muted || (mute && canManageCall);
	const auto canSelfUnmute = !canManageCall
		? participant->canSelfUnmute
		: (!mute || IsGroupCallAdmin(_peer, participantPeer));
	const auto isMutedByYou = mute && !canManageCall;
	using Flag = MTPDgroupCallParticipant::Flag;
	const auto flags = (canSelfUnmute ? Flag::f_can_self_unmute : Flag(0))
		| Flag::f_volume // Without flag the volume is reset to 100%.
		| ((participant->applyVolumeFromMin && !volume)
			? Flag::f_volume_by_admin
			: Flag(0))
		| (participant->videoJoined ? Flag::f_video_joined : Flag(0))
		| (participant->lastActive ? Flag::f_active_date : Flag(0))
		| (isMuted ? Flag::f_muted : Flag(0))
		| (isMutedByYou ? Flag::f_muted_by_you : Flag(0))
		| (participantPeer == joinAs() ? Flag::f_self : Flag(0))
		| (participant->raisedHandRating
			? Flag::f_raise_hand_rating
			: Flag(0));
	_peer->groupCall()->applyLocalUpdate(
		MTP_updateGroupCallParticipants(
			inputCall(),
			MTP_vector<MTPGroupCallParticipant>(
				1,
				MTP_groupCallParticipant(
					MTP_flags(flags),
					peerToMTP(participantPeer->id),
					MTP_int(participant->date),
					MTP_int(participant->lastActive),
					MTP_int(participant->ssrc),
					MTP_int(volume.value_or(participant->volume)),
					MTPstring(), // Don't update about text in local updates.
					MTP_long(participant->raisedHandRating),
					MTPGroupCallParticipantVideo(),
					MTPGroupCallParticipantVideo())),
			MTP_int(0)).c_updateGroupCallParticipants());
}
1589
// Leaves the call for ourselves (the call itself keeps going).
void GroupCall::hangup() {
	finish(FinishType::Ended);
}
1593
// Ends the call for everyone. If creation hasn't finished yet, just
// cancels the creation request and hangs up.
void GroupCall::discard() {
	if (!_id) {
		_api.request(_createRequestId).cancel();
		hangup();
		return;
	}
	_api.request(MTPphone_DiscardGroupCall(
		inputCall()
	)).done([=](const MTPUpdates &result) {
		// Here 'this' could be destroyed by updates, so we set Ended after
		// updates being handled, but in a guarded way.
		crl::on_main(this, [=] { hangup(); });
		_peer->session().api().applyUpdates(result);
	}).fail([=](const MTP::Error &error) {
		hangup();
	}).send();
}
1611
// Switches the "join as" identity: for a scheduled call only saves
// the default, for an active call performs a full rejoin.
void GroupCall::rejoinAs(Group::JoinInfo info) {
	_possibleJoinAs = std::move(info.possibleJoinAs);
	if (info.joinAs == joinAs()) {
		return;
	}
	const auto event = Group::RejoinEvent{
		.wasJoinAs = joinAs(),
		.nowJoinAs = info.joinAs,
	};
	if (_scheduleDate) {
		saveDefaultJoinAs(info.joinAs);
	} else {
		setState(State::Joining);
		rejoin(info.joinAs);
	}
	_rejoinEvents.fire_copy(event);
}
1629
// Starts call termination of the given kind. If we never joined (no
// pending action, no ssrc) jumps straight to the final state;
// otherwise goes through the hanging-up state and schedules leave()
// via checkNextJoinAction().
void GroupCall::finish(FinishType type) {
	Expects(type != FinishType::None);

	const auto finalState = (type == FinishType::Ended)
		? State::Ended
		: State::Failed;
	const auto hangupState = (type == FinishType::Ended)
		? State::HangingUp
		: State::FailedHangingUp;
	const auto state = _state.current();
	if (state == State::HangingUp
		|| state == State::FailedHangingUp
		|| state == State::Ended
		|| state == State::Failed) {
		// Already finishing or finished.
		return;
	} else if (_joinState.action == JoinAction::None && !_joinState.ssrc) {
		setState(finalState);
		return;
	}
	setState(hangupState);
	_joinState.nextActionPending = true;
	checkNextJoinAction();
}
1653
// Sends the leave request through the session (not _api), so the
// request survives even if this GroupCall object is destroyed.
void GroupCall::leave() {
	Expects(_joinState.action == JoinAction::None);

	_joinState.action = JoinAction::Leaving;

	const auto finalState = (_state.current() == State::HangingUp)
		? State::Ended
		: State::Failed;

	// We want to leave request still being sent and processed even if
	// the call is already destroyed.
	const auto session = &_peer->session();
	const auto weak = base::make_weak(this);
	session->api().request(MTPphone_LeaveGroupCall(
		inputCall(),
		MTP_int(base::take(_joinState.ssrc))
	)).done([=](const MTPUpdates &result) {
		// Here 'this' could be destroyed by updates, so we set Ended after
		// updates being handled, but in a guarded way.
		crl::on_main(weak, [=] { setState(finalState); });
		session->api().applyUpdates(result);
	}).fail(crl::guard(weak, [=](const MTP::Error &error) {
		setState(finalState);
	})).send();
}
1679
// Starts a scheduled call immediately; the state change arrives back
// through the applied updates.
void GroupCall::startScheduledNow() {
	if (!lookupReal()) {
		return;
	}
	_api.request(MTPphone_StartScheduledGroupCall(
		inputCall()
	)).done([=](const MTPUpdates &result) {
		_peer->session().api().applyUpdates(result);
	}).send();
}
1690
// Subscribes to / unsubscribes from the scheduled call start
// notification; the flag change arrives back through the updates.
void GroupCall::toggleScheduleStartSubscribed(bool subscribed) {
	if (!lookupReal()) {
		return;
	}
	_api.request(MTPphone_ToggleGroupCallStartSubscription(
		inputCall(),
		MTP_bool(subscribed)
	)).done([=](const MTPUpdates &result) {
		_peer->session().api().applyUpdates(result);
	}).send();
}
1702
setNoiseSuppression(bool enabled)1703 void GroupCall::setNoiseSuppression(bool enabled) {
1704 if (_instance) {
1705 _instance->setIsNoiseSuppressionEnabled(enabled);
1706 }
1707 }
1708
// Convenience overload: wires an endpoint to a VideoTrack's sink.
void GroupCall::addVideoOutput(
		const std::string &endpoint,
		not_null<Webrtc::VideoTrack*> track) {
	addVideoOutput(endpoint, { track->sink() });
}
1714
// Changes the mute state. Unmuting (Active / PushToTalk) first asks
// the delegate for microphone permissions; the actual change is then
// applied in the guarded 'set' closure.
void GroupCall::setMuted(MuteState mute) {
	const auto set = [=] {
		const auto was = muted();
		const auto wasSpeaking = (was == MuteState::Active)
			|| (was == MuteState::PushToTalk);
		const auto wasMuted = (was == MuteState::Muted)
			|| (was == MuteState::PushToTalk);
		const auto wasRaiseHand = (was == MuteState::RaisedHand);
		_muted = mute;
		const auto now = muted();
		const auto nowSpeaking = (now == MuteState::Active)
			|| (now == MuteState::PushToTalk);
		const auto nowMuted = (now == MuteState::Muted)
			|| (now == MuteState::PushToTalk);
		const auto nowRaiseHand = (now == MuteState::RaisedHand);
		if (wasMuted != nowMuted || wasRaiseHand != nowRaiseHand) {
			// Reflect the visible state change in the data layer.
			applyMeInCallLocally();
		}
		if (mutedByAdmin()) {
			// Not allowed to send video while force-muted by admin.
			toggleVideo(false);
			toggleScreenSharing(std::nullopt);
		}
		if (wasSpeaking && !nowSpeaking && _joinState.ssrc) {
			// Drop own level indicator to zero when we stop speaking.
			_levelUpdates.fire(LevelUpdate{
				.ssrc = _joinState.ssrc,
				.value = 0.f,
				.voice = false,
				.me = true,
			});
		}
	};
	if (mute == MuteState::Active || mute == MuteState::PushToTalk) {
		_delegate->groupCallRequestPermissionsOrFail(crl::guard(this, set));
	} else {
		set();
	}
}
1752
// Changes the mute state and (when appropriate) reports it to the
// server as well.
void GroupCall::setMutedAndUpdate(MuteState mute) {
	const auto was = muted();

	// Active state is sent from _muted changes,
	// because it may be set delayed, after permissions request, not now.
	const auto send = _initialMuteStateSent && (mute != MuteState::Active);
	setMuted(mute);
	if (send) {
		maybeSendMutedUpdate(was);
	}
}
1764
// Dispatches an updateGroupCall: a live call goes to the groupCall
// handler, a discarded one ends this call if the id matches.
void GroupCall::handlePossibleCreateOrJoinResponse(
		const MTPDupdateGroupCall &data) {
	data.vcall().match([&](const MTPDgroupCall &data) {
		handlePossibleCreateOrJoinResponse(data);
	}, [&](const MTPDgroupCallDiscarded &data) {
		handlePossibleDiscarded(data);
	});
}
1773
// While _acceptFields is set (right after phone.createGroupCall) this
// accepts the fresh call's id/hash and joins it; otherwise it only
// refreshes the schedule date and the broadcast DC for our call id.
void GroupCall::handlePossibleCreateOrJoinResponse(
		const MTPDgroupCall &data) {
	if (_acceptFields) {
		if (!_instance && !_id) {
			const auto input = MTP_inputGroupCall(
				data.vid(),
				data.vaccess_hash());
			const auto scheduleDate = data.vschedule_date().value_or_empty();
			setScheduledDate(scheduleDate);
			// Store the call reference on the owning chat / channel.
			if (const auto chat = _peer->asChat()) {
				chat->setGroupCall(input, scheduleDate);
			} else if (const auto group = _peer->asChannel()) {
				group->setGroupCall(input, scheduleDate);
			} else {
				Unexpected("Peer type in GroupCall::join.");
			}
			join(input);
		}
		return;
	} else if (_id != data.vid().v || !_instance) {
		// Not about this call (or no instance yet).
		return;
	}
	setScheduledDate(data.vschedule_date().value_or_empty());
	if (const auto streamDcId = data.vstream_dc_id()) {
		_broadcastDcId = MTP::BareDcId(streamDcId->v);
	}
}
1801
// Applies the join response payload (transport parameters) to the
// screencast instance or to the main instance, choosing between RTC
// and broadcast-stream modes for the latter.
void GroupCall::handlePossibleCreateOrJoinResponse(
		const MTPDupdateGroupCallConnection &data) {
	if (data.is_presentation()) {
		if (!_screenInstance) {
			return;
		}
		setScreenInstanceMode(InstanceMode::Rtc);
		data.vparams().match([&](const MTPDdataJSON &data) {
			const auto json = data.vdata().v;
			const auto response = ParseJoinResponse(json);
			const auto endpoint = std::get_if<JoinVideoEndpoint>(&response);
			if (endpoint) {
				setScreenEndpoint(endpoint->id);
			} else {
				LOG(("Call Error: Bad response for 'presentation' flag."));
			}
			_screenInstance->setJoinResponsePayload(json.toStdString());
		});
	} else {
		if (!_instance) {
			return;
		}
		data.vparams().match([&](const MTPDdataJSON &data) {
			const auto json = data.vdata().v;
			const auto response = ParseJoinResponse(json);
			const auto endpoint = std::get_if<JoinVideoEndpoint>(&response);
			if (v::is<JoinBroadcastStream>(response)) {
				// Livestream viewer: needs a DC to pull parts from.
				if (!_broadcastDcId) {
					LOG(("Api Error: Empty stream_dc_id in groupCall."));
					_broadcastDcId = _peer->session().mtp().mainDcId();
				}
				setInstanceMode(InstanceMode::Stream);
			} else {
				setInstanceMode(InstanceMode::Rtc);
				setCameraEndpoint(endpoint ? endpoint->id : std::string());
				_instance->setJoinResponsePayload(json.toStdString());
			}
			updateRequestedVideoChannels();
			checkMediaChannelDescriptions();
		});
	}
}
1844
handlePossibleDiscarded(const MTPDgroupCallDiscarded & data)1845 void GroupCall::handlePossibleDiscarded(const MTPDgroupCallDiscarded &data) {
1846 if (data.vid().v == _id) {
1847 LOG(("Call Info: Hangup after groupCallDiscarded."));
1848 _joinState.finish();
1849 hangup();
1850 }
1851 }
1852
// Tries to satisfy pending media-channel description requests from
// tgcalls; fulfilled ones are erased, still-unknown ssrcs are sent
// for participant resolving.
void GroupCall::checkMediaChannelDescriptions(
		Fn<bool(uint32)> resolved) {
	const auto real = lookupReal();
	if (!real || (_instanceMode == InstanceMode::None)) {
		return;
	}
	// Erase-while-iterating: advance only when nothing was erased.
	for (auto i = begin(_mediaChannelDescriptionses)
		; i != end(_mediaChannelDescriptionses);) {
		if (mediaChannelDescriptionsFill(i->get(), resolved)) {
			i = _mediaChannelDescriptionses.erase(i);
		} else {
			++i;
		}
	}
	if (!_unresolvedSsrcs.empty()) {
		real->resolveParticipants(base::take(_unresolvedSsrcs));
	}
}
1871
// Entry point for call-related updates routed to this call; only the
// two group-call update types are expected here.
void GroupCall::handleUpdate(const MTPUpdate &update) {
	update.match([&](const MTPDupdateGroupCall &data) {
		handleUpdate(data);
	}, [&](const MTPDupdateGroupCallParticipants &data) {
		handleUpdate(data);
	}, [](const auto &) {
		Unexpected("Type in Instance::applyGroupCallUpdateChecked.");
	});
}
1881
// Only the discarded case matters here; live call fields are handled
// through handlePossibleCreateOrJoinResponse().
void GroupCall::handleUpdate(const MTPDupdateGroupCall &data) {
	data.vcall().match([](const MTPDgroupCall &) {
	}, [&](const MTPDgroupCallDiscarded &data) {
		handlePossibleDiscarded(data);
	});
}
1888
// Routes participant updates for this call: others are applied right
// away, own updates are applied only when joined (otherwise queued
// until the join finishes, see applyQueuedSelfUpdates()).
void GroupCall::handleUpdate(const MTPDupdateGroupCallParticipants &data) {
	const auto callId = data.vcall().match([](const auto &data) {
		return data.vid().v;
	});
	if (_id != callId) {
		return;
	}
	const auto state = _state.current();
	const auto joined = (state == State::Joined)
		|| (state == State::Connecting);
	for (const auto &participant : data.vparticipants().v) {
		participant.match([&](const MTPDgroupCallParticipant &data) {
			// 'min' updates don't carry the self flag - compare peers.
			const auto isSelf = data.is_self()
				|| (data.is_min()
					&& peerFromMTP(data.vpeer()) == joinAs()->id);
			if (!isSelf) {
				applyOtherParticipantUpdate(data);
			} else if (joined) {
				applySelfUpdate(data);
			} else {
				_queuedSelfUpdates.push_back(participant);
			}
		});
	}
}
1914
// Drains self-updates queued while joining. Guarded by a weak pointer
// because applySelfUpdate() may hang up and destroy 'this'.
void GroupCall::applyQueuedSelfUpdates() {
	const auto weak = base::make_weak(this);
	while (weak
		&& !_queuedSelfUpdates.empty()
		&& (_state.current() == State::Joined
			|| _state.current() == State::Connecting)) {
		// Copy the front entry before erasing it, then apply.
		const auto update = _queuedSelfUpdates.front();
		_queuedSelfUpdates.erase(_queuedSelfUpdates.begin());
		update.match([&](const MTPDgroupCallParticipant &data) {
			applySelfUpdate(data);
		});
	}
}
1928
// Applies a server update about our own participant entry: handles
// being kicked (rejoin), a join from another device (hangup) and all
// the mute-state transitions.
void GroupCall::applySelfUpdate(const MTPDgroupCallParticipant &data) {
	if (data.is_left()) {
		if (data.vsource().v == _joinState.ssrc) {
			// I was removed from the call, rejoin.
			LOG(("Call Info: "
				"Rejoin after got 'left' with my ssrc."));
			setState(State::Joining);
			rejoin();
		}
		return;
	} else if (data.vsource().v != _joinState.ssrc) {
		if (!_mySsrcs.contains(data.vsource().v)) {
			// I joined from another device, hangup.
			LOG(("Call Info: "
				"Hangup after '!left' with ssrc %1, my %2."
				).arg(data.vsource().v
				).arg(_joinState.ssrc));
			_joinState.finish();
			hangup();
		} else {
			// One of our own previous ssrcs - a stale update.
			LOG(("Call Info: "
				"Some old 'self' with '!left' and ssrc %1, my %2."
				).arg(data.vsource().v
				).arg(_joinState.ssrc));
		}
		return;
	}
	if (data.is_muted() && !data.is_can_self_unmute()) {
		// Force-muted by admin; keep the raised hand if it was raised.
		setMuted(data.vraise_hand_rating().value_or_empty()
			? MuteState::RaisedHand
			: MuteState::ForceMuted);
	} else if (_instanceMode == InstanceMode::Stream) {
		LOG(("Call Info: Rejoin after unforcemute in stream mode."));
		setState(State::Joining);
		rejoin();
	} else if (mutedByAdmin()) {
		// Was force-muted, now allowed to speak.
		setMuted(MuteState::Muted);
		if (!_instanceTransitioning) {
			notifyAboutAllowedToSpeak();
		}
	} else if (data.is_muted() && muted() != MuteState::Muted) {
		setMuted(MuteState::Muted);
	}
}
1973
applyOtherParticipantUpdate(const MTPDgroupCallParticipant & data)1974 void GroupCall::applyOtherParticipantUpdate(
1975 const MTPDgroupCallParticipant &data) {
1976 if (data.is_min()) {
1977 // No real information about mutedByMe or my custom volume.
1978 return;
1979 }
1980 const auto participantPeer = _peer->owner().peer(
1981 peerFromMTP(data.vpeer()));
1982 if (!LookupParticipant(_peer, _id, participantPeer)) {
1983 return;
1984 }
1985 _otherParticipantStateValue.fire(Group::ParticipantState{
1986 .peer = participantPeer,
1987 .volume = data.vvolume().value_or_empty(),
1988 .mutedByMe = data.is_muted_by_you(),
1989 });
1990 }
1991
setupMediaDevices()1992 void GroupCall::setupMediaDevices() {
1993 _mediaDevices->audioInputId(
1994 ) | rpl::start_with_next([=](QString id) {
1995 _audioInputId = id;
1996 if (_instance) {
1997 _instance->setAudioInputDevice(id.toStdString());
1998 }
1999 }, _lifetime);
2000
2001 _mediaDevices->audioOutputId(
2002 ) | rpl::start_with_next([=](QString id) {
2003 _audioOutputId = id;
2004 if (_instance) {
2005 _instance->setAudioOutputDevice(id.toStdString());
2006 }
2007 }, _lifetime);
2008
2009 _mediaDevices->videoInputId(
2010 ) | rpl::start_with_next([=](QString id) {
2011 _cameraInputId = id;
2012 if (_cameraCapture) {
2013 _cameraCapture->switchToDevice(id.toStdString(), false);
2014 }
2015 }, _lifetime);
2016 }
2017
activeVideoSendersCount() const2018 int GroupCall::activeVideoSendersCount() const {
2019 auto result = 0;
2020 for (const auto &[endpoint, track] : _activeVideoTracks) {
2021 if (endpoint.type == VideoEndpointType::Camera) {
2022 ++result;
2023 } else {
2024 auto sharesCameraToo = false;
2025 for (const auto &[other, _] : _activeVideoTracks) {
2026 if (other.type == VideoEndpointType::Camera
2027 && other.peer == endpoint.peer) {
2028 sharesCameraToo = true;
2029 break;
2030 }
2031 }
2032 if (!sharesCameraToo) {
2033 ++result;
2034 }
2035 }
2036 }
2037 return result;
2038 }
2039
emitShareCameraError()2040 bool GroupCall::emitShareCameraError() {
2041 const auto emitError = [=](Error error) {
2042 emitShareCameraError(error);
2043 return true;
2044 };
2045 if (const auto real = lookupReal()
2046 ; real && activeVideoSendersCount() >= real->unmutedVideoLimit()) {
2047 return emitError(Error::DisabledNoCamera);
2048 } else if (!videoIsWorking()) {
2049 return emitError(Error::DisabledNoCamera);
2050 } else if (mutedByAdmin()) {
2051 return emitError(Error::MutedNoCamera);
2052 } else if (Webrtc::GetVideoInputList().empty()) {
2053 return emitError(Error::NoCamera);
2054 }
2055 return false;
2056 }
2057
emitShareCameraError(Error error)2058 void GroupCall::emitShareCameraError(Error error) {
2059 _cameraState = Webrtc::VideoState::Inactive;
2060 if (error == Error::CameraFailed
2061 && Webrtc::GetVideoInputList().empty()) {
2062 error = Error::NoCamera;
2063 }
2064 _errors.fire_copy(error);
2065 }
2066
emitShareScreenError()2067 bool GroupCall::emitShareScreenError() {
2068 const auto emitError = [=](Error error) {
2069 emitShareScreenError(error);
2070 return true;
2071 };
2072 if (const auto real = lookupReal()
2073 ; real && activeVideoSendersCount() >= real->unmutedVideoLimit()) {
2074 return emitError(Error::DisabledNoScreen);
2075 } else if (!videoIsWorking()) {
2076 return emitError(Error::DisabledNoScreen);
2077 } else if (mutedByAdmin()) {
2078 return emitError(Error::MutedNoScreen);
2079 }
2080 return false;
2081 }
2082
// Reports a screencast sharing error. The state is reset to Inactive
// before firing so that error listeners observe the final state.
void GroupCall::emitShareScreenError(Error error) {
	_screenState = Webrtc::VideoState::Inactive;
	_errors.fire_copy(error);
}
2087
// Wires up reactions to our own camera and screencast state changes:
// creating/stopping the capturers, updating local tracks and notifying
// the server about pause/stop transitions.
void GroupCall::setupOutgoingVideo() {
	using Webrtc::VideoState;

	_cameraState.value(
	) | rpl::combine_previous(
	) | rpl::filter([=](VideoState previous, VideoState state) {
		// Recursive entrance may happen if error happens when activating.
		return (previous != state);
	}) | rpl::start_with_next([=](VideoState previous, VideoState state) {
		const auto wasActive = (previous != VideoState::Inactive);
		const auto nowPaused = (state == VideoState::Paused);
		const auto nowActive = (state != VideoState::Inactive);
		if (wasActive == nowActive) {
			// Active <-> Paused transition: only the paused flag changed.
			Assert(wasActive && nowActive);
			sendSelfUpdate(SendUpdateType::CameraPaused);
			markTrackPaused({
				VideoEndpointType::Camera,
				joinAs(),
				_cameraEndpoint
			}, nowPaused);
			return;
		}
		if (nowActive) {
			if (emitShareCameraError()) {
				return;
			} else if (!_cameraCapture) {
				// First activation - create the camera capturer.
				_cameraCapture = _delegate->groupCallGetVideoCapture(
					_cameraInputId);
				if (!_cameraCapture) {
					return emitShareCameraError(Error::CameraFailed);
				}
				const auto weak = base::make_weak(this);
				_cameraCapture->setOnFatalError([=] {
					crl::on_main(weak, [=] {
						emitShareCameraError(Error::CameraFailed);
					});
				});
			} else {
				// Reuse the existing capturer with the current device.
				_cameraCapture->switchToDevice(
					_cameraInputId.toStdString(),
					false);
			}
			if (_instance) {
				_instance->setVideoCapture(_cameraCapture);
			}
			_cameraCapture->setState(tgcalls::VideoState::Active);
		} else if (_cameraCapture) {
			// Deactivated - keep the capturer around, just stop it.
			_cameraCapture->setState(tgcalls::VideoState::Inactive);
		}
		_isSharingCamera = nowActive;
		markEndpointActive({
			VideoEndpointType::Camera,
			joinAs(),
			_cameraEndpoint
		}, nowActive, nowPaused);
		sendSelfUpdate(SendUpdateType::CameraStopped);
		applyMeInCallLocally();
	}, _lifetime);

	// Same pipeline for the screencast, with its own capturer/instance.
	_screenState.value(
	) | rpl::combine_previous(
	) | rpl::filter([=](VideoState previous, VideoState state) {
		// Recursive entrance may happen if error happens when activating.
		return (previous != state);
	}) | rpl::start_with_next([=](VideoState previous, VideoState state) {
		const auto wasActive = (previous != VideoState::Inactive);
		const auto nowPaused = (state == VideoState::Paused);
		const auto nowActive = (state != VideoState::Inactive);
		if (wasActive == nowActive) {
			// Active <-> Paused transition: only the paused flag changed.
			Assert(wasActive && nowActive);
			sendSelfUpdate(SendUpdateType::ScreenPaused);
			markTrackPaused({
				VideoEndpointType::Screen,
				joinAs(),
				_screenEndpoint
			}, nowPaused);
			return;
		}
		if (nowActive) {
			if (emitShareScreenError()) {
				return;
			} else if (!_screenCapture) {
				// First activation - create the screencast capturer.
				_screenCapture = std::shared_ptr<
					tgcalls::VideoCaptureInterface
				>(tgcalls::VideoCaptureInterface::Create(
					tgcalls::StaticThreads::getThreads(),
					_screenDeviceId.toStdString()));
				if (!_screenCapture) {
					return emitShareScreenError(Error::ScreenFailed);
				}
				const auto weak = base::make_weak(this);
				_screenCapture->setOnFatalError([=] {
					crl::on_main(weak, [=] {
						emitShareScreenError(Error::ScreenFailed);
					});
				});
				// NOTE(review): presumably fired when the capture gets
				// paused/resumed externally - mirror it in _screenState.
				_screenCapture->setOnPause([=](bool paused) {
					crl::on_main(weak, [=] {
						if (isSharingScreen()) {
							_screenState = paused
								? VideoState::Paused
								: VideoState::Active;
						}
					});
				});
			} else {
				// Reuse the existing capturer with the current source.
				_screenCapture->switchToDevice(
					_screenDeviceId.toStdString(),
					true);
			}
			if (_screenInstance) {
				_screenInstance->setVideoCapture(_screenCapture);
			}
			_screenCapture->setState(tgcalls::VideoState::Active);
		} else if (_screenCapture) {
			// Deactivated - keep the capturer around, just stop it.
			_screenCapture->setState(tgcalls::VideoState::Inactive);
		}
		_isSharingScreen = nowActive;
		markEndpointActive({
			VideoEndpointType::Screen,
			joinAs(),
			_screenEndpoint
		}, nowActive, nowPaused);
		_screenJoinState.nextActionPending = true;
		checkNextJoinAction();
	}, _lifetime);
}
2215
changeTitle(const QString & title)2216 void GroupCall::changeTitle(const QString &title) {
2217 const auto real = lookupReal();
2218 if (!real || real->title() == title) {
2219 return;
2220 }
2221
2222 _api.request(MTPphone_EditGroupCallTitle(
2223 inputCall(),
2224 MTP_string(title)
2225 )).done([=](const MTPUpdates &result) {
2226 _peer->session().api().applyUpdates(result);
2227 _titleChanged.fire({});
2228 }).fail([=](const MTP::Error &error) {
2229 }).send();
2230 }
2231
toggleRecording(bool enabled,const QString & title,bool video,bool videoPortrait)2232 void GroupCall::toggleRecording(
2233 bool enabled,
2234 const QString &title,
2235 bool video,
2236 bool videoPortrait) {
2237 const auto real = lookupReal();
2238 if (!real) {
2239 return;
2240 }
2241
2242 const auto already = (real->recordStartDate() != 0);
2243 if (already == enabled) {
2244 return;
2245 }
2246
2247 if (!enabled) {
2248 _recordingStoppedByMe = true;
2249 }
2250 using Flag = MTPphone_ToggleGroupCallRecord::Flag;
2251 _api.request(MTPphone_ToggleGroupCallRecord(
2252 MTP_flags((enabled ? Flag::f_start : Flag(0))
2253 | (video ? Flag::f_video : Flag(0))
2254 | (title.isEmpty() ? Flag(0) : Flag::f_title)),
2255 inputCall(),
2256 MTP_string(title),
2257 MTP_bool(videoPortrait)
2258 )).done([=](const MTPUpdates &result) {
2259 _peer->session().api().applyUpdates(result);
2260 _recordingStoppedByMe = false;
2261 }).fail([=](const MTP::Error &error) {
2262 _recordingStoppedByMe = false;
2263 }).send();
2264 }
2265
// Creates the main tgcalls group instance if it does not exist yet.
// Returns false when the instance was already created.
bool GroupCall::tryCreateController() {
	if (_instance) {
		return false;
	}
	const auto &settings = Core::App().settings();

	// Callbacks from tgcalls arrive on other threads; wrapping them in
	// crl::on_main(weak, ...) makes them no-ops once the guard dies.
	const auto weak = base::make_weak(&_instanceGuard);
	const auto myLevel = std::make_shared<tgcalls::GroupLevelValue>();
	tgcalls::GroupInstanceDescriptor descriptor = {
		.threads = tgcalls::StaticThreads::getThreads(),
		.config = tgcalls::GroupConfig{
		},
		.networkStateUpdated = [=](tgcalls::GroupNetworkState networkState) {
			crl::on_main(weak, [=] { setInstanceConnected(networkState); });
		},
		.audioLevelsUpdated = [=](const tgcalls::GroupLevelsUpdate &data) {
			const auto &updates = data.updates;
			if (updates.empty()) {
				return;
			} else if (updates.size() == 1 && !updates.front().ssrc) {
				// A single zero-ssrc update is our own microphone level.
				const auto &value = updates.front().value;
				// Don't send many 0 while we're muted.
				if (myLevel->level == value.level
					&& myLevel->voice == value.voice) {
					return;
				}
				*myLevel = updates.front().value;
			}
			crl::on_main(weak, [=] { audioLevelsUpdated(data); });
		},
		.initialInputDeviceId = _audioInputId.toStdString(),
		.initialOutputDeviceId = _audioOutputId.toStdString(),
		.createAudioDeviceModule = Webrtc::AudioDeviceModuleCreator(
			settings.callAudioBackend()),
		.videoCapture = _cameraCapture,
		// Supplies our approximate server time to tgcalls.
		.requestCurrentTime = [=, call = base::make_weak(this)](
				std::function<void(int64_t)> done) {
			auto result = std::make_shared<RequestCurrentTimeTask>(
				call,
				std::move(done));
			crl::on_main(weak, [=] {
				result->done(approximateServerTimeInMs());
			});
			return result;
		},
		// Loads audio broadcast parts through broadcastPartStart().
		.requestAudioBroadcastPart = [=, call = base::make_weak(this)](
				int64_t time,
				int64_t period,
				std::function<void(tgcalls::BroadcastPart &&)> done) {
			auto result = std::make_shared<LoadPartTask>(
				call,
				time,
				period,
				std::move(done));
			crl::on_main(weak, [=]() mutable {
				broadcastPartStart(std::move(result));
			});
			return result;
		},
		// Loads video broadcast parts through broadcastPartStart().
		.requestVideoBroadcastPart = [=, call = base::make_weak(this)](
				int64_t time,
				int64_t period,
				int32_t channel,
				tgcalls::VideoChannelDescription::Quality quality,
				std::function<void(tgcalls::BroadcastPart &&)> done) {
			auto result = std::make_shared<LoadPartTask>(
				call,
				time,
				period,
				channel,
				quality,
				std::move(done));
			crl::on_main(weak, [=]() mutable {
				broadcastPartStart(std::move(result));
			});
			return result;
		},
		.videoContentType = tgcalls::VideoContentType::Generic,
		.initialEnableNoiseSuppression
			= settings.groupCallNoiseSuppression(),
		// Resolves audio ssrcs to media channel descriptions on demand.
		.requestMediaChannelDescriptions = [=, call = base::make_weak(this)](
				const std::vector<uint32_t> &ssrcs,
				std::function<void(
					std::vector<tgcalls::MediaChannelDescription> &&)> done) {
			auto result = std::make_shared<MediaChannelDescriptionsTask>(
				call,
				ssrcs,
				std::move(done));
			crl::on_main(weak, [=]() mutable {
				mediaChannelDescriptionsStart(std::move(result));
			});
			return result;
		},
	};
	if (Logs::DebugEnabled()) {
		// With debug logs enabled, keep a tgcalls log in our log folder.
		auto callLogFolder = cWorkingDir() + qsl("DebugLogs");
		auto callLogPath = callLogFolder + qsl("/last_group_call_log.txt");
		auto callLogNative = QDir::toNativeSeparators(callLogPath);
		descriptor.config.need_log = true;
#ifdef Q_OS_WIN
		descriptor.config.logPath.data = callLogNative.toStdWString();
#else // Q_OS_WIN
		const auto callLogUtf = QFile::encodeName(callLogNative);
		descriptor.config.logPath.data.resize(callLogUtf.size());
		ranges::copy(callLogUtf, descriptor.config.logPath.data.begin());
#endif // Q_OS_WIN
		QFile(callLogPath).remove();
		QDir().mkpath(callLogFolder);
	} else {
		descriptor.config.need_log = false;
	}

	LOG(("Call Info: Creating group instance"));
	_instance = std::make_unique<tgcalls::GroupInstanceCustomImpl>(
		std::move(descriptor));

	// Sync the freshly created instance with the current local state.
	updateInstanceMuteState();
	updateInstanceVolumes();
	for (auto &[endpoint, sink] : base::take(_pendingVideoOutputs)) {
		_instance->addIncomingVideoOutput(endpoint, std::move(sink.data));
	}
	//raw->setAudioOutputDuckingEnabled(settings.callAudioDuckingEnabled());
	return true;
}
2390
// Creates the screencast tgcalls group instance if it does not exist
// yet. Returns false when the instance was already created.
bool GroupCall::tryCreateScreencast() {
	if (_screenInstance) {
		return false;
	}

	// Guard for callbacks arriving from tgcalls threads.
	const auto weak = base::make_weak(&_screenInstanceGuard);
	tgcalls::GroupInstanceDescriptor descriptor = {
		.threads = tgcalls::StaticThreads::getThreads(),
		.config = tgcalls::GroupConfig{
		},
		.networkStateUpdated = [=](tgcalls::GroupNetworkState networkState) {
			crl::on_main(weak, [=] {
				setScreenInstanceConnected(networkState);
			});
		},
		// NOTE(review): a loopback ADM is used here instead of the real
		// audio backend - presumably this instance does not need actual
		// audio playback/recording devices.
		.createAudioDeviceModule = Webrtc::LoopbackAudioDeviceModuleCreator(),
		.videoCapture = _screenCapture,
		.videoContentType = tgcalls::VideoContentType::Screencast,
	};

	LOG(("Call Info: Creating group screen instance"));
	_screenInstance = std::make_unique<tgcalls::GroupInstanceCustomImpl>(
		std::move(descriptor));

	// Screencast audio is sent only when the user opted in.
	_screenInstance->setIsMuted(!_screenWithAudio);

	return true;
}
2419
// Requests one broadcast (stream) part from the stream DC and feeds the
// result (or an error status) back into the tgcalls LoadPartTask.
void GroupCall::broadcastPartStart(std::shared_ptr<LoadPartTask> task) {
	const auto raw = task.get();
	const auto time = raw->time();
	const auto scale = raw->scale();
	const auto videoChannel = raw->videoChannel();
	const auto videoQuality = raw->videoQuality();
	// Completes the task and forgets the in-flight request entry.
	const auto finish = [=](tgcalls::BroadcastPart &&part) {
		raw->done(std::move(part));
		_broadcastParts.erase(raw);
	};
	using Status = tgcalls::BroadcastPart::Status;
	using Quality = tgcalls::VideoChannelDescription::Quality;
	using Flag = MTPDinputGroupCallStream::Flag;
	const auto requestId = _api.request(MTPupload_GetFile(
		MTP_flags(0),
		MTP_inputGroupCallStream(
			MTP_flags(videoChannel
				? (Flag::f_video_channel | Flag::f_video_quality)
				: Flag(0)),
			inputCall(),
			MTP_long(time),
			MTP_int(scale),
			MTP_int(videoChannel),
			// Quality mapping: Full -> 2, Medium -> 1, otherwise 0.
			MTP_int((videoQuality == Quality::Full)
				? 2
				: (videoQuality == Quality::Medium)
				? 1
				: 0)),
		MTP_int(0),
		MTP_int(128 * 1024)
	)).done([=](
			const MTPupload_File &result,
			const MTP::Response &response) {
		result.match([&](const MTPDupload_file &data) {
			const auto size = data.vbytes().v.size();
			auto bytes = std::vector<uint8_t>(size);
			memcpy(bytes.data(), data.vbytes().v.constData(), size);
			finish({
				.timestampMilliseconds = time,
				.responseTimestamp = TimestampFromMsgId(response.outerMsgId),
				.status = Status::Success,
				.data = std::move(bytes),
			});
		}, [&](const MTPDupload_fileCdnRedirect &data) {
			// A CDN redirect is unexpected here - ask for a resync.
			LOG(("Voice Chat Stream Error: fileCdnRedirect received."));
			finish({
				.timestampMilliseconds = time,
				.responseTimestamp = TimestampFromMsgId(response.outerMsgId),
				.status = Status::ResyncNeeded,
			});
		});
	}).fail([=](const MTP::Error &error, const MTP::Response &response) {
		if (error.type() == u"GROUPCALL_JOIN_MISSING"_q
			|| error.type() == u"GROUPCALL_FORBIDDEN"_q) {
			// The server no longer sees us in the call - cancel every
			// in-flight part request and rejoin.
			for (const auto &[task, part] : _broadcastParts) {
				_api.request(part.requestId).cancel();
			}
			setState(State::Joining);
			rejoin();
			return;
		}
		// Flood-wait and TIME_TOO_BIG mean the part simply is not ready
		// yet; any other error requires a resync.
		const auto status = (MTP::IsFloodError(error)
			|| error.type() == u"TIME_TOO_BIG"_q)
			? Status::NotReady
			: Status::ResyncNeeded;
		finish({
			.timestampMilliseconds = time,
			.responseTimestamp = TimestampFromMsgId(response.outerMsgId),
			.status = status,
		});
	}).handleAllErrors().toDC(
		MTP::groupCallStreamDcId(_broadcastDcId)
	).send();
	// Track the request so it can be cancelled from broadcastPartCancel.
	_broadcastParts.emplace(raw, LoadingPart{ std::move(task), requestId });
}
2495
broadcastPartCancel(not_null<LoadPartTask * > task)2496 void GroupCall::broadcastPartCancel(not_null<LoadPartTask*> task) {
2497 const auto i = _broadcastParts.find(task);
2498 if (i != end(_broadcastParts)) {
2499 _api.request(i->second.requestId).cancel();
2500 _broadcastParts.erase(i);
2501 }
2502 }
2503
// Starts resolving a media-channel-descriptions request from tgcalls.
void GroupCall::mediaChannelDescriptionsStart(
		std::shared_ptr<MediaChannelDescriptionsTask> task) {
	const auto real = lookupReal();
	// Without the real call data (or before the instance mode is known)
	// nothing can be resolved yet: remember the ssrcs as unresolved and
	// queue the task for later.
	if (!real || (_instanceMode == InstanceMode::None)) {
		for (const auto ssrc : task->ssrcs()) {
			_unresolvedSsrcs.emplace(ssrc);
		}
		_mediaChannelDescriptionses.emplace(std::move(task));
		return;
	}
	// Try to fill right away; if some ssrcs are still unknown, keep the
	// task pending (fill() collected them into _unresolvedSsrcs).
	if (!mediaChannelDescriptionsFill(task.get())) {
		_mediaChannelDescriptionses.emplace(std::move(task));
		Assert(!_unresolvedSsrcs.empty());
	}
	// Ask the data layer to resolve participants for the unknown ssrcs.
	if (!_unresolvedSsrcs.empty()) {
		real->resolveParticipants(base::take(_unresolvedSsrcs));
	}
}
2522
// Answers as many of the task's ssrcs as possible. Returns true when
// finishWithAdding() reported the task as fully finished. Without a
// `resolved` callback, unknown ssrcs are collected into
// _unresolvedSsrcs; with one, `resolved(ssrc)` decides whether the
// unknown ssrc should be finished with an empty description.
bool GroupCall::mediaChannelDescriptionsFill(
		not_null<MediaChannelDescriptionsTask*> task,
		Fn<bool(uint32)> resolved) {
	using Channel = tgcalls::MediaChannelDescription;
	auto result = false;
	const auto real = lookupReal();
	Assert(real != nullptr);
	for (const auto ssrc : task->ssrcs()) {
		const auto add = [&](
				std::optional<Channel> channel,
				bool screen = false) {
			if (task->finishWithAdding(ssrc, std::move(channel), screen)) {
				result = true;
			}
		};
		if (const auto byAudio = real->participantPeerByAudioSsrc(ssrc)) {
			// Known audio ssrc - describe it as an audio channel.
			add(Channel{
				.type = Channel::Type::Audio,
				.audioSsrc = ssrc,
			});
		} else if (!resolved) {
			_unresolvedSsrcs.emplace(ssrc);
		} else if (resolved(ssrc)) {
			// Resolution finished but the ssrc is still unknown.
			add(std::nullopt);
		}
	}
	return result;
}
2551
mediaChannelDescriptionsCancel(not_null<MediaChannelDescriptionsTask * > task)2552 void GroupCall::mediaChannelDescriptionsCancel(
2553 not_null<MediaChannelDescriptionsTask*> task) {
2554 const auto i = _mediaChannelDescriptionses.find(task.get());
2555 if (i != end(_mediaChannelDescriptionses)) {
2556 _mediaChannelDescriptionses.erase(i);
2557 }
2558 }
2559
approximateServerTimeInMs() const2560 int64 GroupCall::approximateServerTimeInMs() const {
2561 Expects(_serverTimeMs != 0);
2562
2563 return _serverTimeMs + (crl::now() - _serverTimeMsGotAt);
2564 }
2565
// Rebuilds the list of remote video channels we subscribe to, with a
// min/max quality per endpoint, and caps the total quality budget
// before handing it to the tgcalls instance.
void GroupCall::updateRequestedVideoChannels() {
	_requestedVideoChannelsUpdateScheduled = false;
	const auto real = lookupReal();
	if (!real || !_instance) {
		return;
	}
	auto channels = std::vector<tgcalls::VideoChannelDescription>();
	using Quality = tgcalls::VideoChannelDescription::Quality;
	channels.reserve(_activeVideoTracks.size());
	const auto &camera = cameraSharingEndpoint();
	const auto &screen = screenSharingEndpoint();
	auto mediums = 0;
	auto fullcameras = 0;
	auto fullscreencasts = 0;
	for (const auto &[endpoint, video] : _activeVideoTracks) {
		const auto &endpointId = endpoint.id;
		// Skip our own outgoing endpoints.
		if (endpointId == camera || endpointId == screen) {
			continue;
		}
		const auto participant = real->participantByEndpoint(endpointId);
		const auto params = (participant && participant->ssrc)
			? participant->videoParams.get()
			: nullptr;
		if (!params) {
			continue;
		}
		// Full-quality screencasts must not drop below Full; everything
		// else may fall back to a thumbnail.
		const auto min = (video->quality == Group::VideoQuality::Full
			&& endpoint.type == VideoEndpointType::Screen)
			? Quality::Full
			: Quality::Thumbnail;
		const auto max = (video->quality == Group::VideoQuality::Full)
			? Quality::Full
			: (video->quality == Group::VideoQuality::Medium
				&& endpoint.type != VideoEndpointType::Screen)
			? Quality::Medium
			: Quality::Thumbnail;
		if (max == Quality::Full) {
			if (endpoint.type == VideoEndpointType::Screen) {
				++fullscreencasts;
			} else {
				++fullcameras;
			}
		} else if (max == Quality::Medium) {
			++mediums;
		}
		channels.push_back({
			.audioSsrc = participant->ssrc,
			.endpointId = endpointId,
			.ssrcGroups = (params->camera.endpointId == endpointId
				? params->camera.ssrcGroups
				: params->screen.ssrcGroups),
			.minQuality = min,
			.maxQuality = max,
		});
	}

	// We limit `count(Full) * kFullAsMediumsCount + count(medium)`.
	//
	// Try to preserve all qualities; If not
	// Try to preserve all screencasts as Full and cameras as Medium; If not
	// Try to preserve all screencasts as Full; If not
	// Try to preserve all cameras as Medium;
	const auto mediumsCount = mediums
		+ (fullcameras + fullscreencasts) * kFullAsMediumsCount;
	const auto downgradeSome = (mediumsCount > kMaxMediumQualities);
	const auto downgradeAll = (fullscreencasts * kFullAsMediumsCount)
		> kMaxMediumQualities;
	if (downgradeSome) {
		for (auto &channel : channels) {
			if (channel.maxQuality == Quality::Full) {
				// Full cameras have a Thumbnail min quality while full
				// screencasts have a Full min quality (set above).
				const auto camera = (channel.minQuality != Quality::Full);
				if (camera) {
					channel.maxQuality = Quality::Medium;
				} else if (downgradeAll) {
					channel.maxQuality
						= channel.minQuality
						= Quality::Thumbnail;
					--fullscreencasts;
				}
			}
		}
		mediums += fullcameras;
		fullcameras = 0;
		if (downgradeAll) {
			fullscreencasts = 0;
		}
	}
	// If mediums alone still exceed the limit, drop them to thumbnails.
	if (mediums > kMaxMediumQualities) {
		for (auto &channel : channels) {
			if (channel.maxQuality == Quality::Medium) {
				channel.maxQuality = Quality::Thumbnail;
			}
		}
	}
	_instance->setRequestedVideoChannels(std::move(channels));
}
2662
updateRequestedVideoChannelsDelayed()2663 void GroupCall::updateRequestedVideoChannelsDelayed() {
2664 if (_requestedVideoChannelsUpdateScheduled) {
2665 return;
2666 }
2667 _requestedVideoChannelsUpdateScheduled = true;
2668 crl::on_main(this, [=] {
2669 if (_requestedVideoChannelsUpdateScheduled) {
2670 updateRequestedVideoChannels();
2671 }
2672 });
2673 }
2674
// Recomputes the set of active video endpoints from the real call's
// participants: keeps/refreshes endpoints that are still active and
// deactivates the ones that disappeared.
void GroupCall::fillActiveVideoEndpoints() {
	const auto real = lookupReal();
	Assert(real != nullptr);

	// Video works only while we joined the call with video support; if
	// not, also stop our own camera/screencast.
	const auto me = real->participantByPeer(joinAs());
	if (me && me->videoJoined) {
		_videoIsWorking = true;
	} else {
		_videoIsWorking = false;
		toggleVideo(false);
		toggleScreenSharing(std::nullopt);
	}

	// Start with all currently tracked endpoints in `removed`; every
	// endpoint re-fed below is taken out, the rest gets deactivated.
	const auto &large = _videoEndpointLarge.current();
	auto largeFound = false;
	auto endpoints = _activeVideoTracks | ranges::views::transform([](
			const auto &pair) {
		return pair.first;
	});
	auto removed = base::flat_set<VideoEndpoint>(
		begin(endpoints),
		end(endpoints));
	// Marks one endpoint as (still) active: a known endpoint only gets
	// its paused flag refreshed, a new one is activated.
	const auto feedOne = [&](VideoEndpoint endpoint, bool paused) {
		if (endpoint.empty()) {
			return;
		} else if (endpoint == large) {
			largeFound = true;
		}
		if (removed.remove(endpoint)) {
			markTrackPaused(endpoint, paused);
		} else {
			markEndpointActive(std::move(endpoint), true, paused);
		}
	};
	using Type = VideoEndpointType;
	for (const auto &participant : real->participants()) {
		const auto camera = GetCameraEndpoint(participant.videoParams);
		if (camera != _cameraEndpoint
			&& camera != _screenEndpoint
			&& participant.peer != joinAs()) {
			const auto paused = IsCameraPaused(participant.videoParams);
			feedOne({ Type::Camera, participant.peer, camera }, paused);
		}
		const auto screen = GetScreenEndpoint(participant.videoParams);
		if (screen != _cameraEndpoint
			&& screen != _screenEndpoint
			&& participant.peer != joinAs()) {
			const auto paused = IsScreenPaused(participant.videoParams);
			feedOne({ Type::Screen, participant.peer, screen }, paused);
		}
	}
	// Our own outgoing endpoints.
	feedOne(
		{ Type::Camera, joinAs(), cameraSharingEndpoint() },
		isCameraPaused());
	feedOne(
		{ Type::Screen, joinAs(), screenSharingEndpoint() },
		isScreenPaused());
	// The endpoint shown large disappeared - reset the large selection.
	if (large && !largeFound) {
		setVideoEndpointLarge({});
	}
	for (const auto &endpoint : removed) {
		markEndpointActive(endpoint, false, false);
	}
	updateRequestedVideoChannels();
}
2740
updateInstanceMuteState()2741 void GroupCall::updateInstanceMuteState() {
2742 Expects(_instance != nullptr);
2743
2744 const auto state = muted();
2745 _instance->setIsMuted(state != MuteState::Active
2746 && state != MuteState::PushToTalk);
2747 }
2748
updateInstanceVolumes()2749 void GroupCall::updateInstanceVolumes() {
2750 const auto real = lookupReal();
2751 if (!real) {
2752 return;
2753 }
2754
2755 const auto &participants = real->participants();
2756 for (const auto &participant : participants) {
2757 updateInstanceVolume(std::nullopt, participant);
2758 }
2759 }
2760
updateInstanceVolume(const std::optional<Data::GroupCallParticipant> & was,const Data::GroupCallParticipant & now)2761 void GroupCall::updateInstanceVolume(
2762 const std::optional<Data::GroupCallParticipant> &was,
2763 const Data::GroupCallParticipant &now) {
2764 const auto nonDefault = now.mutedByMe
2765 || (now.volume != Group::kDefaultVolume);
2766 const auto volumeChanged = was
2767 ? (was->volume != now.volume || was->mutedByMe != now.mutedByMe)
2768 : nonDefault;
2769 const auto additionalSsrc = GetAdditionalAudioSsrc(now.videoParams);
2770 const auto set = now.ssrc
2771 && (volumeChanged || (was && was->ssrc != now.ssrc));
2772 const auto additionalSet = additionalSsrc
2773 && (volumeChanged
2774 || (was && (GetAdditionalAudioSsrc(was->videoParams)
2775 != additionalSsrc)));
2776 const auto localVolume = now.mutedByMe
2777 ? 0.
2778 : (now.volume / float64(Group::kDefaultVolume));
2779 if (set) {
2780 _instance->setVolume(now.ssrc, localVolume);
2781 }
2782 if (additionalSet) {
2783 _instance->setVolume(additionalSsrc, localVolume);
2784 }
2785 }
2786
// Handles audio level updates from tgcalls: publishes level changes,
// sends a throttled "speaking" progress action for our own voice and
// maintains the last-spoke bookkeeping used by the participants list.
void GroupCall::audioLevelsUpdated(const tgcalls::GroupLevelsUpdate &data) {
	Expects(!data.updates.empty());

	auto check = false; // Someone crossed the speaking threshold.
	auto checkNow = false; // A checkLastSpoke() pass is needed now.
	const auto now = crl::now();
	const auto meMuted = [&] {
		const auto state = muted();
		return (state != MuteState::Active)
			&& (state != MuteState::PushToTalk);
	};
	for (const auto &[ssrcOrZero, value] : data.updates) {
		// A zero ssrc stands for our own microphone level.
		const auto ssrc = ssrcOrZero ? ssrcOrZero : _joinState.ssrc;
		if (!ssrc) {
			continue;
		}
		const auto level = value.level;
		const auto voice = value.voice;
		const auto me = (ssrc == _joinState.ssrc);
		// Suppress our own levels while we are muted.
		const auto ignore = me && meMuted();
		_levelUpdates.fire(LevelUpdate{
			.ssrc = ssrc,
			.value = ignore ? 0.f : level,
			.voice = (!ignore && voice),
			.me = me,
		});
		if (level <= kSpeakLevelThreshold) {
			continue;
		}
		// Throttled "speaking" send-action for our own voice.
		if (me
			&& voice
			&& (!_lastSendProgressUpdate
				|| _lastSendProgressUpdate + kUpdateSendActionEach < now)) {
			_lastSendProgressUpdate = now;
			_peer->session().sendProgressManager().update(
				_history,
				Api::SendProgressType::Speaking);
		}

		check = true;
		const auto i = _lastSpoke.find(ssrc);
		if (i == _lastSpoke.end()) {
			_lastSpoke.emplace(ssrc, Data::LastSpokeTimes{
				.anything = now,
				.voice = voice ? now : 0,
			});
			checkNow = true;
		} else {
			// Refresh the timestamps; force an immediate check when the
			// previous mark is at least a third of the interval old.
			if ((i->second.anything + kCheckLastSpokeInterval / 3 <= now)
				|| (voice
					&& i->second.voice + kCheckLastSpokeInterval / 3 <= now)) {
				checkNow = true;
			}
			i->second.anything = now;
			if (voice) {
				i->second.voice = now;
			}
		}
	}
	if (checkNow) {
		checkLastSpoke();
	} else if (check && !_lastSpokeCheckTimer.isActive()) {
		_lastSpokeCheckTimer.callEach(kCheckLastSpokeInterval / 2);
	}
}
2852
// Propagates last-spoke timestamps to the data layer, expiring entries
// that got too old, and manages the periodic re-check timer.
void GroupCall::checkLastSpoke() {
	const auto real = lookupReal();
	if (!real) {
		return;
	}

	// Entries live twice the check interval; the static_assert ensures
	// the data layer's sound status expires before we drop an entry.
	constexpr auto kKeepInListFor = kCheckLastSpokeInterval * 2;
	static_assert(Data::GroupCall::kSoundStatusKeptFor
		<= kKeepInListFor - (kCheckLastSpokeInterval / 3));

	auto hasRecent = false;
	const auto now = crl::now();
	auto list = base::take(_lastSpoke);
	for (auto i = list.begin(); i != list.end();) {
		// Copies, not references - the element may be erased just below
		// while `ssrc` / `when` are still used afterwards.
		const auto [ssrc, when] = *i;
		if (when.anything + kKeepInListFor >= now) {
			hasRecent = true;
			++i;
		} else {
			i = list.erase(i);
		}

		// Ignore my levels from microphone if I'm already muted.
		if (ssrc != _joinState.ssrc
			|| muted() == MuteState::Active
			|| muted() == MuteState::PushToTalk) {
			real->applyLastSpoke(ssrc, when, now);
		} else {
			// Report zero times so our own entry doesn't show speaking.
			real->applyLastSpoke(ssrc, { crl::time(), crl::time() }, now);
		}
	}
	_lastSpoke = std::move(list);

	// Keep the periodic timer alive only while recent entries remain.
	if (!hasRecent) {
		_lastSpokeCheckTimer.cancel();
	} else if (!_lastSpokeCheckTimer.isActive()) {
		_lastSpokeCheckTimer.callEach(kCheckLastSpokeInterval / 3);
	}
}
2892
// While in Connecting state, verifies with the server that our audio
// (and, if present, screencast) ssrcs are still part of the call, and
// triggers a (screen) rejoin when they are not. Reschedules itself via
// _checkJoinedTimer as long as we stay in Connecting state.
void GroupCall::checkJoined() {
	if (state() != State::Connecting || !_id || !_joinState.ssrc) {
		return;
	}
	auto sources = QVector<MTPint>(1, MTP_int(_joinState.ssrc));
	if (_screenJoinState.ssrc) {
		sources.push_back(MTP_int(_screenJoinState.ssrc));
	}
	_api.request(MTPphone_CheckGroupCall(
		inputCall(),
		MTP_vector<MTPint>(std::move(sources))
	)).done([=](const MTPVector<MTPint> &result) {
		if (!ranges::contains(result.v, MTP_int(_joinState.ssrc))) {
			// Our main ssrc was dropped by the server - rejoin.
			LOG(("Call Info: Rejoin after no my ssrc in checkGroupCall."));
			_joinState.nextActionPending = true;
			checkNextJoinAction();
		} else {
			if (state() == State::Connecting) {
				// Still connecting - schedule the next check.
				_checkJoinedTimer.callOnce(kCheckJoinedTimeout);
			}
			if (_screenJoinState.ssrc
				&& !ranges::contains(
					result.v,
					MTP_int(_screenJoinState.ssrc))) {
				// Only the screencast ssrc is gone - rejoin just it.
				LOG(("Call Info: "
					"Screen rejoin after _screenSsrc not found."));
				_screenJoinState.nextActionPending = true;
				checkNextJoinAction();
			}
		}
	}).fail([=](const MTP::Error &error) {
		LOG(("Call Info: Full rejoin after error '%1' in checkGroupCall."
			).arg(error.type()));
		rejoin();
	}).send();
}
2929
void GroupCall::setInstanceConnected(
		tgcalls::GroupNetworkState networkState) {
	// Maps the raw tgcalls network state onto our tri-state InstanceState
	// and keeps the call State (Joined/Connecting) in sync with it.
	const auto inTransit = networkState.isTransitioningFromBroadcastToRtc;
	const auto instanceState = !networkState.isConnected
		? InstanceState::Disconnected
		: inTransit
		? InstanceState::TransitionToRtc
		: InstanceState::Connected;
	const auto connected = (instanceState != InstanceState::Disconnected);
	if (_instanceState.current() == instanceState
		&& _instanceTransitioning == inTransit) {
		return;
	}
	// Read the previous transition flag before overwriting it: finishing
	// the broadcast->RTC transition while muted means the user may now
	// be notified that speaking is allowed.
	const auto nowCanSpeak = connected
		&& _instanceTransitioning
		&& !inTransit
		&& (muted() == MuteState::Muted);
	_instanceTransitioning = inTransit;
	_instanceState = instanceState;
	if (state() == State::Connecting && connected) {
		setState(State::Joined);
	} else if (state() == State::Joined && !connected) {
		setState(State::Connecting);
	}
	if (nowCanSpeak) {
		notifyAboutAllowedToSpeak();
	}
	if (!_hadJoinedState && state() == State::Joined) {
		checkFirstTimeJoined();
	}
}
2961
setScreenInstanceConnected(tgcalls::GroupNetworkState networkState)2962 void GroupCall::setScreenInstanceConnected(
2963 tgcalls::GroupNetworkState networkState) {
2964 const auto inTransit = networkState.isTransitioningFromBroadcastToRtc;
2965 const auto screenInstanceState = !networkState.isConnected
2966 ? InstanceState::Disconnected
2967 : inTransit
2968 ? InstanceState::TransitionToRtc
2969 : InstanceState::Connected;
2970 if (_screenInstanceState.current() == screenInstanceState) {
2971 return;
2972 }
2973 _screenInstanceState = screenInstanceState;
2974 }
2975
checkFirstTimeJoined()2976 void GroupCall::checkFirstTimeJoined() {
2977 if (_hadJoinedState || state() != State::Joined) {
2978 return;
2979 }
2980 _hadJoinedState = true;
2981 applyGlobalShortcutChanges();
2982 _delegate->groupCallPlaySound(Delegate::GroupCallSound::Started);
2983 }
2984
notifyAboutAllowedToSpeak()2985 void GroupCall::notifyAboutAllowedToSpeak() {
2986 if (!_hadJoinedState) {
2987 return;
2988 }
2989 _delegate->groupCallPlaySound(
2990 Delegate::GroupCallSound::AllowedToSpeak);
2991 _allowedToSpeakNotifications.fire({});
2992 }
2993
setInstanceMode(InstanceMode mode)2994 void GroupCall::setInstanceMode(InstanceMode mode) {
2995 Expects(_instance != nullptr);
2996
2997 _instanceMode = mode;
2998
2999 using Mode = tgcalls::GroupConnectionMode;
3000 _instance->setConnectionMode([&] {
3001 switch (_instanceMode) {
3002 case InstanceMode::None: return Mode::GroupConnectionModeNone;
3003 case InstanceMode::Rtc: return Mode::GroupConnectionModeRtc;
3004 case InstanceMode::Stream: return Mode::GroupConnectionModeBroadcast;
3005 }
3006 Unexpected("Mode in GroupCall::setInstanceMode.");
3007 }(), true);
3008 }
3009
setScreenInstanceMode(InstanceMode mode)3010 void GroupCall::setScreenInstanceMode(InstanceMode mode) {
3011 Expects(_screenInstance != nullptr);
3012
3013 _screenInstanceMode = mode;
3014
3015 using Mode = tgcalls::GroupConnectionMode;
3016 _screenInstance->setConnectionMode([&] {
3017 switch (_screenInstanceMode) {
3018 case InstanceMode::None: return Mode::GroupConnectionModeNone;
3019 case InstanceMode::Rtc: return Mode::GroupConnectionModeRtc;
3020 case InstanceMode::Stream: return Mode::GroupConnectionModeBroadcast;
3021 }
3022 Unexpected("Mode in GroupCall::setInstanceMode.");
3023 }(), true);
3024 }
3025
maybeSendMutedUpdate(MuteState previous)3026 void GroupCall::maybeSendMutedUpdate(MuteState previous) {
3027 // Send Active <-> !Active or ForceMuted <-> RaisedHand changes.
3028 const auto now = muted();
3029 if ((previous == MuteState::Active && now == MuteState::Muted)
3030 || (now == MuteState::Active
3031 && (previous == MuteState::Muted
3032 || previous == MuteState::PushToTalk))) {
3033 sendSelfUpdate(SendUpdateType::Mute);
3034 } else if ((now == MuteState::ForceMuted
3035 && previous == MuteState::RaisedHand)
3036 || (now == MuteState::RaisedHand
3037 && previous == MuteState::ForceMuted)) {
3038 sendSelfUpdate(SendUpdateType::RaiseHand);
3039 }
3040 }
3041
sendPendingSelfUpdates()3042 void GroupCall::sendPendingSelfUpdates() {
3043 if ((state() != State::Connecting && state() != State::Joined)
3044 || _selfUpdateRequestId) {
3045 return;
3046 }
3047 const auto updates = {
3048 SendUpdateType::Mute,
3049 SendUpdateType::RaiseHand,
3050 SendUpdateType::CameraStopped,
3051 SendUpdateType::CameraPaused,
3052 SendUpdateType::ScreenPaused,
3053 };
3054 for (const auto type : updates) {
3055 if (type == SendUpdateType::ScreenPaused
3056 && _screenJoinState.action != JoinAction::None) {
3057 continue;
3058 }
3059 if (_pendingSelfUpdates & type) {
3060 _pendingSelfUpdates &= ~type;
3061 sendSelfUpdate(type);
3062 return;
3063 }
3064 }
3065 }
3066
void GroupCall::sendSelfUpdate(SendUpdateType type) {
	// Sends one editGroupCallParticipant request for ourselves. Only one
	// such request may be in flight: queue the update type if we are
	// busy or not connected, and flush the queue from the done handler.
	if ((state() != State::Connecting && state() != State::Joined)
		|| _selfUpdateRequestId) {
		_pendingSelfUpdates |= type;
		return;
	}
	using Flag = MTPphone_EditGroupCallParticipant::Flag;
	_selfUpdateRequestId = _api.request(MTPphone_EditGroupCallParticipant(
		// Exactly one field flag is set, chosen by the update type;
		// all field values below are sent, the flag selects which one
		// the server actually applies.
		MTP_flags((type == SendUpdateType::RaiseHand)
			? Flag::f_raise_hand
			: (type == SendUpdateType::CameraStopped)
			? Flag::f_video_stopped
			: (type == SendUpdateType::CameraPaused)
			? Flag::f_video_paused
			: (type == SendUpdateType::ScreenPaused)
			? Flag::f_presentation_paused
			: Flag::f_muted),
		inputCall(),
		joinAs()->input,
		MTP_bool(muted() != MuteState::Active),
		MTP_int(100000), // volume
		MTP_bool(muted() == MuteState::RaisedHand),
		MTP_bool(!isSharingCamera()),
		MTP_bool(isCameraPaused()),
		MTP_bool(isScreenPaused())
	)).done([=](const MTPUpdates &result) {
		_selfUpdateRequestId = 0;
		_peer->session().api().applyUpdates(result);
		// Send the next queued update, if any.
		sendPendingSelfUpdates();
	}).fail([=](const MTP::Error &error) {
		_selfUpdateRequestId = 0;
		if (error.type() == u"GROUPCALL_FORBIDDEN"_q) {
			LOG(("Call Info: Rejoin after error '%1' in editGroupCallMember."
				).arg(error.type()));
			rejoin();
		}
	}).send();
}
3105
pinVideoEndpoint(VideoEndpoint endpoint)3106 void GroupCall::pinVideoEndpoint(VideoEndpoint endpoint) {
3107 _videoEndpointPinned = false;
3108 if (endpoint) {
3109 setVideoEndpointLarge(std::move(endpoint));
3110 _videoEndpointPinned = true;
3111 }
3112 }
3113
showVideoEndpointLarge(VideoEndpoint endpoint)3114 void GroupCall::showVideoEndpointLarge(VideoEndpoint endpoint) {
3115 if (_videoEndpointLarge.current() == endpoint) {
3116 return;
3117 }
3118 _videoEndpointPinned = false;
3119 setVideoEndpointLarge(std::move(endpoint));
3120 _videoLargeTillTime = crl::now() + kFixManualLargeVideoDuration;
3121 }
3122
setVideoEndpointLarge(VideoEndpoint endpoint)3123 void GroupCall::setVideoEndpointLarge(VideoEndpoint endpoint) {
3124 if (!endpoint) {
3125 _videoEndpointPinned = false;
3126 }
3127 _videoEndpointLarge = endpoint;
3128 }
3129
requestVideoQuality(const VideoEndpoint & endpoint,Group::VideoQuality quality)3130 void GroupCall::requestVideoQuality(
3131 const VideoEndpoint &endpoint,
3132 Group::VideoQuality quality) {
3133 if (!endpoint) {
3134 return;
3135 }
3136 const auto i = _activeVideoTracks.find(endpoint);
3137 if (i == end(_activeVideoTracks) || i->second->quality == quality) {
3138 return;
3139 }
3140 i->second->quality = quality;
3141 updateRequestedVideoChannelsDelayed();
3142 }
3143
setCurrentAudioDevice(bool input,const QString & deviceId)3144 void GroupCall::setCurrentAudioDevice(bool input, const QString &deviceId) {
3145 if (input) {
3146 _mediaDevices->switchToAudioInput(deviceId);
3147 } else {
3148 _mediaDevices->switchToAudioOutput(deviceId);
3149 }
3150 }
3151
toggleMute(const Group::MuteRequest & data)3152 void GroupCall::toggleMute(const Group::MuteRequest &data) {
3153 if (data.locallyOnly) {
3154 applyParticipantLocally(data.peer, data.mute, std::nullopt);
3155 } else {
3156 editParticipant(data.peer, data.mute, std::nullopt);
3157 }
3158 }
3159
changeVolume(const Group::VolumeRequest & data)3160 void GroupCall::changeVolume(const Group::VolumeRequest &data) {
3161 if (data.locallyOnly) {
3162 applyParticipantLocally(data.peer, false, data.volume);
3163 } else {
3164 editParticipant(data.peer, false, data.volume);
3165 }
3166 }
3167
void GroupCall::editParticipant(
		not_null<PeerData*> participantPeer,
		bool mute,
		std::optional<int> volume) {
	// Server-side mute/volume change for a call participant, applied
	// locally first for immediate UI feedback.
	const auto participant = LookupParticipant(_peer, _id, participantPeer);
	if (!participant) {
		return;
	}
	applyParticipantLocally(participantPeer, mute, volume);

	using Flag = MTPphone_EditGroupCallParticipant::Flag;
	const auto flags = Flag::f_muted
		| (volume.has_value() ? Flag::f_volume : Flag(0));
	_api.request(MTPphone_EditGroupCallParticipant(
		MTP_flags(flags),
		inputCall(),
		participantPeer->input,
		MTP_bool(mute),
		// The volume field is ignored by the server unless f_volume is
		// set; the clamp keeps a present value within [1, kMaxVolume].
		MTP_int(std::clamp(volume.value_or(0), 1, Group::kMaxVolume)),
		MTPBool(), // raise_hand
		MTPBool(), // video_muted
		MTPBool(), // video_paused
		MTPBool() // presentation_paused
	)).done([=](const MTPUpdates &result) {
		_peer->session().api().applyUpdates(result);
	}).fail([=](const MTP::Error &error) {
		if (error.type() == u"GROUPCALL_FORBIDDEN"_q) {
			LOG(("Call Info: Rejoin after error '%1' in editGroupCallMember."
				).arg(error.type()));
			rejoin();
		}
	}).send();
}
3201
std::variant<int, not_null<UserData*>> GroupCall::inviteUsers(
		const std::vector<not_null<UserData*>> &users) {
	// Invites the given users in slices of kMaxInvitePerSlice. Returns
	// the single invited user when exactly one was invited in total,
	// otherwise the overall count (0 when the call is not available).
	const auto real = lookupReal();
	if (!real) {
		return 0;
	}
	const auto owner = &_peer->owner();

	auto count = 0;
	auto slice = QVector<MTPInputUser>();
	auto result = std::variant<int, not_null<UserData*>>(0);
	slice.reserve(kMaxInvitePerSlice);
	const auto sendSlice = [&] {
		count += slice.size();
		_api.request(MTPphone_InviteToGroupCall(
			inputCall(),
			MTP_vector<MTPInputUser>(slice)
		)).done([=](const MTPUpdates &result) {
			_peer->session().api().applyUpdates(result);
		}).send();
		slice.clear();
	};
	for (const auto &user : users) {
		if (!count && slice.empty()) {
			// Remember the first user in case it ends up the only one.
			result = user;
		}
		owner->registerInvitedToCallUser(_id, _peer, user);
		slice.push_back(user->inputUser);
		if (slice.size() == kMaxInvitePerSlice) {
			sendSlice();
		}
	}
	if (count != 0 || slice.size() != 1) {
		// Not exactly one user: report the total number instead.
		result = int(count + slice.size());
	}
	if (!slice.empty()) {
		sendSlice();
	}
	return result;
}
3242
ensureGlobalShortcutManager()3243 auto GroupCall::ensureGlobalShortcutManager()
3244 -> std::shared_ptr<GlobalShortcutManager> {
3245 if (!_shortcutManager) {
3246 _shortcutManager = base::CreateGlobalShortcutManager();
3247 }
3248 return _shortcutManager;
3249 }
3250
void GroupCall::applyGlobalShortcutChanges() {
	// (Re)wires the global push-to-talk shortcut from current settings,
	// releasing the manager when push-to-talk is disabled or unavailable.
	auto &settings = Core::App().settings();
	if (!settings.groupCallPushToTalk()
		|| settings.groupCallPushToTalkShortcut().isEmpty()
		|| !base::GlobalShortcutsAvailable()
		|| !base::GlobalShortcutsAllowed()) {
		_shortcutManager = nullptr;
		_pushToTalk = nullptr;
		return;
	}
	ensureGlobalShortcutManager();
	const auto shortcut = _shortcutManager->shortcutFromSerialized(
		settings.groupCallPushToTalkShortcut());
	if (!shortcut) {
		// The stored shortcut could not be deserialized: reset the
		// push-to-talk settings entirely and release everything.
		settings.setGroupCallPushToTalkShortcut(QByteArray());
		settings.setGroupCallPushToTalk(false);
		Core::App().saveSettingsDelayed();
		_shortcutManager = nullptr;
		_pushToTalk = nullptr;
		return;
	}
	if (_pushToTalk) {
		if (shortcut->serialize() == _pushToTalk->serialize()) {
			// Same shortcut as before, nothing to rewire.
			return;
		}
		_shortcutManager->stopWatching(_pushToTalk);
	}
	_pushToTalk = shortcut;
	_shortcutManager->startWatching(_pushToTalk, [=](bool pressed) {
		pushToTalk(
			pressed,
			Core::App().settings().groupCallPushToTalkDelay());
	});
}
3285
pushToTalk(bool pressed,crl::time delay)3286 void GroupCall::pushToTalk(bool pressed, crl::time delay) {
3287 if (mutedByAdmin() || muted() == MuteState::Active) {
3288 return;
3289 } else if (pressed) {
3290 _pushToTalkCancelTimer.cancel();
3291 setMuted(MuteState::PushToTalk);
3292 } else if (delay) {
3293 _pushToTalkCancelTimer.callOnce(delay);
3294 } else {
3295 pushToTalkCancel();
3296 }
3297 }
3298
pushToTalkCancel()3299 void GroupCall::pushToTalkCancel() {
3300 _pushToTalkCancelTimer.cancel();
3301 if (muted() == MuteState::PushToTalk) {
3302 setMuted(MuteState::Muted);
3303 }
3304 }
3305
void GroupCall::setNotRequireARGB32() {
	// After this call video frames no longer have to be converted to
	// ARGB32 for this client.
	_requireARGB32 = false;
}
3309
auto GroupCall::otherParticipantStateValue() const
-> rpl::producer<Group::ParticipantState> {
	// Stream of state changes fired for other participants of the call.
	return _otherParticipantStateValue.events();
}
3314
inputCall() const3315 MTPInputGroupCall GroupCall::inputCall() const {
3316 Expects(_id != 0);
3317
3318 return MTP_inputGroupCall(
3319 MTP_long(_id),
3320 MTP_long(_accessHash));
3321 }
3322
destroyController()3323 void GroupCall::destroyController() {
3324 if (_instance) {
3325 DEBUG_LOG(("Call Info: Destroying call controller.."));
3326 invalidate_weak_ptrs(&_instanceGuard);
3327
3328 crl::async([
3329 instance = base::take(_instance),
3330 done = _delegate->groupCallAddAsyncWaiter()
3331 ]() mutable {
3332 instance = nullptr;
3333 DEBUG_LOG(("Call Info: Call controller destroyed."));
3334 done();
3335 });
3336 }
3337 }
3338
destroyScreencast()3339 void GroupCall::destroyScreencast() {
3340 if (_screenInstance) {
3341 DEBUG_LOG(("Call Info: Destroying call screen controller.."));
3342 invalidate_weak_ptrs(&_screenInstanceGuard);
3343 crl::async([
3344 instance = base::take(_screenInstance),
3345 done = _delegate->groupCallAddAsyncWaiter()
3346 ]() mutable {
3347 instance = nullptr;
3348 DEBUG_LOG(("Call Info: Call screen controller destroyed."));
3349 done();
3350 });
3351 }
3352 }
3353
3354 } // namespace Calls
3355