Update API scheme.

parent 3af0c37c6b
commit 70f0cce340

5 changed files with 161 additions and 117 deletions
@@ -223,7 +223,7 @@ peerNotifySettings#af509d20 flags:# show_previews:flags.0?Bool silent:flags.1?Bo
 peerSettings#733f2961 flags:# report_spam:flags.0?true add_contact:flags.1?true block_contact:flags.2?true share_contact:flags.3?true need_contacts_exception:flags.4?true report_geo:flags.5?true autoarchived:flags.7?true invite_members:flags.8?true geo_distance:flags.6?int = PeerSettings;
 
 wallPaper#a437c3ed id:long flags:# creator:flags.0?true default:flags.1?true pattern:flags.3?true dark:flags.4?true access_hash:long slug:string document:Document settings:flags.2?WallPaperSettings = WallPaper;
-wallPaperNoFile#8af40b25 flags:# default:flags.1?true dark:flags.4?true settings:flags.2?WallPaperSettings = WallPaper;
+wallPaperNoFile#e0804116 id:long flags:# default:flags.1?true dark:flags.4?true settings:flags.2?WallPaperSettings = WallPaper;
 
 inputReportReasonSpam#58dbcab8 = ReportReason;
 inputReportReasonViolence#1e22c78d = ReportReason;
@@ -1070,7 +1070,7 @@ chatBannedRights#9f120418 flags:# view_messages:flags.0?true send_messages:flags
 
 inputWallPaper#e630b979 id:long access_hash:long = InputWallPaper;
 inputWallPaperSlug#72091c80 slug:string = InputWallPaper;
-inputWallPaperNoFile#8427bbac = InputWallPaper;
+inputWallPaperNoFile#967a462e id:long = InputWallPaper;
 
 account.wallPapersNotModified#1c199183 = account.WallPapers;
 account.wallPapers#702b65a9 hash:int wallpapers:Vector<WallPaper> = account.WallPapers;
@@ -1209,7 +1209,7 @@ groupCall#653dbaad flags:# join_muted:flags.1?true can_change_join_muted:flags.2
 
 inputGroupCall#d8aa840f id:long access_hash:long = InputGroupCall;
 
-groupCallParticipant#a8ba51a7 flags:# muted:flags.0?true left:flags.1?true can_self_unmute:flags.2?true just_joined:flags.4?true versioned:flags.5?true min:flags.8?true muted_by_you:flags.9?true volume_by_admin:flags.10?true self:flags.12?true peer:Peer date:int active_date:flags.3?int source:int volume:flags.7?int about:flags.11?string raise_hand_rating:flags.13?long video:flags.6?DataJSON presentation:flags.14?DataJSON = GroupCallParticipant;
+groupCallParticipant#eba636fe flags:# muted:flags.0?true left:flags.1?true can_self_unmute:flags.2?true just_joined:flags.4?true versioned:flags.5?true min:flags.8?true muted_by_you:flags.9?true volume_by_admin:flags.10?true self:flags.12?true video_joined:flags.15?true peer:Peer date:int active_date:flags.3?int source:int volume:flags.7?int about:flags.11?string raise_hand_rating:flags.13?long video:flags.6?GroupCallParticipantVideo presentation:flags.14?GroupCallParticipantVideo = GroupCallParticipant;
 
 phone.groupCall#9e727aad call:GroupCall participants:Vector<GroupCallParticipant> participants_next_offset:string chats:Vector<Chat> users:Vector<User> = phone.GroupCall;
 
@@ -1246,6 +1246,10 @@ phone.joinAsPeers#afe5623f peers:Vector<Peer> chats:Vector<Chat> users:Vector<Us
 
 phone.exportedGroupCallInvite#204bd158 link:string = phone.ExportedGroupCallInvite;
 
+groupCallParticipantVideoSourceGroup#dcb118b7 semantics:string sources:Vector<int> = GroupCallParticipantVideoSourceGroup;
+
+groupCallParticipantVideo#78e41663 flags:# paused:flags.0?true endpoint:string source_groups:Vector<GroupCallParticipantVideoSourceGroup> = GroupCallParticipantVideo;
+
 ---functions---
 
 invokeAfterMsg#cb9f372d {X:Type} msg_id:long query:!X = X;
@@ -1618,7 +1622,7 @@ phone.setCallRating#59ead627 flags:# user_initiative:flags.0?true peer:InputPhon
 phone.saveCallDebug#277add7e peer:InputPhoneCall debug:DataJSON = Bool;
 phone.sendSignalingData#ff7a9383 peer:InputPhoneCall data:bytes = Bool;
 phone.createGroupCall#48cdc6d8 flags:# peer:InputPeer random_id:int title:flags.0?string schedule_date:flags.1?int = Updates;
-phone.joinGroupCall#b132ff7b flags:# muted:flags.0?true video_muted:flags.2?true call:InputGroupCall join_as:InputPeer invite_hash:flags.1?string params:DataJSON = Updates;
+phone.joinGroupCall#b132ff7b flags:# muted:flags.0?true video_stopped:flags.2?true call:InputGroupCall join_as:InputPeer invite_hash:flags.1?string params:DataJSON = Updates;
 phone.leaveGroupCall#500377f9 call:InputGroupCall source:int = Updates;
 phone.inviteToGroupCall#7b393160 call:InputGroupCall users:Vector<InputUser> = Updates;
 phone.discardGroupCall#7a777135 call:InputGroupCall = Updates;
@@ -1627,7 +1631,7 @@ phone.getGroupCall#c7cb017 call:InputGroupCall = phone.GroupCall;
 phone.getGroupParticipants#c558d8ab call:InputGroupCall ids:Vector<InputPeer> sources:Vector<int> offset:string limit:int = phone.GroupParticipants;
 phone.checkGroupCall#b59cf977 call:InputGroupCall sources:Vector<int> = Vector<int>;
 phone.toggleGroupCallRecord#c02a66d7 flags:# start:flags.0?true call:InputGroupCall title:flags.1?string = Updates;
-phone.editGroupCallParticipant#aec610e4 flags:# call:InputGroupCall participant:InputPeer muted:flags.0?Bool volume:flags.1?int raise_hand:flags.2?Bool video_muted:flags.3?Bool = Updates;
+phone.editGroupCallParticipant#a5273abf flags:# call:InputGroupCall participant:InputPeer muted:flags.0?Bool volume:flags.1?int raise_hand:flags.2?Bool video_stopped:flags.3?Bool video_paused:flags.4?Bool presentation_paused:flags.5?Bool = Updates;
 phone.editGroupCallTitle#1ca6ac0a call:InputGroupCall title:string = Updates;
 phone.getGroupCallJoinAs#ef7c213a peer:InputPeer = phone.JoinAsPeers;
 phone.exportGroupCallInvite#e6aa647f flags:# can_self_unmute:flags.0?true call:InputGroupCall = phone.ExportedGroupCallInvite;
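Note: the scheme hunks above replace the opaque DataJSON video payload on groupCallParticipant with the typed groupCallParticipantVideo / groupCallParticipantVideoSourceGroup constructors (an endpoint id plus SSRC source groups). Below is a minimal C++ sketch of the shape these constructors carry, using only standard-library types; the struct and field names are illustrative, not part of the scheme or of the client code.

	#include <cstdint>
	#include <string>
	#include <vector>

	// Rough mirror of groupCallParticipantVideoSourceGroup:
	//   semantics:string sources:Vector<int>
	struct VideoSourceGroup {
		std::string semantics;             // grouping semantics string
		std::vector<std::uint32_t> ssrcs;  // RTP synchronization sources
	};

	// Rough mirror of groupCallParticipantVideo:
	//   flags:# paused:flags.0?true endpoint:string
	//   source_groups:Vector<GroupCallParticipantVideoSourceGroup>
	struct ParticipantVideo {
		bool paused = false;               // flags.0
		std::string endpoint;              // video endpoint id
		std::vector<VideoSourceGroup> sourceGroups;
	};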
@@ -35,7 +35,6 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include <tgcalls/group/GroupInstanceCustomImpl.h>
 #include <tgcalls/VideoCaptureInterface.h>
 #include <tgcalls/StaticThreads.h>
-#include <xxhash.h>
 #include <QtCore/QJsonDocument>
 #include <QtCore/QJsonObject>
 #include <QtCore/QJsonArray>
@@ -214,49 +213,118 @@ struct GroupCall::SinkPointer {
 	return false;
 }
 
-[[nodiscard]] VideoParams ParseVideoParams(const QByteArray &json) {
-	if (json.isEmpty()) {
-		return {};
-	}
-	auto error = QJsonParseError{ 0, QJsonParseError::NoError };
-	const auto document = QJsonDocument::fromJson(json, &error);
-	if (error.error != QJsonParseError::NoError) {
-		LOG(("API Error: "
-			"Failed to parse group call video params, error: %1."
-			).arg(error.errorString()));
-		return {};
-	} else if (!document.isObject()) {
-		LOG(("API Error: "
-			"Not an object received in group call video params."));
-		return {};
-	}
-	const auto object = document.object();
-	auto result = VideoParams{
-		.endpoint = ReadJsonString(object, "endpoint"),
-		.json = json,
-	};
-	return result.empty() ? VideoParams() : result;
-}
+struct VideoParams {
+	std::string endpointId;
+	std::vector<tgcalls::MediaSsrcGroup> ssrcGroups;
+
+	[[nodiscard]] bool empty() const {
+		return endpointId.empty() || ssrcGroups.empty();
+	}
+	[[nodiscard]] explicit operator bool() const {
+		return !empty();
+	}
+};
+
+struct ParticipantVideoParams {
+	VideoParams camera;
+	VideoParams screen;
+};
+
+[[nodiscard]] bool VideoParamsAreEqual(
+		const VideoParams &was,
+		const tl::conditional<MTPGroupCallParticipantVideo> &now) {
+	if (!now) {
+		return !was;
+	}
+	return now->match([&](const MTPDgroupCallParticipantVideo &data) {
+		if (gsl::make_span(data.vendpoint().v)
+			!= gsl::make_span(was.endpointId)) {
+			return false;
+		}
+		const auto &list = data.vsource_groups().v;
+		if (list.size() != was.ssrcGroups.size()) {
+			return false;
+		}
+		auto index = 0;
+		for (const auto &group : list) {
+			const auto equal = group.match([&](
+					const MTPDgroupCallParticipantVideoSourceGroup &data) {
+				const auto &group = was.ssrcGroups[index++];
+				if (gsl::make_span(data.vsemantics().v)
+					!= gsl::make_span(group.semantics)) {
+					return false;
+				}
+				const auto list = data.vsources().v;
+				if (list.size() != group.ssrcs.size()) {
+					return false;
+				}
+				auto i = 0;
+				for (const auto &ssrc : list) {
+					if (ssrc.v != group.ssrcs[i++]) {
+						return false;
+					}
+				}
+				return true;
+			});
+			if (!equal) {
+				return false;
+			}
+		}
+		return true;
+	});
+}
+
+[[nodiscard]] VideoParams ParseVideoParams(
+		const tl::conditional<MTPGroupCallParticipantVideo> &params) {
+	if (!params) {
+		return VideoParams();
+	}
+	auto result = VideoParams();
+	params->match([&](const MTPDgroupCallParticipantVideo &data) {
+		result.endpointId = data.vendpoint().v.toStdString();
+		const auto &list = data.vsource_groups().v;
+		result.ssrcGroups.reserve(list.size());
+		for (const auto &group : list) {
+			group.match([&](
+					const MTPDgroupCallParticipantVideoSourceGroup &data) {
+				const auto &list = data.vsources().v;
+				auto ssrcs = std::vector<uint32_t>();
+				ssrcs.reserve(list.size());
+				for (const auto &ssrc : list) {
+					ssrcs.push_back(ssrc.v);
+				}
+				result.ssrcGroups.push_back({
+					.semantics = data.vsemantics().v.toStdString(),
+					.ssrcs = std::move(ssrcs),
+				});
+			});
+		}
+	});
+	return result;
+}
+
+const std::string &GetCameraEndpoint(
+		const std::shared_ptr<ParticipantVideoParams> &params) {
+	return params ? params->camera.endpointId : EmptyString();
+}
+
+const std::string &GetScreenEndpoint(
+		const std::shared_ptr<ParticipantVideoParams> &params) {
+	return params ? params->screen.endpointId : EmptyString();
+}
 
 std::shared_ptr<ParticipantVideoParams> ParseVideoParams(
-		const QByteArray &camera,
-		const QByteArray &screen,
+		const tl::conditional<MTPGroupCallParticipantVideo> &camera,
+		const tl::conditional<MTPGroupCallParticipantVideo> &screen,
 		const std::shared_ptr<ParticipantVideoParams> &existing) {
 	using namespace tgcalls;
 
-	if (camera.isEmpty() && screen.isEmpty()) {
+	if (!camera && !screen) {
 		return nullptr;
 	}
-	const auto cameraHash = camera.isEmpty()
-		? 0
-		: XXH32(camera.data(), camera.size(), uint32(0));
-	const auto screenHash = screen.isEmpty()
-		? 0
-		: XXH32(screen.data(), screen.size(), uint32(0));
 	if (existing
-		&& existing->camera.hash == cameraHash
-		&& existing->screen.hash == screenHash) {
+		&& VideoParamsAreEqual(existing->camera, camera)
+		&& VideoParamsAreEqual(existing->screen, screen)) {
 		return existing;
 	}
 	// We don't reuse existing pointer, that way we can compare pointers
@@ -265,9 +333,7 @@ std::shared_ptr<ParticipantVideoParams> ParseVideoParams(
 		? existing
 		: */std::make_shared<ParticipantVideoParams>();
 	data->camera = ParseVideoParams(camera);
-	data->camera.hash = cameraHash;
 	data->screen = ParseVideoParams(screen);
-	data->screen.hash = screenHash;
 	return data;
 }
 
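Note: the comment kept in the hunk above ("We don't reuse existing pointer, that way we can compare pointers") means ParseVideoParams() hands back the previously stored shared_ptr only when both camera and screen params are unchanged, so callers can detect a change with a plain pointer comparison. Below is a self-contained sketch of that caching pattern; Params, Reparse and main are illustrative names, not code from this commit.

	#include <memory>
	#include <string>

	struct Params { std::string endpoint; };

	// Return the existing object when nothing changed, a fresh one otherwise,
	// so "did it change?" becomes a cheap pointer comparison for the caller.
	std::shared_ptr<Params> Reparse(
			const std::string &incoming,
			const std::shared_ptr<Params> &existing) {
		if (existing && existing->endpoint == incoming) {
			return existing;
		}
		auto result = std::make_shared<Params>();
		result->endpoint = incoming;
		return result;
	}

	int main() {
		auto first = Reparse("endpoint-a", nullptr);
		auto second = Reparse("endpoint-a", first);  // same pointer: unchanged
		auto third = Reparse("endpoint-b", second);  // new pointer: changed
		return (first == second && second != third) ? 0 : 1;
	}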
@@ -1048,7 +1114,7 @@ void GroupCall::rejoin(not_null<PeerData*> as) {
 
 		const auto json = QByteArray::fromStdString(payload.json);
 		const auto wasMuteState = muted();
-		const auto wasVideoMuted = !isSharingCamera();
+		const auto wasVideoStopped = !isSharingCamera();
 		using Flag = MTPphone_JoinGroupCall::Flag;
 		const auto flags = (wasMuteState != MuteState::Active
 			? Flag::f_muted
@@ -1056,8 +1122,8 @@ void GroupCall::rejoin(not_null<PeerData*> as) {
 			| (_joinHash.isEmpty()
 				? Flag(0)
 				: Flag::f_invite_hash)
-			| (wasVideoMuted
-				? Flag::f_video_muted
+			| (wasVideoStopped
+				? Flag::f_video_stopped
 				: Flag(0));
 		_api.request(MTPphone_JoinGroupCall(
 			MTP_flags(flags),
@@ -1078,8 +1144,8 @@ void GroupCall::rejoin(not_null<PeerData*> as) {
 			_peer->session().api().applyUpdates(updates);
 			applyQueuedSelfUpdates();
 			checkFirstTimeJoined();
-			if (wasVideoMuted == isSharingCamera()) {
-				sendSelfUpdate(SendUpdateType::VideoMuted);
+			if (wasVideoStopped == isSharingCamera()) {
+				sendSelfUpdate(SendUpdateType::VideoStopped);
 			}
 			_screenJoinState.nextActionPending = true;
 			checkNextJoinAction();
@@ -1255,9 +1321,6 @@ void GroupCall::applyMeInCallLocally() {
 		: participant
 		? participant->raisedHandRating
 		: FindLocalRaisedHandRating(real->participants());
-	const auto params = participant
-		? participant->videoParams.get()
-		: nullptr;
 	const auto flags = (canSelfUnmute ? Flag::f_can_self_unmute : Flag(0))
 		| (lastActive ? Flag::f_active_date : Flag(0))
 		| (_joinState.ssrc ? Flag(0) : Flag::f_left)
@@ -1265,10 +1328,6 @@ void GroupCall::applyMeInCallLocally() {
 		| Flag::f_volume // Without flag the volume is reset to 100%.
 		| Flag::f_volume_by_admin // Self volume can only be set by admin.
 		| ((muted() != MuteState::Active) ? Flag::f_muted : Flag(0))
-		| ((params && !params->camera.empty()) ? Flag::f_video : Flag(0))
-		| ((params && !params->screen.empty())
-			? Flag::f_presentation
-			: Flag(0))
 		| (raisedHandRating > 0 ? Flag::f_raise_hand_rating : Flag(0));
 	real->applyLocalUpdate(
 		MTP_updateGroupCallParticipants(
@@ -1284,12 +1343,8 @@ void GroupCall::applyMeInCallLocally() {
 					MTP_int(volume),
 					MTPstring(), // Don't update about text in local updates.
 					MTP_long(raisedHandRating),
-					(params
-						? MTP_dataJSON(MTP_bytes(params->camera.json))
-						: MTPDataJSON()),
-					(params
-						? MTP_dataJSON(MTP_bytes(params->screen.json))
-						: MTPDataJSON()))),
+					MTPGroupCallParticipantVideo(),
+					MTPGroupCallParticipantVideo())),
 			MTP_int(0)).c_updateGroupCallParticipants());
 }
 
@@ -1307,7 +1362,6 @@ void GroupCall::applyParticipantLocally(
 		? participant->canSelfUnmute
 		: (!mute || IsGroupCallAdmin(_peer, participantPeer));
 	const auto isMutedByYou = mute && !canManageCall;
-	const auto params = participant->videoParams.get();
 	const auto mutedCount = 0/*participant->mutedCount*/;
 	using Flag = MTPDgroupCallParticipant::Flag;
 	const auto flags = (canSelfUnmute ? Flag::f_can_self_unmute : Flag(0))
@@ -1319,10 +1373,6 @@ void GroupCall::applyParticipantLocally(
 		| (isMuted ? Flag::f_muted : Flag(0))
 		| (isMutedByYou ? Flag::f_muted_by_you : Flag(0))
 		| (participantPeer == _joinAs ? Flag::f_self : Flag(0))
-		| ((params && !params->camera.empty()) ? Flag::f_video : Flag(0))
-		| ((params && !params->screen.empty())
-			? Flag::f_presentation
-			: Flag(0))
 		| (participant->raisedHandRating
 			? Flag::f_raise_hand_rating
 			: Flag(0));
@@ -1340,12 +1390,8 @@ void GroupCall::applyParticipantLocally(
 					MTP_int(volume.value_or(participant->volume)),
 					MTPstring(), // Don't update about text in local updates.
 					MTP_long(participant->raisedHandRating),
-					(params
-						? MTP_dataJSON(MTP_bytes(params->camera.json))
-						: MTPDataJSON()),
-					(params
-						? MTP_dataJSON(MTP_bytes(params->screen.json))
-						: MTPDataJSON()))),
+					MTPGroupCallParticipantVideo(),
+					MTPGroupCallParticipantVideo())),
 			MTP_int(0)).c_updateGroupCallParticipants());
 }
 
@@ -1844,7 +1890,7 @@ void GroupCall::ensureOutgoingVideo() {
 			_joinAs,
 			_cameraEndpoint
 		}, active);
-		sendSelfUpdate(SendUpdateType::VideoMuted);
+		sendSelfUpdate(SendUpdateType::VideoStopped);
 		applyMeInCallLocally();
 	}, _lifetime);
 
@@ -2224,9 +2270,10 @@ void GroupCall::updateRequestedVideoChannels() {
 		}
 		channels.push_back({
 			.audioSsrc = participant->ssrc,
-			.videoInformation = (params->camera.endpoint == endpointId
-				? params->camera.json.toStdString()
-				: params->screen.json.toStdString()),
+			.endpointId = endpointId,
+			.ssrcGroups = (params->camera.endpointId == endpointId
+				? params->camera.ssrcGroups
+				: params->screen.ssrcGroups),
 			.quality = (video.quality == Group::VideoQuality::Full
 				? Quality::Full
 				: video.quality == Group::VideoQuality::Medium
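Note: in the updateRequestedVideoChannels() hunk above, the requested channel no longer carries a JSON blob; it passes the endpoint id plus the SSRC groups of whichever source (camera or screen) owns that endpoint. A self-contained sketch of that selection follows; the types and the GroupsFor function are illustrative, not the tgcalls API.

	#include <string>
	#include <vector>

	struct SourceGroup {
		std::string semantics;
		std::vector<unsigned> ssrcs;
	};

	struct VideoSource {
		std::string endpointId;
		std::vector<SourceGroup> ssrcGroups;
	};

	// Pick the SSRC groups belonging to the requested endpoint, mirroring the
	// camera-vs-screen choice made when building the channel request.
	const std::vector<SourceGroup> &GroupsFor(
			const VideoSource &camera,
			const VideoSource &screen,
			const std::string &endpointId) {
		return (camera.endpointId == endpointId)
			? camera.ssrcGroups
			: screen.ssrcGroups;
	}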
@@ -2582,15 +2629,17 @@ void GroupCall::sendSelfUpdate(SendUpdateType type) {
 	_updateMuteRequestId = _api.request(MTPphone_EditGroupCallParticipant(
 		MTP_flags((type == SendUpdateType::RaiseHand)
 			? Flag::f_raise_hand
-			: (type == SendUpdateType::VideoMuted)
-			? Flag::f_video_muted
+			: (type == SendUpdateType::VideoStopped)
+			? Flag::f_video_stopped
 			: Flag::f_muted),
 		inputCall(),
 		_joinAs->input,
 		MTP_bool(muted() != MuteState::Active),
 		MTP_int(100000), // volume
 		MTP_bool(muted() == MuteState::RaisedHand),
-		MTP_bool(!isSharingCamera())
+		MTP_bool(!isSharingCamera()),
+		MTP_bool(false), // video_paused
+		MTP_bool(false) // presentation_paused
 	)).done([=](const MTPUpdates &result) {
 		_updateMuteRequestId = 0;
 		_peer->session().api().applyUpdates(result);
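Note: with the new phone.editGroupCallParticipant layout (see the scheme hunk near the top), sendSelfUpdate() sets exactly one flag per update type and now always supplies the trailing video_paused / presentation_paused Bools. A self-contained sketch of the flag selection follows; the bit positions match the flags.0-flags.3 fields declared in the scheme line, but the enum and constant names are illustrative.

	#include <cstdint>

	enum class SendUpdateType { Mute, RaiseHand, VideoStopped };

	// Bits as declared in phone.editGroupCallParticipant#a5273abf:
	// muted:flags.0, volume:flags.1, raise_hand:flags.2, video_stopped:flags.3.
	constexpr std::uint32_t kMuted = 1u << 0;
	constexpr std::uint32_t kRaiseHand = 1u << 2;
	constexpr std::uint32_t kVideoStopped = 1u << 3;

	constexpr std::uint32_t FlagFor(SendUpdateType type) {
		return (type == SendUpdateType::RaiseHand)
			? kRaiseHand
			: (type == SendUpdateType::VideoStopped)
			? kVideoStopped
			: kMuted;
	}

	static_assert(FlagFor(SendUpdateType::VideoStopped) == kVideoStopped,
		"video update must map to the video_stopped flag");
	static_assert(FlagFor(SendUpdateType::Mute) == kMuted,
		"mute update must map to the muted flag");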
@@ -2690,7 +2739,9 @@ void GroupCall::editParticipant(
 		MTP_bool(mute),
 		MTP_int(std::clamp(volume.value_or(0), 1, Group::kMaxVolume)),
 		MTPBool(), // raise_hand
-		MTPBool() // video_muted
+		MTPBool(), // video_muted
+		MTPBool(), // video_paused
+		MTPBool() // presentation_paused
 	)).done([=](const MTPUpdates &result) {
 		_peer->session().api().applyUpdates(result);
 	}).fail([=](const MTP::Error &error) {
@@ -141,29 +141,19 @@ struct VideoQualityRequest {
 	Group::VideoQuality quality = Group::VideoQuality();
 };
 
-struct VideoParams {
-	std::string endpoint;
-	QByteArray json;
-	uint32 hash = 0;
-
-	[[nodiscard]] bool empty() const {
-		return endpoint.empty() || json.isEmpty();
-	}
-	[[nodiscard]] explicit operator bool() const {
-		return !empty();
-	}
-};
-
-struct ParticipantVideoParams {
-	VideoParams camera;
-	VideoParams screen;
-};
+struct ParticipantVideoParams;
 
 [[nodiscard]] std::shared_ptr<ParticipantVideoParams> ParseVideoParams(
-	const QByteArray &camera,
-	const QByteArray &screen,
+	const tl::conditional<MTPGroupCallParticipantVideo> &camera,
+	const tl::conditional<MTPGroupCallParticipantVideo> &screen,
 	const std::shared_ptr<ParticipantVideoParams> &existing);
 
+[[nodiscard]] const std::string &GetCameraEndpoint(
+	const std::shared_ptr<ParticipantVideoParams> &params);
+
+[[nodiscard]] const std::string &GetScreenEndpoint(
+	const std::shared_ptr<ParticipantVideoParams> &params);
+
 class GroupCall final : public base::has_weak_ptr {
 public:
 	class Delegate {
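Note: the header hunk above makes ParticipantVideoParams an opaque type: only a forward declaration is exported, and callers go through ParseVideoParams() / GetCameraEndpoint() / GetScreenEndpoint(), so the struct layout can change without touching the header. A self-contained sketch of that opaque-handle pattern, with illustrative names:

	#include <memory>
	#include <string>

	// Header side: the type is only forward-declared.
	struct Opaque;
	[[nodiscard]] std::shared_ptr<Opaque> Make(const std::string &value);
	[[nodiscard]] const std::string &GetValue(const std::shared_ptr<Opaque> &p);

	// Implementation side: the full definition lives in one translation unit.
	struct Opaque { std::string value; };

	std::shared_ptr<Opaque> Make(const std::string &value) {
		auto result = std::make_shared<Opaque>();
		result->value = value;
		return result;
	}

	const std::string &GetValue(const std::shared_ptr<Opaque> &p) {
		static const std::string empty;
		return p ? p->value : empty;  // a null handle maps to an empty string
	}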
@@ -422,7 +412,7 @@ private:
 	enum class SendUpdateType {
 		Mute,
 		RaiseHand,
-		VideoMuted,
+		VideoStopped,
 	};
 	enum class JoinAction {
 		None,
@@ -40,13 +40,12 @@ constexpr auto kWaitForUpdatesTimeout = 3 * crl::time(1000);
 
 } // namespace
 
-
 const std::string &GroupCallParticipant::cameraEndpoint() const {
-	return videoParams ? videoParams->camera.endpoint : EmptyEndpoint();
+	return GetCameraEndpoint(videoParams);
 }
 
 const std::string &GroupCallParticipant::screenEndpoint() const {
-	return videoParams ? videoParams->screen.endpoint : EmptyEndpoint();
+	return GetScreenEndpoint(videoParams);
 }
 
 GroupCall::GroupCall(
|
|||
return nullptr;
|
||||
}
|
||||
for (const auto &participant : _participants) {
|
||||
if (const auto params = participant.videoParams.get()) {
|
||||
if (params->camera.endpoint == endpoint
|
||||
|| params->screen.endpoint == endpoint) {
|
||||
return &participant;
|
||||
}
|
||||
if (participant.cameraEndpoint() == endpoint
|
||||
|| participant.screenEndpoint() == endpoint) {
|
||||
return &participant;
|
||||
}
|
||||
}
|
||||
return nullptr;
|
||||
|
@@ -587,18 +584,20 @@ void GroupCall::applyParticipantsSlice(
 				&& (!was || was->onlyMinLoaded);
 			const auto raisedHandRating
 				= data.vraise_hand_rating().value_or_empty();
+			const auto localUpdate = (sliceSource
+				== ApplySliceSource::UpdateConstructed);
+			const auto existingVideoParams = (i != end(_participants))
+				? i->videoParams
+				: nullptr;
+			auto videoParams = localUpdate
+				? existingVideoParams
+				: Calls::ParseVideoParams(
+					data.vvideo(),
+					data.vpresentation(),
+					existingVideoParams);
 			const auto value = Participant{
 				.peer = participantPeer,
-				.videoParams = Calls::ParseVideoParams(
-					(data.vvideo()
-						? data.vvideo()->c_dataJSON().vdata().v
-						: QByteArray()),
-					(data.vpresentation()
-						? data.vpresentation()->c_dataJSON().vdata().v
-						: QByteArray()),
-					(i != end(_participants)
-						? i->videoParams
-						: nullptr)),
+				.videoParams = std::move(videoParams),
 				.date = data.vdate().v,
 				.lastActive = lastActive,
 				.raisedHandRating = raisedHandRating,
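Note: in applyParticipantsSlice() above, slices built from locally constructed updates (ApplySliceSource::UpdateConstructed) keep the participant's existing videoParams instead of re-parsing, which is consistent with the local updates built in applyMeInCallLocally() / applyParticipantLocally() passing empty MTPGroupCallParticipantVideo() fields. A self-contained sketch of that reuse-or-parse decision, with illustrative names:

	#include <memory>
	#include <string>

	struct Parsed { std::string endpoint; };

	// Keep the already-parsed value for local updates (which carry no video
	// fields); parse the wire data only for slices coming from the server.
	std::shared_ptr<Parsed> ChooseVideoParams(
			bool localUpdate,
			const std::shared_ptr<Parsed> &existing,
			const std::string &wireEndpoint) {
		if (localUpdate) {
			return existing;
		}
		auto result = std::make_shared<Parsed>();
		result->endpoint = wireEndpoint;
		return result;
	}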
Telegram/ThirdParty/tgcalls (vendored submodule)
@@ -1 +1 @@
-Subproject commit d78c12c9ca0b671d5b0064d44b45e35b37ec2af1
+Subproject commit 99c951b9c7c014666f67e1e972dfc43538a47ff7