Remove tracking of video ssrc-s.

This commit is contained in:
John Preston 2021-05-17 15:39:18 +04:00
parent 9a8812d00b
commit f4ae7ecbe7
5 changed files with 1 addition and 86 deletions

View file

@ -223,15 +223,6 @@ struct GroupCall::SinkPointer {
.endpoint = ReadJsonString(object, "endpoint"),
.json = json,
};
const auto ssrcGroups = object.value("ssrc-groups").toArray();
for (const auto &value : ssrcGroups) {
const auto inner = value.toObject();
const auto list = inner.value("sources").toArray();
for (const auto &source : list) {
const auto ssrc = uint32_t(source.toDouble());
result.ssrcs.emplace(ssrc);
}
}
return result.empty() ? VideoParams() : result;
}
@ -1897,32 +1888,11 @@ bool GroupCall::mediaChannelDescriptionsFill(
result = true;
}
};
const auto addVideoChannel = [&](
not_null<PeerData*> participantPeer,
const auto field) {
const auto participant = real->participantByPeer(
participantPeer);
Assert(participant != nullptr);
Assert(participant->videoParams != nullptr);
const auto &params = participant->videoParams.get()->*field;
Assert(!params.empty());
add(Channel{
.type = Channel::Type::Video,
.audioSsrc = participant->ssrc,
.videoInformation = params.json.toStdString(),
}, (field == &ParticipantVideoParams::screen));
};
if (const auto byAudio = real->participantPeerByAudioSsrc(ssrc)) {
add(Channel{
.type = Channel::Type::Audio,
.audioSsrc = ssrc,
});
} else if (const auto byCamera
= real->participantPeerByCameraSsrc(ssrc)) {
addVideoChannel(byCamera, &ParticipantVideoParams::camera);
} else if (const auto byScreen
= real->participantPeerByScreenSsrc(ssrc)) {
addVideoChannel(byScreen, &ParticipantVideoParams::screen);
} else if (!resolved) {
_unresolvedSsrcs.emplace(ssrc);
} else if (resolved(ssrc)) {

View file

@ -125,13 +125,12 @@ inline bool operator>=(
}
struct VideoParams {
base::flat_set<uint32> ssrcs;
std::string endpoint;
QByteArray json;
uint32 hash = 0;
[[nodiscard]] bool empty() const {
return endpoint.empty() || ssrcs.empty() || json.isEmpty();
return endpoint.empty() || json.isEmpty();
}
[[nodiscard]] explicit operator bool() const {
return !empty();

View file

@ -784,8 +784,6 @@ void MembersRow::paintComplexStatusText(
p.setPen(st::groupCallVideoSubTextFg);
} else if (_state == State::MutedByMe) {
p.setPen(st::groupCallMemberMutedIcon);
} else if (narrowMode && !about.isEmpty()) {
p.setPen(st::groupCallMembersFg);
} else {
p.setPen(st::groupCallMemberNotJoinedStatus);
}

View file

@ -215,20 +215,6 @@ PeerData *GroupCall::participantPeerByAudioSsrc(uint32 ssrc) const {
: nullptr;
}
// Resolves a camera video ssrc to the peer of the participant that owns it.
// Returns nullptr when no participant currently claims that ssrc.
PeerData *GroupCall::participantPeerByCameraSsrc(uint32 ssrc) const {
	const auto found = _participantPeerByCameraSsrc.find(ssrc);
	if (found == end(_participantPeerByCameraSsrc)) {
		return nullptr;
	}
	return found->second.get();
}
// Resolves a screencast video ssrc to the peer of the participant that
// owns it. Returns nullptr when no participant currently claims that ssrc.
PeerData *GroupCall::participantPeerByScreenSsrc(uint32 ssrc) const {
	const auto found = _participantPeerByScreenSsrc.find(ssrc);
	if (found == end(_participantPeerByScreenSsrc)) {
		return nullptr;
	}
	return found->second.get();
}
const GroupCallParticipant *GroupCall::participantByPeer(
not_null<PeerData*> peer) const {
return const_cast<GroupCall*>(this)->findParticipant(peer);
@ -361,8 +347,6 @@ void GroupCall::processFullCallFields(const MTPphone_GroupCall &call) {
_participants.clear();
_speakingByActiveFinishes.clear();
_participantPeerByAudioSsrc.clear();
_participantPeerByCameraSsrc.clear();
_participantPeerByScreenSsrc.clear();
_allParticipantsLoaded = false;
applyParticipantsSlice(
@ -557,7 +541,6 @@ void GroupCall::applyParticipantsSlice(
.was = *i,
};
_participantPeerByAudioSsrc.erase(i->ssrc);
eraseVideoSsrcs(*i);
_speakingByActiveFinishes.remove(participantPeer);
_participants.erase(i);
if (sliceSource != ApplySliceSource::FullReloaded) {
@ -626,7 +609,6 @@ void GroupCall::applyParticipantsSlice(
_participantPeerByAudioSsrc.emplace(
value.ssrc,
participantPeer);
emplaceVideoSsrcs(value);
_participants.push_back(value);
if (const auto user = participantPeer->asUser()) {
_peer->owner().unregisterInvitedToCallUser(_id, user);
@ -638,10 +620,6 @@ void GroupCall::applyParticipantsSlice(
value.ssrc,
participantPeer);
}
if (i->videoParams != value.videoParams) {
eraseVideoSsrcs(*i);
emplaceVideoSsrcs(value);
}
*i = value;
}
if (data.is_just_joined()) {
@ -661,29 +639,6 @@ void GroupCall::applyParticipantsSlice(
}
}
// Registers every camera and screencast ssrc of the participant in the
// corresponding ssrc -> peer lookup maps. Does nothing when the participant
// carries no video params.
void GroupCall::emplaceVideoSsrcs(const Participant &participant) {
	const auto params = participant.videoParams.get();
	if (!params) {
		return;
	}
	const auto peer = participant.peer;
	for (const auto cameraSsrc : params->camera.ssrcs) {
		_participantPeerByCameraSsrc.emplace(cameraSsrc, peer);
	}
	for (const auto screenSsrc : params->screen.ssrcs) {
		_participantPeerByScreenSsrc.emplace(screenSsrc, peer);
	}
}
// Removes every camera and screencast ssrc of the participant from the
// ssrc -> peer lookup maps. Does nothing when the participant carries no
// video params.
void GroupCall::eraseVideoSsrcs(const Participant &participant) {
	const auto params = participant.videoParams.get();
	if (!params) {
		return;
	}
	for (const auto cameraSsrc : params->camera.ssrcs) {
		_participantPeerByCameraSsrc.erase(cameraSsrc);
	}
	for (const auto screenSsrc : params->screen.ssrcs) {
		_participantPeerByScreenSsrc.erase(screenSsrc);
	}
}
void GroupCall::applyLastSpoke(
uint32 ssrc,
LastSpokeTimes when,

View file

@ -106,8 +106,6 @@ public:
void requestParticipants();
[[nodiscard]] bool participantsLoaded() const;
[[nodiscard]] PeerData *participantPeerByAudioSsrc(uint32 ssrc) const;
[[nodiscard]] PeerData *participantPeerByCameraSsrc(uint32 ssrc) const;
[[nodiscard]] PeerData *participantPeerByScreenSsrc(uint32 ssrc) const;
[[nodiscard]] const Participant *participantByPeer(
not_null<PeerData*> peer) const;
[[nodiscard]] const Participant *participantByEndpoint(
@ -182,9 +180,6 @@ private:
void finishParticipantsSliceRequest();
[[nodiscard]] Participant *findParticipant(not_null<PeerData*> peer);
void emplaceVideoSsrcs(const Participant &participant);
void eraseVideoSsrcs(const Participant &participant);
const uint64 _id = 0;
const uint64 _accessHash = 0;
@ -202,8 +197,6 @@ private:
std::vector<Participant> _participants;
base::flat_map<uint32, not_null<PeerData*>> _participantPeerByAudioSsrc;
base::flat_map<uint32, not_null<PeerData*>> _participantPeerByCameraSsrc;
base::flat_map<uint32, not_null<PeerData*>> _participantPeerByScreenSsrc;
base::flat_map<not_null<PeerData*>, crl::time> _speakingByActiveFinishes;
base::Timer _speakingByActiveFinishTimer;
QString _nextOffset;