New API/tgcalls with two outgoing videos.

This commit is contained in:
John Preston 2021-04-30 19:20:30 +04:00
parent a48649987e
commit ba1dade4b0
7 changed files with 974 additions and 425 deletions

File diff suppressed because it is too large. (Load diff)

View file

@ -75,18 +75,33 @@ struct LevelUpdate {
};
struct StreamsVideoUpdate {
uint32 ssrc = 0;
std::string endpoint;
bool streams = false;
};
struct VideoParams;
struct VideoParams {
base::flat_set<uint32> ssrcs;
std::string endpoint;
QByteArray json;
uint32 hash = 0;
[[nodiscard]] std::shared_ptr<VideoParams> ParseVideoParams(
const QByteArray &video,
const QByteArray &screencast,
const std::shared_ptr<VideoParams> &existing);
[[nodiscard]] const base::flat_set<uint32> &VideoSourcesFromParams(
const std::shared_ptr<VideoParams> &params);
[[nodiscard]] bool empty() const {
return endpoint.empty() || ssrcs.empty() || json.isEmpty();
}
[[nodiscard]] explicit operator bool() const {
return !empty();
}
};
struct ParticipantVideoParams {
VideoParams camera;
VideoParams screen;
};
[[nodiscard]] std::shared_ptr<ParticipantVideoParams> ParseVideoParams(
const QByteArray &camera,
const QByteArray &screen,
const std::shared_ptr<ParticipantVideoParams> &existing);
class GroupCall final : public base::has_weak_ptr {
public:
@ -154,8 +169,11 @@ public:
void startScheduledNow();
void toggleScheduleStartSubscribed(bool subscribed);
void addVideoOutput(uint32 ssrc, not_null<Webrtc::VideoTrack*> track);
[[nodiscard]] not_null<Webrtc::VideoTrack*> outgoingVideoTrack() const;
void addVideoOutput(
const std::string &endpoint,
not_null<Webrtc::VideoTrack*> track);
[[nodiscard]] not_null<Webrtc::VideoTrack*> outgoingCameraTrack() const;
[[nodiscard]] not_null<Webrtc::VideoTrack*> outgoingScreenTrack() const;
void setMuted(MuteState mute);
void setMutedAndUpdate(MuteState mute);
@ -213,20 +231,21 @@ public:
-> rpl::producer<StreamsVideoUpdate> {
return _streamsVideoUpdated.events();
}
[[nodiscard]] bool streamsVideo(uint32 ssrc) const {
return ssrc
&& _videoStreamSsrcs.contains(ssrc)
&& !_videoMuted.contains(ssrc);
[[nodiscard]] bool streamsVideo(const std::string &endpoint) const {
return !endpoint.empty()
&& _incomingVideoEndpoints.contains(endpoint)
&& _activeVideoEndpoints.contains(endpoint);
}
[[nodiscard]] uint32 videoStreamPinned() const {
return _videoStreamPinned;
[[nodiscard]] const std::string &videoEndpointPinned() const {
return _videoEndpointPinned;
}
void pinVideoStream(uint32 ssrc);
[[nodiscard]] uint32 videoStreamLarge() const {
return _videoStreamLarge.current();
void pinVideoEndpoint(const std::string &endpoint);
[[nodiscard]] std::string videoEndpointLarge() const {
return _videoEndpointLarge.current();
}
[[nodiscard]] rpl::producer<uint32> videoStreamLargeValue() const {
return _videoStreamLarge.value();
[[nodiscard]] auto videoEndpointLargeValue() const
-> rpl::producer<std::string> {
return _videoEndpointLarge.value();
}
[[nodiscard]] Webrtc::VideoTrack *videoLargeTrack() const {
return _videoLargeTrack.current();
@ -251,7 +270,7 @@ public:
[[nodiscard]] bool isScreenSharing() const;
[[nodiscard]] QString screenSharingDeviceId() const;
void toggleVideo(bool active);
void switchToScreenSharing(const QString &uniqueId);
void toggleScreenSharing(std::optional<QString> uniqueId);
void toggleMute(const Group::MuteRequest &data);
void changeVolume(const Group::VolumeRequest &data);
@ -269,10 +288,15 @@ public:
private:
class LoadPartTask;
class MediaChannelDescriptionsTask;
public:
void broadcastPartStart(std::shared_ptr<LoadPartTask> task);
void broadcastPartCancel(not_null<LoadPartTask*> task);
void mediaChannelDescriptionsStart(
std::shared_ptr<MediaChannelDescriptionsTask> task);
void mediaChannelDescriptionsCancel(
not_null<MediaChannelDescriptionsTask*> task);
private:
using GlobalShortcutValue = base::GlobalShortcutValue;
@ -299,12 +323,19 @@ private:
VideoMuted,
};
[[nodiscard]] bool mediaChannelDescriptionsFill(
not_null<MediaChannelDescriptionsTask*> task,
Fn<bool(uint32)> resolved = nullptr);
void checkMediaChannelDescriptions(Fn<bool(uint32)> resolved = nullptr);
void handlePossibleCreateOrJoinResponse(const MTPDgroupCall &data);
void handlePossibleDiscarded(const MTPDgroupCallDiscarded &data);
void handleUpdate(const MTPDupdateGroupCall &data);
void handleUpdate(const MTPDupdateGroupCallParticipants &data);
void ensureControllerCreated();
void destroyController();
void ensureScreencastCreated();
void destroyScreencast();
void setState(State state);
void finish(FinishType type);
@ -319,10 +350,15 @@ private:
void saveDefaultJoinAs(not_null<PeerData*> as);
void subscribeToReal(not_null<Data::GroupCall*> real);
void setScheduledDate(TimeId date);
void joinLeavePresentation();
void rejoinPresentation();
void leavePresentation();
void audioLevelsUpdated(const tgcalls::GroupLevelsUpdate &data);
void setInstanceConnected(tgcalls::GroupNetworkState networkState);
void setInstanceMode(InstanceMode mode);
void setScreenInstanceConnected(tgcalls::GroupNetworkState networkState);
void setScreenInstanceMode(InstanceMode mode);
void checkLastSpoke();
void pushToTalkCancel();
@ -335,14 +371,8 @@ private:
void stopConnectingSound();
void playConnectingSoundOnce();
void requestParticipantsInformation(const std::vector<uint32_t> &ssrcs);
void addParticipantsToInstance();
void prepareParticipantForAdding(
const Data::GroupCallParticipant &participant);
void addPreparedParticipants();
void addPreparedParticipantsDelayed();
void setVideoStreams(const std::vector<std::uint32_t> &ssrcs);
[[nodiscard]] uint32 chooseLargeVideoSsrc() const;
void setIncomingVideoStreams(const std::vector<std::string> &endpoints);
[[nodiscard]] std::string chooseLargeVideoEndpoint() const;
void editParticipant(
not_null<PeerData*> participantPeer,
@ -368,17 +398,15 @@ private:
MTP::Sender _api;
rpl::event_stream<not_null<Data::GroupCall*>> _realChanges;
rpl::variable<State> _state = State::Creating;
rpl::variable<InstanceState> _instanceState
= InstanceState::Disconnected;
bool _instanceTransitioning = false;
InstanceMode _instanceMode = InstanceMode::None;
base::flat_set<uint32> _unresolvedSsrcs;
std::vector<tgcalls::GroupParticipantDescription> _preparedParticipants;
bool _addPreparedParticipantsScheduled = false;
bool _recordingStoppedByMe = false;
MTP::DcId _broadcastDcId = 0;
base::flat_map<not_null<LoadPartTask*>, LoadingPart> _broadcastParts;
base::flat_set<
std::shared_ptr<
MediaChannelDescriptionsTask>,
base::pointer_comparator<MediaChannelDescriptionsTask>> _mediaChannelDescriptionses;
not_null<PeerData*> _joinAs;
std::vector<not_null<PeerData*>> _possibleJoinAs;
@ -395,21 +423,35 @@ private:
uint64 _id = 0;
uint64 _accessHash = 0;
uint32 _mySsrc = 0;
uint32 _screencastSsrc = 0;
uint32 _screenSsrc = 0;
TimeId _scheduleDate = 0;
base::flat_set<uint32> _mySsrcs;
mtpRequestId _createRequestId = 0;
mtpRequestId _updateMuteRequestId = 0;
rpl::variable<InstanceState> _instanceState
= InstanceState::Disconnected;
bool _instanceTransitioning = false;
InstanceMode _instanceMode = InstanceMode::None;
std::unique_ptr<tgcalls::GroupInstanceCustomImpl> _instance;
std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
const std::unique_ptr<Webrtc::VideoTrack> _videoOutgoing;
std::shared_ptr<tgcalls::VideoCaptureInterface> _cameraCapture;
const std::unique_ptr<Webrtc::VideoTrack> _cameraOutgoing;
rpl::variable<InstanceState> _screenInstanceState
= InstanceState::Disconnected;
InstanceMode _screenInstanceMode = InstanceMode::None;
std::unique_ptr<tgcalls::GroupInstanceCustomImpl> _screenInstance;
std::shared_ptr<tgcalls::VideoCaptureInterface> _screenCapture;
const std::unique_ptr<Webrtc::VideoTrack> _screenOutgoing;
QString _screenDeviceId;
std::string _screenEndpoint;
rpl::event_stream<LevelUpdate> _levelUpdates;
rpl::event_stream<StreamsVideoUpdate> _streamsVideoUpdated;
base::flat_set<uint32> _videoStreamSsrcs;
base::flat_set<uint32> _videoMuted;
rpl::variable<uint32> _videoStreamLarge = 0;
uint32 _videoStreamPinned = 0;
base::flat_set<std::string> _incomingVideoEndpoints;
base::flat_set<std::string> _activeVideoEndpoints;
rpl::variable<std::string> _videoEndpointLarge;
std::string _videoEndpointPinned;
std::unique_ptr<LargeTrack> _videoLargeTrackWrap;
rpl::variable<Webrtc::VideoTrack*> _videoLargeTrack;
base::flat_map<uint32, Data::LastSpokeTimes> _lastSpoke;
@ -430,8 +472,7 @@ private:
std::unique_ptr<Webrtc::MediaDevices> _mediaDevices;
QString _audioInputId;
QString _audioOutputId;
QString _videoInputId;
QString _videoDeviceId;
QString _cameraInputId;
rpl::lifetime _lifetime;

View file

@ -145,8 +145,10 @@ public:
return _raisedHandRating;
}
[[nodiscard]] not_null<Webrtc::VideoTrack*> createVideoTrack();
[[nodiscard]] not_null<Webrtc::VideoTrack*> createVideoTrack(
const std::string &endpoint);
void clearVideoTrack();
[[nodiscard]] const std::string &videoTrackEndpoint() const;
void setVideoTrack(not_null<Webrtc::VideoTrack*> track);
void addActionRipple(QPoint point, Fn<void()> updateCallback) override;
@ -276,6 +278,7 @@ private:
std::unique_ptr<StatusIcon> _statusIcon;
std::unique_ptr<Webrtc::VideoTrack> _videoTrack;
Webrtc::VideoTrack *_videoTrackShown = nullptr;
std::string _videoTrackEndpoint;
rpl::lifetime _videoTrackLifetime; // #TODO calls move to unique_ptr.
Ui::Animations::Simple _speakingAnimation; // For gray-red/green icon.
Ui::Animations::Simple _mutedAnimation; // For gray/red icon.
@ -380,14 +383,16 @@ private:
not_null<Row*> row,
uint64 raiseHandRating) const;
Row *findRow(not_null<PeerData*> participantPeer) const;
Row *findRow(uint32 audioSsrc) const;
const Data::GroupCallParticipant *findParticipant(
const std::string &endpoint) const;
Row *findRow(const std::string &endpoint) const;
void appendInvitedUsers();
void scheduleRaisedHandStatusRemove();
const not_null<GroupCall*> _call;
not_null<PeerData*> _peer;
uint32 _largeSsrc = 0;
std::string _largeEndpoint;
bool _prepared = false;
rpl::event_stream<MuteRequest> _toggleMuteRequests;
@ -1015,14 +1020,20 @@ void Row::refreshStatus() {
_speaking);
}
not_null<Webrtc::VideoTrack*> Row::createVideoTrack() {
not_null<Webrtc::VideoTrack*> Row::createVideoTrack(
const std::string &endpoint) {
_videoTrackShown = nullptr;
_videoTrackEndpoint = endpoint;
_videoTrack = std::make_unique<Webrtc::VideoTrack>(
Webrtc::VideoState::Active);
setVideoTrack(_videoTrack.get());
return _videoTrack.get();
}
// Returns the endpoint this row's video track was created for
// (set in createVideoTrack; empty if no track was created).
const std::string &Row::videoTrackEndpoint() const {
return _videoTrackEndpoint;
}
void Row::clearVideoTrack() {
_videoTrackLifetime.destroy();
_videoTrackShown = nullptr;
@ -1149,29 +1160,50 @@ void MembersController::setupListChangeViewers() {
}
}, _lifetime);
_call->videoStreamLargeValue(
) | rpl::filter([=](uint32 largeSsrc) {
return (_largeSsrc != largeSsrc);
}) | rpl::start_with_next([=](uint32 largeSsrc) {
if (const auto row = findRow(_largeSsrc)) {
_call->addVideoOutput(_largeSsrc, row->createVideoTrack());
_call->videoEndpointLargeValue(
) | rpl::filter([=](const std::string &largeEndpoint) {
return (_largeEndpoint != largeEndpoint);
}) | rpl::start_with_next([=](const std::string &largeEndpoint) {
if (const auto participant = findParticipant(_largeEndpoint)) {
if (participant->cameraEndpoint() == _largeEndpoint) {
if (const auto row = findRow(participant->peer)) {
_call->addVideoOutput(
_largeEndpoint,
row->createVideoTrack(_largeEndpoint));
}
_largeSsrc = largeSsrc;
if (const auto row = findRow(_largeSsrc)) {
}
}
_largeEndpoint = largeEndpoint;
if (const auto participant = findParticipant(_largeEndpoint)) {
if (participant->cameraEndpoint() == _largeEndpoint) {
if (const auto row = findRow(participant->peer)) {
row->clearVideoTrack();
}
}
}
}, _lifetime);
_call->streamsVideoUpdates(
) | rpl::start_with_next([=](StreamsVideoUpdate update) {
Assert(update.ssrc != _largeSsrc);
if (const auto row = findRow(update.ssrc)) {
Assert(update.endpoint != _largeEndpoint);
if (const auto participant = findParticipant(update.endpoint)) {
if (update.streams) {
_call->addVideoOutput(update.ssrc, row->createVideoTrack());
if (participant->cameraEndpoint() == update.endpoint
|| !_call->streamsVideo(participant->cameraEndpoint())) {
if (const auto row = findRow(participant->peer)) {
_call->addVideoOutput(
update.endpoint,
row->createVideoTrack(update.endpoint));
}
}
} else {
if (const auto row = findRow(participant->peer)) {
if (row->videoTrackEndpoint() == update.endpoint) {
row->clearVideoTrack();
}
}
}
}
}, _lifetime);
_call->rejoinEvents(
@ -1493,15 +1525,18 @@ Row *MembersController::findRow(not_null<PeerData*> participantPeer) const {
delegate()->peerListFindRow(participantPeer->id.value));
}
Row *MembersController::findRow(uint32 audioSsrc) const {
if (!audioSsrc) {
const Data::GroupCallParticipant *MembersController::findParticipant(
const std::string &endpoint) const {
if (endpoint.empty()) {
return nullptr;
}
const auto real = _call->lookupReal();
const auto participantPeer = real
? real->participantPeerByAudioSsrc(audioSsrc)
: nullptr;
return participantPeer ? findRow(participantPeer) : nullptr;
return real ? real->participantByEndpoint(endpoint) : nullptr;
}
Row *MembersController::findRow(const std::string &endpoint) const {
const auto participant = findParticipant(endpoint);
return participant ? findRow(participant->peer) : nullptr;
}
Main::Session &MembersController::session() const {
@ -1845,15 +1880,35 @@ base::unique_qptr<Ui::PopupMenu> MembersController::createRowContextMenu(
_kickParticipantRequests.fire_copy(participantPeer);
});
const auto ssrc = real->ssrc();
if (ssrc != 0 && _call->streamsVideo(ssrc)) {
const auto pinned = (_call->videoStreamPinned() == ssrc);
const auto phrase = pinned
? tr::lng_group_call_context_unpin_video(tr::now)
: tr::lng_group_call_context_pin_video(tr::now);
result->addAction(phrase, [=] {
_call->pinVideoStream(pinned ? 0 : ssrc);
});
if (const auto real = _call->lookupReal()) {
const auto pinnedEndpoint = _call->videoEndpointPinned();
const auto participant = real->participantByEndpoint(pinnedEndpoint);
if (participant && participant->peer == participantPeer) {
result->addAction(
tr::lng_group_call_context_unpin_video(tr::now),
[=] { _call->pinVideoEndpoint(std::string()); });
} else {
const auto &participants = real->participants();
const auto i = ranges::find(
participants,
participantPeer,
&Data::GroupCallParticipant::peer);
if (i != end(participants)) {
const auto camera = i->cameraEndpoint();
const auto screen = i->screenEndpoint();
const auto streamsScreen = _call->streamsVideo(screen);
if (streamsScreen || _call->streamsVideo(camera)) {
const auto callback = [=] {
_call->pinVideoEndpoint(streamsScreen
? screen
: camera);
};
result->addAction(
tr::lng_group_call_context_pin_video(tr::now),
callback);
}
}
}
}
if (real->ssrc() != 0
@ -2217,8 +2272,9 @@ void Members::setupPinnedVideo() {
_mode.changes() | rpl::filter(
_1 == PanelMode::Default
) | rpl::to_empty,
_call->videoStreamLargeValue() | rpl::filter([=](uint32 ssrc) {
return ssrc == _call->videoStreamPinned();
_call->videoEndpointLargeValue(
) | rpl::filter([=](const std::string &endpoint) {
return endpoint == _call->videoEndpointPinned();
}) | rpl::to_empty
) | rpl::start_with_next([=] {
_scroll->scrollToY(0);

View file

@ -493,11 +493,11 @@ rpl::lifetime &Panel::chooseSourceInstanceLifetime() {
}
void Panel::chooseSourceAccepted(const QString &deviceId) {
_call->switchToScreenSharing(deviceId);
_call->toggleScreenSharing(deviceId);
}
void Panel::chooseSourceStop() {
_call->toggleVideo(false);
_call->toggleScreenSharing(std::nullopt);
}
void Panel::initWindow() {
@ -722,10 +722,8 @@ void Panel::refreshLeftButton() {
&st::groupCallVideoActiveSmall);
_video->show();
_video->setClickedCallback([=] {
const auto sharing = _call->isScreenSharing();
const auto active = (_call->outgoingVideoTrack()->state()
== Webrtc::VideoState::Active);
_call->toggleVideo(sharing || !active);
_call->toggleVideo(_call->outgoingCameraTrack()->state()
!= Webrtc::VideoState::Active);
});
_video->setText(tr::lng_group_call_video());
_video->setColorOverrides(_mute->colorOverrides());

View file

@ -33,8 +33,22 @@ constexpr auto kWaitForUpdatesTimeout = 3 * crl::time(1000);
});
}
// Shared immutable empty endpoint string, so accessors can hand out a
// const reference without allocating a temporary per call.
[[nodiscard]] const std::string &EmptyEndpoint() {
	static const std::string result;
	return result;
}
} // namespace
// Camera video endpoint from the participant's video params,
// or a reference to a shared empty string when params are absent.
const std::string &GroupCallParticipant::cameraEndpoint() const {
	if (const auto params = videoParams.get()) {
		return params->camera.endpoint;
	}
	return EmptyEndpoint();
}
// Screencast video endpoint from the participant's video params,
// or a reference to a shared empty string when params are absent.
const std::string &GroupCallParticipant::screenEndpoint() const {
	if (const auto params = videoParams.get()) {
		return params->screen.endpoint;
	}
	return EmptyEndpoint();
}
GroupCall::GroupCall(
not_null<PeerData*> peer,
uint64 id,
@ -193,13 +207,36 @@ PeerData *GroupCall::participantPeerByAudioSsrc(uint32 ssrc) const {
: nullptr;
}
PeerData *GroupCall::participantPeerByVideoSsrc(uint32 ssrc) const {
const auto i = _participantPeerByVideoSsrc.find(ssrc);
return (i != end(_participantPeerByVideoSsrc))
// Looks up the peer that owns the given camera video ssrc,
// or nullptr when the ssrc is unknown.
PeerData *GroupCall::participantPeerByCameraSsrc(uint32 ssrc) const {
	const auto it = _participantPeerByCameraSsrc.find(ssrc);
	if (it != end(_participantPeerByCameraSsrc)) {
		return it->second.get();
	}
	return nullptr;
}
// Looks up the peer that owns the given screencast video ssrc,
// or nullptr when the ssrc is unknown.
PeerData *GroupCall::participantPeerByScreenSsrc(uint32 ssrc) const {
	const auto it = _participantPeerByScreenSsrc.find(ssrc);
	if (it != end(_participantPeerByScreenSsrc)) {
		return it->second.get();
	}
	return nullptr;
}
// Linear scan for the participant whose camera or screencast endpoint
// matches the given one; returns nullptr for an empty or unknown endpoint.
const GroupCallParticipant *GroupCall::participantByEndpoint(
		const std::string &endpoint) const {
	if (endpoint.empty()) {
		return nullptr;
	}
	for (const auto &participant : _participants) {
		const auto params = participant.videoParams.get();
		if (!params) {
			continue;
		}
		const auto matches = (params->camera.endpoint == endpoint)
			|| (params->screen.endpoint == endpoint);
		if (matches) {
			return &participant;
		}
	}
	return nullptr;
}
rpl::producer<> GroupCall::participantsSliceAdded() {
return _participantsSliceAdded.events();
}
@ -305,7 +342,8 @@ void GroupCall::processFullCallFields(const MTPphone_GroupCall &call) {
_participants.clear();
_speakingByActiveFinishes.clear();
_participantPeerByAudioSsrc.clear();
_participantPeerByVideoSsrc.clear();
_participantPeerByCameraSsrc.clear();
_participantPeerByScreenSsrc.clear();
_allParticipantsLoaded = false;
applyParticipantsSlice(
@ -499,10 +537,7 @@ void GroupCall::applyParticipantsSlice(
.was = *i,
};
_participantPeerByAudioSsrc.erase(i->ssrc);
const auto &all = VideoSourcesFromParams(i->videoParams);
for (const auto ssrc : all) {
_participantPeerByVideoSsrc.erase(ssrc);
}
eraseVideoSsrcs(*i);
_speakingByActiveFinishes.remove(participantPeer);
_participants.erase(i);
if (sliceSource != ApplySliceSource::SliceLoaded) {
@ -543,8 +578,8 @@ void GroupCall::applyParticipantsSlice(
&& (!was || was->onlyMinLoaded);
const auto raisedHandRating
= data.vraise_hand_rating().value_or_empty();
const auto hasVideoParamsInformation = (sliceSource
!= ApplySliceSource::UpdateConstructed);
const auto hasVideoParamsInformation = true/*(sliceSource
!= ApplySliceSource::UpdateConstructed)*/;
const auto value = Participant{
.peer = participantPeer,
.videoParams = (hasVideoParamsInformation
@ -571,19 +606,13 @@ void GroupCall::applyParticipantsSlice(
.muted = data.is_muted(),
.mutedByMe = mutedByMe,
.canSelfUnmute = canSelfUnmute,
.videoMuted = (data.vvideo() == nullptr),
.onlyMinLoaded = onlyMinLoaded,
};
if (i == end(_participants)) {
_participantPeerByAudioSsrc.emplace(
value.ssrc,
participantPeer);
const auto &all = VideoSourcesFromParams(value.videoParams);
for (const auto ssrc : all) {
_participantPeerByVideoSsrc.emplace(
ssrc,
participantPeer);
}
emplaceVideoSsrcs(value);
_participants.push_back(value);
if (const auto user = participantPeer->asUser()) {
_peer->owner().unregisterInvitedToCallUser(_id, user);
@ -596,17 +625,8 @@ void GroupCall::applyParticipantsSlice(
participantPeer);
}
if (i->videoParams != value.videoParams) {
const auto &old = VideoSourcesFromParams(i->videoParams);
for (const auto ssrc : old) {
_participantPeerByVideoSsrc.erase(ssrc);
}
const auto &now = VideoSourcesFromParams(
value.videoParams);
for (const auto ssrc : now) {
_participantPeerByVideoSsrc.emplace(
ssrc,
participantPeer);
}
eraseVideoSsrcs(*i);
emplaceVideoSsrcs(value);
}
*i = value;
}
@ -627,6 +647,29 @@ void GroupCall::applyParticipantsSlice(
}
}
// Registers the participant's camera and screencast ssrcs in the
// per-kind lookup maps; no-op when the participant has no video params.
void GroupCall::emplaceVideoSsrcs(const Participant &participant) {
	const auto params = participant.videoParams.get();
	if (!params) {
		return;
	}
	const auto peer = participant.peer;
	for (const auto cameraSsrc : params->camera.ssrcs) {
		_participantPeerByCameraSsrc.emplace(cameraSsrc, peer);
	}
	for (const auto screenSsrc : params->screen.ssrcs) {
		_participantPeerByScreenSsrc.emplace(screenSsrc, peer);
	}
}
// Removes the participant's camera and screencast ssrcs from the
// per-kind lookup maps; no-op when the participant has no video params.
void GroupCall::eraseVideoSsrcs(const Participant &participant) {
	const auto params = participant.videoParams.get();
	if (!params) {
		return;
	}
	for (const auto cameraSsrc : params->camera.ssrcs) {
		_participantPeerByCameraSsrc.erase(cameraSsrc);
	}
	for (const auto screenSsrc : params->screen.ssrcs) {
		_participantPeerByScreenSsrc.erase(screenSsrc);
	}
}
void GroupCall::applyLastSpoke(
uint32 ssrc,
LastSpokeTimes when,
@ -840,6 +883,9 @@ void GroupCall::requestUnknownParticipants() {
}
_unknownSpokenPeerIds.remove(id);
}
if (!ssrcs.empty()) {
_participantsResolved.fire(&ssrcs);
}
requestUnknownParticipants();
}).fail([=](const MTP::Error &error) {
_unknownParticipantPeersRequestId = 0;

View file

@ -14,7 +14,7 @@ class PeerData;
class ApiWrap;
namespace Calls {
struct VideoParams;
struct ParticipantVideoParams;
} // namespace Calls
namespace Data {
@ -26,7 +26,7 @@ struct LastSpokeTimes {
struct GroupCallParticipant {
not_null<PeerData*> peer;
std::shared_ptr<Calls::VideoParams> videoParams;
std::shared_ptr<Calls::ParticipantVideoParams> videoParams;
TimeId date = 0;
TimeId lastActive = 0;
uint64 raisedHandRating = 0;
@ -38,8 +38,10 @@ struct GroupCallParticipant {
bool muted = false;
bool mutedByMe = false;
bool canSelfUnmute = false;
bool videoMuted = true;
bool onlyMinLoaded = false;
[[nodiscard]] const std::string &cameraEndpoint() const;
[[nodiscard]] const std::string &screenEndpoint() const;
};
class GroupCall final {
@ -104,7 +106,10 @@ public:
void requestParticipants();
[[nodiscard]] bool participantsLoaded() const;
[[nodiscard]] PeerData *participantPeerByAudioSsrc(uint32 ssrc) const;
[[nodiscard]] PeerData *participantPeerByVideoSsrc(uint32 ssrc) const;
[[nodiscard]] PeerData *participantPeerByCameraSsrc(uint32 ssrc) const;
[[nodiscard]] PeerData *participantPeerByScreenSsrc(uint32 ssrc) const;
[[nodiscard]] const Participant *participantByEndpoint(
const std::string &endpoint) const;
[[nodiscard]] rpl::producer<> participantsSliceAdded();
[[nodiscard]] rpl::producer<ParticipantUpdate> participantUpdated() const;
@ -120,6 +125,12 @@ public:
PeerData *participantPeerLoaded);
void resolveParticipants(const base::flat_set<uint32> &ssrcs);
[[nodiscard]] rpl::producer<
not_null<const base::flat_map<
uint32,
LastSpokeTimes>*>> participantsResolved() const {
return _participantsResolved.events();
}
[[nodiscard]] int fullCount() const;
[[nodiscard]] rpl::producer<int> fullCountValue() const;
@ -167,6 +178,9 @@ private:
void processSavedFullCall();
void finishParticipantsSliceRequest();
void emplaceVideoSsrcs(const Participant &participant);
void eraseVideoSsrcs(const Participant &participant);
const uint64 _id = 0;
const uint64 _accessHash = 0;
@ -184,7 +198,8 @@ private:
std::vector<Participant> _participants;
base::flat_map<uint32, not_null<PeerData*>> _participantPeerByAudioSsrc;
base::flat_map<uint32, not_null<PeerData*>> _participantPeerByVideoSsrc;
base::flat_map<uint32, not_null<PeerData*>> _participantPeerByCameraSsrc;
base::flat_map<uint32, not_null<PeerData*>> _participantPeerByScreenSsrc;
base::flat_map<not_null<PeerData*>, crl::time> _speakingByActiveFinishes;
base::Timer _speakingByActiveFinishTimer;
QString _nextOffset;
@ -196,6 +211,10 @@ private:
base::flat_map<uint32, LastSpokeTimes> _unknownSpokenSsrcs;
base::flat_map<PeerId, LastSpokeTimes> _unknownSpokenPeerIds;
rpl::event_stream<
not_null<const base::flat_map<
uint32,
LastSpokeTimes>*>> _participantsResolved;
mtpRequestId _unknownParticipantPeersRequestId = 0;
rpl::event_stream<ParticipantUpdate> _participantUpdates;

@ -1 +1 @@
Subproject commit 9928c00d231c1194896d582a71e3bb6d70ee2765
Subproject commit 697ef2ed67cfcad81b0e61caf0945a057a847327