New API/tgcalls with two outgoing videos.

Author: John Preston
Date: 2021-04-30 19:20:30 +04:00
parent a48649987e
commit ba1dade4b0
7 changed files with 974 additions and 425 deletions

File diff suppressed because it is too large.


@@ -75,18 +75,33 @@ struct LevelUpdate {
 };
 struct StreamsVideoUpdate {
-	uint32 ssrc = 0;
+	std::string endpoint;
 	bool streams = false;
 };
-struct VideoParams;
-[[nodiscard]] std::shared_ptr<VideoParams> ParseVideoParams(
-	const QByteArray &video,
-	const QByteArray &screencast,
-	const std::shared_ptr<VideoParams> &existing);
-[[nodiscard]] const base::flat_set<uint32> &VideoSourcesFromParams(
-	const std::shared_ptr<VideoParams> &params);
+struct VideoParams {
+	base::flat_set<uint32> ssrcs;
+	std::string endpoint;
+	QByteArray json;
+	uint32 hash = 0;
+	[[nodiscard]] bool empty() const {
+		return endpoint.empty() || ssrcs.empty() || json.isEmpty();
+	}
+	[[nodiscard]] explicit operator bool() const {
+		return !empty();
+	}
+};
+struct ParticipantVideoParams {
+	VideoParams camera;
+	VideoParams screen;
+};
+[[nodiscard]] std::shared_ptr<ParticipantVideoParams> ParseVideoParams(
+	const QByteArray &camera,
+	const QByteArray &screen,
+	const std::shared_ptr<ParticipantVideoParams> &existing);
 class GroupCall final : public base::has_weak_ptr {
 public:
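Note: the opaque `VideoParams` handle becomes a real struct, and every participant now carries a camera/screen pair keyed by string endpoints instead of bare ssrcs. A minimal standalone sketch of how calling code might pick an endpoint to display (simplified stand-ins with std:: types in place of `QByteArray`/`base::flat_set`; `ChooseDisplayEndpoint` is a hypothetical helper, not part of this commit):

```cpp
#include <cstdint>
#include <memory>
#include <set>
#include <string>

// Simplified mirrors of the structs declared above.
struct VideoParams {
	std::set<uint32_t> ssrcs;
	std::string endpoint;
	std::string json;

	bool empty() const {
		return endpoint.empty() || ssrcs.empty() || json.empty();
	}
	explicit operator bool() const {
		return !empty();
	}
};

struct ParticipantVideoParams {
	VideoParams camera;
	VideoParams screen;
};

// Hypothetical helper: prefer the screencast endpoint when it is set,
// otherwise fall back to the camera endpoint.
std::string ChooseDisplayEndpoint(
		const std::shared_ptr<ParticipantVideoParams> &params) {
	if (!params) {
		return {};
	} else if (params->screen) {
		return params->screen.endpoint;
	}
	return params->camera ? params->camera.endpoint : std::string();
}
```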
@@ -154,8 +169,11 @@ public:
 	void startScheduledNow();
 	void toggleScheduleStartSubscribed(bool subscribed);
-	void addVideoOutput(uint32 ssrc, not_null<Webrtc::VideoTrack*> track);
-	[[nodiscard]] not_null<Webrtc::VideoTrack*> outgoingVideoTrack() const;
+	void addVideoOutput(
+		const std::string &endpoint,
+		not_null<Webrtc::VideoTrack*> track);
+	[[nodiscard]] not_null<Webrtc::VideoTrack*> outgoingCameraTrack() const;
+	[[nodiscard]] not_null<Webrtc::VideoTrack*> outgoingScreenTrack() const;
 	void setMuted(MuteState mute);
 	void setMutedAndUpdate(MuteState mute);
@@ -213,20 +231,21 @@ public:
 	-> rpl::producer<StreamsVideoUpdate> {
 		return _streamsVideoUpdated.events();
 	}
-	[[nodiscard]] bool streamsVideo(uint32 ssrc) const {
-		return ssrc
-			&& _videoStreamSsrcs.contains(ssrc)
-			&& !_videoMuted.contains(ssrc);
+	[[nodiscard]] bool streamsVideo(const std::string &endpoint) const {
+		return !endpoint.empty()
+			&& _incomingVideoEndpoints.contains(endpoint)
+			&& _activeVideoEndpoints.contains(endpoint);
 	}
-	[[nodiscard]] uint32 videoStreamPinned() const {
-		return _videoStreamPinned;
+	[[nodiscard]] const std::string &videoEndpointPinned() const {
+		return _videoEndpointPinned;
 	}
-	void pinVideoStream(uint32 ssrc);
-	[[nodiscard]] uint32 videoStreamLarge() const {
-		return _videoStreamLarge.current();
+	void pinVideoEndpoint(const std::string &endpoint);
+	[[nodiscard]] std::string videoEndpointLarge() const {
+		return _videoEndpointLarge.current();
 	}
-	[[nodiscard]] rpl::producer<uint32> videoStreamLargeValue() const {
-		return _videoStreamLarge.value();
+	[[nodiscard]] auto videoEndpointLargeValue() const
+	-> rpl::producer<std::string> {
+		return _videoEndpointLarge.value();
 	}
 	[[nodiscard]] Webrtc::VideoTrack *videoLargeTrack() const {
 		return _videoLargeTrack.current();
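Note: the ssrc-keyed bookkeeping (`_videoStreamSsrcs` plus a `_videoMuted` blacklist) becomes endpoint-keyed with two positive sets: an endpoint streams video only when it is both known as incoming and marked active. A self-contained sketch of that rule (plain `std::set` instead of `base::flat_set`; `VideoEndpointState` is an illustrative name, not from this commit):

```cpp
#include <set>
#include <string>

// Illustrative mirror of the two endpoint sets kept by Calls::GroupCall.
struct VideoEndpointState {
	std::set<std::string> incoming; // endpoints reported by the instance
	std::set<std::string> active;   // endpoints whose video is currently on

	// Same rule as the new streamsVideo(): non-empty, incoming and active.
	bool streamsVideo(const std::string &endpoint) const {
		return !endpoint.empty()
			&& (incoming.count(endpoint) > 0)
			&& (active.count(endpoint) > 0);
	}
};
```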
@@ -251,7 +270,7 @@ public:
 	[[nodiscard]] bool isScreenSharing() const;
 	[[nodiscard]] QString screenSharingDeviceId() const;
 	void toggleVideo(bool active);
-	void switchToScreenSharing(const QString &uniqueId);
+	void toggleScreenSharing(std::optional<QString> uniqueId);
 	void toggleMute(const Group::MuteRequest &data);
 	void changeVolume(const Group::VolumeRequest &data);
@@ -269,10 +288,15 @@ public:
 private:
 	class LoadPartTask;
+	class MediaChannelDescriptionsTask;
 public:
 	void broadcastPartStart(std::shared_ptr<LoadPartTask> task);
 	void broadcastPartCancel(not_null<LoadPartTask*> task);
+	void mediaChannelDescriptionsStart(
+		std::shared_ptr<MediaChannelDescriptionsTask> task);
+	void mediaChannelDescriptionsCancel(
+		not_null<MediaChannelDescriptionsTask*> task);
 private:
 	using GlobalShortcutValue = base::GlobalShortcutValue;
@@ -299,12 +323,19 @@ private:
 		VideoMuted,
 	};
+	[[nodiscard]] bool mediaChannelDescriptionsFill(
+		not_null<MediaChannelDescriptionsTask*> task,
+		Fn<bool(uint32)> resolved = nullptr);
+	void checkMediaChannelDescriptions(Fn<bool(uint32)> resolved = nullptr);
 	void handlePossibleCreateOrJoinResponse(const MTPDgroupCall &data);
 	void handlePossibleDiscarded(const MTPDgroupCallDiscarded &data);
 	void handleUpdate(const MTPDupdateGroupCall &data);
 	void handleUpdate(const MTPDupdateGroupCallParticipants &data);
 	void ensureControllerCreated();
 	void destroyController();
+	void ensureScreencastCreated();
+	void destroyScreencast();
 	void setState(State state);
 	void finish(FinishType type);
@@ -319,10 +350,15 @@ private:
 	void saveDefaultJoinAs(not_null<PeerData*> as);
 	void subscribeToReal(not_null<Data::GroupCall*> real);
 	void setScheduledDate(TimeId date);
+	void joinLeavePresentation();
+	void rejoinPresentation();
+	void leavePresentation();
 	void audioLevelsUpdated(const tgcalls::GroupLevelsUpdate &data);
 	void setInstanceConnected(tgcalls::GroupNetworkState networkState);
 	void setInstanceMode(InstanceMode mode);
+	void setScreenInstanceConnected(tgcalls::GroupNetworkState networkState);
+	void setScreenInstanceMode(InstanceMode mode);
 	void checkLastSpoke();
 	void pushToTalkCancel();
@@ -335,14 +371,8 @@ private:
 	void stopConnectingSound();
 	void playConnectingSoundOnce();
-	void requestParticipantsInformation(const std::vector<uint32_t> &ssrcs);
-	void addParticipantsToInstance();
-	void prepareParticipantForAdding(
-		const Data::GroupCallParticipant &participant);
-	void addPreparedParticipants();
-	void addPreparedParticipantsDelayed();
-	void setVideoStreams(const std::vector<std::uint32_t> &ssrcs);
-	[[nodiscard]] uint32 chooseLargeVideoSsrc() const;
+	void setIncomingVideoStreams(const std::vector<std::string> &endpoints);
+	[[nodiscard]] std::string chooseLargeVideoEndpoint() const;
 	void editParticipant(
 		not_null<PeerData*> participantPeer,
@@ -368,17 +398,15 @@ private:
 	MTP::Sender _api;
 	rpl::event_stream<not_null<Data::GroupCall*>> _realChanges;
 	rpl::variable<State> _state = State::Creating;
-	rpl::variable<InstanceState> _instanceState
-		= InstanceState::Disconnected;
-	bool _instanceTransitioning = false;
-	InstanceMode _instanceMode = InstanceMode::None;
 	base::flat_set<uint32> _unresolvedSsrcs;
-	std::vector<tgcalls::GroupParticipantDescription> _preparedParticipants;
-	bool _addPreparedParticipantsScheduled = false;
 	bool _recordingStoppedByMe = false;
 	MTP::DcId _broadcastDcId = 0;
 	base::flat_map<not_null<LoadPartTask*>, LoadingPart> _broadcastParts;
+	base::flat_set<
+		std::shared_ptr<
+			MediaChannelDescriptionsTask>,
+		base::pointer_comparator<MediaChannelDescriptionsTask>> _mediaChannelDescriptionses;
 	not_null<PeerData*> _joinAs;
 	std::vector<not_null<PeerData*>> _possibleJoinAs;
@@ -395,21 +423,35 @@ private:
 	uint64 _id = 0;
 	uint64 _accessHash = 0;
 	uint32 _mySsrc = 0;
-	uint32 _screencastSsrc = 0;
+	uint32 _screenSsrc = 0;
 	TimeId _scheduleDate = 0;
 	base::flat_set<uint32> _mySsrcs;
 	mtpRequestId _createRequestId = 0;
 	mtpRequestId _updateMuteRequestId = 0;
+	rpl::variable<InstanceState> _instanceState
+		= InstanceState::Disconnected;
+	bool _instanceTransitioning = false;
+	InstanceMode _instanceMode = InstanceMode::None;
 	std::unique_ptr<tgcalls::GroupInstanceCustomImpl> _instance;
-	std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
-	const std::unique_ptr<Webrtc::VideoTrack> _videoOutgoing;
+	std::shared_ptr<tgcalls::VideoCaptureInterface> _cameraCapture;
+	const std::unique_ptr<Webrtc::VideoTrack> _cameraOutgoing;
+	rpl::variable<InstanceState> _screenInstanceState
+		= InstanceState::Disconnected;
+	InstanceMode _screenInstanceMode = InstanceMode::None;
+	std::unique_ptr<tgcalls::GroupInstanceCustomImpl> _screenInstance;
+	std::shared_ptr<tgcalls::VideoCaptureInterface> _screenCapture;
+	const std::unique_ptr<Webrtc::VideoTrack> _screenOutgoing;
+	QString _screenDeviceId;
+	std::string _screenEndpoint;
 	rpl::event_stream<LevelUpdate> _levelUpdates;
 	rpl::event_stream<StreamsVideoUpdate> _streamsVideoUpdated;
-	base::flat_set<uint32> _videoStreamSsrcs;
-	base::flat_set<uint32> _videoMuted;
-	rpl::variable<uint32> _videoStreamLarge = 0;
-	uint32 _videoStreamPinned = 0;
+	base::flat_set<std::string> _incomingVideoEndpoints;
+	base::flat_set<std::string> _activeVideoEndpoints;
+	rpl::variable<std::string> _videoEndpointLarge;
+	std::string _videoEndpointPinned;
 	std::unique_ptr<LargeTrack> _videoLargeTrackWrap;
 	rpl::variable<Webrtc::VideoTrack*> _videoLargeTrack;
 	base::flat_map<uint32, Data::LastSpokeTimes> _lastSpoke;
@@ -430,8 +472,7 @@ private:
 	std::unique_ptr<Webrtc::MediaDevices> _mediaDevices;
 	QString _audioInputId;
 	QString _audioOutputId;
-	QString _videoInputId;
-	QString _videoDeviceId;
+	QString _cameraInputId;
 	rpl::lifetime _lifetime;


@@ -145,8 +145,10 @@ public:
 		return _raisedHandRating;
 	}
-	[[nodiscard]] not_null<Webrtc::VideoTrack*> createVideoTrack();
+	[[nodiscard]] not_null<Webrtc::VideoTrack*> createVideoTrack(
+		const std::string &endpoint);
 	void clearVideoTrack();
+	[[nodiscard]] const std::string &videoTrackEndpoint() const;
 	void setVideoTrack(not_null<Webrtc::VideoTrack*> track);
 	void addActionRipple(QPoint point, Fn<void()> updateCallback) override;
@@ -276,6 +278,7 @@ private:
 	std::unique_ptr<StatusIcon> _statusIcon;
 	std::unique_ptr<Webrtc::VideoTrack> _videoTrack;
 	Webrtc::VideoTrack *_videoTrackShown = nullptr;
+	std::string _videoTrackEndpoint;
 	rpl::lifetime _videoTrackLifetime; // #TODO calls move to unique_ptr.
 	Ui::Animations::Simple _speakingAnimation; // For gray-red/green icon.
 	Ui::Animations::Simple _mutedAnimation; // For gray/red icon.
@@ -380,14 +383,16 @@ private:
 		not_null<Row*> row,
 		uint64 raiseHandRating) const;
 	Row *findRow(not_null<PeerData*> participantPeer) const;
-	Row *findRow(uint32 audioSsrc) const;
+	const Data::GroupCallParticipant *findParticipant(
+		const std::string &endpoint) const;
+	Row *findRow(const std::string &endpoint) const;
 	void appendInvitedUsers();
 	void scheduleRaisedHandStatusRemove();
 	const not_null<GroupCall*> _call;
 	not_null<PeerData*> _peer;
-	uint32 _largeSsrc = 0;
+	std::string _largeEndpoint;
 	bool _prepared = false;
 	rpl::event_stream<MuteRequest> _toggleMuteRequests;
@@ -1015,14 +1020,20 @@ void Row::refreshStatus() {
 		_speaking);
 }
-not_null<Webrtc::VideoTrack*> Row::createVideoTrack() {
+not_null<Webrtc::VideoTrack*> Row::createVideoTrack(
+		const std::string &endpoint) {
 	_videoTrackShown = nullptr;
+	_videoTrackEndpoint = endpoint;
 	_videoTrack = std::make_unique<Webrtc::VideoTrack>(
 		Webrtc::VideoState::Active);
 	setVideoTrack(_videoTrack.get());
 	return _videoTrack.get();
 }
+const std::string &Row::videoTrackEndpoint() const {
+	return _videoTrackEndpoint;
+}
 void Row::clearVideoTrack() {
 	_videoTrackLifetime.destroy();
 	_videoTrackShown = nullptr;
@@ -1149,29 +1160,50 @@ void MembersController::setupListChangeViewers() {
 		}
 	}, _lifetime);
-	_call->videoStreamLargeValue(
-	) | rpl::filter([=](uint32 largeSsrc) {
-		return (_largeSsrc != largeSsrc);
-	}) | rpl::start_with_next([=](uint32 largeSsrc) {
-		if (const auto row = findRow(_largeSsrc)) {
-			_call->addVideoOutput(_largeSsrc, row->createVideoTrack());
-		}
-		_largeSsrc = largeSsrc;
-		if (const auto row = findRow(_largeSsrc)) {
-			row->clearVideoTrack();
-		}
-	}, _lifetime);
+	_call->videoEndpointLargeValue(
+	) | rpl::filter([=](const std::string &largeEndpoint) {
+		return (_largeEndpoint != largeEndpoint);
+	}) | rpl::start_with_next([=](const std::string &largeEndpoint) {
+		if (const auto participant = findParticipant(_largeEndpoint)) {
+			if (participant->cameraEndpoint() == _largeEndpoint) {
+				if (const auto row = findRow(participant->peer)) {
+					_call->addVideoOutput(
+						_largeEndpoint,
+						row->createVideoTrack(_largeEndpoint));
+				}
+			}
+		}
+		_largeEndpoint = largeEndpoint;
+		if (const auto participant = findParticipant(_largeEndpoint)) {
+			if (participant->cameraEndpoint() == _largeEndpoint) {
+				if (const auto row = findRow(participant->peer)) {
+					row->clearVideoTrack();
+				}
+			}
+		}
+	}, _lifetime);
 	_call->streamsVideoUpdates(
 	) | rpl::start_with_next([=](StreamsVideoUpdate update) {
-		Assert(update.ssrc != _largeSsrc);
-		if (const auto row = findRow(update.ssrc)) {
-			if (update.streams) {
-				_call->addVideoOutput(update.ssrc, row->createVideoTrack());
-			} else {
-				row->clearVideoTrack();
-			}
-		}
+		Assert(update.endpoint != _largeEndpoint);
+		if (const auto participant = findParticipant(update.endpoint)) {
+			if (update.streams) {
+				if (participant->cameraEndpoint() == update.endpoint
+					|| !_call->streamsVideo(participant->cameraEndpoint())) {
+					if (const auto row = findRow(participant->peer)) {
+						_call->addVideoOutput(
+							update.endpoint,
+							row->createVideoTrack(update.endpoint));
+					}
+				}
+			} else {
+				if (const auto row = findRow(participant->peer)) {
+					if (row->videoTrackEndpoint() == update.endpoint) {
+						row->clearVideoTrack();
+					}
+				}
+			}
+		}
 	}, _lifetime);
 	_call->rejoinEvents(
@@ -1493,15 +1525,18 @@ Row *MembersController::findRow(not_null<PeerData*> participantPeer) const {
 		delegate()->peerListFindRow(participantPeer->id.value));
 }
-Row *MembersController::findRow(uint32 audioSsrc) const {
-	if (!audioSsrc) {
+const Data::GroupCallParticipant *MembersController::findParticipant(
+		const std::string &endpoint) const {
+	if (endpoint.empty()) {
 		return nullptr;
 	}
 	const auto real = _call->lookupReal();
-	const auto participantPeer = real
-		? real->participantPeerByAudioSsrc(audioSsrc)
-		: nullptr;
-	return participantPeer ? findRow(participantPeer) : nullptr;
+	return real ? real->participantByEndpoint(endpoint) : nullptr;
+}
+Row *MembersController::findRow(const std::string &endpoint) const {
+	const auto participant = findParticipant(endpoint);
+	return participant ? findRow(participant->peer) : nullptr;
 }
 Main::Session &MembersController::session() const {
@@ -1845,15 +1880,35 @@ base::unique_qptr<Ui::PopupMenu> MembersController::createRowContextMenu(
 		_kickParticipantRequests.fire_copy(participantPeer);
 	});
-	const auto ssrc = real->ssrc();
-	if (ssrc != 0 && _call->streamsVideo(ssrc)) {
-		const auto pinned = (_call->videoStreamPinned() == ssrc);
-		const auto phrase = pinned
-			? tr::lng_group_call_context_unpin_video(tr::now)
-			: tr::lng_group_call_context_pin_video(tr::now);
-		result->addAction(phrase, [=] {
-			_call->pinVideoStream(pinned ? 0 : ssrc);
-		});
+	if (const auto real = _call->lookupReal()) {
+		const auto pinnedEndpoint = _call->videoEndpointPinned();
+		const auto participant = real->participantByEndpoint(pinnedEndpoint);
+		if (participant && participant->peer == participantPeer) {
+			result->addAction(
+				tr::lng_group_call_context_unpin_video(tr::now),
+				[=] { _call->pinVideoEndpoint(std::string()); });
+		} else {
+			const auto &participants = real->participants();
+			const auto i = ranges::find(
+				participants,
+				participantPeer,
+				&Data::GroupCallParticipant::peer);
+			if (i != end(participants)) {
+				const auto camera = i->cameraEndpoint();
+				const auto screen = i->screenEndpoint();
+				const auto streamsScreen = _call->streamsVideo(screen);
+				if (streamsScreen || _call->streamsVideo(camera)) {
+					const auto callback = [=] {
+						_call->pinVideoEndpoint(streamsScreen
+							? screen
+							: camera);
+					};
+					result->addAction(
+						tr::lng_group_call_context_pin_video(tr::now),
+						callback);
+				}
+			}
+		}
 	}
 	if (real->ssrc() != 0
@@ -2217,8 +2272,9 @@ void Members::setupPinnedVideo() {
 		_mode.changes() | rpl::filter(
 			_1 == PanelMode::Default
 		) | rpl::to_empty,
-		_call->videoStreamLargeValue() | rpl::filter([=](uint32 ssrc) {
-			return ssrc == _call->videoStreamPinned();
+		_call->videoEndpointLargeValue(
+		) | rpl::filter([=](const std::string &endpoint) {
+			return endpoint == _call->videoEndpointPinned();
 		}) | rpl::to_empty
 	) | rpl::start_with_next([=] {
 		_scroll->scrollToY(0);


@@ -493,11 +493,11 @@ rpl::lifetime &Panel::chooseSourceInstanceLifetime() {
 }
 void Panel::chooseSourceAccepted(const QString &deviceId) {
-	_call->switchToScreenSharing(deviceId);
+	_call->toggleScreenSharing(deviceId);
 }
 void Panel::chooseSourceStop() {
-	_call->toggleVideo(false);
+	_call->toggleScreenSharing(std::nullopt);
 }
 void Panel::initWindow() {
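Note: screen sharing is now driven by one `toggleScreenSharing(std::optional<QString>)` entry point: a device id starts or switches the screencast, `std::nullopt` stops it. A standalone sketch of that calling convention (a stand-in class with `std::string` instead of `QString`; not the real GroupCall implementation, which is in the suppressed .cpp diff above):

```cpp
#include <optional>
#include <string>

// Stand-in showing the optional-as-toggle convention used by the new API.
class ScreenShareController {
public:
	void toggleScreenSharing(std::optional<std::string> deviceId) {
		if (!deviceId) {
			_screenDeviceId.clear(); // std::nullopt -> stop sharing
		} else {
			_screenDeviceId = *deviceId; // start or switch the shared source
		}
	}
	bool isScreenSharing() const {
		return !_screenDeviceId.empty();
	}

private:
	std::string _screenDeviceId;
};
```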
@@ -722,10 +722,8 @@ void Panel::refreshLeftButton() {
 		&st::groupCallVideoActiveSmall);
 	_video->show();
 	_video->setClickedCallback([=] {
-		const auto sharing = _call->isScreenSharing();
-		const auto active = (_call->outgoingVideoTrack()->state()
-			== Webrtc::VideoState::Active);
-		_call->toggleVideo(sharing || !active);
+		_call->toggleVideo(_call->outgoingCameraTrack()->state()
+			!= Webrtc::VideoState::Active);
 	});
 	_video->setText(tr::lng_group_call_video());
 	_video->setColorOverrides(_mute->colorOverrides());


@@ -33,8 +33,22 @@ constexpr auto kWaitForUpdatesTimeout = 3 * crl::time(1000);
 	});
 }
+[[nodiscard]] const std::string &EmptyEndpoint() {
+	static const auto result = std::string();
+	return result;
+}
 } // namespace
+const std::string &GroupCallParticipant::cameraEndpoint() const {
+	return videoParams ? videoParams->camera.endpoint : EmptyEndpoint();
+}
+const std::string &GroupCallParticipant::screenEndpoint() const {
+	return videoParams ? videoParams->screen.endpoint : EmptyEndpoint();
+}
 GroupCall::GroupCall(
 	not_null<PeerData*> peer,
 	uint64 id,
@@ -193,13 +207,36 @@ PeerData *GroupCall::participantPeerByAudioSsrc(uint32 ssrc) const {
 		: nullptr;
 }
-PeerData *GroupCall::participantPeerByVideoSsrc(uint32 ssrc) const {
-	const auto i = _participantPeerByVideoSsrc.find(ssrc);
-	return (i != end(_participantPeerByVideoSsrc))
+PeerData *GroupCall::participantPeerByCameraSsrc(uint32 ssrc) const {
+	const auto i = _participantPeerByCameraSsrc.find(ssrc);
+	return (i != end(_participantPeerByCameraSsrc))
 		? i->second.get()
 		: nullptr;
 }
+PeerData *GroupCall::participantPeerByScreenSsrc(uint32 ssrc) const {
+	const auto i = _participantPeerByScreenSsrc.find(ssrc);
+	return (i != end(_participantPeerByScreenSsrc))
+		? i->second.get()
+		: nullptr;
+}
+const GroupCallParticipant *GroupCall::participantByEndpoint(
+		const std::string &endpoint) const {
+	if (endpoint.empty()) {
+		return nullptr;
+	}
+	for (const auto &participant : _participants) {
+		if (const auto params = participant.videoParams.get()) {
+			if (params->camera.endpoint == endpoint
+				|| params->screen.endpoint == endpoint) {
+				return &participant;
+			}
+		}
+	}
+	return nullptr;
+}
 rpl::producer<> GroupCall::participantsSliceAdded() {
 	return _participantsSliceAdded.events();
 }
@@ -305,7 +342,8 @@ void GroupCall::processFullCallFields(const MTPphone_GroupCall &call) {
 	_participants.clear();
 	_speakingByActiveFinishes.clear();
 	_participantPeerByAudioSsrc.clear();
-	_participantPeerByVideoSsrc.clear();
+	_participantPeerByCameraSsrc.clear();
+	_participantPeerByScreenSsrc.clear();
 	_allParticipantsLoaded = false;
 	applyParticipantsSlice(
@@ -499,10 +537,7 @@ void GroupCall::applyParticipantsSlice(
 				.was = *i,
 			};
 			_participantPeerByAudioSsrc.erase(i->ssrc);
-			const auto &all = VideoSourcesFromParams(i->videoParams);
-			for (const auto ssrc : all) {
-				_participantPeerByVideoSsrc.erase(ssrc);
-			}
+			eraseVideoSsrcs(*i);
 			_speakingByActiveFinishes.remove(participantPeer);
 			_participants.erase(i);
 			if (sliceSource != ApplySliceSource::SliceLoaded) {
@@ -543,8 +578,8 @@ void GroupCall::applyParticipantsSlice(
 				&& (!was || was->onlyMinLoaded);
 			const auto raisedHandRating
 				= data.vraise_hand_rating().value_or_empty();
-			const auto hasVideoParamsInformation = (sliceSource
-				!= ApplySliceSource::UpdateConstructed);
+			const auto hasVideoParamsInformation = true/*(sliceSource
+				!= ApplySliceSource::UpdateConstructed)*/;
 			const auto value = Participant{
 				.peer = participantPeer,
 				.videoParams = (hasVideoParamsInformation
@@ -571,19 +606,13 @@ void GroupCall::applyParticipantsSlice(
 				.muted = data.is_muted(),
 				.mutedByMe = mutedByMe,
 				.canSelfUnmute = canSelfUnmute,
-				.videoMuted = (data.vvideo() == nullptr),
 				.onlyMinLoaded = onlyMinLoaded,
 			};
 			if (i == end(_participants)) {
 				_participantPeerByAudioSsrc.emplace(
 					value.ssrc,
 					participantPeer);
-				const auto &all = VideoSourcesFromParams(value.videoParams);
-				for (const auto ssrc : all) {
-					_participantPeerByVideoSsrc.emplace(
-						ssrc,
-						participantPeer);
-				}
+				emplaceVideoSsrcs(value);
 				_participants.push_back(value);
 				if (const auto user = participantPeer->asUser()) {
 					_peer->owner().unregisterInvitedToCallUser(_id, user);
@@ -596,17 +625,8 @@ void GroupCall::applyParticipantsSlice(
 					participantPeer);
 				}
 				if (i->videoParams != value.videoParams) {
-					const auto &old = VideoSourcesFromParams(i->videoParams);
-					for (const auto ssrc : old) {
-						_participantPeerByVideoSsrc.erase(ssrc);
-					}
-					const auto &now = VideoSourcesFromParams(
-						value.videoParams);
-					for (const auto ssrc : now) {
-						_participantPeerByVideoSsrc.emplace(
-							ssrc,
-							participantPeer);
-					}
+					eraseVideoSsrcs(*i);
+					emplaceVideoSsrcs(value);
 				}
 				*i = value;
 			}
@@ -627,6 +647,29 @@ void GroupCall::applyParticipantsSlice(
 	}
 }
+void GroupCall::emplaceVideoSsrcs(const Participant &participant) {
+	if (const auto params = participant.videoParams.get()) {
+		const auto participantPeer = participant.peer;
+		for (const auto ssrc : params->camera.ssrcs) {
+			_participantPeerByCameraSsrc.emplace(ssrc, participantPeer);
+		}
+		for (const auto ssrc : params->screen.ssrcs) {
+			_participantPeerByScreenSsrc.emplace(ssrc, participantPeer);
+		}
+	}
+}
+void GroupCall::eraseVideoSsrcs(const Participant &participant) {
+	if (const auto params = participant.videoParams.get()) {
+		for (const auto ssrc : params->camera.ssrcs) {
+			_participantPeerByCameraSsrc.erase(ssrc);
+		}
+		for (const auto ssrc : params->screen.ssrcs) {
+			_participantPeerByScreenSsrc.erase(ssrc);
+		}
+	}
+}
 void GroupCall::applyLastSpoke(
 	uint32 ssrc,
 	LastSpokeTimes when,
@@ -840,6 +883,9 @@ void GroupCall::requestUnknownParticipants() {
 			}
 			_unknownSpokenPeerIds.remove(id);
 		}
+		if (!ssrcs.empty()) {
+			_participantsResolved.fire(&ssrcs);
+		}
 		requestUnknownParticipants();
 	}).fail([=](const MTP::Error &error) {
 		_unknownParticipantPeersRequestId = 0;
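Note: splitting `_participantPeerByVideoSsrc` into separate camera and screen maps (kept in sync by `emplaceVideoSsrcs`/`eraseVideoSsrcs`) lets an incoming ssrc be attributed not only to a peer but to a specific source of that peer. A standalone sketch of the lookup idea (plain std:: containers and an integer peer id instead of `PeerData*`; names are illustrative, not from this commit):

```cpp
#include <cstdint>
#include <map>
#include <optional>

enum class VideoSource { Camera, Screen };

struct Attribution {
	uint64_t peerId = 0;
	VideoSource source = VideoSource::Camera;
};

struct SsrcIndex {
	std::map<uint32_t, uint64_t> camera; // camera ssrc -> peer id
	std::map<uint32_t, uint64_t> screen; // screencast ssrc -> peer id

	// Resolve an incoming ssrc to a peer and the source it belongs to.
	std::optional<Attribution> resolve(uint32_t ssrc) const {
		if (const auto i = camera.find(ssrc); i != camera.end()) {
			return Attribution{ i->second, VideoSource::Camera };
		} else if (const auto j = screen.find(ssrc); j != screen.end()) {
			return Attribution{ j->second, VideoSource::Screen };
		}
		return std::nullopt;
	}
};
```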


@@ -14,7 +14,7 @@ class PeerData;
 class ApiWrap;
 namespace Calls {
-struct VideoParams;
+struct ParticipantVideoParams;
 } // namespace Calls
 namespace Data {
@@ -26,7 +26,7 @@ struct LastSpokeTimes {
 struct GroupCallParticipant {
 	not_null<PeerData*> peer;
-	std::shared_ptr<Calls::VideoParams> videoParams;
+	std::shared_ptr<Calls::ParticipantVideoParams> videoParams;
 	TimeId date = 0;
 	TimeId lastActive = 0;
 	uint64 raisedHandRating = 0;
@@ -38,8 +38,10 @@ struct GroupCallParticipant {
 	bool muted = false;
 	bool mutedByMe = false;
 	bool canSelfUnmute = false;
-	bool videoMuted = true;
 	bool onlyMinLoaded = false;
+	[[nodiscard]] const std::string &cameraEndpoint() const;
+	[[nodiscard]] const std::string &screenEndpoint() const;
 };
 class GroupCall final {
@@ -104,7 +106,10 @@ public:
 	void requestParticipants();
 	[[nodiscard]] bool participantsLoaded() const;
 	[[nodiscard]] PeerData *participantPeerByAudioSsrc(uint32 ssrc) const;
-	[[nodiscard]] PeerData *participantPeerByVideoSsrc(uint32 ssrc) const;
+	[[nodiscard]] PeerData *participantPeerByCameraSsrc(uint32 ssrc) const;
+	[[nodiscard]] PeerData *participantPeerByScreenSsrc(uint32 ssrc) const;
+	[[nodiscard]] const Participant *participantByEndpoint(
+		const std::string &endpoint) const;
 	[[nodiscard]] rpl::producer<> participantsSliceAdded();
 	[[nodiscard]] rpl::producer<ParticipantUpdate> participantUpdated() const;
@@ -120,6 +125,12 @@ public:
 		PeerData *participantPeerLoaded);
 	void resolveParticipants(const base::flat_set<uint32> &ssrcs);
+	[[nodiscard]] rpl::producer<
+		not_null<const base::flat_map<
+			uint32,
+			LastSpokeTimes>*>> participantsResolved() const {
+		return _participantsResolved.events();
+	}
 	[[nodiscard]] int fullCount() const;
 	[[nodiscard]] rpl::producer<int> fullCountValue() const;
@@ -167,6 +178,9 @@ private:
 	void processSavedFullCall();
 	void finishParticipantsSliceRequest();
+	void emplaceVideoSsrcs(const Participant &participant);
+	void eraseVideoSsrcs(const Participant &participant);
 	const uint64 _id = 0;
 	const uint64 _accessHash = 0;
@@ -184,7 +198,8 @@ private:
 	std::vector<Participant> _participants;
 	base::flat_map<uint32, not_null<PeerData*>> _participantPeerByAudioSsrc;
-	base::flat_map<uint32, not_null<PeerData*>> _participantPeerByVideoSsrc;
+	base::flat_map<uint32, not_null<PeerData*>> _participantPeerByCameraSsrc;
+	base::flat_map<uint32, not_null<PeerData*>> _participantPeerByScreenSsrc;
 	base::flat_map<not_null<PeerData*>, crl::time> _speakingByActiveFinishes;
 	base::Timer _speakingByActiveFinishTimer;
 	QString _nextOffset;
@@ -196,6 +211,10 @@ private:
 	base::flat_map<uint32, LastSpokeTimes> _unknownSpokenSsrcs;
 	base::flat_map<PeerId, LastSpokeTimes> _unknownSpokenPeerIds;
+	rpl::event_stream<
+		not_null<const base::flat_map<
+			uint32,
+			LastSpokeTimes>*>> _participantsResolved;
 	mtpRequestId _unknownParticipantPeersRequestId = 0;
 	rpl::event_stream<ParticipantUpdate> _participantUpdates;
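Note: `_participantsResolved` gives Calls::GroupCall a way to react once previously unknown ssrcs have been resolved (see the `_participantsResolved.fire(&ssrcs)` call in the .cpp diff above). A fragment sketch of a plausible subscriber in tdesktop's usual rpl style; the real consumer sits in the suppressed calls_group_call.cpp diff, so this only illustrates the intended flow using the declarations shown in this commit:

```cpp
// Fragment (not standalone): wiring the new producer to the pending
// media-channel-description requests declared in calls_group_call.h.
real->participantsResolved(
) | rpl::start_with_next([=](
		not_null<const base::flat_map<uint32, Data::LastSpokeTimes>*> ssrcs) {
	// Retry filling pending descriptions for the ssrcs that are now known.
	checkMediaChannelDescriptions([&](uint32 ssrc) {
		return ssrcs->contains(ssrc);
	});
}, _lifetime);
```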

@@ -1 +1 @@
-Subproject commit 9928c00d231c1194896d582a71e3bb6d70ee2765
+Subproject commit 697ef2ed67cfcad81b0e61caf0945a057a847327