diff --git a/Telegram/SourceFiles/calls/calls_group_call.cpp b/Telegram/SourceFiles/calls/calls_group_call.cpp
index 32b2da0d0..ece53bff9 100644
--- a/Telegram/SourceFiles/calls/calls_group_call.cpp
+++ b/Telegram/SourceFiles/calls/calls_group_call.cpp
@@ -991,6 +991,18 @@ void GroupCall::toggleScheduleStartSubscribed(bool subscribed) {
 	}).send();
 }
 
+void GroupCall::addVideoOutput(
+		uint32 ssrc,
+		not_null<Webrtc::VideoTrack*> track) {
+	if (_instance) {
+		_instance->addIncomingVideoOutput(ssrc, track->sink());
+	}
+}
+
+not_null<Webrtc::VideoTrack*> GroupCall::outgoingVideoTrack() const {
+	return _videoOutgoing.get();
+}
+
 void GroupCall::setMuted(MuteState mute) {
 	const auto set = [=] {
 		const auto wasMuted = (muted() == MuteState::Muted)
@@ -1362,6 +1374,7 @@ void GroupCall::ensureControllerCreated() {
 
 	if (!_videoCapture) {
 		_videoCapture = _delegate->groupCallGetVideoCapture();
+		_videoOutgoing->setState(Webrtc::VideoState::Active);
 		_videoCapture->setOutput(_videoOutgoing->sink());
 	}
 
@@ -1394,6 +1407,15 @@ void GroupCall::ensureControllerCreated() {
 		.createAudioDeviceModule = Webrtc::AudioDeviceModuleCreator(
 			settings.callAudioBackend()),
 		.videoCapture = _videoCapture,
+		//.getVideoSource = [=] {
+		//	return _videoCapture->
+		//},
+		.incomingVideoSourcesUpdated = [=](
+				const std::vector<uint32_t> &ssrcs) {
+			crl::on_main(weak, [=] {
+				showVideoStreams(ssrcs);
+			});
+		},
 		.participantDescriptionsRequired = [=](
 				const std::vector<uint32_t> &ssrcs) {
 			crl::on_main(weak, [=] {
@@ -1413,7 +1435,8 @@ void GroupCall::ensureControllerCreated() {
 				broadcastPartStart(std::move(result));
 			});
 			return result;
-		}
+		},
+		.enableVideo = true,
 	};
 	if (Logs::DebugEnabled()) {
 		auto callLogFolder = cWorkingDir() + qsl("DebugLogs");
@@ -1542,6 +1565,12 @@ void GroupCall::requestParticipantsInformation(
 	addPreparedParticipants();
 }
 
+void GroupCall::showVideoStreams(const std::vector<uint32_t> &ssrcs) {
+	for (const auto ssrc : ssrcs) {
+		_videoStreamUpdated.fire_copy(ssrc);
+	}
+}
+
 void GroupCall::updateInstanceMuteState() {
 	Expects(_instance != nullptr);
 
diff --git a/Telegram/SourceFiles/calls/calls_group_call.h b/Telegram/SourceFiles/calls/calls_group_call.h
index 917ca84b7..c1173780f 100644
--- a/Telegram/SourceFiles/calls/calls_group_call.h
+++ b/Telegram/SourceFiles/calls/calls_group_call.h
@@ -146,6 +146,9 @@ public:
 	void startScheduledNow();
 	void toggleScheduleStartSubscribed(bool subscribed);
 
+	void addVideoOutput(uint32 ssrc, not_null<Webrtc::VideoTrack*> track);
+	[[nodiscard]] not_null<Webrtc::VideoTrack*> outgoingVideoTrack() const;
+
 	void setMuted(MuteState mute);
 	void setMutedAndUpdate(MuteState mute);
 	[[nodiscard]] MuteState muted() const {
@@ -191,6 +194,9 @@ public:
 	[[nodiscard]] rpl::producer<LevelUpdate> levelUpdates() const {
 		return _levelUpdates.events();
 	}
+	[[nodiscard]] rpl::producer<uint32> videoStreamUpdated() const {
+		return _videoStreamUpdated.events();
+	}
 	[[nodiscard]] rpl::producer<Group::RejoinEvent> rejoinEvents() const {
 		return _rejoinEvents.events();
 	}
@@ -294,6 +300,7 @@ private:
 		const Data::GroupCallParticipant &participant);
 	void addPreparedParticipants();
 	void addPreparedParticipantsDelayed();
+	void showVideoStreams(const std::vector<uint32_t> &ssrcs);
 
 	void editParticipant(
 		not_null<PeerData*> participantPeer,
@@ -351,6 +358,7 @@ private:
 	std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
 	const std::unique_ptr<Webrtc::VideoTrack> _videoOutgoing;
 	rpl::event_stream<LevelUpdate> _levelUpdates;
+	rpl::event_stream<uint32> _videoStreamUpdated;
 	base::flat_map<uint32, Data::LastSpokeTimes> _lastSpoke;
 	rpl::event_stream<Group::RejoinEvent> _rejoinEvents;
 	rpl::event_stream<> _allowedToSpeakNotifications;
diff --git a/Telegram/SourceFiles/calls/calls_group_members.cpp b/Telegram/SourceFiles/calls/calls_group_members.cpp
index 5ad91316f..1acdf4536 100644
--- a/Telegram/SourceFiles/calls/calls_group_members.cpp
+++ b/Telegram/SourceFiles/calls/calls_group_members.cpp
@@ -36,6 +36,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "lang/lang_keys.h"
 #include "window/window_controller.h" // Controller::sessionController.
 #include "window/window_session_controller.h"
+#include "webrtc/webrtc_video_track.h"
 #include "styles/style_calls.h"
 
 namespace Calls::Group {
@@ -141,6 +142,9 @@ public:
 		return _raisedHandRating;
 	}
 
+	[[nodiscard]] not_null<Webrtc::VideoTrack*> createVideoTrack();
+	void setVideoTrack(not_null<Webrtc::VideoTrack*> track);
+
 	void addActionRipple(QPoint point, Fn<void()> updateCallback) override;
 	void stopLastActionRipple() override;
 
@@ -244,6 +248,9 @@ private:
 	std::unique_ptr<Ui::RippleAnimation> _actionRipple;
 	std::unique_ptr<BlobsAnimation> _blobsAnimation;
 	std::unique_ptr<StatusIcon> _statusIcon;
+	std::unique_ptr<Webrtc::VideoTrack> _videoTrack;
+	Webrtc::VideoTrack *_videoTrackShown = nullptr;
+	rpl::lifetime _videoTrackLifetime; // #TODO calls move to unique_ptr.
 	Ui::Animations::Simple _speakingAnimation; // For gray-red/green icon.
 	Ui::Animations::Simple _mutedAnimation; // For gray/red icon.
 	Ui::Animations::Simple _activeAnimation; // For icon cross animation.
@@ -635,6 +642,28 @@ void Row::ensureUserpicCache(
 auto Row::generatePaintUserpicCallback() -> PaintRoundImageCallback {
 	auto userpic = ensureUserpicView();
 	return [=](Painter &p, int x, int y, int outerWidth, int size) mutable {
+		const auto videoSize = _videoTrackShown
+			? _videoTrackShown->frameSize()
+			: QSize();
+		if (!videoSize.isEmpty()) {
+			const auto resize = (videoSize.width() > videoSize.height())
+				? QSize(videoSize.width() * size / videoSize.height(), size)
+				: QSize(size, videoSize.height() * size / videoSize.width());
+			const auto request = Webrtc::FrameRequest{
+				.resize = resize,
+				.outer = QSize(size, size),
+			};
+			const auto frame = _videoTrackShown->frame(request);
+			auto copy = frame; // #TODO calls optimize.
+			copy.detach();
+			Images::prepareCircle(copy);
+			p.drawImage(x, y, copy);
+			_videoTrackShown->markFrameShown();
+			return;
+		} else if (_videoTrackShown) {
+			// We could skip the first notification.
+			_videoTrackShown->markFrameShown();
+		}
 		if (_blobsAnimation) {
 			const auto mutedByMe = (_state == State::MutedByMe);
 			const auto shift = QPointF(x + size / 2., y + size / 2.);
@@ -876,6 +905,27 @@ void Row::refreshStatus() {
 		_speaking);
 }
 
+not_null<Webrtc::VideoTrack*> Row::createVideoTrack() {
+	_videoTrackShown = nullptr;
+	_videoTrack = std::make_unique<Webrtc::VideoTrack>(
+		Webrtc::VideoState::Active);
+	setVideoTrack(_videoTrack.get());
+	return _videoTrack.get();
+}
+
+void Row::setVideoTrack(not_null<Webrtc::VideoTrack*> track) {
+	_videoTrackLifetime.destroy();
+	_videoTrackShown = track;
+	_videoTrackShown->renderNextFrame(
+	) | rpl::start_with_next([=] {
+		_delegate->rowUpdateRow(this);
+		if (_videoTrackShown->frameSize().isEmpty()) {
+			_videoTrackShown->markFrameShown();
+		}
+	}, _videoTrackLifetime);
+	_delegate->rowUpdateRow(this);
+}
+
 void Row::addActionRipple(QPoint point, Fn<void()> updateCallback) {
 	if (!_actionRipple) {
 		auto mask = Ui::RippleAnimation::ellipseMask(QSize(
@@ -980,6 +1030,20 @@ void MembersController::setupListChangeViewers() {
 		}
 	}, _lifetime);
 
+	_call->videoStreamUpdated(
+	) | rpl::start_with_next([=](uint32 ssrc) {
+		const auto real = _call->lookupReal();
+		const auto participantPeer = real
+			? real->participantPeerByAudioSsrc(ssrc)
+			: nullptr;
+		const auto row = participantPeer
+			? findRow(participantPeer)
+			: nullptr;
+		if (row) {
+			_call->addVideoOutput(ssrc, row->createVideoTrack());
+		}
+	}, _lifetime);
+
 	_call->rejoinEvents(
 	) | rpl::start_with_next([=](const Group::RejoinEvent &event) {
 		const auto guard = gsl::finally([&] {
@@ -1266,6 +1330,11 @@ void MembersController::updateRow(
 			Assert(nowSsrc != 0);
 			_soundingRowBySsrc.emplace(nowSsrc, row);
 		}
+		if (isMe(row->peer())) {
+			row->setVideoTrack(_call->outgoingVideoTrack());
+		} else if (nowSsrc) {
+			_call->addVideoOutput(nowSsrc, row->createVideoTrack());
+		}
 	}
 	const auto nowNoSounding = _soundingRowBySsrc.empty();
 	if (wasNoSounding && !nowNoSounding) {