Show video instead of userpics in members list.
commit ebdbe4a8d6
parent ba02a5c46a
3 changed files with 107 additions and 1 deletion
@@ -991,6 +991,18 @@ void GroupCall::toggleScheduleStartSubscribed(bool subscribed) {
 	}).send();
 }
 
+void GroupCall::addVideoOutput(
+		uint32 ssrc,
+		not_null<Webrtc::VideoTrack*> track) {
+	if (_instance) {
+		_instance->addIncomingVideoOutput(ssrc, track->sink());
+	}
+}
+
+not_null<Webrtc::VideoTrack*> GroupCall::outgoingVideoTrack() const {
+	return _videoOutgoing.get();
+}
+
 void GroupCall::setMuted(MuteState mute) {
 	const auto set = [=] {
 		const auto wasMuted = (muted() == MuteState::Muted)
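The hunk above adds the two entry points the members list uses: GroupCall::addVideoOutput() routes a remote participant's decoded video, keyed by ssrc, into a Webrtc::VideoTrack sink on the tgcalls instance, and outgoingVideoTrack() exposes the local capture track. Below is a minimal, self-contained sketch of that ssrc-to-sink routing idea; every name here (Frame, FrameSink, FakeCallInstance) is a hypothetical stand-in, not the real tdesktop or tgcalls API.

	#include <cstdint>
	#include <functional>
	#include <iostream>
	#include <map>

	// Hypothetical stand-ins for a video track and the call instance.
	struct Frame { int width = 0; int height = 0; };
	using FrameSink = std::function<void(const Frame&)>;

	class VideoTrack {
	public:
		FrameSink sink() {
			return [this](const Frame &frame) { _last = frame; };
		}
		Frame lastFrame() const { return _last; }

	private:
		Frame _last;
	};

	class FakeCallInstance {
	public:
		// Mirrors the idea of addIncomingVideoOutput(ssrc, sink):
		// route decoded frames of one source to one consumer.
		void addIncomingVideoOutput(std::uint32_t ssrc, FrameSink sink) {
			_sinks[ssrc] = std::move(sink);
		}
		void deliver(std::uint32_t ssrc, const Frame &frame) {
			if (const auto it = _sinks.find(ssrc); it != _sinks.end()) {
				it->second(frame);
			}
		}

	private:
		std::map<std::uint32_t, FrameSink> _sinks;
	};

	int main() {
		FakeCallInstance instance;
		VideoTrack track; // would be created per members-list row
		instance.addIncomingVideoOutput(1234, track.sink());
		instance.deliver(1234, Frame{1280, 720});
		std::cout << track.lastFrame().width << "x"
			<< track.lastFrame().height << "\n"; // prints 1280x720
	}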
@@ -1362,6 +1374,7 @@ void GroupCall::ensureControllerCreated() {
 
 	if (!_videoCapture) {
 		_videoCapture = _delegate->groupCallGetVideoCapture();
+		_videoOutgoing->setState(Webrtc::VideoState::Active);
 		_videoCapture->setOutput(_videoOutgoing->sink());
 	}
 
@@ -1394,6 +1407,15 @@ void GroupCall::ensureControllerCreated() {
 		.createAudioDeviceModule = Webrtc::AudioDeviceModuleCreator(
 			settings.callAudioBackend()),
 		.videoCapture = _videoCapture,
+		//.getVideoSource = [=] {
+		//	return _videoCapture->
+		//},
+		.incomingVideoSourcesUpdated = [=](
+				const std::vector<uint32_t> &ssrcs) {
+			crl::on_main(weak, [=] {
+				showVideoStreams(ssrcs);
+			});
+		},
 		.participantDescriptionsRequired = [=](
 				const std::vector<uint32_t> &ssrcs) {
 			crl::on_main(weak, [=] {
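The new .incomingVideoSourcesUpdated handler is invoked by tgcalls off the main thread, so it hops back via crl::on_main(weak, ...) before calling showVideoStreams(), and the weak guard drops the task if the call is already gone. The sketch below illustrates that weak-guarded dispatch pattern with standard-library stand-ins; postToMain and drainMainThreadQueue are hypothetical, not the real crl API.

	#include <functional>
	#include <iostream>
	#include <memory>
	#include <queue>

	// Stand-in for the main-thread task queue.
	std::queue<std::function<void()>> mainThreadQueue;

	template <typename T>
	void postToMain(std::weak_ptr<T> weak, std::function<void()> callback) {
		mainThreadQueue.push([weak = std::move(weak), callback = std::move(callback)] {
			if (auto strong = weak.lock()) { // skip if the object died meanwhile
				callback();
			}
		});
	}

	void drainMainThreadQueue() {
		while (!mainThreadQueue.empty()) {
			mainThreadQueue.front()();
			mainThreadQueue.pop();
		}
	}

	struct Call {}; // stands in for the group call object

	int main() {
		auto call = std::make_shared<Call>();
		postToMain(std::weak_ptr<Call>(call), [] {
			std::cout << "showVideoStreams(...) would run here\n";
		});
		call.reset(); // the call is destroyed before the "main thread" runs
		drainMainThreadQueue(); // prints nothing: the guarded task is skipped
	}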
@@ -1413,7 +1435,8 @@ void GroupCall::ensureControllerCreated() {
 				broadcastPartStart(std::move(result));
 			});
 			return result;
-		}
+		},
+		.enableVideo = true,
 	};
 	if (Logs::DebugEnabled()) {
 		auto callLogFolder = cWorkingDir() + qsl("DebugLogs");
@@ -1542,6 +1565,12 @@ void GroupCall::requestParticipantsInformation(
 	addPreparedParticipants();
 }
 
+void GroupCall::showVideoStreams(const std::vector<std::uint32_t> &ssrcs) {
+	for (const auto ssrc : ssrcs) {
+		_videoStreamUpdated.fire_copy(ssrc);
+	}
+}
+
 void GroupCall::updateInstanceMuteState() {
 	Expects(_instance != nullptr);
 
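GroupCall::showVideoStreams() simply fans each announced ssrc out over the new _videoStreamUpdated event stream, which the header exposes as videoStreamUpdated() and the members-list controller subscribes to later in this diff. Below is a tiny illustrative event stream with fire_copy()-style fan-out; it is a hypothetical miniature, not rpl::event_stream.

	#include <cstdint>
	#include <functional>
	#include <iostream>
	#include <vector>

	// Minimal fan-out: every subscriber gets a copy of every fired value.
	template <typename Value>
	class EventStream {
	public:
		void subscribe(std::function<void(Value)> handler) {
			_handlers.push_back(std::move(handler));
		}
		void fireCopy(const Value &value) {
			for (const auto &handler : _handlers) {
				handler(value);
			}
		}

	private:
		std::vector<std::function<void(Value)>> _handlers;
	};

	int main() {
		EventStream<std::uint32_t> videoStreamUpdated;
		videoStreamUpdated.subscribe([](std::uint32_t ssrc) {
			std::cout << "members list: wire up video for ssrc " << ssrc << "\n";
		});
		// Rough equivalent of showVideoStreams({10, 20}).
		for (const auto ssrc : std::vector<std::uint32_t>{10, 20}) {
			videoStreamUpdated.fireCopy(ssrc);
		}
	}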
@@ -146,6 +146,9 @@ public:
 	void startScheduledNow();
 	void toggleScheduleStartSubscribed(bool subscribed);
 
+	void addVideoOutput(uint32 ssrc, not_null<Webrtc::VideoTrack*> track);
+	[[nodiscard]] not_null<Webrtc::VideoTrack*> outgoingVideoTrack() const;
+
 	void setMuted(MuteState mute);
 	void setMutedAndUpdate(MuteState mute);
 	[[nodiscard]] MuteState muted() const {
@@ -191,6 +194,9 @@ public:
 	[[nodiscard]] rpl::producer<LevelUpdate> levelUpdates() const {
 		return _levelUpdates.events();
 	}
+	[[nodiscard]] rpl::producer<uint32> videoStreamUpdated() const {
+		return _videoStreamUpdated.events();
+	}
 	[[nodiscard]] rpl::producer<Group::RejoinEvent> rejoinEvents() const {
 		return _rejoinEvents.events();
 	}
@@ -294,6 +300,7 @@ private:
 		const Data::GroupCallParticipant &participant);
 	void addPreparedParticipants();
 	void addPreparedParticipantsDelayed();
+	void showVideoStreams(const std::vector<std::uint32_t> &ssrcs);
 
 	void editParticipant(
 		not_null<PeerData*> participantPeer,
@@ -351,6 +358,7 @@ private:
 	std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
 	const std::unique_ptr<Webrtc::VideoTrack> _videoOutgoing;
 	rpl::event_stream<LevelUpdate> _levelUpdates;
+	rpl::event_stream<uint32> _videoStreamUpdated;
 	base::flat_map<uint32, Data::LastSpokeTimes> _lastSpoke;
 	rpl::event_stream<Group::RejoinEvent> _rejoinEvents;
 	rpl::event_stream<> _allowedToSpeakNotifications;
@@ -36,6 +36,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "lang/lang_keys.h"
 #include "window/window_controller.h" // Controller::sessionController.
 #include "window/window_session_controller.h"
+#include "webrtc/webrtc_video_track.h"
 #include "styles/style_calls.h"
 
 namespace Calls::Group {
@@ -141,6 +142,9 @@ public:
 		return _raisedHandRating;
 	}
 
+	[[nodiscard]] not_null<Webrtc::VideoTrack*> createVideoTrack();
+	void setVideoTrack(not_null<Webrtc::VideoTrack*> track);
+
 	void addActionRipple(QPoint point, Fn<void()> updateCallback) override;
 	void stopLastActionRipple() override;
 
@@ -244,6 +248,9 @@ private:
 	std::unique_ptr<Ui::RippleAnimation> _actionRipple;
 	std::unique_ptr<BlobsAnimation> _blobsAnimation;
 	std::unique_ptr<StatusIcon> _statusIcon;
+	std::unique_ptr<Webrtc::VideoTrack> _videoTrack;
+	Webrtc::VideoTrack *_videoTrackShown = nullptr;
+	rpl::lifetime _videoTrackLifetime; // #TODO calls move to unique_ptr.
 	Ui::Animations::Simple _speakingAnimation; // For gray-red/green icon.
 	Ui::Animations::Simple _mutedAnimation; // For gray/red icon.
 	Ui::Animations::Simple _activeAnimation; // For icon cross animation.
@@ -635,6 +642,28 @@ void Row::ensureUserpicCache(
 auto Row::generatePaintUserpicCallback() -> PaintRoundImageCallback {
 	auto userpic = ensureUserpicView();
 	return [=](Painter &p, int x, int y, int outerWidth, int size) mutable {
+		const auto videoSize = _videoTrackShown
+			? _videoTrackShown->frameSize()
+			: QSize();
+		if (!videoSize.isEmpty()) {
+			const auto resize = (videoSize.width() > videoSize.height())
+				? QSize(videoSize.width() * size / videoSize.height(), size)
+				: QSize(size, videoSize.height() * size / videoSize.width());
+			const auto request = Webrtc::FrameRequest{
+				.resize = resize,
+				.outer = QSize(size, size),
+			};
+			const auto frame = _videoTrackShown->frame(request);
+			auto copy = frame; // #TODO calls optimize.
+			copy.detach();
+			Images::prepareCircle(copy);
+			p.drawImage(x, y, copy);
+			_videoTrackShown->markFrameShown();
+			return;
+		} else if (_videoTrackShown) {
+			// We could skip the first notification.
+			_videoTrackShown->markFrameShown();
+		}
 		if (_blobsAnimation) {
 			const auto mutedByMe = (_state == State::MutedByMe);
 			const auto shift = QPointF(x + size / 2., y + size / 2.);
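The paint callback now prefers a live video frame over the cached userpic: the frame is scaled so its shorter side matches the row's avatar size (a cover fit), cropped to a circle on a detached copy, drawn, and marked as shown so the track can advance. The following self-contained sketch reproduces only the cover-fit arithmetic behind the resize value, assuming plain integer sizes; it mirrors the QSize math in the hunk but is not the real code.

	#include <cassert>
	#include <utility>

	// Scale so the shorter video side matches the square `size`, letting the
	// longer side overflow; the renderer then crops to a size x size circle.
	std::pair<int, int> coverResize(int videoWidth, int videoHeight, int size) {
		return (videoWidth > videoHeight)
			? std::pair{videoWidth * size / videoHeight, size}   // landscape
			: std::pair{size, videoHeight * size / videoWidth};  // portrait/square
	}

	int main() {
		assert(coverResize(1280, 720, 40) == std::pair(71, 40)); // landscape frame
		assert(coverResize(720, 1280, 40) == std::pair(40, 71)); // portrait frame
		assert(coverResize(640, 640, 40) == std::pair(40, 40));  // square frame
	}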
@@ -876,6 +905,27 @@ void Row::refreshStatus() {
 		_speaking);
 }
 
+not_null<Webrtc::VideoTrack*> Row::createVideoTrack() {
+	_videoTrackShown = nullptr;
+	_videoTrack = std::make_unique<Webrtc::VideoTrack>(
+		Webrtc::VideoState::Active);
+	setVideoTrack(_videoTrack.get());
+	return _videoTrack.get();
+}
+
+void Row::setVideoTrack(not_null<Webrtc::VideoTrack*> track) {
+	_videoTrackLifetime.destroy();
+	_videoTrackShown = track;
+	_videoTrackShown->renderNextFrame(
+	) | rpl::start_with_next([=] {
+		_delegate->rowUpdateRow(this);
+		if (_videoTrackShown->frameSize().isEmpty()) {
+			_videoTrackShown->markFrameShown();
+		}
+	}, _videoTrackLifetime);
+	_delegate->rowUpdateRow(this);
+}
+
 void Row::addActionRipple(QPoint point, Fn<void()> updateCallback) {
 	if (!_actionRipple) {
 		auto mask = Ui::RippleAnimation::ellipseMask(QSize(
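Row::setVideoTrack() destroys _videoTrackLifetime before subscribing to the new track's renderNextFrame(), so a row never keeps repainting for a track it no longer shows (createVideoTrack() resets _videoTrackShown first for the same reason). The sketch below shows that destroy-then-resubscribe lifetime idea with hypothetical stand-ins; rpl::lifetime and Webrtc::VideoTrack are richer than this toy version.

	#include <functional>
	#include <iostream>
	#include <vector>

	// Miniature of the lifetime idea: subscriptions register a cleanup
	// callback, and destroy() runs all of them, detaching the subscriber.
	class Lifetime {
	public:
		void add(std::function<void()> cleanup) {
			_cleanups.push_back(std::move(cleanup));
		}
		void destroy() {
			for (auto &cleanup : _cleanups) {
				cleanup();
			}
			_cleanups.clear();
		}

	private:
		std::vector<std::function<void()>> _cleanups;
	};

	class Track {
	public:
		void onNextFrame(std::function<void()> handler, Lifetime &lifetime) {
			_handler = std::move(handler);
			lifetime.add([this] { _handler = nullptr; }); // unsubscribe on destroy
		}
		void produceFrame() {
			if (_handler) {
				_handler();
			}
		}

	private:
		std::function<void()> _handler;
	};

	int main() {
		Lifetime lifetime;
		Track oldTrack, newTrack;
		oldTrack.onNextFrame([] { std::cout << "repaint for old track\n"; }, lifetime);

		// Equivalent of setVideoTrack(newTrack): drop the old subscription first.
		lifetime.destroy();
		newTrack.onNextFrame([] { std::cout << "repaint for new track\n"; }, lifetime);

		oldTrack.produceFrame(); // ignored: the old subscription is gone
		newTrack.produceFrame(); // prints "repaint for new track"
	}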
@@ -980,6 +1030,20 @@ void MembersController::setupListChangeViewers() {
 		}
 	}, _lifetime);
 
+	_call->videoStreamUpdated(
+	) | rpl::start_with_next([=](uint32 ssrc) {
+		const auto real = _call->lookupReal();
+		const auto participantPeer = real
+			? real->participantPeerByAudioSsrc(ssrc)
+			: nullptr;
+		const auto row = participantPeer
+			? findRow(participantPeer)
+			: nullptr;
+		if (row) {
+			_call->addVideoOutput(ssrc, row->createVideoTrack());
+		}
+	}, _lifetime);
+
 	_call->rejoinEvents(
 	) | rpl::start_with_next([=](const Group::RejoinEvent &event) {
 		const auto guard = gsl::finally([&] {
@@ -1266,6 +1330,11 @@ void MembersController::updateRow(
 			Assert(nowSsrc != 0);
 			_soundingRowBySsrc.emplace(nowSsrc, row);
 		}
+		if (isMe(row->peer())) {
+			row->setVideoTrack(_call->outgoingVideoTrack());
+		} else if (nowSsrc) {
+			_call->addVideoOutput(nowSsrc, row->createVideoTrack());
+		}
 	}
 	const auto nowNoSounding = _soundingRowBySsrc.empty();
 	if (wasNoSounding && !nowNoSounding) {
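Taken together, the change reads as follows: when tgcalls reports a new incoming video source, the members-list controller maps the ssrc back to a participant with participantPeerByAudioSsrc(), finds that participant's row, and binds a freshly created per-row track through addVideoOutput(). In updateRow() the row for the local user reuses outgoingVideoTrack() instead, and every row paints the live frame in place of the userpic as soon as a non-empty frame is available.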