First version of tiled layout.

John Preston 2021-05-12 19:02:45 +04:00
parent bd83ed8130
commit 7f739065e8
12 changed files with 762 additions and 706 deletions

View file

@@ -709,7 +709,7 @@ groupCallTitleLabel: FlatLabel(groupCallSubtitleLabel) {
	}
}
groupCallAddButtonPosition: point(10px, 7px);
-groupCallMembersWidthMax: 360px;
+groupCallMembersWidthMax: 480px;
groupCallRecordingMark: 6px;
groupCallRecordingMarkSkip: 4px;
groupCallRecordingMarkTop: 8px;
@@ -1220,3 +1220,6 @@ groupCallLargeVideoPin: CrossLineAnimation {
}
groupCallVideoEnlarge: icon {{ "calls/voice_enlarge", mediaviewPipControlsFgOver }};
groupCallVideoMinimize: icon {{ "calls/voice_minimize", groupCallVideoSubTextFg }};
+groupCallVideoSmallSkip: 4px;
+groupCallVideoLargeSkip: 6px;

View file

@@ -110,6 +110,12 @@ constexpr auto kFixLargeVideoDuration = 5 * crl::time(1000);
} // namespace

+//GroupCall::VideoTrack::VideoTrack() = default;
+//GroupCall::VideoTrack::VideoTrack(VideoTrack &&other) = default;
+//GroupCall::VideoTrack &GroupCall::VideoTrack::operator=(
+//	VideoTrack &&other) = default;
+//GroupCall::VideoTrack::~VideoTrack() = default;
+//
class GroupCall::LoadPartTask final : public tgcalls::BroadcastPartTask {
public:
	LoadPartTask(
@@ -166,7 +172,7 @@ private:
};

struct GroupCall::SinkPointer {
-	std::shared_ptr<Webrtc::SinkInterface> data;
+	std::weak_ptr<Webrtc::SinkInterface> data;
};
[[nodiscard]] bool IsGroupCallAdmin( [[nodiscard]] bool IsGroupCallAdmin(
@ -538,10 +544,7 @@ void GroupCall::subscribeToReal(not_null<Data::GroupCall*> real) {
using Update = Data::GroupCall::ParticipantUpdate; using Update = Data::GroupCall::ParticipantUpdate;
real->participantUpdated( real->participantUpdated(
) | rpl::start_with_next([=](const Update &data) { ) | rpl::start_with_next([=](const Update &data) {
auto changed = false; const auto &pinned = _videoEndpointPinned.current();
auto newLarge = _videoEndpointLarge.current();
auto updateCameraNotStreams = std::string();
auto updateScreenNotStreams = std::string();
const auto regularEndpoint = [&](const std::string &endpoint) const auto regularEndpoint = [&](const std::string &endpoint)
-> const std::string & { -> const std::string & {
return (endpoint.empty() return (endpoint.empty()
@ -550,143 +553,30 @@ void GroupCall::subscribeToReal(not_null<Data::GroupCall*> real) {
? EmptyString() ? EmptyString()
: endpoint; : endpoint;
}; };
const auto guard = gsl::finally([&] {
if (!newLarge) {
newLarge = chooseLargeVideoEndpoint();
}
if (_videoEndpointLarge.current() != newLarge) {
setVideoEndpointLarge(newLarge);
} else if (changed) {
updateRequestedVideoChannelsDelayed();
}
if (!updateCameraNotStreams.empty()) {
_streamsVideoUpdated.fire({ updateCameraNotStreams, false });
}
if (!updateScreenNotStreams.empty()) {
_streamsVideoUpdated.fire({ updateScreenNotStreams, false });
}
});
const auto peer = data.was ? data.was->peer : data.now->peer; const auto peer = data.was ? data.was->peer : data.now->peer;
const auto &wasCameraEndpoint = (data.was && data.was->videoParams) if (peer == _joinAs) {
? regularEndpoint(data.was->videoParams->camera.endpoint) return;
}
const auto &wasCameraEndpoint = data.was
? regularEndpoint(data.was->cameraEndpoint())
: EmptyString(); : EmptyString();
const auto &nowCameraEndpoint = (data.now && data.now->videoParams) const auto &nowCameraEndpoint = data.now
? regularEndpoint(data.now->videoParams->camera.endpoint) ? regularEndpoint(data.now->cameraEndpoint())
: EmptyString(); : EmptyString();
if (wasCameraEndpoint != nowCameraEndpoint) { if (wasCameraEndpoint != nowCameraEndpoint) {
if (!nowCameraEndpoint.empty() markEndpointActive({ peer, nowCameraEndpoint }, true);
&& _activeVideoEndpoints.emplace( markEndpointActive({ peer, wasCameraEndpoint }, false);
nowCameraEndpoint,
EndpointType::Camera).second) {
changed = true;
_streamsVideoUpdated.fire({ nowCameraEndpoint, true });
}
if (!wasCameraEndpoint.empty()
&& _activeVideoEndpoints.remove(wasCameraEndpoint)) {
changed = true;
updateCameraNotStreams = wasCameraEndpoint;
if (newLarge.endpoint == wasCameraEndpoint) {
newLarge = VideoEndpoint();
_videoEndpointPinned = false;
}
}
} }
const auto &wasScreenEndpoint = (data.was && data.was->videoParams) const auto &wasScreenEndpoint = data.was
? data.was->videoParams->screen.endpoint ? regularEndpoint(data.was->screenEndpoint())
: EmptyString(); : EmptyString();
const auto &nowScreenEndpoint = (data.now && data.now->videoParams) const auto &nowScreenEndpoint = data.now
? data.now->videoParams->screen.endpoint ? regularEndpoint(data.now->screenEndpoint())
: EmptyString(); : EmptyString();
if (wasScreenEndpoint != nowScreenEndpoint) { if (wasScreenEndpoint != nowScreenEndpoint) {
if (!nowScreenEndpoint.empty() markEndpointActive({ peer, nowScreenEndpoint }, true);
&& _activeVideoEndpoints.emplace( markEndpointActive({ peer, wasScreenEndpoint }, false);
nowScreenEndpoint,
EndpointType::Screen).second) {
changed = true;
_streamsVideoUpdated.fire({ nowScreenEndpoint, true });
}
if (!wasScreenEndpoint.empty()
&& _activeVideoEndpoints.remove(wasScreenEndpoint)) {
changed = true;
updateScreenNotStreams = wasScreenEndpoint;
if (newLarge.endpoint == wasScreenEndpoint) {
newLarge = VideoEndpoint();
_videoEndpointPinned = false;
}
}
}
const auto nowSpeaking = data.now && data.now->speaking;
const auto nowSounding = data.now && data.now->sounding;
const auto wasSpeaking = data.was && data.was->speaking;
const auto wasSounding = data.was && data.was->sounding;
if (nowSpeaking == wasSpeaking && nowSounding == wasSounding) {
return;
} else if (_videoEndpointPinned.current()
|| (_videoLargeShowTime
&& _videoLargeShowTime + kFixLargeVideoDuration
> crl::now())) {
return;
}
if (nowScreenEndpoint != newLarge.endpoint
&& streamsVideo(nowScreenEndpoint)
&& (activeVideoEndpointType(newLarge.endpoint)
!= EndpointType::Screen)) {
newLarge = { peer, nowScreenEndpoint };
}
const auto &participants = real->participants();
if (!nowSpeaking
&& (wasSpeaking || wasSounding)
&& (wasCameraEndpoint == newLarge.endpoint)) {
auto screenEndpoint = VideoEndpoint();
auto speakingEndpoint = VideoEndpoint();
auto soundingEndpoint = VideoEndpoint();
for (const auto &participant : participants) {
const auto params = participant.videoParams.get();
if (!params) {
continue;
}
const auto peer = participant.peer;
if (streamsVideo(params->screen.endpoint)) {
screenEndpoint = { peer, params->screen.endpoint };
break;
} else if (participant.speaking
&& !speakingEndpoint) {
if (streamsVideo(params->camera.endpoint)) {
speakingEndpoint = { peer, params->camera.endpoint };
}
} else if (!nowSounding
&& participant.sounding
&& !soundingEndpoint) {
if (streamsVideo(params->camera.endpoint)) {
soundingEndpoint = { peer, params->camera.endpoint };
}
}
}
if (screenEndpoint) {
newLarge = screenEndpoint;
} else if (speakingEndpoint) {
newLarge = speakingEndpoint;
} else if (soundingEndpoint) {
newLarge = soundingEndpoint;
}
} else if ((nowSpeaking || nowSounding)
&& (nowCameraEndpoint != newLarge.endpoint)
&& (activeVideoEndpointType(newLarge.endpoint)
!= EndpointType::Screen)
&& streamsVideo(nowCameraEndpoint)) {
const auto participant = real->participantByEndpoint(
newLarge.endpoint);
const auto screen = participant
&& (participant->videoParams->screen.endpoint
== newLarge.endpoint);
const auto speaking = participant && participant->speaking;
const auto sounding = participant && participant->sounding;
if (!screen
&& ((nowSpeaking && !speaking)
|| (nowSounding && !sounding))) {
newLarge = { peer, nowCameraEndpoint };
}
} }
}, _lifetime); }, _lifetime);
@ -869,53 +759,19 @@ void GroupCall::join(const MTPInputGroupCall &inputCall) {
}); });
} }
void GroupCall::setMyEndpointType(
const std::string &endpoint,
EndpointType type) {
if (endpoint.empty()) {
return;
} else if (type == EndpointType::None) {
const auto was = _activeVideoEndpoints.remove(endpoint);
if (was) {
auto newLarge = _videoEndpointLarge.current();
if (newLarge.endpoint == endpoint) {
_videoEndpointPinned = false;
setVideoEndpointLarge(chooseLargeVideoEndpoint());
}
_streamsVideoUpdated.fire({ endpoint, false });
}
} else {
const auto now = _activeVideoEndpoints.emplace(
endpoint,
type).second;
if (now) {
_streamsVideoUpdated.fire({ endpoint, true });
}
const auto nowLarge = activeVideoEndpointType(
_videoEndpointLarge.current().endpoint);
if (!_videoEndpointPinned.current()
&& ((type == EndpointType::Screen
&& nowLarge != EndpointType::Screen)
|| (type == EndpointType::Camera
&& nowLarge == EndpointType::None))) {
setVideoEndpointLarge(VideoEndpoint{ _joinAs, endpoint });
}
}
}
void GroupCall::setScreenEndpoint(std::string endpoint) {
	if (_screenEndpoint == endpoint) {
		return;
	}
	if (!_screenEndpoint.empty()) {
-		setMyEndpointType(_screenEndpoint, EndpointType::None);
+		markEndpointActive({ _joinAs, _screenEndpoint }, false);
	}
	_screenEndpoint = std::move(endpoint);
	if (_screenEndpoint.empty()) {
		return;
	}
	if (isSharingScreen()) {
-		setMyEndpointType(_screenEndpoint, EndpointType::Screen);
+		markEndpointActive({ _joinAs, _screenEndpoint }, true);
	}
}

@@ -924,14 +780,14 @@ void GroupCall::setCameraEndpoint(std::string endpoint) {
		return;
	}
	if (!_cameraEndpoint.empty()) {
-		setMyEndpointType(_cameraEndpoint, EndpointType::None);
+		markEndpointActive({ _joinAs, _cameraEndpoint }, false);
	}
	_cameraEndpoint = std::move(endpoint);
	if (_cameraEndpoint.empty()) {
		return;
	}
	if (isSharingCamera()) {
-		setMyEndpointType(_cameraEndpoint, EndpointType::Camera);
+		markEndpointActive({ _joinAs, _cameraEndpoint }, true);
	}
}
@@ -939,12 +795,42 @@ void GroupCall::addVideoOutput(
		const std::string &endpoint,
		SinkPointer sink) {
	if (_cameraEndpoint == endpoint) {
-		_cameraCapture->setOutput(sink.data);
+		if (auto strong = sink.data.lock()) {
+			_cameraCapture->setOutput(std::move(strong));
+		}
	} else if (_screenEndpoint == endpoint) {
-		_screenCapture->setOutput(sink.data);
+		if (auto strong = sink.data.lock()) {
+			_screenCapture->setOutput(std::move(strong));
+		}
+	} else if (_instance) {
+		_instance->addIncomingVideoOutput(endpoint, std::move(sink.data));
	} else {
-		Assert(_instance != nullptr);
-		_instance->addIncomingVideoOutput(endpoint, sink.data);
+		_pendingVideoOutputs.emplace(endpoint, std::move(sink));
	}
}

+void GroupCall::markEndpointActive(VideoEndpoint endpoint, bool active) {
+	if (!endpoint) {
+		return;
+	}
+	const auto changed = active
+		? !_activeVideoTracks.contains(endpoint)
+		: _activeVideoTracks.remove(endpoint);
+	if (active && changed) {
+		const auto i = _activeVideoTracks.emplace(
+			endpoint,
+			VideoTrack{
+				.track = std::make_unique<Webrtc::VideoTrack>(
+					Webrtc::VideoState::Active),
+				.peer = endpoint.peer,
+			}).first;
+		addVideoOutput(i->first.id, { i->second.track->sink() });
+	} else if (!active && _videoEndpointPinned.current() == endpoint) {
+		_videoEndpointPinned = VideoEndpoint();
+	}
+	updateRequestedVideoChannelsDelayed();
+	if (changed) {
+		_videoStreamActiveUpdates.fire(std::move(endpoint));
+	}
+}
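With SinkPointer switched from std::shared_ptr to std::weak_ptr, addVideoOutput above only hands a sink to the capture or the instance while the owning track is still alive, and outputs requested before _instance exists are parked in _pendingVideoOutputs and flushed later in ensureControllerCreated. A minimal sketch of the lock-before-use pattern, using hypothetical stand-in types (FrameSink here is not the real Webrtc::SinkInterface):

#include <iostream>
#include <memory>

// Hypothetical stand-ins; only the ownership pattern mirrors the diff above.
struct FrameSink {
	void consume(int frame) { std::cout << "frame " << frame << '\n'; }
};

struct SinkPointer {
	std::weak_ptr<FrameSink> data; // non-owning, like the new SinkPointer
};

void deliver(const SinkPointer &sink, int frame) {
	// lock() yields a shared_ptr only while the track still owns the sink,
	// so a destroyed video tile simply stops receiving frames.
	if (auto strong = sink.data.lock()) {
		strong->consume(frame);
	}
}

int main() {
	auto track = std::make_shared<FrameSink>();
	auto sink = SinkPointer{ track };
	deliver(sink, 1); // prints "frame 1"
	track.reset();    // the tile and its track go away
	deliver(sink, 2); // silently dropped, no dangling pointer
}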
@ -1676,12 +1562,12 @@ void GroupCall::ensureOutgoingVideo() {
_instance->setVideoCapture(_cameraCapture); _instance->setVideoCapture(_cameraCapture);
} }
_cameraCapture->setState(tgcalls::VideoState::Active); _cameraCapture->setState(tgcalls::VideoState::Active);
setMyEndpointType(_cameraEndpoint, EndpointType::Camera); markEndpointActive({ _joinAs, _cameraEndpoint }, true);
} else { } else {
if (_cameraCapture) { if (_cameraCapture) {
_cameraCapture->setState(tgcalls::VideoState::Inactive); _cameraCapture->setState(tgcalls::VideoState::Inactive);
} }
setMyEndpointType(_cameraEndpoint, EndpointType::None); markEndpointActive({ _joinAs, _cameraEndpoint }, false);
} }
sendSelfUpdate(SendUpdateType::VideoMuted); sendSelfUpdate(SendUpdateType::VideoMuted);
applyMeInCallLocally(); applyMeInCallLocally();
@ -1716,12 +1602,12 @@ void GroupCall::ensureOutgoingVideo() {
_screenInstance->setVideoCapture(_screenCapture); _screenInstance->setVideoCapture(_screenCapture);
} }
_screenCapture->setState(tgcalls::VideoState::Active); _screenCapture->setState(tgcalls::VideoState::Active);
setMyEndpointType(_screenEndpoint, EndpointType::Screen); markEndpointActive({ _joinAs, _screenEndpoint }, true);
} else { } else {
if (_screenCapture) { if (_screenCapture) {
_screenCapture->setState(tgcalls::VideoState::Inactive); _screenCapture->setState(tgcalls::VideoState::Inactive);
} }
setMyEndpointType(_screenEndpoint, EndpointType::None); markEndpointActive({ _joinAs, _screenEndpoint }, false);
} }
joinLeavePresentation(); joinLeavePresentation();
}, _lifetime); }, _lifetime);
@ -1855,26 +1741,11 @@ void GroupCall::ensureControllerCreated() {
_instance = std::make_unique<tgcalls::GroupInstanceCustomImpl>( _instance = std::make_unique<tgcalls::GroupInstanceCustomImpl>(
std::move(descriptor)); std::move(descriptor));
_videoEndpointLarge.value(
) | rpl::start_with_next([=](const VideoEndpoint &endpoint) {
updateRequestedVideoChannels();
_videoLargeTrack = LargeTrack();
_videoLargeTrackWrap = nullptr;
if (!endpoint) {
return;
}
_videoLargeTrackWrap = std::make_unique<Webrtc::VideoTrack>(
Webrtc::VideoState::Active);
_videoLargeTrack = LargeTrack{
_videoLargeTrackWrap.get(),
endpoint.peer
};
addVideoOutput(endpoint.endpoint, { _videoLargeTrackWrap->sink() });
}, _lifetime);
updateInstanceMuteState(); updateInstanceMuteState();
updateInstanceVolumes(); updateInstanceVolumes();
for (auto &[endpoint, sink] : base::take(_pendingVideoOutputs)) {
_instance->addIncomingVideoOutput(endpoint, std::move(sink.data));
}
//raw->setAudioOutputDuckingEnabled(settings.callAudioDuckingEnabled()); //raw->setAudioOutputDuckingEnabled(settings.callAudioDuckingEnabled());
} }
@ -2077,15 +1948,15 @@ void GroupCall::updateRequestedVideoChannels() {
} }
auto channels = std::vector<tgcalls::VideoChannelDescription>(); auto channels = std::vector<tgcalls::VideoChannelDescription>();
using Quality = tgcalls::VideoChannelDescription::Quality; using Quality = tgcalls::VideoChannelDescription::Quality;
channels.reserve(_activeVideoEndpoints.size()); channels.reserve(_activeVideoTracks.size());
const auto &camera = cameraSharingEndpoint(); const auto &camera = cameraSharingEndpoint();
const auto &screen = screenSharingEndpoint(); const auto &screen = screenSharingEndpoint();
const auto &large = _videoEndpointLarge.current().endpoint; for (const auto &[endpoint, video] : _activeVideoTracks) {
for (const auto &[endpoint, endpointType] : _activeVideoEndpoints) { const auto &endpointId = endpoint.id;
if (endpoint == camera || endpoint == screen) { if (endpointId == camera || endpointId == screen) {
continue; continue;
} }
const auto participant = real->participantByEndpoint(endpoint); const auto participant = real->participantByEndpoint(endpointId);
const auto params = (participant && participant->ssrc) const auto params = (participant && participant->ssrc)
? participant->videoParams.get() ? participant->videoParams.get()
: nullptr; : nullptr;
@ -2094,11 +1965,13 @@ void GroupCall::updateRequestedVideoChannels() {
} }
channels.push_back({ channels.push_back({
.audioSsrc = participant->ssrc, .audioSsrc = participant->ssrc,
.videoInformation = (params->camera.endpoint == endpoint .videoInformation = (params->camera.endpoint == endpointId
? params->camera.json.toStdString() ? params->camera.json.toStdString()
: params->screen.json.toStdString()), : params->screen.json.toStdString()),
.quality = (endpoint == large .quality = (video.quality == Group::VideoQuality::Full
? Quality::Full ? Quality::Full
: video.quality == Group::VideoQuality::Medium
? Quality::Medium
: Quality::Thumbnail), : Quality::Thumbnail),
}); });
} }
@ -2122,109 +1995,50 @@ void GroupCall::fillActiveVideoEndpoints() {
Assert(real != nullptr); Assert(real != nullptr);
const auto &participants = real->participants(); const auto &participants = real->participants();
auto newLarge = _videoEndpointLarge.current(); const auto &pinned = _videoEndpointPinned.current();
auto newLargeFound = false; auto pinnedFound = false;
auto removed = _activeVideoEndpoints; auto endpoints = _activeVideoTracks | ranges::views::transform([](
const auto feedOne = [&]( const auto &pair) {
const std::string &endpoint, return pair.first;
EndpointType type) { });
auto removed = base::flat_set<VideoEndpoint>(
begin(endpoints),
end(endpoints));
const auto feedOne = [&](VideoEndpoint endpoint) {
if (endpoint.empty()) { if (endpoint.empty()) {
return; return;
} else if (endpoint == newLarge.endpoint) { } else if (endpoint == pinned) {
newLargeFound = true; pinnedFound = true;
} }
if (!removed.remove(endpoint)) { if (!removed.remove(endpoint)) {
_activeVideoEndpoints.emplace(endpoint, type); markEndpointActive(std::move(endpoint), true);
_streamsVideoUpdated.fire({ endpoint, true });
} }
}; };
for (const auto &participant : participants) { for (const auto &participant : participants) {
const auto camera = participant.cameraEndpoint(); const auto camera = participant.cameraEndpoint();
if (camera != _cameraEndpoint && camera != _screenEndpoint) { if (camera != _cameraEndpoint
feedOne(camera, EndpointType::Camera); && camera != _screenEndpoint
&& participant.peer != _joinAs) {
feedOne({ participant.peer, camera });
} }
const auto screen = participant.screenEndpoint(); const auto screen = participant.screenEndpoint();
if (screen != _cameraEndpoint && screen != _screenEndpoint) { if (screen != _cameraEndpoint
feedOne(screen, EndpointType::Screen); && screen != _screenEndpoint
&& participant.peer != _joinAs) {
feedOne({ participant.peer, screen });
} }
} }
feedOne(cameraSharingEndpoint(), EndpointType::Camera); feedOne({ _joinAs, cameraSharingEndpoint() });
feedOne(screenSharingEndpoint(), EndpointType::Screen); feedOne({ _joinAs, screenSharingEndpoint() });
if (!newLarge.empty() && !newLargeFound) { if (pinned && !pinnedFound) {
_videoEndpointPinned = false; _videoEndpointPinned = VideoEndpoint();
newLarge = VideoEndpoint();
} }
if (!newLarge) { for (const auto &endpoint : removed) {
setVideoEndpointLarge(chooseLargeVideoEndpoint()); markEndpointActive(endpoint, false);
}
for (const auto &[endpoint, type] : removed) {
if (_activeVideoEndpoints.remove(endpoint)) {
_streamsVideoUpdated.fire({ endpoint, false });
}
} }
updateRequestedVideoChannels(); updateRequestedVideoChannels();
} }
GroupCall::EndpointType GroupCall::activeVideoEndpointType(
const std::string &endpoint) const {
if (endpoint.empty()) {
return EndpointType::None;
}
const auto i = _activeVideoEndpoints.find(endpoint);
return (i != end(_activeVideoEndpoints))
? i->second
: EndpointType::None;
}
VideoEndpoint GroupCall::chooseLargeVideoEndpoint() const {
const auto real = lookupReal();
if (!real) {
return VideoEndpoint();
}
auto anyEndpoint = VideoEndpoint();
auto screenEndpoint = VideoEndpoint();
auto speakingEndpoint = VideoEndpoint();
auto soundingEndpoint = VideoEndpoint();
const auto &myCameraEndpoint = cameraSharingEndpoint();
const auto &myScreenEndpoint = screenSharingEndpoint();
const auto &participants = real->participants();
for (const auto &[endpoint, endpointType] : _activeVideoEndpoints) {
if (endpoint == _cameraEndpoint || endpoint == _screenEndpoint) {
continue;
}
if (const auto participant = real->participantByEndpoint(endpoint)) {
const auto peer = participant->peer;
if (screenEndpoint.empty()
&& participant->videoParams->screen.endpoint == endpoint) {
screenEndpoint = { peer, endpoint };
break;
}
if (speakingEndpoint.empty() && participant->speaking) {
speakingEndpoint = { peer, endpoint };
}
if (soundingEndpoint.empty() && participant->sounding) {
soundingEndpoint = { peer, endpoint };
}
if (anyEndpoint.empty()) {
anyEndpoint = { peer, endpoint };
}
}
}
return screenEndpoint
? screenEndpoint
: streamsVideo(myScreenEndpoint)
? VideoEndpoint{ _joinAs, myScreenEndpoint }
: speakingEndpoint
? speakingEndpoint
: soundingEndpoint
? soundingEndpoint
: anyEndpoint
? anyEndpoint
: streamsVideo(myCameraEndpoint)
? VideoEndpoint{ _joinAs, myCameraEndpoint }
: VideoEndpoint();
}
void GroupCall::updateInstanceMuteState() {
	Expects(_instance != nullptr);

@@ -2515,27 +2329,21 @@ void GroupCall::sendSelfUpdate(SendUpdateType type) {
}

void GroupCall::pinVideoEndpoint(VideoEndpoint endpoint) {
-	if (!endpoint) {
-		_videoEndpointPinned = false;
-	} else if (streamsVideo(endpoint.endpoint)) {
-		_videoEndpointPinned = false;
-		setVideoEndpointLarge(std::move(endpoint));
-		_videoEndpointPinned = true;
-	}
+	_videoEndpointPinned = endpoint;
}

-void GroupCall::showVideoEndpointLarge(VideoEndpoint endpoint) {
-	if (!streamsVideo(endpoint.endpoint)) {
+void GroupCall::requestVideoQuality(
+		const VideoEndpoint &endpoint,
+		Group::VideoQuality quality) {
+	if (!endpoint) {
		return;
	}
-	_videoEndpointPinned = false;
-	setVideoEndpointLarge(std::move(endpoint));
-	_videoLargeShowTime = crl::now();
-}
-
-void GroupCall::setVideoEndpointLarge(VideoEndpoint endpoint) {
-	_videoEndpointLarge = endpoint;
-	_videoLargeShowTime = 0;
+	const auto i = _activeVideoTracks.find(endpoint);
+	if (i == end(_activeVideoTracks) || i->second.quality == quality) {
+		return;
+	}
+	i->second.quality = quality;
+	updateRequestedVideoChannelsDelayed();
}

void GroupCall::setCurrentAudioDevice(bool input, const QString &deviceId) {

View file

@@ -47,6 +47,7 @@ struct VolumeRequest;
struct ParticipantState;
struct JoinInfo;
struct RejoinEvent;
+enum class VideoQuality;
} // namespace Group

enum class MuteState {
@@ -76,10 +77,10 @@ struct LevelUpdate {
struct VideoEndpoint {
	PeerData *peer = nullptr;
-	std::string endpoint;
+	std::string id;

	[[nodiscard]] bool empty() const noexcept {
-		return !peer;
+		return id.empty();
	}
	[[nodiscard]] explicit operator bool() const noexcept {
		return !empty();
@@ -89,7 +90,7 @@ struct VideoEndpoint {
inline bool operator==(
		const VideoEndpoint &a,
		const VideoEndpoint &b) noexcept {
-	return (a.peer == b.peer) && (a.endpoint == b.endpoint);
+	return (a.id == b.id);
}

inline bool operator!=(
@@ -102,7 +103,7 @@ inline bool operator<(
		const VideoEndpoint &a,
		const VideoEndpoint &b) noexcept {
	return (a.peer < b.peer)
-		|| (a.peer == b.peer && a.endpoint < b.endpoint);
+		|| (a.peer == b.peer && a.id < b.id);
}

inline bool operator>(
@@ -123,11 +124,6 @@ inline bool operator>=(
	return !(a < b);
}
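A side effect of the struct change above: operator== now compares only the endpoint id, while operator< still orders by (peer, id), so two VideoEndpoint values with the same id but different peers compare equal yet occupy distinct slots in ordered containers such as the new base::flat_map<VideoEndpoint, VideoTrack>. A small illustration with simplified stand-in types (int* replaces PeerData*):

#include <cassert>
#include <string>

struct VideoEndpoint {
	int *peer = nullptr;
	std::string id;
};

bool operator==(const VideoEndpoint &a, const VideoEndpoint &b) {
	return (a.id == b.id); // peer is ignored, as in the diff
}

bool operator<(const VideoEndpoint &a, const VideoEndpoint &b) {
	return (a.peer < b.peer) || (a.peer == b.peer && a.id < b.id);
}

int main() {
	int peers[2] = {};
	const auto a = VideoEndpoint{ &peers[0], "ssrc42" };
	const auto b = VideoEndpoint{ &peers[1], "ssrc42" };
	assert(a == b);             // equal: same endpoint id
	assert((a < b) || (b < a)); // but still ordered apart by peer
}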
-struct StreamsVideoUpdate {
-	std::string endpoint;
-	bool streams = false;
-};

struct VideoParams {
	base::flat_set<uint32> ssrcs;
	std::string endpoint;
@ -274,49 +270,44 @@ public:
[[nodiscard]] rpl::producer<LevelUpdate> levelUpdates() const { [[nodiscard]] rpl::producer<LevelUpdate> levelUpdates() const {
return _levelUpdates.events(); return _levelUpdates.events();
} }
[[nodiscard]] auto streamsVideoUpdates() const [[nodiscard]] auto videoStreamActiveUpdates() const
-> rpl::producer<StreamsVideoUpdate> { -> rpl::producer<VideoEndpoint> {
return _streamsVideoUpdated.events(); return _videoStreamActiveUpdates.events();
}
[[nodiscard]] bool streamsVideo(const std::string &endpoint) const {
return !endpoint.empty()
&& activeVideoEndpointType(endpoint) != EndpointType::None;
}
[[nodiscard]] bool videoEndpointPinned() const {
return _videoEndpointPinned.current();
}
[[nodiscard]] rpl::producer<bool> videoEndpointPinnedValue() const {
return _videoEndpointPinned.value();
} }
void pinVideoEndpoint(VideoEndpoint endpoint); void pinVideoEndpoint(VideoEndpoint endpoint);
[[nodiscard]] const VideoEndpoint &videoEndpointLarge() const { void requestVideoQuality(
return _videoEndpointLarge.current(); const VideoEndpoint &endpoint,
Group::VideoQuality quality);
[[nodiscard]] const VideoEndpoint &videoEndpointPinned() const {
return _videoEndpointPinned.current();
} }
[[nodiscard]] auto videoEndpointLargeValue() const [[nodiscard]] auto videoEndpointPinnedValue() const
-> rpl::producer<VideoEndpoint> { -> rpl::producer<VideoEndpoint> {
return _videoEndpointLarge.value(); return _videoEndpointPinned.value();
} }
void showVideoEndpointLarge(VideoEndpoint endpoint); struct VideoTrack {
struct LargeTrack { //VideoTrack();
Webrtc::VideoTrack *track = nullptr; //VideoTrack(VideoTrack &&other);
//VideoTrack &operator=(VideoTrack &&other);
//~VideoTrack();
std::unique_ptr<Webrtc::VideoTrack> track;
PeerData *peer = nullptr; PeerData *peer = nullptr;
Group::VideoQuality quality = Group::VideoQuality();
[[nodiscard]] explicit operator bool() const { [[nodiscard]] explicit operator bool() const {
return (track != nullptr); return (track != nullptr);
} }
[[nodiscard]] bool operator==(LargeTrack other) const { [[nodiscard]] bool operator==(const VideoTrack &other) const {
return (track == other.track) && (peer == other.peer); return (track == other.track) && (peer == other.peer);
} }
[[nodiscard]] bool operator!=(LargeTrack other) const { [[nodiscard]] bool operator!=(const VideoTrack &other) const {
return !(*this == other); return !(*this == other);
} }
}; };
[[nodiscard]] LargeTrack videoLargeTrack() const { [[nodiscard]] auto activeVideoTracks() const
return _videoLargeTrack.current(); -> const base::flat_map<VideoEndpoint, VideoTrack> & {
} return _activeVideoTracks;
[[nodiscard]] auto videoLargeTrackValue() const
-> rpl::producer<LargeTrack> {
return _videoLargeTrack.value();
} }
[[nodiscard]] rpl::producer<Group::RejoinEvent> rejoinEvents() const { [[nodiscard]] rpl::producer<Group::RejoinEvent> rejoinEvents() const {
return _rejoinEvents.events(); return _rejoinEvents.events();
@ -391,11 +382,6 @@ private:
RaiseHand, RaiseHand,
VideoMuted, VideoMuted,
}; };
enum class EndpointType {
None,
Camera,
Screen,
};
[[nodiscard]] bool mediaChannelDescriptionsFill( [[nodiscard]] bool mediaChannelDescriptionsFill(
not_null<MediaChannelDescriptionsTask*> task, not_null<MediaChannelDescriptionsTask*> task,
@ -448,10 +434,6 @@ private:
void updateRequestedVideoChannels(); void updateRequestedVideoChannels();
void updateRequestedVideoChannelsDelayed(); void updateRequestedVideoChannelsDelayed();
void fillActiveVideoEndpoints(); void fillActiveVideoEndpoints();
[[nodiscard]] VideoEndpoint chooseLargeVideoEndpoint() const;
[[nodiscard]] EndpointType activeVideoEndpointType(
const std::string &endpoint) const;
void setVideoEndpointLarge(VideoEndpoint endpoint);
void editParticipant( void editParticipant(
not_null<PeerData*> participantPeer, not_null<PeerData*> participantPeer,
@ -467,11 +449,12 @@ private:
void setupMediaDevices(); void setupMediaDevices();
void ensureOutgoingVideo(); void ensureOutgoingVideo();
void setMyEndpointType(const std::string &endpoint, EndpointType type);
void setScreenEndpoint(std::string endpoint); void setScreenEndpoint(std::string endpoint);
void setCameraEndpoint(std::string endpoint); void setCameraEndpoint(std::string endpoint);
void addVideoOutput(const std::string &endpoint, SinkPointer sink); void addVideoOutput(const std::string &endpoint, SinkPointer sink);
void markEndpointActive(VideoEndpoint endpoint, bool active);
[[nodiscard]] MTPInputGroupCall inputCall() const; [[nodiscard]] MTPInputGroupCall inputCall() const;
const not_null<Delegate*> _delegate; const not_null<Delegate*> _delegate;
@ -523,6 +506,7 @@ private:
base::has_weak_ptr _instanceGuard; base::has_weak_ptr _instanceGuard;
std::shared_ptr<tgcalls::VideoCaptureInterface> _cameraCapture; std::shared_ptr<tgcalls::VideoCaptureInterface> _cameraCapture;
std::unique_ptr<Webrtc::VideoTrack> _cameraOutgoing; std::unique_ptr<Webrtc::VideoTrack> _cameraOutgoing;
base::flat_map<std::string, SinkPointer> _pendingVideoOutputs;
rpl::variable<InstanceState> _screenInstanceState rpl::variable<InstanceState> _screenInstanceState
= InstanceState::Disconnected; = InstanceState::Disconnected;
@ -536,13 +520,9 @@ private:
bool _videoInited = false; bool _videoInited = false;
rpl::event_stream<LevelUpdate> _levelUpdates; rpl::event_stream<LevelUpdate> _levelUpdates;
rpl::event_stream<StreamsVideoUpdate> _streamsVideoUpdated; rpl::event_stream<VideoEndpoint> _videoStreamActiveUpdates;
base::flat_map<std::string, EndpointType> _activeVideoEndpoints; base::flat_map<VideoEndpoint, VideoTrack> _activeVideoTracks;
rpl::variable<VideoEndpoint> _videoEndpointLarge; rpl::variable<VideoEndpoint> _videoEndpointPinned;
rpl::variable<bool> _videoEndpointPinned;
std::unique_ptr<Webrtc::VideoTrack> _videoLargeTrackWrap;
rpl::variable<LargeTrack> _videoLargeTrack;
crl::time _videoLargeShowTime = 0;
base::flat_map<uint32, Data::LastSpokeTimes> _lastSpoke; base::flat_map<uint32, Data::LastSpokeTimes> _lastSpoke;
rpl::event_stream<Group::RejoinEvent> _rejoinEvents; rpl::event_stream<Group::RejoinEvent> _rejoinEvents;
rpl::event_stream<> _allowedToSpeakNotifications; rpl::event_stream<> _allowedToSpeakNotifications;

View file

@@ -53,4 +53,10 @@ enum class PanelMode {
	Wide,
};

+enum class VideoQuality {
+	Thumbnail,
+	Medium,
+	Full,
+};

} // namespace Calls::Group

View file

@@ -7,6 +7,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "calls/group/calls_group_large_video.h"

+#include "calls/group/calls_group_common.h"
#include "calls/group/calls_group_members_row.h"
#include "media/view/media_view_pip.h"
#include "webrtc/webrtc_video_track.h"
@@ -50,6 +51,15 @@ void LargeVideo::setVisible(bool visible) {
void LargeVideo::setGeometry(int x, int y, int width, int height) {
	_content.setGeometry(x, y, width, height);
+	if (width > 0 && height > 0) {
+		const auto kMedium = style::ConvertScale(380);
+		const auto kSmall = style::ConvertScale(200);
+		_requestedQuality = (width > kMedium || height > kMedium)
+			? VideoQuality::Full
+			: (width > kSmall || height > kSmall)
+			? VideoQuality::Medium
+			: VideoQuality::Thumbnail;
+	}
}
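The block added to setGeometry above derives the quality a tile should request from its own on-screen size: above roughly 380 scaled pixels it asks for Full, above roughly 200 for Medium, otherwise Thumbnail; GroupCall::requestVideoQuality then stores that per track and updateRequestedVideoChannels maps it onto tgcalls' Full/Medium/Thumbnail channel qualities. A standalone sketch of the same mapping, with a hypothetical QualityForSize helper and the interface scale passed explicitly instead of style::ConvertScale:

#include <cstdio>

enum class VideoQuality { Thumbnail, Medium, Full };

// Mirrors the thresholds in LargeVideo::setGeometry above.
VideoQuality QualityForSize(int width, int height, float scale = 1.f) {
	const auto medium = int(380 * scale);
	const auto small = int(200 * scale);
	return (width > medium || height > medium)
		? VideoQuality::Full
		: (width > small || height > small)
		? VideoQuality::Medium
		: VideoQuality::Thumbnail;
}

int main() {
	std::printf("%d\n", int(QualityForSize(480, 270))); // 2: Full
	std::printf("%d\n", int(QualityForSize(236, 236))); // 1: Medium
	std::printf("%d\n", int(QualityForSize(160, 90)));  // 0: Thumbnail
}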
void LargeVideo::setControlsShown(bool shown) {
@@ -77,10 +87,24 @@ rpl::producer<float64> LargeVideo::controlsShown() const {
	return _controlsShownRatio.value();
}

+QSize LargeVideo::trackSize() const {
+	return _trackSize.current();
+}
+
rpl::producer<QSize> LargeVideo::trackSizeValue() const {
	return _trackSize.value();
}

+rpl::producer<VideoQuality> LargeVideo::requestedQuality() const {
+	using namespace rpl::mappers;
+	return rpl::combine(
+		_content.shownValue(),
+		_requestedQuality.value()
+	) | rpl::filter([=](bool shown, auto) {
+		return shown;
+	}) | rpl::map(_2);
+}
+
void LargeVideo::setup(
	rpl::producer<LargeVideoTrack> track,
	rpl::producer<bool> pinned) {

View file

@@ -27,6 +27,7 @@ class VideoTrack;
namespace Calls::Group {

class MembersRow;
+enum class VideoQuality;

struct LargeVideoTrack {
	Webrtc::VideoTrack *track = nullptr;
@@ -66,11 +67,14 @@ public:
	[[nodiscard]] rpl::producer<bool> pinToggled() const;
	[[nodiscard]] rpl::producer<> minimizeClicks() const;
	[[nodiscard]] rpl::producer<float64> controlsShown() const;
-	[[nodiscard]] rpl::producer<QSize> trackSizeValue() const;
	[[nodiscard]] rpl::producer<> clicks() const {
		return _clicks.events();
	}
+	[[nodiscard]] QSize trackSize() const;
+	[[nodiscard]] rpl::producer<QSize> trackSizeValue() const;
+	[[nodiscard]] rpl::producer<VideoQuality> requestedQuality() const;

	[[nodiscard]] rpl::lifetime &lifetime() {
		return _content.lifetime();
	}
@@ -124,6 +128,7 @@ private:
	bool _toggleControlsScheduled = false;
	rpl::variable<float64> _controlsShownRatio = 1.;
	rpl::variable<QSize> _trackSize;
+	rpl::variable<VideoQuality> _requestedQuality;
	rpl::lifetime _trackLifetime;
};

View file

@ -46,6 +46,11 @@ using Row = MembersRow;
} // namespace } // namespace
struct Members::VideoTile {
std::unique_ptr<LargeVideo> video;
VideoEndpoint endpoint;
};
class Members::Controller final class Members::Controller final
: public PeerListController : public PeerListController
, public MembersRowDelegate , public MembersRowDelegate
@ -156,13 +161,12 @@ private:
not_null<const Data::GroupCallParticipant*> participant) const; not_null<const Data::GroupCallParticipant*> participant) const;
const std::string &computeCameraEndpoint( const std::string &computeCameraEndpoint(
not_null<const Data::GroupCallParticipant*> participant) const; not_null<const Data::GroupCallParticipant*> participant) const;
void setRowVideoEndpoint( //void setRowVideoEndpoint(
not_null<Row*> row, // not_null<Row*> row,
const std::string &endpoint); // const std::string &endpoint);
bool toggleRowVideo(not_null<PeerListRow*> row); bool toggleRowVideo(not_null<PeerListRow*> row);
void showRowMenu(not_null<PeerListRow*> row); void showRowMenu(not_null<PeerListRow*> row);
void generateNarrowShadow();
void appendInvitedUsers(); void appendInvitedUsers();
void scheduleRaisedHandStatusRemove(); void scheduleRaisedHandStatusRemove();
@ -277,24 +281,24 @@ Members::Controller::~Controller() {
base::take(_menu); base::take(_menu);
} }
void Members::Controller::setRowVideoEndpoint( //void Members::Controller::setRowVideoEndpoint(
not_null<Row*> row, // not_null<Row*> row,
const std::string &endpoint) { // const std::string &endpoint) {
const auto was = row->videoTrackEndpoint(); // const auto was = row->videoTrackEndpoint();
if (was != endpoint) { // if (was != endpoint) {
if (!was.empty()) { // if (!was.empty()) {
_videoEndpoints.remove(was); // _videoEndpoints.remove(was);
} // }
if (!endpoint.empty()) { // if (!endpoint.empty()) {
_videoEndpoints.emplace(endpoint, row); // _videoEndpoints.emplace(endpoint, row);
} // }
} // }
if (endpoint.empty()) { // if (endpoint.empty()) {
row->clearVideoTrack(); // row->clearVideoTrack();
} else { // } else {
_call->addVideoOutput(endpoint, row->createVideoTrack(endpoint)); // _call->addVideoOutput(endpoint, row->createVideoTrack(endpoint));
} // }
} //}
void Members::Controller::setupListChangeViewers() { void Members::Controller::setupListChangeViewers() {
_call->real( _call->real(
@ -302,13 +306,6 @@ void Members::Controller::setupListChangeViewers() {
subscribeToChanges(real); subscribeToChanges(real);
}, _lifetime); }, _lifetime);
//_call->stateValue(
//) | rpl::start_with_next([=] {
// if (const auto real = _call->lookupReal()) {
// updateRow(channel->session().user());
// }
//}, _lifetime);
_call->levelUpdates( _call->levelUpdates(
) | rpl::start_with_next([=](const LevelUpdate &update) { ) | rpl::start_with_next([=](const LevelUpdate &update) {
const auto i = _soundingRowBySsrc.find(update.ssrc); const auto i = _soundingRowBySsrc.find(update.ssrc);
@ -317,88 +314,88 @@ void Members::Controller::setupListChangeViewers() {
} }
}, _lifetime); }, _lifetime);
_call->videoEndpointLargeValue( //_call->videoEndpointLargeValue(
) | rpl::filter([=](const VideoEndpoint &largeEndpoint) { //) | rpl::filter([=](const VideoEndpoint &largeEndpoint) {
return (_largeEndpoint != largeEndpoint.endpoint); // return (_largeEndpoint != largeEndpoint.endpoint);
}) | rpl::start_with_next([=](const VideoEndpoint &largeEndpoint) { //}) | rpl::start_with_next([=](const VideoEndpoint &largeEndpoint) {
if (_call->streamsVideo(_largeEndpoint)) { // if (_call->streamsVideo(_largeEndpoint)) {
if (const auto participant = findParticipant(_largeEndpoint)) { // if (const auto participant = findParticipant(_largeEndpoint)) {
if (const auto row = findRow(participant->peer)) { // if (const auto row = findRow(participant->peer)) {
const auto current = row->videoTrackEndpoint(); // const auto current = row->videoTrackEndpoint();
if (current.empty() // if (current.empty()
|| (computeScreenEndpoint(participant) == _largeEndpoint // || (computeScreenEndpoint(participant) == _largeEndpoint
&& computeCameraEndpoint(participant) == current)) { // && computeCameraEndpoint(participant) == current)) {
setRowVideoEndpoint(row, _largeEndpoint); // setRowVideoEndpoint(row, _largeEndpoint);
} // }
} // }
} // }
} // }
_largeEndpoint = largeEndpoint.endpoint; // _largeEndpoint = largeEndpoint.endpoint;
if (const auto participant = findParticipant(_largeEndpoint)) { // if (const auto participant = findParticipant(_largeEndpoint)) {
if (const auto row = findRow(participant->peer)) { // if (const auto row = findRow(participant->peer)) {
if (row->videoTrackEndpoint() == _largeEndpoint) { // if (row->videoTrackEndpoint() == _largeEndpoint) {
const auto &camera = computeCameraEndpoint(participant); // const auto &camera = computeCameraEndpoint(participant);
const auto &screen = computeScreenEndpoint(participant); // const auto &screen = computeScreenEndpoint(participant);
if (_largeEndpoint == camera // if (_largeEndpoint == camera
&& _call->streamsVideo(screen)) { // && _call->streamsVideo(screen)) {
setRowVideoEndpoint(row, screen); // setRowVideoEndpoint(row, screen);
} else if (_largeEndpoint == screen // } else if (_largeEndpoint == screen
&& _call->streamsVideo(camera)) { // && _call->streamsVideo(camera)) {
setRowVideoEndpoint(row, camera); // setRowVideoEndpoint(row, camera);
} else { // } else {
setRowVideoEndpoint(row, std::string()); // setRowVideoEndpoint(row, std::string());
} // }
} // }
} // }
} // }
}, _lifetime); //}, _lifetime);
_call->streamsVideoUpdates( //_call->streamsVideoUpdates(
) | rpl::start_with_next([=](StreamsVideoUpdate update) { //) | rpl::start_with_next([=](StreamsVideoUpdate update) {
Assert(update.endpoint != _largeEndpoint); // Assert(update.endpoint != _largeEndpoint);
if (update.streams) { // if (update.streams) {
if (const auto participant = findParticipant(update.endpoint)) { // if (const auto participant = findParticipant(update.endpoint)) {
if (const auto row = findRow(participant->peer)) { // if (const auto row = findRow(participant->peer)) {
const auto &camera = computeCameraEndpoint(participant); // const auto &camera = computeCameraEndpoint(participant);
const auto &screen = computeScreenEndpoint(participant); // const auto &screen = computeScreenEndpoint(participant);
if (update.endpoint == camera // if (update.endpoint == camera
&& (!_call->streamsVideo(screen) // && (!_call->streamsVideo(screen)
|| _largeEndpoint == screen)) { // || _largeEndpoint == screen)) {
setRowVideoEndpoint(row, camera); // setRowVideoEndpoint(row, camera);
} else if (update.endpoint == screen // } else if (update.endpoint == screen
&& (_largeEndpoint != screen)) { // && (_largeEndpoint != screen)) {
setRowVideoEndpoint(row, screen); // setRowVideoEndpoint(row, screen);
} // }
} // }
} // }
} else { // } else {
const auto i = _videoEndpoints.find(update.endpoint); // const auto i = _videoEndpoints.find(update.endpoint);
if (i != end(_videoEndpoints)) { // if (i != end(_videoEndpoints)) {
const auto row = i->second; // const auto row = i->second;
const auto real = _call->lookupReal(); // const auto real = _call->lookupReal();
Assert(real != nullptr); // Assert(real != nullptr);
const auto participant = real->participantByPeer( // const auto participant = real->participantByPeer(
row->peer()); // row->peer());
if (!participant) { // if (!participant) {
setRowVideoEndpoint(row, std::string()); // setRowVideoEndpoint(row, std::string());
} else { // } else {
const auto &camera = computeCameraEndpoint(participant); // const auto &camera = computeCameraEndpoint(participant);
const auto &screen = computeScreenEndpoint(participant); // const auto &screen = computeScreenEndpoint(participant);
if (update.endpoint == camera // if (update.endpoint == camera
&& (_largeEndpoint != screen) // && (_largeEndpoint != screen)
&& _call->streamsVideo(screen)) { // && _call->streamsVideo(screen)) {
setRowVideoEndpoint(row, screen); // setRowVideoEndpoint(row, screen);
} else if (update.endpoint == screen // } else if (update.endpoint == screen
&& (_largeEndpoint != camera) // && (_largeEndpoint != camera)
&& _call->streamsVideo(camera)) { // && _call->streamsVideo(camera)) {
setRowVideoEndpoint(row, camera); // setRowVideoEndpoint(row, camera);
} else { // } else {
setRowVideoEndpoint(row, std::string()); // setRowVideoEndpoint(row, std::string());
} // }
} // }
} // }
} // }
}, _lifetime); //}, _lifetime);
_call->rejoinEvents( _call->rejoinEvents(
) | rpl::start_with_next([=](const Group::RejoinEvent &event) { ) | rpl::start_with_next([=](const Group::RejoinEvent &event) {
@ -996,18 +993,18 @@ void Members::Controller::rowPaintNarrowBorder(
int x, int x,
int y, int y,
not_null<Row*> row) { not_null<Row*> row) {
if (_call->videoEndpointLarge().peer != row->peer().get()) { //if (_call->videoEndpointLarge().peer != row->peer().get()) {
return; // return;
} //}
auto hq = PainterHighQualityEnabler(p); //auto hq = PainterHighQualityEnabler(p);
p.setBrush(Qt::NoBrush); //p.setBrush(Qt::NoBrush);
auto pen = st::groupCallMemberActiveIcon->p; //auto pen = st::groupCallMemberActiveIcon->p;
pen.setWidthF(st::groupCallNarrowOutline); //pen.setWidthF(st::groupCallNarrowOutline);
p.setPen(pen); //p.setPen(pen);
p.drawRoundedRect( //p.drawRoundedRect(
QRect{ QPoint(x, y), st::groupCallNarrowSize }, // QRect{ QPoint(x, y), st::groupCallNarrowSize },
st::roundRadiusLarge, // st::roundRadiusLarge,
st::roundRadiusLarge); // st::roundRadiusLarge);
} }
void Members::Controller::rowPaintNarrowShadow( void Members::Controller::rowPaintNarrowShadow(
@ -1096,45 +1093,46 @@ void Members::Controller::showRowMenu(not_null<PeerListRow*> row) {
} }
bool Members::Controller::toggleRowVideo(not_null<PeerListRow*> row) { bool Members::Controller::toggleRowVideo(not_null<PeerListRow*> row) {
const auto real = _call->lookupReal(); return false;
if (!real) { //const auto real = _call->lookupReal();
return false; //if (!real) {
} // return false;
const auto participantPeer = row->peer(); //}
const auto isMe = (participantPeer == _call->joinAs()); //const auto participantPeer = row->peer();
const auto participant = real->participantByPeer(participantPeer); //const auto isMe = (participantPeer == _call->joinAs());
if (!participant) { //const auto participant = real->participantByPeer(participantPeer);
return false; //if (!participant) {
} // return false;
const auto params = participant->videoParams.get(); //}
const auto empty = std::string(); //const auto params = participant->videoParams.get();
const auto &camera = isMe //const auto empty = std::string();
? _call->cameraSharingEndpoint() //const auto &camera = isMe
: (params && _call->streamsVideo(params->camera.endpoint)) // ? _call->cameraSharingEndpoint()
? params->camera.endpoint // : (params && _call->streamsVideo(params->camera.endpoint))
: empty; // ? params->camera.endpoint
const auto &screen = isMe // : empty;
? _call->screenSharingEndpoint() //const auto &screen = isMe
: (params && _call->streamsVideo(params->screen.endpoint)) // ? _call->screenSharingEndpoint()
? params->screen.endpoint // : (params && _call->streamsVideo(params->screen.endpoint))
: empty; // ? params->screen.endpoint
const auto &large = _call->videoEndpointLarge().endpoint; // : empty;
const auto show = [&] { //const auto &large = _call->videoEndpointLarge().endpoint;
if (!screen.empty() && large != screen) { //const auto show = [&] {
return screen; // if (!screen.empty() && large != screen) {
} else if (!camera.empty() && large != camera) { // return screen;
return camera; // } else if (!camera.empty() && large != camera) {
} // return camera;
return std::string(); // }
}(); // return std::string();
if (show.empty()) { //}();
return false; //if (show.empty()) {
} else if (_call->videoEndpointPinned()) { // return false;
_call->pinVideoEndpoint({ participantPeer, show }); //} else if (_call->videoEndpointPinned()) {
} else { // _call->pinVideoEndpoint({ participantPeer, show });
_call->showVideoEndpointLarge({ participantPeer, show }); //} else {
} // _call->showVideoEndpointLarge({ participantPeer, show });
return true; //}
//return true;
} }
void Members::Controller::rowActionClicked( void Members::Controller::rowActionClicked(
@ -1223,13 +1221,11 @@ base::unique_qptr<Ui::PopupMenu> Members::Controller::createRowContextMenu(
if (const auto real = _call->lookupReal()) { if (const auto real = _call->lookupReal()) {
const auto participant = real->participantByPeer(participantPeer); const auto participant = real->participantByPeer(participantPeer);
if (participant) { if (participant) {
const auto pinnedEndpoint = _call->videoEndpointPinned() const auto &pinned = _call->videoEndpointPinned();
? _call->videoEndpointLarge().endpoint
: std::string();
const auto &camera = computeCameraEndpoint(participant); const auto &camera = computeCameraEndpoint(participant);
const auto &screen = computeScreenEndpoint(participant); const auto &screen = computeScreenEndpoint(participant);
if (_call->streamsVideo(camera)) { if (!camera.empty()) {
if (pinnedEndpoint == camera) { if (pinned.id == camera) {
result->addAction( result->addAction(
tr::lng_group_call_context_unpin_camera(tr::now), tr::lng_group_call_context_unpin_camera(tr::now),
[=] { _call->pinVideoEndpoint(VideoEndpoint()); }); [=] { _call->pinVideoEndpoint(VideoEndpoint()); });
@ -1241,8 +1237,8 @@ base::unique_qptr<Ui::PopupMenu> Members::Controller::createRowContextMenu(
camera }); }); camera }); });
} }
} }
if (_call->streamsVideo(screen)) { if (!screen.empty()) {
if (pinnedEndpoint == screen) { if (pinned.id == screen) {
result->addAction( result->addAction(
tr::lng_group_call_context_unpin_screen(tr::now), tr::lng_group_call_context_unpin_screen(tr::now),
[=] { _call->pinVideoEndpoint(VideoEndpoint()); }); [=] { _call->pinVideoEndpoint(VideoEndpoint()); });
@ -1445,14 +1441,13 @@ std::unique_ptr<Row> Members::Controller::createRow(
const Data::GroupCallParticipant &participant) { const Data::GroupCallParticipant &participant) {
auto result = std::make_unique<Row>(this, participant.peer); auto result = std::make_unique<Row>(this, participant.peer);
updateRow(result.get(), &participant); updateRow(result.get(), &participant);
//const auto &camera = computeCameraEndpoint(&participant);
const auto &camera = computeCameraEndpoint(&participant); //const auto &screen = computeScreenEndpoint(&participant);
const auto &screen = computeScreenEndpoint(&participant); //if (!screen.empty() && _largeEndpoint != screen) {
if (!screen.empty() && _largeEndpoint != screen) { // setRowVideoEndpoint(result.get(), screen);
setRowVideoEndpoint(result.get(), screen); //} else if (!camera.empty() && _largeEndpoint != camera) {
} else if (!camera.empty() && _largeEndpoint != camera) { // setRowVideoEndpoint(result.get(), camera);
setRowVideoEndpoint(result.get(), camera); //}
}
return result; return result;
} }
@ -1612,7 +1607,7 @@ void Members::setupAddMember(not_null<GroupCall*> call) {
} }
rpl::producer<> Members::enlargeVideo() const { rpl::producer<> Members::enlargeVideo() const {
return _pinnedVideo->clicks(); return _enlargeVideoClicks.events();
} }
Row *Members::lookupRow(not_null<PeerData*> peer) const { Row *Members::lookupRow(not_null<PeerData*> peer) const {
@ -1624,7 +1619,9 @@ void Members::setMode(PanelMode mode) {
return; return;
} }
_mode = mode; _mode = mode;
_pinnedVideo->setVisible(mode == PanelMode::Default); for (const auto &tile : _videoTiles) {
tile.video->setVisible(mode == PanelMode::Default);
}
_list->setMode((mode == PanelMode::Wide) _list->setMode((mode == PanelMode::Wide)
? PeerListContent::Mode::Custom ? PeerListContent::Mode::Custom
: PeerListContent::Mode::Default); : PeerListContent::Mode::Default);
@ -1655,29 +1652,121 @@ void Members::setupList() {
updateControlsGeometry(); updateControlsGeometry();
} }
void Members::refreshTilesGeometry() {
const auto width = _layout->width();
if (_videoTiles.empty()
|| !width
|| _mode.current() == PanelMode::Wide) {
_pinnedVideoWrap->resize(width, 0);
return;
}
auto sizes = base::flat_map<not_null<LargeVideo*>, QSize>();
sizes.reserve(_videoTiles.size());
for (const auto &tile : _videoTiles) {
const auto video = tile.video.get();
const auto size = video->trackSize();
if (size.isEmpty()) {
video->setGeometry(0, 0, width, 0);
} else {
sizes.emplace(video, size);
}
}
if (sizes.empty()) {
_pinnedVideoWrap->resize(width, 0);
return;
} else if (sizes.size() == 1) {
const auto size = sizes.front().second;
const auto heightMin = (width * 9) / 16;
const auto heightMax = (width * 3) / 4;
const auto scaled = size.scaled(
QSize(width, heightMax),
Qt::KeepAspectRatio);
const auto height = std::max(scaled.height(), heightMin);
sizes.front().first->setGeometry(0, 0, width, height);
_pinnedVideoWrap->resize(width, height);
return;
}
const auto square = (width - st::groupCallVideoSmallSkip) / 2;
const auto skip = (width - 2 * square);
const auto put = [&](not_null<LargeVideo*> video, int column, int row) {
video->setGeometry(
(column == 2) ? 0 : column ? (width - square) : 0,
row * (square + skip),
(column == 2) ? width : square,
square);
};
const auto rows = (sizes.size() + 1) / 2;
if (sizes.size() == 3) {
put(sizes.front().first, 2, 0);
put((sizes.begin() + 1)->first, 0, 1);
put((sizes.begin() + 2)->first, 1, 1);
} else {
auto row = 0;
auto column = 0;
for (const auto &[video, endpoint] : sizes) {
put(video, column, row);
if (column) {
++row;
column = (row + 1 == rows && sizes.size() % 2) ? 2 : 0;
} else {
column = 1;
}
}
}
_pinnedVideoWrap->resize(width, rows * (square + skip) - skip);
}
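The arithmetic above is the core of the tiled layout: with more than one visible track every tile becomes a square of (width - groupCallVideoSmallSkip) / 2, tiles fill two columns per row, and an odd trailing tile (or the first of three) spans the full width, while a single tile keeps the old 16:9-to-4:3 band. A self-contained sketch of the grid branch, with a hypothetical TileGrid helper standing in for the member function and skip assumed to be the new 4px value:

#include <cstdio>
#include <vector>

struct Rect { int x = 0, y = 0, w = 0, h = 0; };

// Rough re-statement of the multi-tile branch of refreshTilesGeometry above;
// column 2 means "span the full row", as in the diff.
std::vector<Rect> TileGrid(int width, int count, int skip) {
	const auto square = (width - skip) / 2;
	const auto rows = (count + 1) / 2;
	auto result = std::vector<Rect>();
	const auto put = [&](int column, int row) {
		result.push_back({
			(column == 1) ? (width - square) : 0,
			row * (square + skip),
			(column == 2) ? width : square,
			square });
	};
	if (count == 3) {
		put(2, 0); // one full-width tile on top...
		put(0, 1); // ...two squares below it
		put(1, 1);
	} else {
		auto row = 0;
		auto column = 0;
		for (auto i = 0; i != count; ++i) {
			put(column, row);
			if (column) {
				++row;
				// A trailing odd tile takes the whole last row.
				column = (row + 1 == rows && count % 2) ? 2 : 0;
			} else {
				column = 1;
			}
		}
	}
	return result; // wrap height would be rows * (square + skip) - skip
}

int main() {
	// Width 360 and skip 4 give 178px squares; five tiles make three rows,
	// so the wrap height would be 3 * 182 - 4 = 542.
	for (const auto &rect : TileGrid(360, 5, 4)) {
		std::printf("%d,%d %dx%d\n", rect.x, rect.y, rect.w, rect.h);
	}
}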
void Members::setupPinnedVideo() { void Members::setupPinnedVideo() {
using namespace rpl::mappers; using namespace rpl::mappers;
_pinnedVideo = std::make_unique<LargeVideo>( const auto setupTile = [=](
_pinnedVideoWrap.get(), const VideoEndpoint &endpoint,
st::groupCallLargeVideoNarrow, const GroupCall::VideoTrack &track) {
true, const auto row = lookupRow(track.peer);
_call->videoLargeTrackValue( Assert(row != nullptr);
) | rpl::map([=](GroupCall::LargeTrack track) { auto video = std::make_unique<LargeVideo>(
const auto row = track ? lookupRow(track.peer) : nullptr; _pinnedVideoWrap.get(),
Assert(!track || row != nullptr); st::groupCallLargeVideoNarrow,
return LargeVideoTrack{ row ? track.track : nullptr, row }; (_mode.current() == PanelMode::Default),
}), rpl::single(LargeVideoTrack{ track.track.get(), row }),
_call->videoEndpointPinnedValue()); _call->videoEndpointPinnedValue() | rpl::map(_1 == endpoint));
_pinnedVideo->pinToggled( video->pinToggled(
) | rpl::start_with_next([=](bool pinned) { ) | rpl::start_with_next([=](bool pinned) {
if (!pinned) { _call->pinVideoEndpoint(pinned ? endpoint : VideoEndpoint{});
_call->pinVideoEndpoint(VideoEndpoint{}); }, video->lifetime());
} else if (const auto &large = _call->videoEndpointLarge()) {
_call->pinVideoEndpoint(large); video->requestedQuality(
) | rpl::start_with_next([=](VideoQuality quality) {
_call->requestVideoQuality(endpoint, quality);
}, video->lifetime());
video->trackSizeValue(
) | rpl::start_with_next([=] {
refreshTilesGeometry();
}, video->lifetime());
return VideoTile{
.video = std::move(video),
.endpoint = endpoint,
};
};
for (const auto &[endpoint, track] : _call->activeVideoTracks()) {
_videoTiles.push_back(setupTile(endpoint, track));
}
_call->videoStreamActiveUpdates(
) | rpl::start_with_next([=](const VideoEndpoint &endpoint) {
const auto &tracks = _call->activeVideoTracks();
const auto i = tracks.find(endpoint);
if (i != end(tracks)) {
_videoTiles.push_back(setupTile(endpoint, i->second));
} else {
_videoTiles.erase(
ranges::remove(_videoTiles, endpoint, &VideoTile::endpoint),
end(_videoTiles));
refreshTilesGeometry();
} }
}, _pinnedVideo->lifetime()); }, _pinnedVideoWrap->lifetime());
// New video was pinned or mode changed. // New video was pinned or mode changed.
rpl::merge( rpl::merge(
@ -1689,23 +1778,9 @@ void Members::setupPinnedVideo() {
_scroll->scrollToY(0); _scroll->scrollToY(0);
}, _scroll->lifetime()); }, _scroll->lifetime());
rpl::combine( _layout->widthValue() | rpl::start_with_next([=] {
_layout->widthValue(), refreshTilesGeometry();
_pinnedVideo->trackSizeValue() }, _pinnedVideoWrap->lifetime());
) | rpl::start_with_next([=](int width, QSize size) {
if (size.isEmpty() || !width) {
_pinnedVideoWrap->resize(width, 0);
return;
}
const auto heightMin = (width * 9) / 16;
const auto heightMax = (width * 3) / 4;
const auto scaled = size.scaled(
QSize(width, heightMax),
Qt::KeepAspectRatio);
const auto height = std::max(scaled.height(), heightMin);
_pinnedVideo->setGeometry(0, 0, width, height);
_pinnedVideoWrap->resize(width, height);
}, _pinnedVideo->lifetime());
} }
void Members::resizeEvent(QResizeEvent *e) { void Members::resizeEvent(QResizeEvent *e) {

View file

@@ -61,6 +61,7 @@ public:
private:
	class Controller;
+	struct VideoTile;
	using ListWidget = PeerListContent;

	void resizeEvent(QResizeEvent *e) override;
@@ -87,6 +88,7 @@ private:
	void setupFakeRoundCorners();

	void updateControlsGeometry();
+	void refreshTilesGeometry();

	const not_null<GroupCall*> _call;
	rpl::variable<PanelMode> _mode = PanelMode();
@@ -94,7 +96,8 @@ private:
	std::unique_ptr<Controller> _listController;
	not_null<Ui::VerticalLayout*> _layout;
	const not_null<Ui::RpWidget*> _pinnedVideoWrap;
-	std::unique_ptr<LargeVideo> _pinnedVideo;
+	std::vector<VideoTile> _videoTiles;
+	rpl::event_stream<> _enlargeVideoClicks;
	rpl::variable<Ui::RpWidget*> _addMemberButton = nullptr;
	ListWidget *_list = nullptr;
	rpl::event_stream<> _addMemberRequests;

View file

@ -377,38 +377,39 @@ bool MembersRow::paintVideo(
int sizew,
int sizeh,
PanelMode mode) {
return false;
//if (!_videoTrackShown) {
//	return false;
//}
//const auto guard = gsl::finally([&] {
//	_videoTrackShown->markFrameShown();
//});
//const auto videoSize = _videoTrackShown->frameSize();
//if (videoSize.isEmpty()
//	|| _videoTrackShown->state() != Webrtc::VideoState::Active) {
//	return false;
//}
//const auto videow = videoSize.width();
//const auto videoh = videoSize.height();
//const auto resize = (videow * sizeh > videoh * sizew)
//	? QSize(videow * sizeh / videoh, sizeh)
//	: QSize(sizew, videoh * sizew / videow);
//const auto request = Webrtc::FrameRequest{
//	.resize = resize * cIntRetinaFactor(),
//	.outer = QSize(sizew, sizeh) * cIntRetinaFactor(),
//};
//const auto frame = _videoTrackShown->frame(request);
//auto copy = frame; // #TODO calls optimize.
//copy.detach();
//if (mode == PanelMode::Default) {
//	Images::prepareCircle(copy);
//} else {
//	Images::prepareRound(copy, ImageRoundRadius::Large);
//}
//p.drawImage(
//	QRect(QPoint(x, y), copy.size() / cIntRetinaFactor()),
//	copy);
//return true;
}
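For reference, the (now disabled) row painter above picks the smallest frame size that still covers the sizew by sizeh cell and lets the outer rect crop the overflow. A minimal standalone sketch of that "cover" arithmetic, with plain ints instead of QSize; CoverSize is an illustrative name, not tdesktop API:

#include <cassert>
#include <utility>

// Scale (videow, videoh) to the smallest size that fully covers (sizew, sizeh)
// while keeping the aspect ratio, mirroring the `resize` choice above.
std::pair<int, int> CoverSize(int videow, int videoh, int sizew, int sizeh) {
	return (videow * sizeh > videoh * sizew)
		? std::pair{ videow * sizeh / videoh, sizeh } // frame wider: match height
		: std::pair{ sizew, videoh * sizew / videow }; // frame taller: match width
}

int main() {
	// A 1280x720 frame drawn in a 48x48 cell becomes 85x48; the horizontal
	// overflow is what the outer rect crops away.
	const auto [w, h] = CoverSize(1280, 720, 48, 48);
	assert(w == 85 && h == 48);
	return 0;
}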
std::tuple<int, int, int> MembersRow::UserpicInNarrowMode(
@ -860,40 +861,40 @@ void MembersRow::refreshStatus() {
_speaking);
}
//not_null<Webrtc::VideoTrack*> MembersRow::createVideoTrack(
//	const std::string &endpoint) {
//	_videoTrackShown = nullptr;
//	_videoTrackEndpoint = endpoint;
//	_videoTrack = std::make_unique<Webrtc::VideoTrack>(
//		Webrtc::VideoState::Active);
//	setVideoTrack(_videoTrack.get());
//	return _videoTrack.get();
//}
//
//const std::string &MembersRow::videoTrackEndpoint() const {
//	return _videoTrackEndpoint;
//}
//
//void MembersRow::clearVideoTrack() {
//	_videoTrackLifetime.destroy();
//	_videoTrackEndpoint = std::string();
//	_videoTrackShown = nullptr;
//	_videoTrack = nullptr;
//	_delegate->rowUpdateRow(this);
//}
//
//void MembersRow::setVideoTrack(not_null<Webrtc::VideoTrack*> track) {
//	_videoTrackLifetime.destroy();
//	_videoTrackShown = track;
//	_videoTrackShown->renderNextFrame(
//	) | rpl::start_with_next([=] {
//		_delegate->rowUpdateRow(this);
//		if (_videoTrackShown->frameSize().isEmpty()) {
//			_videoTrackShown->markFrameShown();
//		}
//	}, _videoTrackLifetime);
//	_delegate->rowUpdateRow(this);
//}

void MembersRow::addActionRipple(QPoint point, Fn<void()> updateCallback) {
if (!_actionRipple) {

View file

@ -13,9 +13,9 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
class PeerData;
class Painter;
//namespace Webrtc {
//class VideoTrack;
//} // namespace Webrtc
namespace Data {
struct GroupCallParticipant;
@ -115,11 +115,11 @@ public:
return _raisedHandRating;
}
//[[nodiscard]] not_null<Webrtc::VideoTrack*> createVideoTrack(
//	const std::string &endpoint);
//void clearVideoTrack();
//[[nodiscard]] const std::string &videoTrackEndpoint() const;
//void setVideoTrack(not_null<Webrtc::VideoTrack*> track);
void addActionRipple(QPoint point, Fn<void()> updateCallback) override;
void stopLastActionRipple() override;
@ -236,10 +236,10 @@ private:
std::unique_ptr<Ui::RippleAnimation> _actionRipple;
std::unique_ptr<BlobsAnimation> _blobsAnimation;
std::unique_ptr<StatusIcon> _statusIcon;
//std::unique_ptr<Webrtc::VideoTrack> _videoTrack;
//Webrtc::VideoTrack *_videoTrackShown = nullptr;
//std::string _videoTrackEndpoint;
//rpl::lifetime _videoTrackLifetime; // #TODO calls move to unique_ptr.
Ui::Animations::Simple _speakingAnimation; // For gray-red/green icon.
Ui::Animations::Simple _mutedAnimation; // For gray/red icon.
Ui::Animations::Simple _activeAnimation; // For icon cross animation.

View file

@ -379,6 +379,11 @@ std::unique_ptr<PeerListRow> InviteContactsController::createRow(
} // namespace
struct Panel::VideoTile {
std::unique_ptr<LargeVideo> video;
VideoEndpoint endpoint;
};
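A tile, as introduced here, is just the owning LargeVideo widget paired with the endpoint it renders, which is what lets the panel later address a tile by endpoint alone (to re-pin it or drop it when its stream ends). A rough illustration of that pairing and lookup, with std::string standing in for VideoEndpoint and a placeholder widget type; findTile is an illustrative name:

#include <cassert>
#include <memory>
#include <string>
#include <vector>

struct FakeVideoWidget {}; // placeholder for the LargeVideo widget

struct Tile {
	std::unique_ptr<FakeVideoWidget> video;
	std::string endpoint; // stand-in for VideoEndpoint
};

// Find the tile currently showing `endpoint`, or nullptr if none does.
Tile *findTile(std::vector<Tile> &tiles, const std::string &endpoint) {
	for (auto &tile : tiles) {
		if (tile.endpoint == endpoint) {
			return &tile;
		}
	}
	return nullptr;
}

int main() {
	auto tiles = std::vector<Tile>();
	auto tile = Tile{ std::make_unique<FakeVideoWidget>(), "camera:alice" };
	tiles.push_back(std::move(tile));
	assert(findTile(tiles, "camera:alice") != nullptr);
	assert(findTile(tiles, "screen:bob") == nullptr);
	return 0;
}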
Panel::Panel(not_null<GroupCall*> call)
: _call(call)
, _peer(call->peer())
@ -978,7 +983,7 @@ void Panel::setupMembers() {
}, _callLifetime);
_call->videoEndpointPinnedValue(
-) | rpl::filter([=](bool pinned) {
) | rpl::filter([=](const VideoEndpoint &pinned) {
return pinned && (_mode == PanelMode::Default);
}) | rpl::start_with_next([=] {
enlargeVideo();
@ -1072,50 +1077,190 @@ void Panel::raiseControls() {
_mute->raise();
}

-void Panel::setupPinnedVideo() {
-auto track = _call->videoLargeTrackValue(
-) | rpl::map([=](GroupCall::LargeTrack track) {
-const auto row = track ? _members->lookupRow(track.peer) : nullptr;
-Assert(!track || row != nullptr);
-return LargeVideoTrack{
-row ? track.track : nullptr,
-row
-};
-});
-const auto visible = (_mode == PanelMode::Wide);
-_pinnedVideo = std::make_unique<LargeVideo>(
-widget(),
-st::groupCallLargeVideoWide,
-visible,
-std::move(track),
-_call->videoEndpointPinnedValue());
-_pinnedVideo->minimizeClicks(
-) | rpl::start_with_next([=] {
-minimizeVideo();
-}, _pinnedVideo->lifetime());
-_pinnedVideo->pinToggled(
-) | rpl::start_with_next([=](bool pinned) {
-if (!pinned) {
-_call->pinVideoEndpoint(VideoEndpoint{});
-} else if (const auto &large = _call->videoEndpointLarge()) {
-_call->pinVideoEndpoint(large);
-}
-}, _pinnedVideo->lifetime());
-_pinnedVideo->controlsShown(
-) | rpl::filter([=](float64 shown) {
-return (_pinnedVideoControlsShown != shown);
-}) | rpl::start_with_next([=](float64 shown) {
-const auto hiding = (shown <= _pinnedVideoControlsShown);
-_pinnedVideoControlsShown = shown;
-if (_mode == PanelMode::Wide) {
-if (hiding && _trackControlsLifetime) {
-_trackControlsLifetime.destroy();
-} else if (!hiding && !_trackControlsLifetime) {
-trackControls();
-}
-updateButtonsGeometry();
-}
-}, _pinnedVideo->lifetime());

void Panel::refreshTilesGeometry() {
const auto outer = _pinnedVideoWrap->size();
if (_videoTiles.empty()
|| outer.isEmpty()
|| _mode == PanelMode::Default) {
return;
}
struct Geometry {
QSize size;
QRect columns;
QRect rows;
};
auto sizes = base::flat_map<not_null<LargeVideo*>, Geometry>();
sizes.reserve(_videoTiles.size());
for (const auto &tile : _videoTiles) {
const auto video = tile.video.get();
const auto size = video->trackSize();
if (size.isEmpty()) {
video->setGeometry(0, 0, outer.width(), 0);
} else {
sizes.emplace(video, Geometry{ size });
}
}
if (sizes.empty()) {
return;
} else if (sizes.size() == 1) {
sizes.front().first->setGeometry(0, 0, outer.width(), outer.height());
return;
}

auto columnsBlack = uint64();
auto rowsBlack = uint64();
const auto count = int(sizes.size());
const auto skip = st::groupCallVideoLargeSkip;
const auto slices = int(std::ceil(std::sqrt(float64(count))));
{
auto index = 0;
const auto columns = slices;
const auto sizew = (outer.width() + skip) / float64(columns);
for (auto column = 0; column != columns; ++column) {
const auto left = int(std::round(column * sizew));
const auto width = int(std::round(column * sizew + sizew - skip))
- left;
const auto rows = int(std::round((count - index)
/ float64(columns - column)));
const auto sizeh = (outer.height() + skip) / float64(rows);
for (auto row = 0; row != rows; ++row) {
const auto top = int(std::round(row * sizeh));
const auto height = int(std::round(
row * sizeh + sizeh - skip)) - top;
auto &geometry = (sizes.begin() + index)->second;
geometry.columns = {
left,
top,
width,
height };
const auto scaled = geometry.size.scaled(
width,
height,
Qt::KeepAspectRatio);
columnsBlack += (scaled.width() < width)
? (width - scaled.width()) * height
: (height - scaled.height()) * width;
++index;
}
}
}
{
auto index = 0;
const auto rows = slices;
const auto sizeh = (outer.height() + skip) / float64(rows);
for (auto row = 0; row != rows; ++row) {
const auto top = int(std::round(row * sizeh));
const auto height = int(std::round(row * sizeh + sizeh - skip))
- top;
const auto columns = int(std::round((count - index)
/ float64(rows - row)));
const auto sizew = (outer.width() + skip) / float64(columns);
for (auto column = 0; column != columns; ++column) {
const auto left = int(std::round(column * sizew));
const auto width = int(std::round(
column * sizew + sizew - skip)) - left;
auto &geometry = (sizes.begin() + index)->second;
geometry.rows = {
left,
top,
width,
height };
const auto scaled = geometry.size.scaled(
width,
height,
Qt::KeepAspectRatio);
rowsBlack += (scaled.width() < width)
? (width - scaled.width()) * height
: (height - scaled.height()) * width;
++index;
}
}
}
for (const auto &[video, geometry] : sizes) {
const auto &rect = (columnsBlack < rowsBlack)
? geometry.columns
: geometry.rows;
video->setGeometry(rect.x(), rect.y(), rect.width(), rect.height());
}
}
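In short, the layout pass above takes slices = ceil(sqrt(count)), builds one candidate grid with that many columns (splitting the still-unplaced tiles roughly evenly over the remaining columns) and a transposed candidate with that many rows, sums the letterboxed ("black") area each candidate leaves after aspect-fitting every frame into its cell, and applies whichever candidate wastes less. A compressed, Qt-free sketch of the column candidate and the black-area measure; columnLayout and blackArea are illustrative names, and the skips and widget calls are omitted:

#include <cmath>
#include <cstdio>
#include <vector>

struct Rect { int left = 0; int top = 0; int width = 0; int height = 0; };

// One candidate layout: fix `slices` columns, then give each column a roughly
// equal share of the tiles that are still unplaced. The transposed candidate
// (fixed rows) is the same code with width and height swapped.
std::vector<Rect> columnLayout(int count, int outerw, int outerh) {
	const auto slices = int(std::ceil(std::sqrt(double(count))));
	const auto sizew = outerw / double(slices);
	auto result = std::vector<Rect>();
	auto index = 0;
	for (auto column = 0; column != slices; ++column) {
		const auto rows = int(std::round(
			(count - index) / double(slices - column)));
		const auto sizeh = outerh / double(rows);
		for (auto row = 0; row != rows; ++row, ++index) {
			result.push_back({
				int(std::round(column * sizew)),
				int(std::round(row * sizeh)),
				int(std::round(sizew)),
				int(std::round(sizeh)) });
		}
	}
	return result;
}

// Cell area left unused after aspect-fitting a framew:frameh frame into it,
// the quantity accumulated into columnsBlack / rowsBlack above.
long long blackArea(Rect cell, int framew, int frameh) {
	const auto fitByWidth = (framew * cell.height >= frameh * cell.width);
	return fitByWidth
		? 1LL * (cell.height - frameh * cell.width / framew) * cell.width
		: 1LL * (cell.width - framew * cell.height / frameh) * cell.height;
}

int main() {
	// Five 16:9 tracks in a 1280x720 area: three columns holding 2 + 2 + 1 tiles.
	const auto cells = columnLayout(5, 1280, 720);
	auto black = 0LL;
	for (const auto &cell : cells) {
		black += blackArea(cell, 16, 9);
	}
	std::printf("%zu cells, %lld px letterboxed\n", cells.size(), black);
	return 0;
}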
void Panel::setupPinnedVideo() {
using namespace rpl::mappers;
_pinnedVideoWrap = std::make_unique<Ui::RpWidget>(widget());
const auto setupTile = [=](
const VideoEndpoint &endpoint,
const GroupCall::VideoTrack &track) {
const auto row = _members->lookupRow(track.peer);
Assert(row != nullptr);
auto video = std::make_unique<LargeVideo>(
_pinnedVideoWrap.get(),
st::groupCallLargeVideoNarrow,
(_mode == PanelMode::Wide),
rpl::single(LargeVideoTrack{ track.track.get(), row }),
_call->videoEndpointPinnedValue() | rpl::map(_1 == endpoint));
video->pinToggled(
) | rpl::start_with_next([=](bool pinned) {
_call->pinVideoEndpoint(pinned ? endpoint : VideoEndpoint{});
}, video->lifetime());
video->requestedQuality(
) | rpl::start_with_next([=](VideoQuality quality) {
_call->requestVideoQuality(endpoint, quality);
}, video->lifetime());
video->minimizeClicks(
) | rpl::start_with_next([=] {
minimizeVideo();
}, video->lifetime());
video->trackSizeValue(
) | rpl::start_with_next([=] {
refreshTilesGeometry();
}, video->lifetime());
video->controlsShown(
) | rpl::filter([=](float64 shown) {
return (_pinnedVideoControlsShown != shown);
}) | rpl::start_with_next([=](float64 shown) {
const auto hiding = (shown <= _pinnedVideoControlsShown);
_pinnedVideoControlsShown = shown;
if (_mode == PanelMode::Wide) {
if (hiding && _trackControlsLifetime) {
_trackControlsLifetime.destroy();
} else if (!hiding && !_trackControlsLifetime) {
trackControls();
}
updateButtonsGeometry();
}
}, video->lifetime());
return VideoTile{
.video = std::move(video),
.endpoint = endpoint,
};
};
for (const auto &[endpoint, track] : _call->activeVideoTracks()) {
_videoTiles.push_back(setupTile(endpoint, track));
}
_call->videoStreamActiveUpdates(
) | rpl::start_with_next([=](const VideoEndpoint &endpoint) {
const auto &tracks = _call->activeVideoTracks();
const auto i = tracks.find(endpoint);
if (i != end(tracks)) {
_videoTiles.push_back(setupTile(endpoint, i->second));
} else {
_videoTiles.erase(
ranges::remove(_videoTiles, endpoint, &VideoTile::endpoint),
end(_videoTiles));
refreshTilesGeometry();
}
}, _pinnedVideoWrap->lifetime());
_pinnedVideoWrap->sizeValue() | rpl::start_with_next([=] {
refreshTilesGeometry();
}, _pinnedVideoWrap->lifetime());
raiseControls();
}
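The handler wired up just above keeps the tile list in sync with _call->activeVideoTracks(): when an endpoint is found there a tile is created for it, and when it is gone the matching tile is erased. A standalone sketch of that reconciliation step, with std::string and std::map standing in for VideoEndpoint and the tracks map; applyStreamUpdate is an illustrative name:

#include <algorithm>
#include <cassert>
#include <map>
#include <string>
#include <vector>

struct Track {}; // placeholder for the per-endpoint track data
struct Tile { std::string endpoint; }; // the real tile also owns a LargeVideo

// Mirror the videoStreamActiveUpdates handler above: add a tile if the
// endpoint is now active, otherwise drop whatever tile was showing it.
void applyStreamUpdate(
		std::vector<Tile> &tiles,
		const std::map<std::string, Track> &active,
		const std::string &endpoint) {
	if (active.count(endpoint)) {
		tiles.push_back({ endpoint });
	} else {
		tiles.erase(
			std::remove_if(
				begin(tiles),
				end(tiles),
				[&](const Tile &tile) { return tile.endpoint == endpoint; }),
			end(tiles));
	}
}

int main() {
	auto active = std::map<std::string, Track>{ { "camera:alice", {} } };
	auto tiles = std::vector<Tile>();
	applyStreamUpdate(tiles, active, "camera:alice"); // stream appeared
	assert(tiles.size() == 1);
	active.erase("camera:alice");
	applyStreamUpdate(tiles, active, "camera:alice"); // stream went away
	assert(tiles.empty());
	return 0;
}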
@ -1626,8 +1771,11 @@ bool Panel::updateMode() {
if (_members) {
_members->setMode(mode);
}
-if (_pinnedVideo) {
-_pinnedVideo->setVisible(mode == PanelMode::Wide);
if (_pinnedVideoWrap) {
_pinnedVideoWrap->setVisible(mode == PanelMode::Wide);
for (const auto &tile : _videoTiles) {
tile.video->setVisible(mode == PanelMode::Wide);
}
}
refreshControlsBackground();
updateControlsGeometry();
@ -1669,11 +1817,11 @@ void Panel::trackControls() {
if (widget) {
widget->events(
) | rpl::start_with_next([=](not_null<QEvent*> e) {
//if (e->type() == QEvent::Enter) {
//	_pinnedVideo->setControlsShown(true);
//} else if (e->type() == QEvent::Leave) {
//	_pinnedVideo->setControlsShown(false);
//}
}, _trackControlsLifetime);
}
};
@ -1834,7 +1982,7 @@ void Panel::updateMembersGeometry() {
top,
membersWidth,
std::min(desiredHeight, widget()->height()));
-_pinnedVideo->setGeometry(
_pinnedVideoWrap->setGeometry(
membersWidth,
top,
widget()->width() - membersWidth - skip,

View file

@ -70,6 +70,7 @@ public:
private:
using State = GroupCall::State;
struct VideoTile;
[[nodiscard]] not_null<Ui::RpWidget*> widget() const;
@ -103,6 +104,7 @@ private:
void refreshControlsBackground();
void showControls();
void refreshLeftButton();
void refreshTilesGeometry();
void endCall();
@ -146,8 +148,9 @@ private:
object_ptr<Ui::DropdownMenu> _menu = { nullptr };
object_ptr<Ui::AbstractButton> _joinAsToggle = { nullptr };
object_ptr<Members> _members = { nullptr };
-std::unique_ptr<LargeVideo> _pinnedVideo;
std::unique_ptr<Ui::RpWidget> _pinnedVideoWrap;
float64 _pinnedVideoControlsShown = 1.;
std::vector<VideoTile> _videoTiles;
rpl::lifetime _trackControlsLifetime;
object_ptr<Ui::FlatLabel> _startsIn = { nullptr };
object_ptr<Ui::RpWidget> _countdown = { nullptr };