Show last blurred frame or userpic for paused videos.

John Preston 2021-06-07 19:04:50 +04:00
parent 303ad02c61
commit 9a21d55de7
16 changed files with 394 additions and 112 deletions
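In outline: the MTP participant data now carries a paused flag per video (camera and screen share), the call layer keeps a paused tile alive but flips its Webrtc::VideoTrack into a Paused state, and the renderers then substitute the last frame (blurred) or the participant's userpic if no frame was ever received. A minimal sketch of that selection rule, with illustrative names only (not the actual tdesktop API):

```cpp
// Hypothetical model of the commit's display rule; the real logic lives
// in Viewport::RendererSW::paintTile / RendererGL::paintTile below.
enum class FrameFormat { None, ARGB32, YUV420 };
enum class VideoState { Active, Paused };
enum class TileImage { LiveFrame, BlurredLastFrame, Userpic };

TileImage chooseTileImage(FrameFormat format, VideoState state) {
	if (format == FrameFormat::None) {
		return TileImage::Userpic;          // no frame was ever decoded
	} else if (state == VideoState::Paused) {
		return TileImage::BlurredLastFrame; // keep the last frame, blurred
	}
	return TileImage::LiveFrame;
}
```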


@@ -216,6 +216,7 @@ struct GroupCall::SinkPointer {
 struct VideoParams {
 	std::string endpointId;
 	std::vector<tgcalls::MediaSsrcGroup> ssrcGroups;
+	bool paused = false;

 	[[nodiscard]] bool empty() const {
 		return endpointId.empty() || ssrcGroups.empty();
@@ -237,6 +238,9 @@ struct ParticipantVideoParams {
 			return !was;
 		}
 		return now->match([&](const MTPDgroupCallParticipantVideo &data) {
+			if (data.is_paused() != was.paused) {
+				return false;
+			}
 			if (gsl::make_span(data.vendpoint().v)
 				!= gsl::make_span(was.endpointId)) {
 				return false;
@@ -281,6 +285,7 @@ struct ParticipantVideoParams {
 	}
 	auto result = VideoParams();
 	params->match([&](const MTPDgroupCallParticipantVideo &data) {
+		result.paused = data.is_paused();
 		result.endpointId = data.vendpoint().v.toStdString();
 		const auto &list = data.vsource_groups().v;
 		result.ssrcGroups.reserve(list.size());
@@ -313,6 +318,14 @@ const std::string &GetScreenEndpoint(
 	return params ? params->screen.endpointId : EmptyString();
 }

+bool IsCameraPaused(const std::shared_ptr<ParticipantVideoParams> &params) {
+	return params && params->camera.paused;
+}
+
+bool IsScreenPaused(const std::shared_ptr<ParticipantVideoParams> &params) {
+	return params && params->screen.paused;
+}
+
 std::shared_ptr<ParticipantVideoParams> ParseVideoParams(
 	const tl::conditional<MTPGroupCallParticipantVideo> &camera,
 	const tl::conditional<MTPGroupCallParticipantVideo> &screen,
@@ -601,7 +614,7 @@ bool GroupCall::hasVideoWithFrames() const {
 rpl::producer<bool> GroupCall::hasVideoWithFramesValue() const {
 	return _videoStreamShownUpdates.events_starting_with(
-		VideoActiveToggle()
+		VideoStateToggle()
 	) | rpl::map([=] {
 		return hasVideoWithFrames();
 	}) | rpl::distinct_until_changed();
@@ -664,22 +677,32 @@ void GroupCall::subscribeToReal(not_null<Data::GroupCall*> real) {
 			return;
 		}
 		const auto &wasCameraEndpoint = data.was
-			? regularEndpoint(data.was->cameraEndpoint())
+			? regularEndpoint(GetCameraEndpoint(data.was->videoParams))
 			: EmptyString();
 		const auto &nowCameraEndpoint = data.now
-			? regularEndpoint(data.now->cameraEndpoint())
+			? regularEndpoint(GetCameraEndpoint(data.now->videoParams))
 			: EmptyString();
+		const auto wasCameraPaused = !wasCameraEndpoint.empty()
+			&& IsCameraPaused(data.was->videoParams);
+		const auto nowCameraPaused = !nowCameraEndpoint.empty()
+			&& IsCameraPaused(data.now->videoParams);
 		if (wasCameraEndpoint != nowCameraEndpoint) {
 			markEndpointActive({
 				VideoEndpointType::Camera,
 				peer,
-				nowCameraEndpoint
-			}, true);
+				nowCameraEndpoint,
+			}, true, nowCameraPaused);
 			markEndpointActive({
 				VideoEndpointType::Camera,
 				peer,
-				wasCameraEndpoint
-			}, false);
+				wasCameraEndpoint,
+			}, false, wasCameraPaused);
+		} else if (wasCameraPaused != nowCameraPaused) {
+			markTrackPaused({
+				VideoEndpointType::Camera,
+				peer,
+				nowCameraEndpoint,
+			}, nowCameraPaused);
 		}
 		const auto &wasScreenEndpoint = data.was
 			? regularEndpoint(data.was->screenEndpoint())
@@ -687,17 +710,27 @@ void GroupCall::subscribeToReal(not_null<Data::GroupCall*> real) {
 		const auto &nowScreenEndpoint = data.now
 			? regularEndpoint(data.now->screenEndpoint())
 			: EmptyString();
+		const auto wasScreenPaused = !wasScreenEndpoint.empty()
+			&& IsScreenPaused(data.was->videoParams);
+		const auto nowScreenPaused = !nowScreenEndpoint.empty()
+			&& IsScreenPaused(data.now->videoParams);
 		if (wasScreenEndpoint != nowScreenEndpoint) {
 			markEndpointActive({
 				VideoEndpointType::Screen,
 				peer,
-				nowScreenEndpoint
-			}, true);
+				nowScreenEndpoint,
+			}, true, nowScreenPaused);
 			markEndpointActive({
 				VideoEndpointType::Screen,
 				peer,
-				wasScreenEndpoint
-			}, false);
+				wasScreenEndpoint,
+			}, false, wasScreenPaused);
+		} else if (wasScreenPaused != nowScreenPaused) {
+			markTrackPaused({
+				VideoEndpointType::Screen,
+				peer,
+				wasScreenEndpoint,
+			}, nowScreenPaused);
 		}
 	}, _lifetime);
@@ -725,7 +758,9 @@ void GroupCall::subscribeToReal(not_null<Data::GroupCall*> real) {
 			return;
 		}
 		using Type = VideoEndpointType;
-		if (p->cameraEndpoint().empty() && p->screenEndpoint().empty()) {
+		const auto &params = p->videoParams;
+		if (GetCameraEndpoint(params).empty()
+			&& GetScreenEndpoint(params).empty()) {
 			return;
 		}
 		const auto tryEndpoint = [&](Type type, const std::string &id) {
@@ -739,8 +774,8 @@ void GroupCall::subscribeToReal(not_null<Data::GroupCall*> real) {
 			setVideoEndpointLarge(endpoint);
 			return true;
 		};
-		if (tryEndpoint(Type::Screen, p->screenEndpoint())
-			|| tryEndpoint(Type::Camera, p->cameraEndpoint())) {
+		if (tryEndpoint(Type::Screen, GetScreenEndpoint(params))
+			|| tryEndpoint(Type::Camera, GetCameraEndpoint(params))) {
 			_videoLargeTillTime = now + kFixSpeakingLargeVideoDuration;
 		}
 	}, _lifetime);
@@ -930,7 +965,7 @@ void GroupCall::setScreenEndpoint(std::string endpoint) {
 			VideoEndpointType::Screen,
 			_joinAs,
 			_screenEndpoint
-		}, false);
+		}, false, false);
 	}
 	_screenEndpoint = std::move(endpoint);
 	if (_screenEndpoint.empty()) {
@@ -941,7 +976,7 @@ void GroupCall::setScreenEndpoint(std::string endpoint) {
 			VideoEndpointType::Screen,
 			_joinAs,
 			_screenEndpoint
-		}, true);
+		}, true, false);
 	}
 }
@@ -954,7 +989,7 @@ void GroupCall::setCameraEndpoint(std::string endpoint) {
 			VideoEndpointType::Camera,
 			_joinAs,
 			_cameraEndpoint
-		}, false);
+		}, false, false);
 	}
 	_cameraEndpoint = std::move(endpoint);
 	if (_cameraEndpoint.empty()) {
@@ -965,7 +1000,7 @@ void GroupCall::setCameraEndpoint(std::string endpoint) {
 			VideoEndpointType::Camera,
 			_joinAs,
 			_cameraEndpoint
-		}, true);
+		}, true, false);
 	}
 }
@@ -987,7 +1022,10 @@ void GroupCall::setCameraEndpoint(std::string endpoint) {
 	}
 }

-void GroupCall::markEndpointActive(VideoEndpoint endpoint, bool active) {
+void GroupCall::markEndpointActive(
+		VideoEndpoint endpoint,
+		bool active,
+		bool paused) {
 	if (!endpoint) {
 		return;
 	}
@@ -996,6 +1034,9 @@ void GroupCall::markEndpointActive(VideoEndpoint endpoint, bool active) {
 		? (i == end(_activeVideoTracks))
 		: (i != end(_activeVideoTracks));
 	if (!changed) {
+		if (active) {
+			markTrackPaused(endpoint, paused);
+		}
 		return;
 	}
 	auto shown = false;
@@ -1004,30 +1045,49 @@ void GroupCall::markEndpointActive(VideoEndpoint endpoint, bool active) {
 			endpoint,
 			VideoTrack{
 				.track = std::make_unique<Webrtc::VideoTrack>(
-					Webrtc::VideoState::Active,
+					(paused
+						? Webrtc::VideoState::Paused
+						: Webrtc::VideoState::Active),
 					_requireARGB32),
 				.peer = endpoint.peer,
 			}).first;
 		const auto track = i->second.track.get();
-		track->renderNextFrame(
-		) | rpl::start_with_next([=] {
-			if (!track->frameSize().isEmpty()) {
-				markTrackShown(endpoint, true);
-			}
-		}, i->second.lifetime);
-		if (!track->frameSize().isEmpty()) {
+		if (!track->frameSize().isEmpty()
+			|| track->state() == Webrtc::VideoState::Paused) {
 			shown = true;
+		} else {
+			auto hasFrame = track->renderNextFrame() | rpl::map([=] {
+				return !track->frameSize().isEmpty();
+			});
+			auto isPaused = track->stateValue(
+			) | rpl::map([=](Webrtc::VideoState state) {
+				return (state == Webrtc::VideoState::Paused);
+			});
+			rpl::merge(
+				std::move(hasFrame),
+				std::move(isPaused)
+			) | rpl::filter([=](bool shouldShow) {
+				return shouldShow;
+			}) | rpl::start_with_next([=] {
+				_activeVideoTracks[endpoint].shownTrackingLifetime.destroy();
+				markTrackShown(endpoint, true);
+			}, i->second.shownTrackingLifetime);
 		}
 		addVideoOutput(i->first.id, { track->sink() });
 	} else {
 		if (_videoEndpointLarge.current() == endpoint) {
 			setVideoEndpointLarge({});
 		}
+		markTrackShown(endpoint, false);
+		markTrackPaused(endpoint, false);
 		_activeVideoTracks.erase(i);
 	}
 	updateRequestedVideoChannelsDelayed();
 	_videoStreamActiveUpdates.fire({ endpoint, active });
-	markTrackShown(endpoint, shown);
+	if (active) {
+		markTrackShown(endpoint, shown);
+		markTrackPaused(endpoint, paused);
+	}
 }

 void GroupCall::markTrackShown(const VideoEndpoint &endpoint, bool shown) {
@@ -1046,6 +1106,15 @@ void GroupCall::markTrackShown(const VideoEndpoint &endpoint, bool shown) {
 	}
 }

+void GroupCall::markTrackPaused(const VideoEndpoint &endpoint, bool paused) {
+	const auto i = _activeVideoTracks.find(endpoint);
+	Assert(i != end(_activeVideoTracks));
+
+	i->second.track->setState(paused
+		? Webrtc::VideoState::Paused
+		: Webrtc::VideoState::Active);
+}
+
 void GroupCall::rejoin() {
 	rejoin(_joinAs);
 }
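The markEndpointActive/markTrackPaused changes above encode one rule: a paused track counts as "shown" immediately, because the renderer now has a placeholder to draw; otherwise the code waits for whichever comes first, a rendered frame or a transition to Paused. A plain-C++ restatement of that rule (no rpl, names are mine):

```cpp
#include <cassert>

enum class VideoState { Active, Paused };

// Mirrors the rpl::merge(hasFrame, isPaused) | rpl::filter(...) chain above.
bool shouldMarkShown(bool hasFrameSize, VideoState state) {
	return hasFrameSize || (state == VideoState::Paused);
}

int main() {
	assert(!shouldMarkShown(false, VideoState::Active)); // keep waiting
	assert(shouldMarkShown(true, VideoState::Active));   // first frame arrived
	assert(shouldMarkShown(false, VideoState::Paused));  // placeholder suffices
}
```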
@@ -1889,7 +1958,7 @@ void GroupCall::ensureOutgoingVideo() {
 				VideoEndpointType::Camera,
 				_joinAs,
 				_cameraEndpoint
-			}, active);
+			}, active, false);
 			sendSelfUpdate(SendUpdateType::VideoStopped);
 			applyMeInCallLocally();
 		}, _lifetime);
@@ -1932,7 +2001,7 @@ void GroupCall::ensureOutgoingVideo() {
 				VideoEndpointType::Screen,
 				_joinAs,
 				_screenEndpoint
-			}, active);
+			}, active, false);
 			_screenJoinState.nextActionPending = true;
 			checkNextJoinAction();
 		}, _lifetime);
@@ -2310,38 +2379,42 @@ void GroupCall::fillActiveVideoEndpoints() {
 	auto removed = base::flat_set<VideoEndpoint>(
 		begin(endpoints),
 		end(endpoints));
-	const auto feedOne = [&](VideoEndpoint endpoint) {
+	const auto feedOne = [&](VideoEndpoint endpoint, bool paused) {
 		if (endpoint.empty()) {
 			return;
 		} else if (endpoint == large) {
 			largeFound = true;
 		}
-		if (!removed.remove(endpoint)) {
-			markEndpointActive(std::move(endpoint), true);
+		if (removed.remove(endpoint)) {
+			markTrackPaused(endpoint, paused);
+		} else {
+			markEndpointActive(std::move(endpoint), true, paused);
 		}
 	};
 	using Type = VideoEndpointType;
 	for (const auto &participant : participants) {
-		const auto camera = participant.cameraEndpoint();
+		const auto camera = GetCameraEndpoint(participant.videoParams);
 		if (camera != _cameraEndpoint
 			&& camera != _screenEndpoint
 			&& participant.peer != _joinAs) {
-			feedOne({ Type::Camera, participant.peer, camera });
+			const auto paused = IsCameraPaused(participant.videoParams);
+			feedOne({ Type::Camera, participant.peer, camera }, paused);
 		}
-		const auto screen = participant.screenEndpoint();
+		const auto screen = GetScreenEndpoint(participant.videoParams);
 		if (screen != _cameraEndpoint
 			&& screen != _screenEndpoint
 			&& participant.peer != _joinAs) {
-			feedOne({ Type::Screen, participant.peer, screen });
+			const auto paused = IsScreenPaused(participant.videoParams);
+			feedOne({ Type::Screen, participant.peer, screen }, paused);
 		}
 	}
-	feedOne({ Type::Camera, _joinAs, cameraSharingEndpoint() });
-	feedOne({ Type::Screen, _joinAs, screenSharingEndpoint() });
+	feedOne({ Type::Camera, _joinAs, cameraSharingEndpoint() }, false);
+	feedOne({ Type::Screen, _joinAs, screenSharingEndpoint() }, false);
 	if (large && !largeFound) {
 		setVideoEndpointLarge({});
 	}
 	for (const auto &endpoint : removed) {
-		markEndpointActive(endpoint, false);
+		markEndpointActive(endpoint, false, false);
 	}
 	updateRequestedVideoChannels();
 }
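fillActiveVideoEndpoints() above uses a classic set-diff: seed `removed` with everything currently active, refresh only the paused flag for endpoints that are fed again, activate the genuinely new ones, and deactivate whatever is left over. A self-contained sketch of the same pattern, with illustrative standard-library types rather than the tdesktop containers:

```cpp
#include <map>
#include <set>
#include <string>

struct ActiveSet {
	std::set<std::string> active;

	// wanted: endpoint -> paused flag for this update cycle.
	void fill(const std::map<std::string, bool> &wanted) {
		auto removed = active; // assume everything will disappear...
		for (const auto &[endpoint, paused] : wanted) {
			if (removed.erase(endpoint)) {
				// ...still present: only the paused state needs syncing,
				// as markTrackPaused(endpoint, paused) does above.
			} else {
				active.insert(endpoint); // markEndpointActive(..., true, paused)
			}
		}
		for (const auto &endpoint : removed) {
			active.erase(endpoint);      // markEndpointActive(..., false, false)
		}
	}
};
```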


@@ -82,6 +82,16 @@ enum class VideoEndpointType {
 };

 struct VideoEndpoint {
+	VideoEndpoint() = default;
+	VideoEndpoint(
+		VideoEndpointType type,
+		not_null<PeerData*> peer,
+		std::string id)
+	: type(type)
+	, peer(peer)
+	, id(std::move(id)) {
+	}
+
 	VideoEndpointType type = VideoEndpointType::Camera;
 	PeerData *peer = nullptr;
 	std::string id;
@@ -131,9 +141,9 @@ inline bool operator>=(
 	return !(a < b);
 }

-struct VideoActiveToggle {
+struct VideoStateToggle {
 	VideoEndpoint endpoint;
-	bool active = false;
+	bool value = false;
 };

 struct VideoQualityRequest {
@@ -150,9 +160,12 @@ struct ParticipantVideoParams;
 [[nodiscard]] const std::string &GetCameraEndpoint(
 	const std::shared_ptr<ParticipantVideoParams> &params);
 [[nodiscard]] const std::string &GetScreenEndpoint(
 	const std::shared_ptr<ParticipantVideoParams> &params);
+[[nodiscard]] bool IsCameraPaused(
+	const std::shared_ptr<ParticipantVideoParams> &params);
+[[nodiscard]] bool IsScreenPaused(
+	const std::shared_ptr<ParticipantVideoParams> &params);

 class GroupCall final : public base::has_weak_ptr {
 public:
@@ -279,11 +292,11 @@ public:
 		return _levelUpdates.events();
 	}
 	[[nodiscard]] auto videoStreamActiveUpdates() const
-	-> rpl::producer<VideoActiveToggle> {
+	-> rpl::producer<VideoStateToggle> {
 		return _videoStreamActiveUpdates.events();
 	}
 	[[nodiscard]] auto videoStreamShownUpdates() const
-	-> rpl::producer<VideoActiveToggle> {
+	-> rpl::producer<VideoStateToggle> {
 		return _videoStreamShownUpdates.events();
 	}
 	void requestVideoQuality(
@@ -310,7 +323,7 @@ public:
 	struct VideoTrack {
 		std::unique_ptr<Webrtc::VideoTrack> track;
 		PeerData *peer = nullptr;
-		rpl::lifetime lifetime;
+		rpl::lifetime shownTrackingLifetime;
 		Group::VideoQuality quality = Group::VideoQuality();

 		[[nodiscard]] explicit operator bool() const {
@@ -505,7 +518,11 @@ private:
 	void addVideoOutput(const std::string &endpoint, SinkPointer sink);
 	void setVideoEndpointLarge(VideoEndpoint endpoint);
-	void markEndpointActive(VideoEndpoint endpoint, bool active);
+	void markEndpointActive(
+		VideoEndpoint endpoint,
+		bool active,
+		bool paused);
+	void markTrackPaused(const VideoEndpoint &endpoint, bool paused);
 	void markTrackShown(const VideoEndpoint &endpoint, bool shown);

 	[[nodiscard]] MTPInputGroupCall inputCall() const;
@@ -576,8 +593,9 @@ private:
 	bool _requireARGB32 = true;

 	rpl::event_stream<LevelUpdate> _levelUpdates;
-	rpl::event_stream<VideoActiveToggle> _videoStreamActiveUpdates;
-	rpl::event_stream<VideoActiveToggle> _videoStreamShownUpdates;
+	rpl::event_stream<VideoStateToggle> _videoStreamActiveUpdates;
+	rpl::event_stream<VideoStateToggle> _videoStreamPausedUpdates;
+	rpl::event_stream<VideoStateToggle> _videoStreamShownUpdates;
 	base::flat_map<VideoEndpoint, VideoTrack> _activeVideoTracks;
 	base::flat_set<VideoEndpoint> _shownVideoTracks;
 	rpl::variable<VideoEndpoint> _videoEndpointLarge;


@@ -132,11 +132,11 @@ private:
 	[[nodiscard]] bool allRowsAboveMoreImportantThanHand(
 		not_null<Row*> row,
 		uint64 raiseHandRating) const;
-	const Data::GroupCallParticipant *findParticipant(
+	[[nodiscard]] const Data::GroupCallParticipant *findParticipant(
 		const std::string &endpoint) const;
-	const std::string &computeScreenEndpoint(
+	[[nodiscard]] const std::string &computeScreenEndpoint(
 		not_null<const Data::GroupCallParticipant*> participant) const;
-	const std::string &computeCameraEndpoint(
+	[[nodiscard]] const std::string &computeCameraEndpoint(
 		not_null<const Data::GroupCallParticipant*> participant) const;
 	void showRowMenu(not_null<PeerListRow*> row, bool highlightRow);
@@ -288,11 +288,11 @@ void Members::Controller::setupListChangeViewers() {
 	}, _lifetime);

 	_call->videoStreamShownUpdates(
-	) | rpl::filter([=](const VideoActiveToggle &update) {
+	) | rpl::filter([=](const VideoStateToggle &update) {
 		const auto &large = _call->videoEndpointLarge();
 		return large && (update.endpoint != large);
-	}) | rpl::start_with_next([=](const VideoActiveToggle &update) {
-		if (update.active) {
+	}) | rpl::start_with_next([=](const VideoStateToggle &update) {
+		if (update.value) {
 			hideRowWithVideo(update.endpoint);
 		} else {
 			showRowWithVideo(update.endpoint);
@@ -403,8 +403,8 @@ void Members::Controller::subscribeToChanges(not_null<Data::GroupCall*> real) {
 		toggleVideoEndpointActive(endpoint, true);
 	}
 	_call->videoStreamActiveUpdates(
-	) | rpl::start_with_next([=](const VideoActiveToggle &update) {
-		toggleVideoEndpointActive(update.endpoint, update.active);
+	) | rpl::start_with_next([=](const VideoStateToggle &update) {
+		toggleVideoEndpointActive(update.endpoint, update.value);
 	}, _lifetime);

 	if (_prepared) {
@@ -1206,12 +1206,12 @@ base::unique_qptr<Ui::PopupMenu> Members::Controller::createRowContextMenu(
 	const auto camera = VideoEndpoint{
 		VideoEndpointType::Camera,
 		participantPeer,
-		computeCameraEndpoint(participant)
+		computeCameraEndpoint(participant),
 	};
 	const auto screen = VideoEndpoint{
 		VideoEndpointType::Screen,
 		participantPeer,
-		computeScreenEndpoint(participant)
+		computeScreenEndpoint(participant),
 	};
 	if (shown.contains(camera)) {
 		if (pinned && large == camera) {


@@ -843,8 +843,8 @@ void Panel::setupVideo(not_null<Viewport*> viewport) {
 		setupTile(endpoint, track);
 	}
 	_call->videoStreamActiveUpdates(
-	) | rpl::start_with_next([=](const VideoActiveToggle &update) {
-		if (update.active) {
+	) | rpl::start_with_next([=](const VideoStateToggle &update) {
+		if (update.value) {
 			// Add async (=> the participant row is definitely in Members).
 			const auto endpoint = update.endpoint;
 			crl::on_main(viewport->widget(), [=] {


@@ -239,6 +239,11 @@ void Viewport::add(
 	}) | rpl::start_with_next([=] {
 		updateTilesGeometry();
 	}, _tiles.back()->lifetime());
+
+	_tiles.back()->track()->stateValue(
+	) | rpl::start_with_next([=] {
+		updateTilesGeometry();
+	}, _tiles.back()->lifetime());
 }

 void Viewport::remove(const VideoEndpoint &endpoint) {
@@ -434,7 +439,7 @@ Viewport::Layout Viewport::countWide(int outerWidth, int outerHeight) const {
 	sizes.reserve(_tiles.size());
 	for (const auto &tile : _tiles) {
 		const auto video = tile.get();
-		const auto size = video->trackSize();
+		const auto size = video->trackOrUserpicSize();
 		if (!size.isEmpty()) {
 			sizes.push_back(Geometry{ video, size });
 		}
@@ -529,7 +534,7 @@ void Viewport::showLarge(const VideoEndpoint &endpoint) {
 		startLargeChangeAnimation();
 	}

-	Ensures(!_large || !_large->trackSize().isEmpty());
+	Ensures(!_large || !_large->trackOrUserpicSize().isEmpty());
 }

 void Viewport::updateTilesGeometry() {
@@ -564,7 +569,7 @@ void Viewport::refreshHasTwoOrMore() {
 	auto hasTwoOrMore = false;
 	auto oneFound = false;
 	for (const auto &tile : _tiles) {
-		if (!tile->trackSize().isEmpty()) {
+		if (!tile->trackOrUserpicSize().isEmpty()) {
 			if (oneFound) {
 				hasTwoOrMore = true;
 				break;
@@ -598,7 +603,7 @@ void Viewport::updateTilesGeometryWide(int outerWidth, int outerHeight) {
 	}

 	_startTilesLayout = countWide(outerWidth, outerHeight);
-	if (_large && !_large->trackSize().isEmpty()) {
+	if (_large && !_large->trackOrUserpicSize().isEmpty()) {
 		for (const auto &geometry : _startTilesLayout.list) {
 			if (geometry.tile == _large) {
 				setTileGeometry(_large, { 0, 0, outerWidth, outerHeight });
@@ -629,7 +634,7 @@ void Viewport::updateTilesGeometryNarrow(int outerWidth) {
 	sizes.reserve(_tiles.size());
 	for (const auto &tile : _tiles) {
 		const auto video = tile.get();
-		const auto size = video->trackSize();
+		const auto size = video->trackOrUserpicSize();
 		if (size.isEmpty()) {
 			video->hide();
 		} else {
@@ -691,7 +696,7 @@ void Viewport::updateTilesGeometryColumn(int outerWidth) {
 	const auto y = -_scrollTop;
 	auto top = 0;
 	const auto layoutNext = [&](not_null<VideoTile*> tile) {
-		const auto size = tile->trackSize();
+		const auto size = tile->trackOrUserpicSize();
 		const auto shown = !size.isEmpty() && _large && tile != _large;
 		const auto height = shown
 			? st::groupCallNarrowVideoHeight
@@ -707,7 +712,7 @@ void Viewport::updateTilesGeometryColumn(int outerWidth) {
 		for (const auto &tile : _tiles) {
 			if (tile.get() != _large && tile->row()->peer() == topPeer) {
 				return (tile.get() != _tiles.front().get())
-					&& !tile->trackSize().isEmpty();
+					&& !tile->trackOrUserpicSize().isEmpty();
 			}
 		}
 		return false;


@@ -66,13 +66,14 @@ const int diameter = 2 * radius + 1;
 };

 }

-// Depends on FragmetSampleTexture().
+// Depends on FragmentSampleTexture().
 [[nodiscard]] ShaderPart FragmentFrameColor() {
 	const auto blur = FragmentBlurTexture(true, 'b');
 	return {
 		.header = R"(
 uniform vec4 frameBg;
 uniform vec3 shadow; // fullHeight, shown, maxOpacity
+uniform float paused; // 0. <-> 1.
 const float backgroundOpacity = )" + QString::number(kBlurOpacity) + R"(;
 float insideTexture() {
 	vec2 textureHalf = vec2(0.5, 0.5);
@@ -89,14 +90,14 @@ vec4 background() {
 }
 )",
 		.body = R"(
-	float inside = insideTexture();
+	float inside = insideTexture() * (1. - paused);
 	result = result * inside
 		+ (1. - inside) * (backgroundOpacity * background()
 			+ (1. - backgroundOpacity) * frameBg);

 	float shadowCoord = gl_FragCoord.y - roundRect.y;
 	float shadowValue = max(1. - (shadowCoord / shadow.x), 0.);
-	float shadowShown = shadowValue * shadow.y * shadow.z;
+	float shadowShown = max(shadowValue * shadow.y, paused) * shadow.z;
 	result = vec4(result.rgb * (1. - shadowShown), result.a);
 )",
 	};
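The two GLSL edits read as plain math: `paused` (animated 0..1) fades the live texture out in favour of the blurred background mix, and forces the darkening shadow to full strength over the whole tile. The same arithmetic in C++, with `shadow` unpacked into (fullHeight, shown, maxOpacity) as the uniform's comment says:

```cpp
#include <algorithm>

// A fully paused tile ignores the live texture and falls back to the
// blurred background() mix in the fragment shader.
float insideFactor(float insideTexture, float paused) {
	return insideTexture * (1.f - paused);
}

// While paused, the darkening shadow covers the whole tile instead of
// following the controls strip at the bottom.
float shadowShown(float shadowValue, float shown, float paused, float maxOpacity) {
	return std::max(shadowValue * shown, paused) * maxOpacity;
}
```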
@@ -342,6 +343,30 @@ void Viewport::RendererGL::fillBackground(QOpenGLFunctions &f) {
 	_background.fill(f, region, _viewport, _factor, st::groupCallBg);
 }

+void Viewport::RendererGL::validateUserpicFrame(
+		not_null<VideoTile*> tile,
+		TileData &tileData) {
+	if (!_userpicFrame) {
+		tileData.userpicFrame = QImage();
+		return;
+	} else if (!tileData.userpicFrame.isNull()) {
+		return;
+	}
+	tileData.userpicFrame = QImage(
+		tile->trackOrUserpicSize(),
+		QImage::Format_ARGB32_Premultiplied);
+	tileData.userpicFrame.fill(Qt::black);
+	{
+		auto p = Painter(&tileData.userpicFrame);
+		tile->row()->peer()->paintUserpicSquare(
+			p,
+			tile->row()->ensureUserpicView(),
+			0,
+			0,
+			tileData.userpicFrame.width());
+	}
+}
+
 void Viewport::RendererGL::paintTile(
 	QOpenGLFunctions &f,
 	GLuint defaultFramebufferObject,
@@ -349,12 +374,18 @@ void Viewport::RendererGL::paintTile(
 		TileData &tileData) {
 	const auto track = tile->track();
 	const auto data = track->frameWithInfo(false);
-	if (data.format == Webrtc::FrameFormat::None) {
-		return;
-	}
-	Assert(!data.yuv420->size.isEmpty());
+	_userpicFrame = (data.format == Webrtc::FrameFormat::None);
+	validateUserpicFrame(tile, tileData);
+	const auto frameSize = _userpicFrame
+		? tileData.userpicFrame.size()
+		: data.yuv420->size;
+	const auto frameRotation = _userpicFrame
+		? 0
+		: data.rotation;
+	Assert(!frameSize.isEmpty());

-	_rgbaFrame = (data.format == Webrtc::FrameFormat::ARGB32);
+	_rgbaFrame = (data.format == Webrtc::FrameFormat::ARGB32)
+		|| _userpicFrame;
 	const auto geometry = tile->geometry();
 	const auto x = geometry.x();
 	const auto y = geometry.y();
@@ -368,16 +399,18 @@ void Viewport::RendererGL::paintTile(
 	const auto style = row->computeIconState(MembersRowStyle::Video);

 	validateOutlineAnimation(tile, tileData);
+	validatePausedAnimation(tile, tileData);
 	const auto outline = tileData.outlined.value(tileData.outline ? 1. : 0.);
+	const auto paused = tileData.paused.value(tileData.pause ? 1. : 0.);

 	ensureButtonsImage();

 	// Frame.
 	const auto unscaled = Media::View::FlipSizeByRotation(
-		data.yuv420->size,
-		data.rotation);
+		frameSize,
+		frameRotation);
 	const auto tileSize = geometry.size();
-	const auto swap = (((data.rotation / 90) % 2) == 1);
+	const auto swap = (((frameRotation / 90) % 2) == 1);
 	const auto expand = isExpanded(tile, unscaled, tileSize);
 	const auto animation = tile->animation();
 	const auto expandRatio = (animation.ratio >= 0.)
@@ -386,7 +419,7 @@ void Viewport::RendererGL::paintTile(
 		? 1.
 		: 0.;
 	auto texCoords = CountTexCoords(unscaled, tileSize, expandRatio, swap);
-	auto blurTexCoords = (expandRatio == 1.)
+	auto blurTexCoords = (expandRatio == 1. && !swap)
 		? texCoords
 		: CountTexCoords(unscaled, tileSize, 1.);
 	const auto rect = transformRect(geometry);
@@ -396,7 +429,7 @@ void Viewport::RendererGL::paintTile(
 		{ { 1.f, 0.f } },
 		{ { 0.f, 0.f } },
 	} };
-	if (const auto shift = (data.rotation / 90); shift > 0) {
+	if (const auto shift = (frameRotation / 90); shift > 0) {
 		std::rotate(
 			toBlurTexCoords.begin(),
 			toBlurTexCoords.begin() + shift,
@@ -572,12 +605,12 @@ void Viewport::RendererGL::paintTile(
 	const auto uniformViewport = QSizeF(_viewport * _factor);

 	program->setUniformValue("viewport", uniformViewport);
-	program->setUniformValue("frameBg", Uniform(st::groupCallBg->c));
+	program->setUniformValue("frameBg", st::groupCallBg->c);
 	program->setUniformValue("radiusOutline", QVector2D(
 		GLfloat(st::roundRadiusLarge * _factor),
 		(outline > 0) ? (st::groupCallOutline * _factor) : 0.f));
 	program->setUniformValue("roundRect", Uniform(rect));
-	program->setUniformValue("roundBg", Uniform(st::groupCallBg->c));
+	program->setUniformValue("roundBg", st::groupCallBg->c);
 	program->setUniformValue("outlineFg", QVector4D(
 		st::groupCallMemberActiveIcon->c.redF(),
 		st::groupCallMemberActiveIcon->c.greenF(),
@@ -589,6 +622,7 @@ void Viewport::RendererGL::paintTile(
 	program->setUniformValue(
 		"shadow",
 		QVector3D(shadowHeight, shown, shadowAlpha));
+	program->setUniformValue("paused", GLfloat(paused));

 	f.glActiveTexture(_rgbaFrame ? GL_TEXTURE1 : GL_TEXTURE3);
 	tileData.textures.bind(
@@ -634,6 +668,10 @@ void Viewport::RendererGL::paintTile(
 		FillTexturedRectangle(f, &*_imageProgram, 18);
 	}

+	if (paused > 0.) {
+	}
+
 	if (nameShift == fullNameShift) {
 		return;
 	}
@@ -717,15 +755,18 @@ void Viewport::RendererGL::bindFrame(
 		const Webrtc::FrameWithInfo &data,
 		TileData &tileData,
 		Program &program) {
-	const auto upload = (tileData.trackIndex != data.index);
-	tileData.trackIndex = data.index;
+	const auto imageIndex = _userpicFrame ? 0 : (data.index + 1);
+	const auto upload = (tileData.trackIndex != imageIndex);
+	tileData.trackIndex = imageIndex;
 	if (_rgbaFrame) {
 		ensureARGB32Program();
 		f.glUseProgram(program.argb32->programId());
 		f.glActiveTexture(GL_TEXTURE0);
 		tileData.textures.bind(f, tileData.textureIndex * 5 + 0);
 		if (upload) {
-			const auto &image = data.original;
+			const auto &image = _userpicFrame
+				? tileData.userpicFrame
+				: data.original;
 			const auto stride = image.bytesPerLine() / 4;
 			const auto data = image.constBits();
 			uploadTexture(
@@ -1007,6 +1048,8 @@ void Viewport::RendererGL::validateDatas() {
 		}
 		const auto id = quintptr(tiles[i]->track().get());
 		const auto peer = tiles[i]->row()->peer();
+		const auto paused = (tiles[i]->track()->state()
+			== Webrtc::VideoState::Paused);
 		auto index = int(_tileData.size());
 		maybeStaleAfter = ranges::find(
 			maybeStaleAfter,
@@ -1018,12 +1061,15 @@ void Viewport::RendererGL::validateDatas() {
 			maybeStaleAfter->id = id;
 			maybeStaleAfter->peer = peer;
 			maybeStaleAfter->stale = false;
+			maybeStaleAfter->pause = paused;
+			maybeStaleAfter->paused.stop();
 			request.updating = true;
 		} else {
 			// This invalidates maybeStale*, but they're already equal.
 			_tileData.push_back({
 				.id = id,
 				.peer = peer,
+				.pause = paused,
 			});
 		}
 		_tileData[index].nameVersion = peer->nameVersion;
@@ -1114,4 +1160,22 @@ void Viewport::RendererGL::validateOutlineAnimation(
 		st::fadeWrapDuration);
 }

+void Viewport::RendererGL::validatePausedAnimation(
+		not_null<VideoTile*> tile,
+		TileData &data) {
+	const auto paused = (_userpicFrame
+		&& tile->track()->frameSize().isEmpty())
+		|| (tile->track()->state() == Webrtc::VideoState::Paused);
+	if (data.pause == paused) {
+		return;
+	}
+	data.pause = paused;
+	data.paused.start(
+		[=] { _owner->widget()->update(); },
+		paused ? 0. : 1.,
+		paused ? 1. : 0.,
+		st::fadeWrapDuration);
+}
+
 } // namespace Calls::Group


@@ -53,6 +53,8 @@ private:
 		Ui::GL::Textures<5> textures;
 		Ui::GL::Framebuffers<2> framebuffers;
 		Ui::Animations::Simple outlined;
+		Ui::Animations::Simple paused;
+		QImage userpicFrame;
 		QRect nameRect;
 		int nameVersion = 0;
 		mutable int textureIndex = 0;
@@ -62,6 +64,7 @@ private:
 		mutable QSize textureChromaSize;
 		mutable QSize textureBlurSize;
 		bool stale = false;
+		bool pause = false;
 		bool outline = false;
 	};
 	struct Program {
@@ -102,6 +105,12 @@ private:
 	void validateOutlineAnimation(
 		not_null<VideoTile*> tile,
 		TileData &data);
+	void validatePausedAnimation(
+		not_null<VideoTile*> tile,
+		TileData &data);
+	void validateUserpicFrame(
+		not_null<VideoTile*> tile,
+		TileData &tileData);

 	void uploadTexture(
 		QOpenGLFunctions &f,
@@ -126,6 +135,7 @@ private:
 	GLfloat _factor = 1.;
 	QSize _viewport;
 	bool _rgbaFrame = false;
+	bool _userpicFrame;
 	Ui::GL::BackgroundFiller _background;
 	std::optional<QOpenGLBuffer> _frameBuffer;
 	Program _downscaleProgram;
@@ -148,6 +158,7 @@ private:
 	Ui::CrossLineAnimation _pinIcon;
 	Ui::CrossLineAnimation _muteIcon;
 	Ui::RoundRect _pinBackground;

 	rpl::lifetime _lifetime;


@@ -10,6 +10,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "calls/group/calls_group_common.h"
 #include "calls/group/calls_group_viewport_tile.h"
 #include "calls/group/calls_group_members_row.h"
+#include "data/data_peer.h"
 #include "media/view/media_view_pip.h"
 #include "webrtc/webrtc_video_track.h"
 #include "lang/lang_keys.h"
@@ -17,6 +18,11 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "styles/palette.h"

 namespace Calls::Group {
+namespace {
+
+constexpr auto kBlurRadius = 15;
+
+} // namespace

 Viewport::RendererSW::RendererSW(not_null<Viewport*> owner)
 : _owner(owner)
@@ -35,12 +41,52 @@ void Viewport::RendererSW::paintFallback(
 	auto bg = clip;
 	auto hq = PainterHighQualityEnabler(p);
 	const auto bounding = clip.boundingRect();
+	for (auto &[tile, tileData] : _tileData) {
+		tileData.stale = true;
+	}
 	for (const auto &tile : _owner->_tiles) {
+		if (!tile->shown()) {
+			continue;
+		}
 		paintTile(p, tile.get(), bounding, bg);
 	}
 	for (const auto rect : bg) {
 		p.fillRect(rect, st::groupCallBg);
 	}
+	for (auto i = _tileData.begin(); i != _tileData.end();) {
+		if (i->second.stale) {
+			i = _tileData.erase(i);
+		} else {
+			++i;
+		}
+	}
+}
+
+void Viewport::RendererSW::validateUserpicFrame(
+		not_null<VideoTile*> tile,
+		TileData &data) {
+	if (!_userpicFrame) {
+		data.userpicFrame = QImage();
+		return;
+	} else if (!data.userpicFrame.isNull()) {
+		return;
+	}
+	auto userpic = QImage(
+		tile->trackOrUserpicSize(),
+		QImage::Format_ARGB32_Premultiplied);
+	userpic.fill(Qt::black);
+	{
+		auto p = Painter(&userpic);
+		tile->row()->peer()->paintUserpicSquare(
+			p,
+			tile->row()->ensureUserpicView(),
+			0,
+			0,
+			userpic.width());
+	}
+	data.userpicFrame = Images::BlurLargeImage(
+		std::move(userpic),
+		kBlurRadius);
 }

 void Viewport::RendererSW::paintTile(
@@ -50,11 +96,27 @@ void Viewport::RendererSW::paintTile(
 		QRegion &bg) {
 	const auto track = tile->track();
 	const auto data = track->frameWithInfo(true);
-	const auto &image = data.original;
-	const auto rotation = data.rotation;
-	if (image.isNull() || !tile->shown()) {
-		return;
+	auto &tileData = _tileData[tile];
+	tileData.stale = false;
+	_userpicFrame = (data.format == Webrtc::FrameFormat::None);
+	_pausedFrame = (track->state() == Webrtc::VideoState::Paused);
+	validateUserpicFrame(tile, tileData);
+	if (_userpicFrame || !_pausedFrame) {
+		tileData.blurredFrame = QImage();
+	} else if (tileData.blurredFrame.isNull()) {
+		tileData.blurredFrame = Images::BlurLargeImage(
+			data.original.scaled(
+				VideoTile::PausedVideoSize(),
+				Qt::KeepAspectRatio),
+			kBlurRadius);
 	}
+	const auto &image = _userpicFrame
+		? tileData.userpicFrame
+		: _pausedFrame
+		? tileData.blurredFrame
+		: data.original;
+	const auto frameRotation = _userpicFrame ? 0 : data.rotation;
+	Assert(!image.isNull());

 	const auto fill = [&](QRect rect) {
 		const auto intersected = rect.intersected(clip);
@@ -72,22 +134,22 @@ void Viewport::RendererSW::paintTile(
 	const auto height = geometry.height();
 	const auto scaled = FlipSizeByRotation(
 		image.size(),
-		rotation
+		frameRotation
 	).scaled(QSize(width, height), Qt::KeepAspectRatio);
 	const auto left = (width - scaled.width()) / 2;
 	const auto top = (height - scaled.height()) / 2;
 	const auto target = QRect(QPoint(x + left, y + top), scaled);
-	if (UsePainterRotation(rotation, false)) {
-		if (rotation) {
+	if (UsePainterRotation(frameRotation, false)) {
+		if (frameRotation) {
 			p.save();
-			p.rotate(rotation);
+			p.rotate(frameRotation);
 		}
-		p.drawImage(RotatedRect(target, rotation), image);
-		if (rotation) {
+		p.drawImage(RotatedRect(target, frameRotation), image);
+		if (frameRotation) {
 			p.restore();
 		}
-	} else if (rotation) {
-		p.drawImage(target, RotateFrameImage(image, rotation));
+	} else if (frameRotation) {
+		p.drawImage(target, RotateFrameImage(image, frameRotation));
 	} else {
 		p.drawImage(target, image);
 	}
@@ -168,6 +230,10 @@ void Viewport::RendererSW::paintTileControls(
 			&_pinBackground);
 	}

+	if (_pausedFrame) {
+		p.fillRect(x, y, width, height, QColor(0, 0, 0, kShadowMaxAlpha));
+	}
+
 	const auto shown = _owner->_controlsShownRatio;
 	if (shown == 0.) {
 		return;
@@ -191,14 +257,16 @@ void Viewport::RendererSW::paintTileControls(
 		return;
 	}
 	const auto factor = style::DevicePixelRatio();
-	p.drawImage(
-		shadowFill,
-		_shadow,
-		QRect(
-			0,
-			(shadowFill.y() - shadowRect.y()) * factor,
-			_shadow.width(),
-			shadowFill.height() * factor));
+	if (!_pausedFrame) {
+		p.drawImage(
+			shadowFill,
+			_shadow,
+			QRect(
+				0,
+				(shadowFill.y() - shadowRect.y()) * factor,
+				_shadow.width(),
+				shadowFill.height() * factor));
+	}
 	const auto row = tile->row();
 	row->lazyInitialize(st::groupCallMembersListItem);
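The software path above paints the userpic into a square ARGB32 image and blurs it on the CPU with Images::BlurLargeImage (the GL renderer uploads the unblurred userpic and lets its existing blur pass handle the rest). A self-contained Qt sketch of that placeholder construction, with the tdesktop-specific calls (PeerData::paintUserpicSquare, Images::BlurLargeImage) abstracted into parameters:

```cpp
#include <QImage>
#include <QPainter>
#include <functional>
#include <utility>

// Build a square placeholder: paint the userpic, then blur the result.
QImage makeUserpicPlaceholder(
		QSize size,
		const std::function<void(QPainter&, int side)> &paintUserpic,
		const std::function<QImage(QImage&&)> &blur) {
	auto userpic = QImage(size, QImage::Format_ARGB32_Premultiplied);
	userpic.fill(Qt::black); // letterbox background, as in the diff
	{
		QPainter p(&userpic);
		paintUserpic(p, userpic.width());
	} // the painter must finish before the image is moved from
	return blur(std::move(userpic));
}
```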


@@ -25,6 +25,11 @@ public:
 		Ui::GL::Backend backend) override;

 private:
+	struct TileData {
+		QImage userpicFrame;
+		QImage blurredFrame;
+		bool stale = false;
+	};
 	void paintTile(
 		Painter &p,
 		not_null<VideoTile*> tile,
@@ -44,10 +49,16 @@ private:
 		int width,
 		int height,
 		not_null<VideoTile*> tile);
+	void validateUserpicFrame(
+		not_null<VideoTile*> tile,
+		TileData &data);

 	const not_null<Viewport*> _owner;

 	QImage _shadow;
+	bool _userpicFrame = false;
+	bool _pausedFrame = false;
+	base::flat_map<not_null<VideoTile*>, TileData> _tileData;
 	Ui::CrossLineAnimation _pinIcon;
 	Ui::RoundRect _pinBackground;


@@ -16,6 +16,11 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include <QtGui/QOpenGLFunctions>

 namespace Calls::Group {
+namespace {
+
+constexpr auto kPausedVideoSize = 90;
+
+} // namespace

 Viewport::VideoTile::VideoTile(
 	const VideoEndpoint &endpoint,
@@ -54,6 +59,20 @@ int Viewport::VideoTile::topControlsSlide() const {
 		_topControlsShownAnimation.value(_topControlsShown ? 1. : 0.));
 }

+QSize Viewport::VideoTile::PausedVideoSize() {
+	return QSize(kPausedVideoSize, kPausedVideoSize);
+}
+
+QSize Viewport::VideoTile::trackOrUserpicSize() const {
+	if (const auto size = trackSize(); !size.isEmpty()) {
+		return size;
+	} else if (_userpicSize.isEmpty()
+		&& _track.track->state() == Webrtc::VideoState::Paused) {
+		_userpicSize = PausedVideoSize();
+	}
+	return _userpicSize;
+}
+
 bool Viewport::VideoTile::screencast() const {
 	return (_endpoint.type == VideoEndpointType::Screen);
 }


@@ -60,6 +60,8 @@ public:
 	[[nodiscard]] rpl::producer<QSize> trackSizeValue() const {
 		return _trackSize.value();
 	}
+	[[nodiscard]] QSize trackOrUserpicSize() const;
+	[[nodiscard]] static QSize PausedVideoSize();
 	[[nodiscard]] bool screencast() const;

 	void setGeometry(
@@ -104,6 +106,7 @@ private:
 	QRect _geometry;
 	TileAnimation _animation;
 	rpl::variable<QSize> _trackSize;
+	mutable QSize _userpicSize;
 	QRect _pinOuter;
 	QRect _pinInner;
 	QRect _backOuter;


@@ -48,6 +48,14 @@ const std::string &GroupCallParticipant::screenEndpoint() const {
 	return GetScreenEndpoint(videoParams);
 }

+bool GroupCallParticipant::cameraPaused() const {
+	return IsCameraPaused(videoParams);
+}
+
+bool GroupCallParticipant::screenPaused() const {
+	return IsScreenPaused(videoParams);
+}
+
 GroupCall::GroupCall(
 	not_null<PeerData*> peer,
 	uint64 id,
@@ -231,8 +239,8 @@ const GroupCallParticipant *GroupCall::participantByEndpoint(
 		return nullptr;
 	}
 	for (const auto &participant : _participants) {
-		if (participant.cameraEndpoint() == endpoint
-			|| participant.screenEndpoint() == endpoint) {
+		if (GetCameraEndpoint(participant.videoParams) == endpoint
+			|| GetScreenEndpoint(participant.videoParams) == endpoint) {
 			return &participant;
 		}
 	}


@@ -42,6 +42,8 @@ struct GroupCallParticipant {
 	[[nodiscard]] const std::string &cameraEndpoint() const;
 	[[nodiscard]] const std::string &screenEndpoint() const;
+	[[nodiscard]] bool cameraPaused() const;
+	[[nodiscard]] bool screenPaused() const;
 };

 class GroupCall final {


@@ -279,10 +279,10 @@ void OverlayWidget::RendererGL::paintTransformedStaticContent(
 	if (fillTransparentBackground) {
 		program->setUniformValue(
 			"transparentBg",
-			Uniform(st::mediaviewTransparentBg->c));
+			st::mediaviewTransparentBg->c);
 		program->setUniformValue(
 			"transparentFg",
-			Uniform(st::mediaviewTransparentFg->c));
+			st::mediaviewTransparentFg->c);
 		program->setUniformValue(
 			"transparentSize",
 			st::transparentPlaceholderSize * _factor);

@@ -1 +1 @@
-Subproject commit b1d5ed4ab503635f9e0f9ee5d2a34e2975042014
+Subproject commit 35236988b7bc489b7683019db89636d4030db1ce

@@ -1 +1 @@
-Subproject commit 2873d4990f94d10b627f1880344c9357e86ff864
+Subproject commit 539b9b51c730900ce724d2e329f3a877f2fcba30