Mirror of https://github.com/AyuGram/AyuGramDesktop.git, synced 2025-04-15 21:57:10 +02:00.
Show last blurred frame or userpic for paused videos.
Commit 9a21d55de7, parent 303ad02c61.
16 changed files with 394 additions and 112 deletions
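The commit threads a per-endpoint pause flag from the group-call participant payload (the MTP is_paused bit) through GroupCall's endpoint tracking and into both viewport renderers: a paused video tile keeps its slot and shows its last frame, blurred, or the participant's userpic, instead of going blank. Below is a minimal standalone sketch of that idea (plain C++, illustrative names only, not tdesktop's API):

#include <iostream>
#include <string>

// Simplified model of the commit's behavior: a tile is "shown" while it
// either renders frames or is paused; a paused tile falls back to the
// last blurred frame if one exists, else to the blurred userpic.
enum class VideoState { Inactive, Active, Paused };

struct TileModel {
	bool hasLastFrame = false;
	VideoState state = VideoState::Inactive;

	bool shown() const {
		return state == VideoState::Active
			|| state == VideoState::Paused;
	}
	std::string background() const {
		if (state != VideoState::Paused) {
			return "live frames";
		}
		return hasLastFrame ? "last blurred frame" : "blurred userpic";
	}
};

int main() {
	TileModel tile{ .hasLastFrame = true, .state = VideoState::Paused };
	std::cout << tile.shown() << " " << tile.background() << '\n';
}
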
Telegram/SourceFiles/calls/group/calls_group_call.cpp
@@ -216,6 +216,7 @@ struct GroupCall::SinkPointer {
 struct VideoParams {
 	std::string endpointId;
 	std::vector<tgcalls::MediaSsrcGroup> ssrcGroups;
+	bool paused = false;
 
 	[[nodiscard]] bool empty() const {
 		return endpointId.empty() || ssrcGroups.empty();
@@ -237,6 +238,9 @@ struct ParticipantVideoParams {
 		return !was;
 	}
 	return now->match([&](const MTPDgroupCallParticipantVideo &data) {
+		if (data.is_paused() != was.paused) {
+			return false;
+		}
 		if (gsl::make_span(data.vendpoint().v)
 			!= gsl::make_span(was.endpointId)) {
 			return false;
@@ -281,6 +285,7 @@ struct ParticipantVideoParams {
 	}
 	auto result = VideoParams();
 	params->match([&](const MTPDgroupCallParticipantVideo &data) {
+		result.paused = data.is_paused();
 		result.endpointId = data.vendpoint().v.toStdString();
 		const auto &list = data.vsource_groups().v;
 		result.ssrcGroups.reserve(list.size());
@@ -313,6 +318,14 @@ const std::string &GetScreenEndpoint(
 	return params ? params->screen.endpointId : EmptyString();
 }
 
+bool IsCameraPaused(const std::shared_ptr<ParticipantVideoParams> &params) {
+	return params && params->camera.paused;
+}
+
+bool IsScreenPaused(const std::shared_ptr<ParticipantVideoParams> &params) {
+	return params && params->screen.paused;
+}
+
 std::shared_ptr<ParticipantVideoParams> ParseVideoParams(
 	const tl::conditional<MTPGroupCallParticipantVideo> &camera,
 	const tl::conditional<MTPGroupCallParticipantVideo> &screen,
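The hunks above add a paused bit to the cached VideoParams, include it in the change check so a pause or resume forces a re-parse, and expose IsCameraPaused()/IsScreenPaused() beside the existing endpoint getters. A simplified model of that change detection (standalone C++, types invented for illustration):

#include <string>
#include <vector>

// Stand-ins for the MTP payload and the cached params above.
struct Payload {
	std::string endpoint;
	std::vector<int> ssrcGroups;
	bool paused = false;
};

struct CachedParams {
	std::string endpointId;
	std::vector<int> ssrcGroups;
	bool paused = false;

	bool empty() const {
		return endpointId.empty() || ssrcGroups.empty();
	}
};

// True when the cache must be re-parsed; mirrors how the diff adds an
// is_paused() comparison next to the endpoint comparison.
bool changed(const CachedParams &was, const Payload &now) {
	return (now.paused != was.paused)
		|| (now.endpoint != was.endpointId)
		|| (now.ssrcGroups != was.ssrcGroups);
}
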
@@ -601,7 +614,7 @@ bool GroupCall::hasVideoWithFrames() const {
 
 rpl::producer<bool> GroupCall::hasVideoWithFramesValue() const {
 	return _videoStreamShownUpdates.events_starting_with(
-		VideoActiveToggle()
+		VideoStateToggle()
 	) | rpl::map([=] {
 		return hasVideoWithFrames();
 	}) | rpl::distinct_until_changed();
@@ -664,22 +677,32 @@ void GroupCall::subscribeToReal(not_null<Data::GroupCall*> real) {
 			return;
 		}
 		const auto &wasCameraEndpoint = data.was
-			? regularEndpoint(data.was->cameraEndpoint())
+			? regularEndpoint(GetCameraEndpoint(data.was->videoParams))
 			: EmptyString();
 		const auto &nowCameraEndpoint = data.now
-			? regularEndpoint(data.now->cameraEndpoint())
+			? regularEndpoint(GetCameraEndpoint(data.now->videoParams))
 			: EmptyString();
+		const auto wasCameraPaused = !wasCameraEndpoint.empty()
+			&& IsCameraPaused(data.was->videoParams);
+		const auto nowCameraPaused = !nowCameraEndpoint.empty()
+			&& IsCameraPaused(data.now->videoParams);
 		if (wasCameraEndpoint != nowCameraEndpoint) {
 			markEndpointActive({
 				VideoEndpointType::Camera,
 				peer,
-				nowCameraEndpoint
-			}, true);
+				nowCameraEndpoint,
+			}, true, nowCameraPaused);
 			markEndpointActive({
 				VideoEndpointType::Camera,
 				peer,
-				wasCameraEndpoint
-			}, false);
+				wasCameraEndpoint,
+			}, false, wasCameraPaused);
+		} else if (wasCameraPaused != nowCameraPaused) {
+			markTrackPaused({
+				VideoEndpointType::Camera,
+				peer,
+				nowCameraEndpoint,
+			}, nowCameraPaused);
 		}
 		const auto &wasScreenEndpoint = data.was
 			? regularEndpoint(data.was->screenEndpoint())
@@ -687,17 +710,27 @@ void GroupCall::subscribeToReal(not_null<Data::GroupCall*> real) {
 		const auto &nowScreenEndpoint = data.now
 			? regularEndpoint(data.now->screenEndpoint())
 			: EmptyString();
+		const auto wasScreenPaused = !wasScreenEndpoint.empty()
+			&& IsScreenPaused(data.was->videoParams);
+		const auto nowScreenPaused = !nowScreenEndpoint.empty()
+			&& IsScreenPaused(data.now->videoParams);
 		if (wasScreenEndpoint != nowScreenEndpoint) {
 			markEndpointActive({
 				VideoEndpointType::Screen,
 				peer,
-				nowScreenEndpoint
-			}, true);
+				nowScreenEndpoint,
+			}, true, nowScreenPaused);
 			markEndpointActive({
 				VideoEndpointType::Screen,
 				peer,
-				wasScreenEndpoint
-			}, false);
+				wasScreenEndpoint,
+			}, false, wasScreenPaused);
+		} else if (wasScreenPaused != nowScreenPaused) {
+			markTrackPaused({
+				VideoEndpointType::Screen,
+				peer,
+				wasScreenEndpoint,
+			}, nowScreenPaused);
 		}
 	}, _lifetime);
 
@@ -725,7 +758,9 @@ void GroupCall::subscribeToReal(not_null<Data::GroupCall*> real) {
 			return;
 		}
 		using Type = VideoEndpointType;
-		if (p->cameraEndpoint().empty() && p->screenEndpoint().empty()) {
+		const auto &params = p->videoParams;
+		if (GetCameraEndpoint(params).empty()
+			&& GetScreenEndpoint(params).empty()) {
 			return;
 		}
 		const auto tryEndpoint = [&](Type type, const std::string &id) {
@@ -739,8 +774,8 @@ void GroupCall::subscribeToReal(not_null<Data::GroupCall*> real) {
 			setVideoEndpointLarge(endpoint);
 			return true;
 		};
-		if (tryEndpoint(Type::Screen, p->screenEndpoint())
-			|| tryEndpoint(Type::Camera, p->cameraEndpoint())) {
+		if (tryEndpoint(Type::Screen, GetScreenEndpoint(params))
+			|| tryEndpoint(Type::Camera, GetCameraEndpoint(params))) {
 			_videoLargeTillTime = now + kFixSpeakingLargeVideoDuration;
 		}
 	}, _lifetime);
@@ -930,7 +965,7 @@ void GroupCall::setScreenEndpoint(std::string endpoint) {
 			VideoEndpointType::Screen,
 			_joinAs,
 			_screenEndpoint
-		}, false);
+		}, false, false);
 	}
 	_screenEndpoint = std::move(endpoint);
 	if (_screenEndpoint.empty()) {
@@ -941,7 +976,7 @@ void GroupCall::setScreenEndpoint(std::string endpoint) {
 			VideoEndpointType::Screen,
 			_joinAs,
 			_screenEndpoint
-		}, true);
+		}, true, false);
 	}
 }
 
@@ -954,7 +989,7 @@ void GroupCall::setCameraEndpoint(std::string endpoint) {
 			VideoEndpointType::Camera,
 			_joinAs,
 			_cameraEndpoint
-		}, false);
+		}, false, false);
 	}
 	_cameraEndpoint = std::move(endpoint);
 	if (_cameraEndpoint.empty()) {
@@ -965,7 +1000,7 @@ void GroupCall::setCameraEndpoint(std::string endpoint) {
 			VideoEndpointType::Camera,
 			_joinAs,
 			_cameraEndpoint
-		}, true);
+		}, true, false);
 	}
 }
 
@@ -987,7 +1022,10 @@ void GroupCall::addVideoOutput(
 	}
 }
 
-void GroupCall::markEndpointActive(VideoEndpoint endpoint, bool active) {
+void GroupCall::markEndpointActive(
+		VideoEndpoint endpoint,
+		bool active,
+		bool paused) {
 	if (!endpoint) {
 		return;
 	}
@@ -996,6 +1034,9 @@ void GroupCall::markEndpointActive(VideoEndpoint endpoint, bool active) {
 		? (i == end(_activeVideoTracks))
 		: (i != end(_activeVideoTracks));
 	if (!changed) {
+		if (active) {
+			markTrackPaused(endpoint, paused);
+		}
 		return;
 	}
 	auto shown = false;
@@ -1004,30 +1045,49 @@ void GroupCall::markEndpointActive(VideoEndpoint endpoint, bool active) {
 			endpoint,
 			VideoTrack{
 				.track = std::make_unique<Webrtc::VideoTrack>(
-					Webrtc::VideoState::Active,
+					(paused
+						? Webrtc::VideoState::Paused
+						: Webrtc::VideoState::Active),
 					_requireARGB32),
 				.peer = endpoint.peer,
 			}).first;
 		const auto track = i->second.track.get();
-		track->renderNextFrame(
-		) | rpl::start_with_next([=] {
-			if (!track->frameSize().isEmpty()) {
-				markTrackShown(endpoint, true);
-			}
-		}, i->second.lifetime);
-		if (!track->frameSize().isEmpty()) {
+		if (!track->frameSize().isEmpty()
+			|| track->state() == Webrtc::VideoState::Paused) {
 			shown = true;
+		} else {
+			auto hasFrame = track->renderNextFrame() | rpl::map([=] {
+				return !track->frameSize().isEmpty();
+			});
+			auto isPaused = track->stateValue(
+			) | rpl::map([=](Webrtc::VideoState state) {
+				return (state == Webrtc::VideoState::Paused);
+			});
+			rpl::merge(
+				std::move(hasFrame),
+				std::move(isPaused)
+			) | rpl::filter([=](bool shouldShow) {
+				return shouldShow;
+			}) | rpl::start_with_next([=] {
+				_activeVideoTracks[endpoint].shownTrackingLifetime.destroy();
+				markTrackShown(endpoint, true);
+			}, i->second.shownTrackingLifetime);
 		}
 		addVideoOutput(i->first.id, { track->sink() });
 	} else {
 		if (_videoEndpointLarge.current() == endpoint) {
 			setVideoEndpointLarge({});
 		}
+		markTrackShown(endpoint, false);
+		markTrackPaused(endpoint, false);
 		_activeVideoTracks.erase(i);
 	}
 	updateRequestedVideoChannelsDelayed();
 	_videoStreamActiveUpdates.fire({ endpoint, active });
-	markTrackShown(endpoint, shown);
+	if (active) {
+		markTrackShown(endpoint, shown);
+		markTrackPaused(endpoint, paused);
+	}
 }
 
 void GroupCall::markTrackShown(const VideoEndpoint &endpoint, bool shown) {
@@ -1046,6 +1106,15 @@ void GroupCall::markTrackShown(const VideoEndpoint &endpoint, bool shown) {
 	}
 }
 
+void GroupCall::markTrackPaused(const VideoEndpoint &endpoint, bool paused) {
+	const auto i = _activeVideoTracks.find(endpoint);
+	Assert(i != end(_activeVideoTracks));
+
+	i->second.track->setState(paused
+		? Webrtc::VideoState::Paused
+		: Webrtc::VideoState::Active);
+}
+
 void GroupCall::rejoin() {
 	rejoin(_joinAs);
 }
@@ -1889,7 +1958,7 @@ void GroupCall::ensureOutgoingVideo() {
 				VideoEndpointType::Camera,
 				_joinAs,
 				_cameraEndpoint
-			}, active);
+			}, active, false);
 			sendSelfUpdate(SendUpdateType::VideoStopped);
 			applyMeInCallLocally();
 		}, _lifetime);
@@ -1932,7 +2001,7 @@ void GroupCall::ensureOutgoingVideo() {
 				VideoEndpointType::Screen,
 				_joinAs,
 				_screenEndpoint
-			}, active);
+			}, active, false);
 			_screenJoinState.nextActionPending = true;
 			checkNextJoinAction();
 		}, _lifetime);
@@ -2310,38 +2379,42 @@ void GroupCall::fillActiveVideoEndpoints() {
 	auto removed = base::flat_set<VideoEndpoint>(
 		begin(endpoints),
 		end(endpoints));
-	const auto feedOne = [&](VideoEndpoint endpoint) {
+	const auto feedOne = [&](VideoEndpoint endpoint, bool paused) {
 		if (endpoint.empty()) {
 			return;
 		} else if (endpoint == large) {
 			largeFound = true;
 		}
-		if (!removed.remove(endpoint)) {
-			markEndpointActive(std::move(endpoint), true);
+		if (removed.remove(endpoint)) {
+			markTrackPaused(endpoint, paused);
+		} else {
+			markEndpointActive(std::move(endpoint), true, paused);
 		}
 	};
 	using Type = VideoEndpointType;
 	for (const auto &participant : participants) {
-		const auto camera = participant.cameraEndpoint();
+		const auto camera = GetCameraEndpoint(participant.videoParams);
 		if (camera != _cameraEndpoint
 			&& camera != _screenEndpoint
 			&& participant.peer != _joinAs) {
-			feedOne({ Type::Camera, participant.peer, camera });
+			const auto paused = IsCameraPaused(participant.videoParams);
+			feedOne({ Type::Camera, participant.peer, camera }, paused);
 		}
-		const auto screen = participant.screenEndpoint();
+		const auto screen = GetScreenEndpoint(participant.videoParams);
 		if (screen != _cameraEndpoint
 			&& screen != _screenEndpoint
 			&& participant.peer != _joinAs) {
-			feedOne({ Type::Screen, participant.peer, screen });
+			const auto paused = IsScreenPaused(participant.videoParams);
+			feedOne({ Type::Screen, participant.peer, screen }, paused);
 		}
 	}
-	feedOne({ Type::Camera, _joinAs, cameraSharingEndpoint() });
-	feedOne({ Type::Screen, _joinAs, screenSharingEndpoint() });
+	feedOne({ Type::Camera, _joinAs, cameraSharingEndpoint() }, false);
+	feedOne({ Type::Screen, _joinAs, screenSharingEndpoint() }, false);
 	if (large && !largeFound) {
 		setVideoEndpointLarge({});
 	}
 	for (const auto &endpoint : removed) {
-		markEndpointActive(endpoint, false);
+		markEndpointActive(endpoint, false, false);
 	}
 	updateRequestedVideoChannels();
 }
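markEndpointActive() now creates tracks directly in the Paused state and counts a tile as shown once it either has a frame or is paused; the rpl::merge pipeline above fires on whichever signal arrives first and then destroys its own tracking lifetime. A rough stand-in for that "first of two signals" pattern without rpl (illustrative only):

#include <functional>
#include <vector>

// Plain-callback model of rpl::merge(hasFrame, isPaused) | rpl::filter.
struct TrackModel {
	bool frameArrived = false;
	bool paused = false;
	std::vector<std::function<void()>> observers;

	void notify() { for (auto &f : observers) f(); }
	void deliverFrame() { frameArrived = true; notify(); }
	void setPaused(bool value) { paused = value; notify(); }
};

// Flips `shown` as soon as either condition holds; in the diff this is
// also the point where shownTrackingLifetime is destroyed.
void trackShown(TrackModel &track, bool &shown) {
	track.observers.push_back([&] {
		if (track.frameArrived || track.paused) {
			shown = true;
		}
	});
}
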
Telegram/SourceFiles/calls/group/calls_group_call.h
@@ -82,6 +82,16 @@ enum class VideoEndpointType {
 };
 
 struct VideoEndpoint {
+	VideoEndpoint() = default;
+	VideoEndpoint(
+		VideoEndpointType type,
+		not_null<PeerData*> peer,
+		std::string id)
+	: type(type)
+	, peer(peer)
+	, id(std::move(id)) {
+	}
+
 	VideoEndpointType type = VideoEndpointType::Camera;
 	PeerData *peer = nullptr;
 	std::string id;
@@ -131,9 +141,9 @@ inline bool operator>=(
 	return !(a < b);
 }
 
-struct VideoActiveToggle {
+struct VideoStateToggle {
 	VideoEndpoint endpoint;
-	bool active = false;
+	bool value = false;
 };
 
 struct VideoQualityRequest {
@@ -150,9 +160,12 @@ struct ParticipantVideoParams;
 
 [[nodiscard]] const std::string &GetCameraEndpoint(
 	const std::shared_ptr<ParticipantVideoParams> &params);
-
 [[nodiscard]] const std::string &GetScreenEndpoint(
 	const std::shared_ptr<ParticipantVideoParams> &params);
+[[nodiscard]] bool IsCameraPaused(
+	const std::shared_ptr<ParticipantVideoParams> &params);
+[[nodiscard]] bool IsScreenPaused(
+	const std::shared_ptr<ParticipantVideoParams> &params);
 
 class GroupCall final : public base::has_weak_ptr {
 public:
@@ -279,11 +292,11 @@ public:
 		return _levelUpdates.events();
 	}
 	[[nodiscard]] auto videoStreamActiveUpdates() const
-	-> rpl::producer<VideoActiveToggle> {
+	-> rpl::producer<VideoStateToggle> {
 		return _videoStreamActiveUpdates.events();
 	}
 	[[nodiscard]] auto videoStreamShownUpdates() const
-	-> rpl::producer<VideoActiveToggle> {
+	-> rpl::producer<VideoStateToggle> {
 		return _videoStreamShownUpdates.events();
 	}
 	void requestVideoQuality(
@@ -310,7 +323,7 @@ public:
 	struct VideoTrack {
 		std::unique_ptr<Webrtc::VideoTrack> track;
 		PeerData *peer = nullptr;
-		rpl::lifetime lifetime;
+		rpl::lifetime shownTrackingLifetime;
 		Group::VideoQuality quality = Group::VideoQuality();
 
 		[[nodiscard]] explicit operator bool() const {
@@ -505,7 +518,11 @@ private:
 	void addVideoOutput(const std::string &endpoint, SinkPointer sink);
 	void setVideoEndpointLarge(VideoEndpoint endpoint);
 
-	void markEndpointActive(VideoEndpoint endpoint, bool active);
+	void markEndpointActive(
+		VideoEndpoint endpoint,
+		bool active,
+		bool paused);
+	void markTrackPaused(const VideoEndpoint &endpoint, bool paused);
 	void markTrackShown(const VideoEndpoint &endpoint, bool shown);
 
 	[[nodiscard]] MTPInputGroupCall inputCall() const;
@@ -576,8 +593,9 @@ private:
 	bool _requireARGB32 = true;
 
 	rpl::event_stream<LevelUpdate> _levelUpdates;
-	rpl::event_stream<VideoActiveToggle> _videoStreamActiveUpdates;
-	rpl::event_stream<VideoActiveToggle> _videoStreamShownUpdates;
+	rpl::event_stream<VideoStateToggle> _videoStreamActiveUpdates;
+	rpl::event_stream<VideoStateToggle> _videoStreamPausedUpdates;
+	rpl::event_stream<VideoStateToggle> _videoStreamShownUpdates;
 	base::flat_map<VideoEndpoint, VideoTrack> _activeVideoTracks;
 	base::flat_set<VideoEndpoint> _shownVideoTracks;
 	rpl::variable<VideoEndpoint> _videoEndpointLarge;
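Renaming VideoActiveToggle to VideoStateToggle with a generic `value` lets one event type back the active, shown, and newly added paused update streams alike. Roughly (sketch, names simplified):

struct Endpoint { int id = 0; };

// Consumers interpret `value` according to the stream they subscribed
// to: became-active, became-shown, or became-paused.
struct StateToggle {
	Endpoint endpoint;
	bool value = false;
};

inline bool becameActive(const StateToggle &update) {
	return update.value;
}
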
Telegram/SourceFiles/calls/group/calls_group_members.cpp
@@ -132,11 +132,11 @@ private:
 	[[nodiscard]] bool allRowsAboveMoreImportantThanHand(
 		not_null<Row*> row,
 		uint64 raiseHandRating) const;
-	const Data::GroupCallParticipant *findParticipant(
+	[[nodiscard]] const Data::GroupCallParticipant *findParticipant(
 		const std::string &endpoint) const;
-	const std::string &computeScreenEndpoint(
+	[[nodiscard]] const std::string &computeScreenEndpoint(
 		not_null<const Data::GroupCallParticipant*> participant) const;
-	const std::string &computeCameraEndpoint(
+	[[nodiscard]] const std::string &computeCameraEndpoint(
 		not_null<const Data::GroupCallParticipant*> participant) const;
 	void showRowMenu(not_null<PeerListRow*> row, bool highlightRow);
 
@@ -288,11 +288,11 @@ void Members::Controller::setupListChangeViewers() {
 	}, _lifetime);
 
 	_call->videoStreamShownUpdates(
-	) | rpl::filter([=](const VideoActiveToggle &update) {
+	) | rpl::filter([=](const VideoStateToggle &update) {
 		const auto &large = _call->videoEndpointLarge();
 		return large && (update.endpoint != large);
-	}) | rpl::start_with_next([=](const VideoActiveToggle &update) {
-		if (update.active) {
+	}) | rpl::start_with_next([=](const VideoStateToggle &update) {
+		if (update.value) {
 			hideRowWithVideo(update.endpoint);
 		} else {
 			showRowWithVideo(update.endpoint);
@@ -403,8 +403,8 @@ void Members::Controller::subscribeToChanges(not_null<Data::GroupCall*> real) {
 		toggleVideoEndpointActive(endpoint, true);
 	}
 	_call->videoStreamActiveUpdates(
-	) | rpl::start_with_next([=](const VideoActiveToggle &update) {
-		toggleVideoEndpointActive(update.endpoint, update.active);
+	) | rpl::start_with_next([=](const VideoStateToggle &update) {
+		toggleVideoEndpointActive(update.endpoint, update.value);
 	}, _lifetime);
 
 	if (_prepared) {
@@ -1206,12 +1206,12 @@ base::unique_qptr<Ui::PopupMenu> Members::Controller::createRowContextMenu(
 	const auto camera = VideoEndpoint{
 		VideoEndpointType::Camera,
 		participantPeer,
-		computeCameraEndpoint(participant)
+		computeCameraEndpoint(participant),
 	};
 	const auto screen = VideoEndpoint{
 		VideoEndpointType::Screen,
 		participantPeer,
-		computeScreenEndpoint(participant)
+		computeScreenEndpoint(participant),
 	};
 	if (shown.contains(camera)) {
 		if (pinned && large == camera) {
Telegram/SourceFiles/calls/group/calls_group_panel.cpp
@@ -843,8 +843,8 @@ void Panel::setupVideo(not_null<Viewport*> viewport) {
 		setupTile(endpoint, track);
 	}
 	_call->videoStreamActiveUpdates(
-	) | rpl::start_with_next([=](const VideoActiveToggle &update) {
-		if (update.active) {
+	) | rpl::start_with_next([=](const VideoStateToggle &update) {
+		if (update.value) {
 			// Add async (=> the participant row is definitely in Members).
 			const auto endpoint = update.endpoint;
 			crl::on_main(viewport->widget(), [=] {
Telegram/SourceFiles/calls/group/calls_group_viewport.cpp
@@ -239,6 +239,11 @@ void Viewport::add(
 	}) | rpl::start_with_next([=] {
 		updateTilesGeometry();
 	}, _tiles.back()->lifetime());
+
+	_tiles.back()->track()->stateValue(
+	) | rpl::start_with_next([=] {
+		updateTilesGeometry();
+	}, _tiles.back()->lifetime());
 }
 
 void Viewport::remove(const VideoEndpoint &endpoint) {
@@ -434,7 +439,7 @@ Viewport::Layout Viewport::countWide(int outerWidth, int outerHeight) const {
 	sizes.reserve(_tiles.size());
 	for (const auto &tile : _tiles) {
 		const auto video = tile.get();
-		const auto size = video->trackSize();
+		const auto size = video->trackOrUserpicSize();
 		if (!size.isEmpty()) {
 			sizes.push_back(Geometry{ video, size });
 		}
@@ -529,7 +534,7 @@ void Viewport::showLarge(const VideoEndpoint &endpoint) {
 		startLargeChangeAnimation();
 	}
 
-	Ensures(!_large || !_large->trackSize().isEmpty());
+	Ensures(!_large || !_large->trackOrUserpicSize().isEmpty());
 }
 
 void Viewport::updateTilesGeometry() {
@@ -564,7 +569,7 @@ void Viewport::refreshHasTwoOrMore() {
 	auto hasTwoOrMore = false;
 	auto oneFound = false;
 	for (const auto &tile : _tiles) {
-		if (!tile->trackSize().isEmpty()) {
+		if (!tile->trackOrUserpicSize().isEmpty()) {
 			if (oneFound) {
 				hasTwoOrMore = true;
 				break;
@@ -598,7 +603,7 @@ void Viewport::updateTilesGeometryWide(int outerWidth, int outerHeight) {
 	}
 
 	_startTilesLayout = countWide(outerWidth, outerHeight);
-	if (_large && !_large->trackSize().isEmpty()) {
+	if (_large && !_large->trackOrUserpicSize().isEmpty()) {
 		for (const auto &geometry : _startTilesLayout.list) {
 			if (geometry.tile == _large) {
 				setTileGeometry(_large, { 0, 0, outerWidth, outerHeight });
@@ -629,7 +634,7 @@ void Viewport::updateTilesGeometryNarrow(int outerWidth) {
 	sizes.reserve(_tiles.size());
 	for (const auto &tile : _tiles) {
 		const auto video = tile.get();
-		const auto size = video->trackSize();
+		const auto size = video->trackOrUserpicSize();
 		if (size.isEmpty()) {
 			video->hide();
 		} else {
@@ -691,7 +696,7 @@ void Viewport::updateTilesGeometryColumn(int outerWidth) {
 	const auto y = -_scrollTop;
 	auto top = 0;
 	const auto layoutNext = [&](not_null<VideoTile*> tile) {
-		const auto size = tile->trackSize();
+		const auto size = tile->trackOrUserpicSize();
 		const auto shown = !size.isEmpty() && _large && tile != _large;
 		const auto height = shown
 			? st::groupCallNarrowVideoHeight
@@ -707,7 +712,7 @@ void Viewport::updateTilesGeometryColumn(int outerWidth) {
 		for (const auto &tile : _tiles) {
 			if (tile.get() != _large && tile->row()->peer() == topPeer) {
 				return (tile.get() != _tiles.front().get())
-					&& !tile->trackSize().isEmpty();
+					&& !tile->trackOrUserpicSize().isEmpty();
 			}
 		}
 		return false;
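Layout now measures tiles via trackOrUserpicSize() and re-runs whenever a track's state changes, since pausing can give a previously empty tile a displayable userpic size. A compact model of that measurement fallback (standalone C++; the 90px constant comes from kPausedVideoSize in the tile implementation further down):

struct Size {
	int w = 0;
	int h = 0;
	bool empty() const { return w <= 0 || h <= 0; }
};

struct TileMeasure {
	Size frame;          // live video frame size, may be empty
	bool paused = false; // Webrtc::VideoState::Paused in the real code

	Size layoutSize() const {
		if (!frame.empty()) {
			return frame;
		}
		// A paused tile still occupies a slot sized for the userpic.
		return paused ? Size{ 90, 90 } : Size{};
	}
};
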
Telegram/SourceFiles/calls/group/calls_group_viewport_opengl.cpp
@@ -66,13 +66,14 @@ const int diameter = 2 * radius + 1;
 	};
 }
 
-// Depends on FragmetSampleTexture().
+// Depends on FragmentSampleTexture().
 [[nodiscard]] ShaderPart FragmentFrameColor() {
 	const auto blur = FragmentBlurTexture(true, 'b');
 	return {
 		.header = R"(
 uniform vec4 frameBg;
 uniform vec3 shadow; // fullHeight, shown, maxOpacity
+uniform float paused; // 0. <-> 1.
 const float backgroundOpacity = )" + QString::number(kBlurOpacity) + R"(;
 float insideTexture() {
 	vec2 textureHalf = vec2(0.5, 0.5);
@@ -89,14 +90,14 @@ vec4 background() {
 }
 )",
 		.body = R"(
-	float inside = insideTexture();
+	float inside = insideTexture() * (1. - paused);
 	result = result * inside
 		+ (1. - inside) * (backgroundOpacity * background()
 			+ (1. - backgroundOpacity) * frameBg);
 
 	float shadowCoord = gl_FragCoord.y - roundRect.y;
 	float shadowValue = max(1. - (shadowCoord / shadow.x), 0.);
-	float shadowShown = shadowValue * shadow.y * shadow.z;
+	float shadowShown = max(shadowValue * shadow.y, paused) * shadow.z;
 	result = vec4(result.rgb * (1. - shadowShown), result.a);
 )",
 	};
@@ -342,6 +343,30 @@ void Viewport::RendererGL::fillBackground(QOpenGLFunctions &f) {
 	_background.fill(f, region, _viewport, _factor, st::groupCallBg);
 }
 
+void Viewport::RendererGL::validateUserpicFrame(
+		not_null<VideoTile*> tile,
+		TileData &tileData) {
+	if (!_userpicFrame) {
+		tileData.userpicFrame = QImage();
+		return;
+	} else if (!tileData.userpicFrame.isNull()) {
+		return;
+	}
+	tileData.userpicFrame = QImage(
+		tile->trackOrUserpicSize(),
+		QImage::Format_ARGB32_Premultiplied);
+	tileData.userpicFrame.fill(Qt::black);
+	{
+		auto p = Painter(&tileData.userpicFrame);
+		tile->row()->peer()->paintUserpicSquare(
+			p,
+			tile->row()->ensureUserpicView(),
+			0,
+			0,
+			tileData.userpicFrame.width());
+	}
+}
+
 void Viewport::RendererGL::paintTile(
 	QOpenGLFunctions &f,
 	GLuint defaultFramebufferObject,
@@ -349,12 +374,18 @@ void Viewport::RendererGL::paintTile(
 		TileData &tileData) {
 	const auto track = tile->track();
 	const auto data = track->frameWithInfo(false);
-	if (data.format == Webrtc::FrameFormat::None) {
-		return;
-	}
-	Assert(!data.yuv420->size.isEmpty());
+	_userpicFrame = (data.format == Webrtc::FrameFormat::None);
+	validateUserpicFrame(tile, tileData);
+	const auto frameSize = _userpicFrame
+		? tileData.userpicFrame.size()
+		: data.yuv420->size;
+	const auto frameRotation = _userpicFrame
+		? 0
+		: data.rotation;
+	Assert(!frameSize.isEmpty());
 
-	_rgbaFrame = (data.format == Webrtc::FrameFormat::ARGB32);
+	_rgbaFrame = (data.format == Webrtc::FrameFormat::ARGB32)
+		|| _userpicFrame;
 	const auto geometry = tile->geometry();
 	const auto x = geometry.x();
 	const auto y = geometry.y();
@@ -368,16 +399,18 @@ void Viewport::RendererGL::paintTile(
 	const auto style = row->computeIconState(MembersRowStyle::Video);
 
 	validateOutlineAnimation(tile, tileData);
+	validatePausedAnimation(tile, tileData);
 	const auto outline = tileData.outlined.value(tileData.outline ? 1. : 0.);
+	const auto paused = tileData.paused.value(tileData.pause ? 1. : 0.);
 
 	ensureButtonsImage();
 
 	// Frame.
 	const auto unscaled = Media::View::FlipSizeByRotation(
-		data.yuv420->size,
-		data.rotation);
+		frameSize,
+		frameRotation);
 	const auto tileSize = geometry.size();
-	const auto swap = (((data.rotation / 90) % 2) == 1);
+	const auto swap = (((frameRotation / 90) % 2) == 1);
 	const auto expand = isExpanded(tile, unscaled, tileSize);
 	const auto animation = tile->animation();
 	const auto expandRatio = (animation.ratio >= 0.)
@@ -386,7 +419,7 @@ void Viewport::RendererGL::paintTile(
 		? 1.
 		: 0.;
 	auto texCoords = CountTexCoords(unscaled, tileSize, expandRatio, swap);
-	auto blurTexCoords = (expandRatio == 1.)
+	auto blurTexCoords = (expandRatio == 1. && !swap)
 		? texCoords
 		: CountTexCoords(unscaled, tileSize, 1.);
 	const auto rect = transformRect(geometry);
@@ -396,7 +429,7 @@ void Viewport::RendererGL::paintTile(
 		{ { 1.f, 0.f } },
 		{ { 0.f, 0.f } },
 	} };
-	if (const auto shift = (data.rotation / 90); shift > 0) {
+	if (const auto shift = (frameRotation / 90); shift > 0) {
 		std::rotate(
 			toBlurTexCoords.begin(),
 			toBlurTexCoords.begin() + shift,
@@ -572,12 +605,12 @@ void Viewport::RendererGL::paintTile(
 	const auto uniformViewport = QSizeF(_viewport * _factor);
 
 	program->setUniformValue("viewport", uniformViewport);
-	program->setUniformValue("frameBg", Uniform(st::groupCallBg->c));
+	program->setUniformValue("frameBg", st::groupCallBg->c);
 	program->setUniformValue("radiusOutline", QVector2D(
 		GLfloat(st::roundRadiusLarge * _factor),
 		(outline > 0) ? (st::groupCallOutline * _factor) : 0.f));
 	program->setUniformValue("roundRect", Uniform(rect));
-	program->setUniformValue("roundBg", Uniform(st::groupCallBg->c));
+	program->setUniformValue("roundBg", st::groupCallBg->c);
 	program->setUniformValue("outlineFg", QVector4D(
 		st::groupCallMemberActiveIcon->c.redF(),
 		st::groupCallMemberActiveIcon->c.greenF(),
@@ -589,6 +622,7 @@ void Viewport::RendererGL::paintTile(
 	program->setUniformValue(
 		"shadow",
 		QVector3D(shadowHeight, shown, shadowAlpha));
+	program->setUniformValue("paused", GLfloat(paused));
 
 	f.glActiveTexture(_rgbaFrame ? GL_TEXTURE1 : GL_TEXTURE3);
 	tileData.textures.bind(
@@ -634,6 +668,10 @@ void Viewport::RendererGL::paintTile(
 		FillTexturedRectangle(f, &*_imageProgram, 18);
 	}
 
+	if (paused > 0.) {
+
+	}
+
 	if (nameShift == fullNameShift) {
 		return;
 	}
@@ -717,15 +755,18 @@ void Viewport::RendererGL::bindFrame(
 		const Webrtc::FrameWithInfo &data,
 		TileData &tileData,
 		Program &program) {
-	const auto upload = (tileData.trackIndex != data.index);
-	tileData.trackIndex = data.index;
+	const auto imageIndex = _userpicFrame ? 0 : (data.index + 1);
+	const auto upload = (tileData.trackIndex != imageIndex);
+	tileData.trackIndex = imageIndex;
 	if (_rgbaFrame) {
 		ensureARGB32Program();
 		f.glUseProgram(program.argb32->programId());
 		f.glActiveTexture(GL_TEXTURE0);
 		tileData.textures.bind(f, tileData.textureIndex * 5 + 0);
 		if (upload) {
-			const auto &image = data.original;
+			const auto &image = _userpicFrame
+				? tileData.userpicFrame
+				: data.original;
 			const auto stride = image.bytesPerLine() / 4;
 			const auto data = image.constBits();
 			uploadTexture(
@@ -1007,6 +1048,8 @@ void Viewport::RendererGL::validateDatas() {
 		}
 		const auto id = quintptr(tiles[i]->track().get());
 		const auto peer = tiles[i]->row()->peer();
+		const auto paused = (tiles[i]->track()->state()
+			== Webrtc::VideoState::Paused);
 		auto index = int(_tileData.size());
 		maybeStaleAfter = ranges::find(
 			maybeStaleAfter,
@@ -1018,12 +1061,15 @@ void Viewport::RendererGL::validateDatas() {
 			maybeStaleAfter->id = id;
 			maybeStaleAfter->peer = peer;
 			maybeStaleAfter->stale = false;
+			maybeStaleAfter->pause = paused;
+			maybeStaleAfter->paused.stop();
 			request.updating = true;
 		} else {
 			// This invalidates maybeStale*, but they're already equal.
 			_tileData.push_back({
 				.id = id,
 				.peer = peer,
+				.pause = paused,
 			});
 		}
 		_tileData[index].nameVersion = peer->nameVersion;
@@ -1114,4 +1160,22 @@ void Viewport::RendererGL::validateOutlineAnimation(
 		st::fadeWrapDuration);
 }
 
+void Viewport::RendererGL::validatePausedAnimation(
+		not_null<VideoTile*> tile,
+		TileData &data) {
+	const auto paused = (_userpicFrame
+		&& tile->track()->frameSize().isEmpty())
+		|| (tile->track()->state() == Webrtc::VideoState::Paused);
+	if (data.pause == paused) {
+		return;
+	}
+	data.pause = paused;
+	data.paused.start(
+		[=] { _owner->widget()->update(); },
+		paused ? 0. : 1.,
+		paused ? 1. : 0.,
+		st::fadeWrapDuration);
+}
+
 } // namespace Calls::Group
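In the GL renderer the pause state drives an animated 0..1 `paused` uniform: the fragment shader fades the live texture toward the blurred background and forces the dimming shadow on. The same arithmetic restated as plain C++ for clarity (a sketch of the shader math above, not code from the repository):

#include <algorithm>

struct Pixel { float r, g, b; };

// `inside` is the insideTexture() factor, `shadowValue`/`shadowMax`
// correspond to shadow.y and shadow.z in the shader.
Pixel shade(Pixel video, Pixel background, float inside, float paused,
		float shadowValue, float shadowMax) {
	const auto visible = inside * (1.f - paused);
	Pixel result{
		video.r * visible + background.r * (1.f - visible),
		video.g * visible + background.g * (1.f - visible),
		video.b * visible + background.b * (1.f - visible),
	};
	const auto shadow = std::max(shadowValue, paused) * shadowMax;
	result.r *= (1.f - shadow);
	result.g *= (1.f - shadow);
	result.b *= (1.f - shadow);
	return result;
}
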
Telegram/SourceFiles/calls/group/calls_group_viewport_opengl.h
@@ -53,6 +53,8 @@ private:
 		Ui::GL::Textures<5> textures;
 		Ui::GL::Framebuffers<2> framebuffers;
 		Ui::Animations::Simple outlined;
+		Ui::Animations::Simple paused;
+		QImage userpicFrame;
 		QRect nameRect;
 		int nameVersion = 0;
 		mutable int textureIndex = 0;
@@ -62,6 +64,7 @@ private:
 		mutable QSize textureChromaSize;
 		mutable QSize textureBlurSize;
 		bool stale = false;
+		bool pause = false;
 		bool outline = false;
 	};
 	struct Program {
@@ -102,6 +105,12 @@ private:
 	void validateOutlineAnimation(
 		not_null<VideoTile*> tile,
 		TileData &data);
+	void validatePausedAnimation(
+		not_null<VideoTile*> tile,
+		TileData &data);
+	void validateUserpicFrame(
+		not_null<VideoTile*> tile,
+		TileData &tileData);
 
 	void uploadTexture(
 		QOpenGLFunctions &f,
@@ -126,6 +135,7 @@ private:
 	GLfloat _factor = 1.;
 	QSize _viewport;
 	bool _rgbaFrame = false;
+	bool _userpicFrame;
 	Ui::GL::BackgroundFiller _background;
 	std::optional<QOpenGLBuffer> _frameBuffer;
 	Program _downscaleProgram;
@@ -148,6 +158,7 @@ private:
 
 	Ui::CrossLineAnimation _pinIcon;
 	Ui::CrossLineAnimation _muteIcon;
 
 	Ui::RoundRect _pinBackground;
 
 	rpl::lifetime _lifetime;
Telegram/SourceFiles/calls/group/calls_group_viewport_raster.cpp
@@ -10,6 +10,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "calls/group/calls_group_common.h"
 #include "calls/group/calls_group_viewport_tile.h"
 #include "calls/group/calls_group_members_row.h"
+#include "data/data_peer.h"
 #include "media/view/media_view_pip.h"
 #include "webrtc/webrtc_video_track.h"
 #include "lang/lang_keys.h"
@@ -17,6 +18,11 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "styles/palette.h"
 
 namespace Calls::Group {
+namespace {
+
+constexpr auto kBlurRadius = 15;
+
+} // namespace
 
 Viewport::RendererSW::RendererSW(not_null<Viewport*> owner)
 : _owner(owner)
@@ -35,12 +41,52 @@ void Viewport::RendererSW::paintFallback(
 	auto bg = clip;
 	auto hq = PainterHighQualityEnabler(p);
 	const auto bounding = clip.boundingRect();
+	for (auto &[tile, tileData] : _tileData) {
+		tileData.stale = true;
+	}
 	for (const auto &tile : _owner->_tiles) {
+		if (!tile->shown()) {
+			continue;
+		}
 		paintTile(p, tile.get(), bounding, bg);
 	}
 	for (const auto rect : bg) {
 		p.fillRect(rect, st::groupCallBg);
 	}
+	for (auto i = _tileData.begin(); i != _tileData.end();) {
+		if (i->second.stale) {
+			i = _tileData.erase(i);
+		} else {
+			++i;
+		}
+	}
 }
 
+void Viewport::RendererSW::validateUserpicFrame(
+		not_null<VideoTile*> tile,
+		TileData &data) {
+	if (!_userpicFrame) {
+		data.userpicFrame = QImage();
+		return;
+	} else if (!data.userpicFrame.isNull()) {
+		return;
+	}
+	auto userpic = QImage(
+		tile->trackOrUserpicSize(),
+		QImage::Format_ARGB32_Premultiplied);
+	userpic.fill(Qt::black);
+	{
+		auto p = Painter(&userpic);
+		tile->row()->peer()->paintUserpicSquare(
+			p,
+			tile->row()->ensureUserpicView(),
+			0,
+			0,
+			userpic.width());
+	}
+	data.userpicFrame = Images::BlurLargeImage(
+		std::move(userpic),
+		kBlurRadius);
+}
+
 void Viewport::RendererSW::paintTile(
@@ -50,11 +96,27 @@ void Viewport::RendererSW::paintTile(
 	QRegion &bg) {
 	const auto track = tile->track();
 	const auto data = track->frameWithInfo(true);
-	const auto &image = data.original;
-	const auto rotation = data.rotation;
-	if (image.isNull() || !tile->shown()) {
-		return;
+	auto &tileData = _tileData[tile];
+	tileData.stale = false;
+	_userpicFrame = (data.format == Webrtc::FrameFormat::None);
+	_pausedFrame = (track->state() == Webrtc::VideoState::Paused);
+	validateUserpicFrame(tile, tileData);
+	if (_userpicFrame || !_pausedFrame) {
+		tileData.blurredFrame = QImage();
+	} else if (tileData.blurredFrame.isNull()) {
+		tileData.blurredFrame = Images::BlurLargeImage(
+			data.original.scaled(
+				VideoTile::PausedVideoSize(),
+				Qt::KeepAspectRatio),
+			kBlurRadius);
 	}
+	const auto &image = _userpicFrame
+		? tileData.userpicFrame
+		: _pausedFrame
+		? tileData.blurredFrame
+		: data.original;
+	const auto frameRotation = _userpicFrame ? 0 : data.rotation;
+	Assert(!image.isNull());
 
 	const auto fill = [&](QRect rect) {
 		const auto intersected = rect.intersected(clip);
@@ -72,22 +134,22 @@ void Viewport::RendererSW::paintTile(
 	const auto height = geometry.height();
 	const auto scaled = FlipSizeByRotation(
 		image.size(),
-		rotation
+		frameRotation
 	).scaled(QSize(width, height), Qt::KeepAspectRatio);
 	const auto left = (width - scaled.width()) / 2;
 	const auto top = (height - scaled.height()) / 2;
 	const auto target = QRect(QPoint(x + left, y + top), scaled);
-	if (UsePainterRotation(rotation, false)) {
-		if (rotation) {
+	if (UsePainterRotation(frameRotation, false)) {
+		if (frameRotation) {
 			p.save();
-			p.rotate(rotation);
+			p.rotate(frameRotation);
 		}
-		p.drawImage(RotatedRect(target, rotation), image);
-		if (rotation) {
+		p.drawImage(RotatedRect(target, frameRotation), image);
+		if (frameRotation) {
 			p.restore();
 		}
-	} else if (rotation) {
-		p.drawImage(target, RotateFrameImage(image, rotation));
+	} else if (frameRotation) {
+		p.drawImage(target, RotateFrameImage(image, frameRotation));
 	} else {
 		p.drawImage(target, image);
 	}
@@ -168,6 +230,10 @@ void Viewport::RendererSW::paintTileControls(
 			&_pinBackground);
 	}
 
+	if (_pausedFrame) {
+		p.fillRect(x, y, width, height, QColor(0, 0, 0, kShadowMaxAlpha));
+	}
+
 	const auto shown = _owner->_controlsShownRatio;
 	if (shown == 0.) {
 		return;
@@ -191,14 +257,16 @@ void Viewport::RendererSW::paintTileControls(
 		return;
 	}
 	const auto factor = style::DevicePixelRatio();
-	p.drawImage(
-		shadowFill,
-		_shadow,
-		QRect(
-			0,
-			(shadowFill.y() - shadowRect.y()) * factor,
-			_shadow.width(),
-			shadowFill.height() * factor));
+	if (!_pausedFrame) {
+		p.drawImage(
+			shadowFill,
+			_shadow,
+			QRect(
+				0,
+				(shadowFill.y() - shadowRect.y()) * factor,
+				_shadow.width(),
+				shadowFill.height() * factor));
+	}
	const auto row = tile->row();
 	row->lazyInitialize(st::groupCallMembersListItem);
 
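The software renderer keeps per-tile caches: a blurred userpic frame plus a blurred copy of the last video frame, built lazily when a tile pauses, and a stale flag swept after each paint so caches for removed tiles are freed. A condensed model of that lifecycle (standalone C++; a std::string stands in for the cached QImage):

#include <iterator>
#include <map>
#include <string>

struct TileCache {
	std::string blurred;
	bool stale = true;
};

struct RendererModel {
	std::map<int, TileCache> cache;

	void beginPaint() {
		for (auto &[tile, entry] : cache) {
			entry.stale = true; // assume gone until painted this pass
		}
	}
	void paintTile(int tile, const std::string &frame, bool paused) {
		auto &entry = cache[tile];
		entry.stale = false;
		if (!paused) {
			entry.blurred.clear(); // drop the cache while video plays
		} else if (entry.blurred.empty()) {
			entry.blurred = "blur(" + frame + ")"; // built once per pause
		}
	}
	void endPaint() {
		for (auto i = cache.begin(); i != cache.end();) {
			i = i->second.stale ? cache.erase(i) : std::next(i);
		}
	}
};
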
Telegram/SourceFiles/calls/group/calls_group_viewport_raster.h
@@ -25,6 +25,11 @@ public:
 		Ui::GL::Backend backend) override;
 
 private:
+	struct TileData {
+		QImage userpicFrame;
+		QImage blurredFrame;
+		bool stale = false;
+	};
 	void paintTile(
 		Painter &p,
 		not_null<VideoTile*> tile,
@@ -44,10 +49,16 @@ private:
 		int width,
 		int height,
 		not_null<VideoTile*> tile);
+	void validateUserpicFrame(
+		not_null<VideoTile*> tile,
+		TileData &data);
 
 	const not_null<Viewport*> _owner;
 
 	QImage _shadow;
+	bool _userpicFrame = false;
+	bool _pausedFrame = false;
+	base::flat_map<not_null<VideoTile*>, TileData> _tileData;
 	Ui::CrossLineAnimation _pinIcon;
 	Ui::RoundRect _pinBackground;
 
Telegram/SourceFiles/calls/group/calls_group_viewport_tile.cpp
@@ -16,6 +16,11 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include <QtGui/QOpenGLFunctions>
 
 namespace Calls::Group {
+namespace {
+
+constexpr auto kPausedVideoSize = 90;
+
+} // namespace
 
 Viewport::VideoTile::VideoTile(
 	const VideoEndpoint &endpoint,
@@ -54,6 +59,20 @@ int Viewport::VideoTile::topControlsSlide() const {
 		_topControlsShownAnimation.value(_topControlsShown ? 1. : 0.));
 }
 
+QSize Viewport::VideoTile::PausedVideoSize() {
+	return QSize(kPausedVideoSize, kPausedVideoSize);
+}
+
+QSize Viewport::VideoTile::trackOrUserpicSize() const {
+	if (const auto size = trackSize(); !size.isEmpty()) {
+		return size;
+	} else if (_userpicSize.isEmpty()
+		&& _track.track->state() == Webrtc::VideoState::Paused) {
+		_userpicSize = PausedVideoSize();
+	}
+	return _userpicSize;
+}
+
 bool Viewport::VideoTile::screencast() const {
 	return (_endpoint.type == VideoEndpointType::Screen);
 }
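Note that trackOrUserpicSize() latches: once a paused track reports no frames, the tile adopts the fixed 90x90 placeholder size and keeps it, and the member is mutable so the const getter can cache it. In miniature:

struct LatchedSize {
	mutable int side = 0; // mirrors the mutable _userpicSize member

	int measure(int frameSide, bool paused) const {
		if (frameSide > 0) {
			return frameSide;
		} else if (side == 0 && paused) {
			side = 90; // kPausedVideoSize in the real code
		}
		return side;
	}
};
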
Telegram/SourceFiles/calls/group/calls_group_viewport_tile.h
@@ -60,6 +60,8 @@ public:
 	[[nodiscard]] rpl::producer<QSize> trackSizeValue() const {
 		return _trackSize.value();
 	}
+	[[nodiscard]] QSize trackOrUserpicSize() const;
+	[[nodiscard]] static QSize PausedVideoSize();
 
 	[[nodiscard]] bool screencast() const;
 	void setGeometry(
@@ -104,6 +106,7 @@ private:
 	QRect _geometry;
 	TileAnimation _animation;
 	rpl::variable<QSize> _trackSize;
+	mutable QSize _userpicSize;
 	QRect _pinOuter;
 	QRect _pinInner;
 	QRect _backOuter;
Telegram/SourceFiles/data/data_group_call.cpp
@@ -48,6 +48,14 @@ const std::string &GroupCallParticipant::screenEndpoint() const {
 	return GetScreenEndpoint(videoParams);
 }
 
+bool GroupCallParticipant::cameraPaused() const {
+	return IsCameraPaused(videoParams);
+}
+
+bool GroupCallParticipant::screenPaused() const {
+	return IsScreenPaused(videoParams);
+}
+
 GroupCall::GroupCall(
 	not_null<PeerData*> peer,
 	uint64 id,
@@ -231,8 +239,8 @@ const GroupCallParticipant *GroupCall::participantByEndpoint(
 		return nullptr;
 	}
 	for (const auto &participant : _participants) {
-		if (participant.cameraEndpoint() == endpoint
-			|| participant.screenEndpoint() == endpoint) {
+		if (GetCameraEndpoint(participant.videoParams) == endpoint
+			|| GetScreenEndpoint(participant.videoParams) == endpoint) {
 			return &participant;
 		}
 	}
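The participant-level helpers simply forward to the shared videoParams, mirroring cameraEndpoint()/screenEndpoint(). A trimmed-down equivalent (sketch):

#include <memory>

struct VideoParamsModel {
	bool cameraPaused = false;
	bool screenPaused = false;
};

struct ParticipantModel {
	std::shared_ptr<VideoParamsModel> videoParams;

	// Null params simply read as "not paused".
	bool cameraPaused() const {
		return videoParams && videoParams->cameraPaused;
	}
	bool screenPaused() const {
		return videoParams && videoParams->screenPaused;
	}
};
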
Telegram/SourceFiles/data/data_group_call.h
@@ -42,6 +42,8 @@ struct GroupCallParticipant {
 
 	[[nodiscard]] const std::string &cameraEndpoint() const;
 	[[nodiscard]] const std::string &screenEndpoint() const;
+	[[nodiscard]] bool cameraPaused() const;
+	[[nodiscard]] bool screenPaused() const;
 };
 
 class GroupCall final {
Telegram/SourceFiles/media/view/media_view_overlay_opengl.cpp
@@ -279,10 +279,10 @@ void OverlayWidget::RendererGL::paintTransformedStaticContent(
 	if (fillTransparentBackground) {
 		program->setUniformValue(
 			"transparentBg",
-			Uniform(st::mediaviewTransparentBg->c));
+			st::mediaviewTransparentBg->c);
 		program->setUniformValue(
 			"transparentFg",
-			Uniform(st::mediaviewTransparentFg->c));
+			st::mediaviewTransparentFg->c);
 		program->setUniformValue(
 			"transparentSize",
 			st::transparentPlaceholderSize * _factor);
Telegram/ThirdParty/tgcalls (submodule)
@@ -1 +1 @@
-Subproject commit b1d5ed4ab503635f9e0f9ee5d2a34e2975042014
+Subproject commit 35236988b7bc489b7683019db89636d4030db1ce
Telegram/lib_ui (submodule)
@@ -1 +1 @@
-Subproject commit 2873d4990f94d10b627f1880344c9357e86ff864
+Subproject commit 539b9b51c730900ce724d2e329f3a877f2fcba30