Make GIFs and videos with captions larger.

John Preston 2022-09-05 19:03:52 +04:00
parent 9ef2f370ac
commit d6ba092697
21 changed files with 410 additions and 253 deletions

View file

@@ -45,6 +45,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "platform/platform_specific.h"
 #include "base/platform/base_platform_info.h"
 #include "base/power_save_blocker.h"
+#include "media/streaming/media_streaming_utility.h"
 #include "window/main_window.h"
 #include "webrtc/webrtc_video_track.h"
 #include "webrtc/webrtc_media_devices.h"
@@ -335,16 +336,10 @@ void Panel::refreshIncomingGeometry() {
        return;
    }
    const auto to = widget()->size();
-   const auto small = _incomingFrameSize.scaled(to, Qt::KeepAspectRatio);
-   const auto big = _incomingFrameSize.scaled(
-       to,
-       Qt::KeepAspectRatioByExpanding);
-
-   // If we cut out no more than 0.25 of the original, let's use expanding.
-   const auto use = ((big.width() * 3 <= to.width() * 4)
-       && (big.height() * 3 <= to.height() * 4))
-       ? big
-       : small;
+   const auto use = ::Media::Streaming::DecideFrameResize(
+       to,
+       _incomingFrameSize
+   ).result;
    const auto pos = QPoint(
        (to.width() - use.width()) / 2,
        (to.height() - use.height()) / 2);

View file

@@ -10,6 +10,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "calls/group/calls_group_viewport_tile.h"
 #include "webrtc/webrtc_video_track.h"
 #include "media/view/media_view_pip.h"
+#include "media/streaming/media_streaming_utility.h"
 #include "calls/group/calls_group_members_row.h"
 #include "lang/lang_keys.h"
 #include "ui/gl/gl_shader.h"
@@ -31,7 +32,6 @@ constexpr auto kNoiseTextureSize = 256;
 constexpr auto kBlurTextureSizeFactor = 4.;
 constexpr auto kBlurOpacity = 0.65;
 constexpr auto kDitherNoiseAmount = 0.002;
-constexpr auto kMinCameraVisiblePart = 0.75;
 
 constexpr auto kQuads = 9;
 constexpr auto kQuadVertices = kQuads * 4;
@@ -224,13 +224,8 @@ vec4 background() {
 }
 
 [[nodiscard]] bool UseExpandForCamera(QSize original, QSize viewport) {
-   const auto big = original.scaled(
-       viewport,
-       Qt::KeepAspectRatioByExpanding);
-
-   // If we cut out no more than 0.25 of the original, let's use expanding.
-   return (big.width() * kMinCameraVisiblePart <= viewport.width())
-       && (big.height() * kMinCameraVisiblePart <= viewport.height());
+   using namespace ::Media::Streaming;
+   return DecideFrameResize(viewport, original).expanding;
 }
 
 [[nodiscard]] QSize NonEmpty(QSize size) {

View file

@@ -482,6 +482,10 @@ bool RotationSwapWidthHeight(int rotation) {
    return (rotation == 90 || rotation == 270);
 }
 
+QSize TransposeSizeByRotation(QSize size, int rotation) {
+   return RotationSwapWidthHeight(rotation) ? size.transposed() : size;
+}
+
 bool GoodStorageForFrame(const QImage &storage, QSize size) {
    return !storage.isNull()
        && (storage.format() == kImageFormat)
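A quick standalone sketch (not taken from the repository) of what the new FFmpeg::TransposeSizeByRotation() helper does: for 90/270-degree streams the decoded width and height swap places. Only Qt's QSize API is assumed; the local function names are illustrative.

#include <QSize>
#include <cstdio>

static bool rotationSwapsWidthHeight(int rotation) {
    return (rotation == 90) || (rotation == 270);
}

static QSize transposeSizeByRotation(QSize size, int rotation) {
    return rotationSwapsWidthHeight(rotation) ? size.transposed() : size;
}

int main() {
    const auto size = QSize(1920, 1080);
    for (const auto rotation : { 0, 90, 180, 270 }) {
        const auto fixed = transposeSizeByRotation(size, rotation);
        std::printf("%3d -> %dx%d\n", rotation, fixed.width(), fixed.height());
    }
}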

View file

@@ -180,6 +180,7 @@ void LogError(QLatin1String method, FFmpeg::AvErrorWrap error);
 [[nodiscard]] int ReadRotationFromMetadata(not_null<AVStream*> stream);
 [[nodiscard]] AVRational ValidateAspectRatio(AVRational aspect);
 [[nodiscard]] bool RotationSwapWidthHeight(int rotation);
+[[nodiscard]] QSize TransposeSizeByRotation(QSize size, int rotation);
 [[nodiscard]] QSize CorrectByAspect(QSize size, AVRational aspect);
 [[nodiscard]] bool GoodStorageForFrame(const QImage &storage, QSize size);

View file

@@ -142,6 +142,28 @@ void PaintWaveform(
    }
 }
 
+[[nodiscard]] int MaxStatusWidth(not_null<DocumentData*> document) {
+   using namespace Ui;
+   auto result = 0;
+   const auto add = [&](const QString &text) {
+       accumulate_max(result, st::normalFont->width(text));
+   };
+   add(FormatDownloadText(document->size, document->size));
+   const auto duration = document->getDuration();
+   if (const auto song = document->song()) {
+       add(FormatPlayedText(duration, duration));
+       add(FormatDurationAndSizeText(duration, document->size));
+   } else if (const auto voice = document->voice()) {
+       add(FormatPlayedText(duration, duration));
+       add(FormatDurationAndSizeText(duration, document->size));
+   } else if (document->isVideoFile()) {
+       add(FormatDurationAndSizeText(duration, document->size));
+   } else {
+       add(FormatSizeText(document->size));
+   }
+   return result;
+}
+
 } // namespace
 
 Document::Document(
@@ -317,10 +339,10 @@ QSize Document::countOptimalSize() {
    const auto tleft = st.padding.left() + st.thumbSize + st.padding.right();
    const auto tright = st.padding.left();
    if (thumbed) {
-       accumulate_max(maxWidth, tleft + documentMaxStatusWidth(_data) + tright);
+       accumulate_max(maxWidth, tleft + MaxStatusWidth(_data) + tright);
    } else {
        auto unread = _data->isVoiceMessage() ? (st::mediaUnreadSkip + st::mediaUnreadSize) : 0;
-       accumulate_max(maxWidth, tleft + documentMaxStatusWidth(_data) + unread + _parent->skipBlockWidth() + st::msgPadding.right());
+       accumulate_max(maxWidth, tleft + MaxStatusWidth(_data) + unread + _parent->skipBlockWidth() + st::msgPadding.right());
    }
    if (auto named = Get<HistoryDocumentNamed>()) {

View file

@@ -48,7 +48,7 @@ Game::Game(
 }
 
 QSize Game::countOptimalSize() {
-   auto lineHeight = unitedLineHeight();
+   auto lineHeight = UnitedLineHeight();
 
    const auto item = _parent->data();
    if (!_openl && item->isRegular()) {
@@ -149,7 +149,7 @@ QSize Game::countCurrentSize(int newWidth) {
    // enable any count of lines in game description / message
    auto linesMax = 4096;
-   auto lineHeight = unitedLineHeight();
+   auto lineHeight = UnitedLineHeight();
    auto newHeight = 0;
    if (_title.isEmpty()) {
        _titleLines = 0;
@@ -225,7 +225,7 @@ void Game::draw(Painter &p, const PaintContext &context) const {
    QRect bar(style::rtlrect(st::msgPadding.left(), tshift, st::webPageBar, height() - tshift - bshift, width()));
    p.fillRect(bar, barfg);
 
-   auto lineHeight = unitedLineHeight();
+   auto lineHeight = UnitedLineHeight();
    if (_titleLines) {
        p.setPen(semibold);
        p.setTextPalette(stm->semiboldPalette);
@@ -301,7 +301,7 @@ TextState Game::textState(QPoint point, StateRequest request) const {
    auto inThumb = false;
    auto symbolAdd = 0;
-   auto lineHeight = unitedLineHeight();
+   auto lineHeight = UnitedLineHeight();
    if (_titleLines) {
        if (point.y() >= tshift && point.y() < tshift + _titleLines * lineHeight) {
            Ui::Text::StateRequestElided titleRequest = request.forText();

View file

@@ -16,6 +16,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "media/player/media_player_instance.h"
 #include "media/streaming/media_streaming_instance.h"
 #include "media/streaming/media_streaming_player.h"
+#include "media/streaming/media_streaming_utility.h"
 #include "media/view/media_view_playback_progress.h"
 #include "ui/boxes/confirm_box.h"
 #include "history/history_item_components.h"
@@ -147,21 +148,23 @@ QSize Gif::countOptimalSize() {
    auto thumbMaxWidth = st::msgMaxWidth;
    const auto scaled = countThumbSize(thumbMaxWidth);
-   _thumbw = scaled.width();
-   _thumbh = scaled.height();
+   const auto minWidthByInfo = _parent->infoWidth()
+       + 2 * (st::msgDateImgDelta + st::msgDateImgPadding.x());
    auto maxWidth = std::clamp(
-       std::max(_thumbw, _parent->infoWidth() + 2 * (st::msgDateImgDelta + st::msgDateImgPadding.x())),
+       std::max(scaled.width(), minWidthByInfo),
        st::minPhotoSize,
        thumbMaxWidth);
-   auto minHeight = qMax(_thumbh, st::minPhotoSize);
+   auto minHeight = qMax(scaled.height(), st::minPhotoSize);
    if (!activeCurrentStreamed()) {
        accumulate_max(maxWidth, gifMaxStatusWidth(_data) + 2 * (st::msgDateImgDelta + st::msgDateImgPadding.x()));
    }
    if (_parent->hasBubble()) {
        accumulate_max(maxWidth, _parent->minWidthForMedia());
        if (!_caption.isEmpty()) {
-           auto captionw = maxWidth - st::msgPadding.left() - st::msgPadding.right();
-           minHeight += st::mediaCaptionSkip + _caption.countHeight(captionw);
+           maxWidth = qMax(maxWidth, st::msgPadding.left()
+               + _caption.maxWidth()
+               + st::msgPadding.right());
+           minHeight += st::mediaCaptionSkip + _caption.minHeight();
            if (isBubbleBottom()) {
                minHeight += st::msgPadding.bottom();
            }
@@ -185,21 +188,28 @@ QSize Gif::countCurrentSize(int newWidth) {
    auto thumbMaxWidth = newWidth;
    const auto scaled = countThumbSize(thumbMaxWidth);
-   _thumbw = scaled.width();
-   _thumbh = scaled.height();
+   const auto minWidthByInfo = _parent->infoWidth()
+       + 2 * (st::msgDateImgDelta + st::msgDateImgPadding.x());
    newWidth = std::clamp(
-       std::max(_thumbw, _parent->infoWidth() + 2 * (st::msgDateImgDelta + st::msgDateImgPadding.x())),
+       std::max(scaled.width(), minWidthByInfo),
        st::minPhotoSize,
        thumbMaxWidth);
-   auto newHeight = qMax(_thumbh, st::minPhotoSize);
+   auto newHeight = qMax(scaled.height(), st::minPhotoSize);
    if (!activeCurrentStreamed()) {
        accumulate_max(newWidth, gifMaxStatusWidth(_data) + 2 * (st::msgDateImgDelta + st::msgDateImgPadding.x()));
    }
    if (_parent->hasBubble()) {
        accumulate_max(newWidth, _parent->minWidthForMedia());
        if (!_caption.isEmpty()) {
-           auto captionw = newWidth - st::msgPadding.left() - st::msgPadding.right();
+           const auto maxWithCaption = qMin(
+               st::msgMaxWidth,
+               (st::msgPadding.left()
+                   + _caption.maxWidth()
+                   + st::msgPadding.right()));
+           newWidth = qMax(newWidth, maxWithCaption);
+           const auto captionw = newWidth
+               - st::msgPadding.left()
+               - st::msgPadding.right();
            newHeight += st::mediaCaptionSkip + _caption.countHeight(captionw);
            if (isBubbleBottom()) {
                newHeight += st::msgPadding.bottom();
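Below is a small arithmetic sketch (not from the repository) of the width rule the two hunks above introduce: a GIF/video bubble with a caption may now widen up to the caption's natural single-line width, clamped to the message maximum, instead of always wrapping the caption at the thumbnail width. The style values are made-up stand-ins for st::msgMaxWidth and st::msgPadding.

#include <algorithm>
#include <cstdio>

int main() {
    const auto msgMaxWidth = 430;      // stand-in for st::msgMaxWidth
    const auto paddingLeft = 13;       // stand-in for st::msgPadding.left()
    const auto paddingRight = 13;      // stand-in for st::msgPadding.right()
    const auto thumbWidth = 220;       // width chosen for the media itself
    const auto captionMaxWidth = 360;  // _caption.maxWidth(): unwrapped caption width

    // Old behaviour: the bubble stays at the thumbnail width, the caption wraps there.
    const auto oldWidth = thumbWidth;

    // New behaviour: widen to fit the caption, but never past msgMaxWidth.
    const auto maxWithCaption = std::min(
        msgMaxWidth,
        paddingLeft + captionMaxWidth + paddingRight);
    const auto newWidth = std::max(thumbWidth, maxWithCaption);

    std::printf("old %d, new %d\n", oldWidth, newWidth); // old 220, new 386
}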
@@ -380,11 +390,12 @@ void Gif::draw(Painter &p, const PaintContext &context) const {
                displayMute = true;
            }
        }
-       auto request = ::Media::Streaming::FrameRequest();
-       request.outer = QSize(usew, painth) * cIntRetinaFactor();
-       request.resize = QSize(_thumbw, _thumbh) * cIntRetinaFactor();
-       request.corners = roundCorners;
-       request.radius = roundRadius;
+       auto request = ::Media::Streaming::FrameRequest{
+           .outer = QSize(usew, painth) * cIntRetinaFactor(),
+           .radius = roundRadius,
+           .corners = roundCorners,
+           .blurredBackground = true,
+       };
        if (!activeRoundPlaying && activeOwnPlaying->instance.playerLocked()) {
            if (activeOwnPlaying->frozenFrame.isNull()) {
                activeOwnPlaying->frozenRequest = request;
@@ -433,49 +444,8 @@ void Gif::draw(Painter &p, const PaintContext &context) const {
            }
        } else if (!skipDrawingContent) {
            ensureDataMediaCreated();
-           const auto size = QSize(_thumbw, _thumbh);
-           const auto args = Images::PrepareArgs{
-               .options = Images::RoundOptions(roundRadius, roundCorners),
-               .outer = QSize(usew, painth),
-           };
-           if (const auto good = _dataMedia->goodThumbnail()) {
-               p.drawPixmap(rthumb.topLeft(), good->pixSingle(size, args));
-           } else {
-               const auto normal = _dataMedia->thumbnail();
-               if (normal) {
-                   const auto blurred = (normal->width() < kUseNonBlurredThreshold)
-                       && (normal->height() < kUseNonBlurredThreshold);
-                   p.drawPixmap(
-                       rthumb.topLeft(),
-                       normal->pixSingle(size, blurred ? args.blurred() : args));
-               } else {
-                   _data->loadThumbnail(_realParent->fullId());
-                   validateVideoThumbnail();
-                   if (_videoThumbnailFrame) {
-                       p.drawPixmap(rthumb.topLeft(), _videoThumbnailFrame->pixSingle(size, args));
-                   } else if (const auto blurred = _dataMedia->thumbnailInline()) {
-                       p.drawPixmap(rthumb.topLeft(), blurred->pixSingle(size, args.blurred()));
-                   } else if (!unwrapped) {
-                       if (roundRadius == ImageRoundRadius::Ellipse) {
-                           PainterHighQualityEnabler hq(p);
-                           p.setPen(Qt::NoPen);
-                           p.setBrush(st->imageBg());
-                           p.drawEllipse(rthumb);
-                       } else {
-                           const auto roundTop = (roundCorners & RectPart::TopLeft);
-                           const auto roundBottom = (roundCorners & RectPart::BottomLeft);
-                           const auto margin = inWebPage
-                               ? st::roundRadiusSmall
-                               : st::historyMessageRadius;
-                           const auto parts = roundCorners
-                               | RectPart::NoTopBottom
-                               | (roundTop ? RectPart::Top : RectPart::None)
-                               | (roundBottom ? RectPart::Bottom : RectPart::None);
-                           Ui::FillRoundRect(p, rthumb.marginsAdded({ 0, roundTop ? 0 : margin, 0, roundBottom ? 0 : margin }), st->imageBg(), roundRadius, parts);
-                       }
-                   }
-               }
-           }
+           validateThumbCache({ usew, painth }, roundRadius, roundCorners);
+           p.drawImage(rthumb, _thumbCache);
        }
 
        if (context.selected()) {
@@ -688,6 +658,74 @@ void Gif::validateVideoThumbnail() const {
        : info.thumbnail);
 }
 
+void Gif::validateThumbCache(
+       QSize outer,
+       ImageRoundRadius radius,
+       RectParts corners) const {
+   const auto intRadius = static_cast<int>(radius);
+   const auto intCorners = static_cast<int>(corners);
+   const auto good = _dataMedia->goodThumbnail();
+   const auto normal = good ? good : _dataMedia->thumbnail();
+   if (!normal) {
+       _data->loadThumbnail(_realParent->fullId());
+       validateVideoThumbnail();
+   }
+   const auto videothumb = normal ? nullptr : _videoThumbnailFrame.get();
+   const auto blurred = normal
+       ? (!good
+           && (normal->width() < kUseNonBlurredThreshold)
+           && (normal->height() < kUseNonBlurredThreshold))
+       : !videothumb;
+   const auto ratio = style::DevicePixelRatio();
+   const auto shouldBeBlurred = blurred ? 1 : 0;
+   if (_thumbCache.size() == (outer * ratio)
+       && _thumbCacheRoundRadius == intRadius
+       && _thumbCacheRoundCorners == intCorners
+       && _thumbCacheBlurred == shouldBeBlurred) {
+       return;
+   }
+   _thumbCache = prepareThumbCache(outer, radius, corners);
+   _thumbCacheRoundRadius = intRadius;
+   _thumbCacheRoundCorners = intCorners;
+   _thumbCacheBlurred = shouldBeBlurred;
+}
+
+QImage Gif::prepareThumbCache(
+       QSize outer,
+       ImageRoundRadius radius,
+       RectParts corners) const {
+   return Images::Round(prepareThumbCache(outer), radius, corners);
+}
+
+QImage Gif::prepareThumbCache(QSize outer) const {
+   const auto good = _dataMedia->goodThumbnail();
+   const auto normal = good ? good : _dataMedia->thumbnail();
+   const auto videothumb = normal ? nullptr : _videoThumbnailFrame.get();
+   const auto ratio = style::DevicePixelRatio();
+   auto blurred = (!good
+       && normal
+       && (normal->width() < kUseNonBlurredThreshold)
+       && (normal->height() < kUseNonBlurredThreshold))
+       ? normal
+       : nullptr;
+   const auto blurFromLarge = good || (normal && !blurred);
+   const auto large = blurFromLarge ? normal : videothumb;
+   if (videothumb) {
+   } else if (const auto embedded = _dataMedia->thumbnailInline()) {
+       blurred = embedded;
+   }
+   const auto resize = large
+       ? ::Media::Streaming::DecideVideoFrameResize(
+           outer,
+           good ? large->size() : _data->dimensions)
+       : ::Media::Streaming::ExpandDecision();
+   return PrepareWithBlurredBackground(
+       outer,
+       resize,
+       large,
+       blurFromLarge ? large : blurred);
+}
+
 void Gif::drawCornerStatus(
        Painter &p,
        const PaintContext &context,
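The added Gif::validateThumbCache() above follows a simple cache-validation pattern: the rendered thumbnail QImage is kept together with the parameters it was built for and is only rebuilt when one of them changes. Here is a reduced, self-contained sketch of that pattern (the struct, the ratio constant and prepareThumb() are stand-ins, not code from the repository).

#include <QImage>
#include <QSize>

struct ThumbCache {
    QImage image;
    int roundRadius = 0;
    int roundCorners = 0;
    int blurred = 0;
};

template <typename Prepare>
const QImage &validateThumbCache(
        ThumbCache &cache,
        QSize outer,
        int radius,
        int corners,
        bool blurred,
        Prepare &&prepareThumb) {
    const auto ratio = 2; // stand-in for style::DevicePixelRatio()
    const auto wantBlurred = blurred ? 1 : 0;
    if (cache.image.size() == outer * ratio
        && cache.roundRadius == radius
        && cache.roundCorners == corners
        && cache.blurred == wantBlurred) {
        return cache.image; // still valid, skip the expensive prepare
    }
    cache.image = prepareThumb(outer * ratio);
    cache.roundRadius = radius;
    cache.roundCorners = corners;
    cache.blurred = wantBlurred;
    return cache.image;
}

int main() {
    auto cache = ThumbCache();
    auto prepares = 0;
    const auto prepare = [&](QSize size) {
        ++prepares;
        return QImage(size, QImage::Format_ARGB32_Premultiplied);
    };
    validateThumbCache(cache, { 220, 180 }, 4, 0xF, false, prepare);
    validateThumbCache(cache, { 220, 180 }, 4, 0xF, false, prepare); // served from cache
    validateThumbCache(cache, { 260, 200 }, 4, 0xF, false, prepare); // size changed: rebuilt
    return (prepares == 2) ? 0 : 1;
}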
@@ -996,17 +1034,18 @@ void Gif::drawGrouped(
    if (streamed) {
        const auto paused = autoPaused;
-       auto request = ::Media::Streaming::FrameRequest();
        const auto original = sizeForAspectRatio();
        const auto originalWidth = style::ConvertScale(original.width());
        const auto originalHeight = style::ConvertScale(original.height());
        const auto pixSize = Ui::GetImageScaleSizeForGeometry(
            { originalWidth, originalHeight },
            { geometry.width(), geometry.height() });
-       request.outer = geometry.size() * cIntRetinaFactor();
-       request.resize = pixSize * cIntRetinaFactor();
-       request.corners = corners;
-       request.radius = roundRadius;
+       auto request = ::Media::Streaming::FrameRequest{
+           .resize = pixSize * cIntRetinaFactor(),
+           .outer = geometry.size() * cIntRetinaFactor(),
+           .radius = roundRadius,
+           .corners = corners,
+       };
        if (activeOwnPlaying->instance.playerLocked()) {
            if (activeOwnPlaying->frozenFrame.isNull()) {
                activeOwnPlaying->frozenRequest = request;
@@ -1408,6 +1447,7 @@ bool Gif::hasHeavyPart() const {
 void Gif::unloadHeavyPart() {
    stopAnimation();
    _dataMedia = nullptr;
+   _thumbCache = QImage();
    _videoThumbnailFrame = nullptr;
    _caption.unloadCustomEmoji();
 }

View file

@@ -158,6 +158,16 @@ private:
    [[nodiscard]] int additionalWidth() const;
    [[nodiscard]] bool isUnwrapped() const;
 
+   void validateThumbCache(
+       QSize outer,
+       ImageRoundRadius radius,
+       RectParts corners) const;
+   [[nodiscard]] QImage prepareThumbCache(
+       QSize outer,
+       ImageRoundRadius radius,
+       RectParts corners) const;
+   [[nodiscard]] QImage prepareThumbCache(QSize outer) const;
+
    void validateGroupedCache(
        const QRect &geometry,
        RectParts corners,
@@ -178,13 +188,15 @@ private:
        QPoint position) const;
 
    const not_null<DocumentData*> _data;
-   int _thumbw = 1;
-   int _thumbh = 1;
    Ui::Text::String _caption;
    std::unique_ptr<Streamed> _streamed;
    mutable std::shared_ptr<Data::DocumentMedia> _dataMedia;
    mutable std::unique_ptr<Image> _videoThumbnailFrame;
    QString _downloadSize;
+   mutable QImage _thumbCache;
+   mutable int _thumbCacheRoundRadius : 4 = 0;
+   mutable int _thumbCacheRoundCorners : 12 = 0;
+   mutable int _thumbCacheBlurred : 1 = 0;
 
 };

View file

@@ -83,7 +83,7 @@ void Invoice::fillFromData(not_null<Data::Invoice*> invoice) {
 }
 
 QSize Invoice::countOptimalSize() {
-   auto lineHeight = unitedLineHeight();
+   auto lineHeight = UnitedLineHeight();
 
    if (_attach) {
        if (_status.hasSkipBlock()) {
@@ -139,7 +139,7 @@ QSize Invoice::countCurrentSize(int newWidth) {
    accumulate_min(newWidth, maxWidth());
    auto innerWidth = newWidth - st::msgPadding.left() - st::msgPadding.right();
 
-   auto lineHeight = unitedLineHeight();
+   auto lineHeight = UnitedLineHeight();
 
    auto newHeight = 0;
    if (_title.isEmpty()) {
@@ -211,7 +211,7 @@ void Invoice::draw(Painter &p, const PaintContext &context) const {
    auto tshift = padding.top();
    paintw -= padding.left() + padding.right();
 
-   auto lineHeight = unitedLineHeight();
+   auto lineHeight = UnitedLineHeight();
    if (_titleHeight) {
        p.setPen(semibold);
        p.setTextPalette(stm->semiboldPalette);
@@ -283,7 +283,7 @@ TextState Invoice::textState(QPoint point, StateRequest request) const {
    }
    paintw -= padding.left() + padding.right();
 
-   auto lineHeight = unitedLineHeight();
+   auto lineHeight = UnitedLineHeight();
    auto symbolAdd = 0;
    if (_titleHeight) {
        if (point.y() >= tshift && point.y() < tshift + _titleHeight) {

View file

@@ -18,27 +18,11 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "history/view/media/history_view_document.h"
 #include "history/view/media/history_view_sticker.h"
 #include "history/view/media/history_view_theme_document.h"
+#include "media/streaming/media_streaming_utility.h"
 #include "styles/style_chat.h"
 
 namespace HistoryView {
 
-int documentMaxStatusWidth(DocumentData *document) {
-   auto result = st::normalFont->width(Ui::FormatDownloadText(document->size, document->size));
-   const auto duration = document->getDuration();
-   if (const auto song = document->song()) {
-       accumulate_max(result, st::normalFont->width(Ui::FormatPlayedText(duration, duration)));
-       accumulate_max(result, st::normalFont->width(Ui::FormatDurationAndSizeText(duration, document->size)));
-   } else if (const auto voice = document->voice()) {
-       accumulate_max(result, st::normalFont->width(Ui::FormatPlayedText(duration, duration)));
-       accumulate_max(result, st::normalFont->width(Ui::FormatDurationAndSizeText(duration, document->size)));
-   } else if (document->isVideoFile()) {
-       accumulate_max(result, st::normalFont->width(Ui::FormatDurationAndSizeText(duration, document->size)));
-   } else {
-       accumulate_max(result, st::normalFont->width(Ui::FormatSizeText(document->size)));
-   }
-   return result;
-}
-
 void PaintInterpolatedIcon(
    Painter &p,
    const style::icon &a,
@@ -105,8 +89,49 @@ std::unique_ptr<Media> CreateAttach(
    return nullptr;
 }
 
-int unitedLineHeight() {
-   return qMax(st::webPageTitleFont->height, st::webPageDescriptionFont->height);
+int UnitedLineHeight() {
+   return std::max(st::semiboldFont->height, st::normalFont->height);
 }
+
+QImage PrepareWithBlurredBackground(
+       QSize outer,
+       ::Media::Streaming::ExpandDecision resize,
+       Image *large,
+       Image *blurred) {
+   const auto ratio = style::DevicePixelRatio();
+   if (resize.expanding) {
+       return Images::Prepare(large->original(), resize.result * ratio, {
+           .outer = outer,
+       });
+   }
+   auto background = QImage(
+       outer * ratio,
+       QImage::Format_ARGB32_Premultiplied);
+   background.setDevicePixelRatio(ratio);
+   if (!blurred) {
+       background.fill(Qt::black);
+       if (!large) {
+           return background;
+       }
+   }
+   auto p = QPainter(&background);
+   if (blurred) {
+       using namespace ::Media::Streaming;
+       FillBlurredBackground(p, outer, blurred->original());
+   }
+   if (large) {
+       auto image = large->original().scaled(
+           resize.result * ratio,
+           Qt::IgnoreAspectRatio,
+           Qt::SmoothTransformation);
+       image.setDevicePixelRatio(ratio);
+       p.drawImage(
+           (outer.width() - resize.result.width()) / 2,
+           (outer.height() - resize.result.height()) / 2,
+           image);
+   }
+   p.end();
+   return background;
+}
 
 } // namespace HistoryView
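The composition PrepareWithBlurredBackground() performs when the frame does not expand to fill the target can be sketched standalone: paint a stretched, dimmed copy of the frame as a backdrop, then draw the frame itself scaled and centered on top. The sketch below is not the project's implementation; it uses only Qt (the real helper blurs via Images::Blur(), here a crude 40x40 downscale stands in for the blur) and all names are illustrative.

#include <QColor>
#include <QGuiApplication>
#include <QImage>
#include <QPainter>

static QImage composeWithBackdrop(QSize outer, const QImage &frame, QSize resized) {
    auto result = QImage(outer, QImage::Format_ARGB32_Premultiplied);

    auto p = QPainter(&result);
    // Backdrop: a tiny copy scaled back up is a cheap stand-in for a blur.
    const auto tiny = frame.scaled(40, 40, Qt::KeepAspectRatio, Qt::FastTransformation);
    p.setRenderHint(QPainter::SmoothPixmapTransform);
    p.drawImage(QRect(QPoint(), outer), tiny);
    p.fillRect(QRect(QPoint(), outer), QColor(0, 0, 0, 48)); // dim it slightly

    // The actual frame, scaled to the decided size and centered.
    const auto scaled = frame.scaled(resized, Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
    p.drawImage(
        (outer.width() - scaled.width()) / 2,
        (outer.height() - scaled.height()) / 2,
        scaled);
    p.end();
    return result;
}

int main(int argc, char *argv[]) {
    QGuiApplication app(argc, argv);

    auto frame = QImage(1280, 720, QImage::Format_ARGB32_Premultiplied);
    frame.fill(Qt::darkCyan);

    // A tall caption-sized target where 1280x720 cannot expand without heavy
    // cropping, so it is fitted (to 386x217 here) and the backdrop fills the rest.
    const auto outer = QSize(386, 470);
    const auto fitted = frame.size().scaled(outer, Qt::KeepAspectRatio);
    const auto result = composeWithBackdrop(outer, frame, fitted);
    return result.save("blurred_background_preview.png") ? 0 : 1;
}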

View file

@@ -7,6 +7,10 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 */
 #pragma once
 
+class DocumentData;
+class PhotoData;
+class Image;
+
 namespace HistoryView {
 class Element;
 } // namespace HistoryView
@@ -15,15 +19,14 @@ namespace Data {
 class Media;
 } // namespace Data
 
-class DocumentData;
-class PhotoData;
+namespace Media::Streaming {
+struct ExpandDecision;
+} // namespace Media::Streaming
 
 namespace HistoryView {
 
 class Media;
 
-int documentMaxStatusWidth(DocumentData *document);
-
 void PaintInterpolatedIcon(
    Painter &p,
    const style::icon &a,
@@ -41,7 +44,7 @@ void PaintInterpolatedIcon(
    PhotoData *photo,
    const std::vector<std::unique_ptr<Data::Media>> &collage,
    const QString &webpageUrl);
 
-int unitedLineHeight();
+[[nodiscard]] int UnitedLineHeight();
 
 [[nodiscard]] inline QSize NonEmptySize(QSize size) {
    return QSize(std::max(size.width(), 1), std::max(size.height(), 1));
@@ -54,4 +57,10 @@ int unitedLineHeight();
        : size));
 }
 
+[[nodiscard]] QImage PrepareWithBlurredBackground(
+   QSize outer,
+   ::Media::Streaming::ExpandDecision resize,
+   Image *large,
+   Image *blurred);
+
 } // namespace HistoryView

View file

@@ -16,6 +16,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "media/streaming/media_streaming_instance.h"
 #include "media/streaming/media_streaming_player.h"
 #include "media/streaming/media_streaming_document.h"
+#include "media/streaming/media_streaming_utility.h"
 #include "main/main_session.h"
 #include "main/main_session_settings.h"
 #include "ui/image/image.h"
@@ -185,7 +186,7 @@ QSize Photo::countOptimalSize() {
    return { maxWidth, minHeight };
 }
 
-QSize Photo::countCurrentSize(int newWidth) {
+QSize Photo::pixmapSizeFromData(int newWidth) const {
    auto tw = style::ConvertScale(_data->width());
    auto th = style::ConvertScale(_data->height());
    if (tw > st::maxMediaSize) {
@@ -197,33 +198,38 @@ QSize Photo::countCurrentSize(int newWidth) {
        th = st::maxMediaSize;
    }
-   _pixw = qMin(newWidth, maxWidth());
-   _pixh = th;
-   if (tw > _pixw) {
-       _pixh = (_pixw * _pixh / tw);
+   auto pixw = qMin(newWidth, maxWidth());
+   auto pixh = th;
+   if (tw > pixw) {
+       pixh = (pixw * pixh / tw);
    } else {
-       _pixw = tw;
+       pixw = tw;
    }
-   if (_pixh > newWidth) {
-       _pixw = (_pixw * newWidth) / _pixh;
-       _pixh = newWidth;
+   if (pixh > newWidth) {
+       pixw = (pixw * newWidth) / pixh;
+       pixh = newWidth;
    }
-   if (_pixw < 1) _pixw = 1;
-   if (_pixh < 1) _pixh = 1;
+   return { pixw, pixh };
+}
+
+QSize Photo::countCurrentSize(int newWidth) {
+   if (_serviceWidth) {
+       return { _serviceWidth, _serviceWidth };
+   }
    const auto minWidth = std::clamp(
        _parent->minWidthForMedia(),
        (_parent->hasBubble() ? st::historyPhotoBubbleMinWidth : st::minPhotoSize),
        std::min(newWidth, st::maxMediaSize));
-   newWidth = qMax(_pixw, minWidth);
-   auto newHeight = qMax(_pixh, st::minPhotoSize);
+   auto pix = pixmapSizeFromData(newWidth);
+   newWidth = qMax(pix.width(), minWidth);
+   auto newHeight = qMax(pix.height(), st::minPhotoSize);
    if (_parent->hasBubble() && !_caption.isEmpty()) {
        const auto maxWithCaption = qMin(
            st::msgMaxWidth,
            (st::msgPadding.left()
                + _caption.maxWidth()
                + st::msgPadding.right()));
-       newWidth = qMin(maxWidth(), maxWithCaption);
+       newWidth = qMax(newWidth, maxWithCaption);
        const auto captionw = newWidth
            - st::msgPadding.left()
            - st::msgPadding.right();
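A small worked sketch (not from the repository) of the arithmetic now isolated in Photo::pixmapSizeFromData(): shrink the style-scaled photo dimensions proportionally to the available width, then cap the height at that same width, shrinking the width again if needed. The numbers are made up for illustration.

#include <algorithm>
#include <cstdio>
#include <utility>

static std::pair<int, int> pixmapSizeFromData(
        int tw, int th, int newWidth, int maxWidth) {
    auto pixw = std::min(newWidth, maxWidth);
    auto pixh = th;
    if (tw > pixw) {
        pixh = (pixw * pixh / tw); // fit to the available width
    } else {
        pixw = tw;
    }
    if (pixh > newWidth) {
        pixw = (pixw * newWidth) / pixh; // very tall photo: cap the height
        pixh = newWidth;
    }
    return { pixw, pixh };
}

int main() {
    // A 1200x1600 portrait photo laid out in a 320px-wide bubble.
    const auto [w, h] = pixmapSizeFromData(1200, 1600, 320, 430);
    std::printf("%dx%d\n", w, h); // 320x426 after the width fit, 240x320 after the height cap
}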
@@ -390,68 +396,10 @@ QImage Photo::prepareImageCache(QSize outer) const {
    } else {
        blurred = large;
    }
-   if (large) {
-       const auto from = large->size();
-       // If we cut out no more than 0.25 of the original, let's expand.
-       const auto big = from.scaled(outer, Qt::KeepAspectRatioByExpanding);
-       if ((big.width() * 3 <= outer.width() * 4)
-           && (big.height() * 3 <= outer.height() * 4)) {
-           return Images::Prepare(large->original(), big * ratio, {
-               .outer = outer,
-           });
-       }
-   }
-   auto background = QImage(
-       outer * ratio,
-       QImage::Format_ARGB32_Premultiplied);
-   background.setDevicePixelRatio(ratio);
-   if (!blurred) {
-       background.fill(Qt::black);
-       return background;
-   }
-   const auto bsize = blurred->size();
-   const auto copyw = std::min(
-       bsize.width(),
-       outer.width() * bsize.height() / outer.height());
-   const auto copyh = std::min(
-       bsize.height(),
-       outer.height() * bsize.width() / outer.width());
-   auto copy = (bsize == QSize(copyw, copyh))
-       ? blurred->original()
-       : blurred->original().copy(
-           (bsize.width() - copyw) / 2,
-           (bsize.height() - copyh) / 2,
-           copyw,
-           copyh);
-   auto scaled = Images::Blur((outer.width() < 10
-       || outer.height() < 10
-       || (copy.width() * 5 < background.width()
-           && copy.height() * 5 < background.height()))
-       ? std::move(copy)
-       : copy.scaled(
-           std::min(copy.width(), background.width() / 5),
-           std::min(copy.height(), background.height() / 5),
-           Qt::KeepAspectRatio,
-           Qt::FastTransformation));
-   auto p = QPainter(&background);
-   {
-       auto hq = PainterHighQualityEnabler(p);
-       p.drawImage(QRect(QPoint(), outer), scaled);
-   }
-   if (large) {
-       auto image = large->original().scaled(
-           background.size(),
-           Qt::KeepAspectRatio,
-           Qt::SmoothTransformation);
-       image.setDevicePixelRatio(ratio);
-       const auto size = image.size() / ratio;
-       p.drawImage(
-           (outer.width() - size.width()) / 2,
-           (outer.height() - size.height()) / 2,
-           image);
-   }
-   p.end();
-   return background;
+   const auto resize = large
+       ? ::Media::Streaming::DecideFrameResize(outer, large->size())
+       : ::Media::Streaming::ExpandDecision();
+   return PrepareWithBlurredBackground(outer, resize, large, blurred);
 }
 
 void Photo::paintUserpicFrame(
@@ -466,7 +414,7 @@ void Photo::paintUserpicFrame(
        checkStreamedIsStarted();
    }
-   const auto size = QSize(_pixw, _pixh);
+   const auto size = QSize(width(), height());
    const auto rect = QRect(photoPosition, size);
    const auto st = context.st;
    const auto sti = context.imageStyle();

View file

@@ -116,6 +116,7 @@ private:
    QSize countOptimalSize() override;
    QSize countCurrentSize(int newWidth) override;
+   [[nodiscard]] QSize pixmapSizeFromData(int newWidth) const;
 
    bool needInfoDisplay() const;
    void validateGroupedCache(
@@ -152,8 +153,6 @@ private:
    mutable std::unique_ptr<Streamed> _streamed;
    mutable QImage _imageCache;
    int _serviceWidth = 0;
-   int _pixw = 1;
-   int _pixh = 1;
    mutable int _imageCacheRoundRadius : 4 = 0;
    mutable int _imageCacheRoundCorners : 12 = 0;
    mutable int _imageCacheBlurred : 1 = 0;

View file

@@ -106,7 +106,7 @@ QSize WebPage::countOptimalSize() {
        _title = Ui::Text::String(st::msgMinWidth - st::webPageLeft);
        _description = Ui::Text::String(st::msgMinWidth - st::webPageLeft);
    }
-   auto lineHeight = unitedLineHeight();
+   auto lineHeight = UnitedLineHeight();
 
    if (!_openl && !_data->url.isEmpty()) {
        const auto previewOfHiddenUrl = [&] {
@@ -328,7 +328,7 @@ QSize WebPage::countCurrentSize(int newWidth) {
    auto innerWidth = newWidth - st::msgPadding.left() - st::webPageLeft - st::msgPadding.right();
 
    auto newHeight = 0;
-   auto lineHeight = unitedLineHeight();
+   auto lineHeight = UnitedLineHeight();
    auto linesMax = isLogEntryOriginal() ? kMaxOriginalEntryLines : 5;
    auto siteNameHeight = _siteNameLines ? lineHeight : 0;
    if (asArticle()) {
@@ -498,7 +498,7 @@ void WebPage::draw(Painter &p, const PaintContext &context) const {
    QRect bar(style::rtlrect(st::msgPadding.left(), tshift, st::webPageBar, height() - tshift - bshift, width()));
    p.fillRect(bar, barfg);
 
-   auto lineHeight = unitedLineHeight();
+   auto lineHeight = UnitedLineHeight();
    if (asArticle()) {
        ensurePhotoMediaCreated();
@@ -650,7 +650,7 @@ TextState WebPage::textState(QPoint point, StateRequest request) const {
    }
    paintw -= padding.left() + padding.right();
 
-   auto lineHeight = unitedLineHeight();
+   auto lineHeight = UnitedLineHeight();
    auto inThumb = false;
    if (asArticle()) {
        auto pw = qMax(_pixw, lineHeight);

View file

@@ -123,6 +123,7 @@ struct FrameRequest {
    ImageRoundRadius radius = ImageRoundRadius();
    RectParts corners = RectPart::AllCorners;
    QColor colored = QColor(0, 0, 0, 0);
+   bool blurredBackground = false;
    bool requireARGB32 = true;
    bool keepAlpha = false;
    bool strict = true;
@@ -134,7 +135,7 @@ struct FrameRequest {
    }
 
    [[nodiscard]] bool empty() const {
-       return resize.isEmpty();
+       return blurredBackground ? outer.isEmpty() : resize.isEmpty();
    }
 
    [[nodiscard]] bool operator==(const FrameRequest &other) const {
@@ -144,14 +145,16 @@
            && (corners == other.corners)
            && (colored == other.colored)
            && (keepAlpha == other.keepAlpha)
-           && (requireARGB32 == other.requireARGB32);
+           && (requireARGB32 == other.requireARGB32)
+           && (blurredBackground == other.blurredBackground);
    }
 
    [[nodiscard]] bool operator!=(const FrameRequest &other) const {
        return !(*this == other);
    }
 
    [[nodiscard]] bool goodFor(const FrameRequest &other) const {
-       return (requireARGB32 == other.requireARGB32)
+       return (blurredBackground == other.blurredBackground)
+           && (requireARGB32 == other.requireARGB32)
            && (keepAlpha == other.keepAlpha)
            && (colored == other.colored)
            && ((strict && !other.strict) || (*this == other));
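Reduced sketch (not the project's FrameRequest) of how the new blurredBackground flag changes the request's notion of "size": with the flag set the relevant size is the outer rectangle, because the frame is composed onto a blurred backdrop of exactly that size, while without it the resize size remains the key. MiniFrameRequest and sameSizingAs are invented stand-ins.

#include <QSize>

struct MiniFrameRequest {
    QSize resize;
    QSize outer;
    bool blurredBackground = false;

    bool empty() const {
        return blurredBackground ? outer.isEmpty() : resize.isEmpty();
    }
    bool sameSizingAs(const MiniFrameRequest &other) const {
        return blurredBackground == other.blurredBackground
            && resize == other.resize
            && outer == other.outer;
    }
};

int main() {
    auto caption = MiniFrameRequest{
        .resize = QSize(), // no explicit resize: derived later from outer
        .outer = QSize(386, 470),
        .blurredBackground = true,
    };
    auto plain = MiniFrameRequest{ .resize = QSize(320, 180), .outer = QSize(320, 180) };
    return (!caption.empty() && !plain.empty()
        && !caption.sameSizingAs(plain)) ? 0 : 1;
}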

View file

@@ -177,7 +177,8 @@ Stream File::Context::initStream(
            return result;
        }
        result.rotation = FFmpeg::ReadRotationFromMetadata(info);
-       result.aspect = FFmpeg::ValidateAspectRatio(info->sample_aspect_ratio);
+       result.aspect = FFmpeg::ValidateAspectRatio(
+           info->sample_aspect_ratio);
    } else if (type == AVMEDIA_TYPE_AUDIO) {
        result.frequency = info->codecpar->sample_rate;
        if (!result.frequency) {

View file

@@ -96,7 +96,7 @@ bool GoodForRequest(
        || (hasAlpha && !request.keepAlpha)
        || request.colored.alpha() != 0) {
        return false;
-   } else if (request.resize.isEmpty()) {
+   } else if (!request.blurredBackground && request.resize.isEmpty()) {
        return true;
    } else if (rotation != 0) {
        return false;
@@ -104,8 +104,10 @@ bool GoodForRequest(
        && ((request.corners & RectPart::AllCorners) != 0)) {
        return false;
    }
-   return (request.resize == request.outer)
-       && (request.resize == image.size());
+   const auto size = request.blurredBackground
+       ? request.outer
+       : request.resize;
+   return (size == request.outer) && (size == image.size());
 }
 
 bool TransferFrame(
@@ -279,28 +281,69 @@ void PaintFrameInner(
    p.drawImage(rect, original);
 }
 
+QImage PrepareBlurredBackground(QSize outer, QImage frame) {
+   const auto bsize = frame.size();
+   const auto copyw = std::min(
+       bsize.width(),
+       outer.width() * bsize.height() / outer.height());
+   const auto copyh = std::min(
+       bsize.height(),
+       outer.height() * bsize.width() / outer.width());
+   auto copy = (bsize == QSize(copyw, copyh))
+       ? std::move(frame)
+       : frame.copy(
+           (bsize.width() - copyw) / 2,
+           (bsize.height() - copyh) / 2,
+           copyw,
+           copyh);
+   auto scaled = (copy.width() <= 100 && copy.height() <= 100)
+       ? std::move(copy)
+       : copy.scaled(40, 40, Qt::KeepAspectRatio, Qt::FastTransformation);
+   return Images::Blur(std::move(scaled), true);
+}
+
+void FillBlurredBackground(QPainter &p, QSize outer, QImage bg) {
+   auto hq = PainterHighQualityEnabler(p);
+   const auto rect = QRect(QPoint(), outer);
+   const auto ratio = p.device()->devicePixelRatio();
+   p.drawImage(
+       rect,
+       PrepareBlurredBackground(outer * ratio, std::move(bg)));
+   p.fillRect(rect, QColor(0, 0, 0, 48));
+}
+
 void PaintFrameContent(
        QPainter &p,
        const QImage &original,
-       bool alpha,
+       bool hasAlpha,
+       const AVRational &aspect,
        int rotation,
        const FrameRequest &request) {
-   const auto full = request.outer.isEmpty()
-       ? original.size()
-       : request.outer;
-   const auto size = request.resize.isEmpty()
-       ? original.size()
-       : request.resize;
-   const auto to = QRect(
+   const auto outer = request.outer;
+   const auto full = request.outer.isEmpty() ? original.size() : outer;
+   const auto deAlpha = hasAlpha && !request.keepAlpha;
+   const auto resize = request.blurredBackground
+       ? DecideVideoFrameResize(
+           outer,
+           FFmpeg::TransposeSizeByRotation(
+               FFmpeg::CorrectByAspect(original.size(), aspect),
+               rotation))
+       : ExpandDecision{ request.resize.isEmpty()
+           ? original.size()
+           : request.resize };
+   const auto size = resize.result;
+   const auto target = QRect(
        (full.width() - size.width()) / 2,
        (full.height() - size.height()) / 2,
        size.width(),
        size.height());
-   if (!alpha || !request.keepAlpha) {
-       PaintFrameOuter(p, to, full);
+   if (request.blurredBackground) {
+       if (!resize.expanding) {
+           FillBlurredBackground(p, full, original);
+       }
+   } else if (!hasAlpha || !request.keepAlpha) {
+       PaintFrameOuter(p, target, full);
    }
-   const auto deAlpha = alpha && !request.keepAlpha;
-   PaintFrameInner(p, to, original, deAlpha, rotation);
+   PaintFrameInner(p, target, original, deAlpha, rotation);
 }
 
 void ApplyFrameRounding(QImage &storage, const FrameRequest &request) {
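The crop arithmetic in the new PrepareBlurredBackground() can be shown on its own: take the largest centered piece of the frame that matches the target's aspect ratio, then shrink it to at most 40x40 before blurring, since a heavily blurred backdrop needs very little resolution. The sketch below is illustrative only and uses plain Qt geometry types, not the project's helpers.

#include <algorithm>
#include <QRect>
#include <QSize>
#include <cstdio>

static QRect centeredCropForAspect(QSize frame, QSize outer) {
    const auto copyw = std::min(
        frame.width(),
        outer.width() * frame.height() / outer.height());
    const auto copyh = std::min(
        frame.height(),
        outer.height() * frame.width() / outer.width());
    return QRect(
        (frame.width() - copyw) / 2,
        (frame.height() - copyh) / 2,
        copyw,
        copyh);
}

int main() {
    const auto frame = QSize(1280, 720);   // decoded video frame
    const auto outer = QSize(386, 470);    // bubble area to fill
    const auto crop = centeredCropForAspect(frame, outer);
    const auto tiny = crop.size().scaled(40, 40, Qt::KeepAspectRatio);
    std::printf("crop %dx%d at (%d,%d), blur source %dx%d\n",
        crop.width(), crop.height(), crop.x(), crop.y(),
        tiny.width(), tiny.height());
}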
@@ -314,13 +357,41 @@ void ApplyFrameRounding(QImage &storage, const FrameRequest &request) {
        request.corners);
 }
 
+ExpandDecision DecideFrameResize(
+       QSize outer,
+       QSize original,
+       int minVisibleNominator,
+       int minVisibleDenominator) {
+   if (outer.isEmpty()) {
+       // Often "expanding" means that we don't need to fill the background.
+       return { .result = original, .expanding = true };
+   }
+   const auto big = original.scaled(outer, Qt::KeepAspectRatioByExpanding);
+   if ((big.width() * minVisibleNominator
+       <= outer.width() * minVisibleDenominator)
+       && (big.height() * minVisibleNominator
+           <= outer.height() * minVisibleDenominator)) {
+       return { .result = big, .expanding = true };
+   }
+   return { .result = original.scaled(outer, Qt::KeepAspectRatio) };
+}
+
+ExpandDecision DecideVideoFrameResize(QSize outer, QSize original) {
+   return DecideFrameResize(outer, original, 1, 2);
+}
+
+QSize CalculateResizeFromOuter(QSize outer, QSize original) {
+   return DecideVideoFrameResize(outer, original).result;
+}
+
 QImage PrepareByRequest(
        const QImage &original,
-       bool alpha,
+       bool hasAlpha,
+       const AVRational &aspect,
        int rotation,
        const FrameRequest &request,
        QImage storage) {
-   Expects(!request.outer.isEmpty() || alpha);
+   Expects(!request.outer.isEmpty() || hasAlpha);
 
    const auto outer = request.outer.isEmpty()
        ? original.size()
@@ -329,12 +400,12 @@ QImage PrepareByRequest(
        storage = FFmpeg::CreateFrameStorage(outer);
    }
-   if (alpha && request.keepAlpha) {
+   if (hasAlpha && request.keepAlpha) {
        storage.fill(Qt::transparent);
    }
 
    QPainter p(&storage);
-   PaintFrameContent(p, original, alpha, rotation, request);
+   PaintFrameContent(p, original, hasAlpha, aspect, rotation, request);
    p.end();
    ApplyFrameRounding(storage, request);
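The two policies added above differ only in their threshold: the generic DecideFrameResize() expands only if at most 1/4 of the frame would be cropped (3/4 stays visible), while DecideVideoFrameResize() relaxes that to 1/2 so captioned GIFs and videos prefer filling their bubble and let the blurred background cover the rest. A standalone sketch (the local decide() mirrors the logic; names and sample sizes are illustrative, not repository code):

#include <QSize>
#include <cstdio>

struct Decision {
    QSize result;
    bool expanding = false;
};

static Decision decide(QSize outer, QSize original, int nom, int denom) {
    if (outer.isEmpty()) {
        return { original, true };
    }
    const auto big = original.scaled(outer, Qt::KeepAspectRatioByExpanding);
    if (big.width() * nom <= outer.width() * denom
        && big.height() * nom <= outer.height() * denom) {
        return { big, true };
    }
    return { original.scaled(outer, Qt::KeepAspectRatio), false };
}

int main() {
    const auto outer = QSize(400, 400);
    const auto original = QSize(1280, 720);
    const auto generic = decide(outer, original, 3, 4); // photo / call rule
    const auto video = decide(outer, original, 1, 2);   // captioned video rule
    std::printf("generic: %dx%d expanding=%d\n",
        generic.result.width(), generic.result.height(), generic.expanding);
    std::printf("video:   %dx%d expanding=%d\n",
        video.result.width(), video.result.height(), video.expanding);
}

With these inputs the generic rule falls back to fitting (400x225), while the video rule expands to 711x400 and crops the sides.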

View file

@@ -65,9 +65,26 @@ struct Stream {
    QSize resize,
    QImage storage);
 [[nodiscard]] FrameYUV ExtractYUV(Stream &stream, AVFrame *frame);
+
+struct ExpandDecision {
+   QSize result;
+   bool expanding = false;
+};
+[[nodiscard]] ExpandDecision DecideFrameResize(
+   QSize outer,
+   QSize original,
+   int minVisibleNominator = 3, // If we cut out no more than 0.25 of
+   int minVisibleDenominator = 4); // the original, let's expand.
+[[nodiscard]] ExpandDecision DecideVideoFrameResize(
+   QSize outer,
+   QSize original);
+[[nodiscard]] QSize CalculateResizeFromOuter(QSize outer, QSize original);
+[[nodiscard]] QImage PrepareBlurredBackground(QSize outer, QImage frame);
+void FillBlurredBackground(QPainter &p, QSize outer, QImage bg);
+
 [[nodiscard]] QImage PrepareByRequest(
    const QImage &original,
-   bool alpha,
+   bool hasAlpha,
+   const AVRational &aspect,
    int rotation,
    const FrameRequest &request,
    QImage storage);

View file

@@ -133,7 +133,7 @@ private:
    [[nodiscard]] ReadEnoughState readEnoughFrames(crl::time trackTime);
    [[nodiscard]] FrameResult readFrame(not_null<Frame*> frame);
    void fillRequests(not_null<Frame*> frame) const;
-   [[nodiscard]] QSize chooseOriginalResize() const;
+   [[nodiscard]] QSize chooseOriginalResize(QSize encoded) const;
    void presentFrameIfNeeded();
    void callReady();
    [[nodiscard]] bool loopAround();
@@ -402,16 +402,22 @@ void VideoTrackObject::fillRequests(not_null<Frame*> frame) const {
    }
 }
 
-QSize VideoTrackObject::chooseOriginalResize() const {
+QSize VideoTrackObject::chooseOriginalResize(QSize encoded) const {
    auto chosen = QSize();
+   if (FFmpeg::RotationSwapWidthHeight(_stream.rotation)) {
+       encoded.transpose();
+   }
    for (const auto &[_, request] : _requests) {
-       if (request.resize.isEmpty()) {
+       const auto resize = request.blurredBackground
+           ? CalculateResizeFromOuter(request.outer, encoded)
+           : request.resize;
+       if (resize.isEmpty()) {
            return QSize();
        }
-       const auto byWidth = (request.resize.width() >= chosen.width());
-       const auto byHeight = (request.resize.height() >= chosen.height());
+       const auto byWidth = (resize.width() >= chosen.width());
+       const auto byHeight = (resize.height() >= chosen.height());
        if (byWidth && byHeight) {
-           chosen = request.resize;
+           chosen = resize;
        } else if (byWidth || byHeight) {
            return QSize();
        }
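A standalone sketch (not from the repository) of the selection logic chooseOriginalResize() applies across consumers: if every consumer wants the decoded frame at or below one common size, the decoder downscales once to the largest of them; if the requests conflict (one wider, another taller) or someone wants the full frame, it falls back to the native size by returning an empty QSize. Requests with blurredBackground set contribute the size derived from their outer rectangle, as in the hunk above.

#include <QSize>
#include <cstdio>
#include <vector>

static QSize chooseDecodedSize(const std::vector<QSize> &wanted) {
    auto chosen = QSize();
    for (const auto &resize : wanted) {
        if (resize.isEmpty()) {
            return QSize(); // someone wants the full frame
        }
        const auto byWidth = (resize.width() >= chosen.width());
        const auto byHeight = (resize.height() >= chosen.height());
        if (byWidth && byHeight) {
            chosen = resize; // largest so far in both dimensions
        } else if (byWidth || byHeight) {
            return QSize(); // conflicting aspect requirements
        }
    }
    return chosen;
}

int main() {
    const auto agreeing = chooseDecodedSize({ { 320, 180 }, { 640, 360 } });
    const auto conflicting = chooseDecodedSize({ { 640, 360 }, { 360, 640 } });
    std::printf("agreeing: %dx%d, conflicting empty: %d\n",
        agreeing.width(), agreeing.height(), conflicting.isEmpty());
}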
@@ -483,7 +489,8 @@ void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) {
        frame->original = ConvertFrame(
            _stream,
            frameWithData,
-           chooseOriginalResize(),
+           chooseOriginalResize(
+               { frameWithData->width, frameWithData->height }),
            std::move(frame->original));
        if (frame->original.isNull()) {
            frame->prepared.clear();
@@ -493,7 +500,10 @@ void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) {
        frame->format = FrameFormat::ARGB32;
    }
-   VideoTrack::PrepareFrameByRequests(frame, _stream.rotation);
+   VideoTrack::PrepareFrameByRequests(
+       frame,
+       _stream.aspect,
+       _stream.rotation);
 
    Ensures(VideoTrack::IsRasterized(frame));
 }
@@ -706,22 +716,21 @@ void VideoTrackObject::callReady() {
    const auto frame = _shared->frameForPaint();
    ++_frameIndex;
 
-   auto data = VideoInformation();
-   data.size = FFmpeg::CorrectByAspect(
-       frame->original.size(),
-       _stream.aspect);
-   if (FFmpeg::RotationSwapWidthHeight(_stream.rotation)) {
-       data.size.transpose();
-   }
-   data.cover = frame->original;
-   data.rotation = _stream.rotation;
-   data.alpha = frame->alpha;
-   data.state.duration = _stream.duration;
-   data.state.position = _syncTimePoint.trackTime;
-   data.state.receivedTill = _readTillEnd
-       ? _stream.duration
-       : _syncTimePoint.trackTime;
-   base::take(_ready)({ data });
+   base::take(_ready)({ VideoInformation{
+       .state = {
+           .position = _syncTimePoint.trackTime,
+           .receivedTill = (_readTillEnd
+               ? _stream.duration
+               : _syncTimePoint.trackTime),
+           .duration = _stream.duration,
+       },
+       .size = FFmpeg::TransposeSizeByRotation(
+           FFmpeg::CorrectByAspect(frame->original.size(), _stream.aspect),
+           _stream.rotation),
+       .cover = frame->original,
+       .rotation = _stream.rotation,
+       .alpha = frame->alpha,
+   } });
 }
 
 TimePoint VideoTrackObject::trackTime() const {
@@ -1060,7 +1069,7 @@ VideoTrack::VideoTrack(
 , _streamTimeBase(stream.timeBase)
 , _streamDuration(stream.duration)
 , _streamRotation(stream.rotation)
-//, _streamAspect(stream.aspect)
+, _streamAspect(stream.aspect)
 , _shared(std::make_unique<Shared>())
 , _wrapped(
    options,
@@ -1232,6 +1241,7 @@ QImage VideoTrack::frameImage(
        j->second.image = PrepareByRequest(
            frame->original,
            frame->alpha,
+           _streamAspect,
            _streamRotation,
            useRequest,
            std::move(j->second.image));
@@ -1258,6 +1268,7 @@ void VideoTrack::unregisterInstance(not_null<const Instance*> instance) {
 void VideoTrack::PrepareFrameByRequests(
        not_null<Frame*> frame,
+       const AVRational &aspect,
        int rotation) {
    Expects(frame->format != FrameFormat::ARGB32
        || !frame->original.isNull());
@@ -1286,6 +1297,7 @@ void VideoTrack::PrepareFrameByRequests(
        prepared.image = PrepareByRequest(
            frame->original,
            frame->alpha,
+           aspect,
            rotation,
            prepared.request,
            std::move(prepared.image));

View file

@@ -155,7 +155,10 @@ private:
    };
 
-   static void PrepareFrameByRequests(not_null<Frame*> frame, int rotation);
+   static void PrepareFrameByRequests(
+       not_null<Frame*> frame,
+       const AVRational &aspect,
+       int rotation);
    [[nodiscard]] static bool IsDecoded(not_null<const Frame*> frame);
    [[nodiscard]] static bool IsRasterized(not_null<const Frame*> frame);
    [[nodiscard]] static bool IsStale(
@@ -171,7 +174,7 @@ private:
    const AVRational _streamTimeBase;
    const crl::time _streamDuration = 0;
    const int _streamRotation = 0;
-   //AVRational _streamAspect = kNormalAspect;
+   const AVRational _streamAspect = FFmpeg::kNormalAspect;
    std::unique_ptr<Shared> _shared;
 
    using Implementation = VideoTrackObject;

@@ -1 +1 @@
-Subproject commit 95dd2c8465d4b5bfcbcdb47ce7a1d1e743d04477
+Subproject commit 2e63c6103e3b23bfcd65dcb8afb19c020511b168