From 607263b8befc5424bc3041b390b6b9144d60f91d Mon Sep 17 00:00:00 2001
From: John Preston
Date: Fri, 4 Jun 2021 13:50:41 +0400
Subject: [PATCH] Fix seek / cancel of video playback in media viewer.

---
 .../history/view/media/history_view_gif.cpp   |  3 +-
 .../streaming/media_streaming_document.cpp    |  6 +-
 .../streaming/media_streaming_player.cpp      |  6 ++
 .../media/streaming/media_streaming_player.h  |  2 +
 .../streaming/media_streaming_utility.cpp     |  4 +-
 .../streaming/media_streaming_video_track.cpp | 71 ++++++++++++++++++-
 .../streaming/media_streaming_video_track.h   |  1 +
 .../media/view/media_view_overlay_opengl.cpp  |  8 +--
 .../media/view/media_view_overlay_opengl.h    |  2 +-
 .../media/view/media_view_overlay_widget.cpp  |  8 ++-
 .../media/view/media_view_overlay_widget.h    |  5 +-
 11 files changed, 98 insertions(+), 18 deletions(-)

diff --git a/Telegram/SourceFiles/history/view/media/history_view_gif.cpp b/Telegram/SourceFiles/history/view/media/history_view_gif.cpp
index a95e45df2..a6823b06b 100644
--- a/Telegram/SourceFiles/history/view/media/history_view_gif.cpp
+++ b/Telegram/SourceFiles/history/view/media/history_view_gif.cpp
@@ -389,7 +389,8 @@ void Gif::draw(Painter &p, const QRect &r, TextSelection selection, crl::time ms
 			}
 			p.drawImage(rthumb, activeOwnPlaying->frozenFrame);
 		} else {
-			if (activeOwnPlaying) {
+			if (activeOwnPlaying
+				&& !activeOwnPlaying->frozenFrame.isNull()) {
 				activeOwnPlaying->frozenFrame = QImage();
 				activeOwnPlaying->frozenStatusText = QString();
 			}
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_document.cpp b/Telegram/SourceFiles/media/streaming/media_streaming_document.cpp
index e6cf39e2a..c8e805d3d 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_document.cpp
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_document.cpp
@@ -91,12 +91,8 @@ void Document::play(const PlaybackOptions &options) {
 }
 
 void Document::saveFrameToCover() {
-	auto request = Streaming::FrameRequest();
-	//request.radius = (_doc && _doc->isVideoMessage())
-	//	? ImageRoundRadius::Ellipse
-	//	: ImageRoundRadius::None;
 	_info.video.cover = _player.ready()
-		? _player.frame(request)
+		? _player.currentFrameImage()
 		: _info.video.cover;
 }
 
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_player.cpp b/Telegram/SourceFiles/media/streaming/media_streaming_player.cpp
index f94637f1c..fc6d036d6 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_player.cpp
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_player.cpp
@@ -887,6 +887,12 @@ FrameWithInfo Player::frameWithInfo(const Instance *instance) const {
 	return _video->frameWithInfo(instance);
 }
 
+QImage Player::currentFrameImage() const {
+	Expects(_video != nullptr);
+
+	return _video->currentFrameImage();
+}
+
 void Player::unregisterInstance(not_null<const Instance*> instance) {
 	if (_video) {
 		_video->unregisterInstance(instance);
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_player.h b/Telegram/SourceFiles/media/streaming/media_streaming_player.h
index 5249116a4..45fb6f07c 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_player.h
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_player.h
@@ -68,6 +68,8 @@ public:
 	[[nodiscard]] FrameWithInfo frameWithInfo(
 		const Instance *instance = nullptr) const; // !requireARGB32
+	[[nodiscard]] QImage currentFrameImage() const; // Converts if needed.
+
 	void unregisterInstance(not_null<const Instance*> instance);
 	bool markFrameShown();
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_utility.cpp b/Telegram/SourceFiles/media/streaming/media_streaming_utility.cpp
index 444262ff4..a183779c2 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_utility.cpp
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_utility.cpp
@@ -91,7 +91,9 @@ bool GoodForRequest(
 		const QImage &image,
 		int rotation,
 		const FrameRequest &request) {
-	if (request.resize.isEmpty()) {
+	if (image.isNull()) {
+		return false;
+	} else if (request.resize.isEmpty()) {
 		return true;
 	} else if (rotation != 0) {
 		return false;
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_video_track.cpp b/Telegram/SourceFiles/media/streaming/media_streaming_video_track.cpp
index da11accd8..3a14138cf 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_video_track.cpp
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_video_track.cpp
@@ -7,6 +7,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 */
 #include "media/streaming/media_streaming_video_track.h"
 
+#include "ffmpeg/ffmpeg_utility.h"
 #include "media/audio/media_audio.h"
 #include "base/concurrent_timer.h"
@@ -19,6 +20,55 @@ constexpr auto kDisplaySkipped = crl::time(-1);
 constexpr auto kFinishedPosition = std::numeric_limits<crl::time>::max();
 static_assert(kDisplaySkipped != kTimeUnknown);
 
+[[nodiscard]] QImage ConvertToARGB32(const FrameYUV420 &data) {
+	Expects(data.y.data != nullptr);
+	Expects(data.u.data != nullptr);
+	Expects(data.v.data != nullptr);
+	Expects(!data.size.isEmpty());
+
+	//if (FFmpeg::RotationSwapWidthHeight(stream.rotation)) {
+	//	resize.transpose();
+	//}
+
+	auto result = FFmpeg::CreateFrameStorage(data.size);
+	const auto format = AV_PIX_FMT_BGRA;
+	const auto swscale = FFmpeg::MakeSwscalePointer(
+		data.size,
+		AV_PIX_FMT_YUV420P,
+		data.size,
+		AV_PIX_FMT_BGRA);
+	if (!swscale) {
+		return QImage();
+	}
+
+	// AV_NUM_DATA_POINTERS defined in AVFrame struct
+	const uint8_t *srcData[AV_NUM_DATA_POINTERS] = {
+		static_cast<const uint8_t*>(data.y.data),
+		static_cast<const uint8_t*>(data.u.data),
+		static_cast<const uint8_t*>(data.v.data),
+		nullptr,
+	};
+	int srcLinesize[AV_NUM_DATA_POINTERS] = {
+		data.y.stride,
+		data.u.stride,
+		data.v.stride,
+		0,
+	};
+	uint8_t *dstData[AV_NUM_DATA_POINTERS] = { result.bits(), nullptr };
+	int dstLinesize[AV_NUM_DATA_POINTERS] = { result.bytesPerLine(), 0 };
+
+	sws_scale(
+		swscale.get(),
+		srcData,
+		srcLinesize,
+		0,
+		data.size.height(),
+		dstData,
+		dstLinesize);
+
+	return result;
+}
+
 } // namespace
 
 class VideoTrackObject final {
@@ -384,6 +434,12 @@ void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) {
 			fail(Error::InvalidData);
 			return;
 		}
+		if (!frame->original.isNull()) {
+			frame->original = QImage();
+			for (auto &[_, prepared] : frame->prepared) {
+				prepared.image = QImage();
+			}
+		}
 		frame->format = FrameFormat::YUV420;
 	} else {
 		frame->alpha = (frame->decoded->format == AV_PIX_FMT_BGRA);
@@ -1020,8 +1076,7 @@ bool VideoTrack::markFrameShown() {
 QImage VideoTrack::frame(
 		const FrameRequest &request,
 		const Instance *instance) {
-	const auto data = _shared->frameForPaintWithIndex();
-	const auto frame = data.frame;
+	const auto frame = _shared->frameForPaint();
 	const auto i = frame->prepared.find(instance);
 	const auto none = (i == frame->prepared.end());
 	const auto preparedFor = frame->prepared.empty()
@@ -1034,6 +1089,10 @@ QImage VideoTrack::frame(
 			unwrapped.updateFrameRequest(instance, useRequest);
 		});
 	}
+	if (frame->original.isNull()
+		&& frame->format == FrameFormat::YUV420) {
+		frame->original = ConvertToARGB32(frame->yuv420);
+	}
 	if (!frame->alpha
 		&& GoodForRequest(frame->original, _streamRotation, useRequest)) {
 		return frame->original;
 	}
@@ -1083,6 +1142,14 @@ FrameWithInfo VideoTrack::frameWithInfo(const Instance *instance) {
 	};
 }
 
+QImage VideoTrack::currentFrameImage() {
+	const auto frame = _shared->frameForPaint();
+	if (frame->original.isNull() && frame->format == FrameFormat::YUV420) {
+		frame->original = ConvertToARGB32(frame->yuv420);
+	}
+	return frame->original;
+}
+
 void VideoTrack::unregisterInstance(not_null<const Instance*> instance) {
 	_wrapped.with([=](Implementation &unwrapped) {
 		unwrapped.removeFrameRequest(instance);
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_video_track.h b/Telegram/SourceFiles/media/streaming/media_streaming_video_track.h
index 26413d094..707ff297d 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_video_track.h
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_video_track.h
@@ -59,6 +59,7 @@ public:
 		const FrameRequest &request,
 		const Instance *instance);
 	[[nodiscard]] FrameWithInfo frameWithInfo(const Instance *instance);
+	[[nodiscard]] QImage currentFrameImage();
 	void unregisterInstance(not_null<const Instance*> instance);
 	[[nodiscard]] rpl::producer<> checkNextFrame() const;
 	[[nodiscard]] rpl::producer<> waitingForData() const;
diff --git a/Telegram/SourceFiles/media/view/media_view_overlay_opengl.cpp b/Telegram/SourceFiles/media/view/media_view_overlay_opengl.cpp
index a77013196..cf379cf61 100644
--- a/Telegram/SourceFiles/media/view/media_view_overlay_opengl.cpp
+++ b/Telegram/SourceFiles/media/view/media_view_overlay_opengl.cpp
@@ -200,7 +200,7 @@ void OverlayWidget::RendererGL::paintTransformedVideoFrame(
 	_streamedIndex = _owner->streamedIndex();
 
 	_f->glActiveTexture(GL_TEXTURE0);
-	_textures.bind(*_f, 0);
+	_textures.bind(*_f, 1);
 	if (upload) {
 		_f->glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
 		uploadTexture(
@@ -211,10 +211,9 @@ void OverlayWidget::RendererGL::paintTransformedVideoFrame(
 			yuv->y.stride,
 			yuv->y.data);
 		_lumaSize = yuv->size;
-		_rgbaSize = QSize();
 	}
 	_f->glActiveTexture(GL_TEXTURE1);
-	_textures.bind(*_f, 1);
+	_textures.bind(*_f, 2);
 	if (upload) {
 		uploadTexture(
 			GL_RED,
@@ -225,7 +224,7 @@ void OverlayWidget::RendererGL::paintTransformedVideoFrame(
 			yuv->u.data);
 	}
 	_f->glActiveTexture(GL_TEXTURE2);
-	_textures.bind(*_f, 2);
+	_textures.bind(*_f, 3);
 	if (upload) {
 		uploadTexture(
 			GL_RED,
@@ -280,7 +279,6 @@ void OverlayWidget::RendererGL::paintTransformedStaticContent(
 			stride,
 			data);
 		_rgbaSize = image.size();
-		_lumaSize = QSize();
 	}
 
 	paintTransformedContent(&*program, rect, rotation);
diff --git a/Telegram/SourceFiles/media/view/media_view_overlay_opengl.h b/Telegram/SourceFiles/media/view/media_view_overlay_opengl.h
index d00679f74..6b8f383f4 100644
--- a/Telegram/SourceFiles/media/view/media_view_overlay_opengl.h
+++ b/Telegram/SourceFiles/media/view/media_view_overlay_opengl.h
@@ -106,7 +106,7 @@ private:
 	QOpenGLShader *_texturedVertexShader = nullptr;
 	std::optional<QOpenGLShaderProgram> _withTransparencyProgram;
 	std::optional<QOpenGLShaderProgram> _yuv420Program;
-	Ui::GL::Textures<3> _textures;
+	Ui::GL::Textures<4> _textures;
 	QSize _rgbaSize;
 	QSize _lumaSize;
 	QSize _chromaSize;
diff --git a/Telegram/SourceFiles/media/view/media_view_overlay_widget.cpp b/Telegram/SourceFiles/media/view/media_view_overlay_widget.cpp
index 1d67bfdaf..99f276e70 100644
--- a/Telegram/SourceFiles/media/view/media_view_overlay_widget.cpp
+++ b/Telegram/SourceFiles/media/view/media_view_overlay_widget.cpp
@@ -567,6 +567,12 @@ Streaming::FrameWithInfo OverlayWidget::videoFrameWithInfo() const {
 	};
 }
 
+QImage OverlayWidget::currentVideoFrameImage() const {
+	return _streamed->instance.player().ready()
+		? _streamed->instance.player().currentFrameImage()
+		: _streamed->instance.info().video.cover;
+}
+
 int OverlayWidget::streamedIndex() const {
 	return _streamedCreated;
 }
@@ -2654,7 +2660,7 @@ bool OverlayWidget::createStreamingObjects() {
 
 QImage OverlayWidget::transformedShownContent() const {
 	return transformShownContent(
-		videoShown() ? videoFrame() : _staticContent,
+		videoShown() ? currentVideoFrameImage() : _staticContent,
 		contentRotation());
 }
 
diff --git a/Telegram/SourceFiles/media/view/media_view_overlay_widget.h b/Telegram/SourceFiles/media/view/media_view_overlay_widget.h
index acb7a67f0..8a18215ef 100644
--- a/Telegram/SourceFiles/media/view/media_view_overlay_widget.h
+++ b/Telegram/SourceFiles/media/view/media_view_overlay_widget.h
@@ -397,8 +397,9 @@ private:
 	[[nodiscard]] bool videoShown() const;
 	[[nodiscard]] QSize videoSize() const;
 	[[nodiscard]] bool videoIsGifOrUserpic() const;
-	[[nodiscard]] QImage videoFrame() const;
-	[[nodiscard]] Streaming::FrameWithInfo videoFrameWithInfo() const;
+	[[nodiscard]] QImage videoFrame() const; // ARGB (changes prepare format)
+	[[nodiscard]] QImage currentVideoFrameImage() const; // RGB (may convert)
+	[[nodiscard]] Streaming::FrameWithInfo videoFrameWithInfo() const; // YUV
 	[[nodiscard]] int streamedIndex() const;
 	[[nodiscard]] QImage transformedShownContent() const;
 	[[nodiscard]] QImage transformShownContent(
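
Note: ConvertToARGB32() in the patch goes through tdesktop's own FFmpeg wrappers
(FFmpeg::CreateFrameStorage, FFmpeg::MakeSwscalePointer). For reference only, the
sketch below shows the same YUV420P -> BGRA plane mapping written directly against
libswscale. The function name ConvertYuv420ToBgra and its plane-pointer / stride
parameters are hypothetical illustrations and are not part of this patch.

	// Editor's sketch, assuming raw libswscale and Qt; mirrors the
	// srcData / srcLinesize / dstData / dstLinesize setup used above.
	extern "C" {
	#include <libswscale/swscale.h>
	} // extern "C"

	#include <QImage>
	#include <cstdint>

	QImage ConvertYuv420ToBgra(
			const uint8_t *y, int yStride,
			const uint8_t *u, int uStride,
			const uint8_t *v, int vStride,
			int width, int height) {
		auto result = QImage(width, height, QImage::Format_ARGB32_Premultiplied);
		const auto context = sws_getContext(
			width, height, AV_PIX_FMT_YUV420P,
			width, height, AV_PIX_FMT_BGRA,
			SWS_BILINEAR, nullptr, nullptr, nullptr);
		if (!context) {
			return QImage();
		}
		// Three source planes (Y, U, V) with their strides, in that order.
		const uint8_t *srcData[4] = { y, u, v, nullptr };
		const int srcLinesize[4] = { yStride, uStride, vStride, 0 };
		// One interleaved BGRA destination plane backed by the QImage.
		uint8_t *dstData[4] = { result.bits(), nullptr, nullptr, nullptr };
		const int dstLinesize[4] = { int(result.bytesPerLine()), 0, 0, 0 };
		sws_scale(
			context,
			srcData, srcLinesize,
			0, height,
			dstData, dstLinesize);
		sws_freeContext(context);
		return result;
	}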