Mirror of https://github.com/AyuGram/AyuGramDesktop.git, synced 2025-04-19 15:47:11 +02:00

Fix seek / cancel of video playback in media viewer.

Commit: 607263b8be
Parent: 42b62e90ca
11 changed files with 98 additions and 18 deletions
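
In brief (summarizing the hunks below): decoded video frames can now stay in YUV420 and are converted to ARGB32 only on demand, via a new ConvertToARGB32() helper and currentFrameImage() accessors on Player / VideoTrack / OverlayWidget; the OpenGL renderer grows from three to four textures so RGBA content and the Y/U/V planes no longer share slots; and the GIF drawing code only clears its frozen seek frame when one is actually set.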
@@ -389,7 +389,8 @@ void Gif::draw(Painter &p, const QRect &r, TextSelection selection, crl::time ms
 			}
 			p.drawImage(rthumb, activeOwnPlaying->frozenFrame);
 		} else {
-			if (activeOwnPlaying) {
+			if (activeOwnPlaying
+				&& !activeOwnPlaying->frozenFrame.isNull()) {
 				activeOwnPlaying->frozenFrame = QImage();
 				activeOwnPlaying->frozenStatusText = QString();
 			}
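
Context for the hunk above: the viewer freezes the last shown frame while a seek is in progress (or playback is being cancelled) and clears it when normal playback resumes; the added `!frozenFrame.isNull()` guard makes the clearing branch run only when a frame was actually frozen. A minimal sketch of the pattern, with hypothetical names (`PlaybackView`, `frameToPaint`), not the actual tdesktop types:

#include <optional>
#include <string>

// Hypothetical stand-in for the real frame type.
struct Frame { /* pixel data */ };

struct PlaybackView {
	std::optional<Frame> frozenFrame; // set when a seek begins
	std::string frozenStatusText;     // status line shown while frozen

	// While seeking, keep painting the cached frame; otherwise paint live.
	const Frame *frameToPaint(const Frame &live) const {
		return frozenFrame ? &*frozenFrame : &live;
	}

	void onPlaybackResumed() {
		if (frozenFrame) { // mirrors the new !frozenFrame.isNull() guard
			frozenFrame.reset();
			frozenStatusText.clear();
		}
	}
};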
@@ -91,12 +91,8 @@ void Document::play(const PlaybackOptions &options) {
 }
 
 void Document::saveFrameToCover() {
-	auto request = Streaming::FrameRequest();
-	//request.radius = (_doc && _doc->isVideoMessage())
-	//	? ImageRoundRadius::Ellipse
-	//	: ImageRoundRadius::None;
 	_info.video.cover = _player.ready()
-		? _player.frame(request)
+		? _player.currentFrameImage()
 		: _info.video.cover;
 }
 
@@ -887,6 +887,12 @@ FrameWithInfo Player::frameWithInfo(const Instance *instance) const {
 	return _video->frameWithInfo(instance);
 }
 
+QImage Player::currentFrameImage() const {
+	Expects(_video != nullptr);
+
+	return _video->currentFrameImage();
+}
+
 void Player::unregisterInstance(not_null<const Instance*> instance) {
 	if (_video) {
 		_video->unregisterInstance(instance);
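
The two hunks above replace the `FrameRequest`-based `Player::frame(request)` call in cover saving with a new `Player::currentFrameImage()` accessor, which needs no request object and works even while the current frame is still YUV (conversion happens on demand in VideoTrack, see below). A hedged usage sketch; `coverFrame` and its fallback parameter are hypothetical, and `PlayerT` only assumes the two calls the diff shows:

#include <QtGui/QImage>

// PlayerT is any type exposing ready() and currentFrameImage() const,
// matching the Player interface added in this commit.
template <typename PlayerT>
QImage coverFrame(const PlayerT &player, const QImage &fallback) {
	// No FrameRequest needed: currentFrameImage() returns the frame
	// currently shown, converting YUV420 to ARGB32 if required.
	return player.ready()
		? player.currentFrameImage()
		: fallback;
}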
@@ -68,6 +68,8 @@ public:
 	[[nodiscard]] FrameWithInfo frameWithInfo(
 		const Instance *instance = nullptr) const; // !requireARGB32
+
+	[[nodiscard]] QImage currentFrameImage() const; // Converts if needed.
 
 	void unregisterInstance(not_null<const Instance*> instance);
 	bool markFrameShown();
 
@@ -91,7 +91,9 @@ bool GoodForRequest(
 		const QImage &image,
 		int rotation,
 		const FrameRequest &request) {
-	if (request.resize.isEmpty()) {
+	if (image.isNull()) {
+		return false;
+	} else if (request.resize.isEmpty()) {
 		return true;
 	} else if (rotation != 0) {
 		return false;
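
The added `image.isNull()` check matters because, after this commit, a frame's ARGB `original` may legitimately be empty (YUV frames are converted lazily), and an empty image must never be reported as good for a paint request. A self-contained restatement of the new short-circuit order, with the request reduced to a boolean and the checks past this hunk collapsed, purely for illustration:

#include <QtGui/QImage>

// Simplified sketch: the real FrameRequest also carries sizes/corners;
// the trailing checks beyond this hunk are elided here.
bool goodForRequestSketch(const QImage &image, int rotation, bool resizeEmpty) {
	if (image.isNull()) {
		return false; // new: a lazily-converted frame may not exist yet
	} else if (resizeEmpty) {
		return true; // nothing requested: the original is fine as-is
	} else if (rotation != 0) {
		return false; // rotation always requires a prepared copy
	}
	return false; // remaining size/corner checks elided in this sketch
}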
@@ -7,6 +7,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 */
 #include "media/streaming/media_streaming_video_track.h"
 
+#include "ffmpeg/ffmpeg_utility.h"
 #include "media/audio/media_audio.h"
 #include "base/concurrent_timer.h"
 
@@ -19,6 +20,55 @@ constexpr auto kDisplaySkipped = crl::time(-1);
 constexpr auto kFinishedPosition = std::numeric_limits<crl::time>::max();
 static_assert(kDisplaySkipped != kTimeUnknown);
 
+[[nodiscard]] QImage ConvertToARGB32(const FrameYUV420 &data) {
+	Expects(data.y.data != nullptr);
+	Expects(data.u.data != nullptr);
+	Expects(data.v.data != nullptr);
+	Expects(!data.size.isEmpty());
+
+	//if (FFmpeg::RotationSwapWidthHeight(stream.rotation)) {
+	//	resize.transpose();
+	//}
+
+	auto result = FFmpeg::CreateFrameStorage(data.size);
+	const auto format = AV_PIX_FMT_BGRA;
+	const auto swscale = FFmpeg::MakeSwscalePointer(
+		data.size,
+		AV_PIX_FMT_YUV420P,
+		data.size,
+		AV_PIX_FMT_BGRA);
+	if (!swscale) {
+		return QImage();
+	}
+
+	// AV_NUM_DATA_POINTERS defined in AVFrame struct
+	const uint8_t *srcData[AV_NUM_DATA_POINTERS] = {
+		static_cast<const uint8_t*>(data.y.data),
+		static_cast<const uint8_t*>(data.u.data),
+		static_cast<const uint8_t*>(data.v.data),
+		nullptr,
+	};
+	int srcLinesize[AV_NUM_DATA_POINTERS] = {
+		data.y.stride,
+		data.u.stride,
+		data.v.stride,
+		0,
+	};
+	uint8_t *dstData[AV_NUM_DATA_POINTERS] = { result.bits(), nullptr };
+	int dstLinesize[AV_NUM_DATA_POINTERS] = { result.bytesPerLine(), 0 };
+
+	sws_scale(
+		swscale.get(),
+		srcData,
+		srcLinesize,
+		0,
+		data.size.height(),
+		dstData,
+		dstLinesize);
+
+	return result;
+}
+
 } // namespace
 
 class VideoTrackObject final {
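
The `ConvertToARGB32` helper above drives libswscale through tdesktop's FFmpeg wrappers (`CreateFrameStorage`, `MakeSwscalePointer`). For reference, a standalone sketch of the same YUV420P-to-BGRA conversion against raw FFmpeg, without the Qt/wrapper layer; buffer ownership and error handling are simplified, and `yuvToBgra` is a name invented for this sketch:

extern "C" {
#include <libswscale/swscale.h>
}
#include <cstdint>
#include <vector>

// Converts one YUV420P frame to packed 32-bit BGRA. Returns an empty
// vector on failure.
std::vector<uint8_t> yuvToBgra(
		const uint8_t *y, int yStride,
		const uint8_t *u, int uStride,
		const uint8_t *v, int vStride,
		int width, int height) {
	SwsContext *context = sws_getContext(
		width, height, AV_PIX_FMT_YUV420P,
		width, height, AV_PIX_FMT_BGRA,
		SWS_BILINEAR, nullptr, nullptr, nullptr);
	if (!context) {
		return {};
	}

	std::vector<uint8_t> out(size_t(width) * height * 4);

	// Four-element arrays mirror the AV_NUM_DATA_POINTERS arrays above.
	const uint8_t *srcData[4] = { y, u, v, nullptr };
	const int srcLinesize[4] = { yStride, uStride, vStride, 0 };
	uint8_t *dstData[4] = { out.data(), nullptr, nullptr, nullptr };
	const int dstLinesize[4] = { width * 4, 0, 0, 0 };

	sws_scale(context, srcData, srcLinesize, 0, height, dstData, dstLinesize);
	sws_freeContext(context);
	return out;
}

On little-endian platforms QImage::Format_ARGB32 stores bytes in B, G, R, A order, which is why the helper in the diff targets AV_PIX_FMT_BGRA.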
@@ -384,6 +434,12 @@ void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) {
 			fail(Error::InvalidData);
 			return;
 		}
+		if (!frame->original.isNull()) {
+			frame->original = QImage();
+			for (auto &[_, prepared] : frame->prepared) {
+				prepared.image = QImage();
+			}
+		}
 		frame->format = FrameFormat::YUV420;
 	} else {
 		frame->alpha = (frame->decoded->format == AV_PIX_FMT_BGRA);
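
The `rasterizeFrame` hunk adds cache invalidation: when a fresh YUV420 frame is decoded, any previously converted `original` image, and every per-instance `prepared` copy derived from it, is stale and gets dropped, so the lazy conversion in `frame()` / `currentFrameImage()` runs again. A compact sketch of that rule; the `FrameCache` type and integer consumer keys are hypothetical:

#include <QtGui/QImage>
#include <map>

struct PreparedImage { QImage image; };

struct FrameCache {
	QImage original;                       // lazy ARGB32 conversion
	std::map<int, PreparedImage> prepared; // per-consumer prepared copies

	// Called when a new YUV420 frame replaces the previous one.
	void onNewYuvFrame() {
		if (!original.isNull()) {
			original = QImage(); // drop the stale conversion
			for (auto &[id, p] : prepared) {
				p.image = QImage(); // and every copy derived from it
			}
		}
	}
};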
@@ -1020,8 +1076,7 @@ bool VideoTrack::markFrameShown() {
 QImage VideoTrack::frame(
 		const FrameRequest &request,
 		const Instance *instance) {
-	const auto data = _shared->frameForPaintWithIndex();
-	const auto frame = data.frame;
+	const auto frame = _shared->frameForPaint();
 	const auto i = frame->prepared.find(instance);
 	const auto none = (i == frame->prepared.end());
 	const auto preparedFor = frame->prepared.empty()
@@ -1034,6 +1089,10 @@ QImage VideoTrack::frame(
 			unwrapped.updateFrameRequest(instance, useRequest);
 		});
 	}
+	if (frame->original.isNull()
+		&& frame->format == FrameFormat::YUV420) {
+		frame->original = ConvertToARGB32(frame->yuv420);
+	}
 	if (!frame->alpha
 		&& GoodForRequest(frame->original, _streamRotation, useRequest)) {
 		return frame->original;
@@ -1083,6 +1142,14 @@ FrameWithInfo VideoTrack::frameWithInfo(const Instance *instance) {
 	};
 }
 
+QImage VideoTrack::currentFrameImage() {
+	const auto frame = _shared->frameForPaint();
+	if (frame->original.isNull() && frame->format == FrameFormat::YUV420) {
+		frame->original = ConvertToARGB32(frame->yuv420);
+	}
+	return frame->original;
+}
+
 void VideoTrack::unregisterInstance(not_null<const Instance*> instance) {
 	_wrapped.with([=](Implementation &unwrapped) {
 		unwrapped.removeFrameRequest(instance);
@@ -59,6 +59,7 @@ public:
 		const FrameRequest &request,
 		const Instance *instance);
 	[[nodiscard]] FrameWithInfo frameWithInfo(const Instance *instance);
+	[[nodiscard]] QImage currentFrameImage();
 	void unregisterInstance(not_null<const Instance*> instance);
 	[[nodiscard]] rpl::producer<> checkNextFrame() const;
 	[[nodiscard]] rpl::producer<> waitingForData() const;
@@ -200,7 +200,7 @@ void OverlayWidget::RendererGL::paintTransformedVideoFrame(
 	_streamedIndex = _owner->streamedIndex();
 
 	_f->glActiveTexture(GL_TEXTURE0);
-	_textures.bind(*_f, 0);
+	_textures.bind(*_f, 1);
 	if (upload) {
 		_f->glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
 		uploadTexture(
@@ -211,10 +211,9 @@ void OverlayWidget::RendererGL::paintTransformedVideoFrame(
 			yuv->y.stride,
 			yuv->y.data);
 		_lumaSize = yuv->size;
-		_rgbaSize = QSize();
 	}
 	_f->glActiveTexture(GL_TEXTURE1);
-	_textures.bind(*_f, 1);
+	_textures.bind(*_f, 2);
 	if (upload) {
 		uploadTexture(
 			GL_RED,
@@ -225,7 +224,7 @@ void OverlayWidget::RendererGL::paintTransformedVideoFrame(
 			yuv->u.data);
 	}
 	_f->glActiveTexture(GL_TEXTURE2);
-	_textures.bind(*_f, 2);
+	_textures.bind(*_f, 3);
 	if (upload) {
 		uploadTexture(
 			GL_RED,
@@ -280,7 +279,6 @@ void OverlayWidget::RendererGL::paintTransformedStaticContent(
 			stride,
 			data);
 		_rgbaSize = image.size();
-		_lumaSize = QSize();
 	}
 
 	paintTransformedContent(&*program, rect, rotation);
@@ -106,7 +106,7 @@ private:
 	QOpenGLShader *_texturedVertexShader = nullptr;
 	std::optional<QOpenGLShaderProgram> _withTransparencyProgram;
 	std::optional<QOpenGLShaderProgram> _yuv420Program;
-	Ui::GL::Textures<3> _textures;
+	Ui::GL::Textures<4> _textures;
 	QSize _rgbaSize;
 	QSize _lumaSize;
 	QSize _chromaSize;
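
Growing `Ui::GL::Textures<3>` to `Textures<4>` is what shifts every `bind()` index in the renderer hunks above by one: RGBA static content keeps a dedicated texture, so switching between video (three YUV planes) and static (RGBA) content no longer overwrites one with the other, which is also why the `_rgbaSize = QSize()` and `_lumaSize = QSize()` resets could be deleted. The slot layout this implies, written out as a sketch (an inference from the bind calls, not code from the commit; only the GL_TEXTUREn-to-index pairings are shown in the diff):

// Inferred texture-slot layout after this change:
enum TextureSlot : int {
	kRgba = 0, // static content path (index not shown in these hunks)
	kY = 1,    // luma plane, bound while GL_TEXTURE0 is active
	kU = 2,    // chroma U, bound while GL_TEXTURE1 is active
	kV = 3,    // chroma V, bound while GL_TEXTURE2 is active
};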
@@ -567,6 +567,12 @@ Streaming::FrameWithInfo OverlayWidget::videoFrameWithInfo() const {
 	};
 }
 
+QImage OverlayWidget::currentVideoFrameImage() const {
+	return _streamed->instance.player().ready()
+		? _streamed->instance.player().currentFrameImage()
+		: _streamed->instance.info().video.cover;
+}
+
 int OverlayWidget::streamedIndex() const {
 	return _streamedCreated;
 }
@@ -2654,7 +2660,7 @@ bool OverlayWidget::createStreamingObjects() {
 
 QImage OverlayWidget::transformedShownContent() const {
 	return transformShownContent(
-		videoShown() ? videoFrame() : _staticContent,
+		videoShown() ? currentVideoFrameImage() : _staticContent,
 		contentRotation());
 }
 
@@ -397,8 +397,9 @@ private:
 	[[nodiscard]] bool videoShown() const;
 	[[nodiscard]] QSize videoSize() const;
 	[[nodiscard]] bool videoIsGifOrUserpic() const;
-	[[nodiscard]] QImage videoFrame() const;
-	[[nodiscard]] Streaming::FrameWithInfo videoFrameWithInfo() const;
+	[[nodiscard]] QImage videoFrame() const; // ARGB (changes prepare format)
+	[[nodiscard]] QImage currentVideoFrameImage() const; // RGB (may convert)
+	[[nodiscard]] Streaming::FrameWithInfo videoFrameWithInfo() const; // YUV
 	[[nodiscard]] int streamedIndex() const;
 	[[nodiscard]] QImage transformedShownContent() const;
 	[[nodiscard]] QImage transformShownContent(