diff --git a/Telegram/SourceFiles/media/media_clip_ffmpeg.cpp b/Telegram/SourceFiles/media/media_clip_ffmpeg.cpp index 749fe795cb..c5744a40a1 100644 --- a/Telegram/SourceFiles/media/media_clip_ffmpeg.cpp +++ b/Telegram/SourceFiles/media/media_clip_ffmpeg.cpp @@ -169,10 +169,19 @@ bool FFMpegReaderImplementation::readFramesTill(int64 ms) { } } +int64 FFMpegReaderImplementation::frameRealTime() const { + return _frameMs; +} + uint64 FFMpegReaderImplementation::framePresentationTime() const { return static_cast<uint64>(qMax(_frameTime + _frameTimeCorrection, 0LL)); } +int64 FFMpegReaderImplementation::durationMs() const { + if (_fmtContext->streams[_streamId]->duration == AV_NOPTS_VALUE) return 0; + return (_fmtContext->streams[_streamId]->duration * 1000LL * _fmtContext->streams[_streamId]->time_base.num) / _fmtContext->streams[_streamId]->time_base.den; +} + bool FFMpegReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const QSize &size) { t_assert(_frameRead); _frameRead = false; @@ -322,11 +331,6 @@ QString FFMpegReaderImplementation::logData() const { return qsl("for file '%1', data size '%2'").arg(_location ? 
_location->name() : QString()).arg(_data->size()); } -int FFMpegReaderImplementation::duration() const { - if (_fmtContext->streams[_streamId]->duration == AV_NOPTS_VALUE) return 0; - return (_fmtContext->streams[_streamId]->duration * _fmtContext->streams[_streamId]->time_base.num) / _fmtContext->streams[_streamId]->time_base.den; -} - FFMpegReaderImplementation::~FFMpegReaderImplementation() { if (_mode == Mode::Normal && _audioStreamId >= 0) { audioPlayer()->stop(AudioMsgId::Type::Video); diff --git a/Telegram/SourceFiles/media/media_clip_ffmpeg.h b/Telegram/SourceFiles/media/media_clip_ffmpeg.h index ed47711b77..639ebb0346 100644 --- a/Telegram/SourceFiles/media/media_clip_ffmpeg.h +++ b/Telegram/SourceFiles/media/media_clip_ffmpeg.h @@ -38,11 +38,12 @@ public: FFMpegReaderImplementation(FileLocation *location, QByteArray *data, uint64 playId); bool readFramesTill(int64 ms) override; + int64 frameRealTime() const override; uint64 framePresentationTime() const override; bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) override; + int64 durationMs() const override; bool start(Mode mode) override; - int duration() const; QString logData() const; ~FFMpegReaderImplementation(); diff --git a/Telegram/SourceFiles/media/media_clip_implementation.h b/Telegram/SourceFiles/media/media_clip_implementation.h index e2dd48361b..072522325b 100644 --- a/Telegram/SourceFiles/media/media_clip_implementation.h +++ b/Telegram/SourceFiles/media/media_clip_implementation.h @@ -41,12 +41,15 @@ public: // Read frames till current frame will have presentation time > ms. virtual bool readFramesTill(int64 ms) = 0; - // Get current frame presentation time. + // Get current frame real and presentation time. + virtual int64 frameRealTime() const = 0; virtual uint64 framePresentationTime() const = 0; // Render current frame to an image with specific size. 
virtual bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) = 0; + virtual int64 durationMs() const = 0; + virtual bool start(Mode mode) = 0; virtual ~ReaderImplementation() { } diff --git a/Telegram/SourceFiles/media/media_clip_qtgif.cpp b/Telegram/SourceFiles/media/media_clip_qtgif.cpp index 8117e5e748..8cb0f1cf43 100644 --- a/Telegram/SourceFiles/media/media_clip_qtgif.cpp +++ b/Telegram/SourceFiles/media/media_clip_qtgif.cpp @@ -47,6 +47,10 @@ bool QtGifReaderImplementation::readFramesTill(int64 ms) { return true; } +int64 QtGifReaderImplementation::frameRealTime() const { + return _frameRealTime; +} + uint64 QtGifReaderImplementation::framePresentationTime() const { return static_cast<uint64>(qMax(_frameTime, 0LL)); } @@ -63,6 +67,7 @@ bool QtGifReaderImplementation::readNextFrame() { } --_framesLeft; _frameTime += _frameDelay; + _frameRealTime += _frameDelay; return true; } @@ -90,6 +95,10 @@ bool QtGifReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const QS return true; } +int64 QtGifReaderImplementation::durationMs() const { + return 0; // not supported +} + bool QtGifReaderImplementation::start(Mode mode) { if (mode == Mode::OnlyGifv) return false; return jumpToStart(); } diff --git a/Telegram/SourceFiles/media/media_clip_qtgif.h b/Telegram/SourceFiles/media/media_clip_qtgif.h index 78205ded3f..b03da95c56 100644 --- a/Telegram/SourceFiles/media/media_clip_qtgif.h +++ b/Telegram/SourceFiles/media/media_clip_qtgif.h @@ -32,8 +32,10 @@ public: QtGifReaderImplementation(FileLocation *location, QByteArray *data); bool readFramesTill(int64 ms) override; + int64 frameRealTime() const override; uint64 framePresentationTime() const override; bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) override; + int64 durationMs() const override; bool start(Mode mode) override; ~QtGifReaderImplementation(); @@ -44,6 +46,7 @@ private: QImageReader *_reader = nullptr; int _framesLeft = 0; + int64 _frameRealTime = 0; int64 _frameTime = 0; int 
_frameDelay = 0; QImage _frame; diff --git a/Telegram/SourceFiles/media/media_clip_reader.cpp b/Telegram/SourceFiles/media/media_clip_reader.cpp index f750dcd119..7630b93257 100644 --- a/Telegram/SourceFiles/media/media_clip_reader.cpp +++ b/Telegram/SourceFiles/media/media_clip_reader.cpp @@ -112,7 +112,7 @@ Reader::Frame *Reader::frameToShow(int32 *index) const { // 0 means not ready int32 step = _step.loadAcquire(), i; if (step == WaitingForDimensionsStep) { if (index) *index = 0; - return 0; + return nullptr; } else if (step == WaitingForRequestStep) { i = 0; } else if (step == WaitingForFirstFrameStep) { @@ -130,7 +130,7 @@ Reader::Frame *Reader::frameToWrite(int32 *index) const { // 0 means not ready i = 0; } else if (step == WaitingForRequestStep) { if (index) *index = 0; - return 0; + return nullptr; } else if (step == WaitingForFirstFrameStep) { i = 0; } else { @@ -144,7 +144,7 @@ Reader::Frame *Reader::frameToWriteNext(bool checkNotWriting, int32 *index) cons int32 step = _step.loadAcquire(), i; if (step == WaitingForDimensionsStep || step == WaitingForRequestStep || (checkNotWriting && (step % 2))) { if (index) *index = 0; - return 0; + return nullptr; } i = ((step + 4) / 2) % 3; if (index) *index = i; @@ -258,6 +258,21 @@ bool Reader::ready() const { return false; } +bool Reader::hasAudio() const { + return ready() ? _hasAudio : false; +} + +int64 Reader::getPositionMs() const { + if (auto frame = frameToShow()) { + return frame->positionMs; + } + return 0; +} + +int64 Reader::getDurationMs() const { + return ready() ? 
_durationMs : 0; +} + int32 Reader::width() const { return _width; } @@ -313,6 +328,7 @@ public: } _width = frame()->original.width(); _height = frame()->original.height(); + _durationMs = _implementation->durationMs(); return ProcessResult::Started; } return ProcessResult::Wait; @@ -335,6 +351,7 @@ public: if (!_implementation->readFramesTill(ms - _animationStarted)) { return error(); } + _nextFramePositionMs = _implementation->frameRealTime(); _nextFrameWhen = _animationStarted + _implementation->framePresentationTime(); if (!renderFrame()) { @@ -352,6 +369,7 @@ public: frame()->pix = QPixmap(); frame()->pix = _prepareFrame(_request, frame()->original, frame()->alpha, frame()->cache); frame()->when = _nextFrameWhen; + frame()->positionMs = _nextFramePositionMs; return true; } @@ -427,6 +445,9 @@ private: QImage original, cache; bool alpha = true; uint64 when = 0; + + // Counted from the end, so that positionMs <= durationMs despite keep up delays. + int64 positionMs = 0; }; Frame _frames[3]; int _frame = 0; @@ -437,8 +458,10 @@ private: int _width = 0; int _height = 0; + int64 _durationMs = 0; uint64 _animationStarted = 0; uint64 _nextFrameWhen = 0; + int64 _nextFramePositionMs = 0; bool _paused = false; @@ -531,6 +554,7 @@ bool Manager::handleProcessResult(ReaderPrivate *reader, ProcessResult result, u if (result == ProcessResult::Started) { _loadLevel.fetchAndAddRelaxed(reader->_width * reader->_height - AverageGifSize); + it.key()->_durationMs = reader->_durationMs; } // See if we need to pause GIF because it is not displayed right now. 
if (!reader->_paused && reader->_mode == Reader::Mode::Gif && result == ProcessResult::Repaint) { @@ -552,6 +576,7 @@ bool Manager::handleProcessResult(ReaderPrivate *reader, ProcessResult result, u frame->pix = reader->frame()->pix; frame->original = reader->frame()->original; frame->displayed.storeRelease(0); + frame->positionMs = reader->frame()->positionMs; if (result == ProcessResult::Started) { reader->startedAt(ms); it.key()->moveToNextWrite(); @@ -704,7 +729,7 @@ MTPDocumentAttribute readAttributes(const QString &fname, const QByteArray &data request.factor = 1; cover = _prepareFrame(request, cover, hasAlpha, cacheForResize).toImage(); } - int duration = reader->duration(); + int duration = reader->durationMs() / 1000; return MTP_documentAttributeVideo(MTP_int(duration), MTP_int(cover.width()), MTP_int(cover.height())); } } diff --git a/Telegram/SourceFiles/media/media_clip_reader.h b/Telegram/SourceFiles/media/media_clip_reader.h index 85ee447ccd..31336a59d7 100644 --- a/Telegram/SourceFiles/media/media_clip_reader.h +++ b/Telegram/SourceFiles/media/media_clip_reader.h @@ -101,6 +101,10 @@ public: } bool ready() const; + bool hasAudio() const; + int64 getPositionMs() const; + int64 getDurationMs() const; + void stop(); void error(); @@ -118,6 +122,8 @@ private: State _state = State::Reading; uint64 _playId; + bool _hasAudio = false; + int64 _durationMs = 0; mutable int _width = 0; mutable int _height = 0; @@ -125,8 +131,6 @@ private: // -2, -1 - init, 0-5 - work, show ((state + 1) / 2) % 3 state, write ((state + 3) / 2) % 3 mutable QAtomicInt _step = WaitingForDimensionsStep; struct Frame { - Frame() : displayed(false) { - } void clear() { pix = QPixmap(); original = QImage(); @@ -134,7 +138,11 @@ private: QPixmap pix; QImage original; FrameRequest request; - QAtomicInt displayed; + QAtomicInt displayed = 0; + + // Should be counted from the end, + // so that positionMs <= _durationMs. 
+ int64 positionMs = 0; }; mutable Frame _frames[3]; Frame *frameToShow(int *index = 0) const; // 0 means not ready diff --git a/Telegram/SourceFiles/media/view/media_clip_playback.cpp b/Telegram/SourceFiles/media/view/media_clip_playback.cpp index 29ca259248..14f93c9e83 100644 --- a/Telegram/SourceFiles/media/view/media_clip_playback.cpp +++ b/Telegram/SourceFiles/media/view/media_clip_playback.cpp @@ -44,7 +44,9 @@ void Playback::updateState(const AudioPlaybackState &playbackState) { } float64 progress = 0.; - if (duration) { + if (position > duration) { + progress = 1.; + } else if (duration) { progress = duration ? snap(float64(position) / duration, 0., 1.) : 0.; } if (duration != _duration || position != _position) { diff --git a/Telegram/SourceFiles/mediaview.cpp b/Telegram/SourceFiles/mediaview.cpp index 925d9d769b..7e5aee2f67 100644 --- a/Telegram/SourceFiles/mediaview.cpp +++ b/Telegram/SourceFiles/mediaview.cpp @@ -701,10 +701,16 @@ void MediaView::clipCallback(Media::Clip::Notification notification) { switch (notification) { case NotificationReinit: { - if (HistoryItem *item = App::histItemById(_msgmigrated ? 0 : _channel, _msgid)) { + if (auto item = App::histItemById(_msgmigrated ? 0 : _channel, _msgid)) { if (_gif->state() == State::Error) { _current = QPixmap(); } + _videoIsSilent = _doc->isVideo() && !_gif->hasAudio(); + if (_videoIsSilent) { + _videoDurationMs = _gif->getDurationMs(); + _videoPositionMs = _gif->getPositionMs(); + updateSilentVideoPlaybackState(); + } displayDocument(_doc, item); } else { stopGif(); @@ -713,6 +719,10 @@ void MediaView::clipCallback(Media::Clip::Notification notification) { case NotificationRepaint: { if (!_gif->currentDisplayed()) { + if (_videoIsSilent) { + _videoPositionMs = _gif->getPositionMs(); + updateSilentVideoPlaybackState(); + } update(_x, _y, _w, _h); } } break; @@ -1230,6 +1240,11 @@ void MediaView::createClipReader() { auto mode = _doc->isVideo() ? 
Media::Clip::Reader::Mode::Video : Media::Clip::Reader::Mode::Gif; _gif = std_::make_unique<Media::Clip::Reader>(_doc->location(), _doc->data(), func(this, &MediaView::clipCallback), mode); + // Correct values will be set when gif gets inited. + _videoIsSilent = false; + _videoPositionMs = 0ULL; + _videoDurationMs = _doc->duration() * 1000ULL; + createClipController(); } @@ -1305,11 +1320,30 @@ void MediaView::onVideoPlayProgress(const AudioMsgId &audioId) { t_assert(_gif != nullptr); t_assert(audioPlayer() != nullptr); auto state = audioPlayer()->currentVideoState(_gif->playId()); + updateVideoPlaybackState(state); +} + +void MediaView::updateVideoPlaybackState(const AudioPlaybackState &state) { if (state.frequency) { _clipController->updatePlayback(state); } } +void MediaView::updateSilentVideoPlaybackState() { + AudioPlaybackState state; + if (_videoPaused) { + state.state = AudioPlayerPaused; + } else if (_videoPositionMs == _videoDurationMs) { + state.state = AudioPlayerStoppedAtEnd; + } else { + state.state = AudioPlayerPlaying; + } + state.position = _videoPositionMs; + state.duration = _videoDurationMs; + state.frequency = _videoFrequencyMs; + updateVideoPlaybackState(state); +} + void MediaView::paintEvent(QPaintEvent *e) { QRect r(e->rect()); QRegion region(e->region()); diff --git a/Telegram/SourceFiles/mediaview.h b/Telegram/SourceFiles/mediaview.h index 9ff96395cd..1e696c1472 100644 --- a/Telegram/SourceFiles/mediaview.h +++ b/Telegram/SourceFiles/mediaview.h @@ -28,6 +28,8 @@ class Controller; } // namespace Clip } // namespace Media +struct AudioPlaybackState; + class MediaView : public TWidget, public RPCSender, public ClickHandlerHost { Q_OBJECT @@ -130,6 +132,9 @@ private: void findCurrent(); void loadBack(); + void updateVideoPlaybackState(const AudioPlaybackState &state); + void updateSilentVideoPlaybackState(); + void createClipController(); void setClipControllerGeometry(); @@ -197,6 +202,13 @@ private: std_::unique_ptr<Media::Clip::Reader> _gif; int32 _full = -1; // -1 - thumb, 0 - 
medium, 1 - full + // Video without audio stream playback information. + bool _videoIsSilent = false; + bool _videoPaused = false; + int64 _videoPositionMs = 0; + int64 _videoDurationMs = 0; + int32 _videoFrequencyMs = 1000; // 1000 ms per second. + bool fileShown() const; bool gifShown() const; void stopGif();