Use FFmpeg hardware acceleration in media viewer / PiP.

This commit is contained in:
John Preston 2022-03-22 17:56:43 +04:00
parent 207cb35c55
commit 0dca556843
14 changed files with 272 additions and 57 deletions

View file

@ -31,6 +31,15 @@ constexpr auto kAvioBlockSize = 4096;
constexpr auto kTimeUnknown = std::numeric_limits<crl::time>::min(); constexpr auto kTimeUnknown = std::numeric_limits<crl::time>::min();
constexpr auto kDurationMax = crl::time(std::numeric_limits<int>::max()); constexpr auto kDurationMax = crl::time(std::numeric_limits<int>::max());
// Signature of the AVCodecContext::get_format callback used to pick
// a pixel format out of the decoder-provided candidate list.
using GetFormatMethod = enum AVPixelFormat(*)(
struct AVCodecContext *s,
const enum AVPixelFormat *fmt);
// A resolved hardware acceleration method: the get_format callback to
// install on the codec context and the pixel format that callback accepts.
struct HwAccelDescriptor {
GetFormatMethod getFormat = nullptr;
AVPixelFormat format = AV_PIX_FMT_NONE;
};
void AlignedImageBufferCleanupHandler(void* data) { void AlignedImageBufferCleanupHandler(void* data) {
const auto buffer = static_cast<uchar*>(data); const auto buffer = static_cast<uchar*>(data);
delete[] buffer; delete[] buffer;
@ -76,6 +85,69 @@ void PremultiplyLine(uchar *dst, const uchar *src, int intsCount) {
#endif // LIB_FFMPEG_USE_QT_PRIVATE_API #endif // LIB_FFMPEG_USE_QT_PRIVATE_API
} }
// Instantiated as an AVCodecContext::get_format callback: scans the
// candidate pixel formats offered by the decoder and accepts Required
// if it is present.
//
// ctx is unused but required by the callback signature; pix_fmts is an
// AV_PIX_FMT_NONE-terminated array. Returns Required when available,
// AV_PIX_FMT_NONE otherwise (which makes FFmpeg fall back / fail).
template <AVPixelFormat Required>
enum AVPixelFormat GetFormatImplementation(
AVCodecContext *ctx,
const enum AVPixelFormat *pix_fmts) {
// Compare against the named sentinel instead of a magic -1.
for (auto p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) {
if (*p == Required) {
return *p;
}
}
return AV_PIX_FMT_NONE;
}
// Builds the descriptor pairing a pixel format with the get_format
// callback instantiation that selects exactly that format.
template <AVPixelFormat Format>
[[nodiscard]] HwAccelDescriptor HwAccelByFormat() {
auto result = HwAccelDescriptor();
result.getFormat = GetFormatImplementation<Format>;
result.format = Format;
return result;
}
// Chooses a hardware acceleration descriptor for the given decoder and
// device type: enumerates the decoder's hardware configurations looking
// for one that matches `type` and supports attaching a hw_device_ctx,
// then maps the advertised pixel format to our get_format callback.
// Returns an empty descriptor when no usable configuration is found or
// the advertised format is not one we handle on this platform.
[[nodiscard]] HwAccelDescriptor ResolveHwAccel(
not_null<const AVCodec*> decoder,
AVHWDeviceType type) {
Expects(type != AV_HWDEVICE_TYPE_NONE);
// Find the pixel format the decoder exposes for the requested device
// type. We require AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX because the
// caller later creates and attaches a hardware device context.
const auto format = [&] {
for (auto i = 0;; i++) {
const auto config = avcodec_get_hw_config(decoder, i);
if (!config) {
// Enumerated all configurations without a match.
break;
} else if (config->device_type == type
&& (config->methods
& AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX)) {
return config->pix_fmt;
}
}
return AV_PIX_FMT_NONE;
}();
// Only formats handled on the current platform are accepted;
// CUDA is compiled in on every platform.
switch (format) {
#ifdef Q_OS_WIN
case AV_PIX_FMT_D3D11:
return HwAccelByFormat<AV_PIX_FMT_D3D11>();
case AV_PIX_FMT_DXVA2_VLD:
return HwAccelByFormat<AV_PIX_FMT_DXVA2_VLD>();
case AV_PIX_FMT_D3D11VA_VLD:
return HwAccelByFormat<AV_PIX_FMT_D3D11VA_VLD>();
#elif defined Q_OS_MAC // Q_OS_WIN
case AV_PIX_FMT_VIDEOTOOLBOX:
return HwAccelByFormat<AV_PIX_FMT_VIDEOTOOLBOX>();
#else // Q_OS_WIN || Q_OS_MAC
case AV_PIX_FMT_VAAPI:
return HwAccelByFormat<AV_PIX_FMT_VAAPI>();
case AV_PIX_FMT_VDPAU:
return HwAccelByFormat<AV_PIX_FMT_VDPAU>();
#endif // Q_OS_WIN || Q_OS_MAC
case AV_PIX_FMT_CUDA:
return HwAccelByFormat<AV_PIX_FMT_CUDA>();
}
// Unhandled format: no acceleration.
return {};
}
} // namespace } // namespace
IOPointer MakeIOPointer( IOPointer MakeIOPointer(
@ -161,7 +233,7 @@ const AVCodec *FindDecoder(not_null<AVCodecContext*> context) {
: avcodec_find_decoder(context->codec_id); : avcodec_find_decoder(context->codec_id);
} }
CodecPointer MakeCodecPointer(not_null<AVStream*> stream) { CodecPointer MakeCodecPointer(CodecDescriptor descriptor) {
auto error = AvErrorWrap(); auto error = AvErrorWrap();
auto result = CodecPointer(avcodec_alloc_context3(nullptr)); auto result = CodecPointer(avcodec_alloc_context3(nullptr));
@ -170,6 +242,7 @@ CodecPointer MakeCodecPointer(not_null<AVStream*> stream) {
LogError(qstr("avcodec_alloc_context3")); LogError(qstr("avcodec_alloc_context3"));
return {}; return {};
} }
const auto stream = descriptor.stream;
error = avcodec_parameters_to_context(context, stream->codecpar); error = avcodec_parameters_to_context(context, stream->codecpar);
if (error) { if (error) {
LogError(qstr("avcodec_parameters_to_context"), error); LogError(qstr("avcodec_parameters_to_context"), error);
@ -183,7 +256,37 @@ CodecPointer MakeCodecPointer(not_null<AVStream*> stream) {
if (!codec) { if (!codec) {
LogError(qstr("avcodec_find_decoder"), context->codec_id); LogError(qstr("avcodec_find_decoder"), context->codec_id);
return {}; return {};
} else if ((error = avcodec_open2(context, codec, nullptr))) { }
if (descriptor.type != AV_HWDEVICE_TYPE_NONE) {
const auto hw = ResolveHwAccel(codec, descriptor.type);
if (!hw.getFormat) {
return {};
}
context->get_format = hw.getFormat;
auto hwDeviceContext = (AVBufferRef*)nullptr;
error = av_hwdevice_ctx_create(
&hwDeviceContext,
descriptor.type,
nullptr,
nullptr,
0);
if (error || !hwDeviceContext) {
LogError(qstr("av_hwdevice_ctx_create"), error);
return {};
}
DEBUG_LOG(("Video Info: "
"Using \"%1\" hardware acceleration for \"%2\" decoder."
).arg(av_hwdevice_get_type_name(descriptor.type)
).arg(codec->name));
context->hw_device_ctx = av_buffer_ref(hwDeviceContext);
av_buffer_unref(&hwDeviceContext);
} else {
DEBUG_LOG(("Video Info: Using software \"%2\" decoder."
).arg(codec->name));
}
if ((error = avcodec_open2(context, codec, nullptr))) {
LogError(qstr("avcodec_open2"), error); LogError(qstr("avcodec_open2"), error);
return {}; return {};
} }

View file

@ -125,7 +125,12 @@ struct CodecDeleter {
void operator()(AVCodecContext *value); void operator()(AVCodecContext *value);
}; };
using CodecPointer = std::unique_ptr<AVCodecContext, CodecDeleter>; using CodecPointer = std::unique_ptr<AVCodecContext, CodecDeleter>;
[[nodiscard]] CodecPointer MakeCodecPointer(not_null<AVStream*> stream);
// Parameters for MakeCodecPointer: the stream whose codec parameters to
// use, and an optional hardware device type (AV_HWDEVICE_TYPE_NONE
// requests plain software decoding).
struct CodecDescriptor {
not_null<AVStream*> stream;
AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;
};
[[nodiscard]] CodecPointer MakeCodecPointer(CodecDescriptor descriptor);
struct FrameDeleter { struct FrameDeleter {
void operator()(AVFrame *value); void operator()(AVFrame *value);

View file

@ -86,7 +86,7 @@ bool AudioTrack::tryReadFirstFrame(FFmpeg::Packet &&packet) {
return false; return false;
} }
// Return the last valid frame if we seek too far. // Return the last valid frame if we seek too far.
_stream.frame = std::move(_initialSkippingFrame); _stream.decodedFrame = std::move(_initialSkippingFrame);
return processFirstFrame(); return processFirstFrame();
} else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) { } else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) {
return false; return false;
@ -102,15 +102,15 @@ bool AudioTrack::tryReadFirstFrame(FFmpeg::Packet &&packet) {
// Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames. // Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames.
// Try skipping frames until one is after the requested position. // Try skipping frames until one is after the requested position.
std::swap(_initialSkippingFrame, _stream.frame); std::swap(_initialSkippingFrame, _stream.decodedFrame);
if (!_stream.frame) { if (!_stream.decodedFrame) {
_stream.frame = FFmpeg::MakeFramePointer(); _stream.decodedFrame = FFmpeg::MakeFramePointer();
} }
} }
} }
bool AudioTrack::processFirstFrame() { bool AudioTrack::processFirstFrame() {
if (!FFmpeg::FrameHasData(_stream.frame.get())) { if (!FFmpeg::FrameHasData(_stream.decodedFrame.get())) {
return false; return false;
} }
mixerInit(); mixerInit();
@ -131,7 +131,7 @@ void AudioTrack::mixerInit() {
Expects(!initialized()); Expects(!initialized());
auto data = std::make_unique<ExternalSoundData>(); auto data = std::make_unique<ExternalSoundData>();
data->frame = std::move(_stream.frame); data->frame = std::move(_stream.decodedFrame);
data->codec = std::move(_stream.codec); data->codec = std::move(_stream.codec);
data->frequency = _stream.frequency; data->frequency = _stream.frequency;
data->length = (_stream.duration * data->frequency) / 1000LL; data->length = (_stream.duration * data->frequency) / 1000LL;

View file

@ -45,6 +45,7 @@ struct PlaybackOptions {
AudioMsgId audioId; AudioMsgId audioId;
bool syncVideoByAudio = true; bool syncVideoByAudio = true;
bool waitForMarkAsShown = false; bool waitForMarkAsShown = false;
bool hwAllow = false;
bool loop = false; bool loop = false;
}; };

View file

@ -148,7 +148,8 @@ void File::Context::logFatal(
Stream File::Context::initStream( Stream File::Context::initStream(
not_null<AVFormatContext*> format, not_null<AVFormatContext*> format,
AVMediaType type, AVMediaType type,
Mode mode) { Mode mode,
bool hwAllowed) {
auto result = Stream(); auto result = Stream();
const auto index = result.index = av_find_best_stream( const auto index = result.index = av_find_best_stream(
format, format,
@ -158,31 +159,56 @@ Stream File::Context::initStream(
nullptr, nullptr,
0); 0);
if (index < 0) { if (index < 0) {
return result; return {};
} }
const auto info = format->streams[index]; const auto info = format->streams[index];
const auto tryCreateCodec = [&](AVHWDeviceType type) {
result.codec = FFmpeg::MakeCodecPointer({
.stream = info,
.type = type,
});
return (result.codec != nullptr);
};
if (type == AVMEDIA_TYPE_VIDEO) { if (type == AVMEDIA_TYPE_VIDEO) {
if (info->disposition & AV_DISPOSITION_ATTACHED_PIC) { if (info->disposition & AV_DISPOSITION_ATTACHED_PIC) {
// ignore cover streams // ignore cover streams
return Stream(); return Stream();
} }
const auto hwAccelTypes = std::array{
#ifdef Q_OS_WIN
AV_HWDEVICE_TYPE_D3D11VA,
AV_HWDEVICE_TYPE_DXVA2,
#elif defined Q_OS_MAC // Q_OS_WIN
AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
#else // Q_OS_WIN || Q_OS_MAC
AV_HWDEVICE_TYPE_VAAPI,
AV_HWDEVICE_TYPE_VDPAU,
#endif // Q_OS_WIN || Q_OS_MAC
AV_HWDEVICE_TYPE_CUDA,
AV_HWDEVICE_TYPE_NONE,
};
for (const auto type : hwAccelTypes) {
if (tryCreateCodec(type)) {
break;
}
}
if (!result.codec) {
return result;
}
result.rotation = FFmpeg::ReadRotationFromMetadata(info); result.rotation = FFmpeg::ReadRotationFromMetadata(info);
result.aspect = FFmpeg::ValidateAspectRatio(info->sample_aspect_ratio); result.aspect = FFmpeg::ValidateAspectRatio(info->sample_aspect_ratio);
} else if (type == AVMEDIA_TYPE_AUDIO) { } else if (type == AVMEDIA_TYPE_AUDIO) {
result.frequency = info->codecpar->sample_rate; result.frequency = info->codecpar->sample_rate;
if (!result.frequency) { if (!result.frequency) {
return result; return result;
} else if (!tryCreateCodec(AV_HWDEVICE_TYPE_NONE)) {
return result;
} }
} }
result.codec = FFmpeg::MakeCodecPointer(info); result.decodedFrame = FFmpeg::MakeFramePointer();
if (!result.codec) { if (!result.decodedFrame) {
return result;
}
result.frame = FFmpeg::MakeFramePointer();
if (!result.frame) {
result.codec = nullptr; result.codec = nullptr;
return result; return result;
} }
@ -260,7 +286,7 @@ std::variant<FFmpeg::Packet, FFmpeg::AvErrorWrap> File::Context::readPacket() {
return error; return error;
} }
void File::Context::start(crl::time position) { void File::Context::start(crl::time position, bool hwAllow) {
auto error = FFmpeg::AvErrorWrap(); auto error = FFmpeg::AvErrorWrap();
if (unroll()) { if (unroll()) {
@ -280,12 +306,12 @@ void File::Context::start(crl::time position) {
} }
const auto mode = _delegate->fileOpenMode(); const auto mode = _delegate->fileOpenMode();
auto video = initStream(format.get(), AVMEDIA_TYPE_VIDEO, mode); auto video = initStream(format.get(), AVMEDIA_TYPE_VIDEO, mode, hwAllow);
if (unroll()) { if (unroll()) {
return; return;
} }
auto audio = initStream(format.get(), AVMEDIA_TYPE_AUDIO, mode); auto audio = initStream(format.get(), AVMEDIA_TYPE_AUDIO, mode, false);
if (unroll()) { if (unroll()) {
return; return;
} }
@ -425,7 +451,10 @@ File::File(std::shared_ptr<Reader> reader)
: _reader(std::move(reader)) { : _reader(std::move(reader)) {
} }
void File::start(not_null<FileDelegate*> delegate, crl::time position) { void File::start(
not_null<FileDelegate*> delegate,
crl::time position,
bool hwAllow) {
stop(true); stop(true);
_reader->startStreaming(); _reader->startStreaming();
@ -433,7 +462,7 @@ void File::start(not_null<FileDelegate*> delegate, crl::time position) {
_thread = std::thread([=, context = &*_context] { _thread = std::thread([=, context = &*_context] {
crl::toggle_fp_exceptions(true); crl::toggle_fp_exceptions(true);
context->start(position); context->start(position, hwAllow);
while (!context->finished()) { while (!context->finished()) {
context->readNextPacket(); context->readNextPacket();
} }

View file

@ -28,7 +28,10 @@ public:
File(const File &other) = delete; File(const File &other) = delete;
File &operator=(const File &other) = delete; File &operator=(const File &other) = delete;
void start(not_null<FileDelegate*> delegate, crl::time position); void start(
not_null<FileDelegate*> delegate,
crl::time position,
bool hwAllow);
void wake(); void wake();
void stop(bool stillActive = false); void stop(bool stillActive = false);
@ -43,7 +46,7 @@ private:
Context(not_null<FileDelegate*> delegate, not_null<Reader*> reader); Context(not_null<FileDelegate*> delegate, not_null<Reader*> reader);
~Context(); ~Context();
void start(crl::time position); void start(crl::time position, bool hwAllow);
void readNextPacket(); void readNextPacket();
void interrupt(); void interrupt();
@ -75,7 +78,8 @@ private:
[[nodiscard]] Stream initStream( [[nodiscard]] Stream initStream(
not_null<AVFormatContext *> format, not_null<AVFormatContext *> format,
AVMediaType type, AVMediaType type,
Mode mode); Mode mode,
bool hwAllowed);
void seekToPosition( void seekToPosition(
not_null<AVFormatContext *> format, not_null<AVFormatContext *> format,
const Stream &stream, const Stream &stream,

View file

@ -544,7 +544,7 @@ void Player::play(const PlaybackOptions &options) {
_options.speed = 1.; _options.speed = 1.;
} }
_stage = Stage::Initializing; _stage = Stage::Initializing;
_file->start(delegate(), _options.position); _file->start(delegate(), _options.position, _options.hwAllow);
} }
void Player::savePreviousReceivedTill( void Player::savePreviousReceivedTill(

View file

@ -20,13 +20,13 @@ constexpr auto kSkipInvalidDataPackets = 10;
} // namespace } // namespace
crl::time FramePosition(const Stream &stream) { crl::time FramePosition(const Stream &stream) {
const auto pts = !stream.frame const auto pts = !stream.decodedFrame
? AV_NOPTS_VALUE ? AV_NOPTS_VALUE
: (stream.frame->best_effort_timestamp != AV_NOPTS_VALUE) : (stream.decodedFrame->best_effort_timestamp != AV_NOPTS_VALUE)
? stream.frame->best_effort_timestamp ? stream.decodedFrame->best_effort_timestamp
: (stream.frame->pts != AV_NOPTS_VALUE) : (stream.decodedFrame->pts != AV_NOPTS_VALUE)
? stream.frame->pts ? stream.decodedFrame->pts
: stream.frame->pkt_dts; : stream.decodedFrame->pkt_dts;
return FFmpeg::PtsToTime(pts, stream.timeBase); return FFmpeg::PtsToTime(pts, stream.timeBase);
} }
@ -66,14 +66,14 @@ FFmpeg::AvErrorWrap ProcessPacket(Stream &stream, FFmpeg::Packet &&packet) {
} }
FFmpeg::AvErrorWrap ReadNextFrame(Stream &stream) { FFmpeg::AvErrorWrap ReadNextFrame(Stream &stream) {
Expects(stream.frame != nullptr); Expects(stream.decodedFrame != nullptr);
auto error = FFmpeg::AvErrorWrap(); auto error = FFmpeg::AvErrorWrap();
do { do {
error = avcodec_receive_frame( error = avcodec_receive_frame(
stream.codec.get(), stream.codec.get(),
stream.frame.get()); stream.decodedFrame.get());
if (!error if (!error
|| error.code() != AVERROR(EAGAIN) || error.code() != AVERROR(EAGAIN)
|| stream.queue.empty()) { || stream.queue.empty()) {
@ -108,13 +108,27 @@ bool GoodForRequest(
&& (request.resize == image.size()); && (request.resize == image.size());
} }
// Downloads a hardware-decoded frame into `transferredFrame` (host
// memory) via av_hwframe_transfer_data, then releases the decoded
// frame's buffers. Returns false (after logging) on transfer failure.
bool TransferFrame(
Stream &stream,
not_null<AVFrame*> decodedFrame,
not_null<AVFrame*> transferredFrame) {
Expects(decodedFrame->hw_frames_ctx != nullptr);

const auto code = av_hwframe_transfer_data(
transferredFrame,
decodedFrame,
0);
if (const auto error = FFmpeg::AvErrorWrap(code)) {
LogError(qstr("av_hwframe_transfer_data"), error);
return false;
}
FFmpeg::ClearFrameMemory(decodedFrame);
return true;
}
QImage ConvertFrame( QImage ConvertFrame(
Stream &stream, Stream &stream,
AVFrame *frame, not_null<AVFrame*> frame,
QSize resize, QSize resize,
QImage storage) { QImage storage) {
Expects(frame != nullptr);
const auto frameSize = QSize(frame->width, frame->height); const auto frameSize = QSize(frame->width, frame->height);
if (frameSize.isEmpty()) { if (frameSize.isEmpty()) {
LOG(("Streaming Error: Bad frame size %1,%2" LOG(("Streaming Error: Bad frame size %1,%2"
@ -134,6 +148,7 @@ QImage ConvertFrame(
if (!FFmpeg::GoodStorageForFrame(storage, resize)) { if (!FFmpeg::GoodStorageForFrame(storage, resize)) {
storage = FFmpeg::CreateFrameStorage(resize); storage = FFmpeg::CreateFrameStorage(resize);
} }
const auto format = AV_PIX_FMT_BGRA; const auto format = AV_PIX_FMT_BGRA;
const auto hasDesiredFormat = (frame->format == format); const auto hasDesiredFormat = (frame->format == format);
if (frameSize == storage.size() && hasDesiredFormat) { if (frameSize == storage.size() && hasDesiredFormat) {

View file

@ -30,7 +30,8 @@ struct Stream {
crl::time duration = kTimeUnknown; crl::time duration = kTimeUnknown;
AVRational timeBase = FFmpeg::kUniversalTimeBase; AVRational timeBase = FFmpeg::kUniversalTimeBase;
FFmpeg::CodecPointer codec; FFmpeg::CodecPointer codec;
FFmpeg::FramePointer frame; FFmpeg::FramePointer decodedFrame;
FFmpeg::FramePointer transferredFrame;
std::deque<FFmpeg::Packet> queue; std::deque<FFmpeg::Packet> queue;
int invalidDataPackets = 0; int invalidDataPackets = 0;
@ -54,9 +55,13 @@ struct Stream {
bool hasAlpha, bool hasAlpha,
int rotation, int rotation,
const FrameRequest &request); const FrameRequest &request);
[[nodiscard]] bool TransferFrame(
Stream &stream,
not_null<AVFrame*> decodedFrame,
not_null<AVFrame*> transferredFrame);
[[nodiscard]] QImage ConvertFrame( [[nodiscard]] QImage ConvertFrame(
Stream &stream, Stream &stream,
AVFrame *frame, not_null<AVFrame*> frame,
QSize resize, QSize resize,
QImage storage); QImage storage);
[[nodiscard]] FrameYUV420 ExtractYUV420(Stream &stream, AVFrame *frame); [[nodiscard]] FrameYUV420 ExtractYUV420(Stream &stream, AVFrame *frame);

View file

@ -11,6 +11,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/audio/media_audio.h" #include "media/audio/media_audio.h"
#include "base/concurrent_timer.h" #include "base/concurrent_timer.h"
#include "core/crash_reports.h" #include "core/crash_reports.h"
#include "base/debug_log.h"
namespace Media { namespace Media {
namespace Streaming { namespace Streaming {
@ -373,7 +374,8 @@ auto VideoTrackObject::readFrame(not_null<Frame*> frame) -> FrameResult {
fail(Error::InvalidData); fail(Error::InvalidData);
return FrameResult::Error; return FrameResult::Error;
} }
std::swap(frame->decoded, _stream.frame); std::swap(frame->decoded, _stream.decodedFrame);
std::swap(frame->transferred, _stream.transferredFrame);
frame->index = _frameIndex++; frame->index = _frameIndex++;
frame->position = position; frame->position = position;
frame->displayed = kTimeUnknown; frame->displayed = kTimeUnknown;
@ -427,9 +429,28 @@ void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) {
fillRequests(frame); fillRequests(frame);
frame->format = FrameFormat::None; frame->format = FrameFormat::None;
if (frame->decoded->format == AV_PIX_FMT_YUV420P && !requireARGB32()) { if (frame->decoded->hw_frames_ctx) {
if (!frame->transferred) {
frame->transferred = FFmpeg::MakeFramePointer();
}
const auto success = TransferFrame(
_stream,
frame->decoded.get(),
frame->transferred.get());
if (!success) {
frame->prepared.clear();
fail(Error::InvalidData);
return;
}
} else {
frame->transferred = nullptr;
}
const auto frameWithData = frame->transferred
? frame->transferred.get()
: frame->decoded.get();
if (frameWithData->format == AV_PIX_FMT_YUV420P && !requireARGB32()) {
frame->alpha = false; frame->alpha = false;
frame->yuv420 = ExtractYUV420(_stream, frame->decoded.get()); frame->yuv420 = ExtractYUV420(_stream, frameWithData);
if (frame->yuv420.size.isEmpty() if (frame->yuv420.size.isEmpty()
|| frame->yuv420.chromaSize.isEmpty() || frame->yuv420.chromaSize.isEmpty()
|| !frame->yuv420.y.data || !frame->yuv420.y.data
@ -447,15 +468,15 @@ void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) {
} }
frame->format = FrameFormat::YUV420; frame->format = FrameFormat::YUV420;
} else { } else {
frame->alpha = (frame->decoded->format == AV_PIX_FMT_BGRA) frame->alpha = (frameWithData->format == AV_PIX_FMT_BGRA)
|| (frame->decoded->format == AV_PIX_FMT_YUVA420P); || (frameWithData->format == AV_PIX_FMT_YUVA420P);
frame->yuv420.size = { frame->yuv420.size = {
frame->decoded->width, frameWithData->width,
frame->decoded->height frameWithData->height
}; };
frame->original = ConvertFrame( frame->original = ConvertFrame(
_stream, _stream,
frame->decoded.get(), frameWithData,
chooseOriginalResize(), chooseOriginalResize(),
std::move(frame->original)); std::move(frame->original));
if (frame->original.isNull()) { if (frame->original.isNull()) {
@ -587,7 +608,7 @@ bool VideoTrackObject::tryReadFirstFrame(FFmpeg::Packet &&packet) {
return false; return false;
} }
// Return the last valid frame if we seek too far. // Return the last valid frame if we seek too far.
_stream.frame = std::move(_initialSkippingFrame); _stream.decodedFrame = std::move(_initialSkippingFrame);
return processFirstFrame(); return processFirstFrame();
} else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) { } else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) {
return false; return false;
@ -603,22 +624,45 @@ bool VideoTrackObject::tryReadFirstFrame(FFmpeg::Packet &&packet) {
// Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames. // Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames.
// Try skipping frames until one is after the requested position. // Try skipping frames until one is after the requested position.
std::swap(_initialSkippingFrame, _stream.frame); std::swap(_initialSkippingFrame, _stream.decodedFrame);
if (!_stream.frame) { if (!_stream.decodedFrame) {
_stream.frame = FFmpeg::MakeFramePointer(); _stream.decodedFrame = FFmpeg::MakeFramePointer();
} }
} }
} }
bool VideoTrackObject::processFirstFrame() { bool VideoTrackObject::processFirstFrame() {
if (_stream.frame->width * _stream.frame->height > kMaxFrameArea) { const auto decodedFrame = _stream.decodedFrame.get();
if (decodedFrame->width * decodedFrame->height > kMaxFrameArea) {
return false; return false;
} else if (decodedFrame->hw_frames_ctx) {
if (!_stream.transferredFrame) {
_stream.transferredFrame = FFmpeg::MakeFramePointer();
}
const auto success = TransferFrame(
_stream,
decodedFrame,
_stream.transferredFrame.get());
if (!success) {
LOG(("Video Error: Failed accelerated decoding from format %1."
).arg(int(decodedFrame->format)));
return false;
}
DEBUG_LOG(("Video Info: "
"Using accelerated decoding from format %1 to format %2."
).arg(int(decodedFrame->format)
).arg(int(_stream.transferredFrame->format)));
} else {
_stream.transferredFrame = nullptr;
} }
const auto alpha = (_stream.frame->format == AV_PIX_FMT_BGRA) const auto frameWithData = _stream.transferredFrame
|| (_stream.frame->format == AV_PIX_FMT_YUVA420P); ? _stream.transferredFrame.get()
: decodedFrame;
const auto alpha = (frameWithData->format == AV_PIX_FMT_BGRA)
|| (frameWithData->format == AV_PIX_FMT_YUVA420P);
auto frame = ConvertFrame( auto frame = ConvertFrame(
_stream, _stream,
_stream.frame.get(), frameWithData,
QSize(), QSize(),
QImage()); QImage());
if (frame.isNull()) { if (frame.isNull()) {

View file

@ -82,6 +82,7 @@ private:
}; };
struct Frame { struct Frame {
FFmpeg::FramePointer decoded = FFmpeg::MakeFramePointer(); FFmpeg::FramePointer decoded = FFmpeg::MakeFramePointer();
FFmpeg::FramePointer transferred;
QImage original; QImage original;
FrameYUV420 yuv420; FrameYUV420 yuv420;
crl::time position = kTimeUnknown; crl::time position = kTimeUnknown;

View file

@ -3124,6 +3124,7 @@ void OverlayWidget::restartAtSeekPosition(crl::time position) {
} }
auto options = Streaming::PlaybackOptions(); auto options = Streaming::PlaybackOptions();
options.position = position; options.position = position;
options.hwAllow = true;
if (!_streamed->withSound) { if (!_streamed->withSound) {
options.mode = Streaming::Mode::Video; options.mode = Streaming::Mode::Video;
options.loop = true; options.loop = true;

View file

@ -1604,6 +1604,7 @@ void Pip::restartAtSeekPosition(crl::time position) {
auto options = Streaming::PlaybackOptions(); auto options = Streaming::PlaybackOptions();
options.position = position; options.position = position;
options.hwAllow = true;
options.audioId = _instance.player().prepareLegacyState().id; options.audioId = _instance.player().prepareLegacyState().id;
Assert(8 && _delegate->pipPlaybackSpeed() >= 0.5 Assert(8 && _delegate->pipPlaybackSpeed() >= 0.5

View file

@ -400,7 +400,7 @@ if customRunCommand:
stage('patches', """ stage('patches', """
git clone https://github.com/desktop-app/patches.git git clone https://github.com/desktop-app/patches.git
cd patches cd patches
git checkout b0ae34e08f git checkout 0947a28160
""") """)
stage('depot_tools', """ stage('depot_tools', """
@ -685,6 +685,12 @@ depends:yasm/yasm
make install make install
""") """)
stage('nv-codec-headers', """
git clone https://github.com/FFmpeg/nv-codec-headers.git
cd nv-codec-headers
git checkout n11.1.5.1
""")
stage('ffmpeg', """ stage('ffmpeg', """
git clone https://github.com/FFmpeg/FFmpeg.git ffmpeg git clone https://github.com/FFmpeg/FFmpeg.git ffmpeg
cd ffmpeg cd ffmpeg