Use FFmpeg hardware acceleration in media viewer / PiP.

This commit is contained in:
John Preston 2022-03-22 17:56:43 +04:00
parent 207cb35c55
commit 0dca556843
14 changed files with 272 additions and 57 deletions

View file

@ -31,6 +31,15 @@ constexpr auto kAvioBlockSize = 4096;
constexpr auto kTimeUnknown = std::numeric_limits<crl::time>::min();
constexpr auto kDurationMax = crl::time(std::numeric_limits<int>::max());
// Signature of the AVCodecContext::get_format callback FFmpeg invokes to
// let us pick a pixel format from the list the decoder supports.
using GetFormatMethod = enum AVPixelFormat(*)(
	struct AVCodecContext *s,
	const enum AVPixelFormat *fmt);

// A resolved hardware acceleration choice: the get_format callback to
// install on the codec context and the hardware pixel format it selects.
// A default-constructed descriptor (getFormat == nullptr) means that no
// usable hardware acceleration was found.
struct HwAccelDescriptor {
	GetFormatMethod getFormat = nullptr;
	AVPixelFormat format = AV_PIX_FMT_NONE;
};
void AlignedImageBufferCleanupHandler(void* data) {
const auto buffer = static_cast<uchar*>(data);
delete[] buffer;
@ -76,6 +85,69 @@ void PremultiplyLine(uchar *dst, const uchar *src, int intsCount) {
#endif // LIB_FFMPEG_USE_QT_PRIVATE_API
}
// get_format callback installed on AVCodecContext: accepts the single
// hardware pixel format we prepared a device context for, and rejects
// everything else so FFmpeg falls back with an error instead of silently
// choosing a software format.
//
// The list passed by FFmpeg is terminated by AV_PIX_FMT_NONE; use the
// named constant instead of the magic -1 it happens to equal.
template <AVPixelFormat Required>
enum AVPixelFormat GetFormatImplementation(
		AVCodecContext *ctx,
		const enum AVPixelFormat *pix_fmts) {
	for (auto p = pix_fmts; *p != AV_PIX_FMT_NONE; ++p) {
		if (*p == Required) {
			return Required;
		}
	}
	return AV_PIX_FMT_NONE;
}
// Builds the descriptor pairing a compile-time pixel format with the
// get_format callback instantiation that selects exactly that format.
template <AVPixelFormat Format>
[[nodiscard]] HwAccelDescriptor HwAccelByFormat() {
	auto result = HwAccelDescriptor();
	result.getFormat = GetFormatImplementation<Format>;
	result.format = Format;
	return result;
}
// Looks up the hardware pixel format this decoder exposes for the given
// device type (considering only the AV_HWDEVICE_CTX configuration method)
// and maps it to a descriptor with the matching get_format callback.
// Returns an empty descriptor when the decoder has no usable config for
// this device type or the format is not one we support on this platform.
[[nodiscard]] HwAccelDescriptor ResolveHwAccel(
		not_null<const AVCodec*> decoder,
		AVHWDeviceType type) {
	Expects(type != AV_HWDEVICE_TYPE_NONE);

	auto format = AV_PIX_FMT_NONE;
	for (auto index = 0; format == AV_PIX_FMT_NONE; ++index) {
		const auto config = avcodec_get_hw_config(decoder, index);
		if (!config) {
			// Enumerated all configurations without a match.
			break;
		} else if (config->device_type == type
			&& (config->methods
				& AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX)) {
			format = config->pix_fmt;
		}
	}
	switch (format) {
#ifdef Q_OS_WIN
	case AV_PIX_FMT_D3D11:
		return HwAccelByFormat<AV_PIX_FMT_D3D11>();
	case AV_PIX_FMT_DXVA2_VLD:
		return HwAccelByFormat<AV_PIX_FMT_DXVA2_VLD>();
	case AV_PIX_FMT_D3D11VA_VLD:
		return HwAccelByFormat<AV_PIX_FMT_D3D11VA_VLD>();
#elif defined Q_OS_MAC // Q_OS_WIN
	case AV_PIX_FMT_VIDEOTOOLBOX:
		return HwAccelByFormat<AV_PIX_FMT_VIDEOTOOLBOX>();
#else // Q_OS_WIN || Q_OS_MAC
	case AV_PIX_FMT_VAAPI:
		return HwAccelByFormat<AV_PIX_FMT_VAAPI>();
	case AV_PIX_FMT_VDPAU:
		return HwAccelByFormat<AV_PIX_FMT_VDPAU>();
#endif // Q_OS_WIN || Q_OS_MAC
	case AV_PIX_FMT_CUDA:
		return HwAccelByFormat<AV_PIX_FMT_CUDA>();
	}
	return {};
}
} // namespace
IOPointer MakeIOPointer(
@ -161,7 +233,7 @@ const AVCodec *FindDecoder(not_null<AVCodecContext*> context) {
: avcodec_find_decoder(context->codec_id);
}
CodecPointer MakeCodecPointer(not_null<AVStream*> stream) {
CodecPointer MakeCodecPointer(CodecDescriptor descriptor) {
auto error = AvErrorWrap();
auto result = CodecPointer(avcodec_alloc_context3(nullptr));
@ -170,6 +242,7 @@ CodecPointer MakeCodecPointer(not_null<AVStream*> stream) {
LogError(qstr("avcodec_alloc_context3"));
return {};
}
const auto stream = descriptor.stream;
error = avcodec_parameters_to_context(context, stream->codecpar);
if (error) {
LogError(qstr("avcodec_parameters_to_context"), error);
@ -183,7 +256,37 @@ CodecPointer MakeCodecPointer(not_null<AVStream*> stream) {
if (!codec) {
LogError(qstr("avcodec_find_decoder"), context->codec_id);
return {};
} else if ((error = avcodec_open2(context, codec, nullptr))) {
}
if (descriptor.type != AV_HWDEVICE_TYPE_NONE) {
const auto hw = ResolveHwAccel(codec, descriptor.type);
if (!hw.getFormat) {
return {};
}
context->get_format = hw.getFormat;
auto hwDeviceContext = (AVBufferRef*)nullptr;
error = av_hwdevice_ctx_create(
&hwDeviceContext,
descriptor.type,
nullptr,
nullptr,
0);
if (error || !hwDeviceContext) {
LogError(qstr("av_hwdevice_ctx_create"), error);
return {};
}
DEBUG_LOG(("Video Info: "
"Using \"%1\" hardware acceleration for \"%2\" decoder."
).arg(av_hwdevice_get_type_name(descriptor.type)
).arg(codec->name));
context->hw_device_ctx = av_buffer_ref(hwDeviceContext);
av_buffer_unref(&hwDeviceContext);
} else {
DEBUG_LOG(("Video Info: Using software \"%2\" decoder."
).arg(codec->name));
}
if ((error = avcodec_open2(context, codec, nullptr))) {
LogError(qstr("avcodec_open2"), error);
return {};
}

View file

@ -125,7 +125,12 @@ struct CodecDeleter {
void operator()(AVCodecContext *value);
};
using CodecPointer = std::unique_ptr<AVCodecContext, CodecDeleter>;
[[nodiscard]] CodecPointer MakeCodecPointer(not_null<AVStream*> stream);
// Input for MakeCodecPointer: the stream to open a decoder for, plus an
// optional hardware device type. AV_HWDEVICE_TYPE_NONE (the default)
// requests plain software decoding.
struct CodecDescriptor {
	not_null<AVStream*> stream;
	AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;
};
[[nodiscard]] CodecPointer MakeCodecPointer(CodecDescriptor descriptor);
struct FrameDeleter {
void operator()(AVFrame *value);

View file

@ -86,7 +86,7 @@ bool AudioTrack::tryReadFirstFrame(FFmpeg::Packet &&packet) {
return false;
}
// Return the last valid frame if we seek too far.
_stream.frame = std::move(_initialSkippingFrame);
_stream.decodedFrame = std::move(_initialSkippingFrame);
return processFirstFrame();
} else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) {
return false;
@ -102,15 +102,15 @@ bool AudioTrack::tryReadFirstFrame(FFmpeg::Packet &&packet) {
// Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames.
// Try skipping frames until one is after the requested position.
std::swap(_initialSkippingFrame, _stream.frame);
if (!_stream.frame) {
_stream.frame = FFmpeg::MakeFramePointer();
std::swap(_initialSkippingFrame, _stream.decodedFrame);
if (!_stream.decodedFrame) {
_stream.decodedFrame = FFmpeg::MakeFramePointer();
}
}
}
bool AudioTrack::processFirstFrame() {
if (!FFmpeg::FrameHasData(_stream.frame.get())) {
if (!FFmpeg::FrameHasData(_stream.decodedFrame.get())) {
return false;
}
mixerInit();
@ -131,7 +131,7 @@ void AudioTrack::mixerInit() {
Expects(!initialized());
auto data = std::make_unique<ExternalSoundData>();
data->frame = std::move(_stream.frame);
data->frame = std::move(_stream.decodedFrame);
data->codec = std::move(_stream.codec);
data->frequency = _stream.frequency;
data->length = (_stream.duration * data->frequency) / 1000LL;

View file

@ -45,6 +45,7 @@ struct PlaybackOptions {
AudioMsgId audioId;
bool syncVideoByAudio = true;
bool waitForMarkAsShown = false;
bool hwAllow = false;
bool loop = false;
};

View file

@ -148,7 +148,8 @@ void File::Context::logFatal(
Stream File::Context::initStream(
not_null<AVFormatContext*> format,
AVMediaType type,
Mode mode) {
Mode mode,
bool hwAllowed) {
auto result = Stream();
const auto index = result.index = av_find_best_stream(
format,
@ -158,31 +159,56 @@ Stream File::Context::initStream(
nullptr,
0);
if (index < 0) {
return result;
return {};
}
const auto info = format->streams[index];
const auto tryCreateCodec = [&](AVHWDeviceType type) {
result.codec = FFmpeg::MakeCodecPointer({
.stream = info,
.type = type,
});
return (result.codec != nullptr);
};
if (type == AVMEDIA_TYPE_VIDEO) {
if (info->disposition & AV_DISPOSITION_ATTACHED_PIC) {
// ignore cover streams
return Stream();
}
const auto hwAccelTypes = std::array{
#ifdef Q_OS_WIN
AV_HWDEVICE_TYPE_D3D11VA,
AV_HWDEVICE_TYPE_DXVA2,
#elif defined Q_OS_MAC // Q_OS_WIN
AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
#else // Q_OS_WIN || Q_OS_MAC
AV_HWDEVICE_TYPE_VAAPI,
AV_HWDEVICE_TYPE_VDPAU,
#endif // Q_OS_WIN || Q_OS_MAC
AV_HWDEVICE_TYPE_CUDA,
AV_HWDEVICE_TYPE_NONE,
};
for (const auto type : hwAccelTypes) {
if (tryCreateCodec(type)) {
break;
}
}
if (!result.codec) {
return result;
}
result.rotation = FFmpeg::ReadRotationFromMetadata(info);
result.aspect = FFmpeg::ValidateAspectRatio(info->sample_aspect_ratio);
} else if (type == AVMEDIA_TYPE_AUDIO) {
result.frequency = info->codecpar->sample_rate;
if (!result.frequency) {
return result;
} else if (!tryCreateCodec(AV_HWDEVICE_TYPE_NONE)) {
return result;
}
}
result.codec = FFmpeg::MakeCodecPointer(info);
if (!result.codec) {
return result;
}
result.frame = FFmpeg::MakeFramePointer();
if (!result.frame) {
result.decodedFrame = FFmpeg::MakeFramePointer();
if (!result.decodedFrame) {
result.codec = nullptr;
return result;
}
@ -260,7 +286,7 @@ std::variant<FFmpeg::Packet, FFmpeg::AvErrorWrap> File::Context::readPacket() {
return error;
}
void File::Context::start(crl::time position) {
void File::Context::start(crl::time position, bool hwAllow) {
auto error = FFmpeg::AvErrorWrap();
if (unroll()) {
@ -280,12 +306,12 @@ void File::Context::start(crl::time position) {
}
const auto mode = _delegate->fileOpenMode();
auto video = initStream(format.get(), AVMEDIA_TYPE_VIDEO, mode);
auto video = initStream(format.get(), AVMEDIA_TYPE_VIDEO, mode, hwAllow);
if (unroll()) {
return;
}
auto audio = initStream(format.get(), AVMEDIA_TYPE_AUDIO, mode);
auto audio = initStream(format.get(), AVMEDIA_TYPE_AUDIO, mode, false);
if (unroll()) {
return;
}
@ -425,7 +451,10 @@ File::File(std::shared_ptr<Reader> reader)
: _reader(std::move(reader)) {
}
void File::start(not_null<FileDelegate*> delegate, crl::time position) {
void File::start(
not_null<FileDelegate*> delegate,
crl::time position,
bool hwAllow) {
stop(true);
_reader->startStreaming();
@ -433,7 +462,7 @@ void File::start(not_null<FileDelegate*> delegate, crl::time position) {
_thread = std::thread([=, context = &*_context] {
crl::toggle_fp_exceptions(true);
context->start(position);
context->start(position, hwAllow);
while (!context->finished()) {
context->readNextPacket();
}

View file

@ -28,7 +28,10 @@ public:
File(const File &other) = delete;
File &operator=(const File &other) = delete;
void start(not_null<FileDelegate*> delegate, crl::time position);
void start(
not_null<FileDelegate*> delegate,
crl::time position,
bool hwAllow);
void wake();
void stop(bool stillActive = false);
@ -43,7 +46,7 @@ private:
Context(not_null<FileDelegate*> delegate, not_null<Reader*> reader);
~Context();
void start(crl::time position);
void start(crl::time position, bool hwAllow);
void readNextPacket();
void interrupt();
@ -75,7 +78,8 @@ private:
[[nodiscard]] Stream initStream(
not_null<AVFormatContext *> format,
AVMediaType type,
Mode mode);
Mode mode,
bool hwAllowed);
void seekToPosition(
not_null<AVFormatContext *> format,
const Stream &stream,

View file

@ -544,7 +544,7 @@ void Player::play(const PlaybackOptions &options) {
_options.speed = 1.;
}
_stage = Stage::Initializing;
_file->start(delegate(), _options.position);
_file->start(delegate(), _options.position, _options.hwAllow);
}
void Player::savePreviousReceivedTill(

View file

@ -20,13 +20,13 @@ constexpr auto kSkipInvalidDataPackets = 10;
} // namespace
crl::time FramePosition(const Stream &stream) {
const auto pts = !stream.frame
const auto pts = !stream.decodedFrame
? AV_NOPTS_VALUE
: (stream.frame->best_effort_timestamp != AV_NOPTS_VALUE)
? stream.frame->best_effort_timestamp
: (stream.frame->pts != AV_NOPTS_VALUE)
? stream.frame->pts
: stream.frame->pkt_dts;
: (stream.decodedFrame->best_effort_timestamp != AV_NOPTS_VALUE)
? stream.decodedFrame->best_effort_timestamp
: (stream.decodedFrame->pts != AV_NOPTS_VALUE)
? stream.decodedFrame->pts
: stream.decodedFrame->pkt_dts;
return FFmpeg::PtsToTime(pts, stream.timeBase);
}
@ -66,14 +66,14 @@ FFmpeg::AvErrorWrap ProcessPacket(Stream &stream, FFmpeg::Packet &&packet) {
}
FFmpeg::AvErrorWrap ReadNextFrame(Stream &stream) {
Expects(stream.frame != nullptr);
Expects(stream.decodedFrame != nullptr);
auto error = FFmpeg::AvErrorWrap();
do {
error = avcodec_receive_frame(
stream.codec.get(),
stream.frame.get());
stream.decodedFrame.get());
if (!error
|| error.code() != AVERROR(EAGAIN)
|| stream.queue.empty()) {
@ -108,13 +108,27 @@ bool GoodForRequest(
&& (request.resize == image.size());
}
// Downloads a hardware-surface frame (decoded on the GPU) into the
// CPU-accessible 'transferredFrame'. On success the decoded frame's
// memory is released, since only the transferred copy is needed further.
// Returns false (after logging) when the transfer fails.
bool TransferFrame(
		Stream &stream,
		not_null<AVFrame*> decodedFrame,
		not_null<AVFrame*> transferredFrame) {
	Expects(decodedFrame->hw_frames_ctx != nullptr);

	if (const auto result = FFmpeg::AvErrorWrap(
			av_hwframe_transfer_data(transferredFrame, decodedFrame, 0))) {
		LogError(qstr("av_hwframe_transfer_data"), result);
		return false;
	}
	FFmpeg::ClearFrameMemory(decodedFrame);
	return true;
}
QImage ConvertFrame(
Stream &stream,
AVFrame *frame,
not_null<AVFrame*> frame,
QSize resize,
QImage storage) {
Expects(frame != nullptr);
const auto frameSize = QSize(frame->width, frame->height);
if (frameSize.isEmpty()) {
LOG(("Streaming Error: Bad frame size %1,%2"
@ -134,6 +148,7 @@ QImage ConvertFrame(
if (!FFmpeg::GoodStorageForFrame(storage, resize)) {
storage = FFmpeg::CreateFrameStorage(resize);
}
const auto format = AV_PIX_FMT_BGRA;
const auto hasDesiredFormat = (frame->format == format);
if (frameSize == storage.size() && hasDesiredFormat) {

View file

@ -30,7 +30,8 @@ struct Stream {
crl::time duration = kTimeUnknown;
AVRational timeBase = FFmpeg::kUniversalTimeBase;
FFmpeg::CodecPointer codec;
FFmpeg::FramePointer frame;
FFmpeg::FramePointer decodedFrame;
FFmpeg::FramePointer transferredFrame;
std::deque<FFmpeg::Packet> queue;
int invalidDataPackets = 0;
@ -54,9 +55,13 @@ struct Stream {
bool hasAlpha,
int rotation,
const FrameRequest &request);
[[nodiscard]] bool TransferFrame(
Stream &stream,
not_null<AVFrame*> decodedFrame,
not_null<AVFrame*> transferredFrame);
[[nodiscard]] QImage ConvertFrame(
Stream &stream,
AVFrame *frame,
not_null<AVFrame*> frame,
QSize resize,
QImage storage);
[[nodiscard]] FrameYUV420 ExtractYUV420(Stream &stream, AVFrame *frame);

View file

@ -11,6 +11,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/audio/media_audio.h"
#include "base/concurrent_timer.h"
#include "core/crash_reports.h"
#include "base/debug_log.h"
namespace Media {
namespace Streaming {
@ -373,7 +374,8 @@ auto VideoTrackObject::readFrame(not_null<Frame*> frame) -> FrameResult {
fail(Error::InvalidData);
return FrameResult::Error;
}
std::swap(frame->decoded, _stream.frame);
std::swap(frame->decoded, _stream.decodedFrame);
std::swap(frame->transferred, _stream.transferredFrame);
frame->index = _frameIndex++;
frame->position = position;
frame->displayed = kTimeUnknown;
@ -427,9 +429,28 @@ void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) {
fillRequests(frame);
frame->format = FrameFormat::None;
if (frame->decoded->format == AV_PIX_FMT_YUV420P && !requireARGB32()) {
if (frame->decoded->hw_frames_ctx) {
if (!frame->transferred) {
frame->transferred = FFmpeg::MakeFramePointer();
}
const auto success = TransferFrame(
_stream,
frame->decoded.get(),
frame->transferred.get());
if (!success) {
frame->prepared.clear();
fail(Error::InvalidData);
return;
}
} else {
frame->transferred = nullptr;
}
const auto frameWithData = frame->transferred
? frame->transferred.get()
: frame->decoded.get();
if (frameWithData->format == AV_PIX_FMT_YUV420P && !requireARGB32()) {
frame->alpha = false;
frame->yuv420 = ExtractYUV420(_stream, frame->decoded.get());
frame->yuv420 = ExtractYUV420(_stream, frameWithData);
if (frame->yuv420.size.isEmpty()
|| frame->yuv420.chromaSize.isEmpty()
|| !frame->yuv420.y.data
@ -447,15 +468,15 @@ void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) {
}
frame->format = FrameFormat::YUV420;
} else {
frame->alpha = (frame->decoded->format == AV_PIX_FMT_BGRA)
|| (frame->decoded->format == AV_PIX_FMT_YUVA420P);
frame->alpha = (frameWithData->format == AV_PIX_FMT_BGRA)
|| (frameWithData->format == AV_PIX_FMT_YUVA420P);
frame->yuv420.size = {
frame->decoded->width,
frame->decoded->height
frameWithData->width,
frameWithData->height
};
frame->original = ConvertFrame(
_stream,
frame->decoded.get(),
frameWithData,
chooseOriginalResize(),
std::move(frame->original));
if (frame->original.isNull()) {
@ -587,7 +608,7 @@ bool VideoTrackObject::tryReadFirstFrame(FFmpeg::Packet &&packet) {
return false;
}
// Return the last valid frame if we seek too far.
_stream.frame = std::move(_initialSkippingFrame);
_stream.decodedFrame = std::move(_initialSkippingFrame);
return processFirstFrame();
} else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) {
return false;
@ -603,22 +624,45 @@ bool VideoTrackObject::tryReadFirstFrame(FFmpeg::Packet &&packet) {
// Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames.
// Try skipping frames until one is after the requested position.
std::swap(_initialSkippingFrame, _stream.frame);
if (!_stream.frame) {
_stream.frame = FFmpeg::MakeFramePointer();
std::swap(_initialSkippingFrame, _stream.decodedFrame);
if (!_stream.decodedFrame) {
_stream.decodedFrame = FFmpeg::MakeFramePointer();
}
}
}
bool VideoTrackObject::processFirstFrame() {
if (_stream.frame->width * _stream.frame->height > kMaxFrameArea) {
const auto decodedFrame = _stream.decodedFrame.get();
if (decodedFrame->width * decodedFrame->height > kMaxFrameArea) {
return false;
} else if (decodedFrame->hw_frames_ctx) {
if (!_stream.transferredFrame) {
_stream.transferredFrame = FFmpeg::MakeFramePointer();
}
const auto success = TransferFrame(
_stream,
decodedFrame,
_stream.transferredFrame.get());
if (!success) {
LOG(("Video Error: Failed accelerated decoding from format %1."
).arg(int(decodedFrame->format)));
return false;
}
DEBUG_LOG(("Video Info: "
"Using accelerated decoding from format %1 to format %2."
).arg(int(decodedFrame->format)
).arg(int(_stream.transferredFrame->format)));
} else {
_stream.transferredFrame = nullptr;
}
const auto alpha = (_stream.frame->format == AV_PIX_FMT_BGRA)
|| (_stream.frame->format == AV_PIX_FMT_YUVA420P);
const auto frameWithData = _stream.transferredFrame
? _stream.transferredFrame.get()
: decodedFrame;
const auto alpha = (frameWithData->format == AV_PIX_FMT_BGRA)
|| (frameWithData->format == AV_PIX_FMT_YUVA420P);
auto frame = ConvertFrame(
_stream,
_stream.frame.get(),
frameWithData,
QSize(),
QImage());
if (frame.isNull()) {

View file

@ -82,6 +82,7 @@ private:
};
struct Frame {
FFmpeg::FramePointer decoded = FFmpeg::MakeFramePointer();
FFmpeg::FramePointer transferred;
QImage original;
FrameYUV420 yuv420;
crl::time position = kTimeUnknown;

View file

@ -3124,6 +3124,7 @@ void OverlayWidget::restartAtSeekPosition(crl::time position) {
}
auto options = Streaming::PlaybackOptions();
options.position = position;
options.hwAllow = true;
if (!_streamed->withSound) {
options.mode = Streaming::Mode::Video;
options.loop = true;

View file

@ -1604,6 +1604,7 @@ void Pip::restartAtSeekPosition(crl::time position) {
auto options = Streaming::PlaybackOptions();
options.position = position;
options.hwAllow = true;
options.audioId = _instance.player().prepareLegacyState().id;
Assert(8 && _delegate->pipPlaybackSpeed() >= 0.5

View file

@ -400,7 +400,7 @@ if customRunCommand:
stage('patches', """
git clone https://github.com/desktop-app/patches.git
cd patches
git checkout b0ae34e08f
git checkout 0947a28160
""")
stage('depot_tools', """
@ -685,6 +685,12 @@ depends:yasm/yasm
make install
""")
stage('nv-codec-headers', """
git clone https://github.com/FFmpeg/nv-codec-headers.git
cd nv-codec-headers
git checkout n11.1.5.1
""")
stage('ffmpeg', """
git clone https://github.com/FFmpeg/FFmpeg.git ffmpeg
cd ffmpeg