diff --git a/Telegram/SourceFiles/ffmpeg/ffmpeg_utility.cpp b/Telegram/SourceFiles/ffmpeg/ffmpeg_utility.cpp
index 513231ffa..7948a79e6 100644
--- a/Telegram/SourceFiles/ffmpeg/ffmpeg_utility.cpp
+++ b/Telegram/SourceFiles/ffmpeg/ffmpeg_utility.cpp
@@ -85,6 +85,62 @@ void PremultiplyLine(uchar *dst, const uchar *src, int intsCount) {
 #endif // LIB_FFMPEG_USE_QT_PRIVATE_API
 }
 
+[[nodiscard]] bool InitHw(AVCodecContext *context, AVHWDeviceType type) {
+	auto hwDeviceContext = (AVBufferRef*)nullptr;
+	AvErrorWrap error = av_hwdevice_ctx_create(
+		&hwDeviceContext,
+		type,
+		nullptr,
+		nullptr,
+		0);
+	if (error || !hwDeviceContext) {
+		LogError(qstr("av_hwdevice_ctx_create"), error);
+		return false;
+	}
+	DEBUG_LOG(("Video Info: "
+		"Trying \"%1\" hardware acceleration for \"%2\" decoder."
+		).arg(av_hwdevice_get_type_name(type)
+		).arg(context->codec->name));
+	if (context->hw_device_ctx) {
+		av_buffer_unref(&context->hw_device_ctx);
+	}
+	context->hw_device_ctx = av_buffer_ref(hwDeviceContext);
+	av_buffer_unref(&hwDeviceContext);
+	return true;
+}
+
+[[nodiscard]] enum AVPixelFormat GetHwFormat(
+		AVCodecContext *context,
+		const enum AVPixelFormat *formats) {
+	const enum AVPixelFormat *p = nullptr;
+	for (p = formats; *p != AV_PIX_FMT_NONE; p++) {
+		const auto type = [&] {
+			switch (*p) {
+#ifdef Q_OS_WIN
+			case AV_PIX_FMT_D3D11: return AV_HWDEVICE_TYPE_D3D11VA;
+			case AV_PIX_FMT_DXVA2_VLD: return AV_HWDEVICE_TYPE_DXVA2;
+			case AV_PIX_FMT_D3D11VA_VLD: return AV_HWDEVICE_TYPE_D3D11VA;
+#elif defined Q_OS_MAC // Q_OS_WIN
+			case AV_PIX_FMT_VIDEOTOOLBOX:
+				return AV_HWDEVICE_TYPE_VIDEOTOOLBOX;
+#else // Q_OS_WIN || Q_OS_MAC
+			case AV_PIX_FMT_VAAPI: return AV_HWDEVICE_TYPE_VAAPI;
+			case AV_PIX_FMT_VDPAU: return AV_HWDEVICE_TYPE_VDPAU;
+#endif // Q_OS_WIN || Q_OS_MAC
+			case AV_PIX_FMT_CUDA: return AV_HWDEVICE_TYPE_CUDA;
+			}
+			return AV_HWDEVICE_TYPE_NONE;
+		}();
+		if (type != AV_HWDEVICE_TYPE_NONE && !InitHw(context, type)) {
+			continue;
+		} else if (type == AV_HWDEVICE_TYPE_NONE && context->hw_device_ctx) {
+			av_buffer_unref(&context->hw_device_ctx);
+		}
+		return *p;
+	}
+	return AV_PIX_FMT_NONE;
+}
+
 template <AVPixelFormat Required>
 enum AVPixelFormat GetFormatImplementation(
 		AVCodecContext *ctx,
@@ -258,29 +314,8 @@ CodecPointer MakeCodecPointer(CodecDescriptor descriptor) {
 		return {};
 	}
 
-	if (descriptor.type != AV_HWDEVICE_TYPE_NONE) {
-		const auto hw = ResolveHwAccel(codec, descriptor.type);
-		if (!hw.getFormat) {
-			return {};
-		}
-		context->get_format = hw.getFormat;
-		auto hwDeviceContext = (AVBufferRef*)nullptr;
-		error = av_hwdevice_ctx_create(
-			&hwDeviceContext,
-			descriptor.type,
-			nullptr,
-			nullptr,
-			0);
-		if (error || !hwDeviceContext) {
-			LogError(qstr("av_hwdevice_ctx_create"), error);
-			return {};
-		}
-		DEBUG_LOG(("Video Info: "
-			"Using \"%1\" hardware acceleration for \"%2\" decoder."
-			).arg(av_hwdevice_get_type_name(descriptor.type)
-			).arg(codec->name));
-		context->hw_device_ctx = av_buffer_ref(hwDeviceContext);
-		av_buffer_unref(&hwDeviceContext);
+	if (descriptor.hwAllowed) {
+		context->get_format = GetHwFormat;
 	} else {
 		DEBUG_LOG(("Video Info: Using software \"%2\" decoder."
 			).arg(codec->name));
diff --git a/Telegram/SourceFiles/ffmpeg/ffmpeg_utility.h b/Telegram/SourceFiles/ffmpeg/ffmpeg_utility.h
index 1a117961d..5e334b0e0 100644
--- a/Telegram/SourceFiles/ffmpeg/ffmpeg_utility.h
+++ b/Telegram/SourceFiles/ffmpeg/ffmpeg_utility.h
@@ -128,7 +128,7 @@ using CodecPointer = std::unique_ptr<AVCodecContext, CodecDeleter>;
 
 struct CodecDescriptor {
 	not_null<AVStream*> stream;
-	AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;
+	bool hwAllowed = false;
 };
 
 [[nodiscard]] CodecPointer MakeCodecPointer(CodecDescriptor descriptor);
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_common.h b/Telegram/SourceFiles/media/streaming/media_streaming_common.h
index e8b77234f..3fdb120a4 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_common.h
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_common.h
@@ -45,7 +45,7 @@ struct PlaybackOptions {
 	AudioMsgId audioId;
 	bool syncVideoByAudio = true;
 	bool waitForMarkAsShown = false;
-	bool hwAllow = false;
+	bool hwAllowed = false;
 	bool loop = false;
 };
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_file.cpp b/Telegram/SourceFiles/media/streaming/media_streaming_file.cpp
index 403f13001..a966e89e3 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_file.cpp
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_file.cpp
@@ -163,36 +163,15 @@ Stream File::Context::initStream(
 	}
 
 	const auto info = format->streams[index];
-	const auto tryCreateCodec = [&](AVHWDeviceType type) {
-		result.codec = FFmpeg::MakeCodecPointer({
-			.stream = info,
-			.type = type,
-		});
-		return (result.codec != nullptr);
-	};
 	if (type == AVMEDIA_TYPE_VIDEO) {
 		if (info->disposition & AV_DISPOSITION_ATTACHED_PIC) {
 			// ignore cover streams
 			return Stream();
 		}
-		const auto hwAccelTypes = std::array{
-#ifdef Q_OS_WIN
-			AV_HWDEVICE_TYPE_D3D11VA,
-			AV_HWDEVICE_TYPE_DXVA2,
-#elif defined Q_OS_MAC // Q_OS_WIN
-			AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
-#else // Q_OS_WIN || Q_OS_MAC
-			AV_HWDEVICE_TYPE_VAAPI,
-			AV_HWDEVICE_TYPE_VDPAU,
-#endif // Q_OS_WIN || Q_OS_MAC
-			AV_HWDEVICE_TYPE_CUDA,
-			AV_HWDEVICE_TYPE_NONE,
-		};
-		for (const auto type : hwAccelTypes) {
-			if (tryCreateCodec(type)) {
-				break;
-			}
-		}
+		result.codec = FFmpeg::MakeCodecPointer({
+			.stream = info,
+			.hwAllowed = hwAllowed,
+		});
 		if (!result.codec) {
 			return result;
 		}
@@ -202,7 +181,9 @@ Stream File::Context::initStream(
 		result.frequency = info->codecpar->sample_rate;
 		if (!result.frequency) {
 			return result;
-		} else if (!tryCreateCodec(AV_HWDEVICE_TYPE_NONE)) {
+		}
+		result.codec = FFmpeg::MakeCodecPointer({ .stream = info });
+		if (!result.codec) {
 			return result;
 		}
 	}
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_player.cpp b/Telegram/SourceFiles/media/streaming/media_streaming_player.cpp
index 0dce3142f..316af0105 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_player.cpp
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_player.cpp
@@ -544,7 +544,7 @@ void Player::play(const PlaybackOptions &options) {
 		_options.speed = 1.;
 	}
 	_stage = Stage::Initializing;
-	_file->start(delegate(), _options.position, _options.hwAllow);
+	_file->start(delegate(), _options.position, _options.hwAllowed);
 }
 
 void Player::savePreviousReceivedTill(
diff --git a/Telegram/SourceFiles/media/view/media_view_overlay_widget.cpp b/Telegram/SourceFiles/media/view/media_view_overlay_widget.cpp
index 8977a20d4..992129ee5 100644
--- a/Telegram/SourceFiles/media/view/media_view_overlay_widget.cpp
+++ b/Telegram/SourceFiles/media/view/media_view_overlay_widget.cpp
@@ -3116,7 +3116,7 @@ void OverlayWidget::restartAtSeekPosition(crl::time position) {
 	}
 	auto options = Streaming::PlaybackOptions();
 	options.position = position;
-	options.hwAllow = true;
+	options.hwAllowed = true;
 	if (!_streamed->withSound) {
 		options.mode = Streaming::Mode::Video;
 		options.loop = true;
diff --git a/Telegram/SourceFiles/media/view/media_view_pip.cpp b/Telegram/SourceFiles/media/view/media_view_pip.cpp
index 8c1e963de..e63f961b4 100644
--- a/Telegram/SourceFiles/media/view/media_view_pip.cpp
+++ b/Telegram/SourceFiles/media/view/media_view_pip.cpp
@@ -1604,7 +1604,7 @@ void Pip::restartAtSeekPosition(crl::time position) {
 
 	auto options = Streaming::PlaybackOptions();
 	options.position = position;
-	options.hwAllow = true;
+	options.hwAllowed = true;
 	options.audioId = _instance.player().prepareLegacyState().id;
 	Assert(8 && _delegate->pipPlaybackSpeed() >= 0.5
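For context, the patch above moves hardware acceleration into FFmpeg's get_format negotiation: instead of probing a fixed list of AVHWDeviceType values up front, the decoder offers its supported pixel formats and the callback picks a hardware one only if a matching device context can actually be created, falling back to software otherwise. Below is a minimal standalone sketch of that pattern against the plain FFmpeg API, assuming a VAAPI device only; the PickHwFormat and OpenDecoder helper names are illustrative and are not code from this patch.

// Sketch only: get_format-based hardware decoding, VAAPI assumed.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext.h>
}

static enum AVPixelFormat PickHwFormat(
		AVCodecContext *context,
		const enum AVPixelFormat *formats) {
	for (const enum AVPixelFormat *p = formats; *p != AV_PIX_FMT_NONE; ++p) {
		if (*p != AV_PIX_FMT_VAAPI) {
			continue;
		}
		// Create the device only when the decoder actually offers VAAPI.
		AVBufferRef *device = nullptr;
		if (av_hwdevice_ctx_create(
				&device, AV_HWDEVICE_TYPE_VAAPI, nullptr, nullptr, 0) < 0) {
			break; // device creation failed, fall back to software
		}
		av_buffer_unref(&context->hw_device_ctx);
		context->hw_device_ctx = device;
		return *p;
	}
	// No usable hardware format: let FFmpeg choose a software one.
	return avcodec_default_get_format(context, formats);
}

static AVCodecContext *OpenDecoder(
		const AVCodecParameters *params,
		bool hwAllowed) {
	const AVCodec *codec = avcodec_find_decoder(params->codec_id);
	AVCodecContext *context = codec ? avcodec_alloc_context3(codec) : nullptr;
	if (!context || avcodec_parameters_to_context(context, params) < 0) {
		avcodec_free_context(&context);
		return nullptr;
	}
	if (hwAllowed) {
		// Defer the hardware choice to format negotiation, as the patch does.
		context->get_format = PickHwFormat;
	}
	if (avcodec_open2(context, codec, nullptr) < 0) {
		avcodec_free_context(&context);
		return nullptr;
	}
	return context;
}

Doing the device setup inside get_format means callers only pass a single hwAllowed flag, the decoder itself reports which hardware pixel formats it can produce, and software decoding remains the fallback whenever device creation fails.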