diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_common.h b/Telegram/SourceFiles/media/streaming/media_streaming_common.h index 997445783..e8b77234f 100644 --- a/Telegram/SourceFiles/media/streaming/media_streaming_common.h +++ b/Telegram/SourceFiles/media/streaming/media_streaming_common.h @@ -162,6 +162,7 @@ enum class FrameFormat { None, ARGB32, YUV420, + NV12, }; struct FrameChannel { @@ -169,7 +170,7 @@ struct FrameChannel { int stride = 0; }; -struct FrameYUV420 { +struct FrameYUV { QSize size; QSize chromaSize; FrameChannel y; @@ -179,7 +180,7 @@ struct FrameYUV420 { struct FrameWithInfo { QImage image; - FrameYUV420 *yuv420 = nullptr; + FrameYUV *yuv = nullptr; FrameFormat format = FrameFormat::None; int index = -1; bool alpha = false; diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_utility.cpp b/Telegram/SourceFiles/media/streaming/media_streaming_utility.cpp index 9a9ec5082..7f1e2f4b1 100644 --- a/Telegram/SourceFiles/media/streaming/media_streaming_utility.cpp +++ b/Telegram/SourceFiles/media/streaming/media_streaming_utility.cpp @@ -198,7 +198,7 @@ QImage ConvertFrame( return storage; } -FrameYUV420 ExtractYUV420(Stream &stream, AVFrame *frame) { +FrameYUV ExtractYUV(Stream &stream, AVFrame *frame) { return { .size = { frame->width, frame->height }, .chromaSize = { diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_utility.h b/Telegram/SourceFiles/media/streaming/media_streaming_utility.h index 144ae103f..72dd6441a 100644 --- a/Telegram/SourceFiles/media/streaming/media_streaming_utility.h +++ b/Telegram/SourceFiles/media/streaming/media_streaming_utility.h @@ -64,7 +64,7 @@ struct Stream { not_null<AVFrame*> frame, QSize resize, QImage storage); -[[nodiscard]] FrameYUV420 ExtractYUV420(Stream &stream, AVFrame *frame); +[[nodiscard]] FrameYUV ExtractYUV(Stream &stream, AVFrame *frame); [[nodiscard]] QImage PrepareByRequest( const QImage &original, bool alpha, diff --git 
a/Telegram/SourceFiles/media/streaming/media_streaming_video_track.cpp b/Telegram/SourceFiles/media/streaming/media_streaming_video_track.cpp index b0f69e579..191593814 100644 --- a/Telegram/SourceFiles/media/streaming/media_streaming_video_track.cpp +++ b/Telegram/SourceFiles/media/streaming/media_streaming_video_track.cpp @@ -22,10 +22,12 @@ constexpr auto kDisplaySkipped = crl::time(-1); constexpr auto kFinishedPosition = std::numeric_limits<crl::time>::max(); static_assert(kDisplaySkipped != kTimeUnknown); -[[nodiscard]] QImage ConvertToARGB32(const FrameYUV420 &data) { +[[nodiscard]] QImage ConvertToARGB32( + FrameFormat format, + const FrameYUV &data) { Expects(data.y.data != nullptr); Expects(data.u.data != nullptr); - Expects(data.v.data != nullptr); + Expects((format == FrameFormat::NV12) || (data.v.data != nullptr)); Expects(!data.size.isEmpty()); //if (FFmpeg::RotationSwapWidthHeight(stream.rotation)) { @@ -35,7 +37,9 @@ static_assert(kDisplaySkipped != kTimeUnknown); auto result = FFmpeg::CreateFrameStorage(data.size); const auto swscale = FFmpeg::MakeSwscalePointer( data.size, - AV_PIX_FMT_YUV420P, + (format == FrameFormat::YUV420 + ? AV_PIX_FMT_YUV420P + : AV_PIX_FMT_NV12), data.size, AV_PIX_FMT_BGRA); if (!swscale) { @@ -448,14 +452,16 @@ void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) { const auto frameWithData = frame->transferred ? 
frame->transferred.get() : frame->decoded.get(); - if (frameWithData->format == AV_PIX_FMT_YUV420P && !requireARGB32()) { + if ((frameWithData->format == AV_PIX_FMT_YUV420P + || frameWithData->format == AV_PIX_FMT_NV12) && !requireARGB32()) { + const auto nv12 = (frameWithData->format == AV_PIX_FMT_NV12); frame->alpha = false; - frame->yuv420 = ExtractYUV420(_stream, frameWithData); - if (frame->yuv420.size.isEmpty() - || frame->yuv420.chromaSize.isEmpty() - || !frame->yuv420.y.data - || !frame->yuv420.u.data - || !frame->yuv420.v.data) { + frame->yuv = ExtractYUV(_stream, frameWithData); + if (frame->yuv.size.isEmpty() + || frame->yuv.chromaSize.isEmpty() + || !frame->yuv.y.data + || !frame->yuv.u.data + || (!nv12 && !frame->yuv.v.data)) { frame->prepared.clear(); fail(Error::InvalidData); return; @@ -466,11 +472,11 @@ void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) { prepared.image = QImage(); } } - frame->format = FrameFormat::YUV420; + frame->format = nv12 ? FrameFormat::NV12 : FrameFormat::YUV420; } else { frame->alpha = (frameWithData->format == AV_PIX_FMT_BGRA) || (frameWithData->format == AV_PIX_FMT_YUVA420P); - frame->yuv420.size = { + frame->yuv.size = { frameWithData->width, frameWithData->height }; @@ -1173,7 +1179,7 @@ FrameWithInfo VideoTrack::frameWithInfo(const Instance *instance) { } return { .image = data.frame->original, - .yuv420 = &data.frame->yuv420, + .yuv = &data.frame->yuv, .format = data.frame->format, .index = data.index, .alpha = data.frame->alpha, @@ -1197,8 +1203,9 @@ QImage VideoTrack::frameImage( }); } if (frame->original.isNull() - && frame->format == FrameFormat::YUV420) { - frame->original = ConvertToARGB32(frame->yuv420); + && (frame->format == FrameFormat::YUV420 + || frame->format == FrameFormat::NV12)) { + frame->original = ConvertToARGB32(frame->format, frame->yuv); } if (GoodForRequest( frame->original, @@ -1235,8 +1242,10 @@ QImage VideoTrack::frameImage( QImage VideoTrack::currentFrameImage() { const auto frame = 
_shared->frameForPaint(); - if (frame->original.isNull() && frame->format == FrameFormat::YUV420) { - frame->original = ConvertToARGB32(frame->yuv420); + if (frame->original.isNull() + && (frame->format == FrameFormat::YUV420 + || frame->format == FrameFormat::NV12)) { + frame->original = ConvertToARGB32(frame->format, frame->yuv); } return frame->original; } @@ -1293,7 +1302,8 @@ bool VideoTrack::IsDecoded(not_null<Frame*> frame) { bool VideoTrack::IsRasterized(not_null<Frame*> frame) { return IsDecoded(frame) && (!frame->original.isNull() - || frame->format == FrameFormat::YUV420); + || frame->format == FrameFormat::YUV420 + || frame->format == FrameFormat::NV12); } bool VideoTrack::IsStale(not_null<Frame*> frame, crl::time trackTime) { diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_video_track.h b/Telegram/SourceFiles/media/streaming/media_streaming_video_track.h index be3b14d72..95957b5d9 100644 --- a/Telegram/SourceFiles/media/streaming/media_streaming_video_track.h +++ b/Telegram/SourceFiles/media/streaming/media_streaming_video_track.h @@ -84,7 +84,7 @@ private: FFmpeg::FramePointer decoded = FFmpeg::MakeFramePointer(); FFmpeg::FramePointer transferred; QImage original; - FrameYUV420 yuv420; + FrameYUV yuv; crl::time position = kTimeUnknown; crl::time displayed = kTimeUnknown; crl::time display = kTimeUnknown; diff --git a/Telegram/SourceFiles/media/view/media_view_overlay_opengl.cpp b/Telegram/SourceFiles/media/view/media_view_overlay_opengl.cpp index a1e13b50c..6a5d4b44c 100644 --- a/Telegram/SourceFiles/media/view/media_view_overlay_opengl.cpp +++ b/Telegram/SourceFiles/media/view/media_view_overlay_opengl.cpp @@ -106,6 +106,14 @@ void OverlayWidget::RendererGL::init( FragmentSampleYUV420Texture(), })); + _nv12Program.emplace(); + LinkProgram( + &*_nv12Program, + _texturedVertexShader, + FragmentShader({ + FragmentSampleNV12Texture(), + })); + _fillProgram.emplace(); LinkProgram( &*_fillProgram, @@ -136,6 +144,7 @@ 
_texturedVertexShader = nullptr; _withTransparencyProgram = std::nullopt; _yuv420Program = std::nullopt; + _nv12Program = std::nullopt; _fillProgram = std::nullopt; _controlsProgram = std::nullopt; _contentBuffer = std::nullopt; @@ -196,10 +205,13 @@ void OverlayWidget::RendererGL::paintTransformedVideoFrame( data.alpha); return; } - Assert(data.format == Streaming::FrameFormat::YUV420); - Assert(!data.yuv420->size.isEmpty()); - const auto yuv = data.yuv420; - _yuv420Program->bind(); + Assert(!data.yuv->size.isEmpty()); + const auto program = (data.format == Streaming::FrameFormat::NV12) + ? &*_nv12Program + : &*_yuv420Program; + program->bind(); + const auto nv12 = (data.format == Streaming::FrameFormat::NV12); + const auto yuv = data.yuv; const auto upload = (_trackFrameIndex != data.index) || (_streamedIndex != _owner->streamedIndex()); @@ -223,32 +235,38 @@ void OverlayWidget::RendererGL::paintTransformedVideoFrame( _textures.bind(*_f, 2); if (upload) { uploadTexture( - GL_ALPHA, - GL_ALPHA, + nv12 ? GL_RG : GL_ALPHA, + nv12 ? GL_RG : GL_ALPHA, yuv->chromaSize, _chromaSize, - yuv->u.stride, + yuv->u.stride / (nv12 ? 
2 : 1), yuv->u.data); } - _f->glActiveTexture(GL_TEXTURE2); - _textures.bind(*_f, 3); - if (upload) { - uploadTexture( - GL_ALPHA, - GL_ALPHA, - yuv->chromaSize, - _chromaSize, - yuv->v.stride, - yuv->v.data); - _chromaSize = yuv->chromaSize; - _f->glPixelStorei(GL_UNPACK_ALIGNMENT, 4); + if (!nv12) { + _f->glActiveTexture(GL_TEXTURE2); + _textures.bind(*_f, 3); + if (upload) { + uploadTexture( + GL_ALPHA, + GL_ALPHA, + yuv->chromaSize, + _chromaSize, + yuv->v.stride, + yuv->v.data); + _chromaSize = yuv->chromaSize; + _f->glPixelStorei(GL_UNPACK_ALIGNMENT, 4); + } + } + program->setUniformValue("y_texture", GLint(0)); + if (nv12) { + program->setUniformValue("uv_texture", GLint(1)); + } else { + program->setUniformValue("u_texture", GLint(1)); + program->setUniformValue("v_texture", GLint(2)); } - _yuv420Program->setUniformValue("y_texture", GLint(0)); - _yuv420Program->setUniformValue("u_texture", GLint(1)); - _yuv420Program->setUniformValue("v_texture", GLint(2)); toggleBlending(false); - paintTransformedContent(&*_yuv420Program, geometry); + paintTransformedContent(program, geometry); } void OverlayWidget::RendererGL::paintTransformedStaticContent( diff --git a/Telegram/SourceFiles/media/view/media_view_overlay_opengl.h b/Telegram/SourceFiles/media/view/media_view_overlay_opengl.h index d9b5dff34..b7a2c2ca7 100644 --- a/Telegram/SourceFiles/media/view/media_view_overlay_opengl.h +++ b/Telegram/SourceFiles/media/view/media_view_overlay_opengl.h @@ -107,6 +107,7 @@ private: QOpenGLShader *_texturedVertexShader = nullptr; std::optional<QOpenGLShaderProgram> _withTransparencyProgram; std::optional<QOpenGLShaderProgram> _yuv420Program; + std::optional<QOpenGLShaderProgram> _nv12Program; std::optional<QOpenGLShaderProgram> _fillProgram; std::optional<QOpenGLShaderProgram> _controlsProgram; Ui::GL::Textures<4> _textures; diff --git a/Telegram/SourceFiles/media/view/media_view_pip_opengl.cpp b/Telegram/SourceFiles/media/view/media_view_pip_opengl.cpp index 71c462a1c..5c41cfa56 100644 --- a/Telegram/SourceFiles/media/view/media_view_pip_opengl.cpp +++ 
b/Telegram/SourceFiles/media/view/media_view_pip_opengl.cpp @@ -195,6 +195,16 @@ void Pip::RendererGL::init( FragmentRoundToShadow(), })); + _nv12Program.emplace(); + LinkProgram( + &*_nv12Program, + _texturedVertexShader, + FragmentShader({ + FragmentSampleNV12Texture(), + FragmentApplyFade(), + FragmentRoundToShadow(), + })); + _imageProgram.emplace(); LinkProgram( &*_imageProgram, @@ -231,6 +241,7 @@ void Pip::RendererGL::deinit( _texturedVertexShader = nullptr; _argb32Program = std::nullopt; _yuv420Program = std::nullopt; + _nv12Program = std::nullopt; _controlsProgram = std::nullopt; _contentBuffer = std::nullopt; } @@ -289,10 +300,13 @@ void Pip::RendererGL::paintTransformedVideoFrame( paintTransformedStaticContent(data.image, geometry); return; } - Assert(data.format == Streaming::FrameFormat::YUV420); - Assert(!data.yuv420->size.isEmpty()); - const auto yuv = data.yuv420; - _yuv420Program->bind(); + Assert(!data.yuv->size.isEmpty()); + const auto program = (data.format == Streaming::FrameFormat::NV12) + ? &*_nv12Program + : &*_yuv420Program; + program->bind(); + const auto nv12 = (data.format == Streaming::FrameFormat::NV12); + const auto yuv = data.yuv; const auto upload = (_trackFrameIndex != data.index); _trackFrameIndex = data.index; @@ -314,31 +328,37 @@ void Pip::RendererGL::paintTransformedVideoFrame( _textures.bind(*_f, 2); if (upload) { uploadTexture( - GL_ALPHA, - GL_ALPHA, + nv12 ? GL_RG : GL_ALPHA, + nv12 ? GL_RG : GL_ALPHA, yuv->chromaSize, _chromaSize, - yuv->u.stride, + yuv->u.stride / (nv12 ? 
2 : 1), yuv->u.data); } - _f->glActiveTexture(GL_TEXTURE2); - _textures.bind(*_f, 3); - if (upload) { - uploadTexture( - GL_ALPHA, - GL_ALPHA, - yuv->chromaSize, - _chromaSize, - yuv->v.stride, - yuv->v.data); - _chromaSize = yuv->chromaSize; - _f->glPixelStorei(GL_UNPACK_ALIGNMENT, 4); + if (!nv12) { + _f->glActiveTexture(GL_TEXTURE2); + _textures.bind(*_f, 3); + if (upload) { + uploadTexture( + GL_ALPHA, + GL_ALPHA, + yuv->chromaSize, + _chromaSize, + yuv->v.stride, + yuv->v.data); + _chromaSize = yuv->chromaSize; + _f->glPixelStorei(GL_UNPACK_ALIGNMENT, 4); + } + } + program->setUniformValue("y_texture", GLint(0)); + if (nv12) { + program->setUniformValue("uv_texture", GLint(1)); + } else { + program->setUniformValue("u_texture", GLint(1)); + program->setUniformValue("v_texture", GLint(2)); } - _yuv420Program->setUniformValue("y_texture", GLint(0)); - _yuv420Program->setUniformValue("u_texture", GLint(1)); - _yuv420Program->setUniformValue("v_texture", GLint(2)); - paintTransformedContent(&*_yuv420Program, geometry); + paintTransformedContent(program, geometry); } void Pip::RendererGL::paintTransformedStaticContent( diff --git a/Telegram/SourceFiles/media/view/media_view_pip_opengl.h b/Telegram/SourceFiles/media/view/media_view_pip_opengl.h index e5b005019..7c9af83fc 100644 --- a/Telegram/SourceFiles/media/view/media_view_pip_opengl.h +++ b/Telegram/SourceFiles/media/view/media_view_pip_opengl.h @@ -101,6 +101,7 @@ private: QOpenGLShader *_texturedVertexShader = nullptr; std::optional<QOpenGLShaderProgram> _argb32Program; std::optional<QOpenGLShaderProgram> _yuv420Program; + std::optional<QOpenGLShaderProgram> _nv12Program; Ui::GL::Textures<4> _textures; QSize _rgbaSize; QSize _lumaSize; diff --git a/Telegram/lib_ui b/Telegram/lib_ui index 5fa3d7a9d..8700c2223 160000 --- a/Telegram/lib_ui +++ b/Telegram/lib_ui @@ -1 +1 @@ -Subproject commit 5fa3d7a9daa62fb82713bc822a5138116a6015f2 +Subproject commit 8700c2223ab60db994376b39e4e74a6309d5154a