Mirror of https://github.com/AyuGram/AyuGramDesktop.git, synced 2025-04-19 15:47:11 +02:00.
Render YUV420 video in media viewer.
Commit f6f0b02333 (parent 1858e7e8ac), 16 changed files with 370 additions and 54 deletions.
@@ -119,6 +119,7 @@ struct FrameRequest {
     QSize outer;
     ImageRoundRadius radius = ImageRoundRadius();
     RectParts corners = RectPart::AllCorners;
+    bool requireARGB32 = true;
     bool strict = true;
 
     static FrameRequest NonStrict() {

@@ -135,16 +136,44 @@ struct FrameRequest {
         return (resize == other.resize)
             && (outer == other.outer)
             && (radius == other.radius)
-            && (corners == other.corners);
+            && (corners == other.corners)
+            && (requireARGB32 == other.requireARGB32);
     }
     [[nodiscard]] bool operator!=(const FrameRequest &other) const {
         return !(*this == other);
     }
 
     [[nodiscard]] bool goodFor(const FrameRequest &other) const {
-        return (*this == other) || (strict && !other.strict);
+        return (requireARGB32 == other.requireARGB32)
+            && ((*this == other) || (strict && !other.strict));
     }
 };
 
+enum class FrameFormat {
+    None,
+    ARGB32,
+    YUV420,
+};
+
+struct FrameChannel {
+    const void *data = nullptr;
+    int stride = 0;
+};
+
+struct FrameYUV420 {
+    QSize size;
+    QSize chromaSize;
+    FrameChannel y;
+    FrameChannel u;
+    FrameChannel v;
+};
+
+struct FrameWithInfo {
+    QImage original;
+    FrameYUV420 *yuv420 = nullptr;
+    FrameFormat format = FrameFormat::None;
+    int index = -1;
+};
+
 } // namespace Streaming
 } // namespace Media
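The new FrameYUV420/FrameWithInfo structs only carry pointers and strides into the decoder's planes; the renderer converts them to RGB in a fragment shader. For readers who want to see what those planes mean on the CPU side, here is a minimal sketch (not part of the commit) that samples one pixel, assuming BT.601 limited-range coefficients and that media_streaming_common.h is included:

// Hypothetical helper for illustration only.
#include <algorithm>
#include <cstdint>

struct RGB { uint8_t r = 0, g = 0, b = 0; };

RGB SampleYUV420(const Media::Streaming::FrameYUV420 &frame, int x, int y) {
    const auto luma = static_cast<const uint8_t*>(frame.y.data);
    const auto cb = static_cast<const uint8_t*>(frame.u.data);
    const auto cr = static_cast<const uint8_t*>(frame.v.data);
    // Chroma planes are subsampled 2x2, so both coordinates are halved.
    const auto Y = luma[y * frame.y.stride + x];
    const auto U = cb[(y / 2) * frame.u.stride + (x / 2)];
    const auto V = cr[(y / 2) * frame.v.stride + (x / 2)];
    const auto c = (Y - 16) * 298;
    const auto d = U - 128;
    const auto e = V - 128;
    const auto clamp = [](int value) {
        return static_cast<uint8_t>(std::clamp(value, 0, 255));
    };
    return {
        clamp((c + 409 * e + 128) >> 8),
        clamp((c - 100 * d - 208 * e + 128) >> 8),
        clamp((c + 516 * d + 128) >> 8),
    };
}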
@@ -172,6 +172,10 @@ QImage Instance::frame(const FrameRequest &request) const {
     return player().frame(request, this);
 }
 
+FrameWithInfo Instance::frameWithInfo() const {
+    return player().frameWithInfo(this);
+}
+
 bool Instance::markFrameShown() {
     Expects(_shared != nullptr);
 
@@ -69,6 +69,7 @@ public:
     void callWaitingCallback();
 
     [[nodiscard]] QImage frame(const FrameRequest &request) const;
+    [[nodiscard]] FrameWithInfo frameWithInfo() const;
     bool markFrameShown();
 
     void lockPlayer();
@@ -881,6 +881,12 @@ QImage Player::frame(
     return _video->frame(request, instance);
 }
 
+FrameWithInfo Player::frameWithInfo(const Instance *instance) const {
+    Expects(_video != nullptr);
+
+    return _video->frameWithInfo(instance);
+}
+
 void Player::unregisterInstance(not_null<const Instance*> instance) {
     if (_video) {
         _video->unregisterInstance(instance);
@@ -64,6 +64,10 @@ public:
     [[nodiscard]] QImage frame(
         const FrameRequest &request,
         const Instance *instance = nullptr) const;
+
+    [[nodiscard]] FrameWithInfo frameWithInfo(
+        const Instance *instance = nullptr) const; // !requireARGB32
+
     void unregisterInstance(not_null<const Instance*> instance);
     bool markFrameShown();
 
@@ -174,6 +174,19 @@ QImage ConvertFrame(
     return storage;
 }
 
+FrameYUV420 ExtractYUV420(Stream &stream, AVFrame *frame) {
+    return {
+        .size = { frame->width, frame->height },
+        .chromaSize = {
+            AV_CEIL_RSHIFT(frame->width, 1), // SWScale does that.
+            AV_CEIL_RSHIFT(frame->height, 1)
+        },
+        .y = { .data = frame->data[0], .stride = frame->linesize[0] },
+        .u = { .data = frame->data[1], .stride = frame->linesize[1] },
+        .v = { .data = frame->data[2], .stride = frame->linesize[2] },
+    };
+}
+
 void PaintFrameOuter(QPainter &p, const QRect &inner, QSize outer) {
     const auto left = inner.x();
     const auto right = outer.width() - inner.width() - left;
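AV_CEIL_RSHIFT is libavutil's rounding-up right shift (roughly -((-(a)) >> (shift))), so a video with an odd width or height still gets a chroma plane that covers the last luma column and row, matching what sws_scale assumes, as the comment notes. A quick illustration:

// For a 853x480 frame the chroma planes become 427x240;
// a plain (853 >> 1) would have produced 426 and dropped a column.
static_assert(AV_CEIL_RSHIFT(853, 1) == 427);
static_assert(AV_CEIL_RSHIFT(480, 1) == 240);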
@@ -58,6 +58,7 @@ struct Stream {
     AVFrame *frame,
     QSize resize,
     QImage storage);
+[[nodiscard]] FrameYUV420 ExtractYUV420(Stream &stream, AVFrame *frame);
 [[nodiscard]] QImage PrepareByRequest(
     const QImage &original,
     bool alpha,
@@ -53,6 +53,7 @@ public:
     void removeFrameRequest(const Instance *instance);
 
     void rasterizeFrame(not_null<Frame*> frame);
+    [[nodiscard]] bool requireARGB32() const;
 
 private:
     enum class FrameResult {
@@ -357,20 +358,50 @@ QSize VideoTrackObject::chooseOriginalResize() const {
     return chosen;
 }
 
+bool VideoTrackObject::requireARGB32() const {
+    for (const auto &[_, request] : _requests) {
+        if (!request.requireARGB32) {
+            return false;
+        }
+    }
+    return true;
+}
+
 void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) {
     Expects(frame->position != kFinishedPosition);
 
     fillRequests(frame);
-    frame->alpha = (frame->decoded->format == AV_PIX_FMT_BGRA);
-    frame->original = ConvertFrame(
-        _stream,
-        frame->decoded.get(),
-        chooseOriginalResize(),
-        std::move(frame->original));
-    if (frame->original.isNull()) {
-        frame->prepared.clear();
-        fail(Error::InvalidData);
-        return;
+    frame->format = FrameFormat::None;
+    if (frame->decoded->format == AV_PIX_FMT_YUV420P && !requireARGB32()) {
+        frame->alpha = false;
+        frame->yuv420 = ExtractYUV420(_stream, frame->decoded.get());
+        if (frame->yuv420.size.isEmpty()
+            || frame->yuv420.chromaSize.isEmpty()
+            || !frame->yuv420.y.data
+            || !frame->yuv420.u.data
+            || !frame->yuv420.v.data) {
+            frame->prepared.clear();
+            fail(Error::InvalidData);
+            return;
+        }
+        frame->format = FrameFormat::YUV420;
+    } else {
+        frame->alpha = (frame->decoded->format == AV_PIX_FMT_BGRA);
+        frame->yuv420.size = {
+            frame->decoded->width,
+            frame->decoded->height
+        };
+        frame->original = ConvertFrame(
+            _stream,
+            frame->decoded.get(),
+            chooseOriginalResize(),
+            std::move(frame->original));
+        if (frame->original.isNull()) {
+            frame->prepared.clear();
+            fail(Error::InvalidData);
+            return;
+        }
+        frame->format = FrameFormat::ARGB32;
     }
 
     VideoTrack::PrepareFrameByRequests(frame, _stream.rotation);
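The YUV420 path is taken only when every registered consumer of the track has opted out of ARGB32; a single consumer that still needs a QImage forces the ConvertFrame() fallback for everyone. A hypothetical, free-standing illustration of that aggregation (the function below is not from the commit, and assumes media_streaming_common.h is included):

#include <vector>

// One ARGB32 consumer is enough to keep the RGB conversion path.
bool RequireARGB32(const std::vector<Media::Streaming::FrameRequest> &requests) {
    for (const auto &request : requests) {
        if (request.requireARGB32) {
            return true; // somebody still needs a QImage, keep ConvertFrame().
        }
    }
    return false; // everyone accepts raw planes, rasterize as FrameFormat::YUV420.
}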
@@ -613,6 +644,7 @@ void VideoTrack::Shared::init(QImage &&cover, crl::time position) {
 
     _frames[0].original = std::move(cover);
     _frames[0].position = position;
+    _frames[0].format = FrameFormat::ARGB32;
 
     // Usually main thread sets displayed time before _counter increment.
     // But in this case we update _counter, so we set a fake displayed time.
@@ -722,7 +754,7 @@ auto VideoTrack::Shared::presentFrame(
 
         // Release this frame to the main thread for rendering.
         _counter.store(
-            (counter + 1) % (2 * kFramesCount),
+            counter + 1,
             std::memory_order_release);
         return { position, crl::time(0), addedWorldTimeDelay };
     };
@@ -847,6 +879,9 @@ bool VideoTrack::Shared::markFrameShown() {
     if (frame->displayed == kTimeUnknown) {
         return false;
     }
+    if (counter == 2 * kFramesCount - 1) {
+        ++_counterCycle;
+    }
     _counter.store(
         next,
         std::memory_order_release);
@@ -867,12 +902,20 @@ bool VideoTrack::Shared::markFrameShown() {
 }
 
 not_null<VideoTrack::Frame*> VideoTrack::Shared::frameForPaint() {
-    const auto result = getFrame(counter() / 2);
-    Assert(!result->original.isNull());
-    Assert(result->position != kTimeUnknown);
-    Assert(result->displayed != kTimeUnknown);
+    return frameForPaintWithIndex().frame;
+}
+
+VideoTrack::FrameWithIndex VideoTrack::Shared::frameForPaintWithIndex() {
+    const auto index = counter() / 2;
+    const auto frame = getFrame(index);
+    Assert(frame->format != FrameFormat::None);
+    Assert(frame->position != kTimeUnknown);
+    Assert(frame->displayed != kTimeUnknown);
+    return {
+        .frame = frame,
+        .index = (_counterCycle * 2 * kFramesCount) + index,
+    };
 
-    return result;
 }
 
 VideoTrack::VideoTrack(
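frameForPaintWithIndex() turns the wrapping presentation counter into a monotonically growing frame index: _counterCycle counts completed 0..2*kFramesCount-1 cycles, so equal indices always refer to the same rasterized frame, which is what lets the GL renderer skip re-uploading textures it has already seen. A small worked example (illustration only, using kFramesCount == 4 as declared in the header below):

constexpr int AbsoluteFrameIndex(int counterCycle, int counter) {
    constexpr auto kFramesCount = 4; // matches VideoTrack::Shared below
    return (counterCycle * 2 * kFramesCount) + (counter / 2);
}
// cycle 0, counter 6 -> frame slot 3; cycle 1, counter 0 -> index 8, and so on.
static_assert(AbsoluteFrameIndex(0, 6) == 3);
static_assert(AbsoluteFrameIndex(1, 0) == 8);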
@@ -977,7 +1020,8 @@ bool VideoTrack::markFrameShown() {
 QImage VideoTrack::frame(
         const FrameRequest &request,
         const Instance *instance) {
-    const auto frame = _shared->frameForPaint();
+    const auto data = _shared->frameForPaintWithIndex();
+    const auto frame = data.frame;
     const auto i = frame->prepared.find(instance);
     const auto none = (i == frame->prepared.end());
     const auto preparedFor = frame->prepared.empty()
@@ -1020,6 +1064,25 @@ QImage VideoTrack::frame(
     return i->second.image;
 }
 
+FrameWithInfo VideoTrack::frameWithInfo(const Instance *instance) {
+    const auto data = _shared->frameForPaintWithIndex();
+    const auto i = data.frame->prepared.find(instance);
+    const auto none = (i == data.frame->prepared.end());
+    if (none || i->second.request.requireARGB32) {
+        _wrapped.with([=](Implementation &unwrapped) {
+            unwrapped.updateFrameRequest(
+                instance,
+                { .requireARGB32 = false });
+        });
+    }
+    return {
+        .original = data.frame->original,
+        .yuv420 = &data.frame->yuv420,
+        .format = data.frame->format,
+        .index = data.index,
+    };
+}
+
 void VideoTrack::unregisterInstance(not_null<const Instance*> instance) {
     _wrapped.with([=](Implementation &unwrapped) {
         unwrapped.removeFrameRequest(instance);
@@ -1029,7 +1092,12 @@ void VideoTrack::unregisterInstance(not_null<const Instance*> instance) {
 void VideoTrack::PrepareFrameByRequests(
         not_null<Frame*> frame,
         int rotation) {
-    Expects(!frame->original.isNull());
+    Expects(frame->format != FrameFormat::ARGB32
+        || !frame->original.isNull());
 
+    if (frame->format != FrameFormat::ARGB32) {
+        return;
+    }
+
     const auto begin = frame->prepared.begin();
     const auto end = frame->prepared.end();
@@ -1063,7 +1131,8 @@ bool VideoTrack::IsDecoded(not_null<const Frame*> frame) {
 
 bool VideoTrack::IsRasterized(not_null<const Frame*> frame) {
     return IsDecoded(frame)
-        && !frame->original.isNull();
+        && (!frame->original.isNull()
+            || frame->format == FrameFormat::YUV420);
 }
 
 bool VideoTrack::IsStale(not_null<const Frame*> frame, crl::time trackTime) {
|
|||
[[nodiscard]] QImage frame(
|
||||
const FrameRequest &request,
|
||||
const Instance *instance);
|
||||
[[nodiscard]] FrameWithInfo frameWithInfo(const Instance *instance);
|
||||
void unregisterInstance(not_null<const Instance*> instance);
|
||||
[[nodiscard]] rpl::producer<> checkNextFrame() const;
|
||||
[[nodiscard]] rpl::producer<> waitingForData() const;
|
||||
|
@@ -78,14 +79,20 @@ private:
     struct Frame {
         FFmpeg::FramePointer decoded = FFmpeg::MakeFramePointer();
         QImage original;
+        FrameYUV420 yuv420;
         crl::time position = kTimeUnknown;
         crl::time displayed = kTimeUnknown;
         crl::time display = kTimeUnknown;
+        FrameFormat format = FrameFormat::None;
 
         base::flat_map<const Instance*, Prepared> prepared;
 
         bool alpha = false;
     };
+    struct FrameWithIndex {
+        not_null<Frame*> frame;
+        int index = -1;
+    };
 
     class Shared {
     public:
@@ -123,6 +130,7 @@ private:
         bool markFrameShown();
         [[nodiscard]] crl::time nextFrameDisplayTime() const;
         [[nodiscard]] not_null<Frame*> frameForPaint();
+        [[nodiscard]] FrameWithIndex frameForPaintWithIndex();
 
     private:
         [[nodiscard]] not_null<Frame*> getFrame(int index);
@@ -132,6 +140,9 @@ private:
         static constexpr auto kCounterUninitialized = -1;
         std::atomic<int> _counter = kCounterUninitialized;
 
+        // Main thread.
+        int _counterCycle = 0;
+
         static constexpr auto kFramesCount = 4;
         std::array<Frame, kFramesCount> _frames;
 
@@ -8,6 +8,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "media/view/media_view_overlay_opengl.h"
 
 #include "ui/gl/gl_shader.h"
+#include "media/streaming/media_streaming_common.h"
 #include "base/platform/base_platform_info.h"
 
 namespace Media::View {
@@ -31,6 +32,23 @@ constexpr auto kFooterOffset = kSaveMsgOffset + 4;
 constexpr auto kCaptionOffset = kFooterOffset + 4;
 constexpr auto kGroupThumbsOffset = kCaptionOffset + 4;
 
+[[nodiscard]] ShaderPart FragmentPlaceOnTransparentBackground() {
+    return {
+        .header = R"(
+uniform vec4 transparentBg;
+uniform vec4 transparentFg;
+uniform float transparentSize;
+)",
+        .body = R"(
+    vec2 checkboardLadder = floor(gl_FragCoord.xy / transparentSize);
+    float checkboard = mod(checkboardLadder.x + checkboardLadder.y, 2.0);
+    vec4 checkboardColor = checkboard * transparentBg
+        + (1. - checkboard) * transparentFg;
+    result += checkboardColor * (1. - result.a);
+)",
+    };
+}
+
 } // namespace
 
 OverlayWidget::RendererGL::RendererGL(not_null<OverlayWidget*> owner)
@@ -59,7 +77,7 @@ void OverlayWidget::RendererGL::init(
     _textures.ensureCreated(f);
 
     _imageProgram.emplace();
-    LinkProgram(
+    _texturedVertexShader = LinkProgram(
         &*_imageProgram,
         VertexShader({
             VertexViewportTransform(),
@@ -67,6 +85,23 @@ void OverlayWidget::RendererGL::init(
         }),
         FragmentShader({
             FragmentSampleARGB32Texture(),
+        })).vertex;
+
+    _withTransparencyProgram.emplace();
+    LinkProgram(
+        &*_withTransparencyProgram,
+        _texturedVertexShader,
+        FragmentShader({
+            FragmentSampleARGB32Texture(),
+            FragmentPlaceOnTransparentBackground(),
+        }));
+
+    _yuv420Program.emplace();
+    LinkProgram(
+        &*_yuv420Program,
+        _texturedVertexShader,
+        FragmentShader({
+            FragmentSampleYUV420Texture(),
+        }));
 
     _background.init(f);
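FragmentSampleYUV420Texture() comes from ui/gl/gl_shader.h and is not part of this diff. Roughly, it has to sample the three single-channel textures bound as y_texture/u_texture/v_texture and combine them into the result variable that later shader parts (such as FragmentPlaceOnTransparentBackground above) build on. A minimal sketch of such a part, assuming a varying named v_texcoord and BT.601 full-range coefficients; the real helper may differ:

// Hypothetical sketch only; the actual helper lives in ui/gl/gl_shader.h.
[[nodiscard]] ShaderPart FragmentSampleYUV420TextureSketch() {
    return {
        .header = R"(
uniform sampler2D y_texture;
uniform sampler2D u_texture;
uniform sampler2D v_texture;
)",
        .body = R"(
    float y = texture2D(y_texture, v_texcoord).r;
    float u = texture2D(u_texture, v_texcoord).r - 0.5;
    float v = texture2D(v_texture, v_texcoord).r - 0.5;
    vec4 result = vec4(
        y + 1.402 * v,
        y - 0.344 * u - 0.714 * v,
        y + 1.772 * u,
        1.);
)",
    };
}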
@@ -78,6 +113,9 @@ void OverlayWidget::RendererGL::deinit(
     _background.deinit(f);
     _textures.destroy(f);
     _imageProgram = std::nullopt;
+    _texturedVertexShader = nullptr;
+    _withTransparencyProgram = std::nullopt;
+    _yuv420Program = std::nullopt;
     _contentBuffer = std::nullopt;
 }
 
@@ -138,11 +176,72 @@ void OverlayWidget::RendererGL::paintBackground() {
 void OverlayWidget::RendererGL::paintTransformedVideoFrame(
         QRect rect,
         int rotation) {
-    paintTransformedStaticContent(
-        _owner->videoFrame(),
-        rect,
-        rotation,
-        false);
+    const auto data = _owner->videoFrameWithInfo();
+    if (data.format == Streaming::FrameFormat::None) {
+        return;
+    }
+    if (data.format == Streaming::FrameFormat::ARGB32) {
+        Assert(!data.original.isNull());
+        paintTransformedStaticContent(
+            data.original,
+            rect,
+            rotation,
+            false);
+        return;
+    }
+    Assert(data.format == Streaming::FrameFormat::YUV420);
+    Assert(!data.yuv420->size.isEmpty());
+    const auto yuv = data.yuv420;
+    _f->glUseProgram(_yuv420Program->programId());
+
+    const auto upload = (_trackFrameIndex != data.index)
+        || (_streamedIndex != _owner->streamedIndex());
+    _trackFrameIndex = data.index;
+    _streamedIndex = _owner->streamedIndex();
+
+    _f->glActiveTexture(GL_TEXTURE0);
+    _textures.bind(*_f, 0);
+    if (upload) {
+        _f->glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+        uploadTexture(
+            GL_RED,
+            GL_RED,
+            yuv->size,
+            _lumaSize,
+            yuv->y.stride,
+            yuv->y.data);
+        _lumaSize = yuv->size;
+        _rgbaSize = QSize();
+    }
+    _f->glActiveTexture(GL_TEXTURE1);
+    _textures.bind(*_f, 1);
+    if (upload) {
+        uploadTexture(
+            GL_RED,
+            GL_RED,
+            yuv->chromaSize,
+            _chromaSize,
+            yuv->u.stride,
+            yuv->u.data);
+    }
+    _f->glActiveTexture(GL_TEXTURE2);
+    _textures.bind(*_f, 2);
+    if (upload) {
+        uploadTexture(
+            GL_RED,
+            GL_RED,
+            yuv->chromaSize,
+            _chromaSize,
+            yuv->v.stride,
+            yuv->v.data);
+        _chromaSize = yuv->chromaSize;
+        _f->glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
+    }
+    _yuv420Program->setUniformValue("y_texture", GLint(0));
+    _yuv420Program->setUniformValue("u_texture", GLint(1));
+    _yuv420Program->setUniformValue("v_texture", GLint(2));
+
+    paintTransformedContent(&*_yuv420Program, rect, rotation);
 }
 
 void OverlayWidget::RendererGL::paintTransformedStaticContent(
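uploadTexture() itself is outside this hunk; for the YUV path it has to upload a single-channel plane whose row length (passed as "stride", in pixels: linesize for the 8-bit planes, bytesPerLine()/4 for RGBA) may exceed the visible width, and reallocate the texture storage only when the plane size changes, which is what _lumaSize/_chromaSize/_rgbaSize are compared against. A sketch of what such a helper might look like, under those assumptions (hypothetical, not the actual implementation):

#include <QtCore/QSize>
#include <QtGui/QOpenGLFunctions>

void UploadPlane(
        QOpenGLFunctions &f,
        GLint internalformat,
        GLenum format,
        QSize size,      // dimensions of the plane being uploaded
        QSize hasSize,   // dimensions currently allocated for this texture
        int stride,      // source row length in pixels, may exceed size.width()
        const void *data) {
    // Rows may be padded, so tell GL how long a source row really is.
    f.glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
    if (hasSize != size) {
        // Size changed: reallocate the texture storage.
        f.glTexImage2D(
            GL_TEXTURE_2D,
            0,
            internalformat,
            size.width(),
            size.height(),
            0,
            format,
            GL_UNSIGNED_BYTE,
            data);
    } else {
        // Same size: just overwrite the existing storage.
        f.glTexSubImage2D(
            GL_TEXTURE_2D,
            0,
            0,
            0,
            size.width(),
            size.height(),
            format,
            GL_UNSIGNED_BYTE,
            data);
    }
    f.glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
}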
@@ -150,7 +249,47 @@ void OverlayWidget::RendererGL::paintTransformedStaticContent(
         QRect rect,
         int rotation,
         bool fillTransparentBackground) {
-    AssertIsDebug(fillTransparentBackground);
+    auto &program = fillTransparentBackground
+        ? _withTransparencyProgram
+        : _imageProgram;
+    _f->glUseProgram(program->programId());
+    if (fillTransparentBackground) {
+        program->setUniformValue(
+            "transparentBg",
+            Uniform(st::mediaviewTransparentBg->c));
+        program->setUniformValue(
+            "transparentFg",
+            Uniform(st::mediaviewTransparentFg->c));
+        program->setUniformValue(
+            "transparentSize",
+            st::transparentPlaceholderSize * _factor);
+    }
+
+    _f->glActiveTexture(GL_TEXTURE0);
+    _textures.bind(*_f, 0);
+    const auto cacheKey = image.cacheKey();
+    const auto upload = (_cacheKey != cacheKey);
+    if (upload) {
+        const auto stride = image.bytesPerLine() / 4;
+        const auto data = image.constBits();
+        uploadTexture(
+            GL_RGBA,
+            GL_RGBA,
+            image.size(),
+            _rgbaSize,
+            stride,
+            data);
+        _rgbaSize = image.size();
+        _lumaSize = QSize();
+    }
+
+    paintTransformedContent(&*program, rect, rotation);
+}
+
+void OverlayWidget::RendererGL::paintTransformedContent(
+        not_null<QOpenGLShaderProgram*> program,
+        QRect rect,
+        int rotation) {
     auto texCoords = std::array<std::array<GLfloat, 2>, 4> { {
         { { 0.f, 1.f } },
         { { 1.f, 1.f } },
@@ -182,30 +321,11 @@ void OverlayWidget::RendererGL::paintTransformedStaticContent(
     _contentBuffer->bind();
     _contentBuffer->write(0, coords, sizeof(coords));
 
-    _f->glUseProgram(_imageProgram->programId());
-    _imageProgram->setUniformValue("viewport", QSizeF(_viewport * _factor));
-    _imageProgram->setUniformValue("s_texture", GLint(0));
-
-    _f->glActiveTexture(GL_TEXTURE0);
-    _textures.bind(*_f, 0);
-    const auto cacheKey = image.cacheKey();
-    const auto upload = (_cacheKey != cacheKey);
-    if (upload) {
-        const auto stride = image.bytesPerLine() / 4;
-        const auto data = image.constBits();
-        uploadTexture(
-            GL_RGBA,
-            GL_RGBA,
-            image.size(),
-            _rgbaSize,
-            stride,
-            data);
-        _rgbaSize = image.size();
-        _ySize = QSize();
-    }
+    program->setUniformValue("viewport", QSizeF(_viewport * _factor));
+    program->setUniformValue("s_texture", GLint(0));
 
     toggleBlending(false);
-    FillTexturedRectangle(*_f, &*_imageProgram);
+    FillTexturedRectangle(*_f, &*program);
 }
 
 void OverlayWidget::RendererGL::uploadTexture(
@@ -310,6 +430,23 @@ void OverlayWidget::RendererGL::paintGroupThumbs(
     }, kGroupThumbsOffset, true);
 }
 
+void OverlayWidget::RendererGL::invalidate() {
+    _trackFrameIndex = -1;
+    _streamedIndex = -1;
+    const auto images = {
+        &_radialImage,
+        &_documentBubbleImage,
+        &_themePreviewImage,
+        &_saveMsgImage,
+        &_footerImage,
+        &_captionImage,
+        &_groupThumbsImage,
+    };
+    for (const auto image : images) {
+        image->setImage(QImage());
+    }
+}
+
 void OverlayWidget::RendererGL::paintUsingRaster(
         Ui::GL::Image &image,
         QRect rect,
@@ -49,6 +49,10 @@ private:
         QRect rect,
         int rotation,
         bool fillTransparentBackground) override;
+    void paintTransformedContent(
+        not_null<QOpenGLShaderProgram*> program,
+        QRect rect,
+        int rotation);
     void paintRadialLoading(
         QRect inner,
         bool radial,
@@ -67,6 +71,8 @@ private:
     void paintCaption(QRect outer, float64 opacity) override;
     void paintGroupThumbs(QRect outer, float64 opacity) override;
 
+    void invalidate() override;
+
     void paintUsingRaster(
         Ui::GL::Image &image,
         QRect rect,
@@ -97,11 +103,16 @@ private:
 
     std::optional<QOpenGLBuffer> _contentBuffer;
     std::optional<QOpenGLShaderProgram> _imageProgram;
-
+    QOpenGLShader *_texturedVertexShader = nullptr;
+    std::optional<QOpenGLShaderProgram> _withTransparencyProgram;
+    std::optional<QOpenGLShaderProgram> _yuv420Program;
     Ui::GL::Textures<3> _textures;
     QSize _rgbaSize;
-    QSize _ySize;
+    QSize _lumaSize;
+    QSize _chromaSize;
     quint64 _cacheKey = 0;
+    int _trackFrameIndex = 0;
+    int _streamedIndex = 0;
 
     Ui::GL::Image _radialImage;
     Ui::GL::Image _documentBubbleImage;
@@ -165,4 +165,7 @@ void OverlayWidget::RendererSW::paintGroupThumbs(
     }
 }
 
+void OverlayWidget::RendererSW::invalidate() {
+}
+
 } // namespace Media::View
@@ -51,6 +51,8 @@ private:
     void paintCaption(QRect outer, float64 opacity) override;
     void paintGroupThumbs(QRect outer, float64 opacity) override;
 
+    void invalidate() override;
+
     const not_null<OverlayWidget*> _owner;
     QBrush _transparentBrush;
 
@@ -38,6 +38,8 @@ public:
     virtual void paintCaption(QRect outer, float64 opacity) = 0;
     virtual void paintGroupThumbs(QRect outer, float64 opacity) = 0;
 
+    virtual void invalidate() = 0;
+
 };
 
 } // namespace Media::View
@@ -555,6 +555,22 @@ QImage OverlayWidget::videoFrame() const {
         : _streamed->instance.info().video.cover;
 }
 
+Streaming::FrameWithInfo OverlayWidget::videoFrameWithInfo() const {
+    Expects(videoShown());
+
+    return _streamed->instance.player().ready()
+        ? _streamed->instance.frameWithInfo()
+        : Streaming::FrameWithInfo{
+            .original = _streamed->instance.info().video.cover,
+            .format = Streaming::FrameFormat::ARGB32,
+            .index = -2,
+        };
+}
+
+int OverlayWidget::streamedIndex() const {
+    return _streamedCreated;
+}
+
 bool OverlayWidget::documentContentShown() const {
     return _document && (!_staticContent.isNull() || videoShown());
 }
@@ -2618,6 +2634,7 @@ bool OverlayWidget::createStreamingObjects() {
         _streamed = nullptr;
         return false;
     }
+    ++_streamedCreated;
    _streamed->instance.setPriority(kOverlayLoaderPriority);
    _streamed->instance.lockPlayer();
    _streamed->withSound = _document
@@ -4429,8 +4446,10 @@ void OverlayWidget::clearBeforeHide() {
     _controlsOpacity = anim::value(1, 1);
     _groupThumbs = nullptr;
     _groupThumbsRect = QRect();
-    if (_streamed) {
-        _streamed->controls.hide();
+    for (const auto child : _widget->children()) {
+        if (child->isWidgetType()) {
+            static_cast<QWidget*>(child)->hide();
+        }
     }
 }
 
@@ -47,6 +47,7 @@ struct TrackState;
 namespace Streaming {
 struct Information;
 struct Update;
+struct FrameWithInfo;
 enum class Error;
 } // namespace Streaming
 } // namespace Media
@@ -397,6 +398,8 @@ private:
     [[nodiscard]] QSize videoSize() const;
     [[nodiscard]] bool videoIsGifOrUserpic() const;
     [[nodiscard]] QImage videoFrame() const;
+    [[nodiscard]] Streaming::FrameWithInfo videoFrameWithInfo() const;
+    [[nodiscard]] int streamedIndex() const;
     [[nodiscard]] QImage transformedShownContent() const;
     [[nodiscard]] QImage transformShownContent(
         QImage content,
@@ -474,6 +477,7 @@ private:
 
     std::unique_ptr<Streamed> _streamed;
     std::unique_ptr<PipWrap> _pip;
+    int _streamedCreated = 0;
     bool _showAsPip = false;
 
     const style::icon *_docIcon = nullptr;