Convert YUV420 -> ARGB32 in fragment shader.

This commit is contained in:
John Preston 2021-05-24 18:23:07 +04:00
parent 5f393babd6
commit 42baa3e1bc
11 changed files with 194 additions and 75 deletions

View file

@ -134,7 +134,7 @@ Ui::GL::ChosenRenderer Panel::Incoming::chooseRenderer(
}
void Panel::Incoming::paint(QPainter &p, const QRegion &clip, bool opengl) {
const auto data = _track->frameWithInfo();
const auto data = _track->frameWithInfo(true);
const auto &image = data.original;
const auto rotation = data.rotation;
if (image.isNull()) {

View file

@ -110,12 +110,6 @@ constexpr auto kFixLargeVideoDuration = 5 * crl::time(1000);
} // namespace
//GroupCall::VideoTrack::VideoTrack() = default;
//GroupCall::VideoTrack::VideoTrack(VideoTrack &&other) = default;
//GroupCall::VideoTrack &GroupCall::VideoTrack::operator=(
// VideoTrack &&other) = default;
//GroupCall::VideoTrack::~VideoTrack() = default;
//
class GroupCall::LoadPartTask final : public tgcalls::BroadcastPartTask {
public:
LoadPartTask(
@ -372,10 +366,6 @@ GroupCall::GroupCall(
, _joinHash(info.joinHash)
, _id(inputCall.c_inputGroupCall().vid().v)
, _scheduleDate(info.scheduleDate)
, _cameraOutgoing(std::make_unique<Webrtc::VideoTrack>(
Webrtc::VideoState::Inactive))
, _screenOutgoing(std::make_unique<Webrtc::VideoTrack>(
Webrtc::VideoState::Inactive))
, _lastSpokeCheckTimer([=] { checkLastSpoke(); })
, _checkJoinedTimer([=] { checkJoined(); })
, _pushToTalkCancelTimer([=] { pushToTalkCancel(); })
@ -448,15 +438,12 @@ GroupCall::~GroupCall() {
}
bool GroupCall::isSharingScreen() const {
return (_screenOutgoing->state() == Webrtc::VideoState::Active);
return _screenOutgoing
&& (_screenOutgoing->state() == Webrtc::VideoState::Active);
}
rpl::producer<bool> GroupCall::isSharingScreenValue() const {
using namespace rpl::mappers;
return _screenOutgoing->stateValue(
) | rpl::map(
_1 == Webrtc::VideoState::Active
) | rpl::distinct_until_changed();
return _isSharingScreen.value();
}
const std::string &GroupCall::screenSharingEndpoint() const {
@ -464,15 +451,12 @@ const std::string &GroupCall::screenSharingEndpoint() const {
}
bool GroupCall::isSharingCamera() const {
return (_cameraOutgoing->state() == Webrtc::VideoState::Active);
return _cameraOutgoing
&& (_cameraOutgoing->state() == Webrtc::VideoState::Active);
}
rpl::producer<bool> GroupCall::isSharingCameraValue() const {
using namespace rpl::mappers;
return _cameraOutgoing->stateValue(
) | rpl::map(
_1 == Webrtc::VideoState::Active
) | rpl::distinct_until_changed();
return _isSharingCamera.value();
}
const std::string &GroupCall::cameraSharingEndpoint() const {
@ -526,11 +510,17 @@ void GroupCall::subscribeToReal(not_null<Data::GroupCall*> real) {
setScheduledDate(date);
}, _lifetime);
real->participantsReloaded(
) | rpl::start_with_next([=] {
fillActiveVideoEndpoints();
}, _lifetime);
fillActiveVideoEndpoints();
// Postpone creating video tracks, so that we know if Panel
// supports OpenGL and we don't need ARGB32 frames at all.
Ui::PostponeCall(this, [=] {
if (const auto real = lookupReal()) {
real->participantsReloaded(
) | rpl::start_with_next([=] {
fillActiveVideoEndpoints();
}, _lifetime);
fillActiveVideoEndpoints();
}
});
using Update = Data::GroupCall::ParticipantUpdate;
real->participantUpdated(
@ -816,7 +806,8 @@ void GroupCall::markEndpointActive(VideoEndpoint endpoint, bool active) {
endpoint,
VideoTrack{
.track = std::make_unique<Webrtc::VideoTrack>(
Webrtc::VideoState::Active),
Webrtc::VideoState::Active,
_requireARGB32),
.peer = endpoint.peer,
}).first;
addVideoOutput(i->first.id, { i->second.track->sink() });
@ -955,7 +946,8 @@ void GroupCall::rejoin(not_null<PeerData*> as) {
}
void GroupCall::joinLeavePresentation() {
if (_screenOutgoing->state() == Webrtc::VideoState::Active) {
if (_screenOutgoing
&& _screenOutgoing->state() == Webrtc::VideoState::Active) {
rejoinPresentation();
} else {
leavePresentation();
@ -1528,6 +1520,19 @@ void GroupCall::ensureOutgoingVideo() {
}
_videoInited = true;
_cameraOutgoing = std::make_unique<Webrtc::VideoTrack>(
Webrtc::VideoState::Inactive,
_requireARGB32);
_screenOutgoing = std::make_unique<Webrtc::VideoTrack>(
Webrtc::VideoState::Inactive,
_requireARGB32);
using namespace rpl::mappers;
_isSharingCamera = _cameraOutgoing->stateValue(
) | rpl::map(_1 == Webrtc::VideoState::Active);
_isSharingScreen = _screenOutgoing->stateValue(
) | rpl::map(_1 == Webrtc::VideoState::Active);
//static const auto hasDevices = [] {
// return !Webrtc::GetVideoInputList().empty();
//};
@ -2289,7 +2294,8 @@ void GroupCall::sendSelfUpdate(SendUpdateType type) {
MTP_bool(muted() != MuteState::Active),
MTP_int(100000), // volume
MTP_bool(muted() == MuteState::RaisedHand),
MTP_bool(_cameraOutgoing->state() != Webrtc::VideoState::Active)
MTP_bool(!_cameraOutgoing
|| _cameraOutgoing->state() != Webrtc::VideoState::Active)
)).done([=](const MTPUpdates &result) {
_updateMuteRequestId = 0;
_peer->session().api().applyUpdates(result);
@ -2492,6 +2498,10 @@ void GroupCall::pushToTalkCancel() {
}
}
// Called by the Panel once it knows its viewport renderer can consume
// YUV420 frames directly (OpenGL path), so video tracks created from
// now on don't need to produce ARGB32-converted frames.
void GroupCall::setNotRequireARGB32() {
_requireARGB32 = false;
}
auto GroupCall::otherParticipantStateValue() const
-> rpl::producer<Group::ParticipantState> {
return _otherParticipantStateValue.events();

View file

@ -352,6 +352,7 @@ public:
void applyGlobalShortcutChanges();
void pushToTalk(bool pressed, crl::time delay);
void setNotRequireARGB32();
[[nodiscard]] rpl::lifetime &lifetime() {
return _lifetime;
@ -517,6 +518,7 @@ private:
base::has_weak_ptr _instanceGuard;
std::shared_ptr<tgcalls::VideoCaptureInterface> _cameraCapture;
std::unique_ptr<Webrtc::VideoTrack> _cameraOutgoing;
rpl::variable<bool> _isSharingCamera = false;
base::flat_map<std::string, SinkPointer> _pendingVideoOutputs;
rpl::variable<InstanceState> _screenInstanceState
@ -526,9 +528,11 @@ private:
base::has_weak_ptr _screenInstanceGuard;
std::shared_ptr<tgcalls::VideoCaptureInterface> _screenCapture;
std::unique_ptr<Webrtc::VideoTrack> _screenOutgoing;
rpl::variable<bool> _isSharingScreen = false;
QString _screenDeviceId;
bool _videoInited = false;
bool _requireARGB32 = true;
rpl::event_stream<LevelUpdate> _levelUpdates;
rpl::event_stream<VideoEndpoint> _videoStreamActiveUpdates;

View file

@ -421,6 +421,10 @@ Panel::Panel(not_null<GroupCall*> call)
_layerBg->setStyleOverrides(&st::groupCallBox, &st::groupCallLayerBox);
_layerBg->setHideByBackgroundClick(true);
if (!_viewport->requireARGB32()) {
_call->setNotRequireARGB32();
}
SubscribeToMigration(
_peer,
_window->lifetime(),

View file

@ -523,6 +523,10 @@ Ui::GL::ChosenRenderer Viewport::chooseRenderer(
};
}
// Whether the chosen renderer needs ARGB32 frames from the tracks.
// NOTE(review): presumably _freeTextures is only set when the OpenGL
// renderer (which samples YUV420 planes itself) is active — so ARGB32
// is required exactly when we are on the raster path. TODO confirm.
bool Viewport::requireARGB32() const {
return !_freeTextures;
}
int Viewport::fullHeight() const {
return _fullHeight.current();
}

View file

@ -80,6 +80,7 @@ public:
void remove(const VideoEndpoint &endpoint);
void showLarge(const VideoEndpoint &endpoint);
[[nodiscard]] bool requireARGB32() const;
[[nodiscard]] int fullHeight() const;
[[nodiscard]] rpl::producer<int> fullHeightValue() const;
[[nodiscard]] rpl::producer<VideoPinToggle> pinToggled() const;

View file

@ -65,7 +65,7 @@ out vec2 v_texcoord;
};
}
[[nodiscard]] ShaderPart FragmentSampleTexture() {
[[nodiscard]] ShaderPart FragmentSampleARGB32Texture() {
return {
.header = R"(
in vec2 v_texcoord;
@ -78,6 +78,23 @@ uniform sampler2D s_texture;
};
}
// Fragment shader part sampling three single-channel planes (Y, U, V)
// and converting to RGB in-shader, so no CPU-side ARGB32 conversion is
// needed. Coefficients (1.403 / 0.344 / 0.714 / 1.77) are the classic
// full-range YUV->RGB approximation — assumed BT.601-style, matching
// the capture pipeline's colorspace; TODO confirm.
// Expects the callers to bind the planes to units named y_texture,
// u_texture, v_texture and to write the result into `result`.
[[nodiscard]] ShaderPart FragmentSampleYUV420Texture() {
return {
.header = R"(
in vec2 v_texcoord;
uniform sampler2D y_texture;
uniform sampler2D u_texture;
uniform sampler2D v_texture;
)",
.body = R"(
float y = texture(y_texture, v_texcoord).r;
float u = texture(u_texture, v_texcoord).r - 0.5;
float v = texture(v_texture, v_texcoord).r - 0.5;
result = vec4(y + 1.403 * v, y - 0.344 * u - 0.714 * v, y + 1.77 * u, 1);
)",
};
}
[[nodiscard]] ShaderPart VertexViewportTransform() {
return {
.header = R"(
@ -162,15 +179,39 @@ not_null<QOpenGLShader*> MakeShader(
return result;
}
void LinkProgram(
struct Program {
not_null<QOpenGLShader*> vertex;
not_null<QOpenGLShader*> fragment;
};
Program LinkProgram(
not_null<QOpenGLShaderProgram*> program,
const QString &vertexSource,
const QString &fragmentSource) {
MakeShader(program, QOpenGLShader::Vertex, vertexSource);
MakeShader(program, QOpenGLShader::Fragment, fragmentSource);
std::variant<QString, not_null<QOpenGLShader*>> vertex,
std::variant<QString, not_null<QOpenGLShader*>> fragment) {
const auto vertexAsSource = v::is<QString>(vertex);
const auto v = vertexAsSource
? MakeShader(
program,
QOpenGLShader::Vertex,
v::get<QString>(vertex))
: v::get<not_null<QOpenGLShader*>>(vertex);
if (!vertexAsSource) {
program->addShader(v);
}
const auto fragmentAsSource = v::is<QString>(fragment);
const auto f = fragmentAsSource
? MakeShader(
program,
QOpenGLShader::Fragment,
v::get<QString>(fragment))
: v::get<not_null<QOpenGLShader*>>(fragment);
if (!fragmentAsSource) {
program->addShader(f);
}
if (!program->link()) {
LOG(("Shader Link Failed: %1.").arg(program->log()));
}
return { v, f };
}
[[nodiscard]] QVector4D Uniform(const QRect &rect) {
@ -251,18 +292,19 @@ void Viewport::RendererGL::init(
_frameBuffer.emplace();
_frameBuffer->setUsagePattern(QOpenGLBuffer::DynamicDraw);
_frameBuffer->create();
_frameProgram.emplace();
LinkProgram(
&*_frameProgram,
_yuv420Program.emplace();
_frameVertexShader = LinkProgram(
&*_yuv420Program,
VertexShader({
VertexViewportTransform(),
VertexPassTextureCoord(),
}),
FragmentShader({
FragmentSampleTexture(),
FragmentSampleYUV420Texture(),
FragmentFrameColor(),
FragmentRoundCorners(),
}));
})).vertex;
_bgBuffer.emplace();
_bgBuffer->setUsagePattern(QOpenGLBuffer::DynamicDraw);
@ -274,12 +316,28 @@ void Viewport::RendererGL::init(
FragmentShader({ FragmentStaticColor() }));
}
// Lazily creates and links the ARGB32 fragment program, reusing the
// vertex shader already compiled for the YUV420 program in init().
// paintTile() calls this on every ARGB32 frame, so without a guard the
// program would be destroyed, recompiled and re-linked each paint —
// early-return once it exists to link exactly once per GL context.
void Viewport::RendererGL::ensureARGB32Program() {
	Expects(_frameVertexShader != nullptr);

	if (_argb32Program) {
		return;
	}
	_argb32Program.emplace();
	LinkProgram(
		&*_argb32Program,
		_frameVertexShader,
		FragmentShader({
			FragmentSampleARGB32Texture(),
			FragmentFrameColor(),
			FragmentRoundCorners(),
		}));
}
void Viewport::RendererGL::deinit(
not_null<QOpenGLWidget*> widget,
not_null<QOpenGLFunctions*> f) {
_frameBuffer = std::nullopt;
_bgBuffer = std::nullopt;
_frameProgram = std::nullopt;
_frameVertexShader = nullptr;
_argb32Program = std::nullopt;
_yuv420Program = std::nullopt;
_bgProgram = std::nullopt;
for (const auto &tile : _owner->_tiles) {
if (const auto textures = tile->takeTextures()) {
@ -337,9 +395,8 @@ void Viewport::RendererGL::paintTile(
not_null<QOpenGLFunctions*> f,
not_null<VideoTile*> tile) {
const auto track = tile->track();
const auto data = track->frameWithInfo();
const auto &image = data.original;
if (image.isNull()) {
const auto data = track->frameWithInfo(false);
if (data.format == Webrtc::FrameFormat::None) {
return;
}
@ -350,7 +407,7 @@ void Viewport::RendererGL::paintTile(
const auto height = geometry.height();
const auto expand = !_owner->wide()/* && !tile->screencast()*/;
const auto scaled = Media::View::FlipSizeByRotation(
image.size(),
data.yuv420->size,
data.rotation
).scaled(
QSize(width, height),
@ -394,43 +451,79 @@ void Viewport::RendererGL::paintTile(
tile->ensureTexturesCreated(f);
const auto &textures = tile->textures();
const auto upload = (textures.trackIndex != data.index);
if (upload) {
textures.textureIndex = 1 - textures.textureIndex;
}
const auto texture = textures.values[textures.textureIndex];
f->glUseProgram(_frameProgram->programId());
f->glActiveTexture(GL_TEXTURE0);
f->glBindTexture(GL_TEXTURE_2D, texture);
if (upload) {
f->glPixelStorei(GL_UNPACK_ROW_LENGTH, image.bytesPerLine() / 4);
const auto uploadOne = [&](GLint internalformat, GLint format, QSize size, int stride, const void *data) {
f->glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
f->glTexImage2D(
GL_TEXTURE_2D,
0,
GL_RGB,
image.width(),
image.height(),
internalformat,
size.width(),
size.height(),
0,
GL_RGBA,
format,
GL_UNSIGNED_BYTE,
image.constBits());
data);
f->glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
};
if (upload) {
textures.textureIndex = 1 - textures.textureIndex;
}
const auto rgba = (data.format == Webrtc::FrameFormat::ARGB32);
if (rgba) {
ensureARGB32Program();
const auto texture = textures.values[textures.textureIndex];
f->glUseProgram(_argb32Program->programId());
f->glActiveTexture(GL_TEXTURE0);
f->glBindTexture(GL_TEXTURE_2D, texture);
if (upload) {
const auto &image = data.original;
const auto stride = image.bytesPerLine() / 4;
const auto data = image.constBits();
uploadOne(GL_RGB, GL_RGBA, image.size(), stride, data);
}
_argb32Program->setUniformValue("s_texture", GLint(0));
} else {
const auto yuv = data.yuv420;
const auto otherSize = yuv->chromaSize;
const auto textureY = textures.values[textures.textureIndex * 3 + 0];
const auto textureU = textures.values[textures.textureIndex * 3 + 1];
const auto textureV = textures.values[textures.textureIndex * 3 + 2];
f->glUseProgram(_yuv420Program->programId());
f->glActiveTexture(GL_TEXTURE0);
f->glBindTexture(GL_TEXTURE_2D, textureY);
if (upload) {
uploadOne(GL_RED, GL_RED, yuv->size, yuv->y.stride, yuv->y.data);
}
f->glActiveTexture(GL_TEXTURE1);
f->glBindTexture(GL_TEXTURE_2D, textureU);
if (upload) {
uploadOne(GL_RED, GL_RED, otherSize, yuv->u.stride, yuv->u.data);
}
f->glActiveTexture(GL_TEXTURE2);
f->glBindTexture(GL_TEXTURE_2D, textureV);
if (upload) {
uploadOne(GL_RED, GL_RED, otherSize, yuv->v.stride, yuv->v.data);
}
_yuv420Program->setUniformValue("y_texture", GLint(0));
_yuv420Program->setUniformValue("u_texture", GLint(1));
_yuv420Program->setUniformValue("v_texture", GLint(2));
}
tile->track()->markFrameShown();
_frameBuffer->bind();
_frameBuffer->allocate(coords, sizeof(coords));
_frameProgram->setUniformValue("viewport", QSizeF(_viewport));
_frameProgram->setUniformValue("s_texture", GLint(0));
_frameProgram->setUniformValue(
const auto program = rgba ? &*_argb32Program : &*_yuv420Program;
program->setUniformValue("viewport", QSizeF(_viewport));
program->setUniformValue(
"frameBg",
Uniform(st::groupCallMembersBg->c));
_frameProgram->setUniformValue("roundRadius", radius);
_frameProgram->setUniformValue("roundRect", Uniform(geometry));
_frameProgram->setUniformValue("roundBg", Uniform(st::groupCallBg->c));
program->setUniformValue("roundRadius", radius);
program->setUniformValue("roundRect", Uniform(geometry));
program->setUniformValue("roundBg", Uniform(st::groupCallBg->c));
GLint position = _frameProgram->attributeLocation("position");
GLint position = program->attributeLocation("position");
f->glVertexAttribPointer(
position,
2,
@ -440,7 +533,7 @@ void Viewport::RendererGL::paintTile(
nullptr);
f->glEnableVertexAttribArray(position);
GLint texcoord = _frameProgram->attributeLocation("texcoord");
GLint texcoord = program->attributeLocation("texcoord");
f->glVertexAttribPointer(
texcoord,
2,

View file

@ -46,14 +46,17 @@ private:
not_null<VideoTile*> tile);
void freeTextures(not_null<QOpenGLFunctions*> f);
[[nodiscard]] QRect tileGeometry(not_null<VideoTile*> tile) const;
void ensureARGB32Program();
const not_null<Viewport*> _owner;
QSize _viewport;
std::optional<QOpenGLBuffer> _frameBuffer;
std::optional<QOpenGLBuffer> _bgBuffer;
std::optional<QOpenGLShaderProgram> _frameProgram;
std::optional<QOpenGLShaderProgram> _argb32Program;
std::optional<QOpenGLShaderProgram> _yuv420Program;
std::optional<QOpenGLShaderProgram> _bgProgram;
QOpenGLShader *_frameVertexShader = nullptr;
std::vector<GLfloat> _bgTriangles;
std::vector<Textures> _texturesToFree;

View file

@ -58,7 +58,7 @@ void Viewport::Renderer::paintTile(
bool opengl,
QRegion &bg) {
const auto track = tile->track();
const auto data = track->frameWithInfo();
const auto data = track->frameWithInfo(true);
const auto &image = data.original;
const auto rotation = data.rotation;
if (image.isNull()) {

View file

@ -18,12 +18,12 @@ class QOpenGLFunctions;
namespace Calls::Group {
struct Viewport::Textures {
std::array<GLuint, 2> values = { { 0 } };
std::array<GLuint, 6> values = { { 0 } };
mutable int textureIndex = 0;
mutable int trackIndex = -1;
explicit operator bool() const {
return values[0] || values[1];
return (values[0] != 0);
}
};

@ -1 +1 @@
Subproject commit 802145e0de64013b91a0c05e760ea10c0978a973
Subproject commit 42a2041ca7b6c67716c6918c0fab49d4a1e298b2