Parse and serialize video parameters.

This commit is contained in:
John Preston 2021-04-15 17:55:32 +04:00
parent a41b7b62ac
commit a6f379a17a
8 changed files with 251 additions and 9 deletions

View file

@ -380,7 +380,7 @@ void Call::setupOutgoingVideo() {
// Paused not supported right now.
Assert(state == Webrtc::VideoState::Active);
if (!_videoCapture) {
_videoCapture = _delegate->getVideoCapture();
_videoCapture = _delegate->callGetVideoCapture();
_videoCapture->setOutput(_videoOutgoing->sink());
}
if (_instance) {

View file

@ -72,7 +72,7 @@ public:
Fn<void()> onSuccess,
bool video) = 0;
virtual auto getVideoCapture()
virtual auto callGetVideoCapture()
-> std::shared_ptr<tgcalls::VideoCaptureInterface> = 0;
virtual ~Delegate() = default;

View file

@ -27,12 +27,14 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "data/data_session.h"
#include "base/global_shortcuts.h"
#include "base/openssl_help.h"
#include "webrtc/webrtc_video_track.h"
#include "webrtc/webrtc_media_devices.h"
#include "webrtc/webrtc_create_adm.h"
#include <tgcalls/group/GroupInstanceCustomImpl.h>
#include <tgcalls/VideoCaptureInterface.h>
#include <tgcalls/StaticThreads.h>
#include <xxhash.h>
#include <QtCore/QJsonDocument>
#include <QtCore/QJsonObject>
#include <QtCore/QJsonArray>
@ -76,6 +78,11 @@ constexpr auto kPlayConnectingEach = crl::time(1056) + 2 * crl::time(1000);
} // namespace
// Cached per-participant video parameters parsed from the group call
// "params" JSON payload (see ParseVideoParams).
struct VideoParams {
	tgcalls::GroupParticipantDescription description;
	// XXH32 of the raw JSON payload; lets ParseVideoParams skip
	// re-parsing when an unchanged payload arrives again.
	uint32 hash = 0;
};
class GroupCall::LoadPartTask final : public tgcalls::BroadcastPartTask {
public:
LoadPartTask(
@ -129,6 +136,114 @@ private:
return false;
}
// Parses a participant's video parameters from the raw "params" JSON
// payload ("endpoint", "ssrc-groups", "payload-types", "rtp-hdrexts").
//
// json     - raw JSON payload from the server; empty means no video.
// existing - previously parsed value for the same participant, reused
//            (and returned as-is) when the payload hash is unchanged.
//
// Returns nullptr for an empty payload. On parse failure returns a value
// with only the hash filled in, so the same broken payload is not
// re-parsed on every update.
std::shared_ptr<VideoParams> ParseVideoParams(
		const QByteArray &json,
		const std::shared_ptr<VideoParams> &existing) {
	using namespace tgcalls;

	if (json.isEmpty()) {
		return nullptr;
	}
	const auto hash = XXH32(json.data(), json.size(), uint32(0));
	if (existing && existing->hash == hash) {
		// Cache hit: same payload as last time, keep the parsed value.
		return existing;
	}
	const auto data = existing ? existing : std::make_shared<VideoParams>();
	data->hash = hash;

	// When reusing 'existing' with a changed payload the old description
	// must be dropped first: the push_back-s below would otherwise append
	// duplicate source groups / payload types / extensions to it.
	data->description = GroupParticipantDescription();

	auto error = QJsonParseError{ 0, QJsonParseError::NoError };
	const auto document = QJsonDocument::fromJson(json, &error);
	if (error.error != QJsonParseError::NoError) {
		LOG(("API Error: "
			"Failed to parse group call video params, error: %1."
			).arg(error.errorString()));
		return data;
	} else if (!document.isObject()) {
		LOG(("API Error: "
			"Not an object received in group call video params."));
		return data;
	}
	// Small helper: read a string field as std::string (tgcalls types
	// use std::string throughout).
	const auto readString = [](
			const QJsonObject &object,
			const char *key) {
		return object.value(key).toString().toStdString();
	};
	const auto object = document.object();
	data->description.endpointId = readString(object, "endpoint");

	// "ssrc-groups": [{ "semantics": ..., "sources": [ssrc, ...] }, ...]
	const auto ssrcGroups = object.value("ssrc-groups").toArray();
	data->description.videoSourceGroups.reserve(ssrcGroups.size());
	for (const auto &value : ssrcGroups) {
		const auto inner = value.toObject();
		auto sources = std::vector<uint32_t>();
		{
			const auto list = inner.value("sources").toArray();
			sources.reserve(list.size());
			for (const auto &source : list) {
				sources.push_back(uint32_t(source.toDouble()));
			}
		}
		data->description.videoSourceGroups.push_back({
			.ssrcs = std::move(sources),
			.semantics = readString(inner, "semantics"),
		});
	}

	// "payload-types": codec descriptions with feedback types and
	// free-form parameter pairs.
	const auto payloadTypes = object.value("payload-types").toArray();
	data->description.videoPayloadTypes.reserve(payloadTypes.size());
	for (const auto &value : payloadTypes) {
		const auto inner = value.toObject();
		auto types = std::vector<GroupJoinPayloadVideoPayloadFeedbackType>();
		{
			const auto list = inner.value("rtcp-fbs").toArray();
			types.reserve(list.size());
			for (const auto &type : list) {
				const auto inside = type.toObject();
				types.push_back({
					.type = readString(inside, "type"),
					.subtype = readString(inside, "subtype"),
				});
			}
		}
		auto parameters = std::vector<std::pair<std::string, std::string>>();
		{
			// Each array element is an object; flatten all of its
			// key/value pairs into the parameters list.
			const auto list = inner.value("parameters").toArray();
			parameters.reserve(list.size());
			for (const auto &parameter : list) {
				const auto inside = parameter.toObject();
				for (auto i = inside.begin(); i != inside.end(); ++i) {
					parameters.push_back({
						i.key().toStdString(),
						i.value().toString().toStdString(),
					});
				}
			}
		}
		data->description.videoPayloadTypes.push_back({
			.id = uint32_t(inner.value("id").toDouble()),
			.name = readString(inner, "name"),
			.clockrate = uint32_t(inner.value("clockrate").toDouble()),
			.channels = uint32_t(inner.value("channels").toDouble()),
			.feedbackTypes = std::move(types),
			.parameters = std::move(parameters),
		});
	}

	// "rtp-hdrexts": [{ "id": ..., "uri": ... }, ...]
	const auto extensionMap = object.value("rtp-hdrexts").toArray();
	data->description.videoExtensionMap.reserve(extensionMap.size());
	for (const auto &extension : extensionMap) {
		const auto inner = extension.toObject();
		data->description.videoExtensionMap.push_back({
			uint32_t(inner.value("id").toDouble()),
			readString(inner, "uri"),
		});
	}
	return data;
}
GroupCall::LoadPartTask::LoadPartTask(
base::weak_ptr<GroupCall> call,
int64 time,
@ -186,6 +301,8 @@ GroupCall::GroupCall(
, _joinHash(info.joinHash)
, _id(inputCall.c_inputGroupCall().vid().v)
, _scheduleDate(info.scheduleDate)
, _videoOutgoing(std::make_unique<Webrtc::VideoTrack>(
Webrtc::VideoState::Inactive))
, _lastSpokeCheckTimer([=] { checkLastSpoke(); })
, _checkJoinedTimer([=] { checkJoined(); })
, _pushToTalkCancelTimer([=] { pushToTalkCancel(); })
@ -429,6 +546,13 @@ void GroupCall::join(const MTPInputGroupCall &inputCall) {
const auto volumeChanged = was
? (was->volume != now.volume || was->mutedByMe != now.mutedByMe)
: (now.volume != Group::kDefaultVolume || now.mutedByMe);
if (now.videoParams) {
auto participants = std::vector<tgcalls::GroupParticipantDescription>();
participants.push_back(now.videoParams->description);
participants.back().audioSsrc = now.ssrc;
_instance->addParticipants(std::move(participants));
}
if (volumeChanged) {
_instance->setVolume(
now.ssrc,
@ -511,12 +635,80 @@ void GroupCall::rejoin(not_null<PeerData*> as) {
fingerprints.push_back(object);
}
auto extensionMap = QJsonArray();
for (const auto &extension : payload.videoExtensionMap) {
auto object = QJsonObject();
object.insert("id", int64(extension.first));
object.insert(
"uri",
QString::fromStdString(extension.second));
extensionMap.push_back(object);
}
auto payloadTypes = QJsonArray();
for (const auto &type : payload.videoPayloadTypes) {
auto object = QJsonObject();
object.insert("id", int64(type.id));
object.insert("name", QString::fromStdString(type.name));
object.insert("clockrate", int64(type.clockrate));
if (!type.parameters.empty()) {
auto parameters = QJsonObject();
for (const auto &parameter : type.parameters) {
parameters.insert(
QString::fromStdString(parameter.first),
QString::fromStdString(parameter.second));
}
object.insert("parameters", parameters);
}
if (type.name != "rtx") {
object.insert("channels", int64(type.channels));
auto fbs = QJsonArray();
for (const auto &element : type.feedbackTypes) {
auto inner = QJsonObject();
inner.insert(
"type",
QString::fromStdString(element.type));
if (!element.subtype.empty()) {
inner.insert(
"subtype",
QString::fromStdString(element.subtype));
}
fbs.push_back(inner);
}
object.insert("rtcp-fbs", fbs);
}
payloadTypes.push_back(object);
}
auto sourceGroups = QJsonArray();
for (const auto &group : payload.videoSourceGroups) {
auto object = QJsonObject();
object.insert(
"semantics",
QString::fromStdString(group.semantics));
auto list = QJsonArray();
for (const auto source : group.ssrcs) {
list.push_back(int64(source));
}
object.insert("sources", list);
sourceGroups.push_back(object);
}
auto root = QJsonObject();
const auto ssrc = payload.ssrc;
root.insert("ufrag", QString::fromStdString(payload.ufrag));
root.insert("pwd", QString::fromStdString(payload.pwd));
root.insert("fingerprints", fingerprints);
root.insert("ssrc", double(payload.ssrc));
if (!extensionMap.isEmpty()) {
root.insert("rtp-hdrexts", extensionMap);
}
if (!payloadTypes.isEmpty()) {
root.insert("payload-types", payloadTypes);
}
if (!sourceGroups.isEmpty()) {
root.insert("ssrc-groups", sourceGroups);
}
LOG(("Call Info: Join payload received, joining with ssrc: %1."
).arg(ssrc));
@ -895,7 +1087,7 @@ void GroupCall::handlePossibleCreateOrJoinResponse(
const auto candidates = root.value("candidates").toArray();
for (const auto &print : prints) {
const auto object = print.toObject();
payload.fingerprints.push_back(tgcalls::GroupJoinPayloadFingerprint{
payload.fingerprints.push_back({
.hash = readString(object, "hash"),
.setup = readString(object, "setup"),
.fingerprint = readString(object, "fingerprint"),
@ -903,7 +1095,7 @@ void GroupCall::handlePossibleCreateOrJoinResponse(
}
for (const auto &candidate : candidates) {
const auto object = candidate.toObject();
payload.candidates.push_back(tgcalls::GroupJoinResponseCandidate{
payload.candidates.push_back({
.port = readString(object, "port"),
.protocol = readString(object, "protocol"),
.network = readString(object, "network"),
@ -945,7 +1137,9 @@ void GroupCall::addParticipantsToInstance() {
void GroupCall::prepareParticipantForAdding(
const Data::GroupCallParticipant &participant) {
_preparedParticipants.push_back(tgcalls::GroupParticipantDescription());
_preparedParticipants.push_back(participant.videoParams
? participant.videoParams->description
: tgcalls::GroupParticipantDescription());
auto &added = _preparedParticipants.back();
added.audioSsrc = participant.ssrc;
_unresolvedSsrcs.remove(added.audioSsrc);
@ -1142,6 +1336,11 @@ void GroupCall::ensureControllerCreated() {
}
const auto &settings = Core::App().settings();
if (!_videoCapture) {
_videoCapture = _delegate->groupCallGetVideoCapture();
_videoCapture->setOutput(_videoOutgoing->sink());
}
const auto weak = base::make_weak(this);
const auto myLevel = std::make_shared<tgcalls::GroupLevelValue>();
tgcalls::GroupInstanceDescriptor descriptor = {
@ -1170,6 +1369,7 @@ void GroupCall::ensureControllerCreated() {
.initialOutputDeviceId = _audioOutputId.toStdString(),
.createAudioDeviceModule = Webrtc::AudioDeviceModuleCreator(
settings.callAudioBackend()),
.videoCapture = _videoCapture,
.participantDescriptionsRequired = [=](
const std::vector<uint32_t> &ssrcs) {
crl::on_main(weak, [=] {

View file

@ -20,6 +20,7 @@ class GroupInstanceCustomImpl;
struct GroupLevelsUpdate;
struct GroupNetworkState;
struct GroupParticipantDescription;
class VideoCaptureInterface;
} // namespace tgcalls
namespace base {
@ -29,6 +30,7 @@ class GlobalShortcutValue;
namespace Webrtc {
class MediaDevices;
class VideoTrack;
} // namespace Webrtc
namespace Data {
@ -72,6 +74,12 @@ struct LevelUpdate {
bool me = false;
};
struct VideoParams;
[[nodiscard]] std::shared_ptr<VideoParams> ParseVideoParams(
const QByteArray &json,
const std::shared_ptr<VideoParams> &existing);
class GroupCall final : public base::has_weak_ptr {
public:
class Delegate {
@ -90,6 +98,8 @@ public:
Ended,
};
virtual void groupCallPlaySound(GroupCallSound sound) = 0;
virtual auto groupCallGetVideoCapture()
-> std::shared_ptr<tgcalls::VideoCaptureInterface> = 0;
};
using GlobalShortcutManager = base::GlobalShortcutManager;
@ -336,6 +346,8 @@ private:
mtpRequestId _updateMuteRequestId = 0;
std::unique_ptr<tgcalls::GroupInstanceCustomImpl> _instance;
std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
const std::unique_ptr<Webrtc::VideoTrack> _videoOutgoing;
rpl::event_stream<LevelUpdate> _levelUpdates;
base::flat_map<uint32, Data::LastSpokeTimes> _lastSpoke;
rpl::event_stream<Group::RejoinEvent> _rejoinEvents;

View file

@ -592,6 +592,14 @@ void Instance::requestPermissionOrFail(Platform::PermissionType type, Fn<void()>
}
}
std::shared_ptr<tgcalls::VideoCaptureInterface> Instance::callGetVideoCapture() {
return getVideoCapture();
}
std::shared_ptr<tgcalls::VideoCaptureInterface> Instance::groupCallGetVideoCapture() {
return getVideoCapture();
}
std::shared_ptr<tgcalls::VideoCaptureInterface> Instance::getVideoCapture() {
if (auto result = _videoCapture.lock()) {
return result;

View file

@ -69,8 +69,7 @@ public:
bool activateCurrentCall(const QString &joinHash = QString());
bool minimizeCurrentActiveCall();
bool closeCurrentActiveCall();
auto getVideoCapture()
-> std::shared_ptr<tgcalls::VideoCaptureInterface> override;
std::shared_ptr<tgcalls::VideoCaptureInterface> getVideoCapture();
void requestPermissionsOrFail(Fn<void()> onSuccess, bool video = true);
void setCurrentAudioDevice(bool input, const QString &deviceId);
@ -103,6 +102,8 @@ private:
requestPermissionsOrFail(std::move(onSuccess), video);
}
void callPlaySound(CallSound sound) override;
auto callGetVideoCapture()
->std::shared_ptr<tgcalls::VideoCaptureInterface> override;
void groupCallFinished(not_null<GroupCall*> call) override;
void groupCallFailed(not_null<GroupCall*> call) override;
@ -110,6 +111,8 @@ private:
requestPermissionsOrFail(std::move(onSuccess), false);
}
void groupCallPlaySound(GroupCallSound sound) override;
auto groupCallGetVideoCapture()
->std::shared_ptr<tgcalls::VideoCaptureInterface> override;
void createCall(not_null<UserData*> user, Call::Type type, bool video);
void destroyCall(not_null<Call*> call);

View file

@ -343,7 +343,7 @@ void GroupCall::applyLocalUpdate(
const MTPDupdateGroupCallParticipants &update) {
applyParticipantsSlice(
update.vparticipants().v,
ApplySliceSource::UpdateReceived);
ApplySliceSource::UpdateConstructed);
}
void GroupCall::applyEnqueuedUpdate(const MTPUpdate &update) {
@ -529,8 +529,21 @@ void GroupCall::applyParticipantsSlice(
&& (!was || was->onlyMinLoaded);
const auto raisedHandRating
= data.vraise_hand_rating().value_or_empty();
const auto hasVideoParamsInformation = (sliceSource
!= ApplySliceSource::UpdateConstructed);
const auto value = Participant{
.peer = participantPeer,
.videoParams = (hasVideoParamsInformation
? Calls::ParseVideoParams(
(data.vparams()
? data.vparams()->c_dataJSON().vdata().v
: QByteArray()),
(i != end(_participants)
? i->videoParams
: nullptr))
: (i != end(_participants))
? i->videoParams
: nullptr),
.date = data.vdate().v,
.lastActive = lastActive,
.raisedHandRating = raisedHandRating,

View file

@ -13,6 +13,10 @@ class PeerData;
class ApiWrap;
namespace Calls {
struct VideoParams;
} // namespace Calls
namespace Data {
struct LastSpokeTimes {
@ -22,6 +26,7 @@ struct LastSpokeTimes {
struct GroupCallParticipant {
not_null<PeerData*> peer;
std::shared_ptr<Calls::VideoParams> videoParams;
TimeId date = 0;
TimeId lastActive = 0;
uint64 raisedHandRating = 0;
@ -131,6 +136,7 @@ private:
SliceLoaded,
UnknownLoaded,
UpdateReceived,
UpdateConstructed,
};
enum class QueuedType : uint8 {
VersionedParticipant,