Mirror of https://github.com/AyuGram/AyuGramDesktop.git (synced 2025-04-16 06:07:06 +02:00)
Support three-value VideoState.
commit 95de762529 (parent a89634b767)
5 changed files with 92 additions and 55 deletions
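The change drops the call's old four-value Call::VideoState (Disabled / OutgoingRequested / IncomingRequested / Enabled) and the bool "enabled" flag on video tracks in favour of the three-value webrtc::VideoState (Inactive / Paused / Active); Paused is accepted from the remote peer but, per the "Paused not supported right now" assert below, never produced locally. A minimal, self-contained sketch of the two conversions the diff introduces, with the enum definitions assumed from what the diff shows rather than taken from the real headers:

// A sketch only: enumerator sets are copied from the diff, and the namespaces
// carry a _sketch suffix to make clear they are stand-ins for the real
// webrtc::VideoState / tgcalls::VideoState types.
#include <stdexcept>

namespace webrtc_sketch {
enum class VideoState { Inactive, Paused, Active };
} // namespace webrtc_sketch

namespace tgcalls_sketch {
enum class VideoState { Inactive, Paused, Active };
} // namespace tgcalls_sketch

// Mirrors the new StartVideoState() helper: the constructor's bool "video"
// argument collapses onto the two extremes of the three-value enum.
[[nodiscard]] webrtc_sketch::VideoState StartVideoState(bool enabled) {
	using State = webrtc_sketch::VideoState;
	return enabled ? State::Active : State::Inactive;
}

// Mirrors the lambda in Call::updateRemoteMediaState(): an exhaustive switch
// from the tgcalls value to the webrtc one, failing loudly (the real code
// calls Unexpected()) if an unknown enumerator ever appears.
[[nodiscard]] webrtc_sketch::VideoState FromTgcalls(tgcalls_sketch::VideoState video) {
	using From = tgcalls_sketch::VideoState;
	using To = webrtc_sketch::VideoState;
	switch (video) {
	case From::Inactive: return To::Inactive;
	case From::Paused: return To::Paused;
	case From::Active: return To::Active;
	}
	throw std::logic_error("Video state in remoteMediaStateUpdated.");
}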
@@ -143,6 +143,11 @@ uint64 ComputeFingerprint(bytes::const_span authKey) {
 	return WrapVersions(tgcalls::Meta::Versions() | ranges::action::reverse);
 }
 
+[[nodiscard]] webrtc::VideoState StartVideoState(bool enabled) {
+	using State = webrtc::VideoState;
+	return enabled ? State::Active : State::Inactive;
+}
+
 } // namespace
 
 Call::Call(
@@ -154,8 +159,8 @@ Call::Call(
 , _user(user)
 , _api(&_user->session().mtp())
 , _type(type)
-, _videoIncoming(std::make_unique<webrtc::VideoTrack>(video))
-, _videoOutgoing(std::make_unique<webrtc::VideoTrack>(video)) {
+, _videoIncoming(std::make_unique<webrtc::VideoTrack>(StartVideoState(video)))
+, _videoOutgoing(std::make_unique<webrtc::VideoTrack>(StartVideoState(video))) {
 	_discardByTimeoutTimer.setCallback([=] { hangup(); });
 
 	if (_type == Type::Outgoing) {
@@ -338,24 +343,26 @@ void Call::setMuted(bool mute) {
 }
 
 void Call::setupOutgoingVideo() {
-	const auto started = _videoOutgoing->enabled();
-	_videoOutgoing->enabledValue(
-	) | rpl::start_with_next([=](bool enabled) {
-		if (_state.current() != State::Established && enabled != started) {
-			_videoOutgoing->setEnabled(started);
-		} else if (enabled) {
+	const auto started = _videoOutgoing->state();
+	_videoOutgoing->stateValue(
+	) | rpl::start_with_next([=](webrtc::VideoState state) {
+		if (_state.current() != State::Established
+			&& state != started
+			&& !_videoCapture) {
+			_videoOutgoing->setState(started);
+		} else if (state != webrtc::VideoState::Inactive) {
+			// Paused not supported right now.
+			Assert(state == webrtc::VideoState::Active);
 			if (!_videoCapture) {
 				_videoCapture = tgcalls::VideoCaptureInterface::Create();
-				_videoCapture->setVideoOutput(_videoOutgoing->sink());
+				_videoCapture->setOutput(_videoOutgoing->sink());
 			}
 			if (_instance) {
-				_instance->requestVideo(_videoCapture);
-			} else {
-				_videoState = VideoState::OutgoingRequested;
+				_instance->setVideoCapture(_videoCapture);
 			}
-			_videoCapture->setIsVideoEnabled(true);
+			_videoCapture->setState(tgcalls::VideoState::Active);
 		} else if (_videoCapture) {
-			_videoCapture->setIsVideoEnabled(false);
+			_videoCapture->setState(tgcalls::VideoState::Inactive);
 		}
 	}, _lifetime);
 }
@@ -568,6 +575,30 @@ bool Call::handleUpdate(const MTPPhoneCall &call) {
 	Unexpected("phoneCall type inside an existing call handleUpdate()");
 }
 
+void Call::updateRemoteMediaState(
+		tgcalls::AudioState audio,
+		tgcalls::VideoState video) {
+	_remoteAudioState = [&] {
+		using From = tgcalls::AudioState;
+		using To = RemoteAudioState;
+		switch (audio) {
+		case From::Active: return To::Active;
+		case From::Muted: return To::Muted;
+		}
+		Unexpected("Audio state in remoteMediaStateUpdated.");
+	}();
+	_videoIncoming->setState([&] {
+		using From = tgcalls::VideoState;
+		using To = webrtc::VideoState;
+		switch (video) {
+		case From::Inactive: return To::Inactive;
+		case From::Paused: return To::Paused;
+		case From::Active: return To::Active;
+		}
+		Unexpected("Video state in remoteMediaStateUpdated.");
+	}());
+}
+
 bool Call::handleSignalingData(
 		const MTPDupdatePhoneCallSignalingData &data) {
 	if (data.vphone_call_id().v != _id || !_instance) {
@@ -685,10 +716,10 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
 		.encryptionKey = tgcalls::EncryptionKey(
 			std::move(encryptionKeyValue),
 			(_type == Type::Outgoing)),
-		.videoCapture = _videoOutgoing->enabled() ? _videoCapture : nullptr,
-		.stateUpdated = [=](tgcalls::State state, tgcalls::VideoState videoState) {
+		.videoCapture = _videoCapture,
+		.stateUpdated = [=](tgcalls::State state) {
 			crl::on_main(weak, [=] {
-				handleControllerStateChange(state, videoState);
+				handleControllerStateChange(state);
 			});
 		},
 		.signalBarsUpdated = [=](int count) {
@@ -696,9 +727,9 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
 				handleControllerBarCountChange(count);
 			});
 		},
-		.remoteVideoIsActiveUpdated = [=](bool active) {
+		.remoteMediaStateUpdated = [=](tgcalls::AudioState audio, tgcalls::VideoState video) {
 			crl::on_main(weak, [=] {
-				_videoIncoming->setEnabled(active);
+				updateRemoteMediaState(audio, video);
 			});
 		},
 		.signalingDataEmitted = [=](const std::vector<uint8_t> &data) {
@@ -780,22 +811,7 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
 	raw->setAudioOutputDuckingEnabled(settings.callAudioDuckingEnabled());
 }
 
-void Call::handleControllerStateChange(
-		tgcalls::State state,
-		tgcalls::VideoState videoState) {
-	_videoState = [&] {
-		switch (videoState) {
-		case tgcalls::VideoState::Possible: return VideoState::Disabled;
-		case tgcalls::VideoState::OutgoingRequested:
-			return VideoState::OutgoingRequested;
-		case tgcalls::VideoState::IncomingRequested:
-		case tgcalls::VideoState::IncomingRequestedAndActive:
-			return VideoState::IncomingRequested;
-		case tgcalls::VideoState::Active: return VideoState::Enabled;
-		}
-		Unexpected("VideoState value in Call::handleControllerStateChange.");
-	}();
-
+void Call::handleControllerStateChange(tgcalls::State state) {
 	switch (state) {
 	case tgcalls::State::WaitInit: {
 		DEBUG_LOG(("Call Info: State changed to WaitingInit."));
@@ -976,7 +992,8 @@ void Call::finish(FinishType type, const MTPPhoneCallDiscardReason &reason) {
 	auto duration = getDurationMs() / 1000;
 	auto connectionId = _instance ? _instance->getPreferredRelayId() : 0;
 	_finishByTimeoutTimer.call(kHangupTimeoutMs, [this, finalState] { setState(finalState); });
-	const auto flags = (_videoState.current() == VideoState::Enabled)
+	const auto flags = ((_videoIncoming->state() != webrtc::VideoState::Inactive)
+		|| (_videoOutgoing->state() != webrtc::VideoState::Inactive))
 		? MTPphone_DiscardCall::Flag::f_video
 		: MTPphone_DiscardCall::Flag(0);
 	_api.request(MTPphone_DiscardCall(

@@ -24,9 +24,11 @@ class Instance;
 class VideoCaptureInterface;
 enum class State;
 enum class VideoState;
+enum class AudioState;
 } // namespace tgcalls
 
 namespace webrtc {
+enum class VideoState;
 class VideoTrack;
 } // namespace webrtc
 
@@ -103,17 +105,24 @@ public:
 		return _state.value();
 	}
 
-	enum class VideoState {
-		Disabled,
-		OutgoingRequested,
-		IncomingRequested,
-		Enabled
+	enum class RemoteAudioState {
+		Muted,
+		Active,
 	};
-	[[nodiscard]] VideoState videoState() const {
-		return _videoState.current();
+	[[nodiscard]] RemoteAudioState remoteAudioState() const {
+		return _remoteAudioState.current();
 	}
-	[[nodiscard]] rpl::producer<VideoState> videoStateValue() const {
-		return _videoState.value();
+	[[nodiscard]] auto remoteAudioStateValue() const
+	-> rpl::producer<RemoteAudioState> {
+		return _remoteAudioState.value();
+	}
+
+	[[nodiscard]] webrtc::VideoState remoteVideoState() const {
+		return _remoteVideoState.current();
+	}
+	[[nodiscard]] auto remoteVideoStateValue() const
+	-> rpl::producer<webrtc::VideoState> {
+		return _remoteVideoState.value();
 	}
 
 	static constexpr auto kSignalBarStarting = -1;
@@ -164,14 +173,17 @@ private:
 	};
 	void handleRequestError(const RPCError &error);
 	void handleControllerError(const QString &error);
-	void finish(FinishType type, const MTPPhoneCallDiscardReason &reason = MTP_phoneCallDiscardReasonDisconnect());
+	void finish(
+		FinishType type,
+		const MTPPhoneCallDiscardReason &reason
+			= MTP_phoneCallDiscardReasonDisconnect());
 	void startOutgoing();
 	void startIncoming();
 	void startWaitingTrack();
 	void sendSignalingData(const QByteArray &data);
 
 	void generateModExpFirst(bytes::const_span randomSeed);
-	void handleControllerStateChange(tgcalls::State state, tgcalls::VideoState videoState);
+	void handleControllerStateChange(tgcalls::State state);
 	void handleControllerBarCountChange(int count);
 	void createAndStartController(const MTPDphoneCall &call);
 
@@ -190,13 +202,17 @@ private:
 	void destroyController();
 
 	void setupOutgoingVideo();
+	void updateRemoteMediaState(
+		tgcalls::AudioState audio,
+		tgcalls::VideoState video);
 
 	const not_null<Delegate*> _delegate;
 	const not_null<UserData*> _user;
 	MTP::Sender _api;
 	Type _type = Type::Outgoing;
 	rpl::variable<State> _state = State::Starting;
-	rpl::variable<VideoState> _videoState = VideoState::Disabled;
+	rpl::variable<RemoteAudioState> _remoteAudioState = RemoteAudioState::Active;
+	rpl::variable<webrtc::VideoState> _remoteVideoState;
 	FinishType _finishAfterRequestingCall = FinishType::None;
 	bool _answerAfterDhConfigReceived = false;
 	rpl::variable<int> _signalBarCount = kSignalBarStarting;

@@ -361,8 +361,10 @@ void Panel::initControls() {
 	});
 	_camera->setClickedCallback([=] {
 		if (_call) {
-			_call->videoOutgoing()->setEnabled(
-				!_call->videoOutgoing()->enabled());
+			_call->videoOutgoing()->setState(
+				(_call->videoOutgoing()->state() == webrtc::VideoState::Active)
+				? webrtc::VideoState::Inactive
+				: webrtc::VideoState::Active);
 		}
 	});
 
@@ -430,9 +432,11 @@ void Panel::reinitWithCall(Call *call) {
 		_mute->setIconOverride(mute ? &st::callUnmuteIcon : nullptr);
 	}, _callLifetime);
 
-	_call->videoOutgoing()->enabledValue(
-	) | rpl::start_with_next([=](bool enabled) {
-		_camera->setIconOverride(enabled ? nullptr : &st::callNoCameraIcon);
+	_call->videoOutgoing()->stateValue(
+	) | rpl::start_with_next([=](webrtc::VideoState state) {
+		_camera->setIconOverride((state == webrtc::VideoState::Active)
+			? nullptr
+			: &st::callNoCameraIcon);
 	}, _callLifetime);
 
 	_call->stateValue(

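For reference, the Panel-side change above boils down to a two-state toggle over the new enum plus an icon choice keyed on Active; a hypothetical, self-contained sketch (a local VideoState stand-in, not the real webrtc type):

#include <cstdio>

enum class VideoState { Inactive, Paused, Active };

// The camera button flips between Inactive and Active; Paused is never
// produced locally ("Paused not supported right now").
VideoState ToggleCamera(VideoState current) {
	return (current == VideoState::Active)
		? VideoState::Inactive
		: VideoState::Active;
}

// The "no camera" overlay icon is shown for every state except Active.
bool ShowNoCameraIcon(VideoState state) {
	return state != VideoState::Active;
}

int main() {
	auto state = VideoState::Inactive;
	state = ToggleCamera(state); // Inactive -> Active
	std::printf("no-camera icon: %s\n", ShowNoCameraIcon(state) ? "shown" : "hidden");
}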
Telegram/ThirdParty/tgcalls (vendored)
@@ -1 +1 @@
-Subproject commit c3ba7d2068c0658468c8e924b6f421beface7ccb
+Subproject commit 13a261d8ed3717e1a2a324b9e13a4e351af8fe83

Telegram/ThirdParty/tgcalls (vendored)
@@ -1 +1 @@
-Subproject commit 503e551331f45cbccc29cad0bc158f11c85169d3
+Subproject commit 4d13b96b4c4e4be2ada7e460203eea9fecde458d