Support three-value VideoState.

John Preston 2020-08-05 16:11:18 +04:00
parent a89634b767
commit 95de762529
5 changed files with 92 additions and 55 deletions
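For reference, a minimal sketch of the three-value video state this commit switches to. The value names (Inactive, Paused, Active) are taken from the mappings in the diff below; the actual webrtc::VideoState enum is declared elsewhere (presumably in one of the submodules bumped at the end of this commit), so the exact definition may differ:

// Sketch only: assumed shape of the enum, not the submodule's actual header.
namespace webrtc {
enum class VideoState {
	Inactive, // no video is sent or shown
	Paused,   // video exists but is not currently active ("Paused not supported right now" for outgoing)
	Active,   // video is running
};
} // namespace webrtc

// The old boolean flag collapses onto the two extreme values, which is what
// the new StartVideoState(bool) helper in this commit does:
//   true  -> webrtc::VideoState::Active
//   false -> webrtc::VideoState::Inactive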

View file

@@ -143,6 +143,11 @@ uint64 ComputeFingerprint(bytes::const_span authKey) {
 	return WrapVersions(tgcalls::Meta::Versions() | ranges::action::reverse);
 }
 
+[[nodiscard]] webrtc::VideoState StartVideoState(bool enabled) {
+	using State = webrtc::VideoState;
+	return enabled ? State::Active : State::Inactive;
+}
+
 } // namespace
 
 Call::Call(
@@ -154,8 +159,8 @@ Call::Call(
 , _user(user)
 , _api(&_user->session().mtp())
 , _type(type)
-, _videoIncoming(std::make_unique<webrtc::VideoTrack>(video))
-, _videoOutgoing(std::make_unique<webrtc::VideoTrack>(video)) {
+, _videoIncoming(std::make_unique<webrtc::VideoTrack>(StartVideoState(video)))
+, _videoOutgoing(std::make_unique<webrtc::VideoTrack>(StartVideoState(video))) {
 	_discardByTimeoutTimer.setCallback([=] { hangup(); });
 
 	if (_type == Type::Outgoing) {
@@ -338,24 +343,26 @@ void Call::setMuted(bool mute) {
 }
 
 void Call::setupOutgoingVideo() {
-	const auto started = _videoOutgoing->enabled();
-	_videoOutgoing->enabledValue(
-	) | rpl::start_with_next([=](bool enabled) {
-		if (_state.current() != State::Established && enabled != started) {
-			_videoOutgoing->setEnabled(started);
-		} else if (enabled) {
+	const auto started = _videoOutgoing->state();
+	_videoOutgoing->stateValue(
+	) | rpl::start_with_next([=](webrtc::VideoState state) {
+		if (_state.current() != State::Established
+			&& state != started
+			&& !_videoCapture) {
+			_videoOutgoing->setState(started);
+		} else if (state != webrtc::VideoState::Inactive) {
+			// Paused not supported right now.
+			Assert(state == webrtc::VideoState::Active);
 			if (!_videoCapture) {
 				_videoCapture = tgcalls::VideoCaptureInterface::Create();
-				_videoCapture->setVideoOutput(_videoOutgoing->sink());
+				_videoCapture->setOutput(_videoOutgoing->sink());
 			}
 			if (_instance) {
-				_instance->requestVideo(_videoCapture);
-			} else {
-				_videoState = VideoState::OutgoingRequested;
+				_instance->setVideoCapture(_videoCapture);
 			}
-			_videoCapture->setIsVideoEnabled(true);
+			_videoCapture->setState(tgcalls::VideoState::Active);
 		} else if (_videoCapture) {
-			_videoCapture->setIsVideoEnabled(false);
+			_videoCapture->setState(tgcalls::VideoState::Inactive);
 		}
 	}, _lifetime);
 }
@@ -568,6 +575,30 @@ bool Call::handleUpdate(const MTPPhoneCall &call) {
 	Unexpected("phoneCall type inside an existing call handleUpdate()");
 }
 
+void Call::updateRemoteMediaState(
+		tgcalls::AudioState audio,
+		tgcalls::VideoState video) {
+	_remoteAudioState = [&] {
+		using From = tgcalls::AudioState;
+		using To = RemoteAudioState;
+		switch (audio) {
+		case From::Active: return To::Active;
+		case From::Muted: return To::Muted;
+		}
+		Unexpected("Audio state in remoteMediaStateUpdated.");
+	}();
+	_videoIncoming->setState([&] {
+		using From = tgcalls::VideoState;
+		using To = webrtc::VideoState;
+		switch (video) {
+		case From::Inactive: return To::Inactive;
+		case From::Paused: return To::Paused;
+		case From::Active: return To::Active;
+		}
+		Unexpected("Video state in remoteMediaStateUpdated.");
+	}());
+}
+
 bool Call::handleSignalingData(
 		const MTPDupdatePhoneCallSignalingData &data) {
 	if (data.vphone_call_id().v != _id || !_instance) {
@@ -685,10 +716,10 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
 		.encryptionKey = tgcalls::EncryptionKey(
 			std::move(encryptionKeyValue),
 			(_type == Type::Outgoing)),
-		.videoCapture = _videoOutgoing->enabled() ? _videoCapture : nullptr,
-		.stateUpdated = [=](tgcalls::State state, tgcalls::VideoState videoState) {
+		.videoCapture = _videoCapture,
+		.stateUpdated = [=](tgcalls::State state) {
			crl::on_main(weak, [=] {
-				handleControllerStateChange(state, videoState);
+				handleControllerStateChange(state);
			});
		},
		.signalBarsUpdated = [=](int count) {
@@ -696,9 +727,9 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
				handleControllerBarCountChange(count);
			});
		},
-		.remoteVideoIsActiveUpdated = [=](bool active) {
+		.remoteMediaStateUpdated = [=](tgcalls::AudioState audio, tgcalls::VideoState video) {
			crl::on_main(weak, [=] {
-				_videoIncoming->setEnabled(active);
+				updateRemoteMediaState(audio, video);
			});
		},
		.signalingDataEmitted = [=](const std::vector<uint8_t> &data) {
@@ -780,22 +811,7 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
 	raw->setAudioOutputDuckingEnabled(settings.callAudioDuckingEnabled());
 }
 
-void Call::handleControllerStateChange(
-		tgcalls::State state,
-		tgcalls::VideoState videoState) {
-	_videoState = [&] {
-		switch (videoState) {
-		case tgcalls::VideoState::Possible: return VideoState::Disabled;
-		case tgcalls::VideoState::OutgoingRequested:
-			return VideoState::OutgoingRequested;
-		case tgcalls::VideoState::IncomingRequested:
-		case tgcalls::VideoState::IncomingRequestedAndActive:
-			return VideoState::IncomingRequested;
-		case tgcalls::VideoState::Active: return VideoState::Enabled;
-		}
-		Unexpected("VideoState value in Call::handleControllerStateChange.");
-	}();
+void Call::handleControllerStateChange(tgcalls::State state) {
 	switch (state) {
 	case tgcalls::State::WaitInit: {
 		DEBUG_LOG(("Call Info: State changed to WaitingInit."));
@@ -976,7 +992,8 @@ void Call::finish(FinishType type, const MTPPhoneCallDiscardReason &reason) {
 	auto duration = getDurationMs() / 1000;
 	auto connectionId = _instance ? _instance->getPreferredRelayId() : 0;
 	_finishByTimeoutTimer.call(kHangupTimeoutMs, [this, finalState] { setState(finalState); });
-	const auto flags = (_videoState.current() == VideoState::Enabled)
+	const auto flags = ((_videoIncoming->state() != webrtc::VideoState::Inactive)
+		|| (_videoOutgoing->state() != webrtc::VideoState::Inactive))
		? MTPphone_DiscardCall::Flag::f_video
		: MTPphone_DiscardCall::Flag(0);
	_api.request(MTPphone_DiscardCall(

View file

@@ -24,9 +24,11 @@ class Instance;
 class VideoCaptureInterface;
 enum class State;
 enum class VideoState;
+enum class AudioState;
 } // namespace tgcalls
 
 namespace webrtc {
+enum class VideoState;
 class VideoTrack;
 } // namespace webrtc
 
@@ -103,17 +105,24 @@ public:
 		return _state.value();
 	}
 
-	enum class VideoState {
-		Disabled,
-		OutgoingRequested,
-		IncomingRequested,
-		Enabled
+	enum class RemoteAudioState {
+		Muted,
+		Active,
 	};
-	[[nodiscard]] VideoState videoState() const {
-		return _videoState.current();
+	[[nodiscard]] RemoteAudioState remoteAudioState() const {
+		return _remoteAudioState.current();
 	}
-	[[nodiscard]] rpl::producer<VideoState> videoStateValue() const {
-		return _videoState.value();
+	[[nodiscard]] auto remoteAudioStateValue() const
+	-> rpl::producer<RemoteAudioState> {
+		return _remoteAudioState.value();
+	}
+	[[nodiscard]] webrtc::VideoState remoteVideoState() const {
+		return _remoteVideoState.current();
+	}
+	[[nodiscard]] auto remoteVideoStateValue() const
+	-> rpl::producer<webrtc::VideoState> {
+		return _remoteVideoState.value();
 	}
 
 	static constexpr auto kSignalBarStarting = -1;
@@ -164,14 +173,17 @@ private:
 	};
 
 	void handleRequestError(const RPCError &error);
 	void handleControllerError(const QString &error);
-	void finish(FinishType type, const MTPPhoneCallDiscardReason &reason = MTP_phoneCallDiscardReasonDisconnect());
+	void finish(
+		FinishType type,
+		const MTPPhoneCallDiscardReason &reason
+			= MTP_phoneCallDiscardReasonDisconnect());
 	void startOutgoing();
 	void startIncoming();
 	void startWaitingTrack();
 	void sendSignalingData(const QByteArray &data);
 	void generateModExpFirst(bytes::const_span randomSeed);
-	void handleControllerStateChange(tgcalls::State state, tgcalls::VideoState videoState);
+	void handleControllerStateChange(tgcalls::State state);
 	void handleControllerBarCountChange(int count);
 	void createAndStartController(const MTPDphoneCall &call);
@@ -190,13 +202,17 @@ private:
 	void destroyController();
 
 	void setupOutgoingVideo();
+	void updateRemoteMediaState(
+		tgcalls::AudioState audio,
+		tgcalls::VideoState video);
 
 	const not_null<Delegate*> _delegate;
 	const not_null<UserData*> _user;
 	MTP::Sender _api;
 	Type _type = Type::Outgoing;
 	rpl::variable<State> _state = State::Starting;
-	rpl::variable<VideoState> _videoState = VideoState::Disabled;
+	rpl::variable<RemoteAudioState> _remoteAudioState = RemoteAudioState::Active;
+	rpl::variable<webrtc::VideoState> _remoteVideoState;
 	FinishType _finishAfterRequestingCall = FinishType::None;
 	bool _answerAfterDhConfigReceived = false;
 	rpl::variable<int> _signalBarCount = kSignalBarStarting;

View file

@@ -361,8 +361,10 @@ void Panel::initControls() {
 	});
 
 	_camera->setClickedCallback([=] {
 		if (_call) {
-			_call->videoOutgoing()->setEnabled(
-				!_call->videoOutgoing()->enabled());
+			_call->videoOutgoing()->setState(
+				(_call->videoOutgoing()->state() == webrtc::VideoState::Active)
+				? webrtc::VideoState::Inactive
+				: webrtc::VideoState::Active);
 		}
 	});
@@ -430,9 +432,11 @@ void Panel::reinitWithCall(Call *call) {
 		_mute->setIconOverride(mute ? &st::callUnmuteIcon : nullptr);
 	}, _callLifetime);
 
-	_call->videoOutgoing()->enabledValue(
-	) | rpl::start_with_next([=](bool enabled) {
-		_camera->setIconOverride(enabled ? nullptr : &st::callNoCameraIcon);
+	_call->videoOutgoing()->stateValue(
+	) | rpl::start_with_next([=](webrtc::VideoState state) {
+		_camera->setIconOverride((state == webrtc::VideoState::Active)
+			? nullptr
+			: &st::callNoCameraIcon);
 	}, _callLifetime);
 
 	_call->stateValue(

@@ -1 +1 @@
-Subproject commit c3ba7d2068c0658468c8e924b6f421beface7ccb
+Subproject commit 13a261d8ed3717e1a2a324b9e13a4e351af8fe83

@@ -1 +1 @@
-Subproject commit 503e551331f45cbccc29cad0bc158f11c85169d3
+Subproject commit 4d13b96b4c4e4be2ada7e460203eea9fecde458d