Mirror of https://github.com/AyuGram/AyuGramDesktop.git, synced 2025-04-16 06:07:06 +02:00
Allow sharing screen or window in one-on-one calls.
Commit: c100055fac
Parent: ae30366cbf
20 changed files with 376 additions and 138 deletions
@@ -220,6 +220,7 @@ PRIVATE
    calls/group/calls_choose_join_as.h
    calls/group/calls_group_call.cpp
    calls/group/calls_group_call.h
+   calls/group/calls_group_common.cpp
    calls/group/calls_group_common.h
    calls/group/calls_group_invite_controller.cpp
    calls/group/calls_group_invite_controller.h

@@ -2009,6 +2009,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_call_start_video" = "Start Video";
"lng_call_stop_video" = "Stop Video";
+"lng_call_screencast" = "Screencast";
"lng_call_end_call" = "End Call";
"lng_call_mute_audio" = "Mute";
"lng_call_unmute_audio" = "Unmute";

@@ -173,6 +173,24 @@ callCameraUnmute: CallButton(callMicrophoneUnmute) {
		}
	}
}
+callScreencastOn: CallButton(callMicrophoneMute) {
+	button: IconButton(callButton) {
+		icon: icon {{ "calls/calls_present", callIconFg }};
+		iconPosition: point(-1px, 22px);
+		ripple: RippleAnimation(defaultRippleAnimation) {
+			color: callMuteRipple;
+		}
+	}
+}
+callScreencastOff: CallButton(callMicrophoneUnmute) {
+	button: IconButton(callButton) {
+		icon: icon {{ "calls/calls_present", callIconFgActive }};
+		iconPosition: point(-1px, 22px);
+		ripple: RippleAnimation(defaultRippleAnimation) {
+			color: callIconActiveRipple;
+		}
+	}
+}
callBottomShadowSize: 124px;

CallMuteButton {

@@ -290,16 +290,8 @@ void Call::startIncoming() {
	}).send();
}

-void Call::switchVideoOutgoing() {
-	const auto video = _videoOutgoing->state() == Webrtc::VideoState::Active;
-	_delegate->callRequestPermissionsOrFail(crl::guard(this, [=] {
-		videoOutgoing()->setState(StartVideoState(!video));
-	}), true);
-}
-
void Call::answer() {
-	const auto video = _videoOutgoing->state() == Webrtc::VideoState::Active;
+	const auto video = isSharingVideo();
	_delegate->callRequestPermissionsOrFail(crl::guard(this, [=] {
		actuallyAnswer();
	}), video);
@@ -366,7 +358,9 @@ void Call::setupOutgoingVideo() {
	}
	_videoOutgoing->stateValue(
	) | rpl::start_with_next([=](Webrtc::VideoState state) {
-		if (state != Webrtc::VideoState::Inactive && !hasDevices()) {
+		if (state != Webrtc::VideoState::Inactive
+			&& !hasDevices()
+			&& !_videoCaptureIsScreencast) {
			_errors.fire({ ErrorType::NoCamera });
			_videoOutgoing->setState(Webrtc::VideoState::Inactive);
		} else if (_state.current() != State::Established
@@ -383,7 +377,8 @@ void Call::setupOutgoingVideo() {
		// Paused not supported right now.
		Assert(state == Webrtc::VideoState::Active);
		if (!_videoCapture) {
-			_videoCapture = _delegate->callGetVideoCapture();
+			_videoCapture = _delegate->callGetVideoCapture(
+				_videoCaptureDeviceId);
			_videoCapture->setOutput(_videoOutgoing->sink());
		}
		if (_instance) {
@@ -986,9 +981,12 @@ void Call::setCurrentAudioDevice(bool input, const QString &deviceId) {
	}
}

-void Call::setCurrentVideoDevice(const QString &deviceId) {
-	if (_videoCapture) {
-		_videoCapture->switchToDevice(deviceId.toStdString());
+void Call::setCurrentCameraDevice(const QString &deviceId) {
+	if (!_videoCaptureIsScreencast) {
+		_videoCaptureDeviceId = deviceId;
+		if (_videoCapture) {
+			_videoCapture->switchToDevice(deviceId.toStdString());
+		}
	}
}

@@ -1008,6 +1006,71 @@ void Call::setAudioDuckingEnabled(bool enabled) {
	}
}

+bool Call::isSharingVideo() const {
+	return (_videoOutgoing->state() != Webrtc::VideoState::Inactive);
+}
+
+bool Call::isSharingCamera() const {
+	return !_videoCaptureIsScreencast && isSharingVideo();
+}
+
+bool Call::isSharingScreen() const {
+	return _videoCaptureIsScreencast && isSharingVideo();
+}
+
+QString Call::cameraSharingDeviceId() const {
+	return isSharingCamera() ? _videoCaptureDeviceId : QString();
+}
+
+QString Call::screenSharingDeviceId() const {
+	return isSharingScreen() ? _videoCaptureDeviceId : QString();
+}
+
+void Call::toggleCameraSharing(bool enabled) {
+	if (isSharingCamera() == enabled) {
+		return;
+	} else if (!enabled) {
+		if (_videoCapture) {
+			_videoCapture->setState(tgcalls::VideoState::Inactive);
+		}
+		_videoOutgoing->setState(Webrtc::VideoState::Inactive);
+		_videoCaptureDeviceId = QString();
+		return;
+	}
+	_delegate->callRequestPermissionsOrFail(crl::guard(this, [=] {
+		toggleScreenSharing(std::nullopt);
+		const auto deviceId = Core::App().settings().callVideoInputDeviceId();
+		_videoCaptureDeviceId = deviceId;
+		if (_videoCapture) {
+			_videoCapture->switchToDevice(deviceId.toStdString());
+		}
+		_videoOutgoing->setState(Webrtc::VideoState::Active);
+	}), true);
+}
+
+void Call::toggleScreenSharing(std::optional<QString> uniqueId) {
+	if (!uniqueId) {
+		if (isSharingScreen()) {
+			if (_videoCapture) {
+				_videoCapture->setState(tgcalls::VideoState::Inactive);
+			}
+			_videoOutgoing->setState(Webrtc::VideoState::Inactive);
+		}
+		_videoCaptureDeviceId = QString();
+		_videoCaptureIsScreencast = false;
+		return;
+	} else if (screenSharingDeviceId() == *uniqueId) {
+		return;
+	}
+	toggleCameraSharing(false);
+	_videoCaptureIsScreencast = true;
+	_videoCaptureDeviceId = *uniqueId;
+	if (_videoCapture) {
+		_videoCapture->switchToDevice(uniqueId->toStdString());
+	}
+	_videoOutgoing->setState(Webrtc::VideoState::Active);
+}
+
void Call::finish(FinishType type, const MTPPhoneCallDiscardReason &reason) {
	Expects(type != FinishType::None);

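Note: the functions added above keep camera and screen capture mutually exclusive through one device id (_videoCaptureDeviceId) and one flag (_videoCaptureIsScreencast); enabling either mode first turns the other off. The standalone C++ sketch below models only that state machine; every type and name in it is a simplified stand-in, not the real tdesktop/tgcalls API.

// Standalone sketch of the sharing-state logic added to Call above.
// SharingState and its members are simplified stand-ins.
#include <iostream>
#include <optional>
#include <string>

struct SharingState {
	bool active = false;          // mirrors the outgoing video track state
	bool isScreencast = false;    // mirrors _videoCaptureIsScreencast
	std::string deviceId;         // mirrors _videoCaptureDeviceId

	bool sharingCamera() const { return active && !isScreencast; }
	bool sharingScreen() const { return active && isScreencast; }

	void toggleCamera(bool enabled, const std::string &cameraId) {
		if (sharingCamera() == enabled) {
			return;
		} else if (!enabled) {
			active = false;
			deviceId.clear();
			return;
		}
		toggleScreen(std::nullopt);   // stop any screencast first
		deviceId = cameraId;
		active = true;
	}

	void toggleScreen(std::optional<std::string> uniqueId) {
		if (!uniqueId) {
			if (sharingScreen()) {
				active = false;
			}
			deviceId.clear();
			isScreencast = false;
			return;
		} else if (sharingScreen() && deviceId == *uniqueId) {
			return;
		}
		toggleCamera(false, {});      // stop the camera first
		isScreencast = true;
		deviceId = *uniqueId;
		active = true;
	}
};

int main() {
	SharingState s;
	s.toggleCamera(true, "front-camera");
	s.toggleScreen(std::string("screen-1"));  // camera stops, screen starts
	std::cout << s.sharingCamera() << ' ' << s.sharingScreen() << '\n';  // 0 1
}
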
@@ -77,7 +77,7 @@ public:
		Fn<void()> onSuccess,
		bool video) = 0;

-	virtual auto callGetVideoCapture()
+	virtual auto callGetVideoCapture(const QString &deviceId)
	-> std::shared_ptr<tgcalls::VideoCaptureInterface> = 0;

	virtual ~Delegate() = default;
@@ -174,7 +174,6 @@ public:
	crl::time getDurationMs() const;
	float64 getWaitingSoundPeakValue() const;

-	void switchVideoOutgoing();
	void answer();
	void hangup();
	void redial();
@@ -185,10 +184,22 @@ public:
	QString getDebugLog() const;

	void setCurrentAudioDevice(bool input, const QString &deviceId);
-	void setCurrentVideoDevice(const QString &deviceId);
	//void setAudioVolume(bool input, float level);
	void setAudioDuckingEnabled(bool enabled);

+	void setCurrentCameraDevice(const QString &deviceId);
+	[[nodiscard]] QString videoDeviceId() const {
+		return _videoCaptureDeviceId;
+	}
+
+	[[nodiscard]] bool isSharingVideo() const;
+	[[nodiscard]] bool isSharingCamera() const;
+	[[nodiscard]] bool isSharingScreen() const;
+	[[nodiscard]] QString cameraSharingDeviceId() const;
+	[[nodiscard]] QString screenSharingDeviceId() const;
+	void toggleCameraSharing(bool enabled);
+	void toggleScreenSharing(std::optional<QString> uniqueId);
+
	[[nodiscard]] rpl::lifetime &lifetime() {
		return _lifetime;
	}
@@ -268,6 +279,8 @@ private:

	std::unique_ptr<tgcalls::Instance> _instance;
	std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
+	QString _videoCaptureDeviceId;
+	bool _videoCaptureIsScreencast = false;
	const std::unique_ptr<Webrtc::VideoTrack> _videoIncoming;
	const std::unique_ptr<Webrtc::VideoTrack> _videoOutgoing;

@@ -64,7 +64,7 @@ public:
		Fn<void()> onSuccess,
		bool video) override;
	void callPlaySound(CallSound sound) override;
-	auto callGetVideoCapture()
+	auto callGetVideoCapture(const QString &deviceId)
	-> std::shared_ptr<tgcalls::VideoCaptureInterface> override;

	void groupCallFinished(not_null<GroupCall*> call) override;
@@ -123,9 +123,9 @@ void Instance::Delegate::callPlaySound(CallSound sound) {
	}());
}

-auto Instance::Delegate::callGetVideoCapture()
+auto Instance::Delegate::callGetVideoCapture(const QString &deviceId)
-> std::shared_ptr<tgcalls::VideoCaptureInterface> {
-	return _instance->getVideoCapture();
+	return _instance->getVideoCapture(deviceId);
}

void Instance::Delegate::groupCallFinished(not_null<GroupCall*> call) {
@@ -699,18 +699,22 @@ void Instance::requestPermissionOrFail(Platform::PermissionType type, Fn<void()>
}

std::shared_ptr<tgcalls::VideoCaptureInterface> Instance::getVideoCapture(
-		QString deviceId) {
-	if (deviceId.isEmpty()) {
-		deviceId = Core::App().settings().callVideoInputDeviceId();
-	}
+		std::optional<QString> deviceId) {
	if (auto result = _videoCapture.lock()) {
-		result->switchToDevice(deviceId.toStdString());
+		if (deviceId) {
+			result->switchToDevice((deviceId->isEmpty()
+				? Core::App().settings().callVideoInputDeviceId()
+				: *deviceId).toStdString());
+		}
		return result;
	}
+	const auto startDeviceId = (deviceId && !deviceId->isEmpty())
+		? *deviceId
+		: Core::App().settings().callVideoInputDeviceId();
	auto result = std::shared_ptr<tgcalls::VideoCaptureInterface>(
		tgcalls::VideoCaptureInterface::Create(
			tgcalls::StaticThreads::getThreads(),
-			deviceId.toStdString()));
+			startDeviceId.toStdString()));
	_videoCapture = result;
	return result;
}
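Note: Instance::getVideoCapture() now takes std::optional<QString>: std::nullopt reuses the cached capturer without touching its device, an empty string means "use the saved default", and a concrete id switches to that device; a weak_ptr keeps the capturer cached while some call still owns it. A standalone sketch of that caching pattern follows; all names are simplified stand-ins, not the tdesktop/tgcalls API.

// Standalone sketch of the weak_ptr-cached capturer factory above.
#include <iostream>
#include <memory>
#include <optional>
#include <string>

struct Capturer {
	explicit Capturer(std::string device) : device(std::move(device)) {}
	void switchToDevice(std::string id) { device = std::move(id); }
	std::string device;
};

class CapturerFactory {
public:
	std::shared_ptr<Capturer> get(std::optional<std::string> deviceId) {
		if (auto result = _cache.lock()) {
			if (deviceId) {            // only switch when an id was passed
				result->switchToDevice(
					deviceId->empty() ? _defaultDevice : *deviceId);
			}
			return result;
		}
		const auto startId = (deviceId && !deviceId->empty())
			? *deviceId
			: _defaultDevice;
		auto result = std::make_shared<Capturer>(startId);
		_cache = result;               // cache weakly; callers share ownership
		return result;
	}

private:
	std::string _defaultDevice = "default-camera";
	std::weak_ptr<Capturer> _cache;
};

int main() {
	CapturerFactory factory;
	auto a = factory.get(std::nullopt);              // created with the default
	auto b = factory.get(std::string("screen-1"));   // reuses a, switches device
	std::cout << (a == b) << ' ' << a->device << '\n';  // prints: 1 screen-1
}
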
@@ -75,7 +75,8 @@ public:
	bool activateCurrentCall(const QString &joinHash = QString());
	bool minimizeCurrentActiveCall();
	bool closeCurrentActiveCall();
-	[[nodiscard]] auto getVideoCapture(QString deviceId = QString())
+	[[nodiscard]] auto getVideoCapture(
+		std::optional<QString> deviceId = std::nullopt)
	-> std::shared_ptr<tgcalls::VideoCaptureInterface>;
	void requestPermissionsOrFail(Fn<void()> onSuccess, bool video = true);

@@ -14,6 +14,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "data/data_photo_media.h"
#include "data/data_cloud_file.h"
#include "data/data_changes.h"
+#include "calls/group/calls_group_common.h"
#include "calls/calls_emoji_fingerprint.h"
#include "calls/calls_signal_bars.h"
#include "calls/calls_userpic.h"

@@ -25,6 +26,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "ui/widgets/labels.h"
#include "ui/widgets/shadow.h"
#include "ui/widgets/rp_window.h"
+#include "ui/layers/layer_manager.h"
+#include "ui/layers/generic_box.h"
#include "ui/image/image.h"
#include "ui/text/format_values.h"
#include "ui/wrap/fade_wrap.h"

@@ -44,6 +47,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "window/main_window.h"
#include "app.h"
#include "webrtc/webrtc_video_track.h"
+#include "webrtc/webrtc_media_devices.h"
#include "styles/style_calls.h"
#include "styles/style_chat.h"

@@ -57,6 +61,7 @@ namespace Calls {
Panel::Panel(not_null<Call*> call)
: _call(call)
, _user(call->user())
+, _layerBg(std::make_unique<Ui::LayerManager>(widget()))
#ifndef Q_OS_MAC
, _controls(std::make_unique<Ui::Platform::TitleControls>(
	widget(),

@@ -67,6 +72,7 @@ Panel::Panel(not_null<Call*> call)
, _answerHangupRedial(widget(), st::callAnswer, &st::callHangup)
, _decline(widget(), object_ptr<Ui::CallButton>(widget(), st::callHangup))
, _cancel(widget(), object_ptr<Ui::CallButton>(widget(), st::callCancel))
+, _screencast(widget(), st::callScreencastOn, &st::callScreencastOff)
, _camera(widget(), st::callCameraMute, &st::callCameraUnmute)
, _mute(widget(), st::callMicrophoneMute, &st::callMicrophoneUnmute)
, _name(widget(), st::callName)
@@ -205,9 +211,28 @@ void Panel::initControls() {
			_call->setMuted(!_call->muted());
		}
	});
+	_screencast->setClickedCallback([=] {
+		if (!_call) {
+			return;
+		} else if (!Webrtc::DesktopCaptureAllowed()) {
+			if (auto box = Group::ScreenSharingPrivacyRequestBox()) {
+				_layerBg->showBox(std::move(box));
+			}
+		} else if (const auto source = Webrtc::UniqueDesktopCaptureSource()) {
+			if (_call->isSharingScreen()) {
+				_call->toggleScreenSharing(std::nullopt);
+			} else {
+				chooseSourceAccepted(*source, false);
+			}
+		} else {
+			Group::Ui::DesktopCapture::ChooseSource(this);
+		}
+	});
	_camera->setClickedCallback([=] {
-		if (_call) {
-			_call->switchVideoOutgoing();
+		if (!_call) {
+			return;
+		} else {
+			_call->toggleCameraSharing(!_call->isSharingCamera());
		}
	});

@@ -218,7 +243,8 @@ void Panel::initControls() {
	});
	_updateOuterRippleTimer.setCallback([this] {
		if (_call) {
-			_answerHangupRedial->setOuterValue(_call->getWaitingSoundPeakValue());
+			_answerHangupRedial->setOuterValue(
+				_call->getWaitingSoundPeakValue());
		} else {
			_answerHangupRedial->setOuterValue(0.);
			_updateOuterRippleTimer.cancel();
@@ -260,6 +286,40 @@ void Panel::setIncomingSize(QSize size) {
	showControls();
}

+QWidget *Panel::chooseSourceParent() {
+	return window().get();
+}
+
+QString Panel::chooseSourceActiveDeviceId() {
+	return _call->screenSharingDeviceId();
+}
+
+bool Panel::chooseSourceActiveWithAudio() {
+	return false;// _call->screenSharingWithAudio();
+}
+
+bool Panel::chooseSourceWithAudioSupported() {
+//#ifdef Q_OS_WIN
+//	return true;
+//#else // Q_OS_WIN
+	return false;
+//#endif // Q_OS_WIN
+}
+
+rpl::lifetime &Panel::chooseSourceInstanceLifetime() {
+	return lifetime();
+}
+
+void Panel::chooseSourceAccepted(
+		const QString &deviceId,
+		bool withAudio) {
+	_call->toggleScreenSharing(deviceId/*, withAudio*/);
+}
+
+void Panel::chooseSourceStop() {
+	_call->toggleScreenSharing(std::nullopt);
+}
+
void Panel::refreshIncomingGeometry() {
	Expects(_call != nullptr);
	Expects(_incoming != nullptr);
@@ -332,12 +392,19 @@ void Panel::reinitWithCall(Call *call) {
	}, _callLifetime);

	_call->videoOutgoing()->stateValue(
-	) | rpl::start_with_next([=](Webrtc::VideoState state) {
-		const auto active = (state == Webrtc::VideoState::Active);
-		_camera->setProgress(active ? 0. : 1.);
-		_camera->setText(active
-			? tr::lng_call_stop_video()
-			: tr::lng_call_start_video());
+	) | rpl::start_with_next([=] {
+		{
+			const auto active = _call->isSharingCamera();
+			_camera->setProgress(active ? 0. : 1.);
+			_camera->setText(active
+				? tr::lng_call_stop_video()
+				: tr::lng_call_start_video());
+		}
+		{
+			const auto active = _call->isSharingScreen();
+			_screencast->setProgress(active ? 0. : 1.);
+			_screencast->setText(tr::lng_call_screencast());
+		}
	}, _callLifetime);

	_call->stateValue(
@@ -646,9 +713,11 @@ void Panel::updateControlsGeometry() {
		updateOutgoingVideoBubbleGeometry();
	}

-	auto bothWidth = _answerHangupRedial->width() + st::callCancel.button.width;
-	_decline->moveToLeft((widget()->width() - bothWidth) / 2, _buttonsTop);
-	_cancel->moveToLeft((widget()->width() - bothWidth) / 2, _buttonsTop);
+	auto threeWidth = _answerHangupRedial->width()
+		+ st::callCancel.button.width
+		- _screencast->width();
+	_decline->moveToLeft((widget()->width() - threeWidth) / 2, _buttonsTop);
+	_cancel->moveToLeft((widget()->width() - threeWidth) / 2, _buttonsTop);

	updateHangupGeometry();
}
@@ -670,16 +739,19 @@ void Panel::updateOutgoingVideoBubbleGeometry() {
}

void Panel::updateHangupGeometry() {
-	auto singleWidth = _answerHangupRedial->width();
-	auto bothWidth = singleWidth + st::callCancel.button.width;
-	auto rightFrom = (widget()->width() - bothWidth) / 2;
-	auto rightTo = (widget()->width() - singleWidth) / 2;
+	auto twoWidth = _answerHangupRedial->width() + _screencast->width();
+	auto threeWidth = twoWidth + st::callCancel.button.width;
+	auto rightFrom = (widget()->width() - threeWidth) / 2;
+	auto rightTo = (widget()->width() - twoWidth) / 2;
	auto hangupProgress = _hangupShownProgress.value(_hangupShown ? 1. : 0.);
	auto hangupRight = anim::interpolate(rightFrom, rightTo, hangupProgress);
	_answerHangupRedial->moveToRight(hangupRight, _buttonsTop);
	_answerHangupRedial->setProgress(hangupProgress);
	_mute->moveToRight(hangupRight - _mute->width(), _buttonsTop);
-	_camera->moveToLeft(hangupRight - _mute->width(), _buttonsTop);
+	_screencast->moveToLeft(hangupRight - _mute->width(), _buttonsTop);
+	_camera->moveToLeft(
+		hangupRight - _mute->width() + _screencast->width(),
+		_buttonsTop);
}

void Panel::updateStatusGeometry() {
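Note: updateHangupGeometry() above now centers the hangup/answer button against a two-button width (hangup + screencast) and a three-button width (plus cancel), interpolating between the two as the cancel button shows or hides. The standalone sketch below reproduces only that interpolation; the widths are made-up example values and interpolate() is a stand-in for anim::interpolate.

// Standalone sketch of the layout math in Panel::updateHangupGeometry().
#include <iostream>

int interpolate(int from, int to, double progress) {
	// Linear interpolation rounded to the nearest pixel.
	return from + static_cast<int>((to - from) * progress + 0.5);
}

int main() {
	const int panelWidth = 300;
	const int hangupWidth = 64, screencastWidth = 48, cancelWidth = 64;

	const int twoWidth = hangupWidth + screencastWidth;
	const int threeWidth = twoWidth + cancelWidth;
	const int rightFrom = (panelWidth - threeWidth) / 2;  // cancel still visible
	const int rightTo = (panelWidth - twoWidth) / 2;      // cancel hidden

	for (double progress : {0.0, 0.5, 1.0}) {
		std::cout << "progress " << progress << " -> right offset "
			<< interpolate(rightFrom, rightTo, progress) << '\n';
	}
}
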
@@ -11,6 +11,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "base/timer.h"
#include "base/object_ptr.h"
#include "calls/calls_call.h"
+#include "calls/group/ui/desktop_capture_choose_source.h"
#include "ui/effects/animations.h"
#include "ui/gl/gl_window.h"
#include "ui/rp_widget.h"
@@ -25,6 +26,7 @@ class CloudImageView;
namespace Ui {
class IconButton;
class CallButton;
+class LayerManager;
class FlatLabel;
template <typename Widget>
class FadeWrap;
@@ -50,7 +52,7 @@ class Userpic;
class SignalBars;
class VideoBubble;

-class Panel final {
+class Panel final : private Group::Ui::DesktopCapture::ChooseSourceDelegate {
public:
	Panel(not_null<Call*> call);
	~Panel();
@@ -61,7 +63,17 @@ public:
	void replaceCall(not_null<Call*> call);
	void closeBeforeDestroy();

-	rpl::lifetime &lifetime();
+	QWidget *chooseSourceParent() override;
+	QString chooseSourceActiveDeviceId() override;
+	bool chooseSourceActiveWithAudio() override;
+	bool chooseSourceWithAudioSupported() override;
+	rpl::lifetime &chooseSourceInstanceLifetime() override;
+	void chooseSourceAccepted(
+		const QString &deviceId,
+		bool withAudio) override;
+	void chooseSourceStop() override;
+
+	[[nodiscard]] rpl::lifetime &lifetime();

private:
	class Incoming;
@@ -110,6 +122,7 @@ private:
	not_null<UserData*> _user;

	Ui::GL::Window _window;
+	const std::unique_ptr<Ui::LayerManager> _layerBg;
	std::unique_ptr<Incoming> _incoming;

#ifndef Q_OS_MAC
@@ -128,6 +141,7 @@ private:
	bool _outgoingPreviewInBody = false;
	std::optional<AnswerHangupRedialState> _answerHangupRedialState;
	Ui::Animations::Simple _hangupShownProgress;
+	object_ptr<Ui::CallButton> _screencast;
	object_ptr<Ui::CallButton> _camera;
	object_ptr<Ui::CallButton> _mute;
	object_ptr<Ui::FlatLabel> _name;
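Note: the header change above has Panel privately inherit Group::Ui::DesktopCapture::ChooseSourceDelegate, so the screen-source chooser can call back into the one-on-one call panel through the chooseSource*() overrides. The standalone sketch below shows only that delegate pattern; the interface and all names are simplified stand-ins, not the real tdesktop classes.

// Standalone sketch of a panel privately implementing a chooser delegate.
#include <iostream>
#include <string>

class ChooseSourceDelegate {            // stand-in for the real interface
public:
	virtual std::string activeDeviceId() = 0;
	virtual void accepted(const std::string &deviceId, bool withAudio) = 0;
	virtual void stop() = 0;
	virtual ~ChooseSourceDelegate() = default;
};

class Panel final : private ChooseSourceDelegate {
public:
	void openChooser() { chooseSource(*this); }  // passes itself as delegate

private:
	static void chooseSource(ChooseSourceDelegate &delegate) {
		// The real picker enumerates windows/screens; here we just accept one.
		delegate.accepted("screen-1", /*withAudio=*/false);
	}

	std::string activeDeviceId() override { return _sharedId; }
	void accepted(const std::string &deviceId, bool) override {
		_sharedId = deviceId;
		std::cout << "sharing " << deviceId << '\n';
	}
	void stop() override { _sharedId.clear(); }

	std::string _sharedId;
};

int main() { Panel().openChooser(); }
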
@@ -3049,10 +3049,6 @@ void GroupCall::setCurrentAudioDevice(bool input, const QString &deviceId) {
	}
}

-void GroupCall::setCurrentVideoDevice(const QString &deviceId) {
-	_mediaDevices->switchToVideoInput(deviceId);
-}
-
void GroupCall::toggleMute(const Group::MuteRequest &data) {
	if (data.locallyOnly) {
		applyParticipantLocally(data.peer, data.mute, std::nullopt);
@@ -370,7 +370,6 @@ public:
	}

	void setCurrentAudioDevice(bool input, const QString &deviceId);
-	void setCurrentVideoDevice(const QString &deviceId);
	[[nodiscard]] bool isSharingScreen() const;
	[[nodiscard]] rpl::producer<bool> isSharingScreenValue() const;
	[[nodiscard]] bool isScreenPaused() const;
Telegram/SourceFiles/calls/group/calls_group_common.cpp (new file, 54 lines)
@@ -0,0 +1,54 @@
+/*
+This file is part of Telegram Desktop,
+the official desktop application for the Telegram messaging service.
+
+For license and copyright information please follow this link:
+https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
+*/
+#include "calls/group/calls_group_common.h"
+
+#include "base/platform/base_platform_info.h"
+#include "ui/widgets/labels.h"
+#include "ui/layers/generic_box.h"
+#include "ui/text/text_utilities.h"
+#include "lang/lang_keys.h"
+#include "styles/style_layers.h"
+#include "styles/style_calls.h"
+
+namespace Calls::Group {
+
+object_ptr<Ui::GenericBox> ScreenSharingPrivacyRequestBox() {
+#ifndef Q_OS_MAC
+	if (!Platform::IsMac10_15OrGreater()) {
+		return { nullptr };
+	}
+	const auto requestInputMonitoring = Platform::IsMac10_15OrGreater();
+	return Box([=](not_null<Ui::GenericBox*> box) {
+		box->addRow(
+			object_ptr<Ui::FlatLabel>(
+				box.get(),
+				rpl::combine(
+					tr::lng_group_call_mac_screencast_access(),
+					tr::lng_group_call_mac_recording()
+				) | rpl::map([](QString a, QString b) {
+					auto result = Ui::Text::RichLangValue(a);
+					result.append("\n\n").append(Ui::Text::RichLangValue(b));
+					return result;
+				}),
+				st::groupCallBoxLabel),
+			style::margins(
+				st::boxRowPadding.left(),
+				st::boxPadding.top(),
+				st::boxRowPadding.right(),
+				st::boxPadding.bottom()));
+		box->addButton(tr::lng_group_call_mac_settings(), [=] {
+			//Platform::OpenDesktopCapturePrivacySettings();
+		});
+		box->addButton(tr::lng_cancel(), [=] { box->closeBox(); });
+	});
+#else // Q_OS_MAC
+	return { nullptr };
+#endif // Q_OS_MAC
+}
+
+} // namespace Calls::Group
@@ -7,8 +7,14 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once

+#include "base/object_ptr.h"
+
class UserData;

+namespace Ui {
+class GenericBox;
+} // namespace Ui
+
namespace Calls::Group {

constexpr auto kDefaultVolume = 10000;
@@ -78,4 +84,6 @@ constexpr inline bool is_flag_type(StickedTooltip) {
}
using StickedTooltips = base::flags<StickedTooltip>;

+[[nodiscard]] object_ptr<Ui::GenericBox> ScreenSharingPrivacyRequestBox();
+
} // namespace Calls::Group
@@ -70,10 +70,6 @@ constexpr auto kRecordingOpacity = 0.6;
constexpr auto kStartNoConfirmation = TimeId(10);
constexpr auto kControlsBackgroundOpacity = 0.8;
constexpr auto kOverrideActiveColorBgAlpha = 172;
-constexpr auto kMicrophoneTooltipAfterLoudCount = 3;
-constexpr auto kDropLoudAfterQuietCount = 5;
-constexpr auto kMicrophoneTooltipLevelThreshold = 0.2;
-constexpr auto kMicrophoneTooltipCheckInterval = crl::time(500);

} // namespace

@@ -87,49 +83,6 @@ struct Panel::ControlsBackgroundNarrow {
	Ui::RpWidget blocker;
};

-class Panel::MicLevelTester final {
-public:
-	explicit MicLevelTester(Fn<void()> show);
-
-	[[nodiscard]] bool showTooltip() const;
-
-private:
-	void check();
-
-	Fn<void()> _show;
-	base::Timer _timer;
-	Webrtc::AudioInputTester _tester;
-	int _loudCount = 0;
-	int _quietCount = 0;
-
-};
-
-Panel::MicLevelTester::MicLevelTester(Fn<void()> show)
-: _show(std::move(show))
-, _timer([=] { check(); })
-, _tester(
-	Core::App().settings().callAudioBackend(),
-	Core::App().settings().callInputDeviceId()) {
-	_timer.callEach(kMicrophoneTooltipCheckInterval);
-}
-
-bool Panel::MicLevelTester::showTooltip() const {
-	return (_loudCount >= kMicrophoneTooltipAfterLoudCount);
-}
-
-void Panel::MicLevelTester::check() {
-	const auto level = _tester.getAndResetLevel();
-	if (level >= kMicrophoneTooltipLevelThreshold) {
-		_quietCount = 0;
-		if (++_loudCount >= kMicrophoneTooltipAfterLoudCount) {
-			_show();
-		}
-	} else if (_loudCount > 0 && ++_quietCount >= kDropLoudAfterQuietCount) {
-		_quietCount = 0;
-		_loudCount = 0;
-	}
-}
-
Panel::Panel(not_null<GroupCall*> call)
: _call(call)
, _peer(call->peer())
@@ -1155,35 +1108,9 @@ void Panel::refreshTopButton() {
}

void Panel::screenSharingPrivacyRequest() {
-#ifdef Q_OS_MAC
-	if (!Platform::IsMac10_15OrGreater()) {
-		return;
+	if (auto box = ScreenSharingPrivacyRequestBox()) {
+		_layerBg->showBox(std::move(box));
	}
-	const auto requestInputMonitoring = Platform::IsMac10_15OrGreater();
-	_layerBg->showBox(Box([=](not_null<Ui::GenericBox*> box) {
-		box->addRow(
-			object_ptr<Ui::FlatLabel>(
-				box.get(),
-				rpl::combine(
-					tr::lng_group_call_mac_screencast_access(),
-					tr::lng_group_call_mac_recording()
-				) | rpl::map([](QString a, QString b) {
-					auto result = Ui::Text::RichLangValue(a);
-					result.append("\n\n").append(Ui::Text::RichLangValue(b));
-					return result;
-				}),
-				st::groupCallBoxLabel),
-			style::margins(
-				st::boxRowPadding.left(),
-				st::boxPadding.top(),
-				st::boxRowPadding.right(),
-				st::boxPadding.bottom()));
-		box->addButton(tr::lng_group_call_mac_settings(), [=] {
-			Platform::OpenDesktopCapturePrivacySettings();
-		});
-		box->addButton(tr::lng_cancel(), [=] { box->closeBox(); });
-	}));
-#endif // Q_OS_MAC
}

void Panel::chooseShareScreenSource() {
@@ -63,6 +63,7 @@ class Members;
class Viewport;
enum class PanelMode;
enum class StickedTooltip;
+class MicLevelTester;

class Panel final : private Ui::DesktopCapture::ChooseSourceDelegate {
public:
@@ -94,7 +95,6 @@ private:
		Activated,
		Discarded,
	};
-	class MicLevelTester;

	[[nodiscard]] not_null<Ui::RpWindow*> window() const;
	[[nodiscard]] not_null<Ui::RpWidget*> widget() const;
@@ -53,6 +53,10 @@ namespace Calls::Group {
namespace {

constexpr auto kDelaysCount = 201;
+constexpr auto kMicrophoneTooltipAfterLoudCount = 3;
+constexpr auto kDropLoudAfterQuietCount = 5;
+constexpr auto kMicrophoneTooltipLevelThreshold = 0.2;
+constexpr auto kMicrophoneTooltipCheckInterval = crl::time(500);

#ifdef Q_OS_MAC
constexpr auto kCheckAccessibilityInterval = crl::time(500);
@@ -735,4 +739,31 @@ std::pair<Fn<void()>, rpl::lifetime> ShareInviteLinkAction(
	return { std::move(callback), std::move(lifetime) };
}

+MicLevelTester::MicLevelTester(Fn<void()> show)
+: _show(std::move(show))
+, _timer([=] { check(); })
+, _tester(
+	std::make_unique<Webrtc::AudioInputTester>(
+		Core::App().settings().callAudioBackend(),
+		Core::App().settings().callInputDeviceId())) {
+	_timer.callEach(kMicrophoneTooltipCheckInterval);
+}
+
+bool MicLevelTester::showTooltip() const {
+	return (_loudCount >= kMicrophoneTooltipAfterLoudCount);
+}
+
+void MicLevelTester::check() {
+	const auto level = _tester->getAndResetLevel();
+	if (level >= kMicrophoneTooltipLevelThreshold) {
+		_quietCount = 0;
+		if (++_loudCount >= kMicrophoneTooltipAfterLoudCount) {
+			_show();
+		}
+	} else if (_loudCount > 0 && ++_quietCount >= kDropLoudAfterQuietCount) {
+		_quietCount = 0;
+		_loudCount = 0;
+	}
+}
+
} // namespace Calls::Group
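Note: MicLevelTester moves from calls_group_panel.cpp into the group settings files above. Its check() runs on a 500 ms timer and uses a loud/quiet counter pair, so the tooltip callback fires only after a streak of loud samples and a short quiet gap does not reset the streak. The standalone sketch below reproduces only that hysteresis; the sample values are made up and the level parameter is a stand-in for Webrtc::AudioInputTester::getAndResetLevel().

// Standalone sketch of the loud/quiet hysteresis in MicLevelTester::check().
#include <functional>
#include <iostream>

class MicLevelTester {
public:
	explicit MicLevelTester(std::function<void()> show) : _show(std::move(show)) {}

	// Feed one level sample (the real class polls a timer every 500 ms).
	void check(float level) {
		constexpr auto kAfterLoudCount = 3;
		constexpr auto kDropAfterQuietCount = 5;
		constexpr auto kLevelThreshold = 0.2f;
		if (level >= kLevelThreshold) {
			_quietCount = 0;
			if (++_loudCount >= kAfterLoudCount) {
				_show();
			}
		} else if (_loudCount > 0 && ++_quietCount >= kDropAfterQuietCount) {
			_quietCount = 0;
			_loudCount = 0;
		}
	}

private:
	std::function<void()> _show;
	int _loudCount = 0;
	int _quietCount = 0;
};

int main() {
	MicLevelTester tester([] { std::cout << "show tooltip\n"; });
	// Two loud samples, one quiet sample (does not reset the streak), one loud:
	// the callback fires once the loud streak reaches three.
	for (float level : {0.5f, 0.6f, 0.1f, 0.7f}) {
		tester.check(level);
	}
}
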
@@ -9,6 +9,10 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL

#include "ui/layers/generic_box.h"

+namespace Webrtc {
+class AudioInputTester;
+} // namespace Webrtc
+
namespace Calls {
class GroupCall;
} // namespace Calls
@@ -24,4 +28,21 @@ void SettingsBox(
	Fn<void(object_ptr<Ui::BoxContent>)> showBox,
	Fn<void(QString)> showToast);

+class MicLevelTester final {
+public:
+	explicit MicLevelTester(Fn<void()> show);
+
+	[[nodiscard]] bool showTooltip() const;
+
+private:
+	void check();
+
+	Fn<void()> _show;
+	base::Timer _timer;
+	std::unique_ptr<Webrtc::AudioInputTester> _tester;
+	int _loudCount = 0;
+	int _quietCount = 0;
+
+};
+
} // namespace Calls::Group
@@ -423,7 +423,7 @@ void ChooseSourceProcess::setupPanel() {
			+ rows * st::desktopCaptureSourceSize.height()
			+ (rows - 1) * skips.height()
			+ margins.bottom();
-		_inner->resize(width, std::max(height, innerHeight));
+		_inner->resize(width, innerHeight);
	}, _inner->lifetime());

	if (const auto parent = _delegate->chooseSourceParent()) {
@@ -69,9 +69,9 @@ void Calls::setupContent() {
	if (!cameras.empty()) {
		const auto hasCall = (Core::App().calls().currentCall() != nullptr);

-		auto capturerOwner = Core::App().calls().getVideoCapture();
-		const auto capturer = capturerOwner.get();
-		content->lifetime().add([owner = std::move(capturerOwner)]{});
+		auto capturerOwner = content->lifetime().make_state<
+			std::shared_ptr<tgcalls::VideoCaptureInterface>
+		>();

		const auto track = content->lifetime().make_state<VideoTrack>(
			(hasCall
@@ -102,7 +102,8 @@ void Calls::setupContent() {
		)->addClickHandler([=] {
			const auto &devices = GetVideoInputList();
			const auto options = ranges::views::concat(
-				ranges::views::single(tr::lng_settings_call_device_default(tr::now)),
+				ranges::views::single(
+					tr::lng_settings_call_device_default(tr::now)),
				devices | ranges::views::transform(&VideoInput::name)
			) | ranges::to_vector;
			const auto i = ranges::find(
@@ -117,11 +118,13 @@ void Calls::setupContent() {
			const auto deviceId = option
				? devices[option - 1].id
				: "default";
-			capturer->switchToDevice(deviceId.toStdString());
			Core::App().settings().setCallVideoInputDeviceId(deviceId);
			Core::App().saveSettingsDelayed();
			if (const auto call = Core::App().calls().currentCall()) {
-				call->setCurrentVideoDevice(deviceId);
+				call->setCurrentCameraDevice(deviceId);
			}
+			if (*capturerOwner) {
+				(*capturerOwner)->switchToDevice(deviceId.toStdString());
+			}
		});
		_controller->show(Box([=](not_null<Ui::GenericBox*> box) {
@@ -169,6 +172,19 @@ void Calls::setupContent() {
		}, bubbleWrap->lifetime());

		using namespace rpl::mappers;
+		const auto checkCapturer = [=] {
+			if (*capturerOwner
+				|| Core::App().calls().currentCall()
+				|| Core::App().calls().currentGroupCall()) {
+				return;
+			}
+			*capturerOwner = Core::App().calls().getVideoCapture(
+				Core::App().settings().callVideoInputDeviceId());
+			(*capturerOwner)->setPreferredAspectRatio(0.);
+			track->setState(VideoState::Active);
+			(*capturerOwner)->setState(tgcalls::VideoState::Active);
+			(*capturerOwner)->setOutput(track->sink());
+		};
		rpl::combine(
			Core::App().calls().currentCallValue(),
			Core::App().calls().currentGroupCallValue(),
@@ -177,10 +193,9 @@ void Calls::setupContent() {
			if (has) {
				track->setState(VideoState::Inactive);
				bubbleWrap->resize(bubbleWrap->width(), 0);
+				*capturerOwner = nullptr;
			} else {
-				capturer->setPreferredAspectRatio(0.);
-				track->setState(VideoState::Active);
-				capturer->setOutput(track->sink());
+				crl::on_main(content, checkCapturer);
			}
		}, content->lifetime());

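Note: in the settings camera preview above, the capturer is no longer created eagerly; checkCapturer() builds it only while neither a one-on-one call nor a group call is active, and the preview drops it (*capturerOwner = nullptr) as soon as a call starts. The standalone sketch below models only that lazy-ownership flow; all types and names are simplified stand-ins for the tdesktop/tgcalls objects.

// Standalone sketch of the lazy preview-capturer ownership above.
#include <iostream>
#include <memory>
#include <string>

struct Capturer {
	explicit Capturer(std::string device) { std::cout << "capture " << device << '\n'; }
	~Capturer() { std::cout << "capture released\n"; }
};

struct SettingsPreview {
	std::shared_ptr<Capturer> capturer;  // mirrors *capturerOwner
	bool callActive = false;

	void checkCapturer() {
		if (capturer || callActive) {
			return;                      // already created, or a call owns the camera
		}
		capturer = std::make_shared<Capturer>("default-camera");
	}

	void onCallChanged(bool active) {
		callActive = active;
		if (active) {
			capturer = nullptr;          // hand the device back to the call
		} else {
			checkCapturer();             // re-create the preview afterwards
		}
	}
};

int main() {
	SettingsPreview preview;
	preview.checkCapturer();        // opens the preview capturer
	preview.onCallChanged(true);    // call starts: preview releases the camera
	preview.onCallChanged(false);   // call ends: preview capturer comes back
}
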
@@ -1 +1 @@
-Subproject commit d452ae8ba6fcfc83accb6a1213a15d3e3a301740
+Subproject commit 868f2671fa9f0bf180eef0795d5c31d4ee4e3770